commit bb49f7d857

@@ -40,3 +40,10 @@ jobs:
echo running with mock downloader
./gradlew check --stacktrace -Ddownloader=MOCK
fi

- name: Upload test reports when failure occurs
uses: actions/upload-artifact@v3
if: failure()
with:
name: NewPipeExtractor-test-reports
path: extractor/build/reports/tests/test/**

@@ -30,6 +30,7 @@ allprojects {
nanojsonVersion = "1d9e1aea9049fc9f85e68b43ba39fe7be1c1f751"
spotbugsVersion = "4.6.0"
junitVersion = "5.8.2"
checkstyleVersion = "9.3" // do not use latest version (10.0) as it requires compile JDK 11
}
}

@@ -0,0 +1,189 @@
<?xml version="1.0"?>
<!DOCTYPE module PUBLIC
"-//Checkstyle//DTD Checkstyle Configuration 1.3//EN"
"https://checkstyle.org/dtds/configuration_1_3.dtd">
<module name="Checker">
<!--
If you set the basedir property below, then all reported file
names will be relative to the specified directory. See
https://checkstyle.org/5.x/config.html#Checker

<property name="basedir" value="${basedir}"/>
-->
<property name="severity" value="error"/>

<property name="fileExtensions" value="java, properties, xml"/>

<!-- Excludes all 'module-info.java' files -->
<!-- See https://checkstyle.org/config_filefilters.html -->
<module name="BeforeExecutionExclusionFileFilter">
<property name="fileNamePattern" value="module\-info\.java$"/>
</module>

<!-- https://checkstyle.org/config_filters.html#SuppressionFilter -->
<module name="SuppressionFilter">
<property name="file" value="${config_loc}/suppressions.xml" />
<property name="optional" value="true"/>
</module>

<!-- Checks that a package-info.java file exists for each package. -->
<!-- See https://checkstyle.org/config_javadoc.html#JavadocPackage -->
<!--<module name="JavadocPackage"/>-->

<!-- Checks whether files end with a new line. -->
<!-- See https://checkstyle.org/config_misc.html#NewlineAtEndOfFile -->
<module name="NewlineAtEndOfFile"/>

<!-- Checks that property files contain the same keys. -->
<!-- See https://checkstyle.org/config_misc.html#Translation -->
<module name="Translation"/>

<!-- Checks for Size Violations. -->
<!-- See https://checkstyle.org/config_sizes.html -->
<module name="FileLength"/>
<module name="LineLength">
<property name="max" value="100"/>
<property name="fileExtensions" value="java"/>
</module>

<!-- Checks for whitespace -->
<!-- See https://checkstyle.org/config_whitespace.html -->
<module name="FileTabCharacter"/>

<!-- Miscellaneous other checks. -->
<!-- See https://checkstyle.org/config_misc.html -->
<module name="RegexpSingleline">
<property name="format" value="\s+$"/>
<property name="minimum" value="0"/>
<property name="maximum" value="0"/>
<property name="message" value="Line has trailing spaces."/>
</module>

<!-- Checks for Headers -->
<!-- See https://checkstyle.org/config_header.html -->
<!-- <module name="Header"> -->
<!--   <property name="headerFile" value="${checkstyle.header.file}"/> -->
<!--   <property name="fileExtensions" value="java"/> -->
<!-- </module> -->

<module name="SuppressWarningsFilter" />

<module name="TreeWalker">
<!-- Checks for Javadoc comments. -->
<!-- See https://checkstyle.org/config_javadoc.html -->
<module name="InvalidJavadocPosition"/>
<module name="JavadocMethod">
<property name="allowMissingParamTags" value="true"/>
<property name="allowMissingReturnTag" value="true"/>
</module>
<module name="JavadocType"/>
<!--<module name="JavadocVariable"/>-->
<module name="JavadocStyle">
<property name="checkFirstSentence" value="false"/>
</module>
<!--<module name="MissingJavadocMethod"/>-->

<!-- Checks for Naming Conventions. -->
<!-- See https://checkstyle.org/config_naming.html -->
<module name="ConstantName"/>
<module name="LocalFinalVariableName"/>
<module name="LocalVariableName"/>
<module name="MemberName">
<property name="format" value="^(TAG|DEBUG|[a-z][a-zA-Z0-9]*)$"/>
</module>
<module name="MethodName"/>
<module name="PackageName"/>
<module name="ParameterName"/>
<module name="StaticVariableName"/>
<module name="TypeName"/>

<!-- Checks for imports -->
<!-- See https://checkstyle.org/config_import.html -->
<module name="AvoidStarImport"/>
<module name="IllegalImport"/> <!-- defaults to sun.* packages -->
<module name="RedundantImport"/>
<module name="UnusedImports"/>

<!-- Checks for Size Violations. -->
<!-- See https://checkstyle.org/config_sizes.html -->
<module name="MethodLength">
<property name="severity" value="warning"/>
</module>
<module name="ParameterNumber">
<property name="severity" value="warning"/>
</module>

<!-- Checks for whitespace -->
<!-- See https://checkstyle.org/config_whitespace.html -->
<module name="EmptyForIteratorPad"/>
<module name="GenericWhitespace"/>
<module name="MethodParamPad"/>
<module name="NoWhitespaceAfter"/>
<module name="NoWhitespaceBefore"/>
<module name="OperatorWrap"/>
<module name="ParenPad"/>
<module name="TypecastParenPad"/>
<module name="WhitespaceAfter"/>
<module name="WhitespaceAround"/>

<!-- Modifier Checks -->
<!-- See https://checkstyle.org/config_modifiers.html -->
<module name="ModifierOrder"/>
<module name="RedundantModifier"/>

<!-- Checks for blocks. You know, those {}'s -->
<!-- See https://checkstyle.org/config_blocks.html -->
<module name="AvoidNestedBlocks"/>
<module name="EmptyBlock"/>
<module name="LeftCurly"/>
<module name="NeedBraces"/>
<module name="RightCurly"/>

<!-- Checks for common coding problems -->
<!-- See https://checkstyle.org/config_coding.html -->
<module name="EmptyStatement"/>
<module name="EqualsHashCode">
<property name="severity" value="warning"/>
</module>
<module name="HiddenField">
<property name="ignoreConstructorParameter" value="true"/>
<property name="ignoreSetter" value="true"/>
</module>
<module name="IllegalInstantiation"/>
<module name="InnerAssignment"/>
<!--<module name="MagicNumber"/>-->
<!--<module name="MissingSwitchDefault">
<property name="severity" value="warning"/>
</module>-->
<module name="MultipleVariableDeclarations"/>
<module name="SimplifyBooleanExpression"/>
<module name="SimplifyBooleanReturn"/>
<module name="FinalLocalVariable">
<property name="tokens" value="VARIABLE_DEF,PARAMETER_DEF"/>
<property name="validateEnhancedForLoopVariable" value="true"/>
</module>

<!-- Checks for class design -->
<!-- See https://checkstyle.org/config_design.html -->
<!--<module name="DesignForExtension"/>-->
<module name="FinalClass"/>
<module name="HideUtilityClassConstructor"/>
<module name="InterfaceIsType"/>
<!--<module name="VisibilityModifier">
<property name="ignoreAnnotationCanonicalNames" value="State,ColumnInfo"/>
<property name="severity" value="warning"/>
</module>-->

<!-- Miscellaneous other checks. -->
<!-- See https://checkstyle.org/config_misc.html -->
<module name="ArrayTypeStyle"/>
<module name="FinalParameters"/>
<!--<module name="TodoComment">
<property name="format" value="(TODO:|FIXME:)"/>
<property name="severity" value="warning"/>
</module>-->
<module name="UpperEll"/>

<module name="SuppressWarningsHolder" />
</module>
</module>

@@ -0,0 +1,15 @@
<?xml version="1.0"?>
<!DOCTYPE suppressions PUBLIC
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
"https://checkstyle.org/dtds/suppressions_1_2.dtd">
<suppressions>
<!-- Use @SuppressWarnings("...") if it is possible, only use this file if it is not -->

<suppress checks="LineLength"
files="BandcampExtractorHelper.java"
lines="54"/>

<suppress checks="LineLength"
files="ItagItem.java"
lines="19"/>
</suppressions>
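
Note on the two files above: checkstyle.xml registers both SuppressWarningsFilter and SuppressWarningsHolder, so a violation can usually be silenced directly in code with @SuppressWarnings, as the comment in suppressions.xml recommends; the XML file is only the fallback for spots an annotation cannot reach. A minimal illustrative sketch (not part of this commit) of the annotation-based route, mirroring what Extractor.java does further down:

    // Silences only checkstyle's HiddenField check for this one declaration.
    @SuppressWarnings("HiddenField")
    public abstract void onFetchPage(@Nonnull Downloader downloader)
            throws IOException, ExtractionException;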

@@ -1,9 +1,25 @@
plugins {
id 'checkstyle'
}

test {
// Pass on downloader type to tests for different CI jobs. See DownloaderFactory.java and ci.yml
// Pass on downloader type to tests for different CI jobs. See DownloaderFactory.java and ci.yml
if (System.properties.containsKey('downloader')) {
systemProperty('downloader', System.getProperty('downloader'))
}
useJUnitPlatform()
dependsOn checkstyleMain // run checkstyle when testing
}

checkstyle {
getConfigDirectory().set(rootProject.file("checkstyle"))
ignoreFailures false
showViolations true
toolVersion checkstyleVersion
}

checkstyleTest {
enabled false // do not checkstyle test files
}

dependencies {
@@ -15,6 +31,8 @@ dependencies {
implementation "com.github.spotbugs:spotbugs-annotations:$spotbugsVersion"
implementation 'org.nibor.autolink:autolink:0.10.0'

checkstyle "com.puppycrawl.tools:checkstyle:$checkstyleVersion"

testImplementation platform("org.junit:junit-bom:$junitVersion")
testImplementation 'org.junit.jupiter:junit-jupiter-api'
testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine'
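
The test block above forwards the -Ddownloader system property (set to MOCK by the ci.yml job at the top of this commit) into the test JVM. A rough, hypothetical sketch of how the test side can pick an implementation from that flag; the real selection logic lives in DownloaderFactory.java and the class names below are illustrative only:

    // Hypothetical sketch: choose a Downloader based on the forwarded property.
    final String downloaderType = System.getProperty("downloader", "REAL");
    final Downloader downloader = "MOCK".equalsIgnoreCase(downloaderType)
            ? new MockDownloader()           // placeholder: serves pre-recorded responses
            : new RealNetworkDownloader();   // placeholder: performs real HTTP requests
    NewPipe.init(downloader);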

@@ -10,13 +10,15 @@ import org.schabi.newpipe.extractor.localization.TimeAgoParser;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import java.io.IOException;
import java.util.Objects;

public abstract class Extractor {
/**
* {@link StreamingService} currently related to this extractor.<br>
* Useful for getting other things from a service (like the url handlers for cleaning/accepting/get id from urls).
* Useful for getting other things from a service (like the url handlers for
* cleaning/accepting/get id from urls).
*/
private final StreamingService service;
private final LinkHandler linkHandler;
@@ -27,16 +29,18 @@ public abstract class Extractor {
private ContentCountry forcedContentCountry = null;

private boolean pageFetched = false;
// called like this to prevent checkstyle errors about "hiding a field"
private final Downloader downloader;

public Extractor(final StreamingService service, final LinkHandler linkHandler) {
protected Extractor(final StreamingService service, final LinkHandler linkHandler) {
this.service = Objects.requireNonNull(service, "service is null");
this.linkHandler = Objects.requireNonNull(linkHandler, "LinkHandler is null");
this.downloader = Objects.requireNonNull(NewPipe.getDownloader(), "downloader is null");
}

/**
* @return The {@link LinkHandler} of the current extractor object (e.g. a ChannelExtractor should return a channel url handler).
* @return The {@link LinkHandler} of the current extractor object (e.g. a ChannelExtractor
* should return a channel url handler).
*/
@Nonnull
public LinkHandler getLinkHandler() {
@@ -50,13 +54,17 @@ public abstract class Extractor {
* @throws ExtractionException if the pages content is not understood
*/
public void fetchPage() throws IOException, ExtractionException {
if (pageFetched) return;
if (pageFetched) {
return;
}
onFetchPage(downloader);
pageFetched = true;
}

protected void assertPageFetched() {
if (!pageFetched) throw new IllegalStateException("Page is not fetched. Make sure you call fetchPage()");
if (!pageFetched) {
throw new IllegalStateException("Page is not fetched. Make sure you call fetchPage()");
}
}

protected boolean isPageFetched() {
@@ -66,11 +74,13 @@ public abstract class Extractor {
/**
* Fetch the current page.
*
* @param downloader the download to use
* @param downloader the downloader to use
* @throws IOException if the page can not be loaded
* @throws ExtractionException if the pages content is not understood
*/
public abstract void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException;
@SuppressWarnings("HiddenField")
public abstract void onFetchPage(@Nonnull Downloader downloader)
throws IOException, ExtractionException;

@Nonnull
public String getId() throws ParsingException {
@@ -118,11 +128,11 @@ public abstract class Extractor {
// Localization
//////////////////////////////////////////////////////////////////////////*/

public void forceLocalization(Localization localization) {
public void forceLocalization(final Localization localization) {
this.forcedLocalization = localization;
}

public void forceContentCountry(ContentCountry contentCountry) {
public void forceContentCountry(final ContentCountry contentCountry) {
this.forcedContentCountry = contentCountry;
}

@@ -133,7 +143,8 @@ public abstract class Extractor {

@Nonnull
public ContentCountry getExtractorContentCountry() {
return forcedContentCountry == null ? getService().getContentCountry() : forcedContentCountry;
return forcedContentCountry == null ? getService().getContentCountry()
: forcedContentCountry;
}

@Nonnull

@@ -17,7 +17,8 @@ public abstract class Info implements Serializable {
*/
private final String id;
/**
* Different than the {@link #originalUrl} in the sense that it <i>may</i> be set as a cleaned url.
* Different than the {@link #originalUrl} in the sense that it <i>may</i> be set as a cleaned
* url.
*
* @see LinkHandler#getUrl()
* @see Extractor#getOriginalUrl()
@@ -33,15 +34,19 @@ public abstract class Info implements Serializable {

private final List<Throwable> errors = new ArrayList<>();

public void addError(Throwable throwable) {
public void addError(final Throwable throwable) {
this.errors.add(throwable);
}

public void addAllErrors(Collection<Throwable> errors) {
this.errors.addAll(errors);
public void addAllErrors(final Collection<Throwable> throwables) {
this.errors.addAll(throwables);
}

public Info(int serviceId, String id, String url, String originalUrl, String name) {
public Info(final int serviceId,
final String id,
final String url,
final String originalUrl,
final String name) {
this.serviceId = serviceId;
this.id = id;
this.url = url;
@@ -49,7 +54,7 @@ public abstract class Info implements Serializable {
this.name = name;
}

public Info(int serviceId, LinkHandler linkHandler, String name) {
public Info(final int serviceId, final LinkHandler linkHandler, final String name) {
this(serviceId,
linkHandler.getId(),
linkHandler.getUrl(),
@@ -59,14 +64,16 @@ public abstract class Info implements Serializable {

@Override
public String toString() {
final String ifDifferentString = !url.equals(originalUrl) ? " (originalUrl=\"" + originalUrl + "\")" : "";
return getClass().getSimpleName() + "[url=\"" + url + "\"" + ifDifferentString + ", name=\"" + name + "\"]";
final String ifDifferentString
= url.equals(originalUrl) ? "" : " (originalUrl=\"" + originalUrl + "\")";
return getClass().getSimpleName() + "[url=\"" + url + "\"" + ifDifferentString
+ ", name=\"" + name + "\"]";
}

// if you use an api and want to handle the website url
// overriding original url is essential
public void setOriginalUrl(String url) {
originalUrl = url;
public void setOriginalUrl(final String originalUrl) {
this.originalUrl = originalUrl;
}

public int getServiceId() {

@@ -29,7 +29,10 @@ public abstract class InfoItem implements Serializable {
private final String name;
private String thumbnailUrl;

public InfoItem(InfoType infoType, int serviceId, String url, String name) {
public InfoItem(final InfoType infoType,
final int serviceId,
final String url,
final String name) {
this.infoType = infoType;
this.serviceId = serviceId;
this.url = url;
@@ -52,7 +55,7 @@ public abstract class InfoItem implements Serializable {
return name;
}

public void setThumbnailUrl(String thumbnailUrl) {
public void setThumbnailUrl(final String thumbnailUrl) {
this.thumbnailUrl = thumbnailUrl;
}

@@ -29,7 +29,8 @@ import java.util.List;
* along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
*/

public abstract class InfoItemsCollector<I extends InfoItem, E extends InfoItemExtractor> implements Collector<I, E> {
public abstract class InfoItemsCollector<I extends InfoItem, E extends InfoItemExtractor>
implements Collector<I, E> {

private final List<I> itemList = new ArrayList<>();
private final List<Throwable> errors = new ArrayList<>();
@@ -77,7 +78,7 @@ public abstract class InfoItemsCollector<I extends InfoItem, E extends InfoItemE
* Add an error
* @param error the error
*/
protected void addError(Exception error) {
protected void addError(final Exception error) {
errors.add(error);
}

@@ -85,7 +86,7 @@ public abstract class InfoItemsCollector<I extends InfoItem, E extends InfoItemE
* Add an item
* @param item the item
*/
protected void addItem(I item) {
protected void addItem(final I item) {
itemList.add(item);
}

@@ -98,12 +99,12 @@ public abstract class InfoItemsCollector<I extends InfoItem, E extends InfoItemE
}

@Override
public void commit(E extractor) {
public void commit(final E extractor) {
try {
addItem(extract(extractor));
} catch (FoundAdException ae) {
} catch (final FoundAdException ae) {
// found an ad. Maybe a debug line could be placed here
} catch (ParsingException e) {
} catch (final ParsingException e) {
addError(e);
}
}

@@ -12,6 +12,7 @@ import javax.annotation.Nonnull;

/**
* Base class to extractors that have a list (e.g. playlists, users).
* @param <R> the info item type this list extractor provides
*/
public abstract class ListExtractor<R extends InfoItem> extends Extractor {
/**
@@ -30,7 +31,7 @@ public abstract class ListExtractor<R extends InfoItem> extends Extractor {
*/
public static final long ITEM_COUNT_MORE_THAN_100 = -3;

public ListExtractor(StreamingService service, ListLinkHandler linkHandler) {
public ListExtractor(final StreamingService service, final ListLinkHandler linkHandler) {
super(service, linkHandler);
}

@@ -50,8 +51,9 @@ public abstract class ListExtractor<R extends InfoItem> extends Extractor {
* @return a {@link InfoItemsPage} corresponding to the requested page
* @see InfoItemsPage#getNextPage()
*/
public abstract InfoItemsPage<R> getPage(final Page page) throws IOException, ExtractionException;
public abstract InfoItemsPage<R> getPage(Page page) throws IOException, ExtractionException;

@Nonnull
@Override
public ListLinkHandler getLinkHandler() {
return (ListLinkHandler) super.getLinkHandler();
@@ -64,15 +66,17 @@ public abstract class ListExtractor<R extends InfoItem> extends Extractor {
/**
* A class that is used to wrap a list of gathered items and eventual errors, it
* also contains a field that points to the next available page ({@link #nextPage}).
* @param <T> the info item type that this page is supposed to store and provide
*/
public static class InfoItemsPage<T extends InfoItem> {
private static final InfoItemsPage<InfoItem> EMPTY =
new InfoItemsPage<>(Collections.<InfoItem>emptyList(), null, Collections.<Throwable>emptyList());
new InfoItemsPage<>(Collections.emptyList(), null, Collections.emptyList());

/**
* A convenient method that returns a representation of an empty page.
*
* @return a type-safe page with the list of items and errors empty and the nextPage set to {@code null}.
* @return a type-safe page with the list of items and errors empty and the nextPage set to
* {@code null}.
*/
public static <T extends InfoItem> InfoItemsPage<T> emptyPage() {
//noinspection unchecked
@@ -97,11 +101,13 @@ public abstract class ListExtractor<R extends InfoItem> extends Extractor {
*/
private final List<Throwable> errors;

public InfoItemsPage(InfoItemsCollector<T, ?> collector, Page nextPage) {
public InfoItemsPage(final InfoItemsCollector<T, ?> collector, final Page nextPage) {
this(collector.getItems(), nextPage, collector.getErrors());
}

public InfoItemsPage(List<T> itemsList, Page nextPage, List<Throwable> errors) {
public InfoItemsPage(final List<T> itemsList,
final Page nextPage,
final List<Throwable> errors) {
this.itemsList = itemsList;
this.nextPage = nextPage;
this.errors = errors;

@@ -10,19 +10,21 @@ public abstract class ListInfo<T extends InfoItem> extends Info {
private final List<String> contentFilters;
private final String sortFilter;

public ListInfo(int serviceId,
String id,
String url,
String originalUrl,
String name,
List<String> contentFilter,
String sortFilter) {
public ListInfo(final int serviceId,
final String id,
final String url,
final String originalUrl,
final String name,
final List<String> contentFilter,
final String sortFilter) {
super(serviceId, id, url, originalUrl, name);
this.contentFilters = contentFilter;
this.sortFilter = sortFilter;
}

public ListInfo(int serviceId, ListLinkHandler listUrlIdHandler, String name) {
public ListInfo(final int serviceId,
final ListLinkHandler listUrlIdHandler,
final String name) {
super(serviceId, listUrlIdHandler, name);
this.contentFilters = listUrlIdHandler.getContentFilters();
this.sortFilter = listUrlIdHandler.getSortFilter();
@@ -32,7 +34,7 @@ public abstract class ListInfo<T extends InfoItem> extends Info {
return relatedItems;
}

public void setRelatedItems(List<T> relatedItems) {
public void setRelatedItems(final List<T> relatedItems) {
this.relatedItems = relatedItems;
}

@@ -44,7 +46,7 @@ public abstract class ListInfo<T extends InfoItem> extends Info {
return nextPage;
}

public void setNextPage(Page page) {
public void setNextPage(final Page page) {
this.nextPage = page;
}

@@ -22,13 +22,18 @@ package org.schabi.newpipe.extractor;
* along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
*/

import java.util.Arrays;
import java.util.function.Function;

/**
* Static data about various media formats support by NewPipe, eg mime type, extension
*/

@SuppressWarnings("MethodParamPad") // we want the media format table below to be aligned
public enum MediaFormat {
// @formatter:off
//video and audio combined formats
// id name suffix mime type
// id name suffix mimeType
MPEG_4 (0x0, "MPEG-4", "mp4", "video/mp4"),
v3GPP (0x10, "3GPP", "3gp", "video/3gpp"),
WEBM (0x20, "WebM", "webm", "video/webm"),
@@ -38,67 +43,69 @@ public enum MediaFormat {
MP3 (0x300, "MP3", "mp3", "audio/mpeg"),
OPUS (0x400, "opus", "opus", "audio/opus"),
OGG (0x500, "ogg", "ogg", "audio/ogg"),
WEBMA_OPUS (0x200, "WebM Opus", "webm", "audio/webm"),
WEBMA_OPUS(0x200, "WebM Opus", "webm", "audio/webm"),
// subtitles formats
VTT (0x1000, "WebVTT", "vtt", "text/vtt"),
TTML (0x2000, "Timed Text Markup Language", "ttml", "application/ttml+xml"),
TRANSCRIPT1 (0x3000, "TranScript v1", "srv1", "text/xml"),
TRANSCRIPT2 (0x4000, "TranScript v2", "srv2", "text/xml"),
TRANSCRIPT3 (0x5000, "TranScript v3", "srv3", "text/xml"),
TRANSCRIPT1(0x3000, "TranScript v1", "srv1", "text/xml"),
TRANSCRIPT2(0x4000, "TranScript v2", "srv2", "text/xml"),
TRANSCRIPT3(0x5000, "TranScript v3", "srv3", "text/xml"),
SRT (0x6000, "SubRip file format", "srt", "text/srt");
// @formatter:on

public final int id;
public final String name;
public final String suffix;
public final String mimeType;

MediaFormat(int id, String name, String suffix, String mimeType) {
MediaFormat(final int id, final String name, final String suffix, final String mimeType) {
this.id = id;
this.name = name;
this.suffix = suffix;
this.mimeType = mimeType;
}

private static <T> T getById(final int id,
final Function<MediaFormat, T> field,
final T orElse) {
return Arrays.stream(MediaFormat.values())
.filter(mediaFormat -> mediaFormat.id == id)
.map(field)
.findFirst()
.orElse(orElse);
}

/**
* Return the friendly name of the media format with the supplied id
*
* @param ident the id of the media format. Currently an arbitrary, NewPipe-specific number.
* @param id the id of the media format. Currently an arbitrary, NewPipe-specific number.
* @return the friendly name of the MediaFormat associated with this ids,
* or an empty String if none match it.
*/
public static String getNameById(int ident) {
for (MediaFormat vf : MediaFormat.values()) {
if (vf.id == ident) return vf.name;
}
return "";
public static String getNameById(final int id) {
return getById(id, MediaFormat::getName, "");
}

/**
* Return the file extension of the media format with the supplied id
*
* @param ident the id of the media format. Currently an arbitrary, NewPipe-specific number.
* @param id the id of the media format. Currently an arbitrary, NewPipe-specific number.
* @return the file extension of the MediaFormat associated with this ids,
* or an empty String if none match it.
*/
public static String getSuffixById(int ident) {
for (MediaFormat vf : MediaFormat.values()) {
if (vf.id == ident) return vf.suffix;
}
return "";
public static String getSuffixById(final int id) {
return getById(id, MediaFormat::getSuffix, "");
}

/**
* Return the MIME type of the media format with the supplied id
*
* @param ident the id of the media format. Currently an arbitrary, NewPipe-specific number.
* @param id the id of the media format. Currently an arbitrary, NewPipe-specific number.
* @return the MIME type of the MediaFormat associated with this ids,
* or an empty String if none match it.
*/
public static String getMimeById(int ident) {
for (MediaFormat vf : MediaFormat.values()) {
if (vf.id == ident) return vf.mimeType;
}
return "";
public static String getMimeById(final int id) {
return getById(id, MediaFormat::getMimeType, null);
}

/**
@@ -107,11 +114,11 @@ public enum MediaFormat {
* @return MediaFormat associated with this mime type,
* or null if none match it.
*/
public static MediaFormat getFromMimeType(String mimeType) {
for (MediaFormat vf : MediaFormat.values()) {
if (vf.mimeType.equals(mimeType)) return vf;
}
return null;
public static MediaFormat getFromMimeType(final String mimeType) {
return Arrays.stream(MediaFormat.values())
.filter(mediaFormat -> mediaFormat.mimeType.equals(mimeType))
.findFirst()
.orElse(null);
}

/**
@@ -120,18 +127,15 @@ public enum MediaFormat {
* @param id the id
* @return the id of the media format or null.
*/
public static MediaFormat getFormatById(int id) {
for (MediaFormat vf : values()) {
if (vf.id == id) return vf;
}
return null;
public static MediaFormat getFormatById(final int id) {
return getById(id, mediaFormat -> mediaFormat, null);
}

public static MediaFormat getFromSuffix(String suffix) {
for (MediaFormat vf : values()) {
if (vf.suffix.equals(suffix)) return vf;
}
return null;
public static MediaFormat getFromSuffix(final String suffix) {
return Arrays.stream(MediaFormat.values())
.filter(mediaFormat -> mediaFormat.suffix.equals(suffix))
.findFirst()
.orElse(null);
}

/**

@@ -16,8 +16,10 @@ public class MetaInfo implements Serializable {
private List<URL> urls = new ArrayList<>();
private List<String> urlTexts = new ArrayList<>();

public MetaInfo(@Nonnull final String title, @Nonnull final Description content,
@Nonnull final List<URL> urls, @Nonnull final List<String> urlTexts) {
public MetaInfo(@Nonnull final String title,
@Nonnull final Description content,
@Nonnull final List<URL> urls,
@Nonnull final List<String> urlTexts) {
this.title = title;
this.content = content;
this.urls = urls;

@@ -50,7 +50,7 @@ public class MultiInfoItemsCollector extends InfoItemsCollector<InfoItem, InfoIt
private final ChannelInfoItemsCollector userCollector;
private final PlaylistInfoItemsCollector playlistCollector;

public MultiInfoItemsCollector(int serviceId) {
public MultiInfoItemsCollector(final int serviceId) {
super(serviceId);
streamCollector = new StreamInfoItemsCollector(serviceId);
userCollector = new ChannelInfoItemsCollector(serviceId);
@@ -76,7 +76,7 @@ public class MultiInfoItemsCollector extends InfoItemsCollector<InfoItem, InfoIt
}

@Override
public InfoItem extract(InfoItemExtractor extractor) throws ParsingException {
public InfoItem extract(final InfoItemExtractor extractor) throws ParsingException {
// Use the corresponding collector for each item extractor type
if (extractor instanceof StreamInfoItemExtractor) {
return streamCollector.extract((StreamInfoItemExtractor) extractor);

@@ -32,7 +32,7 @@ import java.util.List;
/**
* Provides access to streaming services supported by NewPipe.
*/
public class NewPipe {
public final class NewPipe {
private static Downloader downloader;
private static Localization preferredLocalization;
private static ContentCountry preferredContentCountry;
@@ -40,19 +40,20 @@ public class NewPipe {
private NewPipe() {
}

public static void init(Downloader d) {
public static void init(final Downloader d) {
downloader = d;
preferredLocalization = Localization.DEFAULT;
preferredContentCountry = ContentCountry.DEFAULT;
}

public static void init(Downloader d, Localization l) {
public static void init(final Downloader d, final Localization l) {
downloader = d;
preferredLocalization = l;
preferredContentCountry = l.getCountryCode().isEmpty() ? ContentCountry.DEFAULT : new ContentCountry(l.getCountryCode());
preferredContentCountry = l.getCountryCode().isEmpty()
? ContentCountry.DEFAULT : new ContentCountry(l.getCountryCode());
}

public static void init(Downloader d, Localization l, ContentCountry c) {
public static void init(final Downloader d, final Localization l, final ContentCountry c) {
downloader = d;
preferredLocalization = l;
preferredContentCountry = c;
@@ -70,26 +71,24 @@ public class NewPipe {
return ServiceList.all();
}

public static StreamingService getService(int serviceId) throws ExtractionException {
for (StreamingService service : ServiceList.all()) {
if (service.getServiceId() == serviceId) {
return service;
}
}
throw new ExtractionException("There's no service with the id = \"" + serviceId + "\"");
public static StreamingService getService(final int serviceId) throws ExtractionException {
return ServiceList.all().stream()
.filter(service -> service.getServiceId() == serviceId)
.findFirst()
.orElseThrow(() -> new ExtractionException(
"There's no service with the id = \"" + serviceId + "\""));
}

public static StreamingService getService(String serviceName) throws ExtractionException {
for (StreamingService service : ServiceList.all()) {
if (service.getServiceInfo().getName().equals(serviceName)) {
return service;
}
}
throw new ExtractionException("There's no service with the name = \"" + serviceName + "\"");
public static StreamingService getService(final String serviceName) throws ExtractionException {
return ServiceList.all().stream()
.filter(service -> service.getServiceInfo().getName().equals(serviceName))
.findFirst()
.orElseThrow(() -> new ExtractionException(
"There's no service with the name = \"" + serviceName + "\""));
}

public static StreamingService getServiceByUrl(String url) throws ExtractionException {
for (StreamingService service : ServiceList.all()) {
public static StreamingService getServiceByUrl(final String url) throws ExtractionException {
for (final StreamingService service : ServiceList.all()) {
if (service.getLinkTypeByUrl(url) != StreamingService.LinkType.NONE) {
return service;
}
@@ -97,18 +96,18 @@ public class NewPipe {
throw new ExtractionException("No service can handle the url = \"" + url + "\"");
}

public static int getIdOfService(String serviceName) {
public static int getIdOfService(final String serviceName) {
try {
return getService(serviceName).getServiceId();
} catch (ExtractionException ignored) {
} catch (final ExtractionException ignored) {
return -1;
}
}

public static String getNameOfService(int id) {
public static String getNameOfService(final int id) {
try {
return getService(id).getServiceInfo().getName();
} catch (Exception e) {
} catch (final Exception e) {
System.err.println("Service id not known");
e.printStackTrace();
return "<unknown>";
@@ -119,19 +118,21 @@ public class NewPipe {
// Localization
//////////////////////////////////////////////////////////////////////////*/

public static void setupLocalization(Localization preferredLocalization) {
setupLocalization(preferredLocalization, null);
public static void setupLocalization(final Localization thePreferredLocalization) {
setupLocalization(thePreferredLocalization, null);
}

public static void setupLocalization(Localization preferredLocalization, @Nullable ContentCountry preferredContentCountry) {
NewPipe.preferredLocalization = preferredLocalization;
public static void setupLocalization(
final Localization thePreferredLocalization,
@Nullable final ContentCountry thePreferredContentCountry) {
NewPipe.preferredLocalization = thePreferredLocalization;

if (preferredContentCountry != null) {
NewPipe.preferredContentCountry = preferredContentCountry;
if (thePreferredContentCountry != null) {
NewPipe.preferredContentCountry = thePreferredContentCountry;
} else {
NewPipe.preferredContentCountry = preferredLocalization.getCountryCode().isEmpty()
NewPipe.preferredContentCountry = thePreferredLocalization.getCountryCode().isEmpty()
? ContentCountry.DEFAULT
: new ContentCountry(preferredLocalization.getCountryCode());
: new ContentCountry(thePreferredLocalization.getCountryCode());
}
}

@@ -140,7 +141,7 @@ public class NewPipe {
return preferredLocalization == null ? Localization.DEFAULT : preferredLocalization;
}

public static void setPreferredLocalization(Localization preferredLocalization) {
public static void setPreferredLocalization(final Localization preferredLocalization) {
NewPipe.preferredLocalization = preferredLocalization;
}

@@ -149,7 +150,7 @@ public class NewPipe {
return preferredContentCountry == null ? ContentCountry.DEFAULT : preferredContentCountry;
}

public static void setPreferredContentCountry(ContentCountry preferredContentCountry) {
public static void setPreferredContentCountry(final ContentCountry preferredContentCountry) {
NewPipe.preferredContentCountry = preferredContentCountry;
}
}
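
The service lookups above now use the Stream API instead of manual loops; behaviour is unchanged, as both versions throw an ExtractionException when nothing matches. Purely for illustration (the concrete id and name are assumptions, not shown in this diff), callers keep resolving services the same way after NewPipe.init(...):

    // Illustrative usage only.
    final StreamingService byId = NewPipe.getService(0);
    final StreamingService byName = NewPipe.getService("YouTube");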

@@ -17,8 +17,11 @@ public class Page implements Serializable {
@Nullable
private final byte[] body;

public Page(final String url, final String id, final List<String> ids,
final Map<String, String> cookies, @Nullable final byte[] body) {
public Page(final String url,
final String id,
final List<String> ids,
final Map<String, String> cookies,
@Nullable final byte[] body) {
this.url = url;
this.id = id;
this.ids = ids;

@@ -31,6 +31,7 @@ import java.util.List;
/**
* A list of supported services.
*/
@SuppressWarnings({"ConstantName", "InnerAssignment"}) // keep unusual names and inner assignments
public final class ServiceList {
private ServiceList() {
//no instance

@@ -6,7 +6,12 @@ import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.feed.FeedExtractor;
import org.schabi.newpipe.extractor.kiosk.KioskList;
import org.schabi.newpipe.extractor.linkhandler.*;
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
import org.schabi.newpipe.extractor.localization.ContentCountry;
import org.schabi.newpipe.extractor.localization.Localization;
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
@@ -55,7 +60,7 @@ public abstract class StreamingService {
* @param name the name of the service
* @param mediaCapabilities the type of media this service can handle
*/
public ServiceInfo(String name, List<MediaCapability> mediaCapabilities) {
public ServiceInfo(final String name, final List<MediaCapability> mediaCapabilities) {
this.name = name;
this.mediaCapabilities = Collections.unmodifiableList(mediaCapabilities);
}
@@ -74,8 +79,8 @@ public abstract class StreamingService {
}

/**
* LinkType will be used to determine which type of URL you are handling, and therefore which part
* of NewPipe should handle a certain URL.
* LinkType will be used to determine which type of URL you are handling, and therefore which
* part of NewPipe should handle a certain URL.
*/
public enum LinkType {
NONE,
@@ -90,14 +95,15 @@ public abstract class StreamingService {
/**
* Creates a new Streaming service.
* If you Implement one do not set id within your implementation of this extractor, instead
* set the id when you put the extractor into
* <a href="https://teamnewpipe.github.io/NewPipeExtractor/javadoc/org/schabi/newpipe/extractor/ServiceList.html">ServiceList</a>.
* set the id when you put the extractor into {@link ServiceList}
* All other parameters can be set directly from the overriding constructor.
* @param id the number of the service to identify him within the NewPipe frontend
* @param name the name of the service
* @param capabilities the type of media this service can handle
*/
public StreamingService(int id, String name, List<ServiceInfo.MediaCapability> capabilities) {
public StreamingService(final int id,
final String name,
final List<ServiceInfo.MediaCapability> capabilities) {
this.serviceId = id;
this.serviceInfo = new ServiceInfo(name, capabilities);
}
@@ -172,22 +178,21 @@ public abstract class StreamingService {
public abstract SubscriptionExtractor getSubscriptionExtractor();

/**
* This method decides which strategy will be chosen to fetch the feed. In YouTube, for example, a separate feed
* exists which is lightweight and made specifically to be used like this.
* This method decides which strategy will be chosen to fetch the feed. In YouTube, for example,
* a separate feed exists which is lightweight and made specifically to be used like this.
* <p>
* In services which there's no other way to retrieve them, null should be returned.
*
* @return a {@link FeedExtractor} instance or null.
*/
@Nullable
public FeedExtractor getFeedExtractor(String url) throws ExtractionException {
public FeedExtractor getFeedExtractor(final String url) throws ExtractionException {
return null;
}

/**
* Must create a new instance of a KioskList implementation.
* @return a new KioskList instance
* @throws ExtractionException
*/
public abstract KioskList getKioskList() throws ExtractionException;

@@ -195,49 +200,52 @@ public abstract class StreamingService {
* Must create a new instance of a ChannelExtractor implementation.
* @param linkHandler is pointing to the channel which should be handled by this new instance.
* @return a new ChannelExtractor
* @throws ExtractionException
*/
public abstract ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler) throws ExtractionException;
public abstract ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler)
throws ExtractionException;

/**
* Must crete a new instance of a PlaylistExtractor implementation.
* @param linkHandler is pointing to the playlist which should be handled by this new instance.
* @return a new PlaylistExtractor
* @throws ExtractionException
*/
public abstract PlaylistExtractor getPlaylistExtractor(ListLinkHandler linkHandler) throws ExtractionException;
public abstract PlaylistExtractor getPlaylistExtractor(ListLinkHandler linkHandler)
throws ExtractionException;

/**
* Must create a new instance of a StreamExtractor implementation.
* @param linkHandler is pointing to the stream which should be handled by this new instance.
* @return a new StreamExtractor
* @throws ExtractionException
*/
public abstract StreamExtractor getStreamExtractor(LinkHandler linkHandler) throws ExtractionException;
public abstract StreamExtractor getStreamExtractor(LinkHandler linkHandler)
throws ExtractionException;

public abstract CommentsExtractor getCommentsExtractor(ListLinkHandler linkHandler) throws ExtractionException;
public abstract CommentsExtractor getCommentsExtractor(ListLinkHandler linkHandler)
throws ExtractionException;

/*//////////////////////////////////////////////////////////////////////////
// Extractors without link handler
//////////////////////////////////////////////////////////////////////////*/

public SearchExtractor getSearchExtractor(String query,
List<String> contentFilter,
String sortFilter) throws ExtractionException {
public SearchExtractor getSearchExtractor(final String query,
final List<String> contentFilter,
final String sortFilter) throws ExtractionException {
return getSearchExtractor(getSearchQHFactory()
.fromQuery(query, contentFilter, sortFilter));
}

public ChannelExtractor getChannelExtractor(String id,
List<String> contentFilter,
String sortFilter) throws ExtractionException {
public ChannelExtractor getChannelExtractor(final String id,
final List<String> contentFilter,
final String sortFilter)
throws ExtractionException {
return getChannelExtractor(getChannelLHFactory()
.fromQuery(id, contentFilter, sortFilter));
}

public PlaylistExtractor getPlaylistExtractor(String id,
List<String> contentFilter,
String sortFilter) throws ExtractionException {
public PlaylistExtractor getPlaylistExtractor(final String id,
final List<String> contentFilter,
final String sortFilter)
throws ExtractionException {
return getPlaylistExtractor(getPlaylistLHFactory()
.fromQuery(id, contentFilter, sortFilter));
}
@@ -246,28 +254,28 @@ public abstract class StreamingService {
// Short extractors overloads
//////////////////////////////////////////////////////////////////////////*/

public SearchExtractor getSearchExtractor(String query) throws ExtractionException {
public SearchExtractor getSearchExtractor(final String query) throws ExtractionException {
return getSearchExtractor(getSearchQHFactory().fromQuery(query));
}

public ChannelExtractor getChannelExtractor(String url) throws ExtractionException {
public ChannelExtractor getChannelExtractor(final String url) throws ExtractionException {
return getChannelExtractor(getChannelLHFactory().fromUrl(url));
}

public PlaylistExtractor getPlaylistExtractor(String url) throws ExtractionException {
public PlaylistExtractor getPlaylistExtractor(final String url) throws ExtractionException {
return getPlaylistExtractor(getPlaylistLHFactory().fromUrl(url));
}

public StreamExtractor getStreamExtractor(String url) throws ExtractionException {
public StreamExtractor getStreamExtractor(final String url) throws ExtractionException {
return getStreamExtractor(getStreamLHFactory().fromUrl(url));
}

public CommentsExtractor getCommentsExtractor(String url) throws ExtractionException {
ListLinkHandlerFactory llhf = getCommentsLHFactory();
if (llhf == null) {
public CommentsExtractor getCommentsExtractor(final String url) throws ExtractionException {
final ListLinkHandlerFactory listLinkHandlerFactory = getCommentsLHFactory();
if (listLinkHandlerFactory == null) {
return null;
}
return getCommentsExtractor(llhf.fromUrl(url));
return getCommentsExtractor(listLinkHandlerFactory.fromUrl(url));
}

/*//////////////////////////////////////////////////////////////////////////
@@ -320,7 +328,8 @@ public abstract class StreamingService {
* the user prefer (using {@link NewPipe#getPreferredLocalization()}), then it will:
* <ul>
* <li>Check if the exactly localization is supported by this service.</li>
* <li>If not, check if a less specific localization is available, using only the language code.</li>
* <li>If not, check if a less specific localization is available, using only the language
* code.</li>
* <li>Fallback to the {@link Localization#DEFAULT default} localization.</li>
* </ul>
*/
@@ -333,8 +342,9 @@ public abstract class StreamingService {
}

// Fallback to the first supported language that matches the preferred language
for (Localization supportedLanguage : getSupportedLocalizations()) {
if (supportedLanguage.getLanguageCode().equals(preferredLocalization.getLanguageCode())) {
for (final Localization supportedLanguage : getSupportedLocalizations()) {
if (supportedLanguage.getLanguageCode()
.equals(preferredLocalization.getLanguageCode())) {
return supportedLanguage;
}
}
@@ -343,8 +353,8 @@ public abstract class StreamingService {
}

/**
* Returns the country that should be used to fetch content in this service. It will get which country
* the user prefer (using {@link NewPipe#getPreferredContentCountry()}), then it will:
* Returns the country that should be used to fetch content in this service. It will get which
* country the user prefer (using {@link NewPipe#getPreferredContentCountry()}), then it will:
* <ul>
* <li>Check if the country is supported by this service.</li>
* <li>If not, fallback to the {@link ContentCountry#DEFAULT default} country.</li>
@@ -361,14 +371,15 @@ public abstract class StreamingService {
}

/**
* Get an instance of the time ago parser using the patterns related to the passed localization.<br>
* <br>
* Just like {@link #getLocalization()}, it will also try to fallback to a less specific localization if
* the exact one is not available/supported.
* Get an instance of the time ago parser using the patterns related to the passed localization.
* <br><br>
* Just like {@link #getLocalization()}, it will also try to fallback to a less specific
* localization if the exact one is not available/supported.
*
* @throws IllegalArgumentException if the localization is not supported (parsing patterns are not present).
* @throws IllegalArgumentException if the localization is not supported (parsing patterns are
* not present).
*/
public TimeAgoParser getTimeAgoParser(Localization localization) {
public TimeAgoParser getTimeAgoParser(final Localization localization) {
final TimeAgoParser targetParser = TimeAgoPatternsManager.getTimeAgoParserFor(localization);

if (targetParser != null) {
@@ -376,15 +387,18 @@ public abstract class StreamingService {
}

if (!localization.getCountryCode().isEmpty()) {
final Localization lessSpecificLocalization = new Localization(localization.getLanguageCode());
final TimeAgoParser lessSpecificParser = TimeAgoPatternsManager.getTimeAgoParserFor(lessSpecificLocalization);
final Localization lessSpecificLocalization
= new Localization(localization.getLanguageCode());
final TimeAgoParser lessSpecificParser
= TimeAgoPatternsManager.getTimeAgoParserFor(lessSpecificLocalization);

if (lessSpecificParser != null) {
return lessSpecificParser;
}
}

throw new IllegalArgumentException("Localization is not supported (\"" + localization.toString() + "\")");
throw new IllegalArgumentException(
"Localization is not supported (\"" + localization + "\")");
}

}

@@ -30,7 +30,7 @@ public abstract class ChannelExtractor extends ListExtractor<StreamInfoItem> {

public static final long UNKNOWN_SUBSCRIBER_COUNT = -1;

public ChannelExtractor(StreamingService service, ListLinkHandler linkHandler) {
public ChannelExtractor(final StreamingService service, final ListLinkHandler linkHandler) {
super(service, linkHandler);
}

@@ -34,27 +34,36 @@ import java.io.IOException;

public class ChannelInfo extends ListInfo<StreamInfoItem> {

public ChannelInfo(int serviceId, String id, String url, String originalUrl, String name, ListLinkHandler listLinkHandler) {
super(serviceId, id, url, originalUrl, name, listLinkHandler.getContentFilters(), listLinkHandler.getSortFilter());
public ChannelInfo(final int serviceId,
final String id,
final String url,
final String originalUrl,
final String name,
final ListLinkHandler listLinkHandler) {
super(serviceId, id, url, originalUrl, name, listLinkHandler.getContentFilters(),
listLinkHandler.getSortFilter());
}

public static ChannelInfo getInfo(String url) throws IOException, ExtractionException {
public static ChannelInfo getInfo(final String url) throws IOException, ExtractionException {
return getInfo(NewPipe.getServiceByUrl(url), url);
}

public static ChannelInfo getInfo(StreamingService service, String url) throws IOException, ExtractionException {
ChannelExtractor extractor = service.getChannelExtractor(url);
public static ChannelInfo getInfo(final StreamingService service, final String url)
throws IOException, ExtractionException {
final ChannelExtractor extractor = service.getChannelExtractor(url);
extractor.fetchPage();
return getInfo(extractor);
}

public static InfoItemsPage<StreamInfoItem> getMoreItems(StreamingService service,
String url,
Page page) throws IOException, ExtractionException {
public static InfoItemsPage<StreamInfoItem> getMoreItems(final StreamingService service,
final String url,
final Page page)
throws IOException, ExtractionException {
return service.getChannelExtractor(url).getPage(page);
}

public static ChannelInfo getInfo(ChannelExtractor extractor) throws IOException, ExtractionException {
public static ChannelInfo getInfo(final ChannelExtractor extractor)
throws IOException, ExtractionException {

final int serviceId = extractor.getServiceId();
final String id = extractor.getId();
@@ -62,60 +71,62 @@ public class ChannelInfo extends ListInfo<StreamInfoItem> {
final String originalUrl = extractor.getOriginalUrl();
final String name = extractor.getName();

final ChannelInfo info = new ChannelInfo(serviceId, id, url, originalUrl, name, extractor.getLinkHandler());
final ChannelInfo info =
new ChannelInfo(serviceId, id, url, originalUrl, name, extractor.getLinkHandler());

try {
info.setAvatarUrl(extractor.getAvatarUrl());
} catch (Exception e) {
} catch (final Exception e) {
info.addError(e);
}
try {
info.setBannerUrl(extractor.getBannerUrl());
} catch (Exception e) {
} catch (final Exception e) {
info.addError(e);
}
try {
info.setFeedUrl(extractor.getFeedUrl());
} catch (Exception e) {
} catch (final Exception e) {
info.addError(e);
}

final InfoItemsPage<StreamInfoItem> itemsPage = ExtractorHelper.getItemsPageOrLogError(info, extractor);
final InfoItemsPage<StreamInfoItem> itemsPage =
ExtractorHelper.getItemsPageOrLogError(info, extractor);
info.setRelatedItems(itemsPage.getItems());
info.setNextPage(itemsPage.getNextPage());

try {
info.setSubscriberCount(extractor.getSubscriberCount());
} catch (Exception e) {
} catch (final Exception e) {
info.addError(e);
}
try {
info.setDescription(extractor.getDescription());
} catch (Exception e) {
} catch (final Exception e) {
info.addError(e);
}

try {
info.setParentChannelName(extractor.getParentChannelName());
} catch (Exception e) {
} catch (final Exception e) {
info.addError(e);
}

try {
info.setParentChannelUrl(extractor.getParentChannelUrl());
} catch (Exception e) {
} catch (final Exception e) {
info.addError(e);
}

try {
info.setParentChannelAvatarUrl(extractor.getParentChannelAvatarUrl());
} catch (Exception e) {
} catch (final Exception e) {
info.addError(e);
}

try {
info.setVerified(extractor.isVerified());
} catch (Exception e) {
} catch (final Exception e) {
info.addError(e);
}

@@ -137,7 +148,7 @@ public class ChannelInfo extends ListInfo<StreamInfoItem> {
return parentChannelName;
}

public void setParentChannelName(String parentChannelName) {
public void setParentChannelName(final String parentChannelName) {
this.parentChannelName = parentChannelName;
}

@@ -145,7 +156,7 @@ public class ChannelInfo extends ListInfo<StreamInfoItem> {
return parentChannelUrl;
}

public void setParentChannelUrl(String parentChannelUrl) {
public void setParentChannelUrl(final String parentChannelUrl) {
this.parentChannelUrl = parentChannelUrl;
}

@@ -153,7 +164,7 @@ public class ChannelInfo extends ListInfo<StreamInfoItem> {
return parentChannelAvatarUrl;
}

public void setParentChannelAvatarUrl(String parentChannelAvatarUrl) {
public void setParentChannelAvatarUrl(final String parentChannelAvatarUrl) {
this.parentChannelAvatarUrl = parentChannelAvatarUrl;
}

@@ -161,7 +172,7 @@ public class ChannelInfo extends ListInfo<StreamInfoItem> {
return avatarUrl;
}

public void setAvatarUrl(String avatarUrl) {
public void setAvatarUrl(final String avatarUrl) {
this.avatarUrl = avatarUrl;
}

@@ -169,7 +180,7 @@ public class ChannelInfo extends ListInfo<StreamInfoItem> {
return bannerUrl;
}

public void setBannerUrl(String bannerUrl) {
public void setBannerUrl(final String bannerUrl) {
this.bannerUrl = bannerUrl;
}

@@ -177,7 +188,7 @@ public class ChannelInfo extends ListInfo<StreamInfoItem> {
|
|||
return feedUrl;
|
||||
}
|
||||
|
||||
public void setFeedUrl(String feedUrl) {
|
||||
public void setFeedUrl(final String feedUrl) {
|
||||
this.feedUrl = feedUrl;
|
||||
}
|
||||
|
||||
|
@ -185,7 +196,7 @@ public class ChannelInfo extends ListInfo<StreamInfoItem> {
|
|||
return subscriberCount;
|
||||
}
|
||||
|
||||
public void setSubscriberCount(long subscriberCount) {
|
||||
public void setSubscriberCount(final long subscriberCount) {
|
||||
this.subscriberCount = subscriberCount;
|
||||
}
|
||||
|
||||
|
@ -193,7 +204,7 @@ public class ChannelInfo extends ListInfo<StreamInfoItem> {
|
|||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
public void setDescription(final String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
|
@ -201,7 +212,7 @@ public class ChannelInfo extends ListInfo<StreamInfoItem> {
|
|||
return donationLinks;
|
||||
}
|
||||
|
||||
public void setDonationLinks(String[] donationLinks) {
|
||||
public void setDonationLinks(final String[] donationLinks) {
|
||||
this.donationLinks = donationLinks;
|
||||
}
|
||||
|
||||
|
@ -209,7 +220,7 @@ public class ChannelInfo extends ListInfo<StreamInfoItem> {
|
|||
return verified;
|
||||
}
|
||||
|
||||
public void setVerified(boolean verified) {
|
||||
public void setVerified(final boolean verified) {
|
||||
this.verified = verified;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -29,7 +29,7 @@ public class ChannelInfoItem extends InfoItem {
|
|||
private long streamCount = -1;
|
||||
private boolean verified = false;
|
||||
|
||||
public ChannelInfoItem(int serviceId, String url, String name) {
|
||||
public ChannelInfoItem(final int serviceId, final String url, final String name) {
|
||||
super(InfoType.CHANNEL, serviceId, url, name);
|
||||
}
|
||||
|
||||
|
@ -37,7 +37,7 @@ public class ChannelInfoItem extends InfoItem {
|
|||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
public void setDescription(final String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
|
@ -45,23 +45,23 @@ public class ChannelInfoItem extends InfoItem {
|
|||
return subscriberCount;
|
||||
}
|
||||
|
||||
public void setSubscriberCount(long subscriber_count) {
|
||||
this.subscriberCount = subscriber_count;
|
||||
public void setSubscriberCount(final long subscriberCount) {
|
||||
this.subscriberCount = subscriberCount;
|
||||
}
|
||||
|
||||
public long getStreamCount() {
|
||||
return streamCount;
|
||||
}
|
||||
|
||||
public void setStreamCount(long stream_count) {
|
||||
this.streamCount = stream_count;
|
||||
public void setStreamCount(final long streamCount) {
|
||||
this.streamCount = streamCount;
|
||||
}
|
||||
|
||||
public boolean isVerified() {
|
||||
return verified;
|
||||
}
|
||||
|
||||
public void setVerified(boolean verified) {
|
||||
public void setVerified(final boolean verified) {
|
||||
this.verified = verified;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,45 +23,42 @@ import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
|||
* along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
public class ChannelInfoItemsCollector extends InfoItemsCollector<ChannelInfoItem, ChannelInfoItemExtractor> {
|
||||
public ChannelInfoItemsCollector(int serviceId) {
|
||||
public final class ChannelInfoItemsCollector
|
||||
extends InfoItemsCollector<ChannelInfoItem, ChannelInfoItemExtractor> {
|
||||
public ChannelInfoItemsCollector(final int serviceId) {
|
||||
super(serviceId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ChannelInfoItem extract(ChannelInfoItemExtractor extractor) throws ParsingException {
|
||||
// important information
|
||||
int serviceId = getServiceId();
|
||||
String name = extractor.getName();
|
||||
String url = extractor.getUrl();
|
||||
|
||||
ChannelInfoItem resultItem = new ChannelInfoItem(serviceId, url, name);
|
||||
|
||||
public ChannelInfoItem extract(final ChannelInfoItemExtractor extractor)
|
||||
throws ParsingException {
|
||||
final ChannelInfoItem resultItem = new ChannelInfoItem(
|
||||
getServiceId(), extractor.getUrl(), extractor.getName());
|
||||
|
||||
// optional information
|
||||
try {
|
||||
resultItem.setSubscriberCount(extractor.getSubscriberCount());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setStreamCount(extractor.getStreamCount());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setThumbnailUrl(extractor.getThumbnailUrl());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setDescription(extractor.getDescription());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setVerified(extractor.isVerified());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
|
||||
|
|
|
@ -11,7 +11,7 @@ import org.schabi.newpipe.extractor.utils.ExtractorHelper;
|
|||
|
||||
import java.io.IOException;
|
||||
|
||||
public class CommentsInfo extends ListInfo<CommentsInfoItem> {
|
||||
public final class CommentsInfo extends ListInfo<CommentsInfoItem> {
|
||||
|
||||
private CommentsInfo(
|
||||
final int serviceId,
|
||||
|
@ -56,7 +56,8 @@ public class CommentsInfo extends ListInfo<CommentsInfoItem> {
|
|||
public static InfoItemsPage<CommentsInfoItem> getMoreItems(
|
||||
final CommentsInfo commentsInfo,
|
||||
final Page page) throws ExtractionException, IOException {
|
||||
return getMoreItems(NewPipe.getService(commentsInfo.getServiceId()), commentsInfo.getUrl(), page);
|
||||
return getMoreItems(NewPipe.getService(commentsInfo.getServiceId()), commentsInfo.getUrl(),
|
||||
page);
|
||||
}
|
||||
|
||||
public static InfoItemsPage<CommentsInfoItem> getMoreItems(
|
||||
|
@ -86,7 +87,7 @@ public class CommentsInfo extends ListInfo<CommentsInfoItem> {
|
|||
|
||||
/**
|
||||
* @apiNote Warning: This method is experimental and may get removed in a future release.
|
||||
* @return <code>true</code> if the comments are disabled otherwise <code>false</code> (default)
|
||||
* @return {@code true} if the comments are disabled otherwise {@code false} (default)
|
||||
* @see CommentsExtractor#isCommentsDisabled()
|
||||
*/
|
||||
public boolean isCommentsDisabled() {
|
||||
|
@ -95,7 +96,7 @@ public class CommentsInfo extends ListInfo<CommentsInfoItem> {
|
|||
|
||||
/**
|
||||
* @apiNote Warning: This method is experimental and may get removed in a future release.
|
||||
* @param commentsDisabled <code>true</code> if the comments are disabled otherwise <code>false</code>
|
||||
* @param commentsDisabled {@code true} if the comments are disabled otherwise {@code false}
|
||||
*/
|
||||
public void setCommentsDisabled(final boolean commentsDisabled) {
|
||||
this.commentsDisabled = commentsDisabled;
|
||||
|
|
|
@ -28,7 +28,7 @@ public class CommentsInfoItem extends InfoItem {
|
|||
public static final int NO_LIKE_COUNT = -1;
|
||||
public static final int NO_STREAM_POSITION = -1;
|
||||
|
||||
public CommentsInfoItem(int serviceId, String url, String name) {
|
||||
public CommentsInfoItem(final int serviceId, final String url, final String name) {
|
||||
super(InfoType.COMMENT, serviceId, url, name);
|
||||
}
|
||||
|
||||
|
@ -36,7 +36,7 @@ public class CommentsInfoItem extends InfoItem {
|
|||
return commentId;
|
||||
}
|
||||
|
||||
public void setCommentId(String commentId) {
|
||||
public void setCommentId(final String commentId) {
|
||||
this.commentId = commentId;
|
||||
}
|
||||
|
||||
|
@ -44,7 +44,7 @@ public class CommentsInfoItem extends InfoItem {
|
|||
return commentText;
|
||||
}
|
||||
|
||||
public void setCommentText(String commentText) {
|
||||
public void setCommentText(final String commentText) {
|
||||
this.commentText = commentText;
|
||||
}
|
||||
|
||||
|
@ -52,7 +52,7 @@ public class CommentsInfoItem extends InfoItem {
|
|||
return uploaderName;
|
||||
}
|
||||
|
||||
public void setUploaderName(String uploaderName) {
|
||||
public void setUploaderName(final String uploaderName) {
|
||||
this.uploaderName = uploaderName;
|
||||
}
|
||||
|
||||
|
@ -60,7 +60,7 @@ public class CommentsInfoItem extends InfoItem {
|
|||
return uploaderAvatarUrl;
|
||||
}
|
||||
|
||||
public void setUploaderAvatarUrl(String uploaderAvatarUrl) {
|
||||
public void setUploaderAvatarUrl(final String uploaderAvatarUrl) {
|
||||
this.uploaderAvatarUrl = uploaderAvatarUrl;
|
||||
}
|
||||
|
||||
|
@ -68,7 +68,7 @@ public class CommentsInfoItem extends InfoItem {
|
|||
return uploaderUrl;
|
||||
}
|
||||
|
||||
public void setUploaderUrl(String uploaderUrl) {
|
||||
public void setUploaderUrl(final String uploaderUrl) {
|
||||
this.uploaderUrl = uploaderUrl;
|
||||
}
|
||||
|
||||
|
@ -76,7 +76,7 @@ public class CommentsInfoItem extends InfoItem {
|
|||
return textualUploadDate;
|
||||
}
|
||||
|
||||
public void setTextualUploadDate(String textualUploadDate) {
|
||||
public void setTextualUploadDate(final String textualUploadDate) {
|
||||
this.textualUploadDate = textualUploadDate;
|
||||
}
|
||||
|
||||
|
@ -85,7 +85,7 @@ public class CommentsInfoItem extends InfoItem {
|
|||
return uploadDate;
|
||||
}
|
||||
|
||||
public void setUploadDate(@Nullable DateWrapper uploadDate) {
|
||||
public void setUploadDate(@Nullable final DateWrapper uploadDate) {
|
||||
this.uploadDate = uploadDate;
|
||||
}
|
||||
|
||||
|
@ -97,7 +97,7 @@ public class CommentsInfoItem extends InfoItem {
|
|||
return likeCount;
|
||||
}
|
||||
|
||||
public void setLikeCount(int likeCount) {
|
||||
public void setLikeCount(final int likeCount) {
|
||||
this.likeCount = likeCount;
|
||||
}
|
||||
|
||||
|
@ -105,11 +105,11 @@ public class CommentsInfoItem extends InfoItem {
|
|||
return textualLikeCount;
|
||||
}
|
||||
|
||||
public void setTextualLikeCount(String textualLikeCount) {
|
||||
public void setTextualLikeCount(final String textualLikeCount) {
|
||||
this.textualLikeCount = textualLikeCount;
|
||||
}
|
||||
|
||||
public void setHeartedByUploader(boolean isHeartedByUploader) {
|
||||
public void setHeartedByUploader(final boolean isHeartedByUploader) {
|
||||
this.heartedByUploader = isHeartedByUploader;
|
||||
}
|
||||
|
||||
|
@ -121,11 +121,11 @@ public class CommentsInfoItem extends InfoItem {
|
|||
return pinned;
|
||||
}
|
||||
|
||||
public void setPinned(boolean pinned) {
|
||||
public void setPinned(final boolean pinned) {
|
||||
this.pinned = pinned;
|
||||
}
|
||||
|
||||
public void setUploaderVerified(boolean uploaderVerified) {
|
||||
public void setUploaderVerified(final boolean uploaderVerified) {
|
||||
this.uploaderVerified = uploaderVerified;
|
||||
}
|
||||
|
||||
|
@ -146,7 +146,12 @@ public class CommentsInfoItem extends InfoItem {
|
|||
return streamPosition;
|
||||
}
|
||||
|
||||
public void setReplies(@Nullable Page replies) { this.replies = replies; }
|
||||
public void setReplies(@Nullable final Page replies) {
|
||||
this.replies = replies;
|
||||
}
|
||||
|
||||
public Page getReplies() { return this.replies; }
|
||||
@Nullable
|
||||
public Page getReplies() {
|
||||
return this.replies;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,7 +18,8 @@ public interface CommentsInfoItemExtractor extends InfoItemExtractor {
|
|||
*
|
||||
* <br>
|
||||
*
|
||||
* NOTE: Currently only implemented for YT {@link YoutubeCommentsInfoItemExtractor#getLikeCount()}
|
||||
* NOTE: Currently only implemented for YT {@link
|
||||
* YoutubeCommentsInfoItemExtractor#getLikeCount()}
|
||||
* with limitations (only approximate like count is returned)
|
||||
*
|
||||
* @see StreamExtractor#getLikeCount()
|
||||
|
|
|
@ -1,101 +1,97 @@
|
|||
package org.schabi.newpipe.extractor.comments;
|
||||
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.InfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class CommentsInfoItemsCollector extends InfoItemsCollector<CommentsInfoItem, CommentsInfoItemExtractor> {
|
||||
public final class CommentsInfoItemsCollector
|
||||
extends InfoItemsCollector<CommentsInfoItem, CommentsInfoItemExtractor> {
|
||||
|
||||
public CommentsInfoItemsCollector(int serviceId) {
|
||||
public CommentsInfoItemsCollector(final int serviceId) {
|
||||
super(serviceId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CommentsInfoItem extract(CommentsInfoItemExtractor extractor) throws ParsingException {
|
||||
|
||||
// important information
|
||||
int serviceId = getServiceId();
|
||||
String url = extractor.getUrl();
|
||||
String name = extractor.getName();
|
||||
|
||||
CommentsInfoItem resultItem = new CommentsInfoItem(serviceId, url, name);
|
||||
public CommentsInfoItem extract(final CommentsInfoItemExtractor extractor)
|
||||
throws ParsingException {
|
||||
final CommentsInfoItem resultItem = new CommentsInfoItem(
|
||||
getServiceId(), extractor.getUrl(), extractor.getName());
|
||||
|
||||
// optional information
|
||||
try {
|
||||
resultItem.setCommentId(extractor.getCommentId());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setCommentText(extractor.getCommentText());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setUploaderName(extractor.getUploaderName());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setUploaderAvatarUrl(extractor.getUploaderAvatarUrl());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setUploaderUrl(extractor.getUploaderUrl());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setTextualUploadDate(extractor.getTextualUploadDate());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setUploadDate(extractor.getUploadDate());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setLikeCount(extractor.getLikeCount());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setTextualLikeCount(extractor.getTextualLikeCount());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setThumbnailUrl(extractor.getThumbnailUrl());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
|
||||
try {
|
||||
resultItem.setHeartedByUploader(extractor.isHeartedByUploader());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
|
||||
try {
|
||||
resultItem.setPinned(extractor.isPinned());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
|
||||
try {
|
||||
resultItem.setStreamPosition(extractor.getStreamPosition());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
|
||||
try {
|
||||
resultItem.setReplies(extractor.getReplies());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
|
||||
|
@ -103,10 +99,10 @@ public class CommentsInfoItemsCollector extends InfoItemsCollector<CommentsInfoI
|
|||
}
|
||||
|
||||
@Override
|
||||
public void commit(CommentsInfoItemExtractor extractor) {
|
||||
public void commit(final CommentsInfoItemExtractor extractor) {
|
||||
try {
|
||||
addItem(extract(extractor));
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,13 +19,14 @@ public abstract class Downloader {
|
|||
/**
|
||||
* Do a GET request to get the resource that the url is pointing to.<br>
|
||||
* <br>
|
||||
* This method calls {@link #get(String, Map, Localization)} with the default preferred localization. It should only be
|
||||
* used when the resource that will be fetched won't be affected by the localization.
|
||||
* This method calls {@link #get(String, Map, Localization)} with the default preferred
|
||||
* localization. It should only be used when the resource that will be fetched won't be affected
|
||||
* by the localization.
|
||||
*
|
||||
* @param url the URL that is pointing to the wanted resource
|
||||
* @return the result of the GET request
|
||||
*/
|
||||
public Response get(String url) throws IOException, ReCaptchaException {
|
||||
public Response get(final String url) throws IOException, ReCaptchaException {
|
||||
return get(url, null, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
||||
|
@ -38,7 +39,8 @@ public abstract class Downloader {
|
|||
* @param localization the source of the value of the {@code Accept-Language} header
|
||||
* @return the result of the GET request
|
||||
*/
|
||||
public Response get(String url, @Nullable Localization localization) throws IOException, ReCaptchaException {
|
||||
public Response get(final String url, @Nullable final Localization localization)
|
||||
throws IOException, ReCaptchaException {
|
||||
return get(url, null, localization);
|
||||
}
|
||||
|
||||
|
@ -50,7 +52,8 @@ public abstract class Downloader {
|
|||
* Any default headers <b>should</b> be overridden by these.
|
||||
* @return the result of the GET request
|
||||
*/
|
||||
public Response get(String url, @Nullable Map<String, List<String>> headers) throws IOException, ReCaptchaException {
|
||||
public Response get(final String url, @Nullable final Map<String, List<String>> headers)
|
||||
throws IOException, ReCaptchaException {
|
||||
return get(url, headers, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
||||
|
@ -65,7 +68,9 @@ public abstract class Downloader {
|
|||
* @param localization the source of the value of the {@code Accept-Language} header
|
||||
* @return the result of the GET request
|
||||
*/
|
||||
public Response get(String url, @Nullable Map<String, List<String>> headers, @Nullable Localization localization)
|
||||
public Response get(final String url,
|
||||
@Nullable final Map<String, List<String>> headers,
|
||||
@Nullable final Localization localization)
|
||||
throws IOException, ReCaptchaException {
|
||||
return execute(Request.newBuilder()
|
||||
.get(url)
|
||||
|
@ -80,7 +85,7 @@ public abstract class Downloader {
|
|||
* @param url the URL that is pointing to the wanted resource
|
||||
* @return the result of the HEAD request
|
||||
*/
|
||||
public Response head(String url) throws IOException, ReCaptchaException {
|
||||
public Response head(final String url) throws IOException, ReCaptchaException {
|
||||
return head(url, null);
|
||||
}
|
||||
|
||||
|
@ -92,7 +97,7 @@ public abstract class Downloader {
|
|||
* Any default headers <b>should</b> be overridden by these.
|
||||
* @return the result of the HEAD request
|
||||
*/
|
||||
public Response head(String url, @Nullable Map<String, List<String>> headers)
|
||||
public Response head(final String url, @Nullable final Map<String, List<String>> headers)
|
||||
throws IOException, ReCaptchaException {
|
||||
return execute(Request.newBuilder()
|
||||
.head(url)
|
||||
|
@ -109,7 +114,9 @@ public abstract class Downloader {
|
|||
* @param dataToSend byte array that will be sent when doing the request.
|
||||
* @return the result of the GET request
|
||||
*/
|
||||
public Response post(String url, @Nullable Map<String, List<String>> headers, @Nullable byte[] dataToSend)
|
||||
public Response post(final String url,
|
||||
@Nullable final Map<String, List<String>> headers,
|
||||
@Nullable final byte[] dataToSend)
|
||||
throws IOException, ReCaptchaException {
|
||||
return post(url, headers, dataToSend, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
@ -126,7 +133,10 @@ public abstract class Downloader {
|
|||
* @param localization the source of the value of the {@code Accept-Language} header
|
||||
* @return the result of the GET request
|
||||
*/
|
||||
public Response post(String url, @Nullable Map<String, List<String>> headers, @Nullable byte[] dataToSend, @Nullable Localization localization)
|
||||
public Response post(final String url,
|
||||
@Nullable final Map<String, List<String>> headers,
|
||||
@Nullable final byte[] dataToSend,
|
||||
@Nullable final Localization localization)
|
||||
throws IOException, ReCaptchaException {
|
||||
return execute(Request.newBuilder()
|
||||
.post(url, dataToSend)
|
||||
|
@ -140,5 +150,6 @@ public abstract class Downloader {
|
|||
*
|
||||
* @return the result of the request
|
||||
*/
|
||||
public abstract Response execute(@Nonnull Request request) throws IOException, ReCaptchaException;
|
||||
public abstract Response execute(@Nonnull Request request)
|
||||
throws IOException, ReCaptchaException;
|
||||
}
|
||||
|
|
|
@ -2,42 +2,60 @@ package org.schabi.newpipe.extractor.downloader;
|
|||
|
||||
import org.schabi.newpipe.extractor.localization.Localization;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* An object that holds request information used when {@link Downloader#execute(Request) executing} a request.
|
||||
* An object that holds request information used when {@link Downloader#execute(Request) executing}
|
||||
* a request.
|
||||
*/
|
||||
public class Request {
|
||||
private final String httpMethod;
|
||||
private final String url;
|
||||
private final Map<String, List<String>> headers;
|
||||
@Nullable private final byte[] dataToSend;
|
||||
@Nullable private final Localization localization;
|
||||
@Nullable
|
||||
private final byte[] dataToSend;
|
||||
@Nullable
|
||||
private final Localization localization;
|
||||
|
||||
public Request(String httpMethod, String url, Map<String, List<String>> headers, @Nullable byte[] dataToSend,
|
||||
@Nullable Localization localization, boolean automaticLocalizationHeader) {
|
||||
if (httpMethod == null) throw new IllegalArgumentException("Request's httpMethod is null");
|
||||
if (url == null) throw new IllegalArgumentException("Request's url is null");
|
||||
public Request(final String httpMethod,
|
||||
final String url,
|
||||
@Nullable final Map<String, List<String>> headers,
|
||||
@Nullable final byte[] dataToSend,
|
||||
@Nullable final Localization localization,
|
||||
final boolean automaticLocalizationHeader) {
|
||||
if (httpMethod == null) {
|
||||
throw new IllegalArgumentException("Request's httpMethod is null");
|
||||
}
|
||||
if (url == null) {
|
||||
throw new IllegalArgumentException("Request's url is null");
|
||||
}
|
||||
|
||||
this.httpMethod = httpMethod;
|
||||
this.url = url;
|
||||
this.dataToSend = dataToSend;
|
||||
this.localization = localization;
|
||||
|
||||
Map<String, List<String>> headersToSet = null;
|
||||
if (headers == null) headers = Collections.emptyMap();
|
||||
|
||||
final Map<String, List<String>> actualHeaders = new LinkedHashMap<>();
|
||||
if (headers != null) {
|
||||
actualHeaders.putAll(headers);
|
||||
}
|
||||
if (automaticLocalizationHeader && localization != null) {
|
||||
headersToSet = new LinkedHashMap<>(headersFromLocalization(localization));
|
||||
headersToSet.putAll(headers);
|
||||
actualHeaders.putAll(headersFromLocalization(localization));
|
||||
}
|
||||
|
||||
this.headers = Collections.unmodifiableMap(headersToSet == null ? headers : headersToSet);
|
||||
this.headers = Collections.unmodifiableMap(actualHeaders);
|
||||
}
|
||||
|
||||
private Request(Builder builder) {
|
||||
private Request(final Builder builder) {
|
||||
this(builder.httpMethod, builder.url, builder.headers, builder.dataToSend,
|
||||
builder.localization, builder.automaticLocalizationHeader);
|
||||
}
|
||||
|
@ -94,7 +112,7 @@ public class Request {
|
|||
public static final class Builder {
|
||||
private String httpMethod;
|
||||
private String url;
|
||||
private Map<String, List<String>> headers = new LinkedHashMap<>();
|
||||
private final Map<String, List<String>> headers = new LinkedHashMap<>();
|
||||
private byte[] dataToSend;
|
||||
private Localization localization;
|
||||
private boolean automaticLocalizationHeader = true;
|
||||
|
@ -105,27 +123,28 @@ public class Request {
|
|||
/**
|
||||
* A http method (i.e. {@code GET, POST, HEAD}).
|
||||
*/
|
||||
public Builder httpMethod(String httpMethod) {
|
||||
this.httpMethod = httpMethod;
|
||||
public Builder httpMethod(final String httpMethodToSet) {
|
||||
this.httpMethod = httpMethodToSet;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The URL that is pointing to the wanted resource.
|
||||
*/
|
||||
public Builder url(String url) {
|
||||
this.url = url;
|
||||
public Builder url(final String urlToSet) {
|
||||
this.url = urlToSet;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* A list of headers that will be used in the request.<br>
|
||||
* Any default headers that the implementation may have, <b>should</b> be overridden by these.
|
||||
* Any default headers that the implementation may have, <b>should</b> be overridden by
|
||||
* these.
|
||||
*/
|
||||
public Builder headers(@Nullable Map<String, List<String>> headers) {
|
||||
public Builder headers(@Nullable final Map<String, List<String>> headersToSet) {
|
||||
this.headers.clear();
|
||||
if (headers != null) {
|
||||
this.headers.putAll(headers);
|
||||
if (headersToSet != null) {
|
||||
this.headers.putAll(headersToSet);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
@ -137,8 +156,8 @@ public class Request {
|
|||
* The implementation should make note of some recommended headers
|
||||
* (for example, {@code Content-Length} in a post request).
|
||||
*/
|
||||
public Builder dataToSend(byte[] dataToSend) {
|
||||
this.dataToSend = dataToSend;
|
||||
public Builder dataToSend(final byte[] dataToSendToSet) {
|
||||
this.dataToSend = dataToSendToSet;
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -148,16 +167,16 @@ public class Request {
|
|||
* Usually the {@code Accept-Language} will be set to this value (a helper
|
||||
* method to do this easily: {@link Request#headersFromLocalization(Localization)}).
|
||||
*/
|
||||
public Builder localization(Localization localization) {
|
||||
this.localization = localization;
|
||||
public Builder localization(final Localization localizationToSet) {
|
||||
this.localization = localizationToSet;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* If localization headers should automatically be included in the request.
|
||||
*/
|
||||
public Builder automaticLocalizationHeader(boolean automaticLocalizationHeader) {
|
||||
this.automaticLocalizationHeader = automaticLocalizationHeader;
|
||||
public Builder automaticLocalizationHeader(final boolean automaticLocalizationHeaderToSet) {
|
||||
this.automaticLocalizationHeader = automaticLocalizationHeaderToSet;
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -170,22 +189,22 @@ public class Request {
|
|||
// Http Methods Utils
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
public Builder get(String url) {
|
||||
public Builder get(final String urlToSet) {
|
||||
this.httpMethod = "GET";
|
||||
this.url = url;
|
||||
this.url = urlToSet;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder head(String url) {
|
||||
public Builder head(final String urlToSet) {
|
||||
this.httpMethod = "HEAD";
|
||||
this.url = url;
|
||||
this.url = urlToSet;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder post(String url, @Nullable byte[] dataToSend) {
|
||||
public Builder post(final String urlToSet, @Nullable final byte[] dataToSendToSet) {
|
||||
this.httpMethod = "POST";
|
||||
this.url = url;
|
||||
this.dataToSend = dataToSend;
|
||||
this.url = urlToSet;
|
||||
this.dataToSend = dataToSendToSet;
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -193,13 +212,13 @@ public class Request {
|
|||
// Additional Headers Utils
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
public Builder setHeaders(String headerName, List<String> headerValueList) {
|
||||
public Builder setHeaders(final String headerName, final List<String> headerValueList) {
|
||||
this.headers.remove(headerName);
|
||||
this.headers.put(headerName, headerValueList);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addHeaders(String headerName, List<String> headerValueList) {
|
||||
public Builder addHeaders(final String headerName, final List<String> headerValueList) {
|
||||
@Nullable List<String> currentHeaderValueList = this.headers.get(headerName);
|
||||
if (currentHeaderValueList == null) {
|
||||
currentHeaderValueList = new ArrayList<>();
|
||||
|
@ -210,11 +229,11 @@ public class Request {
|
|||
return this;
|
||||
}
|
||||
|
||||
public Builder setHeader(String headerName, String headerValue) {
|
||||
public Builder setHeader(final String headerName, final String headerValue) {
|
||||
return setHeaders(headerName, Collections.singletonList(headerValue));
|
||||
}
|
||||
|
||||
public Builder addHeader(String headerName, String headerValue) {
|
||||
public Builder addHeader(final String headerName, final String headerValue) {
|
||||
return addHeaders(headerName, Collections.singletonList(headerValue));
|
||||
}
|
||||
|
||||
|
@ -226,15 +245,20 @@ public class Request {
|
|||
|
||||
@SuppressWarnings("WeakerAccess")
|
||||
@Nonnull
|
||||
public static Map<String, List<String>> headersFromLocalization(@Nullable Localization localization) {
|
||||
if (localization == null) return Collections.emptyMap();
|
||||
public static Map<String, List<String>> headersFromLocalization(
|
||||
@Nullable final Localization localization) {
|
||||
if (localization == null) {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
final Map<String, List<String>> headers = new LinkedHashMap<>();
|
||||
if (!localization.getCountryCode().isEmpty()) {
|
||||
headers.put("Accept-Language", Collections.singletonList(localization.getLocalizationCode() +
|
||||
", " + localization.getLanguageCode() + ";q=0.9"));
|
||||
headers.put("Accept-Language",
|
||||
Collections.singletonList(localization.getLocalizationCode()
|
||||
+ ", " + localization.getLanguageCode() + ";q=0.9"));
|
||||
} else {
|
||||
headers.put("Accept-Language", Collections.singletonList(localization.getLanguageCode()));
|
||||
headers.put("Accept-Language",
|
||||
Collections.singletonList(localization.getLanguageCode()));
|
||||
}
|
||||
|
||||
return headers;
|
||||
|
@ -245,15 +269,19 @@ public class Request {
|
|||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Request request = (Request) o;
|
||||
return httpMethod.equals(request.httpMethod) &&
|
||||
url.equals(request.url) &&
|
||||
headers.equals(request.headers) &&
|
||||
Arrays.equals(dataToSend, request.dataToSend) &&
|
||||
Objects.equals(localization, request.localization);
|
||||
public boolean equals(final Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final Request request = (Request) o;
|
||||
return httpMethod.equals(request.httpMethod)
|
||||
&& url.equals(request.url)
|
||||
&& headers.equals(request.headers)
|
||||
&& Arrays.equals(dataToSend, request.dataToSend)
|
||||
&& Objects.equals(localization, request.localization);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -17,11 +17,14 @@ public class Response {
|
|||
|
||||
private final String latestUrl;
|
||||
|
||||
public Response(int responseCode, String responseMessage, Map<String, List<String>> responseHeaders,
|
||||
@Nullable String responseBody, @Nullable String latestUrl) {
|
||||
public Response(final int responseCode,
|
||||
final String responseMessage,
|
||||
@Nullable final Map<String, List<String>> responseHeaders,
|
||||
@Nullable final String responseBody,
|
||||
@Nullable final String latestUrl) {
|
||||
this.responseCode = responseCode;
|
||||
this.responseMessage = responseMessage;
|
||||
this.responseHeaders = responseHeaders != null ? responseHeaders : Collections.<String, List<String>>emptyMap();
|
||||
this.responseHeaders = responseHeaders == null ? Collections.emptyMap() : responseHeaders;
|
||||
|
||||
this.responseBody = responseBody == null ? "" : responseBody;
|
||||
this.latestUrl = latestUrl;
|
||||
|
@ -60,14 +63,15 @@ public class Response {
|
|||
|
||||
/**
|
||||
* For easy access to some header value that (usually) don't repeat itself.
|
||||
* <p>For getting all the values associated to the header, use {@link #responseHeaders()} (e.g. {@code Set-Cookie}).
|
||||
* <p>For getting all the values associated to the header, use {@link #responseHeaders()} (e.g.
|
||||
* {@code Set-Cookie}).
|
||||
*
|
||||
* @param name the name of the header
|
||||
* @return the first value assigned to this header
|
||||
*/
|
||||
@Nullable
|
||||
public String getHeader(String name) {
|
||||
for (Map.Entry<String, List<String>> headerEntry : responseHeaders.entrySet()) {
|
||||
public String getHeader(final String name) {
|
||||
for (final Map.Entry<String, List<String>> headerEntry : responseHeaders.entrySet()) {
|
||||
final String key = headerEntry.getKey();
|
||||
if (key != null && key.equalsIgnoreCase(name) && !headerEntry.getValue().isEmpty()) {
|
||||
return headerEntry.getValue().get(0);
|
||||
|
|
|
@ -11,7 +11,7 @@ import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
|||
* YouTube is an example of a service that has this alternative available.
|
||||
*/
|
||||
public abstract class FeedExtractor extends ListExtractor<StreamInfoItem> {
|
||||
public FeedExtractor(StreamingService service, ListLinkHandler listLinkHandler) {
|
||||
public FeedExtractor(final StreamingService service, final ListLinkHandler listLinkHandler) {
|
||||
super(service, listLinkHandler);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,26 +13,35 @@ import java.util.List;
|
|||
|
||||
public class FeedInfo extends ListInfo<StreamInfoItem> {
|
||||
|
||||
public FeedInfo(int serviceId, String id, String url, String originalUrl, String name, List<String> contentFilter, String sortFilter) {
|
||||
public FeedInfo(final int serviceId,
|
||||
final String id,
|
||||
final String url,
|
||||
final String originalUrl,
|
||||
final String name,
|
||||
final List<String> contentFilter,
|
||||
final String sortFilter) {
|
||||
super(serviceId, id, url, originalUrl, name, contentFilter, sortFilter);
|
||||
}
|
||||
|
||||
public static FeedInfo getInfo(String url) throws IOException, ExtractionException {
|
||||
public static FeedInfo getInfo(final String url) throws IOException, ExtractionException {
|
||||
return getInfo(NewPipe.getServiceByUrl(url), url);
|
||||
}
|
||||
|
||||
public static FeedInfo getInfo(StreamingService service, String url) throws IOException, ExtractionException {
|
||||
public static FeedInfo getInfo(final StreamingService service, final String url)
|
||||
throws IOException, ExtractionException {
|
||||
final FeedExtractor extractor = service.getFeedExtractor(url);
|
||||
|
||||
if (extractor == null) {
|
||||
throw new IllegalArgumentException("Service \"" + service.getServiceInfo().getName() + "\" doesn't support FeedExtractor.");
|
||||
throw new IllegalArgumentException("Service \"" + service.getServiceInfo().getName()
|
||||
+ "\" doesn't support FeedExtractor.");
|
||||
}
|
||||
|
||||
extractor.fetchPage();
|
||||
return getInfo(extractor);
|
||||
}
|
||||
|
||||
public static FeedInfo getInfo(FeedExtractor extractor) throws IOException, ExtractionException {
|
||||
public static FeedInfo getInfo(final FeedExtractor extractor)
|
||||
throws IOException, ExtractionException {
|
||||
extractor.fetchPage();
|
||||
|
||||
final int serviceId = extractor.getServiceId();
|
||||
|
@ -43,7 +52,8 @@ public class FeedInfo extends ListInfo<StreamInfoItem> {
|
|||
|
||||
final FeedInfo info = new FeedInfo(serviceId, id, url, originalUrl, name, null, null);
|
||||
|
||||
final InfoItemsPage<StreamInfoItem> itemsPage = ExtractorHelper.getItemsPageOrLogError(info, extractor);
|
||||
final InfoItemsPage<StreamInfoItem> itemsPage
|
||||
= ExtractorHelper.getItemsPageOrLogError(info, extractor);
|
||||
info.setRelatedItems(itemsPage.getItems());
|
||||
info.setNextPage(itemsPage.getNextPage());
|
||||
|
||||
|
|
|
@ -31,9 +31,9 @@ import javax.annotation.Nonnull;
|
|||
public abstract class KioskExtractor<T extends InfoItem> extends ListExtractor<T> {
|
||||
private final String id;
|
||||
|
||||
public KioskExtractor(StreamingService streamingService,
|
||||
ListLinkHandler linkHandler,
|
||||
String kioskId) {
|
||||
public KioskExtractor(final StreamingService streamingService,
|
||||
final ListLinkHandler linkHandler,
|
||||
final String kioskId) {
|
||||
super(streamingService, linkHandler);
|
||||
this.id = kioskId;
|
||||
}
|
||||
|
@ -50,7 +50,6 @@ public abstract class KioskExtractor<T extends InfoItem> extends ListExtractor<T
|
|||
* In order to get the name of the kiosk in the desired language we have to
|
||||
* crawl if from the website.
|
||||
* @return the translated version of id
|
||||
* @throws ParsingException
|
||||
*/
|
||||
@Nonnull
|
||||
@Override
|
||||
|
|
|
@ -26,34 +26,30 @@ import org.schabi.newpipe.extractor.NewPipe;
|
|||
import org.schabi.newpipe.extractor.Page;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.utils.ExtractorHelper;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class KioskInfo extends ListInfo<StreamInfoItem> {
|
||||
private KioskInfo(int serviceId, ListLinkHandler linkHandler, String name) throws ParsingException {
|
||||
public final class KioskInfo extends ListInfo<StreamInfoItem> {
|
||||
private KioskInfo(final int serviceId, final ListLinkHandler linkHandler, final String name) {
|
||||
super(serviceId, linkHandler, name);
|
||||
}
|
||||
|
||||
public static ListExtractor.InfoItemsPage<StreamInfoItem> getMoreItems(StreamingService service,
|
||||
String url,
|
||||
Page page)
|
||||
public static ListExtractor.InfoItemsPage<StreamInfoItem> getMoreItems(
|
||||
final StreamingService service, final String url, final Page page)
|
||||
throws IOException, ExtractionException {
|
||||
KioskList kl = service.getKioskList();
|
||||
KioskExtractor extractor = kl.getExtractorByUrl(url, page);
|
||||
return extractor.getPage(page);
|
||||
return service.getKioskList().getExtractorByUrl(url, page).getPage(page);
|
||||
}
|
||||
|
||||
public static KioskInfo getInfo(String url) throws IOException, ExtractionException {
|
||||
public static KioskInfo getInfo(final String url) throws IOException, ExtractionException {
|
||||
return getInfo(NewPipe.getServiceByUrl(url), url);
|
||||
}
|
||||
|
||||
public static KioskInfo getInfo(StreamingService service, String url) throws IOException, ExtractionException {
|
||||
KioskList kl = service.getKioskList();
|
||||
KioskExtractor extractor = kl.getExtractorByUrl(url, null);
|
||||
public static KioskInfo getInfo(final StreamingService service, final String url)
|
||||
throws IOException, ExtractionException {
|
||||
final KioskExtractor extractor = service.getKioskList().getExtractorByUrl(url, null);
|
||||
extractor.fetchPage();
|
||||
return getInfo(extractor);
|
||||
}
|
||||
|
@ -63,13 +59,14 @@ public class KioskInfo extends ListInfo<StreamInfoItem> {
|
|||
*
|
||||
* @param extractor an extractor where fetchPage() was already got called on.
|
||||
*/
|
||||
public static KioskInfo getInfo(KioskExtractor extractor) throws ExtractionException {
|
||||
public static KioskInfo getInfo(final KioskExtractor extractor) throws ExtractionException {
|
||||
|
||||
final KioskInfo info = new KioskInfo(extractor.getServiceId(),
|
||||
extractor.getLinkHandler(),
|
||||
extractor.getName());
|
||||
|
||||
final ListExtractor.InfoItemsPage<StreamInfoItem> itemsPage = ExtractorHelper.getItemsPageOrLogError(info, extractor);
|
||||
final ListExtractor.InfoItemsPage<StreamInfoItem> itemsPage
|
||||
= ExtractorHelper.getItemsPageOrLogError(info, extractor);
|
||||
info.setRelatedItems(itemsPage.getItems());
|
||||
info.setNextPage(itemsPage.getNextPage());
|
||||
|
||||
|
|
|
@ -19,9 +19,9 @@ import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
|
|||
public class KioskList {
|
||||
|
||||
public interface KioskExtractorFactory {
|
||||
KioskExtractor createNewKiosk(final StreamingService streamingService,
|
||||
final String url,
|
||||
final String kioskId)
|
||||
KioskExtractor createNewKiosk(StreamingService streamingService,
|
||||
String url,
|
||||
String kioskId)
|
||||
throws ExtractionException, IOException;
|
||||
}
|
||||
|
||||
|
@ -34,8 +34,8 @@ public class KioskList {
|
|||
@Nullable
|
||||
private ContentCountry forcedContentCountry;
|
||||
|
||||
private class KioskEntry {
|
||||
public KioskEntry(KioskExtractorFactory ef, ListLinkHandlerFactory h) {
|
||||
private static class KioskEntry {
|
||||
KioskEntry(final KioskExtractorFactory ef, final ListLinkHandlerFactory h) {
|
||||
extractorFactory = ef;
|
||||
handlerFactory = h;
|
||||
}
|
||||
|
@ -44,11 +44,13 @@ public class KioskList {
|
|||
final ListLinkHandlerFactory handlerFactory;
|
||||
}
|
||||
|
||||
public KioskList(StreamingService service) {
|
||||
public KioskList(final StreamingService service) {
|
||||
this.service = service;
|
||||
}
|
||||
|
||||
public void addKioskEntry(KioskExtractorFactory extractorFactory, ListLinkHandlerFactory handlerFactory, String id)
|
||||
public void addKioskEntry(final KioskExtractorFactory extractorFactory,
|
||||
final ListLinkHandlerFactory handlerFactory,
|
||||
final String id)
|
||||
throws Exception {
|
||||
if (kioskList.get(id) != null) {
|
||||
throw new Exception("Kiosk with type " + id + " already exists.");
|
||||
|
@ -56,7 +58,7 @@ public class KioskList {
|
|||
kioskList.put(id, new KioskEntry(extractorFactory, handlerFactory));
|
||||
}
|
||||
|
||||
public void setDefaultKiosk(String kioskType) {
|
||||
public void setDefaultKiosk(final String kioskType) {
|
||||
defaultKiosk = kioskType;
|
||||
}
|
||||
|
||||
|
@ -65,19 +67,20 @@ public class KioskList {
|
|||
return getDefaultKioskExtractor(null);
|
||||
}
|
||||
|
||||
public KioskExtractor getDefaultKioskExtractor(Page nextPage)
|
||||
public KioskExtractor getDefaultKioskExtractor(final Page nextPage)
|
||||
throws ExtractionException, IOException {
|
||||
return getDefaultKioskExtractor(nextPage, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
||||
public KioskExtractor getDefaultKioskExtractor(Page nextPage, Localization localization)
|
||||
public KioskExtractor getDefaultKioskExtractor(final Page nextPage,
|
||||
final Localization localization)
|
||||
throws ExtractionException, IOException {
|
||||
if (!isNullOrEmpty(defaultKiosk)) {
|
||||
return getExtractorById(defaultKiosk, nextPage, localization);
|
||||
} else {
|
||||
if (!kioskList.isEmpty()) {
|
||||
// if not set get any entry
|
||||
Object[] keySet = kioskList.keySet().toArray();
|
||||
final Object[] keySet = kioskList.keySet().toArray();
|
||||
return getExtractorById(keySet[0].toString(), nextPage, localization);
|
||||
} else {
|
||||
return null;
|
||||
|
@ -89,22 +92,28 @@ public class KioskList {
|
|||
return defaultKiosk;
|
||||
}
|
||||
|
||||
public KioskExtractor getExtractorById(String kioskId, Page nextPage)
|
||||
public KioskExtractor getExtractorById(final String kioskId, final Page nextPage)
|
||||
throws ExtractionException, IOException {
|
||||
return getExtractorById(kioskId, nextPage, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
||||
public KioskExtractor getExtractorById(String kioskId, Page nextPage, Localization localization)
|
||||
public KioskExtractor getExtractorById(final String kioskId,
|
||||
final Page nextPage,
|
||||
final Localization localization)
|
||||
throws ExtractionException, IOException {
|
||||
KioskEntry ke = kioskList.get(kioskId);
|
||||
final KioskEntry ke = kioskList.get(kioskId);
|
||||
if (ke == null) {
|
||||
throw new ExtractionException("No kiosk found with the type: " + kioskId);
|
||||
} else {
|
||||
final KioskExtractor kioskExtractor = ke.extractorFactory.createNewKiosk(service,
|
||||
ke.handlerFactory.fromId(kioskId).getUrl(), kioskId);
|
||||
|
||||
if (forcedLocalization != null) kioskExtractor.forceLocalization(forcedLocalization);
|
||||
if (forcedContentCountry != null) kioskExtractor.forceContentCountry(forcedContentCountry);
|
||||
if (forcedLocalization != null) {
|
||||
kioskExtractor.forceLocalization(forcedLocalization);
|
||||
}
|
||||
if (forcedContentCountry != null) {
|
||||
kioskExtractor.forceContentCountry(forcedContentCountry);
|
||||
}
|
||||
|
||||
return kioskExtractor;
|
||||
}
|
||||
|
@ -114,15 +123,17 @@ public class KioskList {
|
|||
return kioskList.keySet();
|
||||
}
|
||||
|
||||
public KioskExtractor getExtractorByUrl(String url, Page nextPage)
|
||||
public KioskExtractor getExtractorByUrl(final String url, final Page nextPage)
|
||||
throws ExtractionException, IOException {
|
||||
return getExtractorByUrl(url, nextPage, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
||||
public KioskExtractor getExtractorByUrl(String url, Page nextPage, Localization localization)
|
||||
public KioskExtractor getExtractorByUrl(final String url,
|
||||
final Page nextPage,
|
||||
final Localization localization)
|
||||
throws ExtractionException, IOException {
|
||||
for (Map.Entry<String, KioskEntry> e : kioskList.entrySet()) {
|
||||
KioskEntry ke = e.getValue();
|
||||
for (final Map.Entry<String, KioskEntry> e : kioskList.entrySet()) {
|
||||
final KioskEntry ke = e.getValue();
|
||||
if (ke.handlerFactory.acceptUrl(url)) {
|
||||
return getExtractorById(ke.handlerFactory.getId(url), nextPage, localization);
|
||||
}
|
||||
|
@ -130,15 +141,15 @@ public class KioskList {
|
|||
throw new ExtractionException("Could not find a kiosk that fits to the url: " + url);
|
||||
}
|
||||
|
||||
public ListLinkHandlerFactory getListLinkHandlerFactoryByType(String type) {
|
||||
public ListLinkHandlerFactory getListLinkHandlerFactoryByType(final String type) {
|
||||
return kioskList.get(type).handlerFactory;
|
||||
}
|
||||
|
||||
public void forceLocalization(@Nullable Localization localization) {
|
||||
public void forceLocalization(@Nullable final Localization localization) {
|
||||
this.forcedLocalization = localization;
|
||||
}
|
||||
|
||||
public void forceContentCountry(@Nullable ContentCountry contentCountry) {
|
||||
public void forceContentCountry(@Nullable final ContentCountry contentCountry) {
|
||||
this.forcedContentCountry = contentCountry;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,13 +10,13 @@ public class LinkHandler implements Serializable {
|
|||
protected final String url;
|
||||
protected final String id;
|
||||
|
||||
public LinkHandler(String originalUrl, String url, String id) {
|
||||
public LinkHandler(final String originalUrl, final String url, final String id) {
|
||||
this.originalUrl = originalUrl;
|
||||
this.url = url;
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public LinkHandler(LinkHandler handler) {
|
||||
public LinkHandler(final LinkHandler handler) {
|
||||
this(handler.originalUrl, handler.url, handler.id);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
package org.schabi.newpipe.extractor.linkhandler;
|
||||
|
||||
import org.schabi.newpipe.extractor.exceptions.FoundAdException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
|
@ -31,10 +30,12 @@ public abstract class LinkHandlerFactory {
|
|||
///////////////////////////////////
|
||||
|
||||
public abstract String getId(String url) throws ParsingException;
|
||||
public abstract String getUrl(String id) throws ParsingException;
|
||||
public abstract boolean onAcceptUrl(final String url) throws ParsingException;
|
||||
|
||||
public String getUrl(String id, String baseUrl) throws ParsingException {
|
||||
public abstract String getUrl(String id) throws ParsingException;
|
||||
|
||||
public abstract boolean onAcceptUrl(String url) throws ParsingException;
|
||||
|
||||
public String getUrl(final String id, final String baseUrl) throws ParsingException {
|
||||
return getUrl(id);
|
||||
}
|
||||
|
||||
|
@ -46,6 +47,7 @@ public abstract class LinkHandlerFactory {
|
|||
* Builds a {@link LinkHandler} from a url.<br>
|
||||
* Be sure to call {@link Utils#followGoogleRedirectIfNeeded(String)} on the url if overriding
|
||||
* this function.
|
||||
*
|
||||
* @param url the url to extract path and id from
|
||||
* @return a {@link LinkHandler} complete with information
|
||||
*/
|
||||
|
@ -64,12 +66,15 @@ public abstract class LinkHandlerFactory {
|
|||
* extracted?).<br>
|
||||
* So do not call {@link Utils#followGoogleRedirectIfNeeded(String)} on the URL if overriding
|
||||
* this function, since that should be done in {@link #fromUrl(String)}.
|
||||
*
|
||||
* @param url the URL without Google search redirects to extract id from
|
||||
* @param baseUrl the base URL
|
||||
* @return a {@link LinkHandler} complete with information
|
||||
*/
|
||||
public LinkHandler fromUrl(String url, String baseUrl) throws ParsingException {
|
||||
if (url == null) throw new IllegalArgumentException("URL cannot be null");
|
||||
public LinkHandler fromUrl(final String url, final String baseUrl) throws ParsingException {
|
||||
if (url == null) {
|
||||
throw new IllegalArgumentException("URL cannot be null");
|
||||
}
|
||||
if (!acceptUrl(url)) {
|
||||
throw new ParsingException("URL not accepted: " + url);
|
||||
}
|
||||
|
@ -78,14 +83,18 @@ public abstract class LinkHandlerFactory {
|
|||
return new LinkHandler(url, getUrl(id, baseUrl), id);
|
||||
}
|
||||
|
||||
public LinkHandler fromId(String id) throws ParsingException {
|
||||
if (id == null) throw new IllegalArgumentException("id can not be null");
|
||||
public LinkHandler fromId(final String id) throws ParsingException {
|
||||
if (id == null) {
|
||||
throw new IllegalArgumentException("id can not be null");
|
||||
}
|
||||
final String url = getUrl(id);
|
||||
return new LinkHandler(url, url, id);
|
||||
}
|
||||
|
||||
public LinkHandler fromId(String id, String baseUrl) throws ParsingException {
|
||||
if (id == null) throw new IllegalArgumentException("id can not be null");
|
||||
public LinkHandler fromId(final String id, final String baseUrl) throws ParsingException {
|
||||
if (id == null) {
|
||||
throw new IllegalArgumentException("id can not be null");
|
||||
}
|
||||
final String url = getUrl(id, baseUrl);
|
||||
return new LinkHandler(url, url, id);
|
||||
}
|
||||
|
@ -96,11 +105,6 @@ public abstract class LinkHandlerFactory {
|
|||
* Return false if this service shall not allow to be called through ACTIONs.
|
||||
*/
|
||||
public boolean acceptUrl(final String url) throws ParsingException {
|
||||
try {
|
||||
return onAcceptUrl(url);
|
||||
} catch (FoundAdException fe) {
|
||||
throw fe;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
package org.schabi.newpipe.extractor.linkhandler;
|
||||
|
||||
import static org.schabi.newpipe.extractor.utils.Utils.EMPTY_STRING;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
|
@@ -7,17 +9,17 @@ public class ListLinkHandler extends LinkHandler {
protected final List<String> contentFilters;
protected final String sortFilter;

public ListLinkHandler(String originalUrl,
String url,
String id,
List<String> contentFilters,
String sortFilter) {
public ListLinkHandler(final String originalUrl,
final String url,
final String id,
final List<String> contentFilters,
final String sortFilter) {
super(originalUrl, url, id);
this.contentFilters = Collections.unmodifiableList(contentFilters);
this.sortFilter = sortFilter;
}

public ListLinkHandler(ListLinkHandler handler) {
public ListLinkHandler(final ListLinkHandler handler) {
this(handler.originalUrl,
handler.url,
handler.id,
@@ -25,14 +27,12 @@ public class ListLinkHandler extends LinkHandler {
handler.sortFilter);
}

public ListLinkHandler(LinkHandler handler,
List<String> contentFilters,
String sortFilter) {
public ListLinkHandler(final LinkHandler handler) {
this(handler.originalUrl,
handler.url,
handler.id,
contentFilters,
sortFilter);
Collections.emptyList(),
EMPTY_STRING);
}

public List<String> getContentFilters() {
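Illustrative sketch (not part of the commit): what the new single-argument constructor defaults to, compared with the removed (LinkHandler, contentFilters, sortFilter) overload. URLs and id are invented; existing imports are assumed.

    static ListLinkHandler wrapExample() {
        final LinkHandler base = new LinkHandler(
                "https://example.org/playlist?list=abc",  // original URL (example)
                "https://example.org/playlist?list=abc",  // canonical URL (example)
                "abc");                                   // id (example)

        // Previously callers wrote: new ListLinkHandler(base, Collections.emptyList(), "")
        final ListLinkHandler wrapped = new ListLinkHandler(base);
        // wrapped.getContentFilters() is an empty, unmodifiable list and the sort
        // filter is EMPTY_STRING, supplied by the constructor shown above.
        return wrapped;
    }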
@ -4,7 +4,6 @@ import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
|||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
public abstract class ListLinkHandlerFactory extends LinkHandlerFactory {
|
||||
|
@ -13,17 +12,13 @@ public abstract class ListLinkHandlerFactory extends LinkHandlerFactory {
|
|||
// To Override
|
||||
///////////////////////////////////
|
||||
|
||||
public List<String> getContentFilter(String url) throws ParsingException {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
public abstract String getUrl(String id, List<String> contentFilter, String sortFilter)
|
||||
throws ParsingException;
|
||||
|
||||
public String getSortFilter(String url) throws ParsingException {
|
||||
return "";
|
||||
}
|
||||
|
||||
public abstract String getUrl(String id, List<String> contentFilter, String sortFilter) throws ParsingException;
|
||||
|
||||
public String getUrl(String id, List<String> contentFilter, String sortFilter, String baseUrl) throws ParsingException {
|
||||
public String getUrl(final String id,
|
||||
final List<String> contentFilter,
|
||||
final String sortFilter,
|
||||
final String baseUrl) throws ParsingException {
|
||||
return getUrl(id, contentFilter, sortFilter);
|
||||
}
|
||||
|
||||
|
@ -39,55 +34,58 @@ public abstract class ListLinkHandlerFactory extends LinkHandlerFactory {
|
|||
}
|
||||
|
||||
@Override
|
||||
public ListLinkHandler fromUrl(String url, String baseUrl) throws ParsingException {
|
||||
if (url == null) throw new IllegalArgumentException("url may not be null");
|
||||
public ListLinkHandler fromUrl(final String url, final String baseUrl) throws ParsingException {
|
||||
if (url == null) {
|
||||
throw new IllegalArgumentException("url may not be null");
|
||||
}
|
||||
|
||||
return new ListLinkHandler(super.fromUrl(url, baseUrl), getContentFilter(url), getSortFilter(url));
|
||||
return new ListLinkHandler(super.fromUrl(url, baseUrl));
|
||||
}
|
||||
|
||||
@Override
|
||||
public ListLinkHandler fromId(String id) throws ParsingException {
|
||||
return new ListLinkHandler(super.fromId(id), new ArrayList<String>(0), "");
|
||||
public ListLinkHandler fromId(final String id) throws ParsingException {
|
||||
return new ListLinkHandler(super.fromId(id));
|
||||
}
|
||||
|
||||
@Override
|
||||
public ListLinkHandler fromId(String id, String baseUrl) throws ParsingException {
|
||||
return new ListLinkHandler(super.fromId(id, baseUrl), new ArrayList<String>(0), "");
|
||||
public ListLinkHandler fromId(final String id, final String baseUrl) throws ParsingException {
|
||||
return new ListLinkHandler(super.fromId(id, baseUrl));
|
||||
}
|
||||
|
||||
public ListLinkHandler fromQuery(String id,
|
||||
List<String> contentFilters,
|
||||
String sortFilter) throws ParsingException {
|
||||
public ListLinkHandler fromQuery(final String id,
|
||||
final List<String> contentFilters,
|
||||
final String sortFilter) throws ParsingException {
|
||||
final String url = getUrl(id, contentFilters, sortFilter);
|
||||
return new ListLinkHandler(url, url, id, contentFilters, sortFilter);
|
||||
}
|
||||
|
||||
public ListLinkHandler fromQuery(String id,
|
||||
List<String> contentFilters,
|
||||
String sortFilter, String baseUrl) throws ParsingException {
|
||||
public ListLinkHandler fromQuery(final String id,
|
||||
final List<String> contentFilters,
|
||||
final String sortFilter,
|
||||
final String baseUrl) throws ParsingException {
|
||||
final String url = getUrl(id, contentFilters, sortFilter, baseUrl);
|
||||
return new ListLinkHandler(url, url, id, contentFilters, sortFilter);
|
||||
}
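Illustrative sketch (not part of the commit): the typical fromQuery() call path once a concrete ListLinkHandlerFactory exists. The id and the "channels" filter value are examples, not values taken from this diff; existing imports are assumed.

    static ListLinkHandler queryExample(final ListLinkHandlerFactory factory)
            throws ParsingException {
        // Builds the URL via getUrl(id, contentFilter, sortFilter) and uses it for both
        // the original and the canonical URL of the resulting handler.
        return factory.fromQuery(
                "abc123",                               // id (example)
                Collections.singletonList("channels"),  // content filter (example)
                "");                                    // no sort filter
    }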
|
||||
|
||||
|
||||
/**
|
||||
* For making ListLinkHandlerFactory compatible with LinkHandlerFactory we need to override this,
|
||||
* however it should not be overridden by the actual implementation.
|
||||
* For making ListLinkHandlerFactory compatible with LinkHandlerFactory we need to override
|
||||
* this, however it should not be overridden by the actual implementation.
|
||||
*
|
||||
* @param id
|
||||
* @return the url corresponding to id without any filters applied
|
||||
*/
|
||||
public String getUrl(String id) throws ParsingException {
|
||||
return getUrl(id, new ArrayList<String>(0), "");
|
||||
public String getUrl(final String id) throws ParsingException {
|
||||
return getUrl(id, new ArrayList<>(0), "");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUrl(String id, String baseUrl) throws ParsingException {
|
||||
return getUrl(id, new ArrayList<String>(0), "", baseUrl);
|
||||
public String getUrl(final String id, final String baseUrl) throws ParsingException {
|
||||
return getUrl(id, new ArrayList<>(0), "", baseUrl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Will returns content filter the corresponding extractor can handle like "channels", "videos", "music", etc.
|
||||
* Will returns content filter the corresponding extractor can handle like "channels", "videos",
|
||||
* "music", etc.
|
||||
*
|
||||
* @return filter that can be applied when building a query for getting a list
|
||||
*/
|
||||
|
@ -96,7 +94,8 @@ public abstract class ListLinkHandlerFactory extends LinkHandlerFactory {
|
|||
}
|
||||
|
||||
/**
|
||||
* Will returns sort filter the corresponding extractor can handle like "A-Z", "oldest first", "size", etc.
|
||||
* Will returns sort filter the corresponding extractor can handle like "A-Z", "oldest first",
|
||||
* "size", etc.
|
||||
*
|
||||
* @return filter that can be applied when building a query for getting a list
|
||||
*/
@@ -4,15 +4,15 @@ import java.util.List;

public class SearchQueryHandler extends ListLinkHandler {

public SearchQueryHandler(String originalUrl,
String url,
String searchString,
List<String> contentFilters,
String sortFilter) {
public SearchQueryHandler(final String originalUrl,
final String url,
final String searchString,
final List<String> contentFilters,
final String sortFilter) {
super(originalUrl, url, searchString, contentFilters, sortFilter);
}

public SearchQueryHandler(ListLinkHandler handler) {
public SearchQueryHandler(final ListLinkHandler handler) {
this(handler.originalUrl,
handler.url,
handler.id,
|
@ -1,12 +1,12 @@
|
|||
package org.schabi.newpipe.extractor.linkhandler;
|
||||
|
||||
import static org.schabi.newpipe.extractor.utils.Utils.EMPTY_STRING;
|
||||
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static org.schabi.newpipe.extractor.utils.Utils.EMPTY_STRING;
|
||||
|
||||
public abstract class SearchQueryHandlerFactory extends ListLinkHandlerFactory {
|
||||
|
||||
///////////////////////////////////
|
||||
|
@ -14,9 +14,11 @@ public abstract class SearchQueryHandlerFactory extends ListLinkHandlerFactory {
|
|||
///////////////////////////////////
|
||||
|
||||
@Override
|
||||
public abstract String getUrl(String query, List<String> contentFilter, String sortFilter) throws ParsingException;
|
||||
public abstract String getUrl(String query, List<String> contentFilter, String sortFilter)
|
||||
throws ParsingException;
|
||||
|
||||
public String getSearchString(String url) {
|
||||
@SuppressWarnings("unused")
|
||||
public String getSearchString(final String url) {
|
||||
return "";
|
||||
}
|
||||
|
||||
|
@ -25,28 +27,26 @@ public abstract class SearchQueryHandlerFactory extends ListLinkHandlerFactory {
|
|||
///////////////////////////////////
|
||||
|
||||
@Override
|
||||
public String getId(String url) {
|
||||
public String getId(final String url) {
|
||||
return getSearchString(url);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchQueryHandler fromQuery(String query,
|
||||
List<String> contentFilter,
|
||||
String sortFilter) throws ParsingException {
|
||||
public SearchQueryHandler fromQuery(final String query,
|
||||
final List<String> contentFilter,
|
||||
final String sortFilter) throws ParsingException {
|
||||
return new SearchQueryHandler(super.fromQuery(query, contentFilter, sortFilter));
|
||||
}
|
||||
|
||||
public SearchQueryHandler fromQuery(String query) throws ParsingException {
|
||||
return fromQuery(query, new ArrayList<>(0), EMPTY_STRING);
|
||||
public SearchQueryHandler fromQuery(final String query) throws ParsingException {
|
||||
return fromQuery(query, Collections.emptyList(), EMPTY_STRING);
|
||||
}
|
||||
|
||||
/**
|
||||
* It's not mandatory for NewPipe to handle the Url
|
||||
*
|
||||
* @param url
|
||||
*/
|
||||
@Override
|
||||
public boolean onAcceptUrl(String url) {
|
||||
public boolean onAcceptUrl(final String url) {
|
||||
return false;
|
||||
}
|
||||
}
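Illustrative sketch (not part of the commit): the defaults this class now provides. fromQuery(query) falls back to Collections.emptyList() and EMPTY_STRING, getId(url) delegates to getSearchString(url), and onAcceptUrl() always returns false, so search handlers are not created from pasted URLs or ACTIONs. Existing imports are assumed.

    static SearchQueryHandler searchHandlerExample(final SearchQueryHandlerFactory factory)
            throws ParsingException {
        // Equivalent to factory.fromQuery(query, Collections.emptyList(), EMPTY_STRING)
        return factory.fromQuery("red panda documentary");
    }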
@ -1,6 +1,7 @@
|
|||
package org.schabi.newpipe.extractor.localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
|
@ -9,23 +10,26 @@ import java.util.List;
|
|||
/**
|
||||
* Represents a country that should be used when fetching content.
|
||||
* <p>
|
||||
* YouTube, for example, give different results in their feed depending on which country is selected.
|
||||
* YouTube, for example, give different results in their feed depending on which country is
|
||||
* selected.
|
||||
* </p>
|
||||
*/
|
||||
public class ContentCountry implements Serializable {
|
||||
public static final ContentCountry DEFAULT = new ContentCountry(Localization.DEFAULT.getCountryCode());
|
||||
public static final ContentCountry DEFAULT =
|
||||
new ContentCountry(Localization.DEFAULT.getCountryCode());
|
||||
|
||||
@Nonnull private final String countryCode;
|
||||
@Nonnull
|
||||
private final String countryCode;
|
||||
|
||||
public static List<ContentCountry> listFrom(String... countryCodeList) {
|
||||
public static List<ContentCountry> listFrom(final String... countryCodeList) {
|
||||
final List<ContentCountry> toReturn = new ArrayList<>();
|
||||
for (String countryCode : countryCodeList) {
|
||||
for (final String countryCode : countryCodeList) {
|
||||
toReturn.add(new ContentCountry(countryCode));
|
||||
}
|
||||
return Collections.unmodifiableList(toReturn);
|
||||
}
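Illustrative sketch (not part of the commit): what listFrom() produces; the country codes are arbitrary examples.

    static List<ContentCountry> countriesExample() {
        final List<ContentCountry> countries = ContentCountry.listFrom("DE", "GB", "IN");
        // The result is wrapped with Collections.unmodifiableList(), so
        // countries.add(new ContentCountry("US")) would throw UnsupportedOperationException.
        return countries;
    }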
|
||||
|
||||
public ContentCountry(@Nonnull String countryCode) {
|
||||
public ContentCountry(@Nonnull final String countryCode) {
|
||||
this.countryCode = countryCode;
|
||||
}
|
||||
|
||||
|
@ -40,11 +44,15 @@ public class ContentCountry implements Serializable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (!(o instanceof ContentCountry)) return false;
|
||||
public boolean equals(final Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (!(o instanceof ContentCountry)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ContentCountry that = (ContentCountry) o;
|
||||
final ContentCountry that = (ContentCountry) o;
|
||||
|
||||
return countryCode.equals(that.countryCode);
|
||||
}
@ -9,7 +9,8 @@ import java.util.Calendar;
|
|||
import java.util.GregorianCalendar;
|
||||
|
||||
/**
|
||||
* A wrapper class that provides a field to describe if the date/time is precise or just an approximation.
|
||||
* A wrapper class that provides a field to describe if the date/time is precise or just an
|
||||
* approximation.
|
||||
*/
|
||||
public class DateWrapper implements Serializable {
|
||||
@Nonnull
|
||||
|
@ -20,7 +21,8 @@ public class DateWrapper implements Serializable {
|
|||
* @deprecated Use {@link #DateWrapper(OffsetDateTime)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public DateWrapper(@Nonnull Calendar calendar) {
|
||||
public DateWrapper(@Nonnull final Calendar calendar) {
|
||||
//noinspection deprecation
|
||||
this(calendar, false);
|
||||
}
|
||||
|
||||
|
@ -28,15 +30,16 @@ public class DateWrapper implements Serializable {
|
|||
* @deprecated Use {@link #DateWrapper(OffsetDateTime, boolean)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public DateWrapper(@Nonnull Calendar calendar, boolean isApproximation) {
|
||||
public DateWrapper(@Nonnull final Calendar calendar, final boolean isApproximation) {
|
||||
this(OffsetDateTime.ofInstant(calendar.toInstant(), ZoneOffset.UTC), isApproximation);
|
||||
}
|
||||
|
||||
public DateWrapper(@Nonnull OffsetDateTime offsetDateTime) {
|
||||
public DateWrapper(@Nonnull final OffsetDateTime offsetDateTime) {
|
||||
this(offsetDateTime, false);
|
||||
}
|
||||
|
||||
public DateWrapper(@Nonnull OffsetDateTime offsetDateTime, boolean isApproximation) {
|
||||
public DateWrapper(@Nonnull final OffsetDateTime offsetDateTime,
|
||||
final boolean isApproximation) {
|
||||
this.offsetDateTime = offsetDateTime.withOffsetSameInstant(ZoneOffset.UTC);
|
||||
this.isApproximation = isApproximation;
|
||||
}
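Illustrative sketch (not part of the commit): the UTC normalisation performed by the constructor above. The timestamp is an arbitrary example and existing imports are assumed.

    static DateWrapper utcExample() {
        // Offsets are normalised to UTC by withOffsetSameInstant(ZoneOffset.UTC):
        final OffsetDateTime oneHourEast =
                OffsetDateTime.of(2022, 3, 1, 12, 0, 0, 0, ZoneOffset.ofHours(1));
        // The wrapped value represents 2022-03-01T11:00Z, the same instant expressed in UTC.
        return new DateWrapper(oneHourEast, false);
    }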
|
||||
|
@ -60,8 +63,8 @@ public class DateWrapper implements Serializable {
|
|||
}
|
||||
|
||||
/**
|
||||
* @return if the date is considered is precise or just an approximation (e.g. service only returns an approximation
|
||||
* like 2 weeks ago instead of a precise date).
|
||||
* @return if the date is considered is precise or just an approximation (e.g. service only
|
||||
* returns an approximation like 2 weeks ago instead of a precise date).
|
||||
*/
|
||||
public boolean isApproximation() {
|
||||
return isApproximation;
@ -4,8 +4,15 @@ import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
|||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
public class Localization implements Serializable {
|
||||
public static final Localization DEFAULT = new Localization("en", "GB");
|
||||
|
@ -16,11 +23,12 @@ public class Localization implements Serializable {
|
|||
private final String countryCode;
|
||||
|
||||
/**
|
||||
* @param localizationCodeList a list of localization code, formatted like {@link #getLocalizationCode()}
|
||||
* @param localizationCodeList a list of localization code, formatted like {@link
|
||||
* #getLocalizationCode()}
|
||||
*/
|
||||
public static List<Localization> listFrom(String... localizationCodeList) {
|
||||
public static List<Localization> listFrom(final String... localizationCodeList) {
|
||||
final List<Localization> toReturn = new ArrayList<>();
|
||||
for (String localizationCode : localizationCodeList) {
|
||||
for (final String localizationCode : localizationCodeList) {
|
||||
toReturn.add(fromLocalizationCode(localizationCode));
|
||||
}
|
||||
return Collections.unmodifiableList(toReturn);
|
||||
|
@ -29,10 +37,11 @@ public class Localization implements Serializable {
|
|||
/**
|
||||
* @param localizationCode a localization code, formatted like {@link #getLocalizationCode()}
|
||||
*/
|
||||
public static Localization fromLocalizationCode(String localizationCode) {
|
||||
public static Localization fromLocalizationCode(final String localizationCode) {
|
||||
final int indexSeparator = localizationCode.indexOf("-");
|
||||
|
||||
final String languageCode, countryCode;
|
||||
final String languageCode;
|
||||
final String countryCode;
|
||||
if (indexSeparator != -1) {
|
||||
languageCode = localizationCode.substring(0, indexSeparator);
|
||||
countryCode = localizationCode.substring(indexSeparator + 1);
|
||||
|
@ -44,15 +53,16 @@ public class Localization implements Serializable {
|
|||
return new Localization(languageCode, countryCode);
|
||||
}
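Illustrative sketch (not part of the commit): how the indexOf("-") split above plays out. No claim is made about codes without a separator, since that branch lies outside this hunk.

    static void localizationCodeExample() {
        final Localization enGb = Localization.fromLocalizationCode("en-GB");
        // indexSeparator != -1: languageCode "en", countryCode "GB"
        final Localization fr = Localization.fromLocalizationCode("fr");
        // no separator: handled by the else branch that is not shown in this hunk
    }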
|
||||
|
||||
public Localization(@Nonnull String languageCode, @Nullable String countryCode) {
|
||||
public Localization(@Nonnull final String languageCode, @Nullable final String countryCode) {
|
||||
this.languageCode = languageCode;
|
||||
this.countryCode = countryCode;
|
||||
}
|
||||
|
||||
public Localization(@Nonnull String languageCode) {
|
||||
public Localization(@Nonnull final String languageCode) {
|
||||
this(languageCode, null);
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public String getLanguageCode() {
|
||||
return languageCode;
|
||||
}
|
||||
|
@ -66,7 +76,7 @@ public class Localization implements Serializable {
|
|||
return new Locale(getLanguageCode(), getCountryCode());
|
||||
}
|
||||
|
||||
public static Localization fromLocale(@Nonnull Locale locale) {
|
||||
public static Localization fromLocale(@Nonnull final Locale locale) {
|
||||
return new Localization(locale.getLanguage(), locale.getCountry());
|
||||
}
|
||||
|
||||
|
@ -84,14 +94,18 @@ public class Localization implements Serializable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (!(o instanceof Localization)) return false;
|
||||
public boolean equals(final Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (!(o instanceof Localization)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Localization that = (Localization) o;
|
||||
final Localization that = (Localization) o;
|
||||
|
||||
return languageCode.equals(that.languageCode) &&
|
||||
Objects.equals(countryCode, that.countryCode);
|
||||
return languageCode.equals(that.languageCode)
|
||||
&& Objects.equals(countryCode, that.countryCode);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -108,17 +122,19 @@ public class Localization implements Serializable {
|
|||
* @param code a three letter language code
|
||||
* @return the Locale corresponding
|
||||
*/
|
||||
public static Locale getLocaleFromThreeLetterCode(@Nonnull String code) throws ParsingException {
|
||||
public static Locale getLocaleFromThreeLetterCode(@Nonnull final String code)
|
||||
throws ParsingException {
|
||||
final String[] languages = Locale.getISOLanguages();
|
||||
final Map<String, Locale> localeMap = new HashMap<>(languages.length);
|
||||
for (String language : languages) {
|
||||
for (final String language : languages) {
|
||||
final Locale locale = new Locale(language);
|
||||
localeMap.put(locale.getISO3Language(), locale);
|
||||
}
|
||||
if (localeMap.containsKey(code)) {
|
||||
return localeMap.get(code);
|
||||
} else {
|
||||
throw new ParsingException("Could not get Locale from this three letter language code" + code);
|
||||
throw new ParsingException(
|
||||
"Could not get Locale from this three letter language code" + code);
|
||||
}
|
||||
}
|
||||
}
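Illustrative sketch (not part of the commit): the ISO 639-2 lookup built above maps three-letter codes back to Locale instances.

    static Locale threeLetterExample() throws ParsingException {
        // "deu" maps back to new Locale("de") via the ISO3 lookup table built above;
        // an unknown code such as "zzz" raises the ParsingException from the else branch.
        return Localization.getLocaleFromThreeLetterCode("deu");
    }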
|
||||
|
|
|
@ -25,16 +25,17 @@ public class TimeAgoParser {
|
|||
* Instantiate a new {@link TimeAgoParser} every time you extract a new batch of items.
|
||||
* </p>
|
||||
*
|
||||
* @param patternsHolder An object that holds the "time ago" patterns, special cases, and the language word separator.
|
||||
* @param patternsHolder An object that holds the "time ago" patterns, special cases, and the
|
||||
* language word separator.
|
||||
*/
|
||||
public TimeAgoParser(PatternsHolder patternsHolder) {
|
||||
public TimeAgoParser(final PatternsHolder patternsHolder) {
|
||||
this.patternsHolder = patternsHolder;
|
||||
now = OffsetDateTime.now(ZoneOffset.UTC);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a textual date in the format '2 days ago' into a Calendar representation which is then wrapped in a
|
||||
* {@link DateWrapper} object.
|
||||
* Parses a textual date in the format '2 days ago' into a Calendar representation which is then
|
||||
* wrapped in a {@link DateWrapper} object.
|
||||
* <p>
|
||||
* Beginning with days ago, the date is considered as an approximation.
|
||||
*
|
||||
|
@ -42,10 +43,12 @@ public class TimeAgoParser {
|
|||
* @return The parsed time (can be approximated)
|
||||
* @throws ParsingException if the time unit could not be recognized
|
||||
*/
|
||||
public DateWrapper parse(String textualDate) throws ParsingException {
|
||||
for (Map.Entry<ChronoUnit, Map<String, Integer>> caseUnitEntry : patternsHolder.specialCases().entrySet()) {
|
||||
public DateWrapper parse(final String textualDate) throws ParsingException {
|
||||
for (final Map.Entry<ChronoUnit, Map<String, Integer>> caseUnitEntry
|
||||
: patternsHolder.specialCases().entrySet()) {
|
||||
final ChronoUnit chronoUnit = caseUnitEntry.getKey();
|
||||
for (Map.Entry<String, Integer> caseMapToAmountEntry : caseUnitEntry.getValue().entrySet()) {
|
||||
for (final Map.Entry<String, Integer> caseMapToAmountEntry
|
||||
: caseUnitEntry.getValue().entrySet()) {
|
||||
final String caseText = caseMapToAmountEntry.getKey();
|
||||
final Integer caseAmount = caseMapToAmountEntry.getValue();
|
||||
|
||||
|
@ -58,7 +61,7 @@ public class TimeAgoParser {
|
|||
int timeAgoAmount;
|
||||
try {
|
||||
timeAgoAmount = parseTimeAgoAmount(textualDate);
|
||||
} catch (NumberFormatException e) {
|
||||
} catch (final NumberFormatException e) {
|
||||
// If there is no valid number in the textual date,
|
||||
// assume it is 1 (as in 'a second ago').
|
||||
timeAgoAmount = 1;
|
||||
|
@ -68,16 +71,16 @@ public class TimeAgoParser {
|
|||
return getResultFor(timeAgoAmount, chronoUnit);
|
||||
}
|
||||
|
||||
private int parseTimeAgoAmount(String textualDate) throws NumberFormatException {
|
||||
String timeValueStr = textualDate.replaceAll("\\D+", "");
|
||||
return Integer.parseInt(timeValueStr);
|
||||
private int parseTimeAgoAmount(final String textualDate) throws NumberFormatException {
|
||||
return Integer.parseInt(textualDate.replaceAll("\\D+", ""));
|
||||
}
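Illustrative sketch (not part of the commit): what the digit-stripping in parseTimeAgoAmount() does with typical inputs.

    static int timeAgoAmountExample() {
        // "3 days ago": digits remain, so the parsed amount is 3.
        final int three = Integer.parseInt("3 days ago".replaceAll("\\D+", ""));
        // "a second ago": no digits remain, parseInt("") throws NumberFormatException,
        // and the caller above falls back to an amount of 1.
        return three;
    }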
|
||||
|
||||
private ChronoUnit parseChronoUnit(String textualDate) throws ParsingException {
|
||||
for (Map.Entry<ChronoUnit, Collection<String>> entry : patternsHolder.asMap().entrySet()) {
|
||||
private ChronoUnit parseChronoUnit(final String textualDate) throws ParsingException {
|
||||
for (final Map.Entry<ChronoUnit, Collection<String>> entry
|
||||
: patternsHolder.asMap().entrySet()) {
|
||||
final ChronoUnit chronoUnit = entry.getKey();
|
||||
|
||||
for (String agoPhrase : entry.getValue()) {
|
||||
for (final String agoPhrase : entry.getValue()) {
|
||||
if (textualDateMatches(textualDate, agoPhrase)) {
|
||||
return chronoUnit;
|
||||
}
|
||||
|
@ -87,7 +90,7 @@ public class TimeAgoParser {
|
|||
throw new ParsingException("Unable to parse the date: " + textualDate);
|
||||
}
|
||||
|
||||
private boolean textualDateMatches(String textualDate, String agoPhrase) {
|
||||
private boolean textualDateMatches(final String textualDate, final String agoPhrase) {
|
||||
if (textualDate.equals(agoPhrase)) {
|
||||
return true;
|
||||
}
|
||||
|
@ -98,7 +101,8 @@ public class TimeAgoParser {
|
|||
final String escapedPhrase = Pattern.quote(agoPhrase.toLowerCase());
|
||||
final String escapedSeparator;
|
||||
if (patternsHolder.wordSeparator().equals(" ")) {
|
||||
// From JDK8 → \h - Treat horizontal spaces as a normal one (non-breaking space, thin space, etc.)
|
||||
// From JDK8 → \h - Treat horizontal spaces as a normal one
|
||||
// (non-breaking space, thin space, etc.)
|
||||
escapedSeparator = "[ \\t\\xA0\\u1680\\u180e\\u2000-\\u200a\\u202f\\u205f\\u3000]";
|
||||
} else {
|
||||
escapedSeparator = Pattern.quote(patternsHolder.wordSeparator());
|
||||
|
@ -113,7 +117,7 @@ public class TimeAgoParser {
|
|||
}
|
||||
}
|
||||
|
||||
private DateWrapper getResultFor(int timeAgoAmount, ChronoUnit chronoUnit) {
|
||||
private DateWrapper getResultFor(final int timeAgoAmount, final ChronoUnit chronoUnit) {
|
||||
OffsetDateTime offsetDateTime = now;
|
||||
boolean isApproximation = false;
|
@@ -6,14 +6,18 @@ import org.schabi.newpipe.extractor.timeago.PatternsManager;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

public class TimeAgoPatternsManager {
@Nullable
private static PatternsHolder getPatternsFor(@Nonnull Localization localization) {
return PatternsManager.getPatterns(localization.getLanguageCode(), localization.getCountryCode());
public final class TimeAgoPatternsManager {
private TimeAgoPatternsManager() {
}

@Nullable
public static TimeAgoParser getTimeAgoParserFor(@Nonnull Localization localization) {
private static PatternsHolder getPatternsFor(@Nonnull final Localization localization) {
return PatternsManager.getPatterns(localization.getLanguageCode(),
localization.getCountryCode());
}

@Nullable
public static TimeAgoParser getTimeAgoParserFor(@Nonnull final Localization localization) {
final PatternsHolder holder = getPatternsFor(localization);

if (holder == null) {
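Illustrative sketch (not part of the commit): a typical lookup through the now-final utility class. Localization.DEFAULT is the constant shown earlier in this diff; existing imports are assumed.

    static DateWrapper timeAgoExample() throws ParsingException {
        final TimeAgoParser parser =
                TimeAgoPatternsManager.getTimeAgoParserFor(Localization.DEFAULT);
        if (parser == null) {
            // getTimeAgoParserFor() is @Nullable: languages without patterns yield null.
            return null;
        }
        return parser.parse("2 weeks ago");
    }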
@ -15,7 +15,7 @@ import java.io.IOException;
|
|||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
||||
public final class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
||||
|
||||
/**
|
||||
* Mixes are handled as particular playlists in NewPipeExtractor. {@link PlaylistType#NORMAL} is
|
||||
|
@ -52,23 +52,27 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
MIX_GENRE,
|
||||
}
|
||||
|
||||
private PlaylistInfo(int serviceId, ListLinkHandler linkHandler, String name) throws ParsingException {
|
||||
@SuppressWarnings("RedundantThrows")
|
||||
private PlaylistInfo(final int serviceId, final ListLinkHandler linkHandler, final String name)
|
||||
throws ParsingException {
|
||||
super(serviceId, linkHandler, name);
|
||||
}
|
||||
|
||||
public static PlaylistInfo getInfo(String url) throws IOException, ExtractionException {
|
||||
public static PlaylistInfo getInfo(final String url) throws IOException, ExtractionException {
|
||||
return getInfo(NewPipe.getServiceByUrl(url), url);
|
||||
}
|
||||
|
||||
public static PlaylistInfo getInfo(StreamingService service, String url) throws IOException, ExtractionException {
|
||||
PlaylistExtractor extractor = service.getPlaylistExtractor(url);
|
||||
public static PlaylistInfo getInfo(final StreamingService service, final String url)
|
||||
throws IOException, ExtractionException {
|
||||
final PlaylistExtractor extractor = service.getPlaylistExtractor(url);
|
||||
extractor.fetchPage();
|
||||
return getInfo(extractor);
|
||||
}
|
||||
|
||||
public static InfoItemsPage<StreamInfoItem> getMoreItems(StreamingService service,
|
||||
String url,
|
||||
Page page) throws IOException, ExtractionException {
|
||||
public static InfoItemsPage<StreamInfoItem> getMoreItems(final StreamingService service,
|
||||
final String url,
|
||||
final Page page)
|
||||
throws IOException, ExtractionException {
|
||||
return service.getPlaylistExtractor(url).getPage(page);
|
||||
}
|
||||
|
||||
|
@ -77,7 +81,8 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
*
|
||||
* @param extractor an extractor where fetchPage() was already got called on.
|
||||
*/
|
||||
public static PlaylistInfo getInfo(PlaylistExtractor extractor) throws ExtractionException {
|
||||
public static PlaylistInfo getInfo(final PlaylistExtractor extractor)
|
||||
throws ExtractionException {
|
||||
|
||||
final PlaylistInfo info = new PlaylistInfo(
|
||||
extractor.getServiceId(),
|
||||
|
@ -85,73 +90,75 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
extractor.getName());
|
||||
// collect uploader extraction failures until we are sure this is not
|
||||
// just a playlist without an uploader
|
||||
List<Throwable> uploaderParsingErrors = new ArrayList<Throwable>(3);
|
||||
final List<Throwable> uploaderParsingErrors = new ArrayList<>();
|
||||
|
||||
try {
|
||||
info.setOriginalUrl(extractor.getOriginalUrl());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
info.addError(e);
|
||||
}
|
||||
try {
|
||||
info.setStreamCount(extractor.getStreamCount());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
info.addError(e);
|
||||
}
|
||||
try {
|
||||
info.setThumbnailUrl(extractor.getThumbnailUrl());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
info.addError(e);
|
||||
}
|
||||
try {
|
||||
info.setUploaderUrl(extractor.getUploaderUrl());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
info.setUploaderUrl("");
|
||||
uploaderParsingErrors.add(e);
|
||||
}
|
||||
try {
|
||||
info.setUploaderName(extractor.getUploaderName());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
info.setUploaderName("");
|
||||
uploaderParsingErrors.add(e);
|
||||
}
|
||||
try {
|
||||
info.setUploaderAvatarUrl(extractor.getUploaderAvatarUrl());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
info.setUploaderAvatarUrl("");
|
||||
uploaderParsingErrors.add(e);
|
||||
}
|
||||
try {
|
||||
info.setSubChannelUrl(extractor.getSubChannelUrl());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
uploaderParsingErrors.add(e);
|
||||
}
|
||||
try {
|
||||
info.setSubChannelName(extractor.getSubChannelName());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
uploaderParsingErrors.add(e);
|
||||
}
|
||||
try {
|
||||
info.setSubChannelAvatarUrl(extractor.getSubChannelAvatarUrl());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
uploaderParsingErrors.add(e);
|
||||
}
|
||||
try {
|
||||
info.setBannerUrl(extractor.getBannerUrl());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
info.addError(e);
|
||||
}
|
||||
try {
|
||||
info.setPlaylistType(extractor.getPlaylistType());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
info.addError(e);
|
||||
}
|
||||
// do not fail if everything but the uploader infos could be collected
|
||||
if (!uploaderParsingErrors.isEmpty() &&
|
||||
(!info.getErrors().isEmpty() || uploaderParsingErrors.size() < 3)) {
|
||||
|
||||
// do not fail if everything but the uploader infos could be collected (TODO better comment)
|
||||
if (!uploaderParsingErrors.isEmpty()
|
||||
&& (!info.getErrors().isEmpty() || uploaderParsingErrors.size() < 3)) {
|
||||
info.addAllErrors(uploaderParsingErrors);
|
||||
}
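Illustrative sketch (not part of the commit): the condition above (unchanged in substance, only reformatted) decides whether uploader-related failures count as real errors. Here it is restated as a small predicate; reading the threshold of 3 as "the three core uploader fields (URL, name, avatar)" is an interpretation, not something the diff states.

    static boolean keepUploaderErrors(final List<Throwable> uploaderErrors,
                                      final List<Throwable> otherErrors) {
        // Keep the errors unless every core uploader field failed while nothing else did,
        // which is taken to mean the playlist simply has no uploader.
        return !uploaderErrors.isEmpty()
                && (!otherErrors.isEmpty() || uploaderErrors.size() < 3);
    }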
|
||||
|
||||
final InfoItemsPage<StreamInfoItem> itemsPage = ExtractorHelper.getItemsPageOrLogError(info, extractor);
|
||||
final InfoItemsPage<StreamInfoItem> itemsPage
|
||||
= ExtractorHelper.getItemsPageOrLogError(info, extractor);
|
||||
info.setRelatedItems(itemsPage.getItems());
|
||||
info.setNextPage(itemsPage.getNextPage());
|
||||
|
||||
|
@ -173,7 +180,7 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
return thumbnailUrl;
|
||||
}
|
||||
|
||||
public void setThumbnailUrl(String thumbnailUrl) {
|
||||
public void setThumbnailUrl(final String thumbnailUrl) {
|
||||
this.thumbnailUrl = thumbnailUrl;
|
||||
}
|
||||
|
||||
|
@ -181,7 +188,7 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
return bannerUrl;
|
||||
}
|
||||
|
||||
public void setBannerUrl(String bannerUrl) {
|
||||
public void setBannerUrl(final String bannerUrl) {
|
||||
this.bannerUrl = bannerUrl;
|
||||
}
|
||||
|
||||
|
@ -189,7 +196,7 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
return uploaderUrl;
|
||||
}
|
||||
|
||||
public void setUploaderUrl(String uploaderUrl) {
|
||||
public void setUploaderUrl(final String uploaderUrl) {
|
||||
this.uploaderUrl = uploaderUrl;
|
||||
}
|
||||
|
||||
|
@ -197,7 +204,7 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
return uploaderName;
|
||||
}
|
||||
|
||||
public void setUploaderName(String uploaderName) {
|
||||
public void setUploaderName(final String uploaderName) {
|
||||
this.uploaderName = uploaderName;
|
||||
}
|
||||
|
||||
|
@ -205,7 +212,7 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
return uploaderAvatarUrl;
|
||||
}
|
||||
|
||||
public void setUploaderAvatarUrl(String uploaderAvatarUrl) {
|
||||
public void setUploaderAvatarUrl(final String uploaderAvatarUrl) {
|
||||
this.uploaderAvatarUrl = uploaderAvatarUrl;
|
||||
}
|
||||
|
||||
|
@ -213,7 +220,7 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
return subChannelUrl;
|
||||
}
|
||||
|
||||
public void setSubChannelUrl(String subChannelUrl) {
|
||||
public void setSubChannelUrl(final String subChannelUrl) {
|
||||
this.subChannelUrl = subChannelUrl;
|
||||
}
|
||||
|
||||
|
@ -221,7 +228,7 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
return subChannelName;
|
||||
}
|
||||
|
||||
public void setSubChannelName(String subChannelName) {
|
||||
public void setSubChannelName(final String subChannelName) {
|
||||
this.subChannelName = subChannelName;
|
||||
}
|
||||
|
||||
|
@ -229,7 +236,7 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
return subChannelAvatarUrl;
|
||||
}
|
||||
|
||||
public void setSubChannelAvatarUrl(String subChannelAvatarUrl) {
|
||||
public void setSubChannelAvatarUrl(final String subChannelAvatarUrl) {
|
||||
this.subChannelAvatarUrl = subChannelAvatarUrl;
|
||||
}
|
||||
|
||||
|
@ -237,7 +244,7 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
return streamCount;
|
||||
}
|
||||
|
||||
public void setStreamCount(long streamCount) {
|
||||
public void setStreamCount(final long streamCount) {
|
||||
this.streamCount = streamCount;
|
||||
}
@@ -11,7 +11,7 @@ public class PlaylistInfoItem extends InfoItem {
private long streamCount = 0;
private PlaylistInfo.PlaylistType playlistType;

public PlaylistInfoItem(int serviceId, String url, String name) {
public PlaylistInfoItem(final int serviceId, final String url, final String name) {
super(InfoType.PLAYLIST, serviceId, url, name);
}

@@ -19,16 +19,16 @@ public class PlaylistInfoItem extends InfoItem {
return uploaderName;
}

public void setUploaderName(String uploader_name) {
this.uploaderName = uploader_name;
public void setUploaderName(final String uploaderName) {
this.uploaderName = uploaderName;
}

public long getStreamCount() {
return streamCount;
}

public void setStreamCount(long stream_count) {
this.streamCount = stream_count;
public void setStreamCount(final long streamCount) {
this.streamCount = streamCount;
}

public PlaylistInfo.PlaylistType getPlaylistType() {
@@ -10,14 +10,12 @@ public interface PlaylistInfoItemExtractor extends InfoItemExtractor {
/**
* Get the uploader name
* @return the uploader name
* @throws ParsingException
*/
String getUploaderName() throws ParsingException;

/**
* Get the number of streams
* @return the number of streams
* @throws ParsingException
*/
long getStreamCount() throws ParsingException;
@ -3,39 +3,37 @@ package org.schabi.newpipe.extractor.playlist;
|
|||
import org.schabi.newpipe.extractor.InfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
|
||||
public class PlaylistInfoItemsCollector extends InfoItemsCollector<PlaylistInfoItem, PlaylistInfoItemExtractor> {
|
||||
public class PlaylistInfoItemsCollector
|
||||
extends InfoItemsCollector<PlaylistInfoItem, PlaylistInfoItemExtractor> {
|
||||
|
||||
public PlaylistInfoItemsCollector(int serviceId) {
|
||||
public PlaylistInfoItemsCollector(final int serviceId) {
|
||||
super(serviceId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public PlaylistInfoItem extract(PlaylistInfoItemExtractor extractor) throws ParsingException {
|
||||
|
||||
String name = extractor.getName();
|
||||
int serviceId = getServiceId();
|
||||
String url = extractor.getUrl();
|
||||
|
||||
PlaylistInfoItem resultItem = new PlaylistInfoItem(serviceId, url, name);
|
||||
public PlaylistInfoItem extract(final PlaylistInfoItemExtractor extractor)
|
||||
throws ParsingException {
|
||||
final PlaylistInfoItem resultItem = new PlaylistInfoItem(
|
||||
getServiceId(), extractor.getUrl(), extractor.getName());
|
||||
|
||||
try {
|
||||
resultItem.setUploaderName(extractor.getUploaderName());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setThumbnailUrl(extractor.getThumbnailUrl());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setStreamCount(extractor.getStreamCount());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setPlaylistType(extractor.getPlaylistType());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
return resultItem;
|
||||
|
|
|
@ -14,12 +14,12 @@ import java.util.List;
|
|||
public abstract class SearchExtractor extends ListExtractor<InfoItem> {
|
||||
|
||||
public static class NothingFoundException extends ExtractionException {
|
||||
public NothingFoundException(String message) {
|
||||
public NothingFoundException(final String message) {
|
||||
super(message);
|
||||
}
|
||||
}
|
||||
|
||||
public SearchExtractor(StreamingService service, SearchQueryHandler linkHandler) {
|
||||
public SearchExtractor(final StreamingService service, final SearchQueryHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
|
@ -34,11 +34,11 @@ public abstract class SearchExtractor extends ListExtractor<InfoItem> {
|
|||
* {@link SearchExtractor#isCorrectedSearch()} is true.
|
||||
*
|
||||
* @return a suggestion to another query, the corrected query, or an empty String.
|
||||
* @throws ParsingException
|
||||
*/
|
||||
@Nonnull
|
||||
public abstract String getSearchSuggestion() throws ParsingException;
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public SearchQueryHandler getLinkHandler() {
|
||||
return (SearchQueryHandler) super.getLinkHandler();
|
||||
|
@ -66,7 +66,6 @@ public abstract class SearchExtractor extends ListExtractor<InfoItem> {
|
|||
* Example: on YouTube, if you search for "Covid-19",
|
||||
* there is a box with information from the WHO about Covid-19 and a link to the WHO's website.
|
||||
* @return additional meta information about the search query
|
||||
* @throws ParsingException
|
||||
*/
|
||||
@Nonnull
|
||||
public abstract List<MetaInfo> getMetaInfo() throws ParsingException;
|
||||
|
|
|
@ -1,6 +1,11 @@
|
|||
package org.schabi.newpipe.extractor.search;
|
||||
|
||||
import org.schabi.newpipe.extractor.*;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.ListInfo;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.Page;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.utils.ExtractorHelper;
|
||||
|
@ -11,26 +16,29 @@ import java.util.List;
|
|||
import javax.annotation.Nonnull;
|
||||
|
||||
public class SearchInfo extends ListInfo<InfoItem> {
|
||||
private String searchString;
|
||||
private final String searchString;
|
||||
private String searchSuggestion;
|
||||
private boolean isCorrectedSearch;
|
||||
private List<MetaInfo> metaInfo;
|
||||
|
||||
public SearchInfo(int serviceId,
|
||||
SearchQueryHandler qIHandler,
|
||||
String searchString) {
|
||||
public SearchInfo(final int serviceId,
|
||||
final SearchQueryHandler qIHandler,
|
||||
final String searchString) {
|
||||
super(serviceId, qIHandler, "Search");
|
||||
this.searchString = searchString;
|
||||
}
|
||||
|
||||
|
||||
public static SearchInfo getInfo(StreamingService service, SearchQueryHandler searchQuery) throws ExtractionException, IOException {
|
||||
SearchExtractor extractor = service.getSearchExtractor(searchQuery);
|
||||
public static SearchInfo getInfo(final StreamingService service,
|
||||
final SearchQueryHandler searchQuery)
|
||||
throws ExtractionException, IOException {
|
||||
final SearchExtractor extractor = service.getSearchExtractor(searchQuery);
|
||||
extractor.fetchPage();
|
||||
return getInfo(extractor);
|
||||
}
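Illustrative sketch (not part of the commit): the usual call into getInfo(). The query string is an example, and getSearchQHFactory() is assumed to be the StreamingService accessor used elsewhere in this project.

    static SearchInfo searchExample(final StreamingService service)
            throws ExtractionException, IOException {
        final SearchQueryHandler query =
                service.getSearchQHFactory().fromQuery("red panda documentary");
        // getInfo(service, query) creates the SearchExtractor, calls fetchPage() and then
        // collects the fields shown below, recording per-field failures as errors.
        return SearchInfo.getInfo(service, query);
    }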
|
||||
|
||||
public static SearchInfo getInfo(SearchExtractor extractor) throws ExtractionException, IOException {
|
||||
public static SearchInfo getInfo(final SearchExtractor extractor)
|
||||
throws ExtractionException, IOException {
|
||||
final SearchInfo info = new SearchInfo(
|
||||
extractor.getServiceId(),
|
||||
extractor.getLinkHandler(),
|
||||
|
@ -38,26 +46,27 @@ public class SearchInfo extends ListInfo<InfoItem> {
|
|||
|
||||
try {
|
||||
info.setOriginalUrl(extractor.getOriginalUrl());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
info.addError(e);
|
||||
}
|
||||
try {
|
||||
info.setSearchSuggestion(extractor.getSearchSuggestion());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
info.addError(e);
|
||||
}
|
||||
try {
|
||||
info.setIsCorrectedSearch(extractor.isCorrectedSearch());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
info.addError(e);
|
||||
}
|
||||
try {
|
||||
info.setMetaInfo(extractor.getMetaInfo());
|
||||
} catch (Exception e) {
|
||||
} catch (final Exception e) {
|
||||
info.addError(e);
|
||||
}
|
||||
|
||||
ListExtractor.InfoItemsPage<InfoItem> page = ExtractorHelper.getItemsPageOrLogError(info, extractor);
|
||||
final ListExtractor.InfoItemsPage<InfoItem> page
|
||||
= ExtractorHelper.getItemsPageOrLogError(info, extractor);
|
||||
info.setRelatedItems(page.getItems());
|
||||
info.setNextPage(page.getNextPage());
|
||||
|
||||
|
@ -65,9 +74,9 @@ public class SearchInfo extends ListInfo<InfoItem> {
|
|||
}
|
||||
|
||||
|
||||
public static ListExtractor.InfoItemsPage<InfoItem> getMoreItems(StreamingService service,
|
||||
SearchQueryHandler query,
|
||||
Page page)
|
||||
public static ListExtractor.InfoItemsPage<InfoItem> getMoreItems(final StreamingService service,
|
||||
final SearchQueryHandler query,
|
||||
final Page page)
|
||||
throws IOException, ExtractionException {
|
||||
return service.getSearchExtractor(query).getPage(page);
|
||||
}
|
||||
|
@ -85,11 +94,11 @@ public class SearchInfo extends ListInfo<InfoItem> {
|
|||
return this.isCorrectedSearch;
|
||||
}
|
||||
|
||||
public void setIsCorrectedSearch(boolean isCorrectedSearch) {
|
||||
public void setIsCorrectedSearch(final boolean isCorrectedSearch) {
|
||||
this.isCorrectedSearch = isCorrectedSearch;
|
||||
}
|
||||
|
||||
public void setSearchSuggestion(String searchSuggestion) {
|
||||
public void setSearchSuggestion(final String searchSuggestion) {
|
||||
this.searchSuggestion = searchSuggestion;
|
||||
}
|
||||
|
||||
|
@ -98,7 +107,7 @@ public class SearchInfo extends ListInfo<InfoItem> {
|
|||
return metaInfo;
|
||||
}
|
||||
|
||||
public void setMetaInfo(@Nonnull List<MetaInfo> metaInfo) {
|
||||
public void setMetaInfo(@Nonnull final List<MetaInfo> metaInfo) {
|
||||
this.metaInfo = metaInfo;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,23 +2,6 @@
|
|||
|
||||
package org.schabi.newpipe.extractor.services.bandcamp;
|
||||
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.kiosk.KioskList;
|
||||
import org.schabi.newpipe.extractor.linkhandler.*;
|
||||
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.extractors.*;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.linkHandler.*;
|
||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||
import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
|
||||
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.AUDIO;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.COMMENTS;
|
||||
import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper.BASE_URL;
|
||||
|
@ -27,6 +10,41 @@ import static org.schabi.newpipe.extractor.services.bandcamp.extractors.Bandcamp
|
|||
import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampRadioExtractor.KIOSK_RADIO;
|
||||
import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampRadioExtractor.RADIO_API_URL;
|
||||
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.kiosk.KioskList;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampCommentsExtractor;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampFeaturedExtractor;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampPlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampRadioExtractor;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampRadioStreamExtractor;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampSearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampStreamExtractor;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampSuggestionExtractor;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.linkHandler.BandcampChannelLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.linkHandler.BandcampCommentsLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.linkHandler.BandcampFeaturedLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.linkHandler.BandcampPlaylistLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.linkHandler.BandcampSearchQueryHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.bandcamp.linkHandler.BandcampStreamLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||
import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
|
||||
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
public class BandcampService extends StreamingService {
|
||||
|
||||
public BandcampService(final int id) {
|
||||
|
@ -81,19 +99,28 @@ public class BandcampService extends StreamingService {
|
|||
@Override
|
||||
public KioskList getKioskList() throws ExtractionException {
|
||||
|
||||
KioskList kioskList = new KioskList(this);
|
||||
final KioskList kioskList = new KioskList(this);
|
||||
|
||||
try {
|
||||
kioskList.addKioskEntry((streamingService, url, kioskId) ->
|
||||
new BandcampFeaturedExtractor(
|
||||
kioskList.addKioskEntry(
|
||||
(streamingService, url, kioskId) -> new BandcampFeaturedExtractor(
|
||||
BandcampService.this,
|
||||
new BandcampFeaturedLinkHandlerFactory().fromUrl(FEATURED_API_URL), kioskId),
|
||||
new BandcampFeaturedLinkHandlerFactory(), KIOSK_FEATURED);
|
||||
new BandcampFeaturedLinkHandlerFactory().fromUrl(FEATURED_API_URL),
|
||||
kioskId
|
||||
),
|
||||
new BandcampFeaturedLinkHandlerFactory(),
|
||||
KIOSK_FEATURED
|
||||
);
|
||||
|
||||
kioskList.addKioskEntry((streamingService, url, kioskId) ->
|
||||
new BandcampRadioExtractor(BandcampService.this,
|
||||
new BandcampFeaturedLinkHandlerFactory().fromUrl(RADIO_API_URL), kioskId),
|
||||
new BandcampFeaturedLinkHandlerFactory(), KIOSK_RADIO);
|
||||
kioskList.addKioskEntry(
|
||||
(streamingService, url, kioskId) -> new BandcampRadioExtractor(
|
||||
BandcampService.this,
|
||||
new BandcampFeaturedLinkHandlerFactory().fromUrl(RADIO_API_URL),
|
||||
kioskId
|
||||
),
|
||||
new BandcampFeaturedLinkHandlerFactory(),
|
||||
KIOSK_RADIO
|
||||
);
|
||||
|
||||
kioskList.setDefaultKiosk(KIOSK_FEATURED);
|
||||
|
||||
|
@ -116,14 +143,14 @@ public class BandcampService extends StreamingService {
|
|||
|
||||
@Override
|
||||
public StreamExtractor getStreamExtractor(final LinkHandler linkHandler) {
|
||||
if (BandcampExtractorHelper.isRadioUrl(linkHandler.getUrl()))
|
||||
if (BandcampExtractorHelper.isRadioUrl(linkHandler.getUrl())) {
|
||||
return new BandcampRadioStreamExtractor(this, linkHandler);
|
||||
else
|
||||
}
|
||||
return new BandcampStreamExtractor(this, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CommentsExtractor getCommentsExtractor(ListLinkHandler linkHandler) {
|
||||
public CommentsExtractor getCommentsExtractor(final ListLinkHandler linkHandler) {
|
||||
return new BandcampCommentsExtractor(this, linkHandler);
|
||||
}
|
||||
}
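Illustrative sketch (not part of the commit): the getStreamExtractor() rewrite above is brace-only; its dispatch can be summarised as follows. Both example URLs are invented and the returned strings merely name the chosen extractor class.

    static String extractorChoiceExample(final String url) {
        if (BandcampExtractorHelper.isRadioUrl(url)) {
            return "BandcampRadioStreamExtractor";  // e.g. a bandcamp.com radio show URL
        }
        return "BandcampStreamExtractor";           // e.g. an artist track or album page
    }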
@ -4,6 +4,7 @@ package org.schabi.newpipe.extractor.services.bandcamp.extractors;
|
|||
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
|
||||
import org.jsoup.Jsoup;
|
||||
import org.schabi.newpipe.extractor.Page;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
|
@ -17,20 +18,24 @@ import org.schabi.newpipe.extractor.services.bandcamp.extractors.streaminfoitem.
|
|||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
public class BandcampChannelExtractor extends ChannelExtractor {
|
||||
|
||||
private JsonObject channelInfo;
|
||||
|
||||
public BandcampChannelExtractor(final StreamingService service, final ListLinkHandler linkHandler) {
|
||||
public BandcampChannelExtractor(final StreamingService service,
|
||||
final ListLinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getAvatarUrl() {
|
||||
if (channelInfo.getLong("bio_image_id") == 0) return "";
|
||||
if (channelInfo.getLong("bio_image_id") == 0) {
|
||||
return "";
|
||||
}
|
||||
|
||||
return BandcampExtractorHelper.getImageUrl(channelInfo.getLong("bio_image_id"), false);
|
||||
}
|
||||
|
@ -43,7 +48,8 @@ public class BandcampChannelExtractor extends ChannelExtractor {
|
|||
*/
|
||||
try {
|
||||
final String html = getDownloader()
|
||||
.get(channelInfo.getString("bandcamp_url").replace("http://", "https://"))
|
||||
.get(channelInfo.getString("bandcamp_url")
|
||||
.replace("http://", "https://"))
|
||||
.responseBody();
|
||||
|
||||
return Jsoup.parse(html)
|
||||
|
@ -110,7 +116,9 @@ public class BandcampChannelExtractor extends ChannelExtractor {
|
|||
// A discograph is an item as it appears in a discography
|
||||
final JsonObject discograph = discography.getObject(i);
|
||||
|
||||
if (!discograph.getString("item_type").equals("track")) continue;
|
||||
if (!discograph.getString("item_type").equals("track")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
collector.commit(new BandcampDiscographStreamInfoItemExtractor(discograph, getUrl()));
|
||||
}
|
||||
|
@ -119,12 +127,13 @@ public class BandcampChannelExtractor extends ChannelExtractor {
|
|||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<StreamInfoItem> getPage(Page page) {
|
||||
public InfoItemsPage<StreamInfoItem> getPage(final Page page) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
public void onFetchPage(@Nonnull final Downloader downloader)
|
||||
throws IOException, ExtractionException {
|
||||
channelInfo = BandcampExtractorHelper.getArtistDetails(getId());
|
||||
}
|
||||
|
||||
|
|
|
@@ -8,7 +8,8 @@ import org.schabi.newpipe.extractor.exceptions.ParsingException;

public class BandcampChannelInfoItemExtractor implements ChannelInfoItemExtractor {

private final Element resultInfo, searchResult;
private final Element resultInfo;
private final Element searchResult;

public BandcampChannelInfoItemExtractor(final Element searchResult) {
this.searchResult = searchResult;
@ -21,25 +21,27 @@ public class BandcampCommentsExtractor extends CommentsExtractor {
|
|||
private Document document;
|
||||
|
||||
|
||||
public BandcampCommentsExtractor(StreamingService service, ListLinkHandler linkHandler) {
|
||||
public BandcampCommentsExtractor(final StreamingService service,
|
||||
final ListLinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
String html = downloader.get(getLinkHandler().getUrl()).responseBody();
|
||||
document = Jsoup.parse(html);
|
||||
public void onFetchPage(@Nonnull final Downloader downloader)
|
||||
throws IOException, ExtractionException {
|
||||
document = Jsoup.parse(downloader.get(getLinkHandler().getUrl()).responseBody());
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public InfoItemsPage<CommentsInfoItem> getInitialPage() throws IOException, ExtractionException {
|
||||
public InfoItemsPage<CommentsInfoItem> getInitialPage()
|
||||
throws IOException, ExtractionException {
|
||||
|
||||
CommentsInfoItemsCollector collector = new CommentsInfoItemsCollector(getServiceId());
|
||||
final CommentsInfoItemsCollector collector = new CommentsInfoItemsCollector(getServiceId());
|
||||
|
||||
Elements writings = document.getElementsByClass("writing");
|
||||
final Elements writings = document.getElementsByClass("writing");
|
||||
|
||||
for (Element writing : writings) {
|
||||
for (final Element writing : writings) {
|
||||
collector.commit(new BandcampCommentsInfoItemExtractor(writing, getUrl()));
|
||||
}
|
||||
|
||||
|
@ -47,7 +49,8 @@ public class BandcampCommentsExtractor extends CommentsExtractor {
|
|||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<CommentsInfoItem> getPage(Page page) throws IOException, ExtractionException {
|
||||
public InfoItemsPage<CommentsInfoItem> getPage(final Page page)
|
||||
throws IOException, ExtractionException {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -9,7 +9,7 @@ public class BandcampCommentsInfoItemExtractor implements CommentsInfoItemExtractor {
private final Element writing;
private final String url;

public BandcampCommentsInfoItemExtractor(Element writing, String url) {
public BandcampCommentsInfoItemExtractor(final Element writing, final String url) {
this.writing = writing;
this.url = url;
}
|
@@ -6,6 +6,7 @@ import com.grack.nanojson.JsonObject;
 import com.grack.nanojson.JsonParser;
 import com.grack.nanojson.JsonParserException;
+import com.grack.nanojson.JsonWriter;
 
 import org.jsoup.Jsoup;
 import org.schabi.newpipe.extractor.NewPipe;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 
@@ -18,18 +19,21 @@ import java.time.ZonedDateTime;
 import java.time.format.DateTimeFormatter;
 import java.util.Locale;
 
-public class BandcampExtractorHelper {
+public final class BandcampExtractorHelper {
 
     public static final String BASE_URL = "https://bandcamp.com";
     public static final String BASE_API_URL = BASE_URL + "/api";
 
+    private BandcampExtractorHelper() {
+    }
+
     /**
      * Translate all these parameters together to the URL of the corresponding album or track
      * using the mobile API
      */
-    public static String getStreamUrlFromIds(final long bandId, final long itemId, final String itemType)
-            throws ParsingException {
-
+    public static String getStreamUrlFromIds(final long bandId,
+                                             final long itemId,
+                                             final String itemType) throws ParsingException {
         try {
             final String jsonString = NewPipe.getDownloader().get(
                     BASE_API_URL + "/mobile/22/tralbum_details?band_id=" + bandId
 
@@ -50,7 +54,7 @@ public class BandcampExtractorHelper {
      * <a href=https://notabug.org/fynngodau/bandcampDirect/wiki/rewindBandcamp+%E2%80%93+Fetching+artist+details>
      * More technical info.</a>
      */
-    public static JsonObject getArtistDetails(String id) throws ParsingException {
+    public static JsonObject getArtistDetails(final String id) throws ParsingException {
         try {
             return
                     JsonParser.object().from(
 
@@ -91,24 +95,24 @@ public class BandcampExtractorHelper {
     public static boolean isSupportedDomain(final String url) throws ParsingException {
 
         // Accept all bandcamp.com URLs
-        if (url.toLowerCase().matches("https?://.+\\.bandcamp\\.com(/.*)?")) return true;
+        if (url.toLowerCase().matches("https?://.+\\.bandcamp\\.com(/.*)?")) {
+            return true;
+        }
 
         try {
             // Test other URLs for whether they contain a footer that links to bandcamp
-            return Jsoup.parse(
-                    NewPipe.getDownloader().get(url).responseBody()
-            )
+            return Jsoup.parse(NewPipe.getDownloader().get(url).responseBody())
                     .getElementById("pgFt")
                     .getElementById("pgFt-inner")
                     .getElementById("footer-logo-wrapper")
                     .getElementById("footer-logo")
                     .getElementsByClass("hiddenAccess")
                     .text().equals("Bandcamp");
-        } catch (NullPointerException e) {
+        } catch (final NullPointerException e) {
             return false;
-        } catch (IOException | ReCaptchaException e) {
-            throw new ParsingException("Could not determine whether URL is custom domain " +
-                    "(not available? network error?)");
+        } catch (final IOException | ReCaptchaException e) {
+            throw new ParsingException("Could not determine whether URL is custom domain "
+                    + "(not available? network error?)");
         }
     }
 
@@ -121,10 +125,10 @@ public class BandcampExtractorHelper {
         return url.toLowerCase().matches("https?://bandcamp\\.com/\\?show=\\d+");
     }
 
-    static DateWrapper parseDate(final String textDate) throws ParsingException {
+    public static DateWrapper parseDate(final String textDate) throws ParsingException {
         try {
-            final ZonedDateTime zonedDateTime = ZonedDateTime.parse(
-                    textDate, DateTimeFormatter.ofPattern("dd MMM yyyy HH:mm:ss zzz", Locale.ENGLISH));
+            final ZonedDateTime zonedDateTime = ZonedDateTime.parse(textDate,
+                    DateTimeFormatter.ofPattern("dd MMM yyyy HH:mm:ss zzz", Locale.ENGLISH));
             return new DateWrapper(zonedDateTime.toOffsetDateTime(), false);
         } catch (final DateTimeException e) {
             throw new ParsingException("Could not parse date '" + textDate + "'", e);
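The BandcampExtractorHelper hunks above apply the usual Checkstyle guidance for utility classes: a class that only exposes static helpers is declared final and gets a private constructor so it can neither be instantiated nor subclassed. A minimal standalone sketch of that pattern, with an illustrative class name that is not part of this diff:

// Illustrative utility-class skeleton; the class and method names are hypothetical.
public final class UrlHelper {

    private UrlHelper() {
        // no instances
    }

    public static boolean isSecure(final String url) {
        return url.startsWith("https://");
    }
}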
@@ -25,17 +25,20 @@ public class BandcampFeaturedExtractor extends KioskExtractor<PlaylistInfoItem>
 
     public static final String KIOSK_FEATURED = "Featured";
     public static final String FEATURED_API_URL = BASE_API_URL + "/mobile/24/bootstrap_data";
-    public static final String MORE_FEATURED_API_URL = BASE_API_URL + "/mobile/24/feed_older_logged_out";
+    public static final String MORE_FEATURED_API_URL
+            = BASE_API_URL + "/mobile/24/feed_older_logged_out";
 
     private JsonObject json;
 
-    public BandcampFeaturedExtractor(final StreamingService streamingService, final ListLinkHandler listLinkHandler,
+    public BandcampFeaturedExtractor(final StreamingService streamingService,
+                                     final ListLinkHandler listLinkHandler,
                                      final String kioskId) {
         super(streamingService, listLinkHandler, kioskId);
     }
 
     @Override
-    public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
+    public void onFetchPage(@Nonnull final Downloader downloader)
+            throws IOException, ExtractionException {
         try {
             json = JsonParser.object().from(
                     getDownloader().post(
 
@@ -55,9 +58,8 @@ public class BandcampFeaturedExtractor extends KioskExtractor<PlaylistInfoItem>
 
     @Nonnull
     @Override
-    public InfoItemsPage<PlaylistInfoItem> getInitialPage() throws IOException, ExtractionException {
-
-
+    public InfoItemsPage<PlaylistInfoItem> getInitialPage()
+            throws IOException, ExtractionException {
         final JsonArray featuredStories = json.getObject("feed_content")
                 .getObject("stories")
                 .getArray("featured");
 
@@ -65,8 +67,7 @@ public class BandcampFeaturedExtractor extends KioskExtractor<PlaylistInfoItem>
         return extractItems(featuredStories);
     }
 
-    private InfoItemsPage<PlaylistInfoItem> extractItems(JsonArray featuredStories) {
-
+    private InfoItemsPage<PlaylistInfoItem> extractItems(final JsonArray featuredStories) {
         final PlaylistInfoItemsCollector c = new PlaylistInfoItemsCollector(getServiceId());
 
         for (int i = 0; i < featuredStories.size(); i++) {
 
@@ -81,14 +82,13 @@ public class BandcampFeaturedExtractor extends KioskExtractor<PlaylistInfoItem>
         }
 
         final JsonObject lastFeaturedStory = featuredStories.getObject(featuredStories.size() - 1);
 
         return new InfoItemsPage<>(c, getNextPageFrom(lastFeaturedStory));
     }
 
     /**
      * Next Page can be generated from metadata of last featured story
      */
-    private Page getNextPageFrom(JsonObject lastFeaturedStory) {
+    private Page getNextPageFrom(final JsonObject lastFeaturedStory) {
         final long lastStoryDate = lastFeaturedStory.getLong("story_date");
         final long lastStoryId = lastFeaturedStory.getLong("ntid");
         final String lastStoryType = lastFeaturedStory.getString("story_type");
 
@@ -99,9 +99,10 @@ public class BandcampFeaturedExtractor extends KioskExtractor<PlaylistInfoItem>
     }
 
     @Override
-    public InfoItemsPage<PlaylistInfoItem> getPage(Page page) throws IOException, ExtractionException {
+    public InfoItemsPage<PlaylistInfoItem> getPage(final Page page)
+            throws IOException, ExtractionException {
 
-        JsonObject response;
+        final JsonObject response;
         try {
             response = JsonParser.object().from(
                     getDownloader().get(page.getUrl()).responseBody()
@@ -30,9 +30,9 @@ import static org.schabi.newpipe.extractor.utils.Utils.HTTPS;
 public class BandcampPlaylistExtractor extends PlaylistExtractor {
 
     /**
-     * An arbitrarily chosen number above which cover arts won't be fetched individually for each track;
-     * instead, it will be assumed that every track has the same cover art as the album, which is not
-     * always the case.
+     * An arbitrarily chosen number above which cover arts won't be fetched individually for each
+     * track; instead, it will be assumed that every track has the same cover art as the album,
+     * which is not always the case.
      */
     private static final int MAXIMUM_INDIVIDUAL_COVER_ARTS = 10;
 
@@ -41,12 +41,14 @@ public class BandcampPlaylistExtractor extends PlaylistExtractor {
     private JsonArray trackInfo;
     private String name;
 
-    public BandcampPlaylistExtractor(final StreamingService service, final ListLinkHandler linkHandler) {
+    public BandcampPlaylistExtractor(final StreamingService service,
+                                     final ListLinkHandler linkHandler) {
         super(service, linkHandler);
     }
 
     @Override
-    public void onFetchPage(@Nonnull final Downloader downloader) throws IOException, ExtractionException {
+    public void onFetchPage(@Nonnull final Downloader downloader)
+            throws IOException, ExtractionException {
         final String html = downloader.get(getLinkHandler().getUrl()).responseBody();
         document = Jsoup.parse(html);
         albumJson = getAlbumInfoJson(html);
 
@@ -115,7 +117,7 @@ public class BandcampPlaylistExtractor extends PlaylistExtractor {
         final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
 
         for (int i = 0; i < trackInfo.size(); i++) {
-            JsonObject track = trackInfo.getObject(i);
+            final JsonObject track = trackInfo.getObject(i);
 
             if (trackInfo.size() < MAXIMUM_INDIVIDUAL_COVER_ARTS) {
                 // Load cover art of every track individually
@@ -6,9 +6,10 @@ import org.schabi.newpipe.extractor.playlist.PlaylistInfoItemExtractor;
 import javax.annotation.Nonnull;
 
 public class BandcampPlaylistInfoItemExtractor implements PlaylistInfoItemExtractor {
-    private final Element searchResult, resultInfo;
+    private final Element searchResult;
+    private final Element resultInfo;
 
-    public BandcampPlaylistInfoItemExtractor(@Nonnull Element searchResult) {
+    public BandcampPlaylistInfoItemExtractor(@Nonnull final Element searchResult) {
         this.searchResult = searchResult;
         resultInfo = searchResult.getElementsByClass("result-info").first();
     }
 
@@ -41,6 +42,8 @@ public class BandcampPlaylistInfoItemExtractor implements PlaylistInfoItemExtractor {
                 .getElementsByTag("img").first();
         if (img != null) {
             return img.attr("src");
-        } else return null;
+        } else {
+            return null;
+        }
     }
 }
@@ -28,13 +28,15 @@ public class BandcampRadioExtractor extends KioskExtractor<StreamInfoItem> {
 
     private JsonObject json = null;
 
-    public BandcampRadioExtractor(final StreamingService streamingService, final ListLinkHandler linkHandler,
+    public BandcampRadioExtractor(final StreamingService streamingService,
+                                  final ListLinkHandler linkHandler,
                                   final String kioskId) {
         super(streamingService, linkHandler, kioskId);
     }
 
     @Override
-    public void onFetchPage(@Nonnull final Downloader downloader) throws IOException, ExtractionException {
+    public void onFetchPage(@Nonnull final Downloader downloader)
+            throws IOException, ExtractionException {
         try {
             json = JsonParser.object().from(
                     getDownloader().get(RADIO_API_URL).responseBody());
@@ -23,10 +23,9 @@ public class BandcampRadioInfoItemExtractor implements StreamInfoItemExtractor {
 
     @Override
     public long getDuration() {
-        /* Duration is only present in the more detailed information that has to be queried separately.
-         * Therefore, over 300 queries would be needed every time the kiosk is opened if we were to
-         * display the real value.
-         */
+        /* Duration is only present in the more detailed information that has to be queried
+        separately. Therefore, over 300 queries would be needed every time the kiosk is opened if we
+        were to display the real value. */
         //return query(show.getInt("id")).getLong("audio_duration");
         return 0;
     }
@@ -26,28 +26,31 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper.*;
+import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper.BASE_API_URL;
+import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper.BASE_URL;
+import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper.getImageUrl;
 
 public class BandcampRadioStreamExtractor extends BandcampStreamExtractor {
 
     private JsonObject showInfo;
 
-    public BandcampRadioStreamExtractor(final StreamingService service, final LinkHandler linkHandler) {
+    public BandcampRadioStreamExtractor(final StreamingService service,
+                                        final LinkHandler linkHandler) {
         super(service, linkHandler);
     }
 
     static JsonObject query(final int id) throws ParsingException {
         try {
-            return JsonParser.object().from(
-                    NewPipe.getDownloader().get(BASE_API_URL + "/bcweekly/1/get?id=" + id).responseBody()
-            );
+            return JsonParser.object().from(NewPipe.getDownloader()
                    .get(BASE_API_URL + "/bcweekly/1/get?id=" + id).responseBody());
         } catch (final IOException | ReCaptchaException | JsonParserException e) {
             throw new ParsingException("could not get show data", e);
         }
     }
 
     @Override
-    public void onFetchPage(@Nonnull final Downloader downloader) throws IOException, ExtractionException {
+    public void onFetchPage(@Nonnull final Downloader downloader)
+            throws IOException, ExtractionException {
         showInfo = query(Integer.parseInt(getId()));
     }
 
@@ -14,7 +14,7 @@ import javax.annotation.Nonnull;
 public class BandcampRelatedPlaylistInfoItemExtractor implements PlaylistInfoItemExtractor {
     private final Element relatedAlbum;
 
-    public BandcampRelatedPlaylistInfoItemExtractor(@Nonnull Element relatedAlbum) {
+    public BandcampRelatedPlaylistInfoItemExtractor(@Nonnull final Element relatedAlbum) {
         this.relatedAlbum = relatedAlbum;
     }
 
@@ -26,7 +26,8 @@ import java.util.List;
 
 public class BandcampSearchExtractor extends SearchExtractor {
 
-    public BandcampSearchExtractor(StreamingService service, SearchQueryHandler linkHandler) {
+    public BandcampSearchExtractor(final StreamingService service,
+                                   final SearchQueryHandler linkHandler) {
         super(service, linkHandler);
     }
 
@@ -47,7 +48,8 @@ public class BandcampSearchExtractor extends SearchExtractor {
         return Collections.emptyList();
     }
 
-    public InfoItemsPage<InfoItem> getPage(final Page page) throws IOException, ExtractionException {
+    public InfoItemsPage<InfoItem> getPage(final Page page)
+            throws IOException, ExtractionException {
         final String html = getDownloader().get(page.getUrl()).responseBody();
 
         final MultiInfoItemsCollector collector = new MultiInfoItemsCollector(getServiceId());
 
@@ -86,8 +88,9 @@ public class BandcampSearchExtractor extends SearchExtractor {
 
         // Count pages
         final Elements pageLists = d.getElementsByClass("pagelist");
-        if (pageLists.isEmpty())
+        if (pageLists.isEmpty()) {
             return new InfoItemsPage<>(collector, null);
+        }
 
         final Elements pages = pageLists.first().getElementsByTag("li");
 
@@ -120,7 +123,7 @@ public class BandcampSearchExtractor extends SearchExtractor {
     }
 
     @Override
-    public void onFetchPage(@Nonnull final Downloader downloader) throws IOException, ExtractionException {
-
+    public void onFetchPage(@Nonnull final Downloader downloader)
+            throws IOException, ExtractionException {
     }
 }
@@ -2,14 +2,16 @@
 
 package org.schabi.newpipe.extractor.services.bandcamp.extractors;
 
+import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper.getImageUrl;
+
 import com.grack.nanojson.JsonObject;
 import com.grack.nanojson.JsonParserException;
 
 import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
 import org.schabi.newpipe.extractor.MediaFormat;
 import org.schabi.newpipe.extractor.MetaInfo;
 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 
@@ -17,19 +19,21 @@ import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
 import org.schabi.newpipe.extractor.localization.DateWrapper;
 import org.schabi.newpipe.extractor.playlist.PlaylistInfoItemsCollector;
-import org.schabi.newpipe.extractor.stream.*;
+import org.schabi.newpipe.extractor.stream.AudioStream;
+import org.schabi.newpipe.extractor.stream.Description;
+import org.schabi.newpipe.extractor.stream.StreamExtractor;
+import org.schabi.newpipe.extractor.stream.StreamType;
+import org.schabi.newpipe.extractor.stream.VideoStream;
 import org.schabi.newpipe.extractor.utils.JsonUtils;
 import org.schabi.newpipe.extractor.utils.Utils;
 
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 
-import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper.getImageUrl;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 
 public class BandcampStreamExtractor extends StreamExtractor {
 
 
@@ -43,7 +47,8 @@ public class BandcampStreamExtractor extends StreamExtractor {
 
 
     @Override
-    public void onFetchPage(@Nonnull final Downloader downloader) throws IOException, ExtractionException {
+    public void onFetchPage(@Nonnull final Downloader downloader)
+            throws IOException, ExtractionException {
         final String html = downloader.get(getLinkHandler().getUrl()).responseBody();
         document = Jsoup.parse(html);
         albumJson = getAlbumInfoJson(html);
 
@@ -94,7 +99,7 @@ public class BandcampStreamExtractor extends StreamExtractor {
 
     @Nonnull
     @Override
-    public String getUploaderName() {
+    public String getUploaderName() throws ParsingException {
         return albumJson.getString("artist");
     }
 
@@ -113,8 +118,11 @@ public class BandcampStreamExtractor extends StreamExtractor {
     @Nonnull
     @Override
     public String getThumbnailUrl() throws ParsingException {
-        if (albumJson.isNull("art_id")) return "";
-        else return getImageUrl(albumJson.getLong("art_id"), true);
+        if (albumJson.isNull("art_id")) {
+            return "";
+        } else {
+            return getImageUrl(albumJson.getLong("art_id"), true);
+        }
     }
 
     @Nonnull
 
@@ -170,15 +178,12 @@ public class BandcampStreamExtractor extends StreamExtractor {
 
     @Override
     public PlaylistInfoItemsCollector getRelatedItems() {
-
-        PlaylistInfoItemsCollector collector = new PlaylistInfoItemsCollector(getServiceId());
-
-        Elements recommendedAlbums = document.getElementsByClass("recommended-album");
-
-        for (Element album : recommendedAlbums) {
+        final PlaylistInfoItemsCollector collector = new PlaylistInfoItemsCollector(getServiceId());
+        final Elements recommendedAlbums = document.getElementsByClass("recommended-album");
+
+        for (final Element album : recommendedAlbums) {
             collector.commit(new BandcampRelatedPlaylistInfoItemExtractor(album));
         }
 
         return collector;
     }
 
@@ -186,22 +191,21 @@ public class BandcampStreamExtractor extends StreamExtractor {
     @Override
     public String getCategory() {
         // Get first tag from html, which is the artist's Genre
-        return document
-                .getElementsByClass("tralbum-tags").first()
-                .getElementsByClass("tag").first().text();
+        return document.getElementsByClass("tralbum-tags").stream()
+                .flatMap(element -> element.getElementsByClass("tag").stream())
+                .map(Element::text)
+                .findFirst()
+                .orElse("");
     }
 
     @Nonnull
     @Override
     public String getLicence() {
-        int license = current.getInt("license_type");
-
-        /* Tests resulted in this mapping of ints to licence: https://cloud.disroot.org/s/ZTWBxbQ9fKRmRWJ/preview
-         * (screenshot from a Bandcamp artist's account)
-         */
-
-        switch (license) {
+        /* Tests resulted in this mapping of ints to licence:
+        https://cloud.disroot.org/s/ZTWBxbQ9fKRmRWJ/preview (screenshot from a Bandcamp artist's
+        account) */
+        switch (current.getInt("license_type")) {
             case 1:
                 return "All rights reserved ©";
             case 2:
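The getCategory() rewrite above replaces two chained first() calls, each of which can return null, with a Stream pipeline that simply yields an empty string when no tag element exists. A rough standalone sketch of that null-safe lookup with Jsoup; the HTML snippet and class name here are made up for illustration:

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

public class FirstTagExample {
    public static void main(final String[] args) {
        final Document document = Jsoup.parse(
                "<div class=\"tralbum-tags\"><a class=\"tag\">electronic</a></div>");

        // An empty stream ends in orElse("") instead of a NullPointerException from first().
        final String category = document.getElementsByClass("tralbum-tags").stream()
                .flatMap(element -> element.getElementsByClass("tag").stream())
                .map(Element::text)
                .findFirst()
                .orElse("");

        System.out.println(category); // prints "electronic"
    }
}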
@@ -2,10 +2,13 @@
 
 package org.schabi.newpipe.extractor.services.bandcamp.extractors;
 
+import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper.BASE_API_URL;
+
 import com.grack.nanojson.JsonArray;
 import com.grack.nanojson.JsonObject;
 import com.grack.nanojson.JsonParser;
 import com.grack.nanojson.JsonParserException;
+
 import org.schabi.newpipe.extractor.NewPipe;
 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.downloader.Downloader;
 
@@ -18,8 +21,6 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper.BASE_API_URL;
-
 public class BandcampSuggestionExtractor extends SuggestionExtractor {
 
     private static final String AUTOCOMPLETE_URL = BASE_API_URL + "/fuzzysearch/1/autocomplete?q=";
 
@@ -32,9 +33,8 @@ public class BandcampSuggestionExtractor extends SuggestionExtractor {
         final Downloader downloader = NewPipe.getDownloader();
 
         try {
-            final JsonObject fuzzyResults = JsonParser.object().from(
-                    downloader.get(AUTOCOMPLETE_URL + URLEncoder.encode(query, "UTF-8")).responseBody()
-            );
+            final JsonObject fuzzyResults = JsonParser.object().from(downloader
                    .get(AUTOCOMPLETE_URL + URLEncoder.encode(query, "UTF-8")).responseBody());
 
             final JsonArray jsonArray = fuzzyResults.getObject("auto")
                     .getArray("results");
 
@@ -44,7 +44,9 @@ public class BandcampSuggestionExtractor extends SuggestionExtractor {
             for (final Object fuzzyResult : jsonArray) {
                 final String res = ((JsonObject) fuzzyResult).getString("name");
 
-                if (!suggestions.contains(res)) suggestions.add(res);
+                if (!suggestions.contains(res)) {
+                    suggestions.add(res);
+                }
             }
 
             return suggestions;
@@ -9,9 +9,9 @@ import javax.annotation.Nullable;
 public class BandcampDiscographStreamInfoItemExtractor extends BandcampStreamInfoItemExtractor {
 
     private final JsonObject discograph;
 
-    public BandcampDiscographStreamInfoItemExtractor(final JsonObject discograph, final String uploaderUrl) {
+    public BandcampDiscographStreamInfoItemExtractor(final JsonObject discograph,
+                                                     final String uploaderUrl) {
         super(uploaderUrl);
 
         this.discograph = discograph;
     }
 
@@ -18,14 +18,16 @@ public class BandcampPlaylistStreamInfoItemExtractor extends BandcampStreamInfoItemExtractor {
     private String substituteCoverUrl;
     private final StreamingService service;
 
-    public BandcampPlaylistStreamInfoItemExtractor(final JsonObject track, final String uploaderUrl,
+    public BandcampPlaylistStreamInfoItemExtractor(final JsonObject track,
+                                                   final String uploaderUrl,
                                                    final StreamingService service) {
         super(uploaderUrl);
         this.track = track;
         this.service = service;
     }
 
-    public BandcampPlaylistStreamInfoItemExtractor(final JsonObject track, final String uploaderUrl,
+    public BandcampPlaylistStreamInfoItemExtractor(final JsonObject track,
+                                                   final String uploaderUrl,
                                                    final String substituteCoverUrl) {
         this(track, uploaderUrl, (StreamingService) null);
         this.substituteCoverUrl = substituteCoverUrl;
@@ -7,9 +7,11 @@ import javax.annotation.Nullable;
 
 public class BandcampSearchStreamInfoItemExtractor extends BandcampStreamInfoItemExtractor {
 
-    private final Element resultInfo, searchResult;
+    private final Element resultInfo;
+    private final Element searchResult;
 
-    public BandcampSearchStreamInfoItemExtractor(final Element searchResult, final String uploaderUrl) {
+    public BandcampSearchStreamInfoItemExtractor(final Element searchResult,
+                                                 final String uploaderUrl) {
         super(uploaderUrl);
         this.searchResult = searchResult;
         resultInfo = searchResult.getElementsByClass("result-info").first();
@@ -30,7 +30,8 @@ public class BandcampChannelLinkHandlerFactory extends ListLinkHandlerFactory {
 
             return String.valueOf(bandData.getLong("id"));
 
-        } catch (final IOException | ReCaptchaException | ArrayIndexOutOfBoundsException | JsonParserException e) {
+        } catch (final IOException | ReCaptchaException | ArrayIndexOutOfBoundsException
+                | JsonParserException e) {
             throw new ParsingException("Download failed", e);
         }
     }
 
@@ -46,7 +47,8 @@ public class BandcampChannelLinkHandlerFactory extends ListLinkHandlerFactory {
                     .getString("bandcamp_url")
                     .replace("http://", "https://");
         } catch (final NullPointerException e) {
-            throw new ParsingException("JSON does not contain URL (invalid id?) or is otherwise invalid", e);
+            throw new ParsingException(
                    "JSON does not contain URL (invalid id?) or is otherwise invalid", e);
         }
 
     }
 
@@ -55,16 +57,18 @@ public class BandcampChannelLinkHandlerFactory extends ListLinkHandlerFactory {
      * Accepts only pages that lead to the root of an artist profile. Supports external pages.
      */
     @Override
-    public boolean onAcceptUrl(String url) throws ParsingException {
+    public boolean onAcceptUrl(final String url) throws ParsingException {
 
-        url = url.toLowerCase();
+        final String lowercaseUrl = url.toLowerCase();
 
         // https: | | artist.bandcamp.com | releases
         // 0      1 2                     3
-        String[] splitUrl = url.split("/");
+        final String[] splitUrl = lowercaseUrl.split("/");
 
         // URL is too short
-        if (splitUrl.length < 3) return false;
+        if (splitUrl.length < 3) {
+            return false;
+        }
 
         // Must have "releases" or "music" as segment after url or none at all
         if (splitUrl.length > 3 && !(
 
@@ -80,7 +84,7 @@ public class BandcampChannelLinkHandlerFactory extends ListLinkHandlerFactory {
             }
 
            // Test whether domain is supported
-            return BandcampExtractorHelper.isSupportedDomain(url);
+            return BandcampExtractorHelper.isSupportedDomain(lowercaseUrl);
        }
    }
}
@@ -13,21 +13,25 @@ import java.util.List;
 public class BandcampCommentsLinkHandlerFactory extends ListLinkHandlerFactory {
 
     @Override
-    public String getId(String url) throws ParsingException {
+    public String getId(final String url) throws ParsingException {
         return url;
     }
 
     @Override
-    public boolean onAcceptUrl(String url) throws ParsingException {
+    public boolean onAcceptUrl(final String url) throws ParsingException {
         // Don't accept URLs that don't point to a track
-        if (!url.toLowerCase().matches("https?://.+\\..+/(track|album)/.+")) return false;
+        if (!url.toLowerCase().matches("https?://.+\\..+/(track|album)/.+")) {
+            return false;
+        }
 
         // Test whether domain is supported
         return BandcampExtractorHelper.isSupportedDomain(url);
     }
 
     @Override
-    public String getUrl(String id, List<String> contentFilter, String sortFilter) throws ParsingException {
+    public String getUrl(final String id,
+                         final List<String> contentFilter,
+                         final String sortFilter) throws ParsingException {
         return id;
     }
 }
@@ -16,7 +16,9 @@ import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper.BASE_URL;
 public class BandcampFeaturedLinkHandlerFactory extends ListLinkHandlerFactory {
 
     @Override
-    public String getUrl(final String id, final List<String> contentFilter, final String sortFilter) {
+    public String getUrl(final String id,
+                         final List<String> contentFilter,
+                         final String sortFilter) {
         if (id.equals(KIOSK_FEATURED)) {
             return FEATURED_API_URL; // doesn't have a website
         } else if (id.equals(KIOSK_RADIO)) {
 
@@ -27,11 +29,11 @@ public class BandcampFeaturedLinkHandlerFactory extends ListLinkHandlerFactory {
     }
 
     @Override
-    public String getId(String url) {
-        url = Utils.replaceHttpWithHttps(url);
-        if (BandcampExtractorHelper.isRadioUrl(url) || url.equals(RADIO_API_URL)) {
+    public String getId(final String url) {
+        final String fixedUrl = Utils.replaceHttpWithHttps(url);
+        if (BandcampExtractorHelper.isRadioUrl(fixedUrl) || fixedUrl.equals(RADIO_API_URL)) {
             return KIOSK_RADIO;
-        } else if (url.equals(FEATURED_API_URL)) {
+        } else if (fixedUrl.equals(FEATURED_API_URL)) {
             return KIOSK_FEATURED;
         } else {
             return null;
 
@@ -39,8 +41,10 @@ public class BandcampFeaturedLinkHandlerFactory extends ListLinkHandlerFactory {
     }
 
     @Override
-    public boolean onAcceptUrl(String url) {
-        url = Utils.replaceHttpWithHttps(url);
-        return url.equals(FEATURED_API_URL) || (url.equals(RADIO_API_URL) || BandcampExtractorHelper.isRadioUrl(url));
+    public boolean onAcceptUrl(final String url) {
+        final String fixedUrl = Utils.replaceHttpWithHttps(url);
+        return fixedUrl.equals(FEATURED_API_URL)
+                || fixedUrl.equals(RADIO_API_URL)
+                || BandcampExtractorHelper.isRadioUrl(fixedUrl);
     }
 }
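The getId() and onAcceptUrl() changes above are the usual fix for Checkstyle's FinalParameters/ParameterAssignment rules: rather than reassigning the url parameter, the normalized value is stored in a new final local. A minimal sketch of the pattern; the class and method names are illustrative, not from this diff:

public class UrlNormalizeExample {
    // Before (flagged): String normalize(String url) { url = url.toLowerCase(); return url; }

    // After: the parameter stays final and the derived value gets its own local.
    static String normalize(final String url) {
        final String fixedUrl = url.trim().toLowerCase();
        return fixedUrl;
    }

    public static void main(final String[] args) {
        System.out.println(normalize("  HTTPS://Example.BANDCAMP.com/ "));
    }
}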
@@ -18,8 +18,9 @@ public class BandcampPlaylistLinkHandlerFactory extends ListLinkHandlerFactory {
     }
 
     @Override
-    public String getUrl(final String url, final List<String> contentFilter, final String sortFilter)
-            throws ParsingException {
+    public String getUrl(final String url,
+                         final List<String> contentFilter,
+                         final String sortFilter) throws ParsingException {
         return url;
     }
 
@@ -30,7 +31,9 @@ public class BandcampPlaylistLinkHandlerFactory extends ListLinkHandlerFactory {
     public boolean onAcceptUrl(final String url) throws ParsingException {
 
         // Exclude URLs which do not lead to an album
-        if (!url.toLowerCase().matches("https?://.+\\..+/album/.+")) return false;
+        if (!url.toLowerCase().matches("https?://.+\\..+/album/.+")) {
+            return false;
+        }
 
         // Test whether domain is supported
         return BandcampExtractorHelper.isSupportedDomain(url);
@@ -15,14 +15,11 @@ public class BandcampSearchQueryHandlerFactory extends SearchQueryHandlerFactory {
 
 
     @Override
-    public String getUrl(final String query, final List<String> contentFilter, final String sortFilter)
-            throws ParsingException {
+    public String getUrl(final String query,
+                         final List<String> contentFilter,
+                         final String sortFilter) throws ParsingException {
         try {
-
-            return BASE_URL + "/search?q=" +
-                    URLEncoder.encode(query, "UTF-8")
-                    + "&page=1";
-
+            return BASE_URL + "/search?q=" + URLEncoder.encode(query, "UTF-8") + "&page=1";
         } catch (final UnsupportedEncodingException e) {
             throw new ParsingException("query \"" + query + "\" could not be encoded", e);
         }
@@ -50,10 +50,14 @@ public class BandcampStreamLinkHandlerFactory extends LinkHandlerFactory {
     public boolean onAcceptUrl(final String url) throws ParsingException {
 
         // Accept Bandcamp radio
-        if (BandcampExtractorHelper.isRadioUrl(url)) return true;
+        if (BandcampExtractorHelper.isRadioUrl(url)) {
+            return true;
+        }
 
         // Don't accept URLs that don't point to a track
-        if (!url.toLowerCase().matches("https?://.+\\..+/track/.+")) return false;
+        if (!url.toLowerCase().matches("https?://.+\\..+/track/.+")) {
+            return false;
+        }
 
         // Test whether domain is supported
         return BandcampExtractorHelper.isSupportedDomain(url);
@@ -1,10 +1,13 @@
 package org.schabi.newpipe.extractor.services.media_ccc;
 
+import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.AUDIO;
+import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.VIDEO;
+import static java.util.Arrays.asList;
 
 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.channel.ChannelExtractor;
 import org.schabi.newpipe.extractor.comments.CommentsExtractor;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
 import org.schabi.newpipe.extractor.kiosk.KioskList;
 import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
 import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
 
@@ -14,16 +17,24 @@ import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
 import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
 import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
 import org.schabi.newpipe.extractor.search.SearchExtractor;
-import org.schabi.newpipe.extractor.services.media_ccc.extractors.*;
-import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.*;
+import org.schabi.newpipe.extractor.services.media_ccc.extractors.MediaCCCConferenceExtractor;
+import org.schabi.newpipe.extractor.services.media_ccc.extractors.MediaCCCConferenceKiosk;
+import org.schabi.newpipe.extractor.services.media_ccc.extractors.MediaCCCLiveStreamExtractor;
+import org.schabi.newpipe.extractor.services.media_ccc.extractors.MediaCCCLiveStreamKiosk;
+import org.schabi.newpipe.extractor.services.media_ccc.extractors.MediaCCCParsingHelper;
+import org.schabi.newpipe.extractor.services.media_ccc.extractors.MediaCCCRecentKiosk;
+import org.schabi.newpipe.extractor.services.media_ccc.extractors.MediaCCCSearchExtractor;
+import org.schabi.newpipe.extractor.services.media_ccc.extractors.MediaCCCStreamExtractor;
+import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCConferenceLinkHandlerFactory;
+import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCConferencesListLinkHandlerFactory;
+import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCLiveListLinkHandlerFactory;
+import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCRecentListLinkHandlerFactory;
+import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory;
+import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCStreamLinkHandlerFactory;
 import org.schabi.newpipe.extractor.stream.StreamExtractor;
 import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
 import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
 
-import static java.util.Arrays.asList;
-import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.AUDIO;
-import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.VIDEO;
 
 public class MediaCCCService extends StreamingService {
     public MediaCCCService(final int id) {
         super(id, "media.ccc.de", asList(AUDIO, VIDEO));
 
@@ -79,42 +90,42 @@ public class MediaCCCService extends StreamingService {
 
     @Override
     public KioskList getKioskList() throws ExtractionException {
-        KioskList list = new KioskList(this);
+        final KioskList list = new KioskList(this);
 
         // add kiosks here e.g.:
         try {
-            list.addKioskEntry(new KioskList.KioskExtractorFactory() {
-                @Override
-                public KioskExtractor createNewKiosk(final StreamingService streamingService,
-                                                     final String url, final String kioskId)
-                        throws ExtractionException {
-                    return new MediaCCCConferenceKiosk(MediaCCCService.this,
-                            new MediaCCCConferencesListLinkHandlerFactory().fromUrl(url), kioskId);
-                }
-            }, new MediaCCCConferencesListLinkHandlerFactory(), "conferences");
+            list.addKioskEntry(
+                    (streamingService, url, kioskId) -> new MediaCCCConferenceKiosk(
+                            MediaCCCService.this,
+                            new MediaCCCConferencesListLinkHandlerFactory().fromUrl(url),
+                            kioskId
+                    ),
+                    new MediaCCCConferencesListLinkHandlerFactory(),
+                    "conferences"
+            );
 
-            list.addKioskEntry(new KioskList.KioskExtractorFactory() {
-                @Override
-                public KioskExtractor createNewKiosk(final StreamingService streamingService,
-                                                     final String url, final String kioskId)
-                        throws ExtractionException {
-                    return new MediaCCCRecentKiosk(MediaCCCService.this,
-                            new MediaCCCRecentListLinkHandlerFactory().fromUrl(url), kioskId);
-                }
-            }, new MediaCCCRecentListLinkHandlerFactory(), "recent");
+            list.addKioskEntry(
+                    (streamingService, url, kioskId) -> new MediaCCCRecentKiosk(
+                            MediaCCCService.this,
+                            new MediaCCCRecentListLinkHandlerFactory().fromUrl(url),
+                            kioskId
+                    ),
+                    new MediaCCCRecentListLinkHandlerFactory(),
+                    "recent"
+            );
 
-            list.addKioskEntry(new KioskList.KioskExtractorFactory() {
-                @Override
-                public KioskExtractor createNewKiosk(final StreamingService streamingService,
-                                                     final String url, final String kioskId)
-                        throws ExtractionException {
-                    return new MediaCCCLiveStreamKiosk(MediaCCCService.this,
-                            new MediaCCCLiveListLinkHandlerFactory().fromUrl(url), kioskId);
-                }
-            }, new MediaCCCLiveListLinkHandlerFactory(), "live");
+            list.addKioskEntry(
+                    (streamingService, url, kioskId) -> new MediaCCCLiveStreamKiosk(
+                            MediaCCCService.this,
+                            new MediaCCCLiveListLinkHandlerFactory().fromUrl(url),
+                            kioskId
+                    ),
+                    new MediaCCCLiveListLinkHandlerFactory(),
+                    "live"
+            );
 
             list.setDefaultKiosk("recent");
-        } catch (Exception e) {
+        } catch (final Exception e) {
             throw new ExtractionException(e);
         }
 
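The MediaCCCService hunk above swaps three anonymous KioskList.KioskExtractorFactory implementations for lambdas, which is possible because the factory exposes a single abstract method. A condensed sketch of the same before/after conversion against a hypothetical single-method interface (this is not NewPipe's actual API, only an illustration of the refactor):

import java.util.HashMap;
import java.util.Map;

// Hypothetical functional interface standing in for KioskList.KioskExtractorFactory.
interface ExtractorFactory {
    Object create(String url, String kioskId);
}

public class KioskRegistrationExample {
    public static void main(final String[] args) {
        final Map<String, ExtractorFactory> kiosks = new HashMap<>();

        // Anonymous-class form, as in the old code:
        kiosks.put("recent", new ExtractorFactory() {
            @Override
            public Object create(final String url, final String kioskId) {
                return "recent kiosk for " + url;
            }
        });

        // Equivalent lambda form, as in the new code:
        kiosks.put("live", (url, kioskId) -> "live kiosk for " + url);

        System.out.println(kiosks.get("live").create("https://example.test", "live"));
    }
}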
@@ -53,22 +53,22 @@ public class MediaCCCConferenceExtractor extends ChannelExtractor {
     }
 
     @Override
-    public String getParentChannelName() throws ParsingException {
+    public String getParentChannelName() {
         return "";
     }
 
     @Override
-    public String getParentChannelUrl() throws ParsingException {
+    public String getParentChannelUrl() {
         return "";
     }
 
     @Override
-    public String getParentChannelAvatarUrl() throws ParsingException {
+    public String getParentChannelAvatarUrl() {
         return "";
     }
 
     @Override
-    public boolean isVerified() throws ParsingException {
+    public boolean isVerified() {
         return false;
     }
 
 
@@ -91,10 +91,11 @@ public class MediaCCCConferenceExtractor extends ChannelExtractor {
     @Override
     public void onFetchPage(@Nonnull final Downloader downloader)
             throws IOException, ExtractionException {
-        final String conferenceUrl = MediaCCCConferenceLinkHandlerFactory.CONFERENCE_API_ENDPOINT + getId();
+        final String conferenceUrl
+                = MediaCCCConferenceLinkHandlerFactory.CONFERENCE_API_ENDPOINT + getId();
         try {
             conferenceData = JsonParser.object().from(downloader.get(conferenceUrl).responseBody());
-        } catch (JsonParserException jpe) {
+        } catch (final JsonParserException jpe) {
             throw new ExtractionException("Could not parse json returnd by url: " + conferenceUrl);
         }
     }
@@ -32,8 +32,8 @@ public class MediaCCCConferenceKiosk extends KioskExtractor<ChannelInfoItem> {
     @Nonnull
     @Override
     public InfoItemsPage<ChannelInfoItem> getInitialPage() {
-        JsonArray conferences = doc.getArray("conferences");
-        ChannelInfoItemsCollector collector = new ChannelInfoItemsCollector(getServiceId());
+        final JsonArray conferences = doc.getArray("conferences");
+        final ChannelInfoItemsCollector collector = new ChannelInfoItemsCollector(getServiceId());
         for (int i = 0; i < conferences.size(); i++) {
             collector.commit(new MediaCCCConferenceInfoItemExtractor(conferences.getObject(i)));
         }
 
@@ -54,7 +54,7 @@ public class MediaCCCConferenceKiosk extends KioskExtractor<ChannelInfoItem> {
                 .responseBody();
         try {
             doc = JsonParser.object().from(site);
-        } catch (JsonParserException jpe) {
+        } catch (final JsonParserException jpe) {
             throw new ExtractionException("Could not parse json.", jpe);
         }
     }
@@ -2,50 +2,51 @@ package org.schabi.newpipe.extractor.services.media_ccc.extractors;
 
 import com.grack.nanojson.JsonArray;
 import com.grack.nanojson.JsonObject;
 
 import org.schabi.newpipe.extractor.MediaFormat;
 import org.schabi.newpipe.extractor.MetaInfo;
 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
 import org.schabi.newpipe.extractor.localization.DateWrapper;
-import org.schabi.newpipe.extractor.stream.*;
+import org.schabi.newpipe.extractor.stream.AudioStream;
+import org.schabi.newpipe.extractor.stream.Description;
+import org.schabi.newpipe.extractor.stream.StreamExtractor;
+import org.schabi.newpipe.extractor.stream.StreamType;
+import org.schabi.newpipe.extractor.stream.VideoStream;
 
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 
+import javax.annotation.Nonnull;
 
 public class MediaCCCLiveStreamExtractor extends StreamExtractor {
-    private JsonArray doc = null;
     private JsonObject conference = null;
     private String group = "";
     private JsonObject room = null;
 
-    public MediaCCCLiveStreamExtractor(StreamingService service, LinkHandler linkHandler) {
+    public MediaCCCLiveStreamExtractor(final StreamingService service,
+                                       final LinkHandler linkHandler) {
         super(service, linkHandler);
     }
 
     @Override
-    public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
-        doc = MediaCCCParsingHelper.getLiveStreams(downloader, getExtractorLocalization());
+    public void onFetchPage(@Nonnull final Downloader downloader)
+            throws IOException, ExtractionException {
+        final JsonArray doc =
+                MediaCCCParsingHelper.getLiveStreams(downloader, getExtractorLocalization());
         // find correct room
         for (int c = 0; c < doc.size(); c++) {
-            final JsonObject conference = doc.getObject(c);
+            conference = doc.getObject(c);
             final JsonArray groups = conference.getArray("groups");
             for (int g = 0; g < groups.size(); g++) {
-                final String group = groups.getObject(g).getString("group");
+                group = groups.getObject(g).getString("group");
                 final JsonArray rooms = groups.getObject(g).getArray("rooms");
                 for (int r = 0; r < rooms.size(); r++) {
-                    final JsonObject room = rooms.getObject(r);
-                    if (getId().equals(conference.getString("slug") + "/" + room.getString("slug"))) {
-                        this.conference = conference;
-                        this.group = group;
-                        this.room = room;
+                    room = rooms.getObject(r);
+                    if (getId().equals(
+                            conference.getString("slug") + "/" + room.getString("slug"))) {
                         return;
                     }
                 }
 
@@ -69,7 +70,8 @@ public class MediaCCCLiveStreamExtractor extends StreamExtractor {
     @Nonnull
     @Override
     public Description getDescription() throws ParsingException {
-        return new Description(conference.getString("description") + " - " + group, Description.PLAIN_TEXT);
+        return new Description(conference.getString("description")
+                + " - " + group, Description.PLAIN_TEXT);
     }
 
     @Override
 
@@ -93,12 +95,11 @@ public class MediaCCCLiveStreamExtractor extends StreamExtractor {
     @Override
     public String getHlsUrl() {
         // TODO: There are multiple HLS streams.
-        // Make getHlsUrl() and getDashMpdUrl() return lists of VideoStreams, so the user can choose a resolution.
+        // Make getHlsUrl() and getDashMpdUrl() return lists of VideoStreams,
+        // so the user can choose a resolution.
         for (int s = 0; s < room.getArray("streams").size(); s++) {
             final JsonObject stream = room.getArray("streams").getObject(s);
             if (stream.getString("type").equals("video")) {
-                final String resolution = stream.getArray("videoSize").getInt(0) + "x"
-                        + stream.getArray("videoSize").getInt(1);
                 if (stream.has("hls")) {
                     return stream.getObject("urls").getObject("hls").getString("url");
                 }
 
@@ -115,7 +116,8 @@ public class MediaCCCLiveStreamExtractor extends StreamExtractor {
             if (stream.getString("type").equals("audio")) {
                 for (final String type : stream.getObject("urls").keySet()) {
                     final JsonObject url = stream.getObject("urls").getObject(type);
-                    audioStreams.add(new AudioStream(url.getString("url"), MediaFormat.getFromSuffix(type), -1));
+                    audioStreams.add(new AudioStream(url.getString("url"),
+                            MediaFormat.getFromSuffix(type), -1));
                 }
             }
         }
@@ -18,19 +18,22 @@ import java.io.IOException;
 public class MediaCCCLiveStreamKiosk extends KioskExtractor<StreamInfoItem> {
     private JsonArray doc;
 
-    public MediaCCCLiveStreamKiosk(StreamingService streamingService, ListLinkHandler linkHandler, String kioskId) {
+    public MediaCCCLiveStreamKiosk(final StreamingService streamingService,
+                                   final ListLinkHandler linkHandler,
+                                   final String kioskId) {
         super(streamingService, linkHandler, kioskId);
     }
 
     @Override
-    public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
+    public void onFetchPage(@Nonnull final Downloader downloader)
+            throws IOException, ExtractionException {
         doc = MediaCCCParsingHelper.getLiveStreams(downloader, getExtractorLocalization());
     }
 
     @Nonnull
     @Override
     public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
-        StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
+        final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
         for (int c = 0; c < doc.size(); c++) {
             final JsonObject conference = doc.getObject(c);
             final JsonArray groups = conference.getArray("groups");
 
@@ -48,7 +51,8 @@ public class MediaCCCLiveStreamKiosk extends KioskExtractor<StreamInfoItem> {
     }
 
     @Override
-    public InfoItemsPage<StreamInfoItem> getPage(Page page) throws IOException, ExtractionException {
+    public InfoItemsPage<StreamInfoItem> getPage(final Page page)
+            throws IOException, ExtractionException {
         return InfoItemsPage.emptyPage();
     }
 
|
|||
private final String group;
|
||||
private final JsonObject roomInfo;
|
||||
|
||||
public MediaCCCLiveStreamKioskExtractor(final JsonObject conferenceInfo, final String group,
|
||||
public MediaCCCLiveStreamKioskExtractor(final JsonObject conferenceInfo,
|
||||
final String group,
|
||||
final JsonObject roomInfo) {
|
||||
this.conferenceInfo = conferenceInfo;
|
||||
this.group = group;
|
||||
|
@ -39,7 +40,7 @@ public class MediaCCCLiveStreamKioskExtractor implements StreamInfoItemExtractor
|
|||
@Override
|
||||
public StreamType getStreamType() throws ParsingException {
|
||||
boolean isVideo = false;
|
||||
for (Object stream : roomInfo.getArray("streams")) {
|
||||
for (final Object stream : roomInfo.getArray("streams")) {
|
||||
if ("video".equals(((JsonObject) stream).getString("type"))) {
|
||||
isVideo = true;
|
||||
break;
|
||||
|
@ -65,7 +66,8 @@ public class MediaCCCLiveStreamKioskExtractor implements StreamInfoItemExtractor
|
|||
|
||||
@Override
|
||||
public String getUploaderName() throws ParsingException {
|
||||
return conferenceInfo.getString("conference") + " - " + group + " - " + roomInfo.getString("display");
|
||||
return conferenceInfo.getString("conference") + " - " + group
|
||||
+ " - " + roomInfo.getString("display");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@@ -15,15 +15,17 @@ import java.time.format.DateTimeParseException;
 import java.util.regex.Pattern;
 
 public final class MediaCCCParsingHelper {
-    private static final Pattern LIVE_STREAM_ID_PATTERN = Pattern.compile("\\w+/\\w+"); // {conference_slug}/{room_slug}
+    // {conference_slug}/{room_slug}
+    private static final Pattern LIVE_STREAM_ID_PATTERN = Pattern.compile("\\w+/\\w+");
     private static JsonArray liveStreams = null;
 
     private MediaCCCParsingHelper() { }
 
-    public static OffsetDateTime parseDateFrom(final String textualUploadDate) throws ParsingException {
+    public static OffsetDateTime parseDateFrom(final String textualUploadDate)
+            throws ParsingException {
         try {
             return OffsetDateTime.parse(textualUploadDate);
-        } catch (DateTimeParseException e) {
+        } catch (final DateTimeParseException e) {
             throw new ParsingException("Could not parse date: \"" + textualUploadDate + "\"", e);
         }
     }
 
@@ -31,8 +33,8 @@ public final class MediaCCCParsingHelper {
     /**
      * Check whether an id is a live stream id
     * @param id the {@code id} to check
-     * @return returns {@code true} if the {@code id} is formatted like {@code {conference_slug}/{room_slug}};
-     * {@code false} otherwise
+     * @return returns {@code true} if the {@code id} is formatted like
+     * {@code {conference_slug}/{room_slug}}; {@code false} otherwise
      */
     public static boolean isLiveStreamId(final String id) {
         return LIVE_STREAM_ID_PATTERN.matcher(id).find();
 
@@ -40,24 +42,28 @@ public final class MediaCCCParsingHelper {
 
     /**
      * Get currently available live streams from
-     * <a href="https://streaming.media.ccc.de/streams/v2.json">https://streaming.media.ccc.de/streams/v2.json</a>.
+     * <a href="https://streaming.media.ccc.de/streams/v2.json">
+     * https://streaming.media.ccc.de/streams/v2.json</a>.
      * Use this method to cache requests, because they can get quite big.
     * TODO: implement better caching policy (max-age: 3 min)
     * @param downloader The downloader to use for making the request
     * @param localization The localization to be used. Will most likely be ignored.
-     * @return {@link JsonArray} containing current conferences and info about their rooms and streams.
-     * @throws ExtractionException if the data could not be fetched or the retrieved data could not be parsed to a {@link JsonArray}
+     * @return {@link JsonArray} containing current conferences and info about their rooms and
+     * streams.
+     * @throws ExtractionException if the data could not be fetched or the retrieved data could not
+     * be parsed to a {@link JsonArray}
      */
-    public static JsonArray getLiveStreams(final Downloader downloader, final Localization localization)
+    public static JsonArray getLiveStreams(final Downloader downloader,
+                                           final Localization localization)
             throws ExtractionException {
         if (liveStreams == null) {
             try {
                 final String site = downloader.get("https://streaming.media.ccc.de/streams/v2.json",
                         localization).responseBody();
                 liveStreams = JsonParser.array().from(site);
-            } catch (IOException | ReCaptchaException e) {
+            } catch (final IOException | ReCaptchaException e) {
                 throw new ExtractionException("Could not get live stream JSON.", e);
-            } catch (JsonParserException e) {
+            } catch (final JsonParserException e) {
                 throw new ExtractionException("Could not parse JSON.", e);
             }
         }
|
|||
|
||||
private JsonObject doc;
|
||||
|
||||
public MediaCCCRecentKiosk(StreamingService streamingService, ListLinkHandler linkHandler, String kioskId) {
|
||||
public MediaCCCRecentKiosk(final StreamingService streamingService,
|
||||
final ListLinkHandler linkHandler,
|
||||
final String kioskId) {
|
||||
super(streamingService, linkHandler, kioskId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
public void onFetchPage(@Nonnull final Downloader downloader)
|
||||
throws IOException, ExtractionException {
|
||||
final String site = downloader.get("https://api.media.ccc.de/public/events/recent",
|
||||
getExtractorLocalization()).responseBody();
|
||||
try {
|
||||
doc = JsonParser.object().from(site);
|
||||
} catch (JsonParserException jpe) {
|
||||
} catch (final JsonParserException jpe) {
|
||||
throw new ExtractionException("Could not parse json.", jpe);
|
||||
}
|
||||
}
|
||||
|
@ -48,7 +51,8 @@ public class MediaCCCRecentKiosk extends KioskExtractor<StreamInfoItem> {
|
|||
streamInfoItem -> streamInfoItem.getUploadDate().offsetDateTime());
|
||||
comparator = comparator.reversed();
|
||||
|
||||
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId(), comparator);
|
||||
final StreamInfoItemsCollector collector
|
||||
= new StreamInfoItemsCollector(getServiceId(), comparator);
|
||||
for (int i = 0; i < events.size(); i++) {
|
||||
collector.commit(new MediaCCCRecentKioskExtractor(events.getObject(i)));
|
||||
}
|
||||
|
@ -56,7 +60,8 @@ public class MediaCCCRecentKiosk extends KioskExtractor<StreamInfoItem> {
|
|||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<StreamInfoItem> getPage(Page page) throws IOException, ExtractionException {
|
||||
public InfoItemsPage<StreamInfoItem> getPage(final Page page)
|
||||
throws IOException, ExtractionException {
|
||||
return InfoItemsPage.emptyPage();
|
||||
}
|
||||
|
||||
|
|
|
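The comparator in the second hunk above orders the "recent" kiosk newest-first. A tiny standalone illustration of the same Comparator.comparing(...).reversed() idiom; the Talk class below is a stand-in for StreamInfoItem and is not part of the extractor code.

import java.time.OffsetDateTime;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

// Standalone illustration of the Comparator.comparing(...).reversed() idiom
// used above. "Talk" is a hypothetical stand-in for StreamInfoItem.
public class NewestFirstDemo {
    static final class Talk {
        private final String title;
        private final OffsetDateTime uploadDate;

        Talk(final String title, final OffsetDateTime uploadDate) {
            this.title = title;
            this.uploadDate = uploadDate;
        }

        OffsetDateTime getUploadDate() {
            return uploadDate;
        }

        @Override
        public String toString() {
            return title;
        }
    }

    public static void main(final String[] args) {
        final List<Talk> talks = Arrays.asList(
                new Talk("older talk", OffsetDateTime.parse("2021-12-24T18:00:00Z")),
                new Talk("newest talk", OffsetDateTime.parse("2022-03-01T09:30:00Z")),
                new Talk("old talk", OffsetDateTime.parse("2022-01-05T10:00:00Z")));

        Comparator<Talk> comparator = Comparator.comparing(Talk::getUploadDate);
        comparator = comparator.reversed(); // newest first, as in the kiosk above

        talks.stream().sorted(comparator).forEach(System.out::println);
        // prints: newest talk, old talk, older talk
    }
}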
@@ -46,8 +46,8 @@ public class MediaCCCRecentKioskExtractor implements StreamInfoItemExtractor {

    @Override
    public long getDuration() throws ParsingException {
-       // duration and length have the same value
-       // see https://github.com/voc/voctoweb/blob/master/app/views/public/shared/_event.json.jbuilder
+       // duration and length have the same value, see
+       // https://github.com/voc/voctoweb/blob/master/app/views/public/shared/_event.json.jbuilder
        return event.getInt("duration");
    }
@@ -1,5 +1,9 @@
package org.schabi.newpipe.extractor.services.media_ccc.extractors;

+import static org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory.ALL;
+import static org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory.CONFERENCES;
+import static org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory.EVENTS;
+
import com.grack.nanojson.JsonArray;
import com.grack.nanojson.JsonObject;
import com.grack.nanojson.JsonParser;

@@ -13,7 +17,6 @@ import org.schabi.newpipe.extractor.channel.ChannelInfoItem;
import org.schabi.newpipe.extractor.channel.ChannelInfoItemExtractor;
import org.schabi.newpipe.extractor.downloader.Downloader;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
import org.schabi.newpipe.extractor.MultiInfoItemsCollector;
import org.schabi.newpipe.extractor.search.SearchExtractor;

@@ -26,10 +29,6 @@ import java.util.List;

import javax.annotation.Nonnull;

-import static org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory.ALL;
-import static org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory.CONFERENCES;
-import static org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory.EVENTS;

public class MediaCCCSearchExtractor extends SearchExtractor {
    private JsonObject doc;
    private MediaCCCConferenceKiosk conferenceKiosk;

@@ -41,7 +40,7 @@ public class MediaCCCSearchExtractor extends SearchExtractor {
            conferenceKiosk = new MediaCCCConferenceKiosk(service,
                    new MediaCCCConferencesListLinkHandlerFactory().fromId("conferences"),
                    "conferences");
-       } catch (Exception e) {
+       } catch (final Exception e) {
            e.printStackTrace();
        }
    }

@@ -79,7 +78,7 @@ public class MediaCCCSearchExtractor extends SearchExtractor {
        if (getLinkHandler().getContentFilters().contains(EVENTS)
                || getLinkHandler().getContentFilters().contains(ALL)
                || getLinkHandler().getContentFilters().isEmpty()) {
-           JsonArray events = doc.getArray("events");
+           final JsonArray events = doc.getArray("events");
            for (int i = 0; i < events.size(); i++) {
                // Ensure only uploaded talks are shown in the search results.
                // If the release date is null, the talk has not been held or uploaded yet

@@ -109,7 +108,7 @@ public class MediaCCCSearchExtractor extends SearchExtractor {
        site = downloader.get(url, getExtractorLocalization()).responseBody();
        try {
            doc = JsonParser.object().from(site);
-       } catch (JsonParserException jpe) {
+       } catch (final JsonParserException jpe) {
            throw new ExtractionException("Could not parse JSON.", jpe);
        }
    }

@@ -143,7 +142,7 @@ public class MediaCCCSearchExtractor extends SearchExtractor {
    }

    @Override
-   public boolean isVerified() throws ParsingException {
+   public boolean isVerified() {
        return false;
    }
@@ -4,8 +4,8 @@ import com.grack.nanojson.JsonArray;
import com.grack.nanojson.JsonObject;
import com.grack.nanojson.JsonParser;
import com.grack.nanojson.JsonParserException;

import org.schabi.newpipe.extractor.MediaFormat;
import org.schabi.newpipe.extractor.MetaInfo;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.downloader.Downloader;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;

@@ -15,17 +15,21 @@ import org.schabi.newpipe.extractor.localization.DateWrapper;
import org.schabi.newpipe.extractor.localization.Localization;
import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCConferenceLinkHandlerFactory;
import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCStreamLinkHandlerFactory;
-import org.schabi.newpipe.extractor.stream.*;
+import org.schabi.newpipe.extractor.stream.AudioStream;
+import org.schabi.newpipe.extractor.stream.Description;
+import org.schabi.newpipe.extractor.stream.StreamExtractor;
+import org.schabi.newpipe.extractor.stream.StreamType;
+import org.schabi.newpipe.extractor.stream.VideoStream;
import org.schabi.newpipe.extractor.utils.JsonUtils;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;

-import javax.annotation.Nonnull;

public class MediaCCCStreamExtractor extends StreamExtractor {
    private JsonObject data;
    private JsonObject conferenceData;

@@ -158,7 +162,7 @@ public class MediaCCCStreamExtractor extends StreamExtractor {
            data = JsonParser.object().from(downloader.get(videoUrl).responseBody());
            conferenceData = JsonParser.object()
                    .from(downloader.get(data.getString("conference_url")).responseBody());
-       } catch (JsonParserException jpe) {
+       } catch (final JsonParserException jpe) {
            throw new ExtractionException("Could not parse json returned by url: " + videoUrl, jpe);
        }
    }
@@ -10,7 +10,7 @@ import org.schabi.newpipe.extractor.stream.StreamType;
import javax.annotation.Nullable;

public class MediaCCCStreamInfoItemExtractor implements StreamInfoItemExtractor {
-   private JsonObject event;
+   private final JsonObject event;

    public MediaCCCStreamInfoItemExtractor(final JsonObject event) {
        this.event = event;
@@ -7,9 +7,12 @@ import org.schabi.newpipe.extractor.utils.Parser;
import java.util.List;

public class MediaCCCConferenceLinkHandlerFactory extends ListLinkHandlerFactory {
-   public static final String CONFERENCE_API_ENDPOINT = "https://api.media.ccc.de/public/conferences/";
+   public static final String CONFERENCE_API_ENDPOINT
+           = "https://api.media.ccc.de/public/conferences/";
    public static final String CONFERENCE_PATH = "https://media.ccc.de/c/";
-   private static final String ID_PATTERN = "(?:(?:(?:api\\.)?media\\.ccc\\.de/public/conferences/)|(?:media\\.ccc\\.de/[bc]/))([^/?&#]*)";
+   private static final String ID_PATTERN
+           = "(?:(?:(?:api\\.)?media\\.ccc\\.de/public/conferences/)"
+           + "|(?:media\\.ccc\\.de/[bc]/))([^/?&#]*)";

    @Override
    public String getUrl(final String id,

@@ -27,7 +30,7 @@ public class MediaCCCConferenceLinkHandlerFactory extends ListLinkHandlerFactory
    public boolean onAcceptUrl(final String url) {
        try {
            return getId(url) != null;
-       } catch (ParsingException e) {
+       } catch (final ParsingException e) {
            return false;
        }
    }
@@ -7,20 +7,22 @@ import java.util.List;
import java.util.regex.Pattern;

public class MediaCCCLiveListLinkHandlerFactory extends ListLinkHandlerFactory {
-   private static final String streamPattern = "^(?:https?://)?media\\.ccc\\.de/live$";
+   private static final String STREAM_PATTERN = "^(?:https?://)?media\\.ccc\\.de/live$";

    @Override
-   public String getId(String url) throws ParsingException {
+   public String getId(final String url) throws ParsingException {
        return "live";
    }

    @Override
-   public boolean onAcceptUrl(String url) throws ParsingException {
-       return Pattern.matches(streamPattern, url);
+   public boolean onAcceptUrl(final String url) throws ParsingException {
+       return Pattern.matches(STREAM_PATTERN, url);
    }

    @Override
-   public String getUrl(String id, List<String> contentFilter, String sortFilter) throws ParsingException {
+   public String getUrl(final String id,
+                        final List<String> contentFilter,
+                        final String sortFilter) throws ParsingException {
        // FIXME: wrong URL; should be https://streaming.media.ccc.de/{conference_slug}/{room_slug}
        return "https://media.ccc.de/live";
    }
@@ -7,7 +7,9 @@ import org.schabi.newpipe.extractor.utils.Parser;
public class MediaCCCLiveStreamLinkHandlerFactory extends LinkHandlerFactory {
    public static final String VIDEO_API_ENDPOINT = "https://api.media.ccc.de/public/events/";
    private static final String VIDEO_PATH = "https://streaming.media.ccc.de/v/";
-   private static final String ID_PATTERN = "(?:(?:(?:api\\.)?media\\.ccc\\.de/public/events/)|(?:media\\.ccc\\.de/v/))([^/?&#]*)";
+   private static final String ID_PATTERN
+           = "(?:(?:(?:api\\.)?media\\.ccc\\.de/public/events/)"
+           + "|(?:media\\.ccc\\.de/v/))([^/?&#]*)";

    @Override
    public String getId(final String url) throws ParsingException {

@@ -23,7 +25,7 @@ public class MediaCCCLiveStreamLinkHandlerFactory extends LinkHandlerFactory {
    public boolean onAcceptUrl(final String url) {
        try {
            return getId(url) != null;
-       } catch (ParsingException e) {
+       } catch (final ParsingException e) {
            return false;
        }
    }
@@ -6,20 +6,22 @@ import java.util.List;
import java.util.regex.Pattern;

public class MediaCCCRecentListLinkHandlerFactory extends ListLinkHandlerFactory {
-   private static final String pattern = "^(https?://)?media\\.ccc\\.de/recent/?$";
+   private static final String PATTERN = "^(https?://)?media\\.ccc\\.de/recent/?$";

    @Override
-   public String getId(String url) {
+   public String getId(final String url) {
        return "recent";
    }

    @Override
-   public boolean onAcceptUrl(String url) {
-       return Pattern.matches(pattern, url);
+   public boolean onAcceptUrl(final String url) {
+       return Pattern.matches(PATTERN, url);
    }

    @Override
-   public String getUrl(String id, List<String> contentFilter, String sortFilter) {
+   public String getUrl(final String id,
+                        final List<String> contentFilter,
+                        final String sortFilter) {
        return "https://media.ccc.de/recent";
    }
}
@@ -34,7 +34,7 @@ public class MediaCCCSearchQueryHandlerFactory extends SearchQueryHandlerFactory
        try {
            return "https://media.ccc.de/public/events/search?q="
                    + URLEncoder.encode(query, UTF_8);
-       } catch (UnsupportedEncodingException e) {
+       } catch (final UnsupportedEncodingException e) {
            throw new ParsingException("Could not create search string with query: " + query, e);
        }
    }
@@ -8,19 +8,21 @@ import org.schabi.newpipe.extractor.utils.Parser;
public class MediaCCCStreamLinkHandlerFactory extends LinkHandlerFactory {
    public static final String VIDEO_API_ENDPOINT = "https://api.media.ccc.de/public/events/";
    private static final String VIDEO_PATH = "https://media.ccc.de/v/";
-   private static final String RECORDING_ID_PATTERN = "(?:(?:(?:api\\.)?media\\.ccc\\.de/public/events/)|(?:media\\.ccc\\.de/v/))([^/?&#]*)";
-   private static final String LIVE_STREAM_API_ENDPOINT = "https://streaming.media.ccc.de/streams/v2.json";
+   private static final String RECORDING_ID_PATTERN
+           = "(?:(?:(?:api\\.)?media\\.ccc\\.de/public/events/)"
+           + "|(?:media\\.ccc\\.de/v/))([^/?&#]*)";
    private static final String LIVE_STREAM_PATH = "https://streaming.media.ccc.de/";
-   private static final String LIVE_STREAM_ID_PATTERN = "streaming\\.media\\.ccc\\.de\\/(\\w+\\/\\w+)";
+   private static final String LIVE_STREAM_ID_PATTERN
+           = "streaming\\.media\\.ccc\\.de\\/(\\w+\\/\\w+)";

    @Override
    public String getId(final String url) throws ParsingException {
        String streamId = null;
        try {
            streamId = Parser.matchGroup1(LIVE_STREAM_ID_PATTERN, url);
-       } catch (Parser.RegexException ignored) {
+       } catch (final Parser.RegexException ignored) {
        }

        if (streamId == null) {
            return Parser.matchGroup1(RECORDING_ID_PATTERN, url);
        }

@@ -39,7 +41,7 @@ public class MediaCCCStreamLinkHandlerFactory extends LinkHandlerFactory {
    public boolean onAcceptUrl(final String url) {
        try {
            return getId(url) != null;
-       } catch (ParsingException e) {
+       } catch (final ParsingException e) {
            return false;
        }
    }
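To make the intent of the two wrapped patterns above concrete, here is a small standalone sketch of how they pick apart recording and live-stream URLs. It uses plain java.util.regex instead of the project's Parser.matchGroup1 helper, and the sample URLs are illustrative only.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Standalone sketch: behaviour of the two ID patterns defined above.
// Plain java.util.regex is used instead of the project's Parser helper.
public class MediaCCCIdPatternDemo {
    private static final Pattern RECORDING_ID_PATTERN = Pattern.compile(
            "(?:(?:(?:api\\.)?media\\.ccc\\.de/public/events/)"
                    + "|(?:media\\.ccc\\.de/v/))([^/?&#]*)");
    private static final Pattern LIVE_STREAM_ID_PATTERN = Pattern.compile(
            "streaming\\.media\\.ccc\\.de\\/(\\w+\\/\\w+)");

    static String getId(final String url) {
        // Try the live-stream pattern first, then fall back to recordings,
        // mirroring the order used in getId() above.
        final Matcher live = LIVE_STREAM_ID_PATTERN.matcher(url);
        if (live.find()) {
            return live.group(1);
        }
        final Matcher recording = RECORDING_ID_PATTERN.matcher(url);
        return recording.find() ? recording.group(1) : null;
    }

    public static void main(final String[] args) {
        // Illustrative URLs only.
        System.out.println(getId("https://media.ccc.de/v/36c3-some-talk"));     // 36c3-some-talk
        System.out.println(getId("https://streaming.media.ccc.de/36c3/halla")); // 36c3/halla
    }
}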
@@ -5,7 +5,6 @@ import com.grack.nanojson.JsonParser;
import com.grack.nanojson.JsonParserException;

import org.schabi.newpipe.extractor.NewPipe;
-import org.schabi.newpipe.extractor.downloader.Downloader;
import org.schabi.newpipe.extractor.downloader.Response;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;

@@ -18,14 +17,15 @@ public class PeertubeInstance {

    private final String url;
    private String name;
-   public static final PeertubeInstance defaultInstance = new PeertubeInstance("https://framatube.org", "FramaTube");
+   public static final PeertubeInstance DEFAULT_INSTANCE
+           = new PeertubeInstance("https://framatube.org", "FramaTube");

-   public PeertubeInstance(String url) {
+   public PeertubeInstance(final String url) {
        this.url = url;
        this.name = "PeerTube";
    }

-   public PeertubeInstance(String url, String name) {
+   public PeertubeInstance(final String url, final String name) {
        this.url = url;
        this.name = name;
    }

@@ -35,11 +35,9 @@ public class PeertubeInstance {
    }

    public void fetchInstanceMetaData() throws Exception {
-       Downloader downloader = NewPipe.getDownloader();
-       Response response = null;
-
+       final Response response;
        try {
-           response = downloader.get(url + "/api/v1/config");
+           response = NewPipe.getDownloader().get(url + "/api/v1/config");
        } catch (ReCaptchaException | IOException e) {
            throw new Exception("unable to configure instance " + url, e);
        }

@@ -49,7 +47,7 @@ public class PeertubeInstance {
    }

        try {
-           JsonObject json = JsonParser.object().from(response.responseBody());
+           final JsonObject json = JsonParser.object().from(response.responseBody());
            this.name = JsonUtils.getString(json, "instance.name");
        } catch (JsonParserException | ParsingException e) {
            throw new Exception("unable to parse instance config", e);
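For context, a short usage sketch of the renamed DEFAULT_INSTANCE constant and the setInstance/fetchInstanceMetaData flow touched above. The framatube.org URL and the method names come from the diff; using ServiceList.PeerTube as the access point to the service is an assumption about the surrounding API, not something shown in this commit.

// Sketch only: pointing the PeerTube service at a specific instance.
// ServiceList.PeerTube as the entry point is an assumption, not part of this diff.
static void usePeertubeInstance() throws Exception {
    final PeertubeInstance instance =
            new PeertubeInstance("https://framatube.org", "FramaTube");
    instance.fetchInstanceMetaData(); // fills in the instance name from /api/v1/config
    ServiceList.PeerTube.setInstance(instance);

    // or fall back to the built-in default:
    ServiceList.PeerTube.setInstance(PeertubeInstance.DEFAULT_INSTANCE);
}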
@@ -17,7 +17,7 @@ import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeParseException;

-public class PeertubeParsingHelper {
+public final class PeertubeParsingHelper {
    public static final String START_KEY = "start";
    public static final String COUNT_KEY = "count";
    public static final int ITEMS_PER_PAGE = 12;

@@ -33,10 +33,11 @@ public class PeertubeParsingHelper {
        }
    }

-   public static OffsetDateTime parseDateFrom(final String textualUploadDate) throws ParsingException {
+   public static OffsetDateTime parseDateFrom(final String textualUploadDate)
+           throws ParsingException {
        try {
            return OffsetDateTime.ofInstant(Instant.parse(textualUploadDate), ZoneOffset.UTC);
-       } catch (DateTimeParseException e) {
+       } catch (final DateTimeParseException e) {
            throw new ParsingException("Could not parse date: \"" + textualUploadDate + "\"", e);
        }
    }

@@ -45,25 +46,31 @@ public class PeertubeParsingHelper {
        final String prevStart;
        try {
            prevStart = Parser.matchGroup1(START_PATTERN, prevPageUrl);
-       } catch (Parser.RegexException e) {
+       } catch (final Parser.RegexException e) {
            return null;
        }
-       if (Utils.isBlank(prevStart)) return null;
+       if (Utils.isBlank(prevStart)) {
+           return null;
+       }

        final long nextStart;
        try {
            nextStart = Long.parseLong(prevStart) + ITEMS_PER_PAGE;
-       } catch (NumberFormatException e) {
+       } catch (final NumberFormatException e) {
            return null;
        }

        if (nextStart >= total) {
            return null;
        } else {
-           return new Page(prevPageUrl.replace(START_KEY + "=" + prevStart, START_KEY + "=" + nextStart));
+           return new Page(prevPageUrl.replace(
+                   START_KEY + "=" + prevStart, START_KEY + "=" + nextStart));
        }
    }

-   public static void collectStreamsFrom(final InfoItemsCollector collector, final JsonObject json, final String baseUrl) throws ParsingException {
+   public static void collectStreamsFrom(final InfoItemsCollector collector,
+                                         final JsonObject json,
+                                         final String baseUrl) throws ParsingException {
        collectStreamsFrom(collector, json, baseUrl, false);
    }

@@ -74,13 +81,15 @@ public class PeertubeParsingHelper {
     * @param json the file to retrieve data from
     * @param baseUrl the base Url of the instance
     * @param sepia if we should use PeertubeSepiaStreamInfoItemExtractor
     * @throws ParsingException
     */
-   public static void collectStreamsFrom(final InfoItemsCollector collector, final JsonObject json, final String baseUrl, boolean sepia) throws ParsingException {
+   public static void collectStreamsFrom(final InfoItemsCollector collector,
+                                         final JsonObject json,
+                                         final String baseUrl,
+                                         final boolean sepia) throws ParsingException {
        final JsonArray contents;
        try {
            contents = (JsonArray) JsonUtils.getValue(json, "data");
-       } catch (Exception e) {
+       } catch (final Exception e) {
            throw new ParsingException("Unable to extract list info", e);
        }

@@ -93,7 +102,7 @@ public class PeertubeParsingHelper {
            item = item.getObject("video");
        }

-       PeertubeStreamInfoItemExtractor extractor;
+       final PeertubeStreamInfoItemExtractor extractor;
        if (sepia) {
            extractor = new PeertubeSepiaStreamInfoItemExtractor(item, baseUrl);
        } else {
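Since the getNextPage arithmetic above is easy to misread, here is a small worked example of the offset computation as a standalone sketch. ITEMS_PER_PAGE = 12 comes from the constant shown above; the "start=(\d*)" regex and the sample URL are assumptions standing in for the project's START_PATTERN and a real API URL.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Sketch only: the next-page offset arithmetic used by getNextPage() above,
// reimplemented standalone. The START_PATTERN regex and URL are illustrative.
public class PeertubePaginationDemo {
    private static final int ITEMS_PER_PAGE = 12;
    private static final Pattern START_PATTERN = Pattern.compile("start=(\\d*)");

    static String nextPageUrl(final String prevPageUrl, final long total) {
        final Matcher m = START_PATTERN.matcher(prevPageUrl);
        if (!m.find() || m.group(1).isEmpty()) {
            return null;
        }
        final long prevStart = Long.parseLong(m.group(1));
        final long nextStart = prevStart + ITEMS_PER_PAGE;
        // No further page once the next offset reaches the reported total.
        return nextStart >= total
                ? null
                : prevPageUrl.replace("start=" + prevStart, "start=" + nextStart);
    }

    public static void main(final String[] args) {
        final String url = "https://framatube.org/api/v1/accounts/demo/videos?start=12&count=12";
        System.out.println(nextPageUrl(url, 30)); // ...start=24&count=12
        System.out.println(nextPageUrl(url, 20)); // null, since 24 >= 20
    }
}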
@@ -1,35 +1,51 @@
package org.schabi.newpipe.extractor.services.peertube;

+import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.COMMENTS;
+import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.VIDEO;
+import static java.util.Arrays.asList;
+
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
import org.schabi.newpipe.extractor.comments.CommentsExtractor;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
import org.schabi.newpipe.extractor.kiosk.KioskList;
-import org.schabi.newpipe.extractor.linkhandler.*;
+import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
+import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
+import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
+import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
+import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
+import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
import org.schabi.newpipe.extractor.search.SearchExtractor;
-import org.schabi.newpipe.extractor.services.peertube.extractors.*;
-import org.schabi.newpipe.extractor.services.peertube.linkHandler.*;
+import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeAccountExtractor;
+import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeChannelExtractor;
+import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeCommentsExtractor;
+import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubePlaylistExtractor;
+import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeSearchExtractor;
+import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeStreamExtractor;
+import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeSuggestionExtractor;
+import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeTrendingExtractor;
+import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeChannelLinkHandlerFactory;
+import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeCommentsLinkHandlerFactory;
+import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubePlaylistLinkHandlerFactory;
+import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeSearchQueryHandlerFactory;
+import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeStreamLinkHandlerFactory;
+import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeTrendingLinkHandlerFactory;
import org.schabi.newpipe.extractor.stream.StreamExtractor;
import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;

import java.util.List;

-import static java.util.Arrays.asList;
-import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.COMMENTS;
-import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.VIDEO;

public class PeertubeService extends StreamingService {

    private PeertubeInstance instance;

-   public PeertubeService(int id) {
-       this(id, PeertubeInstance.defaultInstance);
+   public PeertubeService(final int id) {
+       this(id, PeertubeInstance.DEFAULT_INSTANCE);
    }

-   public PeertubeService(int id, PeertubeInstance instance) {
+   public PeertubeService(final int id, final PeertubeInstance instance) {
        super(id, "PeerTube", asList(VIDEO, COMMENTS));
        this.instance = instance;
    }

@@ -60,13 +76,10 @@ public class PeertubeService extends StreamingService {
    }

    @Override
-   public SearchExtractor getSearchExtractor(SearchQueryHandler queryHandler) {
+   public SearchExtractor getSearchExtractor(final SearchQueryHandler queryHandler) {
        final List<String> contentFilters = queryHandler.getContentFilters();
-       boolean external = false;
-       if (!contentFilters.isEmpty() && contentFilters.get(0).startsWith("sepia_")) {
-           external = true;
-       }
-       return new PeertubeSearchExtractor(this, queryHandler, external);
+       return new PeertubeSearchExtractor(this, queryHandler,
+               !contentFilters.isEmpty() && contentFilters.get(0).startsWith("sepia_"));
    }

    @Override

@@ -80,7 +93,7 @@ public class PeertubeService extends StreamingService {
    }

    @Override
-   public ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler)
+   public ChannelExtractor getChannelExtractor(final ListLinkHandler linkHandler)
            throws ExtractionException {

        if (linkHandler.getUrl().contains("/video-channels/")) {

@@ -91,19 +104,19 @@ public class PeertubeService extends StreamingService {
    }

    @Override
-   public PlaylistExtractor getPlaylistExtractor(ListLinkHandler linkHandler)
+   public PlaylistExtractor getPlaylistExtractor(final ListLinkHandler linkHandler)
            throws ExtractionException {
        return new PeertubePlaylistExtractor(this, linkHandler);
    }

    @Override
-   public StreamExtractor getStreamExtractor(LinkHandler linkHandler)
+   public StreamExtractor getStreamExtractor(final LinkHandler linkHandler)
            throws ExtractionException {
        return new PeertubeStreamExtractor(this, linkHandler);
    }

    @Override
-   public CommentsExtractor getCommentsExtractor(ListLinkHandler linkHandler)
+   public CommentsExtractor getCommentsExtractor(final ListLinkHandler linkHandler)
            throws ExtractionException {
        return new PeertubeCommentsExtractor(this, linkHandler);
    }

@@ -117,34 +130,31 @@ public class PeertubeService extends StreamingService {
        return this.instance;
    }

-   public void setInstance(PeertubeInstance instance) {
+   public void setInstance(final PeertubeInstance instance) {
        this.instance = instance;
    }

    @Override
    public KioskList getKioskList() throws ExtractionException {
-       KioskList.KioskExtractorFactory kioskFactory = new KioskList.KioskExtractorFactory() {
-           @Override
-           public KioskExtractor createNewKiosk(StreamingService streamingService,
-                                                String url,
-                                                String id)
-                   throws ExtractionException {
-               return new PeertubeTrendingExtractor(PeertubeService.this,
-                       new PeertubeTrendingLinkHandlerFactory().fromId(id), id);
-           }
-       };
+       final KioskList.KioskExtractorFactory kioskFactory = (streamingService, url, id) ->
+               new PeertubeTrendingExtractor(
+                       PeertubeService.this,
+                       new PeertubeTrendingLinkHandlerFactory().fromId(id),
+                       id
+               );

-       KioskList list = new KioskList(this);
+       final KioskList list = new KioskList(this);

        // add kiosks here e.g.:
        final PeertubeTrendingLinkHandlerFactory h = new PeertubeTrendingLinkHandlerFactory();
        try {
            list.addKioskEntry(kioskFactory, h, PeertubeTrendingLinkHandlerFactory.KIOSK_TRENDING);
-           list.addKioskEntry(kioskFactory, h, PeertubeTrendingLinkHandlerFactory.KIOSK_MOST_LIKED);
+           list.addKioskEntry(kioskFactory, h,
+                   PeertubeTrendingLinkHandlerFactory.KIOSK_MOST_LIKED);
            list.addKioskEntry(kioskFactory, h, PeertubeTrendingLinkHandlerFactory.KIOSK_RECENT);
            list.addKioskEntry(kioskFactory, h, PeertubeTrendingLinkHandlerFactory.KIOSK_LOCAL);
            list.setDefaultKiosk(PeertubeTrendingLinkHandlerFactory.KIOSK_TRENDING);
-       } catch (Exception e) {
+       } catch (final Exception e) {
            throw new ExtractionException(e);
        }
@@ -23,7 +23,10 @@ import org.schabi.newpipe.extractor.utils.Utils;
import javax.annotation.Nonnull;
import java.io.IOException;

-import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.*;
+import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
+import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
+import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
+import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.collectStreamsFrom;
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;

public class PeertubeAccountExtractor extends ChannelExtractor {

@@ -31,7 +34,8 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
    private final String baseUrl;
    private static final String ACCOUNTS = "accounts/";

-   public PeertubeAccountExtractor(final StreamingService service, final ListLinkHandler linkHandler) throws ParsingException {
+   public PeertubeAccountExtractor(final StreamingService service,
+                                   final ListLinkHandler linkHandler) throws ParsingException {
        super(service, linkHandler);
        this.baseUrl = getBaseUrl();
    }

@@ -41,7 +45,7 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
        String value;
        try {
            value = JsonUtils.getString(json, "avatar.path");
-       } catch (Exception e) {
+       } catch (final Exception e) {
            value = "/client/assets/images/default-avatar.png";
        }
        return baseUrl + value;

@@ -80,7 +84,8 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
                subscribersCount += videoChannelJsonObject.getInt("followersCount");
            }
        } catch (final IOException | JsonParserException | ReCaptchaException ignored) {
-           // something went wrong during video channels extraction, only return subscribers of ownerAccount
+           // something went wrong during video channels extraction,
+           // only return subscribers of ownerAccount
        }
        return subscribersCount;
    }

@@ -89,7 +94,7 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
    public String getDescription() {
        try {
            return JsonUtils.getString(json, "description");
-       } catch (ParsingException e) {
+       } catch (final ParsingException e) {
            return "No description";
        }
    }

@@ -117,8 +122,8 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
    @Nonnull
    @Override
    public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
-       return getPage(new Page(
-               baseUrl + "/api/v1/" + getId() + "/videos?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE));
+       return getPage(new Page(baseUrl + "/api/v1/" + getId() + "/videos?" + START_KEY + "=0&"
+               + COUNT_KEY + "=" + ITEMS_PER_PAGE));
    }

    @Override

@@ -130,23 +135,24 @@ public class PeertubeAccountExtractor extends ChannelExtractor {

        final Response response = getDownloader().get(page.getUrl());

-       JsonObject json = null;
+       JsonObject pageJson = null;
        if (response != null && !Utils.isBlank(response.responseBody())) {
            try {
-               json = JsonParser.object().from(response.responseBody());
-           } catch (Exception e) {
+               pageJson = JsonParser.object().from(response.responseBody());
+           } catch (final Exception e) {
                throw new ParsingException("Could not parse json data for account info", e);
            }
        }

-       if (json != null) {
-           PeertubeParsingHelper.validate(json);
-           final long total = json.getLong("total");
+       if (pageJson != null) {
+           PeertubeParsingHelper.validate(pageJson);
+           final long total = pageJson.getLong("total");

            final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
-           collectStreamsFrom(collector, json, getBaseUrl());
+           collectStreamsFrom(collector, pageJson, getBaseUrl());

-           return new InfoItemsPage<>(collector, PeertubeParsingHelper.getNextPage(page.getUrl(), total));
+           return new InfoItemsPage<>(collector,
+                   PeertubeParsingHelper.getNextPage(page.getUrl(), total));
        } else {
            throw new ExtractionException("Unable to get PeerTube account info");
        }

@@ -173,10 +179,12 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
    private void setInitialData(final String responseBody) throws ExtractionException {
        try {
            json = JsonParser.object().from(responseBody);
-       } catch (JsonParserException e) {
+       } catch (final JsonParserException e) {
            throw new ExtractionException("Unable to extract PeerTube account data", e);
        }
-       if (json == null) throw new ExtractionException("Unable to extract PeerTube account data");
+       if (json == null) {
+           throw new ExtractionException("Unable to extract PeerTube account data");
+       }
    }

    @Nonnull
@@ -21,15 +21,18 @@ import org.schabi.newpipe.extractor.utils.Utils;
import javax.annotation.Nonnull;
import java.io.IOException;

-import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.*;
+import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
+import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
+import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
+import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.collectStreamsFrom;
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;


public class PeertubeChannelExtractor extends ChannelExtractor {
    private JsonObject json;
    private final String baseUrl;

-   public PeertubeChannelExtractor(final StreamingService service, final ListLinkHandler linkHandler) throws ParsingException {
+   public PeertubeChannelExtractor(final StreamingService service,
+                                   final ListLinkHandler linkHandler) throws ParsingException {
        super(service, linkHandler);
        this.baseUrl = getBaseUrl();
    }

@@ -39,7 +42,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
        String value;
        try {
            value = JsonUtils.getString(json, "avatar.path");
-       } catch (Exception e) {
+       } catch (final Exception e) {
            value = "/client/assets/images/default-avatar.png";
        }
        return baseUrl + value;

@@ -64,7 +67,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
    public String getDescription() {
        try {
            return JsonUtils.getString(json, "description");
-       } catch (ParsingException e) {
+       } catch (final ParsingException e) {
            return "No description";
        }
    }

@@ -84,7 +87,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
        String value;
        try {
            value = JsonUtils.getString(json, "ownerAccount.avatar.path");
-       } catch (Exception e) {
+       } catch (final Exception e) {
            value = "/client/assets/images/default-avatar.png";
        }
        return baseUrl + value;

@@ -98,45 +101,48 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
    @Nonnull
    @Override
    public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
-       return getPage(new Page(
-               baseUrl + "/api/v1/" + getId() + "/videos?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE));
+       return getPage(new Page(baseUrl + "/api/v1/" + getId() + "/videos?" + START_KEY + "=0&"
+               + COUNT_KEY + "=" + ITEMS_PER_PAGE));
    }

    @Override
-   public InfoItemsPage<StreamInfoItem> getPage(final Page page) throws IOException, ExtractionException {
+   public InfoItemsPage<StreamInfoItem> getPage(final Page page)
+           throws IOException, ExtractionException {
        if (page == null || isNullOrEmpty(page.getUrl())) {
            throw new IllegalArgumentException("Page doesn't contain an URL");
        }

        final Response response = getDownloader().get(page.getUrl());

-       JsonObject json = null;
+       JsonObject pageJson = null;
        if (response != null && !Utils.isBlank(response.responseBody())) {
            try {
-               json = JsonParser.object().from(response.responseBody());
-           } catch (Exception e) {
+               pageJson = JsonParser.object().from(response.responseBody());
+           } catch (final Exception e) {
                throw new ParsingException("Could not parse json data for channel info", e);
            }
        }

-       if (json != null) {
-           PeertubeParsingHelper.validate(json);
-           final long total = json.getLong("total");
+       if (pageJson != null) {
+           PeertubeParsingHelper.validate(pageJson);
+           final long total = pageJson.getLong("total");

            final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
-           collectStreamsFrom(collector, json, getBaseUrl());
+           collectStreamsFrom(collector, pageJson, getBaseUrl());

-           return new InfoItemsPage<>(collector, PeertubeParsingHelper.getNextPage(page.getUrl(), total));
+           return new InfoItemsPage<>(collector,
+                   PeertubeParsingHelper.getNextPage(page.getUrl(), total));
        } else {
            throw new ExtractionException("Unable to get PeerTube channel info");
        }
    }

    @Override
-   public void onFetchPage(final Downloader downloader) throws IOException, ExtractionException {
+   public void onFetchPage(final Downloader downloader)
+           throws IOException, ExtractionException {
        final Response response = downloader.get(
                baseUrl + PeertubeChannelLinkHandlerFactory.API_ENDPOINT + getId());
-       if (response != null ) {
+       if (response != null) {
            setInitialData(response.responseBody());
        } else {
            throw new ExtractionException("Unable to extract PeerTube channel data");

@@ -146,10 +152,12 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
    private void setInitialData(final String responseBody) throws ExtractionException {
        try {
            json = JsonParser.object().from(responseBody);
-       } catch (JsonParserException e) {
+       } catch (final JsonParserException e) {
            throw new ExtractionException("Unable to extract PeerTube channel data", e);
        }
-       if (json == null) throw new ExtractionException("Unable to extract PeerTube channel data");
+       if (json == null) {
+           throw new ExtractionException("Unable to extract PeerTube channel data");
+       }
    }

    @Nonnull
@@ -18,36 +18,41 @@ import org.schabi.newpipe.extractor.utils.Utils;

import java.io.IOException;

-import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.*;
+import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
+import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
+import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;

public class PeertubeCommentsExtractor extends CommentsExtractor {
-   public PeertubeCommentsExtractor(final StreamingService service, final ListLinkHandler uiHandler) {
+   public PeertubeCommentsExtractor(final StreamingService service,
+                                    final ListLinkHandler uiHandler) {
        super(service, uiHandler);
    }

    @Override
-   public InfoItemsPage<CommentsInfoItem> getInitialPage() throws IOException, ExtractionException {
-       final String pageUrl = getUrl() + "?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
-       return getPage(new Page(pageUrl));
+   public InfoItemsPage<CommentsInfoItem> getInitialPage()
+           throws IOException, ExtractionException {
+       return getPage(new Page(getUrl() + "?" + START_KEY + "=0&"
+               + COUNT_KEY + "=" + ITEMS_PER_PAGE));
    }

-   private void collectCommentsFrom(final CommentsInfoItemsCollector collector, final JsonObject json) throws ParsingException {
+   private void collectCommentsFrom(final CommentsInfoItemsCollector collector,
+                                    final JsonObject json) throws ParsingException {
        final JsonArray contents = json.getArray("data");

        for (final Object c : contents) {
            if (c instanceof JsonObject) {
                final JsonObject item = (JsonObject) c;
                if (!item.getBoolean("isDeleted")) {
-                   final PeertubeCommentsInfoItemExtractor extractor = new PeertubeCommentsInfoItemExtractor(item, this);
-                   collector.commit(extractor);
+                   collector.commit(new PeertubeCommentsInfoItemExtractor(item, this));
                }
            }
        }
    }

    @Override
-   public InfoItemsPage<CommentsInfoItem> getPage(final Page page) throws IOException, ExtractionException {
+   public InfoItemsPage<CommentsInfoItem> getPage(final Page page)
+           throws IOException, ExtractionException {
        if (page == null || isNullOrEmpty(page.getUrl())) {
            throw new IllegalArgumentException("Page doesn't contain an URL");
        }

@@ -58,7 +63,7 @@ public class PeertubeCommentsExtractor extends CommentsExtractor {
        if (response != null && !Utils.isBlank(response.responseBody())) {
            try {
                json = JsonParser.object().from(response.responseBody());
-           } catch (Exception e) {
+           } catch (final Exception e) {
                throw new ParsingException("Could not parse json data for comments info", e);
            }
        }

@@ -67,15 +72,18 @@ public class PeertubeCommentsExtractor extends CommentsExtractor {
            PeertubeParsingHelper.validate(json);
            final long total = json.getLong("total");

-           final CommentsInfoItemsCollector collector = new CommentsInfoItemsCollector(getServiceId());
+           final CommentsInfoItemsCollector collector
+                   = new CommentsInfoItemsCollector(getServiceId());
            collectCommentsFrom(collector, json);

-           return new InfoItemsPage<>(collector, PeertubeParsingHelper.getNextPage(page.getUrl(), total));
+           return new InfoItemsPage<>(collector,
+                   PeertubeParsingHelper.getNextPage(page.getUrl(), total));
        } else {
            throw new ExtractionException("Unable to get PeerTube kiosk info");
        }
    }

    @Override
-   public void onFetchPage(Downloader downloader) { }
+   public void onFetchPage(final Downloader downloader) {
+   }
}