Make some methods return the specific InfoItem type
- Some methods were returning a broader range of InfoItem types than they should. For example, a ChannelInfo should return a List containing only StreamInfoItem, not the more general InfoItem.
- Renamed ListExtractor.getInfoItems to getInitialPage and changed its return type to an InfoItemsPage, to be consistent with getPage(url).
parent 5dd2daad37
commit 37f2e5cfda
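To make the shape of the reworked API concrete, here is a minimal consumer-side sketch of paging through a channel's streams after this change. It is a sketch only: it assumes an already prepared ChannelExtractor (NewPipe initialization, service lookup and the fetch step are omitted), and it uses only names visible in this diff (getInitialPage, getPage, InfoItemsPage, getItems, getNextPageUrl).

```java
import org.schabi.newpipe.extractor.ListExtractor.InfoItemsPage;
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;

class PagingSketch {
    // Sketch only: assumes "extractor" was obtained and fetched elsewhere,
    // e.g. through the service's getChannelExtractor(url).
    static void printAllStreams(ChannelExtractor extractor) throws Exception {
        // The initial page is now typed to StreamInfoItem and already carries
        // the items, the collected errors and the url of the next page.
        InfoItemsPage<StreamInfoItem> page = extractor.getInitialPage();

        while (true) {
            for (StreamInfoItem item : page.getItems()) {
                System.out.println(item.getName());
            }
            String nextPageUrl = page.getNextPageUrl();
            if (nextPageUrl == null || nextPageUrl.isEmpty()) {
                break; // no further pages
            }
            // Every following page has the same InfoItemsPage<StreamInfoItem> shape.
            page = extractor.getPage(nextPageUrl);
        }
    }
}
```

As the Soundcloud and YouTube changes below show, extractor implementations satisfy this by wrapping their existing StreamInfoItemsCollector in `new InfoItemsPage<>(collector, nextPageUrl)`.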
@@ -4,21 +4,45 @@ import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import javax.annotation.Nonnull;
import java.io.IOException;
import java.util.Collections;
import java.util.List;

/**
* Base class to extractors that have a list (e.g. playlists, users).
*/
public abstract class ListExtractor extends Extractor {
public abstract class ListExtractor<R extends InfoItem> extends Extractor {

public ListExtractor(StreamingService service, String url) {
super(service, url);
}

/**
* A {@link InfoItemsPage InfoItemsPage} corresponding to the initial page where the items are from the initial request and
* the nextPageUrl relative to it.
*
* @return a {@link InfoItemsPage} corresponding to the initial page
*/
@Nonnull
public abstract InfoItemsCollector<? extends InfoItem, ?> getInfoItems() throws IOException, ExtractionException;
public abstract InfoItemsPage<R> getInitialPage() throws IOException, ExtractionException;

/**
* Returns an url that can be used to get the next page relative to the initial one.<br/>
* <p>Usually, these links will only work in the implementation itself.</p>
*
* @return an url pointing to the next page relative to the initial page
* @see #getPage(String)
*/
public abstract String getNextPageUrl() throws IOException, ExtractionException;
public abstract InfoItemPage<? extends InfoItem> getPage(final String nextPageUrl) throws IOException, ExtractionException;

/**
* Get a list of items corresponding to the specific requested page.
*
* @param nextPageUrl any next page url got from the exclusive implementation of the list extractor
* @return a {@link InfoItemsPage} corresponding to the requested page
* @see #getNextPageUrl()
* @see InfoItemsPage#getNextPageUrl()
*/
public abstract InfoItemsPage<R> getPage(final String nextPageUrl) throws IOException, ExtractionException;

public boolean hasNextPage() throws IOException, ExtractionException {
final String nextPageUrl = getNextPageUrl();

@@ -29,14 +53,34 @@ public abstract class ListExtractor extends Extractor {
// Inner
//////////////////////////////////////////////////////////////////////////*/

public static class InfoItemPage<T extends InfoItem> {
/**
* The current list of items to this result
* A class that is used to wrap a list of gathered items and eventual errors, it
* also contains a field that points to the next available page ({@link #nextPageUrl}).
*/
public static class InfoItemsPage<T extends InfoItem> {
private static final InfoItemsPage<InfoItem> EMPTY =
new InfoItemsPage<>(Collections.<InfoItem>emptyList(), "", Collections.<Throwable>emptyList());

/**
* A convenient method that returns a representation of an empty page.
*
* @return a type-safe page with the list of items and errors empty and the nextPageUrl set to an empty string.
*/
public static <T extends InfoItem> InfoItemsPage<T> emptyPage() {
//noinspection unchecked
return (InfoItemsPage<T>) EMPTY;
}

/**
* The current list of items of this page
*/
private final List<T> itemsList;

/**
* Next url to fetch more items
* Url pointing to the next page relative to this one
*
* @see ListExtractor#getPage(String)
*/
private final String nextPageUrl;

@@ -45,11 +89,11 @@ public abstract class ListExtractor extends Extractor {
*/
private final List<Throwable> errors;

public InfoItemPage(InfoItemsCollector<T, ?> collector, String nextPageUrl) {
public InfoItemsPage(InfoItemsCollector<T, ?> collector, String nextPageUrl) {
this(collector.getItems(), nextPageUrl, collector.getErrors());
}

public InfoItemPage(List<T> itemsList, String nextPageUrl, List<Throwable> errors) {
public InfoItemsPage(List<T> itemsList, String nextPageUrl, List<Throwable> errors) {
this.itemsList = itemsList;
this.nextPageUrl = nextPageUrl;
this.errors = errors;

@@ -59,7 +103,7 @@ public abstract class ListExtractor extends Extractor {
return nextPageUrl != null && !nextPageUrl.isEmpty();
}

public List<T> getItemsList() {
public List<T> getItems() {
return itemsList;
}

@@ -1,16 +1,12 @@
package org.schabi.newpipe.extractor.channel;

import edu.umd.cs.findbugs.annotations.NonNull;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.UrlIdHandler;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;

import javax.annotation.Nonnull;
import java.io.IOException;

/*
* Created by Christian Schabesberger on 25.07.16.

@@ -32,7 +28,7 @@ import java.io.IOException;
* along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
*/

public abstract class ChannelExtractor extends ListExtractor {
public abstract class ChannelExtractor extends ListExtractor<StreamInfoItem> {

public ChannelExtractor(StreamingService service, String url) {
super(service, url);

@@ -44,12 +40,6 @@ public abstract class ChannelExtractor extends ListExtractor {
return getService().getChannelUrlIdHandler();
}

@NonNull
@Override
public abstract StreamInfoItemsCollector getInfoItems() throws IOException, ExtractionException;
@Override
public abstract InfoItemPage<StreamInfoItem> getPage(String nextPageUrl) throws IOException, ExtractionException;

public abstract String getAvatarUrl() throws ParsingException;
public abstract String getBannerUrl() throws ParsingException;
public abstract String getFeedUrl() throws ParsingException;

@@ -1,6 +1,6 @@
package org.schabi.newpipe.extractor.channel;

import org.schabi.newpipe.extractor.ListExtractor.InfoItemPage;
import org.schabi.newpipe.extractor.ListExtractor.InfoItemsPage;
import org.schabi.newpipe.extractor.ListInfo;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.StreamingService;

@@ -30,18 +30,12 @@ import java.io.IOException;
* along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
*/

public class ChannelInfo extends ListInfo {
public class ChannelInfo extends ListInfo<StreamInfoItem> {

public ChannelInfo(int serviceId, String url, String id, String name) {
super(serviceId, id, url, name);
}

public static InfoItemPage<StreamInfoItem> getMoreItems(StreamingService service, String url, String pageUrl)
throws IOException, ExtractionException {
return service.getChannelExtractor(url).getPage(pageUrl);
}

public static ChannelInfo getInfo(String url) throws IOException, ExtractionException {
return getInfo(NewPipe.getServiceByUrl(url), url);
}

@@ -52,6 +46,10 @@ public class ChannelInfo extends ListInfo {
return getInfo(extractor);
}

public static InfoItemsPage<StreamInfoItem> getMoreItems(StreamingService service, String url, String pageUrl) throws IOException, ExtractionException {
return service.getChannelExtractor(url).getPage(pageUrl);
}

public static ChannelInfo getInfo(ChannelExtractor extractor) throws IOException, ExtractionException {

// important data

@@ -79,7 +77,9 @@ public class ChannelInfo extends ListInfo {
info.addError(e);
}

info.setRelatedItems(ExtractorHelper.getInfoItemsOrLogError(info, extractor));
final InfoItemsPage<StreamInfoItem> itemsPage = ExtractorHelper.getItemsPageOrLogError(info, extractor);
info.setRelatedItems(itemsPage.getItems());
info.setNextPageUrl(itemsPage.getNextPageUrl());

try {
info.setSubscriberCount(extractor.getSubscriberCount());

@@ -92,7 +92,6 @@ public class ChannelInfo extends ListInfo {
info.addError(e);
}

info.setNextPageUrl(extractor.getNextPageUrl());
return info;
}

@@ -20,18 +20,15 @@ package org.schabi.newpipe.extractor.kiosk;
* along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
*/

import edu.umd.cs.findbugs.annotations.NonNull;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;

import javax.annotation.Nonnull;
import java.io.IOException;

public abstract class KioskExtractor extends ListExtractor {
public abstract class KioskExtractor extends ListExtractor<StreamInfoItem> {
private String contentCountry = null;
private final String id;

@@ -43,12 +40,6 @@ public abstract class KioskExtractor extends ListExtractor {
this.id = kioskId;
}

@NonNull
@Override
public abstract StreamInfoItemsCollector getInfoItems() throws IOException, ExtractionException;
@Override
public abstract InfoItemPage<StreamInfoItem> getPage(String nextPageUrl) throws IOException, ExtractionException;

/**
* For certain Websites the content of a kiosk will be different depending
* on the country you want to poen the website in. Therefore you should

@@ -30,13 +30,13 @@ import org.schabi.newpipe.extractor.utils.ExtractorHelper;

import java.io.IOException;

public class KioskInfo extends ListInfo {
public class KioskInfo extends ListInfo<StreamInfoItem> {

private KioskInfo(int serviceId, String id, String url, String name) {
super(serviceId, id, url, name);
}

public static ListExtractor.InfoItemPage<StreamInfoItem> getMoreItems(StreamingService service,
public static ListExtractor.InfoItemsPage<StreamInfoItem> getMoreItems(StreamingService service,
String url,
String pageUrl,
String contentCountry) throws IOException, ExtractionException {

@@ -75,7 +75,9 @@ public class KioskInfo extends ListInfo {

KioskInfo info = new KioskInfo(serviceId, id, name, url);

info.setRelatedItems(ExtractorHelper.getInfoItemsOrLogError(info, extractor));
final ListExtractor.InfoItemsPage<StreamInfoItem> itemsPage = ExtractorHelper.getItemsPageOrLogError(info, extractor);
info.setRelatedItems(itemsPage.getItems());
info.setNextPageUrl(itemsPage.getNextPageUrl());

return info;
}

@@ -1,18 +1,14 @@
package org.schabi.newpipe.extractor.playlist;

import edu.umd.cs.findbugs.annotations.NonNull;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.UrlIdHandler;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;

import javax.annotation.Nonnull;
import java.io.IOException;

public abstract class PlaylistExtractor extends ListExtractor {
public abstract class PlaylistExtractor extends ListExtractor<StreamInfoItem> {

public PlaylistExtractor(StreamingService service, String url) {
super(service, url);

@@ -24,12 +20,6 @@ public abstract class PlaylistExtractor extends ListExtractor {
return getService().getPlaylistUrlIdHandler();
}

@NonNull
@Override
public abstract StreamInfoItemsCollector getInfoItems() throws IOException, ExtractionException;
@Override
public abstract InfoItemPage<StreamInfoItem> getPage(String nextPageUrl) throws IOException, ExtractionException;

public abstract String getThumbnailUrl() throws ParsingException;
public abstract String getBannerUrl() throws ParsingException;

@@ -1,26 +1,21 @@
package org.schabi.newpipe.extractor.playlist;

import org.schabi.newpipe.extractor.ListExtractor.InfoItemPage;
import org.schabi.newpipe.extractor.ListExtractor.InfoItemsPage;
import org.schabi.newpipe.extractor.ListInfo;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
import org.schabi.newpipe.extractor.utils.ExtractorHelper;

import java.io.IOException;

import static org.schabi.newpipe.extractor.utils.ExtractorHelper.getInfoItemsOrLogError;

public class PlaylistInfo extends ListInfo {
public class PlaylistInfo extends ListInfo<StreamInfoItem> {

public PlaylistInfo(int serviceId, String id, String url, String name) {
super(serviceId, id, url, name);
}

public static InfoItemPage<StreamInfoItem> getMoreItems(StreamingService service, String url, String pageUrl) throws IOException, ExtractionException {
return service.getPlaylistExtractor(url).getPage(pageUrl);
}

public static PlaylistInfo getInfo(String url) throws IOException, ExtractionException {
return getInfo(NewPipe.getServiceByUrl(url), url);
}

@@ -31,6 +26,10 @@ public class PlaylistInfo extends ListInfo {
return getInfo(extractor);
}

public static InfoItemsPage<StreamInfoItem> getMoreItems(StreamingService service, String url, String pageUrl) throws IOException, ExtractionException {
return service.getPlaylistExtractor(url).getPage(pageUrl);
}

/**
* Get PlaylistInfo from PlaylistExtractor
*

@@ -75,8 +74,10 @@ public class PlaylistInfo extends ListInfo {
info.addError(e);
}

info.setRelatedItems(getInfoItemsOrLogError(info, extractor));
info.setNextPageUrl(extractor.getNextPageUrl());
final InfoItemsPage<StreamInfoItem> itemsPage = ExtractorHelper.getItemsPageOrLogError(info, extractor);
info.setRelatedItems(itemsPage.getItems());
info.setNextPageUrl(itemsPage.getNextPageUrl());

return info;
}

@@ -92,11 +92,11 @@ public class SoundcloudChannelExtractor extends ChannelExtractor {

@Nonnull
@Override
public StreamInfoItemsCollector getInfoItems() throws ExtractionException {
public InfoItemsPage<StreamInfoItem> getInitialPage() throws ExtractionException {
if(streamInfoItemsCollector == null) {
computeNextPageAndGetStreams();
}
return streamInfoItemsCollector;
return new InfoItemsPage<>(streamInfoItemsCollector, getNextPageUrl());
}

@Override

@@ -123,7 +123,7 @@ public class SoundcloudChannelExtractor extends ChannelExtractor {
}

@Override
public InfoItemPage<StreamInfoItem> getPage(final String pageUrl) throws IOException, ExtractionException {
public InfoItemsPage<StreamInfoItem> getPage(final String pageUrl) throws IOException, ExtractionException {
if (pageUrl == null || pageUrl.isEmpty()) {
throw new ExtractionException(new IllegalArgumentException("Page url is empty or null"));
}

@@ -131,6 +131,6 @@ public class SoundcloudChannelExtractor extends ChannelExtractor {
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
String nextPageUrl = SoundcloudParsingHelper.getStreamsFromApiMinItems(15, collector, pageUrl);

return new InfoItemPage<>(collector, nextPageUrl);
return new InfoItemsPage<>(collector, nextPageUrl);
}
}

@@ -42,7 +42,7 @@ public class SoundcloudChartsExtractor extends KioskExtractor {
}

@Override
public InfoItemPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
if (pageUrl == null || pageUrl.isEmpty()) {
throw new ExtractionException(new IllegalArgumentException("Page url is empty or null"));
}

@@ -50,7 +50,7 @@ public class SoundcloudChartsExtractor extends KioskExtractor {
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
String nextPageUrl = SoundcloudParsingHelper.getStreamsFromApi(collector, pageUrl, true);

return new InfoItemPage<>(collector, nextPageUrl);
return new InfoItemsPage<>(collector, nextPageUrl);
}

@@ -86,10 +86,10 @@ public class SoundcloudChartsExtractor extends KioskExtractor {

@Nonnull
@Override
public StreamInfoItemsCollector getInfoItems() throws IOException, ExtractionException {
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
if(collector == null) {
computNextPageAndStreams();
}
return collector;
return new InfoItemsPage<>(collector, getNextPageUrl());
}
}

@@ -70,7 +70,7 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
// If the thumbnail is null, traverse the items list and get a valid one,
// if it also fails, return null
try {
final StreamInfoItemsCollector infoItems = getInfoItems();
final InfoItemsPage<StreamInfoItem> infoItems = getInitialPage();
if (infoItems.getItems().isEmpty()) return null;

for (StreamInfoItem item : infoItems.getItems()) {

@@ -113,11 +113,11 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {

@Nonnull
@Override
public StreamInfoItemsCollector getInfoItems() throws IOException, ExtractionException {
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
if (streamInfoItemsCollector == null) {
computeStreamsAndNextPageUrl();
}
return streamInfoItemsCollector;
return new InfoItemsPage<>(streamInfoItemsCollector, getNextPageUrl());
}

private void computeStreamsAndNextPageUrl() throws ExtractionException, IOException {

@@ -141,7 +141,7 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
}

@Override
public InfoItemPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
if (pageUrl == null || pageUrl.isEmpty()) {
throw new ExtractionException(new IllegalArgumentException("Page url is empty or null"));
}

@@ -149,6 +149,6 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
String nextPageUrl = SoundcloudParsingHelper.getStreamsFromApiMinItems(15, collector, pageUrl);

return new InfoItemPage<>(collector, nextPageUrl);
return new InfoItemsPage<>(collector, nextPageUrl);
}
}

@@ -150,15 +150,15 @@ public class YoutubeChannelExtractor extends ChannelExtractor {

@Nonnull
@Override
public StreamInfoItemsCollector getInfoItems() throws ExtractionException {
public InfoItemsPage<StreamInfoItem> getInitialPage() throws ExtractionException {
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
Element ul = doc.select("ul[id=\"browse-items-primary\"]").first();
collectStreamsFrom(collector, ul);
return collector;
return new InfoItemsPage<>(collector, getNextPageUrl());
}

@Override
public InfoItemPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
if (pageUrl == null || pageUrl.isEmpty()) {
throw new ExtractionException(new IllegalArgumentException("Page url is empty or null"));
}

@@ -178,7 +178,7 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
final Document ajaxHtml = Jsoup.parse(ajaxJson.getString("content_html"), pageUrl);
collectStreamsFrom(collector, ajaxHtml.select("body").first());

return new InfoItemPage<>(collector, getNextPageUrlFromAjaxPage(ajaxJson, pageUrl));
return new InfoItemsPage<>(collector, getNextPageUrlFromAjaxPage(ajaxJson, pageUrl));
}

private String getNextPageUrlFromAjaxPage(final JsonObject ajaxJson, final String pageUrl)

@@ -129,15 +129,15 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {

@Nonnull
@Override
public StreamInfoItemsCollector getInfoItems() throws IOException, ExtractionException {
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
Element tbody = doc.select("tbody[id=\"pl-load-more-destination\"]").first();
collectStreamsFrom(collector, tbody);
return collector;
return new InfoItemsPage<>(collector, getNextPageUrl());
}

@Override
public InfoItemPage<StreamInfoItem> getPage(final String pageUrl) throws IOException, ExtractionException {
public InfoItemsPage<StreamInfoItem> getPage(final String pageUrl) throws IOException, ExtractionException {
if (pageUrl == null || pageUrl.isEmpty()) {
throw new ExtractionException(new IllegalArgumentException("Page url is empty or null"));
}

@@ -156,7 +156,7 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {

collectStreamsFrom(collector, pageHtml.select("tbody[id=\"pl-load-more-destination\"]").first());

return new InfoItemPage<>(collector, getNextPageUrlFromAjax(pageJson, pageUrl));
return new InfoItemsPage<>(collector, getNextPageUrlFromAjax(pageJson, pageUrl));
}

private String getNextPageUrlFromAjax(final JsonObject pageJson, final String pageUrl)

@@ -25,7 +25,6 @@ import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.schabi.newpipe.extractor.Downloader;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.UrlIdHandler;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;

@@ -70,7 +69,7 @@ public class YoutubeTrendingExtractor extends KioskExtractor {
}

@Override
public ListExtractor.InfoItemPage<StreamInfoItem> getPage(String pageUrl) {
public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) {
return null;
}

@@ -89,7 +88,7 @@ public class YoutubeTrendingExtractor extends KioskExtractor {

@Nonnull
@Override
public StreamInfoItemsCollector getInfoItems() throws ParsingException {
public InfoItemsPage<StreamInfoItem> getInitialPage() throws ParsingException {
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
Elements uls = doc.select("ul[class*=\"expanded-shelf-content-list\"]");
for(Element ul : uls) {

@@ -165,6 +164,6 @@ public class YoutubeTrendingExtractor extends KioskExtractor {
}
}

return collector;
return new InfoItemsPage<>(collector, getNextPageUrl());
}
}

@@ -4,9 +4,9 @@ import org.schabi.newpipe.extractor.Info;
import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.InfoItemsCollector;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.ListExtractor.InfoItemsPage;
import org.schabi.newpipe.extractor.stream.StreamExtractor;
import org.schabi.newpipe.extractor.stream.StreamInfo;
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;

import java.util.Collections;
import java.util.List;

@@ -14,40 +14,29 @@ import java.util.List;
public class ExtractorHelper {
private ExtractorHelper() {}

public static List<InfoItem> getInfoItemsOrLogError(Info info, ListExtractor extractor) {
InfoItemsCollector collector;
public static <T extends InfoItem> InfoItemsPage<T> getItemsPageOrLogError(Info info, ListExtractor<T> extractor) {
try {
collector = extractor.getInfoItems();
InfoItemsPage<T> page = extractor.getInitialPage();
info.addAllErrors(page.getErrors());

return page;
} catch (Exception e) {
info.addError(e);
return Collections.emptyList();
return InfoItemsPage.emptyPage();
}
// Get from collector
return getInfoItems(info, collector);
}

public static List<InfoItem> getRelatedVideosOrLogError(StreamInfo info, StreamExtractor extractor) {
StreamInfoItemsCollector collector;
try {
collector = extractor.getRelatedVideos();
} catch (Exception e) {
info.addError(e);
return Collections.emptyList();
}
// Get from collector
return getInfoItems(info, collector);
}

private static List<InfoItem> getInfoItems(Info info, InfoItemsCollector collector) {
List<InfoItem> result;
try {
result = collector.getItems();
InfoItemsCollector<? extends InfoItem, ?> collector = extractor.getRelatedVideos();
info.addAllErrors(collector.getErrors());

//noinspection unchecked
return (List<InfoItem>) collector.getItems();
} catch (Exception e) {
info.addError(e);
return Collections.emptyList();
}
return result;
}
}

@@ -1,7 +1,6 @@
package org.schabi.newpipe.extractor.services;

import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.InfoItemsCollector;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;

@@ -32,28 +31,30 @@ public final class DefaultTests {
}
}

public static void defaultTestRelatedItems(ListExtractor extractor, int expectedServiceId) throws Exception {
final InfoItemsCollector<? extends InfoItem, ?> itemsCollector = extractor.getInfoItems();
final List<? extends InfoItem> itemsList = itemsCollector.getItems();
List<Throwable> errors = itemsCollector.getErrors();
public static <T extends InfoItem> ListExtractor.InfoItemsPage<T> defaultTestRelatedItems(ListExtractor<T> extractor, int expectedServiceId) throws Exception {
final ListExtractor.InfoItemsPage<T> page = extractor.getInitialPage();
final List<T> itemsList = page.getItems();
List<Throwable> errors = page.getErrors();

defaultTestListOfItems(expectedServiceId, itemsList, errors);
return page;
}

public static ListExtractor.InfoItemPage<? extends InfoItem> defaultTestMoreItems(ListExtractor extractor, int expectedServiceId) throws Exception {
public static <T extends InfoItem> ListExtractor.InfoItemsPage<T> defaultTestMoreItems(ListExtractor<T> extractor, int expectedServiceId) throws Exception {
assertTrue("Doesn't have more items", extractor.hasNextPage());
ListExtractor.InfoItemPage<? extends InfoItem> nextPage = extractor.getPage(extractor.getNextPageUrl());
assertTrue("Next page is empty", !nextPage.getItemsList().isEmpty());
ListExtractor.InfoItemsPage<T> nextPage = extractor.getPage(extractor.getNextPageUrl());
final List<T> items = nextPage.getItems();
assertTrue("Next page is empty", !items.isEmpty());
assertEmptyErrors("Next page have errors", nextPage.getErrors());

defaultTestListOfItems(expectedServiceId, nextPage.getItemsList(), nextPage.getErrors());
defaultTestListOfItems(expectedServiceId, nextPage.getItems(), nextPage.getErrors());
return nextPage;
}

public static void defaultTestGetPageInNewExtractor(ListExtractor extractor, ListExtractor newExtractor, int expectedServiceId) throws Exception {
public static void defaultTestGetPageInNewExtractor(ListExtractor<? extends InfoItem> extractor, ListExtractor<? extends InfoItem> newExtractor, int expectedServiceId) throws Exception {
final String nextPageUrl = extractor.getNextPageUrl();

final ListExtractor.InfoItemPage<? extends InfoItem> page = newExtractor.getPage(nextPageUrl);
defaultTestListOfItems(expectedServiceId, page.getItemsList(), page.getErrors());
final ListExtractor.InfoItemsPage<? extends InfoItem> page = newExtractor.getPage(nextPageUrl);
defaultTestListOfItems(expectedServiceId, page.getItems(), page.getErrors());
}
}

@@ -4,9 +4,10 @@ import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.schabi.newpipe.Downloader;
import org.schabi.newpipe.extractor.InfoItemsCollector;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;

import java.util.List;

@@ -47,29 +48,29 @@ public class SoundcloudChartsExtractorTest {

@Test
public void testGetStreams() throws Exception {
InfoItemsCollector collector = extractor.getInfoItems();
if(!collector.getErrors().isEmpty()) {
ListExtractor.InfoItemsPage<StreamInfoItem> page = extractor.getInitialPage();
if(!page.getErrors().isEmpty()) {
System.err.println("----------");
List<Throwable> errors = collector.getErrors();
List<Throwable> errors = page.getErrors();
for(Throwable e: errors) {
e.printStackTrace();
System.err.println("----------");
}
}
assertTrue("no streams are received",
!collector.getItems().isEmpty()
&& collector.getErrors().isEmpty());
!page.getItems().isEmpty()
&& page.getErrors().isEmpty());
}

@Test
public void testGetStreamsErrors() throws Exception {
assertTrue("errors during stream list extraction", extractor.getInfoItems().getErrors().isEmpty());
assertTrue("errors during stream list extraction", extractor.getInitialPage().getErrors().isEmpty());
}

@Test
public void testHasMoreStreams() throws Exception {
// Setup the streams
extractor.getInfoItems();
extractor.getInitialPage();
assertTrue("has more streams", extractor.hasNextPage());
}

@@ -80,9 +81,9 @@ public class SoundcloudChartsExtractorTest {

@Test
public void testGetNextPage() throws Exception {
extractor.getInfoItems().getItems();
extractor.getInitialPage().getItems();
assertFalse("extractor has next streams", extractor.getPage(extractor.getNextPageUrl()) == null
|| extractor.getPage(extractor.getNextPageUrl()).getItemsList().isEmpty());
|| extractor.getPage(extractor.getNextPageUrl()).getItems().isEmpty());
}

@Test

@@ -6,12 +6,12 @@ import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import org.schabi.newpipe.Downloader;
import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.ServiceList;
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
import org.schabi.newpipe.extractor.services.BasePlaylistExtractorTest;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;

import static org.junit.Assert.*;
import static org.schabi.newpipe.extractor.ExtractorAsserts.assertIsSecureUrl;

@@ -274,11 +274,11 @@ public class SoundcloudPlaylistExtractorTest {

@Test
public void testMoreRelatedItems() throws Exception {
ListExtractor.InfoItemPage<? extends InfoItem> currentPage = defaultTestMoreItems(extractor, ServiceList.SoundCloud.getServiceId());
ListExtractor.InfoItemsPage<StreamInfoItem> currentPage = defaultTestMoreItems(extractor, ServiceList.SoundCloud.getServiceId());
// Test for 2 more levels
for (int i = 0; i < 2; i++) {
currentPage = extractor.getPage(currentPage.getNextPageUrl());
defaultTestListOfItems(SoundCloud.getServiceId(), currentPage.getItemsList(), currentPage.getErrors());
defaultTestListOfItems(SoundCloud.getServiceId(), currentPage.getItems(), currentPage.getErrors());
}
}

@@ -5,12 +5,12 @@ import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import org.schabi.newpipe.Downloader;
import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.ServiceList;
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
import org.schabi.newpipe.extractor.services.BasePlaylistExtractorTest;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

@@ -181,11 +181,11 @@ public class YoutubePlaylistExtractorTest {

@Test
public void testMoreRelatedItems() throws Exception {
ListExtractor.InfoItemPage<? extends InfoItem> currentPage = defaultTestMoreItems(extractor, ServiceList.YouTube.getServiceId());
ListExtractor.InfoItemsPage<StreamInfoItem> currentPage = defaultTestMoreItems(extractor, ServiceList.YouTube.getServiceId());
// Test for 2 more levels
for (int i = 0; i < 2; i++) {
currentPage = extractor.getPage(currentPage.getNextPageUrl());
defaultTestListOfItems(YouTube.getServiceId(), currentPage.getItemsList(), currentPage.getErrors());
defaultTestListOfItems(YouTube.getServiceId(), currentPage.getItems(), currentPage.getErrors());
}
}

@@ -23,8 +23,9 @@ package org.schabi.newpipe.extractor.services.youtube;
import org.junit.BeforeClass;
import org.junit.Test;
import org.schabi.newpipe.Downloader;
import org.schabi.newpipe.extractor.InfoItemsCollector;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
import org.schabi.newpipe.extractor.utils.Utils;

import static junit.framework.TestCase.assertFalse;

@@ -66,27 +67,27 @@ public class YoutubeTrendingExtractorTest {

@Test
public void testGetStreamsQuantity() throws Exception {
InfoItemsCollector collector = extractor.getInfoItems();
Utils.printErrors(collector.getErrors());
assertTrue("no streams are received", collector.getItems().size() >= 20);
ListExtractor.InfoItemsPage<StreamInfoItem> page = extractor.getInitialPage();
Utils.printErrors(page.getErrors());
assertTrue("no streams are received", page.getItems().size() >= 20);
}

@Test
public void testGetStreamsErrors() throws Exception {
assertEmptyErrors("errors during stream list extraction", extractor.getInfoItems().getErrors());
assertEmptyErrors("errors during stream list extraction", extractor.getInitialPage().getErrors());
}

@Test
public void testHasMoreStreams() throws Exception {
// Setup the streams
extractor.getInfoItems();
extractor.getInitialPage();
assertFalse("has more streams", extractor.hasNextPage());
}

@Test
public void testGetNextPage() {
assertTrue("extractor has next streams", extractor.getPage(extractor.getNextPageUrl()) == null
|| extractor.getPage(extractor.getNextPageUrl()).getItemsList().isEmpty());
|| extractor.getPage(extractor.getNextPageUrl()).getItems().isEmpty());
}

@Test