Merge pull request #451 from TeamNewPipe/meta_info
Extract stream and search meta info for YouTube
This commit is contained in:
commit
c682ea0d18
|
@ -0,0 +1,76 @@
|
|||
package org.schabi.newpipe.extractor;
|
||||
|
||||
import org.schabi.newpipe.extractor.stream.Description;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
public class MetaInfo implements Serializable {
|
||||
|
||||
private String title = "";
|
||||
private Description content;
|
||||
private List<URL> urls = new ArrayList<>();
|
||||
private List<String> urlTexts = new ArrayList<>();
|
||||
|
||||
public MetaInfo(@Nonnull final String title, @Nonnull final Description content,
|
||||
@Nonnull final List<URL> urls, @Nonnull final List<String> urlTexts) {
|
||||
this.title = title;
|
||||
this.content = content;
|
||||
this.urls = urls;
|
||||
this.urlTexts = urlTexts;
|
||||
}
|
||||
|
||||
public MetaInfo() {
|
||||
}
|
||||
|
||||
/**
|
||||
* @return Title of the info. Can be empty.
|
||||
*/
|
||||
@Nonnull
|
||||
public String getTitle() {
|
||||
return title;
|
||||
}
|
||||
|
||||
public void setTitle(@Nonnull final String title) {
|
||||
this.title = title;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public Description getContent() {
|
||||
return content;
|
||||
}
|
||||
|
||||
public void setContent(@Nonnull final Description content) {
|
||||
this.content = content;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public List<URL> getUrls() {
|
||||
return urls;
|
||||
}
|
||||
|
||||
public void setUrls(@Nonnull final List<URL> urls) {
|
||||
this.urls = urls;
|
||||
}
|
||||
|
||||
public void addUrl(@Nonnull final URL url) {
|
||||
urls.add(url);
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public List<String> getUrlTexts() {
|
||||
return urlTexts;
|
||||
}
|
||||
|
||||
public void setUrlTexts(@Nonnull final List<String> urlTexts) {
|
||||
this.urlTexts = urlTexts;
|
||||
}
|
||||
|
||||
public void addUrlText(@Nonnull final String urlText) {
|
||||
urlTexts.add(urlText);
|
||||
}
|
||||
}
|
|
@ -2,12 +2,14 @@ package org.schabi.newpipe.extractor.search;
|
|||
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.List;
|
||||
|
||||
public abstract class SearchExtractor extends ListExtractor<InfoItem> {
|
||||
|
||||
|
@ -57,4 +59,15 @@ public abstract class SearchExtractor extends ListExtractor<InfoItem> {
|
|||
* @return whether the results comes from a corrected query or not.
|
||||
*/
|
||||
public abstract boolean isCorrectedSearch() throws ParsingException;
|
||||
|
||||
/**
|
||||
* Meta information about the search query.
|
||||
* <p>
|
||||
* Example: on YouTube, if you search for "Covid-19",
|
||||
* there is a box with information from the WHO about Covid-19 and a link to the WHO's website.
|
||||
* @return additional meta information about the search query
|
||||
* @throws ParsingException if the meta information could not be parsed
|
||||
*/
|
||||
@Nonnull
|
||||
public abstract List<MetaInfo> getMetaInfo() throws ParsingException;
|
||||
}
|
||||
|
|
|
@ -1,20 +1,20 @@
|
|||
package org.schabi.newpipe.extractor.search;
|
||||
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.ListInfo;
|
||||
import org.schabi.newpipe.extractor.Page;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.*;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.utils.ExtractorHelper;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
public class SearchInfo extends ListInfo<InfoItem> {
|
||||
private String searchString;
|
||||
private String searchSuggestion;
|
||||
private boolean isCorrectedSearch;
|
||||
private List<MetaInfo> metaInfo;
|
||||
|
||||
public SearchInfo(int serviceId,
|
||||
SearchQueryHandler qIHandler,
|
||||
|
@ -51,6 +51,11 @@ public class SearchInfo extends ListInfo<InfoItem> {
|
|||
} catch (Exception e) {
|
||||
info.addError(e);
|
||||
}
|
||||
try {
|
||||
info.setMetaInfo(extractor.getMetaInfo());
|
||||
} catch (Exception e) {
|
||||
info.addError(e);
|
||||
}
|
||||
|
||||
ListExtractor.InfoItemsPage<InfoItem> page = ExtractorHelper.getItemsPageOrLogError(info, extractor);
|
||||
info.setRelatedItems(page.getItems());
|
||||
|
@ -87,4 +92,13 @@ public class SearchInfo extends ListInfo<InfoItem> {
|
|||
public void setSearchSuggestion(String searchSuggestion) {
|
||||
this.searchSuggestion = searchSuggestion;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public List<MetaInfo> getMetaInfo() {
|
||||
return metaInfo;
|
||||
}
|
||||
|
||||
public void setMetaInfo(@Nonnull List<MetaInfo> metaInfo) {
|
||||
this.metaInfo = metaInfo;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,6 +6,7 @@ import com.grack.nanojson.JsonParser;
|
|||
import com.grack.nanojson.JsonParserException;
|
||||
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.Page;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelInfoItem;
|
||||
|
@ -20,6 +21,7 @@ import org.schabi.newpipe.extractor.services.media_ccc.extractors.infoItems.Medi
|
|||
import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCConferencesListLinkHandlerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
@ -55,6 +57,12 @@ public class MediaCCCSearchExtractor extends SearchExtractor {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public List<MetaInfo> getMetaInfo() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public InfoItemsPage<InfoItem> getInitialPage() {
|
||||
|
|
|
@ -6,6 +6,7 @@ import com.grack.nanojson.JsonParser;
|
|||
import com.grack.nanojson.JsonParserException;
|
||||
|
||||
import org.schabi.newpipe.extractor.MediaFormat;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
|
@ -301,4 +302,10 @@ public class MediaCCCStreamExtractor extends StreamExtractor {
|
|||
public List<StreamSegment> getStreamSegments() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public List<MetaInfo> getMetaInfo() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,6 +4,7 @@ import com.grack.nanojson.JsonObject;
|
|||
import com.grack.nanojson.JsonParser;
|
||||
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.Page;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
|
@ -17,6 +18,8 @@ import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
|
|||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
|
@ -42,6 +45,12 @@ public class PeertubeSearchExtractor extends SearchExtractor {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public List<MetaInfo> getMetaInfo() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<InfoItem> getInitialPage() throws IOException, ExtractionException {
|
||||
final String pageUrl = getUrl() + "&" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
|
||||
|
|
|
@ -5,6 +5,7 @@ import com.grack.nanojson.JsonObject;
|
|||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import org.schabi.newpipe.extractor.MediaFormat;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
|
@ -309,6 +310,12 @@ public class PeertubeStreamExtractor extends StreamExtractor {
|
|||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public List<MetaInfo> getMetaInfo() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
private String getRelatedStreamsUrl(final List<String> tags) throws UnsupportedEncodingException {
|
||||
final String url = baseUrl + PeertubeSearchQueryHandlerFactory.SEARCH_ENDPOINT;
|
||||
final StringBuilder params = new StringBuilder();
|
||||
|
|
|
@ -8,6 +8,7 @@ import com.grack.nanojson.JsonParserException;
|
|||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.InfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.InfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.Page;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
|
@ -22,6 +23,8 @@ import java.io.IOException;
|
|||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
|
@ -47,6 +50,12 @@ public class SoundcloudSearchExtractor extends SearchExtractor {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public List<MetaInfo> getMetaInfo() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public InfoItemsPage<InfoItem> getInitialPage() throws IOException, ExtractionException {
|
||||
|
|
|
@ -6,6 +6,7 @@ import com.grack.nanojson.JsonParser;
|
|||
import com.grack.nanojson.JsonParserException;
|
||||
|
||||
import org.schabi.newpipe.extractor.MediaFormat;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
|
@ -327,4 +328,10 @@ public class SoundcloudStreamExtractor extends StreamExtractor {
|
|||
public List<StreamSegment> getStreamSegments() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public List<MetaInfo> getMetaInfo() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
package org.schabi.newpipe.extractor.services.youtube;
|
||||
|
||||
import com.grack.nanojson.*;
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
|
@ -8,6 +9,7 @@ import com.grack.nanojson.JsonWriter;
|
|||
|
||||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.Page;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
|
||||
|
@ -15,9 +17,12 @@ import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
|||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
|
||||
import org.schabi.newpipe.extractor.localization.Localization;
|
||||
import org.schabi.newpipe.extractor.stream.Description;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.MalformedURLException;
|
||||
|
@ -28,13 +33,11 @@ import java.time.LocalDate;
|
|||
import java.time.OffsetDateTime;
|
||||
import java.time.ZoneOffset;
|
||||
import java.time.format.DateTimeParseException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.*;
|
||||
|
||||
import static org.schabi.newpipe.extractor.NewPipe.getDownloader;
|
||||
import static org.schabi.newpipe.extractor.utils.JsonUtils.EMPTY_STRING;
|
||||
import static org.schabi.newpipe.extractor.utils.Utils.*;
|
||||
import static org.schabi.newpipe.extractor.utils.Utils.HTTP;
|
||||
import static org.schabi.newpipe.extractor.utils.Utils.HTTPS;
|
||||
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
|
||||
|
@ -76,41 +79,35 @@ public class YoutubeParsingHelper {
|
|||
private static final String FEED_BASE_CHANNEL_ID = "https://www.youtube.com/feeds/videos.xml?channel_id=";
|
||||
private static final String FEED_BASE_USER = "https://www.youtube.com/feeds/videos.xml?user=";
|
||||
|
||||
private static final String[] RECAPTCHA_DETECTION_SELECTORS = {
|
||||
"form[action*=\"/das_captcha\"]",
|
||||
"input[name*=\"action_recaptcha_verify\"]"
|
||||
};
|
||||
|
||||
public static Document parseAndCheckPage(final String url, final Response response) throws ReCaptchaException {
|
||||
final Document document = Jsoup.parse(response.responseBody(), url);
|
||||
|
||||
for (String detectionSelector : RECAPTCHA_DETECTION_SELECTORS) {
|
||||
if (!document.select(detectionSelector).isEmpty()) {
|
||||
throw new ReCaptchaException("reCAPTCHA challenge requested (detected with selector: \"" + detectionSelector + "\")", url);
|
||||
}
|
||||
private static boolean isGoogleURL(String url) {
|
||||
url = extractCachedUrlIfNeeded(url);
|
||||
try {
|
||||
final URL u = new URL(url);
|
||||
final String host = u.getHost();
|
||||
return host.startsWith("google.") || host.startsWith("m.google.");
|
||||
} catch (MalformedURLException e) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return document;
|
||||
}
|
||||
|
||||
public static boolean isYoutubeURL(URL url) {
|
||||
String host = url.getHost();
|
||||
public static boolean isYoutubeURL(final URL url) {
|
||||
final String host = url.getHost();
|
||||
return host.equalsIgnoreCase("youtube.com") || host.equalsIgnoreCase("www.youtube.com")
|
||||
|| host.equalsIgnoreCase("m.youtube.com") || host.equalsIgnoreCase("music.youtube.com");
|
||||
}
|
||||
|
||||
public static boolean isYoutubeServiceURL(URL url) {
|
||||
String host = url.getHost();
|
||||
public static boolean isYoutubeServiceURL(final URL url) {
|
||||
final String host = url.getHost();
|
||||
return host.equalsIgnoreCase("www.youtube-nocookie.com") || host.equalsIgnoreCase("youtu.be");
|
||||
}
|
||||
|
||||
public static boolean isHooktubeURL(URL url) {
|
||||
String host = url.getHost();
|
||||
public static boolean isHooktubeURL(final URL url) {
|
||||
final String host = url.getHost();
|
||||
return host.equalsIgnoreCase("hooktube.com");
|
||||
}
|
||||
|
||||
public static boolean isInvidioURL(URL url) {
|
||||
String host = url.getHost();
|
||||
public static boolean isInvidioURL(final URL url) {
|
||||
final String host = url.getHost();
|
||||
return host.equalsIgnoreCase("invidio.us")
|
||||
|| host.equalsIgnoreCase("dev.invidio.us")
|
||||
|| host.equalsIgnoreCase("www.invidio.us")
|
||||
|
@ -184,7 +181,7 @@ public class YoutubeParsingHelper {
|
|||
}
|
||||
}
|
||||
|
||||
public static OffsetDateTime parseDateFrom(String textualUploadDate) throws ParsingException {
|
||||
public static OffsetDateTime parseDateFrom(final String textualUploadDate) throws ParsingException {
|
||||
try {
|
||||
return OffsetDateTime.parse(textualUploadDate);
|
||||
} catch (DateTimeParseException e) {
|
||||
|
@ -247,7 +244,7 @@ public class YoutubeParsingHelper {
|
|||
}
|
||||
}
|
||||
|
||||
public static JsonObject getInitialData(String html) throws ParsingException {
|
||||
public static JsonObject getInitialData(final String html) throws ParsingException {
|
||||
try {
|
||||
try {
|
||||
final String initialData = Parser.matchGroup1("window\\[\"ytInitialData\"\\]\\s*=\\s*(\\{.*?\\});", html);
|
||||
|
@ -264,10 +261,9 @@ public class YoutubeParsingHelper {
|
|||
public static boolean isHardcodedClientVersionValid() throws IOException, ExtractionException {
|
||||
final String url = "https://www.youtube.com/results?search_query=test&pbj=1";
|
||||
|
||||
Map<String, List<String>> headers = new HashMap<>();
|
||||
final Map<String, List<String>> headers = new HashMap<>();
|
||||
headers.put("X-YouTube-Client-Name", Collections.singletonList("1"));
|
||||
headers.put("X-YouTube-Client-Version",
|
||||
Collections.singletonList(HARDCODED_CLIENT_VERSION));
|
||||
headers.put("X-YouTube-Client-Version", Collections.singletonList(HARDCODED_CLIENT_VERSION));
|
||||
final String response = getDownloader().get(url, headers).responseBody();
|
||||
|
||||
return response.length() > 50; // ensure to have a valid response
|
||||
|
@ -390,14 +386,14 @@ public class YoutubeParsingHelper {
|
|||
.end().done().getBytes("UTF-8");
|
||||
// @formatter:on
|
||||
|
||||
Map<String, List<String>> headers = new HashMap<>();
|
||||
final Map<String, List<String>> headers = new HashMap<>();
|
||||
headers.put("X-YouTube-Client-Name", Collections.singletonList(HARDCODED_YOUTUBE_MUSIC_KEYS[1]));
|
||||
headers.put("X-YouTube-Client-Version", Collections.singletonList(HARDCODED_YOUTUBE_MUSIC_KEYS[2]));
|
||||
headers.put("Origin", Collections.singletonList("https://music.youtube.com"));
|
||||
headers.put("Referer", Collections.singletonList("music.youtube.com"));
|
||||
headers.put("Content-Type", Collections.singletonList("application/json"));
|
||||
|
||||
String response = getDownloader().post(url, headers, json).responseBody();
|
||||
final String response = getDownloader().post(url, headers, json).responseBody();
|
||||
|
||||
return response.length() > 50; // ensure to have a valid response
|
||||
}
|
||||
|
@ -432,6 +428,7 @@ public class YoutubeParsingHelper {
|
|||
return youtubeMusicKeys = new String[]{key, clientName, clientVersion};
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public static String getUrlFromNavigationEndpoint(JsonObject navigationEndpoint) throws ParsingException {
|
||||
if (navigationEndpoint.has("urlEndpoint")) {
|
||||
String internUrl = navigationEndpoint.getObject("urlEndpoint").getString("url");
|
||||
|
@ -493,6 +490,7 @@ public class YoutubeParsingHelper {
|
|||
* @param html whether to return HTML, by parsing the navigationEndpoint
|
||||
* @return text in the JSON object or {@code null}
|
||||
*/
|
||||
@Nullable
|
||||
public static String getTextFromObject(JsonObject textObject, boolean html) throws ParsingException {
|
||||
if (isNullOrEmpty(textObject)) return null;
|
||||
|
||||
|
@ -500,8 +498,8 @@ public class YoutubeParsingHelper {
|
|||
|
||||
if (textObject.getArray("runs").isEmpty()) return null;
|
||||
|
||||
StringBuilder textBuilder = new StringBuilder();
|
||||
for (Object textPart : textObject.getArray("runs")) {
|
||||
final StringBuilder textBuilder = new StringBuilder();
|
||||
for (final Object textPart : textObject.getArray("runs")) {
|
||||
String text = ((JsonObject) textPart).getString("text");
|
||||
if (html && ((JsonObject) textPart).has("navigationEndpoint")) {
|
||||
String url = getUrlFromNavigationEndpoint(((JsonObject) textPart).getObject("navigationEndpoint"));
|
||||
|
@ -523,6 +521,7 @@ public class YoutubeParsingHelper {
|
|||
return text;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public static String getTextFromObject(JsonObject textObject) throws ParsingException {
|
||||
return getTextFromObject(textObject, false);
|
||||
}
|
||||
|
@ -650,4 +649,124 @@ public class YoutubeParsingHelper {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public static List<MetaInfo> getMetaInfo(final JsonArray contents) throws ParsingException {
|
||||
final List<MetaInfo> metaInfo = new ArrayList<>();
|
||||
for (final Object content : contents) {
|
||||
final JsonObject resultObject = (JsonObject) content;
|
||||
if (resultObject.has("itemSectionRenderer")) {
|
||||
for (final Object sectionContentObject :
|
||||
resultObject.getObject("itemSectionRenderer").getArray("contents")) {
|
||||
|
||||
final JsonObject sectionContent = (JsonObject) sectionContentObject;
|
||||
if (sectionContent.has("infoPanelContentRenderer")) {
|
||||
metaInfo.add(getInfoPanelContent(sectionContent.getObject("infoPanelContentRenderer")));
|
||||
}
|
||||
if (sectionContent.has("clarificationRenderer")) {
|
||||
metaInfo.add(getClarificationRendererContent(sectionContent.getObject("clarificationRenderer")
|
||||
));
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
return metaInfo;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static MetaInfo getInfoPanelContent(final JsonObject infoPanelContentRenderer)
|
||||
throws ParsingException {
|
||||
final MetaInfo metaInfo = new MetaInfo();
|
||||
final StringBuilder sb = new StringBuilder();
|
||||
for (final Object paragraph : infoPanelContentRenderer.getArray("paragraphs")) {
|
||||
if (sb.length() != 0) {
|
||||
sb.append("<br>");
|
||||
}
|
||||
sb.append(YoutubeParsingHelper.getTextFromObject((JsonObject) paragraph));
|
||||
}
|
||||
metaInfo.setContent(new Description(sb.toString(), Description.HTML));
|
||||
if (infoPanelContentRenderer.has("sourceEndpoint")) {
|
||||
final String metaInfoLinkUrl = YoutubeParsingHelper.getUrlFromNavigationEndpoint(
|
||||
infoPanelContentRenderer.getObject("sourceEndpoint"));
|
||||
try {
|
||||
metaInfo.addUrl(new URL(Objects.requireNonNull(extractCachedUrlIfNeeded(metaInfoLinkUrl))));
|
||||
} catch (final NullPointerException | MalformedURLException e) {
|
||||
throw new ParsingException("Could not get metadata info URL", e);
|
||||
}
|
||||
|
||||
final String metaInfoLinkText = YoutubeParsingHelper.getTextFromObject(
|
||||
infoPanelContentRenderer.getObject("inlineSource"));
|
||||
if (isNullOrEmpty(metaInfoLinkText)) {
|
||||
throw new ParsingException("Could not get metadata info link text.");
|
||||
}
|
||||
metaInfo.addUrlText(metaInfoLinkText);
|
||||
}
|
||||
|
||||
return metaInfo;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static MetaInfo getClarificationRendererContent(final JsonObject clarificationRenderer)
|
||||
throws ParsingException {
|
||||
final MetaInfo metaInfo = new MetaInfo();
|
||||
|
||||
final String title = YoutubeParsingHelper.getTextFromObject(clarificationRenderer.getObject("contentTitle"));
|
||||
final String text = YoutubeParsingHelper.getTextFromObject(clarificationRenderer.getObject("text"));
|
||||
if (title == null || text == null) {
|
||||
throw new ParsingException("Could not extract clarification renderer content");
|
||||
}
|
||||
metaInfo.setTitle(title);
|
||||
metaInfo.setContent(new Description(text, Description.PLAIN_TEXT));
|
||||
|
||||
if (clarificationRenderer.has("actionButton")) {
|
||||
final JsonObject actionButton = clarificationRenderer.getObject("actionButton")
|
||||
.getObject("buttonRenderer");
|
||||
try {
|
||||
final String url = YoutubeParsingHelper.getUrlFromNavigationEndpoint(actionButton.getObject("command"));
|
||||
metaInfo.addUrl(new URL(Objects.requireNonNull(extractCachedUrlIfNeeded(url))));
|
||||
} catch (final NullPointerException | MalformedURLException e) {
|
||||
throw new ParsingException("Could not get metadata info URL", e);
|
||||
}
|
||||
|
||||
final String metaInfoLinkText = YoutubeParsingHelper.getTextFromObject(
|
||||
actionButton.getObject("text"));
|
||||
if (isNullOrEmpty(metaInfoLinkText)) {
|
||||
throw new ParsingException("Could not get metadata info link text.");
|
||||
}
|
||||
metaInfo.addUrlText(metaInfoLinkText);
|
||||
}
|
||||
|
||||
if (clarificationRenderer.has("secondaryEndpoint") && clarificationRenderer.has("secondarySource")) {
|
||||
final String url = getUrlFromNavigationEndpoint(clarificationRenderer.getObject("secondaryEndpoint"));
|
||||
// ignore Google URLs, because those point to a Google search about "Covid-19"
|
||||
if (url != null && !isGoogleURL(url)) {
|
||||
try {
|
||||
metaInfo.addUrl(new URL(url));
|
||||
final String description = getTextFromObject(clarificationRenderer.getObject("secondarySource"));
|
||||
metaInfo.addUrlText(description == null ? url : description);
|
||||
} catch (MalformedURLException e) {
|
||||
throw new ParsingException("Could not get metadata info secondary URL", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return metaInfo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sometimes, YouTube provides URLs which use Google's cache. They look like
|
||||
* {@code https://webcache.googleusercontent.com/search?q=cache:CACHED_URL}
|
||||
* @param url the URL which might refer to the Google's webcache
|
||||
* @return the URL which is referring to the original site
|
||||
*/
|
||||
public static String extractCachedUrlIfNeeded(final String url) {
|
||||
if (url == null) {
|
||||
return null;
|
||||
}
|
||||
if (url.contains("webcache.googleusercontent.com")) {
|
||||
return url.split("cache:")[1];
|
||||
}
|
||||
return url;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,6 +7,7 @@ import com.grack.nanojson.JsonParserException;
|
|||
import com.grack.nanojson.JsonWriter;
|
||||
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.Page;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
|
@ -163,6 +164,12 @@ public class YoutubeMusicSearchExtractor extends SearchExtractor {
|
|||
return !showingResultsForRenderer.isEmpty();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public List<MetaInfo> getMetaInfo() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public InfoItemsPage<InfoItem> getInitialPage() throws ExtractionException, IOException {
|
||||
|
|
|
@ -7,6 +7,7 @@ import com.grack.nanojson.JsonParserException;
|
|||
import com.grack.nanojson.JsonWriter;
|
||||
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.Page;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
|
@ -16,13 +17,11 @@ import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
|||
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
||||
import org.schabi.newpipe.extractor.search.InfoItemsSearchCollector;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.YoutubeParsingHelper;
|
||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.*;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
|
@ -106,6 +105,13 @@ public class YoutubeSearchExtractor extends SearchExtractor {
|
|||
return !showingResultsForRenderer.isEmpty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<MetaInfo> getMetaInfo() throws ParsingException {
|
||||
return YoutubeParsingHelper.getMetaInfo(
|
||||
initialData.getObject("contents").getObject("twoColumnSearchResultsRenderer")
|
||||
.getObject("primaryContents").getObject("sectionListRenderer").getArray("contents"));
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public InfoItemsPage<InfoItem> getInitialPage() throws IOException, ExtractionException {
|
||||
|
|
|
@ -13,6 +13,7 @@ import org.mozilla.javascript.Context;
|
|||
import org.mozilla.javascript.Function;
|
||||
import org.mozilla.javascript.ScriptableObject;
|
||||
import org.schabi.newpipe.extractor.MediaFormat;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
|
@ -45,6 +46,9 @@ import org.schabi.newpipe.extractor.utils.Utils;
|
|||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.time.LocalDate;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
|
@ -1118,4 +1122,12 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
}
|
||||
return segments;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public List<MetaInfo> getMetaInfo() throws ParsingException {
|
||||
return YoutubeParsingHelper.getMetaInfo(
|
||||
initialData.getObject("contents").getObject("twoColumnWatchNextResults")
|
||||
.getObject("results").getObject("results").getArray("contents"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.schabi.newpipe.extractor.stream;
|
|||
|
||||
import org.schabi.newpipe.extractor.Extractor;
|
||||
import org.schabi.newpipe.extractor.MediaFormat;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
|
@ -486,4 +487,18 @@ public abstract class StreamExtractor extends Extractor {
|
|||
*/
|
||||
@Nonnull
|
||||
public abstract List<StreamSegment> getStreamSegments() throws ParsingException;
|
||||
|
||||
/**
|
||||
* Meta information about the stream.
|
||||
* <p>
|
||||
* This can be information about the stream creator (e.g. if the creator is a public broadcaster)
|
||||
* or further information on the topic (e.g. hints that the video might contain conspiracy theories
|
||||
* or contains information about a current health situation like the Covid-19 pandemic).
|
||||
* </p>
|
||||
* The meta information often contains links to external sources like Wikipedia or the WHO.
|
||||
* @return The meta info of the stream or an empty List if not provided.
|
||||
* @throws ParsingException if the meta information could not be parsed
|
||||
*/
|
||||
@Nonnull
|
||||
public abstract List<MetaInfo> getMetaInfo() throws ParsingException;
|
||||
}
|
||||
|
|
|
@ -1,9 +1,6 @@
|
|||
package org.schabi.newpipe.extractor.stream;
|
||||
|
||||
import org.schabi.newpipe.extractor.Info;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.*;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotSupportedException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
|
@ -13,9 +10,12 @@ import org.schabi.newpipe.extractor.utils.ExtractorHelper;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
|
||||
|
||||
/*
|
||||
|
@ -329,6 +329,11 @@ public class StreamInfo extends Info {
|
|||
} catch (Exception e) {
|
||||
streamInfo.addError(e);
|
||||
}
|
||||
try {
|
||||
streamInfo.setMetaInfo(extractor.getMetaInfo());
|
||||
} catch (Exception e) {
|
||||
streamInfo.addError(e);
|
||||
}
|
||||
|
||||
streamInfo.setRelatedStreams(ExtractorHelper.getRelatedVideosOrLogError(streamInfo, extractor));
|
||||
|
||||
|
@ -379,6 +384,7 @@ public class StreamInfo extends Info {
|
|||
private Locale language = null;
|
||||
private List<String> tags = new ArrayList<>();
|
||||
private List<StreamSegment> streamSegments = new ArrayList<>();
|
||||
private List<MetaInfo> metaInfo = new ArrayList<>();
|
||||
|
||||
/**
|
||||
* Get the stream type
|
||||
|
@ -684,4 +690,13 @@ public class StreamInfo extends Info {
|
|||
public void setStreamSegments(List<StreamSegment> streamSegments) {
|
||||
this.streamSegments = streamSegments;
|
||||
}
|
||||
|
||||
public void setMetaInfo(final List<MetaInfo> metaInfo) {
|
||||
this.metaInfo = metaInfo;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public List<MetaInfo> getMetaInfo() {
|
||||
return this.metaInfo;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,12 +1,20 @@
|
|||
package org.schabi.newpipe.extractor.services;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.schabi.newpipe.extractor.ExtractorAsserts.assertEmpty;
|
||||
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
|
||||
|
||||
|
@ -20,6 +28,10 @@ public abstract class DefaultSearchExtractorTest extends DefaultListExtractorTes
|
|||
return false;
|
||||
}
|
||||
|
||||
public List<MetaInfo> expectedMetaInfo() throws MalformedURLException {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Test
|
||||
@Override
|
||||
public void testSearchString() throws Exception {
|
||||
|
@ -41,4 +53,34 @@ public abstract class DefaultSearchExtractorTest extends DefaultListExtractorTes
|
|||
public void testSearchCorrected() throws Exception {
|
||||
assertEquals(isCorrectedSearch(), extractor().isCorrectedSearch());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DefaultStreamExtractorTest#testMetaInfo()
|
||||
*/
|
||||
@Test
|
||||
public void testMetaInfo() throws Exception {
|
||||
final List<MetaInfo> metaInfoList = extractor().getMetaInfo();
|
||||
final List<MetaInfo> expectedMetaInfoList = expectedMetaInfo();
|
||||
|
||||
for (final MetaInfo expectedMetaInfo : expectedMetaInfoList) {
|
||||
final List<String> texts = metaInfoList.stream()
|
||||
.map(metaInfo -> metaInfo.getContent().getContent())
|
||||
.collect(Collectors.toList());
|
||||
final List<String> titles = metaInfoList.stream().map(MetaInfo::getTitle).collect(Collectors.toList());
|
||||
final List<URL> urls = metaInfoList.stream().flatMap(info -> info.getUrls().stream())
|
||||
.collect(Collectors.toList());
|
||||
final List<String> urlTexts = metaInfoList.stream().flatMap(info -> info.getUrlTexts().stream())
|
||||
.collect(Collectors.toList());
|
||||
|
||||
assertTrue(texts.contains(expectedMetaInfo.getContent().getContent()));
|
||||
assertTrue(titles.contains(expectedMetaInfo.getTitle()));
|
||||
|
||||
for (final String expectedUrlText : expectedMetaInfo.getUrlTexts()) {
|
||||
assertTrue(urlTexts.contains(expectedUrlText));
|
||||
}
|
||||
for (final URL expectedUrl : expectedMetaInfo.getUrls()) {
|
||||
assertTrue(urls.contains(expectedUrl));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,6 +2,7 @@ package org.schabi.newpipe.extractor.services;
|
|||
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.extractor.MediaFormat;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.stream.AudioStream;
|
||||
import org.schabi.newpipe.extractor.stream.Description;
|
||||
|
@ -15,9 +16,12 @@ import org.schabi.newpipe.extractor.stream.VideoStream;
|
|||
import javax.annotation.Nullable;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
|
@ -67,6 +71,7 @@ public abstract class DefaultStreamExtractorTest extends DefaultExtractorTest<St
|
|||
public List<String> expectedTags() { return Collections.emptyList(); } // default: no tags
|
||||
public String expectedSupportInfo() { return ""; } // default: no support info available
|
||||
public int expectedStreamSegmentsCount() { return -1; } // return 0 or greater to test (default is -1 to ignore)
|
||||
public List<MetaInfo> expectedMetaInfo() throws MalformedURLException { return Collections.emptyList(); } // default: no metadata info available
|
||||
|
||||
@Test
|
||||
@Override
|
||||
|
@ -387,4 +392,35 @@ public abstract class DefaultStreamExtractorTest extends DefaultExtractorTest<St
|
|||
assertEquals(expectedStreamSegmentsCount(), extractor().getStreamSegments().size());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DefaultSearchExtractorTest#testMetaInfo()
|
||||
*/
|
||||
@Test
|
||||
public void testMetaInfo() throws Exception {
|
||||
final List<MetaInfo> metaInfoList = extractor().getMetaInfo();
|
||||
final List<MetaInfo> expectedMetaInfoList = expectedMetaInfo();
|
||||
|
||||
for (final MetaInfo expectedMetaInfo : expectedMetaInfoList) {
|
||||
final List<String> texts = metaInfoList.stream()
|
||||
.map((metaInfo) -> metaInfo.getContent().getContent())
|
||||
.collect(Collectors.toList());
|
||||
final List<String> titles = metaInfoList.stream().map(MetaInfo::getTitle).collect(Collectors.toList());
|
||||
final List<URL> urls = metaInfoList.stream().flatMap(info -> info.getUrls().stream())
|
||||
.collect(Collectors.toList());
|
||||
final List<String> urlTexts = metaInfoList.stream().flatMap(info -> info.getUrlTexts().stream())
|
||||
.collect(Collectors.toList());
|
||||
|
||||
assertTrue(texts.contains(expectedMetaInfo.getContent().getContent()));
|
||||
assertTrue(titles.contains(expectedMetaInfo.getTitle()));
|
||||
|
||||
for (final String expectedUrlText : expectedMetaInfo.getUrlTexts()) {
|
||||
assertTrue(urlTexts.contains(expectedUrlText));
|
||||
}
|
||||
for (final URL expectedUrl : expectedMetaInfo.getUrls()) {
|
||||
assertTrue(urls.contains(expectedUrl));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -36,4 +36,12 @@ public class YoutubeParsingHelperTest {
|
|||
assertEquals(4445767, YoutubeParsingHelper.parseDurationString("1,234:56:07"));
|
||||
assertEquals(754, YoutubeParsingHelper.parseDurationString("12:34 "));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testConvertFromGoogleCacheUrl() throws ParsingException {
|
||||
assertEquals("https://mohfw.gov.in/",
|
||||
YoutubeParsingHelper.extractCachedUrlIfNeeded("https://webcache.googleusercontent.com/search?q=cache:https://mohfw.gov.in/"));
|
||||
assertEquals("https://www.infektionsschutz.de/coronavirus-sars-cov-2.html",
|
||||
YoutubeParsingHelper.extractCachedUrlIfNeeded("https://www.infektionsschutz.de/coronavirus-sars-cov-2.html"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,9 +10,8 @@ import org.schabi.newpipe.extractor.search.SearchExtractor;
|
|||
import org.schabi.newpipe.extractor.services.DefaultSearchExtractorTest;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeSearchQueryHandlerFactory;
|
||||
|
||||
import java.net.URLEncoder;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.net.URLEncoder;
|
||||
|
||||
import static java.util.Collections.singletonList;
|
||||
import static org.schabi.newpipe.extractor.ServiceList.YouTube;
|
||||
|
|
|
@ -3,15 +3,21 @@ package org.schabi.newpipe.extractor.services.youtube.search;
|
|||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.DownloaderTestImpl;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.*;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.DefaultSearchExtractorTest;
|
||||
import org.schabi.newpipe.extractor.services.youtube.YoutubeService;
|
||||
import org.schabi.newpipe.extractor.stream.Description;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static java.util.Collections.singletonList;
|
||||
import static junit.framework.TestCase.assertFalse;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
@ -211,4 +217,39 @@ public class YoutubeSearchExtractorTest {
|
|||
assertNoDuplicatedItems(YouTube, page1, page2);
|
||||
}
|
||||
}
|
||||
|
||||
public static class MetaInfoTest extends DefaultSearchExtractorTest {
|
||||
private static SearchExtractor extractor;
|
||||
private static final String QUERY = "Covid";
|
||||
|
||||
@Test
|
||||
public void clarificationTest() throws Exception {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance());
|
||||
extractor = YouTube.getSearchExtractor(QUERY, singletonList(VIDEOS), "");
|
||||
extractor.fetchPage();
|
||||
}
|
||||
|
||||
@Override public String expectedSearchString() { return QUERY; }
|
||||
@Override public String expectedSearchSuggestion() { return null; }
|
||||
@Override public List<MetaInfo> expectedMetaInfo() throws MalformedURLException {
|
||||
final List<URL> urls = new ArrayList<>();
|
||||
urls.add(new URL("https://www.who.int/emergencies/diseases/novel-coronavirus-2019"));
|
||||
urls.add(new URL("https://www.who.int/emergencies/diseases/novel-coronavirus-2019/covid-19-vaccines"));
|
||||
final List<String> urlTexts = new ArrayList<>();
|
||||
urlTexts.add("LEARN MORE");
|
||||
urlTexts.add("Learn about vaccine progress from the WHO");
|
||||
return Collections.singletonList(new MetaInfo(
|
||||
"COVID-19",
|
||||
new Description("Get the latest information from the WHO about coronavirus.", Description.PLAIN_TEXT),
|
||||
urls,
|
||||
urlTexts
|
||||
));
|
||||
}
|
||||
@Override public SearchExtractor extractor() { return extractor; }
|
||||
@Override public StreamingService expectedService() { return YouTube; }
|
||||
@Override public String expectedName() { return QUERY; }
|
||||
@Override public String expectedId() { return QUERY; }
|
||||
@Override public String expectedUrlContains() { return "youtube.com/results?search_query=" + QUERY; }
|
||||
@Override public String expectedOriginalUrlContains() throws Exception { return "youtube.com/results?search_query=" + QUERY; }
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,16 +3,22 @@ package org.schabi.newpipe.extractor.services.youtube.stream;
|
|||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.DownloaderTestImpl;
|
||||
import org.schabi.newpipe.extractor.MetaInfo;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.services.DefaultStreamExtractorTest;
|
||||
import org.schabi.newpipe.extractor.stream.Description;
|
||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamSegment;
|
||||
import org.schabi.newpipe.extractor.stream.StreamType;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
@ -258,4 +264,46 @@ public class YoutubeStreamExtractorDefaultTest {
|
|||
assertNotNull(segment.getPreviewUrl());
|
||||
}
|
||||
}
|
||||
|
||||
public static class PublicBroadcasterTest extends DefaultStreamExtractorTest {
|
||||
private static final String ID = "q6fgbYWsMgw";
|
||||
private static final int TIMESTAMP = 0;
|
||||
private static final String URL = BASE_URL + ID;
|
||||
private static StreamExtractor extractor;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance());
|
||||
extractor = YouTube.getStreamExtractor(URL);
|
||||
extractor.fetchPage();
|
||||
}
|
||||
|
||||
@Override public StreamExtractor extractor() { return extractor; }
|
||||
@Override public StreamingService expectedService() { return YouTube; }
|
||||
@Override public String expectedName() { return "Was verbirgt sich am tiefsten Punkt des Ozeans?"; }
|
||||
@Override public String expectedId() { return ID; }
|
||||
@Override public String expectedUrlContains() { return BASE_URL + ID; }
|
||||
@Override public String expectedOriginalUrlContains() { return URL; }
|
||||
|
||||
@Override public StreamType expectedStreamType() { return StreamType.VIDEO_STREAM; }
|
||||
@Override public String expectedUploaderName() { return "Dinge Erklärt – Kurzgesagt"; }
|
||||
@Override public String expectedUploaderUrl() { return "https://www.youtube.com/channel/UCwRH985XgMYXQ6NxXDo8npw"; }
|
||||
@Override public List<String> expectedDescriptionContains() { return Arrays.asList("Lasst uns abtauchen!", "Angebot von funk", "Dinge"); }
|
||||
@Override public long expectedLength() { return 631; }
|
||||
@Override public long expectedTimestamp() { return TIMESTAMP; }
|
||||
@Override public long expectedViewCountAtLeast() { return 1_600_000; }
|
||||
@Nullable @Override public String expectedUploadDate() { return "2019-06-12 00:00:00.000"; }
|
||||
@Nullable @Override public String expectedTextualUploadDate() { return "2019-06-12"; }
|
||||
@Override public long expectedLikeCountAtLeast() { return 70000; }
|
||||
@Override public long expectedDislikeCountAtLeast() { return 500; }
|
||||
@Override public List<MetaInfo> expectedMetaInfo() throws MalformedURLException {
|
||||
return Collections.singletonList(new MetaInfo(
|
||||
"",
|
||||
new Description("Funk is a German public broadcast service.", Description.PLAIN_TEXT),
|
||||
Collections.singletonList(new URL("https://de.wikipedia.org/wiki/Funk_(Medienangebot)?wprov=yicw1")),
|
||||
Collections.singletonList("Wikipedia (German)")
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue