Use pbj in YoutubeChannelExtractor
parent 0973263aab
commit 89a41a7f69
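For orientation, here is a minimal, self-contained sketch of the request pattern this commit switches to: appending pbj=1 to the channel's /videos URL and sending the X-YouTube-Client-Name / X-YouTube-Client-Version headers makes YouTube answer with a JSON array instead of an HTML page, and the object the extractor keeps as initialData sits at index 1 under "response" (as the diff below shows). The HTTP client, the client-version string and the example channel ID are illustrative stand-ins — the extractor itself goes through its Downloader and YoutubeParsingHelper.HARDCODED_CLIENT_VERSION — and the endpoint behaviour is as of the time of this commit.

// Sketch only: standalone illustration of the "?pbj=1" JSON endpoint used by onFetchPage().
// Requires the nanojson library (com.grack.nanojson), which the extractor already uses.
import com.grack.nanojson.JsonArray;
import com.grack.nanojson.JsonObject;
import com.grack.nanojson.JsonParser;
import com.grack.nanojson.JsonParserException;

import java.io.IOException;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class PbjChannelFetchSketch {
    // Hypothetical placeholder; the extractor sends YoutubeParsingHelper.HARDCODED_CLIENT_VERSION here.
    private static final String CLIENT_VERSION = "2.20200214.04.00";

    public static void main(String[] args) throws IOException, InterruptedException, JsonParserException {
        // Any channel URL works the same way; this channel ID is just an example.
        final String url = "https://www.youtube.com/channel/UC_x5XG1OV2P6uZZ5FSM9Ttw/videos?pbj=1";

        final HttpRequest request = HttpRequest.newBuilder(URI.create(url))
                .header("X-YouTube-Client-Name", "1")
                .header("X-YouTube-Client-Version", CLIENT_VERSION)
                .GET()
                .build();
        final String body = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString())
                .body();

        // With pbj=1 the body is a JSON array rather than an HTML document; the part the
        // extractor stores as initialData is the object at index 1 under "response".
        final JsonArray ajaxJson = JsonParser.array().from(body);
        final JsonObject initialData = ajaxJson.getObject(1).getObject("response");
        System.out.println(initialData.keySet());
    }
}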
@@ -53,9 +53,7 @@ import static org.schabi.newpipe.extractor.utils.Utils.HTTPS;
 @SuppressWarnings("WeakerAccess")
 public class YoutubeChannelExtractor extends ChannelExtractor {
     /*package-private*/ static final String CHANNEL_URL_BASE = "https://www.youtube.com/channel/";
-    private static final String CHANNEL_URL_PARAMETERS = "/videos?view=0&flow=list&sort=dd&live_view=10000";
 
-    private Document doc;
     private JsonObject initialData;
 
     public YoutubeChannelExtractor(StreamingService service, ListLinkHandler linkHandler) {
@@ -64,16 +62,36 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
 
     @Override
     public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
-        String channelUrl = super.getUrl() + CHANNEL_URL_PARAMETERS;
-        final Response response = downloader.get(channelUrl, getExtractorLocalization());
-        doc = YoutubeParsingHelper.parseAndCheckPage(channelUrl, response);
-        initialData = YoutubeParsingHelper.getInitialData(response.responseBody());
+        final String url = super.getUrl() + "/videos?pbj=1";
+
+        JsonArray ajaxJson;
+
+        Map<String, List<String>> headers = new HashMap<>();
+        headers.put("X-YouTube-Client-Name", Collections.singletonList("1"));
+        // Use the hardcoded client version first to get JSON with a structure we know
+        // TODO: Use YoutubeParsingHelper.getClientVersion() as fallback
+        headers.put("X-YouTube-Client-Version",
+                Collections.singletonList(YoutubeParsingHelper.HARDCODED_CLIENT_VERSION));
+        final String response = getDownloader().get(url, headers, getExtractorLocalization()).responseBody();
+        if (response.length() < 50) { // ensure to have a valid response
+            throw new ParsingException("Could not parse json data for next streams");
+        }
+
+        try {
+            ajaxJson = JsonParser.array().from(response);
+        } catch (JsonParserException e) {
+            throw new ParsingException("Could not parse json data for next streams", e);
+        }
+
+        initialData = ajaxJson.getObject(1).getObject("response");
     }
 
 
     @Override
     public String getNextPageUrl() throws ExtractionException {
-        return getNextPageUrlFrom(getVideoTab().getObject("content").getObject("sectionListRenderer").getArray("continuations"));
+        return getNextPageUrlFrom(getVideoTab().getObject("content").getObject("sectionListRenderer")
+                .getArray("contents").getObject(0).getObject("itemSectionRenderer")
+                .getArray("contents").getObject(0).getObject("gridRenderer").getArray("continuations"));
     }
 
     @Nonnull
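As a reading aid for the chained lookups in getNextPageUrl() above and getInitialPage() below, this is the JSON layout the new code assumes under the channel's video tab in the pbj response: the grid of uploads sits at content → sectionListRenderer → contents[0] → itemSectionRenderer → contents[0] → gridRenderer, with the stream entries in "items" and the paging data in "continuations". A hedged helper spelling out that walk (GridPathSketch and videoTab are illustrative names, not part of the commit):

import com.grack.nanojson.JsonObject;

final class GridPathSketch {
    // Mirrors the path used by getNextPageUrl()/getInitialPage(), assuming the pbj layout
    // this commit targets; videoTab stands for whatever getVideoTab() returns.
    static JsonObject videoGridOf(final JsonObject videoTab) {
        return videoTab.getObject("content")
                .getObject("sectionListRenderer")
                .getArray("contents").getObject(0)
                .getObject("itemSectionRenderer")
                .getArray("contents").getObject(0)
                .getObject("gridRenderer"); // "items": stream entries, "continuations": next page
    }
}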
@@ -181,7 +199,9 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
     public InfoItemsPage<StreamInfoItem> getInitialPage() throws ExtractionException {
         StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
 
-        JsonArray videos = getVideoTab().getObject("content").getObject("sectionListRenderer").getArray("contents");
+        JsonArray videos = getVideoTab().getObject("content").getObject("sectionListRenderer").getArray("contents")
+                .getObject(0).getObject("itemSectionRenderer").getArray("contents").getObject(0)
+                .getObject("gridRenderer").getArray("items");
         collectStreamsFrom(collector, videos);
 
         return new InfoItemsPage<>(collector, getNextPageUrl());
@@ -202,33 +222,25 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
 
         Map<String, List<String>> headers = new HashMap<>();
         headers.put("X-YouTube-Client-Name", Collections.singletonList("1"));
+        // Use the hardcoded client version first to get JSON with a structure we know
+        // TODO: Use YoutubeParsingHelper.getClientVersion() as fallback
+        headers.put("X-YouTube-Client-Version",
+                Collections.singletonList(YoutubeParsingHelper.HARDCODED_CLIENT_VERSION));
+        final String response = getDownloader().get(pageUrl, headers, getExtractorLocalization()).responseBody();
+        if (response.length() < 50) { // ensure to have a valid response
+            throw new ParsingException("Could not parse json data for next streams");
+        }
+
         try {
-            // Use the hardcoded client version first to get JSON with a structure we know
-            headers.put("X-YouTube-Client-Version",
-                    Collections.singletonList(YoutubeParsingHelper.HARDCODED_CLIENT_VERSION));
-            final String response = getDownloader().get(pageUrl, headers, getExtractorLocalization()).responseBody();
-            if (response.length() < 50) { // ensure to have a valid response
-                throw new ParsingException("Could not parse json data for next streams");
-            }
             ajaxJson = JsonParser.array().from(response);
-        } catch (Exception e) {
-            try {
-                headers.put("X-YouTube-Client-Version",
-                        Collections.singletonList(YoutubeParsingHelper.getClientVersion(initialData, doc.toString())));
-                final String response = getDownloader().get(pageUrl, headers, getExtractorLocalization()).responseBody();
-                if (response.length() < 50) { // ensure to have a valid response
-                    throw new ParsingException("Could not parse json data for next streams");
-                }
-                ajaxJson = JsonParser.array().from(response);
-            } catch (JsonParserException ignored) {
-                throw new ParsingException("Could not parse json data for next streams", e);
-            }
+        } catch (JsonParserException e) {
+            throw new ParsingException("Could not parse json data for next streams", e);
         }
 
         JsonObject sectionListContinuation = ajaxJson.getObject(1).getObject("response")
-                .getObject("continuationContents").getObject("sectionListContinuation");
+                .getObject("continuationContents").getObject("gridContinuation");
 
-        collectStreamsFrom(collector, sectionListContinuation.getArray("contents"));
+        collectStreamsFrom(collector, sectionListContinuation.getArray("items"));
 
         return new InfoItemsPage<>(collector, getNextPageUrlFrom(sectionListContinuation.getArray("continuations")));
     }
@@ -254,10 +266,9 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
         final TimeAgoParser timeAgoParser = getTimeAgoParser();
 
         for (Object video : videos) {
-            JsonObject videoInfo = ((JsonObject) video).getObject("itemSectionRenderer")
-                    .getArray("contents").getObject(0);
-            if (videoInfo.getObject("videoRenderer") != null) {
-                collector.commit(new YoutubeStreamInfoItemExtractor(videoInfo.getObject("videoRenderer"), timeAgoParser) {
+            if (((JsonObject) video).getObject("gridVideoRenderer") != null) {
+                collector.commit(new YoutubeStreamInfoItemExtractor(
+                        ((JsonObject) video).getObject("gridVideoRenderer"), timeAgoParser) {
                     @Override
                     public String getUploaderName() {
                         return uploaderName;
@@ -91,12 +91,23 @@ public class YoutubeStreamInfoItemExtractor implements StreamInfoItemExtractor {
 
     @Override
     public long getDuration() throws ParsingException {
+        if (getStreamType() == StreamType.LIVE_STREAM) return -1;
+        String duration = null;
         try {
-            if (getStreamType() == StreamType.LIVE_STREAM) return -1;
-            return YoutubeParsingHelper.parseDurationString(videoInfo.getObject("lengthText").getString("simpleText"));
-        } catch (Exception e) {
-            throw new ParsingException("Could not get duration", e);
+            duration = videoInfo.getObject("lengthText").getString("simpleText");
+        } catch (Exception ignored) {}
+        if (duration == null) {
+            try {
+                for (Object thumbnailOverlay : videoInfo.getArray("thumbnailOverlays")) {
+                    if (((JsonObject) thumbnailOverlay).getObject("thumbnailOverlayTimeStatusRenderer") != null) {
+                        duration = ((JsonObject) thumbnailOverlay).getObject("thumbnailOverlayTimeStatusRenderer")
+                                .getObject("text").getString("simpleText");
+                    }
+                }
+            } catch (Exception ignored) {}
         }
+        if (duration != null) return YoutubeParsingHelper.parseDurationString(duration);
+        throw new ParsingException("Could not get duration");
     }
 
     @Override
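For completeness, a small sketch of the two-step duration lookup the new getDuration() performs on a gridVideoRenderer object: first lengthText.simpleText, then, if that is absent, the thumbnailOverlayTimeStatusRenderer entry inside thumbnailOverlays. It assumes nanojson's accessors return null for missing keys (which the null checks in the diff suggest); the class and method names are illustrative only, not part of the commit.

import com.grack.nanojson.JsonObject;

final class DurationLookupSketch {
    // Returns the raw duration text such as "12:34", or null if neither location is present;
    // the extractor then feeds this into YoutubeParsingHelper.parseDurationString().
    static String durationTextOf(final JsonObject videoInfo) {
        String duration = null;
        final JsonObject lengthText = videoInfo.getObject("lengthText");
        if (lengthText != null) {
            duration = lengthText.getString("simpleText");
        }
        if (duration == null && videoInfo.getArray("thumbnailOverlays") != null) {
            for (final Object overlay : videoInfo.getArray("thumbnailOverlays")) {
                final JsonObject timeStatus = ((JsonObject) overlay)
                        .getObject("thumbnailOverlayTimeStatusRenderer");
                if (timeStatus != null && timeStatus.getObject("text") != null) {
                    duration = timeStatus.getObject("text").getString("simpleText");
                }
            }
        }
        return duration;
    }
}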