made YoutubeTrendingExtractor work

commit 7b7f6d2cbf
parent 88d2fff091

YoutubeService.java
@@ -87,7 +87,7 @@ public class YoutubeService extends StreamingService {
         // add kiosks here e.g.:
         YoutubeTrendingUrlIdHandler h = new YoutubeTrendingUrlIdHandler();
         try {
-            list.addKioskEntry(new YoutubeTrendingExtractor(this, h.getUrl(""), h.getUrl("")), h);
+            list.addKioskEntry(new YoutubeTrendingExtractor(this, h.getUrl(""), null), h);
         } catch (Exception e) {
             throw new ExtractionException(e);
         }
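
Note: the second h.getUrl("") argument (nextStreamsUrl) becomes null here, which matches the updated tests further down asserting that hasMoreStreams() is false. For orientation, the test code in this commit retrieves the registered kiosk roughly like this (a sketch based on that test, with exception handling omitted; YouTube is the test's service-list entry):

    // Look up the kiosk registered above and load its page (mirrors the test's setup).
    KioskExtractor trending = YouTube.getService()
            .getKioskList()
            .getExtractorByType("Trending");
    trending.fetchPage();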

YoutubeTrendingExtractor.java
@@ -20,30 +20,38 @@ package org.schabi.newpipe.extractor.services.youtube;
  * along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
  */

-import org.schabi.newpipe.extractor.ListExtractor;
-import org.schabi.newpipe.extractor.StreamingService;
-import org.schabi.newpipe.extractor.UrlIdHandler;
 import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
+import org.jsoup.nodes.Element;
+import org.schabi.newpipe.extractor.*;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
+import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
+import org.schabi.newpipe.extractor.stream.StreamInfoItemCollector;

 import java.io.IOException;

 public class YoutubeTrendingExtractor extends KioskExtractor {

     private Document doc;

     public YoutubeTrendingExtractor(StreamingService service, String url, String nextStreamsUrl)
             throws IOException, ExtractionException {
         super(service, url, nextStreamsUrl);
     }

     @Override
-    public void fetchPage()
-            throws IOException, ExtractionException {
+    public void fetchPage() throws IOException, ExtractionException {
+        Downloader downloader = NewPipe.getDownloader();
+
+        String channelUrl = getCleanUrl();
+        String pageContent = downloader.download(channelUrl);
+        doc = Jsoup.parse(pageContent, channelUrl);
     }

     @Override
     public String getType() {
-        return "Treinding";
+        return "Trending";
     }

     @Override
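
Note: one detail worth calling out (standard jsoup behaviour, not something stated in the commit): passing channelUrl as the base URI to Jsoup.parse is what lets the attr("abs:href") / attr("abs:src") calls in the next hunk return absolute URLs. A minimal illustration:

    Document d = Jsoup.parse("<a href=\"/watch?v=abc\">x</a>", "https://www.youtube.com/feed/trending");
    // "abs:" resolves the relative href against the base URI,
    // giving "https://www.youtube.com/watch?v=abc"
    String absolute = d.select("a").first().attr("abs:href");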

@@ -57,7 +65,63 @@ public class YoutubeTrendingExtractor extends KioskExtractor {
     }

     @Override
-    public StreamInfoItemCollector getStreams() {
-        return null;
+    public StreamInfoItemCollector getStreams() throws ParsingException {
+        StreamInfoItemCollector collector = new StreamInfoItemCollector(getServiceId());
+        Element ul = doc.select("ul[class*=\"expanded-shelf-content-list\"]").first();
+        for(final Element li : ul.children()) {
+            final Element el = li.select("div[class*=\"yt-lockup-dismissable\"]").first();
+            collector.commit(new YoutubeStreamInfoItemExtractor(li) {
+                @Override
+                public String getUrl() throws ParsingException {
+                    try {
+                        Element dl = el.select("h3").first().select("a").first();
+                        return dl.attr("abs:href");
+                    } catch (Exception e) {
+                        throw new ParsingException("Could not get web page url for the video", e);
+                    }
+                }
+
+                @Override
+                public String getName() throws ParsingException {
+                    try {
+                        Element dl = el.select("h3").first().select("a").first();
+                        return dl.text();
+                    } catch (Exception e) {
+                        throw new ParsingException("Could not get the name of the video", e);
+                    }
+                }
+
+                @Override
+                public String getUploaderName() throws ParsingException {
+                    try {
+                        Element uploaderEl = el.select("div[class*=\"yt-lockup-byline \"]").first();
+                        return uploaderEl.select("a").text();
+                    } catch (Exception e) {
+                        throw new ParsingException("Could not get uploader name", e);
+                    }
+                }
+
+                @Override
+                public String getThumbnailUrl() throws ParsingException {
+                    try {
+                        String url;
+                        Element te = li.select("span[class=\"yt-thumb-simple\"]").first()
+                                .select("img").first();
+                        url = te.attr("abs:src");
+                        // Sometimes YouTube sends links to gif files which somehow seem to not exist
+                        // anymore. Items with such a gif also offer a secondary image source, so we
+                        // use that if we've caught such an item.
+                        if (url.contains(".gif")) {
+                            url = te.attr("abs:data-thumb");
+                        }
+                        return url;
+                    } catch (Exception e) {
+                        throw new ParsingException("Could not get thumbnail url", e);
+                    }
+                }
+            });
+        }
+
+        return collector;
     }
 }
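
Note: to make the selector chain above easier to follow in isolation, here is a self-contained sketch that runs the same selections against an inline HTML snippet. The markup is an assumption modelled on the class names used in the commit, not YouTube's real trending page:

    import org.jsoup.Jsoup;
    import org.jsoup.nodes.Document;
    import org.jsoup.nodes.Element;

    public class TrendingSelectorSketch {
        public static void main(String[] args) {
            // Hypothetical markup using the same class names the extractor selects on.
            String html = "<ul class=\"expanded-shelf-content-list\">"
                    + "<li><div class=\"yt-lockup-dismissable\">"
                    + "<h3><a href=\"/watch?v=abc\">Some video</a></h3>"
                    + "<div class=\"yt-lockup-byline \"><a>Some uploader</a></div>"
                    + "</div></li>"
                    + "</ul>";
            Document doc = Jsoup.parse(html, "https://www.youtube.com/feed/trending");
            Element ul = doc.select("ul[class*=\"expanded-shelf-content-list\"]").first();
            for (Element li : ul.children()) {
                Element el = li.select("div[class*=\"yt-lockup-dismissable\"]").first();
                Element a = el.select("h3").first().select("a").first();
                // title and absolute watch url, as getName()/getUrl() extract them
                System.out.println(a.text() + " -> " + a.attr("abs:href"));
                // uploader, as getUploaderName() extracts it
                System.out.println(el.select("div[class*=\"yt-lockup-byline \"]").first().select("a").text());
            }
        }
    }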

StreamInfoItemCollector.java
@@ -1,9 +1,13 @@
 package org.schabi.newpipe.extractor.stream;

+import org.schabi.newpipe.extractor.InfoItem;
 import org.schabi.newpipe.extractor.InfoItemCollector;
 import org.schabi.newpipe.extractor.exceptions.FoundAdException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
+
+import java.util.List;
+import java.util.Vector;

 /*
  * Created by Christian Schabesberger on 28.02.16.
  *
@@ -80,4 +84,14 @@ public class StreamInfoItemCollector extends InfoItemCollector {
             addError(e);
         }
     }
+
+    public List<StreamInfoItem> getStreamInfoItemList() {
+        List<StreamInfoItem> siiList = new Vector<>();
+        for(InfoItem ii : super.getItemList()) {
+            if(ii instanceof StreamInfoItem) {
+                siiList.add((StreamInfoItem) ii);
+            }
+        }
+        return siiList;
+    }
 }
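
Note: the new getStreamInfoItemList() helper simply narrows the collector's generic InfoItem list to StreamInfoItem, saving callers the instanceof/cast dance. A hedged usage sketch, assuming collector is the StreamInfoItemCollector returned by getStreams() above:

    List<StreamInfoItem> streams = collector.getStreamInfoItemList();
    for (StreamInfoItem stream : streams) {
        // work with the stream-typed item directly, no manual cast from InfoItem needed
    }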

YoutubeTrendingExtractorTest.java
@@ -23,9 +23,13 @@ package org.schabi.newpipe.extractor.services.youtube;
 import org.junit.Before;
 import org.junit.Test;
 import org.schabi.newpipe.Downloader;
+import org.schabi.newpipe.extractor.InfoItem;
+import org.schabi.newpipe.extractor.InfoItemCollector;
 import org.schabi.newpipe.extractor.NewPipe;
 import org.schabi.newpipe.extractor.kiosk.KioskExtractor;

+import java.util.List;
+
 import static junit.framework.TestCase.assertFalse;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
@@ -46,6 +50,7 @@ public class YoutubeTrendingExtractorTest {
         extractor = YouTube.getService()
                 .getKioskList()
                 .getExtractorByType("Trending");
+        extractor.fetchPage();
     }

     @Test

@@ -65,7 +70,17 @@ public class YoutubeTrendingExtractorTest {

     @Test
     public void testGetStreams() throws Exception {
-        assertTrue("no streams are received", !extractor.getStreams().getItemList().isEmpty());
+        InfoItemCollector collector = extractor.getStreams();
+        if(!collector.getErrors().isEmpty()) {
+            System.err.println("----------");
+            for(Throwable e : collector.getErrors()) {
+                e.printStackTrace();
+                System.err.println("----------");
+            }
+        }
+        assertTrue("no streams are received",
+                !collector.getItemList().isEmpty()
+                        && collector.getErrors().isEmpty());
     }

     @Test

@@ -77,12 +92,17 @@ public class YoutubeTrendingExtractorTest {
     public void testHasMoreStreams() throws Exception {
         // Setup the streams
         extractor.getStreams();
-        assertTrue("don't have more streams", extractor.hasMoreStreams());
+        assertFalse("has more streams", extractor.hasMoreStreams());
     }

     @Test
     public void testGetNextStreams() throws Exception {
-        assertFalse("extractor has next streams", !extractor.getNextStreams().nextItemsList.isEmpty());
+        assertFalse("extractor has more streams after getNextStreams", extractor.hasMoreStreams());
+        assertTrue("extractor has next streams", extractor.getNextStreams() == null
+                || extractor.getNextStreams().nextItemsList.isEmpty());
     }

+    @Test
+    public void testGetCleanUrl() throws Exception {
+        assertEquals("https://www.youtube.com/feed/trending", extractor.getCleanUrl());
+    }
 }
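
Note: on paging, since the kiosk is now registered with a null nextStreamsUrl, the updated tests expect hasMoreStreams() to be false and getNextStreams() to be null or empty. A consumer-side sketch of the defensive pattern this implies (hypothetical caller code, not part of the commit):

    if (trending.hasMoreStreams()) {
        // not expected for the trending kiosk in this commit, but guard anyway
        trending.getNextStreams();
    }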

YoutubeTrendingUrlIdHandlerTest.java
@@ -71,6 +71,7 @@ public class YoutubeTrendingUrlIdHandlerTest {
         assertFalse(urlIdHandler.acceptUrl("youtube.com/feed/trending askjkf"));
         assertFalse(urlIdHandler.acceptUrl("askdjfi youtube.com/feed/trending askjkf"));
         assertFalse(urlIdHandler.acceptUrl(" youtube.com/feed/trending"));
         assertFalse(urlIdHandler.acceptUrl("https://www.youtube.com/feed/trending.html"));
+        assertFalse(urlIdHandler.acceptUrl(""));
     }
 }