Merge pull request #578 from TeamNewPipe/code_improvements
Code improvements
Commit 564d74c250
@@ -4,6 +4,7 @@ import org.schabi.newpipe.extractor.InfoItem;
 import org.schabi.newpipe.extractor.InfoItemsCollector;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Vector;
 
@@ -95,7 +96,7 @@ public class CommentsInfoItemsCollector extends InfoItemsCollector<CommentsInfoI
     }
 
     public List<CommentsInfoItem> getCommentsInfoItemList() {
-        List<CommentsInfoItem> siiList = new Vector<>();
+        List<CommentsInfoItem> siiList = new ArrayList<>();
         for (InfoItem ii : super.getItems()) {
             if (ii instanceof CommentsInfoItem) {
                 siiList.add((CommentsInfoItem) ii);
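A note on the Vector-to-ArrayList change above: Vector synchronizes every method call, which costs time and buys nothing for a list that is created, filled and returned inside a single method. A minimal standalone sketch of the same pattern (class and method names here are illustrative, not part of the extractor):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class CopyIfStringSketch {
    // Copies only the String elements of the input, the way the collectors above
    // copy only the items of their concrete InfoItem subtype.
    static List<String> stringsOf(List<Object> items) {
        final List<String> result = new ArrayList<>();  // was new Vector<>(): no locking needed here
        for (final Object item : items) {
            if (item instanceof String) {
                result.add((String) item);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        System.out.println(stringsOf(Arrays.<Object>asList("a", 1, "b")));  // prints [a, b]
    }
}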
@@ -69,10 +69,8 @@ public class Response {
     public String getHeader(String name) {
         for (Map.Entry<String, List<String>> headerEntry : responseHeaders.entrySet()) {
             final String key = headerEntry.getKey();
-            if (key != null && key.equalsIgnoreCase(name)) {
-                if (headerEntry.getValue().size() > 0) {
-                    return headerEntry.getValue().get(0);
-                }
-            }
+            if (key != null && key.equalsIgnoreCase(name) && !headerEntry.getValue().isEmpty()) {
+                return headerEntry.getValue().get(0);
+            }
         }
 
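The hunk above merges the nested ifs into one condition and prefers isEmpty() over size() > 0, which states the intent directly and short-circuits before touching the value list. A small standalone sketch of the resulting lookup (the data and class name are illustrative, not from the PR):

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class HeaderLookupSketch {
    // Returns the first value of a header, matching the name case-insensitively.
    static String getHeader(Map<String, List<String>> headers, String name) {
        for (Map.Entry<String, List<String>> entry : headers.entrySet()) {
            final String key = entry.getKey();
            if (key != null && key.equalsIgnoreCase(name) && !entry.getValue().isEmpty()) {
                return entry.getValue().get(0);
            }
        }
        return null;
    }

    public static void main(String[] args) {
        Map<String, List<String>> headers = new HashMap<>();
        headers.put("Content-Type", Arrays.asList("application/json"));
        System.out.println(getHeader(headers, "content-type"));  // application/json
    }
}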
@@ -21,7 +21,7 @@ package org.schabi.newpipe.extractor.exceptions;
  */
 
 public class ReCaptchaException extends ExtractionException {
-    private String url;
+    private final String url;
 
     public ReCaptchaException(final String message, final String url) {
         super(message);
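Marking the url field final, as done above, lets the compiler prove it is assigned exactly once in the constructor and never reassigned afterwards. A minimal sketch of the same idea (the class name is invented for illustration):

public class CaptchaChallengeSketch extends Exception {
    private final String url;   // final: must be set in every constructor, cannot change later

    public CaptchaChallengeSketch(final String message, final String url) {
        super(message);
        this.url = url;
    }

    public String getUrl() {
        return url;
    }

    public static void main(String[] args) {
        System.out.println(new CaptchaChallengeSketch("reCAPTCHA required", "https://example.com").getUrl());
    }
}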
@@ -37,7 +37,7 @@ public abstract class SearchQueryHandlerFactory extends ListLinkHandlerFactory {
     }
 
     public SearchQueryHandler fromQuery(String query) throws ParsingException {
-        return fromQuery(query, new ArrayList<String>(0), EMPTY_STRING);
+        return fromQuery(query, new ArrayList<>(0), EMPTY_STRING);
     }
 
     /**
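The <String> dropped above relies on the diamond operator: since Java 7 the compiler infers the generic argument from the target type, so repeating it only adds noise. An illustrative snippet:

import java.util.ArrayList;
import java.util.List;

public class DiamondSketch {
    public static void main(String[] args) {
        List<String> before = new ArrayList<String>(0);  // explicit type argument, redundant
        List<String> after = new ArrayList<>(0);         // inferred from the declared type
        System.out.println(before.equals(after));        // true: both are empty String lists
    }
}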
@@ -106,7 +106,7 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
             info.addError(e);
         }
         // do not fail if everything but the uploader infos could be collected
-        if (uploaderParsingErrors.size() > 0 &&
+        if (!uploaderParsingErrors.isEmpty() &&
                 (!info.getErrors().isEmpty() || uploaderParsingErrors.size() < 3)) {
             info.addAllErrors(uploaderParsingErrors);
         }
@@ -87,7 +87,7 @@ public class BandcampSearchExtractor extends SearchExtractor {
 
         // Count pages
         final Elements pageLists = d.getElementsByClass("pagelist");
-        if (pageLists.size() == 0)
+        if (pageLists.isEmpty())
             return new InfoItemsPage<>(collector, null);
 
         final Elements pages = pageLists.first().getElementsByTag("li");
@@ -96,7 +96,7 @@ public class BandcampSearchExtractor extends SearchExtractor {
         int currentPage = -1;
         for (int i = 0; i < pages.size(); i++) {
             final Element pageElement = pages.get(i);
-            if (pageElement.getElementsByTag("span").size() > 0) {
+            if (!pageElement.getElementsByTag("span").isEmpty()) {
                 currentPage = i + 1;
                 break;
             }
@@ -217,7 +217,7 @@ public class MediaCCCLiveStreamExtractor extends StreamExtractor {
     }
 
     @Override
-    public List<VideoStream> getVideoOnlyStreams() throws IOException, ExtractionException {
+    public List<VideoStream> getVideoOnlyStreams() {
         return null;
     }
 
@@ -251,8 +251,8 @@ public class MediaCCCLiveStreamExtractor extends StreamExtractor {
 
     @Nonnull
     @Override
-    public String getHost() throws ParsingException {
-        return null;
+    public String getHost() {
+        return "";
     }
 
     @Nonnull
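Two separate conventions show up in the MediaCCC hunks above: an overriding method may declare fewer checked exceptions than the method it overrides, and an accessor annotated @Nonnull should hand back a neutral value such as "" instead of null. A small sketch of both using plain JDK types (the class names are invented for illustration):

import java.io.IOException;

public class NarrowThrowsSketch {
    static class Base {
        public String host() throws IOException {
            throw new IOException("not available");
        }
    }

    static class Live extends Base {
        @Override
        public String host() {   // dropping the throws clause is legal: nothing is thrown here
            return "";           // empty string instead of null keeps callers null-safe
        }
    }

    public static void main(String[] args) {
        System.out.println("host: '" + new Live().host() + "'");  // host: ''
    }
}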
@@ -2,8 +2,6 @@ package org.schabi.newpipe.extractor.services.media_ccc.extractors;
 
 import com.grack.nanojson.JsonArray;
 import com.grack.nanojson.JsonObject;
-import com.grack.nanojson.JsonParser;
-import com.grack.nanojson.JsonParserException;
 import org.schabi.newpipe.extractor.Page;
 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.downloader.Downloader;
@@ -18,7 +16,7 @@ import javax.annotation.Nonnull;
 import java.io.IOException;
 
 public class MediaCCCLiveStreamKiosk extends KioskExtractor<StreamInfoItem> {
-    public JsonArray doc;
+    private JsonArray doc;
 
     public MediaCCCLiveStreamKiosk(StreamingService streamingService, ListLinkHandler linkHandler, String kioskId) {
        super(streamingService, linkHandler, kioskId);
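The doc field above goes from public to private so that only the kiosk itself can replace the fetched array. A compact sketch of that encapsulation (names and data invented for illustration):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class FetchedDataSketch {
    private List<String> doc = Collections.emptyList();  // no longer public: only this class writes it

    void fetch() {
        final List<String> fresh = new ArrayList<>();
        fresh.add("room 1");
        doc = fresh;                                      // single, controlled write path
    }

    List<String> items() {
        return Collections.unmodifiableList(doc);         // read-only view for callers
    }

    public static void main(String[] args) {
        final FetchedDataSketch sketch = new FetchedDataSketch();
        sketch.fetch();
        System.out.println(sketch.items());               // [room 1]
    }
}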
@@ -12,7 +12,6 @@ import org.schabi.newpipe.extractor.localization.Localization;
 import java.io.IOException;
 import java.time.OffsetDateTime;
 import java.time.format.DateTimeParseException;
-import java.util.Locale;
 import java.util.regex.Pattern;
 
 public final class MediaCCCParsingHelper {
@@ -63,7 +63,7 @@ public class PeertubeService extends StreamingService {
     public SearchExtractor getSearchExtractor(SearchQueryHandler queryHandler) {
         final List<String> contentFilters = queryHandler.getContentFilters();
         boolean external = false;
-        if (contentFilters.size() > 0 && contentFilters.get(0).startsWith("sepia_")) {
+        if (!contentFilters.isEmpty() && contentFilters.get(0).startsWith("sepia_")) {
             external = true;
         }
         return new PeertubeSearchExtractor(this, queryHandler, external);
@@ -163,7 +163,7 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
         }
 
         final Response response = downloader.get(accountUrl);
-        if (response != null && response.responseBody() != null) {
+        if (response != null) {
             setInitialData(response.responseBody());
         } else {
             throw new ExtractionException("Unable to extract PeerTube account data");
@@ -136,7 +136,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
     public void onFetchPage(final Downloader downloader) throws IOException, ExtractionException {
         final Response response = downloader.get(
                 baseUrl + PeertubeChannelLinkHandlerFactory.API_ENDPOINT + getId());
-        if (response != null && response.responseBody() != null) {
+        if (response != null) {
             setInitialData(response.responseBody());
         } else {
             throw new ExtractionException("Unable to extract PeerTube channel data");
@@ -3,7 +3,6 @@ package org.schabi.newpipe.extractor.services.peertube.extractors;
 import com.grack.nanojson.JsonArray;
 import com.grack.nanojson.JsonObject;
-import com.grack.nanojson.JsonParser;
 
 import org.schabi.newpipe.extractor.Page;
 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.comments.CommentsExtractor;
@@ -15,14 +14,11 @@ import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
-import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
 import org.schabi.newpipe.extractor.utils.JsonUtils;
 import org.schabi.newpipe.extractor.utils.Utils;
 
 import java.io.IOException;
 
-import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
-import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
-import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
+import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.*;
 import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
 
 public class PeertubeCommentsExtractor extends CommentsExtractor {
@@ -33,6 +33,7 @@ import java.util.List;
 import java.util.Locale;
 
 import static org.schabi.newpipe.extractor.utils.Utils.UTF_8;
+import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
 
 public class PeertubeStreamExtractor extends StreamExtractor {
     private final String baseUrl;
@@ -364,7 +365,7 @@ public class PeertubeStreamExtractor extends StreamExtractor {
     @Override
     public void onFetchPage(final Downloader downloader) throws IOException, ExtractionException {
         final Response response = downloader.get(baseUrl + PeertubeStreamLinkHandlerFactory.VIDEO_API_ENDPOINT + getId());
-        if (response != null && response.responseBody() != null) {
+        if (response != null) {
             setInitialData(response.responseBody());
         } else {
             throw new ExtractionException("Unable to extract PeerTube channel data");
@@ -400,7 +401,7 @@ public class PeertubeStreamExtractor extends StreamExtractor {
             final String languageCode = JsonUtils.getString(caption, "language.id");
             final String ext = url.substring(url.lastIndexOf(".") + 1);
             final MediaFormat fmt = MediaFormat.getFromSuffix(ext);
-            if (fmt != null && languageCode != null)
+            if (fmt != null && !isNullOrEmpty(languageCode))
                 subtitles.add(new SubtitlesStream(fmt, languageCode, url, false));
         }
     }
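The caption check above swaps a bare null test for the extractor's Utils.isNullOrEmpty, which also rejects empty strings; a blank language id would otherwise slip through and produce a useless subtitle entry. A standalone sketch of such a helper (this re-implementation is illustrative, not the extractor's Utils class):

public class NullOrEmptySketch {
    // Minimal stand-in for a null-or-empty check like Utils.isNullOrEmpty.
    static boolean isNullOrEmpty(String s) {
        return s == null || s.isEmpty();
    }

    public static void main(String[] args) {
        System.out.println(!isNullOrEmpty("en"));  // true: usable language code
        System.out.println(!isNullOrEmpty(""));    // false: rejected, unlike a bare null check
        System.out.println(!isNullOrEmpty(null));  // false
    }
}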
@@ -24,7 +24,7 @@ public class PeertubeSearchQueryHandlerFactory extends SearchQueryHandlerFactory
     @Override
     public String getUrl(String searchString, List<String> contentFilters, String sortFilter) throws ParsingException {
         String baseUrl;
-        if (contentFilters.size() > 0 && contentFilters.get(0).startsWith("sepia_")) {
+        if (!contentFilters.isEmpty() && contentFilters.get(0).startsWith("sepia_")) {
             baseUrl = SEPIA_BASE_URL;
         } else {
             baseUrl = ServiceList.PeerTube.getBaseUrl();
@@ -35,10 +35,7 @@ public class PeertubeSearchQueryHandlerFactory extends SearchQueryHandlerFactory
     @Override
     public String getUrl(String searchString, List<String> contentFilters, String sortFilter, String baseUrl) throws ParsingException {
         try {
-            final String url = baseUrl + SEARCH_ENDPOINT
-                    + "?search=" + URLEncoder.encode(searchString, UTF_8);
-
-            return url;
+            return baseUrl + SEARCH_ENDPOINT + "?search=" + URLEncoder.encode(searchString, UTF_8);
         } catch (UnsupportedEncodingException e) {
             throw new ParsingException("Could not encode query", e);
         }
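The inlined return above still encodes the query before appending it; the temporary variable added nothing. A small sketch of the same construction (the endpoint constant and base URL here are placeholders, not PeerTube's real values):

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public class SearchUrlSketch {
    private static final String SEARCH_ENDPOINT = "/api/v1/search/videos";  // placeholder path

    static String searchUrl(String baseUrl, String query) throws UnsupportedEncodingException {
        // Encode the user-supplied query so spaces, '&' and '?' cannot break the URL.
        return baseUrl + SEARCH_ENDPOINT + "?search=" + URLEncoder.encode(query, "UTF-8");
    }

    public static void main(String[] args) throws UnsupportedEncodingException {
        System.out.println(searchUrl("https://example.org", "cats & dogs"));
        // https://example.org/api/v1/search/videos?search=cats+%26+dogs
    }
}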
@@ -46,7 +46,7 @@ public class SoundcloudParsingHelper {
     private SoundcloudParsingHelper() {
     }
 
-    public synchronized static String clientId() throws ExtractionException, IOException {
+    public static synchronized String clientId() throws ExtractionException, IOException {
         if (!isNullOrEmpty(clientId)) return clientId;
 
         Downloader dl = NewPipe.getDownloader();
@@ -89,7 +89,7 @@ public class SoundcloudParsingHelper {
             SoundcloudStreamExtractor e = (SoundcloudStreamExtractor) SoundCloud
                     .getStreamExtractor("https://soundcloud.com/liluzivert/do-what-i-want-produced-by-maaly-raw-don-cannon");
             e.fetchPage();
-            return e.getAudioStreams().size() >= 1;
+            return !e.getAudioStreams().isEmpty();
         } catch (Exception ignored) {
             // No need to throw an exception here. If something went wrong, the client_id is wrong
             return false;
@@ -131,7 +131,7 @@ public class SoundcloudParsingHelper {
      *
      * @return the url resolved
      */
-    public static String resolveUrlWithEmbedPlayer(String apiUrl) throws IOException, ReCaptchaException, ParsingException {
+    public static String resolveUrlWithEmbedPlayer(String apiUrl) throws IOException, ReCaptchaException {
 
         String response = NewPipe.getDownloader().get("https://w.soundcloud.com/player/?url="
                 + URLEncoder.encode(apiUrl, UTF_8), SoundCloud.getLocalization()).responseBody();
@@ -83,7 +83,7 @@ public class SoundcloudSearchExtractor extends SearchExtractor {
             throw new ParsingException("Could not parse json response", e);
         }
 
-        if (searchCollection.size() == 0) {
+        if (searchCollection.isEmpty()) {
             throw new SearchExtractor.NothingFoundException("Nothing found");
         }
     }
@@ -10,7 +10,7 @@ import java.util.List;
 
 public class SoundcloudChannelLinkHandlerFactory extends ListLinkHandlerFactory {
     private static final SoundcloudChannelLinkHandlerFactory instance = new SoundcloudChannelLinkHandlerFactory();
-    private final String URL_PATTERN = "^https?://(www\\.|m\\.)?soundcloud.com/[0-9a-z_-]+" +
+    private static final String URL_PATTERN = "^https?://(www\\.|m\\.)?soundcloud.com/[0-9a-z_-]+" +
             "(/((tracks|albums|sets|reposts|followers|following)/?)?)?([#?].*)?$";
 
     public static SoundcloudChannelLinkHandlerFactory getInstance() {
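The URL_PATTERN change above (repeated for the other SoundCloud link handler factories further down) turns a per-instance field into a single class-level constant: the regex never varies between instances, so one shared copy is enough. A sketch of the idea; the pattern is simplified and the sketch also pre-compiles it, which the extractor instead leaves to its Parser utility:

import java.util.regex.Pattern;

public class UrlPatternSketch {
    // One compiled pattern shared by every instance, instead of one String per instance.
    private static final Pattern URL_PATTERN =
            Pattern.compile("^https?://(www\\.)?example\\.com/[0-9a-z_-]+/?$");

    boolean accepts(String url) {
        return URL_PATTERN.matcher(url).matches();
    }

    public static void main(String[] args) {
        final UrlPatternSketch sketch = new UrlPatternSketch();
        System.out.println(sketch.accepts("https://example.com/some_user"));  // true
        System.out.println(sketch.accepts("https://example.com/some user"));  // false
    }
}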
@@ -6,8 +6,8 @@ import org.schabi.newpipe.extractor.utils.Parser;
 import java.util.List;
 
 public class SoundcloudChartsLinkHandlerFactory extends ListLinkHandlerFactory {
-    private final String TOP_URL_PATTERN = "^https?://(www\\.|m\\.)?soundcloud.com/charts(/top)?/?([#?].*)?$";
-    private final String URL_PATTERN = "^https?://(www\\.|m\\.)?soundcloud.com/charts(/top|/new)?/?([#?].*)?$";
+    private static final String TOP_URL_PATTERN = "^https?://(www\\.|m\\.)?soundcloud.com/charts(/top)?/?([#?].*)?$";
+    private static final String URL_PATTERN = "^https?://(www\\.|m\\.)?soundcloud.com/charts(/top|/new)?/?([#?].*)?$";
 
 
     @Override
@@ -10,7 +10,7 @@ import java.util.List;
 
 public class SoundcloudPlaylistLinkHandlerFactory extends ListLinkHandlerFactory {
     private static final SoundcloudPlaylistLinkHandlerFactory instance = new SoundcloudPlaylistLinkHandlerFactory();
-    private final String URL_PATTERN = "^https?://(www\\.|m\\.)?soundcloud.com/[0-9a-z_-]+" +
+    private static final String URL_PATTERN = "^https?://(www\\.|m\\.)?soundcloud.com/[0-9a-z_-]+" +
             "/sets/[0-9a-z_-]+/?([#?].*)?$";
 
     public static SoundcloudPlaylistLinkHandlerFactory getInstance() {
@@ -8,7 +8,7 @@ import org.schabi.newpipe.extractor.utils.Utils;
 
 public class SoundcloudStreamLinkHandlerFactory extends LinkHandlerFactory {
     private static final SoundcloudStreamLinkHandlerFactory instance = new SoundcloudStreamLinkHandlerFactory();
-    private final String URL_PATTERN = "^https?://(www\\.|m\\.)?soundcloud.com/[0-9a-z_-]+" +
+    private static final String URL_PATTERN = "^https?://(www\\.|m\\.)?soundcloud.com/[0-9a-z_-]+" +
             "/(?!(tracks|albums|sets|reposts|followers|following)/?$)[0-9a-z_-]+/?([#?].*)?$";
 
     private SoundcloudStreamLinkHandlerFactory() {
@@ -123,7 +123,7 @@ public class YoutubeService extends StreamingService {
     public SearchExtractor getSearchExtractor(SearchQueryHandler query) {
         final List<String> contentFilters = query.getContentFilters();
 
-        if (contentFilters.size() > 0 && contentFilters.get(0).startsWith("music_")) {
+        if (!contentFilters.isEmpty() && contentFilters.get(0).startsWith("music_")) {
             return new YoutubeMusicSearchExtractor(this, query);
         } else {
             return new YoutubeSearchExtractor(this, query);
@@ -221,6 +221,7 @@ public class YoutubeService extends StreamingService {
         return SUPPORTED_LANGUAGES;
     }
 
+    @Override
     public List<ContentCountry> getSupportedCountries() {
         return SUPPORTED_COUNTRIES;
     }
@@ -32,7 +32,7 @@ public class YoutubeSearchQueryHandlerFactory extends SearchQueryHandlerFactory
     @Override
     public String getUrl(String searchString, List<String> contentFilters, String sortFilter) throws ParsingException {
         try {
-            if (contentFilters.size() > 0) {
+            if (!contentFilters.isEmpty()) {
                 switch (contentFilters.get(0)) {
                     case ALL:
                     default:
@@ -10,6 +10,7 @@ import org.schabi.newpipe.extractor.utils.ExtractorHelper;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 
@@ -153,11 +154,11 @@ public class StreamInfo extends Info {
 
         // Lists can be null if a exception was thrown during extraction
         if (streamInfo.getVideoStreams() == null)
-            streamInfo.setVideoStreams(new ArrayList<VideoStream>());
+            streamInfo.setVideoStreams(Collections.emptyList());
         if (streamInfo.getVideoOnlyStreams() == null)
-            streamInfo.setVideoOnlyStreams(new ArrayList<VideoStream>());
+            streamInfo.setVideoOnlyStreams(Collections.emptyList());
         if (streamInfo.getAudioStreams() == null)
-            streamInfo.setAudioStreams(new ArrayList<AudioStream>());
+            streamInfo.setAudioStreams(Collections.emptyList());
 
         Exception dashMpdError = null;
         if (!isNullOrEmpty(streamInfo.getDashMpdUrl())) {
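Collections.emptyList(), used above for the null stream lists, returns one shared immutable instance instead of allocating a fresh ArrayList each time; the caveat, shown in the sketch below, is that the returned list rejects writes:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class EmptyListSketch {
    public static void main(String[] args) {
        List<String> mutable = new ArrayList<>();        // new object, can be added to later
        List<String> shared = Collections.emptyList();   // no allocation, but immutable

        mutable.add("ok");
        System.out.println(mutable);                     // [ok]
        System.out.println(shared.isEmpty());            // true

        try {
            shared.add("boom");                          // any write throws
        } catch (UnsupportedOperationException e) {
            System.out.println("emptyList() is read-only");
        }
    }
}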
@@ -5,6 +5,7 @@ import org.schabi.newpipe.extractor.InfoItemsCollector;
 import org.schabi.newpipe.extractor.exceptions.FoundAdException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 
+import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Vector;
@@ -111,7 +112,7 @@ public class StreamInfoItemsCollector extends InfoItemsCollector<StreamInfoItem,
     }
 
     public List<StreamInfoItem> getStreamInfoItemList() {
-        List<StreamInfoItem> siiList = new Vector<>();
+        List<StreamInfoItem> siiList = new ArrayList<>();
         for (InfoItem ii : super.getItems()) {
             if (ii instanceof StreamInfoItem) {
                 siiList.add((StreamInfoItem) ii);