Use an enum to make it clearer which source type is used.
This commit also enables a seamless transition for livestreams.
parent 3db37166b4
commit ba804c7d4a
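For orientation before reading the hunks: the change replaces VideoPlaybackResolver's Optional<Boolean> wasLastResolvedVideoAndAudioSeparated() with a three-valued SourceType enum exposed through getStreamSourceType(). A condensed sketch of the new surface, assembled only from the hunks below (not the full class, and without the PlaybackResolver plumbing):

    import java.util.Optional;

    public class VideoPlaybackResolver {

        /** Which kind of media source the last resolve() call produced. */
        public enum SourceType {
            LIVE_STREAM,                    // a live media source was built
            VIDEO_WITH_SEPARATED_AUDIO,     // video merged with a separate audio stream
            VIDEO_WITH_AUDIO_OR_AUDIO_ONLY  // muxed video+audio, or an audio-only stream
        }

        private SourceType streamSourceType;

        /** Empty until resolve() has recorded a source type. */
        public Optional<SourceType> getStreamSourceType() {
            return Optional.ofNullable(streamSourceType);
        }
    }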
@@ -178,6 +178,7 @@ import org.schabi.newpipe.player.playqueue.PlayQueueItemTouchCallback;
 import org.schabi.newpipe.player.resolver.AudioPlaybackResolver;
 import org.schabi.newpipe.player.resolver.MediaSourceTag;
 import org.schabi.newpipe.player.resolver.VideoPlaybackResolver;
+import org.schabi.newpipe.player.resolver.VideoPlaybackResolver.SourceType;
 import org.schabi.newpipe.player.seekbarpreview.SeekbarPreviewThumbnailHelper;
 import org.schabi.newpipe.player.seekbarpreview.SeekbarPreviewThumbnailHolder;
 import org.schabi.newpipe.util.DeviceUtils;
@@ -3293,8 +3294,9 @@ public final class Player implements
         if (audioPlayerSelected()) {
             return audioResolver.resolve(info);
         } else {
-            if (isAudioOnly
-                    && !videoResolver.wasLastResolvedVideoAndAudioSeparated().orElse(false)) {
+            if (isAudioOnly && videoResolver.getStreamSourceType().orElse(
+                    SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY)
+                    == SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY) {
                 // If the current info has only video streams with audio and if the stream is
                 // played as audio, we need to use the audio resolver, otherwise the video stream
                 // will be played in background.
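This hunk is where the Player consumes the new enum: when the stream is played as audio, the audio resolver is used only if the last resolved video source carries its own audio. A self-contained illustration of that check (the helper name and wrapper class are hypothetical, added only so the condition can be compiled and run on its own):

    import java.util.Optional;

    final class SourceTypeConditionExample {

        enum SourceType { LIVE_STREAM, VIDEO_WITH_SEPARATED_AUDIO, VIDEO_WITH_AUDIO_OR_AUDIO_ONLY }

        // Mirrors the condition above: switch to the audio resolver when the stream is
        // played as audio and the resolved video source has its audio muxed in, because
        // otherwise the video stream would be played in the background.
        static boolean useAudioResolver(final boolean isAudioOnly,
                                        final Optional<SourceType> lastSourceType) {
            return isAudioOnly
                    && lastSourceType.orElse(SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY)
                            == SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY;
        }

        public static void main(final String[] args) {
            // Muxed video+audio played as audio -> fall back to the audio resolver.
            System.out.println(useAudioResolver(true,
                    Optional.of(SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY))); // true
            // Video with separated audio keeps using the video resolver.
            System.out.println(useAudioResolver(true,
                    Optional.of(SourceType.VIDEO_WITH_SEPARATED_AUDIO))); // false
        }
    }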
@@ -4196,18 +4198,30 @@ public final class Player implements
         stream will be fetched and the video stream will be fetched again when the user return to a
         video player.
 
-        For audio streams: nothing is done, it's not needed to reload the player with the same
-        audio stream.
+        For audio streams and audio live streams: nothing is done, it's not needed to reload the
+        player with the same audio stream.
+
+        For video live streams: the play queue manager is not reloaded if the stream source is a
+        live source (see VideoPlaybackResolver#resolve()) and if that's not the case, the
+        requirements for video streams is applied.
 
         In the case where we don't know the index of the video renderer, the play queue manager
         is also reloaded. */
 
         final StreamType streamType = info.getStreamType();
+        final SourceType sourceType = videoResolver.getStreamSourceType()
+                .orElse(SourceType.VIDEO_WITH_SEPARATED_AUDIO);
+
+        final boolean isVideoWithSeparatedAudioOrVideoWithNoSeparatedAudioStreams =
+                sourceType == SourceType.VIDEO_WITH_SEPARATED_AUDIO
+                        || (sourceType == SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY
+                                && isNullOrEmpty(info.getAudioStreams()));
         final boolean isVideoStreamTypeAndIsVideoOnlyStreamOrNoAudioStreamsAvailable =
-                (streamType == StreamType.VIDEO_STREAM || streamType == StreamType.LIVE_STREAM)
-                        && (videoResolver.wasLastResolvedVideoAndAudioSeparated().orElse(false)
-                        || isNullOrEmpty(info.getAudioStreams()));
+                streamType == StreamType.VIDEO_STREAM
+                        && isVideoWithSeparatedAudioOrVideoWithNoSeparatedAudioStreams
+                        || (streamType == StreamType.LIVE_STREAM
+                                && (sourceType == SourceType.LIVE_STREAM
+                                        || isVideoWithSeparatedAudioOrVideoWithNoSeparatedAudioStreams));
 
         if (videoRenderIndex != RENDERER_UNAVAILABLE
                 && isVideoStreamTypeAndIsVideoOnlyStreamOrNoAudioStreamsAvailable) {
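The two local booleans in the hunk above fold into one predicate. The helper below is hypothetical and added only as a readability aid (in the diff the values stay in the local variables isVideoWithSeparatedAudioOrVideoWithNoSeparatedAudioStreams and isVideoStreamTypeAndIsVideoOnlyStreamOrNoAudioStreamsAvailable); it only restates how that flag is computed, and the nested enums are local stand-ins for the StreamType and SourceType used in the hunk:

    final class ReloadFlagExample {

        enum StreamType { VIDEO_STREAM, LIVE_STREAM, AUDIO_STREAM, AUDIO_LIVE_STREAM }
        enum SourceType { LIVE_STREAM, VIDEO_WITH_SEPARATED_AUDIO, VIDEO_WITH_AUDIO_OR_AUDIO_ONLY }

        static boolean videoOnlyOrNoAudioStreamsFlag(final StreamType streamType,
                                                     final SourceType sourceType,
                                                     final boolean hasAudioStreams) {
            // True when the resolved source keeps the audio separated, or muxes audio and
            // video together while the stream info exposes no audio streams at all.
            final boolean separatedAudioOrNoAudioStreams =
                    sourceType == SourceType.VIDEO_WITH_SEPARATED_AUDIO
                            || (sourceType == SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY
                                    && !hasAudioStreams);

            if (streamType == StreamType.VIDEO_STREAM) {
                return separatedAudioOrNoAudioStreams;
            }
            if (streamType == StreamType.LIVE_STREAM) {
                // Video live streams additionally count a resolved live source.
                return sourceType == SourceType.LIVE_STREAM || separatedAudioOrNoAudioStreams;
            }
            // Audio streams and audio live streams never set the flag.
            return false;
        }
    }

The remaining hunks below are in VideoPlaybackResolver itself, where the enum is declared, assigned in resolve(), and exposed through getStreamSourceType().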
@@ -32,11 +32,16 @@ public class VideoPlaybackResolver implements PlaybackResolver {
     private final PlayerDataSource dataSource;
     @NonNull
     private final QualityResolver qualityResolver;
+    private SourceType streamSourceType;
 
     @Nullable
     private String playbackQuality;
 
-    private Boolean wasLastResolvedVideoAndAudioSeparated;
+    public enum SourceType {
+        LIVE_STREAM,
+        VIDEO_WITH_SEPARATED_AUDIO,
+        VIDEO_WITH_AUDIO_OR_AUDIO_ONLY
+    }
 
     public VideoPlaybackResolver(@NonNull final Context context,
                                  @NonNull final PlayerDataSource dataSource,
@@ -49,10 +54,9 @@ public class VideoPlaybackResolver implements PlaybackResolver {
     @Override
     @Nullable
     public MediaSource resolve(@NonNull final StreamInfo info) {
-        boolean isVideoAndAudioSeparated = false;
-        try {
         final MediaSource liveSource = maybeBuildLiveMediaSource(dataSource, info);
         if (liveSource != null) {
+            streamSourceType = SourceType.LIVE_STREAM;
             return liveSource;
         }
 
@@ -90,7 +94,9 @@ public class VideoPlaybackResolver implements PlaybackResolver {
                         PlayerHelper.cacheKeyOf(info, audio),
                         MediaFormat.getSuffixById(audio.getFormatId()), tag);
                 mediaSources.add(audioSource);
-                isVideoAndAudioSeparated = true;
+                streamSourceType = SourceType.VIDEO_WITH_SEPARATED_AUDIO;
+            } else {
+                streamSourceType = SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY;
             }
 
             // If there is no audio or video sources, then this media source cannot be played back
@@ -122,9 +128,6 @@ public class VideoPlaybackResolver implements PlaybackResolver {
             return new MergingMediaSource(mediaSources.toArray(
                     new MediaSource[0]));
         }
-        } finally {
-            wasLastResolvedVideoAndAudioSeparated = isVideoAndAudioSeparated;
-        }
     }
 
     /**
@@ -134,8 +137,8 @@ public class VideoPlaybackResolver implements PlaybackResolver {
      * @return {@link Optional#empty()} if nothing was resolved, otherwise {@code true} or
      * {@code false}
      */
-    public Optional<Boolean> wasLastResolvedVideoAndAudioSeparated() {
-        return Optional.ofNullable(wasLastResolvedVideoAndAudioSeparated);
+    public Optional<SourceType> getStreamSourceType() {
+        return Optional.ofNullable(streamSourceType);
     }
 
     @Nullable