video: apply SAR, identify selected streams

This commit is contained in:
Thibault Deckers 2021-04-15 16:03:51 +09:00
parent 484baaaccb
commit 9128380017
5 changed files with 127 additions and 19 deletions

View file

@@ -35,6 +35,9 @@ class Keys {
static const sampleRate = 'sample_rate'; static const sampleRate = 'sample_rate';
static const sarDen = 'sar_den'; static const sarDen = 'sar_den';
static const sarNum = 'sar_num'; static const sarNum = 'sar_num';
static const selectedAudioStream = 'audio';
static const selectedTextStream = 'timedtext';
static const selectedVideoStream = 'video';
static const startMicros = 'start_us'; static const startMicros = 'start_us';
static const statisticsTags = '_statistics_tags'; static const statisticsTags = '_statistics_tags';
static const statisticsWritingApp = '_statistics_writing_app'; static const statisticsWritingApp = '_statistics_writing_app';
@@ -43,6 +46,7 @@ class Keys {
static const tbrDen = 'tbr_den'; static const tbrDen = 'tbr_den';
static const tbrNum = 'tbr_num'; static const tbrNum = 'tbr_num';
static const streamType = 'type'; static const streamType = 'type';
static const title = 'title';
static const track = 'track'; static const track = 'track';
static const width = 'width'; static const width = 'width';
} }

View file

@@ -96,11 +96,14 @@ class VideoMetadataFormatter {
case Keys.handlerName: case Keys.handlerName:
case Keys.index: case Keys.index:
case Keys.sarNum: case Keys.sarNum:
case Keys.selectedAudioStream:
case Keys.selectedTextStream:
case Keys.selectedVideoStream:
case Keys.statisticsTags:
case Keys.streams: case Keys.streams:
case Keys.streamType:
case Keys.tbrNum: case Keys.tbrNum:
case Keys.tbrDen: case Keys.tbrDen:
case Keys.statisticsTags:
case Keys.streamType:
break; break;
case Keys.androidCaptureFramerate: case Keys.androidCaptureFramerate:
final captureFps = double.parse(value); final captureFps = double.parse(value);

View file

@@ -4,10 +4,14 @@ import 'dart:ui';
import 'package:aves/model/entry.dart'; import 'package:aves/model/entry.dart';
import 'package:aves/model/settings/settings.dart'; import 'package:aves/model/settings/settings.dart';
import 'package:aves/model/settings/video_loop_mode.dart'; import 'package:aves/model/settings/video_loop_mode.dart';
import 'package:aves/model/video/keys.dart';
import 'package:aves/model/video/metadata.dart';
import 'package:aves/utils/change_notifier.dart'; import 'package:aves/utils/change_notifier.dart';
import 'package:aves/widgets/common/video/controller.dart'; import 'package:aves/widgets/common/video/controller.dart';
import 'package:fijkplayer/fijkplayer.dart'; import 'package:fijkplayer/fijkplayer.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:tuple/tuple.dart';
class IjkPlayerAvesVideoController extends AvesVideoController { class IjkPlayerAvesVideoController extends AvesVideoController {
FijkPlayer _instance; FijkPlayer _instance;
@@ -15,10 +19,16 @@ class IjkPlayerAvesVideoController extends AvesVideoController {
final StreamController<FijkValue> _valueStreamController = StreamController.broadcast(); final StreamController<FijkValue> _valueStreamController = StreamController.broadcast();
final AChangeNotifier _completedNotifier = AChangeNotifier(); final AChangeNotifier _completedNotifier = AChangeNotifier();
Offset _macroBlockCrop = Offset.zero; Offset _macroBlockCrop = Offset.zero;
final List<StreamSummary> _streams = [];
final ValueNotifier<StreamSummary> _selectedVideoStream = ValueNotifier(null);
final ValueNotifier<StreamSummary> _selectedAudioStream = ValueNotifier(null);
final ValueNotifier<StreamSummary> _selectedTextStream = ValueNotifier(null);
final ValueNotifier<Tuple2<int, int>> _sar = ValueNotifier(Tuple2(1, 1));
Stream<FijkValue> get _valueStream => _valueStreamController.stream; Stream<FijkValue> get _valueStream => _valueStreamController.stream;
IjkPlayerAvesVideoController(AvesEntry entry) { IjkPlayerAvesVideoController(AvesEntry entry) {
FijkLog.setLevel(FijkLogLevel.Warn);
_instance = FijkPlayer(); _instance = FijkPlayer();
// FFmpeg options // FFmpeg options
@@ -81,7 +91,56 @@ class IjkPlayerAvesVideoController extends AvesVideoController {
_instance.release(); _instance.release();
} }
void _onValueChanged() => _valueStreamController.add(_instance.value); void _fetchSelectedStreams() async {
final mediaInfo = await _instance.getInfo();
if (!mediaInfo.containsKey(Keys.streams)) return;
_streams.clear();
final allStreams = (mediaInfo[Keys.streams] as List).cast<Map>();
allStreams.forEach((stream) {
final type = ExtraStreamType.fromTypeString(stream[Keys.streamType]);
if (type != null) {
_streams.add(StreamSummary(
type: type,
index: stream[Keys.index],
language: stream[Keys.language],
title: stream[Keys.title],
));
}
});
StreamSummary _getSelectedStream(String selectedIndexKey) {
final indexString = mediaInfo[selectedIndexKey];
if (indexString != null) {
final index = int.tryParse(indexString);
if (index != null && index != -1) {
return _streams.firstWhere((stream) => stream.index == index, orElse: () => null);
}
}
return null;
}
_selectedVideoStream.value = _getSelectedStream(Keys.selectedVideoStream);
_selectedAudioStream.value = _getSelectedStream(Keys.selectedAudioStream);
_selectedTextStream.value = _getSelectedStream(Keys.selectedTextStream);
if (_selectedVideoStream.value != null) {
final streamIndex = _selectedVideoStream.value.index;
final streamInfo = allStreams.firstWhere((stream) => stream[Keys.index] == streamIndex, orElse: () => null);
if (streamInfo != null) {
final num = streamInfo[Keys.sarNum];
final den = streamInfo[Keys.sarDen];
_sar.value = Tuple2((num ?? 0) != 0 ? num : 1, (den ?? 0) != 0 ? den : 1);
}
}
}
void _onValueChanged() {
if (_instance.state == FijkState.prepared && _streams.isEmpty) {
_fetchSelectedStreams();
}
_valueStreamController.add(_instance.value);
}
// enable autoplay, even when seeking on uninitialized player, otherwise the texture is not updated // enable autoplay, even when seeking on uninitialized player, otherwise the texture is not updated
// as a workaround, pausing after a brief duration is possible, but fiddly // as a workaround, pausing after a brief duration is possible, but fiddly
@@ -126,19 +185,27 @@ class IjkPlayerAvesVideoController extends AvesVideoController {
@override @override
Widget buildPlayerWidget(BuildContext context, AvesEntry entry) { Widget buildPlayerWidget(BuildContext context, AvesEntry entry) {
// TODO TLAD derive DAR (Display Aspect Ratio) from SAR (Storage Aspect Ratio), if any return ValueListenableBuilder<Tuple2<int, int>>(
// e.g. 960x536 (~16:9) with SAR 4:3 should be displayed as ~2.39:1 valueListenable: _sar,
return FijkView( builder: (context, sar, child) {
player: _instance, final sarNum = sar.item1;
fit: FijkFit( final sarDen = sar.item2;
sizeFactor: 1.0, // derive DAR (Display Aspect Ratio) from SAR (Storage Aspect Ratio), if any
aspectRatio: -1, // e.g. 960x536 (~16:9) with SAR 4:3 should be displayed as ~2.39:1
alignment: Alignment.topLeft, final dar = entry.displayAspectRatio * sarNum / sarDen;
macroBlockCrop: _macroBlockCrop, // TODO TLAD notify SAR to make the magnifier and minimap use the rendering DAR instead of entry DAR
), return FijkView(
panelBuilder: (player, data, context, viewSize, texturePos) => SizedBox(), player: _instance,
color: Colors.transparent, fit: FijkFit(
); sizeFactor: 1.0,
aspectRatio: dar,
alignment: Alignment.topLeft,
macroBlockCrop: _macroBlockCrop,
),
panelBuilder: (player, data, context, viewSize, texturePos) => SizedBox(),
color: Colors.transparent,
);
});
} }
} }
@@ -165,3 +232,37 @@ extension ExtraIjkStatus on FijkState {
return VideoStatus.idle; return VideoStatus.idle;
} }
} }
/// Broad categories of media streams handled by the video controller.
///
/// Subtitle and timed-text streams are both folded into [StreamType.text].
enum StreamType {
  video,
  audio,
  text,
}
extension ExtraStreamType on StreamType {
  /// Maps a raw stream type string (as reported by the player) to a
  /// [StreamType], or null when the string is not a recognized type.
  ///
  /// Both subtitle and timed-text streams map to [StreamType.text].
  static StreamType fromTypeString(String type) {
    if (type == StreamTypes.video) return StreamType.video;
    if (type == StreamTypes.audio) return StreamType.audio;
    if (type == StreamTypes.subtitle || type == StreamTypes.timedText) return StreamType.text;
    return null;
  }
}
/// Immutable summary of a single media stream (video, audio or text):
/// its type, index within the media, and optional language/title metadata.
class StreamSummary {
  final StreamType type;
  final int index;
  final String language, title;

  const StreamSummary({
    @required this.type,
    @required this.index,
    @required this.language,
    @required this.title,
  });

  @override
  // fixed: `type` was missing the `$` interpolation marker,
  // so it printed the literal word "type" instead of the value
  String toString() => '$runtimeType#${shortHash(this)}{type: $type, index: $index, language: $language, title: $title}';
}

View file

@@ -189,7 +189,7 @@ class _MetadataSectionSliverState extends State<MetadataSectionSliver> with Auto
directories.add(MetadataDirectory(MetadataDirectory.mediaDirectory, null, _toSortedTags(formattedMediaTags))); directories.add(MetadataDirectory(MetadataDirectory.mediaDirectory, null, _toSortedTags(formattedMediaTags)));
} }
if (mediaInfo.containsKey('streams')) { if (mediaInfo.containsKey(Keys.streams)) {
String getTypeText(Map stream) { String getTypeText(Map stream) {
final type = stream[Keys.streamType] ?? StreamTypes.unknown; final type = stream[Keys.streamType] ?? StreamTypes.unknown;
switch (type) { switch (type) {
@@ -208,7 +208,7 @@ class _MetadataSectionSliverState extends State<MetadataSectionSliver> with Auto
} }
} }
final allStreams = (mediaInfo['streams'] as List).cast<Map>(); final allStreams = (mediaInfo[Keys.streams] as List).cast<Map>();
final unknownStreams = allStreams.where((stream) => stream[Keys.streamType] == StreamTypes.unknown).toList(); final unknownStreams = allStreams.where((stream) => stream[Keys.streamType] == StreamTypes.unknown).toList();
final knownStreams = allStreams.whereNot(unknownStreams.contains); final knownStreams = allStreams.whereNot(unknownStreams.contains);

View file

@@ -211,7 +211,7 @@ packages:
description: description:
path: "." path: "."
ref: aves ref: aves
resolved-ref: "3c6f4e0d350416932b3a4efcbf1833b7eaf4adc1" resolved-ref: "8fcf94a57e2a77a79d255f4499e26503ad411769"
url: "git://github.com/deckerst/fijkplayer.git" url: "git://github.com/deckerst/fijkplayer.git"
source: git source: git
version: "0.8.7" version: "0.8.7"