video: track selection, embedded subtitle support, replay 10s

Thibault Deckers 2021-06-15 17:18:03 +09:00
parent 37e4d21277
commit 68b367a427
25 changed files with 529 additions and 85 deletions

View file

@ -93,10 +93,16 @@
"entryActionRemoveFavourite": "Remove from favourites",
"@entryActionRemoveFavourite": {},
"videoActionCaptureFrame": "Capture frame",
"@videoActionCaptureFrame": {},
"videoActionPause": "Pause",
"@videoActionPause": {},
"videoActionPlay": "Play",
"@videoActionPlay": {},
"videoActionReplay10": "Seek backward 10 seconds",
"@videoActionReplay10": {},
"videoActionSelectStreams": "Select tracks",
"@videoActionSelectStreams": {},
"videoActionSetSpeed": "Playback speed",
"@videoActionSetSpeed": {},
@ -273,6 +279,19 @@
"videoSpeedDialogLabel": "Playback speed",
"@videoSpeedDialogLabel": {},
"videoStreamSelectionDialogVideo": "Video",
"@videoStreamSelectionDialogVideo": {},
"videoStreamSelectionDialogAudio": "Audio",
"@videoStreamSelectionDialogAudio": {},
"videoStreamSelectionDialogText": "Subtitles",
"@videoStreamSelectionDialogText": {},
"videoStreamSelectionDialogOff": "Off",
"@videoStreamSelectionDialogOff": {},
"videoStreamSelectionDialogTrack": "Track",
"@videoStreamSelectionDialogTrack": {},
"videoStreamSelectionDialogNoSelection": "There are no other tracks.",
"@videoStreamSelectionDialogNoSelection": {},
"genericSuccessFeedback": "Done!",
"@genericSuccessFeedback": {},
"genericFailureFeedback": "Failed",

View file

@ -47,8 +47,11 @@
"entryActionAddFavourite": "즐겨찾기에 추가",
"entryActionRemoveFavourite": "즐겨찾기에서 삭제",
"videoActionCaptureFrame": "프레임 캡처",
"videoActionPause": "일시정지",
"videoActionPlay": "재생",
"videoActionReplay10": "10초 뒤로 탐색",
"videoActionSelectStreams": "트랙 선택",
"videoActionSetSpeed": "재생 배속",
"filterFavouriteLabel": "즐겨찾기",
@ -124,6 +127,13 @@
"videoSpeedDialogLabel": "재생 배속",
"videoStreamSelectionDialogVideo": "동영상",
"videoStreamSelectionDialogAudio": "오디오",
"videoStreamSelectionDialogText": "자막",
"videoStreamSelectionDialogOff": "해제",
"videoStreamSelectionDialogTrack": "트랙",
"videoStreamSelectionDialogNoSelection": "다른 트랙이 없습니다.",
"genericSuccessFeedback": "정상 처리됐습니다",
"genericFailureFeedback": "오류가 발생했습니다",

View file

@ -3,35 +3,53 @@ import 'package:aves/widgets/common/extensions/build_context.dart';
import 'package:flutter/widgets.dart';
enum VideoAction {
captureFrame,
replay10,
selectStreams,
setSpeed,
togglePlay,
}
class VideoActions {
static const all = [
VideoAction.replay10,
VideoAction.togglePlay,
// VideoAction.captureFrame,
VideoAction.setSpeed,
VideoAction.selectStreams,
];
}
extension ExtraVideoAction on VideoAction {
String getText(BuildContext context) {
switch (this) {
case VideoAction.captureFrame:
return context.l10n.videoActionCaptureFrame;
case VideoAction.replay10:
return context.l10n.videoActionReplay10;
case VideoAction.selectStreams:
return context.l10n.videoActionSelectStreams;
case VideoAction.setSpeed:
return context.l10n.videoActionSetSpeed;
case VideoAction.togglePlay:
// different data depending on toggle state
return context.l10n.videoActionPlay;
}
}
IconData? getIcon() {
switch (this) {
case VideoAction.captureFrame:
return AIcons.captureFrame;
case VideoAction.replay10:
return AIcons.replay10;
case VideoAction.selectStreams:
return AIcons.streams;
case VideoAction.setSpeed:
return AIcons.speed;
case VideoAction.togglePlay:
// different data depending on toggle state
return AIcons.play;
}
}
}

View file

@ -5,6 +5,7 @@ import 'package:aves/model/entry_cache.dart';
import 'package:aves/model/favourites.dart';
import 'package:aves/model/metadata.dart';
import 'package:aves/model/settings/settings.dart';
import 'package:aves/ref/mime_types.dart';
import 'package:aves/services/geocoding_service.dart';
import 'package:aves/services/service_policy.dart';
import 'package:aves/services/services.dart';
@ -18,8 +19,6 @@ import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:latlong2/latlong.dart';
class AvesEntry {
String uri;
String? _path, _directory, _filename, _extension;

View file

@ -1,7 +1,9 @@
import 'package:aves/model/actions/entry_actions.dart';
import 'package:aves/model/actions/video_actions.dart';
import 'package:aves/model/filters/filters.dart';
import 'package:aves/model/settings/enums.dart';
import 'package:aves/model/settings/screen_on.dart';
import 'package:aves/model/source/enums.dart';
import 'package:collection/collection.dart';
import 'package:firebase_analytics/firebase_analytics.dart';
import 'package:firebase_core/firebase_core.dart';
@ -10,9 +12,6 @@ import 'package:flutter/widgets.dart';
import 'package:pedantic/pedantic.dart';
import 'package:shared_preferences/shared_preferences.dart';
final Settings settings = Settings._private();
class Settings extends ChangeNotifier {
@ -56,6 +55,7 @@ class Settings extends ChangeNotifier {
static const enableVideoHardwareAccelerationKey = 'video_hwaccel_mediacodec';
static const enableVideoAutoPlayKey = 'video_auto_play';
static const videoLoopModeKey = 'video_loop';
static const videoShowRawTimedTextKey = 'video_show_raw_timed_text';
// info
static const infoMapStyleKey = 'info_map_style';
@ -252,6 +252,10 @@ class Settings extends ChangeNotifier {
set videoLoopMode(VideoLoopMode newValue) => setAndNotify(videoLoopModeKey, newValue.toString());
set videoShowRawTimedText(bool newValue) => setAndNotify(videoShowRawTimedTextKey, newValue);
bool get videoShowRawTimedText => getBoolOrDefault(videoShowRawTimedTextKey, false);
// info
EntryMapStyle get infoMapStyle => getEnumOrDefault(infoMapStyleKey, EntryMapStyle.stamenWatercolor, EntryMapStyle.values);

View file

@ -12,7 +12,6 @@ import 'package:aves/utils/string_utils.dart';
import 'package:aves/utils/time_utils.dart';
import 'package:aves/widgets/viewer/video/fijkplayer.dart';
import 'package:collection/collection.dart';
// ignore: import_of_legacy_library_into_null_safe
import 'package:fijkplayer/fijkplayer.dart';
import 'package:flutter/foundation.dart';

View file

@ -10,7 +10,6 @@ import 'package:aves/services/service_policy.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
// ignore: import_of_legacy_library_into_null_safe
import 'package:streams_channel/streams_channel.dart';
abstract class ImageFileService {

View file

@ -3,7 +3,6 @@ import 'dart:async';
import 'package:aves/model/entry.dart';
import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
// ignore: import_of_legacy_library_into_null_safe
import 'package:streams_channel/streams_channel.dart';
abstract class MediaStoreService {

View file

@ -3,7 +3,6 @@ import 'dart:async';
import 'package:aves/utils/android_file_utils.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
// ignore: import_of_legacy_library_into_null_safe
import 'package:streams_channel/streams_channel.dart';
abstract class StorageService {

View file

@ -33,6 +33,8 @@ class AIcons {
// actions
static const IconData addShortcut = Icons.add_to_home_screen_outlined;
static const IconData replay10 = Icons.replay_10_outlined;
static const IconData captureFrame = Icons.screenshot_outlined;
static const IconData clear = Icons.clear_outlined;
static const IconData createAlbum = Icons.add_circle_outline;
static const IconData debug = Icons.whatshot_outlined;
@ -61,6 +63,10 @@ class AIcons {
static const IconData sort = Icons.sort_outlined;
static const IconData speed = Icons.speed_outlined;
static const IconData stats = Icons.pie_chart_outlined;
static const IconData streams = Icons.translate_outlined;
static const IconData streamVideo = Icons.movie_outlined;
static const IconData streamAudio = Icons.audiotrack_outlined;
static const IconData streamText = Icons.closed_caption_outlined;
static const IconData zoomIn = Icons.add_outlined;
static const IconData zoomOut = Icons.remove_outlined;
static const IconData collapse = Icons.expand_less_outlined;
@ -72,7 +78,8 @@ class AIcons {
static const IconData album = Icons.photo_album_outlined;
static const IconData cameraAlbum = Icons.photo_camera_outlined;
static const IconData downloadAlbum = Icons.file_download;
static const IconData screenshotAlbum = Icons.screenshot_outlined;
static const IconData recordingAlbum = Icons.smartphone_outlined;
// thumbnail overlay
static const IconData animated = Icons.slideshow;

View file

@ -1,24 +1,21 @@
import 'package:flutter/material.dart';
typedef OutlinedWidgetBuilder = Widget Function(BuildContext context, bool isShadow);
class OutlinedText extends StatelessWidget {
final OutlinedWidgetBuilder? leadingBuilder, trailingBuilder;
final String text;
final TextStyle style;
final double outlineWidth;
final Color outlineColor;
final TextAlign? textAlign;
static const widgetSpanAlignment = PlaceholderAlignment.middle;
const OutlinedText({
Key? key,
this.leadingBuilder,
required this.text,
this.trailingBuilder,
required this.style,
double? outlineWidth,
Color? outlineColor,
this.textAlign,
}) : outlineWidth = outlineWidth ?? 1,
outlineColor = outlineColor ?? Colors.black,
super(key: key);
@ -30,11 +27,6 @@ class OutlinedText extends StatelessWidget {
Text.rich(
TextSpan(
children: [
if (leadingBuilder != null)
WidgetSpan(
alignment: widgetSpanAlignment,
child: leadingBuilder!(context, true),
),
TextSpan(
text: text,
style: style.copyWith(
@ -44,33 +36,20 @@ class OutlinedText extends StatelessWidget {
..color = outlineColor,
),
),
if (trailingBuilder != null)
WidgetSpan(
alignment: widgetSpanAlignment,
child: trailingBuilder!(context, true),
),
],
),
textAlign: textAlign,
),
Text.rich(
TextSpan(
children: [
if (leadingBuilder != null)
WidgetSpan(
alignment: widgetSpanAlignment,
child: leadingBuilder!(context, false),
),
TextSpan(
text: text,
style: style,
),
if (trailingBuilder != null)
WidgetSpan(
alignment: widgetSpanAlignment,
child: trailingBuilder!(context, false),
),
],
),
textAlign: textAlign,
),
],
);

View file

@ -199,8 +199,9 @@ class IconUtils {
case AlbumType.camera:
return buildIcon(AIcons.cameraAlbum);
case AlbumType.screenshots:
return buildIcon(AIcons.screenshotAlbum);
case AlbumType.screenRecordings:
return buildIcon(AIcons.recordingAlbum);
case AlbumType.download:
return buildIcon(AIcons.downloadAlbum);
case AlbumType.app:

View file

@ -1,10 +1,9 @@
import 'package:aves/widgets/common/magnifier/core/scale_gesture_recognizer.dart';
import 'package:aves/widgets/common/magnifier/pan/corner_hit_detector.dart';
import 'package:aves/widgets/common/magnifier/pan/gesture_detector_scope.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/widgets.dart';
class MagnifierGestureDetector extends StatefulWidget {
const MagnifierGestureDetector({
Key? key,

View file

@ -1,10 +1,9 @@
import 'dart:math';
import 'package:aves/widgets/common/magnifier/pan/corner_hit_detector.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/widgets.dart';
class MagnifierGestureRecognizer extends ScaleGestureRecognizer {
final CornerHitDetector hitDetector;
final List<Axis> validateAxis;

View file

@ -29,6 +29,11 @@ class DebugSettingsSection extends StatelessWidget {
onChanged: (v) => settings.hasAcceptedTerms = v,
title: const Text('hasAcceptedTerms'),
),
SwitchListTile(
value: settings.videoShowRawTimedText,
onChanged: (v) => settings.videoShowRawTimedText = v,
title: const Text('videoShowRawTimedText'),
),
Padding(
padding: const EdgeInsets.only(left: 8, right: 8, bottom: 8),
child: InfoRowGroup({

View file

@ -2,10 +2,9 @@ import 'dart:io';
import 'package:aves/services/services.dart';
import 'package:aves/widgets/common/extensions/build_context.dart';
import 'package:aves/widgets/dialogs/aves_dialog.dart';
import 'package:flutter/material.dart';
class RenameAlbumDialog extends StatefulWidget {
final String album;

View file

@ -0,0 +1,184 @@
import 'package:aves/model/entry.dart';
import 'package:aves/ref/languages.dart';
import 'package:aves/theme/icons.dart';
import 'package:aves/widgets/common/extensions/build_context.dart';
import 'package:aves/widgets/viewer/video/controller.dart';
import 'package:collection/collection.dart';
import 'package:flutter/material.dart';
import 'aves_dialog.dart';
class VideoStreamSelectionDialog extends StatefulWidget {
final Map<StreamSummary, bool> streams;
const VideoStreamSelectionDialog({
required this.streams,
});
@override
_VideoStreamSelectionDialogState createState() => _VideoStreamSelectionDialogState();
}
class _VideoStreamSelectionDialogState extends State<VideoStreamSelectionDialog> {
late List<StreamSummary?> _videoStreams, _audioStreams, _textStreams;
StreamSummary? _currentVideo, _currentAudio, _currentText;
@override
void initState() {
super.initState();
final byType = groupBy<StreamSummary, StreamType>(widget.streams.keys, (v) => v.type);
// check width/height to exclude image streams (that are included among video streams)
_videoStreams = (byType[StreamType.video] ?? []).where((v) => v.width != null && v.height != null).toList();
_audioStreams = (byType[StreamType.audio] ?? []);
_textStreams = (byType[StreamType.text] ?? [])..insert(0, null);
final streamEntries = widget.streams.entries;
_currentVideo = streamEntries.firstWhereOrNull((kv) => kv.key.type == StreamType.video && kv.value)?.key;
_currentAudio = streamEntries.firstWhereOrNull((kv) => kv.key.type == StreamType.audio && kv.value)?.key;
_currentText = streamEntries.firstWhereOrNull((kv) => kv.key.type == StreamType.text && kv.value)?.key;
}
@override
Widget build(BuildContext context) {
final canSelectVideo = _videoStreams.length > 1;
final canSelectAudio = _audioStreams.length > 1;
final canSelectText = _textStreams.length > 1;
final canSelect = canSelectVideo || canSelectAudio || canSelectText;
return AvesDialog(
context: context,
scrollableContent: canSelect
? [
if (canSelectVideo)
..._buildSection(
icon: AIcons.streamVideo,
title: context.l10n.videoStreamSelectionDialogVideo,
streams: _videoStreams,
current: _currentVideo,
setter: (v) => _currentVideo = v,
),
if (canSelectAudio)
..._buildSection(
icon: AIcons.streamAudio,
title: context.l10n.videoStreamSelectionDialogAudio,
streams: _audioStreams,
current: _currentAudio,
setter: (v) => _currentAudio = v,
),
if (canSelectText)
..._buildSection(
icon: AIcons.streamText,
title: context.l10n.videoStreamSelectionDialogText,
streams: _textStreams,
current: _currentText,
setter: (v) => _currentText = v,
),
const SizedBox(height: 8),
]
: [
Container(
alignment: Alignment.center,
padding: const EdgeInsets.all(16),
child: Text(context.l10n.videoStreamSelectionDialogNoSelection),
),
],
actions: [
TextButton(
onPressed: () => Navigator.pop(context),
child: Text(MaterialLocalizations.of(context).cancelButtonLabel),
),
TextButton(
onPressed: () => _submit(context),
child: Text(context.l10n.applyButtonLabel),
),
],
);
}
static String _formatLanguage(String value) {
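// e.g. an ISO 639-2 code like 'fra' would resolve to the language's native name ('Français'),
// assuming the `Language.living639_2` entries expose `iso639_2` and `native` as used below;
// unknown codes fall back to the raw value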
final language = Language.living639_2.firstWhereOrNull((language) => language.iso639_2 == value);
return language?.native ?? value;
}
String _commonStreamName(StreamSummary? stream) {
if (stream == null) return context.l10n.videoStreamSelectionDialogOff;
final title = stream.title;
final language = stream.language;
if (language != null && language != 'und') {
final formattedLanguage = _formatLanguage(language);
return '$formattedLanguage${title != null && title != formattedLanguage ? ' • $title' : ''}';
} else if (title != null) {
return title;
} else {
return '${context.l10n.videoStreamSelectionDialogTrack} ${stream.index} (${stream.codecName})';
}
}
String _streamName(StreamSummary? stream) {
final common = _commonStreamName(stream);
if (stream != null && stream.type == StreamType.video) {
final w = stream.width;
final h = stream.height;
if (w != null && h != null) {
return '$common • $w${AvesEntry.resolutionSeparator}$h';
}
}
return common;
}
DropdownMenuItem<StreamSummary> _buildMenuItem(StreamSummary? value) {
return DropdownMenuItem(
value: value,
child: Text(_streamName(value)),
);
}
Widget _buildSelectedItem(StreamSummary? v) {
return Align(
alignment: AlignmentDirectional.centerStart,
child: Text(
_streamName(v),
softWrap: false,
overflow: TextOverflow.fade,
maxLines: 1,
),
);
}
List<Widget> _buildSection({
required IconData icon,
required String title,
required List<StreamSummary?> streams,
required StreamSummary? current,
required ValueSetter<StreamSummary?> setter,
}) {
return [
Padding(
padding: const EdgeInsets.only(left: 16, top: 16, right: 16),
child: Row(
children: [
Icon(icon),
const SizedBox(width: 16),
Text(title),
],
),
),
Padding(
padding: const EdgeInsets.symmetric(horizontal: 16),
child: DropdownButton<StreamSummary>(
items: streams.map(_buildMenuItem).toList(),
selectedItemBuilder: (context) => streams.map(_buildSelectedItem).toList(),
value: current,
onChanged: streams.length > 1 ? (newValue) => setState(() => setter(newValue)) : null,
isExpanded: true,
),
),
];
}
void _submit(BuildContext context) => Navigator.pop(context, {
StreamType.video: _currentVideo,
StreamType.audio: _currentAudio,
StreamType.text: _currentText,
});
}

View file

@ -12,6 +12,7 @@ import 'package:aves/widgets/common/extensions/build_context.dart';
import 'package:aves/widgets/common/fx/blurred.dart';
import 'package:aves/widgets/common/fx/borders.dart';
import 'package:aves/widgets/dialogs/video_speed_dialog.dart';
import 'package:aves/widgets/dialogs/video_stream_selection_dialog.dart';
import 'package:aves/widgets/viewer/overlay/common.dart';
import 'package:aves/widgets/viewer/overlay/notifications.dart';
import 'package:aves/widgets/viewer/video/controller.dart';
@ -219,6 +220,9 @@ class _ButtonRow extends StatelessWidget {
onPressed: onPressed,
);
break;
case VideoAction.captureFrame:
case VideoAction.replay10:
case VideoAction.selectStreams:
case VideoAction.setSpeed:
child = IconButton(
icon: Icon(action.getIcon()),
@ -245,6 +249,9 @@ class _ButtonRow extends StatelessWidget {
isMenuItem: true,
);
break;
case VideoAction.captureFrame:
case VideoAction.replay10:
case VideoAction.selectStreams:
case VideoAction.setSpeed:
child = MenuRow(text: action.getText(context), icon: action.getIcon());
break;
@ -263,6 +270,20 @@ class _ButtonRow extends StatelessWidget {
case VideoAction.setSpeed:
_showSpeedDialog(context);
break;
case VideoAction.selectStreams:
_showStreamSelectionDialog(context);
break;
case VideoAction.captureFrame:
controller?.captureFrame();
break;
case VideoAction.replay10:
{
final _controller = controller;
if (_controller != null && _controller.isReady) {
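// seekTo() clamps negative targets to 0 (cf fijkplayer.dart), so replaying within the first 10 seconds restarts the video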
_controller.seekTo(_controller.currentPosition - 10000);
}
break;
}
}
}
@ -283,6 +304,25 @@ class _ButtonRow extends StatelessWidget {
_controller.speed = newSpeed;
}
Future<void> _showStreamSelectionDialog(BuildContext context) async {
final _controller = controller;
if (_controller == null) return;
final selectedStreams = await showDialog<Map<StreamType, StreamSummary>>(
context: context,
builder: (context) => VideoStreamSelectionDialog(
streams: _controller.streams,
),
);
if (selectedStreams == null || selectedStreams.isEmpty) return;
// TODO TLAD [video] get stream list & guess default selected streams, when the controller is not initialized yet
await Future.forEach<MapEntry<StreamType, StreamSummary>>(
selectedStreams.entries,
(kv) => _controller.selectStream(kv.key, kv.value),
);
}
Future<void> _togglePlayPause(BuildContext context) async {
final _controller = controller;
if (_controller == null) return;

View file

@ -2,18 +2,12 @@ import 'package:aves/model/entry.dart';
import 'package:aves/widgets/viewer/video/controller.dart';
import 'package:aves/widgets/viewer/video/fijkplayer.dart';
import 'package:collection/collection.dart';
// ignore: import_of_legacy_library_into_null_safe
import 'package:fijkplayer/fijkplayer.dart';
class VideoConductor {
final List<AvesVideoController> _controllers = [];
static const maxControllerCount = 3;
VideoConductor() {
FijkLog.setLevel(FijkLogLevel.Warn);
}
Future<void> dispose() async {
await Future.forEach<AvesVideoController>(_controllers, (controller) => controller.dispose());
_controllers.clear();

View file

@ -37,6 +37,8 @@ abstract class AvesVideoController {
Stream<int> get positionStream;
Stream<String?> get timedTextStream;
ValueNotifier<double> get sarNotifier;
double get speed;
@ -47,6 +49,14 @@ abstract class AvesVideoController {
set speed(double speed);
Future<void> selectStream(StreamType type, StreamSummary? selected);
Future<StreamSummary?> getSelectedStream(StreamType type);
Map<StreamSummary, bool> get streams;
Future<void> captureFrame();
Widget buildPlayerWidget(BuildContext context);
}
@ -58,3 +68,24 @@ enum VideoStatus {
completed,
error,
}
enum StreamType { video, audio, text }
class StreamSummary {
final StreamType type;
final int? index, width, height;
final String? codecName, language, title;
const StreamSummary({
required this.type,
required this.index,
required this.codecName,
required this.language,
required this.title,
required this.width,
required this.height,
});
@override
String toString() => '$runtimeType#${shortHash(this)}{type: $type, index: $index, codecName: $codecName, language: $language, title: $title, width: $width, height: $height}';
}

View file

@ -1,4 +1,5 @@
import 'dart:async';
import 'dart:math';
import 'dart:ui';
import 'package:aves/model/entry.dart';
@ -9,13 +10,12 @@ import 'package:aves/model/video/metadata.dart';
import 'package:aves/utils/change_notifier.dart';
import 'package:aves/widgets/viewer/video/controller.dart';
import 'package:collection/collection.dart';
// ignore: import_of_legacy_library_into_null_safe
import 'package:fijkplayer/fijkplayer.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
class IjkPlayerAvesVideoController extends AvesVideoController {
static bool _staticInitialized = false;
late FijkPlayer _instance;
final List<StreamSubscription> _subscriptions = [];
final StreamController<FijkValue> _valueStreamController = StreamController.broadcast();
@ -46,6 +46,10 @@ class IjkPlayerAvesVideoController extends AvesVideoController {
static const gifLikeVideoDurationThreshold = Duration(seconds: 10);
IjkPlayerAvesVideoController(AvesEntry entry) : super(entry) {
if (!_staticInitialized) {
FijkLog.setLevel(FijkLogLevel.Warn);
_staticInitialized = true;
}
_instance = FijkPlayer();
_startListening();
}
@ -99,10 +103,10 @@ class IjkPlayerAvesVideoController extends AvesVideoController {
// so HW acceleration is always disabled for GIF-like videos where the last frames may be significant
final hwAccelerationEnabled = settings.enableVideoHardwareAcceleration && entry.durationMillis! > gifLikeVideoDurationThreshold.inMilliseconds;
// TODO TLAD [video] HW codecs sometimes fail when seek-starting some videos, e.g. MP2TS/h264(HDPR)
if (hwAccelerationEnabled) {
// when HW acceleration is enabled, videos with dimensions that do not fit 16x macroblocks need cropping
// TODO TLAD [video] not all formats/devices need this correction, e.g. 498x278 MP4 on S7, 408x244 WEBM on S10e do not
final s = entry.displaySize % 16 * -1 % 16;
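// e.g. (illustrative dimensions) a 420x250 video: 420 % 16 = 4 → s.width = 12, 250 % 16 = 10 → s.height = 6,
// i.e. the padding added to round each dimension up to the next 16px macroblock boundary (432x256),
// which is what needs to be cropped off the decoded surface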
_macroBlockCrop = Offset(s.width, s.height);
}
@ -113,6 +117,12 @@ class IjkPlayerAvesVideoController extends AvesVideoController {
// in practice the flag seems ineffective, but harmless too
options.setFormatOption('fflags', 'fastseek');
// `enable-snapshot`: enable snapshot interface
// default: 0, in [0, 1]
// cf https://fijkplayer.befovy.com/docs/zh/host-option.html
// there is a performance cost, and it should be set up before playing
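// i.e. the snapshot interface backing `takeSnapShot()`, which `captureFrame()` further down relies on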
options.setHostOption('enable-snapshot', 1);
// `accurate-seek-timeout`: accurate seek timeout
// default: 5000 ms, in [0, 5000]
options.setPlayerOption('accurate-seek-timeout', 1000);
@ -125,6 +135,11 @@ class IjkPlayerAvesVideoController extends AvesVideoController {
// default: 0, in [0, 1]
options.setPlayerOption('enable-accurate-seek', accurateSeekEnabled ? 1 : 0);
// `min-frames`: minimal frames to stop pre-reading
// default: 50000, in [2, 50000]
// a comment in `IjkMediaPlayer.java` recommends setting this to 25 when de/selecting streams
options.setPlayerOption('min-frames', 25);
// `framedrop`: drop frames when cpu is too slow
// default: 0, in [-1, 120]
options.setPlayerOption('framedrop', 5);
@ -146,10 +161,9 @@ class IjkPlayerAvesVideoController extends AvesVideoController {
// slowed down videos with SoundTouch enabled have a weird wobbly audio
options.setPlayerOption('soundtouch', 0);
// `subtitle`: decode subtitle stream
// default: 0, in [0, 1]
options.setPlayerOption('subtitle', 1);
_instance.applyOptions(options);
}
@ -166,8 +180,11 @@ class IjkPlayerAvesVideoController extends AvesVideoController {
_streams.add(StreamSummary(
type: type,
index: stream[Keys.index],
codecName: stream[Keys.codecName],
language: stream[Keys.language],
title: stream[Keys.title],
width: stream[Keys.width] as int?,
height: stream[Keys.height] as int?,
));
}
});
@ -224,6 +241,7 @@ class IjkPlayerAvesVideoController extends AvesVideoController {
@override
Future<void> seekTo(int targetMillis) async {
targetMillis = max(0, targetMillis);
if (isReady) {
await _instance.seekTo(targetMillis);
} else {
@ -258,6 +276,9 @@ class IjkPlayerAvesVideoController extends AvesVideoController {
@override
Stream<int> get positionStream => _instance.onCurrentPosUpdate.map((pos) => pos.inMilliseconds);
@override
Stream<String?> get timedTextStream => _instance.onTimedText;
@override
double get speed => _speed;
@ -268,9 +289,62 @@ class IjkPlayerAvesVideoController extends AvesVideoController {
_applySpeed();
}
// TODO TLAD [video] setting speed fails when there is no audio stream or audio is disabled
void _applySpeed() => _instance.setSpeed(speed);
ValueNotifier<StreamSummary?> selectedStreamNotifier(StreamType type) {
switch (type) {
case StreamType.video:
return _selectedVideoStream;
case StreamType.audio:
return _selectedAudioStream;
case StreamType.text:
return _selectedTextStream;
}
}
// When a stream is selected, the video accelerates to catch up with it.
// The duration of this acceleration phase depends on the player `min-frames` parameter.
// Calling `seekTo` after stream de/selection is a workaround to:
// 1) prevent video stream acceleration to catch up with audio
// 2) apply timed text stream
@override
Future<void> selectStream(StreamType type, StreamSummary? selected) async {
final current = await getSelectedStream(type);
if (current != selected) {
if (selected != null) {
final newIndex = selected.index;
if (newIndex != null) {
await _instance.selectTrack(newIndex);
selectedStreamNotifier(type).value = selected;
}
} else if (current != null) {
await _instance.deselectTrack(current.index!);
selectedStreamNotifier(type).value = null;
}
await seekTo(currentPosition);
}
}
@override
Future<StreamSummary?> getSelectedStream(StreamType type) async {
final currentIndex = await _instance.getSelectedTrack(type.code);
return currentIndex != -1 ? _streams.firstWhereOrNull((v) => v.index == currentIndex) : null;
}
@override
Map<StreamSummary, bool> get streams {
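// maps every known stream to whether its index matches one of the currently selected video/audio/text streams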
final selectedIndices = {_selectedVideoStream, _selectedAudioStream, _selectedTextStream}.map((v) => v.value?.index).toSet();
return Map.fromEntries(_streams.map((stream) => MapEntry(stream, selectedIndices.contains(stream.index))));
}
@override
Future<void> captureFrame() async {
final bytes = await _instance.takeSnapShot();
// TODO TLAD [video] export to DCIM/Videocaptures
debugPrint('captureFrame bytes=${bytes.length}');
}
@override
Widget buildPlayerWidget(BuildContext context) {
return ValueListenableBuilder<double>(
@ -357,8 +431,6 @@ extension ExtraFijkPlayer on FijkPlayer {
}
}
extension ExtraStreamType on StreamType {
static StreamType? fromTypeString(String? type) {
switch (type) {
@ -373,20 +445,20 @@ extension ExtraStreamType on StreamType {
return null;
}
}
int get code {
// codes from ijkplayer ITrackInfo.java
switch (this) {
case StreamType.video:
return 1;
case StreamType.audio:
return 2;
case StreamType.text:
// TIMEDTEXT = 3, SUBTITLE = 4
return 3;
default:
// METADATA = 5, UNKNOWN = 0
return 0;
}
}

View file

@ -20,6 +20,7 @@ import 'package:aves/widgets/viewer/video/controller.dart';
import 'package:aves/widgets/viewer/visual/error.dart';
import 'package:aves/widgets/viewer/visual/raster.dart';
import 'package:aves/widgets/viewer/visual/state.dart';
import 'package:aves/widgets/viewer/visual/subtitle.dart';
import 'package:aves/widgets/viewer/visual/vector.dart';
import 'package:aves/widgets/viewer/visual/video.dart';
import 'package:flutter/foundation.dart';
@ -196,12 +197,19 @@ class _EntryPageViewState extends State<EntryPageView> {
ValueListenableBuilder<double>(
valueListenable: videoController.sarNotifier,
builder: (context, sar, child) {
return Stack(
children: [
_buildMagnifier(
displaySize: entry.videoDisplaySize(sar),
child: VideoView(
entry: entry,
controller: videoController,
),
),
VideoSubtitles(
controller: videoController,
),
],
);
}),
// fade out image to ease transition with the player

View file

@ -0,0 +1,80 @@
import 'package:aves/model/settings/settings.dart';
import 'package:aves/widgets/common/basic/outlined_text.dart';
import 'package:aves/widgets/viewer/video/controller.dart';
import 'package:flutter/material.dart';
import 'package:provider/provider.dart';
class VideoSubtitles extends StatelessWidget {
final AvesVideoController controller;
const VideoSubtitles({
Key? key,
required this.controller,
}) : super(key: key);
@override
Widget build(BuildContext context) {
return Selector<MediaQueryData, Orientation>(
selector: (c, mq) => mq.orientation,
builder: (c, orientation, child) {
return Align(
alignment: Alignment(0, orientation == Orientation.portrait ? .5 : .8),
child: StreamBuilder<String?>(
stream: controller.timedTextStream,
builder: (context, snapshot) {
final text = snapshot.data;
return text != null ? SubtitleText(text: text) : const SizedBox();
},
),
);
},
);
}
}
class SubtitleText extends StatelessWidget {
final String text;
const SubtitleText({
Key? key,
required this.text,
}) : super(key: key);
@override
Widget build(BuildContext context) {
late final String displayText;
if (settings.videoShowRawTimedText) {
displayText = text;
} else {
// TODO TLAD [video] process ASS tags, cf https://aegi.vmoe.info/docs/3.0/ASS_Tags/
// e.g. `And I'm like, "We can't {\i1}not{\i0} see it."`
// e.g. `{\fad(200,200)\blur3}lorem ipsum"`
// e.g. `{\fnCrapFLTSB\an9\bord5\fs70\c&H403A2D&\3c&HE5E5E8&\pos(1868.286,27.429)}lorem ipsum"`
// implement these with RegExp + TextSpans:
// \i: italics
// \b: bold
// \c: fill color
// \1c: fill color
// \3c: border color
// \r: reset
displayText = text.replaceAll(RegExp('{.*?}'), '');
}
return OutlinedText(
text: displayText,
style: const TextStyle(
fontSize: 20,
shadows: [
Shadow(
color: Colors.black54,
offset: Offset(1, 1),
),
],
),
outlineWidth: 1,
outlineColor: Colors.black,
textAlign: TextAlign.center,
);
}
}
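The TODO above spells out the intended approach (RegExp + TextSpans), while the build method currently just strips `{...}` override blocks wholesale. A minimal sketch of that idea in Dart, handling only the `\i` and `\b` toggles from the examples; the helper name `assOverridesToSpans` and its scope are illustrative assumptions, not part of this commit:

import 'package:flutter/material.dart';

List<TextSpan> assOverridesToSpans(String text, TextStyle baseStyle) {
  final spans = <TextSpan>[];
  var italic = false, bold = false;

  // style for the current run of plain text, reflecting the toggles seen so far
  TextStyle currentStyle() => baseStyle.copyWith(
        fontStyle: italic ? FontStyle.italic : FontStyle.normal,
        fontWeight: bold ? FontWeight.bold : FontWeight.normal,
      );

  var cursor = 0;
  for (final block in RegExp(r'\{(.*?)\}').allMatches(text)) {
    // emit the plain text preceding this override block with the current style
    if (block.start > cursor) {
      spans.add(TextSpan(text: text.substring(cursor, block.start), style: currentStyle()));
    }
    // update the toggles from tags such as `\i1` or `\b0`; any other tag is dropped,
    // matching the `replaceAll` fallback above
    for (final tag in RegExp(r'\\(i|b)([01])').allMatches(block.group(1)!)) {
      final on = tag.group(2) == '1';
      if (tag.group(1) == 'i') {
        italic = on;
      } else {
        bold = on;
      }
    }
    cursor = block.end;
  }
  if (cursor < text.length) {
    spans.add(TextSpan(text: text.substring(cursor), style: currentStyle()));
  }
  return spans;
}

With something like this, SubtitleText could build a Text.rich from the spans instead of flattening to a single string, though OutlinedText as written only accepts a plain String and would need a span-aware variant to keep the outline effect.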

View file

@ -218,7 +218,7 @@ packages:
description:
path: "."
ref: aves
resolved-ref: "33aaf201bf761a50755971ba714c80693717b9f9"
resolved-ref: "789162b567e2eaef4d6047cb85e77d9c915e1bed"
url: "git://github.com/deckerst/fijkplayer.git"
source: git
version: "0.9.0"