video: capture frame

This commit is contained in:
Thibault Deckers 2021-06-17 12:27:07 +09:00
parent d203e0fe2e
commit 0d879c41f4
21 changed files with 447 additions and 144 deletions

View file

@ -14,6 +14,7 @@ import deckers.thibault.aves.model.FieldMap
import deckers.thibault.aves.model.provider.ImageProvider.ImageOpCallback
import deckers.thibault.aves.model.provider.ImageProviderFactory.getProvider
import deckers.thibault.aves.utils.MimeTypes
import deckers.thibault.aves.utils.StorageUtils.ensureTrailingSeparator
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.MethodChannel.MethodCallHandler
@ -32,6 +33,7 @@ class ImageFileHandler(private val activity: Activity) : MethodCallHandler {
"getEntry" -> GlobalScope.launch(Dispatchers.IO) { safe(call, result, ::getEntry) }
"getThumbnail" -> GlobalScope.launch(Dispatchers.IO) { safesus(call, result, ::getThumbnail) }
"getRegion" -> GlobalScope.launch(Dispatchers.IO) { safesus(call, result, ::getRegion) }
"captureFrame" -> GlobalScope.launch(Dispatchers.IO) { safesus(call, result, ::captureFrame) }
"rename" -> GlobalScope.launch(Dispatchers.IO) { safesus(call, result, ::rename) }
"rotate" -> GlobalScope.launch(Dispatchers.IO) { safe(call, result, ::rotate) }
"flip" -> GlobalScope.launch(Dispatchers.IO) { safe(call, result, ::flip) }
@ -131,6 +133,30 @@ class ImageFileHandler(private val activity: Activity) : MethodCallHandler {
}
}
// Handles the `captureFrame` method-channel call:
// writes the provided frame `bytes` (with optional Exif fields) as a new image
// under `destinationPath`, delegating the actual write/scan to the provider
// resolved from the source `uri`. Reports the new entry fields via `result`.
private suspend fun captureFrame(call: MethodCall, result: MethodChannel.Result) {
val uri = call.argument<String>("uri")?.let { Uri.parse(it) }
val desiredName = call.argument<String>("desiredName")
// Exif fields are optional; default to an empty map so the provider can skip metadata editing
val exifFields = call.argument<FieldMap>("exif") ?: HashMap()
val bytes = call.argument<ByteArray>("bytes")
var destinationDir = call.argument<String>("destinationPath")
// all remaining arguments are required; fail once with a single error code
if (uri == null || desiredName == null || bytes == null || destinationDir == null) {
result.error("captureFrame-args", "failed because of missing arguments", null)
return
}
val provider = getProvider(uri)
if (provider == null) {
result.error("captureFrame-provider", "failed to find provider for uri=$uri", null)
return
}
// normalize so that downstream path concatenation yields "dir" + separator + "file"
destinationDir = ensureTrailingSeparator(destinationDir)
provider.captureFrame(activity, desiredName, exifFields, bytes, destinationDir, object : ImageOpCallback {
override fun onSuccess(fields: FieldMap) = result.success(fields)
override fun onFailure(throwable: Throwable) = result.error("captureFrame-failure", "failed to capture frame", throwable.message)
})
}
private suspend fun rename(call: MethodCall, result: MethodChannel.Result) {
val entryMap = call.argument<FieldMap>("entry")
val newName = call.argument<String>("newName")

View file

@ -18,7 +18,7 @@ import kotlin.math.roundToLong
object ExifInterfaceHelper {
private val LOG_TAG = LogUtils.createTag<ExifInterfaceHelper>()
private val DATETIME_FORMAT = SimpleDateFormat("yyyy:MM:dd hh:mm:ss", Locale.ROOT)
val DATETIME_FORMAT = SimpleDateFormat("yyyy:MM:dd HH:mm:ss", Locale.ROOT)
private const val precisionErrorTolerance = 1e-10

View file

@ -8,7 +8,7 @@ import java.util.*
object MetadataExtractorHelper {
const val PNG_TIME_DIR_NAME = "PNG-tIME"
val PNG_LAST_MODIFICATION_TIME_FORMAT = SimpleDateFormat("yyyy:MM:dd hh:mm:ss", Locale.ROOT)
val PNG_LAST_MODIFICATION_TIME_FORMAT = SimpleDateFormat("yyyy:MM:dd HH:mm:ss", Locale.ROOT)
// extensions

View file

@ -17,6 +17,7 @@ import com.bumptech.glide.request.RequestOptions
import com.commonsware.cwac.document.DocumentFileCompat
import deckers.thibault.aves.decoder.MultiTrackImage
import deckers.thibault.aves.decoder.TiffImage
import deckers.thibault.aves.metadata.ExifInterfaceHelper
import deckers.thibault.aves.metadata.MultiPage
import deckers.thibault.aves.model.AvesEntry
import deckers.thibault.aves.model.ExifOrientationOp
@ -97,6 +98,7 @@ abstract class ImageProvider {
}
}
@Suppress("BlockingMethodInNonBlockingContext")
private suspend fun exportSingleByTreeDocAndScan(
context: Context,
sourceEntry: AvesEntry,
@ -109,9 +111,7 @@ abstract class ImageProvider {
val pageId = sourceEntry.pageId
var desiredNameWithoutExtension = if (sourceEntry.path != null) {
val sourcePath = sourceEntry.path
val sourceFile = File(sourcePath)
val sourceFileName = sourceFile.name
val sourceFileName = File(sourceEntry.path).name
sourceFileName.replaceFirst("[.][^.]+$".toRegex(), "")
} else {
sourceUri.lastPathSegment!!
@ -130,13 +130,11 @@ abstract class ImageProvider {
// but in order to open an output stream to it, we need to use a `SingleDocumentFile`
// through a document URI, not a tree URI
// note that `DocumentFile.getParentFile()` returns null if we did not pick a tree first
@Suppress("BlockingMethodInNonBlockingContext")
val destinationTreeFile = destinationDirDocFile.createFile(exportMimeType, desiredNameWithoutExtension)
val destinationDocFile = DocumentFileCompat.fromSingleUri(context, destinationTreeFile.uri)
if (isVideo(sourceMimeType)) {
val sourceDocFile = DocumentFileCompat.fromSingleUri(context, sourceUri)
@Suppress("BlockingMethodInNonBlockingContext")
sourceDocFile.copyTo(destinationDocFile)
} else {
val model: Any = if (MimeTypes.isHeic(sourceMimeType) && pageId != null) {
@ -159,14 +157,12 @@ abstract class ImageProvider {
.load(model)
.submit()
try {
@Suppress("BlockingMethodInNonBlockingContext")
var bitmap = target.get()
if (MimeTypes.needRotationAfterGlide(sourceMimeType)) {
bitmap = BitmapUtils.applyExifOrientation(context, bitmap, sourceEntry.rotationDegrees, sourceEntry.isFlipped)
}
bitmap ?: throw Exception("failed to get image from uri=$sourceUri page=$pageId")
@Suppress("BlockingMethodInNonBlockingContext")
destinationDocFile.openOutputStream().use { output ->
if (exportMimeType == MimeTypes.BMP) {
BmpWriter.writeRGB24(bitmap, output)
@ -201,6 +197,108 @@ abstract class ImageProvider {
return scanNewPath(context, destinationFullPath, exportMimeType)
}
@Suppress("BlockingMethodInNonBlockingContext")
/**
 * Writes a captured video frame (JPEG `bytes`) as a new image file in `destinationDir`,
 * optionally applying Exif metadata (`rotationDegrees`, `dateTimeMillis`, `latitude`/`longitude`),
 * then scans the new file and reports its fields through [callback].
 *
 * Fails (via [callback.onFailure]) when the destination directory cannot be created,
 * or when a file with the desired name already exists.
 */
suspend fun captureFrame(
    context: Context,
    desiredNameWithoutExtension: String,
    exifFields: FieldMap,
    bytes: ByteArray,
    destinationDir: String,
    callback: ImageOpCallback,
) {
    val destinationDirDocFile = createDirectoryIfAbsent(context, destinationDir)
    if (destinationDirDocFile == null) {
        callback.onFailure(Exception("failed to create directory at path=$destinationDir"))
        return
    }

    // captured frames are always saved as JPEG
    val captureMimeType = MimeTypes.JPEG
    val desiredFileName = desiredNameWithoutExtension + extensionFor(captureMimeType)
    if (File(destinationDir, desiredFileName).exists()) {
        callback.onFailure(Exception("file with name=$desiredFileName already exists in destination directory"))
        return
    }

    // the file created from a `TreeDocumentFile` is also a `TreeDocumentFile`
    // but in order to open an output stream to it, we need to use a `SingleDocumentFile`
    // through a document URI, not a tree URI
    // note that `DocumentFile.getParentFile()` returns null if we did not pick a tree first
    val destinationTreeFile = destinationDirDocFile.createFile(captureMimeType, desiredNameWithoutExtension)
    val destinationDocFile = DocumentFileCompat.fromSingleUri(context, destinationTreeFile.uri)

    try {
        if (exifFields.isEmpty()) {
            // no metadata to apply: write the frame bytes straight to the destination
            destinationDocFile.openOutputStream().use { output ->
                output.write(bytes)
            }
        } else {
            // `ExifInterface` edits files in place, so stage the bytes in a temporary file first
            val editableFile = File.createTempFile("aves", null).apply {
                deleteOnExit()
                outputStream().use { output ->
                    ByteArrayInputStream(bytes).use { imageInput ->
                        imageInput.copyTo(output)
                    }
                }
            }

            val exif = ExifInterface(editableFile)
            val rotationDegrees = exifFields["rotationDegrees"] as Int?
            if (rotationDegrees != null) {
                // when the orientation is not defined, it returns `undefined (0)` instead of the orientation default value `normal (1)`
                // in that case we explicitly set it to `normal` first
                // because ExifInterface fails to rotate an image with undefined orientation
                // as of androidx.exifinterface:exifinterface:1.3.0
                val currentOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL)
                if (currentOrientation == ExifInterface.ORIENTATION_UNDEFINED) {
                    exif.setAttribute(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL.toString())
                }
                exif.rotate(rotationDegrees)
            }
            val dateTimeMillis = (exifFields["dateTimeMillis"] as Number?)?.toLong()
            if (dateTimeMillis != null) {
                val dateString = ExifInterfaceHelper.DATETIME_FORMAT.format(Date(dateTimeMillis))
                exif.setAttribute(ExifInterface.TAG_DATETIME, dateString)
                exif.setAttribute(ExifInterface.TAG_DATETIME_ORIGINAL, dateString)

                // Exif offset time is formatted as `[+-]HH:MM`
                // derive hours/minutes from the *absolute* offset: computing them from the signed
                // value would yield a malformed string for negative offsets (e.g. UTC-05:30 -> `--5:-30`)
                val offsetInMinutes = TimeZone.getDefault().getOffset(dateTimeMillis) / 60000
                val offsetSign = if (offsetInMinutes < 0) "-" else "+"
                val absOffsetInMinutes = if (offsetInMinutes < 0) -offsetInMinutes else offsetInMinutes
                val offsetHours = "${absOffsetInMinutes / 60}".padStart(2, '0')
                val offsetMinutes = "${absOffsetInMinutes % 60}".padStart(2, '0')
                val timeZoneString = "$offsetSign$offsetHours:$offsetMinutes"
                exif.setAttribute(ExifInterface.TAG_OFFSET_TIME, timeZoneString)
                exif.setAttribute(ExifInterface.TAG_OFFSET_TIME_ORIGINAL, timeZoneString)

                // Exif sub-second time holds the fractional-second *digits*, so milliseconds
                // must be zero-padded to 3 digits: 7 ms is `007` (.007 s), not `7` (.7 s)
                val sub = dateTimeMillis % 1000
                if (sub > 0) {
                    val subString = "$sub".padStart(3, '0')
                    exif.setAttribute(ExifInterface.TAG_SUBSEC_TIME, subString)
                    exif.setAttribute(ExifInterface.TAG_SUBSEC_TIME_ORIGINAL, subString)
                }
            }
            val latitude = (exifFields["latitude"] as Number?)?.toDouble()
            val longitude = (exifFields["longitude"] as Number?)?.toDouble()
            if (latitude != null && longitude != null) {
                exif.setLatLong(latitude, longitude)
            }
            exif.saveAttributes()

            // copy the edited temporary file back to the original
            DocumentFileCompat.fromFile(editableFile).copyTo(destinationDocFile)
        }

        val fileName = destinationDocFile.name
        val destinationFullPath = destinationDir + fileName
        val newFields = scanNewPath(context, destinationFullPath, captureMimeType)
        callback.onSuccess(newFields)
    } catch (e: Exception) {
        callback.onFailure(e)
    }
}
suspend fun rename(context: Context, oldPath: String, oldMediaUri: Uri, mimeType: String, newFilename: String, callback: ImageOpCallback) {
val oldFile = File(oldPath)
val newFile = File(oldFile.parent, newFilename)

View file

@ -54,18 +54,18 @@ object StorageUtils {
private fun getPathStepIterator(context: Context, anyPath: String, root: String?): Iterator<String?>? {
val rootLength = (root ?: getVolumePath(context, anyPath))?.length ?: return null
var filename: String? = null
var fileName: String? = null
var relativePath: String? = null
val lastSeparatorIndex = anyPath.lastIndexOf(File.separator) + 1
if (lastSeparatorIndex > rootLength) {
filename = anyPath.substring(lastSeparatorIndex)
fileName = anyPath.substring(lastSeparatorIndex)
relativePath = anyPath.substring(rootLength, lastSeparatorIndex)
}
relativePath ?: return null
val pathSteps = relativePath.split(File.separator).filter { it.isNotEmpty() }.toMutableList()
if (filename?.isNotEmpty() == true) {
pathSteps.add(filename)
if (fileName?.isNotEmpty() == true) {
pathSteps.add(fileName)
}
return pathSteps.iterator()
}
@ -187,7 +187,7 @@ object StorageUtils {
return "primary"
}
volume.uuid?.let { uuid ->
return uuid.toUpperCase(Locale.ROOT)
return uuid.uppercase(Locale.ROOT)
}
}
}
@ -199,7 +199,7 @@ object StorageUtils {
return "primary"
}
volumePath.split(File.separator).lastOrNull { it.isNotEmpty() }?.let { uuid ->
return uuid.toUpperCase(Locale.ROOT)
return uuid.uppercase(Locale.ROOT)
}
}
@ -434,11 +434,11 @@ object StorageUtils {
return if (dirPath.endsWith(File.separator)) dirPath else dirPath + File.separator
}
// `fullPath` should match "volumePath + relativeDir + filename"
// `fullPath` should match "volumePath + relativeDir + fileName"
class PathSegments(context: Context, fullPath: String) {
var volumePath: String? = null // `volumePath` with trailing "/"
var relativeDir: String? = null // `relativeDir` with trailing "/"
private var filename: String? = null // null for directories
private var fileName: String? = null // null for directories
init {
volumePath = getVolumePath(context, fullPath)
@ -446,7 +446,7 @@ object StorageUtils {
val lastSeparatorIndex = fullPath.lastIndexOf(File.separator) + 1
val volumePathLength = volumePath!!.length
if (lastSeparatorIndex > volumePathLength) {
filename = fullPath.substring(lastSeparatorIndex)
fileName = fullPath.substring(lastSeparatorIndex)
relativeDir = fullPath.substring(volumePathLength, lastSeparatorIndex)
}
}

View file

@ -492,6 +492,8 @@
"@albumScreenshots": {},
"albumScreenRecordings": "Screen recordings",
"@albumScreenRecordings": {},
"albumVideoCaptures": "Video Captures",
"@albumVideoCaptures": {},
"albumPageTitle": "Albums",
"@albumPageTitle": {},

View file

@ -224,6 +224,7 @@
"albumDownload": "다운로드",
"albumScreenshots": "스크린샷",
"albumScreenRecordings": "화면 녹화 파일",
"albumVideoCaptures": "동영상 캡처",
"albumPageTitle": "앨범",
"albumEmpty": "앨범이 없습니다",

View file

@ -14,7 +14,7 @@ class VideoActions {
static const all = [
VideoAction.replay10,
VideoAction.togglePlay,
// VideoAction.captureFrame,
VideoAction.captureFrame,
VideoAction.setSpeed,
VideoAction.selectStreams,
];

View file

@ -34,6 +34,7 @@ mixin AlbumMixin on SourceBase {
if (type == AlbumType.download) return context.l10n.albumDownload;
if (type == AlbumType.screenshots) return context.l10n.albumScreenshots;
if (type == AlbumType.screenRecordings) return context.l10n.albumScreenRecordings;
if (type == AlbumType.videoCaptures) return context.l10n.albumVideoCaptures;
}
final dir = VolumeRelativeDirectory.fromPath(dirPath);

View file

@ -80,6 +80,14 @@ abstract class ImageFileService {
required String destinationAlbum,
});
Future<Map<String, dynamic>> captureFrame(
AvesEntry entry, {
required String desiredName,
required Map<String, dynamic> exif,
required Uint8List bytes,
required String destinationAlbum,
});
Future<Map<String, dynamic>> rename(AvesEntry entry, String newName);
Future<Map<String, dynamic>> rotate(AvesEntry entry, {required bool clockwise});
@ -334,6 +342,29 @@ class PlatformImageFileService implements ImageFileService {
}
}
@override
// Invokes the platform-side `captureFrame` handler to save raw frame `bytes`
// as a new image in `destinationAlbum`, tagging it with the given Exif fields.
// Returns the scanned fields of the new entry, or an empty map on failure
// (failures are logged, not rethrown).
Future<Map<String, dynamic>> captureFrame(
AvesEntry entry, {
required String desiredName,
required Map<String, dynamic> exif,
required Uint8List bytes,
required String destinationAlbum,
}) async {
try {
final result = await platform.invokeMethod('captureFrame', <String, dynamic>{
'uri': entry.uri,
'desiredName': desiredName,
'exif': exif,
'bytes': bytes,
'destinationPath': destinationAlbum,
});
if (result != null) return (result as Map).cast<String, dynamic>();
} on PlatformException catch (e) {
debugPrint('captureFrame failed with code=${e.code}, exception=${e.message}, details=${e.details}');
}
// best-effort: callers treat an empty map as failure
return {};
}
@override
Future<Map<String, dynamic>> rename(AvesEntry entry, String newName) async {
try {

View file

@ -9,7 +9,7 @@ import 'package:flutter/widgets.dart';
final AndroidFileUtils androidFileUtils = AndroidFileUtils._private();
class AndroidFileUtils {
late String primaryStorage, dcimPath, downloadPath, moviesPath, picturesPath;
late String primaryStorage, dcimPath, downloadPath, moviesPath, picturesPath, videoCapturesPath;
Set<StorageVolume> storageVolumes = {};
Set<Package> _packages = {};
List<String> _potentialAppDirs = [];
@ -28,6 +28,8 @@ class AndroidFileUtils {
downloadPath = pContext.join(primaryStorage, 'Download');
moviesPath = pContext.join(primaryStorage, 'Movies');
picturesPath = pContext.join(primaryStorage, 'Pictures');
// from Aves
videoCapturesPath = pContext.join(dcimPath, 'Video Captures');
}
Future<void> initAppNames() async {
@ -42,6 +44,8 @@ class AndroidFileUtils {
bool isScreenRecordingsPath(String path) => (path.startsWith(dcimPath) || path.startsWith(moviesPath)) && (path.endsWith('Screen recordings') || path.endsWith('ScreenRecords'));
bool isVideoCapturesPath(String path) => path == videoCapturesPath;
bool isDownloadPath(String path) => path == downloadPath;
StorageVolume? getStorageVolume(String? path) {
@ -59,6 +63,7 @@ class AndroidFileUtils {
if (isDownloadPath(albumPath)) return AlbumType.download;
if (isScreenRecordingsPath(albumPath)) return AlbumType.screenRecordings;
if (isScreenshotsPath(albumPath)) return AlbumType.screenshots;
if (isVideoCapturesPath(albumPath)) return AlbumType.videoCaptures;
final dir = pContext.split(albumPath).last;
if (albumPath.startsWith(primaryStorage) && _potentialAppDirs.contains(dir)) return AlbumType.app;
@ -78,7 +83,7 @@ class AndroidFileUtils {
}
}
enum AlbumType { regular, app, camera, download, screenRecordings, screenshots }
enum AlbumType { regular, app, camera, download, screenRecordings, screenshots, videoCaptures }
class Package {
final String packageName;

View file

@ -46,26 +46,49 @@ mixin SizeAwareMixin {
final hasEnoughSpace = needed < free;
if (!hasEnoughSpace) {
await showDialog(
context: context,
builder: (context) {
final neededSize = formatFilesize(needed);
final freeSize = formatFilesize(free);
final volume = destinationVolume.getDescription(context);
return AvesDialog(
context: context,
title: context.l10n.notEnoughSpaceDialogTitle,
content: Text(context.l10n.notEnoughSpaceDialogMessage(neededSize, freeSize, volume)),
actions: [
TextButton(
onPressed: () => Navigator.pop(context),
child: Text(MaterialLocalizations.of(context).okButtonLabel),
),
],
);
},
);
await _showNotEnoughSpaceDialog(context, needed, free, destinationVolume);
}
return hasEnoughSpace;
}
// Returns whether the volume holding `destinationAlbum` has more than `needed`
// bytes free. When it does not, a "not enough space" dialog is shown before
// returning false.
Future<bool> checkFreeSpace(
  BuildContext context,
  int needed,
  String destinationAlbum,
) async {
  // optimistically assume enough space when the volume or its free space is unknown
  final volume = androidFileUtils.getStorageVolume(destinationAlbum);
  if (volume == null) return true;

  final free = await storageService.getFreeSpace(volume);
  if (free == null) return true;

  if (needed < free) return true;
  await _showNotEnoughSpaceDialog(context, needed, free, volume);
  return false;
}
// Shows a modal dialog telling the user that `needed` bytes are required but
// only `free` bytes remain on `destinationVolume`. Completes when dismissed.
Future<void> _showNotEnoughSpaceDialog(BuildContext context, int needed, int free, StorageVolume destinationVolume) async {
await showDialog(
context: context,
builder: (context) {
// human-readable sizes and volume description for the localized message
final neededSize = formatFilesize(needed);
final freeSize = formatFilesize(free);
final volume = destinationVolume.getDescription(context);
return AvesDialog(
context: context,
title: context.l10n.notEnoughSpaceDialogTitle,
content: Text(context.l10n.notEnoughSpaceDialogMessage(neededSize, freeSize, volume)),
actions: [
TextButton(
onPressed: () => Navigator.pop(context),
child: Text(MaterialLocalizations.of(context).okButtonLabel),
),
],
);
},
);
}
}

View file

@ -199,6 +199,7 @@ class IconUtils {
case AlbumType.camera:
return buildIcon(AIcons.cameraAlbum);
case AlbumType.screenshots:
case AlbumType.videoCaptures:
return buildIcon(AIcons.screenshotAlbum);
case AlbumType.screenRecordings:
return buildIcon(AIcons.recordingAlbum);

View file

@ -196,16 +196,17 @@ class EntryActionDelegate with FeedbackMixin, PermissionAwareMixin, SizeAwareMix
onDone: (processed) {
final movedOps = processed.where((e) => e.success);
final movedCount = movedOps.length;
final showAction = collection != null && movedCount > 0
final _collection = collection;
final showAction = _collection != null && movedCount > 0
? SnackBarAction(
label: context.l10n.showButtonLabel,
onPressed: () async {
final highlightInfo = context.read<HighlightInfo>();
final targetCollection = CollectionLens(
source: collection!.source,
source: source,
filters: {AlbumFilter(destinationAlbum, source.getAlbumDisplayName(context, destinationAlbum))},
groupFactor: collection!.groupFactor,
sortFactor: collection!.sortFactor,
groupFactor: _collection.groupFactor,
sortFactor: _collection.sortFactor,
);
unawaited(Navigator.pushAndRemoveUntil(
context,

View file

@ -26,6 +26,7 @@ import 'package:aves/widgets/viewer/overlay/notifications.dart';
import 'package:aves/widgets/viewer/overlay/top.dart';
import 'package:aves/widgets/viewer/video/conductor.dart';
import 'package:aves/widgets/viewer/video/controller.dart';
import 'package:aves/widgets/viewer/video_action_delegate.dart';
import 'package:aves/widgets/viewer/visual/state.dart';
import 'package:collection/collection.dart';
import 'package:flutter/foundation.dart';
@ -60,7 +61,8 @@ class _EntryViewerStackState extends State<EntryViewerStack> with SingleTickerPr
late Animation<double> _topOverlayScale, _bottomOverlayScale;
late Animation<Offset> _bottomOverlayOffset;
EdgeInsets? _frozenViewInsets, _frozenViewPadding;
late EntryActionDelegate _actionDelegate;
late EntryActionDelegate _entryActionDelegate;
late VideoActionDelegate _videoActionDelegate;
final List<Tuple2<String, ValueNotifier<ViewState>>> _viewStateNotifiers = [];
final ValueNotifier<HeroInfo?> _heroInfoNotifier = ValueNotifier(null);
bool _isEntryTracked = true;
@ -108,10 +110,13 @@ class _EntryViewerStackState extends State<EntryViewerStack> with SingleTickerPr
curve: Curves.easeOutQuad,
));
_overlayVisible.addListener(_onOverlayVisibleChange);
_actionDelegate = EntryActionDelegate(
_entryActionDelegate = EntryActionDelegate(
collection: collection,
showInfo: () => _goToVerticalPage(infoPage),
);
_videoActionDelegate = VideoActionDelegate(
collection: collection,
);
_initEntryControllers();
_registerWidget(widget);
WidgetsBinding.instance!.addObserver(this);
@ -243,7 +248,7 @@ class _EntryViewerStackState extends State<EntryViewerStack> with SingleTickerPr
}
}
}
_actionDelegate.onActionSelected(context, targetEntry, action);
_entryActionDelegate.onActionSelected(context, targetEntry, action);
},
viewStateNotifier: _viewStateNotifiers.firstWhereOrNull((kv) => kv.item1 == mainEntry.uri)?.item2,
);
@ -290,6 +295,11 @@ class _EntryViewerStackState extends State<EntryViewerStack> with SingleTickerPr
entry: pageEntry,
controller: videoController,
scale: _bottomOverlayScale,
onActionSelected: (action) {
if (videoController != null) {
_videoActionDelegate.onActionSelected(context, videoController, action);
}
},
),
);
} else if (pageEntry.is360) {
@ -414,7 +424,7 @@ class _EntryViewerStackState extends State<EntryViewerStack> with SingleTickerPr
void _onVerticalPageChanged(int page) {
_currentVerticalPage.value = page;
if (page == transitionPage) {
_actionDelegate.dismissFeedback(context);
_entryActionDelegate.dismissFeedback(context);
_popVisual();
} else if (page == infoPage) {
// prevent hero when viewer is offscreen

View file

@ -12,10 +12,7 @@ import 'package:aves/widgets/common/basic/menu_row.dart';
import 'package:aves/widgets/common/extensions/build_context.dart';
import 'package:aves/widgets/common/fx/blurred.dart';
import 'package:aves/widgets/common/fx/borders.dart';
import 'package:aves/widgets/dialogs/video_speed_dialog.dart';
import 'package:aves/widgets/dialogs/video_stream_selection_dialog.dart';
import 'package:aves/widgets/viewer/overlay/common.dart';
import 'package:aves/widgets/viewer/overlay/notifications.dart';
import 'package:aves/widgets/viewer/video/controller.dart';
import 'package:flutter/material.dart';
import 'package:flutter/scheduler.dart';
@ -25,12 +22,14 @@ class VideoControlOverlay extends StatefulWidget {
final AvesEntry entry;
final AvesVideoController? controller;
final Animation<double> scale;
final Function(VideoAction value) onActionSelected;
const VideoControlOverlay({
Key? key,
required this.entry,
required this.controller,
required this.scale,
required this.onActionSelected,
}) : super(key: key);
@override
@ -93,6 +92,7 @@ class _VideoControlOverlayState extends State<VideoControlOverlay> with SingleTi
menuActions: menuActions,
scale: scale,
controller: controller,
onActionSelected: widget.onActionSelected,
),
const SizedBox(height: 8),
_buildProgressBar(),
@ -195,6 +195,7 @@ class _ButtonRow extends StatelessWidget {
final List<VideoAction> quickActions, menuActions;
final Animation<double> scale;
final AvesVideoController? controller;
final Function(VideoAction value) onActionSelected;
const _ButtonRow({
Key? key,
@ -202,6 +203,7 @@ class _ButtonRow extends StatelessWidget {
required this.menuActions,
required this.scale,
required this.controller,
required this.onActionSelected,
}) : super(key: key);
static const double padding = 8;
@ -223,7 +225,7 @@ class _ButtonRow extends StatelessWidget {
itemBuilder: (context) => menuActions.map((action) => _buildPopupMenuItem(context, action)).toList(),
onSelected: (action) {
// wait for the popup menu to hide before proceeding with the action
Future.delayed(Durations.popupMenuAnimation * timeDilation, () => _onActionSelected(context, action));
Future.delayed(Durations.popupMenuAnimation * timeDilation, () => onActionSelected(action));
},
),
),
@ -234,7 +236,7 @@ class _ButtonRow extends StatelessWidget {
Widget _buildOverlayButton(BuildContext context, VideoAction action) {
late Widget child;
void onPressed() => _onActionSelected(context, action);
void onPressed() => onActionSelected(action);
switch (action) {
case VideoAction.togglePlay:
child = _PlayToggler(
@ -283,81 +285,6 @@ class _ButtonRow extends StatelessWidget {
child: child,
);
}
void _onActionSelected(BuildContext context, VideoAction action) {
switch (action) {
case VideoAction.togglePlay:
_togglePlayPause(context);
break;
case VideoAction.setSpeed:
_showSpeedDialog(context);
break;
case VideoAction.selectStreams:
_showStreamSelectionDialog(context);
break;
case VideoAction.captureFrame:
controller?.captureFrame();
break;
case VideoAction.replay10:
{
final _controller = controller;
if (_controller != null && _controller.isReady) {
_controller.seekTo(_controller.currentPosition - 10000);
}
break;
}
}
}
Future<void> _showSpeedDialog(BuildContext context) async {
final _controller = controller;
if (_controller == null) return;
final newSpeed = await showDialog<double>(
context: context,
builder: (context) => VideoSpeedDialog(
current: _controller.speed,
min: _controller.minSpeed,
max: _controller.maxSpeed,
),
);
if (newSpeed == null) return;
_controller.speed = newSpeed;
}
Future<void> _showStreamSelectionDialog(BuildContext context) async {
final _controller = controller;
if (_controller == null) return;
final selectedStreams = await showDialog<Map<StreamType, StreamSummary>>(
context: context,
builder: (context) => VideoStreamSelectionDialog(
streams: _controller.streams,
),
);
if (selectedStreams == null || selectedStreams.isEmpty) return;
// TODO TLAD [video] get stream list & guess default selected streams, when the controller is not initialized yet
await Future.forEach<MapEntry<StreamType, StreamSummary>>(
selectedStreams.entries,
(kv) => _controller.selectStream(kv.key, kv.value),
);
}
Future<void> _togglePlayPause(BuildContext context) async {
final _controller = controller;
if (_controller == null) return;
if (isPlaying) {
await _controller.pause();
} else {
await _controller.play();
// hide overlay
await Future.delayed(Durations.iconAnimation);
ToggleOverlayNotification().dispatch(context);
}
}
}
class _PlayToggler extends StatefulWidget {

View file

@ -1,3 +1,5 @@
import 'dart:typed_data';
import 'package:aves/model/entry.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
@ -55,7 +57,7 @@ abstract class AvesVideoController {
Map<StreamSummary, bool> get streams;
Future<void> captureFrame();
Future<Uint8List> captureFrame();
Widget buildPlayerWidget(BuildContext context);
}

View file

@ -1,5 +1,6 @@
import 'dart:async';
import 'dart:math';
import 'dart:typed_data';
import 'dart:ui';
import 'package:aves/model/entry.dart';
@ -339,11 +340,7 @@ class IjkPlayerAvesVideoController extends AvesVideoController {
}
@override
Future<void> captureFrame() async {
final bytes = await _instance.takeSnapShot();
// TODO TLAD [video] export to DCIM/Videocaptures
debugPrint('captureFrame bytes=${bytes.length}');
}
Future<Uint8List> captureFrame() => _instance.takeSnapShot();
@override
Widget buildPlayerWidget(BuildContext context) {

View file

@ -0,0 +1,163 @@
import 'dart:async';
import 'package:aves/model/actions/video_actions.dart';
import 'package:aves/model/filters/album.dart';
import 'package:aves/model/highlight.dart';
import 'package:aves/model/source/collection_lens.dart';
import 'package:aves/services/services.dart';
import 'package:aves/theme/durations.dart';
import 'package:aves/utils/android_file_utils.dart';
import 'package:aves/widgets/collection/collection_page.dart';
import 'package:aves/widgets/common/action_mixins/feedback.dart';
import 'package:aves/widgets/common/action_mixins/permission_aware.dart';
import 'package:aves/widgets/common/action_mixins/size_aware.dart';
import 'package:aves/widgets/common/extensions/build_context.dart';
import 'package:aves/widgets/dialogs/video_speed_dialog.dart';
import 'package:aves/widgets/dialogs/video_stream_selection_dialog.dart';
import 'package:aves/widgets/viewer/overlay/notifications.dart';
import 'package:aves/widgets/viewer/video/controller.dart';
import 'package:collection/collection.dart';
import 'package:flutter/material.dart';
import 'package:pedantic/pedantic.dart';
import 'package:provider/provider.dart';
// Dispatches viewer video actions (frame capture, seek, stream/speed selection,
// play/pause) for a given video controller. Mixes in feedback, storage-permission
// and free-space checks used by the capture flow.
class VideoActionDelegate with FeedbackMixin, PermissionAwareMixin, SizeAwareMixin {
// source collection, used to build the "show" navigation target after a capture;
// may be null when the viewer was opened outside a collection
final CollectionLens? collection;
VideoActionDelegate({
required this.collection,
});
// Entry point: routes `action` to the matching handler.
void onActionSelected(BuildContext context, AvesVideoController controller, VideoAction action) {
switch (action) {
case VideoAction.captureFrame:
_captureFrame(context, controller);
break;
case VideoAction.replay10:
// NOTE(review): seek offset is in milliseconds (10000 ms = 10 s) — assumed from
// `currentPosition` usage elsewhere; confirm against the controller contract
if (controller.isReady) controller.seekTo(controller.currentPosition - 10000);
break;
case VideoAction.selectStreams:
_showStreamSelectionDialog(context, controller);
break;
case VideoAction.setSpeed:
_showSpeedDialog(context, controller);
break;
case VideoAction.togglePlay:
_togglePlayPause(context, controller);
break;
}
}
// Captures the current frame, checks permission and free space for the
// "Video Captures" album, writes the frame with Exif derived from the entry,
// and shows success feedback with an optional "show" action navigating to it.
// NOTE(review): `context` is used after several awaits — verify the widget is
// still mounted in practice (common Flutter use_build_context_synchronously concern).
Future<void> _captureFrame(BuildContext context, AvesVideoController controller) async {
// record position before the async snapshot, so the name matches the captured frame
final positionMillis = controller.currentPosition;
final bytes = await controller.captureFrame();
final destinationAlbum = androidFileUtils.videoCapturesPath;
if (!await checkStoragePermissionForAlbums(context, {destinationAlbum})) return;
if (!await checkFreeSpace(context, bytes.length, destinationAlbum)) return;
final entry = controller.entry;
final rotationDegrees = entry.rotationDegrees;
final dateTimeMillis = entry.catalogMetadata?.dateMillis;
final latLng = entry.latLng;
// only include Exif fields that carry meaningful values
final exif = {
if (rotationDegrees != 0) 'rotationDegrees': rotationDegrees,
if (dateTimeMillis != null && dateTimeMillis != 0) 'dateTimeMillis': dateTimeMillis,
if (latLng != null) ...{
'latitude': latLng.latitude,
'longitude': latLng.longitude,
}
};
final newFields = await imageFileService.captureFrame(
entry,
// e.g. "MyVideo_00012345" — position zero-padded to 8 digits for sortable names
desiredName: '${entry.bestTitle}_${'$positionMillis'.padLeft(8, '0')}',
exif: exif,
bytes: bytes,
destinationAlbum: destinationAlbum,
);
// the platform side returns an empty map on failure
final success = newFields.isNotEmpty;
if (success) {
// local non-null copy enables promotion inside the closures below
final _collection = collection;
final showAction = _collection != null
? SnackBarAction(
label: context.l10n.showButtonLabel,
onPressed: () async {
final highlightInfo = context.read<HighlightInfo>();
final source = _collection.source;
// open the destination album, keeping the current group/sort settings
final targetCollection = CollectionLens(
source: source,
filters: {AlbumFilter(destinationAlbum, source.getAlbumDisplayName(context, destinationAlbum))},
groupFactor: _collection.groupFactor,
sortFactor: _collection.sortFactor,
);
unawaited(Navigator.pushAndRemoveUntil(
context,
MaterialPageRoute(
settings: const RouteSettings(name: CollectionPage.routeName),
builder: (context) {
return CollectionPage(
targetCollection,
);
},
),
(route) => false,
));
// wait for the page transition before trying to scroll to the new entry
await Future.delayed(Durations.staggeredAnimationPageTarget + Durations.highlightScrollInitDelay);
final newUri = newFields['uri'] as String?;
final targetEntry = targetCollection.sortedEntries.firstWhereOrNull((entry) => entry.uri == newUri);
if (targetEntry != null) {
highlightInfo.trackItem(targetEntry, highlightItem: targetEntry);
}
},
)
: null;
showFeedback(context, context.l10n.genericSuccessFeedback, showAction);
} else {
showFeedback(context, context.l10n.genericFailureFeedback);
}
}
// Lets the user pick audio/video/text streams, then applies each selection.
Future<void> _showStreamSelectionDialog(BuildContext context, AvesVideoController controller) async {
final selectedStreams = await showDialog<Map<StreamType, StreamSummary>>(
context: context,
builder: (context) => VideoStreamSelectionDialog(
streams: controller.streams,
),
);
if (selectedStreams == null || selectedStreams.isEmpty) return;
// TODO TLAD [video] get stream list & guess default selected streams, when the controller is not initialized yet
await Future.forEach<MapEntry<StreamType, StreamSummary>>(
selectedStreams.entries,
(kv) => controller.selectStream(kv.key, kv.value),
);
}
// Lets the user pick a playback speed within the controller's supported range.
Future<void> _showSpeedDialog(BuildContext context, AvesVideoController controller) async {
final newSpeed = await showDialog<double>(
context: context,
builder: (context) => VideoSpeedDialog(
current: controller.speed,
min: controller.minSpeed,
max: controller.maxSpeed,
),
);
if (newSpeed == null) return;
controller.speed = newSpeed;
}
// Toggles playback; when starting playback, hides the overlay shortly after.
Future<void> _togglePlayPause(BuildContext context, AvesVideoController controller) async {
if (controller.isPlaying) {
await controller.pause();
} else {
await controller.play();
// hide overlay
await Future.delayed(Durations.iconAnimation);
ToggleOverlayNotification().dispatch(context);
}
}
}

View file

@ -209,6 +209,11 @@ class _EntryPageViewState extends State<EntryPageView> {
VideoSubtitles(
controller: videoController,
),
if (settings.videoShowRawTimedText)
VideoSubtitles(
controller: videoController,
debugMode: true,
),
],
);
}),

View file

@ -1,4 +1,3 @@
import 'package:aves/model/settings/settings.dart';
import 'package:aves/widgets/common/basic/outlined_text.dart';
import 'package:aves/widgets/viewer/video/controller.dart';
import 'package:flutter/material.dart';
@ -6,10 +5,12 @@ import 'package:provider/provider.dart';
class VideoSubtitles extends StatelessWidget {
final AvesVideoController controller;
final bool debugMode;
const VideoSubtitles({
Key? key,
required this.controller,
this.debugMode = false,
}) : super(key: key);
@override
@ -17,34 +18,43 @@ class VideoSubtitles extends StatelessWidget {
return Selector<MediaQueryData, Orientation>(
selector: (c, mq) => mq.orientation,
builder: (c, orientation, child) {
final y = orientation == Orientation.portrait ? .5 : .8;
return Align(
alignment: Alignment(0, orientation == Orientation.portrait ? .5 : .8),
child: StreamBuilder<String?>(
stream: controller.timedTextStream,
builder: (context, snapshot) {
final text = snapshot.data;
return text != null ? SubtitleText(text: text) : const SizedBox();
},
),
alignment: Alignment(0, debugMode ? -y : y),
child: child,
);
},
child: StreamBuilder<String?>(
stream: controller.timedTextStream,
builder: (context, snapshot) {
final text = snapshot.data;
return text != null
? SubtitleText(
text: text,
debugMode: debugMode,
)
: const SizedBox();
},
),
);
}
}
class SubtitleText extends StatelessWidget {
final String text;
final bool debugMode;
const SubtitleText({
Key? key,
required this.text,
this.debugMode = false,
}) : super(key: key);
@override
Widget build(BuildContext context) {
late final String displayText;
if (!settings.videoShowRawTimedText) {
if (debugMode) {
displayText = text;
} else {
// TODO TLAD [video] process ASS tags, cf https://aegi.vmoe.info/docs/3.0/ASS_Tags/