full decoding: use raw image descriptor in Flutter on decoded bytes from Android

This commit is contained in:
Thibault Deckers 2025-03-02 23:29:00 +01:00
parent 152b942f57
commit b224709c5d
12 changed files with 191 additions and 128 deletions

View file

@ -8,8 +8,8 @@ import android.util.Log
import androidx.core.net.toUri
import com.bumptech.glide.Glide
import deckers.thibault.aves.decoder.AvesAppGlideModule
import deckers.thibault.aves.utils.BitmapUtils
import deckers.thibault.aves.utils.BitmapUtils.applyExifOrientation
import deckers.thibault.aves.utils.BitmapUtils.getDecodedBytes
import deckers.thibault.aves.utils.BitmapUtils.getEncodedBytes
import deckers.thibault.aves.utils.LogUtils
import deckers.thibault.aves.utils.MemoryUtils
@ -81,11 +81,13 @@ class ImageByteStreamHandler(private val context: Context, private val arguments
return
}
val decoded = arguments["decoded"] as Boolean
val mimeType = arguments["mimeType"] as String?
val uri = (arguments["uri"] as String?)?.toUri()
val sizeBytes = (arguments["sizeBytes"] as Number?)?.toLong()
val rotationDegrees = arguments["rotationDegrees"] as Int
val isFlipped = arguments["isFlipped"] as Boolean
val isAnimated = arguments["isAnimated"] as Boolean
val pageId = arguments["pageId"] as Int?
if (mimeType == null || uri == null) {
@ -94,19 +96,31 @@ class ImageByteStreamHandler(private val context: Context, private val arguments
return
}
if (isVideo(mimeType)) {
streamVideoByGlide(uri, mimeType, sizeBytes)
} else if (!canDecodeWithFlutter(mimeType, pageId, rotationDegrees, isFlipped)) {
// decode exotic format on platform side, then encode it in portable format for Flutter
streamImageByGlide(uri, pageId, mimeType, sizeBytes, rotationDegrees, isFlipped)
} else {
if (canDecodeWithFlutter(mimeType, isAnimated) && !decoded) {
// to be decoded by Flutter
streamImageAsIs(uri, mimeType, sizeBytes)
streamOriginalEncodedBytes(uri, mimeType, sizeBytes)
} else if (isVideo(mimeType)) {
streamVideoByGlide(
uri = uri,
mimeType = mimeType,
sizeBytes = sizeBytes,
decoded = decoded,
)
} else {
streamImageByGlide(
uri = uri,
pageId = pageId,
mimeType = mimeType,
sizeBytes = sizeBytes,
rotationDegrees = rotationDegrees,
isFlipped = isFlipped,
decoded = decoded,
)
}
endOfStream()
}
private fun streamImageAsIs(uri: Uri, mimeType: String, sizeBytes: Long?) {
private fun streamOriginalEncodedBytes(uri: Uri, mimeType: String, sizeBytes: Long?) {
if (!MemoryUtils.canAllocate(sizeBytes)) {
error("streamImage-image-read-large", "original image too large at $sizeBytes bytes, for mimeType=$mimeType uri=$uri", null)
return
@ -126,6 +140,7 @@ class ImageByteStreamHandler(private val context: Context, private val arguments
sizeBytes: Long?,
rotationDegrees: Int,
isFlipped: Boolean,
decoded: Boolean,
) {
val target = Glide.with(context)
.asBitmap()
@ -139,11 +154,12 @@ class ImageByteStreamHandler(private val context: Context, private val arguments
}
if (bitmap != null) {
val recycle = false
val canHaveAlpha = MimeTypes.canHaveAlpha(mimeType)
var bytes = bitmap.getEncodedBytes(canHaveAlpha, recycle = recycle)
if (bytes != null && bytes.isEmpty()) {
bytes = BitmapUtils.tryPixelFormatConversion(bitmap)?.getEncodedBytes(canHaveAlpha, recycle = recycle)
val bytes = if (decoded) {
bitmap.getDecodedBytes(recycle)
} else {
bitmap.getEncodedBytes(canHaveAlpha = MimeTypes.canHaveAlpha(mimeType), recycle = recycle)
}
if (MemoryUtils.canAllocate(sizeBytes)) {
success(bytes)
} else {
@ -159,7 +175,7 @@ class ImageByteStreamHandler(private val context: Context, private val arguments
}
}
private suspend fun streamVideoByGlide(uri: Uri, mimeType: String, sizeBytes: Long?) {
private suspend fun streamVideoByGlide(uri: Uri, mimeType: String, sizeBytes: Long?, decoded: Boolean) {
val target = Glide.with(context)
.asBitmap()
.apply(AvesAppGlideModule.uncachedFullImageOptions)
@ -168,7 +184,13 @@ class ImageByteStreamHandler(private val context: Context, private val arguments
try {
val bitmap = withContext(Dispatchers.IO) { target.get() }
if (bitmap != null) {
val bytes = bitmap.getEncodedBytes(canHaveAlpha = false, recycle = false)
val recycle = false
val bytes = if (decoded) {
bitmap.getDecodedBytes(recycle)
} else {
bitmap.getEncodedBytes(canHaveAlpha = false, recycle = false)
}
if (MemoryUtils.canAllocate(sizeBytes)) {
success(bytes)
} else {

View file

@ -17,6 +17,7 @@ import com.bumptech.glide.load.model.MultiModelLoaderFactory
import com.bumptech.glide.module.LibraryGlideModule
import com.bumptech.glide.signature.ObjectKey
import org.beyka.tiffbitmapfactory.TiffBitmapFactory
import androidx.core.graphics.scale
@GlideModule
class TiffGlideModule : LibraryGlideModule() {
@ -96,7 +97,7 @@ internal class TiffFetcher(val model: TiffImage, val width: Int, val height: Int
dstWidth = width
dstHeight = (width / aspectRatio).toInt()
}
callback.onDataReady(Bitmap.createScaledBitmap(bitmap, dstWidth, dstHeight, true))
callback.onDataReady(bitmap.scale(dstWidth, dstHeight))
} else {
callback.onDataReady(bitmap)
}

View file

@ -2,6 +2,7 @@ package deckers.thibault.aves.decoder
import android.content.Context
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.media.MediaMetadataRetriever
import android.net.Uri
import android.os.Build
@ -20,7 +21,6 @@ import com.bumptech.glide.load.model.MultiModelLoaderFactory
import com.bumptech.glide.module.LibraryGlideModule
import com.bumptech.glide.signature.ObjectKey
import deckers.thibault.aves.utils.BitmapUtils
import deckers.thibault.aves.utils.BitmapUtils.getEncodedBytes
import deckers.thibault.aves.utils.MemoryUtils
import deckers.thibault.aves.utils.StorageUtils.openMetadataRetriever
import kotlinx.coroutines.CoroutineScope
@ -28,45 +28,54 @@ import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.SupervisorJob
import kotlinx.coroutines.launch
import java.io.ByteArrayInputStream
import java.io.InputStream
import java.io.IOException
import kotlin.math.ceil
import kotlin.math.roundToInt
@GlideModule
class VideoThumbnailGlideModule : LibraryGlideModule() {
override fun registerComponents(context: Context, glide: Glide, registry: Registry) {
registry.append(VideoThumbnail::class.java, InputStream::class.java, VideoThumbnailLoader.Factory())
registry.append(VideoThumbnail::class.java, Bitmap::class.java, VideoThumbnailLoader.Factory())
}
}
class VideoThumbnail(val context: Context, val uri: Uri)
internal class VideoThumbnailLoader : ModelLoader<VideoThumbnail, InputStream> {
override fun buildLoadData(model: VideoThumbnail, width: Int, height: Int, options: Options): ModelLoader.LoadData<InputStream> {
internal class VideoThumbnailLoader : ModelLoader<VideoThumbnail, Bitmap> {
override fun buildLoadData(model: VideoThumbnail, width: Int, height: Int, options: Options): ModelLoader.LoadData<Bitmap> {
return ModelLoader.LoadData(ObjectKey(model.uri), VideoThumbnailFetcher(model, width, height))
}
override fun handles(model: VideoThumbnail): Boolean = true
internal class Factory : ModelLoaderFactory<VideoThumbnail, InputStream> {
override fun build(multiFactory: MultiModelLoaderFactory): ModelLoader<VideoThumbnail, InputStream> = VideoThumbnailLoader()
internal class Factory : ModelLoaderFactory<VideoThumbnail, Bitmap> {
override fun build(multiFactory: MultiModelLoaderFactory): ModelLoader<VideoThumbnail, Bitmap> = VideoThumbnailLoader()
override fun teardown() {}
}
}
internal class VideoThumbnailFetcher(private val model: VideoThumbnail, val width: Int, val height: Int) : DataFetcher<InputStream> {
internal class VideoThumbnailFetcher(private val model: VideoThumbnail, val width: Int, val height: Int) : DataFetcher<Bitmap> {
private val ioScope = CoroutineScope(SupervisorJob() + Dispatchers.IO)
override fun loadData(priority: Priority, callback: DataCallback<in InputStream>) {
override fun loadData(priority: Priority, callback: DataCallback<in Bitmap>) {
ioScope.launch {
val retriever = openMetadataRetriever(model.context, model.uri)
if (retriever == null) {
callback.onLoadFailed(Exception("failed to initialize MediaMetadataRetriever for uri=${model.uri}"))
} else {
try {
var bytes = retriever.embeddedPicture
if (bytes == null) {
var bitmap: Bitmap? = null
retriever.embeddedPicture?.let { bytes ->
try {
bitmap = BitmapFactory.decodeStream(ByteArrayInputStream(bytes))
} catch (e: IOException) {
// ignore
}
}
if (bitmap == null) {
// there is no consistent strategy across devices to match
// the thumbnails returned by the content resolver / Media Store
// so we derive one in an arbitrary way
@ -111,7 +120,7 @@ internal class VideoThumbnailFetcher(private val model: VideoThumbnail, val widt
}
// the returned frame is already rotated according to the video metadata
val frame = if (dstWidth > 0 && dstHeight > 0 && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
bitmap = if (dstWidth > 0 && dstHeight > 0 && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
val pixelCount = dstWidth * dstHeight
val targetBitmapSizeBytes = BitmapUtils.getExpectedImageSize(pixelCount.toLong(), getPreferredConfig())
if (!MemoryUtils.canAllocate(targetBitmapSizeBytes)) {
@ -134,13 +143,12 @@ internal class VideoThumbnailFetcher(private val model: VideoThumbnail, val widt
retriever.getFrameAtTime(timeMicros, option)
}
}
bytes = frame?.getEncodedBytes(canHaveAlpha = false, recycle = false)
}
if (bytes != null) {
callback.onDataReady(ByteArrayInputStream(bytes))
if (bitmap == null) {
callback.onLoadFailed(Exception("failed to get embedded picture or any frame for uri=${model.uri}"))
} else {
callback.onLoadFailed(Exception("failed to get embedded picture or any frame"))
callback.onDataReady(bitmap)
}
} catch (e: Exception) {
callback.onLoadFailed(e)
@ -175,7 +183,7 @@ internal class VideoThumbnailFetcher(private val model: VideoThumbnail, val widt
// cannot cancel
override fun cancel() {}
override fun getDataClass(): Class<InputStream> = InputStream::class.java
override fun getDataClass(): Class<Bitmap> = Bitmap::class.java
override fun getDataSource(): DataSource = DataSource.LOCAL
}

View file

@ -139,39 +139,6 @@ object BitmapUtils {
return null
}
// On some devices, RGBA_1010102 config can be displayed directly from the hardware buffer,
// but the native image decoder cannot convert RGBA_1010102 to another config like ARGB_8888,
// so we manually check the config and convert the pixels as a fallback mechanism.
// Returns a new ARGB_8888 bitmap in the sRGB color space, or null when conversion
// does not apply or cannot proceed (not RGBA_1010102, API level below TIRAMISU,
// unknown source color space, or not enough allocatable memory).
fun tryPixelFormatConversion(bitmap: Bitmap): Bitmap? {
// `Bitmap.Config.RGBA_1010102` only exists from API 33 (TIRAMISU), hence the version gate
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU && bitmap.config == Bitmap.Config.RGBA_1010102) {
// both RGBA_1010102 and ARGB_8888 are 32 bits per pixel, so a single buffer
// of the source bitmap's byte count serves the in-place conversion
val byteCount = bitmap.byteCount
if (MemoryUtils.canAllocate(byteCount)) {
// snapshot the raw pixels so they can be rewritten in place below
val bytes = ByteBuffer.allocate(byteCount).apply {
bitmap.copyPixelsToBuffer(this)
rewind()
}.array()
val srcColorSpace = bitmap.colorSpace
if (srcColorSpace != null) {
// map colors from the source color space to sRGB while repacking
val dstColorSpace = ColorSpace.get(ColorSpace.Named.SRGB)
val connector = ColorSpace.connect(srcColorSpace, dstColorSpace)
// in-place repack of each pixel from 10-bit RGB + 2-bit alpha to 8-bit ARGB
rgba1010102toArgb8888(bytes, connector)
val hasAlpha = false
return createBitmap(
bitmap.width,
bitmap.height,
Bitmap.Config.ARGB_8888,
hasAlpha = hasAlpha,
colorSpace = dstColorSpace,
).apply {
// load the converted pixels into the freshly created bitmap
copyPixelsFromBuffer(ByteBuffer.wrap(bytes))
}
}
}
}
// fallthrough: no conversion was applied
return null
}
@RequiresApi(Build.VERSION_CODES.O)
private fun argb8888toArgb8888(bytes: ByteArray, connector: ColorSpace.Connector, start: Int = 0, end: Int = bytes.size) {
// unpacking from ARGB_8888 and packing to ARGB_8888

View file

@ -84,11 +84,11 @@ object MimeTypes {
else -> false
}
// as of Flutter v3.16.4, with additional custom handling for SVG
fun canDecodeWithFlutter(mimeType: String, pageId: Int?, rotationDegrees: Int?, isFlipped: Boolean?) = when (mimeType) {
// as of Flutter v3.16.4, with additional custom handling for SVG in Dart,
// while handling still PNG and JPEG on Android for color space and config conversion
fun canDecodeWithFlutter(mimeType: String, isAnimated: Boolean) = when (mimeType) {
GIF, WEBP, BMP, WBMP, ICO, SVG -> true
JPEG -> (pageId ?: 0) == 0
PNG -> (rotationDegrees ?: 0) == 0 && !(isFlipped ?: false)
JPEG, PNG -> isAnimated
else -> false
}

View file

@ -1,7 +1,9 @@
import 'dart:async';
import 'dart:ui' as ui;
import 'package:aves/ref/mime_types.dart';
import 'package:aves/services/common/services.dart';
import 'package:aves/services/media/media_fetch_service.dart';
import 'package:aves_report/aves_report.dart';
import 'package:equatable/equatable.dart';
import 'package:flutter/foundation.dart';
@ -11,11 +13,11 @@ import 'package:flutter/widgets.dart';
class UriImage extends ImageProvider<UriImage> with EquatableMixin {
final String uri, mimeType;
final int? pageId, rotationDegrees, sizeBytes;
final bool isFlipped;
final bool isFlipped, isAnimated;
final double scale;
@override
List<Object?> get props => [uri, pageId, rotationDegrees, isFlipped, scale];
List<Object?> get props => [uri, pageId, rotationDegrees, isFlipped, isAnimated, scale];
const UriImage({
required this.uri,
@ -23,6 +25,7 @@ class UriImage extends ImageProvider<UriImage> with EquatableMixin {
required this.pageId,
required this.rotationDegrees,
required this.isFlipped,
required this.isAnimated,
this.sizeBytes,
this.scale = 1.0,
});
@ -46,15 +49,34 @@ class UriImage extends ImageProvider<UriImage> with EquatableMixin {
);
}
// As of Flutter v3.16.4, with additional custom handling for SVG in Dart,
// while still (non-animated) PNG and JPEG are handled on Android
// for color space and config conversion.
bool _canDecodeWithFlutter(String mimeType, bool isAnimated) {
  // formats Flutter can always decode on its own
  const flutterDecodable = {
    MimeTypes.gif,
    MimeTypes.webp,
    MimeTypes.bmp,
    MimeTypes.wbmp,
    MimeTypes.ico,
    MimeTypes.svg,
  };
  if (flutterDecodable.contains(mimeType)) return true;
  // animated JPEG/PNG go to Flutter; still ones are decoded on the platform side
  if (mimeType == MimeTypes.jpeg || mimeType == MimeTypes.png) return isAnimated;
  return false;
}
Future<ui.Codec> _loadAsync(UriImage key, ImageDecoderCallback decode, StreamController<ImageChunkEvent> chunkEvents) async {
assert(key == this);
try {
final bytes = await mediaFetchService.getImage(
final request = ImageRequest(
uri,
mimeType,
rotationDegrees: rotationDegrees,
isFlipped: isFlipped,
isAnimated: isAnimated,
pageId: pageId,
sizeBytes: sizeBytes,
onBytesReceived: (cumulative, total) {
@ -64,11 +86,23 @@ class UriImage extends ImageProvider<UriImage> with EquatableMixin {
));
},
);
try {
if (_canDecodeWithFlutter(mimeType, isAnimated)) {
// get original media bytes from platform, and rely on a codec instantiated by `ImageProvider`
final bytes = await mediaFetchService.getEncodedImage(request);
if (bytes.isEmpty) {
throw UnreportedStateError('$uri ($mimeType) loading failed');
throw UnreportedStateError('$uri ($mimeType) image loading failed');
}
final buffer = await ui.ImmutableBuffer.fromUint8List(bytes);
return await decode(buffer);
} else {
// get decoded media bytes from platform, and rely on a codec instantiated from raw bytes
final descriptor = await mediaFetchService.getDecodedImage(request);
if (descriptor == null) {
throw UnreportedStateError('$uri ($mimeType) image loading failed');
}
return descriptor.instantiateCodec();
}
} catch (error) {
// loading may fail if the provided MIME type is incorrect (e.g. the Media Store may report a JPEG as a TIFF)
debugPrint('$runtimeType _loadAsync failed with mimeType=$mimeType, uri=$uri, error=$error');

View file

@ -22,8 +22,9 @@ class EntryCache {
int? dateModifiedMillis,
int rotationDegrees,
bool isFlipped,
bool isAnimated,
) async {
debugPrint('Evict cached images for uri=$uri, mimeType=$mimeType, dateModifiedMillis=$dateModifiedMillis, rotationDegrees=$rotationDegrees, isFlipped=$isFlipped');
debugPrint('Evict cached images for uri=$uri, mimeType=$mimeType, dateModifiedMillis=$dateModifiedMillis, rotationDegrees=$rotationDegrees, isFlipped=$isFlipped, isAnimated=$isAnimated');
// TODO TLAD provide pageId parameter for multi page items, if someday image editing features are added for them
int? pageId;
@ -35,6 +36,7 @@ class EntryCache {
pageId: pageId,
rotationDegrees: rotationDegrees,
isFlipped: isFlipped,
isAnimated: isAnimated,
).evict();
// evict low quality thumbnail (without specified extents)

View file

@ -484,7 +484,7 @@ class AvesEntry with AvesEntryBase {
bool oldIsFlipped,
) async {
if ((!MimeTypes.refersToSameType(oldMimeType, mimeType) && !MimeTypes.isVideo(oldMimeType)) || oldDateModifiedMillis != dateModifiedMillis || oldRotationDegrees != rotationDegrees || oldIsFlipped != isFlipped) {
await EntryCache.evict(uri, oldMimeType, oldDateModifiedMillis, oldRotationDegrees, oldIsFlipped);
await EntryCache.evict(uri, oldMimeType, oldDateModifiedMillis, oldRotationDegrees, oldIsFlipped, isAnimated);
visualChangeNotifier.notify();
}
}

View file

@ -55,6 +55,7 @@ extension ExtraAvesEntryImages on AvesEntry {
pageId: pageId,
rotationDegrees: rotationDegrees,
isFlipped: isFlipped,
isAnimated: isAnimated,
sizeBytes: sizeBytes,
);

View file

@ -13,6 +13,7 @@ class MimeTypes {
static const svg = 'image/svg+xml';
static const tiff = 'image/tiff';
static const webp = 'image/webp';
static const wbmp = 'image/vnd.wap.wbmp';
static const art = 'image/x-jg';
static const cdr = 'image/x-coreldraw';

View file

@ -24,15 +24,9 @@ abstract class MediaFetchService {
BytesReceivedCallback? onBytesReceived,
});
Future<Uint8List> getImage(
String uri,
String mimeType, {
required int? rotationDegrees,
required bool isFlipped,
required int? pageId,
required int? sizeBytes,
BytesReceivedCallback? onBytesReceived,
});
Future<Uint8List> getEncodedImage(ImageRequest request);
Future<ui.ImageDescriptor?> getDecodedImage(ImageRequest request);
// `rect`: region to decode, with coordinates in reference to `imageSize`
Future<ui.ImageDescriptor?> getRegion(
@ -101,45 +95,52 @@ class PlatformMediaFetchService implements MediaFetchService {
required int? sizeBytes,
BytesReceivedCallback? onBytesReceived,
}) =>
getImage(
getEncodedImage(
ImageRequest(
uri,
mimeType,
rotationDegrees: 0,
isFlipped: false,
isAnimated: false,
pageId: null,
sizeBytes: sizeBytes,
onBytesReceived: onBytesReceived,
),
);
@override
Future<Uint8List> getImage(
String uri,
String mimeType, {
required int? rotationDegrees,
required bool isFlipped,
required int? pageId,
required int? sizeBytes,
BytesReceivedCallback? onBytesReceived,
}) async {
Future<Uint8List> getEncodedImage(ImageRequest request) {
return getBytes(request, decoded: false);
}
@override
Future<ui.ImageDescriptor?> getDecodedImage(ImageRequest request) async {
return getBytes(request, decoded: true).then(InteropDecoding.bytesToCodec);
}
Future<Uint8List> getBytes(ImageRequest request, {required bool decoded}) async {
final _onBytesReceived = request.onBytesReceived;
try {
final opCompleter = Completer<Uint8List>();
final sink = OutputBuffer();
var bytesReceived = 0;
_byteStream.receiveBroadcastStream(<String, dynamic>{
'uri': uri,
'mimeType': mimeType,
'sizeBytes': sizeBytes,
'rotationDegrees': rotationDegrees ?? 0,
'isFlipped': isFlipped,
'pageId': pageId,
'uri': request.uri,
'mimeType': request.mimeType,
'sizeBytes': request.sizeBytes,
'rotationDegrees': request.rotationDegrees ?? 0,
'isFlipped': request.isFlipped,
'isAnimated': request.isAnimated,
'pageId': request.pageId,
'decoded': decoded,
}).listen(
(data) {
final chunk = data as Uint8List;
sink.add(chunk);
if (onBytesReceived != null) {
if (_onBytesReceived != null) {
bytesReceived += chunk.length;
try {
onBytesReceived(bytesReceived, sizeBytes);
_onBytesReceived(bytesReceived, request.sizeBytes);
} catch (error, stack) {
opCompleter.completeError(error, stack);
return;
@ -156,7 +157,7 @@ class PlatformMediaFetchService implements MediaFetchService {
// `await` here, so that `completeError` will be caught below
return await opCompleter.future;
} on PlatformException catch (e, stack) {
if (_isUnknownVisual(mimeType)) {
if (_isUnknownVisual(request.mimeType)) {
await reportService.recordError(e, stack);
}
}
@ -313,3 +314,26 @@ class PlatformMediaFetchService implements MediaFetchService {
..._knownVideos,
};
}
/// Parameter bundle for fetching one image through the platform byte stream.
///
/// The fields mirror the arguments serialized for the platform call
/// (see `PlatformMediaFetchService.getBytes`).
@immutable
class ImageRequest {
// content URI of the media item to fetch
final String uri;
// reported MIME type; may be incorrect (e.g. the Media Store can mislabel items)
final String mimeType;
// rotation to apply, in degrees; `null` is serialized as 0
final int? rotationDegrees;
// whether the image is flipped (mirrored)
final bool isFlipped;
// whether the image is animated; drives the Flutter-vs-platform decoding choice
final bool isAnimated;
// page index for multi-page items, if any — TODO confirm semantics with callers
final int? pageId;
// expected size in bytes; used for progress totals and platform allocation checks
final int? sizeBytes;
// optional progress callback, invoked with (cumulative bytes received, total expected)
final BytesReceivedCallback? onBytesReceived;
const ImageRequest(
this.uri,
this.mimeType, {
required this.rotationDegrees,
required this.isFlipped,
required this.isAnimated,
required this.pageId,
required this.sizeBytes,
this.onBytesReceived,
});
}

View file

@ -6,6 +6,7 @@ import 'package:aves/model/entry/entry.dart';
import 'package:aves/model/entry/extensions/props.dart';
import 'package:aves/ref/upnp.dart';
import 'package:aves/services/common/services.dart';
import 'package:aves/services/media/media_fetch_service.dart';
import 'package:aves/widgets/dialogs/cast_dialog.dart';
import 'package:collection/collection.dart';
import 'package:dlna_dart/dlna.dart';
@ -108,14 +109,16 @@ mixin CastMixin {
Future<Response> _sendEntry(AvesEntry entry) async {
// TODO TLAD [cast] providing downscaled versions is suitable when properly serving with `MediaServer`, as the renderer can pick what is best
final bytes = await mediaFetchService.getImage(
final request = ImageRequest(
entry.uri,
entry.mimeType,
rotationDegrees: entry.rotationDegrees,
isFlipped: entry.isFlipped,
isAnimated: entry.isAnimated,
pageId: entry.pageId,
sizeBytes: entry.sizeBytes,
);
final bytes = await mediaFetchService.getEncodedImage(request);
debugPrint('cast: send ${bytes.length} bytes for entry=$entry');
return Response.ok(