Merge branch 'develop'

Thibault Deckers 2025-03-05 23:30:01 +01:00
commit d4d6b40be5
539 changed files with 131721 additions and 1617 deletions

@@ -1 +1 @@
-Subproject commit d8a9f9a52e5af486f80d932e838ee93861ffd863
+Subproject commit 35c388afb57ef061d06a39b537336c87e0e3d1b1


@@ -17,7 +17,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Harden Runner
-        uses: step-security/harden-runner@cb605e52c26070c328afc4562f0b4ada7618a84e # v2.10.4
+        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit


@@ -18,7 +18,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Harden Runner
-        uses: step-security/harden-runner@cb605e52c26070c328afc4562f0b4ada7618a84e # v2.10.4
+        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit
@@ -52,7 +52,7 @@ jobs:
          build-mode: manual
    steps:
      - name: Harden Runner
-        uses: step-security/harden-runner@cb605e52c26070c328afc4562f0b4ada7618a84e # v2.10.4
+        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit
@@ -69,7 +69,7 @@ jobs:
      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
-        uses: github/codeql-action/init@dd746615b3b9d728a6a37ca2045b68ca76d4841a # v3.28.8
+        uses: github/codeql-action/init@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10
        with:
          languages: ${{ matrix.language }}
          build-mode: ${{ matrix.build-mode }}
@@ -83,6 +83,6 @@ jobs:
          ./flutterw build apk --profile -t lib/main_play.dart --flavor play
      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@dd746615b3b9d728a6a37ca2045b68ca76d4841a # v3.28.8
+        uses: github/codeql-action/analyze@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10
        with:
          category: "/language:${{matrix.language}}"


@@ -18,7 +18,7 @@ jobs:
      id-token: write
    steps:
      - name: Harden Runner
-        uses: step-security/harden-runner@cb605e52c26070c328afc4562f0b4ada7618a84e # v2.10.4
+        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit
@@ -75,19 +75,19 @@ jobs:
          AVES_GOOGLE_API_KEY: ${{ secrets.AVES_GOOGLE_API_KEY }}
      - name: Generate artifact attestation
-        uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
+        uses: actions/attest-build-provenance@bd77c077858b8d561b7a36cbe48ef4cc642ca39d # v2.2.2
        with:
          subject-path: 'outputs/*'
      - name: Create GitHub release
-        uses: ncipollo/release-action@cdcc88a9acf3ca41c16c37bb7d21b9ad48560d87 # v1.15.0
+        uses: ncipollo/release-action@440c8c1cb0ed28b9f43e4d1d670870f059653174 # v1.16.0
        with:
          artifacts: "outputs/*"
          body: "[Changelog](https://github.com/${{ github.repository }}/blob/develop/CHANGELOG.md#${{ github.ref_name }})"
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Upload app bundle
-        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
+        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
        with:
          name: appbundle
          path: outputs/app-play-release.aab
@@ -98,7 +98,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Harden Runner
-        uses: step-security/harden-runner@cb605e52c26070c328afc4562f0b4ada7618a84e # v2.10.4
+        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit
@@ -106,7 +106,7 @@ jobs:
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      - name: Get appbundle from artifacts
-        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
+        uses: actions/download-artifact@cc203385981b70ca67e1cc392babf9cc229d5806 # v4.1.9
        with:
          name: appbundle


@@ -31,7 +31,7 @@ jobs:
    steps:
      - name: Harden Runner
-        uses: step-security/harden-runner@cb605e52c26070c328afc4562f0b4ada7618a84e # v2.10.4
+        uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0
        with:
          egress-policy: audit
@@ -41,7 +41,7 @@ jobs:
          persist-credentials: false
      - name: "Run analysis"
-        uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0
+        uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1
        with:
          results_file: results.sarif
          results_format: sarif
@@ -63,7 +63,7 @@ jobs:
      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
      # format to the repository Actions tab.
      - name: "Upload artifact"
-        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
+        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
        with:
          name: SARIF file
          path: results.sarif
@@ -71,6 +71,6 @@ jobs:
      # Upload the results to GitHub's code scanning dashboard.
      - name: "Upload to code-scanning"
-        uses: github/codeql-action/upload-sarif@dd746615b3b9d728a6a37ca2045b68ca76d4841a # v3.28.8
+        uses: github/codeql-action/upload-sarif@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10
        with:
          sarif_file: results.sarif

.gitignore

@@ -5,9 +5,11 @@
*.swp
.DS_Store
.atom/
+.build/
.buildlog/
.history
.svn/
+.swiftpm/
migrate_working_dir/

# IntelliJ related
@@ -27,7 +29,6 @@ migrate_working_dir/
.dart_tool/
.flutter-plugins
.flutter-plugins-dependencies
-.packages
.pub-cache/
.pub/
/build/


@@ -4,6 +4,28 @@ All notable changes to this project will be documented in this file.

## <a id="unreleased"></a>[Unreleased]

+## <a id="v1.12.4"></a>[v1.12.4] - 2025-03-05
+
+### Added
+- support for Samsung HEIC motion photos embedding video in sefd box
+- Cataloguing: identify video location from Apple QuickTime metadata, and 3GPP `loci` atom
+- Collection: stack RAW and HEIC with same file names
+- display home tile in side drawer when customized
+- Galician translation (thanks Rubén Castiñeiras Lorenzo)
+
+### Changed
+- increased precision of file modified date to milliseconds
+- upgraded Flutter to stable v3.29.0
+
+### Fixed
+- opening motion photo embedded video when video track is not the first one
+- some SVG rendering issues
+- decoding of SVG containing references to namespaces in !ATTLIST
+- fallback decoding of images packed in RGBA_1010102 config
+
## <a id="v1.12.3"></a>[v1.12.3] - 2025-02-06

### Added


@@ -35,7 +35,7 @@ It scans your media collection to identify **motion photos**, **panoramas** (aka

**Navigation and search** is an important part of Aves. The goal is for users to easily flow from albums to photos to tags to maps, etc.

-Aves integrates with Android (from KitKat to Android 14, including Android TV) with features such as **widgets**, **app shortcuts**, **screen saver** and **global search** handling. It also works as a **media viewer and picker**.
+Aves integrates with Android (including Android TV) with features such as **widgets**, **app shortcuts**, **screen saver** and **global search** handling. It also works as a **media viewer and picker**.

## Screenshots

android/.gitignore

@@ -5,9 +5,10 @@ gradle-wrapper.jar
/gradlew.bat
/local.properties
GeneratedPluginRegistrant.java
+.cxx/

# Remember to never publicly share your keystore.
-# See https://flutter.dev/docs/deployment/android#reference-the-keystore-from-the-app
+# See https://flutter.dev/to/reference-keystore
key.properties
**/*.keystore
**/*.jks


@@ -35,8 +35,6 @@ kotlin {
android {
    namespace 'deckers.thibault.aves'
    compileSdk 35
-    // cf https://developer.android.com/studio/projects/install-ndk#default-ndk-per-agp
-    ndkVersion '28.0.12916984'

    defaultConfig {
        applicationId packageName
@@ -161,7 +159,6 @@ dependencies {
    implementation 'androidx.security:security-crypto:1.1.0-alpha06'
    implementation 'androidx.work:work-runtime-ktx:2.10.0'
-    implementation 'com.caverock:androidsvg-aar:1.4'
    implementation 'com.commonsware.cwac:document:0.5.0'
    implementation 'com.drewnoakes:metadata-extractor:2.19.0'
    implementation "com.github.bumptech.glide:glide:$glide_version"
@@ -171,9 +168,11 @@ dependencies {
    // forked, built by JitPack:
    // - https://jitpack.io/p/deckerst/Android-TiffBitmapFactory
+    // - https://jitpack.io/p/deckerst/androidsvg
    // - https://jitpack.io/p/deckerst/mp4parser
    // - https://jitpack.io/p/deckerst/pixymeta-android
    implementation 'com.github.deckerst:Android-TiffBitmapFactory:3ed067f021'
+    implementation 'com.github.deckerst:androidsvg:cc9d59a88f'
    implementation 'com.github.deckerst.mp4parser:isoparser:d5caf7a3dd'
    implementation 'com.github.deckerst.mp4parser:muxer:d5caf7a3dd'
    implementation 'com.github.deckerst:pixymeta-android:71eee77dc4'


@@ -330,8 +330,8 @@
            android:name="flutterEmbedding"
            android:value="2" />
        <!--
-            Screenshot driver scenario is not supported by Impeller:
-            "Compressed screenshots not supported for Impeller"
+            Screenshot driver scenario is not supported by Impeller: "Compressed screenshots not supported for Impeller".
+            As of Flutter v3.29.0, switching pages with alpha transition yields artifacts when Impeller is enabled.
        -->
        <meta-data
            android:name="io.flutter.embedding.android.EnableImpeller"


@@ -15,6 +15,7 @@ import android.os.Looper
import android.util.Log
import android.util.SizeF
import android.widget.RemoteViews
+import androidx.core.graphics.createBitmap
import androidx.core.net.toUri
import app.loup.streams_channel.StreamsChannel
import deckers.thibault.aves.channel.AvesByteSendingMethodCodec
@@ -218,7 +219,7 @@ class HomeWidgetProvider : AppWidgetProvider() {
                val heightPx = (sizeDip.height * devicePixelRatio).roundToInt()
                try {
-                    val bitmap = Bitmap.createBitmap(widthPx, heightPx, Bitmap.Config.ARGB_8888).also {
+                    val bitmap = createBitmap(widthPx, heightPx, Bitmap.Config.ARGB_8888).also {
                        bitmaps.add(it)
                        it.copyPixelsFromBuffer(ByteBuffer.wrap(bytes))
                    }


@@ -19,6 +19,7 @@ import androidx.annotation.RequiresApi
import androidx.core.content.pm.ShortcutInfoCompat
import androidx.core.content.pm.ShortcutManagerCompat
import androidx.core.graphics.drawable.IconCompat
+import androidx.core.net.toUri
import app.loup.streams_channel.StreamsChannel
import deckers.thibault.aves.channel.AvesByteSendingMethodCodec
import deckers.thibault.aves.channel.calls.AccessibilityHandler
@@ -69,7 +70,6 @@ import kotlinx.coroutines.SupervisorJob
import kotlinx.coroutines.launch
import java.util.concurrent.CompletableFuture
import java.util.concurrent.ConcurrentHashMap
-import androidx.core.net.toUri

// `FlutterFragmentActivity` because of local auth plugin
open class MainActivity : FlutterFragmentActivity() {


@@ -2,12 +2,12 @@ package deckers.thibault.aves

import android.content.Intent
import android.net.Uri
+import androidx.core.net.toUri
import deckers.thibault.aves.channel.calls.AppAdapterHandler
import deckers.thibault.aves.model.FieldMap
import deckers.thibault.aves.utils.getParcelableExtraCompat
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
-import androidx.core.net.toUri

class WallpaperActivity : MainActivity() {
    private var originalIntent: String? = null


@@ -21,27 +21,28 @@ class AvesByteSendingMethodCodec private constructor() : MethodCodec {
        return STANDARD.encodeMethodCall(methodCall)
    }

+    override fun encodeErrorEnvelope(errorCode: String, errorMessage: String?, errorDetails: Any?): ByteBuffer {
+        return STANDARD.encodeErrorEnvelope(errorCode, errorMessage, errorDetails)
+    }
+
+    override fun encodeErrorEnvelopeWithStacktrace(errorCode: String, errorMessage: String?, errorDetails: Any?, errorStacktrace: String?): ByteBuffer {
+        return STANDARD.encodeErrorEnvelopeWithStacktrace(errorCode, errorMessage, errorDetails, errorStacktrace)
+    }
+
+    // `StandardMethodCodec` writes the result to a `ByteArrayOutputStream`, then writes the stream to a `ByteBuffer`.
+    // Here we only handle `ByteArray` results, but we avoid the intermediate stream.
    override fun encodeSuccessEnvelope(result: Any?): ByteBuffer {
        if (result is ByteArray) {
-            val size = result.size
-            return ByteBuffer.allocateDirect(4 + size).apply {
+            return ByteBuffer.allocateDirect(1 + result.size).apply {
+                // following `StandardMethodCodec`:
+                // First byte is zero in success case, and non-zero otherwise.
                put(0)
                put(result)
            }
        }
        Log.e(LOG_TAG, "encodeSuccessEnvelope failed with result=$result")
-        return ByteBuffer.allocateDirect(0)
-    }
-
-    override fun encodeErrorEnvelope(errorCode: String, errorMessage: String?, errorDetails: Any?): ByteBuffer {
-        Log.e(LOG_TAG, "encodeErrorEnvelope failed with errorCode=$errorCode, errorMessage=$errorMessage, errorDetails=$errorDetails")
-        return ByteBuffer.allocateDirect(0)
-    }
-
-    override fun encodeErrorEnvelopeWithStacktrace(errorCode: String, errorMessage: String?, errorDetails: Any?, errorStacktrace: String?): ByteBuffer {
-        Log.e(LOG_TAG, "encodeErrorEnvelopeWithStacktrace failed with errorCode=$errorCode, errorMessage=$errorMessage, errorDetails=$errorDetails, errorStacktrace=$errorStacktrace")
-        return ByteBuffer.allocateDirect(0)
+        return encodeErrorEnvelope("invalid-result-type", "Called success with a result which is not a `ByteArray`, type=${result?.javaClass}", null)
    }

    companion object {
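For context on the codec change above: `encodeSuccessEnvelope` now writes a direct buffer laid out as a single status byte (zero for success) followed by the raw payload, skipping `StandardMethodCodec`'s intermediate stream. A minimal sketch of a consumer for that layout, assuming the reader only needs to distinguish the success case (the real decoding happens in the matching Dart-side codec, which is not part of this diff):

import java.nio.ByteBuffer

// Illustrative only: reads the [status byte][payload] layout written by `encodeSuccessEnvelope`.
fun readSuccessEnvelope(envelope: ByteBuffer): ByteArray? {
    envelope.rewind()
    // first byte is zero in the success case, non-zero otherwise
    if (!envelope.hasRemaining() || envelope.get() != 0.toByte()) return null
    val payload = ByteArray(envelope.remaining())
    envelope.get(payload)
    return payload
}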


@@ -38,7 +38,6 @@ import deckers.thibault.aves.channel.calls.Coresult.Companion.safe
import deckers.thibault.aves.channel.calls.Coresult.Companion.safeSuspend
import deckers.thibault.aves.model.FieldMap
import deckers.thibault.aves.utils.BitmapUtils
-import deckers.thibault.aves.utils.BitmapUtils.getBytes
import deckers.thibault.aves.utils.LogUtils
import deckers.thibault.aves.utils.anyCauseIs
import deckers.thibault.aves.utils.getApplicationInfoCompat
@@ -154,7 +153,7 @@ class AppAdapterHandler(private val context: Context) : MethodCallHandler {
        // convert DIP to physical pixels here, instead of using `devicePixelRatio` in Flutter
        val density = context.resources.displayMetrics.density
        val size = (sizeDip * density).roundToInt()
-        var data: ByteArray? = null
+        var bytes: ByteArray? = null
        try {
            val iconResourceId = context.packageManager.getApplicationInfoCompat(packageName, 0).icon
            if (iconResourceId != Resources.ID_NULL) {
@@ -175,7 +174,9 @@ class AppAdapterHandler(private val context: Context) : MethodCallHandler {
            try {
                val bitmap = withContext(Dispatchers.IO) { target.get() }
-                data = bitmap?.getBytes(canHaveAlpha = true, recycle = false)
+                // do not recycle bitmaps fetched from `ContentResolver` as their lifecycle is unknown
+                val recycle = false
+                bytes = BitmapUtils.getRawBytes(bitmap, recycle = recycle)
            } catch (e: Exception) {
                Log.w(LOG_TAG, "failed to decode app icon for packageName=$packageName", e)
            }
@@ -185,8 +186,8 @@ class AppAdapterHandler(private val context: Context) : MethodCallHandler {
            Log.w(LOG_TAG, "failed to get app info for packageName=$packageName", e)
            return
        }
-        if (data != null) {
-            result.success(data)
+        if (bytes != null) {
+            result.success(bytes)
        } else {
            result.error("getAppIcon-null", "failed to get icon for packageName=$packageName", null)
        }


@@ -316,7 +316,7 @@ class DebugHandler(private val context: Context) : MethodCallHandler {
        }

        val sb = StringBuilder()
-        if (mimeType == MimeTypes.MP4) {
+        if (mimeType == MimeTypes.MP4 || MimeTypes.isHeic(mimeType)) {
            try {
                // we can skip uninteresting boxes with a seekable data source
                val pfd = StorageUtils.openInputFileDescriptor(context, uri) ?: throw Exception("failed to open file descriptor for uri=$uri")


@@ -10,7 +10,6 @@ import com.adobe.internal.xmp.XMPUtils
import com.bumptech.glide.load.resource.bitmap.TransformationUtils
import com.drew.metadata.xmp.XmpDirectory
import deckers.thibault.aves.channel.calls.Coresult.Companion.safe
-import deckers.thibault.aves.channel.calls.Coresult.Companion.safeSuspend
import deckers.thibault.aves.metadata.Metadata
import deckers.thibault.aves.metadata.MultiPage
import deckers.thibault.aves.metadata.metadataextractor.Helper
@@ -23,7 +22,6 @@ import deckers.thibault.aves.model.FieldMap
import deckers.thibault.aves.model.provider.ImageProvider
import deckers.thibault.aves.model.provider.ImageProviderFactory.getProvider
import deckers.thibault.aves.utils.BitmapUtils
-import deckers.thibault.aves.utils.BitmapUtils.getBytes
import deckers.thibault.aves.utils.FileUtils.transferFrom
import deckers.thibault.aves.utils.LogUtils
import deckers.thibault.aves.utils.MimeTypes
@@ -47,7 +45,7 @@ class EmbeddedDataHandler(private val context: Context) : MethodCallHandler {
    override fun onMethodCall(call: MethodCall, result: MethodChannel.Result) {
        when (call.method) {
-            "getExifThumbnails" -> ioScope.launch { safeSuspend(call, result, ::getExifThumbnails) }
+            "getExifThumbnails" -> ioScope.launch { safe(call, result, ::getExifThumbnails) }
            "extractGoogleDeviceItem" -> ioScope.launch { safe(call, result, ::extractGoogleDeviceItem) }
            "extractJpegMpfItem" -> ioScope.launch { safe(call, result, ::extractJpegMpfItem) }
            "extractMotionPhotoImage" -> ioScope.launch { safe(call, result, ::extractMotionPhotoImage) }
@@ -58,7 +56,7 @@ class EmbeddedDataHandler(private val context: Context) : MethodCallHandler {
        }
    }

-    private suspend fun getExifThumbnails(call: MethodCall, result: MethodChannel.Result) {
+    private fun getExifThumbnails(call: MethodCall, result: MethodChannel.Result) {
        val mimeType = call.argument<String>("mimeType")
        val uri = call.argument<String>("uri")?.toUri()
        val sizeBytes = call.argument<Number>("sizeBytes")?.toLong()
@@ -75,7 +73,9 @@ class EmbeddedDataHandler(private val context: Context) : MethodCallHandler {
                val orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL)
                exif.thumbnailBitmap?.let { bitmap ->
                    TransformationUtils.rotateImageExif(BitmapUtils.getBitmapPool(context), bitmap, orientation)?.let {
-                        it.getBytes(canHaveAlpha = false, recycle = false)?.let { bytes -> thumbnails.add(bytes) }
+                        // do not recycle bitmaps fetched from `ExifInterface` as their lifecycle is unknown
+                        val recycle = false
+                        BitmapUtils.getRawBytes(it, recycle = recycle)?.let { bytes -> thumbnails.add(bytes) }
                    }
                }
            }
@@ -186,7 +186,7 @@ class EmbeddedDataHandler(private val context: Context) : MethodCallHandler {
            return
        }

-        MultiPage.getMotionPhotoVideoSize(context, uri, mimeType, sizeBytes)?.let { videoSizeBytes ->
+        MultiPage.getTrailerVideoSize(context, uri, mimeType, sizeBytes)?.let { videoSizeBytes ->
            val imageSizeBytes = sizeBytes - videoSizeBytes
            StorageUtils.openInputStream(context, uri)?.let { input ->
                copyEmbeddedBytes(result, mimeType, displayName, input, imageSizeBytes)
@@ -207,11 +207,10 @@ class EmbeddedDataHandler(private val context: Context) : MethodCallHandler {
            return
        }

-        MultiPage.getMotionPhotoVideoSize(context, uri, mimeType, sizeBytes)?.let { videoSizeBytes ->
-            val videoStartOffset = sizeBytes - videoSizeBytes
+        MultiPage.getMotionPhotoVideoSizing(context, uri, mimeType, sizeBytes)?.let { (videoOffset, videoSize) ->
            StorageUtils.openInputStream(context, uri)?.let { input ->
-                input.skip(videoStartOffset)
-                copyEmbeddedBytes(result, MimeTypes.MP4, displayName, input, videoSizeBytes)
+                input.skip(videoOffset)
+                copyEmbeddedBytes(result, MimeTypes.MP4, displayName, input, videoSize)
            }
            return
        }
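The motion photo extraction above now receives the embedded video's offset and size as a pair, skips to the offset and copies exactly that many bytes. A minimal sketch of such a bounded copy, under the assumption that `copyEmbeddedBytes` does something along these lines (the helper name below is illustrative, not taken from the project):

import java.io.InputStream
import java.io.OutputStream

// Illustrative bounded copy: streams `length` bytes from an already positioned input.
fun copyRange(input: InputStream, output: OutputStream, length: Long) {
    val buffer = ByteArray(DEFAULT_BUFFER_SIZE)
    var remaining = length
    while (remaining > 0) {
        val read = input.read(buffer, 0, minOf(buffer.size.toLong(), remaining).toInt())
        if (read < 0) break // unexpected end of stream
        output.write(buffer, 0, read)
        remaining -= read
    }
}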


@@ -3,11 +3,12 @@ package deckers.thibault.aves.channel.calls
import android.content.Context
import android.graphics.Rect
import androidx.core.net.toUri
-import deckers.thibault.aves.channel.calls.Coresult.Companion.safeSuspend
+import deckers.thibault.aves.channel.calls.Coresult.Companion.safe
import deckers.thibault.aves.channel.calls.fetchers.RegionFetcher
import deckers.thibault.aves.channel.calls.fetchers.SvgRegionFetcher
import deckers.thibault.aves.channel.calls.fetchers.ThumbnailFetcher
import deckers.thibault.aves.channel.calls.fetchers.TiffRegionFetcher
+import deckers.thibault.aves.model.EntryFields
import deckers.thibault.aves.utils.MimeTypes
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
@@ -27,18 +28,18 @@ class MediaFetchBytesHandler(private val context: Context) : MethodCallHandler {
    override fun onMethodCall(call: MethodCall, result: MethodChannel.Result) {
        when (call.method) {
-            "getThumbnail" -> ioScope.launch { safeSuspend(call, result, ::getThumbnail) }
-            "getRegion" -> ioScope.launch { safeSuspend(call, result, ::getRegion) }
+            "getThumbnail" -> ioScope.launch { safe(call, result, ::getThumbnail) }
+            "getRegion" -> ioScope.launch { safe(call, result, ::getRegion) }
            else -> result.notImplemented()
        }
    }

-    private suspend fun getThumbnail(call: MethodCall, result: MethodChannel.Result) {
-        val uri = call.argument<String>("uri")
-        val mimeType = call.argument<String>("mimeType")
-        val dateModifiedSecs = call.argument<Number>("dateModifiedSecs")?.toLong()
-        val rotationDegrees = call.argument<Int>("rotationDegrees")
-        val isFlipped = call.argument<Boolean>("isFlipped")
+    private fun getThumbnail(call: MethodCall, result: MethodChannel.Result) {
+        val uri = call.argument<String>(EntryFields.URI)
+        val mimeType = call.argument<String>(EntryFields.MIME_TYPE)
+        val dateModifiedMillis = call.argument<Number>(EntryFields.DATE_MODIFIED_MILLIS)?.toLong()
+        val rotationDegrees = call.argument<Int>(EntryFields.ROTATION_DEGREES)
+        val isFlipped = call.argument<Boolean>(EntryFields.IS_FLIPPED)
        val widthDip = call.argument<Number>("widthDip")?.toDouble()
        val heightDip = call.argument<Number>("heightDip")?.toDouble()
        val pageId = call.argument<Int>("pageId")
@@ -55,7 +56,7 @@ class MediaFetchBytesHandler(private val context: Context) : MethodCallHandler {
            context = context,
            uri = uri,
            mimeType = mimeType,
-            dateModifiedSecs = dateModifiedSecs ?: (Date().time / 1000),
+            dateModifiedMillis = dateModifiedMillis ?: (Date().time),
            rotationDegrees = rotationDegrees,
            isFlipped = isFlipped,
            width = (widthDip * density).roundToInt(),
@@ -67,7 +68,7 @@ class MediaFetchBytesHandler(private val context: Context) : MethodCallHandler {
        ).fetch()
    }

-    private suspend fun getRegion(call: MethodCall, result: MethodChannel.Result) {
+    private fun getRegion(call: MethodCall, result: MethodChannel.Result) {
        val uri = call.argument<String>("uri")?.toUri()
        val mimeType = call.argument<String>("mimeType")
        val pageId = call.argument<Int>("pageId")
@@ -96,6 +97,7 @@ class MediaFetchBytesHandler(private val context: Context) : MethodCallHandler {
                imageHeight = imageHeight,
                result = result,
            )
+
            MimeTypes.TIFF -> TiffRegionFetcher(context).fetch(
                uri = uri,
                page = pageId ?: 0,
@@ -103,6 +105,7 @@ class MediaFetchBytesHandler(private val context: Context) : MethodCallHandler {
                regionRect = regionRect,
                result = result,
            )
+
            else -> regionFetcher.fetch(
                uri = uri,
                mimeType = mimeType,
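The string keys in `getThumbnail` are replaced with `EntryFields` constants. The `EntryFields` object itself is not part of this diff; its assumed shape, inferred only from the argument names used at these call sites, would be roughly:

// Assumed shape, inferred from the call sites above; the actual constant values are not shown in this diff.
object EntryFields {
    const val URI = "uri"
    const val MIME_TYPE = "mimeType"
    const val DATE_MODIFIED_MILLIS = "dateModifiedMillis"
    const val ROTATION_DEGREES = "rotationDegrees"
    const val IS_FLIPPED = "isFlipped"
}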


@@ -6,6 +6,7 @@ import android.media.MediaMetadataRetriever
import android.net.Uri
import android.os.Build
import android.util.Log
+import androidx.core.net.toUri
import com.adobe.internal.xmp.XMPException
import com.adobe.internal.xmp.XMPMeta
import com.adobe.internal.xmp.XMPMetaFactory
@@ -22,6 +23,7 @@ import com.drew.metadata.exif.GpsDirectory
import com.drew.metadata.file.FileTypeDirectory
import com.drew.metadata.gif.GifAnimationDirectory
import com.drew.metadata.iptc.IptcDirectory
+import com.drew.metadata.mov.metadata.QuickTimeMetadataDirectory
import com.drew.metadata.mp4.media.Mp4UuidBoxDirectory
import com.drew.metadata.png.PngDirectory
import com.drew.metadata.webp.WebpDirectory
@@ -100,6 +102,8 @@ import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.SupervisorJob
import kotlinx.coroutines.launch
import org.json.JSONObject
+import org.mp4parser.boxes.threegpp.ts26244.LocationInformationBox
+import org.mp4parser.tools.Path
import java.nio.charset.StandardCharsets
import java.text.DecimalFormat
import java.text.ParseException
@@ -107,7 +111,6 @@ import java.util.Locale
import kotlin.math.roundToInt
import kotlin.math.roundToLong
import androidx.exifinterface.media.ExifInterfaceFork as ExifInterface
-import androidx.core.net.toUri

class MetadataFetchHandler(private val context: Context) : MethodCallHandler {
    private val ioScope = CoroutineScope(SupervisorJob() + Dispatchers.IO)
@@ -449,9 +452,8 @@ class MetadataFetchHandler(private val context: Context) : MethodCallHandler {
        if (isVideo(mimeType)) {
            // `metadata-extractor` do not extract custom tags in user data box
-            val userDataDir = Mp4ParserHelper.getUserData(context, mimeType, uri)
-            if (userDataDir.isNotEmpty()) {
-                metadataMap[Metadata.DIR_MP4_USER_DATA] = userDataDir
+            Mp4ParserHelper.getUserDataBox(context, mimeType, uri)?.let { box ->
+                metadataMap[Metadata.DIR_MP4_USER_DATA] = Mp4ParserHelper.extractBoxFields(box)
            }

            // this is used as fallback when the video metadata cannot be found on the Dart side
@@ -470,6 +472,12 @@ class MetadataFetchHandler(private val context: Context) : MethodCallHandler {
            // Android's `MediaExtractor` and `MediaPlayer` cannot be used for details
            // about embedded images as they do not list them as separate tracks
            // and only identify at most one
+        } else if (isHeic(mimeType)) {
+            Mp4ParserHelper.getSamsungSefd(context, uri)?.let { (_, bytes) ->
+                metadataMap[Mp4ParserHelper.SAMSUNG_MAKERNOTE_BOX_TYPE] = hashMapOf(
+                    "Size" to bytes.size.toString(),
+                )
+            }
        }

        if (metadataMap.isNotEmpty()) {
@@ -527,8 +535,29 @@ class MetadataFetchHandler(private val context: Context) : MethodCallHandler {
        val metadataMap = HashMap<String, Any>()
        getCatalogMetadataByMetadataExtractor(mimeType, uri, path, sizeBytes, metadataMap)

        if (isVideo(mimeType) || isHeic(mimeType)) {
            getMultimediaCatalogMetadataByMediaMetadataRetriever(mimeType, uri, metadataMap)
+
+            // fallback to MP4 `loci` box for location
+            if (!metadataMap.contains(KEY_LATITUDE) || !metadataMap.contains(KEY_LONGITUDE)) {
+                Mp4ParserHelper.getUserDataBox(context, mimeType, uri)?.let { userDataBox ->
+                    Path.getPath<LocationInformationBox>(userDataBox, LocationInformationBox.TYPE)?.let { locationBox ->
+                        if (!locationBox.isParsed) {
+                            locationBox.parseDetails()
+                        }
+                        metadataMap[KEY_LATITUDE] = locationBox.latitude
+                        metadataMap[KEY_LONGITUDE] = locationBox.longitude
+                    }
+                }
+            }
+        }
+
+        if (isHeic(mimeType)) {
+            val flags = (metadataMap[KEY_FLAGS] ?: 0) as Int
+            if ((flags and MASK_IS_MOTION_PHOTO == 0) && MultiPage.isHeicSefdMotionPhoto(context, uri)) {
+                metadataMap[KEY_FLAGS] = flags or MASK_IS_MULTIPAGE or MASK_IS_MOTION_PHOTO
+            }
        }

        // report success even when empty
@@ -686,6 +715,22 @@ class MetadataFetchHandler(private val context: Context) : MethodCallHandler {
            }
        }

+        if (!metadataMap.containsKey(KEY_LATITUDE) || !metadataMap.containsKey(KEY_LONGITUDE)) {
+            for (dir in metadata.getDirectoriesOfType(QuickTimeMetadataDirectory::class.java)) {
+                dir.getSafeString(QuickTimeMetadataDirectory.TAG_LOCATION_ISO6709) { locationString ->
+                    val matcher = Metadata.VIDEO_LOCATION_PATTERN.matcher(locationString)
+                    if (matcher.find() && matcher.groupCount() >= 2) {
+                        val latitude = matcher.group(1)?.toDoubleOrNull()
+                        val longitude = matcher.group(2)?.toDoubleOrNull()
+                        if (latitude != null && longitude != null) {
+                            metadataMap[KEY_LATITUDE] = latitude
+                            metadataMap[KEY_LONGITUDE] = longitude
+                        }
+                    }
+                }
+            }
+        }
+
        when (mimeType) {
            MimeTypes.PNG -> {
                // date fallback to PNG time chunk
@@ -830,7 +875,7 @@ class MetadataFetchHandler(private val context: Context) : MethodCallHandler {
            retriever.getSafeDateMillis(MediaMetadataRetriever.METADATA_KEY_DATE) { metadataMap[KEY_DATE_MILLIS] = it }
        }

-        if (!metadataMap.containsKey(KEY_LATITUDE)) {
+        if (!metadataMap.containsKey(KEY_LATITUDE) || !metadataMap.containsKey(KEY_LONGITUDE)) {
            val locationString = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_LOCATION)
            if (locationString != null) {
                val matcher = Metadata.VIDEO_LOCATION_PATTERN.matcher(locationString)
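Both location fallbacks above run the QuickTime/3GPP location string through `Metadata.VIDEO_LOCATION_PATTERN`, whose definition is not part of this diff. A hedged sketch of equivalent parsing for ISO 6709 style strings such as "+37.5090+127.0243/" (the regex below is illustrative, not the project's pattern):

// Illustrative ISO 6709 parsing: captures signed latitude and longitude, ignoring optional altitude and CRS suffixes.
private val iso6709Pattern = Regex("""([+-]\d+(?:\.\d+)?)([+-]\d+(?:\.\d+)?)""")

fun parseIso6709(location: String): Pair<Double, Double>? {
    val match = iso6709Pattern.find(location) ?: return null
    val latitude = match.groupValues[1].toDoubleOrNull() ?: return null
    val longitude = match.groupValues[2].toDoubleOrNull() ?: return null
    return latitude to longitude
}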


@@ -4,15 +4,16 @@ import android.content.Context
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.graphics.BitmapRegionDecoder
+import android.graphics.ColorSpace
import android.graphics.Rect
import android.net.Uri
+import android.os.Build
import android.util.Log
import com.bumptech.glide.Glide
import deckers.thibault.aves.decoder.AvesAppGlideModule
import deckers.thibault.aves.decoder.MultiPageImage
import deckers.thibault.aves.utils.BitmapRegionDecoderCompat
-import deckers.thibault.aves.utils.BitmapUtils.ARGB_8888_BYTE_SIZE
-import deckers.thibault.aves.utils.BitmapUtils.getBytes
+import deckers.thibault.aves.utils.BitmapUtils
import deckers.thibault.aves.utils.LogUtils
import deckers.thibault.aves.utils.MathUtils
import deckers.thibault.aves.utils.MemoryUtils
@@ -32,7 +33,10 @@ class RegionFetcher internal constructor(
    private val exportUris = HashMap<Pair<Uri, Int?>, Uri>()

-    suspend fun fetch(
+    // return decoded bytes in ARGB_8888, with trailer bytes:
+    // - width (int32)
+    // - height (int32)
+    fun fetch(
        uri: Uri,
        mimeType: String,
        pageId: Int?,
@@ -98,20 +102,38 @@ class RegionFetcher internal constructor(
            }
        }

-        // use `Long` as rect size could be unexpectedly large and go beyond `Int` max
-        val targetBitmapSizeBytes: Long = ARGB_8888_BYTE_SIZE.toLong() * effectiveRect.width() * effectiveRect.height() / effectiveSampleSize
+        val options = BitmapFactory.Options().apply {
+            inSampleSize = effectiveSampleSize
+            // Specifying preferred config and color space avoids the need for conversion afterwards,
+            // but may prevent decoding (e.g. from RGBA_1010102 to ARGB_8888 on some devices).
+            inPreferredConfig = PREFERRED_CONFIG
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+                inPreferredColorSpace = ColorSpace.get(ColorSpace.Named.SRGB)
+            }
+        }
+
+        val pixelCount = effectiveRect.width() * effectiveRect.height() / effectiveSampleSize
+        val targetBitmapSizeBytes = BitmapUtils.getExpectedImageSize(pixelCount.toLong(), options.inPreferredConfig)
        if (!MemoryUtils.canAllocate(targetBitmapSizeBytes)) {
            // decoding a region that large would yield an OOM when creating the bitmap
            result.error("fetch-large-region", "Region too large for uri=$uri regionRect=$regionRect", null)
            return
        }

-        val options = BitmapFactory.Options().apply {
-            inSampleSize = effectiveSampleSize
+        var bitmap = decoder.decodeRegion(effectiveRect, options)
+        if (bitmap == null) {
+            // retry without specifying config or color space,
+            // falling back to custom byte conversion afterwards
+            options.inPreferredConfig = null
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && options.inPreferredColorSpace != null) {
+                options.inPreferredColorSpace = null
+            }
+            bitmap = decoder.decodeRegion(effectiveRect, options)
        }
-        val bitmap = decoder.decodeRegion(effectiveRect, options)
-        if (bitmap != null) {
-            result.success(bitmap.getBytes(MimeTypes.canHaveAlpha(mimeType), recycle = true))
+
+        val bytes = BitmapUtils.getRawBytes(bitmap, recycle = true)
+        if (bytes != null) {
+            result.success(bytes)
        } else {
            result.error("fetch-null", "failed to decode region for uri=$uri regionRect=$regionRect", null)
        }
@@ -165,5 +187,6 @@ class RegionFetcher internal constructor(
    companion object {
        private val LOG_TAG = LogUtils.createTag<RegionFetcher>()
+        private val PREFERRED_CONFIG = Bitmap.Config.ARGB_8888
    }
}
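The new comment on `fetch` documents the return convention: raw ARGB_8888 pixels followed by the width and height as 32-bit integers. `BitmapUtils.getRawBytes` itself is not part of this diff; a minimal sketch of that convention, with the exact byte order left as an assumption:

import android.graphics.Bitmap
import java.nio.ByteBuffer

// Illustrative only: raw pixel bytes followed by a width/height trailer, as described in the comment on `fetch`.
fun rawBytesWithSizeTrailer(bitmap: Bitmap, recycle: Boolean): ByteArray {
    val buffer = ByteBuffer.allocate(bitmap.byteCount + 2 * Int.SIZE_BYTES)
    bitmap.copyPixelsToBuffer(buffer) // advances the position past the pixel data
    buffer.putInt(bitmap.width)
    buffer.putInt(bitmap.height)
    if (recycle) bitmap.recycle()
    return buffer.array()
}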


@@ -6,14 +6,14 @@ import android.graphics.Canvas
import android.graphics.Rect
import android.graphics.RectF
import android.net.Uri
+import androidx.core.graphics.createBitmap
import com.caverock.androidsvg.PreserveAspectRatio
import com.caverock.androidsvg.RenderOptions
import com.caverock.androidsvg.SVG
import com.caverock.androidsvg.SVGParseException
import deckers.thibault.aves.metadata.SVGParserBufferedInputStream
import deckers.thibault.aves.metadata.SvgHelper.normalizeSize
-import deckers.thibault.aves.utils.BitmapUtils.ARGB_8888_BYTE_SIZE
-import deckers.thibault.aves.utils.BitmapUtils.getBytes
+import deckers.thibault.aves.utils.BitmapUtils
import deckers.thibault.aves.utils.MemoryUtils
import deckers.thibault.aves.utils.StorageUtils
import io.flutter.plugin.common.MethodChannel
@@ -24,7 +24,7 @@ class SvgRegionFetcher internal constructor(
) {
    private var lastSvgRef: LastSvgRef? = null

-    suspend fun fetch(
+    fun fetch(
        uri: Uri,
        sizeBytes: Long?,
        scale: Int,
@@ -91,25 +91,25 @@ class SvgRegionFetcher internal constructor(
            val targetBitmapWidth = regionRect.width()
            val targetBitmapHeight = regionRect.height()
+            val canvasWidth = targetBitmapWidth + bleedX * 2
+            val canvasHeight = targetBitmapHeight + bleedY * 2

-            // use `Long` as rect size could be unexpectedly large and go beyond `Int` max
-            val targetBitmapSizeBytes: Long = ARGB_8888_BYTE_SIZE.toLong() * targetBitmapWidth * targetBitmapHeight
+            val config = PREFERRED_CONFIG
+            val pixelCount = canvasWidth * canvasHeight
+            val targetBitmapSizeBytes = BitmapUtils.getExpectedImageSize(pixelCount.toLong(), config)
            if (!MemoryUtils.canAllocate(targetBitmapSizeBytes)) {
                // decoding a region that large would yield an OOM when creating the bitmap
                result.error("fetch-read-large-region", "SVG region too large for uri=$uri regionRect=$regionRect", null)
                return
            }

-            var bitmap = Bitmap.createBitmap(
-                targetBitmapWidth + bleedX * 2,
-                targetBitmapHeight + bleedY * 2,
-                Bitmap.Config.ARGB_8888
-            )
+            var bitmap = createBitmap(canvasWidth, canvasHeight, config)
            val canvas = Canvas(bitmap)
            svg.renderToCanvas(canvas, renderOptions)
            bitmap = Bitmap.createBitmap(bitmap, bleedX, bleedY, targetBitmapWidth, targetBitmapHeight)
-            result.success(bitmap.getBytes(canHaveAlpha = true, recycle = true))
+            val bytes = BitmapUtils.getRawBytes(bitmap, recycle = true)
+            result.success(bytes)
        } catch (e: Exception) {
            result.error("fetch-read-exception", "failed to initialize region decoder for uri=$uri regionRect=$regionRect", e.message)
        }
@@ -119,4 +119,8 @@ class SvgRegionFetcher internal constructor(
        val uri: Uri,
        val svg: SVG,
    )
+
+    companion object {
+        private val PREFERRED_CONFIG = Bitmap.Config.ARGB_8888
+    }
}


@@ -7,6 +7,7 @@ import android.os.Build
import android.provider.MediaStore
import android.util.Size
import androidx.annotation.RequiresApi
+import androidx.core.net.toUri
import com.bumptech.glide.Glide
import com.bumptech.glide.load.DecodeFormat
import com.bumptech.glide.load.engine.DiskCacheStrategy
@@ -14,8 +15,8 @@ import com.bumptech.glide.request.RequestOptions
import com.bumptech.glide.signature.ObjectKey
import deckers.thibault.aves.decoder.AvesAppGlideModule
import deckers.thibault.aves.decoder.MultiPageImage
+import deckers.thibault.aves.utils.BitmapUtils
import deckers.thibault.aves.utils.BitmapUtils.applyExifOrientation
-import deckers.thibault.aves.utils.BitmapUtils.getBytes
import deckers.thibault.aves.utils.MimeTypes
import deckers.thibault.aves.utils.MimeTypes.SVG
import deckers.thibault.aves.utils.MimeTypes.isVideo
@@ -24,13 +25,12 @@ import deckers.thibault.aves.utils.MimeTypes.needRotationAfterGlide
import deckers.thibault.aves.utils.StorageUtils
import deckers.thibault.aves.utils.UriUtils.tryParseId
import io.flutter.plugin.common.MethodChannel
-import androidx.core.net.toUri

class ThumbnailFetcher internal constructor(
    private val context: Context,
    uri: String,
    private val mimeType: String,
-    private val dateModifiedSecs: Long,
+    private val dateModifiedMillis: Long,
    private val rotationDegrees: Int,
    private val isFlipped: Boolean,
    width: Int?,
@@ -48,7 +48,7 @@ class ThumbnailFetcher internal constructor(
    private val multiPageFetch = pageId != null && MultiPageImage.isSupported(mimeType)
    private val customFetch = svgFetch || tiffFetch || multiPageFetch

-    suspend fun fetch() {
+    fun fetch() {
        var bitmap: Bitmap? = null
        var exception: Exception? = null

@@ -77,8 +77,11 @@ class ThumbnailFetcher internal constructor(
            }
        }

-        if (bitmap != null) {
-            result.success(bitmap.getBytes(MimeTypes.canHaveAlpha(mimeType), recycle = false, quality = quality))
+        // do not recycle bitmaps fetched from `ContentResolver` or Glide as their lifecycle is unknown
+        val recycle = false
+        val bytes = BitmapUtils.getRawBytes(bitmap, recycle = recycle)
+        if (bytes != null) {
+            result.success(bytes)
        } else {
            var errorDetails: String? = exception?.message
            if (errorDetails?.isNotEmpty() == true) {
@@ -119,7 +122,7 @@ class ThumbnailFetcher internal constructor(
        // add signature to ignore cache for images which got modified but kept the same URI
        var options = RequestOptions()
            .format(if (quality == 100) DecodeFormat.PREFER_ARGB_8888 else DecodeFormat.PREFER_RGB_565)
-            .signature(ObjectKey("$dateModifiedSecs-$rotationDegrees-$isFlipped-$width-$pageId"))
+            .signature(ObjectKey("$dateModifiedMillis-$rotationDegrees-$isFlipped-$width-$pageId"))
            .override(width, height)
        if (isVideo(mimeType)) {
            options = options.diskCacheStrategy(DiskCacheStrategy.RESOURCE)


@@ -1,9 +1,10 @@
package deckers.thibault.aves.channel.calls.fetchers

import android.content.Context
+import android.graphics.Bitmap
import android.graphics.Rect
import android.net.Uri
-import deckers.thibault.aves.utils.BitmapUtils.getBytes
+import deckers.thibault.aves.utils.BitmapUtils
import io.flutter.plugin.common.MethodChannel
import org.beyka.tiffbitmapfactory.DecodeArea
import org.beyka.tiffbitmapfactory.TiffBitmapFactory
@@ -11,7 +12,7 @@ import org.beyka.tiffbitmapfactory.TiffBitmapFactory
class TiffRegionFetcher internal constructor(
    private val context: Context,
) {
-    suspend fun fetch(
+    fun fetch(
        uri: Uri,
        page: Int,
        sampleSize: Int,
@@ -31,9 +32,10 @@ class TiffRegionFetcher internal constructor(
            inSampleSize = sampleSize
            inDecodeArea = DecodeArea(regionRect.left, regionRect.top, regionRect.width(), regionRect.height())
        }
-        val bitmap = TiffBitmapFactory.decodeFileDescriptor(fd, options)
-        if (bitmap != null) {
-            result.success(bitmap.getBytes(canHaveAlpha = true, recycle = true))
+        val bitmap: Bitmap? = TiffBitmapFactory.decodeFileDescriptor(fd, options)
+        val bytes = BitmapUtils.getRawBytes(bitmap, recycle = true)
+        if (bytes != null) {
+            result.success(bytes)
        } else {
            result.error("getRegion-tiff-null", "failed to decode region for uri=$uri page=$page regionRect=$regionRect", null)
        }


@@ -77,19 +77,30 @@ class ActivityWindowHandler(private val activity: Activity) : WindowHandler(acti
        )
    }

-    override fun supportsHdr(call: MethodCall, result: MethodChannel.Result) {
-        result.success(Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && activity.getDisplayCompat()?.isHdr ?: false)
+    override fun supportsWideGamut(call: MethodCall, result: MethodChannel.Result) {
+        result.success(Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && activity.resources.configuration.isScreenWideColorGamut)
    }

-    override fun setHdrColorMode(call: MethodCall, result: MethodChannel.Result) {
-        val on = call.argument<Boolean>("on")
-        if (on == null) {
-            result.error("setHdrColorMode-args", "missing arguments", null)
+    override fun supportsHdr(call: MethodCall, result: MethodChannel.Result) {
+        result.success(Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && activity.resources.configuration.isScreenHdr)
+    }
+
+    override fun setColorMode(call: MethodCall, result: MethodChannel.Result) {
+        val wideColorGamut = call.argument<Boolean>("wideColorGamut")
+        val hdr = call.argument<Boolean>("hdr")
+        if (wideColorGamut == null || hdr == null) {
+            result.error("setColorMode-args", "missing arguments", null)
            return
        }
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
-            activity.window.colorMode = if (on) ActivityInfo.COLOR_MODE_HDR else ActivityInfo.COLOR_MODE_DEFAULT
+            activity.window.colorMode = if (hdr) {
+                ActivityInfo.COLOR_MODE_HDR
+            } else if (wideColorGamut) {
+                ActivityInfo.COLOR_MODE_WIDE_COLOR_GAMUT
+            } else {
+                ActivityInfo.COLOR_MODE_DEFAULT
+            }
        }
        result.success(null)
    }
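The former HDR toggle becomes a three-way choice: HDR wins over wide colour gamut, which wins over the default colour mode. The same decision, extracted as a pure function purely for illustration (not code from the project):

import android.content.pm.ActivityInfo

// Illustrative helper mirroring the mapping applied to `activity.window.colorMode` above (API 26+).
fun resolveColorMode(wideColorGamut: Boolean, hdr: Boolean): Int = when {
    hdr -> ActivityInfo.COLOR_MODE_HDR
    wideColorGamut -> ActivityInfo.COLOR_MODE_WIDE_COLOR_GAMUT
    else -> ActivityInfo.COLOR_MODE_DEFAULT
}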


@@ -29,11 +29,15 @@ class ServiceWindowHandler(service: Service) : WindowHandler(service) {
        result.success(HashMap<String, Any>())
    }

+    override fun supportsWideGamut(call: MethodCall, result: MethodChannel.Result) {
+        result.success(false)
+    }
+
    override fun supportsHdr(call: MethodCall, result: MethodChannel.Result) {
        result.success(false)
    }

-    override fun setHdrColorMode(call: MethodCall, result: MethodChannel.Result) {
+    override fun setColorMode(call: MethodCall, result: MethodChannel.Result) {
        result.success(null)
    }
}


@@ -18,8 +18,9 @@ abstract class WindowHandler(private val contextWrapper: ContextWrapper) : Metho
            "requestOrientation" -> Coresult.safe(call, result, ::requestOrientation)
            "isCutoutAware" -> Coresult.safe(call, result, ::isCutoutAware)
            "getCutoutInsets" -> Coresult.safe(call, result, ::getCutoutInsets)
+            "supportsWideGamut" -> Coresult.safe(call, result, ::supportsWideGamut)
            "supportsHdr" -> Coresult.safe(call, result, ::supportsHdr)
-            "setHdrColorMode" -> Coresult.safe(call, result, ::setHdrColorMode)
+            "setColorMode" -> Coresult.safe(call, result, ::setColorMode)
            else -> result.notImplemented()
        }
    }
@@ -46,9 +47,11 @@ abstract class WindowHandler(private val contextWrapper: ContextWrapper) : Metho
    abstract fun getCutoutInsets(call: MethodCall, result: MethodChannel.Result)

+    abstract fun supportsWideGamut(call: MethodCall, result: MethodChannel.Result)
+
    abstract fun supportsHdr(call: MethodCall, result: MethodChannel.Result)

-    abstract fun setHdrColorMode(call: MethodCall, result: MethodChannel.Result)
+    abstract fun setColorMode(call: MethodCall, result: MethodChannel.Result)

    companion object {
        private val LOG_TAG = LogUtils.createTag<WindowHandler>()

View file

@ -8,8 +8,8 @@ import android.util.Log
import androidx.core.net.toUri import androidx.core.net.toUri
import com.bumptech.glide.Glide import com.bumptech.glide.Glide
import deckers.thibault.aves.decoder.AvesAppGlideModule import deckers.thibault.aves.decoder.AvesAppGlideModule
import deckers.thibault.aves.utils.BitmapUtils
import deckers.thibault.aves.utils.BitmapUtils.applyExifOrientation import deckers.thibault.aves.utils.BitmapUtils.applyExifOrientation
import deckers.thibault.aves.utils.BitmapUtils.getBytes
import deckers.thibault.aves.utils.LogUtils import deckers.thibault.aves.utils.LogUtils
import deckers.thibault.aves.utils.MemoryUtils import deckers.thibault.aves.utils.MemoryUtils
import deckers.thibault.aves.utils.MimeTypes import deckers.thibault.aves.utils.MimeTypes
@ -24,6 +24,7 @@ import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.SupervisorJob import kotlinx.coroutines.SupervisorJob
import kotlinx.coroutines.launch import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext import kotlinx.coroutines.withContext
import java.io.ByteArrayInputStream
import java.io.InputStream import java.io.InputStream
class ImageByteStreamHandler(private val context: Context, private val arguments: Any?) : EventChannel.StreamHandler { class ImageByteStreamHandler(private val context: Context, private val arguments: Any?) : EventChannel.StreamHandler {
@ -80,11 +81,13 @@ class ImageByteStreamHandler(private val context: Context, private val arguments
return return
} }
val decoded = arguments["decoded"] as Boolean
val mimeType = arguments["mimeType"] as String? val mimeType = arguments["mimeType"] as String?
val uri = (arguments["uri"] as String?)?.toUri() val uri = (arguments["uri"] as String?)?.toUri()
val sizeBytes = (arguments["sizeBytes"] as Number?)?.toLong() val sizeBytes = (arguments["sizeBytes"] as Number?)?.toLong()
val rotationDegrees = arguments["rotationDegrees"] as Int val rotationDegrees = arguments["rotationDegrees"] as Int
val isFlipped = arguments["isFlipped"] as Boolean val isFlipped = arguments["isFlipped"] as Boolean
val isAnimated = arguments["isAnimated"] as Boolean
val pageId = arguments["pageId"] as Int? val pageId = arguments["pageId"] as Int?
if (mimeType == null || uri == null) { if (mimeType == null || uri == null) {
@ -93,19 +96,31 @@ class ImageByteStreamHandler(private val context: Context, private val arguments
return return
} }
if (isVideo(mimeType)) { if (canDecodeWithFlutter(mimeType, isAnimated) && !decoded) {
streamVideoByGlide(uri, mimeType, sizeBytes)
} else if (!canDecodeWithFlutter(mimeType, pageId, rotationDegrees, isFlipped)) {
// decode exotic format on platform side, then encode it in portable format for Flutter
streamImageByGlide(uri, pageId, mimeType, sizeBytes, rotationDegrees, isFlipped)
} else {
// to be decoded by Flutter // to be decoded by Flutter
streamImageAsIs(uri, mimeType, sizeBytes) streamOriginalEncodedBytes(uri, mimeType, sizeBytes)
} else if (isVideo(mimeType)) {
streamVideoByGlide(
uri = uri,
mimeType = mimeType,
sizeBytes = sizeBytes,
decoded = decoded,
)
} else {
streamImageByGlide(
uri = uri,
pageId = pageId,
mimeType = mimeType,
sizeBytes = sizeBytes,
rotationDegrees = rotationDegrees,
isFlipped = isFlipped,
decoded = decoded,
)
} }
endOfStream() endOfStream()
} }
private fun streamImageAsIs(uri: Uri, mimeType: String, sizeBytes: Long?) { private fun streamOriginalEncodedBytes(uri: Uri, mimeType: String, sizeBytes: Long?) {
if (!MemoryUtils.canAllocate(sizeBytes)) { if (!MemoryUtils.canAllocate(sizeBytes)) {
error("streamImage-image-read-large", "original image too large at $sizeBytes bytes, for mimeType=$mimeType uri=$uri", null) error("streamImage-image-read-large", "original image too large at $sizeBytes bytes, for mimeType=$mimeType uri=$uri", null)
return return
@ -125,6 +140,7 @@ class ImageByteStreamHandler(private val context: Context, private val arguments
sizeBytes: Long?, sizeBytes: Long?,
rotationDegrees: Int, rotationDegrees: Int,
isFlipped: Boolean, isFlipped: Boolean,
decoded: Boolean,
) { ) {
val target = Glide.with(context) val target = Glide.with(context)
.asBitmap() .asBitmap()
@ -137,9 +153,16 @@ class ImageByteStreamHandler(private val context: Context, private val arguments
bitmap = applyExifOrientation(context, bitmap, rotationDegrees, isFlipped) bitmap = applyExifOrientation(context, bitmap, rotationDegrees, isFlipped)
} }
if (bitmap != null) { if (bitmap != null) {
val bytes = bitmap.getBytes(MimeTypes.canHaveAlpha(mimeType), recycle = false) // do not recycle bitmaps fetched from Glide as their lifecycle is unknown
val recycle = false
val bytes = if (decoded) {
BitmapUtils.getRawBytes(bitmap, recycle = recycle)
} else {
BitmapUtils.getEncodedBytes(bitmap, canHaveAlpha = MimeTypes.canHaveAlpha(mimeType), recycle = recycle)
}
if (MemoryUtils.canAllocate(sizeBytes)) { if (MemoryUtils.canAllocate(sizeBytes)) {
success(bytes) streamBytes(ByteArrayInputStream(bytes))
} else { } else {
error("streamImage-image-decode-large", "decoded image too large at $sizeBytes bytes, for mimeType=$mimeType uri=$uri", null) error("streamImage-image-decode-large", "decoded image too large at $sizeBytes bytes, for mimeType=$mimeType uri=$uri", null)
} }
@ -153,7 +176,7 @@ class ImageByteStreamHandler(private val context: Context, private val arguments
} }
} }
private suspend fun streamVideoByGlide(uri: Uri, mimeType: String, sizeBytes: Long?) { private suspend fun streamVideoByGlide(uri: Uri, mimeType: String, sizeBytes: Long?, decoded: Boolean) {
val target = Glide.with(context) val target = Glide.with(context)
.asBitmap() .asBitmap()
.apply(AvesAppGlideModule.uncachedFullImageOptions) .apply(AvesAppGlideModule.uncachedFullImageOptions)
@ -162,9 +185,16 @@ class ImageByteStreamHandler(private val context: Context, private val arguments
try { try {
val bitmap = withContext(Dispatchers.IO) { target.get() } val bitmap = withContext(Dispatchers.IO) { target.get() }
if (bitmap != null) { if (bitmap != null) {
val bytes = bitmap.getBytes(canHaveAlpha = false, recycle = false) // do not recycle bitmaps fetched from Glide as their lifecycle is unknown
val recycle = false
val bytes = if (decoded) {
BitmapUtils.getRawBytes(bitmap, recycle = recycle)
} else {
BitmapUtils.getEncodedBytes(bitmap, canHaveAlpha = false, recycle = recycle)
}
if (MemoryUtils.canAllocate(sizeBytes)) { if (MemoryUtils.canAllocate(sizeBytes)) {
success(bytes) streamBytes(ByteArrayInputStream(bytes))
} else { } else {
error("streamImage-video-large", "decoded image too large at $sizeBytes bytes, for mimeType=$mimeType uri=$uri", null) error("streamImage-video-large", "decoded image too large at $sizeBytes bytes, for mimeType=$mimeType uri=$uri", null)
} }
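The new `decoded` flag selects between shipping encoded bytes (as before, presumably via Bitmap.compress) and shipping raw pixel data. The exact layout produced by `BitmapUtils.getRawBytes` (including its trailer fields, see the BitmapUtils changes further below) is project-specific; the core of the raw path is just a pixel-buffer copy, roughly:

import android.graphics.Bitmap
import java.nio.ByteBuffer

// rough sketch only: copy a bitmap's pixels into a byte array;
// the project's BitmapUtils.getRawBytes also appends trailer fields and may differ
fun copyRawPixels(bitmap: Bitmap): ByteArray {
    val buffer = ByteBuffer.allocate(bitmap.byteCount)
    bitmap.copyPixelsToBuffer(buffer)
    return buffer.array()
}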

View file

@ -19,12 +19,13 @@ class MediaStoreStreamHandler(private val context: Context, arguments: Any?) : E
private lateinit var eventSink: EventSink private lateinit var eventSink: EventSink
private lateinit var handler: Handler private lateinit var handler: Handler
private var knownEntries: Map<Long?, Int?>? = null // knownEntries: map of contentId -> dateModifiedMillis
private var knownEntries: Map<Long?, Long?>? = null
private var directory: String? = null private var directory: String? = null
init { init {
if (arguments is Map<*, *>) { if (arguments is Map<*, *>) {
knownEntries = (arguments["knownEntries"] as? Map<*, *>?)?.map { (it.key as Number?)?.toLong() to it.value as Int? }?.toMap() knownEntries = (arguments["knownEntries"] as? Map<*, *>?)?.map { (it.key as Number?)?.toLong() to (it.value as Number?)?.toLong() }?.toMap()
directory = arguments["directory"] as String? directory = arguments["directory"] as String?
} }
} }

View file

@ -4,6 +4,7 @@ import android.content.Context
import android.graphics.Bitmap import android.graphics.Bitmap
import android.graphics.Canvas import android.graphics.Canvas
import android.net.Uri import android.net.Uri
import androidx.core.graphics.createBitmap
import com.bumptech.glide.Glide import com.bumptech.glide.Glide
import com.bumptech.glide.Priority import com.bumptech.glide.Priority
import com.bumptech.glide.Registry import com.bumptech.glide.Registry
@ -68,7 +69,7 @@ internal class SvgFetcher(val model: SvgImage, val width: Int, val height: Int)
bitmapWidth = width bitmapWidth = width
bitmapHeight = ceil(svgHeight * width / svgWidth).toInt() bitmapHeight = ceil(svgHeight * width / svgWidth).toInt()
} }
val bitmap = Bitmap.createBitmap(bitmapWidth, bitmapHeight, Bitmap.Config.ARGB_8888) val bitmap = createBitmap(bitmapWidth, bitmapHeight, Bitmap.Config.ARGB_8888)
val canvas = Canvas(bitmap) val canvas = Canvas(bitmap)
svg.renderToCanvas(canvas) svg.renderToCanvas(canvas)
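androidx.core.graphics.createBitmap is a thin KTX wrapper around Bitmap.createBitmap, with ARGB_8888 as the default config, so the explicit config argument above could even be omitted:

import androidx.core.graphics.createBitmap

// equivalent to Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
fun newArgb8888Bitmap(width: Int, height: Int) = createBitmap(width, height)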

View file

@ -3,6 +3,7 @@ package deckers.thibault.aves.decoder
import android.content.Context import android.content.Context
import android.graphics.Bitmap import android.graphics.Bitmap
import android.net.Uri import android.net.Uri
import androidx.core.graphics.scale
import com.bumptech.glide.Glide import com.bumptech.glide.Glide
import com.bumptech.glide.Priority import com.bumptech.glide.Priority
import com.bumptech.glide.Registry import com.bumptech.glide.Registry
@ -82,7 +83,9 @@ internal class TiffFetcher(val model: TiffImage, val width: Int, val height: Int
inSampleSize = sampleSize inSampleSize = sampleSize
} }
try { try {
val bitmap = TiffBitmapFactory.decodeFileDescriptor(fd, options) val bitmap: Bitmap? = TiffBitmapFactory.decodeFileDescriptor(fd, options)
// calling `TiffBitmapFactory.closeFd(fd)` after decoding yields a segmentation fault
if (bitmap == null) { if (bitmap == null) {
callback.onLoadFailed(Exception("Decoding full TIFF yielded null bitmap")) callback.onLoadFailed(Exception("Decoding full TIFF yielded null bitmap"))
} else if (customSize) { } else if (customSize) {
@ -96,7 +99,7 @@ internal class TiffFetcher(val model: TiffImage, val width: Int, val height: Int
dstWidth = width dstWidth = width
dstHeight = (width / aspectRatio).toInt() dstHeight = (width / aspectRatio).toInt()
} }
callback.onDataReady(Bitmap.createScaledBitmap(bitmap, dstWidth, dstHeight, true)) callback.onDataReady(bitmap.scale(dstWidth, dstHeight))
} else { } else {
callback.onDataReady(bitmap) callback.onDataReady(bitmap)
} }
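Likewise, androidx.core.graphics.scale is the KTX counterpart of Bitmap.createScaledBitmap, with bilinear filtering enabled by default:

import android.graphics.Bitmap
import androidx.core.graphics.scale

// equivalent to Bitmap.createScaledBitmap(src, dstWidth, dstHeight, /* filter = */ true)
fun scaleBitmap(src: Bitmap, dstWidth: Int, dstHeight: Int): Bitmap = src.scale(dstWidth, dstHeight)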

View file

@ -2,6 +2,7 @@ package deckers.thibault.aves.decoder
import android.content.Context import android.content.Context
import android.graphics.Bitmap import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.media.MediaMetadataRetriever import android.media.MediaMetadataRetriever
import android.net.Uri import android.net.Uri
import android.os.Build import android.os.Build
@ -20,7 +21,6 @@ import com.bumptech.glide.load.model.MultiModelLoaderFactory
import com.bumptech.glide.module.LibraryGlideModule import com.bumptech.glide.module.LibraryGlideModule
import com.bumptech.glide.signature.ObjectKey import com.bumptech.glide.signature.ObjectKey
import deckers.thibault.aves.utils.BitmapUtils import deckers.thibault.aves.utils.BitmapUtils
import deckers.thibault.aves.utils.BitmapUtils.getBytes
import deckers.thibault.aves.utils.MemoryUtils import deckers.thibault.aves.utils.MemoryUtils
import deckers.thibault.aves.utils.StorageUtils.openMetadataRetriever import deckers.thibault.aves.utils.StorageUtils.openMetadataRetriever
import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.CoroutineScope
@ -28,45 +28,54 @@ import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.SupervisorJob import kotlinx.coroutines.SupervisorJob
import kotlinx.coroutines.launch import kotlinx.coroutines.launch
import java.io.ByteArrayInputStream import java.io.ByteArrayInputStream
import java.io.InputStream import java.io.IOException
import kotlin.math.ceil import kotlin.math.ceil
import kotlin.math.roundToInt import kotlin.math.roundToInt
@GlideModule @GlideModule
class VideoThumbnailGlideModule : LibraryGlideModule() { class VideoThumbnailGlideModule : LibraryGlideModule() {
override fun registerComponents(context: Context, glide: Glide, registry: Registry) { override fun registerComponents(context: Context, glide: Glide, registry: Registry) {
registry.append(VideoThumbnail::class.java, InputStream::class.java, VideoThumbnailLoader.Factory()) registry.append(VideoThumbnail::class.java, Bitmap::class.java, VideoThumbnailLoader.Factory())
} }
} }
class VideoThumbnail(val context: Context, val uri: Uri) class VideoThumbnail(val context: Context, val uri: Uri)
internal class VideoThumbnailLoader : ModelLoader<VideoThumbnail, InputStream> { internal class VideoThumbnailLoader : ModelLoader<VideoThumbnail, Bitmap> {
override fun buildLoadData(model: VideoThumbnail, width: Int, height: Int, options: Options): ModelLoader.LoadData<InputStream> { override fun buildLoadData(model: VideoThumbnail, width: Int, height: Int, options: Options): ModelLoader.LoadData<Bitmap> {
return ModelLoader.LoadData(ObjectKey(model.uri), VideoThumbnailFetcher(model, width, height)) return ModelLoader.LoadData(ObjectKey(model.uri), VideoThumbnailFetcher(model, width, height))
} }
override fun handles(model: VideoThumbnail): Boolean = true override fun handles(model: VideoThumbnail): Boolean = true
internal class Factory : ModelLoaderFactory<VideoThumbnail, InputStream> { internal class Factory : ModelLoaderFactory<VideoThumbnail, Bitmap> {
override fun build(multiFactory: MultiModelLoaderFactory): ModelLoader<VideoThumbnail, InputStream> = VideoThumbnailLoader() override fun build(multiFactory: MultiModelLoaderFactory): ModelLoader<VideoThumbnail, Bitmap> = VideoThumbnailLoader()
override fun teardown() {} override fun teardown() {}
} }
} }
internal class VideoThumbnailFetcher(private val model: VideoThumbnail, val width: Int, val height: Int) : DataFetcher<InputStream> { internal class VideoThumbnailFetcher(private val model: VideoThumbnail, val width: Int, val height: Int) : DataFetcher<Bitmap> {
private val ioScope = CoroutineScope(SupervisorJob() + Dispatchers.IO) private val ioScope = CoroutineScope(SupervisorJob() + Dispatchers.IO)
override fun loadData(priority: Priority, callback: DataCallback<in InputStream>) { override fun loadData(priority: Priority, callback: DataCallback<in Bitmap>) {
ioScope.launch { ioScope.launch {
val retriever = openMetadataRetriever(model.context, model.uri) val retriever = openMetadataRetriever(model.context, model.uri)
if (retriever == null) { if (retriever == null) {
callback.onLoadFailed(Exception("failed to initialize MediaMetadataRetriever for uri=${model.uri}")) callback.onLoadFailed(Exception("failed to initialize MediaMetadataRetriever for uri=${model.uri}"))
} else { } else {
try { try {
var bytes = retriever.embeddedPicture var bitmap: Bitmap? = null
if (bytes == null) {
retriever.embeddedPicture?.let { bytes ->
try {
bitmap = BitmapFactory.decodeStream(ByteArrayInputStream(bytes))
} catch (e: IOException) {
// ignore
}
}
if (bitmap == null) {
// there is no consistent strategy across devices to match // there is no consistent strategy across devices to match
// the thumbnails returned by the content resolver / Media Store // the thumbnails returned by the content resolver / Media Store
// so we derive one in an arbitrary way // so we derive one in an arbitrary way
@ -111,8 +120,9 @@ internal class VideoThumbnailFetcher(private val model: VideoThumbnail, val widt
} }
// the returned frame is already rotated according to the video metadata // the returned frame is already rotated according to the video metadata
val frame = if (dstWidth > 0 && dstHeight > 0 && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) { bitmap = if (dstWidth > 0 && dstHeight > 0 && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
val targetBitmapSizeBytes: Long = FORMAT_BYTE_SIZE.toLong() * dstWidth * dstHeight val pixelCount = dstWidth * dstHeight
val targetBitmapSizeBytes = BitmapUtils.getExpectedImageSize(pixelCount.toLong(), getPreferredConfig())
if (!MemoryUtils.canAllocate(targetBitmapSizeBytes)) { if (!MemoryUtils.canAllocate(targetBitmapSizeBytes)) {
throw Exception("not enough memory to allocate $targetBitmapSizeBytes bytes for the scaled frame at $dstWidth x $dstHeight") throw Exception("not enough memory to allocate $targetBitmapSizeBytes bytes for the scaled frame at $dstWidth x $dstHeight")
} }
@ -122,7 +132,8 @@ internal class VideoThumbnailFetcher(private val model: VideoThumbnail, val widt
retriever.getScaledFrameAtTime(timeMicros, option, dstWidth, dstHeight) retriever.getScaledFrameAtTime(timeMicros, option, dstWidth, dstHeight)
} }
} else { } else {
val targetBitmapSizeBytes: Long = (FORMAT_BYTE_SIZE.toLong() * videoWidth * videoHeight).toLong() val pixelCount = videoWidth * videoHeight
val targetBitmapSizeBytes = BitmapUtils.getExpectedImageSize(pixelCount.toLong(), getPreferredConfig())
if (!MemoryUtils.canAllocate(targetBitmapSizeBytes)) { if (!MemoryUtils.canAllocate(targetBitmapSizeBytes)) {
throw Exception("not enough memory to allocate $targetBitmapSizeBytes bytes for the full frame at $videoWidth x $videoHeight") throw Exception("not enough memory to allocate $targetBitmapSizeBytes bytes for the full frame at $videoWidth x $videoHeight")
} }
@ -132,13 +143,12 @@ internal class VideoThumbnailFetcher(private val model: VideoThumbnail, val widt
retriever.getFrameAtTime(timeMicros, option) retriever.getFrameAtTime(timeMicros, option)
} }
} }
bytes = frame?.getBytes(canHaveAlpha = false, recycle = false)
} }
if (bytes != null) { if (bitmap == null) {
callback.onDataReady(ByteArrayInputStream(bytes)) callback.onLoadFailed(Exception("failed to get embedded picture or any frame for uri=${model.uri}"))
} else { } else {
callback.onLoadFailed(Exception("failed to get embedded picture or any frame")) callback.onDataReady(bitmap)
} }
} catch (e: Exception) { } catch (e: Exception) {
callback.onLoadFailed(e) callback.onLoadFailed(e)
@ -151,8 +161,14 @@ internal class VideoThumbnailFetcher(private val model: VideoThumbnail, val widt
} }
@RequiresApi(Build.VERSION_CODES.P) @RequiresApi(Build.VERSION_CODES.P)
private fun getBitmapParams() = MediaMetadataRetriever.BitmapParams().apply { private fun getBitmapParams(): MediaMetadataRetriever.BitmapParams {
preferredConfig = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) { val params = MediaMetadataRetriever.BitmapParams()
params.preferredConfig = this.getPreferredConfig()
return params
}
private fun getPreferredConfig(): Bitmap.Config {
return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
// improved precision with the same memory cost as `ARGB_8888` (4 bytes per pixel) // improved precision with the same memory cost as `ARGB_8888` (4 bytes per pixel)
// for wide-gamut and HDR content which does not require alpha blending // for wide-gamut and HDR content which does not require alpha blending
Bitmap.Config.RGBA_1010102 Bitmap.Config.RGBA_1010102
@ -167,12 +183,7 @@ internal class VideoThumbnailFetcher(private val model: VideoThumbnail, val widt
// cannot cancel // cannot cancel
override fun cancel() {} override fun cancel() {}
override fun getDataClass(): Class<InputStream> = InputStream::class.java override fun getDataClass(): Class<Bitmap> = Bitmap::class.java
override fun getDataSource(): DataSource = DataSource.LOCAL override fun getDataSource(): DataSource = DataSource.LOCAL
companion object {
// same for either `ARGB_8888` or `RGBA_1010102`
private const val FORMAT_BYTE_SIZE = BitmapUtils.ARGB_8888_BYTE_SIZE
}
} }
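BitmapUtils.getExpectedImageSize replaces the hard-coded 4-bytes-per-pixel constant so the pre-allocation check tracks the preferred config. A minimal sketch of such an estimate (the actual helper may differ in detail; RGBA_F16 and other configs are omitted here):

import android.graphics.Bitmap

// rough per-config byte estimate; ARGB_8888 and RGBA_1010102 both take 4 bytes per pixel
fun expectedByteSize(pixelCount: Long, config: Bitmap.Config): Long {
    val bytesPerPixel = when (config) {
        Bitmap.Config.ALPHA_8 -> 1
        Bitmap.Config.RGB_565 -> 2
        else -> 4
    }
    return pixelCount * bytesPerPixel
}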

View file

@ -32,6 +32,7 @@ import org.mp4parser.boxes.iso14496.part12.SegmentIndexBox
import org.mp4parser.boxes.iso14496.part12.TrackHeaderBox import org.mp4parser.boxes.iso14496.part12.TrackHeaderBox
import org.mp4parser.boxes.iso14496.part12.UserDataBox import org.mp4parser.boxes.iso14496.part12.UserDataBox
import org.mp4parser.boxes.threegpp.ts26244.AuthorBox import org.mp4parser.boxes.threegpp.ts26244.AuthorBox
import org.mp4parser.boxes.threegpp.ts26244.LocationInformationBox
import org.mp4parser.support.AbstractBox import org.mp4parser.support.AbstractBox
import org.mp4parser.support.Matrix import org.mp4parser.support.Matrix
import org.mp4parser.tools.Path import org.mp4parser.tools.Path
@ -45,6 +46,15 @@ object Mp4ParserHelper {
// arbitrary size to detect boxes that may yield an OOM // arbitrary size to detect boxes that may yield an OOM
private const val BOX_SIZE_DANGER_THRESHOLD = 3 * (1 shl 20) // MB private const val BOX_SIZE_DANGER_THRESHOLD = 3 * (1 shl 20) // MB
const val SAMSUNG_MAKERNOTE_BOX_TYPE = "sefd"
const val SEFD_MOTION_PHOTO_NAME = "MotionPhoto_Data"
private val largerTypeWhitelist = listOf(
// HEIC motion photo may contain Samsung maker notes in `sefd` box,
// including a video larger than the danger threshold
SAMSUNG_MAKERNOTE_BOX_TYPE,
)
fun computeEdits(context: Context, uri: Uri, modifier: (isoFile: IsoFile) -> Unit): List<Pair<Long, ByteArray>> { fun computeEdits(context: Context, uri: Uri, modifier: (isoFile: IsoFile) -> Unit): List<Pair<Long, ByteArray>> {
// we can skip uninteresting boxes with a seekable data source // we can skip uninteresting boxes with a seekable data source
val pfd = StorageUtils.openInputFileDescriptor(context, uri) ?: throw Exception("failed to open file descriptor for uri=$uri") val pfd = StorageUtils.openInputFileDescriptor(context, uri) ?: throw Exception("failed to open file descriptor for uri=$uri")
@ -133,6 +143,35 @@ object Mp4ParserHelper {
return false return false
} }
// returns the offset and data of the Samsung maker notes box
fun getSamsungSefd(context: Context, uri: Uri): Pair<Long, ByteArray>? {
try {
// we can skip uninteresting boxes with a seekable data source
val pfd = StorageUtils.openInputFileDescriptor(context, uri) ?: throw Exception("failed to open file descriptor for uri=$uri")
pfd.use {
FileInputStream(it.fileDescriptor).use { stream ->
stream.channel.use { channel ->
IsoFile(channel, metadataBoxParser()).use { isoFile ->
var offset = 0L
for (box in isoFile.boxes) {
if (box is UnknownBox && box.type == SAMSUNG_MAKERNOTE_BOX_TYPE) {
if (!box.isParsed) {
box.parseDetails()
}
return Pair(offset + 8, box.data.toByteArray()) // skip 8 bytes for box header
}
offset += box.size
}
}
}
}
}
} catch (e: Exception) {
Log.w(LOG_TAG, "failed to read sefd box", e)
}
return null
}
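The `+ 8` above skips the standard ISO BMFF box header, i.e. a 32-bit big-endian size followed by a 4-character type. For illustration, a minimal header reader (the 64-bit `largesize` variant used when size == 1 is omitted):

import java.io.DataInputStream
import java.io.InputStream

// illustrative: read one ISO BMFF box header (size + type) from a stream
fun readBoxHeader(input: InputStream): Pair<Long, String> {
    val data = DataInputStream(input)
    val size = data.readInt().toLong() and 0xFFFFFFFFL // unsigned 32-bit size, includes the 8 header bytes
    val type = ByteArray(4).also { data.readFully(it) }.toString(Charsets.US_ASCII)
    return size to type
}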
// extensions // extensions
fun IsoFile.updateLocation(locationIso6709: String?) { fun IsoFile.updateLocation(locationIso6709: String?) {
@ -272,18 +311,18 @@ object Mp4ParserHelper {
) )
setBoxSkipper { type, size -> setBoxSkipper { type, size ->
if (skippedTypes.contains(type)) return@setBoxSkipper true if (skippedTypes.contains(type)) return@setBoxSkipper true
if (size > BOX_SIZE_DANGER_THRESHOLD) throw Exception("box (type=$type size=$size) is too large") if (size > BOX_SIZE_DANGER_THRESHOLD && !largerTypeWhitelist.contains(type)) throw Exception("box (type=$type size=$size) is too large")
false false
} }
} }
fun getUserData( fun getUserDataBox(
context: Context, context: Context,
mimeType: String, mimeType: String,
uri: Uri, uri: Uri,
): MutableMap<String, String> { ): UserDataBox? {
val fields = HashMap<String, String>() if (mimeType != MimeTypes.MP4) return null
if (mimeType != MimeTypes.MP4) return fields
try { try {
// we can skip uninteresting boxes with a seekable data source // we can skip uninteresting boxes with a seekable data source
val pfd = StorageUtils.openInputFileDescriptor(context, uri) ?: throw Exception("failed to open file descriptor for uri=$uri") val pfd = StorageUtils.openInputFileDescriptor(context, uri) ?: throw Exception("failed to open file descriptor for uri=$uri")
@ -292,10 +331,7 @@ object Mp4ParserHelper {
stream.channel.use { channel -> stream.channel.use { channel ->
// creating `IsoFile` with a `File` or a `File.inputStream()` yields `No such device` // creating `IsoFile` with a `File` or a `File.inputStream()` yields `No such device`
IsoFile(channel, metadataBoxParser()).use { isoFile -> IsoFile(channel, metadataBoxParser()).use { isoFile ->
val userDataBox = Path.getPath<UserDataBox>(isoFile.movieBox, UserDataBox.TYPE) return Path.getPath(isoFile.movieBox, UserDataBox.TYPE)
if (userDataBox != null) {
fields.putAll(extractBoxFields(userDataBox))
}
} }
} }
} }
@ -305,10 +341,10 @@ object Mp4ParserHelper {
} catch (e: Exception) { } catch (e: Exception) {
Log.w(LOG_TAG, "failed to get User Data box by MP4 parser for mimeType=$mimeType uri=$uri", e) Log.w(LOG_TAG, "failed to get User Data box by MP4 parser for mimeType=$mimeType uri=$uri", e)
} }
return fields return null
} }
private fun extractBoxFields(container: Container): HashMap<String, String> { fun extractBoxFields(container: Container): HashMap<String, String> {
val fields = HashMap<String, String>() val fields = HashMap<String, String>()
for (box in container.boxes) { for (box in container.boxes) {
if (box is AbstractBox && !box.isParsed) { if (box is AbstractBox && !box.isParsed) {
@ -322,9 +358,20 @@ object Mp4ParserHelper {
is AppleGPSCoordinatesBox -> fields[key] = box.value is AppleGPSCoordinatesBox -> fields[key] = box.value
is AppleItemListBox -> fields.putAll(extractBoxFields(box)) is AppleItemListBox -> fields.putAll(extractBoxFields(box))
is AppleVariableSignedIntegerBox -> fields[key] = box.value.toString() is AppleVariableSignedIntegerBox -> fields[key] = box.value.toString()
is Utf8AppleDataBox -> fields[key] = box.value
is HandlerBox -> {} is HandlerBox -> {}
is LocationInformationBox -> {
hashMapOf<String, String>(
"Language" to box.language,
"Name" to box.name,
"Role" to box.role.toString(),
"Longitude" to box.longitude.toString(),
"Latitude" to box.latitude.toString(),
"Altitude" to box.altitude.toString(),
"Astronomical Body" to box.astronomicalBody,
"Additional Notes" to box.additionalNotes,
).forEach { (k, v) -> fields["$key/$k"] = v }
}
is MetaBox -> { is MetaBox -> {
val handlerBox = Path.getPath<HandlerBox>(box, HandlerBox.TYPE).apply { parseDetails() } val handlerBox = Path.getPath<HandlerBox>(box, HandlerBox.TYPE).apply { parseDetails() }
when (val handlerType = handlerBox?.handlerType ?: MetaBox.TYPE) { when (val handlerType = handlerBox?.handlerType ?: MetaBox.TYPE) {
@ -349,6 +396,8 @@ object Mp4ParserHelper {
} }
} }
is Utf8AppleDataBox -> fields[key] = box.value
else -> fields[key] = box.toString() else -> fields[key] = box.toString()
} }
} }
@ -361,6 +410,7 @@ object Mp4ParserHelper {
"catg" -> "Category" "catg" -> "Category"
"covr" -> "Cover Art" "covr" -> "Cover Art"
"keyw" -> "Keyword" "keyw" -> "Keyword"
"loci" -> "Location"
"mcvr" -> "Preview Image" "mcvr" -> "Preview Image"
"pcst" -> "Podcast" "pcst" -> "Podcast"
"SDLN" -> "Play Mode" "SDLN" -> "Play Mode"

View file

@ -37,6 +37,8 @@ import androidx.exifinterface.media.ExifInterfaceFork as ExifInterface
object MultiPage { object MultiPage {
private val LOG_TAG = LogUtils.createTag<MultiPage>() private val LOG_TAG = LogUtils.createTag<MultiPage>()
// TODO TLAD more generic support (e.g. 0x00000014 + `ftyp` + `qt `)
// atom length (variable, e.g. `0x00000018`) + atom type (`ftyp`) + type (variable, e.g. `mp42`, `qt`)
private val heicMotionPhotoVideoStartIndicator = byteArrayOf(0x00, 0x00, 0x00, 0x18) + "ftypmp42".toByteArray() private val heicMotionPhotoVideoStartIndicator = byteArrayOf(0x00, 0x00, 0x00, 0x18) + "ftypmp42".toByteArray()
// page info // page info
@ -84,6 +86,26 @@ object MultiPage {
return tracks return tracks
} }
fun isHeicSefdMotionPhoto(context: Context, uri: Uri): Boolean {
return getHeicSefdMotionPhotoVideoSizing(context, uri) != null
}
private fun getHeicSefdMotionPhotoVideoSizing(context: Context, uri: Uri): Pair<Long, Long>? {
Mp4ParserHelper.getSamsungSefd(context, uri)?.let { (sefdOffset, sefdBytes) ->
// we could properly parse each tag until we find the "embedded video" tag (0x0a30)
// but it seems that decoding the SEFT trailer is necessary for this,
// so we simply search for the "MotionPhoto_Data" sequence instead
val name = Mp4ParserHelper.SEFD_MOTION_PHOTO_NAME
val index = sefdBytes.indexOfBytes(name.toByteArray(Charsets.UTF_8))
if (index != -1) {
val videoOffset = sefdOffset + index + name.length
val videoSize = sefdBytes.size - (videoOffset - sefdOffset)
return Pair(videoOffset, videoSize)
}
}
return null
}
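`indexOfBytes` is an extension defined elsewhere in the project; conceptually it is a plain forward scan for a byte pattern, along these lines (sketch only, the actual implementation may differ):

// naive byte-sequence search, illustrative only
fun ByteArray.indexOfBytesSketch(pattern: ByteArray): Int {
    if (pattern.isEmpty() || pattern.size > size) return -1
    outer@ for (start in 0..size - pattern.size) {
        for (i in pattern.indices) {
            if (this[start + i] != pattern[i]) continue@outer
        }
        return start
    }
    return -1
}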
private fun getJpegMpfPrimaryRotation(context: Context, uri: Uri, sizeBytes: Long): Int { private fun getJpegMpfPrimaryRotation(context: Context, uri: Uri, sizeBytes: Long): Int {
val mimeType = MimeTypes.JPEG val mimeType = MimeTypes.JPEG
var rotationDegrees = 0 var rotationDegrees = 0
@ -245,40 +267,38 @@ object MultiPage {
fun getMotionPhotoPages(context: Context, uri: Uri, mimeType: String, sizeBytes: Long): ArrayList<FieldMap> { fun getMotionPhotoPages(context: Context, uri: Uri, mimeType: String, sizeBytes: Long): ArrayList<FieldMap> {
val pages = ArrayList<FieldMap>() val pages = ArrayList<FieldMap>()
getMotionPhotoVideoSize(context, uri, mimeType, sizeBytes)?.let { videoSizeBytes -> getMotionPhotoVideoInfo(context, uri, mimeType, sizeBytes)?.let { videoInfo ->
getTrailerVideoInfo(context, uri, fileSizeBytes = sizeBytes, videoSizeBytes = videoSizeBytes)?.let { videoInfo -> // set the original image as the first and default track
// set the original image as the first and default track var pageIndex = 0
var pageIndex = 0 pages.add(
pages.add( hashMapOf(
hashMapOf( KEY_PAGE to pageIndex++,
KEY_PAGE to pageIndex++, KEY_MIME_TYPE to mimeType,
KEY_MIME_TYPE to mimeType, KEY_IS_DEFAULT to true,
KEY_IS_DEFAULT to true,
)
) )
// add video tracks from the appended video )
videoInfo.getString(MediaFormat.KEY_MIME)?.let { mime -> // add video tracks from the appended video
if (MimeTypes.isVideo(mime)) { videoInfo.getString(MediaFormat.KEY_MIME)?.let { mime ->
val page: FieldMap = hashMapOf( if (MimeTypes.isVideo(mime)) {
KEY_PAGE to pageIndex++, val page: FieldMap = hashMapOf(
KEY_MIME_TYPE to MimeTypes.MP4, KEY_PAGE to pageIndex++,
KEY_IS_DEFAULT to false, KEY_MIME_TYPE to MimeTypes.MP4,
) KEY_IS_DEFAULT to false,
videoInfo.getSafeInt(MediaFormat.KEY_WIDTH) { page[KEY_WIDTH] = it } )
videoInfo.getSafeInt(MediaFormat.KEY_HEIGHT) { page[KEY_HEIGHT] = it } videoInfo.getSafeInt(MediaFormat.KEY_WIDTH) { page[KEY_WIDTH] = it }
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { videoInfo.getSafeInt(MediaFormat.KEY_HEIGHT) { page[KEY_HEIGHT] = it }
videoInfo.getSafeInt(MediaFormat.KEY_ROTATION) { page[KEY_ROTATION_DEGREES] = it } if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
} videoInfo.getSafeInt(MediaFormat.KEY_ROTATION) { page[KEY_ROTATION_DEGREES] = it }
videoInfo.getSafeLong(MediaFormat.KEY_DURATION) { page[KEY_DURATION] = it / 1000 }
pages.add(page)
} }
videoInfo.getSafeLong(MediaFormat.KEY_DURATION) { page[KEY_DURATION] = it / 1000 }
pages.add(page)
} }
} }
} }
return pages return pages
} }
fun getMotionPhotoVideoSize(context: Context, uri: Uri, mimeType: String, sizeBytes: Long): Long? { fun getTrailerVideoSize(context: Context, uri: Uri, mimeType: String, sizeBytes: Long): Long? {
if (MimeTypes.isHeic(mimeType)) { if (MimeTypes.isHeic(mimeType)) {
// XMP in HEIC motion photos (as taken with a Samsung Camera v12.0.01.50) indicates an `Item:Length` of 68 bytes for the video. // XMP in HEIC motion photos (as taken with a Samsung Camera v12.0.01.50) indicates an `Item:Length` of 68 bytes for the video.
// This item does not contain the video itself, but only some kind of metadata (no doc, no spec), // This item does not contain the video itself, but only some kind of metadata (no doc, no spec),
@ -325,22 +345,35 @@ object MultiPage {
return offsetFromEnd return offsetFromEnd
} }
fun getTrailerVideoInfo(context: Context, uri: Uri, fileSizeBytes: Long, videoSizeBytes: Long): MediaFormat? { private fun getMotionPhotoVideoInfo(context: Context, uri: Uri, mimeType: String, sizeBytes: Long): MediaFormat? {
var format: MediaFormat? = null getMotionPhotoVideoSizing(context, uri, mimeType, sizeBytes)?.let { (videoOffset, videoSize) ->
return getEmbedVideoInfo(context, uri, videoOffset, videoSize)
}
return null
}
fun getTrailerVideoInfo(context: Context, uri: Uri, fileSize: Long, videoSize: Long): MediaFormat? {
return getEmbedVideoInfo(context, uri, videoOffset = fileSize - videoSize, videoSize = videoSize)
}
private fun getEmbedVideoInfo(context: Context, uri: Uri, videoOffset: Long, videoSize: Long): MediaFormat? {
val extractor = MediaExtractor() val extractor = MediaExtractor()
var pfd: ParcelFileDescriptor? = null var pfd: ParcelFileDescriptor? = null
try { try {
val videoStartOffset = fileSizeBytes - videoSizeBytes
pfd = context.contentResolver.openFileDescriptor(uri, "r") pfd = context.contentResolver.openFileDescriptor(uri, "r")
pfd?.fileDescriptor?.let { fd -> pfd?.fileDescriptor?.let { fd ->
extractor.setDataSource(fd, videoStartOffset, videoSizeBytes) extractor.setDataSource(fd, videoOffset, videoSize)
if (extractor.trackCount > 0) { // video track may be after an audio track
// only consider the first track to represent the appended video for (trackIndex in 0 until extractor.trackCount) {
val trackIndex = 0
try { try {
format = extractor.getTrackFormat(trackIndex) val format = extractor.getTrackFormat(trackIndex)
format.getString(MediaFormat.KEY_MIME)?.let {
if (MimeTypes.isVideo(it)) {
return format
}
}
} catch (e: Exception) { } catch (e: Exception) {
Log.w(LOG_TAG, "failed to get motion photo track information for uri=$uri, track num=$trackIndex", e) Log.w(LOG_TAG, "failed to get track information for uri=$uri, track num=$trackIndex", e)
} }
} }
} }
@ -350,7 +383,22 @@ object MultiPage {
extractor.release() extractor.release()
pfd?.close() pfd?.close()
} }
return format return null
}
fun getMotionPhotoVideoSizing(context: Context, uri: Uri, mimeType: String, sizeBytes: Long): Pair<Long, Long>? {
// default to trailer videos
getTrailerVideoSize(context, uri, mimeType, sizeBytes)?.let { videoSize ->
val videoOffset = sizeBytes - videoSize
return Pair(videoOffset, videoSize)
}
if (MimeTypes.isHeic(mimeType)) {
// fallback to video within Samsung SEFD box
return getHeicSefdMotionPhotoVideoSizing(context, uri)
}
return null
} }
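Both paths yield a (videoOffset, videoSize) pair into the original file: for trailer videos the offset is simply fileSize - videoSize, while for SEFD motion photos it points inside the Samsung maker notes box. A hypothetical consumer copying the embedded video out would look like this (names are illustrative):

import java.io.File
import java.io.RandomAccessFile

// illustrative: extract the embedded video bytes given the sizing computed above
fun extractEmbeddedVideo(file: File, videoOffset: Long, videoSize: Long): ByteArray {
    RandomAccessFile(file, "r").use { raf ->
        raf.seek(videoOffset)
        val bytes = ByteArray(videoSize.toInt())
        raf.readFully(bytes)
        return bytes
    }
}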
fun getTiffPages(context: Context, uri: Uri): ArrayList<FieldMap> { fun getTiffPages(context: Context, uri: Uri): ArrayList<FieldMap> {

View file

@ -18,7 +18,7 @@ object EntryFields {
const val IS_FLIPPED = "isFlipped" // boolean const val IS_FLIPPED = "isFlipped" // boolean
const val DATE_ADDED_SECS = "dateAddedSecs" // long const val DATE_ADDED_SECS = "dateAddedSecs" // long
const val DATE_MODIFIED_SECS = "dateModifiedSecs" // long const val DATE_MODIFIED_MILLIS = "dateModifiedMillis" // long
const val SOURCE_DATE_TAKEN_MILLIS = "sourceDateTakenMillis" // long const val SOURCE_DATE_TAKEN_MILLIS = "sourceDateTakenMillis" // long
const val DURATION_MILLIS = "durationMillis" // long const val DURATION_MILLIS = "durationMillis" // long

View file

@ -5,6 +5,7 @@ import android.content.Context
import android.graphics.BitmapFactory import android.graphics.BitmapFactory
import android.media.MediaMetadataRetriever import android.media.MediaMetadataRetriever
import android.net.Uri import android.net.Uri
import androidx.core.net.toUri
import com.drew.metadata.avi.AviDirectory import com.drew.metadata.avi.AviDirectory
import com.drew.metadata.exif.ExifIFD0Directory import com.drew.metadata.exif.ExifIFD0Directory
import com.drew.metadata.jpeg.JpegDirectory import com.drew.metadata.jpeg.JpegDirectory
@ -29,7 +30,6 @@ import deckers.thibault.aves.utils.UriUtils.tryParseId
import org.beyka.tiffbitmapfactory.TiffBitmapFactory import org.beyka.tiffbitmapfactory.TiffBitmapFactory
import java.io.IOException import java.io.IOException
import androidx.exifinterface.media.ExifInterfaceFork as ExifInterface import androidx.exifinterface.media.ExifInterfaceFork as ExifInterface
import androidx.core.net.toUri
class SourceEntry { class SourceEntry {
private val origin: Int private val origin: Int
@ -42,7 +42,7 @@ class SourceEntry {
private var sourceRotationDegrees: Int? = null private var sourceRotationDegrees: Int? = null
private var sizeBytes: Long? = null private var sizeBytes: Long? = null
private var dateAddedSecs: Long? = null private var dateAddedSecs: Long? = null
private var dateModifiedSecs: Long? = null private var dateModifiedMillis: Long? = null
private var sourceDateTakenMillis: Long? = null private var sourceDateTakenMillis: Long? = null
private var durationMillis: Long? = null private var durationMillis: Long? = null
@ -65,16 +65,16 @@ class SourceEntry {
sizeBytes = toLong(map[EntryFields.SIZE_BYTES]) sizeBytes = toLong(map[EntryFields.SIZE_BYTES])
title = map[EntryFields.TITLE] as String? title = map[EntryFields.TITLE] as String?
dateAddedSecs = toLong(map[EntryFields.DATE_ADDED_SECS]) dateAddedSecs = toLong(map[EntryFields.DATE_ADDED_SECS])
dateModifiedSecs = toLong(map[EntryFields.DATE_MODIFIED_SECS]) dateModifiedMillis = toLong(map[EntryFields.DATE_MODIFIED_MILLIS])
sourceDateTakenMillis = toLong(map[EntryFields.SOURCE_DATE_TAKEN_MILLIS]) sourceDateTakenMillis = toLong(map[EntryFields.SOURCE_DATE_TAKEN_MILLIS])
durationMillis = toLong(map[EntryFields.DURATION_MILLIS]) durationMillis = toLong(map[EntryFields.DURATION_MILLIS])
} }
fun initFromFile(path: String, title: String, sizeBytes: Long, dateModifiedSecs: Long) { fun initFromFile(path: String, title: String, sizeBytes: Long, dateModifiedMillis: Long) {
this.path = path this.path = path
this.title = title this.title = title
this.sizeBytes = sizeBytes this.sizeBytes = sizeBytes
this.dateModifiedSecs = dateModifiedSecs this.dateModifiedMillis = dateModifiedMillis
} }
fun toMap(): FieldMap { fun toMap(): FieldMap {
@ -89,7 +89,7 @@ class SourceEntry {
EntryFields.SIZE_BYTES to sizeBytes, EntryFields.SIZE_BYTES to sizeBytes,
EntryFields.TITLE to title, EntryFields.TITLE to title,
EntryFields.DATE_ADDED_SECS to dateAddedSecs, EntryFields.DATE_ADDED_SECS to dateAddedSecs,
EntryFields.DATE_MODIFIED_SECS to dateModifiedSecs, EntryFields.DATE_MODIFIED_MILLIS to dateModifiedMillis,
EntryFields.SOURCE_DATE_TAKEN_MILLIS to sourceDateTakenMillis, EntryFields.SOURCE_DATE_TAKEN_MILLIS to sourceDateTakenMillis,
EntryFields.DURATION_MILLIS to durationMillis, EntryFields.DURATION_MILLIS to durationMillis,
// only for map export // only for map export

View file

@ -46,7 +46,7 @@ internal class FileImageProvider : ImageProvider() {
path = path, path = path,
title = file.name, title = file.name,
sizeBytes = file.length(), sizeBytes = file.length(),
dateModifiedSecs = file.lastModified() / 1000, dateModifiedMillis = file.lastModified(),
) )
} }
} catch (e: SecurityException) { } catch (e: SecurityException) {
@ -91,7 +91,7 @@ internal class FileImageProvider : ImageProvider() {
return hashMapOf( return hashMapOf(
EntryFields.URI to Uri.fromFile(newFile).toString(), EntryFields.URI to Uri.fromFile(newFile).toString(),
EntryFields.PATH to newFile.path, EntryFields.PATH to newFile.path,
EntryFields.DATE_MODIFIED_SECS to newFile.lastModified() / 1000, EntryFields.DATE_MODIFIED_MILLIS to newFile.lastModified(),
) )
} }
@ -99,7 +99,7 @@ internal class FileImageProvider : ImageProvider() {
try { try {
val file = File(path) val file = File(path)
if (file.exists()) { if (file.exists()) {
newFields[EntryFields.DATE_MODIFIED_SECS] = file.lastModified() / 1000 newFields[EntryFields.DATE_MODIFIED_MILLIS] = file.lastModified()
newFields[EntryFields.SIZE_BYTES] = file.length() newFields[EntryFields.SIZE_BYTES] = file.length()
} }
callback.onSuccess(newFields) callback.onSuccess(newFields)

View file

@ -11,6 +11,7 @@ import android.net.Uri
import android.os.Binder import android.os.Binder
import android.os.Build import android.os.Build
import android.util.Log import android.util.Log
import androidx.core.net.toUri
import com.bumptech.glide.Glide import com.bumptech.glide.Glide
import com.bumptech.glide.request.FutureTarget import com.bumptech.glide.request.FutureTarget
import com.commonsware.cwac.document.DocumentFileCompat import com.commonsware.cwac.document.DocumentFileCompat
@ -32,6 +33,7 @@ import deckers.thibault.aves.metadata.PixyMetaHelper.xmpDocString
import deckers.thibault.aves.metadata.metadataextractor.Helper import deckers.thibault.aves.metadata.metadataextractor.Helper
import deckers.thibault.aves.metadata.xmp.GoogleXMP import deckers.thibault.aves.metadata.xmp.GoogleXMP
import deckers.thibault.aves.model.AvesEntry import deckers.thibault.aves.model.AvesEntry
import deckers.thibault.aves.model.EntryFields
import deckers.thibault.aves.model.ExifOrientationOp import deckers.thibault.aves.model.ExifOrientationOp
import deckers.thibault.aves.model.FieldMap import deckers.thibault.aves.model.FieldMap
import deckers.thibault.aves.model.NameConflictResolution import deckers.thibault.aves.model.NameConflictResolution
@ -63,8 +65,6 @@ import java.util.Date
import java.util.TimeZone import java.util.TimeZone
import kotlin.math.absoluteValue import kotlin.math.absoluteValue
import androidx.exifinterface.media.ExifInterfaceFork as ExifInterface import androidx.exifinterface.media.ExifInterfaceFork as ExifInterface
import androidx.core.net.toUri
import deckers.thibault.aves.model.EntryFields
abstract class ImageProvider { abstract class ImageProvider {
open fun fetchSingle(context: Context, uri: Uri, sourceMimeType: String?, allowUnsized: Boolean, callback: ImageOpCallback) { open fun fetchSingle(context: Context, uri: Uri, sourceMimeType: String?, allowUnsized: Boolean, callback: ImageOpCallback) {
@ -648,13 +648,13 @@ abstract class ImageProvider {
val originalFileSize = File(path).length() val originalFileSize = File(path).length()
var trailerVideoBytes: ByteArray? = null var trailerVideoBytes: ByteArray? = null
val editableFile = StorageUtils.createTempFile(context).apply { val editableFile = StorageUtils.createTempFile(context).apply {
val videoSize = MultiPage.getMotionPhotoVideoSize(context, uri, mimeType, originalFileSize)?.let { it + trailerDiff } val trailerVideoSize = MultiPage.getTrailerVideoSize(context, uri, mimeType, originalFileSize)?.let { it + trailerDiff }
val isTrailerVideoValid = videoSize != null && MultiPage.getTrailerVideoInfo(context, uri, originalFileSize, videoSize) != null val isTrailerVideoValid = trailerVideoSize != null && MultiPage.getTrailerVideoInfo(context, uri, originalFileSize, trailerVideoSize) != null
try { try {
if (videoSize != null && isTrailerVideoValid) { if (trailerVideoSize != null && isTrailerVideoValid) {
// handle motion photo and embedded video separately // handle motion photo and embedded video separately
val imageSize = (originalFileSize - videoSize).toInt() val imageSize = (originalFileSize - trailerVideoSize).toInt()
val videoByteSize = videoSize.toInt() val videoByteSize = trailerVideoSize.toInt()
trailerVideoBytes = ByteArray(videoByteSize) trailerVideoBytes = ByteArray(videoByteSize)
StorageUtils.openInputStream(context, uri)?.let { input -> StorageUtils.openInputStream(context, uri)?.let { input ->
@ -733,13 +733,13 @@ abstract class ImageProvider {
val originalFileSize = File(path).length() val originalFileSize = File(path).length()
var trailerVideoBytes: ByteArray? = null var trailerVideoBytes: ByteArray? = null
val editableFile = StorageUtils.createTempFile(context).apply { val editableFile = StorageUtils.createTempFile(context).apply {
val videoSize = MultiPage.getMotionPhotoVideoSize(context, uri, mimeType, originalFileSize)?.let { it + trailerDiff } val trailerVideoSize = MultiPage.getTrailerVideoSize(context, uri, mimeType, originalFileSize)?.let { it + trailerDiff }
val isTrailerVideoValid = videoSize != null && MultiPage.getTrailerVideoInfo(context, uri, originalFileSize, videoSize) != null val isTrailerVideoValid = trailerVideoSize != null && MultiPage.getTrailerVideoInfo(context, uri, originalFileSize, trailerVideoSize) != null
try { try {
if (videoSize != null && isTrailerVideoValid) { if (trailerVideoSize != null && isTrailerVideoValid) {
// handle motion photo and embedded video separately // handle motion photo and embedded video separately
val imageSize = (originalFileSize - videoSize).toInt() val imageSize = (originalFileSize - trailerVideoSize).toInt()
val videoByteSize = videoSize.toInt() val videoByteSize = trailerVideoSize.toInt()
trailerVideoBytes = ByteArray(videoByteSize) trailerVideoBytes = ByteArray(videoByteSize)
StorageUtils.openInputStream(context, uri)?.let { input -> StorageUtils.openInputStream(context, uri)?.let { input ->
@ -899,7 +899,7 @@ abstract class ImageProvider {
} }
val originalFileSize = File(path).length() val originalFileSize = File(path).length()
val videoSize = MultiPage.getMotionPhotoVideoSize(context, uri, mimeType, originalFileSize)?.let { it.toInt() + trailerDiff } val trailerVideoSize = MultiPage.getTrailerVideoSize(context, uri, mimeType, originalFileSize)?.let { it.toInt() + trailerDiff }
val editableFile = StorageUtils.createTempFile(context).apply { val editableFile = StorageUtils.createTempFile(context).apply {
try { try {
editXmpWithPixy( editXmpWithPixy(
@ -921,7 +921,7 @@ abstract class ImageProvider {
// copy the edited temporary file back to the original // copy the edited temporary file back to the original
editableFile.transferTo(outputStream(context, mimeType, uri, path)) editableFile.transferTo(outputStream(context, mimeType, uri, path))
if (autoCorrectTrailerOffset && !checkTrailerOffset(context, path, uri, mimeType, videoSize, editableFile, callback)) { if (autoCorrectTrailerOffset && !checkTrailerOffset(context, path, uri, mimeType, trailerVideoSize, editableFile, callback)) {
return false return false
} }
editableFile.delete() editableFile.delete()
@ -1262,15 +1262,15 @@ abstract class ImageProvider {
callback: ImageOpCallback, callback: ImageOpCallback,
) { ) {
val originalFileSize = File(path).length() val originalFileSize = File(path).length()
val videoSize = MultiPage.getMotionPhotoVideoSize(context, uri, mimeType, originalFileSize) val trailerVideoSize = MultiPage.getTrailerVideoSize(context, uri, mimeType, originalFileSize)
if (videoSize == null) { if (trailerVideoSize == null) {
callback.onFailure(Exception("failed to get trailer video size")) callback.onFailure(Exception("failed to get trailer video size"))
return return
} }
val isTrailerVideoValid = MultiPage.getTrailerVideoInfo(context, uri, fileSizeBytes = originalFileSize, videoSizeBytes = videoSize) != null val isTrailerVideoValid = MultiPage.getTrailerVideoInfo(context, uri, fileSize = originalFileSize, videoSize = trailerVideoSize) != null
if (!isTrailerVideoValid) { if (!isTrailerVideoValid) {
callback.onFailure(Exception("failed to open trailer video with size=$videoSize")) callback.onFailure(Exception("failed to open trailer video with size=$trailerVideoSize"))
return return
} }
@ -1278,7 +1278,7 @@ abstract class ImageProvider {
try { try {
val inputStream = StorageUtils.openInputStream(context, uri) val inputStream = StorageUtils.openInputStream(context, uri)
// partial copy // partial copy
transferFrom(inputStream, originalFileSize - videoSize) transferFrom(inputStream, originalFileSize - trailerVideoSize)
} catch (e: Exception) { } catch (e: Exception) {
Log.d(LOG_TAG, "failed to remove trailer video", e) Log.d(LOG_TAG, "failed to remove trailer video", e)
callback.onFailure(e) callback.onFailure(e)
@ -1313,8 +1313,8 @@ abstract class ImageProvider {
} }
val originalFileSize = File(path).length() val originalFileSize = File(path).length()
val videoSize = MultiPage.getMotionPhotoVideoSize(context, uri, mimeType, originalFileSize) val trailerVideoSize = MultiPage.getTrailerVideoSize(context, uri, mimeType, originalFileSize)
val isTrailerVideoValid = videoSize != null && MultiPage.getTrailerVideoInfo(context, uri, originalFileSize, videoSize) != null val isTrailerVideoValid = trailerVideoSize != null && MultiPage.getTrailerVideoInfo(context, uri, originalFileSize, trailerVideoSize) != null
val editableFile = StorageUtils.createTempFile(context).apply { val editableFile = StorageUtils.createTempFile(context).apply {
try { try {
outputStream().use { output -> outputStream().use { output ->
@ -1334,7 +1334,7 @@ abstract class ImageProvider {
// copy the edited temporary file back to the original // copy the edited temporary file back to the original
editableFile.transferTo(outputStream(context, mimeType, uri, path)) editableFile.transferTo(outputStream(context, mimeType, uri, path))
if (!types.contains(TYPE_XMP) && isTrailerVideoValid && !checkTrailerOffset(context, path, uri, mimeType, videoSize, editableFile, callback)) { if (!types.contains(TYPE_XMP) && isTrailerVideoValid && !checkTrailerOffset(context, path, uri, mimeType, trailerVideoSize, editableFile, callback)) {
return return
} }
editableFile.delete() editableFile.delete()

View file

@ -51,14 +51,14 @@ import kotlin.coroutines.suspendCoroutine
class MediaStoreImageProvider : ImageProvider() { class MediaStoreImageProvider : ImageProvider() {
fun fetchAll( fun fetchAll(
context: Context, context: Context,
knownEntries: Map<Long?, Int?>, knownEntries: Map<Long?, Long?>,
directory: String?, directory: String?,
handleNewEntry: NewEntryHandler, handleNewEntry: NewEntryHandler,
) { ) {
Log.d(LOG_TAG, "fetching all media store items for ${knownEntries.size} known entries, directory=$directory") Log.d(LOG_TAG, "fetching all media store items for ${knownEntries.size} known entries, directory=$directory")
val isModified = fun(contentId: Long, dateModifiedSecs: Int): Boolean { val isModified = fun(contentId: Long, dateModifiedMillis: Long): Boolean {
val knownDate = knownEntries[contentId] val knownDate = knownEntries[contentId]
return knownDate == null || knownDate < dateModifiedSecs return knownDate == null || knownDate < dateModifiedMillis
} }
val handleNew: NewEntryHandler val handleNew: NewEntryHandler
var selection: String? = null var selection: String? = null
@ -96,7 +96,7 @@ class MediaStoreImageProvider : ImageProvider() {
var found = false var found = false
val fetched = arrayListOf<FieldMap>() val fetched = arrayListOf<FieldMap>()
val id = uri.tryParseId() val id = uri.tryParseId()
val alwaysValid: NewEntryChecker = fun(_: Long, _: Int): Boolean = true val alwaysValid: NewEntryChecker = fun(_: Long, _: Long): Boolean = true
val onSuccess: NewEntryHandler = fun(entry: FieldMap) { fetched.add(entry) } val onSuccess: NewEntryHandler = fun(entry: FieldMap) { fetched.add(entry) }
if (id != null) { if (id != null) {
if (sourceMimeType == null || isImage(sourceMimeType)) { if (sourceMimeType == null || isImage(sourceMimeType)) {
@ -227,8 +227,8 @@ class MediaStoreImageProvider : ImageProvider() {
val sizeColumn = cursor.getColumnIndexOrThrow(MediaStore.MediaColumns.SIZE) val sizeColumn = cursor.getColumnIndexOrThrow(MediaStore.MediaColumns.SIZE)
val widthColumn = cursor.getColumnIndexOrThrow(MediaStore.MediaColumns.WIDTH) val widthColumn = cursor.getColumnIndexOrThrow(MediaStore.MediaColumns.WIDTH)
val heightColumn = cursor.getColumnIndexOrThrow(MediaStore.MediaColumns.HEIGHT) val heightColumn = cursor.getColumnIndexOrThrow(MediaStore.MediaColumns.HEIGHT)
val dateAddedColumn = cursor.getColumnIndexOrThrow(MediaStore.MediaColumns.DATE_ADDED) val dateAddedSecsColumn = cursor.getColumnIndexOrThrow(MediaStore.MediaColumns.DATE_ADDED)
val dateModifiedColumn = cursor.getColumnIndexOrThrow(MediaStore.MediaColumns.DATE_MODIFIED) val dateModifiedSecsColumn = cursor.getColumnIndexOrThrow(MediaStore.MediaColumns.DATE_MODIFIED)
val dateTakenColumn = cursor.getColumnIndex(MediaColumns.DATE_TAKEN) val dateTakenColumn = cursor.getColumnIndex(MediaColumns.DATE_TAKEN)
// image & video for API >=29, only for images for API <29 // image & video for API >=29, only for images for API <29
@ -240,8 +240,8 @@ class MediaStoreImageProvider : ImageProvider() {
while (cursor.moveToNext()) { while (cursor.moveToNext()) {
val id = cursor.getLong(idColumn) val id = cursor.getLong(idColumn)
val dateModifiedSecs = cursor.getInt(dateModifiedColumn) val dateModifiedMillis = cursor.getInt(dateModifiedSecsColumn) * 1000L
if (isValidEntry(id, dateModifiedSecs)) { if (isValidEntry(id, dateModifiedMillis)) {
// for multiple items, `contentUri` is the root without ID, // for multiple items, `contentUri` is the root without ID,
// but for single items, `contentUri` already contains the ID // but for single items, `contentUri` already contains the ID
val itemUri = if (contentUriContainsId) contentUri else ContentUris.withAppendedId(contentUri, id) val itemUri = if (contentUriContainsId) contentUri else ContentUris.withAppendedId(contentUri, id)
@ -255,17 +255,18 @@ class MediaStoreImageProvider : ImageProvider() {
if (mimeType == null) { if (mimeType == null) {
Log.w(LOG_TAG, "failed to make entry from uri=$itemUri because of null MIME type") Log.w(LOG_TAG, "failed to make entry from uri=$itemUri because of null MIME type")
} else { } else {
var entryMap: FieldMap = hashMapOf( val path = cursor.getString(pathColumn)
var entryFields: FieldMap = hashMapOf(
EntryFields.ORIGIN to SourceEntry.ORIGIN_MEDIA_STORE_CONTENT, EntryFields.ORIGIN to SourceEntry.ORIGIN_MEDIA_STORE_CONTENT,
EntryFields.URI to itemUri.toString(), EntryFields.URI to itemUri.toString(),
EntryFields.PATH to cursor.getString(pathColumn), EntryFields.PATH to path,
EntryFields.SOURCE_MIME_TYPE to mimeType, EntryFields.SOURCE_MIME_TYPE to mimeType,
EntryFields.WIDTH to width, EntryFields.WIDTH to width,
EntryFields.HEIGHT to height, EntryFields.HEIGHT to height,
EntryFields.SOURCE_ROTATION_DEGREES to if (orientationColumn != -1) cursor.getInt(orientationColumn) else 0, EntryFields.SOURCE_ROTATION_DEGREES to if (orientationColumn != -1) cursor.getInt(orientationColumn) else 0,
EntryFields.SIZE_BYTES to cursor.getLong(sizeColumn), EntryFields.SIZE_BYTES to cursor.getLong(sizeColumn),
EntryFields.DATE_ADDED_SECS to cursor.getInt(dateAddedColumn), EntryFields.DATE_ADDED_SECS to cursor.getInt(dateAddedSecsColumn),
EntryFields.DATE_MODIFIED_SECS to dateModifiedSecs, EntryFields.DATE_MODIFIED_MILLIS to dateModifiedMillis,
EntryFields.SOURCE_DATE_TAKEN_MILLIS to if (dateTakenColumn != -1) cursor.getLong(dateTakenColumn) else null, EntryFields.SOURCE_DATE_TAKEN_MILLIS to if (dateTakenColumn != -1) cursor.getLong(dateTakenColumn) else null,
EntryFields.DURATION_MILLIS to durationMillis, EntryFields.DURATION_MILLIS to durationMillis,
// only for map export // only for map export
@ -285,8 +286,8 @@ class MediaStoreImageProvider : ImageProvider() {
if (outWidth > 0 && outHeight > 0) { if (outWidth > 0 && outHeight > 0) {
width = outWidth width = outWidth
height = outHeight height = outHeight
entryMap[EntryFields.WIDTH] = width entryFields[EntryFields.WIDTH] = width
entryMap[EntryFields.HEIGHT] = height entryFields[EntryFields.HEIGHT] = height
} }
} }
} catch (e: IOException) { } catch (e: IOException) {
@ -302,11 +303,13 @@ class MediaStoreImageProvider : ImageProvider() {
// missing some attributes such as width, height, orientation. // missing some attributes such as width, height, orientation.
// Also, the reported size of raw images is inconsistent across devices // Also, the reported size of raw images is inconsistent across devices
// and Android versions (sometimes the raw size, sometimes the decoded size). // and Android versions (sometimes the raw size, sometimes the decoded size).
val entry = SourceEntry(entryMap).fillPreCatalogMetadata(context) val entry = SourceEntry(entryFields).fillPreCatalogMetadata(context)
entryMap = entry.toMap() entryFields = entry.toMap()
} }
handleNewEntry(entryMap) getFileModifiedDateMillis(path)?.let { entryFields[EntryFields.DATE_MODIFIED_MILLIS] = it }
handleNewEntry(entryFields)
found = true found = true
} }
} }
@ -823,18 +826,32 @@ class MediaStoreImageProvider : ImageProvider() {
try { try {
val cursor = context.contentResolver.query(uri, projection, null, null, null) val cursor = context.contentResolver.query(uri, projection, null, null, null)
if (cursor != null && cursor.moveToFirst()) { if (cursor != null && cursor.moveToFirst()) {
cursor.getColumnIndex(MediaStore.MediaColumns.DATE_MODIFIED).let { if (it != -1) newFields["dateModifiedSecs"] = cursor.getInt(it) } cursor.getColumnIndex(MediaStore.MediaColumns.DATE_MODIFIED).let { if (it != -1) newFields[EntryFields.DATE_MODIFIED_MILLIS] = cursor.getInt(it) * 1000L }
cursor.getColumnIndex(MediaStore.MediaColumns.SIZE).let { if (it != -1) newFields["sizeBytes"] = cursor.getLong(it) } cursor.getColumnIndex(MediaStore.MediaColumns.SIZE).let { if (it != -1) newFields[EntryFields.SIZE_BYTES] = cursor.getLong(it) }
cursor.close() cursor.close()
} }
} catch (e: Exception) { } catch (e: Exception) {
callback.onFailure(e) callback.onFailure(e)
return@scanFile return@scanFile
} }
getFileModifiedDateMillis(path)?.let { newFields[EntryFields.DATE_MODIFIED_MILLIS] = it }
callback.onSuccess(newFields) callback.onSuccess(newFields)
} }
} }
// try to fetch the modified date from the file,
// as it is more precise than the one from the Media Store
private fun getFileModifiedDateMillis(path: String?): Long? {
if (path != null) {
try {
return File(path).lastModified()
} catch (securityException: SecurityException) {
// ignore
}
}
return null
}
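For context, a minimal standalone sketch (not part of this diff) of how the millisecond-precision file date can complement the second-precision Media Store column; `resolveDateModifiedMillis`, `path` and `mediaStoreDateSecs` are hypothetical names:

import java.io.File

// prefer the file's own modification date (millisecond precision) when readable,
// fall back to the Media Store value (second precision) otherwise
fun resolveDateModifiedMillis(path: String?, mediaStoreDateSecs: Long): Long {
    val fromFile = path?.let {
        try {
            File(it).lastModified().takeIf { millis -> millis > 0 }
        } catch (e: SecurityException) {
            null
        }
    }
    return fromFile ?: (mediaStoreDateSecs * 1000)
}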
private fun scanObsoletePath(context: Context, uri: Uri, path: String, mimeType: String) { private fun scanObsoletePath(context: Context, uri: Uri, path: String, mimeType: String) {
val file = File(path) val file = File(path)
val delayMillis = 500L val delayMillis = 500L
@ -918,8 +935,9 @@ class MediaStoreImageProvider : ImageProvider() {
EntryFields.PATH to path, EntryFields.PATH to path,
) )
cursor.getColumnIndex(MediaStore.MediaColumns.DATE_ADDED).let { if (it != -1) newFields[EntryFields.DATE_ADDED_SECS] = cursor.getInt(it) } cursor.getColumnIndex(MediaStore.MediaColumns.DATE_ADDED).let { if (it != -1) newFields[EntryFields.DATE_ADDED_SECS] = cursor.getInt(it) }
cursor.getColumnIndex(MediaStore.MediaColumns.DATE_MODIFIED).let { if (it != -1) newFields[EntryFields.DATE_MODIFIED_SECS] = cursor.getInt(it) } cursor.getColumnIndex(MediaStore.MediaColumns.DATE_MODIFIED).let { if (it != -1) newFields[EntryFields.DATE_MODIFIED_MILLIS] = cursor.getLong(it) * 1000 }
cursor.close() cursor.close()
getFileModifiedDateMillis(path)?.let { newFields[EntryFields.DATE_MODIFIED_MILLIS] = it }
return newFields return newFields
} }
} catch (e: Exception) { } catch (e: Exception) {
@ -1030,4 +1048,4 @@ object MediaColumns {
typealias NewEntryHandler = (entry: FieldMap) -> Unit typealias NewEntryHandler = (entry: FieldMap) -> Unit
private typealias NewEntryChecker = (contentId: Long, dateModifiedSecs: Int) -> Boolean private typealias NewEntryChecker = (contentId: Long, dateModifiedMillis: Long) -> Boolean

View file

@ -2,27 +2,121 @@ package deckers.thibault.aves.utils
import android.content.Context import android.content.Context
import android.graphics.Bitmap import android.graphics.Bitmap
import android.graphics.ColorSpace
import android.os.Build
import android.util.Half
import android.util.Log import android.util.Log
import androidx.annotation.RequiresApi
import com.bumptech.glide.Glide import com.bumptech.glide.Glide
import com.bumptech.glide.load.resource.bitmap.TransformationUtils import com.bumptech.glide.load.resource.bitmap.TransformationUtils
import deckers.thibault.aves.metadata.Metadata.getExifCode import deckers.thibault.aves.metadata.Metadata.getExifCode
import kotlinx.coroutines.sync.Mutex import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock import kotlinx.coroutines.sync.withLock
import java.io.ByteArrayOutputStream import java.io.ByteArrayOutputStream
import java.nio.ByteBuffer
object BitmapUtils { object BitmapUtils {
private val LOG_TAG = LogUtils.createTag<BitmapUtils>() private val LOG_TAG = LogUtils.createTag<BitmapUtils>()
private const val INITIAL_BUFFER_SIZE = 2 shl 17 // 256kB private const val INITIAL_BUFFER_SIZE = 2 shl 17 // 256kB
// arbitrary size to detect buffer that may yield an OOM
private const val BUFFER_SIZE_DANGER_THRESHOLD = 3 * (1 shl 20) // MB
private val freeBaos = ArrayList<ByteArrayOutputStream>() private val freeBaos = ArrayList<ByteArrayOutputStream>()
private val mutex = Mutex() private val mutex = Mutex()
const val ARGB_8888_BYTE_SIZE = 4 private const val INT_BYTE_SIZE = 4
private const val MAX_2_BITS_FLOAT = 0x3.toFloat()
private const val MAX_8_BITS_FLOAT = 0xff.toFloat()
private const val MAX_10_BITS_FLOAT = 0x3ff.toFloat()
private const val RAW_BYTES_TRAILER_LENGTH = INT_BYTE_SIZE * 2
// bytes per pixel with different bitmap config
private const val BPP_ALPHA_8 = 1
private const val BPP_RGB_565 = 2
private const val BPP_ARGB_8888 = 4
private const val BPP_RGBA_1010102 = 4
private const val BPP_RGBA_F16 = 8
private fun getBytePerPixel(config: Bitmap.Config?): Int {
return when (config) {
Bitmap.Config.ALPHA_8 -> BPP_ALPHA_8
Bitmap.Config.RGB_565 -> BPP_RGB_565
Bitmap.Config.ARGB_8888 -> BPP_ARGB_8888
else -> {
return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && config == Bitmap.Config.RGBA_F16) {
BPP_RGBA_F16
} else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU && config == Bitmap.Config.RGBA_1010102) {
BPP_RGBA_1010102
} else {
// default
BPP_ARGB_8888
}
}
}
}
fun getExpectedImageSize(pixelCount: Long, config: Bitmap.Config?): Long {
return pixelCount * getBytePerPixel(config)
}
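As a quick standalone sanity check of the byte-per-pixel table above (not part of this diff): a 12-megapixel ARGB_8888 bitmap weighs about 48 MB in memory, and the same pixels in RGBA_F16 twice that.

fun main() {
    val pixelCount = 4000L * 3000L         // 12 megapixels
    val argb8888Bytes = pixelCount * 4     // 48_000_000 bytes, ~48 MB
    val rgbaF16Bytes = pixelCount * 8      // 96_000_000 bytes, ~96 MB
    println("ARGB_8888: $argb8888Bytes bytes, RGBA_F16: $rgbaF16Bytes bytes")
}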
fun getRawBytes(bitmap: Bitmap?, recycle: Boolean): ByteArray? {
bitmap ?: return null
val byteCount = bitmap.byteCount
val width = bitmap.width
val height = bitmap.height
val config = bitmap.config
val colorSpace = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) bitmap.colorSpace else null
if (!MemoryUtils.canAllocate(byteCount)) {
throw Exception("bitmap buffer is $byteCount bytes, which cannot be allocated to a new byte array")
}
try {
// `ByteBuffer` initial order is always `BIG_ENDIAN`
var bytes = ByteBuffer.allocate(byteCount + RAW_BYTES_TRAILER_LENGTH).apply {
bitmap.copyPixelsToBuffer(this)
}.array()
// do not access bitmap after recycling
if (recycle) bitmap.recycle()
// convert pixel format and color space, if necessary
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
colorSpace?.let { srcColorSpace ->
val dstColorSpace = ColorSpace.get(ColorSpace.Named.SRGB)
val connector = ColorSpace.connect(srcColorSpace, dstColorSpace)
if (config == Bitmap.Config.ARGB_8888) {
if (srcColorSpace != dstColorSpace) {
argb8888ToArgb8888(bytes, connector, end = byteCount)
}
} else if (config == Bitmap.Config.RGBA_F16) {
rgbaf16ToArgb8888(bytes, connector, end = byteCount)
val newConfigByteCount = byteCount / (BPP_RGBA_F16 / BPP_ARGB_8888)
bytes = bytes.sliceArray(0..<newConfigByteCount + RAW_BYTES_TRAILER_LENGTH)
} else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU && config == Bitmap.Config.RGBA_1010102) {
rgba1010102ToArgb8888(bytes, connector, end = byteCount)
}
}
}
// append bitmap size for use by the caller to interpret the raw bytes
val trailerOffset = bytes.size - RAW_BYTES_TRAILER_LENGTH
bytes = ByteBuffer.wrap(bytes).apply {
position(trailerOffset)
putInt(width)
putInt(height)
}.array()
return bytes
} catch (e: Exception) {
Log.e(LOG_TAG, "failed to get bytes from bitmap", e)
}
return null
}
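The width/height trailer appended above is meant to be read back by the consumer. A minimal sketch of such a reader (`readRawBytesTrailer` is a hypothetical helper; the buffer is big-endian by `ByteBuffer` default, matching the writer):

import java.nio.ByteBuffer

// read the 2 trailing ints (width, height) appended after the raw pixel bytes
fun readRawBytesTrailer(bytes: ByteArray): Pair<Int, Int> {
    val buffer = ByteBuffer.wrap(bytes)   // BIG_ENDIAN by default
    buffer.position(bytes.size - 2 * Int.SIZE_BYTES)
    val width = buffer.int
    val height = buffer.int
    return width to height
}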
suspend fun getEncodedBytes(bitmap: Bitmap?, canHaveAlpha: Boolean = false, quality: Int = 100, recycle: Boolean): ByteArray? {
bitmap ?: return null
suspend fun Bitmap.getBytes(canHaveAlpha: Boolean = false, quality: Int = 100, recycle: Boolean): ByteArray? {
val stream: ByteArrayOutputStream val stream: ByteArrayOutputStream
mutex.withLock { mutex.withLock {
// this method is called a lot, so we try and reuse output streams // this method is called a lot, so we try and reuse output streams
@ -34,19 +128,17 @@ object BitmapUtils {
} }
} }
try { try {
// the Bitmap raw bytes are not decodable by Flutter
// we need to format them (compress, or add a BMP header) before sending them
// `Bitmap.CompressFormat.PNG` is slower than `JPEG`, but it allows transparency // `Bitmap.CompressFormat.PNG` is slower than `JPEG`, but it allows transparency
// the BMP format allows an alpha channel, but Android decoding seems to ignore it // the BMP format allows an alpha channel, but Android decoding seems to ignore it
if (canHaveAlpha && hasAlpha()) { if (canHaveAlpha && bitmap.hasAlpha()) {
this.compress(Bitmap.CompressFormat.PNG, quality, stream) bitmap.compress(Bitmap.CompressFormat.PNG, quality, stream)
} else { } else {
this.compress(Bitmap.CompressFormat.JPEG, quality, stream) bitmap.compress(Bitmap.CompressFormat.JPEG, quality, stream)
} }
if (recycle) this.recycle() if (recycle) bitmap.recycle()
val bufferSize = stream.size() val bufferSize = stream.size()
if (bufferSize > BUFFER_SIZE_DANGER_THRESHOLD && !MemoryUtils.canAllocate(bufferSize)) { if (!MemoryUtils.canAllocate(bufferSize)) {
throw Exception("bitmap compressed to $bufferSize bytes, which cannot be allocated to a new byte array") throw Exception("bitmap compressed to $bufferSize bytes, which cannot be allocated to a new byte array")
} }
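The stream reuse mentioned in the comment above follows a common take/return pooling pattern. A hedged standalone sketch of that idea (`withPooledStream` and its `pool` are hypothetical, simplified names, not the actual members of this class):

import java.io.ByteArrayOutputStream
import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock

private val pool = ArrayList<ByteArrayOutputStream>()
private val poolMutex = Mutex()

// take a pooled stream under the lock, hand it to the caller, then reset and return it
suspend fun <T> withPooledStream(block: (ByteArrayOutputStream) -> T): T {
    val stream = poolMutex.withLock { pool.removeLastOrNull() } ?: ByteArrayOutputStream(1 shl 18)
    try {
        return block(stream)
    } finally {
        stream.reset()
        poolMutex.withLock { pool.add(stream) }
    }
}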
@ -62,6 +154,107 @@ object BitmapUtils {
return null return null
} }
// convert bytes, without reallocation:
// - from original color space to sRGB.
@RequiresApi(Build.VERSION_CODES.O)
private fun argb8888ToArgb8888(bytes: ByteArray, connector: ColorSpace.Connector, start: Int = 0, end: Int = bytes.size) {
// unpacking from ARGB_8888 and packing to ARGB_8888
// stored as [3,2,1,0] -> [AAAAAAAA BBBBBBBB GGGGGGGG RRRRRRRR]
for (i in start..<end step BPP_ARGB_8888) {
// mask with `0xff` to yield values in [0, 255], instead of [-128, 127]
val iB = bytes[i + 2].toInt() and 0xff
val iG = bytes[i + 1].toInt() and 0xff
val iR = bytes[i].toInt() and 0xff
// components as floats in sRGB
val srgbFloats = connector.transform(iR / MAX_8_BITS_FLOAT, iG / MAX_8_BITS_FLOAT, iB / MAX_8_BITS_FLOAT)
val srgbR = (srgbFloats[0] * 255.0f + 0.5f).toInt()
val srgbG = (srgbFloats[1] * 255.0f + 0.5f).toInt()
val srgbB = (srgbFloats[2] * 255.0f + 0.5f).toInt()
// keep alpha as it is, in `bytes[i + 3]`
bytes[i + 2] = srgbB.toByte()
bytes[i + 1] = srgbG.toByte()
bytes[i] = srgbR.toByte()
}
}
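For illustration, a hedged sketch of how such a converter is driven by the platform ColorSpace API (API 26+); the Display P3 source here is an arbitrary example, not necessarily what the caller above uses:

import android.graphics.ColorSpace

// requires API 26 (Android O)
fun transformPixelToSrgb(r: Float, g: Float, b: Float): FloatArray {
    val connector = ColorSpace.connect(
        ColorSpace.get(ColorSpace.Named.DISPLAY_P3),
        ColorSpace.get(ColorSpace.Named.SRGB),
    )
    // input and output components are normalized to [0, 1]
    return connector.transform(r, g, b)
}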
// convert bytes, without reallocation:
// - from config RGBA_F16 to ARGB_8888,
// - from original color space to sRGB.
@RequiresApi(Build.VERSION_CODES.O)
private fun rgbaf16ToArgb8888(bytes: ByteArray, connector: ColorSpace.Connector, start: Int = 0, end: Int = bytes.size) {
val indexDivider = BPP_RGBA_F16 / BPP_ARGB_8888
for (i in start..<end step BPP_RGBA_F16) {
// unpacking from RGBA_F16
// stored as [7,6,5,4,3,2,1,0] -> [AAAAAAAA AAAAAAAA BBBBBBBB BBBBBBBB GGGGGGGG GGGGGGGG RRRRRRRR RRRRRRRR]
val i7 = bytes[i + 7].toInt()
val i6 = bytes[i + 6].toInt()
val i5 = bytes[i + 5].toInt()
val i4 = bytes[i + 4].toInt()
val i3 = bytes[i + 3].toInt()
val i2 = bytes[i + 2].toInt()
val i1 = bytes[i + 1].toInt()
val i0 = bytes[i].toInt()
val hA = Half((((i7 and 0xff) shl 8) or (i6 and 0xff)).toShort())
val hB = Half((((i5 and 0xff) shl 8) or (i4 and 0xff)).toShort())
val hG = Half((((i3 and 0xff) shl 8) or (i2 and 0xff)).toShort())
val hR = Half((((i1 and 0xff) shl 8) or (i0 and 0xff)).toShort())
// components as floats in sRGB
val srgbFloats = connector.transform(hR.toFloat(), hG.toFloat(), hB.toFloat())
val srgbR = (srgbFloats[0] * 255.0f + 0.5f).toInt()
val srgbG = (srgbFloats[1] * 255.0f + 0.5f).toInt()
val srgbB = (srgbFloats[2] * 255.0f + 0.5f).toInt()
val alpha = (hA.toFloat() * 255.0f + 0.5f).toInt()
// packing to ARGB_8888
// stored as [3,2,1,0] -> [AAAAAAAA BBBBBBBB GGGGGGGG RRRRRRRR]
val dstI = i / indexDivider
bytes[dstI + 3] = alpha.toByte()
bytes[dstI + 2] = srgbB.toByte()
bytes[dstI + 1] = srgbG.toByte()
bytes[dstI] = srgbR.toByte()
}
}
// convert bytes, without reallocation:
// - from config RGBA_1010102 to ARGB_8888,
// - from original color space to sRGB.
@RequiresApi(Build.VERSION_CODES.O)
private fun rgba1010102ToArgb8888(bytes: ByteArray, connector: ColorSpace.Connector, start: Int = 0, end: Int = bytes.size) {
val alphaFactor = 255.0f / MAX_2_BITS_FLOAT
for (i in start..<end step BPP_RGBA_1010102) {
// unpacking from RGBA_1010102
// stored as [3,2,1,0] -> [AABBBBBB BBBBGGGG GGGGGGRR RRRRRRRR]
val i3 = bytes[i + 3].toInt()
val i2 = bytes[i + 2].toInt()
val i1 = bytes[i + 1].toInt()
val i0 = bytes[i].toInt()
val iA = ((i3 and 0xc0) shr 6)
val iB = ((i3 and 0x3f) shl 4) or ((i2 and 0xf0) shr 4)
val iG = ((i2 and 0x0f) shl 6) or ((i1 and 0xfc) shr 2)
val iR = ((i1 and 0x03) shl 8) or ((i0 and 0xff) shr 0)
// components as floats in sRGB
val srgbFloats = connector.transform(iR / MAX_10_BITS_FLOAT, iG / MAX_10_BITS_FLOAT, iB / MAX_10_BITS_FLOAT)
val srgbR = (srgbFloats[0] * 255.0f + 0.5f).toInt()
val srgbG = (srgbFloats[1] * 255.0f + 0.5f).toInt()
val srgbB = (srgbFloats[2] * 255.0f + 0.5f).toInt()
val alpha = (iA * alphaFactor + 0.5f).toInt()
// packing to ARGB_8888
// stored as [3,2,1,0] -> [AAAAAAAA BBBBBBBB GGGGGGGG RRRRRRRR]
bytes[i + 3] = alpha.toByte()
bytes[i + 2] = srgbB.toByte()
bytes[i + 1] = srgbG.toByte()
bytes[i] = srgbR.toByte()
}
}
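A standalone worked check of the 1010102 unpacking above (not part of this diff): a fully saturated pixel stored as bytes 0xFF 0xFF 0xFF 0xFF should decode to alpha = 3 and R = G = B = 0x3ff.

fun main() {
    val b0 = 0xFF
    val b1 = 0xFF
    val b2 = 0xFF
    val b3 = 0xFF
    val a = (b3 and 0xc0) shr 6                              // 3
    val b = ((b3 and 0x3f) shl 4) or ((b2 and 0xf0) shr 4)   // 0x3ff
    val g = ((b2 and 0x0f) shl 6) or ((b1 and 0xfc) shr 2)   // 0x3ff
    val r = ((b1 and 0x03) shl 8) or (b0 and 0xff)           // 0x3ff
    println("a=$a r=$r g=$g b=$b")
}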
fun applyExifOrientation(context: Context, bitmap: Bitmap?, rotationDegrees: Int?, isFlipped: Boolean?): Bitmap? { fun applyExifOrientation(context: Context, bitmap: Bitmap?, rotationDegrees: Int?, isFlipped: Boolean?): Bitmap? {
if (bitmap == null || rotationDegrees == null || isFlipped == null) return bitmap if (bitmap == null || rotationDegrees == null || isFlipped == null) return bitmap
if (rotationDegrees == 0 && !isFlipped) return bitmap if (rotationDegrees == 0 && !isFlipped) return bitmap

View file

@ -90,12 +90,7 @@ object BmpWriter {
var column = 0 var column = 0
while (column < biWidth) { while (column < biWidth) {
/* // non-premultiplied ARGB values in the sRGB color space
alpha: (value shr 24 and 0xFF).toByte()
red: (value shr 16 and 0xFF).toByte()
green: (value shr 8 and 0xFF).toByte()
blue: (value and 0xFF).toByte()
*/
value = pixels[column] value = pixels[column]
// blue: [0], green: [1], red: [2] // blue: [0], green: [1], red: [2]
rgb[0] = (value and 0xFF).toByte() rgb[0] = (value and 0xFF).toByte()
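A standalone worked example of the channel extraction above (not part of this diff): for the ARGB int 0x7FFFAA33, the blue, green, red and alpha bytes are 0x33, 0xAA, 0xFF and 0x7F.

fun main() {
    val value = 0x7FFFAA33
    val blue = value and 0xFF            // 0x33
    val green = (value shr 8) and 0xFF   // 0xAA
    val red = (value shr 16) and 0xFF    // 0xFF
    val alpha = (value shr 24) and 0xFF  // 0x7F
    println("a=$alpha r=$red g=$green b=$blue")
}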

View file

@ -8,6 +8,8 @@ fun ByteBuffer.toByteArray(): ByteArray {
return bytes return bytes
} }
fun Int.toHex(): String = "0x${byteArrayOf(shr(8).toByte(), toByte()).toHex()}"
fun ByteArray.toHex(): String = joinToString(separator = "") { it.toHex() } fun ByteArray.toHex(): String = joinToString(separator = "") { it.toHex() }
fun Byte.toHex(): String = "%02x".format(this) fun Byte.toHex(): String = "%02x".format(this)
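Hypothetical usage of the extensions above, assuming they are in scope; note the Int variant only renders the low 16 bits, matching its two-byte construction:

fun main() {
    println(0x4d4d.toHex())                   // "0x4d4d" (e.g. the big-endian TIFF byte order marker)
    println(byteArrayOf(0x0f, 0x10).toHex())  // "0f10"
}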

View file

@ -20,6 +20,7 @@ fun <E> MutableList<E>.compatRemoveIf(filter: (t: E) -> Boolean): Boolean {
} }
// Boyer-Moore algorithm for pattern searching // Boyer-Moore algorithm for pattern searching
// Returns: an index of the first occurrence of the pattern or -1 if none is found.
fun ByteArray.indexOfBytes(pattern: ByteArray, start: Int = 0): Int { fun ByteArray.indexOfBytes(pattern: ByteArray, start: Int = 0): Int {
val n: Int = this.size val n: Int = this.size
val m: Int = pattern.size val m: Int = pattern.size
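Hypothetical usage of `indexOfBytes`, relying only on the contract documented above (index of the first occurrence, or -1 when the pattern is absent), and assuming the full implementation from this file is in scope:

fun main() {
    val data = "aves gallery".toByteArray()
    println(data.indexOfBytes("gallery".toByteArray()))  // 5
    println(data.indexOfBytes("movie".toByteArray()))    // -1
}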

View file

@ -84,11 +84,11 @@ object MimeTypes {
else -> false else -> false
} }
// as of Flutter v3.16.4, with additional custom handling for SVG // as of Flutter v3.16.4, with additional custom handling for SVG in Dart,
fun canDecodeWithFlutter(mimeType: String, pageId: Int?, rotationDegrees: Int?, isFlipped: Boolean?) = when (mimeType) { // while handling still PNG and JPEG on Android for color space and config conversion
fun canDecodeWithFlutter(mimeType: String, isAnimated: Boolean) = when (mimeType) {
GIF, WEBP, BMP, WBMP, ICO, SVG -> true GIF, WEBP, BMP, WBMP, ICO, SVG -> true
JPEG -> (pageId ?: 0) == 0 JPEG, PNG -> isAnimated
PNG -> (rotationDegrees ?: 0) == 0 && !(isFlipped ?: false)
else -> false else -> false
} }
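Hypothetical calls against the new signature, assuming the MimeTypes constants shown above; they illustrate the split described in the comment (animated images go to Flutter, still PNG and JPEG to the Android-side decoder for color space and config conversion):

// assuming this runs with access to object MimeTypes
fun main() {
    println(MimeTypes.canDecodeWithFlutter(MimeTypes.GIF, isAnimated = true))   // true
    println(MimeTypes.canDecodeWithFlutter(MimeTypes.PNG, isAnimated = true))   // true (e.g. APNG)
    println(MimeTypes.canDecodeWithFlutter(MimeTypes.PNG, isAnimated = false))  // false: handled on the Android side
}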

View file

@ -15,6 +15,8 @@ import android.provider.DocumentsContract
import android.provider.MediaStore import android.provider.MediaStore
import android.text.TextUtils import android.text.TextUtils
import android.util.Log import android.util.Log
import androidx.core.net.toUri
import androidx.core.text.isDigitsOnly
import com.commonsware.cwac.document.DocumentFileCompat import com.commonsware.cwac.document.DocumentFileCompat
import deckers.thibault.aves.model.provider.ImageProvider import deckers.thibault.aves.model.provider.ImageProvider
import deckers.thibault.aves.utils.FileUtils.transferFrom import deckers.thibault.aves.utils.FileUtils.transferFrom
@ -29,8 +31,6 @@ import java.io.InputStream
import java.io.OutputStream import java.io.OutputStream
import java.util.Locale import java.util.Locale
import java.util.regex.Pattern import java.util.regex.Pattern
import androidx.core.net.toUri
import androidx.core.text.isDigitsOnly
object StorageUtils { object StorageUtils {
private val LOG_TAG = LogUtils.createTag<StorageUtils>() private val LOG_TAG = LogUtils.createTag<StorageUtils>()

View file

@ -8,4 +8,5 @@
<string name="analysis_channel_name">Exploració de mitjans</string> <string name="analysis_channel_name">Exploració de mitjans</string>
<string name="analysis_notification_default_title">Explorant mitjans</string> <string name="analysis_notification_default_title">Explorant mitjans</string>
<string name="analysis_notification_action_stop">Atura</string> <string name="analysis_notification_action_stop">Atura</string>
<string name="map_shortcut_short_label">Mapa</string>
</resources> </resources>

View file

@ -8,4 +8,5 @@
<string name="analysis_notification_default_title">Prohledávání médií</string> <string name="analysis_notification_default_title">Prohledávání médií</string>
<string name="analysis_notification_action_stop">Zastavit</string> <string name="analysis_notification_action_stop">Zastavit</string>
<string name="app_widget_label">Fotorámeček</string> <string name="app_widget_label">Fotorámeček</string>
<string name="map_shortcut_short_label">Mapa</string>
</resources> </resources>

View file

@ -5,7 +5,8 @@
<string name="wallpaper">Fondo da pantalla</string> <string name="wallpaper">Fondo da pantalla</string>
<string name="search_shortcut_short_label">Procura</string> <string name="search_shortcut_short_label">Procura</string>
<string name="videos_shortcut_short_label">Vídeos</string> <string name="videos_shortcut_short_label">Vídeos</string>
<string name="analysis_channel_name">Escaneo multimedia</string> <string name="analysis_channel_name">Escanear medios</string>
<string name="analysis_notification_default_title">Escaneando medios</string> <string name="analysis_notification_default_title">Escaneando medios</string>
<string name="analysis_notification_action_stop">Pare</string> <string name="analysis_notification_action_stop">Pare</string>
<string name="map_shortcut_short_label">Mapa</string>
</resources> </resources>

View file

@ -91,7 +91,7 @@ import java.util.regex.Pattern;
import java.util.zip.CRC32; import java.util.zip.CRC32;
/* /*
* Forked from 'androidx.exifinterface:exifinterface:1.4.0-beta01' on 2025/01/21 * Forked from 'androidx.exifinterface:exifinterface:1.4.0'
* Named differently to let ExifInterface be loaded as subdependency. * Named differently to let ExifInterface be loaded as subdependency.
* cf https://maven.google.com/web/index.html?q=exifinterface#androidx.exifinterface:exifinterface * cf https://maven.google.com/web/index.html?q=exifinterface#androidx.exifinterface:exifinterface
* cf https://github.com/androidx/androidx/tree/androidx-main/exifinterface/exifinterface/src/main/java/androidx/exifinterface/media * cf https://github.com/androidx/androidx/tree/androidx-main/exifinterface/exifinterface/src/main/java/androidx/exifinterface/media

View file

@ -1,21 +1,10 @@
# Project-wide Gradle settings. org.gradle.jvmargs=-Xmx8G -XX:MaxMetaspaceSize=4G -XX:ReservedCodeCacheSize=512m -XX:+HeapDumpOnOutOfMemoryError
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx4G -Dfile.encoding=UTF-8
android.useAndroidX=true android.useAndroidX=true
android.enableJetifier=true android.enableJetifier=true
# Kotlin code style for this project: "official" or "obsolete": # Kotlin code style for this project: "official" or "obsolete":
kotlin.code.style=official kotlin.code.style=official
android.nonTransitiveRClass=false
android.nonFinalResIds=false
# full mode is too aggressive and removes essential code # full mode is too aggressive and removes essential code
# of `metadata-extractor` even when adding `-keep class com.drew.**{ *; }` # of `metadata-extractor` even when adding `-keep class com.drew.**{ *; }`
android.enableR8.fullMode=false android.enableR8.fullMode=false

View file

@ -1,33 +0,0 @@
pluginManagement {
def flutterSdkPath = {
def properties = new Properties()
file("local.properties").withInputStream { properties.load(it) }
def flutterSdkPath = properties.getProperty("flutter.sdk")
assert flutterSdkPath != null, "flutter.sdk not set in local.properties"
return flutterSdkPath
}
settings.ext.flutterSdkPath = flutterSdkPath()
settings.ext.kotlin_version = '2.1.10'
settings.ext.ksp_version = "$kotlin_version-1.0.29"
settings.ext.agp_version = '8.8.0'
includeBuild("${settings.ext.flutterSdkPath}/packages/flutter_tools/gradle")
repositories {
google()
mavenCentral()
gradlePluginPortal()
}
}
plugins {
id("dev.flutter.flutter-plugin-loader") version("1.0.0")
id("com.android.application") version("$agp_version") apply(false)
id("org.jetbrains.kotlin.android") version("$kotlin_version") apply(false)
id("com.google.devtools.ksp") version("$ksp_version") apply(false)
id("org.gradle.toolchains.foojay-resolver-convention") version("0.4.0")
}
include(":app")
include(":exifinterface")

View file

@ -0,0 +1,28 @@
pluginManagement {
val flutterSdkPath = run {
val properties = java.util.Properties()
file("local.properties").inputStream().use { properties.load(it) }
val flutterSdkPath = properties.getProperty("flutter.sdk")
require(flutterSdkPath != null) { "flutter.sdk not set in local.properties" }
flutterSdkPath
}
includeBuild("$flutterSdkPath/packages/flutter_tools/gradle")
repositories {
google()
mavenCentral()
gradlePluginPortal()
}
}
plugins {
id("dev.flutter.flutter-plugin-loader") version "1.0.0"
id("com.android.application") version "8.8.1" apply false
id("org.jetbrains.kotlin.android") version "2.1.10" apply false
id("com.google.devtools.ksp") version "2.1.10-1.0.29" apply false
id("org.gradle.toolchains.foojay-resolver-convention") version "0.8.0"
}
include(":app")
include(":exifinterface")

Binary files not shown

View file

@ -1,5 +1,5 @@
<i>Aves</i> can handle all sorts of images and videos, including your typical JPEGs and MP4s, but also more exotic things like <b>multi-page TIFFs, SVGs, old AVIs and more</b>! It scans your media collection to identify <b>motion photos</b>, <b>panoramas</b> (aka photo spheres), <b>360° videos</b>, as well as <b>GeoTIFF</b> files. <i>Aves</i> pot gestionar tot tipus d'imatges i vídeos, inclosos els típics JPEG i MP4, però també coses més exòtiques com <b>multi-pàgina TIFFs, SVG, AVI antics i més</b>! Escaneja la col·lecció multimèdia per identificar <b>fotos en moviment</b>, <b>panoràmiques</b>(àlies esferes de fotos), <b>vídeos 360.</b>, així com fitxers <b>GeoTIFF</b>.
<b>Navigation and search</b> is an important part of <i>Aves</i>. The goal is for users to easily flow from albums to photos to tags to maps, etc. <b>Navegació i cerca</b> és una part important de <i>Aves</i>. L'objectiu és que els usuaris puguin fluir fàcilment d'àlbums a fotos, etiquetes a mapes, etc.
<i>Aves</i> integrates with Android (including Android TV) with features such as <b>widgets</b>, <b>app shortcuts</b>, <b>screen saver</b> and <b>global search</b> handling. It also works as a <b>media viewer and picker</b>. <i>Aves</i> s'integra amb Android (incloent Android TV) amb funcions com <b>ginys</b>, <b>dreceres d'aplicació</b>, <b>estalvi de pantalla</b>i gestió de<b> la cerca global</b>. També funciona com a <b>visor i selector multimèdia</b>.

Binary files not shown

View file

@ -1 +1 @@
Gallery and metadata explorer Galeria i explorador de metadades

Binary files not shown

Some files were not shown because too many files have changed in this diff.