// lib/model/db/db_sqflite.dart

import 'dart:io';

import 'package:aves/model/covers.dart';
import 'package:aves/model/db/db.dart';
import 'package:aves/model/db/db_sqflite_schema.dart';
import 'package:aves/model/db/db_sqflite_upgrade.dart';
import 'package:aves/model/dynamic_albums.dart';
import 'package:aves/model/entry/entry.dart';
import 'package:aves/model/favourites.dart';
import 'package:aves/model/filters/filters.dart';
import 'package:aves/model/metadata/address.dart';
import 'package:aves/model/metadata/catalog.dart';
import 'package:aves/model/metadata/trash.dart';
import 'package:aves/model/vaults/details.dart';
import 'package:aves/model/viewer/video_playback.dart';
import 'package:aves/services/common/services.dart';
import 'package:collection/collection.dart';
import 'package:flutter/foundation.dart';
import 'package:sqflite/sqflite.dart';

// --- REMOVED: remote sync import from the DB layer ---
// import 'package:aves/remote/run_remote_sync.dart' as remote;

/// sqflite-backed implementation of [LocalMediaDb].
///
/// Owns a single [Database] connection, opened by [init] and recreated by [reset].
/// Table names are declared in [SqfliteLocalMediaDbSchema]; schema upgrades are
/// delegated to [LocalMediaDbUpgrader].
class SqfliteLocalMediaDb implements LocalMediaDb {
  late Database _db;

  // 4A: exposes the native connection opened by the loader
  @override
  Database get rawDb => _db;

  @override
  Future<String> get path async => pContext.join(await getDatabasesPath(), 'metadata.db');

  static const entryTable = SqfliteLocalMediaDbSchema.entryTable;
  static const dateTakenTable = SqfliteLocalMediaDbSchema.dateTakenTable;
  static const metadataTable = SqfliteLocalMediaDbSchema.metadataTable;
  static const addressTable = SqfliteLocalMediaDbSchema.addressTable;
  static const favouriteTable = SqfliteLocalMediaDbSchema.favouriteTable;
  static const coverTable = SqfliteLocalMediaDbSchema.coverTable;
  static const dynamicAlbumTable = SqfliteLocalMediaDbSchema.dynamicAlbumTable;
  static const vaultTable = SqfliteLocalMediaDbSchema.vaultTable;
  static const trashTable = SqfliteLocalMediaDbSchema.trashTable;
  static const videoPlaybackTable = SqfliteLocalMediaDbSchema.videoPlaybackTable;

  static const _entryInsertSliceMaxCount = 10000; // number of entries
  static const _queryCursorBufferSize = 1000; // number of rows

  static int _lastId = 0;

  // IDs are allocated monotonically above the highest ID found in `entryTable` at init time.
  @override
  int get nextId => ++_lastId;

  @override
  Future<void> init() async {
    _db = await openDatabase(
      await path,
      onCreate: (db, version) => SqfliteLocalMediaDbSchema.createLatestVersion(db),
      onUpgrade: LocalMediaDbUpgrader.upgradeDb,
      version: 15,
    );

    // service log: path of the DB used by the loader (best effort, never fatal)
    try {
      final dbPath = await path;
      debugPrint('[localMediaDb] opened db path=$dbPath');
    } catch (_) {}

    // idempotent guard: ensures columns/indexes needed for remote entries exist
    await _ensureRemoteColumns(_db);

    final maxIdRows = await _db.rawQuery('SELECT MAX(id) AS maxId FROM $entryTable');
    _lastId = (maxIdRows.firstOrNull?['maxId'] as int?) ?? 0;

    // --- REMOVED: debug-time remote sync trigger ----------------------------
    // Sync is now started ONLY from HomePage (rrs.runRemoteSyncOnceManaged()) post-loading.
  }

  @override
  Future<int> dbFileSize() async {
    final file = File(await path);
    return await file.exists() ? await file.length() : 0;
  }

  @override
  Future<void> reset() async {
    debugPrint('$runtimeType reset');
    await _db.close();
    await deleteDatabase(await path);
    await init();
  }

  /// Removes the given entry IDs from the tables matching [dataTypes]
  /// (all data types when null).
  @override
  Future<void> removeIds(Set<int> ids, {Set<EntryDataType>? dataTypes}) async {
    if (ids.isEmpty) return;

    final _dataTypes = dataTypes ?? EntryDataType.values.toSet();
    final batch = _db.batch();
    const where = 'id = ?';
    // the cover table references entries via `entryId`, not `id`
    const coverWhere = 'entryId = ?';
    ids.forEach((id) {
      final whereArgs = [id];
      if (_dataTypes.contains(EntryDataType.basic)) {
        batch.delete(entryTable, where: where, whereArgs: whereArgs);
      }
      if (_dataTypes.contains(EntryDataType.catalog)) {
        batch.delete(dateTakenTable, where: where, whereArgs: whereArgs);
        batch.delete(metadataTable, where: where, whereArgs: whereArgs);
      }
      if (_dataTypes.contains(EntryDataType.address)) {
        batch.delete(addressTable, where: where, whereArgs: whereArgs);
      }
      if (_dataTypes.contains(EntryDataType.references)) {
        batch.delete(favouriteTable, where: where, whereArgs: whereArgs);
        batch.delete(coverTable, where: coverWhere, whereArgs: whereArgs);
        batch.delete(trashTable, where: where, whereArgs: whereArgs);
        batch.delete(videoPlaybackTable, where: where, whereArgs: whereArgs);
      }
    });
    await batch.commit(noResult: true);
  }

  // entries

  @override
  Future<void> clearEntries() async {
    final count = await _db.delete(entryTable, where: '1');
    debugPrint('$runtimeType clearEntries deleted $count rows');
  }

  /// Loads entries, optionally filtered by [origin] and/or [directory].
  ///
  /// When [directory] is set, only entries directly inside it are kept
  /// (entries in subfolders are skipped).
  @override
  Future<Set<AvesEntry>> loadEntries({int? origin, String? directory}) async {
    // dynamic WHERE clause construction
    String? where;
    final whereArgs = <Object>[];
    if (origin != null) {
      where = 'origin = ?';
      whereArgs.add(origin);
    }

    // diagnostics: record the logical input parameters
    debugPrint('[localMediaDb] loadEntries(origin=$origin, directory=$directory)');

    final entries = <AvesEntry>{};
    if (directory != null) {
      final separator = pContext.separator;
      if (!directory.endsWith(separator)) {
        directory = '$directory$separator';
      }
      where = '${where != null ? '$where AND ' : ''}path LIKE ?';
      whereArgs.add('$directory%');
      debugPrint('[localMediaDb] SQL(queryCursor, by directory) WHERE="$where" args=$whereArgs');
      final cursor = await _db.queryCursor(entryTable, where: where, whereArgs: whereArgs, bufferSize: _queryCursorBufferSize);
      final dirLength = directory.length;
      var readRows = 0;
      while (await cursor.moveNext()) {
        final row = cursor.current;
        readRows++;
        // skip entries in subfolders
        final path = row['path'] as String?;
        if (path != null && !path.substring(dirLength).contains(separator)) {
          entries.add(AvesEntry.fromMap(row));
        }
      }
      debugPrint('[localMediaDb] rows read=$readRows kept=${entries.length} (directory filter)');
    } else {
      debugPrint('[localMediaDb] SQL(queryCursor) WHERE="${where ?? '—'}" args=$whereArgs');
      final cursor = await _db.queryCursor(entryTable, where: where, whereArgs: whereArgs, bufferSize: _queryCursorBufferSize);
      var readRows = 0;
      while (await cursor.moveNext()) {
        readRows++;
        entries.add(AvesEntry.fromMap(cursor.current));
      }
      debugPrint('[localMediaDb] rows read=$readRows kept=${entries.length}');

      // diagnostics-only fallback: when looking for remote entries (origin==1)
      // but finding none, check whether the table actually contains origin=1 rows
      if ((origin == 1) && entries.isEmpty) {
        try {
          final countRows = await _db.rawQuery('SELECT COUNT(*) AS c FROM $entryTable WHERE origin=1');
          final c = (countRows.firstOrNull?['c'] as int?) ?? 0;
          debugPrint('[localMediaDb][diag] check COUNT(origin=1)=$c');
          if (c > 0) {
            final sample = await _db.rawQuery(
              'SELECT id, remoteId, provider, uri, trashed, dateModifiedMillis '
              'FROM $entryTable WHERE origin=1 ORDER BY id DESC LIMIT 5',
            );
            debugPrint('[localMediaDb][diag] sample origin=1 = $sample');
          }
        } catch (e, st) {
          debugPrint('[localMediaDb][diag] fallback check failed: $e\n$st');
        }
      }
    }
    return entries;
  }

  @override
  Future<Set<AvesEntry>> loadEntriesById(Set<int> ids) => _getByIds(ids, entryTable, AvesEntry.fromMap);

  /// Inserts entries in slices of [_entryInsertSliceMaxCount] to bound batch size.
  @override
  Future<void> insertEntries(Set<AvesEntry> entries) async {
    if (entries.isEmpty) return;

    final stopwatch = Stopwatch()..start();
    var inserted = 0;
    await Future.forEach(entries.slices(_entryInsertSliceMaxCount), (slice) async {
      debugPrint('$runtimeType saveEntries inserting slice of [${inserted + 1}, ${inserted + slice.length}] entries');
      final batch = _db.batch();
      slice.forEach((entry) => _batchInsertEntry(batch, entry));
      await batch.commit(noResult: true);
      inserted += slice.length;
    });
    debugPrint('$runtimeType saveEntries complete in ${stopwatch.elapsed.inMilliseconds}ms for ${entries.length} entries');
  }

  @override
  Future<void> updateEntry(int id, AvesEntry entry) async {
    final batch = _db.batch();
    batch.delete(entryTable, where: 'id = ?', whereArgs: [id]);
    _batchInsertEntry(batch, entry);
    await batch.commit(noResult: true);
  }

  void _batchInsertEntry(Batch batch, AvesEntry entry) {
    batch.insert(
      entryTable,
      entry.toDatabaseMap(),
      conflictAlgorithm: ConflictAlgorithm.replace,
    );
  }

  /// Searches non-trashed entries whose title or path contains [query],
  /// most recent first.
  @override
  Future<Set<AvesEntry>> searchLiveEntries(String query, {int? limit}) async {
    final rows = await _db.query(
      entryTable,
      where: '(title LIKE ? OR path LIKE ?) AND trashed = ?',
      whereArgs: ['%$query%', '%$query%', 0],
      orderBy: 'sourceDateTakenMillis DESC',
      limit: limit,
    );
    return rows.map(AvesEntry.fromMap).toSet();
  }

  /// Finds non-trashed entries sharing a `contentId` with another row,
  /// keeping the one with the highest ID per `contentId` group.
  @override
  Future<Set<AvesEntry>> searchLiveDuplicates(int origin, Set<AvesEntry>? entries) async {
    var where = 'origin = ? AND trashed = ?';
    if (entries != null) {
      // contentIds are integers, so joining them inline is safe from injection
      where += ' AND contentId IN (${entries.map((v) => v.contentId).join(',')})';
    }
    final rows = await _db.rawQuery(
      'SELECT *, MAX(id) AS id'
      ' FROM $entryTable'
      ' WHERE $where'
      ' GROUP BY contentId'
      ' HAVING COUNT(id) > 1',
      [origin, 0],
    );
    final duplicates = rows.map(AvesEntry.fromMap).toSet();
    if (duplicates.isNotEmpty) {
      debugPrint('$runtimeType found duplicates=$duplicates');
    }
    return duplicates;
  }

  // date taken

  @override
  Future<void> clearDates() async {
    final count = await _db.delete(dateTakenTable, where: '1');
    debugPrint('$runtimeType clearDates deleted $count rows');
  }

  /// Returns a map of entry ID to date taken (in millis, 0 when unknown).
  @override
  Future<Map<int, int>> loadDates() async {
    final result = <int, int>{};
    final cursor = await _db.queryCursor(dateTakenTable, bufferSize: _queryCursorBufferSize);
    while (await cursor.moveNext()) {
      final row = cursor.current;
      result[row['id'] as int] = row['dateMillis'] as int? ?? 0;
    }
    return result;
  }

  // catalog metadata

  @override
  Future<void> clearCatalogMetadata() async {
    final count = await _db.delete(metadataTable, where: '1');
    debugPrint('$runtimeType clearMetadataEntries deleted $count rows');
  }

  @override
  Future<Set<CatalogMetadata>> loadCatalogMetadata() async {
    final result = <CatalogMetadata>{};
    final cursor = await _db.queryCursor(metadataTable, bufferSize: _queryCursorBufferSize);
    while (await cursor.moveNext()) {
      result.add(CatalogMetadata.fromMap(cursor.current));
    }
    return result;
  }

  @override
  Future<Set<CatalogMetadata>> loadCatalogMetadataById(Set<int> ids) => _getByIds(ids, metadataTable, CatalogMetadata.fromMap);

  @override
  Future<void> saveCatalogMetadata(Set<CatalogMetadata> metadataEntries) async {
    if (metadataEntries.isEmpty) return;

    final stopwatch = Stopwatch()..start();
    try {
      final batch = _db.batch();
      metadataEntries.forEach((metadata) => _batchInsertMetadata(batch, metadata));
      await batch.commit(noResult: true);
      debugPrint('$runtimeType saveMetadata complete in ${stopwatch.elapsed.inMilliseconds}ms for ${metadataEntries.length} entries');
    } catch (error, stack) {
      debugPrint('$runtimeType failed to save metadata with error=$error\n$stack');
    }
  }

  @override
  Future<void> updateCatalogMetadata(int id, CatalogMetadata? metadata) async {
    final batch = _db.batch();
    batch.delete(dateTakenTable, where: 'id = ?', whereArgs: [id]);
    batch.delete(metadataTable, where: 'id = ?', whereArgs: [id]);
    _batchInsertMetadata(batch, metadata);
    await batch.commit(noResult: true);
  }

  void _batchInsertMetadata(Batch batch, CatalogMetadata? metadata) {
    if (metadata == null) return;

    // a zero date means "unknown", so no date row is written for it
    if (metadata.dateMillis != 0) {
      batch.insert(
        dateTakenTable,
        {
          'id': metadata.id,
          'dateMillis': metadata.dateMillis,
        },
        conflictAlgorithm: ConflictAlgorithm.replace,
      );
    }
    batch.insert(
      metadataTable,
      metadata.toMap(),
      conflictAlgorithm: ConflictAlgorithm.replace,
    );
  }

  // address

  @override
  Future<void> clearAddresses() async {
    final count = await _db.delete(addressTable, where: '1');
    debugPrint('$runtimeType clearAddresses deleted $count rows');
  }

  @override
  Future<Set<AddressDetails>> loadAddresses() async {
    final result = <AddressDetails>{};
    final cursor = await _db.queryCursor(addressTable, bufferSize: _queryCursorBufferSize);
    while (await cursor.moveNext()) {
      result.add(AddressDetails.fromMap(cursor.current));
    }
    return result;
  }

  @override
  Future<Set<AddressDetails>> loadAddressesById(Set<int> ids) => _getByIds(ids, addressTable, AddressDetails.fromMap);

  @override
  Future<void> saveAddresses(Set<AddressDetails> addresses) async {
    if (addresses.isEmpty) return;

    final stopwatch = Stopwatch()..start();
    final batch = _db.batch();
    addresses.forEach((address) => _batchInsertAddress(batch, address));
    await batch.commit(noResult: true);
    debugPrint('$runtimeType saveAddresses complete in ${stopwatch.elapsed.inMilliseconds}ms for ${addresses.length} entries');
  }

  @override
  Future<void> updateAddress(int id, AddressDetails? address) async {
    final batch = _db.batch();
    batch.delete(addressTable, where: 'id = ?', whereArgs: [id]);
    _batchInsertAddress(batch, address);
    await batch.commit(noResult: true);
  }

  void _batchInsertAddress(Batch batch, AddressDetails? address) {
    if (address == null) return;

    batch.insert(
      addressTable,
      address.toMap(),
      conflictAlgorithm: ConflictAlgorithm.replace,
    );
  }

  // vaults

  @override
  Future<void> clearVaults() async {
    final count = await _db.delete(vaultTable, where: '1');
    debugPrint('$runtimeType clearVaults deleted $count rows');
  }

  @override
  Future<Set<VaultDetails>> loadAllVaults() async {
    final result = <VaultDetails>{};
    final cursor = await _db.queryCursor(vaultTable, bufferSize: _queryCursorBufferSize);
    while (await cursor.moveNext()) {
      result.add(VaultDetails.fromMap(cursor.current));
    }
    return result;
  }

  @override
  Future<void> addVaults(Set<VaultDetails> rows) async {
    if (rows.isEmpty) return;

    final batch = _db.batch();
    rows.forEach((row) => _batchInsertVault(batch, row));
    await batch.commit(noResult: true);
  }

  @override
  Future<void> updateVault(String oldName, VaultDetails row) async {
    final batch = _db.batch();
    batch.delete(vaultTable, where: 'name = ?', whereArgs: [oldName]);
    _batchInsertVault(batch, row);
    await batch.commit(noResult: true);
  }

  void _batchInsertVault(Batch batch, VaultDetails row) {
    batch.insert(
      vaultTable,
      row.toMap(),
      conflictAlgorithm: ConflictAlgorithm.replace,
    );
  }

  @override
  Future<void> removeVaults(Set<VaultDetails> rows) async {
    if (rows.isEmpty) return;

    final batch = _db.batch();
    rows.map((v) => v.name).forEach((name) => batch.delete(vaultTable, where: 'name = ?', whereArgs: [name]));
    await batch.commit(noResult: true);
  }

  // trash

  @override
  Future<void> clearTrashDetails() async {
    final count = await _db.delete(trashTable, where: '1');
    debugPrint('$runtimeType clearTrashDetails deleted $count rows');
  }

  @override
  Future<Set<TrashDetails>> loadAllTrashDetails() async {
    final result = <TrashDetails>{};
    final cursor = await _db.queryCursor(trashTable, bufferSize: _queryCursorBufferSize);
    while (await cursor.moveNext()) {
      result.add(TrashDetails.fromMap(cursor.current));
    }
    return result;
  }

  @override
  Future<void> updateTrash(int id, TrashDetails? details) async {
    final batch = _db.batch();
    batch.delete(trashTable, where: 'id = ?', whereArgs: [id]);
    _batchInsertTrashDetails(batch, details);
    await batch.commit(noResult: true);
  }

  void _batchInsertTrashDetails(Batch batch, TrashDetails? details) {
    if (details == null) return;

    batch.insert(
      trashTable,
      details.toMap(),
      conflictAlgorithm: ConflictAlgorithm.replace,
    );
  }

  // favourites

  @override
  Future<void> clearFavourites() async {
    final count = await _db.delete(favouriteTable, where: '1');
    debugPrint('$runtimeType clearFavourites deleted $count rows');
  }

  @override
  Future<Set<FavouriteRow>> loadAllFavourites() async {
    final result = <FavouriteRow>{};
    final cursor = await _db.queryCursor(favouriteTable, bufferSize: _queryCursorBufferSize);
    while (await cursor.moveNext()) {
      result.add(FavouriteRow.fromMap(cursor.current));
    }
    return result;
  }

  @override
  Future<void> addFavourites(Set<FavouriteRow> rows) async {
    if (rows.isEmpty) return;

    final batch = _db.batch();
    rows.forEach((row) => _batchInsertFavourite(batch, row));
    await batch.commit(noResult: true);
  }

  @override
  Future<void> updateFavouriteId(int id, FavouriteRow row) async {
    final batch = _db.batch();
    batch.delete(favouriteTable, where: 'id = ?', whereArgs: [id]);
    _batchInsertFavourite(batch, row);
    await batch.commit(noResult: true);
  }

  void _batchInsertFavourite(Batch batch, FavouriteRow row) {
    batch.insert(
      favouriteTable,
      row.toMap(),
      conflictAlgorithm: ConflictAlgorithm.replace,
    );
  }

  @override
  Future<void> removeFavourites(Set<FavouriteRow> rows) async {
    if (rows.isEmpty) return;

    // `whereType<int>()` drops null entry IDs before deletion
    final ids = rows.map((row) => row.entryId).whereType<int>().toSet();
    if (ids.isEmpty) return;

    final batch = _db.batch();
    for (final id in ids) {
      batch.delete(favouriteTable, where: 'id = ?', whereArgs: [id]);
    }
    await batch.commit(noResult: true);
  }

  // covers

  @override
  Future<void> clearCovers() async {
    final count = await _db.delete(coverTable, where: '1');
    debugPrint('$runtimeType clearCovers deleted $count rows');
  }

  @override
  Future<Set<CoverRow>> loadAllCovers() async {
    final result = <CoverRow>{};
    final cursor = await _db.queryCursor(coverTable, bufferSize: _queryCursorBufferSize);
    while (await cursor.moveNext()) {
      final rowMap = cursor.current;
      final row = CoverRow.fromMap(rowMap);
      if (row != null) {
        result.add(row);
      } else {
        debugPrint('$runtimeType failed to deserialize cover from row=$rowMap');
      }
    }
    return result;
  }

  @override
  Future<void> addCovers(Set<CoverRow> rows) async {
    if (rows.isEmpty) return;

    final batch = _db.batch();
    rows.forEach((row) => _batchInsertCover(batch, row));
    await batch.commit(noResult: true);
  }

  @override
  Future<void> updateCoverEntryId(int id, CoverRow row) async {
    final batch = _db.batch();
    batch.delete(coverTable, where: 'entryId = ?', whereArgs: [id]);
    _batchInsertCover(batch, row);
    await batch.commit(noResult: true);
  }

  void _batchInsertCover(Batch batch, CoverRow row) {
    batch.insert(
      coverTable,
      row.toMap(),
      conflictAlgorithm: ConflictAlgorithm.replace,
    );
  }

  @override
  Future<void> removeCovers(Set<CollectionFilter> filters) async {
    if (filters.isEmpty) return;

    // for backward compatibility, remove stored JSON instead of removing de/reserialized filters
    final obsoleteFilterJson = <String>{};
    final rows = await _db.query(coverTable);
    rows.forEach((row) {
      final filterJson = row['filter'] as String?;
      if (filterJson != null) {
        final filter = CollectionFilter.fromJson(filterJson);
        if (filters.any((v) => filter == v)) {
          obsoleteFilterJson.add(filterJson);
        }
      }
    });

    final batch = _db.batch();
    obsoleteFilterJson.forEach((filterJson) => batch.delete(coverTable, where: 'filter = ?', whereArgs: [filterJson]));
    await batch.commit(noResult: true);
  }

  // dynamic albums

  @override
  Future<int> clearDynamicAlbums() async {
    final count = await _db.delete(dynamicAlbumTable, where: '1');
    debugPrint('$runtimeType clearDynamicAlbums deleted $count rows');
    return count;
  }

  /// Loads all dynamic albums.
  ///
  /// On cursor failure, retries with [bufferSize] of 1 to salvage readable rows,
  /// then rewrites the table with the salvaged set.
  @override
  Future<Set<DynamicAlbumRow>> loadAllDynamicAlbums({int bufferSize = _queryCursorBufferSize}) async {
    final result = <DynamicAlbumRow>{};
    try {
      final cursor = await _db.queryCursor(dynamicAlbumTable, bufferSize: bufferSize);
      while (await cursor.moveNext()) {
        final rowMap = cursor.current;
        final row = DynamicAlbumRow.fromMap(rowMap);
        if (row != null) {
          result.add(row);
        } else {
          debugPrint('$runtimeType failed to deserialize dynamic album from row=$rowMap');
        }
      }
    } catch (error, stack) {
      debugPrint('$runtimeType failed to query table=$dynamicAlbumTable error=$error\n$stack');
      if (bufferSize > 1) {
        debugPrint('$runtimeType retry to query table=$dynamicAlbumTable with no cursor buffer');
        final safeRows = await loadAllDynamicAlbums(bufferSize: 1);
        final clearedCount = await clearDynamicAlbums();
        await addDynamicAlbums(safeRows);
        final addedCount = safeRows.length;
        final lostCount = clearedCount - addedCount;
        debugPrint('$runtimeType kept $addedCount rows, lost $lostCount rows from table=$dynamicAlbumTable');
        return safeRows;
      }
    }
    return result;
  }

  @override
  Future<void> addDynamicAlbums(Set<DynamicAlbumRow> rows) async {
    if (rows.isEmpty) return;

    final batch = _db.batch();
    rows.forEach((row) => _batchInsertDynamicAlbum(batch, row));
    await batch.commit(noResult: true);
  }

  void _batchInsertDynamicAlbum(Batch batch, DynamicAlbumRow row) {
    batch.insert(
      dynamicAlbumTable,
      row.toMap(),
      conflictAlgorithm: ConflictAlgorithm.replace,
    );
  }

  @override
  Future<void> removeDynamicAlbums(Set<String> names) async {
    if (names.isEmpty) return;

    final batch = _db.batch();
    names.forEach((name) => batch.delete(dynamicAlbumTable, where: 'name = ?', whereArgs: [name]));
    await batch.commit(noResult: true);
  }

  // video playback

  @override
  Future<void> clearVideoPlayback() async {
    final count = await _db.delete(videoPlaybackTable, where: '1');
    debugPrint('$runtimeType clearVideoPlayback deleted $count rows');
  }

  @override
  Future<Set<VideoPlaybackRow>> loadAllVideoPlayback() async {
    final result = <VideoPlaybackRow>{};
    final cursor = await _db.queryCursor(videoPlaybackTable, bufferSize: _queryCursorBufferSize);
    while (await cursor.moveNext()) {
      final rowMap = cursor.current;
      final row = VideoPlaybackRow.fromMap(rowMap);
      if (row != null) {
        result.add(row);
      } else {
        debugPrint('$runtimeType failed to deserialize video playback from row=$rowMap');
      }
    }
    return result;
  }

  @override
  Future<VideoPlaybackRow?> loadVideoPlayback(int id) async {
    final rows = await _db.query(videoPlaybackTable, where: 'id = ?', whereArgs: [id]);
    if (rows.isEmpty) return null;

    return VideoPlaybackRow.fromMap(rows.first);
  }

  @override
  Future<void> addVideoPlayback(Set<VideoPlaybackRow> rows) async {
    if (rows.isEmpty) return;

    final batch = _db.batch();
    rows.forEach((row) => _batchInsertVideoPlayback(batch, row));
    await batch.commit(noResult: true);
  }

  void _batchInsertVideoPlayback(Batch batch, VideoPlaybackRow row) {
    batch.insert(
      videoPlaybackTable,
      row.toMap(),
      conflictAlgorithm: ConflictAlgorithm.replace,
    );
  }

  @override
  Future<void> removeVideoPlayback(Set<int> ids) async {
    if (ids.isEmpty) return;

    final batch = _db.batch();
    ids.forEach((id) => batch.delete(videoPlaybackTable, where: 'id = ?', whereArgs: [id]));
    await batch.commit(noResult: true);
  }

  // convenience methods

  /// Loads rows from [table] by entry ID and maps each through [mapRow].
  // IDs are integers, so joining them inline is safe from injection.
  Future<Set<T>> _getByIds<T>(Set<int> ids, String table, T Function(Map<String, Object?> row) mapRow) async {
    final result = <T>{};
    if (ids.isNotEmpty) {
      final cursor = await _db.queryCursor(table, where: 'id IN (${ids.join(',')})', bufferSize: _queryCursorBufferSize);
      while (await cursor.moveNext()) {
        result.add(mapRow(cursor.current));
      }
    }
    return result;
  }

  // --------------------------------------------------------
  // "GUARD" MIGRATION FOR REMOTE COLUMNS
  // --------------------------------------------------------

  /// Idempotently adds columns and indexes required by remote entries.
  ///
  /// Safe to run on every [init]: `ALTER TABLE` failures (e.g. duplicate column)
  /// are deliberately swallowed, and indexes use `IF NOT EXISTS`.
  Future<void> _ensureRemoteColumns(Database db) async {
    Future<Set<String>> columnsOf(String table) async {
      final rows = await db.rawQuery('PRAGMA table_info($table)');
      // lowercase to make the presence checks case-insensitive
      return rows.map((r) => (r['name'] as String).toLowerCase()).toSet();
    }

    final cols = await columnsOf(entryTable);
    final alters = [
      if (!cols.contains('origin')) 'ALTER TABLE $entryTable ADD COLUMN origin INTEGER',
      if (!cols.contains('trashed')) 'ALTER TABLE $entryTable ADD COLUMN trashed INTEGER',
      if (!cols.contains('provider')) 'ALTER TABLE $entryTable ADD COLUMN provider TEXT',
      if (!cols.contains('remoteid')) 'ALTER TABLE $entryTable ADD COLUMN remoteId TEXT',
      if (!cols.contains('remotepath')) 'ALTER TABLE $entryTable ADD COLUMN remotePath TEXT',
      if (!cols.contains('remotethumb1')) 'ALTER TABLE $entryTable ADD COLUMN remoteThumb1 TEXT',
      if (!cols.contains('remotethumb2')) 'ALTER TABLE $entryTable ADD COLUMN remoteThumb2 TEXT',
      if (!cols.contains('remotewidth')) 'ALTER TABLE $entryTable ADD COLUMN remoteWidth INTEGER',
      if (!cols.contains('remoteheight')) 'ALTER TABLE $entryTable ADD COLUMN remoteHeight INTEGER',
      if (!cols.contains('remoterotation')) 'ALTER TABLE $entryTable ADD COLUMN remoteRotation INTEGER',
      if (!cols.contains('latitude')) 'ALTER TABLE $entryTable ADD COLUMN latitude REAL',
      if (!cols.contains('longitude')) 'ALTER TABLE $entryTable ADD COLUMN longitude REAL',
      if (!cols.contains('altitude')) 'ALTER TABLE $entryTable ADD COLUMN altitude REAL',
    ];
    for (final sql in alters) {
      try {
        await db.execute(sql);
      } catch (_) {
        // idempotence: ignore "duplicate column name" etc.
      }
    }

    // indexes useful for remote entries
    try {
      await db.execute('CREATE INDEX IF NOT EXISTS entry_remote_idx ON $entryTable(origin, remoteId)');
    } catch (_) {}
    try {
      await db.execute('CREATE INDEX IF NOT EXISTS entry_origin_trashed_idx ON $entryTable(origin, trashed)');
    } catch (_) {}
  }
}