feat: Add TripImageService for automatic trip image management
- Implemented TripImageService to load missing images for trips, reload images, and clean up unused images.
- Added functionality to get image statistics and clean up duplicate images.
- Created utility scripts for manual image cleanup and diagnostics in Firebase Storage.
- Introduced tests for image loading optimization and photo quality algorithms.
- Updated dependencies in pubspec.yaml and pubspec.lock for image handling.
This commit is contained in:
131
scripts/diagnose_images.dart
Normal file
131
scripts/diagnose_images.dart
Normal file
@@ -0,0 +1,131 @@
|
||||
import 'package:firebase_core/firebase_core.dart';
import 'package:firebase_storage/firebase_storage.dart';

import '../lib/firebase_options.dart';

/// Diagnostic script that inventories the images stored under the
/// `trip_images/` folder in Firebase Storage.
///
/// For each file it prints the custom metadata (location, normalized
/// location, source, upload timestamp) and the download URL, then groups
/// the images by a case-insensitive location key to report how many
/// duplicates could be deleted. Errors are reported but never rethrown:
/// this is a read-only, best-effort diagnostic.
void main() async {
  print('🔍 Diagnostic des images Firebase Storage');
  print('=========================================');

  try {
    // Firebase must be initialized before any Storage call.
    await Firebase.initializeApp(
      options: DefaultFirebaseOptions.currentPlatform,
    );

    final storage = FirebaseStorage.instance;

    print('📂 Analyse du dossier trip_images...');
    final listResult = await storage.ref('trip_images').listAll();

    if (listResult.items.isEmpty) {
      print('❌ Aucune image trouvée dans trip_images/');
      return;
    }

    print('📊 ${listResult.items.length} image(s) trouvée(s):');
    print('');

    // Images grouped by location key; more than one entry per key means
    // duplicates that can be cleaned up.
    final Map<String, List<Map<String, dynamic>>> locationGroups = {};

    for (int i = 0; i < listResult.items.length; i++) {
      final item = listResult.items[i];
      final fileName = item.name;

      print('${i + 1}. Fichier: $fileName');

      try {
        // Read the custom metadata attached at upload time.
        final metadata = await item.getMetadata();
        final customMeta = metadata.customMetadata ?? {};

        final location = customMeta['location'] ?? 'Inconnue';
        final normalizedLocation = customMeta['normalizedLocation'] ?? 'Non définie';
        final source = customMeta['source'] ?? 'Inconnue';
        final uploadedAt = customMeta['uploadedAt'] ?? 'Inconnue';

        print(' 📍 Location: $location');
        print(' 🏷️ Normalized: $normalizedLocation');
        print(' 📤 Source: $source');
        print(' 📅 Upload: $uploadedAt');

        // Fetch the public download URL for the report.
        final downloadUrl = await item.getDownloadURL();
        print(' 🔗 URL: $downloadUrl');

        // Prefer the normalized location; otherwise fall back to the raw
        // location lowercased so casing differences still collide.
        final groupKey = normalizedLocation != 'Non définie'
            ? normalizedLocation
            : location.toLowerCase();
        (locationGroups[groupKey] ??= []).add({
          'fileName': fileName,
          'location': location,
          'normalizedLocation': normalizedLocation,
          'uploadedAt': uploadedAt,
          'downloadUrl': downloadUrl,
        });
      } catch (e) {
        print(' ❌ Erreur lecture métadonnées: $e');

        // Metadata is unreadable: guess the location from the file name
        // (naming convention appears to be `<location>_<suffix>`).
        final parts = fileName.split('_');
        if (parts.length >= 2) {
          final guessedLocation = parts.take(parts.length - 1).join('_');
          print(' 🤔 Location devinée: $guessedLocation');

          // Fix: lowercase the guessed key so these entries group with
          // the metadata path above, which keys by location.toLowerCase().
          (locationGroups[guessedLocation.toLowerCase()] ??= []).add({
            'fileName': fileName,
            'location': 'Devinée: $guessedLocation',
            'normalizedLocation': 'Non définie',
            'uploadedAt': 'Inconnue',
            'downloadUrl': 'Non récupérée',
          });
        }
      }

      print('');
    }

    // Report duplicate groups (more than one image per location key).
    print('🔍 Analyse des doublons par location:');
    print('====================================');

    int totalDuplicates = 0;
    for (final entry in locationGroups.entries) {
      final location = entry.key;
      final images = entry.value;

      if (images.length > 1) {
        print('⚠️ DOUBLONS détectés pour "$location": ${images.length} images');
        // Keep one image per location; the rest are redundant.
        totalDuplicates += images.length - 1;

        for (int i = 0; i < images.length; i++) {
          final image = images[i];
          print(' ${i + 1}. ${image['fileName']} (${image['uploadedAt']})');
        }
        print('');
      } else {
        print('✅ "$location": 1 image (OK)');
      }
    }

    print('📈 Résumé:');
    print('- Total images: ${listResult.items.length}');
    print('- Locations uniques: ${locationGroups.length}');
    print('- Images en doublon: $totalDuplicates');
    print('- Économie possible: $totalDuplicates images peuvent être supprimées');

    if (totalDuplicates > 0) {
      print('');
      print('💡 Suggestion: Utilisez la fonctionnalité de nettoyage pour supprimer les doublons');
    }
  } catch (e) {
    // Top-level guard: surface any failure without crashing the script.
    print('❌ Erreur lors du diagnostic: $e');
  }
}
|
||||
Reference in New Issue
Block a user