|
@@ -116,6 +116,39 @@ final Map<String, String> scopes = {
|
|
|
"ExportPGRGPNmois.txt": "date",
|
|
"ExportPGRGPNmois.txt": "date",
|
|
|
"exportlicence.txt": "tlc",
|
|
"exportlicence.txt": "tlc",
|
|
|
};
|
|
};
|
|
|
|
|
// Per export file: the column names used as a row's comparison identity.
// In processCsvData these keys are pulled out of each old DB row (via
// extractMapWithKeys) to decide whether that row's identity still exists
// in the freshly parsed data before deleting it.
// NOTE(review): despite the name, these columns are not removed from the
// rows themselves — confirm intent against processCsvData's usage.
final Map<String, List<String>> idToRemove = {
  "secondprgtype.txt": ["day_of_origin"],
  "exportPGRGPN.txt": ["date", "tlc"],
  "ExportPGRGPNmois.txt": ["date", "tlc"],
  "exportlicence.txt": ["tlc"],
};
|
|
|
|
|
// Per export file: the destination log table and the expected header
// columns, in the order they appear in the CSV.
final Map<String, Map<String, dynamic>> ids = {
  "secondprgtype.txt": {
    "table": "aclegs_log",
    "headers": [
      "day_of_origin",
      "dep_sched_dt",
      "fn_carrier",
      "fn_number",
      "dep_ap_sched",
      "arr_ap_sched",
      // "dep_ap_actual",
      // "arr_ap_actual"
    ]
  },
  "exportPGRGPN.txt": {
    "table": "pnlegs_log",
    // NOTE(review): "fnum," carries a trailing comma inside the string —
    // likely a typo for "fnum" unless the source CSV header really is
    // malformed; confirm against an actual export file.
    "headers": ["tlc", "date", "dep", "des", "al", "fnum,", "label"]
  },
  "ExportPGRGPNmois.txt": {
    "table": "pnlegs_log",
    // NOTE(review): same suspicious "fnum," header as exportPGRGPN.txt.
    "headers": ["tlc", "date", "dep", "des", "al", "fnum,", "label"]
  },
  "exportlicence.txt": {
    "table": "qualifs_log",
    "headers": ["tlc", "college", "ac", "base"]
  },
};
|
|
|
List<Map<String, String?>> createListOfMaps(
|
|
List<Map<String, String?>> createListOfMaps(
|
|
|
List<String> headers, List<List<dynamic>> data) {
|
|
List<String> headers, List<List<dynamic>> data) {
|
|
|
// Initialize an empty list to hold the maps
|
|
// Initialize an empty list to hold the maps
|
|
@@ -1241,29 +1274,33 @@ const maxItemsToRemove = 500;
|
|
|
processCsvData(File tempfile, SupabaseClient supabase) async {
|
|
processCsvData(File tempfile, SupabaseClient supabase) async {
|
|
|
List<FilesAsData> csvData = [];
|
|
List<FilesAsData> csvData = [];
|
|
|
final filename = tempfile.path.split('/').last;
|
|
final filename = tempfile.path.split('/').last;
|
|
|
- final bytes = await tempfile.readAsBytes();
|
|
|
|
|
// Check if file is a zip archive
|
|
// Check if file is a zip archive
|
|
|
- String? csvcontent;
|
|
|
|
|
|
|
+
|
|
|
if (filename.toLowerCase().endsWith('.zip')) {
|
|
if (filename.toLowerCase().endsWith('.zip')) {
|
|
|
- final archive = ZipDecoder().decodeBytes(bytes);
|
|
|
|
|
|
|
+ Archive archive = ZipDecoder().decodeBytes(await tempfile.readAsBytes());
|
|
|
for (final file in archive) {
|
|
for (final file in archive) {
|
|
|
if (!file.isFile) continue;
|
|
if (!file.isFile) continue;
|
|
|
|
|
|
|
|
- csvcontent = utf8.decode(file.content as List<int>);
|
|
|
|
|
- csvData.add(FilesAsData(filename: file.name, data: csvcontent));
|
|
|
|
|
|
|
+ csvData.add(FilesAsData(
|
|
|
|
|
+ filename: file.name, data: utf8.decode(file.content as List<int>)));
|
|
|
}
|
|
}
|
|
|
|
|
+ archive.clearSync();
|
|
|
} else {
|
|
} else {
|
|
|
// For non-zip files, store in extracted folder
|
|
// For non-zip files, store in extracted folder
|
|
|
- csvcontent = utf8.decode(bytes);
|
|
|
|
|
- csvData.add(FilesAsData(filename: filename, data: csvcontent));
|
|
|
|
|
|
|
+
|
|
|
|
|
+ csvData.add(FilesAsData(
|
|
|
|
|
+ filename: filename, data: utf8.decode(await tempfile.readAsBytes())));
|
|
|
}
|
|
}
|
|
|
|
|
|
|
|
- for (final FilesAsData data in csvData) {
|
|
|
|
|
|
|
+ while (csvData.isNotEmpty) {
|
|
|
|
|
+ final data = csvData.first;
|
|
|
|
|
+ csvData.removeAt(0);
|
|
|
print(" processing ${data.filename}");
|
|
print(" processing ${data.filename}");
|
|
|
//inserting data
|
|
//inserting data
|
|
|
if (tables.keys.contains(data.filename)) {
|
|
if (tables.keys.contains(data.filename)) {
|
|
|
- final mapsToInsert =
|
|
|
|
|
|
|
+ List<Map<String, String?>> mapsToInsert =
|
|
|
createListOfMaps(headers[data.filename] ?? [], csv2list(data.data));
|
|
createListOfMaps(headers[data.filename] ?? [], csv2list(data.data));
|
|
|
|
|
+ final completeMapsToInsert = mapsToInsert;
|
|
|
final scopeName = scopes[data.filename!]!;
|
|
final scopeName = scopes[data.filename!]!;
|
|
|
final scopeInNew = mapsToInsert
|
|
final scopeInNew = mapsToInsert
|
|
|
.fold(<String>{}, (t, e) => t..add(e[scopeName] ?? "")).toList();
|
|
.fold(<String>{}, (t, e) => t..add(e[scopeName] ?? "")).toList();
|
|
@@ -1287,59 +1324,46 @@ processCsvData(File tempfile, SupabaseClient supabase) async {
|
|
|
|
|
|
|
|
List<int> indexToRemove = [];
|
|
List<int> indexToRemove = [];
|
|
|
List<int> indexToMaintain = [];
|
|
List<int> indexToMaintain = [];
|
|
|
|
|
+
|
|
|
// print("Deleting old data scope: ${mapsToInsert.length}");
|
|
// print("Deleting old data scope: ${mapsToInsert.length}");
|
|
|
|
|
+ final keys2check = idToRemove[data.filename]!;
|
|
|
|
|
+ final mawjoudin = oldComparable.fold(<Map>{},
|
|
|
|
|
+ (t, e) => {...t, extractMapWithKeys(e, keys2check)}).toList();
|
|
|
for (int i = 0; i < oldComparable.length; i++) {
|
|
for (int i = 0; i < oldComparable.length; i++) {
|
|
|
final item = oldComparable[i];
|
|
final item = oldComparable[i];
|
|
|
final index = findIndex(mapsToInsert, item);
|
|
final index = findIndex(mapsToInsert, item);
|
|
|
- if (index != -1) {
|
|
|
|
|
|
|
+
|
|
|
|
|
+ final mawjood =
|
|
|
|
|
+ mawjoudin.contains(extractMapWithKeys(item, keys2check));
|
|
|
|
|
+ if (index > -1) {
|
|
|
indexToMaintain.add(i);
|
|
indexToMaintain.add(i);
|
|
|
mapsToInsert.removeAt(index);
|
|
mapsToInsert.removeAt(index);
|
|
|
|
|
+ } else if (!mawjood) {
|
|
|
|
|
+ if (!mawjood) {
|
|
|
|
|
+ // print("not deleted: ${extractMapWithKeys(item, keys2check)}");
|
|
|
|
|
+ }
|
|
|
} else {
|
|
} else {
|
|
|
indexToRemove.add(i);
|
|
indexToRemove.add(i);
|
|
|
}
|
|
}
|
|
|
}
|
|
}
|
|
|
|
|
|
|
|
- // print(
|
|
|
|
|
- // " indexToRemove: ${indexToRemove.length} , indexToMaintain: ${indexToMaintain.length}");
|
|
|
|
|
- // print("delete from db");
|
|
|
|
|
-
|
|
|
|
|
- // print("delete from db ${indexToRemove.length}");
|
|
|
|
|
-
|
|
|
|
|
|
|
+//removing index to remove with id
|
|
|
for (var e in splitList(indexToRemove.map((e) => old[e]['id']).toList(),
|
|
for (var e in splitList(indexToRemove.map((e) => old[e]['id']).toList(),
|
|
|
headerToNb(indexToRemove.map((e) => old[e]['id']).toList()))) {
|
|
headerToNb(indexToRemove.map((e) => old[e]['id']).toList()))) {
|
|
|
await supabase
|
|
await supabase
|
|
|
.from(tables[data.filename]!) // Replace with your actual table name
|
|
.from(tables[data.filename]!) // Replace with your actual table name
|
|
|
.delete()
|
|
.delete()
|
|
|
.inFilter('id', e);
|
|
.inFilter('id', e);
|
|
|
- // print("Deleted old data scope: ${e.length}");
|
|
|
|
|
}
|
|
}
|
|
|
|
|
+ // print("Deleted old data scope: ${e.length}");
|
|
|
|
|
|
|
|
- // splitList(indexToRemove.map((e) => old[e]['id']).toList(), 200).forEach(
|
|
|
|
|
- // (e) async => await supabase
|
|
|
|
|
- // .from(
|
|
|
|
|
- // tables[data.filename]!) // Replace with your actual table name
|
|
|
|
|
- // .delete()
|
|
|
|
|
- // .inFilter('id', e));
|
|
|
|
|
- // print("insert in db");
|
|
|
|
|
-
|
|
|
|
|
|
|
+//insering new data
|
|
|
await supabase
|
|
await supabase
|
|
|
.from(tables[data.filename]!) // Replace with your actual table name
|
|
.from(tables[data.filename]!) // Replace with your actual table name
|
|
|
.insert(mapsToInsert);
|
|
.insert(mapsToInsert);
|
|
|
|
|
|
|
|
print(
|
|
print(
|
|
|
" insert:${mapsToInsert.length} remove:${indexToRemove.length} maintain:${indexToMaintain.length}");
|
|
" insert:${mapsToInsert.length} remove:${indexToRemove.length} maintain:${indexToMaintain.length}");
|
|
|
- // for (var e in splitList(mapsToInsert, headerToNb(mapsToInsert))) {
|
|
|
|
|
- // await supabase
|
|
|
|
|
- // .from(tables[data.filename]!) // Replace with your actual table name
|
|
|
|
|
- // .insert(e);
|
|
|
|
|
- // print("Inserted old data scope: ${e.length}");
|
|
|
|
|
- // }
|
|
|
|
|
- // splitList(mapsToInsert, 200).forEach((e) async => await supabase
|
|
|
|
|
- // .from(tables[data.filename]!) // Replace with your actual table name
|
|
|
|
|
- // .insert(e));
|
|
|
|
|
- // print("end");
|
|
|
|
|
-
|
|
|
|
|
- // print(createListOfMaps(headers[data.filename] ?? [], csv2list(data.data)));
|
|
|
|
|
} else {
|
|
} else {
|
|
|
print(" filename: ${data.filename} unknown, not inserted.");
|
|
print(" filename: ${data.filename} unknown, not inserted.");
|
|
|
}
|
|
}
|
|
@@ -1406,3 +1430,54 @@ int headerToNb(List list) {
|
|
|
//print("header2nb: $res");
|
|
//print("header2nb: $res");
|
|
|
return res;
|
|
return res;
|
|
|
}
|
|
}
|
|
|
|
|
+
|
|
|
|
|
/// Returns the entries of [map2] that are "new" relative to [map1]:
/// every key that is absent from [map1], or whose value in [map1]
/// differs (by `==`) from its value in [map2].
Map<String, dynamic> findDifferences(
    Map<String, dynamic> map1, Map<String, dynamic> map2) {
  final diff = <String, dynamic>{};
  map2.forEach((key, value) {
    // Keep the entry when map1 has no such key, or holds another value.
    final sameInBoth = map1.containsKey(key) && map1[key] == value;
    if (!sameInBoth) {
      diff[key] = value;
    }
  });
  return diff;
}
|
|
|
|
|
+
|
|
|
|
|
/// Returns the entries common to [map1] and [map2]: keys present in both
/// maps whose values compare equal with `==`.
Map<String, dynamic> findSimilarities(
    Map<String, dynamic> map1, Map<String, dynamic> map2) {
  final shared = <String, dynamic>{};
  for (final entry in map1.entries) {
    // An entry counts as shared only when map2 carries the same key
    // with the same value.
    if (map2.containsKey(entry.key) && map2[entry.key] == entry.value) {
      shared[entry.key] = entry.value;
    }
  }
  return shared;
}
|
|
|
|
|
+
|
|
|
|
|
/// Returns `true` when every element of [sourceList] also occurs in
/// [targetList] — i.e. [targetList] is a superset of [sourceList].
///
/// An empty [sourceList] trivially yields `true`.
bool containsAll(List<String> sourceList, List<String> targetList) {
  // Hash the target once so lookups are O(1): O(n + m) overall instead
  // of the O(n * m) of List.contains inside a loop.
  final targetSet = targetList.toSet();
  return sourceList.every(targetSet.contains);
}
|
|
|
|
|
+
|
|
|
|
|
/// Returns a new map holding only the entries of [originalMap] whose key
/// appears in [keys]; keys absent from [originalMap] are ignored.
Map<K, V> extractMapWithKeys<K, V>(Map<K, V> originalMap, List<K> keys) {
  final extracted = <K, V>{};
  for (final key in keys) {
    if (originalMap.containsKey(key)) {
      // Bug fix: the original used `originalMap[key]!`, which throws for a
      // key that is present but mapped to null when V is a nullable type.
      // `as V` keeps the null-safety promotion without rejecting nulls.
      extracted[key] = originalMap[key] as V;
    }
  }
  return extracted;
}
|