
upload & insert

Fares, 10 months ago
commit 9e3623529b
3 changed files with 288 additions and 75 deletions
  1. lib/handlers/upload_handler.dart (+4 -69)
  2. lib/models/data.dart (+280 -2)
  3. pubspec.lock (+4 -4)

+ 4 - 69
lib/handlers/upload_handler.dart

@@ -1,10 +1,8 @@
-import 'dart:convert';
 import 'dart:io';
 import 'package:myshelf/models/data.dart';
 import 'package:shelf/shelf.dart';
 import 'package:shelf_multipart/shelf_multipart.dart';
 import 'package:supabase/supabase.dart';
-import 'package:archive/archive.dart';
 
 Future<Response> handleFileUpload(Request request) async {
   // Check authorization
@@ -37,7 +35,7 @@ Future<Response> handleFileUpload(Request request) async {
             .firstMatch(contentDisposition)
             ?.group(1);
 
-        if (name != null && filename != null) {
+        if (name != null && name != "" && filename != null) {
           try {
             // Create temporary file
             final tempFile = File('uploads/$filename');
@@ -83,7 +81,7 @@ Future<Response> handleFileUpload(Request request) async {
               // Continue execution even if archive upload fails
             }
 
-            await processCsvFile(tempFile);
+            // await processCsvData(tempFile, supabase);
 
             // No need to subscribe to channel
             final channel = supabase.channel('csvhichstorage');
@@ -105,9 +103,8 @@ Future<Response> handleFileUpload(Request request) async {
             // Clean up temporary file
             await tempFile.delete();
           } catch (e) {
-            print('Error processing file: $e');
-            return Response.internalServerError(
-                body: 'Error processing file: ${e.toString()}');
+            print("Error processing file:\n $e");
+            return Response.internalServerError(body: e.toString());
           }
         }
       }
@@ -125,65 +122,3 @@ Future<Response> handleFileUpload(Request request) async {
     return Response(401, body: 'Not a multipart request');
   }
 }
-
-class FilesAsData {
-  String? filename;
-  String? updatedAt;
-  String data;
-  FilesAsData({
-    this.filename,
-    this.updatedAt,
-    required this.data,
-  });
-}
-
-processCsvFile(File tempfile) async {
-  List<FilesAsData> csvData = [];
-  final filename = tempfile.path.split('/').last;
-  final bytes = await tempfile.readAsBytes();
-  // Check if file is a zip archive
-  String? csvcontent;
-  if (filename.toLowerCase().endsWith('.zip')) {
-    final archive = ZipDecoder().decodeBytes(bytes);
-    for (final file in archive) {
-      if (!file.isFile) continue;
-
-      csvcontent = utf8.decode(file.content as List<int>);
-      csvData.add(FilesAsData(filename: file.name, data: csvcontent));
-    }
-  } else {
-    // For non-zip files, store in extracted folder
-    csvcontent = utf8.decode(bytes);
-    csvData.add(FilesAsData(filename: filename, data: csvcontent));
-  }
-
-// No need to subscribe to channel
-
-  print((csv2list(csvData!.first!.data!))
-      .map((e) => Pnleg.fromList(e))
-      .toList()
-      .first
-      .toMap()
-      .keys);
-
-  // print(
-  //     (csv2list(csvData!.first!.data!)).map((e) => Acleg.fromList(e)).toList());
-
-  //insert csv content to database
-  // List<List<dynamic>> csvData = [];
-  // if (csvcontent != null) {
-  //   List<String> lines = csvcontent.split('\n');
-  //   for (String line in lines) {
-  //     List<String> values = line.split(',');
-  //     csvData.add(values.map((value) {
-  //       String trimmedValue = value
-  //           .trim()
-  //           .replaceAll(RegExp(r'^"|"$'), '')
-  //           .replaceAll(RegExp(r'^ | $'), '')
-  //           .trim();
-  //       return trimmedValue.isEmpty ? null : trimmedValue;
-  //     }).toList());
-  //   }
-  //   print(csvData);
-  // }
-}

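The CSV-processing code removed above now lives in lib/models/data.dart as processCsvData (the handler's call to it is left commented out in this commit). A minimal sketch of how the handler could re-enable it, assuming the handler's existing supabase client and temp file are in scope (illustrative only):

    // Inside handleFileUpload, once the upload has been written to disk:
    final tempFile = File('uploads/$filename');
    // Parse the upload (zip archive or plain CSV), reconcile the rows
    // against Supabase, then clean up the temp file afterwards.
    await processCsvData(tempFile, supabase);
    await tempFile.delete();
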
+ 280 - 2
lib/models/data.dart

@@ -1,10 +1,153 @@
 import 'dart:convert';
+import 'dart:io';
+import 'package:archive/archive_io.dart';
 import 'package:jiffy/jiffy.dart';
 import 'package:myshelf/models/dtinterval.dart';
+import 'package:supabase/supabase.dart';
 
-List<List<dynamic>> csv2list(String text,
+final Map<String, String> tables = {
+  "secondprgtype.txt": "aclegs_csv",
+  "ExportPGRGPNmois.txt": "pnlegs_csv",
+  "exportPGRGPN.txt": "pnlegs_csv",
+  "exportlicence.txt": "licences_csv",
+};
+
+final Map<String, List<String>> headers = {
+  "secondprgtype.txt": [
+    "leg_no",
+    "fn_carrier",
+    "fn_number",
+    "fn_suffix",
+    "day_of_origin",
+    "ac_owner",
+    "ac_subtype",
+    "ac_version",
+    "ac_registration",
+    "dep_ap_actual",
+    "dep_ap_sched",
+    "dep_dt_est",
+    "dep_sched_dt",
+    "arr_ap_actual",
+    "arr_ap_sched",
+    "arr_dt_est",
+    "arr_sched_dt",
+    "slot_time_actual",
+    "leg_type",
+    "status",
+    "employer_cockpit",
+    "employer_cabin",
+    "cycles",
+    "delay_code_01",
+    "delay_code_02",
+    "delay_code_03",
+    "delay_code_04",
+    "delay_time_01",
+    "delay_time_02",
+    "delay_time_03",
+    "delay_time_04",
+    "subdelay_code_01",
+    "subdelay_code_02",
+    "subdelay_code_03",
+    "subdelay_code_04",
+    "pax_booked_c",
+    "pax_booked_y",
+    "pax_booked_trs_c",
+    "pax_booked_trs_y",
+    "pad_booked_c",
+    "pad_booked_y",
+    "offblock_dt_a",
+    "airborne_dt_a",
+    "landing_dt_a",
+    "onblock_dt_a",
+    "offblock_dt_f",
+    "airborne_dt_f",
+    "landing_dt_f",
+    "onblock_dt_f",
+    "offblock_dt_m",
+    "airborne_dt_m",
+    "landing_dt_m",
+    "onblock_dt_m",
+    "eet",
+  ],
+  "exportPGRGPN.txt": [
+    "date",
+    "tlc",
+    "actype",
+    "al",
+    "fnum",
+    "ddep",
+    "hdep",
+    "ddes",
+    "hdes",
+    "dep",
+    "des",
+    "label",
+    "type",
+  ],
+  "ExportPGRGPNmois.txt": [
+    "date",
+    "tlc",
+    "actype",
+    "al",
+    "fnum",
+    "ddep",
+    "hdep",
+    "ddes",
+    "hdes",
+    "dep",
+    "des",
+    "label",
+    "type",
+  ],
+  "exportlicence.txt": [
+    "tlc",
+    "fname",
+    "mname",
+    "lname",
+    "expire",
+    "ac",
+    "college",
+    "base",
+  ],
+};
+final Map<String, String> scopes = {
+  "secondprgtype.txt": "day_of_origin",
+  "exportPGRGPN.txt": "date",
+  "ExportPGRGPNmois.txt": "date",
+  "exportlicence.txt": "tlc",
+};
+List<Map<String, String?>> createListOfMaps(
+    List<String> headers, List<List<dynamic>> data) {
+  // Initialize an empty list to hold the maps
+  List<Map<String, String?>> result = [];
+
+  // Iterate over each row of data
+  for (var row in data) {
+    // Create a map for the current row
+    Map<String, String?> map = {};
+
+    // Populate the map with header-value pairs
+    for (int i = 0; i < headers.length; i++) {
+      if (i < row.length) {
+        // Convert each value to String? and handle null values
+        map[headers[i]] = row[i]
+            ?.toString(); // Use toString() to ensure it's a String or null
+      } else {
+        // If there's no corresponding data, set it as null
+        map[headers[i]] = null;
+      }
+    }
+
+    // Add the map to the result list
+    result.add(map);
+  }
+
+  return result;
+}
+
+List<List<String?>> csv2list(String text,
     {bool nulling = true, bool quotes = true, bool trim = true}) {
-  List<List<dynamic>> out = [];
+  List<List<String?>> out = [];
   final lines = text.split("\n");
   for (var line in lines) {
     final cols = line.split(",").map((String? e) {
@@ -1079,3 +1222,138 @@ extension StringExtensions on String {
           isUtc: true)
       : null;
 }
+
+class FilesAsData {
+  String? filename;
+  String? updatedAt;
+  String data;
+  FilesAsData({
+    this.filename,
+    this.updatedAt,
+    required this.data,
+  });
+}
+
+Future<void> processCsvData(File tempfile, SupabaseClient supabase) async {
+  List<FilesAsData> csvData = [];
+  final filename = tempfile.path.split('/').last;
+  final bytes = await tempfile.readAsBytes();
+  // Check if file is a zip archive
+  String? csvcontent;
+  if (filename.toLowerCase().endsWith('.zip')) {
+    final archive = ZipDecoder().decodeBytes(bytes);
+    for (final file in archive) {
+      if (!file.isFile) continue;
+
+      csvcontent = utf8.decode(file.content as List<int>);
+      csvData.add(FilesAsData(filename: file.name, data: csvcontent));
+    }
+  } else {
+    // For non-zip files, decode the file contents directly
+    csvcontent = utf8.decode(bytes);
+    csvData.add(FilesAsData(filename: filename, data: csvcontent));
+  }
+
+  for (final FilesAsData data in csvData) {
+    //inserting data
+    if (tables.keys.contains(data.filename)) {
+      final mapsToInsert =
+          createListOfMaps(headers[data.filename] ?? [], csv2list(data.data));
+      final scopeName = scopes[data.filename!]!;
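+      // Collect the distinct values of the scope column (e.g. day_of_origin
+      // or date) present in the new CSV, so only the potentially affected
+      // rows are fetched from the DB for comparison.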
+      final scopeInNew = mapsToInsert
+          .fold(<String>{}, (t, e) => t..add(e[scopeName] ?? "")).toList();
+
+      final old = await supabase
+          .from(tables[data.filename]!)
+          .select()
+          .inFilter(scopeName, scopeInNew)
+          .limit(100000);
+      final oldComparable = old
+          .map((e) => filterMapByKeys(e, headers[data.filename] ?? []))
+          .toList();
+
+      List<int> indexToRemove = [];
+      List<int> indexToMaintain = [];
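+      // Reconcile old vs. new: DB rows that still match a row in the CSV are
+      // kept (and dropped from mapsToInsert so they are not re-inserted);
+      // DB rows with no match in the CSV are marked for deletion.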
+      for (int i = 0; i < oldComparable.length; i++) {
+        final item = oldComparable[i];
+        final index = findIndex(mapsToInsert, item);
+        if (index != -1) {
+          indexToMaintain.add(i);
+          mapsToInsert.removeAt(index);
+        } else {
+          indexToRemove.add(i);
+        }
+      }
+
+      print(
+          "indexToRemove: ${indexToRemove.length}, indexToMaintain: ${indexToMaintain.length}");
+      print("delete from db");
+
+      // Use plain for-loops so each batch is awaited in turn; forEach with an
+      // async callback would fire every request without waiting for them.
+      for (final batch
+          in splitList(indexToRemove.map((e) => old[e]['id']).toList(), 50)) {
+        await supabase
+            .from(tables[data.filename]!)
+            .delete()
+            .inFilter('id', batch);
+      }
+      print("insert in db");
+
+      for (final batch in splitList(mapsToInsert, 50)) {
+        await supabase.from(tables[data.filename]!).insert(batch);
+      }
+      print("end");
+
+      // print(createListOfMaps(headers[data.filename] ?? [], csv2list(data.data)));
+    } else {
+      print("filename: ${data.filename} unknown, not inserted.");
+    }
+  }
+}
+
+bool mapEquals(Map<String, dynamic> map1, Map<String, dynamic> map2) {
+  // Compares only the keys present in map1; extra keys in map2 are ignored.
+  for (var key in map1.keys) {
+    if (map1[key] != map2[key]) return false;
+  }
+  return true;
+}
+
+int findIndex(List<dynamic> list, dynamic element) {
+  for (int i = 0; i < list.length; i++) {
+    if (mapEquals(list[i], element)) {
+      return i;
+    }
+  }
+  return -1; // Return -1 if the element is not found
+}
+
+Map<String, dynamic> filterMapByKeys(
+    Map<String, dynamic> originalMap, List<String> keysToInclude) {
+  // Create a new map to hold the filtered results
+  Map<String, dynamic> filteredMap = {};
+
+  // Iterate through the list of keys to include
+  for (String key in keysToInclude) {
+    // Check if the key exists in the original map
+    if (originalMap.containsKey(key)) {
+      filteredMap[key] = originalMap[key]; // Add to the new map
+    }
+  }
+
+  return filteredMap;
+}
+
+List<List<T>> splitList<T>(List<T> originalList, int maxSize) {
+  List<List<T>> sublists = [];
+
+  for (int i = 0; i < originalList.length; i += maxSize) {
+    // Create a sublist for the current chunk
+    List<T> sublist = originalList.sublist(
+      i,
+      (i + maxSize > originalList.length) ? originalList.length : i + maxSize,
+    );
+    sublists.add(sublist);
+  }
+
+  return sublists;
+}

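Taken together, the new helpers in data.dart form a small pipeline: csv2list splits raw text into rows of nullable strings, createListOfMaps zips each row with the per-file header list, and splitList chunks the resulting maps into batches for Supabase. A minimal usage sketch with a made-up licence record (the CSV line and the surrounding async context are illustrative):

    // Inside an async function with a SupabaseClient named `supabase`:
    final raw = "TLC1,John,,Doe,2025-01-01,A320,cockpit,TUN";
    final rows = csv2list(raw); // empty fields become null
    final maps = createListOfMaps(headers["exportlicence.txt"]!, rows);
    // maps.first == {tlc: TLC1, fname: John, mname: null, lname: Doe, ...}
    for (final batch in splitList(maps, 50)) {
      await supabase.from(tables["exportlicence.txt"]!).insert(batch);
    }
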
+ 4 - 4
pubspec.lock

@@ -450,10 +450,10 @@ packages:
     dependency: "direct dev"
     description:
       name: test
-      sha256: f2a018e2baa6fce7c8daa55b8bdf4b3d7d165f82caac269e4cbe5edd666c0e4c
+      sha256: be7697dcfa9d47333e6ceb38400b4c8b8de15dc97023e8eb219189ba19c5d423
       url: "https://pub.dev"
     source: hosted
-    version: "1.25.9"
+    version: "1.25.11"
   test_api:
     dependency: transitive
     description:
@@ -466,10 +466,10 @@ packages:
     dependency: transitive
     description:
       name: test_core
-      sha256: "60ff490bb383858015df7b7a0d883301a426edf9033989f55f091d91efb9dfaf"
+      sha256: "3e47cac78f28a4dd71ea232db15fda6345934f472c2f70f97d35476809e045ca"
       url: "https://pub.dev"
     source: hosted
-    version: "0.6.6"
+    version: "0.6.7"
   typed_data:
     dependency: transitive
     description: