@@ -1,3 +1,4 @@
+import 'dart:collection';
 import 'dart:io';
 import 'package:file_upload_processor/handlers/base_api.dart';
 import 'package:intl/intl.dart';
@@ -10,6 +11,12 @@ import 'package:supabase/supabase.dart';
 class FileUploadApi extends BaseApi {
   FileUploadApi(shelf.Request request) : super(request);
 
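+  // Folder layout, per the getters below: raw uploads land in
+  // ./uploaded/raw/<workingFolder>, extracted data in
+  // ./uploaded/data/<workingFolder>; workingFolder is set per request
+  // in response().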
+  final uploadFolder = "./uploaded";
+  String get rawFolder => "$uploadFolder/raw";
+  String get dataFolder => "$uploadFolder/data";
+  String workingFolder = "";
+  String get zipFolder => "$rawFolder/$workingFolder";
+  String get extFolder => "$dataFolder/$workingFolder";
 
   SupabaseClient getSupabaseClient(shelf.Request request) {
     final supabaseUrl = request.headers['supabase-url'];
@@ -63,8 +70,10 @@ class FileUploadApi extends BaseApi {
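+  // Note: zipFolder and extFolder depend on workingFolder, so response()
+  // must assign workingFolder before calling this method.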
   Future<void> initializeDirectories() async {
     final directories = [
-      Directory('./uploaded/raw'),
-      Directory('./uploaded/data'),
+      Directory(rawFolder),
+      Directory(dataFolder),
+      Directory(zipFolder),
+      Directory(extFolder),
     ];
 
     for (var dir in directories) {
@@ -82,7 +91,8 @@ class FileUploadApi extends BaseApi {
         bytes[3] == 0x04;
   }
 
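+  // Returns the paths of all extracted files so response() can hand each
+  // one to FileProcess.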
-  Future<void> processZipFile(String filePath) async {
+  Future<List<String>> processZipFile(String filePath) async {
+    List<String> files = [];
     final bytes = await File(filePath).readAsBytes();
     final archive = ZipDecoder().decodeBytes(bytes);
@@ -90,15 +100,19 @@ class FileUploadApi extends BaseApi {
       final filename = file.name;
       if (file.isFile) {
         final data = file.content as List<int>;
-        final outFile = File(path.join('./uploaded/data', filename));
+        final outFile = File(path.join(extFolder, filename));
         await outFile.parent.create(recursive: true);
         await outFile.writeAsBytes(data);
+        files.add(outFile.path);
       }
     }
+    return files;
   }
 
   @override
   response() async {
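+    // A per-request timestamp keeps concurrent uploads in separate folders
+    // (assumes at most one upload per millisecond).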
+    workingFolder = DateTime.now().millisecondsSinceEpoch.toString();
+
     final supabaseClient = getSupabaseClient(request);
     await initializeDirectories();
@@ -137,17 +151,25 @@ class FileUploadApi extends BaseApi {
         (prev, element) => [...prev, ...element],
       );
 
-      final rawFilePath = path.join('./uploaded/raw', filename);
+      final rawFilePath = path.join(zipFolder, filename);
       await File(rawFilePath).writeAsBytes(bytes);
 
+      List<String> files = [];
       if (isZipFile(bytes)) {
-        await processZipFile(rawFilePath);
+        files.addAll(await processZipFile(rawFilePath));
      } else {
-        final dataFilePath = path.join('./uploaded/data', filename);
+        final dataFilePath = path.join(extFolder, filename);
         await File(rawFilePath).copy(dataFilePath);
+        files.add(dataFilePath);
       }
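+      // The payload is on disk now; drop the in-memory copy before the
+      // archive upload and per-file processing below.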
+      bytes.clear();
       await uploadToSupabase(rawFilePath, filename, supabaseClient,
           bucket: 'csvhich_archive', timestamped: true, upsert: false);
+
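+      // Sync each extracted file into its Supabase table.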
+      for (var file in files) {
+        final fileProcess = FileProcess(file, supabaseClient);
+        await fileProcess.go();
+      }
     }
 
     return shelf.Response.ok('File processed and uploaded successfully');
@@ -157,8 +179,352 @@ class FileUploadApi extends BaseApi {
           body: 'Error processing upload: $e');
     } finally {
       supabaseClient.dispose();
-      await File('./uploaded/raw').delete(recursive: true);
-      await File('./uploaded/data').delete(recursive: true);
+      await Directory(zipFolder).delete(recursive: true);
+      await Directory(extFolder).delete(recursive: true);
+    }
+  }
+}
+
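+// Syncs one extracted export file into Supabase: parses the CSV, compares
+// it with the rows already stored for the same scope, deletes stale rows,
+// and inserts new ones.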
+class FileProcess {
+  FileProcess(this.filepath, this.supabase);
+  final String filepath;
+  final SupabaseClient supabase;
+  String get filename => filepath.replaceAll('\\', "/").split("/").last;
+
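+  // Known export filenames mapped to their destination tables.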
+  final Map<String, String> tables = {
+    "secondprgtype.txt": "aclegs_csv",
+    "ExportPGRGPNmois.txt": "pnlegs_csv",
+    "exportPGRGPN.txt": "pnlegs_csv",
+    "exportlicence.txt": "licences_csv",
+  };
+
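+  // Expected column layout of each export, used by parseCsv() to key the
+  // row maps.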
+  final Map<String, List<String>> _headers = {
+    "secondprgtype.txt": [
+      "leg_no",
+      "fn_carrier",
+      "fn_number",
+      "fn_suffix",
+      "day_of_origin",
+      "ac_owner",
+      "ac_subtype",
+      "ac_version",
+      "ac_registration",
+      "dep_ap_actual",
+      "dep_ap_sched",
+      "dep_dt_est",
+      "dep_sched_dt",
+      "arr_ap_actual",
+      "arr_ap_sched",
+      "arr_dt_est",
+      "arr_sched_dt",
+      "slot_time_actual",
+      "leg_type",
+      "status",
+      "employer_cockpit",
+      "employer_cabin",
+      "cycles",
+      "delay_code_01",
+      "delay_code_02",
+      "delay_code_03",
+      "delay_code_04",
+      "delay_time_01",
+      "delay_time_02",
+      "delay_time_03",
+      "delay_time_04",
+      "subdelay_code_01",
+      "subdelay_code_02",
+      "subdelay_code_03",
+      "subdelay_code_04",
+      "pax_booked_c",
+      "pax_booked_y",
+      "pax_booked_trs_c",
+      "pax_booked_trs_y",
+      "pad_booked_c",
+      "pad_booked_y",
+      "offblock_dt_a",
+      "airborne_dt_a",
+      "landing_dt_a",
+      "onblock_dt_a",
+      "offblock_dt_f",
+      "airborne_dt_f",
+      "landing_dt_f",
+      "onblock_dt_f",
+      "offblock_dt_m",
+      "airborne_dt_m",
+      "landing_dt_m",
+      "onblock_dt_m",
+      "eet",
+    ],
+    "exportPGRGPN.txt": [
+      "date",
+      "tlc",
+      "actype",
+      "al",
+      "fnum",
+      "ddep",
+      "hdep",
+      "ddes",
+      "hdes",
+      "dep",
+      "des",
+      "label",
+      "type",
+    ],
+    "ExportPGRGPNmois.txt": [
+      "date",
+      "tlc",
+      "actype",
+      "al",
+      "fnum",
+      "ddep",
+      "hdep",
+      "ddes",
+      "hdes",
+      "dep",
+      "des",
+      "label",
+      "type",
+    ],
+    "exportlicence.txt": [
+      "tlc",
+      "fname",
+      "mname",
+      "lname",
+      "expire",
+      "ac",
+      "college",
+      "base",
+    ],
+  };
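+  // Column that scopes the diff: existing rows are only compared (and
+  // possibly deleted) when their value in this column appears in the new
+  // export.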
+  final Map<String, String> scopes = {
+    "secondprgtype.txt": "day_of_origin",
+    "exportPGRGPN.txt": "date",
+    "ExportPGRGPNmois.txt": "date",
+    "exportlicence.txt": "tlc",
+  };
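+  // Key columns used to decide whether an old row that vanished from the
+  // new export should be removed (see go()).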
+  final Map<String, List<String>> idToRemove = {
+    "secondprgtype.txt": ["day_of_origin"],
+    "exportPGRGPN.txt": ["date", "tlc"],
+    "ExportPGRGPNmois.txt": ["date", "tlc"],
+    "exportlicence.txt": ["tlc"],
+  };
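+  // Per-export log-table metadata; declared here but not yet referenced
+  // in this class.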
+  final Map<String, Map<String, dynamic>> ids = {
+    "secondprgtype.txt": {
+      "table": "aclegs_log",
+      "headers": [
+        "day_of_origin",
+        "dep_sched_dt",
+        "fn_carrier",
+        "fn_number",
+        "dep_ap_sched",
+        "arr_ap_sched",
+        // "dep_ap_actual",
+        // "arr_ap_actual"
+      ]
+    },
+    "exportPGRGPN.txt": {
+      "table": "pnlegs_log",
+      "headers": ["tlc", "date", "dep", "des", "al", "fnum", "label"]
+    },
+    "ExportPGRGPNmois.txt": {
+      "table": "pnlegs_log",
+      "headers": ["tlc", "date", "dep", "des", "al", "fnum", "label"]
+    },
+    "exportlicence.txt": {
+      "table": "qualifs_log",
+      "headers": ["tlc", "college", "ac", "base"]
+    },
+  };
+
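+  // Naive CSV parse: splits each line on commas (quoted commas are not
+  // handled), strips surrounding quotes and whitespace, and skips lines
+  // whose field count does not match the expected headers.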
+  Future<List<Map<String, dynamic>>> parseCsv() async {
+    final headers = _headers[filename] ?? [];
+    if (headers.isEmpty) {
+      throw Exception('No headers found for file: $filename');
+    }
+
+    // Initialize an empty list to hold the parsed data
+    List<Map<String, dynamic>> data = [];
+
+    // Read the CSV file
+    final file = File(filepath);
+    final lines = await file.readAsLines();
+
+    // Iterate over each line in the CSV file
+    for (int i = 0; i < lines.length; i++) {
+      // Split the line into individual values
+      final values = lines[i].split(',');
+      if (values.length != headers.length) {
+        // Skip malformed lines (field count mismatch)
+        continue;
+      }
+
+      // Create a map for the current row
+      Map<String, dynamic> row = {};
+
+      // Assign each value to the corresponding header
+      for (int j = 0; j < headers.length; j++) {
+        row[headers[j]] = values[j].removeQuotes.trim().nullIfEmpty;
+      }
+
+      // Add the row map to the data list
+      data.add(row);
+    }
+
+    // Return the parsed data
+    return data;
+  }
+
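+  // go(): 1) parse the export, 2) fetch existing rows for the same scope
+  // in URL-size-limited batches, 3) keep rows that match the new data,
+  // delete stale ones, and insert the rest.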
+  Future<void> go() async {
+    final mapsToInsert = await parseCsv();
+    final scopeName = scopes[filename] ?? "";
+    final scopeInNew = mapsToInsert
+        .fold(<String>{}, (t, e) => t..add(e[scopeName] ?? "")).toList();
+
+    List<Map<String, dynamic>> oldIds = [];
+    List<Map<String, dynamic>> oldComparable = [];
+
+    // load existing rows for this scope, batched to fit URL-length limits
+    for (var e in splitList(scopeInNew, headerToNb(scopeInNew))) {
+      final res = await supabase
+          .from(tables[filename]!)
+          .select()
+          .inFilter(scopeName, e)
+          .limit(100000);
+
+      oldIds.addAll(res.map((e) => filterMapByKeys(e, ["id"])).toList());
+      oldComparable.addAll(res
+          .map((e) => filterMapByKeys(e, _headers[filename] ?? []))
+          .toList());
+    }
+
+    List<int> indexToRemove = [];
+    List<int> indexToMaintain = [];
+
+    final keys2check = idToRemove[filename] ?? [];
+
+    for (int i = 0; i < oldComparable.length; i++) {
+      final item = oldComparable[i];
+      final index = findIndex(mapsToInsert, item);
+      if (index > -1) {
+        indexToMaintain.add(i);
+        mapsToInsert.removeAt(index);
+      } else {
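+        // NOTE: mawjoudin is built from oldComparable itself, so item's own
+        // projection is always present and mawjood is effectively always
+        // true: every unmatched old row ends up scheduled for deletion.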
+        final mawjoudin = oldComparable.fold(<Map<String, dynamic>>[], (t, e) {
+          return t..add(filterMapByKeys(e, keys2check));
+        });
+
+        final mawjood = mawjoudin
+            .firstWhere((e) => mapEquals(filterMapByKeys(item, keys2check), e),
+                orElse: () => {})
+            .keys
+            .isNotEmpty;
+
+        if (mawjood) {
+          indexToRemove.add(i);
+        } else {
+          print("saved item: $item");
+        }
+      }
+    }
+
+    // delete stale rows by id, batched to fit URL-length limits
+    for (var e in splitList(indexToRemove.map((e) => oldIds[e]['id']).toList(),
+        headerToNb(indexToRemove.map((e) => oldIds[e]['id']).toList()))) {
+      await supabase
+          .from(tables[filename]!)
+          .delete()
+          .inFilter('id', e);
+    }
+
+    // inserting new data
+    await supabase
+        .from(tables[filename]!)
+        .insert(mapsToInsert);
+
+    print(
+        " insert:${mapsToInsert.length} remove:${indexToRemove.length} maintain:${indexToMaintain.length}");
+  }
+
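+  // Partial equality: only map1's keys are compared, so a row projected
+  // onto a subset of columns can match a full row (the length check is
+  // commented out on purpose, by the look of it).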
+  bool mapEquals(Map<String, dynamic> map1, Map<String, dynamic> map2) {
+    // if (map1.length != map2.length) return false;
+    for (var key in map1.keys) {
+      if (map1[key] != map2[key]) return false;
+    }
+    return true;
+  }
+
+  int findIndex(List<dynamic> list, dynamic element) {
+    for (int i = 0; i < list.length; i++) {
+      if (mapEquals(list[i], element)) {
+        return i;
+      }
+    }
+    return -1; // Return -1 if the element is not found
+  }
+
+  Map<String, dynamic> filterMapByKeys(
+      Map<String, dynamic> originalMap, List<String> keysToInclude) {
+    // Create a new map to hold the filtered results
+    Map<String, dynamic> filteredMap = {};
+
+    // Iterate through the list of keys to include
+    for (String key in keysToInclude) {
+      // Check if the key exists in the original map
+      if (originalMap.containsKey(key)) {
+        filteredMap[key] = originalMap[key]; // Add to the new map
+      }
     }
+
+    return filteredMap;
+  }
+
+  List<List<T>> splitList<T>(List<T> originalList, int maxSize) {
+    List<List<T>> sublists = [];
+
+    for (int i = 0; i < originalList.length; i += maxSize) {
+      // Create a sublist for the current chunk
+      List<T> sublist = originalList.sublist(
+        i,
+        (i + maxSize > originalList.length) ? originalList.length : i + maxSize,
+      );
+      sublists.add(sublist);
+    }
+
+    return sublists;
+  }
+
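+  // Estimates how many inFilter values fit in one request: works in bits
+  // (characters * 8) against a fixed budget, reserving ~200 characters for
+  // the rest of the URL.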
+  int headerToNb(List list) {
+    // request-size budget: 4 * 1024 characters, expressed in bits
+    final maxheader = 4 * 1024 * 8;
+    if (list.isEmpty) {
+      return list.length;
+    }
+    final length1 = (list.toString().length / list.length).ceil() * 8;
+    final lengthurl = 200 * 8;
+    final res = ((maxheader - lengthurl) / length1).floor();
+    return res;
+  }
+}
+
+extension NullIfEmpty on String {
+  String? get nullIfEmpty => isEmpty ? null : this;
+}
+
+extension RemoveQuotes on String {
+  String get removeQuotes {
+    if (isEmpty) return this; // Return if the string is empty
+
+    // Check if the first character is a quote
+    bool startsWithQuote = startsWith('"') || startsWith("'");
+    // Check if the last character is a quote
+    bool endsWithQuote = endsWith('"') || endsWith("'");
+
+    // Remove the first and last characters if they are quotes
+    String result = this;
+    if (startsWithQuote) result = result.substring(1);
+    if (endsWithQuote) result = result.substring(0, result.length - 1);
+
+    return result;
   }
 }