// file_upload_api.dart

import 'dart:io';

import 'package:archive/archive.dart';
import 'package:file_upload_processor/handlers/base_api.dart';
import 'package:http_parser/http_parser.dart';
import 'package:intl/intl.dart';
import 'package:mime/mime.dart';
import 'package:path/path.dart' as path;
import 'package:shelf/shelf.dart' as shelf;
import 'package:supabase/supabase.dart';
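/// Shelf handler that accepts multipart file uploads, unpacks zip archives,
/// mirrors the raw files to Supabase storage, and feeds the extracted files
/// to [FileProcess] for diffing against the database.
///
/// A minimal wiring sketch (illustrative only; it assumes `BaseApi.response()`
/// is the method that produces the `shelf.Response`, and the host/port below
/// are placeholders, not part of this file):
///
/// ```dart
/// import 'package:shelf/shelf_io.dart' as io;
///
/// Future<void> main() async {
///   await io.serve(
///       (shelf.Request request) => FileUploadApi(request).response(),
///       'localhost', 8080);
/// }
/// ```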
class FileUploadApi extends BaseApi {
  FileUploadApi(shelf.Request request) : super(request);

  final uploadFolder = "./uploaded";
  String get rawFolder => "$uploadFolder/raw";
  String get dataFolder => "$uploadFolder/data";
  String workingFolder = "";
  String get zipFolder => "$rawFolder/$workingFolder";
  String get extFolder => "$dataFolder/$workingFolder";

  SupabaseClient getSupabaseClient(shelf.Request request) {
    final supabaseUrl = request.headers['supabase-url'];
    final authHeader = request.headers['authorization'];
    final token = authHeader!.substring(7); // Remove 'Bearer ' prefix
    return SupabaseClient(
      supabaseUrl!,
      token,
    );
  }

  String getTimestampedFilename(String originalFilename) {
    final timestamp = DateFormat('yyyyMMdd_HHmmss').format(DateTime.now());
    return '${timestamp}_$originalFilename';
  }
  Future<void> uploadToSupabase(
      String filePath, String filename, SupabaseClient supabaseClient,
      {bool timestamped = false,
      required String bucket,
      bool upsert = true}) async {
    try {
      final file = File(filePath);
      final timestampedFilename =
          timestamped ? getTimestampedFilename(filename) : filename;
      await supabaseClient.storage.from(bucket).upload(
            timestampedFilename,
            file,
            fileOptions: FileOptions(
              cacheControl: '3600',
              upsert: upsert,
            ),
          );
      print('+File uploaded to <$bucket>: $timestampedFilename');
    } catch (e) {
      print('!Error uploading to Supabase: $e');
      rethrow;
    }
  }
  Future<void> initializeDirectories() async {
    final directories = [
      Directory(rawFolder),
      Directory(dataFolder),
      Directory(zipFolder),
      Directory(extFolder),
    ];
    for (var dir in directories) {
      if (!await dir.exists()) {
        await dir.create(recursive: true);
      }
    }
  }
  bool isZipFile(List<int> bytes) {
    if (bytes.length < 4) return false;
    return bytes[0] == 0x50 &&
        bytes[1] == 0x4B &&
        bytes[2] == 0x03 &&
        bytes[3] == 0x04;
  }
  Future<List<String>> processZipFile(String filePath) async {
    List<String> files = [];
    final bytes = await File(filePath).readAsBytes();
    final archive = ZipDecoder().decodeBytes(bytes);
    for (final file in archive) {
      final filename = file.name;
      if (file.isFile) {
        final data = file.content as List<int>;
        final outFile = File(path.join(extFolder, filename));
        await outFile.parent.create(recursive: true);
        await outFile.writeAsBytes(data);
        files.add(path.join(extFolder, filename));
      }
    }
    return files;
  }
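  /// Handles the upload request.
  ///
  /// The request must carry an `Authorization: Bearer <token>` header, a
  /// `supabase-url` header, and a `multipart/form-data` body. Each uploaded
  /// file is stored in the `csvhich` bucket, archived with a timestamp in
  /// `csvhich_archive`, recorded in the `csvhichupdates` table, and then
  /// processed by [FileProcess].
  ///
  /// A hedged example request (the `/upload` route is an assumption; routing
  /// is not defined in this file):
  ///
  /// ```
  /// curl -X POST http://localhost:8080/upload \
  ///   -H "Authorization: Bearer $TOKEN" \
  ///   -H "supabase-url: https://<project>.supabase.co" \
  ///   -F "file=@export.zip"
  /// ```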
  @override
  Future<shelf.Response> response() async {
    final supabaseUrl = request.headers['supabase-url'];
    final authHeader = request.headers['authorization'];
    if (authHeader == null || !authHeader.startsWith('Bearer ')) {
      return shelf.Response.badRequest(
          body: 'Invalid or missing Authorization Bearer token');
    }
    if (supabaseUrl == null) {
      return shelf.Response.badRequest(
          body: 'Supabase URL not provided in headers');
    }
    workingFolder = DateTime.now().millisecondsSinceEpoch.toString();
    final supabaseClient = getSupabaseClient(request);
    await initializeDirectories();
    final contentType = request.headers['content-type'];
    if (contentType == null ||
        !contentType.toLowerCase().startsWith('multipart/form-data')) {
      return shelf.Response.badRequest(
          body: 'Content-Type must be multipart/form-data');
    }
    try {
      final mediaType = MediaType.parse(contentType);
      final boundary = mediaType.parameters['boundary'];
      if (boundary == null) {
        return shelf.Response.badRequest(body: 'Boundary not found');
      }
      final transformer = MimeMultipartTransformer(boundary);
      final bodyBytes = await request.read().expand((e) => e).toList();
      final stream = Stream.fromIterable([bodyBytes]);
      final parts = await transformer.bind(stream).toList();
      for (var part in parts) {
        final contentDisposition = part.headers['content-disposition'];
        if (contentDisposition == null) continue;
        final filenameMatch =
            RegExp(r'filename="([^"]*)"').firstMatch(contentDisposition);
        if (filenameMatch == null) continue;
        final filename = filenameMatch.group(1);
        if (filename == null) continue;
        final bytes = await part.fold<List<int>>(
          [],
          (prev, element) => [...prev, ...element],
        );
        final rawFilePath = path.join(zipFolder, filename);
        await File(rawFilePath).writeAsBytes(bytes);
        List<String> files = [];
        if (isZipFile(bytes)) {
          files.addAll(await processZipFile(rawFilePath));
        } else {
          final dataFilePath = path.join(extFolder, filename);
          await File(rawFilePath).copy(dataFilePath);
          files.add(dataFilePath);
        }
        bytes.clear();
        // Upload the raw file to Supabase storage.
        await uploadToSupabase(rawFilePath, filename, supabaseClient,
            bucket: 'csvhich', timestamped: false, upsert: true);
        // Upload a timestamped copy to the archive bucket.
        await uploadToSupabase(rawFilePath, filename, supabaseClient,
            bucket: 'csvhich_archive', timestamped: true, upsert: false);
        // Record the upload in the csvhichupdates table.
        await supabaseClient
            .from('csvhichupdates')
            .insert({'filename': filename});
        for (var file in files) {
          final fileProcess = FileProcess(file, supabaseClient);
          await fileProcess.go(donttouchdb: false);
        }
      }
      return shelf.Response.ok('File processed and uploaded successfully');
    } catch (e) {
      //print('Error: $e\n$stackTrace');
      return shelf.Response.internalServerError(
          body: 'Error processing upload: $e');
    } finally {
      supabaseClient.dispose();
      // The working folders are directories, so delete them as directories.
      await Directory(zipFolder).delete(recursive: true);
      await Directory(extFolder).delete(recursive: true);
    }
  }
}
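/// Parses one exported text/CSV file, diffs it against the matching Supabase
/// table scope by scope, applies the inserts and deletes, and records the
/// changes in log and tracker tables.
///
/// A minimal sketch, assuming a client authenticated the same way as in
/// [FileUploadApi.getSupabaseClient]; the URL, key, and path below are
/// placeholders:
///
/// ```dart
/// final supabase = SupabaseClient('https://<project>.supabase.co', '<key>');
/// final process =
///     FileProcess('./uploaded/data/1700000000000/exportlicence.txt', supabase);
/// await process.go(donttouchdb: true); // compare only, write nothing
/// ```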
class FileProcess {
  FileProcess(this.filepath, this.supabase);

  final String filepath;
  final SupabaseClient supabase;

  String get filename => filepath.replaceAll('\\', "/").split("/").last;

  final Map<String, String> tables = {
    "secondprgtype.txt": "aclegs_csv",
    "ExportPGRGPNmois.txt": "pnlegs_csv",
    "exportPGRGPN.txt": "pnlegs_csv",
    "exportlicence.txt": "licences_csv",
  };
  final Map<String, List<String>> _headers = {
    "secondprgtype.txt": [
      "leg_no",
      "fn_carrier",
      "fn_number",
      "fn_suffix",
      "day_of_origin",
      "ac_owner",
      "ac_subtype",
      "ac_version",
      "ac_registration",
      "dep_ap_actual",
      "dep_ap_sched",
      "dep_dt_est",
      "dep_sched_dt",
      "arr_ap_actual",
      "arr_ap_sched",
      "arr_dt_est",
      "arr_sched_dt",
      "slot_time_actual",
      "leg_type",
      "status",
      "employer_cockpit",
      "employer_cabin",
      "cycles",
      "delay_code_01",
      "delay_code_02",
      "delay_code_03",
      "delay_code_04",
      "delay_time_01",
      "delay_time_02",
      "delay_time_03",
      "delay_time_04",
      "subdelay_code_01",
      "subdelay_code_02",
      "subdelay_code_03",
      "subdelay_code_04",
      "pax_booked_c",
      "pax_booked_y",
      "pax_booked_trs_c",
      "pax_booked_trs_y",
      "pad_booked_c",
      "pad_booked_y",
      "offblock_dt_a",
      "airborne_dt_a",
      "landing_dt_a",
      "onblock_dt_a",
      "offblock_dt_f",
      "airborne_dt_f",
      "landing_dt_f",
      "onblock_dt_f",
      "offblock_dt_m",
      "airborne_dt_m",
      "landing_dt_m",
      "onblock_dt_m",
      "eet",
    ],
    "exportPGRGPN.txt": [
      "date",
      "tlc",
      "actype",
      "al",
      "fnum",
      "ddep",
      "hdep",
      "ddes",
      "hdes",
      "dep",
      "des",
      "label",
      "type",
    ],
    "ExportPGRGPNmois.txt": [
      "date",
      "tlc",
      "actype",
      "al",
      "fnum",
      "ddep",
      "hdep",
      "ddes",
      "hdes",
      "dep",
      "des",
      "label",
      "type",
    ],
    "exportlicence.txt": [
      "tlc",
      "fname",
      "mname",
      "lname",
      "expire",
      "ac",
      "college",
      "base",
    ],
  };
  final Map<String, String> scopes = {
    "secondprgtype.txt": "day_of_origin",
    "exportPGRGPN.txt": "date",
    "ExportPGRGPNmois.txt": "date",
    "exportlicence.txt": "tlc",
  };

  final Map<String, String> logTables = {
    "secondprgtype.txt": "aclegs_csv_log",
    "ExportPGRGPNmois.txt": "pnlegs_csv_log",
    "exportPGRGPN.txt": "pnlegs_csv_log",
    "exportlicence.txt": "licences_csv_log",
  };
  // Trackers for all tables: key, add, remove.
  final Map<String, List<Map<String, dynamic>>> trackers = {
    "secondprgtype.txt": [
      {
        "table": "aclegs_log_reg",
        "groupby": [
          "day_of_origin",
          "dep_sched_dt",
          "fn_carrier",
          "fn_number",
          "dep_ap_sched",
          "arr_ap_sched",
          // "dep_ap_actual",
          // "arr_ap_actual"
        ],
        "track": [
          "ac_registration",
        ]
      },
      {
        "table": "aclegs_log_leg",
        "groupby": [
          "day_of_origin",
          "dep_sched_dt",
          "fn_carrier",
          "fn_number",
          "dep_ap_sched",
          "arr_ap_sched",
        ],
        "track": [
          "dep_ap_actual",
          "arr_ap_actual",
        ]
      }
    ],
    "exportPGRGPN.txt": [
      {
        "table": "pnlegs_log_roster",
        "groupby": ["date", "tlc"],
        "track": ["dep", "des", "al", "fnum", "label"]
      },
      {
        "table": "pnlegs_log_duty",
        "groupby": ["date", "dep", "des", "al", "fnum", "label"],
        "track": ["tlc"]
      },
      {
        "table": "pnlegs_log_sched",
        "groupby": ["date", "dep", "des", "al", "fnum", "label"],
        "track": ["hdep", "hdes"]
      },
    ],
    "ExportPGRGPNmois.txt": [
      {
        "table": "pnlegs_log_roster",
        "groupby": ["date", "tlc"],
        "track": ["dep", "des", "al", "fnum", "label"]
      },
      {
        "table": "pnlegs_log_duty",
        "groupby": ["date", "dep", "des", "al", "fnum", "label"],
        "track": ["tlc"]
      },
      {
        "table": "pnlegs_log_sched",
        "groupby": ["date", "dep", "des", "al", "fnum", "label"],
        "track": ["hdep", "hdes"]
      },
    ],
    "exportlicence.txt": [
      {
        "table": "licences_log_qualif",
        "groupby": [
          "tlc",
          "fname",
          "mname",
          "lname",
        ],
        "track": [
          "ac",
          "college",
          "base",
        ]
      }
    ],
  };
  Future<List<Map<String, dynamic>>> parseCsv() async {
    final headers = _headers[filename] ?? [];
    if (headers.isEmpty) {
      throw Exception('No headers found for file: $filename');
    }
    // Initialize an empty list to hold the parsed data
    List<Map<String, dynamic>> data = [];
    // Read the CSV file
    final file = File(filepath);
    final lines = await file.readAsLines();
    // Iterate over each line in the CSV file
    for (int i = 0; i < lines.length; i++) {
      // Split the line into individual values
      final values = lines[i].split(',');
      if (values.length != headers.length) {
        //print('Skipping line $i: Incorrect number of values: line: $i');
        continue;
      }
      // Create a map for the current row
      Map<String, dynamic> row = {};
      // Assign each value to the corresponding header
      for (int j = 0; j < headers.length; j++) {
        row[headers[j]] = values[j].trim().removeQuotes.trim().nullIfEmpty;
      }
      // Add the row map to the data list
      data.add(row);
    }
    // Return the parsed data
    return data;
  }

  List<String> get filesTomonitor => _headers.keys.toList();
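  /// Synchronises the parsed file with its Supabase table, one scope value at
  /// a time: rows present only in the database are deleted, rows present only
  /// in the file are inserted, and identical rows are kept. Changes are also
  /// written to the matching log table and to the configured trackers.
  /// With `donttouchdb: true` the comparison runs but nothing is written.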
  Future<void> go({bool donttouchdb = false}) async {
    if (!filesTomonitor.contains(filename)) return;
    final allmapsToInsert = await parseCsv();
    final scopeName = scopes[filename] ?? "";
    final scopesInNew = allmapsToInsert
        .fold(<String>{}, (t, e) => t..add(e[scopeName] ?? "")).toList();
    for (var scopeInNew in scopesInNew) {
      final mapsToInsert =
          allmapsToInsert.where((e) => e[scopeName] == scopeInNew).toList();
      List<Map<String, dynamic>> oldIds = [];
      List<Map<String, dynamic>> oldComparable = [];
      // Load the existing rows for this scope.
      final res = await supabase
          .from(tables[filename]!)
          .select()
          .eq(scopeName, scopeInNew)
          .limit(300000);
      oldIds.addAll(res.map((e) => {"id": e["id"]}));
      oldComparable.addAll(res.map((e) => e..remove("id")));
      final comparisonResult = compareLists(oldComparable, mapsToInsert);
      List<int> indexToRemove = comparisonResult.removeIndices;
      List<int> indexToMaintain = comparisonResult.maintainIndices;
      final dataToInsert = comparisonResult.insertData;
      // Special handling for exportPGRGPN (placeholder, intentionally empty).
      if (filename == "exportPGRGPN.txt" ||
          filename == "ExportPGRGPNmois.txt") {}
      try {
        // Delete rows that disappeared from the new file, in chunks of 100.
        if (!donttouchdb) {
          for (var e in chunkList(
              indexToRemove.map((f) => oldIds[f]['id']).toList(), 100)) {
            await supabase
                .from(tables[filename]!)
                .delete()
                .inFilter('id', e);
          }
        }
        // Insert the new data.
        if (!donttouchdb) {
          await supabase.from(tables[filename]!).insert(dataToInsert);
        }
      } catch (e, stackTrace) {
        print('Error: $e\n$stackTrace');
      }
      print(
          " Scope:$scopeInNew insert:${dataToInsert.length} remove:${indexToRemove.length} maintain:${indexToMaintain.length}");
      // Log the changes into the log tables.
      final logTable = logTables[filename]!;
      final logData = dataToInsert
          .map((e) => {"scope": scopeInNew, "data": e, "action": "insert"})
          .toList();
      for (var e in chunkList(
          indexToRemove.map((f) => oldComparable[f]).toList(), 100)) {
        // e.forEach((k) => print("log: -: $k"));
        if (!donttouchdb) {
          await supabase.from(logTable).insert(e
              .map((e) =>
                  {"scope": scopeInNew, "data": e, "action": "delete"})
              .toList());
        }
      }
      for (var e in chunkList(logData, 100)) {
        // e.forEach((k) => print("log: +: $k"));
        if (!donttouchdb) await supabase.from(logTable).insert(e);
      }
      // Log tracking data.
      for (var tracker in trackers[filename] ?? []) {
        final String table = tracker["table"];
        final List<String> groupby = tracker["groupby"] ?? [];
        final List<String> track = tracker["track"] ?? [];
        final stateOld = oldComparable.groupBy(
            (e) => groupby.map((f) => e[f]).join("|"),
            dataFunction: (e) =>
                e.filterKeys(track).values.map((j) => j ?? "").join("|"));
        final stateNew = mapsToInsert.groupBy(
            (e) => groupby.map((f) => e[f]).join("|"),
            dataFunction: (e) =>
                e.filterKeys(track).values.map((j) => j ?? "").join("|"));
        List logs = [];
        for (var key
            in (stateOld.keys.toList()..addAll(stateNew.keys)).toSet()) {
          final (add, remove) =
              (stateNew[key] ?? []).compareWith(stateOld[key] ?? []);
          //if (!key.endsWith(tracktlc)) continue;
          // Record each key whose tracked values changed.
          if (add.isNotEmpty || remove.isNotEmpty) {
            final row = {
              "key": list2map(groupby, key.split("|")),
              "add": add.isNotEmpty
                  ? add.map((e) => list2map(track, e.split("|"))).toList()
                  : [],
              "remove": remove.isNotEmpty
                  ? remove.map((e) => list2map(track, e.split("|"))).toList()
                  : [],
            };
            logs.add(row);
          }
        }
        //print(" Tracker:$table");
        for (var e in chunkList(logs, 100)) {
          // e.forEach((k) => print("log: +: $k"));
          if (!donttouchdb) await supabase.from(table).insert(e);
        }
      }
    }
  }
  Map<String, dynamic> list2map(List<String> keys, List<dynamic> data) {
    Map<String, dynamic> map = {};
    for (var i = 0; i < keys.length; i++) {
      final key = keys[i];
      final datum = data[i];
      map[key] = datum;
    }
    return map;
  }
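  // Illustrative example of compareLists (not executed): with
  //   old = [{"a": 1}, {"a": 2}] and new = [{"a": 2}, {"a": 3}],
  // compareLists(old, new) returns maintainIndices: [1], removeIndices: [0],
  // and insertData: [{"a": 3}].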
  // Compare two lists of maps and return the indices to maintain, remove, and insert
  ({
    List<int> maintainIndices,
    List<int> removeIndices,
    // List<int> insertIndices
    List<Map> insertData
  }) compareLists(
      List<Map<String, dynamic>> map1, List<Map<String, dynamic>> map2) {
    List<int> maintainIndices = [];
    List<int> removeIndices = [];
    List<Map<String, dynamic>> insertData = List.from(map2);
    // Find indices to maintain and remove in map1
    for (int i = 0; i < map1.length; i++) {
      final pos = insertData.findMap(map1[i]);
      if (pos > -1) {
        maintainIndices.add(i); // Item exists in both lists
        insertData.removeAt(pos);
      } else {
        removeIndices.add(i); // Item does not exist in map2
      }
    }
    return (
      maintainIndices: maintainIndices,
      removeIndices: removeIndices,
      insertData: insertData
    );
  }
  List<List<T>> chunkList<T>(List<T> list, int chunkSize) {
    if (chunkSize <= 0) {
      throw ArgumentError('chunkSize must be greater than 0');
    }
    List<List<T>> chunks = [];
    for (var i = 0; i < list.length; i += chunkSize) {
      chunks.add(list.sublist(
          i, i + chunkSize > list.length ? list.length : i + chunkSize));
    }
    return chunks;
  }
}
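// Illustrative example of the extension below (not executed):
//   ["a", "b"].compareWith(["b", "c"]) returns add: {"c"}, remove: {"a"},
// i.e. `add` holds items only in the argument, `remove` items only in `this`.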
extension CompareIterables<T> on Iterable<T> {
  /// Compares this iterable with another iterable and returns a record with:
  /// - `add`: items that are in the other iterable but not in this one.
  /// - `remove`: items that are in this iterable but not in the other one.
  (Iterable<T> add, Iterable<T> remove) compareWith(Iterable<T> other) {
    final Set<T> thisSet = toSet();
    final Set<T> otherSet = other.toSet();
    final Set<T> added = otherSet.difference(thisSet);
    final Set<T> removed = thisSet.difference(otherSet);
    return (added, removed);
  }
}
extension FilterMapByKeys on Map {
  /// Returns a new map containing only the keys (and their associated values)
  /// that are present in the [keysToKeep] list.
  Map<K, V> filterKeys<K, V>(List<K> keysToKeep) {
    return Map<K, V>.fromEntries(
      entries
          .where((entry) => keysToKeep.contains(entry.key))
          .cast<MapEntry<K, V>>(),
    );
  }
}
extension RemoveNull<T> on Iterable<T?> {
  /// Returns a new iterable with all null values removed.
  Iterable<T> removeNull() {
    return where((element) => element != null).cast<T>();
  }
}
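// Illustrative example of the extension below (not executed):
//   [1, 2, 3, 4].groupBy((e) => e.isEven ? "even" : "odd")
//   // => {"odd": [1, 3], "even": [2, 4]}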
extension GroupBy<T> on Iterable<T> {
  Map<K, List> groupBy<K>(K Function(T) keyFunction,
      {Function(T)? dataFunction, bool Function(T)? keyIsNullFunction}) {
    final map = <K, List>{};
    for (final element in this) {
      final key = keyFunction(element);
      final keyIsNull =
          keyIsNullFunction == null ? false : keyIsNullFunction(element);
      if (keyIsNull || key == null) continue;
      if (dataFunction != null) {
        map.putIfAbsent(key, () => []).add(dataFunction(element));
      } else {
        map.putIfAbsent(key, () => []).add(element);
      }
    }
    return map;
  }
}
extension NullIfEmpty on String {
  String? get nullIfEmpty => isEmpty ? null : this;
}
extension RemoveQuotes on String {
  String get removeQuotes {
    if (isEmpty) return this; // Return if the string is empty
    // Remove the first and last characters if they are quotes
    String result = this;
    // Check if the first character is a quote
    bool startsWithQuote = result.startsWith('"') || result.startsWith("'");
    if (startsWithQuote) result = result.substring(1);
    // Check if the last character is a quote
    bool endsWithQuote = result.endsWith('"') || result.endsWith("'");
    if (endsWithQuote) result = result.substring(0, result.length - 1);
    return result;
  }
}
bool mapsAreEqual(Map<String, dynamic> map1, Map<String, dynamic> map2) {
  if (map1.length != map2.length) return false;
  for (var key in map1.keys) {
    if (map1[key] != map2[key]) return false;
  }
  return true;
}
extension ContainsMap on List<Map<String, dynamic>> {
  bool containsMap(Map<String, dynamic> map) {
    for (var item in this) {
      if (mapsAreEqual(item, map)) return true;
    }
    return false;
  }

  int findMap(Map<String, dynamic> map) {
    for (int i = 0; i < length; i++) {
      if (mapsAreEqual(this[i], map)) return i;
    }
    return -1;
  }
}