1"""
2Convert MTGJSON v4 to SQLite
3"""
4
5import argparse
6import json
7import logging
8import pathlib
9import sqlite3
10import time
11from typing import Any, Dict, List, Union
12
13LOGGER = logging.getLogger(__name__)


def json_to_sql(json_input, output_file, check_extras=False) -> None:
    """Main function to handle the logic

    :param json_input: Input file (JSON)
    :param output_file: Output info dictionary
    :param check_extras: Also process the additional JSON files, if present
    """
    if not valid_input_output(json_input, output_file):
        exit(1)
    check_extra_inputs(json_input, output_file, check_extras)

    LOGGER.info("Loading json file into memory")
    with json_input.open("r", encoding="utf8") as json_file:
        json_data = json.load(json_file)
    build_sql_database(output_file, json_data)
    build_sql_schema(json_data, output_file)
    parse_and_import_cards(json_data, json_input, output_file)
    parse_and_import_extras(json_input, output_file)
    commit_changes_and_close_db(output_file)
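
# Note: output_file doubles as shared state for the whole module. "path"
# holds the output pathlib.Path, "handle" is filled in later with either a
# text file handle (.sql mode) or a sqlite3.Connection, and
# check_extra_inputs() adds one boolean flag per supplemental input file.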


def valid_input_output(input_file: pathlib.Path,
                       output_dir: Dict) -> bool:
    """
    Ensure the I/O paths are valid and clean for the program
    """
    if not input_file.is_file():
        LOGGER.fatal(f"Invalid input file/directory {input_file}")
        return False

    # Create the output directory if it doesn't exist
    output_dir["path"].parent.mkdir(exist_ok=True)
    if output_dir["path"].is_file():
        LOGGER.warning(f"Output path {output_dir['path']} already exists, moving it")
        # Back up the existing file to another file with a .old extension
        output_dir["path"].replace(
            output_dir["path"].parent.joinpath(output_dir["path"].name + ".old")
        )
    return True


def check_extra_inputs(input_file: pathlib.Path,
                       output_dir: Dict, check_extras=False) -> None:
    """
    Check if there are more JSON files to convert to SQL
    """
    extras = ["AllPrices.json", "AllDeckFiles", "Keywords.json", "CardTypes.json"]
    for extra in extras:
        output_dir[extra] = False

    LOGGER.info("Building using AllPrintings.json master file")
    if not check_extras:
        return

    for extra in extras:
        if input_file.parent.joinpath(extra).is_file():
            LOGGER.info("Building with " + extra + " supplement")
            output_dir[extra] = True
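
# For example, after this call the shared dict might look like:
#   {"path": PosixPath("out.sqlite"), "AllPrices.json": True,
#    "AllDeckFiles": False, "Keywords.json": True, "CardTypes.json": False}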


def build_sql_database(output_file: Dict, json_data: Dict) -> None:
    if output_file["path"].suffix == ".sql":
        version = get_version(json_data)
        output_file["handle"] = open(output_file["path"], "w", encoding="utf8")
        # The .sql target is a MySQL dump script rather than a live database,
        # so write a header that records provenance, opens the transaction,
        # and forces the utf8mb4 character set before any INSERTs
        output_file["handle"].write(
            "\n".join(
                (
                    "-- MTGSQLive Output File",
                    "-- ({})".format(str(time.strftime("%Y-%m-%d %H:%M:%S"))),
                    "-- MTGJSON Version: {}".format(version),
                    "",
                    "START TRANSACTION;",
                    "SET names 'utf8mb4';",
                    "",
                    "",
                )
            )
        )
    else:
        output_file["handle"] = sqlite3.connect(str(output_file["path"]))
        output_file["handle"].execute("pragma journal_mode=wal;")


def get_version(json_data: Dict) -> str:
    if "meta" in json_data:
        if "version" in json_data["meta"]:
            return json_data["meta"]["version"]
    else:
        for set_code, set_data in json_data.items():
            if "meta" in set_data:
                if "version" in set_data["meta"]:
                    return set_data["meta"]["version"]
    return "Unknown"
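
# The MTGJSON version may live at the top level of the file
# ({"meta": {"version": "4.x.x"}, ...}) or inside each set's own "meta"
# block; the top level is checked first, then the per-set fallback.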


def build_sql_schema(json_data: Dict, output_file: Dict) -> None:
    """
    Create the SQL database schema
    """
    LOGGER.info("Building SQLite schema")
    if output_file["path"].suffix == ".sql":
        schema = generate_sql_schema(json_data, output_file, "mysql")
        output_file["handle"].write(schema)
        output_file["handle"].write("COMMIT;\n\n")
    else:
        schema = generate_sql_schema(json_data, output_file, "sqlite")
        cursor = output_file["handle"].cursor()
        cursor.executescript(schema)
        output_file["handle"].commit()


def generate_sql_schema(json_data: Dict, output_file: Dict, engine: str) -> str:
    """
    Generate the SQL database schema from the JSON input

    :param json_data: JSON dictionary
    :param output_file: Output info dictionary
    :param engine: target SQL distribution
    """
    schema = {
        "sets": {},
        "cards": {},
        "tokens": {},
        "prices": {
            "price": {"type": "FLOAT" if engine == "sqlite" else "DECIMAL(8,2)"},
            "type": {"type": "TEXT" if engine == "sqlite" else "ENUM"},
            "date": {"type": "DATE"},
        },
        "rulings": {
            "text": {"type": "TEXT"},
            "date": {"type": "DATE"},
        },
        "legalities": {
            "format": {"type": "TEXT" if engine == "sqlite" else "ENUM"},
            "status": {"type": "TEXT" if engine == "sqlite" else "ENUM"},
        },
        "foreign_data": {
            "flavorText": {"type": "TEXT"},
            "language": {"type": "TEXT" if engine == "sqlite" else "ENUM"},
            "multiverseid": {"type": "INTEGER"},
            "name": {"type": "TEXT"},
            "text": {"type": "TEXT"},
            "type": {"type": "TEXT"},
        },
        "set_translations": {
            "language": {"type": "TEXT" if engine == "sqlite" else "ENUM"},
            "translation": {"type": "TEXT"},
        },
    }
    indexes = {
        "cards": {"uuid": "(36) UNIQUE"},
        "tokens": {"uuid": "(36)"},
        "sets": {"code": "(8) UNIQUE"},
    }
    enums = {
        "sets": ["type"],
        "prices": ["type"],
        "foreign_data": ["language"],
        "set_translations": ["language"],
        "legalities": ["format", "status"],
        "cards": ["borderColor", "frameEffect", "frameVersion", "layout", "rarity"],
        "tokens": ["borderColor", "layout"],
    }
    if "data" in json_data:
        json_data = json_data["data"]
    for setCode, setData in json_data.items():
        # loop through each set property
        for setKey, setValue in setData.items():
            if setKey == "translations":
                setKey = "set_translations"
            # determine if the set property should be its own table
            if setKey in schema:
                if setKey == "cards" or setKey == "tokens":
                    # loop through each card/token property
                    for item in setValue:
                        for propKey, propValue in item.items():
                            if propKey == "foreignData":
                                propKey = "foreign_data"
                            # determine if the card/token property is a table
                            if propKey in schema:
                                # handle enum options
                                if propKey in enums:
                                    if propKey == "foreign_data":
                                        if schema[propKey]["language"]["type"] == "ENUM":
                                            for foreign in propValue:
                                                if "options" in schema[propKey]["language"]:
                                                    if foreign["language"] not in schema[propKey]["language"]["options"]:
                                                        schema[propKey]["language"]["options"].append(foreign["language"])
                                                else:
                                                    schema[propKey]["language"]["options"] = [foreign["language"]]
                                    elif propKey == "legalities":
                                        if schema[propKey]["format"]["type"] == "ENUM":
                                            for card_format in propValue.keys():
                                                if "options" in schema[propKey]["format"]:
                                                    if card_format not in schema[propKey]["format"]["options"]:
                                                        schema[propKey]["format"]["options"].append(card_format)
                                                else:
                                                    schema[propKey]["format"]["options"] = [card_format]
                                        if schema[propKey]["status"]["type"] == "ENUM":
                                            for status in propValue.values():
                                                if "options" in schema[propKey]["status"]:
                                                    if status not in schema[propKey]["status"]["options"]:
                                                        schema[propKey]["status"]["options"].append(status)
                                                else:
                                                    schema[propKey]["status"]["options"] = [status]
                                    elif propKey == "prices":
                                        if schema[propKey]["type"]["type"] == "ENUM":
                                            for price_type in propValue.keys():
                                                if "options" in schema[propKey]["type"]:
                                                    if price_type not in schema[propKey]["type"]["options"]:
                                                        schema[propKey]["type"]["options"].append(price_type)
                                                else:
                                                    schema[propKey]["type"]["options"] = [price_type]
                                # create the 'uuid' foreign key for each reference table
                                if "uuid" not in schema[propKey]:
                                    if engine == "sqlite":
                                        schema[propKey]["uuid"] = {
                                            "type": "TEXT(36) REFERENCES cards(uuid) ON UPDATE CASCADE ON DELETE CASCADE"
                                        }
                                    else:
                                        schema[propKey]["uuid"] = {
                                            "type": "CHAR(36) NOT NULL,\n    INDEX(uuid),\n    FOREIGN KEY (uuid) REFERENCES cards(uuid) ON UPDATE CASCADE ON DELETE CASCADE"
                                        }
                            else:  # 'cards' table properties
                                # determine if the card property is already in the list
                                if propKey in schema[setKey].keys():
                                    if propKey in enums[setKey] and engine != "sqlite":
                                        if propValue not in schema[setKey][propKey]["options"]:
                                            schema[setKey][propKey]["options"].append(propValue)
                                else:
                                    if propKey in enums[setKey] and engine != "sqlite":
                                        schema[setKey][propKey] = {"type": "ENUM", "options": [propValue]}
                                    else:
                                        # determine the type of the property
                                        schema[setKey][propKey] = {
                                            "type": get_sql_type(propValue, engine)
                                        }
                                    # determine if the card property is an index
                                    if propKey in indexes[setKey]:
                                        if engine == "sqlite":
                                            schema[setKey][propKey]["type"] += (
                                                indexes[setKey][propKey] + " NOT NULL"
                                            )
                                        else:
                                            schema[setKey][propKey]["type"] = (
                                                "CHAR" + indexes[setKey][propKey] + " NOT NULL"
                                            )

                if setKey == "set_translations":
                    if schema[setKey]["language"]["type"] == "ENUM":
                        if setValue:
                            for language in setValue.keys():
                                if "options" not in schema[setKey]["language"]:
                                    schema[setKey]["language"]["options"] = [language]
                                elif language not in schema[setKey]["language"]["options"]:
                                    schema[setKey]["language"]["options"].append(language)
                # add 'setCode' to each table that references 'sets'
                if "setCode" not in schema[setKey]:
                    if engine == "sqlite":
                        schema[setKey]["setCode"] = {
                            "type": "TEXT(8) REFERENCES sets(code) ON UPDATE CASCADE ON DELETE CASCADE"
                        }
                    else:
                        schema[setKey]["setCode"] = {
                            "type": "VARCHAR(8) NOT NULL,\n    INDEX(setCode),\n    FOREIGN KEY (setCode) REFERENCES sets(code) ON UPDATE CASCADE ON DELETE CASCADE"
                        }
            else:  # 'sets' table properties
                if setKey in schema["sets"].keys():
                    # if the property type is enum, add the value to the list if necessary
                    if setKey in enums["sets"] and engine != "sqlite":
                        if setValue not in schema["sets"][setKey]["options"]:
                            schema["sets"][setKey]["options"].append(setValue)
                else:
                    # determine the type of the set property
                    if setKey in enums["sets"] and engine != "sqlite":
                        schema["sets"][setKey] = {"type": "ENUM", "options": [setValue]}
                    elif setKey == "releaseDate":
                        schema["sets"][setKey] = {"type": "DATE"}
                    else:
                        schema["sets"][setKey] = {
                            "type": get_sql_type(setValue, engine)
                        }
                    if setKey in indexes["sets"]:
                        if engine == "sqlite":
                            schema["sets"][setKey]["type"] += (
                                indexes["sets"][setKey] + " NOT NULL"
                            )
                        else:
                            schema["sets"][setKey]["type"] = (
                                "VARCHAR" + indexes["sets"][setKey] + " NOT NULL"
                            )

    # add extra tables manually if necessary
    if output_file["AllDeckFiles"]:
        schema["decks"] = {
            "fileName": {"type": "TEXT"},
            "name": {"type": "TEXT"},
            "mainboard": {"type": "TEXT NOT NULL"},
            "sideboard": {"type": "TEXT"},
            "type": {"type": "TEXT"},
            "releaseDate": {"type": "TEXT"},
            "code": {
                "type": "TEXT(8) REFERENCES sets(code) ON UPDATE CASCADE ON DELETE CASCADE"
                if engine == "sqlite"
                else "VARCHAR(8) NOT NULL,\n    INDEX(code),\n    FOREIGN KEY (code) REFERENCES sets(code) ON UPDATE CASCADE ON DELETE CASCADE"
            },
        }
    if output_file["Keywords.json"]:
        schema["keywords"] = {
            "word": {"type": "TEXT UNIQUE NOT NULL"},
            "type": {"type": "TEXT NOT NULL"},
        }
    if output_file["CardTypes.json"]:
        schema["types"] = {
            "type": {"type": "TEXT UNIQUE NOT NULL"},
            "subTypes": {"type": "TEXT"},
            "superTypes": {"type": "TEXT"},
        }
    return get_query_from_dict(schema, engine)


def get_sql_type(mixed, engine: str) -> str:
    """
    Return a string with the SQL type of the parameter mixed

    The type depends on the SQL distribution in some cases
    """
    if isinstance(mixed, (str, list, dict)):
        return "TEXT"
    elif isinstance(mixed, bool):
        if engine == "sqlite":
            return "INTEGER NOT NULL DEFAULT 0"
        else:
            return "TINYINT(1) NOT NULL DEFAULT 0"
    elif isinstance(mixed, float):
        return "FLOAT"
    elif isinstance(mixed, int):
        return "INTEGER"
    return "TEXT"
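
# A quick sanity check of the mapping (sqlite engine):
#   get_sql_type("Lightning Bolt", "sqlite")  -> "TEXT"
#   get_sql_type(3.5, "sqlite")               -> "FLOAT"
#   get_sql_type(4, "sqlite")                 -> "INTEGER"
#   get_sql_type(True, "sqlite")              -> "INTEGER NOT NULL DEFAULT 0"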


def get_query_from_dict(schema, engine):
    q = ""
    for table_name, table_data in schema.items():
        q += f"CREATE TABLE `{table_name}` (\n"
        if engine == "sqlite":
            q += "    id INTEGER PRIMARY KEY AUTOINCREMENT,\n"
        else:
            q += "    id INTEGER PRIMARY KEY AUTO_INCREMENT,\n"
        for attribute in sorted(table_data.keys()):
            if table_data[attribute]["type"] == "ENUM" and "options" not in table_data[attribute]:
                table_data[attribute]["type"] = "TEXT"
            q += f"    {attribute} {table_data[attribute]['type']}"
            if table_data[attribute]["type"] == "ENUM":
                q += "('" + "', '".join(table_data[attribute]["options"]) + "')"
            q += ",\n"
        if engine == "sqlite":
            q = q[:-2] + "\n);\n\n"
        else:
            q = q[:-2] + "\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n\n"

    return q
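
# For illustration, a schema entry such as
#   {"keywords": {"word": {"type": "TEXT UNIQUE NOT NULL"}}}
# renders under the sqlite engine as:
#   CREATE TABLE `keywords` (
#       id INTEGER PRIMARY KEY AUTOINCREMENT,
#       word TEXT UNIQUE NOT NULL
#   );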


def parse_and_import_cards(
    json_data: Dict, input_file: pathlib.Path, output_file: Dict
) -> None:
    """
    Parse the JSON cards and input them into the database

    :param json_data: Parsed JSON data
    :param input_file: AllPrintings.json file
    :param output_file: Output info dictionary
    """
    LOGGER.info("Building sets")
    if "data" in json_data:
        json_data = json_data["data"]
    for set_code, set_data in json_data.items():
        LOGGER.info(f"Inserting set row for {set_code}")
        set_insert_values = handle_set_row_insertion(set_data)
        sql_dict_insert(set_insert_values, "sets", output_file)

        for card in set_data.get("cards", []):
            LOGGER.debug(f"Inserting card row for {card.get('name')}")
            card_attr: Dict[str, Any] = handle_card_row_insertion(card, set_code)
            sql_insert_all_card_fields(card_attr, output_file)

        for token in set_data.get("tokens", []):
            LOGGER.debug(f"Inserting token row for {token.get('name')}")
            token_attr = handle_token_row_insertion(token, set_code)
            sql_dict_insert(token_attr, "tokens", output_file)

        for language, translation in set_data.get("translations", {}).items():
            LOGGER.debug(f"Inserting set_translation row for {language}")
            set_translation_attr = handle_set_translation_row_insertion(
                language, translation, set_code
            )
            sql_dict_insert(set_translation_attr, "set_translations", output_file)


def handle_set_row_insertion(set_data: Dict[str, Any]) -> Dict[str, Any]:
    """
    This method will take the set data and convert it,
    preparing for SQLite insertion

    :param set_data: Data to process
    :return: Dictionary ready for insertion
    """
    set_skip_keys = ["cards", "tokens", "translations"]
    set_insert_values = {}

    for key, value in set_data.items():
        if key in set_skip_keys:
            continue

        if key == "boosterV3":
            set_insert_values[key] = modify_for_sql_insert(str(value))
            continue

        set_insert_values[key] = modify_for_sql_insert(value)

    return set_insert_values


def handle_card_row_insertion(
    card_data: Dict[str, Any], set_name: str
) -> Dict[str, Any]:
    """
    This method will take the card data and convert it,
    preparing for SQLite insertion

    :param card_data: Data to process
    :param set_name: Set code to attach to the card row
    :return: Dictionary ready for insertion
    """
    # ORDERING MATTERS HERE
    card_skip_keys = ["foreignData", "legalities", "rulings", "prices"]

    card_insert_values: Dict[str, Any] = {"setCode": set_name}
    for key, value in card_data.items():
        if key in card_skip_keys:
            continue
        card_insert_values[key] = modify_for_sql_insert(value)

    foreign_insert_values: List[Dict[str, Any]] = []
    if card_skip_keys[0] in card_data.keys():
        foreign_insert_values = handle_foreign_rows(card_data, card_data["uuid"])

    legal_insert_values: List[Dict[str, Any]] = []
    if card_skip_keys[1] in card_data.keys():
        legal_insert_values = handle_legal_rows(card_data, card_data["uuid"])

    ruling_insert_values: List[Dict[str, Any]] = []
    if card_skip_keys[2] in card_data.keys():
        ruling_insert_values = handle_ruling_rows(card_data, card_data["uuid"])

    price_insert_values: List[Dict[str, Any]] = []
    if card_skip_keys[3] in card_data.keys():
        price_insert_values = handle_price_rows(card_data, card_data["uuid"])

    return {
        "cards": card_insert_values,
        "foreign_data": foreign_insert_values,
        "legalities": legal_insert_values,
        "rulings": ruling_insert_values,
        "prices": price_insert_values,
    }


def sql_insert_all_card_fields(
    card_attributes: Dict[str, Any], output_file: Dict
) -> None:
    """
    Given all of the card's data, insert the data into the
    appropriate SQLite tables.

    :param card_attributes: Dictionary of card data, keyed by table name
    :param output_file: Output info dictionary
    """
    sql_dict_insert(card_attributes["cards"], "cards", output_file)

    for foreign_val in card_attributes["foreign_data"]:
        sql_dict_insert(foreign_val, "foreign_data", output_file)

    for legal_val in card_attributes["legalities"]:
        sql_dict_insert(legal_val, "legalities", output_file)

    for rule_val in card_attributes["rulings"]:
        sql_dict_insert(rule_val, "rulings", output_file)

    if not output_file["AllPrices.json"]:
        for price_val in card_attributes["prices"]:
            sql_dict_insert(price_val, "prices", output_file)


def handle_token_row_insertion(
    token_data: Dict[str, Any], set_name: str
) -> Dict[str, Any]:
    """
    This method will take the token data and convert it,
    preparing for SQLite insertion

    :param token_data: Data to process
    :param set_name: Set code to attach to the token row
    :return: Dictionary ready for insertion
    """
    token_insert_values: Dict[str, Any] = {"setCode": set_name}
    for key, value in token_data.items():
        token_insert_values[key] = modify_for_sql_insert(value)

    return token_insert_values


def handle_set_translation_row_insertion(
    language: str, translation: str, set_name: str
) -> Dict[str, Any]:
    """
    This method will take the set translation data and convert it,
    preparing for SQLite insertion

    :param language: The language of the set translation
    :param translation: The set name translated into the given language
    :param set_name: Set code the translation belongs to
    :return: Dictionary ready for insertion
    """
    set_translation_insert_values: Dict[str, Any] = {
        "language": language,
        "translation": translation,
        "setCode": set_name,
    }

    return set_translation_insert_values


def parse_and_import_extras(input_file: pathlib.Path, output_file: Dict) -> None:
    """
    Parse the extra data files and input them into the database

    :param input_file: AllPrintings.json file
    :param output_file: Output info dictionary
    """
    if output_file["AllPrices.json"]:
        LOGGER.info("Inserting AllPrices rows")
        with input_file.parent.joinpath("AllPrices.json").open(
            "r", encoding="utf8"
        ) as f:
            json_data = json.load(f)
        for card_uuid, price_data in json_data.items():
            for price_type, price_dict in price_data["prices"].items():
                if price_type != "uuid":
                    for price_date, price_value in price_dict.items():
                        if price_value:
                            sql_dict_insert(
                                {
                                    "uuid": card_uuid,
                                    "type": price_type,
                                    "date": price_date,
                                    "price": float(price_value),
                                },
                                "prices",
                                output_file,
                            )

    if output_file["AllDeckFiles"]:
        LOGGER.info("Inserting Deck rows")
        for deck_file in input_file.parent.joinpath("AllDeckFiles").glob("*.json"):
            with deck_file.open("r", encoding="utf8") as f:
                json_data = json.load(f)
            deck_data = {}
            for key, value in json_data.items():
                if key == "meta":
                    continue
                if key == "mainBoard" or key == "sideBoard":
                    cards = []
                    for card in value:
                        for _ in range(card["count"]):
                            cards.append(card["uuid"])
                    deck_data[key] = ", ".join(cards)
                else:
                    deck_data[key] = value
            if "fileName" not in deck_data:
                deck_data["fileName"] = deck_file.stem
            sql_dict_insert(deck_data, "decks", output_file)

    if output_file["Keywords.json"]:
        LOGGER.info("Inserting Keyword rows")
        with input_file.parent.joinpath("Keywords.json").open(
            "r", encoding="utf8"
        ) as f:
            json_data = json.load(f)
        for keyword_type in json_data:
            if keyword_type == "meta":
                continue
            for keyword in json_data[keyword_type]:
                sql_dict_insert(
                    {"word": keyword, "type": keyword_type}, "keywords", output_file
                )

    if output_file["CardTypes.json"]:
        LOGGER.info("Inserting Card Type rows")
        with input_file.parent.joinpath("CardTypes.json").open(
            "r", encoding="utf8"
        ) as f:
            json_data = json.load(f)
        for card_type in json_data["types"]:
            subtypes = []
            for subtype in json_data["types"][card_type]["subTypes"]:
                subtypes.append(subtype)
            supertypes = []
            for supertype in json_data["types"][card_type]["superTypes"]:
                supertypes.append(supertype)
            sql_dict_insert(
                {
                    "type": card_type,
                    "subTypes": ", ".join(subtypes),
                    "superTypes": ", ".join(supertypes),
                },
                "types",
                output_file,
            )


def handle_foreign_rows(
    card_data: Dict[str, Any], card_uuid: str
) -> List[Dict[str, Any]]:
    """
    This method will take the card data and convert it,
    preparing for SQLite insertion

    :param card_data: Data to process
    :param card_uuid: UUID to be used as a key
    :return: List of dicts ready for insertion
    """

    foreign_entries = []
    for entry in card_data["foreignData"]:
        foreign_entries.append(
            {
                "uuid": card_uuid,
                "flavorText": entry.get("flavorText", ""),
                "language": entry.get("language", ""),
                "multiverseId": entry.get("multiverseId", None),
                "name": entry.get("name", ""),
                "text": entry.get("text", ""),
                "type": entry.get("type", ""),
            }
        )

    return foreign_entries


def handle_legal_rows(
    card_data: Dict[str, Any], card_uuid: str
) -> List[Dict[str, Any]]:
    """
    This method will take the card data and convert it,
    preparing for SQLite insertion

    :param card_data: Data to process
    :param card_uuid: UUID to be used as a key
    :return: List of dicts, ready for insertion
    """
    legalities = []
    for card_format, format_status in card_data["legalities"].items():
        legalities.append(
            {"uuid": card_uuid, "format": card_format, "status": format_status}
        )

    return legalities
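
# Example: {"legalities": {"modern": "Legal", "legacy": "Banned"}} becomes
#   [{"uuid": <uuid>, "format": "modern", "status": "Legal"},
#    {"uuid": <uuid>, "format": "legacy", "status": "Banned"}]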


def handle_ruling_rows(
    card_data: Dict[str, Any], card_uuid: str
) -> List[Dict[str, Any]]:
    """This method will take the card data and convert it,
    preparing for SQLite insertion

    :param card_data: Data to process
    :param card_uuid: UUID to be used as a key
    :return: List of dicts, ready for insertion
    """
    rulings = []
    for rule in card_data["rulings"]:
        rulings.append(
            {
                "uuid": card_uuid,
                "date": rule.get("date", ""),
                "text": rule.get("text", ""),
            }
        )
    return rulings


def handle_price_rows(
    card_data: Dict[str, Any], card_uuid: str
) -> List[Dict[str, Any]]:
    """This method will take the card data and convert it,
    preparing for SQLite insertion

    :param card_data: Data to process
    :param card_uuid: UUID to be used as a key
    :return: List of dicts, ready for insertion
    """
    prices = []
    for price_type in card_data["prices"]:
        if card_data["prices"][price_type] is not None:
            for date, price in card_data["prices"][price_type].items():
                if price:
                    prices.append(
                        {
                            "uuid": card_uuid,
                            "type": price_type,
                            "price": price,
                            "date": date,
                        }
                    )
    return prices
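
# card_data["prices"] nests price type -> date -> value, e.g.
#   {"paper": {"2020-06-24": 1.25}}
# which flattens to one row per (uuid, type, date), skipping empty values.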


def modify_for_sql_insert(data: Any) -> Union[str, int, float, None]:
    """
    Arrays and booleans can't be inserted as-is, so we need to convert them

    :param data: Data to modify
    :return: Value ready for insertion
    """
    # bool must be checked before the scalar pass-through below,
    # since bool is a subclass of int
    if isinstance(data, bool):
        return int(data)

    if isinstance(data, (str, int, float)):
        return data

    # If the value is empty/null, mark it in SQL as such
    if not data:
        return None

    if isinstance(data, list) and data and isinstance(data[0], str):
        return ",".join(data)

    if isinstance(data, dict):
        return str(data)

    return ""
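
# A few representative conversions:
#   modify_for_sql_insert(["W", "U"]) -> "W,U"
#   modify_for_sql_insert(True)       -> 1
#   modify_for_sql_insert({})         -> None (empty values become NULL)
#   modify_for_sql_insert({"a": 1})   -> "{'a': 1}"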


def modify_for_sql_file(data: Dict[str, Any]) -> Dict[str, Any]:
    for key in data.keys():
        if isinstance(data[key], str):
            data[key] = "'" + data[key].replace("'", "''") + "'"
        if str(data[key]) == "False":
            data[key] = 0
        if str(data[key]) == "True":
            data[key] = 1
        if data[key] is None:
            data[key] = "NULL"
    return data
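
# Example: {"name": "Urza's Tower", "isOnlineOnly": False, "side": None}
# becomes {"name": "'Urza''s Tower'", "isOnlineOnly": 0, "side": "NULL"},
# ready to be interpolated into a raw .sql INSERT statement below.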


def sql_dict_insert(data: Dict[str, Any], table: str, output_file: Dict) -> None:
    """
    Insert a dictionary into a SQL table

    :param data: Dict to insert
    :param table: Table to insert to
    :param output_file: Output info dictionary
    """
    try:
        if output_file["path"].suffix == ".sql":
            data = modify_for_sql_file(data)
            query = (
                "INSERT INTO "
                + table
                + " ("
                + ", ".join(data.keys())
                + ") VALUES ({"
                + "}, {".join(data.keys())
                + "});\n"
            )
            query = query.format(**data)
            output_file["handle"].write(query)
        else:
            cursor = output_file["handle"].cursor()
            columns = ", ".join(data.keys())
            placeholders = ":" + ", :".join(data.keys())
            query = f"INSERT INTO {table} ({columns}) VALUES ({placeholders})"
            cursor.execute(query, data)
    except Exception:
        datastr = str(data)
        LOGGER.warning(f"Failed to insert row in '{table}' with values: {datastr}")
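
# In SQLite mode the query above is parameterized with named placeholders,
# e.g. for the keywords table:
#   INSERT INTO keywords (word, type) VALUES (:word, :type)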


def commit_changes_and_close_db(output_file: Dict) -> None:
    if output_file["path"].suffix == ".sql":
        output_file["handle"].write("COMMIT;")
    else:
        output_file["handle"].commit()
    output_file["handle"].close()
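

# argparse and logging are imported above, but the CLI wiring itself is not
# part of this excerpt. A minimal entry point might look like the sketch
# below (the flag names are assumptions, not the original interface):
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Convert MTGJSON v4 to SQLite")
    parser.add_argument("-i", "--input", required=True, help="Path to AllPrintings.json")
    parser.add_argument("-o", "--output", required=True, help="Output .sqlite or .sql path")
    parser.add_argument("-x", "--extras", action="store_true",
                        help="Also process supplemental JSON files if present")
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO)
    json_to_sql(
        pathlib.Path(args.input).expanduser(),
        {"path": pathlib.Path(args.output).expanduser()},
        args.extras,
    )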