-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathconvert.py
executable file
·42 lines (37 loc) · 1.81 KB
/
convert.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
#!/usr/bin/env python
import json
import argparse
import mysql_to_bigquery_schema_converter as converter
if __name__ == "__main__":
    # CLI entry point: parse arguments, convert a MySQL CREATE TABLE schema
    # to a BigQuery JSON schema, and either print it or write it to disk.
    parser = argparse.ArgumentParser(
        description='Convert mysql schemas to bigquery')
    parser.add_argument('Path',
                        type=str,
                        help='Path to .sql file to convert')
    parser.add_argument('-o', '--output-path',
                        action='store',
                        help='Output path where to store the converted .json file. The file will be named after '
                        'the table name. E.g.: </output/path/>cool_table.json')
    parser.add_argument('-t', '--extra-type-mappings',
                        action='store',
                        help='Path to a .json file used to extend and/or override type mapping.')
    parser.add_argument('-f', '--extra-field-mappings',
                        action='store',
                        help='Path to a .json file used to assign a type to a specific field.')
    # Fix: the original used type=bool with action='store', which applies
    # bool() to the raw string — so "-d False" was truthy (bool("False") is
    # True). A boolean switch must be a flag: present -> True, absent -> False.
    parser.add_argument('-d', '--drop-virtual-fields',
                        action='store_true',
                        help='The generated .json file will not contain VIRTUAL fields.')
    args = parser.parse_args()

    filepath = args.Path
    output_path = args.output_path
    extra_type_mappings = args.extra_type_mappings
    extra_field_mappings = args.extra_field_mappings

    # converter.convert returns the parsed table name and the BigQuery
    # schema as a JSON-serializable list of field definitions.
    table_name, big_query_list = converter.convert(
        filepath, extra_type_mappings, extra_field_mappings, args.drop_virtual_fields)

    if output_path is None:
        # No output path given: print the schema to stdout.
        print(f'\nSchema processed for table: {table_name}\n')
        print(json.dumps(big_query_list, indent=4))
    else:
        # Write <output_path>/<table_name>.json.
        with open(f'{output_path}/{table_name}.json', 'w') as outfile:
            json.dump(big_query_list, outfile, indent=4)