diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
new file mode 100644
index 0000000..d2b64de
--- /dev/null
+++ b/.github/workflows/test.yaml
@@ -0,0 +1,36 @@
+name: 'Test'
+
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+
+jobs:
+  run:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      # Installing python directly due to poetry issue with upstream python
+      # https://github.com/python-poetry/poetry/issues/7343
+      - uses: actions/setup-python@v4
+        with:
+          python-version: '3.11'
+
+      - name: Install and configure Poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: 1.4.2
+          virtualenvs-create: false
+
+      - name: Install dependencies
+        run: make install
+
+      - name: Run lint
+        run: make lint
+
+      - name: Run test
+        run: make test
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..1269488
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+data
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..10134d0
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,35 @@
+FLAGS :=
+
+ifeq ($(RAW),1)
+	FLAGS += --raw
+endif
+
+ifeq ($(DRY_RUN),1)
+	FLAGS += --dry-run
+endif
+
+ifeq ($(VERBOSE),1)
+	FLAGS += --verbose
+endif
+
+export:
+	poetry run metabase_export.py ${FLAGS} all
+
+import:
+	poetry run metabase_import.py ${FLAGS} all ${COLLECTION}
+
+clean:
+	rm -rf ${MB_DATA_DIR}
+
+format:
+	poetry run isort .
+
+lint: format
+	poetry run flake8 --max-line-length=140 .
+	git diff --quiet --exit-code
+
+test:
+	poetry run pytest --verbose -v -s -k $(or ${TEST_FUNC},'') .
+
+install:
+	poetry install --verbose
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..12004db
--- /dev/null
+++ b/README.md
@@ -0,0 +1,134 @@
+# Metabase Export/Import
+
+This Python library allows you to export and import the contents of a Metabase Community Edition instance.
+
+## Example Scripts
+
+Two scripts are provided to export and import the fields, cards and dashboards of a specific database configured in Metabase:
+
+```shell
+# Export each component separately
+python3 metabase_export.py [cards|fields|metrics|dashboards]
+
+# Export everything together
+python3 metabase_export.py all
+
+# Export everything together and store the raw JSON without parsing it
+python3 metabase_export.py --raw all
+```
+
+The script produces one file per exported element type (the database name is used as a prefix): `my_database_fields_exported.csv`, `my_database_cards_exported.json` and `my_database_dashboard_exported.json`.
+
+```shell
+# Import each component separately
+python3 metabase_import.py [cards|fields|metrics|dashboards]
+
+# Import everything together
+python3 metabase_import.py all
+```
+
+The script imports from three files, one per element type: `my_database_fields_forimport.csv`, `my_database_cards_forimport.json` and `my_database_dashboard_forimport.json`.
+
+## Configuration
+
+Available flags for the scripts:
+
+| flag      | description                                      |
+|-----------|--------------------------------------------------|
+| --verbose | increase output verbosity of each command        |
+| --dry-run | run the script without making POST/PUT requests  |
+| --raw     | store the raw JSON without parsing it            |
+
+It is recommended to predefine all the following environment variables in a `.env` file:
+
+| env var               | description                                                |
+|-----------------------|------------------------------------------------------------|
+| `MB_DATA_DIR`         | Directory to use for the export/import operations          |
+| `MB_EXPORT_HOST`      | Source Metabase instance (`https:///api/`)                 |
+| `MB_EXPORT_USERNAME`  | Admin username                                             |
+| `MB_EXPORT_PASSWORD`  | Admin user password                                        |
+| `MB_EXPORT_DB`        | The database name to export                                |
+| `MB_IMPORT_HOST`      | Destination Metabase instance (`https:///api/`)            |
+| `MB_IMPORT_USERNAME`  | Admin username                                             |
+| `MB_IMPORT_PASSWORD`  | Admin user password                                        |
+| `MB_IMPORT_DB`        | The name of the same database in the destination instance  |
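+
+For instance, a `.env` file could look like the following (the hosts, credentials and names below are placeholders, not defaults):
+
+```shell
+MB_DATA_DIR=./data
+MB_EXPORT_HOST=https://metabase-source.example.org/api/
+MB_EXPORT_USERNAME=admin@example.org
+MB_EXPORT_PASSWORD=change-me
+MB_EXPORT_DB=my_database
+MB_IMPORT_HOST=https://metabase-destination.example.org/api/
+MB_IMPORT_USERNAME=admin@example.org
+MB_IMPORT_PASSWORD=change-me
+MB_IMPORT_DB=my_database
+```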
+
+## Library calls
+
+### database creation/deletion
+
+```python
+import metabase
+
+# connect to metabase
+ametabase = metabase.MetabaseApi("http://localhost:3000/api/", "metabase_username", "metabase_password")
+
+# add a sqlite database located at /path/to/database.sqlite. The associated Metabase name is my_database
+ametabase.create_database("my_database", 'sqlite', {"db": "/path/to/database.sqlite"})
+
+# ametabase.delete_database('my_database')
+```
+
+### users and permissions
+
+```python
+ametabase.create_user("user@example.org", "the_password", {'first_name': 'John', 'last_name': 'Doe'})
+
+# Add a group and associate it with our new user
+ametabase.membership_add('user@example.org', 'a_group')
+
+# allow members of our new group (a_group) to read data and run native queries on my_database
+ametabase.permission_set_database('a_group', 'my_database', True, True)
+```
+
+### collections and permissions
+
+```python
+# create a collection and its sub-collection
+ametabase.create_collection('sub_collection', 'main_collection')
+
+# give write access on the new collections to the members of a_group
+ametabase.permission_set_collection('main_collection', 'a_group', 'write')
+ametabase.permission_set_collection('sub_collection', 'a_group', 'write')
+```
+
+### schema
+
+```python
+# export and import the schema of fields
+ametabase.export_fields_to_csv('my_database', 'my_database_fields.csv')
+ametabase.import_fields_from_csv('my_database', 'my_database_fields.csv')
+```
+
+### cards and dashboards
+
+```python
+ametabase.export_cards_to_json('my_database', 'my_database_cards.json')
+ametabase.export_dashboards_to_json('my_database', 'my_database_dashboard.json')
+
+ametabase.import_cards_from_json('my_database', 'my_database_cards.json')
+ametabase.import_dashboards_from_json('my_database', 'my_database_dashboard.json')
+```
+
+## Development
+
+Development of this repository is done with [Poetry](https://python-poetry.org/docs/).
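+
+For reference, the library calls above can be combined into a single end-to-end migration from a source instance to a destination instance. The sketch below is illustrative only: the hosts, credentials, collection name and the `./data` directory are placeholders.
+
+```python
+import metabase
+
+# source and destination instances (placeholder URLs and credentials)
+source = metabase.MetabaseApi("https://metabase-source.example.org/api/", "admin@example.org", "secret")
+destination = metabase.MetabaseApi("https://metabase-destination.example.org/api/", "admin@example.org", "secret")
+
+data_dir = "./data"  # plays the same role as MB_DATA_DIR for the scripts
+
+# export field metadata, cards and dashboards of my_database from the source instance
+source.export_fields_to_csv("my_database", data_dir)
+source.export_cards_to_json("my_database", data_dir)
+source.export_dashboards_to_json("my_database", data_dir)
+
+# re-import everything into the destination instance, attaching cards and dashboards
+# to the main_collection collection (None means: import all fields found in fields.csv)
+destination.import_fields_from_csv("my_database", data_dir, None)
+destination.import_cards_from_json("my_database", data_dir, "main_collection")
+destination.import_dashboards_from_json("my_database", data_dir, "main_collection")
+```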
+ +### Install dependencies + +```shell +make install +``` + +### Test + +The tests are running with pytest + +```shell +make test + +# Run a tests matching an expression TEST_FUNC= +TEST_FUNC="invalid" make test +``` diff --git a/metabase.py b/metabase.py index 9d263ab..85b086e 100644 --- a/metabase.py +++ b/metabase.py @@ -1,73 +1,84 @@ -import requests -import json import csv import datetime +import json import os +import re +from concurrent.futures import ThreadPoolExecutor, as_completed + +import requests +from rich.progress import Progress + class MetabaseApi: - def __init__(self, apiurl, username, password, debug=False): + def __init__(self, apiurl, username, password, debug: bool = False, dry_run: bool = False): self.apiurl = apiurl self.username = username self.password = password self.debug = debug - + self.dry_run = dry_run + self.metabase_session = None self.database_export = None self.cards_export = None self.metrics_export = None + self.dashboards_export = None self.dashboards_name2id = None self.cards_name2id = {} self.snippets_name2id = {} self.collections_name2id = {} self.metrics_name2id = {} - - def query (self, method, query_name, json_data = None): + + def query(self, method, query_name, json_data=None): json_str = None if json_data is not None: json_str = json.dumps(json_data) - - headers = { "Content-Type": "application/json;charset=utf-8" } - + + headers = {"Content-Type": "application/json;charset=utf-8"} + if self.metabase_session is not None: headers["X-Metabase-Session"] = self.metabase_session - - query_url = self.apiurl+query_name - - if (self.debug): - print(method+' '+query_url) - print(headers) - print(json_str) - + + query_url = self.apiurl + query_name + + if self.debug: + print(method + ' ' + query_url) + print('Headers:\t', headers) + print('Data:\t', json_str) + + if self.dry_run: + if method != "GET" and query_name != "session": + return {} + if method == 'POST': r = requests.post( - query_url , - data = json_str, - headers= headers - ) + query_url, + data=json_str, + headers=headers + ) elif method == 'GET': r = requests.get( - query_url, - data = json_str, - headers= headers - ) + query_url, + data=json_str, + headers=headers + ) elif method == 'PUT': r = requests.put( - query_url, - data = json_str, - headers= headers - ) + query_url, + data=json_str, + headers=headers + ) elif method == 'DELETE': r = requests.delete( - query_url, - data = json_str, - headers= headers - ) + query_url, + data=json_str, + headers=headers + ) else: - raise ConnectionError('unkown method: '+method+' (GET,POST,DELETE allowed)') - + raise ConnectionError('unkown method: ' + method + ' (GET,POST,DELETE allowed)') + if self.debug: - print(r.text) - + print('Response:\t', r.text) + try: query_response = r.json() if query_response.get('errors'): @@ -78,13 +89,13 @@ def query (self, method, query_name, json_data = None): raise ConnectionError(query_response) except AttributeError: if r.text.find('endpoint') > -1: - raise ConnectionError(query_url+" ("+method+"): "+r.text) + raise ConnectionError(query_url + " (" + method + "): " + r.text) return query_response except ValueError: if (r.text): raise ConnectionError(r.text) return {} - + return query_response def create_session(self): @@ -98,7 +109,7 @@ def create_session(self): def create_session_if_needed(self): if self.metabase_session: - return; + return self.create_session() def get_databases(self, full_info=False): @@ -116,7 +127,9 @@ def create_database(self, name, engine, details, is_full_sync=True, is_on_demand data = 
self.get_database(name, False, False) if data: return data - return self.query('POST', 'database', {"name": name, 'engine': engine, "details": details, "is_full_sync": is_full_sync, "is_on_demand": is_on_demand, "auto_run_queries": auto_run_queries}) + return self.query('POST', 'database', + {"name": name, 'engine': engine, "details": details, "is_full_sync": is_full_sync, + "is_on_demand": is_on_demand, "auto_run_queries": auto_run_queries}) def get_database(self, name, full_info=False, check_if_exists=True): name2database = {} @@ -126,18 +139,19 @@ def get_database(self, name, full_info=False, check_if_exists=True): if not data and not check_if_exists: return {} if not data: - raise ValueError("Database \"" + name + "\" does not exist. Existing databases are: " + ', '.join(name2database.keys())) + raise ValueError( + "Database \"" + name + "\" does not exist. Existing databases are: " + ', '.join(name2database.keys())) if not full_info: return data - return self.query('GET', 'database/'+str(data['id'])+'?include=tables.fields') + return self.query('GET', 'database/' + str(data['id']) + '?include=tables.fields') def delete_database(self, name): self.create_session_if_needed() data = self.get_database(name, False, False) if not data: return - return self.query('DELETE', 'database/'+str(data['id']), {'id': data['id']}) + return self.query('DELETE', 'database/' + str(data['id']), {'id': data['id']}) def get_all_tables(self): self.create_session_if_needed() @@ -155,7 +169,7 @@ def get_table(self, database_name, table_name): for t in self.get_tables_of_database(database_name): if t['name'] == table_name: return t - table = {} + return {} def get_field(self, database_name, table_name, field_name): table = self.get_table(database_name, table_name) @@ -171,7 +185,8 @@ def delete_session(self): self.query('DELETE', 'session', {'metabase-session-id': self.metabase_session}) self.metabase_session = None - def field_id2tablenameandfieldname(self, database_name, field_id): + def field_id2fullname(self, database_name, field_id): + # TODO: Support schemas, same table name in different schemas might cause issues if self.database_export is None: self.database_export = self.get_database(database_name, True) if not field_id: @@ -208,7 +223,15 @@ def metric_id2name(self, database_name, metric_id): return metric['name'] return '' - def field_tablenameandfieldname2field(self, database_name, table_name, field_name): + def dashboard_id2name(self, database_name, dashboard_id): + if self.dashboards_export is None: + self.dashboards_export = self.get_dashboards(database_name) + for dashboard in self.dashboards_export: + if dashboard['id'] == dashboard_id: + return dashboard['name'] + return None + + def field_fullname2id(self, database_name, table_name, field_name): if self.database_export is None: self.database_export = self.get_database(database_name, True) if not table_name or not field_name: @@ -243,27 +266,28 @@ def export_fields(self, database_name): table_name = table['name'] for field in table['fields']: field_id = field['fk_target_field_id'] - [fk_table, fk_field] = self.field_id2tablenameandfieldname(database_name, field_id) + [fk_table, fk_field] = self.field_id2fullname(database_name, field_id) if not field['semantic_type']: field['semantic_type'] = '' if not field['custom_position']: field['custom_position'] = '' result.append({ - 'table_name': table_name, 'field_name': field['name'], 'description': field['description'], - 'semantic_type': field['semantic_type'], - 'foreign_table': fk_table, 
'foreign_field': fk_field, - 'visibility_type': field['visibility_type'], 'has_field_values': field['has_field_values'], - 'custom_position': field['custom_position'], 'effective_type': field['effective_type'], - 'base_type': field['base_type'], 'database_type': field['database_type'], 'field_id': field['id'] - }) + 'table_name': table_name, 'field_name': field['name'], + 'description': field['description'], + 'semantic_type': field['semantic_type'], + 'foreign_table': fk_table, 'foreign_field': fk_field, + 'visibility_type': field['visibility_type'], 'has_field_values': field['has_field_values'], + 'custom_position': field['custom_position'], 'effective_type': field['effective_type'], + 'base_type': field['base_type'], 'database_type': field['database_type'], 'field_id': field['id'], + }) return result def export_fields_to_csv(self, database_name, dirname): export = self.export_fields(database_name) if not export: return - with open(dirname+"/fields.csv", 'w', newline = '') as csvfile: - my_writer = csv.writer(csvfile, delimiter = ',') + with open(dirname + "/fields.csv", 'w', newline='') as csvfile: + my_writer = csv.writer(csvfile, delimiter=',') need_header = True for row in export: if need_header: @@ -271,38 +295,65 @@ def export_fields_to_csv(self, database_name, dirname): need_header = False my_writer.writerow(row.values()) - def import_fields_from_csv(self, database_name, dirname): + def import_fields_from_csv(self, database_name, dirname, field_ids: list[str]): fields = [] - with open(dirname+"/fields.csv", newline='') as csvfile: + with open(dirname + "/fields.csv", newline='') as csvfile: reader = csv.DictReader(csvfile) for row in reader: + if field_ids and row['field_id'] not in field_ids: + continue fields.append(row) return self.update_fields(database_name, fields) + def process_field(self, database_name, field): + # Update the field and return the result + return self.update_field(database_name, field) + def update_fields(self, database_name, fields): output = [] - for f in fields: - output.append(output.append(self.update_field(database_name, f))) + + with Progress() as progress: + task = progress.add_task("[cyan]Updating fields...", total=len(fields)) + output.append(self.update_field(database_name, fields[0])) + progress.update(task, advance=1) + + with ThreadPoolExecutor() as executor: + futures = { + executor.submit(self.update_field, database_name, field): field + for field in fields[1:] + } + + for future in as_completed(futures): + field = futures[future] + try: + result = future.result() + output.append(result) + except Exception as e: + print(f"An error occurred while processing field '{field}': {e}") + finally: + progress.update(task, advance=1) + return output def update_field(self, database_name, field): - field_from_api = self.field_tablenameandfieldname2field(database_name, field['table_name'], field['field_name']) + field_from_api = self.field_fullname2id(database_name, field['table_name'], field['field_name']) if not field_from_api: return None - fk = self.field_tablenameandfieldname2field(database_name, field['foreign_table'], field['foreign_field']) + fk = self.field_fullname2id(database_name, field['foreign_table'], field['foreign_field']) field.pop('foreign_table') field.pop('foreign_field') data = {'id': str(field_from_api['id'])} - for k in field.keys(): + for k in sorted(field.keys()): if field[k]: data[k] = field[k] else: data[k] = None - if fk : + if fk: data['fk_target_field_id'] = fk['id'] - return self.query('PUT', 'field/'+data['id'], data) + 
return self.query('PUT', 'field/' + data['id'], data) def database_name2id(self, database_name): + # TODO: Optimize with cache self.create_session_if_needed() data = self.query('GET', 'database') if isinstance(data, list): @@ -315,12 +366,12 @@ def database_name2id(self, database_name): return None def get_snippets(self, database_name): - database_id = self.database_name2id(database_name) + _ = self.database_name2id(database_name) return self.query('GET', 'native-query-snippet') def get_cards(self, database_name): database_id = self.database_name2id(database_name) - return self.query('GET', 'card?f=database&model_id='+str(database_id)) + return self.query('GET', 'card?f=database&model_id=' + str(database_id)) def get_collections(self): self.create_session_if_needed() @@ -328,14 +379,14 @@ def get_collections(self): def get_dashboard(self, database_name, dashboard_name): dashboard_id = self.dashboard_name2id(dashboard_name, dashboard_name) - return self.query('GET', 'dashboard/'+str(dashboard_id)) + return self.query('GET', 'dashboard/' + str(dashboard_id)) def get_dashboards(self, database_name): database_id = self.database_name2id(database_name) dashbords_light = self.query('GET', 'dashboard') dashboards = [] for d in dashbords_light: - res = self.query('GET', 'dashboard/'+str(d['id'])) + res = self.query('GET', 'dashboard/' + str(d['id'])) good_db = True for c in res['ordered_cards']: if c['card'].get('database_id') and c['card'].get('database_id') != database_id: @@ -360,18 +411,13 @@ def dashboard_name2id(self, database_name, dashboard_name): self.dashboards_name2id = {} for d in self.get_dashboards(database_name): if self.dashboards_name2id.get(d['name']): - print("dashboard "+d['name']+" not unique (already registered with id "+str(self.dashboards_name2id.get(d['name']))+" and trying to create it with id "+str(d['id'])+")") + print("dashboard " + d['name'] + " not unique (already registered with id " + str( + self.dashboards_name2id.get(d['name'])) + " and trying to create it with id " + str( + d['id']) + ")") continue self.dashboards_name2id[d['name']] = d['id'] return self.dashboards_name2id.get(dashboard_name) - def dashboard_id2name(self, database_name, dashboard_id): - self.dashboard_name2id(database_name, "a") - for dname in self.dashboards_name2id.keys(): - if self.dashboards_name2id[dname] == dashboard_id: - return dname - return None - def snippet_name2id(self, database_name, snippet_name): if not self.snippets_name2id: for s in self.get_snippets(database_name): @@ -403,7 +449,7 @@ def collection_name2id_or_create_it(self, collection_name): self.create_collection(collection_name) return self.collection_name2id(collection_name) - def create_collection(self, collection_name, parent_collection_name = None, param_args = {}): + def create_collection(self, collection_name, parent_collection_name=None, param_args={}): self.create_session_if_needed() param = param_args.copy() param['name'] = collection_name @@ -417,7 +463,7 @@ def create_collection(self, collection_name, parent_collection_name = None, para cid = self.collection_name2id(collection_name) self.collections_name2id = {} if cid: - return self.query('PUT', 'collection/'+str(cid), param) + return self.query('PUT', 'collection/' + str(cid), param) return self.query('POST', 'collection', param) def convert_pcnames2id(self, database_name, collection_name, fieldname, pcnames): @@ -426,7 +472,7 @@ def convert_pcnames2id(self, database_name, collection_name, fieldname, pcnames) sep = pcnames.find('%', 1) if sep == -1: raise 
ValueError('Not a convertible value') - [new_k, names] = pcnames[1:sep], pcnames[sep+1:] + [new_k, names] = pcnames[1:sep], pcnames[sep + 1:] if new_k == 'JSONCONV': data = self.convert_names2ids(database_name, collection_name, json.loads(names)) return [json.dumps(data), None] @@ -441,23 +487,23 @@ def convert_pcnames2id(self, database_name, collection_name, fieldname, pcnames) if fieldname == 'pseudo_table_card_name': card_id = self.card_name2id(database_name, names) if not card_id: - raise ValueError('card_name '+names+' not found') - return [new_k, 'card__'+str(card_id)] + raise ValueError('card_name ' + names + ' not found') + return [new_k, 'card__' + str(card_id)] resplit = names.split('|') if len(resplit) == 3: metricid = self.metric_name2id(database_name, resplit[2]) if metricid: - return[new_k, metricid] - raise ValueError('metric not found: '+resplit[2]) + return [new_k, metricid] + raise ValueError('metric not found: ' + resplit[2]) if len(resplit) == 2: - field = self.field_tablenameandfieldname2field(database_name, resplit[0], resplit[1]) + field = self.field_fullname2id(database_name, resplit[0], resplit[1]) if field: - return[new_k, field['id']] - raise ValueError('field not found: '+resplit[0]+'/'+resplit[1]) + return [new_k, field['id']] + raise ValueError('field not found: ' + resplit[0] + '/' + resplit[1]) if len(resplit) == 1: table_id = self.table_name2id(database_name, resplit[0]) return [new_k, table_id] - raise ValueError('Unknown '+str(fieldname)+' %'+str(new_k)+'% type') + raise ValueError('Unknown ' + str(fieldname) + ' %' + str(new_k) + '% type') def convert_names2ids(self, database_name, collection_name, obj): obj_res = obj @@ -471,7 +517,7 @@ def convert_names2ids(self, database_name, collection_name, obj): obj_res[i] = self.convert_names2ids(database_name, collection_name, obj[i]) elif isinstance(obj, dict): obj_res = obj.copy() - for k in obj.keys(): + for k in sorted(obj.keys()): if k[0] == '%': try: [new_k, value] = self.convert_pcnames2id(database_name, collection_name, None, k) @@ -479,7 +525,8 @@ def convert_names2ids(self, database_name, collection_name, obj): obj_res[new_k] = self.convert_names2ids(database_name, collection_name, obj[k]) except ValueError: obj_res[k] = obj[k] - elif k in ['field_name', 'table_name', 'database_name', 'card_name', 'pseudo_table_card_name', 'dashboard_name', 'collection_name'] and obj[k][0] == '%': + elif k in ['field_name', 'table_name', 'database_name', 'card_name', 'pseudo_table_card_name', + 'dashboard_name', 'collection_name'] and obj[k][0] == '%': [new_k, value] = self.convert_pcnames2id(database_name, collection_name, k, obj[k]) obj_res.pop(k) obj_res[new_k] = value @@ -494,11 +541,11 @@ def convert_ids2names(self, database_name, obj, previous_key): if len(obj): try: if obj[0] == 'field': - [t, f] = self.field_id2tablenameandfieldname(database_name, int(obj_res[1])) - obj_res[1] = '%%'+t+'|'+f + [t, f] = self.field_id2fullname(database_name, int(obj_res[1])) + obj_res[1] = '%%' + t + '|' + f elif obj[0] == 'metric': m = self.metric_id2name(database_name, int(obj_res[1])) - obj_res[1] = '%%||'+m + obj_res[1] = '%%||' + m else: for i in range(len(obj)): obj_res[i] = self.convert_ids2names(database_name, obj[i], previous_key) @@ -506,31 +553,31 @@ def convert_ids2names(self, database_name, obj, previous_key): obj_res[1] = obj[1] elif isinstance(obj, dict): obj_res = obj.copy() - for k in obj.keys(): + for k in sorted(obj.keys()): if k == 'collection': obj_res.pop(k) continue if isinstance(obj[k], dict) or 
isinstance(obj[k], list): k_previous = previous_key k2int = None - #Cas de clé d'un dictionnaire qui sont les id de fields + # Cas de clé d'un dictionnaire qui sont les id de fields try: k2int = int(k) k_name = k if k2int: - [t, f] = self.field_id2tablenameandfieldname(database_name, k2int) - k_name = '%%'+t+'|'+f + [t, f] = self.field_id2fullname(database_name, k2int) + k_name = '%%' + t + '|' + f except ValueError: k_name = k k_previous = k - #Cas de clé du dictionnaire qui sont du json encodé + # Cas de clé du dictionnaire qui sont du json encodé if not k2int: try: k_data = json.loads(k) - if k_data[0] == 'ref' and k_data[1][0] == 'field': - [t, f] = self.field_id2tablenameandfieldname(database_name, int(k_data[1][1])) - k_data[1][1] = '%%'+t+'|'+f - k_name = '%JSONCONV%'+json.dumps(k_data) + if k_data[0] in ('ref', 'dimension') and k_data[1][0] == 'field': + [t, f] = self.field_id2fullname(database_name, int(k_data[1][1])) + k_data[1][1] = '%%' + t + '|' + f + k_name = '%JSONCONV%' + json.dumps(k_data) else: k_name = k except json.decoder.JSONDecodeError: @@ -542,45 +589,60 @@ def convert_ids2names(self, database_name, obj, previous_key): if k in ['field_id'] or (k == 'id' and previous_key in ['result_metadata', 'param_fields']): id = obj_res.pop(k) if id: - [t, f] = self.field_id2tablenameandfieldname(database_name, int(id)) - obj_res['field_name'] = '%'+k+'%'+t+'|'+f + [t, f] = self.field_id2fullname(database_name, int(id)) + obj_res['field_name'] = '%' + k + '%' + t + '|' + f elif k in ['table_id', 'source-table']: id = obj_res.pop(k) if id: try: t = self.table_id2name(database_name, int(id)) - obj_res['table_name'] = '%'+k+'%'+t + obj_res['table_name'] = '%' + k + '%' + t except ValueError: if id[0:6] == 'card__': c = self.card_id2name(database_name, int(id[6:])) - obj_res['pseudo_table_card_name'] = '%'+k+'%'+c - elif k == 'card_id': + obj_res['pseudo_table_card_name'] = '%' + k + '%' + c + elif k in ['card_id', 'targetId']: id = obj_res.pop(k) if id: n = self.card_id2name(database_name, int(id)) - obj_res['card_name'] = '%'+k+'%'+n + obj_res['card_name'] = '%' + k + '%' + n elif k in ['database_id', 'database']: if obj.get(k): obj_res.pop(k) - obj_res['database_name'] = '%'+k+'%' + obj_res['database_name'] = '%' + k + '%' elif k == 'collection_id': obj_res.pop(k) - obj_res['collection_name'] = '%'+k+'%' - elif k == 'dashboard_id': + obj_res['collection_name'] = '%' + k + '%' + elif k == 'dashboard_id' or (k == 'id' and previous_key in ['entity']): id = obj_res.pop(k) name = self.dashboard_id2name(database_name, id) if not name: - raise Exception("no name for dashboard "+str(id)) - obj_res['dashboard_name'] = '%'+k+'%'+name + raise Exception("no name for dashboard " + str(id)) + obj_res['dashboard_name'] = '%' + k + '%' + name + elif k == 'id' and isinstance(obj_res[k], str) and re.match( + r'^\["dimension",\["field",\d+,null\]\]$', obj_res[k]): + try: + k_data = json.loads(obj_res[k]) + if k_data[0] in ('ref', 'dimension') and k_data[1][0] == 'field': + [t, f] = self.field_id2fullname(database_name, int(k_data[1][1])) + k_data[1][1] = '%%' + t + '|' + f + obj_res[k] = '%JSONCONV%' + json.dumps(k_data) + except json.decoder.JSONDecodeError: + pass + else: + obj_res[k] = self.convert_ids2names(database_name, obj[k], previous_key) return obj_res - def export_dashboards_to_json(self, database_name, dirname): + def export_dashboards_to_json(self, database_name, dirname, raw: bool = False): export = self.get_dashboards(database_name) for dash in export: if len(dash['ordered_cards']): 
dash = self.clean_object(dash) - with open(dirname+"/dashboard_"+dash['name'].replace('/', '')+".json", 'w', newline = '') as jsonfile: - jsonfile.write(json.dumps(self.convert_ids2names(database_name, dash, None))) + with open(dirname + "/dashboard_" + dash['name'].replace('/', '') + ".json", 'w', + newline='') as jsonfile: + if not raw: + dash = self.convert_ids2names(database_name, dash, None) + jsonfile.write(json.dumps(dash)) def clean_object(self, object): if 'updated_at' in object: @@ -595,7 +657,8 @@ def clean_object(self, object): del object['last-edit-info'] if 'result_metadata' in object and object['result_metadata']: for i in range(0, len(object['result_metadata'])): - del object['result_metadata'][i]['fingerprint'] + if 'fingerprint' in object['result_metadata'][i]: + del object['result_metadata'][i]['fingerprint'] if 'ordered_cards' in object: for c in object['ordered_cards']: c = self.clean_object(c) @@ -613,31 +676,37 @@ def clean_object(self, object): del object['public_uuid'] return object - def export_snippet_to_json(self, database_name, dirname): + def export_snippet_to_json(self, database_name, dirname, raw: bool = False): export = self.get_snippets(database_name) for sn in export: sn = self.clean_object(sn) - with open(dirname+"/snippet_"+sn['name'].replace('/', '')+".json", 'w', newline = '') as jsonfile: - jsonfile.write(json.dumps(self.convert_ids2names(database_name, sn, None))) + with open(dirname + "/snippet_" + sn['name'].replace('/', '') + ".json", 'w', newline='') as jsonfile: + if not raw: + sn = self.convert_ids2names(database_name, sn, None) + jsonfile.write(json.dumps(sn)) - def export_cards_to_json(self, database_name, dirname): + def export_cards_to_json(self, database_name, dirname, raw: bool = False): export = self.get_cards(database_name) for card in export: card = self.clean_object(card) - with open(dirname+"/card_"+card['name'].replace('/', '')+".json", 'w', newline = '') as jsonfile: - jsonfile.write(json.dumps(self.convert_ids2names(database_name, card, None))) + with open(dirname + "/card_" + card['name'].replace('/', '') + ".json", 'w', newline='') as jsonfile: + if not raw: + card = self.convert_ids2names(database_name, card, None) + jsonfile.write(json.dumps(card)) - def export_metrics_to_json(self, database_name, dirname): + def export_metrics_to_json(self, database_name, dirname, raw: bool = False): export = self.get_metrics(database_name) for metric in export: metric = self.clean_object(metric) - with open(dirname+"/metric_"+metric['name'].replace('/', '')+".json", 'w', newline = '') as jsonfile: - jsonfile.write(json.dumps(self.convert_ids2names(database_name, metric, None))) + with open(dirname + "/metric_" + metric['name'].replace('/', '') + ".json", 'w', newline='') as jsonfile: + if not raw: + metric = self.convert_ids2names(database_name, metric, None) + jsonfile.write(json.dumps(metric)) def dashboard_import(self, database_name, dash_from_json): dashid = self.dashboard_name2id(database_name, dash_from_json['name']) if dashid: - return self.query('PUT', 'dashboard/'+str(dashid), dash_from_json) + return self.query('PUT', 'dashboard/' + str(dashid), dash_from_json) self.dashboards_name2id = None return self.query('POST', 'dashboard', dash_from_json) @@ -646,7 +715,7 @@ def snippet_import(self, database_name, snippet_from_json): snippet_from_json['description'] = None snippetid = self.snippet_name2id(database_name, snippet_from_json['name']) if snippetid: - return self.query('PUT', 'native-query-snippet/'+str(snippetid), 
snippet_from_json) + return self.query('PUT', 'native-query-snippet/' + str(snippetid), snippet_from_json) self.snippets_name2id = {} return self.query('POST', 'native-query-snippet', snippet_from_json) @@ -655,23 +724,40 @@ def card_import(self, database_name, card_from_json): card_from_json['description'] = None cardid = self.card_name2id(database_name, card_from_json['name']) if cardid: - return self.query('PUT', 'card/'+str(cardid), card_from_json) + return self.query('PUT', 'card/' + str(cardid), card_from_json) self.cards_name2id = {} return self.query('POST', 'card', card_from_json) def metric_import(self, database_name, metric_from_json): metricid = self.metric_name2id(database_name, metric_from_json['name']) - metric_from_json['revision_message'] = "Import du "+datetime.datetime.now().isoformat() + metric_from_json['revision_message'] = "Import du " + datetime.datetime.now().isoformat() if metricid: - return self.query('PUT', 'metric/'+str(metricid), metric_from_json) + return self.query('PUT', 'metric/' + str(metricid), metric_from_json) self.metrics_name2id = {} return self.query('POST', 'metric', metric_from_json) def dashboard_delete_all_cards(self, database_name, dashboard_name): dash = self.get_dashboard(database_name, dashboard_name) res = [] - for c in dash['ordered_cards']: - res.append(self.query('DELETE', 'dashboard/'+str(dash['id'])+'/cards?dashcardId='+str(c['id']))) + + # Define a function to handle the query deletion + def delete_card(card): + return self.query('DELETE', 'dashboard/' + str(dash['id']) + '/cards?dashcardId=' + str(card['id'])) + + with Progress() as progress: + task = progress.add_task(f"[cyan]Deleting cards from dashboard {dash['name']}...", + total=len(dash['ordered_cards'])) + + with ThreadPoolExecutor() as executor: + # Submit delete_card function for each card in dash['ordered_cards'] + futures = [executor.submit(delete_card, card) for card in dash['ordered_cards']] + + # Collect the results as tasks complete + for future in as_completed(futures): + result = future.result() + res.append(result) + progress.update(task, advance=1) + return res def dashboard_import_card(self, database_name, dashboard_name, ordered_card_from_json): @@ -680,9 +766,9 @@ def dashboard_import_card(self, database_name, dashboard_name, ordered_card_from if cardid: ordered_card_from_json['cardId'] = cardid ordered_card_from_json.pop('card') - return self.query('POST', 'dashboard/'+str(dashid)+'/cards', ordered_card_from_json) + return self.query('POST', 'dashboard/' + str(dashid) + '/cards', ordered_card_from_json) - def import_snippets_from_json(self, database_name, dirname, collection_name = None): + def import_snippets_from_json(self, database_name, dirname, collection_name=None): res = [] jsondata = self.get_json_data('snippet_', dirname) if len(jsondata): @@ -694,51 +780,68 @@ def import_snippets_from_json(self, database_name, dirname, collection_name = No res.append(self.snippet_import(database_name, data)) except ValueError as e: if not errors: - errors = ValueError(snippet['name']+": "+ str(e)) + errors = ValueError(snippet['name'] + ": " + str(e)) else: - errors = ValueError(snippet['name']+": "+str(errors) + " ;\n" + str(e)) + errors = ValueError(snippet['name'] + ": " + str(errors) + " ;\n" + str(e)) if errors: raise errors return res - def import_cards_from_json(self, database_name, dirname, collection_name = None): + def import_cards_from_json(self, database_name, dirname, collection_name=None): res = [] - jsondata = self.get_json_data('card_', dirname) - for 
dash in self.get_json_data('dashboard_', dirname): - for embed_card in dash['ordered_cards']: + cards = self.get_json_data('card_', dirname) + for dash in self.get_json_data('dashboard_', dirname)[:1]: + for embed_card in dash['ordered_cards'][:1]: if embed_card and embed_card['card']: - jsondata.append(embed_card['card']) - if len(jsondata): + cards.append(embed_card['card']) + if len(cards): errors = None - for card in jsondata: + + def import_card(card): try: - res.append(self.card_import(database_name, self.convert_names2ids(database_name, collection_name, card))) - except ValueError as e: + return self.card_import(database_name, self.convert_names2ids(database_name, collection_name, card)) + except (ValueError, ConnectionError) as e: + nonlocal errors if not errors: - errors = ValueError(card['name']+": "+ str(e)) + errors = ValueError(card['name'] + ": " + str(e)) else: - errors = ValueError(card['name']+": "+str(errors) + " ;\n" + str(e)) - if errors: - raise errors + errors = ValueError(card['name'] + ": " + str(errors) + " ;\n" + str(e)) + + with Progress() as progress: + task = progress.add_task("[cyan]Updating cards...", total=len(cards)) + import_card(cards[0]) + progress.update(task, advance=1) + + # Create a thread pool + with ThreadPoolExecutor() as executor: + # Submit import_card function for each card in jsondata + futures = [executor.submit(import_card, card) for card in cards[1:]] + + # Update progress as tasks complete + for future in as_completed(futures): + progress.update(task, advance=1) + + # if errors: + # raise errors return res def importfiles_from_dirname(self, prefix, dirname): files = [] for file in os.listdir(dirname): if file.find(prefix) > -1: - files.append(dirname+'/'+file) + files.append(dirname + '/' + file) return files def get_json_data(self, prefix, dirname): jsondata = [] for filename in self.importfiles_from_dirname(prefix, dirname): - with open(filename, 'r', newline = '') as jsonfile: + with open(filename, 'r', newline='') as jsonfile: data = json.load(jsonfile) if len(data): jsondata.append(data) return jsondata - def import_metrics_from_json(self, database_name, dirname, collection_name = None): + def import_metrics_from_json(self, database_name, dirname, collection_name=None): res = [] jsondata = self.get_json_data('metric_', dirname) if jsondata: @@ -748,14 +851,14 @@ def import_metrics_from_json(self, database_name, dirname, collection_name = Non res.append(self.metric_import(database_name, self.convert_names2ids(database_name, None, metric))) except ValueError as e: if not errors: - errors = ValueError(metric['name']+": "+ str(e)) + errors = ValueError(metric['name'] + ": " + str(e)) else: - errors = ValueError(metric['name']+": "+str(errors) + " ;\n" + str(e)) + errors = ValueError(metric['name'] + ": " + str(errors) + " ;\n" + str(e)) if errors: raise errors return res - def import_dashboards_from_json(self, database_name, dirname, collection_name = None): + def import_dashboards_from_json(self, database_name, dirname, collection_name=None): res = [[], [], []] jsondata = self.get_json_data('dashboard_', dirname) if len(jsondata): @@ -763,8 +866,14 @@ def import_dashboards_from_json(self, database_name, dirname, collection_name = for dash in jsondata: res[0].append(self.dashboard_import(database_name, dash)) self.dashboard_delete_all_cards(database_name, dash['name']) - for ocard in dash['ordered_cards']: - res[1].append(self.dashboard_import_card(database_name, dash['name'], ocard)) + + with Progress() as progress: + task = 
progress.add_task(f"[cyan]Adding cards to dashboard {dash['name']}...", + total=len(dash['ordered_cards'])) + + for ocard in dash['ordered_cards']: + res[1].append(self.dashboard_import_card(database_name, dash['name'], ocard)) + progress.update(task, advance=1) return res def get_users(self): @@ -772,7 +881,7 @@ def get_users(self): users = self.query('GET', 'user?status=all') try: return users['data'] - except: + except KeyError: return None def user_email2id(self, user_email): @@ -781,13 +890,13 @@ def user_email2id(self, user_email): return u['id'] return None - def create_user(self, email, password, extra = {}): + def create_user(self, email, password, extra={}): self.create_session_if_needed() extra['email'] = email extra['password'] = password user_id = self.user_email2id(email) if (user_id): - return self.query('PUT', 'user/'+str(user_id), extra) + return self.query('PUT', 'user/' + str(user_id), extra) return self.query('POST', 'user', extra) def user_password(self, email, password): @@ -797,8 +906,8 @@ def user_password(self, email, password): data['password'] = password user_id = self.user_email2id(email) if not user_id: - raise ValueError('known user '+email) - return self.query('PUT', 'user/'+str(user_id)+'/password', data) + raise ValueError('known user ' + email) + return self.query('PUT', 'user/' + str(user_id) + '/password', data) def create_group(self, group_name): self.create_session_if_needed() @@ -806,11 +915,7 @@ def create_group(self, group_name): def get_groups(self): self.create_session_if_needed() - groups = self.query('GET', 'permissions/group') - try: - return groups - except: - return None + return self.query('GET', 'permissions/group') def group_name2id(self, group_name): for g in self.get_groups(): @@ -845,10 +950,10 @@ def permission_set_database(self, group_name, database_name, schema_data, native else: group_id = self.group_name2id(group_name) if not group_id: - raise ValueError("group "+group_name+" not found") + raise ValueError("group " + group_name + " not found") database_id = self.database_name2id(database_name) if not database_id: - raise ValueError("database "+database_name+" not found") + raise ValueError("database " + database_name + " not found") data = self.permission_get_database() if not data['groups'].get(group_id): data['groups'][group_id] = {} @@ -869,20 +974,20 @@ def permission_get_collection(self): return self.query('GET', 'collection/graph') def permission_set_collection(self, group_name, collection_name, right): - if not right in ['read', 'write', 'none']: + if right not in ['read', 'write', 'none']: raise ValueError('right not read/write/none') if group_name == 'all': group_id = '1' else: group_id = self.group_name2id(group_name) if not group_id: - raise ValueError("group "+group_name+" not found") + raise ValueError("group " + group_name + " not found") if collection_name == 'root': collection_id = 'root' else: collection_id = self.collection_name2id(collection_name) if not collection_id: - raise ValueError("collection "+collection_name+" not found") + raise ValueError("collection " + collection_name + " not found") data = self.permission_get_collection() if not data['groups'].get(group_id): data['groups'][group_id] = {} diff --git a/metabase_export.py b/metabase_export.py index 1cfd9de..d6ff2b9 100644 --- a/metabase_export.py +++ b/metabase_export.py @@ -1,22 +1,69 @@ +from pathlib import Path + +from typer import Option, Typer +from typing_extensions import Annotated + import metabase -import sys -import os - -metabase_apiurl = 
sys.argv[1] -metabase_username = sys.argv[2] -metabase_password = sys.argv[3] -metabase_base = sys.argv[4] -metabase_exportdir = sys.argv[5] - -ametabase = metabase.MetabaseApi(metabase_apiurl, metabase_username, metabase_password) -#ametabase.debug = True - -try: - os.mkdir("export") -except: - None - -ametabase.export_fields_to_csv(metabase_base, metabase_exportdir) -ametabase.export_cards_to_json(metabase_base, metabase_exportdir) -ametabase.export_dashboards_to_json(metabase_base, metabase_exportdir) -ametabase.export_metrics_to_json(metabase_base, metabase_exportdir) + +app = Typer() + +db_name: str +data_dir: Path +raw_mode: bool +metabaseAPI: metabase.MetabaseApi + + +@app.command('all') +def export_all(): + fields() + cards() + dashboards() + metrics() + + +@app.command() +def fields(): + metabaseAPI.export_fields_to_csv(db_name, str(data_dir)) + + +@app.command() +def metrics(): + metabaseAPI.export_metrics_to_json(db_name, str(data_dir), raw_mode) + + +@app.command() +def cards(): + metabaseAPI.export_cards_to_json(db_name, str(data_dir), raw_mode) + + +@app.command() +def dashboards(): + metabaseAPI.export_dashboards_to_json(db_name, str(data_dir), raw_mode) + + +@app.callback() +def common(api_url: Annotated[str, Option(envvar='MB_EXPORT_HOST')], + username: Annotated[str, Option(envvar='MB_EXPORT_USERNAME')], + password: Annotated[str, Option(envvar='MB_EXPORT_PASSWORD')], + database: Annotated[str, Option(envvar='MB_EXPORT_DB')], + data: Annotated[Path, Option(envvar='MB_DATA_DIR')], + verbose: bool = False, + dry_run: bool = False, + raw: bool = False): + global db_name, data_dir, metabaseAPI, raw_mode + + metabaseAPI = metabase.MetabaseApi(api_url, username, password, verbose, dry_run) + + db_name = database + data_dir = data + raw_mode = raw + + data_dir.mkdir(exist_ok=True) + + +def main(): + app() + + +if __name__ == '__main__': + main() diff --git a/metabase_full_import.py b/metabase_full_import.py index 37635a9..6259472 100644 --- a/metabase_full_import.py +++ b/metabase_full_import.py @@ -1,6 +1,7 @@ -import metabase import sys +import metabase + metabase_apiurl = sys.argv[1] metabase_username = sys.argv[2] metabase_password = sys.argv[3] @@ -11,9 +12,9 @@ pass_to_create = sys.argv[8] ametabase = metabase.MetabaseApi(metabase_apiurl, metabase_username, metabase_password) -#ametabase.debug = True +# ametabase.debug = True -#ametabase.delete_database('base') +# ametabase.delete_database('base') # ametabase.create_database(metabase_basename, 'sqlite', {'db': sqlite_database_path_to_create}) diff --git a/metabase_import.py b/metabase_import.py index 578b034..256d04f 100644 --- a/metabase_import.py +++ b/metabase_import.py @@ -1,16 +1,65 @@ +from pathlib import Path +from typing import Optional + +from typer import Argument, Option, Typer +from typing_extensions import Annotated + import metabase -import sys -metabase_apiurl = sys.argv[1] -metabase_username = sys.argv[2] -metabase_password = sys.argv[3] -metabase_base = sys.argv[4] -metabase_exportdir = sys.argv[5] +app = Typer() + +db_name: str +data_dir: Path +metabaseAPI: metabase.MetabaseApi + + +@app.command('all') +def import_all(collection: str): + fields() + metrics() + cards(collection) + dashboards(collection) + + +@app.command() +def fields(field_ids: Annotated[Optional[list[str]], Argument()] = None): + metabaseAPI.import_fields_from_csv(db_name, str(data_dir), field_ids) + + +@app.command() +def metrics(): + metabaseAPI.import_metrics_from_json(db_name, str(data_dir)) + + +@app.command() +def cards(collection: 
str): + metabaseAPI.import_cards_from_json(db_name, str(data_dir), collection) + + +@app.command() +def dashboards(collection: str): + metabaseAPI.import_dashboards_from_json(db_name, str(data_dir), collection) + + +@app.callback() +def common(api_url: Annotated[str, Option(envvar='MB_IMPORT_HOST')], + username: Annotated[str, Option(envvar='MB_IMPORT_USERNAME')], + password: Annotated[str, Option(envvar='MB_IMPORT_PASSWORD')], + database: Annotated[str, Option(envvar='MB_IMPORT_DB')], + data: Annotated[Path, Option(envvar='MB_DATA_DIR')], + verbose: bool = False, + dry_run: bool = False): + global db_name, data_dir, metabaseAPI + + metabaseAPI = metabase.MetabaseApi(api_url, username, password, verbose, dry_run) + + db_name = database + data_dir = data + + +def main(): + app() -ametabase = metabase.MetabaseApi(metabase_apiurl, metabase_username, metabase_password) -#ametabase.debug = True -ametabase.import_fields_from_csv(metabase_base, metabase_exportdir) -ametabase.import_metrics_from_json(metabase_base, metabase_exportdir) -ametabase.import_cards_from_json(metabase_base, metabase_exportdir) -ametabase.import_dashboards_from_json(metabase_base, metabase_exportdir) +if __name__ == '__main__': + main() diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..777a8db --- /dev/null +++ b/poetry.lock @@ -0,0 +1,448 @@ +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.2.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "main" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = 
"charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, +] + +[[package]] +name = "click" +version = "8.1.6" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, + {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.1.2" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, + {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "pluggy" +version = "1.2.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pycodestyle" +version = "2.11.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, + {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, +] + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = 
"sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pygments" +version = "2.15.1" +description = "Pygments is a syntax highlighting package written in Python." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, + {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pytest" +version = "7.4.0" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "13.5.2" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "main" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, + {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typer" +version = "0.9.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, + {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, +] + +[package.dependencies] +click = ">=7.1.1,<9.0.0" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "urllib3" +version = "2.0.4" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, + {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "1a1511d58cb3019560565b0894f6039d66f51ad82e66b15fb4350b0f4969321c" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..ad17693 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,29 @@ +[tool.poetry] +name = "metabase_export_import" +version = "0.1.0" +description = "A python library allowing to import and export metabase database configuration from a metabase API" + +authors = [ + "Tangui Morlier ", +] + +repository = "https://github.com/24eme/metabase_export_import" +readme = "README.md" + +keywords = ["metabase", "metabase-api", "metabase-python"] + +[tool.poetry.dependencies] +python = "^3.10" + +requests = "^2.31.0" +typer = "^0.9.0" +rich = "^13.5.2" + +[tool.poetry.group.dev.dependencies] +pytest = "^7.4.0" +flake8 = "^6.1.0" +isort = "^5.12.0" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/readme.md b/readme.md deleted file mode 100644 index b321b6c..0000000 --- a/readme.md +++ /dev/null @@ -1,63 +0,0 @@ -# Metabase Export/Import - -This python library allows to export and import a community version instance of Metabase - -## Example scripts - -Two scripts are 
provided to import and export fields, cards and dashboards of a specific database configuration of metabase : - - python3 metabase_export.py http://localhost:3000/api/ my_user my_password my_database - -The script produces 3 files for each exported elements (the name of the database is user as prefix) : `my_database_fields_exported.csv`, `my_database_cards_exported.json` and `my_database_dashboard_exported.json` - - python3 metabase_import.py http://localhost:3000/api/ my_user my_password my_database - -The script imports from 3 files, one for each elements : `my_database_fields_forimport.csv`, `my_database_cards_forimport.json` and `my_database_dashboard_forimport.json` - -## Library calls - -### database creation/deletion - - import metabase - - #connect to metabase - ametabase = metabase.MetabaseApi("http://localhost:3000/api/", "metabase_username", "metabase_password") - - #add a sqlite database located at /path/to/database.sqlite. The metabase associated name is my_database - ametabase.create_database("my_database", 'sqlite', {"db":"/path/to/database.sqlite"}) - - #ametabase.delete_database('my_database') - -### users and permisssions - - ametabase.create_user("user@example.org", "the_password", {'first_name': 'John', 'last_name': 'Doe'}) - - #Add a group and associate it with our new user - ametabase.membership_add('user@example.org', 'a_group') - - #allow read data and create interraction with my_database for users members of our new group (a_group) - ametabase.permission_set_database('a_group', 'my_database', True, True) - -### collections and permissions - - #create a collection and its sub collection - ametabase.create_collection('sub_collection', 'main_collection') - - #allow write right on the new collections to the membres of a_group - ametabase.permission_set_collection('main_collection', 'a_group', 'write') - ametabase.permission_set_collection('sub_collection', 'a_group', 'write') - -### schema - - #export and import the schema of fields - ametabase.export_fields_to_csv('my_database', 'my_database_fields.csv') - ametabase.import_fields_from_csv('my_database', 'my_database_fields.csv') - -### cards and dashboards - - ametabase.export_cards_to_json('my_database', 'my_database_cards.json') - ametabase.export_dashboards_to_json('my_database', 'my_database_dashboard.json') - - ametabase.import_cards_from_json('my_database', 'my_database_cards.json') - ametabase.import_dashboards_from_json('my_database', 'my_database_dashboard.json') - diff --git a/test_parser.py b/test_parser.py new file mode 100644 index 0000000..54e2e8c --- /dev/null +++ b/test_parser.py @@ -0,0 +1,252 @@ +from metabase import MetabaseApi + +# from json import dumps + +TEST_TABLE_NAME = 'DUMMY TABLE' +TEST_TABLE_ID = 111 +TEST_FIELD_NAME = 'DUMMY FIELD' +TEST_FIELD_ID = 99999 +TEST_CARD_NAME = 'DUMMY CARD' +TEST_CARD_ID = 2222 +TEST_DASHBOARD_NAME = 'DUMMY DASHBOARD' +TEST_DASHBOARD_ID = 3333 + +TEST_DB = { + "tables": [ + { + "name": TEST_TABLE_NAME, + "id": TEST_TABLE_ID, + "fields": [ + { + "id": TEST_FIELD_ID, + "name": TEST_FIELD_NAME + } + ] + } + ] +} + +TEST_CARDS = [ + { + "name": TEST_CARD_NAME, + "id": TEST_CARD_ID, + } +] + +TEST_DASHBOARDS = [ + { + "name": TEST_DASHBOARD_NAME, + "id": TEST_DASHBOARD_ID, + } +] + + +def create_test_api(): + api = MetabaseApi('', '', '', debug=True, dry_run=True) + api.database_export = TEST_DB + api.cards_export = TEST_CARDS + api.dashboards_export = TEST_DASHBOARDS + + api.cards_name2id = {} + for card in TEST_CARDS: + api.cards_name2id[card["name"]] = card["id"] 
+ + api.dashboards_name2id = {} + for dashboard in TEST_DASHBOARDS: + api.dashboards_name2id[dashboard["name"]] = dashboard["id"] + + return api + + +def test_params_field_field_key(): + obj = { + "param_fields": { + f"{TEST_FIELD_ID}": { + "id": TEST_FIELD_ID, + "table_id": TEST_TABLE_ID, + }, + } + } + + expected = { + "param_fields": { + f"%%{TEST_TABLE_NAME}|{TEST_FIELD_NAME}": { + "field_name": f"%id%{TEST_TABLE_NAME}|{TEST_FIELD_NAME}", + "table_name": f"%table_id%{TEST_TABLE_NAME}", + }, + } + } + + api = create_test_api() + encoded = api.convert_ids2names('', obj, None) + # print(dumps(encoded, indent=4)) + assert encoded == expected + + # decoded = api.convert_names2ids('', '', encoded) + # assert obj == decoded + + +def test_parameter_mapping_field_array(): + obj = { + "ordered_cards": [ + { + "parameter_mappings": [ + { + "parameter_id": "8f8066f5", + "card_id": TEST_CARD_ID, + "target": [ + "dimension", + [ + "field", + TEST_FIELD_ID, + None + ] + ] + } + ], + }, + ], + } + + expected = { + "ordered_cards": [ + { + "parameter_mappings": [ + { + "parameter_id": "8f8066f5", + "card_name": f"%card_id%{TEST_CARD_NAME}", + "target": [ + "dimension", + [ + "field", + f"%%{TEST_TABLE_NAME}|{TEST_FIELD_NAME}", + None + ] + ] + } + ], + }, + ], + } + + api = create_test_api() + encoded = api.convert_ids2names('', obj, None) + # print(dumps(encoded, indent=4)) + assert encoded == expected + + decoded = api.convert_names2ids('', '', encoded) + assert obj == decoded + + +def test_json_keys_and_values(): + obj = { + "ordered_cards": [ + { + "visualization_settings": { + "click_behavior": { + "parameterMapping": { + f"[\"dimension\",[\"field\",{TEST_FIELD_ID},null]]": { + "target": { + "type": "dimension", + "id": f"[\"dimension\",[\"field\",{TEST_FIELD_ID},null]]", + "dimension": [ + "dimension", + [ + "field", + TEST_FIELD_ID, + None + ] + ] + }, + "id": f"[\"dimension\",[\"field\",{TEST_FIELD_ID},null]]" + } + }, + "targetId": TEST_CARD_ID, + "linkType": "question", + "type": "link" + } + }, + }, + ] + } + + expected = { + "ordered_cards": [ + { + "visualization_settings": { + "click_behavior": { + "parameterMapping": { + f"%JSONCONV%[\"dimension\", [\"field\", \"%%{TEST_TABLE_NAME}|{TEST_FIELD_NAME}\", null]]": { + "target": { + "type": "dimension", + "id": f"%JSONCONV%[\"dimension\", [\"field\", \"%%{TEST_TABLE_NAME}|{TEST_FIELD_NAME}\", null]]", + "dimension": [ + "dimension", + [ + "field", + f"%%{TEST_TABLE_NAME}|{TEST_FIELD_NAME}", + None + ] + ] + }, + "id": f"%JSONCONV%[\"dimension\", [\"field\", \"%%{TEST_TABLE_NAME}|{TEST_FIELD_NAME}\", null]]" + } + }, + "card_name": f"%targetId%{TEST_CARD_NAME}", + "linkType": "question", + "type": "link" + } + }, + }, + ] + } + + api = create_test_api() + encoded = api.convert_ids2names('', obj, None) + # print(dumps(encoded, indent=4)) + assert encoded == expected + + decoded = api.convert_names2ids('', '', encoded) + assert obj == decoded + + +def test_link_dashboard(): + obj = { + "ordered_cards": [ + { + "visualization_settings": { + "link": { + "entity": { + "model": "dashboard", + "id": TEST_DASHBOARD_ID, + } + } + }, + "dashboard_id": TEST_DASHBOARD_ID, + }, + ], + } + + expected = { + "ordered_cards": [ + { + "visualization_settings": { + "link": { + "entity": { + "model": "dashboard", + "dashboard_name": f"%id%{TEST_DASHBOARD_NAME}", + } + } + }, + "dashboard_name": f"%dashboard_id%{TEST_DASHBOARD_NAME}", + }, + ], + } + + api = create_test_api() + encoded = api.convert_ids2names('', obj, None) + # print(dumps(encoded, indent=4)) + 
assert encoded == expected + + decoded = api.convert_names2ids('', '', encoded) + assert obj == decoded
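
For reference, below is a minimal sketch of the ID-to-name round trip that these tests assert. It reuses the stubbed fixtures from `create_test_api()`; the empty host/credentials and the trimmed `ordered_cards` payload (a card entry carrying only `dashboard_id`) are assumptions for illustration, not fixtures taken from `test_parser.py`.

```python
from metabase import MetabaseApi

# debug/dry_run mirror the test setup and keep the client from issuing real requests.
api = MetabaseApi('', '', '', debug=True, dry_run=True)

# Stub the exported metadata that the converters resolve IDs against,
# mirroring the fixtures built by create_test_api().
api.database_export = {
    "tables": [
        {"name": "DUMMY TABLE", "id": 111,
         "fields": [{"id": 99999, "name": "DUMMY FIELD"}]}
    ]
}
api.cards_export = [{"name": "DUMMY CARD", "id": 2222}]
api.dashboards_export = [{"name": "DUMMY DASHBOARD", "id": 3333}]
api.cards_name2id = {"DUMMY CARD": 2222}
api.dashboards_name2id = {"DUMMY DASHBOARD": 3333}

# A trimmed dashboard payload (assumed shape): encode IDs to portable names,
# then decode back and check that the round trip is lossless.
obj = {"ordered_cards": [{"dashboard_id": 3333}]}
encoded = api.convert_ids2names('', obj, None)
decoded = api.convert_names2ids('', '', encoded)
assert decoded == obj
```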