import json
import os

import pandas as pd


def download_aimmdd_data(c):
    """
    Navigate through a tiled client and download all the entries that are found.

    Each node is serialized to ``files/<dataset>/<uid>.json`` containing its
    metadata, specs and tabular data.

    Parameters
    ----------
    c : tiled.client

    Returns
    -------
    None.

    """
    # makedirs(exist_ok=True) avoids the exists()/mkdir() race of the
    # check-then-create pattern.
    os.makedirs("files/", exist_ok=True)

    for key in c['dataset'].keys():
        print(f"Downloading {key} dataset...")
        os.makedirs(f"files/{key}/", exist_ok=True)

        for node_value in c['dataset'][key]['uid'].values():
            uid = node_value.item['id']
            print(f"---- Saving node {uid}...")
            metadata = dict(node_value.metadata)
            # It's not necessary to keep the uid generated by the server;
            # the new server will generate a new uid. pop() (not del) so a
            # node without '_tiled' metadata does not raise KeyError.
            metadata.pop('_tiled', None)
            specs = list(node_value.specs)

            data_dict = node_value.read().to_dict()

            meta_content = {"metadata": metadata, "specs": specs, "data": data_dict}

            with open(f"files/{key}/{uid}.json", 'w') as file:
                json.dump(meta_content, file)

    print("Download completed!")


def upload_aimmdb_data(c, folder_path):
    """
    Walk through a path on the local machine and write the data into a remote
    instance of tiled.

    Subdirectories are explored recursively; only ``*.json`` files produced by
    ``download_aimmdd_data`` are uploaded, and entries whose key already exists
    on the server are skipped.

    Parameters
    ----------
    c : tiled.client
    folder_path : pathlib.Path
        Path to folder containing backup data.

    Returns
    -------
    None.

    """
    for path in sorted(folder_path.iterdir()):
        if path.name.startswith("."):
            # Skip hidden files.
            continue
        if not path.is_file():
            # Explore subfolder recursively, then move on: without this
            # `continue` the directory itself would fall through to the
            # file-handling branch below (and a directory named "*.json"
            # would crash on open()).
            upload_aimmdb_data(c, path)
            continue
        if path.suffix != ".json":
            continue

        if path.stem in c:
            print(f"dataset: {folder_path.name} - node {path.stem} already exists")
            continue

        with open(path) as f:
            sample = json.load(f)

        metadata = sample["metadata"]
        specs = sample["specs"]
        data = pd.DataFrame(sample["data"])

        node = c.write_dataframe(data, key=path.stem, metadata=metadata, specs=specs)
        print(f"dataset: {folder_path.name} - node {node.item['id']} was created")


def delete_dataset(c, dataset):
    """
    Delete all the entries of a given dataset.

    Parameters
    ----------
    c : tiled.client
    dataset : str
        Name of a data set in a remote tiled instance.

    Returns
    -------
    None.

    """
    if dataset in c:
        # Snapshot the keys first: deleting entries while iterating the live
        # keys() view can raise or silently skip entries.
        for key in list(c[dataset].keys()):
            c[dataset].delete(key)
            print(f"Node {key} has been deleted")