From c8900d782a7e22966ac4ea04cd8e5af19dbb9ed1 Mon Sep 17 00:00:00 2001 From: Taghreed1k Date: Sun, 9 Nov 2025 19:17:42 +0300 Subject: [PATCH 1/4] feat: add Streamlit database uploader and schema extractor feature --- streamlit_app/__init__.py | 0 db_fetcher.py => streamlit_app/db_fetcher.py | 155 ++++++++++--------- streamlit_app/db_uploader.py | 27 ++++ 3 files changed, 105 insertions(+), 77 deletions(-) create mode 100644 streamlit_app/__init__.py rename db_fetcher.py => streamlit_app/db_fetcher.py (95%) create mode 100644 streamlit_app/db_uploader.py diff --git a/streamlit_app/__init__.py b/streamlit_app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/db_fetcher.py b/streamlit_app/db_fetcher.py similarity index 95% rename from db_fetcher.py rename to streamlit_app/db_fetcher.py index 163955d..545991b 100644 --- a/db_fetcher.py +++ b/streamlit_app/db_fetcher.py @@ -1,77 +1,78 @@ -import streamlit as st -import sqlite3 -import tempfile -import os -import json - -st.set_page_config(page_title="Database Fetcher", layout="wide") -st.title("Database Fetcher (Upload schema.json or DB file)") - -def extract_schema_from_sqlite(path): - conn = sqlite3.connect(path) - cur = conn.cursor() - cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'") - tables = [r[0] for r in cur.fetchall()] - schema = {"tables": []} - - for t in tables: - cur.execute(f"PRAGMA table_info('{t}')") - cols = [] - pks = [] - for cid, name, ctype, notnull, dflt, pk in cur.fetchall(): - cols.append({ - "name": name, - "type": ctype, - "nullable": notnull == 0, - "default": dflt - }) - if pk: - pks.append(name) - - cur.execute(f"PRAGMA foreign_key_list('{t}')") - fks = [ - { - "column": fk[3], - "referred_table": fk[2], - "referred_columns": [fk[4]] - } - for fk in cur.fetchall() - ] - - schema["tables"].append({ - "name": t, - "columns": cols, - "primary_key": pks, - "foreign_keys": fks - }) - - conn.close() - return schema - - -tab1, tab2 = 
st.tabs(["Upload DB", "Upload Schema JSON"]) - -with tab1: - db_file = st.file_uploader("Upload SQLite file (.db or .sqlite)", type=["db", "sqlite"]) - if db_file: - with tempfile.NamedTemporaryFile(delete=False, suffix=".sqlite") as tmp: - tmp.write(db_file.read()) - tmp_path = tmp.name - - schema = extract_schema_from_sqlite(tmp_path) - os.remove(tmp_path) - st.success("Schema extracted successfully") - st.json(schema) - st.download_button( - "Download schema.json", - json.dumps(schema, indent=2).encode("utf-8"), - "schema.json", - "application/json" - ) - -with tab2: - schema_file = st.file_uploader("Upload schema.json", type=["json"]) - if schema_file: - schema = json.load(schema_file) - st.success("Schema loaded successfully") - st.json(schema) +import streamlit as st +import sqlite3 +import tempfile +import os +import json + +st.set_page_config(page_title="Database Fetcher", layout="wide") +st.title("Database Fetcher (Upload schema.json or DB file)") + +def extract_sqlite_schema(path): + + conn = sqlite3.connect(path) + cur = conn.cursor() + cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'") + tables = [r[0] for r in cur.fetchall()] + schema = {"tables": []} + + for t in tables: + cur.execute(f"PRAGMA table_info('{t}')") + cols = [] + pks = [] + for cid, name, ctype, notnull, dflt, pk in cur.fetchall(): + cols.append({ + "name": name, + "type": ctype, + "nullable": notnull == 0, + "default": dflt + }) + if pk: + pks.append(name) + + cur.execute(f"PRAGMA foreign_key_list('{t}')") + fks = [ + { + "column": fk[3], + "referred_table": fk[2], + "referred_columns": [fk[4]] + } + for fk in cur.fetchall() + ] + + schema["tables"].append({ + "name": t, + "columns": cols, + "primary_key": pks, + "foreign_keys": fks + }) + + conn.close() + return schema + + +tab1, tab2 = st.tabs(["Upload DB", "Upload Schema JSON"]) + +with tab1: + db_file = st.file_uploader("Upload SQLite file (.db or .sqlite)", type=["db", "sqlite"]) + if 
db_file: + with tempfile.NamedTemporaryFile(delete=False, suffix=".sqlite") as tmp: + tmp.write(db_file.read()) + tmp_path = tmp.name + + schema = extract_sqlite_schema(tmp_path) + os.remove(tmp_path) + st.success("Schema extracted successfully") + st.json(schema) + st.download_button( + "Download schema.json", + json.dumps(schema, indent=2).encode("utf-8"), + "schema.json", + "application/json" + ) + +with tab2: + schema_file = st.file_uploader("Upload schema.json", type=["json"]) + if schema_file: + schema = json.load(schema_file) + st.success("Schema loaded successfully") + st.json(schema) diff --git a/streamlit_app/db_uploader.py b/streamlit_app/db_uploader.py new file mode 100644 index 0000000..24a4955 --- /dev/null +++ b/streamlit_app/db_uploader.py @@ -0,0 +1,27 @@ +import streamlit as st +import json +from db_fetcher import extract_sqlite_schema + +st.set_page_config(page_title="Database Uploader", layout="wide") + +st.title("Database Uploader") + +uploaded = st.file_uploader( + "Upload SQLite database (.db/.sqlite) or schema.json", + type=["db", "sqlite", "json"] +) + +if uploaded: + if uploaded.name.endswith(".json"): + schema = json.load(uploaded) + with open("data/schema.json", "w", encoding="utf-8") as f: + json.dump(schema, f, ensure_ascii=False, indent=2) + st.success("Schema JSON uploaded successfully.") + st.json(schema) + else: + with open(f"data/{uploaded.name}", "wb") as f: + f.write(uploaded.read()) + schema = extract_sqlite_schema(f"data/{uploaded.name}") + st.success("Schema extracted and saved to data/schema.json.") + st.json(schema) + From c71d1d3099a4c7636766747d522bc6c9ebd7c613 Mon Sep 17 00:00:00 2001 From: Taghreed1k Date: Sun, 9 Nov 2025 19:58:29 +0300 Subject: [PATCH 2/4] fix: correct import path for db_fetcher --- streamlit_app/db_uploader.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/streamlit_app/db_uploader.py b/streamlit_app/db_uploader.py index 24a4955..2ea1c9b 100644 ---
a/streamlit_app/db_uploader.py +++ b/streamlit_app/db_uploader.py @@ -1,6 +1,7 @@ import streamlit as st import json -from db_fetcher import extract_sqlite_schema +from streamlit_app.db_fetcher import extract_sqlite_schema + st.set_page_config(page_title="Database Uploader", layout="wide") From 3d376ebe65430bbfe3ece2b2e51e4d7075aa1ea3 Mon Sep 17 00:00:00 2001 From: Taghreed1k Date: Mon, 10 Nov 2025 09:56:26 +0300 Subject: [PATCH 3/4] feat: enable database upload & SQL query testing in Streamlit --- streamlit_app/db_uploader.py | 47 +++++++++++++++++++++++++++++++----- 1 file changed, 41 insertions(+), 6 deletions(-) diff --git a/streamlit_app/db_uploader.py b/streamlit_app/db_uploader.py index 2ea1c9b..7bf23d7 100644 --- a/streamlit_app/db_uploader.py +++ b/streamlit_app/db_uploader.py @@ -1,10 +1,12 @@ import streamlit as st import json -from streamlit_app.db_fetcher import extract_sqlite_schema +import sqlite3 +import pandas as pd +import tempfile +from db_fetcher import extract_sqlite_schema st.set_page_config(page_title="Database Uploader", layout="wide") - st.title("Database Uploader") uploaded = st.file_uploader( @@ -19,10 +21,43 @@ json.dump(schema, f, ensure_ascii=False, indent=2) st.success("Schema JSON uploaded successfully.") st.json(schema) + else: - with open(f"data/{uploaded.name}", "wb") as f: - f.write(uploaded.read()) - schema = extract_sqlite_schema(f"data/{uploaded.name}") - st.success("Schema extracted and saved to data/schema.json.") + # Save the uploaded SQLite file temporarily + temp_file = tempfile.NamedTemporaryFile(delete=False, suffix=".sqlite") + temp_file.write(uploaded.getbuffer()) + temp_file.flush() + temp_file.close() + + # Connect to the database + conn = sqlite3.connect(temp_file.name) + st.session_state.conn = conn + st.success("Connected to database successfully.") + + # Extract and display schema + schema = extract_sqlite_schema(temp_file.name) st.json(schema) + # Display available tables + try: + tables = pd.read_sql_query( + 
"SELECT name FROM sqlite_master WHERE type='table';", conn + ) + st.write("**Available Tables:**", tables) + except Exception as e: + st.warning(f"Could not list tables: {e}") + + # SQL query testing section + query = st.text_area( + "Enter an SQL query to test", + "SELECT name FROM sqlite_master;", + height=120 + ) + if st.button("Run Query"): + try: + result = pd.read_sql_query(query, conn) + st.dataframe(result, use_container_width=True) + except Exception as e: + st.error(f"SQL Error: {e}") + + From ed3a6fecf29c08d06cd0934746611b0338d770dc Mon Sep 17 00:00:00 2001 From: Taghreed1k Date: Mon, 10 Nov 2025 13:32:36 +0300 Subject: [PATCH 4/4] feat: secure DB uploader (save to data) + SQL tester --- streamlit_app/db_uploader.py | 80 ++++++++++++++---------------------- 1 file changed, 31 insertions(+), 49 deletions(-) diff --git a/streamlit_app/db_uploader.py b/streamlit_app/db_uploader.py index 7bf23d7..d2482ca 100644 --- a/streamlit_app/db_uploader.py +++ b/streamlit_app/db_uploader.py @@ -1,63 +1,45 @@ import streamlit as st -import json import sqlite3 +import os import pandas as pd -import tempfile -from db_fetcher import extract_sqlite_schema - st.set_page_config(page_title="Database Uploader", layout="wide") -st.title("Database Uploader") +st.title("Database Uploader & SQL Tester") -uploaded = st.file_uploader( - "Upload SQLite database (.db/.sqlite) or schema.json", - type=["db", "sqlite", "json"] -) +DATA_DIR = "data" +os.makedirs(DATA_DIR, exist_ok=True) -if uploaded: - if uploaded.name.endswith(".json"): - schema = json.load(uploaded) - with open("data/schema.json", "w", encoding="utf-8") as f: - json.dump(schema, f, ensure_ascii=False, indent=2) - st.success("Schema JSON uploaded successfully.") - st.json(schema) +ACCESS_KEY = "SDAIA2025" +key_input = st.text_input("Enter access key:", type="password") - else: - # Save the uploaded SQLite file temporarily - temp_file = tempfile.NamedTemporaryFile(delete=False, suffix=".sqlite") - 
temp_file.write(uploaded.getbuffer()) - temp_file.flush() - temp_file.close() - - # Connect to the database - conn = sqlite3.connect(temp_file.name) - st.session_state.conn = conn - st.success("Connected to database successfully.") - - # Extract and display schema - schema = extract_sqlite_schema(temp_file.name) - st.json(schema) - - # Display available tables - try: - tables = pd.read_sql_query( - "SELECT name FROM sqlite_master WHERE type='table';", conn - ) - st.write("**Available Tables:**", tables) - except Exception as e: - st.warning(f"Could not list tables: {e}") - - # SQL query testing section - query = st.text_area( - "Enter an SQL query to test", - "SELECT name FROM sqlite_master;", - height=120 - ) +if key_input == ACCESS_KEY: + st.success("Access granted") + + uploaded = st.file_uploader("Upload SQLite database (.db / .sqlite)", type=["db", "sqlite"]) + if uploaded: + save_path = os.path.join(DATA_DIR, uploaded.name) + with open(save_path, "wb") as f: + f.write(uploaded.getbuffer()) + st.success(f"File saved to: {save_path}") + + db_files = [f for f in os.listdir(DATA_DIR) if f.endswith((".db", ".sqlite"))] + if db_files: + selected_db = st.selectbox("Select a database:", db_files) + db_path = os.path.join(DATA_DIR, selected_db) + + query = st.text_area("SQL query:", "SELECT name FROM sqlite_master WHERE type='table';", height=100) if st.button("Run Query"): try: - result = pd.read_sql_query(query, conn) - st.dataframe(result, use_container_width=True) + conn = sqlite3.connect(db_path) + df = pd.read_sql_query(query, conn) + st.dataframe(df, use_container_width=True) + conn.close() except Exception as e: st.error(f"SQL Error: {e}") + else: + st.info("No databases found in /data.") +else: + st.warning("Enter a valid access key to enable uploader.") +