Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Empty file added streamlit_app/__init__.py
Empty file.
155 changes: 78 additions & 77 deletions db_fetcher.py → streamlit_app/db_fetcher.py
Original file line number Diff line number Diff line change
@@ -1,77 +1,78 @@
import streamlit as st
import sqlite3
import tempfile
import os
import json

# Page chrome: wide layout so the extracted schema JSON renders readably.
st.set_page_config(page_title="Database Fetcher", layout="wide")
st.title("Database Fetcher (Upload schema.json or DB file)")

def extract_schema_from_sqlite(path):
    """Extract a JSON-serializable schema description from a SQLite file.

    Args:
        path: Filesystem path to the SQLite database.

    Returns:
        dict of the form ``{"tables": [...]}`` where each entry holds the
        table name, its columns (name/type/nullable/default), the
        primary-key column names, and its foreign-key references.
    """
    conn = sqlite3.connect(path)
    cur = conn.cursor()
    # Skip SQLite's internal bookkeeping tables (sqlite_sequence, ...).
    cur.execute(
        "SELECT name FROM sqlite_master "
        "WHERE type='table' AND name NOT LIKE 'sqlite_%'"
    )
    table_names = [row[0] for row in cur.fetchall()]

    described = []
    for table in table_names:
        # PRAGMA table_info rows: (cid, name, type, notnull, dflt_value, pk)
        cur.execute(f"PRAGMA table_info('{table}')")
        columns = []
        primary_key = []
        for _cid, col_name, col_type, notnull, default, pk in cur.fetchall():
            columns.append({
                "name": col_name,
                "type": col_type,
                "nullable": not notnull,
                "default": default,
            })
            if pk:  # pk is the 1-based position within the PK, 0 otherwise
                primary_key.append(col_name)

        # PRAGMA foreign_key_list rows: (id, seq, table, from, to, ...)
        cur.execute(f"PRAGMA foreign_key_list('{table}')")
        foreign_keys = []
        for fk in cur.fetchall():
            foreign_keys.append({
                "column": fk[3],
                "referred_table": fk[2],
                "referred_columns": [fk[4]],
            })

        described.append({
            "name": table,
            "columns": columns,
            "primary_key": primary_key,
            "foreign_keys": foreign_keys,
        })

    conn.close()
    return {"tables": described}


tab1, tab2 = st.tabs(["Upload DB", "Upload Schema JSON"])

with tab1:
    db_file = st.file_uploader("Upload SQLite file (.db or .sqlite)", type=["db", "sqlite"])
    if db_file:
        # sqlite3 needs a real file path, so persist the upload to a
        # temporary file first (delete=False: we reopen it by path below).
        with tempfile.NamedTemporaryFile(delete=False, suffix=".sqlite") as tmp:
            tmp.write(db_file.read())
            tmp_path = tmp.name

        try:
            schema = extract_schema_from_sqlite(tmp_path)
        finally:
            # Remove the temp copy even when extraction raises (e.g. a
            # corrupt upload); previously a failure leaked the temp file.
            os.remove(tmp_path)
        st.success("Schema extracted successfully")
        st.json(schema)
        st.download_button(
            "Download schema.json",
            json.dumps(schema, indent=2).encode("utf-8"),
            "schema.json",
            "application/json",
        )

with tab2:
    schema_file = st.file_uploader("Upload schema.json", type=["json"])
    if schema_file:
        schema = json.load(schema_file)
        st.success("Schema loaded successfully")
        st.json(schema)
import streamlit as st
import sqlite3
import tempfile
import os
import json

# Page chrome: wide layout so the extracted schema JSON renders readably.
st.set_page_config(page_title="Database Fetcher", layout="wide")
st.title("Database Fetcher (Upload schema.json or DB file)")

def extract_sqlite_schema(path):
    """Read a SQLite file and describe its tables as a plain dict.

    Args:
        path: Path to the SQLite database on disk.

    Returns:
        ``{"tables": [...]}`` — every entry carries the table name, column
        descriptors (name/type/nullable/default), primary-key column names
        and foreign-key references, ready to serialize as schema.json.
    """
    connection = sqlite3.connect(path)
    cursor = connection.cursor()
    # User tables only; names beginning with sqlite_ are internal.
    cursor.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
    )
    schema = {"tables": []}

    # fetchall() materializes the table list up front, so re-using the same
    # cursor for the PRAGMA queries inside the loop is safe.
    for (table_name,) in cursor.fetchall():
        # table_info rows: (cid, name, type, notnull, dflt_value, pk)
        cursor.execute(f"PRAGMA table_info('{table_name}')")
        column_rows = cursor.fetchall()

        columns = [
            {
                "name": row[1],
                "type": row[2],
                "nullable": row[3] == 0,
                "default": row[4],
            }
            for row in column_rows
        ]
        # row[5] is the column's 1-based position in the PK (0 = not in PK).
        primary_key = [row[1] for row in column_rows if row[5]]

        # foreign_key_list rows: (id, seq, table, from, to, ...)
        cursor.execute(f"PRAGMA foreign_key_list('{table_name}')")
        foreign_keys = [
            {
                "column": fk_row[3],
                "referred_table": fk_row[2],
                "referred_columns": [fk_row[4]],
            }
            for fk_row in cursor.fetchall()
        ]

        schema["tables"].append(
            {
                "name": table_name,
                "columns": columns,
                "primary_key": primary_key,
                "foreign_keys": foreign_keys,
            }
        )

    connection.close()
    return schema


tab1, tab2 = st.tabs(["Upload DB", "Upload Schema JSON"])

with tab1:
    db_file = st.file_uploader("Upload SQLite file (.db or .sqlite)", type=["db", "sqlite"])
    if db_file:
        # sqlite3 needs a real file path, so persist the upload to a
        # temporary file first (delete=False: we reopen it by path below).
        with tempfile.NamedTemporaryFile(delete=False, suffix=".sqlite") as tmp:
            tmp.write(db_file.read())
            tmp_path = tmp.name

        try:
            # BUG FIX: the function was renamed to extract_sqlite_schema,
            # but this call site still used the old extract_schema_from_sqlite
            # name, raising NameError on every DB upload.
            schema = extract_sqlite_schema(tmp_path)
        finally:
            # Remove the temp copy even when extraction raises (e.g. a
            # corrupt upload); previously a failure leaked the temp file.
            os.remove(tmp_path)
        st.success("Schema extracted successfully")
        st.json(schema)
        st.download_button(
            "Download schema.json",
            json.dumps(schema, indent=2).encode("utf-8"),
            "schema.json",
            "application/json",
        )

with tab2:
    schema_file = st.file_uploader("Upload schema.json", type=["json"])
    if schema_file:
        schema = json.load(schema_file)
        st.success("Schema loaded successfully")
        st.json(schema)
45 changes: 45 additions & 0 deletions streamlit_app/db_uploader.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
import streamlit as st
import sqlite3
import os
import pandas as pd

st.set_page_config(page_title="Database Uploader", layout="wide")
st.title("Database Uploader & SQL Tester")

# Uploaded databases persist here between sessions.
DATA_DIR = "data"
os.makedirs(DATA_DIR, exist_ok=True)

# SECURITY: hard-coded shared secret committed to the repo. Move it to an
# environment variable or st.secrets before deploying anywhere public.
ACCESS_KEY = "SDAIA2025"
key_input = st.text_input("Enter access key:", type="password")

if key_input == ACCESS_KEY:
    st.success("Access granted")

    uploaded = st.file_uploader("Upload SQLite database (.db / .sqlite)", type=["db", "sqlite"])
    if uploaded:
        # basename() strips any path components from the client-supplied
        # filename so an upload cannot escape DATA_DIR (e.g. "../../x.db").
        save_path = os.path.join(DATA_DIR, os.path.basename(uploaded.name))
        with open(save_path, "wb") as f:
            f.write(uploaded.getbuffer())
        st.success(f"File saved to: {save_path}")

    db_files = [f for f in os.listdir(DATA_DIR) if f.endswith((".db", ".sqlite"))]
    if db_files:
        selected_db = st.selectbox("Select a database:", db_files)
        db_path = os.path.join(DATA_DIR, selected_db)

        query = st.text_area("SQL query:", "SELECT name FROM sqlite_master WHERE type='table';", height=100)
        if st.button("Run Query"):
            try:
                conn = sqlite3.connect(db_path)
                try:
                    df = pd.read_sql_query(query, conn)
                finally:
                    # Close even when the query raises; previously a SQL
                    # error leaked the connection (close() was unreachable).
                    conn.close()
                st.dataframe(df, use_container_width=True)
            except Exception as e:
                st.error(f"SQL Error: {e}")
    else:
        st.info("No databases found in /data.")
else:
    st.warning("Enter a valid access key to enable uploader.")