-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: debug_export.py
More file actions
58 lines (47 loc) · 1.67 KB
/
debug_export.py
File metadata and controls
58 lines (47 loc) · 1.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
import requests
import sqlite3
import json
import os
# Path to the SQLite database, relative to the directory the script is run from.
DB_PATH: str = "data/db/mediavault.db"
# Base URL of the local API under test.
API_URL: str = "http://localhost:8001/api" # Try 8001 as per history
def check_db_json():
    """Inspect the SQLite database at DB_PATH directly.

    Prints a small sample of ``dataset_items`` rows (id + raw annotations
    JSON), then probes whether this SQLite build supports the ``json_each``
    table-valued function by running it over the annotations column.

    Side effects: prints findings to stdout. Raises whatever
    ``sqlite3.connect``/``execute`` raises for the sampling query itself
    (e.g. missing file path or table).
    """
    print(f"--- Checking DB {DB_PATH} ---")
    conn = sqlite3.connect(DB_PATH)
    try:
        cursor = conn.cursor()
        # Sample a handful of items to eyeball the stored annotations JSON.
        cursor.execute("SELECT id, annotations FROM dataset_items LIMIT 5")
        rows = cursor.fetchall()
        print(f"Found {len(rows)} items sample:")
        for row in rows:
            print(f"ID: {row[0]}, Ann: {row[1]}")
        # json_each requires SQLite's JSON1 extension; probe it and report
        # rather than crash if it's unavailable or the column isn't valid JSON.
        try:
            cursor.execute("SELECT DISTINCT key FROM dataset_items, json_each(dataset_items.annotations)")
            keys = cursor.fetchall()
            print(f"Direct SQL JSON keys: {keys}")
        except sqlite3.Error as e:
            print(f"SQL JSON Error: {e}")
    finally:
        # Always release the connection, even if a query above raised.
        conn.close()
def check_api():
    """Smoke-test the datasets export API at API_URL.

    Lists datasets, picks the first one, and fetches its export fields,
    printing the raw status codes/bodies for manual inspection.

    Side effects: prints findings to stdout; never raises (all failures are
    caught and reported).
    """
    print(f"\n--- Checking API {API_URL} ---")
    try:
        # List datasets; timeout prevents the debug script hanging forever
        # if the server is unreachable or wedged.
        r = requests.get(f"{API_URL}/datasets/", timeout=10)
        if r.status_code != 200:
            print(f"List Datasets failed: {r.status_code}")
            return
        data = r.json()
        datasets = data.get("datasets", [])
        if not datasets:
            print("No datasets found via API")
            return
        ds_id = datasets[0]["id"]
        print(f"Testing Dataset ID: {ds_id} ({datasets[0]['name']})")
        # Fetch the export-field list for that dataset; dump the raw body.
        r = requests.get(f"{API_URL}/datasets/export-fields?dataset_id={ds_id}", timeout=10)
        print(f"Export Fields Response ({r.status_code}): {r.text}")
    except Exception as e:
        # Deliberately broad: this is a debug helper and should report any
        # failure (connection, JSON decode, missing keys) instead of crashing.
        print(f"API Error: {e}")
# Script entry point: check the DB layer first, then the HTTP API.
if __name__ == "__main__":
    check_db_json()
    check_api()