diff --git a/data/json/2539835.json b/data/json/2539835.json
index 891f377..e02cb76 100755
--- a/data/json/2539835.json
+++ b/data/json/2539835.json
@@ -48,7 +48,7 @@
     },
     "caracteristiques": {
         "châssis": {
-            "cadre": "Carbone : Habit Full Carbon, 130mm travel, Proportional Response Suspension and Geo, 55mm chainline, ISCG05, BSA threaded BB, post mount brake, tapered headtube, DirectLine internal cable routing, UDH hanger",
+            "cadre": "Carbone : Habit Full Carbon, 130mm travel, Proportional Response Suspension and Geo, 55mm chainline, ISCG05, BSA threaded BB, post mount brake, tapered headtube, DirectLine internal cable routing, UDH hanger",
             "fourche": "RockShox Pike Select, 140mm, DebonAir, 15x110mm thru-axle, tapered steered, 42mm offset",
             "amortisseur": "RockShox Deluxe Select+, DebonAir, 2-Pos mode adjust, adjustable rebound"
         },
@@ -69,8 +69,8 @@
             "jantes": "Stan's NoTubes Crest Arch MK4, 28h, tubeless ready",
             "moyeu avant": "Shimano MT400, 15x110mm thru-axle ",
             "moyeu arrière": "Shimano MT510, 12x148mm thru-axle",
-            "pneu avant": " Maxxis Dissector, 29x2.4\" (27.5x2.4\" - XS), 3C, EXO, tubeless ready ",
-            "pneu arrière": " Maxis Rekon, 29x2.4\" (27.5x2.4\" - XS), 3C, EXO, tubeless ready"
+            "pneu avant": "Maxxis Dissector, 29x2.4\" (27.5x2.4\" - XS), 3C, EXO, tubeless ready",
+            "pneu arrière": "Maxis Rekon, 29x2.4\" (27.5x2.4\" - XS), 3C, EXO, tubeless ready"
         },
         "direction": {
             "cintre": "Cannondale 3 Riser, 6061 Alloy, 15mm rise, 8° sweep, 4° rise, 780mm",
diff --git a/init_metabase.py b/init_metabase.py
deleted file mode 100644
index 5887e0a..0000000
--- a/init_metabase.py
+++ /dev/null
@@ -1,193 +0,0 @@
-import requests
-import time
-import sys
-
-BASE_URL = "http://metabase:3000"
-
-ADMIN = {
-    "first_name": "Emmanuel",
-    "last_name": "Medina",
-    "email": "emmanuel.medina@univ-lorraine.fr",
-    "password": "!ChangeMe!"
-}
-
-DB = {
-    "engine": "postgres",
-    "name": "ventdest",
-    "is_on_demand": False,
-    "is_full_sync": True,
-    "is_sample": False,
-    "details": {
-        "host": "database",
-        "port": 5432,
-        "dbname": "sql",
-        "user": "sql",
-        "password": "!ChangeMe!",
-        "ssl": False
-    }
-}
-
-
-def wait_for_metabase(timeout=300):
-    print("⏳ Attente de Metabase...")
-    start = time.time()
-    while time.time() - start < timeout:
-        try:
-            r = requests.get(f"{BASE_URL}/api/health", timeout=2)
-            if r.ok:
-                print("✅ Metabase est prêt")
-                return True
-        except requests.RequestException:
-            pass
-        time.sleep(2)
-    print("❌ Timeout en attendant Metabase")
-    return False
-
-
-def get_setup_token():
-    r = requests.get(f"{BASE_URL}/api/session/properties")
-    r.raise_for_status()
-    return r.json().get("setup-token")
-
-
-def run_setup(token):
-    payload = {
-        "token": token,
-        "user": ADMIN,
-        "prefs": {"site_name": "IUT", "site_locale": "fr"}
-    }
-    r = requests.post(f"{BASE_URL}/api/setup", json=payload)
-    if r.status_code == 400 and "already" in r.text.lower():
-        return login(ADMIN["email"], ADMIN["password"])
-    r.raise_for_status()
-    return r.json()["id"]
-
-
-def login(email, password):
-    r = requests.post(f"{BASE_URL}/api/session",
-                      json={"username": email, "password": password})
-    r.raise_for_status()
-    return r.json()["id"]
-
-
-def database_exists(session_id, name):
-    headers = {"X-Metabase-Session": session_id}
-    r = requests.get(f"{BASE_URL}/api/database", headers=headers)
-    r.raise_for_status()
-    for db in r.json():
-        if db["name"].lower() == name.lower():
-            return db["id"]
-    return None
-
-
-def add_database(session_id, db_config):
-    db_id = database_exists(session_id, db_config["name"])
-    if db_id:
-        print(f"ℹ️ Base '{db_config['name']}' existe déjà (id={db_id})")
-        return db_id
-    headers = {"X-Metabase-Session": session_id}
-    r = requests.post(f"{BASE_URL}/api/database",
-                      headers=headers, json=db_config)
-    r.raise_for_status()
-    db_id = r.json()["id"]
-    print(f"🗄️ Base '{db_config['name']}' ajoutée (id={db_id})")
-    return db_id
-
-
-def get_or_create_collection(session_id, name, parent_id=None):
-    headers = {"X-Metabase-Session": session_id}
-    r = requests.get(f"{BASE_URL}/api/collection", headers=headers)
-    r.raise_for_status()
-    for coll in r.json():
-        if coll["name"].lower() == name.lower():
-            print(f"ℹ️ Collection '{name}' existe déjà (id={coll['id']})")
-            return coll["id"]
-    payload = {"name": name, "color": "#509EE3"}
-    if parent_id:
-        payload["parent_id"] = parent_id
-    r = requests.post(f"{BASE_URL}/api/collection",
-                      headers=headers, json=payload)
-    r.raise_for_status()
-    print(f"📁 Collection '{name}' créée (id={r.json()['id']})")
-    return r.json()["id"]
-
-
-def get_or_create_question(session_id, name, db_id, sql, collection_id):
-    headers = {"X-Metabase-Session": session_id}
-    r = requests.get(f"{BASE_URL}/api/card", headers=headers)
-    r.raise_for_status()
-    for q in r.json():
-        if q["name"].lower() == name.lower():
-            print(f"ℹ️ Question '{name}' existe déjà (id={q['id']})")
-            return q["id"]
-
-    payload = {
-        "name": name,
-        "dataset_query": {
-            "database": db_id,
-            "type": "native",
-            "native": {"query": sql}
-        },
-        "display": "table",
-        "collection_id": collection_id
-    }
-    r = requests.post(f"{BASE_URL}/api/card",
-                      headers=headers, json=payload)
-    r.raise_for_status()
-    print(f"❓ Question '{name}' créée (id={r.json()['id']})")
-    return r.json()["id"]
-
-
-def get_or_create_dashboard(session_id, name, collection_id):
-    headers = {"X-Metabase-Session": session_id}
-    r = requests.get(f"{BASE_URL}/api/dashboard", headers=headers)
-    r.raise_for_status()
-    for d in r.json():
-        if d["name"].lower() == name.lower():
-            print(f"ℹ️ Dashboard '{name}' existe déjà (id={d['id']})")
-            return d["id"]
-
-    payload = {"name": name, "collection_id": collection_id}
-    r = requests.post(f"{BASE_URL}/api/dashboard",
-                      headers=headers, json=payload)
-    r.raise_for_status()
-    print(f"📊 Dashboard '{name}' créé (id={r.json()['id']})")
-    return r.json()["id"]
-
-
-def add_question_to_dashboard(session_id, dashboard_id, card_id):
-    headers = {"X-Metabase-Session": session_id}
-    payload = {"cardId": card_id}
-    r = requests.post(f"{BASE_URL}/api/dashboard/{dashboard_id}/cards",
-                      headers=headers, json=payload)
-    if r.status_code == 400 and "already" in r.text.lower():
-        print(f"ℹ️ Question {card_id} déjà liée au dashboard {dashboard_id}")
-        return
-    r.raise_for_status()
-    print(f"➕ Question {card_id} ajoutée au dashboard {dashboard_id}")
-
-
-if __name__ == "__main__":
-    if not wait_for_metabase():
-        sys.exit(1)
-
-    token = get_setup_token()
-    session = run_setup(token)
-
-    db_id = add_database(session, DB)
-    coll_id = get_or_create_collection(session, "IUT Dashboard")
-
-    # Exemple : une question SQL
-    q1 = get_or_create_question(
-        session,
-        "Nombre de lignes",
-        db_id,
-        "SELECT COUNT(*) AS total FROM information_schema.tables;",
-        coll_id
-    )
-
-    # Exemple : un dashboard
-    dash_id = get_or_create_dashboard(session, "Vue d'ensemble", coll_id)
-    add_question_to_dashboard(session, dash_id, q1)
-
-    print("🎉 Initialisation Metabase terminée avec dashboards/questions")
diff --git a/metabase/init_metabase.py b/metabase/init_metabase.py
index a110525..7d4747d 100644
--- a/metabase/init_metabase.py
+++ b/metabase/init_metabase.py
@@ -4,6 +4,7 @@
 import time
 import sys
 
 METABASE_URL = os.getenv("METABASE_URL", "http://127.0.0.1:3000")
+    payload = {
 # Admin Metabase
 ADMIN_FIRST_NAME = os.getenv("MB_FIRST_NAME", "Admin")
@@ -149,6 +150,7 @@ def get_or_create_question(session_id, name, db_id, sql, collection_id):
             "type": "native",
             "native": {"query": sql}
         },
+        "visualization_settings": {},
         "display": "table",
         "collection_id": collection_id
     }
@@ -203,7 +205,7 @@ if __name__ == "__main__":
         session,
         "Nombre de lignes",
         db_id,
-        "SELECT COUNT(*) AS total FROM information_schema.tables;",
+        "SELECT COUNT(*) AS total FROM adherent;",
         coll_id
     )
 
diff --git a/transport/data.sql b/transport/data.sql
new file mode 100644
index 0000000..7d61998
--- /dev/null
+++ b/transport/data.sql
@@ -0,0 +1,42 @@
+PRAGMA foreign_keys = ON;
+
+insert into societe values (1, 'UPS');
+
+insert into entrepot (ville) values
+    ('Saint Dié'),
+    ('Paris'),
+    ('Lyon'),
+    ('Marseille');
+
+
+insert into permis (categorie, poids_maximum)
+values
+    ('A', 0.75),
+    ('B', 3.5),
+    ('C', 35),
+    ('D', 44);
+
+
+insert into chauffeur (id, nom) values
+(1, 'Albert'),
+(2, 'Samia'),
+(3, 'Samy');
+
+
+insert into examen (numero, date,
+                    permis, chauffeur_id) values
+(11156, '2020-04-20', 'A', 1),
+(98212, '2023-06-13', 'B', 1),
+(66874, '2022-10-21', 'C', 2);
+
+
+insert into camion (immatriculation, marque, capacite) values
+('FR-234-PO', 'Volvo', 44),
+('EU-324-FE', 'Renault', 35),
+('DF-463-VD', 'Mercedes', 44),
+('CD-333-LW', 'Volvo', 35),
+('BG-468-VS', 'Iveco', 20);
+
+
+insert into transport values
+(1, '2025-09-20', 'FR-234-PO', 2, 1, 3);
diff --git a/transport/initdb.sql b/transport/initdb.sql
new file mode 100644
index 0000000..a85d59a
--- /dev/null
+++ b/transport/initdb.sql
@@ -0,0 +1,61 @@
+-- La table société
+create table societe (
+    id integer primary key,
+    nom text
+);
+
+-- La table chauffeur
+create table chauffeur (
+    id integer primary key,
+    nom text
+);
+
+
+-- La table entrepot
+create table entrepot (
+    id integer primary key,
+    ville text
+);
+
+-- La table camion
+create table camion (
+    immatriculation text primary key,
+    marque text,
+    capacite numeric,
+    permis text,
+    foreign key (permis) references permis(categorie)
+);
+
+-- La table permis
+create table permis (
+    categorie text primary key,
+    poids_maximum numeric
+);
+
+create table examen (
+    numero integer primary key,
+    date text,
+    permis text,
+    chauffeur_id integer,
+    foreign key (chauffeur_id)
+        references chauffeur(id),
+    foreign key (permis)
+        references permis(categorie)
+);
+
+create table transport (
+    id integer primary key,
+    jour text,
+    camion_immatriculation text,
+    chauffeur_id integer,
+    entrepot_depart_id integer,
+    entrepot_arrivee_id integer,
+    foreign key (chauffeur_id)
+        references chauffeur(id),
+    foreign key (camion_immatriculation)
+        references camion(immatriculation),
+    foreign key (entrepot_depart_id)
+        references entrepot(id),
+    foreign key (entrepot_arrivee_id)
+        references entrepot(id)
+)