Upload 9 files
- api_client.py +55 -0
- app.py +29 -2
- auth.py +17 -5
- page_modules/analyze_audiodescriptions.py +7 -1
- persistent_data_gate.py +263 -0
api_client.py
CHANGED
@@ -171,6 +171,61 @@ class APIClient:
             # Return an error dictionary so the UI can display it
             return {"error": str(e)}
 
+
+    def import_databases(self) -> dict:
+        url = f"{self.base_url}/import_databases"
+        try:
+            r = self.session.post(url, timeout=self.timeout * 2)
+            r.raise_for_status()
+            return r.json()
+        except requests.exceptions.RequestException as e:
+            return {"error": str(e)}
+
+
+    def import_media(self, sha1sum: str) -> dict:
+        url = f"{self.base_url}/import_media/{sha1sum}"
+        try:
+            r = self.session.get(url, timeout=self.timeout * 5)
+            r.raise_for_status()
+            return {"zip_bytes": r.content}
+        except requests.exceptions.RequestException as e:
+            return {"error": str(e)}
+
+
+    def import_media_version(self, sha1sum: str, version: str) -> dict:
+        url = f"{self.base_url}/import_media_version/{sha1sum}/{version}"
+        try:
+            r = self.session.get(url, timeout=self.timeout * 5)
+            r.raise_for_status()
+            return {"zip_bytes": r.content}
+        except requests.exceptions.RequestException as e:
+            return {"error": str(e)}
+
+
+    def update_databases(self, payload: dict) -> dict:
+        """Sends the generated SQL statements to the /update_databases endpoint."""
+
+        url = f"{self.base_url}/update_databases"
+        try:
+            r = self.session.post(url, json=payload, timeout=self.timeout * 5)
+            r.raise_for_status()
+            return r.json()
+        except requests.exceptions.RequestException as e:
+            return {"error": str(e)}
+
+
+    def export_media(self, zip_bytes: bytes) -> dict:
+        """Sends a ZIP with the new videos to the /export_media endpoint."""
+
+        url = f"{self.base_url}/export_media"
+        files = {"media_zip": ("media_export.zip", zip_bytes, "application/zip")}
+        try:
+            r = self.session.post(url, files=files, timeout=self.timeout * 10)
+            r.raise_for_status()
+            return r.json()
+        except requests.exceptions.RequestException as e:
+            return {"error": str(e)}
+
     def generate_audiodescription(self, video_bytes: bytes, video_name: str) -> dict:
         """Calls the engine endpoint /generate_audiodescription with an in-memory MP4."""
         url = f"{self.base_url}/generate_audiodescription"
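For orientation, a minimal usage sketch of the new transfer methods follows. The engine URL, token, sha1sum and version below are placeholders (assumptions), not values from this repository; the constructor call mirrors how app.py builds the client.

# Hedged usage sketch of the new APIClient transfer methods.
# The engine URL, sha1sum and version strings are placeholders, not real project values.
from api_client import APIClient

api = APIClient(
    "http://localhost:8000",  # assumed engine base URL
    use_mock=False,
    data_dir="demo/data",
    token=None,               # or the API_SHARED_TOKEN value
    tts_url=None,
)

# Pull the shared databases from the engine (external data origin).
print(api.import_databases())

# Fetch the media ZIP for one video, then for one generated version.
resp = api.import_media("0" * 40)                  # sha1sum placeholder
if "zip_bytes" in resp:
    print(f"media zip: {len(resp['zip_bytes'])} bytes")
resp = api.import_media_version("0" * 40, "v1")    # version placeholder

# Push session changes back: SQL statements first, then any new media as a ZIP.
api.update_databases({"statements": ["-- SQL generated by persistent_data_gate"]})
# api.export_media(zip_bytes)  # zip_bytes: a ZIP built from demo/temp/media/<sha1sum>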
app.py
CHANGED
@@ -13,6 +13,7 @@ from databases import set_db_path, init_schema, set_blockchain_enabled
 from api_client import APIClient
 from utils import ensure_dirs
 from auth import initialize_auth_system, render_login_form, render_sidebar, require_login
+from persistent_data_gate import ensure_temp_databases
 from mobile_verification import render_mobile_verification_screen, get_user_permissions
 from compliance_client import compliance_client
 from page_modules.process_video import render_process_video_page
@@ -84,8 +85,15 @@ USE_MOCK = bool(CFG.get("app", {}).get("use_mock", False)) # si no la tienes en
 BLOCKCHAIN_ENABLED = bool(CFG.get("blockchain", {}).get("enabled", False))
 API_TOKEN = CFG.get("api", {}).get("token") or os.getenv("API_SHARED_TOKEN")
 
+# Backend (engine) client
+api = APIClient(BACKEND_BASE_URL, use_mock=USE_MOCK, data_dir=DATA_DIR, token=API_TOKEN, tts_url=TTS_URL)
+
 os.makedirs(DATA_DIR, exist_ok=True)
 ensure_dirs(DATA_DIR)
+
+base_dir = Path(__file__).parent
+ensure_temp_databases(base_dir, api)
+
 DB_PATH = os.path.join(DATA_DIR, "users.db")
 set_db_path(DB_PATH)
 
@@ -97,14 +105,33 @@ init_schema()
 # Initialize authentication system and sync default users
 initialize_auth_system(DB_PATH)
 
-api = APIClient(BACKEND_BASE_URL, use_mock=USE_MOCK, data_dir=DATA_DIR, token=API_TOKEN, tts_url=TTS_URL)
-
 # Session identifier for event tracing
 if "session_id" not in st.session_state:
     st.session_state.session_id = str(uuid.uuid4())
 
+# Expose the API client in session_state for other modules (e.g. auth/persistent_data_gate)
+st.session_state.api_client = api
+
 st.set_page_config(page_title="Veureu — Audiodescripció", page_icon="🎬", layout="wide")
 
+# Global style: primary buttons in orange (instead of the default blue)
+st.markdown(
+    """
+    <style>
+    .stButton > button[kind="primary"] {
+        background-color: #f97316;
+        border-color: #ea580c;
+        color: white;
+    }
+    .stButton > button[kind="primary"]:hover {
+        background-color: #ea580c;
+        border-color: #c2410c;
+    }
+    </style>
+    """,
+    unsafe_allow_html=True,
+)
+
 # Initialize session state for user
 if "user" not in st.session_state:
     st.session_state.user = None
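For reference, these are the configuration keys that app.py and persistent_data_gate read from config.yaml, shown as the equivalent Python dict; every value here is an illustrative assumption, not the project's actual configuration.

# Illustrative shape of the loaded config (CFG); all values are assumptions.
CFG = {
    "app": {"use_mock": False},          # USE_MOCK
    "blockchain": {"enabled": False},    # BLOCKCHAIN_ENABLED
    "api": {"token": None},              # API_TOKEN, falls back to the API_SHARED_TOKEN env var
    "data_origin": "internal",           # read by persistent_data_gate: "internal" or "external"
}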
auth.py
CHANGED
@@ -15,6 +15,7 @@ from mobile_verification import (
     get_user_permissions,
     show_verification_status_in_sidebar
 )
+from persistent_data_gate import confirm_changes_and_logout
 
 
 def log(msg: str):
@@ -165,11 +166,6 @@ def render_sidebar():
 
     # Show SMS verification status
     show_verification_status_in_sidebar()
-
-    if st.button("Tancar sessió"):
-        st.session_state.user = None
-        st.session_state.sms_verified = None
-        st.rerun()
 
     if st.session_state.user:
         # Get the user's permissions
@@ -202,6 +198,22 @@ def render_sidebar():
             page_options,
             index=0
         )
+        st.markdown("---")
+        st.button("Confirmar canvis", key="confirmar_canvis", use_container_width=True)
+
+        if st.button("Confirmar canvis i tancar sessió"):
+            # Persist the current session's changes before logging out
+            try:
+                base_dir = Path(__file__).parent
+                session_id = st.session_state.get("session_id", "")
+                api_client = st.session_state.get("api_client")
+                confirm_changes_and_logout(base_dir, api_client, session_id)
+            except Exception:
+                pass
+
+            st.session_state.user = None
+            st.session_state.sms_verified = None
+            st.rerun()
     else:
         page = None
 
page_modules/analyze_audiodescriptions.py
CHANGED
@@ -11,6 +11,7 @@ import streamlit as st
 import yaml
 
 from utils import save_bytes
+from persistent_data_gate import ensure_media_for_video
 from databases import (
     get_accessible_videos_with_sha1,
     insert_demo_feedback_row,
@@ -145,7 +146,8 @@ def render_analyze_audiodescriptions_page(api, permissions: Dict[str, bool]) ->
         st.stop()
 
     # Media base: demo/temp/media/<sha1sum>
-
+    base_dir = Path(__file__).resolve().parent.parent
+    base_media_dir = base_dir / "temp" / "media"
 
     # DEBUG: list the current subfolders under demo/temp/media
     try:
@@ -181,6 +183,8 @@ def render_analyze_audiodescriptions_page(api, permissions: Dict[str, bool]) ->
             del st.session_state["eval_values"]
         st.rerun()
 
+    ensure_media_for_video(base_dir, api, selected_sha1)
+
     vid_dir = base_media_dir / selected_sha1
     mp4s = sorted(vid_dir.glob("*.mp4"))
 
@@ -199,6 +203,8 @@ def render_analyze_audiodescriptions_page(api, permissions: Dict[str, bool]) ->
 
     # Load the evaluation values when the version changes
    if subcarpeta_seleccio:
+        ensure_media_for_video(base_dir, api, selected_sha1, subcarpeta_seleccio)
+
         # Unique key per video (sha1) + selected version
         current_version_key = f"{selected_sha1}_{subcarpeta_seleccio}"
         if "last_version_key" not in st.session_state or st.session_state.last_version_key != current_version_key:
persistent_data_gate.py
ADDED
@@ -0,0 +1,263 @@
+import os
+import shutil
+import zipfile
+import io
+from pathlib import Path
+from typing import Optional
+
+
+def _load_data_origin(base_dir: Path) -> str:
+    """Reads demo/config.yaml and returns data_origin (internal|external)."""
+
+    cfg_path = base_dir / "config.yaml"
+    origin = "internal"
+    try:
+        import yaml
+
+        with cfg_path.open("r", encoding="utf-8") as f:
+            cfg = yaml.safe_load(f) or {}
+        origin = str(cfg.get("data_origin", "internal")).lower()
+    except Exception:
+        origin = "internal"
+    return origin
+
+
+def ensure_temp_databases(base_dir: Path, api_client) -> None:
+    """Guarantees that the *.db databases are present in demo/temp before login.
+
+    - data_origin == "internal": copies demo/data/*.db -> demo/temp/*.db
+    - data_origin == "external": calls the remote import_databases endpoint.
+    """
+
+    data_origin = _load_data_origin(base_dir)
+    temp_dir = base_dir / "temp"
+    temp_dir.mkdir(parents=True, exist_ok=True)
+
+    if data_origin == "internal":
+        source_dir = base_dir / "data"
+        if source_dir.exists():
+            for entry in source_dir.glob("*.db"):
+                dest = temp_dir / entry.name
+                shutil.copy2(entry, dest)
+    else:
+        if api_client is None:
+            return
+        try:
+            api_client.import_databases()
+        except Exception:
+            return
+
+
+def _extract_zip_bytes(zip_bytes: bytes, target_dir: Path) -> None:
+    target_dir.mkdir(parents=True, exist_ok=True)
+    with zipfile.ZipFile(io.BytesIO(zip_bytes)) as zf:
+        zf.extractall(target_dir)
+
+def ensure_media_for_video(
+    base_dir: Path,
+    api_client,
+    sha1sum: str,
+    version: Optional[str] = None,
+) -> None:
+    """Guarantees that demo/temp/media/<sha1sum>[/<version>] exists.
+
+    - data_origin == "internal":
+        * copies video.mp4 from demo/data/media/<sha1sum>/ to demo/temp/media/<sha1sum>/
+        * if version, copies the folder demo/data/media/<sha1sum>/<version> -> demo/temp/media/<sha1sum>/<version>
+    - data_origin == "external":
+        * uses the engine endpoints to fetch media ZIPs and unpack them under demo/temp/media.
+    """
+
+    data_origin = _load_data_origin(base_dir)
+    temp_media_root = base_dir / "temp" / "media"
+    data_media_root = base_dir / "data" / "media"
+
+    video_dir_temp = temp_media_root / sha1sum
+    if not video_dir_temp.exists():
+        video_dir_temp.mkdir(parents=True, exist_ok=True)
+        if data_origin == "internal":
+            src_video = data_media_root / sha1sum / "video.mp4"
+            if src_video.exists():
+                shutil.copy2(src_video, video_dir_temp / "video.mp4")
+        else:
+            if api_client is not None:
+                try:
+                    resp = api_client.import_media(sha1sum)
+                    zip_bytes = resp.get("zip_bytes") if isinstance(resp, dict) else None
+                    if zip_bytes:
+                        _extract_zip_bytes(zip_bytes, video_dir_temp)
+                except Exception:
+                    pass
+
+    if not version:
+        return
+
+    version_dir_temp = video_dir_temp / version
+    if version_dir_temp.exists():
+        return
+
+    if data_origin == "internal":
+        src_version_dir = data_media_root / sha1sum / version
+        if src_version_dir.exists():
+            shutil.copytree(src_version_dir, version_dir_temp, dirs_exist_ok=True)
+    else:
+        if api_client is None:
+            return
+        try:
+            resp = api_client.import_media_version(sha1sum, version)
+            zip_bytes = resp.get("zip_bytes") if isinstance(resp, dict) else None
+            if zip_bytes:
+                _extract_zip_bytes(zip_bytes, version_dir_temp)
+        except Exception:
+            return
+
+
+def confirm_changes_and_logout(base_dir: Path, api_client, session_id: str) -> None:
+    """Confirms the current session's changes and persists them.
+
+    1) For every table in demo/temp/*.db that has a 'session' column whose value
+       equals session_id, INSERT statements are generated to apply the changes to
+       demo/data/*.db (internal mode) or sent to the update_databases endpoint
+       (external mode).
+
+    2) For the new videos registered in demo/temp/videos.db with session=session_id,
+       the corresponding demo/temp/media/<sha1sum> folders are inspected.
+       - internal: they are copied to demo/data/media/<sha1sum>.
+       - external: a ZIP with all those folders is built and sent to the
+         backend's export_media endpoint.
+    """
+
+    if not session_id:
+        return
+
+    data_origin = _load_data_origin(base_dir)
+    temp_dir = base_dir / "temp"
+    data_dir = base_dir / "data"
+
+    # --- 1) Synchronise tables that have a 'session' column ---
+    sql_statements: list[str] = []
+
+    for db_path in temp_dir.glob("*.db"):
+        target_db = data_dir / db_path.name
+        import sqlite3
+
+        with sqlite3.connect(str(db_path)) as src_conn:
+            src_conn.row_factory = sqlite3.Row
+            cur = src_conn.cursor()
+
+            # List user tables
+            cur.execute(
+                "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
+            )
+            tables = [r[0] for r in cur.fetchall()]
+
+            for table in tables:
+                # Check whether the table has a 'session' column
+                cur.execute(f"PRAGMA table_info({table})")
+                cols = cur.fetchall()
+                col_names = [c[1] for c in cols]
+                if "session" not in col_names:
+                    continue
+
+                # Rows belonging to the current session
+                cur.execute(f"SELECT * FROM {table} WHERE session = ?", (session_id,))
+                rows = cur.fetchall()
+                if not rows:
+                    continue
+
+                columns_sql = ", ".join(col_names)
+                placeholders = ", ".join(["?"] * len(col_names))
+                insert_sql = f"INSERT OR REPLACE INTO {table} ({columns_sql}) VALUES ({placeholders})"
+
+                for row in rows:
+                    values = [row[name] for name in col_names]
+                    if data_origin == "internal":
+                        # Apply directly to demo/data/*.db
+                        target_db.parent.mkdir(parents=True, exist_ok=True)
+                        with sqlite3.connect(str(target_db)) as dst_conn:
+                            dst_conn.execute(insert_sql, values)
+                            dst_conn.commit()
+                    else:
+                        # Store the statement + values as a plain SQL literal
+                        # (we assume the backend will parse or execute it directly).
+                        # For simplicity, use a basic safe interpolation.
+                        def _sql_literal(v):
+                            if v is None:
+                                return "NULL"
+                            if isinstance(v, (int, float)):
+                                return str(v)
+                            return "'" + str(v).replace("'", "''") + "'"
+
+                        values_sql = ", ".join(_sql_literal(v) for v in values)
+                        full_sql = f"INSERT OR REPLACE INTO {table} ({columns_sql}) VALUES ({values_sql});"
+                        sql_statements.append(full_sql)
+
+    if data_origin != "internal" and sql_statements and api_client is not None:
+        try:
+            api_client.update_databases({"statements": sql_statements})
+        except Exception:
+            pass
+
+    # --- 2) New videos in videos.db associated with the session ---
+    videos_db = temp_dir / "videos.db"
+    new_sha1s: set[str] = set()
+
+    try:
+        import sqlite3
+
+        with sqlite3.connect(str(videos_db)) as vconn:
+            vconn.row_factory = sqlite3.Row
+            cur = vconn.cursor()
+            # Only if the 'session' and 'sha1sum' columns exist
+            cur.execute("PRAGMA table_info(videos)")
+            cols = cur.fetchall()
+            col_names = [c[1] for c in cols]
+            if "session" in col_names and "sha1sum" in col_names:
+                cur.execute(
+                    "SELECT DISTINCT sha1sum FROM videos WHERE session = ?", (session_id,)
+                )
+                for r in cur.fetchall():
+                    if r["sha1sum"]:
+                        new_sha1s.add(str(r["sha1sum"]))
+    except Exception:
+        new_sha1s = set()
+
+    if not new_sha1s:
+        return
+
+    temp_media_root = temp_dir / "media"
+
+    if data_origin == "internal":
+        # Copy the new media folders to demo/data/media
+        data_media_root = data_dir / "media"
+        for sha1 in new_sha1s:
+            src_dir = temp_media_root / sha1
+            dst_dir = data_media_root / sha1
+            if src_dir.exists():
+                shutil.copytree(src_dir, dst_dir, dirs_exist_ok=True)
+    else:
+        # Build a ZIP with all the sha1sum folders and send it to export_media
+        if api_client is None:
+            return
+
+        import tempfile
+
+        try:
+            with tempfile.TemporaryDirectory() as tmpdir:
+                zip_path = Path(tmpdir) / "media_export.zip"
+                with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf:
+                    for sha1 in new_sha1s:
+                        src_dir = temp_media_root / sha1
+                        if not src_dir.exists():
+                            continue
+                        for root, _, files in os.walk(src_dir):
+                            root_path = Path(root)
+                            for fname in files:
+                                fpath = root_path / fname
+                                rel_path = fpath.relative_to(temp_media_root)
+                                zf.write(fpath, arcname=str(rel_path))
+
+                zip_bytes = zip_path.read_bytes()
+                api_client.export_media(zip_bytes)
+        except Exception:
+            return
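Taken together, the gate is driven at three points in a session. A minimal end-to-end sketch follows, assuming a config.yaml with a top-level data_origin key next to the app; the base directory, engine URL, sha1sum, version and session id are placeholders.

# Hedged end-to-end sketch of the persistent-data gate across one session.
# base_dir, the engine URL, the sha1sum/version and the session id are placeholders.
import uuid
from pathlib import Path

from api_client import APIClient
from persistent_data_gate import (
    ensure_temp_databases,
    ensure_media_for_video,
    confirm_changes_and_logout,
)

base_dir = Path("demo")                  # assumed app root containing config.yaml
api = APIClient("http://localhost:8000", use_mock=False,
                data_dir="demo/data", token=None, tts_url=None)
session_id = str(uuid.uuid4())

# Before login (app.py): make sure demo/temp/*.db exist, copied or imported.
ensure_temp_databases(base_dir, api)

# When a video and a version are selected (analysis page).
ensure_media_for_video(base_dir, api, "0" * 40)          # sha1sum placeholder
ensure_media_for_video(base_dir, api, "0" * 40, "v1")    # version placeholder

# On "Confirmar canvis i tancar sessió" (sidebar): persist rows and media tagged with this session.
confirm_changes_and_logout(base_dir, api, session_id)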