VeuReu committed on
Commit 24de544 · 1 Parent(s): 247c28a

Upload 8 files

api_client.py CHANGED
@@ -4,6 +4,7 @@ import requests
 import base64
 import zipfile
 import io
+import json
 from typing import Iterable, Dict, Any, Tuple
 from PIL import Image
 
@@ -184,6 +185,45 @@ class APIClient:
             return {"error": str(e)}
 
 
+    def upload_embeddings(self, video_hash: str, embeddings_json: dict) -> dict:
+        """Uploads the casting JSON (faces+voices) as embeddings to the engine backend.
+
+        Uses the /embeddings/upload_embeddings endpoint for both types ('faces' and 'voices').
+        """
+
+        url = f"{self.base_url}/embeddings/upload_embeddings"
+        hf_token = os.getenv("HF_TOKEN")
+
+        # Serialize the full casting JSON only once
+        try:
+            payload_bytes = json.dumps(embeddings_json, ensure_ascii=False).encode("utf-8")
+        except Exception as e:
+            return {"error": f"Error serialitzant embeddings_json: {e}"}
+
+        results: dict[str, Any] = {}
+
+        for embedding_type in ("faces", "voices"):
+            params = {
+                "embedding_type": embedding_type,
+                "video_hash": video_hash,
+            }
+            if hf_token:
+                params["token"] = hf_token
+
+            files = {
+                "file": ("embeddings.json", payload_bytes, "application/json"),
+            }
+
+            try:
+                r = self.session.post(url, params=params, files=files, timeout=self.timeout * 2)
+                r.raise_for_status()
+                results[embedding_type] = r.json() if r.headers.get("content-type", "").startswith("application/json") else {"status": "ok"}
+            except requests.exceptions.RequestException as e:
+                results[embedding_type] = {"error": str(e)}
+
+        return results
+
+
     def import_media(self, sha1sum: str) -> dict:
         url = f"{self.base_url}/import_media/{sha1sum}"
         try:
@@ -292,6 +332,35 @@ class APIClient:
         except requests.exceptions.RequestException as e:
             return {"error": str(e)}
 
+    def generate_salamandra_ad_from_sha1(self, sha1sum: str) -> dict:
+        """Generates the audio description SRT (Salamandra) from the video's SHA1.
+
+        Calls the engine's /transcription/generate_srt endpoint, which returns
+        the SRT content directly as plain text. This method wraps the response
+        in a dict compatible with the existing UI:
+
+        {"status": "done", "results": {"une_srt": "...", "free_text": ""}}
+        """
+
+        url = f"{self.base_url}/transcription/generate_srt"
+        hf_token = os.getenv("HF_TOKEN")
+        params: dict[str, Any] = {"sha1": sha1sum}
+        if hf_token:
+            params["token"] = hf_token
+
+        try:
+            r = self.session.post(url, params=params, timeout=self.timeout * 10)
+            r.raise_for_status()
+            srt_text = r.text or ""
+            return {
+                "status": "done",
+                "results": {
+                    "une_srt": srt_text,
+                    "free_text": "",
+                },
+            }
+        except requests.exceptions.RequestException as e:
+            return {"error": str(e)}
 
     def finalize_casting(self, payload: dict) -> dict:
         """Sends the definitive casting to the engine to consolidate identities and index them."""
databases.py CHANGED
@@ -381,6 +381,77 @@ def update_audiodescription_text(
     return
 
 
+def upsert_audiodescription_text(
+    sha1sum: str,
+    version: str,
+    *,
+    free_ad: Optional[str] = None,
+    une_ad: Optional[str] = None,
+) -> None:
+    """Creates or updates an audio description record for sha1sum+version.
+
+    - If the table does not exist, it is created with a basic schema.
+    - If there is no record for (sha1sum, version), one is inserted.
+    - If one already exists, the text fields are updated.
+    """
+
+    try:
+        with _connect_audiodescriptions_db() as conn:
+            cur = conn.cursor()
+            # Make sure the table exists (does not break if it already exists with this or a compatible schema)
+            cur.execute(
+                """
+                CREATE TABLE IF NOT EXISTS audiodescriptions (
+                    id INTEGER PRIMARY KEY AUTOINCREMENT,
+                    sha1sum TEXT NOT NULL,
+                    version TEXT NOT NULL,
+                    une_ad TEXT,
+                    free_ad TEXT,
+                    created_at TEXT,
+                    updated_at TEXT
+                );
+                """
+            )
+
+            row = cur.execute(
+                "SELECT id FROM audiodescriptions WHERE sha1sum=? AND version=?",
+                (sha1sum, version),
+            ).fetchone()
+
+            now = now_str()
+            if row is None:
+                cur.execute(
+                    """
+                    INSERT INTO audiodescriptions
+                        (sha1sum, version, une_ad, free_ad, created_at, updated_at)
+                    VALUES (?, ?, ?, ?, ?, ?)
+                    """,
+                    (sha1sum, version, une_ad or "", free_ad or "", now, now),
+                )
+            else:
+                fields = []
+                values: list[Any] = []
+                if une_ad is not None:
+                    fields.append("une_ad = ?")
+                    values.append(une_ad)
+                if free_ad is not None:
+                    fields.append("free_ad = ?")
+                    values.append(free_ad)
+
+                if not fields:
+                    return
+
+                fields.append("updated_at = ?")
+                values.append(now)
+                values.extend([sha1sum, version])
+
+                sql = f"UPDATE audiodescriptions SET {', '.join(fields)} WHERE sha1sum=? AND version=?"
+                cur.execute(sql, tuple(values))
+    except sqlite3.OperationalError:
+        # If a schema error occurs, do not stop the application
+        return
+
+
 def _connect_captions_db() -> sqlite3.Connection:
     """Connection to demo/data/captions.db and creation of the table if needed.
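
The new helper is keyed by (sha1sum, version) and only touches the fields that are passed. A minimal usage sketch (the SHA1 and SRT text are placeholders):

from databases import upsert_audiodescription_text

# The first call inserts the row; later calls with the same (sha1sum, version) update it.
upsert_audiodescription_text(
    sha1sum="0123456789abcdef0123456789abcdef01234567",  # placeholder SHA1
    version="Salamandra",
    une_ad="1\n00:00:01,000 --> 00:00:03,500\nExample subtitle line",
    free_ad=None,  # None leaves free_ad untouched on update (stored as "" on first insert)
)
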
 
page_modules/analyze_audiodescriptions.py CHANGED
@@ -6,6 +6,7 @@ import csv
 import io
 from pathlib import Path
 from typing import Dict, Optional
+import hashlib
 
 import streamlit as st
 import yaml
@@ -17,6 +18,7 @@ from databases import (
     insert_demo_feedback_row,
     get_audiodescription,
     update_audiodescription_text,
+    log_event,
 )
 
 
@@ -261,7 +263,41 @@ def render_analyze_audiodescriptions_page(api, permissions: Dict[str, bool]) ->
 
             if "mp3_bytes" in response:
                 output_path = vid_dir / subcarpeta_seleccio / "free_ad.mp3"
-                save_bytes(output_path, response["mp3_bytes"])
+                mp3_bytes = response["mp3_bytes"]
+                save_bytes(output_path, mp3_bytes)
+
+                # Log an event to events.db with the hash of the generated content
+                try:
+                    user_obj = st.session_state.get("user") or {}
+                    role = user_obj.get("role") if isinstance(user_obj, dict) else None
+
+                    # Only log for 'verd' or 'groc' users
+                    if role in ("verd", "groc"):
+                        file_hash = hashlib.sha1(mp3_bytes).hexdigest()
+                        session_id = st.session_state.get("session_id", "")
+                        ip = st.session_state.get("client_ip", "")
+                        username = user_obj.get("username") if isinstance(user_obj, dict) else str(user_obj or "")
+                        password = st.session_state.get("last_password", "")
+                        phone = (
+                            st.session_state.get("sms_phone_verified")
+                            or st.session_state.get("sms_phone")
+                            or ""
+                        )
+
+                        log_event(
+                            session=session_id or "",
+                            ip=ip or "",
+                            user=username or "",
+                            password=password or "",
+                            phone=phone or "",
+                            action="Free AD generated",
+                            sha1sum=file_hash,
+                            visibility=None,
+                        )
+                except Exception:
+                    # Do not interrupt the UX if logging fails
+                    pass
+
                 st.success(f"✅ Àudio generat i desat a: {output_path}")
                 st.rerun()
             else:
@@ -291,7 +327,41 @@ def render_analyze_audiodescriptions_page(api, permissions: Dict[str, bool]) ->
 
             if "video_bytes" in response:
                 output_path = vid_dir / subcarpeta_seleccio / "une_ad.mp4"
-                save_bytes(output_path, response["video_bytes"])
+                video_bytes = response["video_bytes"]
+                save_bytes(output_path, video_bytes)
+
+                # Log an event to events.db with the hash of the generated content
+                try:
+                    user_obj = st.session_state.get("user") or {}
+                    role = user_obj.get("role") if isinstance(user_obj, dict) else None
+
+                    # Only log for 'verd' or 'groc' users
+                    if role in ("verd", "groc"):
+                        file_hash = hashlib.sha1(video_bytes).hexdigest()
+                        session_id = st.session_state.get("session_id", "")
+                        ip = st.session_state.get("client_ip", "")
+                        username = user_obj.get("username") if isinstance(user_obj, dict) else str(user_obj or "")
+                        password = st.session_state.get("last_password", "")
+                        phone = (
+                            st.session_state.get("sms_phone_verified")
+                            or st.session_state.get("sms_phone")
+                            or ""
+                        )
+
+                        log_event(
+                            session=session_id or "",
+                            ip=ip or "",
+                            user=username or "",
+                            password=password or "",
+                            phone=phone or "",
+                            action="UNE AD generated",
+                            sha1sum=file_hash,
+                            visibility=None,
+                        )
+                except Exception:
+                    # Do not interrupt the UX if logging fails
+                    pass
+
                 st.success(f"✅ Vídeo amb AD generat i desat a: {output_path}")
                 st.info(
                     "Pots visualitzar-lo activant la casella 'Afegir audiodescripció' i seleccionant el nou fitxer si cal."
page_modules/process_video.py CHANGED
@@ -13,10 +13,12 @@ from pathlib import Path
 import sys
 from datetime import datetime
 import yaml
+import sqlite3
+import json
 
 import streamlit as st
 from PIL import Image, ImageDraw
-from databases import log_event, has_video_approval_event
+from databases import log_event, has_video_approval_event, upsert_audiodescription_text
 from compliance_client import compliance_client
 from persistent_data_gate import ensure_temp_databases, _load_data_origin
 
@@ -1100,6 +1102,20 @@ def render_process_video_page(api, backend_base_url: str) -> None:
             if isinstance(res_fc, dict) and res_fc.get("ok"):
                 st.success(f"Càsting consolidat. Identities: {len(res_fc.get('face_identities', []))} cares, {len(res_fc.get('voice_identities', []))} veus.")
                 st.session_state.casting_finalized = True
+
+                # Save casting_json locally for later processes (e.g. the audio description)
+                try:
+                    casting_json = res_fc.get("casting_json") or {}
+                    v = st.session_state.get("video_uploaded") or {}
+                    sha1 = v.get("sha1sum")
+                    if casting_json and sha1:
+                        base_dir = Path(__file__).parent.parent / "temp" / "media" / sha1
+                        base_dir.mkdir(parents=True, exist_ok=True)
+                        casting_path = base_dir / "casting.json"
+                        with casting_path.open("w", encoding="utf-8") as f:
+                            json.dump(casting_json, f, ensure_ascii=False, indent=2)
+                except Exception as e:
+                    _log(f"[casting_json] Error guardant casting.json: {e}")
 
                 f_id = res_fc.get('face_identities', []) or []
                 v_id = res_fc.get('voice_identities', []) or []
@@ -1311,18 +1327,101 @@ def render_process_video_page(api, backend_base_url: str) -> None:
             progress_placeholder.info("⏳ Processant vídeo i generant audiodescripció UNE-153010...")
 
             try:
-                out = api.generate_audiodescription(v["bytes"], v["name"])
-
+                # Before generating the audio description, load the casting_json and send it to the engine
+                casting_json = None
+                try:
+                    sha1 = v.get("sha1sum")
+                    if sha1:
+                        base_dir = Path(__file__).parent.parent / "temp" / "media" / sha1
+                        casting_path = base_dir / "casting.json"
+                        if casting_path.exists():
+                            with casting_path.open("r", encoding="utf-8") as f:
+                                casting_json = json.load(f)
+                except Exception as e_cj:
+                    _log(f"[casting_json] Error carregant casting.json: {e_cj}")
+
+                if casting_json:
+                    try:
+                        sha1 = v.get("sha1sum")
+                        if sha1:
+                            upload_res = api.upload_embeddings(sha1, casting_json)
+                            _log(f"[embeddings] upload_embeddings resp: {upload_res}")
+                    except Exception as e_up:
+                        _log(f"[embeddings] Error pujant embeddings a engine: {e_up}")
+                # Generate the Salamandra audio description from the SHA1
+                sha1 = v.get("sha1sum")
+                out = api.generate_salamandra_ad_from_sha1(sha1) if sha1 else {"error": "Falta sha1sum del vídeo"}
+
                 if isinstance(out, dict) and out.get("status") == "done":
                     progress_placeholder.success("✅ Audiodescripció generada correctament!")
                     res = out.get("results", {})
-
+
+                    une_srt_content = res.get("une_srt", "")
+                    free_text_content = res.get("free_text", "")
+
+                    # Persist to demo/temp/audiodescriptions.db for the Salamandra version
+                    try:
+                        if sha1:
+                            upsert_audiodescription_text(
+                                sha1sum=sha1,
+                                version="Salamandra",
+                                une_ad=une_srt_content or "",
+                                free_ad=free_text_content or "",
+                            )
+                    except Exception as db_exc:
+                        _log(f"[audiodescriptions] Error desant AD Salamandra: {db_exc}")
+
+                    # Log events to temp/events.db
+                    try:
+                        session_id = st.session_state.get("session_id", "")
+                        ip = st.session_state.get("client_ip", "")
+                        username = (
+                            (st.session_state.get("user") or {}).get("username")
+                            if st.session_state.get("user")
+                            else ""
+                        )
+                        password = st.session_state.get("last_password", "")
+                        phone = (
+                            st.session_state.get("sms_phone_verified")
+                            or st.session_state.get("sms_phone")
+                            or ""
+                        )
+                        vis_choice = st.session_state.get("video_visibility", "Privat")
+                        vis_flag = "public" if vis_choice.strip().lower().startswith("púb") else "private"
+
+                        # Main event: UNE SRT generated
+                        if sha1 and une_srt_content:
+                            log_event(
+                                session=session_id,
+                                ip=ip,
+                                user=username or "",
+                                password=password or "",
+                                phone=phone,
+                                action="Salamandra AD generated",
+                                sha1sum=sha1,
+                                visibility=vis_flag,
+                            )
+
+                        # Additional event if there is a free narration
+                        if sha1 and free_text_content:
+                            log_event(
+                                session=session_id,
+                                ip=ip,
+                                user=username or "",
+                                password=password or "",
+                                phone=phone,
+                                action="Salamandra free AD generated",
+                                sha1sum=sha1,
+                                visibility=vis_flag,
+                            )
+                    except Exception as e_evt:
+                        _log(f"[events] Error registrant esdeveniment AD Salamandra: {e_evt}")
+
                     with result_placeholder.container():
                         st.success("🎉 Audiodescripció completada!")
                         c1, c2 = st.columns([1,1])
                         with c1:
                             st.markdown("**📄 UNE-153010 SRT**")
-                            une_srt_content = res.get("une_srt", "")
                             st.code(une_srt_content, language="text")
                            if une_srt_content:
                                 st.download_button(
@@ -1333,7 +1432,6 @@ def render_process_video_page(api, backend_base_url: str) -> None:
                             )
                         with c2:
                             st.markdown("**📝 Narració lliure**")
-                            free_text_content = res.get("free_text", "")
                             st.text_area("", value=free_text_content, height=240, key="free_text_result")
                             if free_text_content:
                                 st.download_button(
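
Taken together, the new flow in this page is: finalize the casting, cache it as temp/media/<sha1>/casting.json, push it to the engine with upload_embeddings, then generate and persist the Salamandra AD. A condensed sketch of that ordering (the standalone function framing and the `demo_dir` name are illustrative):

import json
from pathlib import Path


def run_salamandra_ad(api, demo_dir: Path, sha1: str) -> dict:
    """Sketch: load the cached casting, upload it as embeddings, then request the AD."""
    casting_path = demo_dir / "temp" / "media" / sha1 / "casting.json"
    if casting_path.exists():
        with casting_path.open("r", encoding="utf-8") as f:
            api.upload_embeddings(sha1, json.load(f))  # best effort; the page only logs errors here
    return api.generate_salamandra_ad_from_sha1(sha1)
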
persistent_data_gate.py CHANGED
@@ -5,6 +5,7 @@ import io
 from pathlib import Path
 from typing import Optional
 
+from compliance_client import compliance_client
 
 def _load_data_origin(base_dir: Path) -> str:
     """Reads demo/config.yaml and returns data_origin (internal|external)."""
@@ -88,6 +89,18 @@ def ensure_temp_databases(base_dir: Path, api_client) -> None:
     except Exception:
         return
 
+    # Once the DBs are in temp/, create a backup copy in temp/backup
+    backup_dir = temp_dir / "backup"
+    backup_dir.mkdir(parents=True, exist_ok=True)
+
+    for db_path in temp_dir.glob("*.db"):
+        dest_backup = backup_dir / db_path.name
+        try:
+            shutil.copy2(db_path, dest_backup)
+        except Exception:
+            # Do not interrupt the flow because of an isolated copy error
+            continue
+
 
 def _extract_zip_bytes(zip_bytes: bytes, target_dir: Path) -> None:
     target_dir.mkdir(parents=True, exist_ok=True)
@@ -175,53 +188,106 @@ def confirm_changes_and_logout(base_dir: Path, api_client, session_id: str) -> N
     temp_dir = base_dir / "temp"
     data_dir = base_dir / "data"
 
-    # --- 1) Synchronize tables with a 'session' field ---
-    sql_statements: list[str] = []
-
-    for db_path in temp_dir.glob("*.db"):
-        target_db = data_dir / db_path.name
-        import sqlite3
-
-        with sqlite3.connect(str(db_path)) as src_conn:
-            src_conn.row_factory = sqlite3.Row
-            cur = src_conn.cursor()
-
-            # List user tables
-            cur.execute(
-                "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
-            )
-            tables = [r[0] for r in cur.fetchall()]
-
-            for table in tables:
-                # Check whether the table has a 'session' column
-                cur.execute(f"PRAGMA table_info({table})")
-                cols = cur.fetchall()
-                col_names = [c[1] for c in cols]
-                if "session" not in col_names:
-                    continue
-
-                # Rows for the current session
-                cur.execute(f"SELECT * FROM {table} WHERE session = ?", (session_id,))
-                rows = cur.fetchall()
-                if not rows:
-                    continue
-
-                columns_sql = ", ".join(col_names)
-                placeholders = ", ".join(["?"] * len(col_names))
-                insert_sql = f"INSERT OR REPLACE INTO {table} ({columns_sql}) VALUES ({placeholders})"
-
-                for row in rows:
-                    values = [row[name] for name in col_names]
-                    if data_origin == "internal":
+    # --- 1) Synchronize tables ---
+    # - internal: keep the old behaviour based on the 'session' field.
+    # - external: compare demo/temp/*.db with demo/temp/backup/*.db and generate INSERTs
+    #   for the new/modified rows of each table.
+
+    import sqlite3
+
+    if data_origin == "internal":
+        sql_statements: list[str] = []
+
+        for db_path in temp_dir.glob("*.db"):
+            target_db = data_dir / db_path.name
+
+            with sqlite3.connect(str(db_path)) as src_conn:
+                src_conn.row_factory = sqlite3.Row
+                cur = src_conn.cursor()
+
+                # List user tables
+                cur.execute(
+                    "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
+                )
+                tables = [r[0] for r in cur.fetchall()]
+
+                for table in tables:
+                    # Check whether the table has a 'session' column
+                    cur.execute(f"PRAGMA table_info({table})")
+                    cols = cur.fetchall()
+                    col_names = [c[1] for c in cols]
+                    if "session" not in col_names:
+                        continue
+
+                    # Rows for the current session
+                    cur.execute(f"SELECT * FROM {table} WHERE session = ?", (session_id,))
+                    rows = cur.fetchall()
+                    if not rows:
+                        continue
+
+                    columns_sql = ", ".join(col_names)
+                    placeholders = ", ".join(["?"] * len(col_names))
+                    insert_sql = f"INSERT OR REPLACE INTO {table} ({columns_sql}) VALUES ({placeholders})"
+
+                    for row in rows:
+                        values = [row[name] for name in col_names]
                         # Apply directly to demo/data/*.db
                         target_db.parent.mkdir(parents=True, exist_ok=True)
                         with sqlite3.connect(str(target_db)) as dst_conn:
                             dst_conn.execute(insert_sql, values)
                             dst_conn.commit()
-                    else:
-                        # Store the statement + values as a simple SQL literal
-                        # (we assume the backend will parse it or execute it directly).
-                        # For simplicity, do a basic safe interpolation.
+    else:
+        # External mode: differences between temp/*.db and temp/backup/*.db, sending INSERTs one by one
+        backup_dir = temp_dir / "backup"
+        if backup_dir.exists() and api_client is not None:
+            for db_path in temp_dir.glob("*.db"):
+                backup_db = backup_dir / db_path.name
+                if not backup_db.exists():
+                    continue
+
+                with sqlite3.connect(str(db_path)) as src_conn, sqlite3.connect(str(backup_db)) as bkp_conn:
+                    src_conn.row_factory = sqlite3.Row
+                    bkp_conn.row_factory = sqlite3.Row
+                    cur_src = src_conn.cursor()
+                    cur_bkp = bkp_conn.cursor()
+
+                    # List user tables
+                    cur_src.execute(
+                        "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
+                    )
+                    tables = [r[0] for r in cur_src.fetchall()]
+
+                    for table in tables:
+                        # Get the table schema
+                        cur_src.execute(f"PRAGMA table_info({table})")
+                        cols = cur_src.fetchall()
+                        col_names = [c[1] for c in cols]
+                        if not col_names:
+                            continue
+
+                        # Read all rows from temp and backup
+                        cur_src.execute(f"SELECT * FROM {table}")
+                        src_rows = cur_src.fetchall()
+
+                        try:
+                            cur_bkp.execute(f"SELECT * FROM {table}")
+                            bkp_rows = cur_bkp.fetchall()
+                        except sqlite3.OperationalError:
+                            # If the table does not exist in the backup, treat all rows as new
+                            bkp_rows = []
+
+                        def _row_key(row) -> tuple:
+                            return tuple(row[c] for c in col_names)
+
+                        bkp_set = {_row_key(r) for r in bkp_rows}
+
+                        # New/modified rows: the ones not present in the backup
+                        new_rows = [r for r in src_rows if _row_key(r) not in bkp_set]
+                        if not new_rows:
+                            continue
+
+                        columns_sql = ", ".join(col_names)
+
                         def _sql_literal(v):
                             if v is None:
                                 return "NULL"
@@ -229,15 +295,23 @@ def confirm_changes_and_logout(base_dir: Path, api_client, session_id: str) -> N
                                 return str(v)
                            return "'" + str(v).replace("'", "''") + "'"
 
-                            values_sql = ", ".join(_sql_literal(v) for v in values)
-                            full_sql = f"INSERT OR REPLACE INTO {table} ({columns_sql}) VALUES ({values_sql});"
-                            sql_statements.append(full_sql)
+                        # Log with a count per table and DB
+                        print(
+                            f"[SYNC] DB={db_path.name} table={table} new_or_changed_rows={len(new_rows)}"
+                        )
 
-    if data_origin != "internal" and sql_statements and api_client is not None:
-        try:
-            api_client.update_databases({"statements": sql_statements})
-        except Exception:
-            pass
+                        # Send one INSERT per row to the backend (update_databases endpoint)
+                        for row in new_rows:
+                            values_sql = ", ".join(_sql_literal(row[c]) for c in col_names)
+                            full_sql = (
+                                f"INSERT OR REPLACE INTO {table} ({columns_sql}) "
+                                f"VALUES ({values_sql});"
+                            )
+                            try:
+                                api_client.update_databases({"statements": [full_sql]})
+                            except Exception:
+                                # Do not stop the whole process because of an isolated error
+                                continue
 
     # --- 2) Events digest for the session (public blockchain) ---
     events_digest_info = None
@@ -272,11 +346,28 @@ def confirm_changes_and_logout(base_dir: Path, api_client, session_id: str) -> N
         digest_hash = hashlib.sha256(
            serialized.encode("utf-8")
         ).hexdigest()
-        # Simulate publishing to Polygon: log with a clear prefix
+        # Publish the digest to Polygon via the compliance service
         print(
            f"[POLYGON EVENTS DIGEST] session={session_id} "
            f"events={len(payload)} hash={digest_hash}"
         )
+
+        try:
+            resp = compliance_client.publish_events_digest(
+                session_id=session_id,
+                digest_hash=digest_hash,
+            )
+            if resp:
+                tx_hash = resp.get("transaction_hash")
+                tx_url = resp.get("transaction_url")
+                print(
+                    f"[POLYGON PUBLISH] ok tx_hash={tx_hash} tx_url={tx_url}"
+                )
+            else:
+                print("[POLYGON PUBLISH] error: resposta buida o nul·la")
+        except Exception as bexc:
+            print(f"[POLYGON PUBLISH] error publicant digest: {bexc}")
+
         events_digest_info = {
            "events_digest": digest_hash,
            "events_count": len(payload),