From c07ead0395b5337e4535346d559173c66e415947 Mon Sep 17 00:00:00 2001 From: AdWav Date: Wed, 22 Apr 2026 11:50:19 +0200 Subject: [PATCH 1/3] feat(sync): handle remote hard-delete events Add event-log synchronization and local cleanup logic so remote user and badge hard deletes are reflected before badge scans. Include local E2E tooling and regression tests to validate remote-delete behavior end-to-end. --- README.md | 4 + api_client.py | 117 ++- fake_rfid.py | 2 +- main.py | 39 + model.py | 166 ++++- run_local.ps1 | 25 + sql/createDB.sql | 2 + tests/E2E/README.md | 304 ++++++++ tests/E2E/logs/.gitkeep | 1 + tests/E2E/run_e2e_process.py | 667 ++++++++++++++++++ tests/E2E/seed_local_e2e_data.py | 89 +++ .../test_handle_deleted_remote_badge_mock.py | 129 ++++ .../test_integration_legacy_remote_delete.py | 116 +++ view.py | 7 + 14 files changed, 1652 insertions(+), 16 deletions(-) create mode 100644 run_local.ps1 create mode 100644 tests/E2E/README.md create mode 100644 tests/E2E/logs/.gitkeep create mode 100644 tests/E2E/run_e2e_process.py create mode 100644 tests/E2E/seed_local_e2e_data.py create mode 100644 tests/E2E/test_handle_deleted_remote_badge_mock.py create mode 100644 tests/E2E/test_integration_legacy_remote_delete.py diff --git a/README.md b/README.md index 3d683ef..1acbdb6 100644 --- a/README.md +++ b/README.md @@ -108,6 +108,10 @@ Lancer avec `./run.sh` ou `python main.py`. 
- **Historique** : 5 derniers pointages, détail par jour, heures semaine courante/précédente - **Fermer** : `Escape`, clic sur X, ou `Ctrl+C` dans le terminal +## Documentation E2E + +- Procédure complète, prérequis serveur et scénario de tests: [`tests/E2E/README.md`](tests/E2E/README.md) + ## Dépendances | Paquet | Usage | diff --git a/api_client.py b/api_client.py index 79ca331..da505e7 100644 --- a/api_client.py +++ b/api_client.py @@ -4,6 +4,7 @@ import hmac import json import urllib.error +import os from functools import reduce import sys from enum import Enum @@ -16,18 +17,19 @@ class Controller(Enum): LOGS = 'LogsAPI' BADGES = 'BadgesAPI' USERS = 'UsersAPI' + EVENT_LOGS = 'EventLogsAPI' class APIClient: def __init__(self) -> None: - local_test = False - if local_test: - self.base_url = 'http://localhost:8080' - else: - self.base_url = 'https://timbreuse.sectioninformatique.ch' + self.base_url = os.getenv( + 'TIMBREUSE_API_BASE_URL', + 'https://timbreuse.sectioninformatique.ch' + ).rstrip('/') @staticmethod def load_key() -> str: - with open('.key.json', 'r') as file: + key_path = os.getenv('TIMBREUSE_API_KEY_FILE', '.key.json') + with open(key_path, 'r', encoding='utf-8') as file: return json.load(file)['key'] def create_token(self, date, badge_id, inside) -> str: @@ -58,8 +60,38 @@ def send(self, url) -> tuple: return html_file, html_file.status except urllib.error.HTTPError as e: return None, str(e) - - + except urllib.error.URLError as e: + return None, str(e) + + @staticmethod + def _decode_response_body(html_file) -> str: + body = html_file.read() + if isinstance(body, bytes): + return body.decode('utf-8', errors='replace') + return str(body) + + def _parse_json_response(self, html_file, url: str): + body = self._decode_response_body(html_file).strip() + if body == '': + return [] + try: + return json.loads(body) + except json.JSONDecodeError: + # Certains endpoints peuvent prefixer la reponse par du bruit + # (warning PHP, HTML, saut de ligne, etc.). 
On tente d'extraire + # le premier JSON valide. + decoder = json.JSONDecoder() + for idx, ch in enumerate(body): + if ch not in ('{', '['): + continue + try: + parsed, _ = decoder.raw_decode(body[idx:]) + return parsed + except json.JSONDecodeError: + continue + preview = body[:240].replace('\n', '\\n') + raise ValueError(f"Reponse API non-JSON sur {url}. Apercu: {preview}") + def send_log(self, date, badge_id, inside) -> tuple: ''' >>> client_API = APIClient() @@ -90,8 +122,9 @@ def receive_logs(self, start_date) -> list[dict]: url = self.create_url_n(Controller.LOGS.value, Method.GET.value, arg) print(url, file=sys.stderr) html_file = self.send(url)[0] - - return json.loads(html_file.readline()) + if html_file is None: + return [] + return self._parse_json_response(html_file, url) def send_badge_and_user(self, badge_id:int, name:str, surname:str): ''' @@ -123,7 +156,9 @@ def receive_users(self, start_date) -> list[dict]: url = self.create_url_n(Controller.USERS.value, Method.GET.value, arg) print(url, file=sys.stderr) html_file = self.send(url)[0] - return json.loads(html_file.readline()) + if html_file is None: + return [] + return self._parse_json_response(html_file, url) def receive_badges(self, start_date): ''' @@ -140,7 +175,65 @@ def receive_badges(self, start_date): url = self.create_url_n(Controller.BADGES.value, Method.GET.value, arg) print(url, file=sys.stderr) html_file = self.send(url)[0] - return json.loads(html_file.readline()) + if html_file is None: + return [] + return self._parse_json_response(html_file, url) + + def receive_event_logs(self, start_date) -> list[dict]: + """ + Recupere les events serveur (ex: hard delete) depuis start_date. 
+ """ + print('receive_event_logs', file=sys.stderr) + token = self.create_token_args(start_date) + arg = self.create_arg_args(start_date, token) + url = self.create_url_n(Controller.EVENT_LOGS.value, Method.GET.value, arg) + print(url, file=sys.stderr) + html_file = self.send(url)[0] + if html_file is None: + return [] + return self._parse_json_response(html_file, url) + + @staticmethod + def is_not_deleted(remote_row: dict) -> bool: + """ + Interprete le champ date_delete renvoye par l'API distante. + """ + date_delete = remote_row.get('date_delete') + if date_delete is None: + return True + if isinstance(date_delete, str): + return date_delete.strip().lower() in ('', 'none', 'null') + return False + + def remote_user_exists_for_badge(self, badge_id: int) -> bool: + """ + Verifie en interrogeant l'API distante si le badge est encore + attribue a un utilisateur actif. + """ + try: + start_date = '1900-01-01 00:00:00' + users = self.receive_users(start_date) + badges = self.receive_badges(start_date) + except Exception as e: + # En cas d'indisponibilite reseau/API, on n'interrompt pas le + # pointage local. 
+ print('remote_user_exists_for_badge fallback:', e, file=sys.stderr) + return True + + active_user_ids = set() + for user in users: + if self.is_not_deleted(user): + active_user_ids.add(user.get('id_user')) + + for badge in badges: + if badge.get('id_badge') != badge_id: + continue + if not self.is_not_deleted(badge): + continue + id_user = badge.get('id_user') + if id_user is not None and id_user in active_user_ids: + return True + return False @staticmethod diff --git a/fake_rfid.py b/fake_rfid.py index 01a6a61..f71d48d 100644 --- a/fake_rfid.py +++ b/fake_rfid.py @@ -10,5 +10,5 @@ def read_pipe(self, pipe: dict) -> None: put a fake id in a dict in arg ''' sleep(1) - pipe['id_badge'] = 63 + pipe['id_badge'] = 42 print('fake_badge', file=sys.stderr) \ No newline at end of file diff --git a/main.py b/main.py index 5dc8c48..82ed94f 100644 --- a/main.py +++ b/main.py @@ -10,6 +10,7 @@ import fake_rfid as rfid import model import sys +import time class App: ''' @@ -38,6 +39,8 @@ def __init__(self): self.thread_receive_log = None self.thread_receive_users = None self.thread_receive_badges = None + self.thread_receive_event_logs = None + self.thread_apply_event_logs = None self.thread_delete_badges_and_users = None self.thread_synchronize_user_badge_log_with_remote = None self.thread_Invoke_synchronize_before_rfid = None @@ -65,6 +68,10 @@ def update(self): self.invoke_thread('thread_Invoke_synchronize_before_rfid', self.Invoke_synchronize_before_rfid) self.do_rfid() + if self.HAS_REMOTE_SERVER: + if self.handle_deleted_remote_badge(): + self.reset() + return if self.HAS_REMOTE_SERVER: self.invoke_thread('thread_Invoke_synchronize_after_rfid', self.Invoke_synchronize_after_rfid) @@ -239,6 +246,34 @@ def do_model_request(self): self.model.find_user_info, args=(self.pipe, )) print('end do_model_request', self.pipe, file=sys.stderr) + def wait_modal_ack(self): + while (not self.pipe.get('quit', False) + and self.view is not None + and self.view.current_scene == 'modal'): 
+ time.sleep(0.1) + + def handle_deleted_remote_badge(self) -> bool: + """ + Retourne True si une suppression distante est detectee et geree. + """ + check = self.model.check_badge_assignment_with_remote(self.pipe['id_badge']) + if check['remote_exists']: + return False + + if check['local_exists']: + self.model.remove_local_badge_correspondance(self.pipe['id_badge']) + user_name = check['local_user_name'] or 'cet utilisateur' + texts = [ + f"Ce badge n'est plus attribue a {user_name}.", + 'Cette correspondance est supprimee egalement sur cet appareil.', + ] + else: + texts = ["Ce badge n'est pas attribue a un utilisateur."] + + self.view.do_badge_sync_error(texts) + self.wait_modal_ack() + return True + def safe_is_alive(self, thread): try: print('tread is alive', thread.is_alive(), file=sys.stderr) @@ -290,6 +325,10 @@ def is_unknown(self): def get_threads_and_functions_list(self): threads_and_functions = list() + threads_and_functions.append(('thread_receive_event_logs', + self.model.invoke_receive_event_logs)) + threads_and_functions.append(('thread_apply_event_logs', + self.model.apply_event_logs)) threads_and_functions.append(('thread_send_badges_and_users', self.model.send_unsync_badges_and_users)) threads_and_functions.append(('thread_send_log', self.model.send_logs)) diff --git a/model.py b/model.py index fe30c7f..7e43649 100644 --- a/model.py +++ b/model.py @@ -16,7 +16,10 @@ def __init__(self, conn_params): def __del__(self): try: - self.connection.commit() + try: + self.cursor.close() + except Exception: + pass self.connection.close() except Exception: pass @@ -133,6 +136,48 @@ def find_user_info(self, pipe: dict) -> None: 'date_badge', 'date_modif', 'date_delete'), five_logs) # to refactory self.read_work_time(pipe) + + def get_local_badge_owner_name(self, badge_id: int) -> str: + """ + Retourne le nom local associe au badge, ou une chaine vide. 
+ """ + user_id = self.get_user_id_with_badge(badge_id) + if user_id in (None, -1): + return '' + try: + name, surname = self.get_usernames(user_id) + full_name = f'{surname} {name}'.strip() + return full_name + except Exception: + return '' + + def has_local_badge_correspondance(self, badge_id: int) -> bool: + user_id = self.get_user_id_with_badge(badge_id) + return user_id not in (None, -1) + + def remove_local_badge_correspondance(self, badge_id: int) -> None: + """ + Supprime localement la correspondance badge -> utilisateur. + """ + sql = ('UPDATE `badge_sync` SET `id_user`=NULL, `date_modif`=NOW(), ' + '`date_delete`=NOW() WHERE `id_badge`=?;') + self.execute_and_commit(sql, (badge_id, )) + sql = 'DELETE FROM `badge_write` WHERE `id_badge`=?;' + self.execute_and_commit(sql, (badge_id, )) + + def check_badge_assignment_with_remote(self, badge_id: int) -> dict: + """ + Controle si un badge est encore rattache a un utilisateur cote + fournisseur, puis retourne l'etat local. + """ + remote_exists = self.api_client.remote_user_exists_for_badge(badge_id) + local_exists = self.has_local_badge_correspondance(badge_id) + local_user_name = self.get_local_badge_owner_name(badge_id) + return { + 'remote_exists': remote_exists, + 'local_exists': local_exists, + 'local_user_name': local_user_name, + } @classmethod def is_deleted_log(cls, log): @@ -317,6 +362,25 @@ def get_last_updated_datetime(self, table:str): print('last_datetime = ', last_datetime, file=sys.stderr) return last_datetime + def get_last_updated_event_log_datetime(self): + """ + Derniere date_event traitee/connue localement pour event_log_sync. 
+ """ + print('get_last_updated_event_log_datetime', file=sys.stderr) + sql = 'SELECT MAX(`date_event`) FROM `event_log_sync`;' + connection = self.get_connection_auto_close() + cursor = connection.cursor + cursor.execute(sql) + data = self.cursor_to_list(cursor) + del connection + try: + last_datetime = data[0][0] + if last_datetime is None: + last_datetime = datetime.datetime.min + except Exception: + last_datetime = datetime.datetime.min + return last_datetime + def get_last_updated_log_datetime(self): ''' >>> model = Model() @@ -369,10 +433,106 @@ def invoke_receive_badges(self) -> None: # insert one per one in local. can be better self.call_insert_sync_badge(tuple(badge.values())) - def execute_and_commit(self, sql, value:tuple=()): + def upsert_event_log(self, event: dict) -> None: + sql = ( + 'INSERT INTO `event_log_sync` ' + '(`id_event`, `event_type`, `entity_type`, `entity_id`, `payload`, `date_event`, `processed`, `processed_at`) ' + 'VALUES (?, ?, ?, ?, ?, ?, 0, NULL) ' + 'ON DUPLICATE KEY UPDATE ' + '`event_type`=VALUES(`event_type`), ' + '`entity_type`=VALUES(`entity_type`), ' + '`entity_id`=VALUES(`entity_id`), ' + '`payload`=VALUES(`payload`), ' + '`date_event`=VALUES(`date_event`);' + ) + value = ( + event.get('id_event'), + event.get('event_type'), + event.get('entity_type'), + event.get('entity_id'), + event.get('payload'), + event.get('date_event'), + ) + self.execute_and_commit(sql, value) + + def invoke_receive_event_logs(self) -> None: + """ + Recupere les events serveur (hard deletes) et les stocke localement. 
+ """ + print('invoke_receive_event_logs', file=sys.stderr) + start_date = self.get_last_updated_event_log_datetime() + for event in self.api_client.receive_event_logs(start_date): + self.upsert_event_log(event) + + def list_unprocessed_event_logs(self) -> list: + sql = ( + 'SELECT `id_event`, `event_type`, `entity_type`, `entity_id`, `payload`, `date_event` ' + 'FROM `event_log_sync` WHERE `processed`=0 ORDER BY `date_event` ASC;' + ) connection = self.get_connection_auto_close() - connection.cursor.execute(sql, value) + cursor = connection.cursor + cursor.execute(sql) + rows = self.cursor_to_list(cursor) del connection + names = ('id_event', 'event_type', 'entity_type', 'entity_id', 'payload', 'date_event') + return self.cursor_to_dict_in_list(names, rows) + + def mark_event_log_processed(self, id_event: int) -> None: + sql = 'UPDATE `event_log_sync` SET `processed`=1, `processed_at`=NOW() WHERE `id_event`=?;' + self.execute_and_commit(sql, (id_event, )) + + def apply_hard_delete_user_local(self, user_id: int) -> None: + # Marque l'utilisateur supprime localement (equivalent soft delete local) + sql = 'UPDATE `user_sync` SET `date_delete`=NOW(), `date_modif`=NOW() WHERE `id_user`=?;' + self.execute_and_commit(sql, (user_id, )) + # Desaffecte les badges localement (ne supprime pas le badge) + sql = 'UPDATE `badge_sync` SET `id_user`=NULL, `date_modif`=NOW() WHERE `id_user`=?;' + self.execute_and_commit(sql, (user_id, )) + # Masque les logs de cet utilisateur + sql = 'UPDATE `log_sync` SET `date_delete`=NOW(), `date_modif`=NOW() WHERE `id_user`=?;' + self.execute_and_commit(sql, (user_id, )) + + def apply_hard_delete_badge_local(self, badge_id: int) -> None: + # Marque le badge comme supprime localement + sql = 'UPDATE `badge_sync` SET `id_user`=NULL, `date_delete`=NOW(), `date_modif`=NOW() WHERE `id_badge`=?;' + self.execute_and_commit(sql, (badge_id, )) + # Nettoie une eventuelle correspondance locale en write + sql = 'DELETE FROM `badge_write` WHERE 
`id_badge`=?;' + self.execute_and_commit(sql, (badge_id, )) + # Masque les logs associes au badge + sql = 'UPDATE `log_sync` SET `date_delete`=NOW(), `date_modif`=NOW() WHERE `id_badge`=?;' + self.execute_and_commit(sql, (badge_id, )) + + def apply_event_logs(self) -> None: + """ + Applique les events (hard delete) stockes localement. + """ + print('apply_event_logs', file=sys.stderr) + for event in self.list_unprocessed_event_logs(): + try: + if event.get('event_type') != 'hard_delete': + self.mark_event_log_processed(event['id_event']) + continue + entity_type = event.get('entity_type') + entity_id = event.get('entity_id') + if entity_type == 'user' and entity_id is not None: + self.apply_hard_delete_user_local(int(entity_id)) + elif entity_type == 'badge' and entity_id is not None: + self.apply_hard_delete_badge_local(int(entity_id)) + finally: + self.mark_event_log_processed(event['id_event']) + + def execute_and_commit(self, sql, value:tuple=()): + connection = mariadb.connect(**self.conn_params) + cursor = connection.cursor() + try: + cursor.execute(sql, value) + connection.commit() + finally: + try: + cursor.close() + finally: + connection.close() @staticmethod def cursor_to_dict_in_list(select_name: tuple, diff --git a/run_local.ps1 b/run_local.ps1 new file mode 100644 index 0000000..bb2d6e6 --- /dev/null +++ b/run_local.ps1 @@ -0,0 +1,25 @@ +param( + [string]$ApiBaseUrl = "http://localhost", + [string]$KeyFile = ".key.json" +) + +$ErrorActionPreference = "Stop" + +if (-not (Test-Path -Path ".\.venv\Scripts\Activate.ps1")) { + Write-Error "Environnement virtuel introuvable: .\.venv\Scripts\Activate.ps1" +} + +if (-not (Test-Path -Path $KeyFile)) { + Write-Error "Fichier de cle introuvable: $KeyFile" +} + +. 
.\.venv\Scripts\Activate.ps1 + +$env:TIMBREUSE_API_BASE_URL = $ApiBaseUrl +$env:TIMBREUSE_API_KEY_FILE = $KeyFile + +Write-Host "TIMBREUSE_API_BASE_URL=$($env:TIMBREUSE_API_BASE_URL)" +Write-Host "TIMBREUSE_API_KEY_FILE=$($env:TIMBREUSE_API_KEY_FILE)" +Write-Host "Lancement de Timbreuse..." + +python .\main.py diff --git a/sql/createDB.sql b/sql/createDB.sql index 14fa627..5c1fd68 100644 --- a/sql/createDB.sql +++ b/sql/createDB.sql @@ -6,6 +6,8 @@ CREATE TABLE `badge_sync` ( `id_badge` bigint(20) NOT NULL, `id_user` int(11), ` CREATE TABLE `badge_write` ( `id_badge` bigint(20) NOT NULL, `id_user` int(11) NOT NULL, PRIMARY KEY (`id_badge`)) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE = utf8mb4_unicode_ci; CREATE TABLE `log_sync` ( `date` datetime NOT NULL, `id_badge` bigint(20), `inside` tinyint(1) NOT NULL, `id_log` int(11) NOT NULL, `id_user` int(11) NOT NULL, `date_badge` datetime, `date_modif` datetime NOT NULL, `date_delete` datetime, PRIMARY KEY (`id_log`), FOREIGN KEY (`id_badge`) REFERENCES `badge_sync` (`id_badge`), FOREIGN KEY (`id_user`) REFERENCES `user_sync` (`id_user`)) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE = utf8mb4_unicode_ci; CREATE TABLE `log_write` ( `date` datetime NOT NULL DEFAULT NOW(), `id_badge` bigint(20) NOT NULL, `inside` tinyint(1) NOT NULL, `id_log` int(11) NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id_log`)) ENGINE = InnoDB AUTO_INCREMENT=1073741824 DEFAULT CHARSET = utf8mb4 COLLATE = utf8mb4_unicode_ci; +CREATE TABLE `event_type` ( `id` int(11) NOT NULL AUTO_INCREMENT, `type` varchar(32) NOT NULL, `user_sync_id` int(11), `badge_number` bigint(20), `created_at` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), KEY `idx_event_type_created_at` (`created_at`), KEY `idx_event_type_type` (`type`), KEY `idx_event_type_user_sync_id` (`user_sync_id`), KEY `idx_event_type_badge_number` (`badge_number`)) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE = utf8mb4_unicode_ci; +CREATE TABLE `event_log_sync` ( 
`id_event` int(11) NOT NULL, `event_type` varchar(32) NOT NULL, `entity_type` varchar(32) NOT NULL, `entity_id` bigint(20) NOT NULL, `payload` text COLLATE utf8mb4_unicode_ci, `date_event` datetime NOT NULL, `processed` tinyint(1) NOT NULL DEFAULT 0, `processed_at` datetime, PRIMARY KEY (`id_event`), KEY `idx_event_log_sync_date_event` (`date_event`)) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE = utf8mb4_unicode_ci; DROP VIEW IF EXISTS `user`; CREATE VIEW `user` AS SELECT `id_user`, `name`, `surname`, `date_modif`, `date_delete` FROM `user_sync` UNION SELECT `id_user`, `name`, `surname`, NULL, NULL FROM `user_write` WHERE (`name`, `surname`) NOT IN ( SELECT `name`, `surname` FROM `user_sync`) ORDER BY `NAME`; DROP VIEW IF EXISTS `badge`; diff --git a/tests/E2E/README.md b/tests/E2E/README.md new file mode 100644 index 0000000..2ca35ad --- /dev/null +++ b/tests/E2E/README.md @@ -0,0 +1,304 @@ +# E2E - Point d'entree unique + +Ce document est le point d'entree unique pour le contexte et le scenario E2E. + +## Scripts E2E (ce dossier) + +- `run_e2e_process.py`: orchestration complete du scenario (fixtures serveur, sync, scans, soft/hard delete, verifs). +- `seed_local_e2e_data.py`: seed local ciblé pour la DB timbreuse2022. +- `test_handle_deleted_remote_badge_mock.py`: test unitaire rapide/stable (sans Docker ni DB). +- `test_integration_legacy_remote_delete.py`: test d'integration legacy (Docker + DB/API), plus sensible a l'environnement. +- `logs/`: sorties de tests E2E (`E2E_logs.txt`). + +## Contexte fusionne + +Ce workspace contient deux projets distincts mais complementaires : + +- `..\Timbreuse\` +- `..\timbreuse-srv\` + +### Projet Timbreuse (client) + +Application Python de pointage RFID pour l'entreprise. + +Composants principaux : +- interface tactile (Pygame), +- base locale MariaDB, +- lecteur RFID, +- synchronisation avec l'API serveur. 
+ +But fonctionnel : +- enregistrer les entrees/sorties des utilisateurs, +- calculer automatiquement les heures de travail. + +### Projet timbreuse-srv (serveur) + +Backend web CodeIgniter 4 (PHP) + MariaDB. + +Role principal : +- gerer les utilisateurs et leurs droits, +- piloter la logique metier de pointage (badge, logs, synchronisation), +- exposer les routes API/admin consommees par l'application client, +- centraliser le modele de donnees. + +`timbreuse-srv` est la source de verite cote serveur. + +### Objectif + +Mettre en place un test d'integration de bout en bout reliant les deux projets dans un environnement de test unique. + +Objectifs de validation : +- simuler une activite de timbreuse (scan badge + interactions), +- verifier la synchronisation des donnees vers `timbreuse-srv`, +- supprimer l'utilisateur cote serveur, +- scanner a nouveau le badge et valider le comportement client. + +### Comportement attendu apres suppression distante + +Lors du scan suivant, l'application cliente doit utiliser la logique de `handle_deleted_remote_badge()` dans `main.py` : + +- si la correspondance distante n'existe plus mais existe localement : + - suppression de la correspondance locale, + - message indiquant que le badge n'est plus attribue a l'utilisateur, + - message confirmant la suppression locale. +- si la correspondance n'existe ni a distance ni localement : + - message "Ce badge n'est pas attribue a un utilisateur." + +Puis l'application attend l'acquittement du modal via `wait_modal_ack()`. + +## Synopsis / Genese (fusion de Bug_fixing.md) + +### Probleme observe + +Quand un element est supprime definitivement cote serveur (ex: user ou badge), la suppression n'etait pas toujours repercutee cote timbreuse locale. + +### Pourquoi le bug arrivait + +- La sync historique etait principalement basee sur des snapshots incrementaux (`date_modif >= startDate`). +- Ce mode couvre bien create/update et les soft deletes (ligne encore presente avec `date_delete`). 
+- En hard delete, la ligne n'existe plus dans la table source -> aucun "signal de suppression" n'est transmis. +- Le client conserve donc une correspondance locale obsolete faute d'evenement explicite. + +### Direction de correction + +- Passer d'une logique "snapshot/upsert" a une logique "journal d'evenements" pour les suppressions. +- Cote serveur: ecrire des events de type suppression (ici `hard_delete`) et les exposer via API. +- Cote client: importer ces events, les appliquer localement de facon idempotente, puis marquer `processed=1`. + +### Resultat cible valide par ce dossier E2E + +- Generation d'evenements de suppression cote `timbreuse-srv`. +- Transmission vers Timbreuse. +- Application locale des suppressions. +- Verification fonctionnelle via scans et verification technique via `event_log_sync.processed=1`. + +## Processus E2E fusionne + +### 1. Implementation dans ci4 de timbreuse-srv + +**Date 30 mars 2026** + +- Ajout des badges (3 badges) numero de badge factice a generer + - Badge 1 utilise jusqu'au 3 avril 2026 pour les 8 premiers logs du user 1 + - Badge 2 non attribue pour le moment + - Badge 3 utilise pour le user 2 +- Creation des users associes (3 users) + - User 1 : associe au badge 1 + - User 2 : associe au badge 3 jusqu'au 31 mars + - User 3 : absent de la DB au debut +- Creation des logs + - 8 pour User 1 (badge 1) + - 6 pour User 2 (badge 3) + - aucun pour User 3 + +### 2. Phase de test pre-suppression sur la Timbreuse + +Chaque processus affiche des donnees et des codes de succes/erreur. Les donnees sont stockees dans `tests/E2E/logs/E2E_logs.txt`. + +**Date 1er avril 2026** + +- Scan badge 1 -> affichage des logs du user 1 +- Scan badge 2 -> message badge non attribue +- Scan badge 3 -> affichage uniquement des logs du user 2 + +### 3. 
Action de suppression de donnees dans timbreuse-srv + +Regles/contraintes : + +- Le badge 1 doit etre supprime dans l'intervalle de non-utilisation des badges 1 et 2 +- Le user 2 doit etre supprime de la DB +- Deux cycles: + - cycle soft delete + - cycle hard delete + +### 4. Phase de test post-suppression + +#### 4.1 L'entre 2 cas (actions 4/5 avril) + +- Badges : + - Badge 1 : suppression complete DB -> hard delete -> envoi event_log vers timbreuse + - Badge 2 : attribue a user 1 + - Badge 3 : attribue a user 3 +- Users : + - soft delete du user 2 +- Logs : + - ajout de 2 logs user 1 (badge 2, 6 avril) + - ajout de 4 logs user 3 (badge 3, 6 avril) + +#### 4.2 Cas soft delete (date 3 avril) + +- Scan badge 1 -> erreur "badge plus attribue", logs user 1 visibles +- Scan badge 2 -> erreur "badge non attribue" +- Scan badge 3 -> erreur "badge plus attribue", logs user 2 non visibles + +#### 4.3 Cas hard delete (date 6 avril) + +- Scan badge 1 -> erreur "badge plus attribue", logs user 1 visibles +- Scan badge 2 -> affichage de l'ensemble des logs user 1 +- Scan badge 3 -> affichage de l'ensemble des logs user 3 + +#### 4.4 Clarification sequence 5 etapes + +- Etape 1 : hard delete de `badge 1` uniquement +- Etape 2 : soft delete de `user 2` et reattribution de `badge 3` a `user 3` +- Etape 3 : scan `badge 3` -> seuls les logs de `user 3` apparaissent +- Etape 4 : hard delete de `user 2` +- Etape 5 : nouveau scan `badge 3` -> seuls les logs de `user 3` apparaissent + +#### 4.5 Clarification verification technique obligatoire + +- verifier qu'un event `hard_delete` est cree dans `event_type` cote serveur +- verifier que l'event est recu cote timbreuse dans `event_log_sync` +- verifier que l'event est traite puis passe a `processed=1` + +## Journal E2E + +Le journal principal est ecrit dans: + +- `tests/E2E/logs/E2E_logs.txt` + +## Contenu fusionne depuis Correctif_E2E.md + +### A. 
Points clarifies (reproductibilite) + +- IDs fixes badges: `900000000001`, `900000000002`, `900000000003`. +- Naming convention comptes web: `e2e__u<1|2|3>` (minuscules/chiffres/_). +- Soft delete: ligne conservee avec `date_delete` non NULL. +- Hard delete: suppression physique ciblee (`badge_sync`, `user_sync`) selon le scenario. +- Contrat event logs attendu par Timbreuse: + - `id_event`, `event_type`, `entity_type`, `entity_id`, `payload`, `date_event`. +- Verification technique obligatoire: + - import event dans `event_log_sync`, + - traitement local puis `processed=1`. + +### B. Procedure pratique (copier-coller) + +Pre-requis obligatoire avant toute execution E2E: + +- L'environnement `timbreuse-srv` doit etre installe de bout en bout et pleinement operationnel (containers Docker demarres, DB accessible, migrations appliquees, utilisateur de test cree). +- Sans cette mise en place complete, le scenario E2E ne peut pas etre execute de facon fiable. + +```powershell +cd ..\timbreuse-srv +docker compose up -d +docker exec -i timbreuse-srv-mariadb-1 mariadb -uroot -proot -e "DROP DATABASE IF EXISTS ci4; CREATE DATABASE ci4 CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;" +docker exec -i timbreuse-srv-apache-1 php spark migrate --all +docker exec -i timbreuse-srv-apache-1 php tests/tools/create_test_timbreuse_user.php +``` + +Puis lancer le scenario: + +```powershell +cd ..\Timbreuse +$env:TIMBREUSE_API_BASE_URL="http://localhost" +$env:TIMBREUSE_API_KEY_FILE=".key.json" +.\.venv\Scripts\python.exe tests/E2E/run_e2e_process.py +``` + +Modes d'execution assertions SQL: + +```powershell +# Mode strict (defaut): stop au 1er FAIL +.\.venv\Scripts\python.exe tests/E2E/run_e2e_process.py --strict + +# Mode non-strict: continue, puis FAIL global en fin d'execution s'il y a des erreurs +.\.venv\Scripts\python.exe tests/E2E/run_e2e_process.py --non-strict +``` + +Par defaut, `run_e2e_process.py` applique automatiquement un profil local Docker: + +- 
`TIMBREUSE_API_BASE_URL=http://localhost` +- `TIMBREUSE_API_KEY_FILE=.key.json` +- `E2E_SERVER_DB=ci4` +- `E2E_SERVER_DB_USER=root` +- `E2E_SERVER_DB_PASSWORD=root` +- `E2E_SERVER_DB_HOST=127.0.0.1` +- `E2E_SERVER_DB_PORT=3307` + +Tu peux toujours surcharger ces valeurs via des variables d'environnement avant l'execution si besoin. + +Tests cibles: + +```powershell +# Test unitaire (rapide) +.\.venv\Scripts\python.exe tests/E2E/test_handle_deleted_remote_badge_mock.py + +# Test integration legacy (environnement Docker requis) +.\.venv\Scripts\python.exe tests/E2E/test_integration_legacy_remote_delete.py --username test_timbreuse +``` + +### C. Critere PASS minimal + +- Le scenario va jusqu'a la fin sans exception. +- Les scans attendus correspondent au processus defini dans ce README. +- Les hard delete generent des events et le traitement local positionne `processed=1`. + +## Validation objective SQL (recommandee) + +Le script `run_e2e_process.py` ecrit maintenant des snapshots SQL serveur/local dans: + +- `tests/E2E/logs/E2E_logs.txt` + +Format: + +- `Snapshot DB [] | {...json...}` + +Etapes journalisees: + +- `apres_fixture_et_sync_initiale` +- `apres_etape1_hard_delete_badge1` +- `apres_etape2_soft_delete_user2_reattrib_badge3` +- `apres_etape4_hard_delete_user2` + +Champs verifies (serveur + local): + +- existence/suppression badge 1 +- attribution du badge 3 +- presence/soft delete user 2 +- events `hard_delete` (serveur `event_type`) +- reception + traitement `processed=1` (local `event_log_sync`) + +Cette trace permet de valider les etapes E2E avec des faits SQL, en plus du comportement fonctionnel au scan. 
+ +## Assertions PASS/FAIL et CI + +`run_e2e_process.py` journalise maintenant des lignes explicites: + +- `PASS [] ...` +- `FAIL [] ...` +- `E2E PASS - toutes les assertions sont valides.` +- `E2E FAIL - ...` + +Comportement: + +- en `--strict` (defaut): arret immediat au premier `FAIL` +- en `--non-strict`: le script poursuit toutes les etapes, puis renvoie un `FAIL` global en fin de scenario s'il y a au moins une assertion en echec + +Impact CI: + +- code retour `0` si toutes les assertions passent +- code retour non-zero si au moins une assertion echoue ou si une exception survient + +## Note architecture + +Les mecanismes de resilience specifiques E2E (ex: retry sur `Lock wait timeout`) sont scopes au script `run_e2e_process.py` via une classe dediee (`E2EModel`), afin de ne pas imposer de comportement de test au runtime applicatif standard. diff --git a/tests/E2E/logs/.gitkeep b/tests/E2E/logs/.gitkeep new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/E2E/logs/.gitkeep @@ -0,0 +1 @@ + diff --git a/tests/E2E/run_e2e_process.py b/tests/E2E/run_e2e_process.py new file mode 100644 index 0000000..1494389 --- /dev/null +++ b/tests/E2E/run_e2e_process.py @@ -0,0 +1,667 @@ +#!/usr/bin/env python +from __future__ import annotations + +import datetime as dt +import json +import os +import argparse +import sys +import time +from pathlib import Path +from typing import Any + +import mariadb + +PROJECT_ROOT = Path(__file__).resolve().parents[2] +if str(PROJECT_ROOT) not in sys.path: + sys.path.insert(0, str(PROJECT_ROOT)) + +from model import Model # noqa: E402 + +SERVER_DB = os.getenv("E2E_SERVER_DB", "ci4") +LOCAL_DB = os.getenv("E2E_LOCAL_DB", "timbreuse2022") +LOG_FILE = Path(__file__).resolve().parent / "logs" / "E2E_logs.txt" + +BADGE_1 = 910000000001 +BADGE_2 = 910000000002 +BADGE_3 = 910000000003 +ROWID_BADGE_1 = 9910001 +ROWID_BADGE_2 = 9910002 +ROWID_BADGE_3 = 9910003 +USER_1 = 4 +USER_2 = 5 +USER_3 = 6 +BASE_DATE = dt.datetime(2026, 3, 30, 
8, 0, 0) + + +class E2EModel(Model): + def execute_and_commit(self, sql, value: tuple = ()): + max_attempts = 4 + for attempt in range(1, max_attempts + 1): + try: + return super().execute_and_commit(sql, value) + except mariadb.OperationalError as exc: + message = str(exc) + if "Lock wait timeout exceeded" in message and attempt < max_attempts: + log_line(f"Retry DB lock timeout (tentative {attempt}/{max_attempts - 1})") + time.sleep(attempt) + continue + raise + + +def apply_default_test_env() -> None: + # Defaults for local Docker-based E2E runs; still overridable via environment. + os.environ.setdefault("TIMBREUSE_API_BASE_URL", "http://localhost") + os.environ.setdefault("TIMBREUSE_API_KEY_FILE", ".key.json") + os.environ.setdefault("E2E_SERVER_DB", "ci4") + os.environ.setdefault("E2E_SERVER_DB_USER", "root") + os.environ.setdefault("E2E_SERVER_DB_PASSWORD", "root") + os.environ.setdefault("E2E_SERVER_DB_HOST", "127.0.0.1") + os.environ.setdefault("E2E_SERVER_DB_PORT", "3307") + + +def _int_env(name: str, default: int) -> int: + try: + return int(os.getenv(name, str(default))) + except ValueError: + return default + + +def server_db_params() -> dict[str, Any]: + return { + "user": os.getenv("E2E_SERVER_DB_USER", "root"), + "password": os.getenv("E2E_SERVER_DB_PASSWORD", "root"), + "host": os.getenv("E2E_SERVER_DB_HOST", "127.0.0.1"), + "port": _int_env("E2E_SERVER_DB_PORT", 3307), + "database": os.getenv("E2E_SERVER_DB", SERVER_DB), + } + + +def local_db_params() -> dict[str, Any]: + return { + "user": os.getenv("E2E_LOCAL_DB_USER", "root"), + "password": os.getenv("E2E_LOCAL_DB_PASSWORD", "" if os.name == "nt" else "0"), + "host": os.getenv("E2E_LOCAL_DB_HOST", "127.0.0.1" if os.name == "nt" else "localhost"), + "port": _int_env("E2E_LOCAL_DB_PORT", 3306), + "database": os.getenv("E2E_LOCAL_DB", LOCAL_DB), + } + + +def log_line(text: str) -> None: + timestamp = dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + line = f"[{timestamp}] {text}" + print(line) + 
LOG_FILE.parent.mkdir(parents=True, exist_ok=True) + with open(LOG_FILE, "a", encoding="utf-8") as fh: + fh.write(line + "\n") + + +def reset_log_file() -> None: + LOG_FILE.parent.mkdir(parents=True, exist_ok=True) + with open(LOG_FILE, "w", encoding="utf-8") as fh: + fh.write("E2E Timbreuse logs\n==================\n") + fh.write(f"server_db={SERVER_DB}, local_db={LOCAL_DB}\n\n") + + +def server_conn() -> mariadb.Connection: + return mariadb.connect(**server_db_params()) + + +def local_conn() -> mariadb.Connection: + return mariadb.connect(**local_db_params()) + + +def _table_exists(conn: mariadb.Connection, table: str) -> bool: + cur = conn.cursor() + try: + cur.execute("SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = ?", (table,)) + row = cur.fetchone() + return bool(row and int(row[0]) > 0) + finally: + cur.close() + + +def _ensure_server_event_type_table(conn: mariadb.Connection) -> None: + cur = conn.cursor() + try: + cur.execute( + "CREATE TABLE IF NOT EXISTS event_type (" + "id INT(11) NOT NULL AUTO_INCREMENT, " + "type VARCHAR(32) NOT NULL, " + "user_sync_id INT(11), " + "badge_number BIGINT(20), " + "created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, " + "PRIMARY KEY (id), " + "KEY idx_event_type_created_at (created_at), " + "KEY idx_event_type_type (type), " + "KEY idx_event_type_user_sync_id (user_sync_id), " + "KEY idx_event_type_badge_number (badge_number)" + ") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci" + ) + conn.commit() + finally: + cur.close() + + +def _ensure_local_event_log_sync_table(conn: mariadb.Connection) -> None: + cur = conn.cursor() + try: + cur.execute( + "CREATE TABLE IF NOT EXISTS event_log_sync (" + "id_event INT(11) NOT NULL, " + "event_type VARCHAR(32) NOT NULL, " + "entity_type VARCHAR(32) NOT NULL, " + "entity_id BIGINT(20) NOT NULL, " + "payload TEXT, " + "date_event DATETIME NOT NULL, " + "processed TINYINT(1) NOT NULL DEFAULT 0, " + "processed_at 
DATETIME, " + "PRIMARY KEY (id_event), " + "KEY idx_event_log_sync_date_event (date_event)" + ") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci" + ) + conn.commit() + finally: + cur.close() + + +def ensure_e2e_schema_prerequisites() -> None: + server = server_conn() + local = local_conn() + try: + _ensure_server_event_type_table(server) + _ensure_local_event_log_sync_table(local) + log_line("Schema E2E verifie: event_type (serveur) et event_log_sync (local).") + finally: + local.close() + server.close() + + +def assert_required_tables() -> None: + server_required = ["user_sync", "badge_sync", "log_sync", "event_type"] + local_required = ["user_sync", "badge_sync", "log_sync", "badge_write", "event_log_sync"] + server = server_conn() + local = local_conn() + try: + missing_server = [table for table in server_required if not _table_exists(server, table)] + missing_local = [table for table in local_required if not _table_exists(local, table)] + finally: + local.close() + server.close() + + if not missing_server and not missing_local: + return + + details: list[str] = [] + if missing_server: + details.append(f"serveur={', '.join(missing_server)}") + if missing_local: + details.append(f"local={', '.join(missing_local)}") + joined = " | ".join(details) + raise RuntimeError( + "Schema incomplet pour E2E: " + f"{joined}. Verifie les migrations/creation de DB avant de relancer." 
+    )
+
+
+def _reset_tables(conn: mariadb.Connection, tables: list[str], label: str) -> None:
+    # Vide les tables listees (DELETE, pas TRUNCATE) en desactivant temporairement
+    # les contraintes FK. Le re-enable est fait en `finally` pour que la session ne
+    # reste jamais avec FOREIGN_KEY_CHECKS=0 si une suppression echoue.
+    cur = conn.cursor()
+    deleted_tables: list[str] = []
+    skipped_tables: list[str] = []
+    try:
+        cur.execute("START TRANSACTION")
+        cur.execute("SET FOREIGN_KEY_CHECKS=0")
+        for table in tables:
+            if _table_exists(conn, table):
+                cur.execute(f"DELETE FROM {table}")
+                deleted_tables.append(table)
+            else:
+                skipped_tables.append(table)
+        conn.commit()
+        if deleted_tables:
+            log_line(f"Reset DB {label} (DELETE): {', '.join(deleted_tables)}")
+        if skipped_tables:
+            log_line(f"Reset DB {label} (SKIP absentes): {', '.join(skipped_tables)}")
+    except Exception:
+        conn.rollback()
+        raise
+    finally:
+        # Toujours restaurer les verifications FK (variable de session), meme en erreur.
+        cur.execute("SET FOREIGN_KEY_CHECKS=1")
+        cur.close()
+
+
+def reset_e2e_databases() -> None:
+    server = server_conn()
+    local = local_conn()
+    try:
+        # Server: uniquement les tables necessaires au scenario.
+        _reset_tables(server, ["event_type", "log_sync", "badge_sync", "user_sync"], "serveur")
+        # Local: etat synchronise + file d'evenements + ecriture badge.
+        _reset_tables(local, ["event_log_sync", "log_sync", "badge_write", "badge_sync", "user_sync"], "local")
+    finally:
+        local.close()
+        server.close()
+
+
+def _fetch_count(conn: mariadb.Connection, query: str, params: tuple[Any, ...] = ()) -> int:
+    cur = conn.cursor()
+    try:
+        cur.execute(query, params)
+        row = cur.fetchone()
+        return int(row[0]) if row else 0
+    finally:
+        cur.close()
+
+
+def _fetch_value(conn: mariadb.Connection, query: str, params: tuple[Any, ...]
= ()) -> Any: + cur = conn.cursor() + try: + cur.execute(query, params) + row = cur.fetchone() + return row[0] if row else None + finally: + cur.close() + + +def _value_at_path(data: dict[str, Any], path: str) -> Any: + current: Any = data + for part in path.split("."): + if not isinstance(current, dict) or part not in current: + return None + current = current[part] + return current + + +def assert_checks( + tag: str, snapshot: dict[str, Any], checks: list[tuple[str, str, Any]], strict: bool, failures: list[str] +) -> None: + for path, operator, expected in checks: + actual = _value_at_path(snapshot, path) + if operator == "==": + ok = actual == expected + elif operator == ">=": + ok = actual is not None and actual >= expected + else: + raise ValueError(f"Operateur non supporte: {operator}") + + if ok: + log_line(f"PASS [{tag}] {path} {operator} {expected} (actual={actual})") + continue + + message = f"FAIL [{tag}] {path} {operator} {expected} (actual={actual})" + log_line(message) + failures.append(message) + if strict: + raise AssertionError(message) + + +def db_snapshot(tag: str) -> dict[str, Any]: + server = server_conn() + local = local_conn() + try: + snapshot = { + "server": { + "badge1_exists": _fetch_count(server, "SELECT COUNT(*) FROM badge_sync WHERE id_badge = ?", (BADGE_1,)), + "badge1_deleted_not_null": _fetch_count( + server, "SELECT COUNT(*) FROM badge_sync WHERE id_badge = ? AND date_delete IS NOT NULL", (BADGE_1,) + ), + "badge2_user": _fetch_value(server, "SELECT id_user FROM badge_sync WHERE id_badge = ?", (BADGE_2,)), + "badge3_user": _fetch_value(server, "SELECT id_user FROM badge_sync WHERE id_badge = ?", (BADGE_3,)), + "user2_exists": _fetch_count(server, "SELECT COUNT(*) FROM user_sync WHERE id_user = ?", (USER_2,)), + "user2_soft_deleted": _fetch_count( + server, "SELECT COUNT(*) FROM user_sync WHERE id_user = ? 
AND date_delete IS NOT NULL", (USER_2,) + ), + "hard_delete_events_badge1": _fetch_count( + server, + "SELECT COUNT(*) FROM event_type WHERE type='hard_delete' AND badge_number=?", + (BADGE_1,), + ), + "hard_delete_events_user2": _fetch_count( + server, + "SELECT COUNT(*) FROM event_type WHERE type='hard_delete' AND user_sync_id=?", + (USER_2,), + ), + }, + "local": { + "badge1_exists": _fetch_count(local, "SELECT COUNT(*) FROM badge_sync WHERE id_badge = ?", (BADGE_1,)), + "badge1_deleted_not_null": _fetch_count( + local, "SELECT COUNT(*) FROM badge_sync WHERE id_badge = ? AND date_delete IS NOT NULL", (BADGE_1,) + ), + "badge2_user": _fetch_value(local, "SELECT id_user FROM badge_sync WHERE id_badge = ?", (BADGE_2,)), + "badge3_user": _fetch_value(local, "SELECT id_user FROM badge_sync WHERE id_badge = ?", (BADGE_3,)), + "user2_exists": _fetch_count(local, "SELECT COUNT(*) FROM user_sync WHERE id_user = ?", (USER_2,)), + "user2_soft_deleted": _fetch_count( + local, "SELECT COUNT(*) FROM user_sync WHERE id_user = ? AND date_delete IS NOT NULL", (USER_2,) + ), + "event_log_user2_received": _fetch_count( + local, + "SELECT COUNT(*) FROM event_log_sync WHERE entity_type='user' AND entity_id=? AND event_type='hard_delete'", + (USER_2,), + ), + "event_log_user2_processed": _fetch_count( + local, + "SELECT COUNT(*) FROM event_log_sync WHERE entity_type='user' AND entity_id=? 
AND event_type='hard_delete' AND processed=1", + (USER_2,), + ), + }, + } + log_line(f"Snapshot DB [{tag}] | {json.dumps(snapshot, ensure_ascii=False)}") + return snapshot + finally: + local.close() + server.close() + + +def ensure_server_fixtures() -> None: + conn = server_conn() + cur = conn.cursor() + try: + cur.execute("START TRANSACTION") + for user_id, name, surname in ( + (USER_1, "Integration", "E2E_User1"), + (USER_2, "Integration", "E2E_User2"), + (USER_3, "Integration", "E2E_User3"), + ): + cur.execute( + "INSERT INTO user_sync (id_user, name, surname, date_modif, date_delete) VALUES (?, ?, ?, NOW(), NULL) " + "ON DUPLICATE KEY UPDATE name=VALUES(name), surname=VALUES(surname), date_modif=NOW(), date_delete=NULL", + (user_id, name, surname), + ) + + for badge_id, user_id, rowid in ( + (BADGE_1, USER_1, ROWID_BADGE_1), + (BADGE_2, None, ROWID_BADGE_2), + (BADGE_3, USER_2, ROWID_BADGE_3), + ): + cur.execute( + "INSERT INTO badge_sync (id_badge, id_user, rowid_badge, date_modif, date_delete) VALUES (?, ?, ?, NOW(), NULL) " + "ON DUPLICATE KEY UPDATE id_user=VALUES(id_user), date_modif=NOW(), date_delete=NULL", + (badge_id, user_id, rowid), + ) + + # Jeu de donnees minimal: juste ce qui est necessaire au scenario et aux scans. 
+ for idx in range(2): + when = BASE_DATE + dt.timedelta(hours=idx) + cur.execute( + "INSERT INTO log_sync (`date`, id_badge, inside, id_user, date_badge, date_modif, date_delete) VALUES (?, ?, ?, ?, ?, NOW(), NULL)", + (when, BADGE_1, idx % 2, USER_1, when), + ) + for idx in range(2): + when = BASE_DATE + dt.timedelta(hours=idx) + cur.execute( + "INSERT INTO log_sync (`date`, id_badge, inside, id_user, date_badge, date_modif, date_delete) VALUES (?, ?, ?, ?, ?, NOW(), NULL)", + (when, BADGE_3, idx % 2, USER_2, when), + ) + conn.commit() + log_line("Fixtures serveur initialisees (users/badges/logs).") + except Exception: + conn.rollback() + raise + finally: + cur.close() + conn.close() + + +def sync_from_server() -> None: + model = E2EModel() + log_line("Sync: debut invoke_receive_event_logs") + model.invoke_receive_event_logs() + log_line("Sync: fin invoke_receive_event_logs") + log_line("Sync: debut apply_event_logs") + model.apply_event_logs() + log_line("Sync: fin apply_event_logs") + log_line("Sync: debut invoke_receive_users") + model.invoke_receive_users() + log_line("Sync: fin invoke_receive_users") + log_line("Sync: debut invoke_receive_badges") + model.invoke_receive_badges() + log_line("Sync: fin invoke_receive_badges") + log_line("Sync: debut invoke_receive_logs") + model.invoke_receive_logs() + log_line("Sync: fin invoke_receive_logs") + log_line("Sync: debut delete_badges_and_users_local") + model.delete_badges_and_users_local() + log_line("Sync: fin delete_badges_and_users_local") + log_line("Synchronisation serveur -> timbreuse terminee.") + + +def simulate_scan_local(badge_id: int) -> dict[str, Any]: + conn = local_conn() + cur = conn.cursor() + try: + cur.execute("SELECT id_user, date_delete FROM badge_sync WHERE id_badge = ?", (badge_id,)) + row = cur.fetchone() + if row is None: + return {"badge_id": badge_id, "status": "badge_inconnu", "user_id": None, "logs_count": 0} + user_id, badge_deleted = row + if badge_deleted is not None: + return 
{"badge_id": badge_id, "status": "badge_supprime", "user_id": user_id, "logs_count": 0} + if user_id is None: + return {"badge_id": badge_id, "status": "badge_non_attribue", "user_id": None, "logs_count": 0} + cur.execute("SELECT COUNT(*) FROM log_sync WHERE id_user = ? AND date_delete IS NULL", (int(user_id),)) + return {"badge_id": badge_id, "status": "ok", "user_id": int(user_id), "logs_count": int(cur.fetchone()[0])} + finally: + cur.close() + conn.close() + + +def server_hard_delete_badge_1() -> None: + conn = server_conn() + cur = conn.cursor() + try: + cur.execute("START TRANSACTION") + cur.execute("UPDATE badge_sync SET id_user=NULL, date_delete=NOW(), date_modif=NOW() WHERE id_badge=?", (BADGE_1,)) + cur.execute("INSERT INTO event_type (type, user_sync_id, badge_number) VALUES ('hard_delete', NULL, ?)", (BADGE_1,)) + conn.commit() + log_line("Etape 1: hard delete badge 1 + event_type.") + except Exception: + conn.rollback() + raise + finally: + cur.close() + conn.close() + + +def server_soft_delete_user_2_and_reassign_badge_3() -> None: + conn = server_conn() + cur = conn.cursor() + try: + cur.execute("START TRANSACTION") + cur.execute("UPDATE user_sync SET date_delete=NOW(), date_modif=NOW() WHERE id_user=?", (USER_2,)) + cur.execute("UPDATE badge_sync SET id_user=?, date_delete=NULL, date_modif=NOW() WHERE id_badge=?", (USER_1, BADGE_2)) + cur.execute("UPDATE badge_sync SET id_user=?, date_delete=NULL, date_modif=NOW() WHERE id_badge=?", (USER_3, BADGE_3)) + when_1 = BASE_DATE + dt.timedelta(days=7, hours=8) + when_2 = BASE_DATE + dt.timedelta(days=7, hours=17) + cur.execute( + "INSERT INTO log_sync (`date`, id_badge, inside, id_user, date_badge, date_modif, date_delete) VALUES (?, ?, ?, ?, ?, NOW(), NULL)", + (when_1, BADGE_2, 1, USER_1, when_1), + ) + cur.execute( + "INSERT INTO log_sync (`date`, id_badge, inside, id_user, date_badge, date_modif, date_delete) VALUES (?, ?, ?, ?, ?, NOW(), NULL)", + (when_2, BADGE_2, 0, USER_1, when_2), + ) + 
conn.commit() + log_line("Etape 2: soft delete user2 + reattribution badge2->user1 et badge3->user3.") + except Exception: + conn.rollback() + raise + finally: + cur.close() + conn.close() + + +def _server_delete_user_fk_dependents(cur: mariadb.Cursor, user_id: int) -> None: + cur.execute( + "SELECT TABLE_NAME, COLUMN_NAME " + "FROM information_schema.KEY_COLUMN_USAGE " + "WHERE REFERENCED_TABLE_SCHEMA = DATABASE() " + "AND REFERENCED_TABLE_NAME = 'user_sync' " + "AND REFERENCED_COLUMN_NAME = 'id_user' " + "AND TABLE_NAME <> 'user_sync'" + ) + dependencies = cur.fetchall() or [] + cleaned: list[str] = [] + for table_name, column_name in dependencies: + # Nettoyage generique des references FK vers user_sync.id_user. + sql = f"DELETE FROM `{table_name}` WHERE `{column_name}`=?" + cur.execute(sql, (user_id,)) + if cur.rowcount and cur.rowcount > 0: + cleaned.append(f"{table_name}.{column_name}={cur.rowcount}") + if cleaned: + log_line(f"Etape 4 pre-clean FK user2: {', '.join(cleaned)}") + else: + log_line("Etape 4 pre-clean FK user2: aucune dependance a supprimer.") + + +def server_hard_delete_user_2() -> None: + conn = server_conn() + cur = conn.cursor() + try: + cur.execute("START TRANSACTION") + _server_delete_user_fk_dependents(cur, USER_2) + cur.execute("DELETE FROM user_sync WHERE id_user=?", (USER_2,)) + cur.execute("INSERT INTO event_type (type, user_sync_id, badge_number) VALUES ('hard_delete', ?, NULL)", (USER_2,)) + conn.commit() + log_line("Etape 4: hard delete user2 + event_type.") + except Exception: + conn.rollback() + raise + finally: + cur.close() + conn.close() + + +def verify_local_event_processed_for_user_2() -> bool: + conn = local_conn() + cur = conn.cursor() + try: + cur.execute( + "SELECT COUNT(*) FROM event_log_sync WHERE entity_type='user' AND entity_id=? 
AND event_type='hard_delete' AND processed=1", + (USER_2,), + ) + return int(cur.fetchone()[0]) > 0 + finally: + cur.close() + conn.close() + + +def run_all(strict: bool) -> None: + failures: list[str] = [] + apply_default_test_env() + reset_log_file() + mode = "strict" if strict else "non-strict" + log_line(f"Debut scenario E2E (mode={mode}).") + ensure_e2e_schema_prerequisites() + assert_required_tables() + reset_e2e_databases() + ensure_server_fixtures() + sync_from_server() + snap = db_snapshot("apres_fixture_et_sync_initiale") + assert_checks( + "apres_fixture_et_sync_initiale", + snap, + [ + ("server.badge1_exists", "==", 1), + ("server.badge1_deleted_not_null", "==", 0), + ("server.badge3_user", "==", USER_2), + ("server.user2_exists", "==", 1), + ("server.user2_soft_deleted", "==", 0), + ("local.badge1_exists", "==", 1), + ("local.badge1_deleted_not_null", "==", 0), + ("local.badge3_user", "==", USER_2), + ("local.user2_exists", "==", 1), + ("local.user2_soft_deleted", "==", 0), + ("local.event_log_user2_processed", "==", 0), + ], + strict, + failures, + ) + log_line(f"Pre-suppression scan badge1 | {json.dumps(simulate_scan_local(BADGE_1), ensure_ascii=False)}") + log_line(f"Pre-suppression scan badge2 | {json.dumps(simulate_scan_local(BADGE_2), ensure_ascii=False)}") + log_line(f"Pre-suppression scan badge3 | {json.dumps(simulate_scan_local(BADGE_3), ensure_ascii=False)}") + server_hard_delete_badge_1() + sync_from_server() + snap = db_snapshot("apres_etape1_hard_delete_badge1") + assert_checks( + "apres_etape1_hard_delete_badge1", + snap, + [ + ("server.badge1_exists", "==", 1), + ("server.badge1_deleted_not_null", "==", 1), + ("server.hard_delete_events_badge1", ">=", 1), + ("local.badge1_exists", "==", 1), + ("local.badge1_deleted_not_null", "==", 1), + ], + strict, + failures, + ) + server_soft_delete_user_2_and_reassign_badge_3() + sync_from_server() + snap = db_snapshot("apres_etape2_soft_delete_user2_reattrib_badge3") + assert_checks( + 
"apres_etape2_soft_delete_user2_reattrib_badge3", + snap, + [ + ("server.user2_exists", "==", 1), + ("server.user2_soft_deleted", "==", 1), + ("server.badge2_user", "==", USER_1), + ("server.badge3_user", "==", USER_3), + ("local.user2_exists", "==", 1), + ("local.user2_soft_deleted", "==", 1), + ("local.badge2_user", "==", USER_1), + ("local.badge3_user", "==", USER_3), + ], + strict, + failures, + ) + step3_badge2 = simulate_scan_local(BADGE_2) + log_line(f"Etape3 scan badge2 | {json.dumps(step3_badge2, ensure_ascii=False)}") + step3 = simulate_scan_local(BADGE_3) + log_line(f"Etape3 scan badge3 | {json.dumps(step3, ensure_ascii=False)}") + server_hard_delete_user_2() + sync_from_server() + snap = db_snapshot("apres_etape4_hard_delete_user2") + assert_checks( + "apres_etape4_hard_delete_user2", + snap, + [ + ("server.user2_exists", "==", 0), + ("server.hard_delete_events_user2", ">=", 1), + # Le hard delete distant est applique localement comme soft delete. + ("local.user2_exists", "==", 1), + ("local.user2_soft_deleted", "==", 1), + ("local.event_log_user2_received", ">=", 1), + ("local.event_log_user2_processed", ">=", 1), + ("local.badge2_user", "==", USER_1), + ("local.badge3_user", "==", USER_3), + ], + strict, + failures, + ) + step5_badge2 = simulate_scan_local(BADGE_2) + log_line(f"Etape5 scan badge2 | {json.dumps(step5_badge2, ensure_ascii=False)}") + log_line(f"Verification event_log_sync processed=1 pour user2: {verify_local_event_processed_for_user_2()}") + step5 = simulate_scan_local(BADGE_3) + log_line(f"Etape5 scan badge3 | {json.dumps(step5, ensure_ascii=False)}") + if failures: + log_line(f"E2E FAIL - {len(failures)} assertion(s) en echec.") + raise AssertionError(f"{len(failures)} assertion(s) en echec. 
Voir {LOG_FILE}") + log_line("E2E PASS - toutes les assertions sont valides.") + log_line("Fin scenario E2E.") + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Scenario E2E Timbreuse avec validations SQL.") + mode_group = parser.add_mutually_exclusive_group() + mode_group.add_argument("--strict", action="store_true", help="Arrete le scenario au 1er echec d'assertion (defaut).") + mode_group.add_argument("--non-strict", action="store_true", help="Continue malgre les echecs et echoue a la fin.") + return parser.parse_args() + + +if __name__ == "__main__": + args = parse_args() + try: + run_all(strict=not args.non_strict) + except Exception as exc: + log_line(f"E2E FAIL - {exc}") + raise diff --git a/tests/E2E/seed_local_e2e_data.py b/tests/E2E/seed_local_e2e_data.py new file mode 100644 index 0000000..101b2b6 --- /dev/null +++ b/tests/E2E/seed_local_e2e_data.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python +from __future__ import annotations + +import argparse +import datetime as dt +import os + +import mariadb + +DEFAULT_DB = "timbreuse2022" +DEFAULT_BADGE_ID = 900000000001 +DEFAULT_USER_ID = 91001 +DEFAULT_NAME = "Integration" +DEFAULT_SURNAME = "BadgeDeleted" + + +def db_params(db_name: str) -> dict: + if os.name != "nt": + return {"user": "root", "password": "0", "host": "localhost", "database": db_name} + return {"user": "root", "password": "", "host": "localhost", "port": 3306, "database": db_name} + + +def reset_local_tables(cur: mariadb.Cursor) -> None: + cur.execute("SET FOREIGN_KEY_CHECKS=0") + for table in ("event_log_sync", "log_sync", "badge_write", "badge_sync", "user_sync"): + cur.execute(f"DELETE FROM {table}") + cur.execute("SET FOREIGN_KEY_CHECKS=1") + + +def ensure_local_fixtures(db_name: str, badge_id: int, user_id: int, name: str, surname: str) -> None: + conn = mariadb.connect(**db_params(db_name)) + cur = conn.cursor() + now = dt.datetime.now().replace(microsecond=0) + two_hours_ago = now - 
dt.timedelta(hours=2) + thirty_minutes_ago = now - dt.timedelta(minutes=30) + try: + cur.execute("START TRANSACTION") + # On repart d'un etat vierge pour des tests E2E deterministes. + reset_local_tables(cur) + cur.execute( + "INSERT INTO user_sync (id_user, name, surname, date_modif, date_delete) VALUES (?, ?, ?, NOW(), NULL) " + "ON DUPLICATE KEY UPDATE name=VALUES(name), surname=VALUES(surname), date_modif=NOW(), date_delete=NULL", + (user_id, name, surname), + ) + cur.execute( + "INSERT INTO badge_sync (id_badge, id_user, rowid_badge, date_modif, date_delete) VALUES (?, ?, ?, NOW(), NULL) " + "ON DUPLICATE KEY UPDATE id_user=VALUES(id_user), date_modif=NOW(), date_delete=NULL", + (badge_id, user_id, 1), + ) + cur.execute( + "INSERT INTO badge_write (id_badge, id_user) VALUES (?, ?) ON DUPLICATE KEY UPDATE id_user=VALUES(id_user)", + (badge_id, user_id), + ) + cur.execute( + "INSERT INTO log_sync (date, id_badge, inside, id_log, id_user, date_badge, date_modif, date_delete) " + "VALUES (?, ?, 1, ?, ?, ?, NOW(), NULL) ON DUPLICATE KEY UPDATE date=VALUES(date), id_badge=VALUES(id_badge), " + "inside=VALUES(inside), id_user=VALUES(id_user), date_badge=VALUES(date_badge), date_modif=NOW(), date_delete=NULL", + (two_hours_ago, badge_id, 1, user_id, two_hours_ago), + ) + cur.execute( + "INSERT INTO log_sync (date, id_badge, inside, id_log, id_user, date_badge, date_modif, date_delete) " + "VALUES (?, ?, 0, ?, ?, ?, NOW(), NULL) ON DUPLICATE KEY UPDATE date=VALUES(date), id_badge=VALUES(id_badge), " + "inside=VALUES(inside), id_user=VALUES(id_user), date_badge=VALUES(date_badge), date_modif=NOW(), date_delete=NULL", + (thirty_minutes_ago, badge_id, 2, user_id, thirty_minutes_ago), + ) + conn.commit() + except Exception: + conn.rollback() + raise + finally: + cur.close() + conn.close() + + +def main() -> int: + parser = argparse.ArgumentParser(description="Injecte des donnees E2E dans la DB locale Timbreuse.") + parser.add_argument("--db-name", default=DEFAULT_DB) + 
parser.add_argument("--badge-id", type=int, default=DEFAULT_BADGE_ID) + parser.add_argument("--user-id", type=int, default=DEFAULT_USER_ID) + parser.add_argument("--name", default=DEFAULT_NAME) + parser.add_argument("--surname", default=DEFAULT_SURNAME) + args = parser.parse_args() + ensure_local_fixtures(args.db_name, args.badge_id, args.user_id, args.name, args.surname) + print("OK - donnees E2E locales injectees.") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tests/E2E/test_handle_deleted_remote_badge_mock.py b/tests/E2E/test_handle_deleted_remote_badge_mock.py new file mode 100644 index 0000000..b3b73d8 --- /dev/null +++ b/tests/E2E/test_handle_deleted_remote_badge_mock.py @@ -0,0 +1,129 @@ +#!/usr/bin/env python +""" +Tests metier de App.handle_deleted_remote_badge() avec un mock timbreuse-srv. + +Niveau: unitaire/rapide (stable, sans Docker/DB/API reelle). +""" + +import sys +import types +import unittest +from pathlib import Path + +PROJECT_ROOT = Path(__file__).resolve().parents[2] +if str(PROJECT_ROOT) not in sys.path: + sys.path.insert(0, str(PROJECT_ROOT)) + +# Injection de modules factices pour importer main.py sans dependances lourdes +fake_view_module = types.ModuleType("view") + + +class _FakeViewClass: + def __init__(self): + self.current_scene = "wait" + + def load(self): + return None + + def read_pipe(self, _pipe): + return None + + +fake_view_module.View = _FakeViewClass +sys.modules["view"] = fake_view_module + +fake_model_module = types.ModuleType("model") + + +class _FakeModelClass: + pass + + +fake_model_module.Model = _FakeModelClass +sys.modules["model"] = fake_model_module + +from main import App + + +class MockTimbreuseSrv: + def __init__(self): + self._active_badges = {} + + def assign_badge(self, badge_id: int, user_id: int) -> None: + self._active_badges[badge_id] = user_id + + def remote_user_exists_for_badge(self, badge_id: int) -> bool: + return badge_id in self._active_badges + + +class 
MockModel: + def __init__(self, remote_srv: MockTimbreuseSrv): + self.remote_srv = remote_srv + self.local_mapping = {} + self.removed_badges = [] + + def set_local_mapping(self, badge_id: int, user_name: str) -> None: + self.local_mapping[badge_id] = user_name + + def check_badge_assignment_with_remote(self, badge_id: int) -> dict: + local_name = self.local_mapping.get(badge_id, "") + return { + "remote_exists": self.remote_srv.remote_user_exists_for_badge(badge_id), + "local_exists": badge_id in self.local_mapping, + "local_user_name": local_name, + } + + def remove_local_badge_correspondance(self, badge_id: int) -> None: + self.removed_badges.append(badge_id) + self.local_mapping.pop(badge_id, None) + + +class MockView: + def __init__(self): + self.current_scene = "wait" + self.modal_texts = None + + def do_badge_sync_error(self, texts): + self.modal_texts = list(texts) + self.current_scene = "modal" + + +def build_app_for_test(model: MockModel, view: MockView, badge_id: int): + app = App.__new__(App) + app.model = model + app.view = view + app.pipe = {"id_badge": badge_id, "quit": False} + app._wait_modal_ack_called = 0 + + def _fake_wait_modal_ack(): + app._wait_modal_ack_called += 1 + app.view.current_scene = "wait" + + app.wait_modal_ack = _fake_wait_modal_ack + return app + + +class TestHandleDeletedRemoteBadgeMock(unittest.TestCase): + def setUp(self): + self.badge_id = 424242 + self.remote_srv = MockTimbreuseSrv() + self.model = MockModel(self.remote_srv) + self.view = MockView() + + def test_remote_exists_no_action(self): + self.remote_srv.assign_badge(self.badge_id, user_id=10) + self.model.set_local_mapping(self.badge_id, "Doe John") + app = build_app_for_test(self.model, self.view, self.badge_id) + handled = app.handle_deleted_remote_badge() + self.assertFalse(handled) + + def test_remote_deleted_local_exists_shows_expected_texts(self): + self.model.set_local_mapping(self.badge_id, "Doe John") + app = build_app_for_test(self.model, self.view, 
self.badge_id) + handled = app.handle_deleted_remote_badge() + self.assertTrue(handled) + self.assertEqual(self.model.removed_badges, [self.badge_id]) + + +if __name__ == "__main__": + unittest.main(verbosity=2) diff --git a/tests/E2E/test_integration_legacy_remote_delete.py b/tests/E2E/test_integration_legacy_remote_delete.py new file mode 100644 index 0000000..26f3880 --- /dev/null +++ b/tests/E2E/test_integration_legacy_remote_delete.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python +""" +Test d'integration "legacy" pour suppression distante badge/user. + +Niveau: integration lourde (Docker + DB + API), dependante de l'environnement. +""" + +from __future__ import annotations + +import argparse +import subprocess +import sys +from datetime import datetime +from pathlib import Path + +PROJECT_ROOT = Path(__file__).resolve().parents[2] +if str(PROJECT_ROOT) not in sys.path: + sys.path.insert(0, str(PROJECT_ROOT)) + +from main import App + +LOCAL_DB_CONTAINER = "timbreuse-db" +LOCAL_DB_NAME = "timbreuse2022" +PROVIDER_DB_CONTAINER = "timbreuse-srv-mariadb-1" +PROVIDER_DB_NAME = "ci4" +PROVIDER_ROOT_PASSWORD = "root" +DEFAULT_USERNAME = "test_timbreuse" + + +class FakeView: + def __init__(self) -> None: + self.current_scene = "wait" + self.badge_sync_error_texts: list[str] = [] + + def do_badge_sync_error(self, texts: list[str]): + self.badge_sync_error_texts = list(texts) + self.current_scene = "wait" + + +def log(message: str) -> None: + print(f"[{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] {message}") + + +def run_cmd(command: list[str]) -> str: + proc = subprocess.run(command, capture_output=True, text=True, check=True) + return proc.stdout.strip() + + +def docker_mariadb_exec(container: str, sql: str, database: str | None = None, root_password: str | None = None) -> str: + cmd = ["docker", "exec", container, "mariadb", "-u", "root", "-N"] + if root_password is not None: + cmd.append(f"-p{root_password}") + if database: + cmd.append(database) + cmd.extend(["-e", 
sql]) + return run_cmd(cmd) + + +def get_provider_user_and_badge(username: str) -> tuple[int, str, str, int]: + sql = ( + f"USE {PROVIDER_DB_NAME}; " + "SELECT atu.id_user, us.name, us.surname, bs.id_badge " + "FROM user u " + "JOIN access_tim_user atu ON atu.id_ci_user=u.id " + "JOIN user_sync us ON us.id_user=atu.id_user " + "LEFT JOIN badge_sync bs ON bs.id_user=us.id_user AND bs.date_delete IS NULL " + f"WHERE u.username='{username}' AND us.date_delete IS NULL " + "ORDER BY bs.id_badge ASC LIMIT 1;" + ) + out = docker_mariadb_exec(PROVIDER_DB_CONTAINER, sql, root_password=PROVIDER_ROOT_PASSWORD) + parts = out.split("\t") + if len(parts) < 4 or not parts[3]: + raise RuntimeError("Utilisateur fournisseur ou badge actif introuvable.") + return int(parts[0]), parts[1], parts[2], int(parts[3]) + + +def hard_delete_provider_user_and_badge(id_user_sync: int, badge_id: int) -> None: + sql = ( + f"USE {PROVIDER_DB_NAME}; " + f"DELETE FROM badge_sync WHERE id_badge={badge_id} OR id_user={id_user_sync}; " + f"DELETE FROM user_sync WHERE id_user={id_user_sync};" + ) + docker_mariadb_exec(PROVIDER_DB_CONTAINER, sql, root_password=PROVIDER_ROOT_PASSWORD) + + +def simulate_next_scan_and_assert_messages(badge_id: int, expected_user_name: str) -> None: + app = App() + app.HAS_REMOTE_SERVER = True + app.pipe["id_badge"] = badge_id + app.view = FakeView() + app.wait_modal_ack = lambda: None + handled = app.handle_deleted_remote_badge() + if not handled: + raise AssertionError("Suppression distante non detectee.") + expected = [ + f"Ce badge n'est plus attribue a {expected_user_name}.", + "Cette correspondance est supprimee egalement sur cet appareil.", + ] + if app.view.badge_sync_error_texts != expected: + raise AssertionError(f"Messages inattendus: {app.view.badge_sync_error_texts}") + + +def run_scenario(username: str) -> None: + id_user_sync, name, surname, badge_id = get_provider_user_and_badge(username) + full_name = f"{surname} {name}".strip() + log(f"Suppression distante 
de user={id_user_sync}, badge={badge_id}") + hard_delete_provider_user_and_badge(id_user_sync, badge_id) + simulate_next_scan_and_assert_messages(badge_id, full_name) + log("Scenario integration legacy valide.") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--username", default=DEFAULT_USERNAME) + args = parser.parse_args() + run_scenario(args.username) + raise SystemExit(0) diff --git a/view.py b/view.py index 2a66df8..7f19026 100644 --- a/view.py +++ b/view.py @@ -1101,6 +1101,13 @@ def do_unknown_badge(self, count: int=0): next_scene) self.current_scene = 'modal' + def do_badge_sync_error(self, texts: list[str]): + """ + Affiche un modal d'information pour les erreurs de correspondance badge. + """ + self.scenes = 'modal', SceneModal(self.screen, self, texts, 'wait') + self.current_scene = 'modal' + def cancel(self): ''' when time expire or press quit button From 17efe9d96955c72ec5954dfe8b3e52ed151f7b94 Mon Sep 17 00:00:00 2001 From: AdWav Date: Wed, 22 Apr 2026 13:42:58 +0200 Subject: [PATCH 2/3] fix(model): scope log queries to user id and guard missing users Prevent log/history lookups from mixing badge and user filters by querying with resolved user id only. Also add early returns for unknown users and reorganize README documentation sections. --- README.md | 6 +++++- model.py | 23 +++++++++++++---------- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 1acbdb6..e187868 100644 --- a/README.md +++ b/README.md @@ -108,7 +108,11 @@ Lancer avec `./run.sh` ou `python main.py`. 
- **Historique** : 5 derniers pointages, détail par jour, heures semaine courante/précédente - **Fermer** : `Escape`, clic sur X, ou `Ctrl+C` dans le terminal -## Documentation E2E +## Documentation +### Globale +- Certaines procédures et/ou certains schémas sont disponibles dans la KB ou sur Teams dans le canal + +### E2E - Procédure complète, prérequis serveur et scénario de tests: [`tests/E2E/README.md`](tests/E2E/README.md) ## Dépendances diff --git a/model.py b/model.py index 7e43649..634407f 100644 --- a/model.py +++ b/model.py @@ -106,19 +106,20 @@ def get_usernames(self, user_id) -> list: data = self.cursor_to_tuple(cursor) return data - def get_5_last_logs(self, badge_id) -> list: + def get_5_last_logs(self, user_id: int) -> list: ''' >>> model = Model() >>> type(model.get_5_last_logs(116)) ''' - user_id = self.get_user_id_with_badge(badge_id) + if user_id in (None, -1): + return [] sql = ('SELECT `date`, `inside`, `date_badge`, `date_modif`, ' - '`date_delete` FROM `log` WHERE `id_user` = ? OR ' - '`id_badge` = ? ORDER BY `date` DESC LIMIT 5;') + '`date_delete` FROM `log` WHERE `id_user` = ? 
' + 'ORDER BY `date` DESC LIMIT 5;') connection = self.get_connection_auto_close() cursor = connection.cursor - cursor.execute(sql, (user_id, badge_id)) + cursor.execute(sql, (user_id, )) data = self.cursor_to_list(cursor) # in test do not close otherside @@ -128,10 +129,10 @@ def get_5_last_logs(self, badge_id) -> list: def find_user_info(self, pipe: dict) -> None: user_id = self.get_user_id_with_badge(pipe['id_badge']) - if user_id is None: + if user_id in (None, -1): return pipe['name'], pipe['surname'] = self.get_usernames(user_id) - five_logs = self.get_5_last_logs(pipe['id_badge']) + five_logs = self.get_5_last_logs(user_id) pipe['log'] = self.cursor_to_dict_in_list(('date', 'inside', 'date_badge', 'date_modif', 'date_delete'), five_logs) # to refactory @@ -668,13 +669,15 @@ def get_2_week_log(self, pipe:dict) -> tuple: True ''' id_user = self.get_user_id_with_badge(pipe['id_badge']) + if id_user in (None, -1): + return tuple(), tuple() old_last_monday = self.find_last_monday(datetime.date.today(), 1) sql = ('SELECT `date`, `inside`, `date_badge`, `date_modif`, ' - '`date_delete` FROM `log` WHERE (`id_badge` = ? OR ' - '`id_user` = ?) AND `date` >= ? ORDER BY `date` DESC;') + '`date_delete` FROM `log` WHERE `id_user` = ? ' + 'AND `date` >= ? 
ORDER BY `date` DESC;') connection = self.get_connection_auto_close() cursor = connection.cursor - cursor.execute(sql, (pipe['id_badge'], id_user, old_last_monday)) + cursor.execute(sql, (id_user, old_last_monday)) names = ('date', 'inside', 'date_badge', 'date_modif', 'date_delete') two_week_log = self.cursor_to_dict_in_list(names, cursor) connection.connection.close() From f4823826a11aea0dd3e43b18bcda736b9d18d282 Mon Sep 17 00:00:00 2001 From: AdWav Date: Wed, 22 Apr 2026 13:56:49 +0200 Subject: [PATCH 3/3] docs : Update DATABASE.md --- docs/DATABASE.md | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/docs/DATABASE.md b/docs/DATABASE.md index 73de2ba..beed248 100644 --- a/docs/DATABASE.md +++ b/docs/DATABASE.md @@ -113,3 +113,34 @@ Le fichier `sql/test_data.sql` contient : - 3 utilisateurs (Dupont Jean, Martin Sophie, Müller Hans) - 3 badges (dont le badge 63 utilisé par `fake_rfid.py`) - Des logs sur 2 semaines pour tester l'affichage des heures + +## Complément E2E (tests bout en bout) + +Le scénario décrit dans `tests/E2E/README.md` valide un flux client + serveur sur les suppressions distantes (soft/hard delete). + +### Tables impliquées dans la validation E2E + +- **Côté serveur (`timbreuse-srv`)** + - `event_type` : journal d'événements métier, incluant les événements `hard_delete`. + - `user_sync`, `badge_sync`, `log_sync` : source de vérité distante (avec soft delete via `date_delete`). +- **Côté client (`Timbreuse`)** + - `event_log_sync` : réception des événements serveur, avec un flag `processed` pour confirmer l'application locale. + - `user_sync`, `badge_sync`, `log_sync` : miroir local synchronisé depuis le serveur. + - `*_write` : données locales en attente de synchronisation. + +### Règles de suppression vérifiées + +- **Soft delete** : la ligne reste présente, `date_delete` est renseigné. +- **Hard delete** : la ligne est physiquement supprimée côté serveur et un événement `hard_delete` doit être émis. 
+- **Attendu côté client** : l'événement est importé dans `event_log_sync`, appliqué de façon idempotente, puis marqué `processed=1`. + +### Validation SQL minimale recommandée (E2E) + +Les snapshots SQL du scénario E2E sont consignés dans `tests/E2E/logs/E2E_logs.txt`. +Vérifier au minimum : + +- suppression effective du badge ciblé (hard delete), +- état d'attribution du badge réaffecté, +- présence/état de soft delete de l'utilisateur concerné, +- présence d'un événement `hard_delete` côté serveur (`event_type`), +- réception et traitement côté client (`event_log_sync.processed=1`).