From ed4210673ab05cc529845a97fffdd7d1df816436 Mon Sep 17 00:00:00 2001 From: "herve.le-bars" Date: Mon, 3 Jun 2024 19:01:04 +0200 Subject: [PATCH 01/29] feat: add fastapi package cd backend && poetry add fastapi --- backend/poetry.lock | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/poetry.lock b/backend/poetry.lock index a825f673..f50c9c14 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -1811,7 +1811,6 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] From 314242347235060cc2ac78fae33a51798559b366 Mon Sep 17 00:00:00 2001 From: "herve.le-bars" Date: Mon, 3 Jun 2024 21:41:08 +0200 Subject: [PATCH 02/29] feat: docker add load-data script and volume persistence --- docker-compose.yaml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index e0db921c..c80250b6 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -46,7 +46,7 @@ services: environment: - REDIS_PASSWORD=${REDIS_PASSWORD:-redis} - REDIS_PORT=${REDIS_PORT:-6379} - volumes: + volumes: - bloom-redis:/data networks: - bloom_net @@ -63,6 +63,8 @@ services: - bloom-data:/var/lib/postgresql/data networks: - bloom_net + volumes: + - bloom-data:/var/lib/postgresql/data healthcheck: # PostGis database initialization is done with two steps (postgres+postgis) # This causes healthcheck to be valid before real full initialization @@ -96,7 +98,7 @@ services: dockerfile: ./docker/frontend/dev.Dockerfile args: APP_DIR: /app - + volumes: #- ./frontend:/app #- ./frontend/node_modules:/app/node_modules From f0cefaced94e1ba0be29c104b04400041debc448 Mon Sep 17 00:00:00 2001 From: "herve.le-bars" Date: Mon, 3 Jun 2024 21:45:47 +0200 Subject: [PATCH 03/29] feat: add ports & ports/{id} endpoints --- backend/bloom/services/api.py | 187 ++++++++++++++++++---------------- 1 file changed, 101 insertions(+), 86 deletions(-) diff --git a/backend/bloom/services/api.py b/backend/bloom/services/api.py index 5e0e72a3..08eb166b 100644 --- a/backend/bloom/services/api.py +++ b/backend/bloom/services/api.py @@ -5,61 +5,62 @@ import json from bloom.config import settings from bloom.container import UseCases -from bloom.domain.vessel import Vessel from bloom.logger import logger rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0) -from datetime import datetime import time - app = FastAPI() + @app.get("/cache/all/flush") -async def cache_all_flush(request:Request): +async def cache_all_flush(request: Request): rd.flushall() - return {"code":0} + return {"code": 0} + @app.get("/vessels") -async def list_vessels(nocache:bool=False): - endpoint=f"/vessels" - cache= rd.get(endpoint) +async def list_vessels(nocache: bool = False): + endpoint = f"/vessels" + cache = rd.get(endpoint) start = time.time() if cache and not nocache: logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed 
Time: {time.time()-start}") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: use_cases = UseCases() vessel_repository = use_cases.vessel_repository() db = use_cases.db() with db.session() as session: - + json_data = [json.loads(v.model_dump_json() if v else "{}") - for v in vessel_repository.get_vessels_list(session)] + for v in vessel_repository.get_vessels_list(session)] rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) + rd.expire(endpoint, settings.redis_cache_expiration) return json_data + @app.get("/vessels/{vessel_id}") async def get_vessel(vessel_id: int): use_cases = UseCases() vessel_repository = use_cases.vessel_repository() db = use_cases.db() with db.session() as session: - return vessel_repository.get_vessel_by_id(session,vessel_id) + return vessel_repository.get_vessel_by_id(session, vessel_id) + @app.get("/vessels/all/positions/last") -async def list_all_vessel_last_position(nocache:bool=False): - endpoint=f"/vessels/all/positions/last" - cache= rd.get(endpoint) +async def list_all_vessel_last_position(nocache: bool = False): + endpoint = f"/vessels/all/positions/last" + cache = rd.get(endpoint) start = time.time() if cache and not nocache: logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: use_cases = UseCases() @@ -69,41 +70,43 @@ async def list_all_vessel_last_position(nocache:bool=False): json_data = [json.loads(p.model_dump_json() if p else "{}") for p in segment_repository.get_all_vessels_last_position(session)] rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + rd.expire(endpoint, settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return json_data + @app.get("/vessels/{vessel_id}/positions/last") -async def get_vessel_last_position(vessel_id: int, nocache:bool=False): - endpoint=f"/vessels/{vessel_id}/positions/last" - cache= rd.get(endpoint) +async def get_vessel_last_position(vessel_id: int, nocache: bool = False): + endpoint = f"/vessels/{vessel_id}/positions/last" + cache = rd.get(endpoint) start = time.time() if cache and not nocache: logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: use_cases = UseCases() segment_repository = use_cases.segment_repository() db = use_cases.db() with db.session() as session: - result=segment_repository.get_vessel_last_position(session,vessel_id) + result = segment_repository.get_vessel_last_position(session, vessel_id) json_data = json.loads(result.model_dump_json() if result else "{}") rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + rd.expire(endpoint, settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return json_data + @app.get("/vessels/{vessel_id}/excursions") -async def list_vessel_excursions(vessel_id: int, nocache:bool=False): - 
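Every endpoint added in this patch repeats the same read-through cache shape: look the endpoint path up in Redis, otherwise query the repository inside a session, then store the JSON payload with an expiration. A minimal sketch of that shape factored into a helper is given below; the helper name cached_json and the idea of factoring it out are assumptions for illustration, not something the patch itself does.

import json
import time
from typing import Any, Callable

import redis

from bloom.config import settings
from bloom.logger import logger

rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0)


def cached_json(endpoint: str, loader: Callable[[], Any], nocache: bool = False) -> Any:
    # Return the cached payload for this endpoint, or build it with `loader` and cache it.
    start = time.time()
    cache = rd.get(endpoint)
    if cache and not nocache:
        payload = json.loads(cache)
    else:
        payload = loader()
        rd.set(endpoint, json.dumps(payload))
        rd.expire(endpoint, settings.redis_cache_expiration)
    logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
    return payload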
endpoint=f"/vessels/{vessel_id}/excursions" - cache= rd.get(endpoint) +async def list_vessel_excursions(vessel_id: int, nocache: bool = False): + endpoint = f"/vessels/{vessel_id}/excursions" + cache = rd.get(endpoint) start = time.time() if cache and not nocache: logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: use_cases = UseCases() @@ -111,37 +114,39 @@ async def list_vessel_excursions(vessel_id: int, nocache:bool=False): db = use_cases.db() with db.session() as session: json_data = [json.loads(p.model_dump_json() if p else "{}") - for p in excursion_repository.get_excursions_by_vessel_id(session,vessel_id)] + for p in excursion_repository.get_excursions_by_vessel_id(session, vessel_id)] rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + rd.expire(endpoint, settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return json_data @app.get("/vessels/{vessel_id}/excursions/{excursions_id}") -async def get_vessel_excursion(vessel_id: int,excursions_id: int): +async def get_vessel_excursion(vessel_id: int, excursions_id: int): use_cases = UseCases() excursion_repository = use_cases.excursion_repository() db = use_cases.db() with db.session() as session: - return excursion_repository.get_vessel_excursion_by_id(session,vessel_id,excursions_id) + return excursion_repository.get_vessel_excursion_by_id(session, vessel_id, excursions_id) @app.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments") -async def list_vessel_excursion_segments(vessel_id: int,excursions_id: int): +async def list_vessel_excursion_segments(vessel_id: int, excursions_id: int): use_cases = UseCases() segment_repository = use_cases.segment_repository() db = use_cases.db() with db.session() as session: - return segment_repository.list_vessel_excursion_segments(session,vessel_id,excursions_id) + return segment_repository.list_vessel_excursion_segments(session, vessel_id, excursions_id) + @app.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments/{segment_id}") -async def get_vessel_excursion_segment(vessel_id: int,excursions_id: int, segment_id:int): +async def get_vessel_excursion_segment(vessel_id: int, excursions_id: int, segment_id: int): use_cases = UseCases() segment_repository = use_cases.segment_repository() db = use_cases.db() with db.session() as session: - return segment_repository.get_vessel_excursion_segment_by_id(session,vessel_id,excursions_id,segment_id) + return segment_repository.get_vessel_excursion_segment_by_id(session, vessel_id, excursions_id, segment_id) + @app.get("/ports") async def list_ports(request:Request,nocache:bool=False): @@ -164,25 +169,27 @@ async def list_ports(request:Request,nocache:bool=False): rd.expire(endpoint,settings.redis_cache_expiration) logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") return json_data - + + @app.get("/ports/{port_id}") -async def get_port(port_id:int): +async def get_port(port_id: int): use_cases = UseCases() port_repository = use_cases.port_repository() db = use_cases.db() with db.session() as session: - return port_repository.get_port_by_id(session,port_id) + return port_repository.get_port_by_id(session, port_id) + @app.get("/zones") -async def 
list_zones(request:Request,nocache:bool=False): - endpoint=f"/zones" - cache= rd.get(endpoint) +async def list_zones(request: Request, nocache: bool = False): + endpoint = f"/zones" + cache = rd.get(endpoint) start = time.time() if cache and not nocache: logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: use_cases = UseCases() @@ -192,41 +199,43 @@ async def list_zones(request:Request,nocache:bool=False): json_data = [json.loads(z.model_dump_json() if z else "{}") for z in zone_repository.get_all_zones(session)] rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + rd.expire(endpoint, settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return json_data + @app.get("/zones/all/categories") -async def list_zone_categories(request:Request,nocache:bool=False): - endpoint=f"/zones/all/categories" - cache= rd.get(endpoint) +async def list_zone_categories(request: Request, nocache: bool = False): + endpoint = f"/zones/all/categories" + cache = rd.get(endpoint) start = time.time() if cache and not nocache: logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: use_cases = UseCases() zone_repository = use_cases.zone_repository() db = use_cases.db() with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") + json_data = [json.loads(z.model_dump_json() if z else "{}") for z in zone_repository.get_all_zone_categories(session)] rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + rd.expire(endpoint, settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return json_data + @app.get("/zones/by-category/{category}/by-sub-category/{sub}") -async def get_zone_all_by_category(category:str="all",sub:str=None,nocache:bool=False): - endpoint=f"/zones/by-category/{category}/by-sub-category/{sub}" - cache= rd.get(endpoint) +async def get_zone_all_by_category(category: str = "all", sub: str = None, nocache: bool = False): + endpoint = f"/zones/by-category/{category}/by-sub-category/{sub}" + cache = rd.get(endpoint) start = time.time() if cache and not nocache: logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: use_cases = UseCases() @@ -234,21 +243,24 @@ async def get_zone_all_by_category(category:str="all",sub:str=None,nocache:bool= db = use_cases.db() with db.session() as session: json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zones_by_category(session,category if category != 'all' else None,sub)] + for z in + zone_repository.get_all_zones_by_category(session, category if category != 'all' else None, + sub)] rd.set(endpoint, json.dumps(json_data)) - 
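Because each route uses its own path as the Redis key, every category (and category/sub-category pair) is cached under its own key, nocache=true bypasses the cache, and /cache/all/flush clears everything. A quick way to exercise these routes, assuming httpx is available, the API is served locally on port 8000, and "amp" is a category present in the data (all assumptions):

import httpx

base_url = "http://localhost:8000"  # assumed local development address
with httpx.Client(base_url=base_url) as client:
    print(client.get("/zones/all/categories").json())             # distinct category / sub_category pairs
    print(client.get("/zones/by-category/amp").json())            # zones for one category ("amp" is assumed)
    print(client.get("/zones", params={"nocache": True}).json())  # bypass the Redis cache
    print(client.get("/cache/all/flush").json())                  # returns {"code": 0} after flushing all keys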
rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + rd.expire(endpoint, settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return json_data + @app.get("/zones/by-category/{category}") -async def get_zone_all_by_category(category:str="all",nocache:bool=False): - endpoint=f"/zones/by-category/{category}" - cache= rd.get(endpoint) +async def get_zone_all_by_category(category: str = "all", nocache: bool = False): + endpoint = f"/zones/by-category/{category}" + cache = rd.get(endpoint) start = time.time() if cache and not nocache: logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: use_cases = UseCases() @@ -256,25 +268,28 @@ async def get_zone_all_by_category(category:str="all",nocache:bool=False): db = use_cases.db() with db.session() as session: json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zones_by_category(session,category if category != 'all' else None)] + for z in + zone_repository.get_all_zones_by_category(session, category if category != 'all' else None)] rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + rd.expire(endpoint, settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return json_data - + + @app.get("/zones/{zones_id}") -async def get_zone(zones_id:int): +async def get_zone(zones_id: int): use_cases = UseCases() zone_repository = use_cases.zone_repository() db = use_cases.db() with db.session() as session: - return zone_repository.get_zone_by_id(session,zones_id) + return zone_repository.get_zone_by_id(session, zones_id) + @app.get("/") -async def root(request:Request): +async def root(request: Request): return { - "cache_all_flush": f"{request.url_for('cache_all_flush')}", - "ports": f"{request.url_for('list_ports')}", - "vessels": f"{request.url_for('list_vessels')}", - "zones": f"{request.url_for('list_zones')}", - } \ No newline at end of file + "cache_all_flush": f"{request.url_for('cache_all_flush')}", + "ports": f"{request.url_for('list_ports')}", + "vessels": f"{request.url_for('list_vessels')}", + "zones": f"{request.url_for('list_zones')}", + } From a97e6655c9d948a6543cd755338532f9f703f96b Mon Sep 17 00:00:00 2001 From: "herve.le-bars" Date: Fri, 7 Jun 2024 13:25:57 +0200 Subject: [PATCH 04/29] feat: api add endpoint /zones/by-category/{category} --- backend/bloom/services/api.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/backend/bloom/services/api.py b/backend/bloom/services/api.py index 08eb166b..489c19de 100644 --- a/backend/bloom/services/api.py +++ b/backend/bloom/services/api.py @@ -275,6 +275,27 @@ async def get_zone_all_by_category(category: str = "all", nocache: bool = False) logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return json_data +@app.get("/zones/by-category/{category}") +async def get_zone_all_by_category(category:str="amp",nocache:bool=0): + endpoint=f"/zones/by-category/{category}" + cache= rd.get(endpoint) + start = time.time() + if cache and not nocache: + logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") + payload=json.loads(cache) + 
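The endpoints serialize Pydantic domain objects by round-tripping them through model_dump_json() and json.loads(), so what is cached in Redis and returned to the client is plain JSON rather than model instances. A toy illustration of that round trip (the Zone fields below are invented for the example and are not the project's real Zone model):

import json

from pydantic import BaseModel


class Zone(BaseModel):  # illustrative stand-in, not bloom.domain.zone.Zone
    id: int
    category: str
    sub_category: str | None = None


zones = [Zone(id=1, category="amp"), Zone(id=2, category="amp", sub_category="example-sub")]
json_data = [json.loads(z.model_dump_json()) for z in zones]  # list of plain dicts
cached = json.dumps(json_data)                                # what rd.set() stores
assert json.loads(cached) == json_data                        # what a cache hit returns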
logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + return payload + else: + use_cases = UseCases() + zone_repository = use_cases.zone_repository() + db = use_cases.db() + with db.session() as session: + json_data = [z.model_dump_json() + for z in zone_repository.get_all_zones_by_category(session,category)] + rd.set(endpoint, json.dumps(json_data)) + rd.expire(endpoint,settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + return json_data @app.get("/zones/{zones_id}") async def get_zone(zones_id: int): From 7ccc3f1fd1e0390cf1ed4ef66971611c2f887bee Mon Sep 17 00:00:00 2001 From: "herve.le-bars" Date: Fri, 7 Jun 2024 13:53:19 +0200 Subject: [PATCH 05/29] =?UTF-8?q?feat:=20ajout=20endpoint=20/zones/all/cat?= =?UTF-8?q?egories=20pour=20avoir=20la=20liste=20des=20cat=C3=A9gories=20d?= =?UTF-8?q?isponibles?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../bloom/infra/repositories/repository_zone.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/backend/bloom/infra/repositories/repository_zone.py b/backend/bloom/infra/repositories/repository_zone.py index aa9c2472..c93bb297 100644 --- a/backend/bloom/infra/repositories/repository_zone.py +++ b/backend/bloom/infra/repositories/repository_zone.py @@ -25,6 +25,14 @@ def get_all_zones(self, session: Session) -> list[Zone]: if not q: return [] return [ZoneRepository.map_to_domain(entity) for entity in q] + + def get_all_zone_categories(self, session: Session) -> List[Zone]: + q = session.query(sql_model.Zone.category, + sql_model.Zone.sub_category).distinct() + q=session.execute(q) + if not q: + return [] + return [ZoneRepository.map_to_domain(ZoneCategory(category=cat,sub_category=sub)) for cat,sub in q] def get_all_zone_categories(self, session: Session) -> list[ZoneCategory]: q = session.query(sql_model.Zone.category, @@ -75,6 +83,12 @@ def map_to_domain(zone: sql_model.Zone) -> Zone: json_data=zone.json_data, created_at=zone.created_at, ) + @staticmethod + def map_to_domain(category: ZoneCategory) -> Zone: + return ZoneCategory( + category=category.category, + sub_category=category.sub_category + ) @staticmethod def map_zonecategory_to_domain(category: ZoneCategory) -> ZoneCategory: From 37300457445f397fccef00d76883544c9cd5ce53 Mon Sep 17 00:00:00 2001 From: "herve.le-bars" Date: Fri, 7 Jun 2024 17:46:38 +0200 Subject: [PATCH 06/29] feat: add endpoint /zones/by-category/{cat|all}/by-sub-category/{sub} --- backend/bloom/services/api.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/backend/bloom/services/api.py b/backend/bloom/services/api.py index 489c19de..8e98d09f 100644 --- a/backend/bloom/services/api.py +++ b/backend/bloom/services/api.py @@ -275,6 +275,28 @@ async def get_zone_all_by_category(category: str = "all", nocache: bool = False) logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return json_data +@app.get("/zones/by-category/{category}/by-sub-category/{sub}") +async def get_zone_all_by_category(category:str,sub:str=None,nocache:bool=False): + endpoint=f"/zones/by-category/{category}/by-sub-category/{sub}" + cache= rd.get(endpoint) + start = time.time() + if cache and not nocache: + logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") + payload=json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + return payload + else: + use_cases = UseCases() + zone_repository = use_cases.zone_repository() + db = use_cases.db() + with 
db.session() as session: + json_data = [json.loads(z.model_dump_json() if z else "{}") + for z in zone_repository.get_all_zones_by_category(session,category if category != 'all' else None,sub)] + rd.set(endpoint, json.dumps(json_data)) + rd.expire(endpoint,settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") + return json_data + @app.get("/zones/by-category/{category}") async def get_zone_all_by_category(category:str="amp",nocache:bool=0): endpoint=f"/zones/by-category/{category}" From d1559d872f3489501dd0af57e45bc26de7863f8c Mon Sep 17 00:00:00 2001 From: njouanin Date: Sat, 8 Jun 2024 19:01:38 +0200 Subject: [PATCH 07/29] Corrections de types --- backend/bloom/infra/repositories/repository_zone.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/bloom/infra/repositories/repository_zone.py b/backend/bloom/infra/repositories/repository_zone.py index c93bb297..b359aa46 100644 --- a/backend/bloom/infra/repositories/repository_zone.py +++ b/backend/bloom/infra/repositories/repository_zone.py @@ -25,8 +25,8 @@ def get_all_zones(self, session: Session) -> list[Zone]: if not q: return [] return [ZoneRepository.map_to_domain(entity) for entity in q] - - def get_all_zone_categories(self, session: Session) -> List[Zone]: + + def get_all_zone_categories(self, session: Session) -> list[ZoneCategory]: q = session.query(sql_model.Zone.category, sql_model.Zone.sub_category).distinct() q=session.execute(q) @@ -84,7 +84,7 @@ def map_to_domain(zone: sql_model.Zone) -> Zone: created_at=zone.created_at, ) @staticmethod - def map_to_domain(category: ZoneCategory) -> Zone: + def map_to_domain(category: ZoneCategory) -> ZoneCategory: return ZoneCategory( category=category.category, sub_category=category.sub_category From 4ea40beb0bc0d9d8fba0c2b401c563b1e12fa32f Mon Sep 17 00:00:00 2001 From: njouanin Date: Mon, 10 Jun 2024 21:42:45 +0200 Subject: [PATCH 08/29] MAJ poetry.lock --- backend/poetry.lock | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/poetry.lock b/backend/poetry.lock index f50c9c14..a825f673 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -1811,6 +1811,7 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] From d5d116aaddaf1b0ead67694f4a30349a4965bd11 Mon Sep 17 00:00:00 2001 From: SaboniAmine Date: Mon, 10 Jun 2024 11:07:38 +0200 Subject: [PATCH 09/29] wip => removed file in main --- backend/bloom/config.py | 51 ++++---- backend/bloom/container.py | 2 +- .../repositories/repository_excursion.py | 45 ++----- .../infra/repositories/repository_port.py | 14 ++- backend/bloom/main.py | 65 ++++++++++ backend/bloom/routers/excursions.py | 71 +++++++++++ backend/bloom/routers/ports.py | 45 +++++++ backend/bloom/routers/vessels.py | 91 ++++++++++++++ backend/bloom/routers/zones.py | 119 ++++++++++++++++++ backend/bloom/services/geo.py | 4 +- backend/bloom/tasks/clean_positions.py | 4 +- 
.../tasks/compute_port_geometry_buffer.py | 4 +- ...convert_spire_vessels_to_spire_ais_data.py | 4 +- .../create_update_excursions_segments.py | 8 +- backend/bloom/tasks/load_dim_port_from_csv.py | 4 +- .../bloom/tasks/load_dim_vessel_from_csv.py | 4 +- .../bloom/tasks/load_dim_zone_amp_from_csv.py | 6 +- .../tasks/load_fct_excursions_from_csv.py | 4 +- .../bloom/tasks/load_spire_data_from_api.py | 4 +- .../bloom/tasks/load_spire_data_from_csv.py | 4 +- .../bloom/tasks/load_spire_data_from_json.py | 4 +- .../bloom/tasks/update_vessel_data_voyage.py | 4 +- backend/bloom/usecase/Excursions.py | 30 +++++ backend/bloom/usecase/Ports.py | 20 +++ backend/tests/test_alert.py | 4 +- 25 files changed, 518 insertions(+), 97 deletions(-) create mode 100644 backend/bloom/main.py create mode 100644 backend/bloom/routers/excursions.py create mode 100644 backend/bloom/routers/ports.py create mode 100644 backend/bloom/routers/vessels.py create mode 100644 backend/bloom/routers/zones.py create mode 100644 backend/bloom/usecase/Excursions.py create mode 100644 backend/bloom/usecase/Ports.py diff --git a/backend/bloom/config.py b/backend/bloom/config.py index a89a4462..8cd70e0a 100644 --- a/backend/bloom/config.py +++ b/backend/bloom/config.py @@ -16,6 +16,7 @@ model_validator ) + class Settings(BaseSettings): model_config = SettingsConfigDict( # validate_assignment=True allows to update db_url value as soon as one of @@ -27,44 +28,44 @@ class Settings(BaseSettings): env_ignore_empty=True, env_nested_delimiter='__', env_file='.env', - env_file_encoding = 'utf-8', + env_file_encoding='utf-8', extra='ignore' - ) - + ) + # Déclaration des attributs/paramètres disponibles au sein de la class settings - postgres_user:str = Field(default='') - postgres_password:str = Field(default='') - postgres_hostname:str = Field(min_length=1, - default='localhost') - postgres_port:int = Field(gt=1024, - default=5432) + postgres_user: str = Field(default='') + postgres_password: str = Field(default='') + postgres_hostname: str = Field(min_length=1, + default='localhost') + postgres_port: int = Field(gt=1024, + default=5432) - postgres_db:str = Field(min_length=1,max_length=32,pattern=r'^(?:[a-zA-Z]|_)[\w\d_]*$') + postgres_db: str = Field(min_length=1, max_length=32, pattern=r'^(?:[a-zA-Z]|_)[\w\d_]*$') srid: int = Field(default=4326) - spire_token:str = Field(default='') - data_folder:str=Field(default=str(Path(__file__).parent.parent.parent.joinpath('./data'))) - db_url:str=Field(default='') + spire_token: str = Field(default='') + data_folder: str = Field(default=str(Path(__file__).parent.parent.parent.joinpath('./data'))) + db_url: str = Field(default='') redis_host: str = Field(default='localhost') redis_port: int = Field(default=6379) redis_cache_expiration: int = Field(default=900) - - logging_level:str=Field( - default="INFO", - pattern=r'NOTSET|DEBUG|INFO|WARNING|ERROR|CRITICAL' - ) + + logging_level: str = Field( + default="INFO", + pattern=r'NOTSET|DEBUG|INFO|WARNING|ERROR|CRITICAL' + ) @model_validator(mode='after') - def update_db_url(self)->dict: - new_url= f"postgresql://{self.postgres_user}:"\ - f"{self.postgres_password}@{self.postgres_hostname}:"\ - f"{self.postgres_port}/{self.postgres_db}" + def update_db_url(self) -> dict: + new_url = f"postgresql://{self.postgres_user}:" \ + f"{self.postgres_password}@{self.postgres_hostname}:" \ + f"{self.postgres_port}/{self.postgres_db}" if self.db_url != new_url: - self.db_url = new_url + self.db_url = new_url return self settings = 
Settings(_env_file=os.getenv('BLOOM_CONFIG', - Path(__file__).parent.parent.parent.joinpath('.env')), + Path(__file__).parent.parent.parent.joinpath('.env')), _secrets_dir=os.getenv('BLOOM_SECRETS_DIR', - Path(__file__).parent.parent.parent.joinpath('./secrets'))) + Path(__file__).parent.parent.parent.joinpath('./secrets'))) diff --git a/backend/bloom/container.py b/backend/bloom/container.py index c8c693cd..338701d9 100644 --- a/backend/bloom/container.py +++ b/backend/bloom/container.py @@ -14,7 +14,7 @@ from dependency_injector import containers, providers -class UseCases(containers.DeclarativeContainer): +class UseCasesContainer(containers.DeclarativeContainer): config = providers.Configuration() db_url = settings.db_url db = providers.Singleton( diff --git a/backend/bloom/infra/repositories/repository_excursion.py b/backend/bloom/infra/repositories/repository_excursion.py index 5f869585..e01eb668 100644 --- a/backend/bloom/infra/repositories/repository_excursion.py +++ b/backend/bloom/infra/repositories/repository_excursion.py @@ -34,18 +34,21 @@ def get_param_from_last_excursion(self, session: Session, vessel_id: int) -> Uni return None return {"arrival_port_id": result.arrival_port_id, "arrival_position": result.arrival_position} - def get_excursions_by_vessel_id(self, session: Session, vessel_id: int) -> List[Excursion]: - """Recheche l'excursion en cours d'un bateau, c'est-à-dire l'excursion qui n'a pas de date d'arrivée""" - stmt = select(sql_model.Excursion).where(sql_model.Excursion.vessel_id == vessel_id) - result = session.execute(stmt).scalars() - if not result: - return [] + def get_excursions_by_vessel_id(self, vessel_id: int) -> List[Excursion]: + with self.session_factory as session: + """Recheche l'excursion en cours d'un bateau, c'est-à-dire l'excursion qui n'a pas de date d'arrivée""" + stmt = select(sql_model.Excursion).where(sql_model.Excursion.vessel_id == vessel_id) + result = session.execute(stmt).scalars() + if not result: + return [] return [ExcursionRepository.map_to_domain(r) for r in result] def get_vessel_excursion_by_id(self, session: Session, vessel_id: int, excursion_id: int) -> Union[Excursion, None]: """Recheche l'excursion en cours d'un bateau, c'est-à-dire l'excursion qui n'a pas de date d'arrivée""" - stmt = select(sql_model.Excursion).where((sql_model.Excursion.vessel_id == vessel_id) - & (sql_model.Excursion.id == excursion_id)) + stmt = select(sql_model.Excursion).where( + (sql_model.Excursion.vessel_id == vessel_id) + & (sql_model.Excursion.id == excursion_id) + ) result = session.execute(stmt).scalar() if not result: return None @@ -164,29 +167,3 @@ def map_to_domain(excursion: sql_model.Excursion) -> Excursion: created_at=excursion.created_at, updated_at=excursion.updated_at ) - - @staticmethod - def map_to_orm(excursion: Excursion) -> sql_model.Excursion: - return sql_model.Excursion( - id=excursion.id, - vessel_id=excursion.vessel_id, - departure_port_id=excursion.departure_port_id, - departure_at=excursion.departure_at, - departure_position=from_shape( - excursion.departure_position) if excursion.departure_position is not None else None, - arrival_port_id=excursion.arrival_port_id, - arrival_at=excursion.arrival_at, - arrival_position=from_shape(excursion.arrival_position) if excursion.arrival_position is not None else None, - excursion_duration=excursion.excursion_duration, - total_time_at_sea=excursion.total_time_at_sea, - total_time_in_amp=excursion.total_time_in_amp, - 
total_time_in_territorial_waters=excursion.total_time_fishing_in_territorial_waters, - total_time_in_costal_waters=excursion.total_time_fishing_in_costal_waters, - total_time_fishing=excursion.total_time_fishing, - total_time_fishing_in_amp=excursion.total_time_fishing_in_amp, - total_time_fishing_in_territorial_waters=excursion.total_time_fishing_in_territorial_waters, - total_time_fishing_in_costal_waters=excursion.total_time_fishing_in_costal_waters, - total_time_extincting_amp=excursion.total_time_extincting_amp, - created_at=excursion.created_at, - updated_at=excursion.updated_at - ) diff --git a/backend/bloom/infra/repositories/repository_port.py b/backend/bloom/infra/repositories/repository_port.py index 264dc6e1..4783f62c 100644 --- a/backend/bloom/infra/repositories/repository_port.py +++ b/backend/bloom/infra/repositories/repository_port.py @@ -17,17 +17,19 @@ class PortRepository: def __init__(self, session_factory: Callable) -> None: self.session_factory = session_factory - def get_port_by_id(self, session: Session, port_id: int) -> Union[Port, None]: - entity = session.get(sql_model.Port, port_id) + def get_port_by_id(self, port_id: int) -> Union[Port, None]: + with self.session_factory() as session: + entity = session.get(sql_model.Port, port_id) if entity is not None: return PortRepository.map_to_domain(entity) else: return None - def get_all_ports(self, session: Session) -> List[Port]: - q = session.query(sql_model.Port) - if not q: - return [] + def get_all_ports(self) -> List[Port]: + with self.session_factory() as session: + q = session.query(sql_model.Port) + if not q: + return [] return [PortRepository.map_to_domain(entity) for entity in q] def get_empty_geometry_buffer_ports(self, session: Session) -> list[Port]: diff --git a/backend/bloom/main.py b/backend/bloom/main.py new file mode 100644 index 00000000..7b2a2c08 --- /dev/null +++ b/backend/bloom/main.py @@ -0,0 +1,65 @@ +from fastapi import FastAPI +from starlette.requests import Request + +from bloom.container import UseCasesContainer +from bloom.routers import excursions, zones, vessels, ports +from bloom.routers.vessels import router, rd + + +def init_db(container): + db = container.db() + db.create_database() + + +def create_app() -> FastAPI: + container = init_container() + + init_db(container) + server = init_server(container) + # server.add_exception_handler(DBException, db_exception_handler) + # server.add_exception_handler(ValidationError, validation_exception_handler) + # server.add_exception_handler(Exception, generic_exception_handler) + + return server + + +def init_container(): + container = UseCasesContainer() + container.wire( + modules=[ + zones, + vessels, + excursions, + ports + ] + ) + return container + + +def init_server(container): + server = FastAPI(dependencies=[]) + server.container = container + server.include_router(excursions.router) + server.include_router(ports.router) + server.include_router(vessels.router) + server.include_router(zones.router) + return server + + +app = create_app() + + +@app.get("/") +async def root(request: Request): + return { + "maptiles": f"{request.url_for('list_maptiles')}", + "ports": f"{request.url_for('list_ports')}", + "vessels": f"{request.url_for('list_vessels')}", + "zones": f"{request.url_for('list_zones')}", + } + + +@router.get("/cache/all/flush") +async def cache_all_flush(request: Request): + await rd.flushall() + return {"code": 0} diff --git a/backend/bloom/routers/excursions.py b/backend/bloom/routers/excursions.py new file mode 100644 index 
00000000..8008c51b --- /dev/null +++ b/backend/bloom/routers/excursions.py @@ -0,0 +1,71 @@ +import json +import time + +import redis +from dependency_injector.wiring import inject, Provide +from fastapi import APIRouter, Depends + +from bloom.config import settings +from bloom.container import UseCasesContainer +from bloom.logger import logger +from bloom.usecase.Excursions import ExcursionUseCase + +rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0) + +router = APIRouter() + + +@router.get("/vessels/{vessel_id}/excursions") +@inject +async def list_vessel_excursions( + vessel_id: int, + nocache: bool = False, + excursion_usecase: ExcursionUseCase = Depends( + Provide[UseCasesContainer.emission_service] + ) +): + endpoint = f"/vessels/{vessel_id}/excursions" + cache = rd.get(endpoint) + start = time.time() + if cache and not nocache: + logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return payload + else: + return excursion_usecase.list_vessel_excursions(vessel_id) + + +@router.get("/vessels/{vessel_id}/excursions/{excursions_id}") +async def get_vessel_excursion( + vessel_id: int, + excursions_id: int, + excursion_usecase: ExcursionUseCase = Depends( + Provide[UseCasesContainer.emission_service] + )): + return excursion_usecase.get_excursion_by_id(vessel_id, excursions_id) + + +@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments") +@inject +async def list_vessel_excursion_segments( + vessel_id: int, + excursions_id: int, + excursion_usecase: ExcursionUseCase = Depends( + Provide[UseCasesContainer.emission_service] + ) +): + return excursion_usecase.get_excursions_segments(vessel_id, excursions_id) + + +@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments/{segment_id}") +@inject +async def get_vessel_excursion_segment( + vessel_id: int, + excursions_id: int, + segment_id: int, + excursion_usecase: ExcursionUseCase = Depends( + Provide[UseCasesContainer.emission_service] + ) +): + return await excursion_usecase.get_segment_by_id(vessel_id, excursions_id, segment_id) \ No newline at end of file diff --git a/backend/bloom/routers/ports.py b/backend/bloom/routers/ports.py new file mode 100644 index 00000000..2c5df458 --- /dev/null +++ b/backend/bloom/routers/ports.py @@ -0,0 +1,45 @@ +import json +import time + +from redis import Redis +from dependency_injector.wiring import inject, Provide +from fastapi import APIRouter, Depends +from bloom.config import settings +from bloom.container import UseCasesContainer +from bloom.logger import logger +from bloom.services.api import rd +from bloom.usecase.Ports import PortUseCase + +router = APIRouter() +redis_client = Redis(host=settings.redis_host, port=settings.redis_port, db=0) + + +@router.get("/ports") +@inject +async def list_ports( + nocache: bool = False, + ports_usecase: PortUseCase = Depends( + Provide[UseCasesContainer.emission_service] + ) +): + endpoint = f"/ports" + cache = rd.get(endpoint) + start = time.time() + if cache and not nocache: + logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return payload + else: + return ports_usecase.list_ports() + + +@router.get("/ports/{port_id}") +@inject +async def get_port( + port_id: int, + ports_usecase: PortUseCase = Depends( + Provide[UseCasesContainer.emission_service] + ) +): + return 
ports_usecase.get_port_by_id(port_id) diff --git a/backend/bloom/routers/vessels.py b/backend/bloom/routers/vessels.py new file mode 100644 index 00000000..d60e8481 --- /dev/null +++ b/backend/bloom/routers/vessels.py @@ -0,0 +1,91 @@ +from fastapi import APIRouter + +from redis import Redis +import json +import time +from bloom.config import settings +from bloom.container import UseCasesContainer +from bloom.logger import logger + +rd = Redis(host=settings.redis_host, port=settings.redis_port, db=0) + + +router = APIRouter() + + +@router.get("/vessels") +async def list_vessels(nocache: bool = False): + endpoint = f"/vessels" + cache = rd.get(endpoint) + start = time.time() + if cache and not nocache: + logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return payload + else: + use_cases = UseCasesContainer() + vessel_repository = use_cases.vessel_repository() + db = use_cases.db() + with db.session() as session: + + json_data = [json.loads(v.model_dump_json() if v else "{}") + for v in vessel_repository.get_vessels_list(session)] + rd.set(endpoint, json.dumps(json_data)) + rd.expire(endpoint, settings.redis_cache_expiration) + return json_data + + +@router.get("/vessels/{vessel_id}") +async def get_vessel(vessel_id: int): + use_cases = UseCasesContainer() + vessel_repository = use_cases.vessel_repository() + db = use_cases.db() + with db.session() as session: + return vessel_repository.get_vessel_by_id(session, vessel_id) + + +@router.get("/vessels/all/positions/last") +async def list_all_vessel_last_position(nocache: bool = False): + endpoint = f"/vessels/all/positions/last" + cache = rd.get(endpoint) + start = time.time() + if cache and not nocache: + logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return payload + else: + use_cases = UseCasesContainer() + segment_repository = use_cases.segment_repository() + db = use_cases.db() + with db.session() as session: + json_data = [json.loads(p.model_dump_json() if p else "{}") + for p in segment_repository.get_all_vessels_last_position(session)] + await rd.set(endpoint, json.dumps(json_data)) + await rd.expire(endpoint, settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return json_data + + +@router.get("/vessels/{vessel_id}/positions/last") +async def get_vessel_last_position(vessel_id: int, nocache: bool = False): + endpoint = f"/vessels/{vessel_id}/positions/last" + cache = rd.get(endpoint) + start = time.time() + if cache and not nocache: + logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return payload + else: + use_cases = UseCasesContainer() + segment_repository = use_cases.segment_repository() + db = use_cases.db() + with db.session() as session: + result = segment_repository.get_vessel_last_position(session, vessel_id) + json_data = json.loads(result.model_dump_json() if result else "{}") + await rd.set(endpoint, json.dumps(json_data)) + await rd.expire(endpoint, settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return json_data diff --git a/backend/bloom/routers/zones.py b/backend/bloom/routers/zones.py new file mode 100644 index 00000000..2c1b5978 --- /dev/null +++ 
b/backend/bloom/routers/zones.py @@ -0,0 +1,119 @@ +import json +import time + +import redis +from fastapi import APIRouter +from starlette.requests import Request + +from bloom.config import settings +from bloom.container import UseCasesContainer +from bloom.logger import logger +from bloom.main import app + +rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0) + +router = APIRouter() + + +@router.get("/zones") +async def list_zones(request: Request, nocache: bool = False): + endpoint = f"/zones" + cache = rd.get(endpoint) + start = time.time() + if cache and not nocache: + logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return payload + else: + use_cases = UseCasesContainer() + zone_repository = use_cases.zone_repository() + db = use_cases.db() + with db.session() as session: + json_data = [json.loads(z.model_dump_json() if z else "{}") + for z in zone_repository.get_all_zones(session)] + await rd.set(endpoint, json.dumps(json_data)) + await rd.expire(endpoint, settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return json_data + + +@router.get("/zones/all/categories") +async def list_zone_categories(request: Request, nocache: bool = False): + endpoint = f"/zones/all/categories" + cache = rd.get(endpoint) + start = time.time() + if cache and not nocache: + logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return payload + else: + use_cases = UseCasesContainer() + zone_repository = use_cases.zone_repository() + db = use_cases.db() + with db.session() as session: + json_data = [json.loads(z.model_dump_json() if z else "{}") + for z in zone_repository.get_all_zone_categories(session)] + await rd.set(endpoint, json.dumps(json_data)) + await rd.expire(endpoint, settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return json_data + + +@router.get("/zones/by-category/{category}/by-sub-category/{sub}") +async def get_zone_all_by_category(category: str = "all", sub: str = None, nocache: bool = False): + endpoint = f"/zones/by-category/{category}/by-sub-category/{sub}" + cache = rd.get(endpoint) + start = time.time() + if cache and not nocache: + logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return payload + else: + use_cases = UseCasesContainer() + zone_repository = use_cases.zone_repository() + db = use_cases.db() + with db.session() as session: + json_data = [json.loads(z.model_dump_json() if z else "{}") + for z in + zone_repository.get_all_zones_by_category(session, category if category != 'all' else None, + sub)] + await rd.set(endpoint, json.dumps(json_data)) + await rd.expire(endpoint, settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return json_data + + +@router.get("/zones/by-category/{category}") +async def get_zone_all_by_category(category: str = "all", nocache: bool = False): + endpoint = f"/zones/by-category/{category}" + cache = rd.get(endpoint) + start = time.time() + if cache and not nocache: + logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") + payload = json.loads(cache) + logger.debug(f"{endpoint} elapsed Time: {time.time() - 
start}") + return payload + else: + use_cases = UseCasesContainer() + zone_repository = use_cases.zone_repository() + db = use_cases.db() + with db.session() as session: + json_data = [json.loads(z.model_dump_json() if z else "{}") + for z in + zone_repository.get_all_zones_by_category(session, category if category != 'all' else None)] + await rd.set(endpoint, json.dumps(json_data)) + await rd.expire(endpoint, settings.redis_cache_expiration) + logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") + return json_data + + +@router.get("/zones/{zones_id}") +async def get_zone(zones_id: int): + use_cases = UseCasesContainer() + zone_repository = use_cases.zone_repository() + db = use_cases.db() + with db.session() as session: + return zone_repository.get_zone_by_id(session, zones_id) diff --git a/backend/bloom/services/geo.py b/backend/bloom/services/geo.py index 81f3d674..588aa69b 100644 --- a/backend/bloom/services/geo.py +++ b/backend/bloom/services/geo.py @@ -2,7 +2,7 @@ import pandas as pd import geopandas as gpd -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.config import settings def find_positions_in_port_buffer(vessel_positions: List[tuple]) -> List[tuple]: @@ -28,7 +28,7 @@ def find_positions_in_port_buffer(vessel_positions: List[tuple]) -> List[tuple]: ) # Get all ports from DataBase - use_cases = UseCases() + use_cases = UseCasesContainer() port_repository = use_cases.port_repository() db = use_cases.db() with db.session() as session: diff --git a/backend/bloom/tasks/clean_positions.py b/backend/bloom/tasks/clean_positions.py index fc6e26cc..c9d1448e 100644 --- a/backend/bloom/tasks/clean_positions.py +++ b/backend/bloom/tasks/clean_positions.py @@ -8,7 +8,7 @@ from geopy import distance from shapely.geometry import Point -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.domain.vessel_position import VesselPosition from bloom.infra.repositories.repository_task_execution import TaskExecutionRepository from bloom.logger import logger @@ -51,7 +51,7 @@ def to_coords(row: pd.Series) -> pd.Series: def run(batch_time): - use_cases = UseCases() + use_cases = UseCasesContainer() db = use_cases.db() spire_repository = use_cases.spire_ais_data_repository() excursion_repository = use_cases.excursion_repository() diff --git a/backend/bloom/tasks/compute_port_geometry_buffer.py b/backend/bloom/tasks/compute_port_geometry_buffer.py index 186caa86..fb1cd858 100644 --- a/backend/bloom/tasks/compute_port_geometry_buffer.py +++ b/backend/bloom/tasks/compute_port_geometry_buffer.py @@ -5,7 +5,7 @@ import pyproj import shapely from bloom.config import settings -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.logger import logger from scipy.spatial import Voronoi from shapely.geometry import LineString, Polygon @@ -93,7 +93,7 @@ def assign_voronoi_buffer(ports: gpd.GeoDataFrame) -> gpd.GeoDataFrame: def run() -> None: - use_cases = UseCases() + use_cases = UseCasesContainer() port_repository = use_cases.port_repository() db = use_cases.db() items = [] diff --git a/backend/bloom/tasks/convert_spire_vessels_to_spire_ais_data.py b/backend/bloom/tasks/convert_spire_vessels_to_spire_ais_data.py index df120b9e..c1bcaea0 100644 --- a/backend/bloom/tasks/convert_spire_vessels_to_spire_ais_data.py +++ b/backend/bloom/tasks/convert_spire_vessels_to_spire_ais_data.py @@ -1,7 +1,7 @@ from time import perf_counter from typing import Generator -from bloom.container 
import UseCases +from bloom.container import UseCasesContainer from bloom.domain.spire_ais_data import SpireAisData from bloom.infra.database.sql_model import VesselPositionSpire from bloom.logger import logger @@ -9,7 +9,7 @@ from shapely import Point from sqlalchemy.orm.session import Session -use_cases = UseCases() +use_cases = UseCasesContainer() vessel_repo = use_cases.vessel_repository() spire_ais_data_repo = use_cases.spire_ais_data_repository() db = use_cases.db() diff --git a/backend/bloom/tasks/create_update_excursions_segments.py b/backend/bloom/tasks/create_update_excursions_segments.py index a9fa2f22..2fbcea40 100644 --- a/backend/bloom/tasks/create_update_excursions_segments.py +++ b/backend/bloom/tasks/create_update_excursions_segments.py @@ -10,7 +10,7 @@ from shapely.geometry import Point from sqlalchemy.orm import Session -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.domain.excursion import Excursion from bloom.domain.segment import Segment from bloom.infra.repositories.repository_task_execution import TaskExecutionRepository @@ -38,7 +38,7 @@ def to_coords(row: pd.Series) -> pd.Series: def add_excursion(session: Session, vessel_id: int, departure_at: datetime, departure_position: Optional[Point] = None) -> int: - use_cases = UseCases() + use_cases = UseCasesContainer() excursion_repository = use_cases.excursion_repository() result = excursion_repository.get_param_from_last_excursion(session, vessel_id) @@ -74,7 +74,7 @@ def add_excursion(session: Session, vessel_id: int, departure_at: datetime, def close_excursion(session: Session, excursion_id: int, port_id: int, latitude: float, longitude: float, arrived_at: datetime) -> None: - use_cases = UseCases() + use_cases = UseCasesContainer() excursion_repository = use_cases.excursion_repository() excursion = excursion_repository.get_excursion_by_id(session, excursion_id) @@ -87,7 +87,7 @@ def close_excursion(session: Session, excursion_id: int, port_id: int, latitude: def run(): - use_cases = UseCases() + use_cases = UseCasesContainer() db = use_cases.db() segment_repository = use_cases.segment_repository() vessel_position_repository = use_cases.vessel_position_repository() diff --git a/backend/bloom/tasks/load_dim_port_from_csv.py b/backend/bloom/tasks/load_dim_port_from_csv.py index 00a2eef8..ad275304 100644 --- a/backend/bloom/tasks/load_dim_port_from_csv.py +++ b/backend/bloom/tasks/load_dim_port_from_csv.py @@ -5,7 +5,7 @@ import pandas as pd import pycountry from bloom.config import settings -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.domain.port import Port from bloom.infra.database.errors import DBException from bloom.logger import logger @@ -29,7 +29,7 @@ def map_to_domain(row) -> Port: def run(csv_file_name: str) -> None: - use_cases = UseCases() + use_cases = UseCasesContainer() port_repository = use_cases.port_repository() db = use_cases.db() diff --git a/backend/bloom/tasks/load_dim_vessel_from_csv.py b/backend/bloom/tasks/load_dim_vessel_from_csv.py index 1d0a1e96..0dde3873 100644 --- a/backend/bloom/tasks/load_dim_vessel_from_csv.py +++ b/backend/bloom/tasks/load_dim_vessel_from_csv.py @@ -3,7 +3,7 @@ import pandas as pd from bloom.config import settings -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.domain.vessel import Vessel from bloom.infra.database.errors import DBException from bloom.logger import logger @@ -33,7 +33,7 @@ def map_to_domain(row: pd.Series) 
-> Vessel: def run(csv_file_name: str) -> None: - use_cases = UseCases() + use_cases = UseCasesContainer() vessel_repository = use_cases.vessel_repository() db = use_cases.db() diff --git a/backend/bloom/tasks/load_dim_zone_amp_from_csv.py b/backend/bloom/tasks/load_dim_zone_amp_from_csv.py index 99e3ae1a..b2ed85ad 100644 --- a/backend/bloom/tasks/load_dim_zone_amp_from_csv.py +++ b/backend/bloom/tasks/load_dim_zone_amp_from_csv.py @@ -5,7 +5,7 @@ from shapely import wkb from bloom.config import settings -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.domain.zone import Zone from bloom.logger import logger @@ -34,8 +34,8 @@ def map_to_domain(row: pd.Series) -> Zone: ) -def run(): - use_cases = UseCases() +def run(csv_file_name: str): + use_cases = UseCasesContainer() db = use_cases.db() zone_repository = use_cases.zone_repository() diff --git a/backend/bloom/tasks/load_fct_excursions_from_csv.py b/backend/bloom/tasks/load_fct_excursions_from_csv.py index 208deb13..c96e1d5b 100644 --- a/backend/bloom/tasks/load_fct_excursions_from_csv.py +++ b/backend/bloom/tasks/load_fct_excursions_from_csv.py @@ -6,7 +6,7 @@ from datetime import datetime from shapely.geometry import Point from bloom.config import settings -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.infra.database.errors import DBException from bloom.logger import logger from bloom.domain.spire_ais_data import SpireAisData @@ -119,7 +119,7 @@ def get_point(end_position: str) -> Point: return Point(end_position[1], end_position[0]) def run(excursion_csv_filename: str, segment_csv_filename: str, spire_csv_filename: str) -> None: - use_cases = UseCases() + use_cases = UseCasesContainer() excursion_repository = use_cases.excursion_repository() # vessel_position_repository = use_cases.vessel_position_repository() segment_repository = use_cases.segment_repository() diff --git a/backend/bloom/tasks/load_spire_data_from_api.py b/backend/bloom/tasks/load_spire_data_from_api.py index 29d9b1e5..214b55a0 100644 --- a/backend/bloom/tasks/load_spire_data_from_api.py +++ b/backend/bloom/tasks/load_spire_data_from_api.py @@ -4,7 +4,7 @@ from pathlib import Path from time import perf_counter -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.domain.vessel import Vessel from bloom.infra.http.spire_api_utils import map_raw_vessels_to_domain from bloom.logger import logger @@ -12,7 +12,7 @@ def run(dump_path: str) -> None: - use_cases = UseCases() + use_cases = UseCasesContainer() spire_ais_data_repository = use_cases.spire_ais_data_repository() spire_traffic_usecase = use_cases.get_spire_data_usecase() vessel_repository = use_cases.vessel_repository() diff --git a/backend/bloom/tasks/load_spire_data_from_csv.py b/backend/bloom/tasks/load_spire_data_from_csv.py index 3968b1f1..e02bcd70 100644 --- a/backend/bloom/tasks/load_spire_data_from_csv.py +++ b/backend/bloom/tasks/load_spire_data_from_csv.py @@ -3,7 +3,7 @@ import pandas as pd from bloom.config import settings -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.domain.spire_ais_data import SpireAisData from bloom.infra.database.errors import DBException from bloom.logger import logger @@ -51,7 +51,7 @@ def map_to_domain(row: pd.Series) -> SpireAisData: def run(csv_file_name: str): - use_cases = UseCases() + use_cases = UseCasesContainer() db = use_cases.db() spire_ais_data_repository = 
use_cases.spire_ais_data_repository() diff --git a/backend/bloom/tasks/load_spire_data_from_json.py b/backend/bloom/tasks/load_spire_data_from_json.py index 04fbf907..36dc4512 100644 --- a/backend/bloom/tasks/load_spire_data_from_json.py +++ b/backend/bloom/tasks/load_spire_data_from_json.py @@ -3,14 +3,14 @@ from pathlib import Path from time import perf_counter -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.infra.http.spire_api_utils import map_raw_vessels_to_domain from bloom.logger import logger from pydantic import ValidationError def run(file_name: str) -> None: - use_cases = UseCases() + use_cases = UseCasesContainer() spire_ais_data_repository = use_cases.spire_ais_data_repository() db = use_cases.db() diff --git a/backend/bloom/tasks/update_vessel_data_voyage.py b/backend/bloom/tasks/update_vessel_data_voyage.py index ef4886ff..daff9964 100644 --- a/backend/bloom/tasks/update_vessel_data_voyage.py +++ b/backend/bloom/tasks/update_vessel_data_voyage.py @@ -1,7 +1,7 @@ from datetime import datetime, timezone from time import perf_counter -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.domain.spire_ais_data import SpireAisData from bloom.domain.vessel import Vessel from bloom.domain.vessel_data import VesselData @@ -47,7 +47,7 @@ def map_ais_data_to_vessel_voyage(ais_data: SpireAisData, vessel: Vessel) -> Uni def run() -> None: - use_cases = UseCases() + use_cases = UseCasesContainer() spire_ais_data_repository = use_cases.spire_ais_data_repository() vessel_repository = use_cases.vessel_repository() db = use_cases.db() diff --git a/backend/bloom/usecase/Excursions.py b/backend/bloom/usecase/Excursions.py new file mode 100644 index 00000000..716ee3cf --- /dev/null +++ b/backend/bloom/usecase/Excursions.py @@ -0,0 +1,30 @@ +import json + +from bloom.config import settings +from bloom.logger import logger + + +class ExcursionUseCase: + def __init__(self, excursions_repository, redis_client): + self.excursions_repository = excursions_repository + self.redis_client = redis_client + self.endpoint = f"/vessels/excursions" + + def list_vessel_excursions(self, vessel_id, with_cache=True): + return self.excursions_repository.get_vessel_excursions(vessel_id, with_cache) + + async def get_excursions_by_vessel_id(self, vessel_id): + excursions = self.excursions_repository.get_excursions_by_vessel_id(vessel_id) + + await self.redis_client.set(self.endpoint, json.dumps(excursions)) + await self.redis_client.expire(self.endpoint, settings.redis_cache_expiration) + return self.excursions_repository.get_excursions_by_vessel_id(vessel_id) + + async def get_excursion_by_id(self, vessel_id, excursions_id): + return self.excursions_repository.get_excursion_by_id(vessel_id, excursions_id) + + async def get_excursions_segments(self, vessel_id, excursions_id, segment_id): + return self.excursions_repository.get(vessel_id, excursions_id, segment_id) + + async def get_segment_by_id(self, vessel_id, excursions_id, segment_id): + return self.excursions_repository.get_segment_by_id(vessel_id, excursions_id, segment_id) diff --git a/backend/bloom/usecase/Ports.py b/backend/bloom/usecase/Ports.py new file mode 100644 index 00000000..83d69b11 --- /dev/null +++ b/backend/bloom/usecase/Ports.py @@ -0,0 +1,20 @@ +import json + +from bloom.config import settings +from bloom.infra.repositories.repository_port import PortRepository + + +class PortUseCase: + def __init__(self, ports_repository: PortRepository, redis_client): + 
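The refactor in this patch moves route handlers toward constructor-injected use cases (PortUseCase, ExcursionUseCase) resolved from the dependency-injector container instead of building repositories inside each endpoint. A condensed, self-contained sketch of that wiring pattern is shown below; the class and provider names are illustrative stand-ins rather than the project's actual container entries.

from dependency_injector import containers, providers
from dependency_injector.wiring import Provide, inject
from fastapi import Depends, FastAPI


class InMemoryPortRepository:  # stand-in for PortRepository, no database needed
    def get_all_ports(self):
        return [{"id": 1, "name": "example-port"}]


class PortUseCase:  # mirrors the shape of backend/bloom/usecase/Ports.py
    def __init__(self, ports_repository):
        self.ports_repository = ports_repository

    def list_ports(self):
        return self.ports_repository.get_all_ports()


class Container(containers.DeclarativeContainer):
    port_repository = providers.Factory(InMemoryPortRepository)
    port_usecase = providers.Factory(PortUseCase, ports_repository=port_repository)


app = FastAPI()


@app.get("/ports")
@inject
async def list_ports(usecase: PortUseCase = Depends(Provide[Container.port_usecase])):
    return usecase.list_ports()


container = Container()
container.wire(modules=[__name__])  # wiring resolves the Provide[...] markers in this module
app.container = container

With this arrangement the handler never constructs repositories or sessions itself, which is what lets the new routers stay free of session management.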
self.ports_repository = ports_repository + self.redis_client = redis_client + self.caching_key = 'ports:caching' + + async def list_ports(self): + ports = self.ports_repository.get_all_ports() + await self.redis_client.set(self.caching_key, json.dumps(ports)) + await self.redis_client.expire(self.caching_key, settings.redis_cache_expiration) + return ports + + async def get_port_by_id(self, port_id): + return self.ports_repository.get_port_by_id(port_id) diff --git a/backend/tests/test_alert.py b/backend/tests/test_alert.py index e8a15a56..b1fe7929 100644 --- a/backend/tests/test_alert.py +++ b/backend/tests/test_alert.py @@ -1,4 +1,4 @@ -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.domain.alert import Alert from datetime import datetime, timezone @@ -13,7 +13,7 @@ def test_launch_alert(): - use_cases = UseCases() + use_cases = UseCasesContainer() alert_usecase = use_cases.generate_alert_usecase() status_code = alert_usecase.send_slack_alert( test_alert, From 902f9939c1bb513130946ff9faea78af630ba1ae Mon Sep 17 00:00:00 2001 From: SaboniAmine Date: Sat, 6 Jul 2024 16:26:57 +0200 Subject: [PATCH 10/29] wip2 --- backend/bloom/container.py | 23 +++++++++++++++-------- backend/bloom/main.py | 5 ++++- backend/bloom/routers/excursions.py | 4 +++- backend/bloom/routers/vessels.py | 4 ++-- clevercloud/cron.json | 3 +++ 5 files changed, 27 insertions(+), 12 deletions(-) create mode 100644 clevercloud/cron.json diff --git a/backend/bloom/container.py b/backend/bloom/container.py index 338701d9..62ead74f 100644 --- a/backend/bloom/container.py +++ b/backend/bloom/container.py @@ -10,6 +10,7 @@ from bloom.infra.repositories.repository_segment import SegmentRepository from bloom.infra.repositories.repository_zone import ZoneRepository from bloom.services.GetVesselsFromSpire import GetVesselsFromSpire +from bloom.usecase.Excursions import ExcursionUseCase from bloom.usecase.GenerateAlerts import GenerateAlerts from dependency_injector import containers, providers @@ -57,14 +58,6 @@ class UseCasesContainer(containers.DeclarativeContainer): session_factory=db.provided.session, ) - get_spire_data_usecase = providers.Factory(GetVesselsFromSpire) - - generate_alert_usecase = providers.Factory( - GenerateAlerts, - alert_repository=alert_repository, - raster_repository=raster_repository, - ) - spire_ais_data_repository = providers.Factory( SpireAisDataRepository, session_factory=db.provided.session, @@ -74,3 +67,17 @@ class UseCasesContainer(containers.DeclarativeContainer): SegmentRepository, session_factory=db.provided.session, ) + get_spire_data_usecase = providers.Factory( + GetVesselsFromSpire + ) + + generate_alert_usecase = providers.Factory( + GenerateAlerts, + alert_repository=alert_repository, + raster_repository=raster_repository, + ) + + excursion_usecase = providers.Factory( + ExcursionUseCase, + excursion_repository=excursion_repository, + ) diff --git a/backend/bloom/main.py b/backend/bloom/main.py index 7b2a2c08..00f81850 100644 --- a/backend/bloom/main.py +++ b/backend/bloom/main.py @@ -23,6 +23,9 @@ def create_app() -> FastAPI: return server +def schedule_crawling(): + pass + def init_container(): container = UseCasesContainer() container.wire( @@ -59,7 +62,7 @@ async def root(request: Request): } -@router.get("/cache/all/flush") +@app.get("/cache/all/flush") async def cache_all_flush(request: Request): await rd.flushall() return {"code": 0} diff --git a/backend/bloom/routers/excursions.py b/backend/bloom/routers/excursions.py index 
8008c51b..046b335f 100644 --- a/backend/bloom/routers/excursions.py +++ b/backend/bloom/routers/excursions.py @@ -1,5 +1,6 @@ import json import time +from typing import List import redis from dependency_injector.wiring import inject, Provide @@ -7,6 +8,7 @@ from bloom.config import settings from bloom.container import UseCasesContainer +from bloom.domain.excursion import Excursion from bloom.logger import logger from bloom.usecase.Excursions import ExcursionUseCase @@ -23,7 +25,7 @@ async def list_vessel_excursions( excursion_usecase: ExcursionUseCase = Depends( Provide[UseCasesContainer.emission_service] ) -): +) -> List[Excursion]: endpoint = f"/vessels/{vessel_id}/excursions" cache = rd.get(endpoint) start = time.time() diff --git a/backend/bloom/routers/vessels.py b/backend/bloom/routers/vessels.py index d60e8481..2f2560c1 100644 --- a/backend/bloom/routers/vessels.py +++ b/backend/bloom/routers/vessels.py @@ -31,8 +31,8 @@ async def list_vessels(nocache: bool = False): json_data = [json.loads(v.model_dump_json() if v else "{}") for v in vessel_repository.get_vessels_list(session)] - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint, settings.redis_cache_expiration) + await rd.set(endpoint, json.dumps(json_data)) + await rd.expire(endpoint, settings.redis_cache_expiration) return json_data diff --git a/clevercloud/cron.json b/clevercloud/cron.json new file mode 100644 index 00000000..d9c4722f --- /dev/null +++ b/clevercloud/cron.json @@ -0,0 +1,3 @@ +[ + "*/15 * * * * python -m backend/bloom/tasks/load_spire_data_from_api" +] \ No newline at end of file From 0bd92aa1c7a56797b01645486950be3207912504 Mon Sep 17 00:00:00 2001 From: SaboniAmine Date: Sat, 6 Jul 2024 16:46:04 +0200 Subject: [PATCH 11/29] add requirements --- backend/requirements.txt | 329 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 329 insertions(+) create mode 100644 backend/requirements.txt diff --git a/backend/requirements.txt b/backend/requirements.txt new file mode 100644 index 00000000..2961024d --- /dev/null +++ b/backend/requirements.txt @@ -0,0 +1,329 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --output-file=requirements.txt pyproject.toml +# +alembic==1.13.2 + # via bloom (pyproject.toml) +annotated-types==0.7.0 + # via pydantic +anyio==4.4.0 + # via + # gql + # httpx + # starlette + # watchfiles +async-timeout==4.0.3 + # via redis +attrs==23.2.0 + # via fiona +autopep8==2.0.4 + # via bloom (pyproject.toml) +backoff==2.2.1 + # via gql +build==1.2.1 + # via poetry +cachecontrol[filecache]==0.14.0 + # via poetry +certifi==2024.7.4 + # via + # fiona + # httpcore + # httpx + # pyproj + # requests +cffi==1.16.0 + # via xattr +charset-normalizer==3.3.2 + # via requests +cleo==2.1.0 + # via poetry +click==8.1.7 + # via + # click-plugins + # cligj + # fiona + # typer + # uvicorn +click-plugins==1.1.1 + # via fiona +cligj==0.7.2 + # via fiona +contourpy==1.2.1 + # via matplotlib +crashtest==0.4.1 + # via + # cleo + # poetry +cycler==0.12.1 + # via matplotlib +dependency-injection==1.2.0 + # via bloom (pyproject.toml) +dependency-injector==4.41.0 + # via bloom (pyproject.toml) +distlib==0.3.8 + # via virtualenv +dnspython==2.6.1 + # via email-validator +dulwich==0.21.7 + # via poetry +email-validator==2.2.0 + # via fastapi +et-xmlfile==1.1.0 + # via openpyxl +exceptiongroup==1.2.1 + # via anyio +fastapi==0.111.0 + # via bloom (pyproject.toml) +fastapi-cli==0.0.4 + # via fastapi +fastjsonschema==2.20.0 + # via poetry 
+filelock==3.15.4 + # via + # cachecontrol + # virtualenv +fiona==1.9.6 + # via geopandas +fonttools==4.53.1 + # via matplotlib +geoalchemy2==0.14.7 + # via bloom (pyproject.toml) +geographiclib==2.0 + # via geopy +geopandas==0.14.4 + # via bloom (pyproject.toml) +geopy==2.4.1 + # via bloom (pyproject.toml) +gql==3.5.0 + # via bloom (pyproject.toml) +graphql-core==3.2.3 + # via gql +h11==0.14.0 + # via + # httpcore + # uvicorn +httpcore==1.0.5 + # via httpx +httptools==0.6.1 + # via uvicorn +httpx==0.27.0 + # via fastapi +idna==3.7 + # via + # anyio + # email-validator + # httpx + # requests + # yarl +importlib-metadata==8.0.0 + # via keyring +installer==0.7.0 + # via poetry +jaraco-classes==3.4.0 + # via keyring +jinja2==3.1.4 + # via fastapi +keyring==24.3.1 + # via poetry +kiwisolver==1.4.5 + # via matplotlib +mako==1.3.5 + # via alembic +markdown-it-py==3.0.0 + # via rich +markupsafe==2.1.5 + # via + # jinja2 + # mako +matplotlib==3.8.4 + # via bloom (pyproject.toml) +mdurl==0.1.2 + # via markdown-it-py +more-itertools==10.3.0 + # via jaraco-classes +msgpack==1.0.8 + # via cachecontrol +multidict==6.0.5 + # via yarl +numpy==1.26.4 + # via + # contourpy + # geopandas + # matplotlib + # pandas + # scipy + # shapely +openpyxl==3.1.5 + # via bloom (pyproject.toml) +orjson==3.10.6 + # via fastapi +packaging==24.1 + # via + # build + # geoalchemy2 + # geopandas + # matplotlib + # poetry +pandas==2.2.2 + # via + # bloom (pyproject.toml) + # geopandas +pexpect==4.9.0 + # via poetry +pillow==10.4.0 + # via matplotlib +pkginfo==1.11.1 + # via poetry +platformdirs==4.2.2 + # via + # poetry + # virtualenv +poetry==1.8.3 + # via + # bloom (pyproject.toml) + # poetry-plugin-export +poetry-core==1.9.0 + # via + # poetry + # poetry-plugin-export +poetry-plugin-export==1.8.0 + # via poetry +psycopg2-binary==2.9.9 + # via bloom (pyproject.toml) +ptyprocess==0.7.0 + # via pexpect +pycodestyle==2.12.0 + # via autopep8 +pycountry==23.12.11 + # via bloom (pyproject.toml) +pycparser==2.22 + # via cffi +pydantic==2.6.4 + # via + # bloom (pyproject.toml) + # fastapi + # pydantic-settings +pydantic-core==2.16.3 + # via pydantic +pydantic-settings==2.2.1 + # via bloom (pyproject.toml) +pygments==2.18.0 + # via rich +pyparsing==3.1.2 + # via matplotlib +pyproj==3.6.1 + # via geopandas +pyproject-hooks==1.1.0 + # via + # build + # poetry +python-dateutil==2.9.0.post0 + # via + # matplotlib + # pandas +python-dotenv==1.0.1 + # via + # bloom (pyproject.toml) + # pydantic-settings + # uvicorn +python-multipart==0.0.9 + # via fastapi +pytz==2024.1 + # via pandas +pyyaml==6.0.1 + # via + # bloom (pyproject.toml) + # uvicorn +rapidfuzz==3.9.4 + # via cleo +redis==5.0.7 + # via bloom (pyproject.toml) +requests==2.31.0 + # via + # bloom (pyproject.toml) + # cachecontrol + # poetry + # requests-toolbelt +requests-toolbelt==1.0.0 + # via + # bloom (pyproject.toml) + # poetry +rich==13.7.1 + # via typer +scipy==1.12.0 + # via bloom (pyproject.toml) +shapely==2.0.4 + # via + # bloom (pyproject.toml) + # geopandas +shellingham==1.5.4 + # via + # poetry + # typer +six==1.16.0 + # via + # dependency-injector + # fiona + # python-dateutil +slack-sdk==3.27.2 + # via bloom (pyproject.toml) +sniffio==1.3.1 + # via + # anyio + # httpx +sqlalchemy==2.0.31 + # via + # alembic + # bloom (pyproject.toml) + # geoalchemy2 +starlette==0.37.2 + # via fastapi +tomli==2.0.1 + # via + # autopep8 + # build + # poetry +tomlkit==0.12.5 + # via poetry +trove-classifiers==2024.7.2 + # via poetry +typer==0.12.3 + # via fastapi-cli 
+typing-extensions==4.12.2 + # via + # alembic + # anyio + # fastapi + # pydantic + # pydantic-core + # sqlalchemy + # typer + # uvicorn +tzdata==2024.1 + # via pandas +ujson==5.10.0 + # via fastapi +urllib3==2.2.2 + # via + # dulwich + # requests +uvicorn[standard]==0.30.1 + # via + # bloom (pyproject.toml) + # fastapi +uvloop==0.19.0 + # via uvicorn +virtualenv==20.26.3 + # via poetry +watchfiles==0.22.0 + # via uvicorn +websockets==12.0 + # via uvicorn +xattr==1.1.0 + # via poetry +yarl==1.9.4 + # via gql +zipp==3.19.2 + # via importlib-metadata From d90a3c7c46bafe04d7cd717ac9582be8aa819923 Mon Sep 17 00:00:00 2001 From: SaboniAmine Date: Sat, 6 Jul 2024 16:50:34 +0200 Subject: [PATCH 12/29] move main file --- backend/bloom/__init__.py | 0 backend/bloom/routers/excursions.py | 8 ++++---- backend/bloom/routers/ports.py | 7 +++---- backend/bloom/routers/zones.py | 1 - backend/{bloom => }/main.py | 3 ++- 5 files changed, 9 insertions(+), 10 deletions(-) create mode 100644 backend/bloom/__init__.py rename backend/{bloom => }/main.py (97%) diff --git a/backend/bloom/__init__.py b/backend/bloom/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/bloom/routers/excursions.py b/backend/bloom/routers/excursions.py index 046b335f..5e6a4373 100644 --- a/backend/bloom/routers/excursions.py +++ b/backend/bloom/routers/excursions.py @@ -23,7 +23,7 @@ async def list_vessel_excursions( vessel_id: int, nocache: bool = False, excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCasesContainer.emission_service] + Provide[UseCasesContainer.excursion_usecase] ) ) -> List[Excursion]: endpoint = f"/vessels/{vessel_id}/excursions" @@ -43,7 +43,7 @@ async def get_vessel_excursion( vessel_id: int, excursions_id: int, excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCasesContainer.emission_service] + Provide[UseCasesContainer.excursion_usecase] )): return excursion_usecase.get_excursion_by_id(vessel_id, excursions_id) @@ -54,7 +54,7 @@ async def list_vessel_excursion_segments( vessel_id: int, excursions_id: int, excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCasesContainer.emission_service] + Provide[UseCasesContainer.excursion_usecase] ) ): return excursion_usecase.get_excursions_segments(vessel_id, excursions_id) @@ -67,7 +67,7 @@ async def get_vessel_excursion_segment( excursions_id: int, segment_id: int, excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCasesContainer.emission_service] + Provide[UseCasesContainer.excursion_usecase] ) ): return await excursion_usecase.get_segment_by_id(vessel_id, excursions_id, segment_id) \ No newline at end of file diff --git a/backend/bloom/routers/ports.py b/backend/bloom/routers/ports.py index 2c5df458..0aeffddb 100644 --- a/backend/bloom/routers/ports.py +++ b/backend/bloom/routers/ports.py @@ -7,7 +7,6 @@ from bloom.config import settings from bloom.container import UseCasesContainer from bloom.logger import logger -from bloom.services.api import rd from bloom.usecase.Ports import PortUseCase router = APIRouter() @@ -19,11 +18,11 @@ async def list_ports( nocache: bool = False, ports_usecase: PortUseCase = Depends( - Provide[UseCasesContainer.emission_service] + Provide[UseCasesContainer.excursion_usecase] ) ): endpoint = f"/ports" - cache = rd.get(endpoint) + cache = redis_client.get(endpoint) start = time.time() if cache and not nocache: logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") @@ -39,7 +38,7 @@ async def list_ports( async def get_port( port_id: int, ports_usecase: 
PortUseCase = Depends( - Provide[UseCasesContainer.emission_service] + Provide[UseCasesContainer.excursion_usecase] ) ): return ports_usecase.get_port_by_id(port_id) diff --git a/backend/bloom/routers/zones.py b/backend/bloom/routers/zones.py index 2c1b5978..198f98b2 100644 --- a/backend/bloom/routers/zones.py +++ b/backend/bloom/routers/zones.py @@ -8,7 +8,6 @@ from bloom.config import settings from bloom.container import UseCasesContainer from bloom.logger import logger -from bloom.main import app rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0) diff --git a/backend/bloom/main.py b/backend/main.py similarity index 97% rename from backend/bloom/main.py rename to backend/main.py index 00f81850..d39fb8c0 100644 --- a/backend/bloom/main.py +++ b/backend/main.py @@ -3,7 +3,7 @@ from bloom.container import UseCasesContainer from bloom.routers import excursions, zones, vessels, ports -from bloom.routers.vessels import router, rd +from bloom.routers.vessels import rd def init_db(container): @@ -26,6 +26,7 @@ def create_app() -> FastAPI: def schedule_crawling(): pass + def init_container(): container = UseCasesContainer() container.wire( From 884a9903f2d23212ef71e04cb05f5c83474531d7 Mon Sep 17 00:00:00 2001 From: SaboniAmine Date: Sat, 6 Jul 2024 17:18:17 +0200 Subject: [PATCH 13/29] change cronjob definition --- backend/main.py | 7 +------ clevercloud/cron.json | 2 +- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/backend/main.py b/backend/main.py index d39fb8c0..92c3ea65 100644 --- a/backend/main.py +++ b/backend/main.py @@ -55,12 +55,7 @@ def init_server(container): @app.get("/") async def root(request: Request): - return { - "maptiles": f"{request.url_for('list_maptiles')}", - "ports": f"{request.url_for('list_ports')}", - "vessels": f"{request.url_for('list_vessels')}", - "zones": f"{request.url_for('list_zones')}", - } + return {"status": "ok"} @app.get("/cache/all/flush") diff --git a/clevercloud/cron.json b/clevercloud/cron.json index d9c4722f..da8772e8 100644 --- a/clevercloud/cron.json +++ b/clevercloud/cron.json @@ -1,3 +1,3 @@ [ - "*/15 * * * * python -m backend/bloom/tasks/load_spire_data_from_api" + "*/15 * * * * python backend/bloom/tasks/load_spire_data_from_api.py" ] \ No newline at end of file From 5728e175111c13e6cac3258ccdb0b7a14f0e3676 Mon Sep 17 00:00:00 2001 From: SaboniAmine Date: Sat, 6 Jul 2024 17:34:43 +0200 Subject: [PATCH 14/29] add init.py files --- backend/bloom/domain/__init__.py | 0 backend/bloom/infra/repositories/__init__.py | 0 backend/bloom/routers/__init__.py | 0 backend/bloom/tasks/__init__.py | 0 clevercloud/cron.json | 2 +- 5 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 backend/bloom/domain/__init__.py create mode 100644 backend/bloom/infra/repositories/__init__.py create mode 100644 backend/bloom/routers/__init__.py create mode 100644 backend/bloom/tasks/__init__.py diff --git a/backend/bloom/domain/__init__.py b/backend/bloom/domain/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/bloom/infra/repositories/__init__.py b/backend/bloom/infra/repositories/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/bloom/routers/__init__.py b/backend/bloom/routers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/bloom/tasks/__init__.py b/backend/bloom/tasks/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/clevercloud/cron.json b/clevercloud/cron.json index da8772e8..33debd2a 100644 --- 
a/clevercloud/cron.json +++ b/clevercloud/cron.json @@ -1,3 +1,3 @@ [ - "*/15 * * * * python backend/bloom/tasks/load_spire_data_from_api.py" + "*/15 * * * * python /home/bas/app_7244f095-70bc-43c3-9950-d075c01af05f/backend/bloom/tasks/load_spire_data_from_api.py" ] \ No newline at end of file From 8af264c4a334d127c1c993245076e62ba0fc575e Mon Sep 17 00:00:00 2001 From: SaboniAmine Date: Sat, 6 Jul 2024 18:13:15 +0200 Subject: [PATCH 15/29] add init.py files --- backend/bloom/infra/database/__init__.py | 0 backend/bloom/infra/http/__init__.py | 0 backend/bloom/usecase/__init__.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 backend/bloom/infra/database/__init__.py create mode 100644 backend/bloom/infra/http/__init__.py create mode 100644 backend/bloom/usecase/__init__.py diff --git a/backend/bloom/infra/database/__init__.py b/backend/bloom/infra/database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/bloom/infra/http/__init__.py b/backend/bloom/infra/http/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/bloom/usecase/__init__.py b/backend/bloom/usecase/__init__.py new file mode 100644 index 00000000..e69de29b From 698cdded6118232251c1741e02f9837eb4f1e066 Mon Sep 17 00:00:00 2001 From: SaboniAmine Date: Sat, 6 Jul 2024 18:37:53 +0200 Subject: [PATCH 16/29] switch loader place --- backend/{bloom/tasks => }/load_spire_data_from_api.py | 0 clevercloud/cron.json | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename backend/{bloom/tasks => }/load_spire_data_from_api.py (100%) diff --git a/backend/bloom/tasks/load_spire_data_from_api.py b/backend/load_spire_data_from_api.py similarity index 100% rename from backend/bloom/tasks/load_spire_data_from_api.py rename to backend/load_spire_data_from_api.py diff --git a/clevercloud/cron.json b/clevercloud/cron.json index 33debd2a..07868fe4 100644 --- a/clevercloud/cron.json +++ b/clevercloud/cron.json @@ -1,3 +1,3 @@ [ - "*/15 * * * * python /home/bas/app_7244f095-70bc-43c3-9950-d075c01af05f/backend/bloom/tasks/load_spire_data_from_api.py" + "*/15 * * * * python /home/bas/app_7244f095-70bc-43c3-9950-d075c01af05f/backend/load_spire_data_from_api.py" ] \ No newline at end of file From 1c9d7f9acc352cfc0203ce4fa5908f57216cd635 Mon Sep 17 00:00:00 2001 From: SaboniAmine Date: Sat, 6 Jul 2024 18:49:39 +0200 Subject: [PATCH 17/29] fix cronjob --- backend/bloom/config.py | 8 -------- clevercloud/cron.json | 2 +- 2 files changed, 1 insertion(+), 9 deletions(-) diff --git a/backend/bloom/config.py b/backend/bloom/config.py index 8cd70e0a..38372e7b 100644 --- a/backend/bloom/config.py +++ b/backend/bloom/config.py @@ -2,17 +2,9 @@ from pathlib import Path from pydantic_settings import BaseSettings, SettingsConfigDict -from typing import Any from pydantic import ( - AliasChoices, - AmqpDsn, - BaseModel, Field, - ImportString, - PostgresDsn, - RedisDsn, - field_validator, model_validator ) diff --git a/clevercloud/cron.json b/clevercloud/cron.json index 07868fe4..c8b3b95b 100644 --- a/clevercloud/cron.json +++ b/clevercloud/cron.json @@ -1,3 +1,3 @@ [ - "*/15 * * * * python /home/bas/app_7244f095-70bc-43c3-9950-d075c01af05f/backend/load_spire_data_from_api.py" + "*/15 * * * * /home/bas/venv/bin/python /home/bas/app_7244f095-70bc-43c3-9950-d075c01af05f/backend/load_spire_data_from_api.py" ] \ No newline at end of file From 97a451a1228d3a49beb9d5860af0264c826db561 Mon Sep 17 00:00:00 2001 From: SaboniAmine Date: Sun, 7 Jul 2024 19:16:51 +0200 Subject: [PATCH 18/29] add config 
file path to config for cronjob --- clevercloud/cron.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clevercloud/cron.json b/clevercloud/cron.json index c8b3b95b..d8ef525e 100644 --- a/clevercloud/cron.json +++ b/clevercloud/cron.json @@ -1,3 +1,3 @@ [ - "*/15 * * * * /home/bas/venv/bin/python /home/bas/app_7244f095-70bc-43c3-9950-d075c01af05f/backend/load_spire_data_from_api.py" + "*/5 * * * * /home/bas/venv/bin/python /home/bas/app_7244f095-70bc-43c3-9950-d075c01af05f/backend/load_spire_data_from_api.py" ] \ No newline at end of file From 86659de765b1db7c0be2076a50c404f6a7f691df Mon Sep 17 00:00:00 2001 From: SaboniAmine Date: Sun, 7 Jul 2024 19:57:15 +0200 Subject: [PATCH 19/29] fix cronjob planning back --- clevercloud/cron.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clevercloud/cron.json b/clevercloud/cron.json index d8ef525e..c8b3b95b 100644 --- a/clevercloud/cron.json +++ b/clevercloud/cron.json @@ -1,3 +1,3 @@ [ - "*/5 * * * * /home/bas/venv/bin/python /home/bas/app_7244f095-70bc-43c3-9950-d075c01af05f/backend/load_spire_data_from_api.py" + "*/15 * * * * /home/bas/venv/bin/python /home/bas/app_7244f095-70bc-43c3-9950-d075c01af05f/backend/load_spire_data_from_api.py" ] \ No newline at end of file From bcf7604e9260f63d9859dce66ce3ec5c56be937b Mon Sep 17 00:00:00 2001 From: marthevienne <123016211+marthevienne@users.noreply.github.com> Date: Mon, 23 Mar 2026 15:10:33 +0100 Subject: [PATCH 20/29] #528: switch API hostname + log API hostname --- backend/bloom/services/GetVesselsFromSpire.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/backend/bloom/services/GetVesselsFromSpire.py b/backend/bloom/services/GetVesselsFromSpire.py index a4d659a8..d7f0bf0a 100644 --- a/backend/bloom/services/GetVesselsFromSpire.py +++ b/backend/bloom/services/GetVesselsFromSpire.py @@ -14,7 +14,7 @@ def __init__(self) -> None: spire_token = settings.spire_token self.transport = RequestsHTTPTransport( - url="https://api.spire.com/graphql", + url="https://api.sml.kpler.com/graphql", headers={"Authorization": "Bearer " + spire_token}, verify=True, retries=3, @@ -22,6 +22,7 @@ def __init__(self) -> None: ) def create_client(self) -> Client: + print(self.transport.url) try: client = Client(transport=self.transport, fetch_schema_from_transport=True) except exceptions.ConnectTimeout: From 6475f6bbdf5709c011e320484e067c160c205a5a Mon Sep 17 00:00:00 2001 From: marthevienne <123016211+marthevienne@users.noreply.github.com> Date: Tue, 24 Mar 2026 17:35:34 +0100 Subject: [PATCH 21/29] #528: update logs | connection to Kpler hostname --- backend/bloom/services/GetVesselsFromSpire.py | 2 +- backend/load_spire_data_from_api.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/bloom/services/GetVesselsFromSpire.py b/backend/bloom/services/GetVesselsFromSpire.py index d7f0bf0a..12c39d65 100644 --- a/backend/bloom/services/GetVesselsFromSpire.py +++ b/backend/bloom/services/GetVesselsFromSpire.py @@ -22,7 +22,7 @@ def __init__(self) -> None: ) def create_client(self) -> Client: - print(self.transport.url) + logger.info(f"Connecting to {self.transport.url}...") try: client = Client(transport=self.transport, fetch_schema_from_transport=True) except exceptions.ConnectTimeout: diff --git a/backend/load_spire_data_from_api.py b/backend/load_spire_data_from_api.py index 214b55a0..2337ca8d 100644 --- a/backend/load_spire_data_from_api.py +++ b/backend/load_spire_data_from_api.py @@ -58,8 +58,8 @@ def run(dump_path: str) -> None: ) 
args = parser.parse_args() time_start = perf_counter() - logger.info("DEBUT - Chargement des données JSON depuis l'API SPIRE") + logger.info("DEBUT - Chargement des données JSON depuis l'API Kpler") run(args.dump_path) time_end = perf_counter() duration = time_end - time_start - logger.info(f"FIN - Chargement des données depuis l'API SPIRE en {duration:.2f}s") + logger.info(f"FIN - Chargement des données depuis l'API Kpler en {duration:.2f}s") From 874c63dfaa1ab667bab49a4c2017090301041fa0 Mon Sep 17 00:00:00 2001 From: marthevienne <123016211+marthevienne@users.noreply.github.com> Date: Thu, 2 Apr 2026 16:59:39 +0200 Subject: [PATCH 22/29] fix: harmonize container UseCases class name across app --- backend/bloom/container.py | 2 +- backend/bloom/routers/excursions.py | 10 +++++----- backend/bloom/routers/ports.py | 6 +++--- backend/bloom/routers/vessels.py | 10 +++++----- backend/bloom/routers/zones.py | 12 ++++++------ backend/bloom/services/geo.py | 4 ++-- backend/bloom/tasks/clean_positions.py | 4 ++-- backend/bloom/tasks/compute_port_geometry_buffer.py | 4 ++-- .../tasks/convert_spire_vessels_to_spire_ais_data.py | 4 ++-- .../bloom/tasks/create_update_excursions_segments.py | 8 ++++---- backend/bloom/tasks/load_dim_port_from_csv.py | 4 ++-- backend/bloom/tasks/load_dim_vessel_from_csv.py | 4 ++-- backend/bloom/tasks/load_dim_zone_amp_from_csv.py | 4 ++-- backend/bloom/tasks/load_fct_excursions_from_csv.py | 4 ++-- backend/bloom/tasks/load_spire_data_from_csv.py | 4 ++-- backend/bloom/tasks/load_spire_data_from_json.py | 4 ++-- backend/bloom/tasks/update_vessel_data_voyage.py | 4 ++-- backend/load_spire_data_from_api.py | 4 ++-- backend/main.py | 4 ++-- backend/tests/test_alert.py | 4 ++-- 20 files changed, 52 insertions(+), 52 deletions(-) diff --git a/backend/bloom/container.py b/backend/bloom/container.py index fec81a56..d54c6678 100644 --- a/backend/bloom/container.py +++ b/backend/bloom/container.py @@ -19,7 +19,7 @@ import redis -class UseCasesContainer(containers.DeclarativeContainer): +class UseCases(containers.DeclarativeContainer): config = providers.Configuration() db_url = settings.db_url db = providers.Singleton( diff --git a/backend/bloom/routers/excursions.py b/backend/bloom/routers/excursions.py index 5e6a4373..424cf99a 100644 --- a/backend/bloom/routers/excursions.py +++ b/backend/bloom/routers/excursions.py @@ -7,7 +7,7 @@ from fastapi import APIRouter, Depends from bloom.config import settings -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.domain.excursion import Excursion from bloom.logger import logger from bloom.usecase.Excursions import ExcursionUseCase @@ -23,7 +23,7 @@ async def list_vessel_excursions( vessel_id: int, nocache: bool = False, excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCasesContainer.excursion_usecase] + Provide[UseCases.excursion_usecase] ) ) -> List[Excursion]: endpoint = f"/vessels/{vessel_id}/excursions" @@ -43,7 +43,7 @@ async def get_vessel_excursion( vessel_id: int, excursions_id: int, excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCasesContainer.excursion_usecase] + Provide[UseCases.excursion_usecase] )): return excursion_usecase.get_excursion_by_id(vessel_id, excursions_id) @@ -54,7 +54,7 @@ async def list_vessel_excursion_segments( vessel_id: int, excursions_id: int, excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCasesContainer.excursion_usecase] + Provide[UseCases.excursion_usecase] ) ): return excursion_usecase.get_excursions_segments(vessel_id, 
excursions_id) @@ -67,7 +67,7 @@ async def get_vessel_excursion_segment( excursions_id: int, segment_id: int, excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCasesContainer.excursion_usecase] + Provide[UseCases.excursion_usecase] ) ): return await excursion_usecase.get_segment_by_id(vessel_id, excursions_id, segment_id) \ No newline at end of file diff --git a/backend/bloom/routers/ports.py b/backend/bloom/routers/ports.py index 0aeffddb..b9e3653c 100644 --- a/backend/bloom/routers/ports.py +++ b/backend/bloom/routers/ports.py @@ -5,7 +5,7 @@ from dependency_injector.wiring import inject, Provide from fastapi import APIRouter, Depends from bloom.config import settings -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.logger import logger from bloom.usecase.Ports import PortUseCase @@ -18,7 +18,7 @@ async def list_ports( nocache: bool = False, ports_usecase: PortUseCase = Depends( - Provide[UseCasesContainer.excursion_usecase] + Provide[UseCases.excursion_usecase] ) ): endpoint = f"/ports" @@ -38,7 +38,7 @@ async def list_ports( async def get_port( port_id: int, ports_usecase: PortUseCase = Depends( - Provide[UseCasesContainer.excursion_usecase] + Provide[UseCases.excursion_usecase] ) ): return ports_usecase.get_port_by_id(port_id) diff --git a/backend/bloom/routers/vessels.py b/backend/bloom/routers/vessels.py index 2f2560c1..1eede30f 100644 --- a/backend/bloom/routers/vessels.py +++ b/backend/bloom/routers/vessels.py @@ -4,7 +4,7 @@ import json import time from bloom.config import settings -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.logger import logger rd = Redis(host=settings.redis_host, port=settings.redis_port, db=0) @@ -24,7 +24,7 @@ async def list_vessels(nocache: bool = False): logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: - use_cases = UseCasesContainer() + use_cases = UseCases() vessel_repository = use_cases.vessel_repository() db = use_cases.db() with db.session() as session: @@ -38,7 +38,7 @@ async def list_vessels(nocache: bool = False): @router.get("/vessels/{vessel_id}") async def get_vessel(vessel_id: int): - use_cases = UseCasesContainer() + use_cases = UseCases() vessel_repository = use_cases.vessel_repository() db = use_cases.db() with db.session() as session: @@ -56,7 +56,7 @@ async def list_all_vessel_last_position(nocache: bool = False): logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: - use_cases = UseCasesContainer() + use_cases = UseCases() segment_repository = use_cases.segment_repository() db = use_cases.db() with db.session() as session: @@ -79,7 +79,7 @@ async def get_vessel_last_position(vessel_id: int, nocache: bool = False): logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: - use_cases = UseCasesContainer() + use_cases = UseCases() segment_repository = use_cases.segment_repository() db = use_cases.db() with db.session() as session: diff --git a/backend/bloom/routers/zones.py b/backend/bloom/routers/zones.py index 198f98b2..900b7ee8 100644 --- a/backend/bloom/routers/zones.py +++ b/backend/bloom/routers/zones.py @@ -6,7 +6,7 @@ from starlette.requests import Request from bloom.config import settings -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.logger import logger rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0) @@ -25,7 +25,7 @@ async def list_zones(request: Request, 
nocache: bool = False): logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: - use_cases = UseCasesContainer() + use_cases = UseCases() zone_repository = use_cases.zone_repository() db = use_cases.db() with db.session() as session: @@ -48,7 +48,7 @@ async def list_zone_categories(request: Request, nocache: bool = False): logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: - use_cases = UseCasesContainer() + use_cases = UseCases() zone_repository = use_cases.zone_repository() db = use_cases.db() with db.session() as session: @@ -71,7 +71,7 @@ async def get_zone_all_by_category(category: str = "all", sub: str = None, nocac logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: - use_cases = UseCasesContainer() + use_cases = UseCases() zone_repository = use_cases.zone_repository() db = use_cases.db() with db.session() as session: @@ -96,7 +96,7 @@ async def get_zone_all_by_category(category: str = "all", nocache: bool = False) logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") return payload else: - use_cases = UseCasesContainer() + use_cases = UseCases() zone_repository = use_cases.zone_repository() db = use_cases.db() with db.session() as session: @@ -111,7 +111,7 @@ async def get_zone_all_by_category(category: str = "all", nocache: bool = False) @router.get("/zones/{zones_id}") async def get_zone(zones_id: int): - use_cases = UseCasesContainer() + use_cases = UseCases() zone_repository = use_cases.zone_repository() db = use_cases.db() with db.session() as session: diff --git a/backend/bloom/services/geo.py b/backend/bloom/services/geo.py index 588aa69b..81f3d674 100644 --- a/backend/bloom/services/geo.py +++ b/backend/bloom/services/geo.py @@ -2,7 +2,7 @@ import pandas as pd import geopandas as gpd -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.config import settings def find_positions_in_port_buffer(vessel_positions: List[tuple]) -> List[tuple]: @@ -28,7 +28,7 @@ def find_positions_in_port_buffer(vessel_positions: List[tuple]) -> List[tuple]: ) # Get all ports from DataBase - use_cases = UseCasesContainer() + use_cases = UseCases() port_repository = use_cases.port_repository() db = use_cases.db() with db.session() as session: diff --git a/backend/bloom/tasks/clean_positions.py b/backend/bloom/tasks/clean_positions.py index 2da637cb..4e801f87 100644 --- a/backend/bloom/tasks/clean_positions.py +++ b/backend/bloom/tasks/clean_positions.py @@ -8,7 +8,7 @@ from geopy import distance from shapely.geometry import Point -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.domain.vessel_position import VesselPosition from bloom.infra.repositories.repository_task_execution import TaskExecutionRepository from bloom.logger import logger @@ -51,7 +51,7 @@ def to_coords(row: pd.Series) -> pd.Series: def run(batch_time): - use_cases = UseCasesContainer() + use_cases = UseCases() db = use_cases.db() spire_repository = use_cases.spire_ais_data_repository() excursion_repository = use_cases.excursion_repository() diff --git a/backend/bloom/tasks/compute_port_geometry_buffer.py b/backend/bloom/tasks/compute_port_geometry_buffer.py index fb1cd858..186caa86 100644 --- a/backend/bloom/tasks/compute_port_geometry_buffer.py +++ b/backend/bloom/tasks/compute_port_geometry_buffer.py @@ -5,7 +5,7 @@ import pyproj import shapely from bloom.config import settings -from bloom.container import UseCasesContainer +from 
bloom.container import UseCases from bloom.logger import logger from scipy.spatial import Voronoi from shapely.geometry import LineString, Polygon @@ -93,7 +93,7 @@ def assign_voronoi_buffer(ports: gpd.GeoDataFrame) -> gpd.GeoDataFrame: def run() -> None: - use_cases = UseCasesContainer() + use_cases = UseCases() port_repository = use_cases.port_repository() db = use_cases.db() items = [] diff --git a/backend/bloom/tasks/convert_spire_vessels_to_spire_ais_data.py b/backend/bloom/tasks/convert_spire_vessels_to_spire_ais_data.py index c1bcaea0..df120b9e 100644 --- a/backend/bloom/tasks/convert_spire_vessels_to_spire_ais_data.py +++ b/backend/bloom/tasks/convert_spire_vessels_to_spire_ais_data.py @@ -1,7 +1,7 @@ from time import perf_counter from typing import Generator -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.domain.spire_ais_data import SpireAisData from bloom.infra.database.sql_model import VesselPositionSpire from bloom.logger import logger @@ -9,7 +9,7 @@ from shapely import Point from sqlalchemy.orm.session import Session -use_cases = UseCasesContainer() +use_cases = UseCases() vessel_repo = use_cases.vessel_repository() spire_ais_data_repo = use_cases.spire_ais_data_repository() db = use_cases.db() diff --git a/backend/bloom/tasks/create_update_excursions_segments.py b/backend/bloom/tasks/create_update_excursions_segments.py index 1c1d41a9..365c05d4 100644 --- a/backend/bloom/tasks/create_update_excursions_segments.py +++ b/backend/bloom/tasks/create_update_excursions_segments.py @@ -10,7 +10,7 @@ from shapely.geometry import Point from sqlalchemy.orm import Session -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.domain.excursion import Excursion from bloom.domain.segment import Segment from bloom.infra.repositories.repository_task_execution import TaskExecutionRepository @@ -40,7 +40,7 @@ def to_coords(row: pd.Series) -> pd.Series: def add_excursion(session: Session, vessel_id: int, departure_at: datetime, departure_position: Optional[Point] = None) -> int: - use_cases = UseCasesContainer() + use_cases = UseCases() excursion_repository = use_cases.excursion_repository() port_repository = use_cases.port_repository() @@ -82,7 +82,7 @@ def add_excursion(session: Session, vessel_id: int, departure_at: datetime, def close_excursion(session: Session, excursion_id: int, port_id: int, latitude: float, longitude: float, arrived_at: datetime) -> None: - use_cases = UseCasesContainer() + use_cases = UseCases() excursion_repository = use_cases.excursion_repository() port_repository = use_cases.port_repository() @@ -97,7 +97,7 @@ def close_excursion(session: Session, excursion_id: int, port_id: int, latitude: def run(): - use_cases = UseCasesContainer() + use_cases = UseCases() db = use_cases.db() segment_repository = use_cases.segment_repository() vessel_position_repository = use_cases.vessel_position_repository() diff --git a/backend/bloom/tasks/load_dim_port_from_csv.py b/backend/bloom/tasks/load_dim_port_from_csv.py index ad275304..00a2eef8 100644 --- a/backend/bloom/tasks/load_dim_port_from_csv.py +++ b/backend/bloom/tasks/load_dim_port_from_csv.py @@ -5,7 +5,7 @@ import pandas as pd import pycountry from bloom.config import settings -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.domain.port import Port from bloom.infra.database.errors import DBException from bloom.logger import logger @@ -29,7 +29,7 @@ def map_to_domain(row) -> Port: def 
run(csv_file_name: str) -> None: - use_cases = UseCasesContainer() + use_cases = UseCases() port_repository = use_cases.port_repository() db = use_cases.db() diff --git a/backend/bloom/tasks/load_dim_vessel_from_csv.py b/backend/bloom/tasks/load_dim_vessel_from_csv.py index 0dde3873..fc83b161 100644 --- a/backend/bloom/tasks/load_dim_vessel_from_csv.py +++ b/backend/bloom/tasks/load_dim_vessel_from_csv.py @@ -3,7 +3,7 @@ import pandas as pd from bloom.config import settings -from bloom.container import UseCasesContainer +from bloom.container import r from bloom.domain.vessel import Vessel from bloom.infra.database.errors import DBException from bloom.logger import logger @@ -33,7 +33,7 @@ def map_to_domain(row: pd.Series) -> Vessel: def run(csv_file_name: str) -> None: - use_cases = UseCasesContainer() + use_cases = r() vessel_repository = use_cases.vessel_repository() db = use_cases.db() diff --git a/backend/bloom/tasks/load_dim_zone_amp_from_csv.py b/backend/bloom/tasks/load_dim_zone_amp_from_csv.py index 3c94d5a3..990057fa 100644 --- a/backend/bloom/tasks/load_dim_zone_amp_from_csv.py +++ b/backend/bloom/tasks/load_dim_zone_amp_from_csv.py @@ -5,7 +5,7 @@ from shapely import wkb from bloom.config import settings -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.domain.zone import Zone from bloom.logger import logger @@ -35,7 +35,7 @@ def map_to_domain(row: pd.Series) -> Zone: def run(csv_file_name: str): - use_cases = UseCasesContainer() + use_cases = UseCases() db = use_cases.db() zone_repository = use_cases.zone_repository() diff --git a/backend/bloom/tasks/load_fct_excursions_from_csv.py b/backend/bloom/tasks/load_fct_excursions_from_csv.py index 1f99cc47..6e2fbcaa 100644 --- a/backend/bloom/tasks/load_fct_excursions_from_csv.py +++ b/backend/bloom/tasks/load_fct_excursions_from_csv.py @@ -6,7 +6,7 @@ from datetime import datetime from shapely.geometry import Point from bloom.config import settings -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.infra.database.errors import DBException from bloom.logger import logger from bloom.domain.spire_ais_data import SpireAisData @@ -119,7 +119,7 @@ def get_point(end_position: str) -> Point: return Point(end_position[1], end_position[0]) def run(excursion_csv_filename: str, segment_csv_filename: str, spire_csv_filename: str) -> None: - use_cases = UseCasesContainer() + use_cases = UseCases() excursion_repository = use_cases.excursion_repository() # vessel_position_repository = use_cases.vessel_position_repository() segment_repository = use_cases.segment_repository() diff --git a/backend/bloom/tasks/load_spire_data_from_csv.py b/backend/bloom/tasks/load_spire_data_from_csv.py index e02bcd70..3968b1f1 100644 --- a/backend/bloom/tasks/load_spire_data_from_csv.py +++ b/backend/bloom/tasks/load_spire_data_from_csv.py @@ -3,7 +3,7 @@ import pandas as pd from bloom.config import settings -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.domain.spire_ais_data import SpireAisData from bloom.infra.database.errors import DBException from bloom.logger import logger @@ -51,7 +51,7 @@ def map_to_domain(row: pd.Series) -> SpireAisData: def run(csv_file_name: str): - use_cases = UseCasesContainer() + use_cases = UseCases() db = use_cases.db() spire_ais_data_repository = use_cases.spire_ais_data_repository() diff --git a/backend/bloom/tasks/load_spire_data_from_json.py b/backend/bloom/tasks/load_spire_data_from_json.py 
index 36dc4512..04fbf907 100644 --- a/backend/bloom/tasks/load_spire_data_from_json.py +++ b/backend/bloom/tasks/load_spire_data_from_json.py @@ -3,14 +3,14 @@ from pathlib import Path from time import perf_counter -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.infra.http.spire_api_utils import map_raw_vessels_to_domain from bloom.logger import logger from pydantic import ValidationError def run(file_name: str) -> None: - use_cases = UseCasesContainer() + use_cases = UseCases() spire_ais_data_repository = use_cases.spire_ais_data_repository() db = use_cases.db() diff --git a/backend/bloom/tasks/update_vessel_data_voyage.py b/backend/bloom/tasks/update_vessel_data_voyage.py index daff9964..ef4886ff 100644 --- a/backend/bloom/tasks/update_vessel_data_voyage.py +++ b/backend/bloom/tasks/update_vessel_data_voyage.py @@ -1,7 +1,7 @@ from datetime import datetime, timezone from time import perf_counter -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.domain.spire_ais_data import SpireAisData from bloom.domain.vessel import Vessel from bloom.domain.vessel_data import VesselData @@ -47,7 +47,7 @@ def map_ais_data_to_vessel_voyage(ais_data: SpireAisData, vessel: Vessel) -> Uni def run() -> None: - use_cases = UseCasesContainer() + use_cases = UseCases() spire_ais_data_repository = use_cases.spire_ais_data_repository() vessel_repository = use_cases.vessel_repository() db = use_cases.db() diff --git a/backend/load_spire_data_from_api.py b/backend/load_spire_data_from_api.py index 73069cbc..e40526b0 100644 --- a/backend/load_spire_data_from_api.py +++ b/backend/load_spire_data_from_api.py @@ -4,7 +4,7 @@ from pathlib import Path from time import perf_counter -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.domain.vessel import Vessel from bloom.infra.http.spire_api_utils import map_raw_vessels_to_domain from bloom.logger import logger @@ -14,7 +14,7 @@ def run(dump_path: str) -> None: - use_cases = UseCasesContainer() + use_cases = UseCases() spire_ais_data_repository = use_cases.spire_ais_data_repository() spire_traffic_usecase = use_cases.get_spire_data_usecase() vessel_repository = use_cases.vessel_repository() diff --git a/backend/main.py b/backend/main.py index 92c3ea65..89af792b 100644 --- a/backend/main.py +++ b/backend/main.py @@ -1,7 +1,7 @@ from fastapi import FastAPI from starlette.requests import Request -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.routers import excursions, zones, vessels, ports from bloom.routers.vessels import rd @@ -28,7 +28,7 @@ def schedule_crawling(): def init_container(): - container = UseCasesContainer() + container = UseCases() container.wire( modules=[ zones, diff --git a/backend/tests/test_alert.py b/backend/tests/test_alert.py index b1fe7929..e8a15a56 100644 --- a/backend/tests/test_alert.py +++ b/backend/tests/test_alert.py @@ -1,4 +1,4 @@ -from bloom.container import UseCasesContainer +from bloom.container import UseCases from bloom.domain.alert import Alert from datetime import datetime, timezone @@ -13,7 +13,7 @@ def test_launch_alert(): - use_cases = UseCasesContainer() + use_cases = UseCases() alert_usecase = use_cases.generate_alert_usecase() status_code = alert_usecase.send_slack_alert( test_alert, From 413130215762c8b7a23b9234ae5503c6b2d81d57 Mon Sep 17 00:00:00 2001 From: marthevienne <123016211+marthevienne@users.noreply.github.com> Date: Thu, 2 Apr 2026 
17:16:32 +0200 Subject: [PATCH 23/29] quick-fix: update root name CC instance --- clevercloud/cron.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clevercloud/cron.json b/clevercloud/cron.json index c8b3b95b..899ec7bf 100644 --- a/clevercloud/cron.json +++ b/clevercloud/cron.json @@ -1,3 +1,3 @@ [ - "*/15 * * * * /home/bas/venv/bin/python /home/bas/app_7244f095-70bc-43c3-9950-d075c01af05f/backend/load_spire_data_from_api.py" + "*/15 * * * * /home/bas/venv/bin/python /home/bas/app_6bc7c917-d187-4efa-be8b-abd1da8f82da/backend/load_spire_data_from_api.py" ] \ No newline at end of file From c9099997189830f27a4e7b75bab40847400807cb Mon Sep 17 00:00:00 2001 From: marthevienne <123016211+marthevienne@users.noreply.github.com> Date: Thu, 2 Apr 2026 17:39:14 +0200 Subject: [PATCH 24/29] quick-fix: lower default api_pooling_period --- backend/bloom/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/bloom/config.py b/backend/bloom/config.py index a28779da..b20d2385 100644 --- a/backend/bloom/config.py +++ b/backend/bloom/config.py @@ -44,7 +44,7 @@ class Settings(BaseSettings): redis_password: str = Field(default='bloom',min_length=1) redis_cache_expiration: int = Field(default=900) - api_pooling_period: timedelta = Field(default=timedelta(minutes=15)) + api_pooling_period: timedelta = Field(default=timedelta(minutes=2)) logging_level:str=Field( default="INFO", From d65036a5642e13db164b399872b5b1c582869d87 Mon Sep 17 00:00:00 2001 From: marthevienne <123016211+marthevienne@users.noreply.github.com> Date: Fri, 3 Apr 2026 12:03:49 +0200 Subject: [PATCH 25/29] fix: manual corrections to reach main state --- .../infra/repositories/repository_port.py | 5 +- backend/bloom/routers/excursions.py | 73 ----------- backend/bloom/routers/ports.py | 44 ------- backend/bloom/routers/vessels.py | 91 -------------- backend/bloom/routers/zones.py | 118 ------------------ .../bloom/tasks/load_dim_vessel_from_csv.py | 4 +- 6 files changed, 4 insertions(+), 331 deletions(-) delete mode 100644 backend/bloom/routers/excursions.py delete mode 100644 backend/bloom/routers/ports.py delete mode 100644 backend/bloom/routers/vessels.py delete mode 100644 backend/bloom/routers/zones.py diff --git a/backend/bloom/infra/repositories/repository_port.py b/backend/bloom/infra/repositories/repository_port.py index ae39c758..cc89409e 100644 --- a/backend/bloom/infra/repositories/repository_port.py +++ b/backend/bloom/infra/repositories/repository_port.py @@ -18,9 +18,8 @@ class PortRepository: def __init__(self, session_factory: Callable) -> None: self.session_factory = session_factory - def get_port_by_id(self, port_id: int) -> Union[Port, None]: - with self.session_factory() as session: - entity = session.get(sql_model.Port, port_id) + def get_port_by_id(self, session: Session, port_id: int) -> Union[Port, None]: + entity = session.get(sql_model.Port, port_id) if entity is not None: return PortRepository.map_to_domain(entity) else: diff --git a/backend/bloom/routers/excursions.py b/backend/bloom/routers/excursions.py deleted file mode 100644 index 424cf99a..00000000 --- a/backend/bloom/routers/excursions.py +++ /dev/null @@ -1,73 +0,0 @@ -import json -import time -from typing import List - -import redis -from dependency_injector.wiring import inject, Provide -from fastapi import APIRouter, Depends - -from bloom.config import settings -from bloom.container import UseCases -from bloom.domain.excursion import Excursion -from bloom.logger import logger -from bloom.usecase.Excursions 
import ExcursionUseCase - -rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0) - -router = APIRouter() - - -@router.get("/vessels/{vessel_id}/excursions") -@inject -async def list_vessel_excursions( - vessel_id: int, - nocache: bool = False, - excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCases.excursion_usecase] - ) -) -> List[Excursion]: - endpoint = f"/vessels/{vessel_id}/excursions" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - return excursion_usecase.list_vessel_excursions(vessel_id) - - -@router.get("/vessels/{vessel_id}/excursions/{excursions_id}") -async def get_vessel_excursion( - vessel_id: int, - excursions_id: int, - excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCases.excursion_usecase] - )): - return excursion_usecase.get_excursion_by_id(vessel_id, excursions_id) - - -@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments") -@inject -async def list_vessel_excursion_segments( - vessel_id: int, - excursions_id: int, - excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCases.excursion_usecase] - ) -): - return excursion_usecase.get_excursions_segments(vessel_id, excursions_id) - - -@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments/{segment_id}") -@inject -async def get_vessel_excursion_segment( - vessel_id: int, - excursions_id: int, - segment_id: int, - excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCases.excursion_usecase] - ) -): - return await excursion_usecase.get_segment_by_id(vessel_id, excursions_id, segment_id) \ No newline at end of file diff --git a/backend/bloom/routers/ports.py b/backend/bloom/routers/ports.py deleted file mode 100644 index b9e3653c..00000000 --- a/backend/bloom/routers/ports.py +++ /dev/null @@ -1,44 +0,0 @@ -import json -import time - -from redis import Redis -from dependency_injector.wiring import inject, Provide -from fastapi import APIRouter, Depends -from bloom.config import settings -from bloom.container import UseCases -from bloom.logger import logger -from bloom.usecase.Ports import PortUseCase - -router = APIRouter() -redis_client = Redis(host=settings.redis_host, port=settings.redis_port, db=0) - - -@router.get("/ports") -@inject -async def list_ports( - nocache: bool = False, - ports_usecase: PortUseCase = Depends( - Provide[UseCases.excursion_usecase] - ) -): - endpoint = f"/ports" - cache = redis_client.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - return ports_usecase.list_ports() - - -@router.get("/ports/{port_id}") -@inject -async def get_port( - port_id: int, - ports_usecase: PortUseCase = Depends( - Provide[UseCases.excursion_usecase] - ) -): - return ports_usecase.get_port_by_id(port_id) diff --git a/backend/bloom/routers/vessels.py b/backend/bloom/routers/vessels.py deleted file mode 100644 index 1eede30f..00000000 --- a/backend/bloom/routers/vessels.py +++ /dev/null @@ -1,91 +0,0 @@ -from fastapi import APIRouter - -from redis import Redis -import json -import time -from bloom.config import settings -from bloom.container import UseCases -from bloom.logger import logger - -rd = 
Redis(host=settings.redis_host, port=settings.redis_port, db=0) - - -router = APIRouter() - - -@router.get("/vessels") -async def list_vessels(nocache: bool = False): - endpoint = f"/vessels" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - vessel_repository = use_cases.vessel_repository() - db = use_cases.db() - with db.session() as session: - - json_data = [json.loads(v.model_dump_json() if v else "{}") - for v in vessel_repository.get_vessels_list(session)] - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - return json_data - - -@router.get("/vessels/{vessel_id}") -async def get_vessel(vessel_id: int): - use_cases = UseCases() - vessel_repository = use_cases.vessel_repository() - db = use_cases.db() - with db.session() as session: - return vessel_repository.get_vessel_by_id(session, vessel_id) - - -@router.get("/vessels/all/positions/last") -async def list_all_vessel_last_position(nocache: bool = False): - endpoint = f"/vessels/all/positions/last" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - segment_repository = use_cases.segment_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(p.model_dump_json() if p else "{}") - for p in segment_repository.get_all_vessels_last_position(session)] - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return json_data - - -@router.get("/vessels/{vessel_id}/positions/last") -async def get_vessel_last_position(vessel_id: int, nocache: bool = False): - endpoint = f"/vessels/{vessel_id}/positions/last" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - segment_repository = use_cases.segment_repository() - db = use_cases.db() - with db.session() as session: - result = segment_repository.get_vessel_last_position(session, vessel_id) - json_data = json.loads(result.model_dump_json() if result else "{}") - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return json_data diff --git a/backend/bloom/routers/zones.py b/backend/bloom/routers/zones.py deleted file mode 100644 index 900b7ee8..00000000 --- a/backend/bloom/routers/zones.py +++ /dev/null @@ -1,118 +0,0 @@ -import json -import time - -import redis -from fastapi import APIRouter -from starlette.requests import Request - -from bloom.config import settings -from bloom.container import UseCases -from bloom.logger import logger - -rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0) - -router = APIRouter() - - -@router.get("/zones") -async def list_zones(request: Request, nocache: bool = False): - 
endpoint = f"/zones" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zones(session)] - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return json_data - - -@router.get("/zones/all/categories") -async def list_zone_categories(request: Request, nocache: bool = False): - endpoint = f"/zones/all/categories" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zone_categories(session)] - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return json_data - - -@router.get("/zones/by-category/{category}/by-sub-category/{sub}") -async def get_zone_all_by_category(category: str = "all", sub: str = None, nocache: bool = False): - endpoint = f"/zones/by-category/{category}/by-sub-category/{sub}" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in - zone_repository.get_all_zones_by_category(session, category if category != 'all' else None, - sub)] - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return json_data - - -@router.get("/zones/by-category/{category}") -async def get_zone_all_by_category(category: str = "all", nocache: bool = False): - endpoint = f"/zones/by-category/{category}" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in - zone_repository.get_all_zones_by_category(session, category if category != 'all' else None)] - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return json_data - - 
-@router.get("/zones/{zones_id}") -async def get_zone(zones_id: int): - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - return zone_repository.get_zone_by_id(session, zones_id) diff --git a/backend/bloom/tasks/load_dim_vessel_from_csv.py b/backend/bloom/tasks/load_dim_vessel_from_csv.py index fc83b161..b0f4153a 100644 --- a/backend/bloom/tasks/load_dim_vessel_from_csv.py +++ b/backend/bloom/tasks/load_dim_vessel_from_csv.py @@ -3,7 +3,7 @@ import pandas as pd from bloom.config import settings -from bloom.container import r +from bloom.container import UseCases, r from bloom.domain.vessel import Vessel from bloom.infra.database.errors import DBException from bloom.logger import logger @@ -33,7 +33,7 @@ def map_to_domain(row: pd.Series) -> Vessel: def run(csv_file_name: str) -> None: - use_cases = r() + use_cases = UseCases() vessel_repository = use_cases.vessel_repository() db = use_cases.db() From f1e48b08afda92e89b9e18254853ddd864dee931 Mon Sep 17 00:00:00 2001 From: marthevienne <123016211+marthevienne@users.noreply.github.com> Date: Fri, 3 Apr 2026 12:09:01 +0200 Subject: [PATCH 26/29] fix: manual corrections to reach main state --- backend/bloom/container.py | 1 - .../infra/repositories/repository_port.py | 5 +- backend/bloom/routers/excursions.py | 73 ----------- backend/bloom/routers/ports.py | 44 ------- backend/bloom/routers/vessels.py | 91 -------------- backend/bloom/routers/zones.py | 118 ------------------ .../bloom/tasks/load_dim_vessel_from_csv.py | 4 +- backend/bloom/usecase/Excursions.py | 30 ----- backend/bloom/usecase/Ports.py | 20 --- backend/main.py | 64 ---------- 10 files changed, 4 insertions(+), 446 deletions(-) delete mode 100644 backend/bloom/routers/excursions.py delete mode 100644 backend/bloom/routers/ports.py delete mode 100644 backend/bloom/routers/vessels.py delete mode 100644 backend/bloom/routers/zones.py delete mode 100644 backend/bloom/usecase/Excursions.py delete mode 100644 backend/bloom/usecase/Ports.py delete mode 100644 backend/main.py diff --git a/backend/bloom/container.py b/backend/bloom/container.py index d54c6678..04d37269 100644 --- a/backend/bloom/container.py +++ b/backend/bloom/container.py @@ -13,7 +13,6 @@ from bloom.services.GetVesselsFromSpire import GetVesselsFromSpire from bloom.services.metrics import MetricsService -from bloom.usecase.Excursions import ExcursionUseCase from bloom.usecase.GenerateAlerts import GenerateAlerts from dependency_injector import containers, providers import redis diff --git a/backend/bloom/infra/repositories/repository_port.py b/backend/bloom/infra/repositories/repository_port.py index ae39c758..cc89409e 100644 --- a/backend/bloom/infra/repositories/repository_port.py +++ b/backend/bloom/infra/repositories/repository_port.py @@ -18,9 +18,8 @@ class PortRepository: def __init__(self, session_factory: Callable) -> None: self.session_factory = session_factory - def get_port_by_id(self, port_id: int) -> Union[Port, None]: - with self.session_factory() as session: - entity = session.get(sql_model.Port, port_id) + def get_port_by_id(self, session: Session, port_id: int) -> Union[Port, None]: + entity = session.get(sql_model.Port, port_id) if entity is not None: return PortRepository.map_to_domain(entity) else: diff --git a/backend/bloom/routers/excursions.py b/backend/bloom/routers/excursions.py deleted file mode 100644 index 424cf99a..00000000 --- a/backend/bloom/routers/excursions.py +++ /dev/null @@ -1,73 +0,0 @@ -import json 
-import time -from typing import List - -import redis -from dependency_injector.wiring import inject, Provide -from fastapi import APIRouter, Depends - -from bloom.config import settings -from bloom.container import UseCases -from bloom.domain.excursion import Excursion -from bloom.logger import logger -from bloom.usecase.Excursions import ExcursionUseCase - -rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0) - -router = APIRouter() - - -@router.get("/vessels/{vessel_id}/excursions") -@inject -async def list_vessel_excursions( - vessel_id: int, - nocache: bool = False, - excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCases.excursion_usecase] - ) -) -> List[Excursion]: - endpoint = f"/vessels/{vessel_id}/excursions" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - return excursion_usecase.list_vessel_excursions(vessel_id) - - -@router.get("/vessels/{vessel_id}/excursions/{excursions_id}") -async def get_vessel_excursion( - vessel_id: int, - excursions_id: int, - excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCases.excursion_usecase] - )): - return excursion_usecase.get_excursion_by_id(vessel_id, excursions_id) - - -@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments") -@inject -async def list_vessel_excursion_segments( - vessel_id: int, - excursions_id: int, - excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCases.excursion_usecase] - ) -): - return excursion_usecase.get_excursions_segments(vessel_id, excursions_id) - - -@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments/{segment_id}") -@inject -async def get_vessel_excursion_segment( - vessel_id: int, - excursions_id: int, - segment_id: int, - excursion_usecase: ExcursionUseCase = Depends( - Provide[UseCases.excursion_usecase] - ) -): - return await excursion_usecase.get_segment_by_id(vessel_id, excursions_id, segment_id) \ No newline at end of file diff --git a/backend/bloom/routers/ports.py b/backend/bloom/routers/ports.py deleted file mode 100644 index b9e3653c..00000000 --- a/backend/bloom/routers/ports.py +++ /dev/null @@ -1,44 +0,0 @@ -import json -import time - -from redis import Redis -from dependency_injector.wiring import inject, Provide -from fastapi import APIRouter, Depends -from bloom.config import settings -from bloom.container import UseCases -from bloom.logger import logger -from bloom.usecase.Ports import PortUseCase - -router = APIRouter() -redis_client = Redis(host=settings.redis_host, port=settings.redis_port, db=0) - - -@router.get("/ports") -@inject -async def list_ports( - nocache: bool = False, - ports_usecase: PortUseCase = Depends( - Provide[UseCases.excursion_usecase] - ) -): - endpoint = f"/ports" - cache = redis_client.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - return ports_usecase.list_ports() - - -@router.get("/ports/{port_id}") -@inject -async def get_port( - port_id: int, - ports_usecase: PortUseCase = Depends( - Provide[UseCases.excursion_usecase] - ) -): - return ports_usecase.get_port_by_id(port_id) diff --git a/backend/bloom/routers/vessels.py 
b/backend/bloom/routers/vessels.py deleted file mode 100644 index 1eede30f..00000000 --- a/backend/bloom/routers/vessels.py +++ /dev/null @@ -1,91 +0,0 @@ -from fastapi import APIRouter - -from redis import Redis -import json -import time -from bloom.config import settings -from bloom.container import UseCases -from bloom.logger import logger - -rd = Redis(host=settings.redis_host, port=settings.redis_port, db=0) - - -router = APIRouter() - - -@router.get("/vessels") -async def list_vessels(nocache: bool = False): - endpoint = f"/vessels" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - vessel_repository = use_cases.vessel_repository() - db = use_cases.db() - with db.session() as session: - - json_data = [json.loads(v.model_dump_json() if v else "{}") - for v in vessel_repository.get_vessels_list(session)] - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - return json_data - - -@router.get("/vessels/{vessel_id}") -async def get_vessel(vessel_id: int): - use_cases = UseCases() - vessel_repository = use_cases.vessel_repository() - db = use_cases.db() - with db.session() as session: - return vessel_repository.get_vessel_by_id(session, vessel_id) - - -@router.get("/vessels/all/positions/last") -async def list_all_vessel_last_position(nocache: bool = False): - endpoint = f"/vessels/all/positions/last" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - segment_repository = use_cases.segment_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(p.model_dump_json() if p else "{}") - for p in segment_repository.get_all_vessels_last_position(session)] - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return json_data - - -@router.get("/vessels/{vessel_id}/positions/last") -async def get_vessel_last_position(vessel_id: int, nocache: bool = False): - endpoint = f"/vessels/{vessel_id}/positions/last" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - segment_repository = use_cases.segment_repository() - db = use_cases.db() - with db.session() as session: - result = segment_repository.get_vessel_last_position(session, vessel_id) - json_data = json.loads(result.model_dump_json() if result else "{}") - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return json_data diff --git a/backend/bloom/routers/zones.py b/backend/bloom/routers/zones.py deleted file mode 100644 index 900b7ee8..00000000 --- a/backend/bloom/routers/zones.py +++ /dev/null @@ -1,118 +0,0 @@ -import json -import time - -import redis -from fastapi import 
APIRouter -from starlette.requests import Request - -from bloom.config import settings -from bloom.container import UseCases -from bloom.logger import logger - -rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0) - -router = APIRouter() - - -@router.get("/zones") -async def list_zones(request: Request, nocache: bool = False): - endpoint = f"/zones" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zones(session)] - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return json_data - - -@router.get("/zones/all/categories") -async def list_zone_categories(request: Request, nocache: bool = False): - endpoint = f"/zones/all/categories" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zone_categories(session)] - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return json_data - - -@router.get("/zones/by-category/{category}/by-sub-category/{sub}") -async def get_zone_all_by_category(category: str = "all", sub: str = None, nocache: bool = False): - endpoint = f"/zones/by-category/{category}/by-sub-category/{sub}" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in - zone_repository.get_all_zones_by_category(session, category if category != 'all' else None, - sub)] - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return json_data - - -@router.get("/zones/by-category/{category}") -async def get_zone_all_by_category(category: str = "all", nocache: bool = False): - endpoint = f"/zones/by-category/{category}" - cache = rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload = json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = 
[json.loads(z.model_dump_json() if z else "{}") - for z in - zone_repository.get_all_zones_by_category(session, category if category != 'all' else None)] - await rd.set(endpoint, json.dumps(json_data)) - await rd.expire(endpoint, settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time() - start}") - return json_data - - -@router.get("/zones/{zones_id}") -async def get_zone(zones_id: int): - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - return zone_repository.get_zone_by_id(session, zones_id) diff --git a/backend/bloom/tasks/load_dim_vessel_from_csv.py b/backend/bloom/tasks/load_dim_vessel_from_csv.py index fc83b161..1d0a1e96 100644 --- a/backend/bloom/tasks/load_dim_vessel_from_csv.py +++ b/backend/bloom/tasks/load_dim_vessel_from_csv.py @@ -3,7 +3,7 @@ import pandas as pd from bloom.config import settings -from bloom.container import r +from bloom.container import UseCases from bloom.domain.vessel import Vessel from bloom.infra.database.errors import DBException from bloom.logger import logger @@ -33,7 +33,7 @@ def map_to_domain(row: pd.Series) -> Vessel: def run(csv_file_name: str) -> None: - use_cases = r() + use_cases = UseCases() vessel_repository = use_cases.vessel_repository() db = use_cases.db() diff --git a/backend/bloom/usecase/Excursions.py b/backend/bloom/usecase/Excursions.py deleted file mode 100644 index 716ee3cf..00000000 --- a/backend/bloom/usecase/Excursions.py +++ /dev/null @@ -1,30 +0,0 @@ -import json - -from bloom.config import settings -from bloom.logger import logger - - -class ExcursionUseCase: - def __init__(self, excursions_repository, redis_client): - self.excursions_repository = excursions_repository - self.redis_client = redis_client - self.endpoint = f"/vessels/excursions" - - def list_vessel_excursions(self, vessel_id, with_cache=True): - return self.excursions_repository.get_vessel_excursions(vessel_id, with_cache) - - async def get_excursions_by_vessel_id(self, vessel_id): - excursions = self.excursions_repository.get_excursions_by_vessel_id(vessel_id) - - await self.redis_client.set(self.endpoint, json.dumps(excursions)) - await self.redis_client.expire(self.endpoint, settings.redis_cache_expiration) - return self.excursions_repository.get_excursions_by_vessel_id(vessel_id) - - async def get_excursion_by_id(self, vessel_id, excursions_id): - return self.excursions_repository.get_excursion_by_id(vessel_id, excursions_id) - - async def get_excursions_segments(self, vessel_id, excursions_id, segment_id): - return self.excursions_repository.get(vessel_id, excursions_id, segment_id) - - async def get_segment_by_id(self, vessel_id, excursions_id, segment_id): - return self.excursions_repository.get_segment_by_id(vessel_id, excursions_id, segment_id) diff --git a/backend/bloom/usecase/Ports.py b/backend/bloom/usecase/Ports.py deleted file mode 100644 index 83d69b11..00000000 --- a/backend/bloom/usecase/Ports.py +++ /dev/null @@ -1,20 +0,0 @@ -import json - -from bloom.config import settings -from bloom.infra.repositories.repository_port import PortRepository - - -class PortUseCase: - def __init__(self, ports_repository: PortRepository, redis_client): - self.ports_repository = ports_repository - self.redis_client = redis_client - self.caching_key = 'ports:caching' - - async def list_ports(self): - ports = self.ports_repository.get_all_ports() - await self.redis_client.set(self.caching_key, json.dumps(ports)) - await 
self.redis_client.expire(self.caching_key, settings.redis_cache_expiration) - return ports - - async def get_port_by_id(self, port_id): - return self.ports_repository.get_port_by_id(port_id) diff --git a/backend/main.py b/backend/main.py deleted file mode 100644 index 89af792b..00000000 --- a/backend/main.py +++ /dev/null @@ -1,64 +0,0 @@ -from fastapi import FastAPI -from starlette.requests import Request - -from bloom.container import UseCases -from bloom.routers import excursions, zones, vessels, ports -from bloom.routers.vessels import rd - - -def init_db(container): - db = container.db() - db.create_database() - - -def create_app() -> FastAPI: - container = init_container() - - init_db(container) - server = init_server(container) - # server.add_exception_handler(DBException, db_exception_handler) - # server.add_exception_handler(ValidationError, validation_exception_handler) - # server.add_exception_handler(Exception, generic_exception_handler) - - return server - - -def schedule_crawling(): - pass - - -def init_container(): - container = UseCases() - container.wire( - modules=[ - zones, - vessels, - excursions, - ports - ] - ) - return container - - -def init_server(container): - server = FastAPI(dependencies=[]) - server.container = container - server.include_router(excursions.router) - server.include_router(ports.router) - server.include_router(vessels.router) - server.include_router(zones.router) - return server - - -app = create_app() - - -@app.get("/") -async def root(request: Request): - return {"status": "ok"} - - -@app.get("/cache/all/flush") -async def cache_all_flush(request: Request): - await rd.flushall() - return {"code": 0} From 3d689d4adda4e9a77d3408fb0b3a90fdc50fb2b1 Mon Sep 17 00:00:00 2001 From: marthevienne <123016211+marthevienne@users.noreply.github.com> Date: Fri, 3 Apr 2026 12:09:59 +0200 Subject: [PATCH 27/29] Merge branch 'up-to-date-main-crawler' of https://github.com/dataforgoodfr/12_bloom into up-to-date-main-crawler --- backend/bloom/container.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/backend/bloom/container.py b/backend/bloom/container.py index 04d37269..cc4f0e8c 100644 --- a/backend/bloom/container.py +++ b/backend/bloom/container.py @@ -97,8 +97,3 @@ class UseCases(containers.DeclarativeContainer): alert_repository=alert_repository, raster_repository=raster_repository, ) - - excursion_usecase = providers.Factory( - ExcursionUseCase, - excursion_repository=excursion_repository, - ) From efe1cd5c69c83008966b0343514ed8d623b94a8f Mon Sep 17 00:00:00 2001 From: marthevienne <123016211+marthevienne@users.noreply.github.com> Date: Fri, 3 Apr 2026 12:33:31 +0200 Subject: [PATCH 28/29] remove cronjobs for dev purposes --- clevercloud/clever.json | 3 --- clevercloud/cron.json | 3 --- 2 files changed, 6 deletions(-) delete mode 100644 clevercloud/clever.json delete mode 100644 clevercloud/cron.json diff --git a/clevercloud/clever.json b/clevercloud/clever.json deleted file mode 100644 index 2c6719ae..00000000 --- a/clevercloud/clever.json +++ /dev/null @@ -1,3 +0,0 @@ -[ - "*/15 * * * * /bin/sh /home/bas/app_7244f095-70bc-43c3-9950-d075c01af05f/etl.sh" -] \ No newline at end of file diff --git a/clevercloud/cron.json b/clevercloud/cron.json deleted file mode 100644 index 899ec7bf..00000000 --- a/clevercloud/cron.json +++ /dev/null @@ -1,3 +0,0 @@ -[ - "*/15 * * * * /home/bas/venv/bin/python /home/bas/app_6bc7c917-d187-4efa-be8b-abd1da8f82da/backend/load_spire_data_from_api.py" -] \ No newline at end of file From 
58d20ea8a34b015455d3df5a1d546b6f2dd4418b Mon Sep 17 00:00:00 2001 From: marthevienne <123016211+marthevienne@users.noreply.github.com> Date: Fri, 3 Apr 2026 14:30:35 +0200 Subject: [PATCH 29/29] remove computed requirements.txt --- backend/requirements.txt | 329 --------------------------------------- 1 file changed, 329 deletions(-) delete mode 100644 backend/requirements.txt diff --git a/backend/requirements.txt b/backend/requirements.txt deleted file mode 100644 index 2961024d..00000000 --- a/backend/requirements.txt +++ /dev/null @@ -1,329 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --output-file=requirements.txt pyproject.toml -# -alembic==1.13.2 - # via bloom (pyproject.toml) -annotated-types==0.7.0 - # via pydantic -anyio==4.4.0 - # via - # gql - # httpx - # starlette - # watchfiles -async-timeout==4.0.3 - # via redis -attrs==23.2.0 - # via fiona -autopep8==2.0.4 - # via bloom (pyproject.toml) -backoff==2.2.1 - # via gql -build==1.2.1 - # via poetry -cachecontrol[filecache]==0.14.0 - # via poetry -certifi==2024.7.4 - # via - # fiona - # httpcore - # httpx - # pyproj - # requests -cffi==1.16.0 - # via xattr -charset-normalizer==3.3.2 - # via requests -cleo==2.1.0 - # via poetry -click==8.1.7 - # via - # click-plugins - # cligj - # fiona - # typer - # uvicorn -click-plugins==1.1.1 - # via fiona -cligj==0.7.2 - # via fiona -contourpy==1.2.1 - # via matplotlib -crashtest==0.4.1 - # via - # cleo - # poetry -cycler==0.12.1 - # via matplotlib -dependency-injection==1.2.0 - # via bloom (pyproject.toml) -dependency-injector==4.41.0 - # via bloom (pyproject.toml) -distlib==0.3.8 - # via virtualenv -dnspython==2.6.1 - # via email-validator -dulwich==0.21.7 - # via poetry -email-validator==2.2.0 - # via fastapi -et-xmlfile==1.1.0 - # via openpyxl -exceptiongroup==1.2.1 - # via anyio -fastapi==0.111.0 - # via bloom (pyproject.toml) -fastapi-cli==0.0.4 - # via fastapi -fastjsonschema==2.20.0 - # via poetry -filelock==3.15.4 - # via - # cachecontrol - # virtualenv -fiona==1.9.6 - # via geopandas -fonttools==4.53.1 - # via matplotlib -geoalchemy2==0.14.7 - # via bloom (pyproject.toml) -geographiclib==2.0 - # via geopy -geopandas==0.14.4 - # via bloom (pyproject.toml) -geopy==2.4.1 - # via bloom (pyproject.toml) -gql==3.5.0 - # via bloom (pyproject.toml) -graphql-core==3.2.3 - # via gql -h11==0.14.0 - # via - # httpcore - # uvicorn -httpcore==1.0.5 - # via httpx -httptools==0.6.1 - # via uvicorn -httpx==0.27.0 - # via fastapi -idna==3.7 - # via - # anyio - # email-validator - # httpx - # requests - # yarl -importlib-metadata==8.0.0 - # via keyring -installer==0.7.0 - # via poetry -jaraco-classes==3.4.0 - # via keyring -jinja2==3.1.4 - # via fastapi -keyring==24.3.1 - # via poetry -kiwisolver==1.4.5 - # via matplotlib -mako==1.3.5 - # via alembic -markdown-it-py==3.0.0 - # via rich -markupsafe==2.1.5 - # via - # jinja2 - # mako -matplotlib==3.8.4 - # via bloom (pyproject.toml) -mdurl==0.1.2 - # via markdown-it-py -more-itertools==10.3.0 - # via jaraco-classes -msgpack==1.0.8 - # via cachecontrol -multidict==6.0.5 - # via yarl -numpy==1.26.4 - # via - # contourpy - # geopandas - # matplotlib - # pandas - # scipy - # shapely -openpyxl==3.1.5 - # via bloom (pyproject.toml) -orjson==3.10.6 - # via fastapi -packaging==24.1 - # via - # build - # geoalchemy2 - # geopandas - # matplotlib - # poetry -pandas==2.2.2 - # via - # bloom (pyproject.toml) - # geopandas -pexpect==4.9.0 - # via poetry -pillow==10.4.0 - # via matplotlib 
-pkginfo==1.11.1 - # via poetry -platformdirs==4.2.2 - # via - # poetry - # virtualenv -poetry==1.8.3 - # via - # bloom (pyproject.toml) - # poetry-plugin-export -poetry-core==1.9.0 - # via - # poetry - # poetry-plugin-export -poetry-plugin-export==1.8.0 - # via poetry -psycopg2-binary==2.9.9 - # via bloom (pyproject.toml) -ptyprocess==0.7.0 - # via pexpect -pycodestyle==2.12.0 - # via autopep8 -pycountry==23.12.11 - # via bloom (pyproject.toml) -pycparser==2.22 - # via cffi -pydantic==2.6.4 - # via - # bloom (pyproject.toml) - # fastapi - # pydantic-settings -pydantic-core==2.16.3 - # via pydantic -pydantic-settings==2.2.1 - # via bloom (pyproject.toml) -pygments==2.18.0 - # via rich -pyparsing==3.1.2 - # via matplotlib -pyproj==3.6.1 - # via geopandas -pyproject-hooks==1.1.0 - # via - # build - # poetry -python-dateutil==2.9.0.post0 - # via - # matplotlib - # pandas -python-dotenv==1.0.1 - # via - # bloom (pyproject.toml) - # pydantic-settings - # uvicorn -python-multipart==0.0.9 - # via fastapi -pytz==2024.1 - # via pandas -pyyaml==6.0.1 - # via - # bloom (pyproject.toml) - # uvicorn -rapidfuzz==3.9.4 - # via cleo -redis==5.0.7 - # via bloom (pyproject.toml) -requests==2.31.0 - # via - # bloom (pyproject.toml) - # cachecontrol - # poetry - # requests-toolbelt -requests-toolbelt==1.0.0 - # via - # bloom (pyproject.toml) - # poetry -rich==13.7.1 - # via typer -scipy==1.12.0 - # via bloom (pyproject.toml) -shapely==2.0.4 - # via - # bloom (pyproject.toml) - # geopandas -shellingham==1.5.4 - # via - # poetry - # typer -six==1.16.0 - # via - # dependency-injector - # fiona - # python-dateutil -slack-sdk==3.27.2 - # via bloom (pyproject.toml) -sniffio==1.3.1 - # via - # anyio - # httpx -sqlalchemy==2.0.31 - # via - # alembic - # bloom (pyproject.toml) - # geoalchemy2 -starlette==0.37.2 - # via fastapi -tomli==2.0.1 - # via - # autopep8 - # build - # poetry -tomlkit==0.12.5 - # via poetry -trove-classifiers==2024.7.2 - # via poetry -typer==0.12.3 - # via fastapi-cli -typing-extensions==4.12.2 - # via - # alembic - # anyio - # fastapi - # pydantic - # pydantic-core - # sqlalchemy - # typer - # uvicorn -tzdata==2024.1 - # via pandas -ujson==5.10.0 - # via fastapi -urllib3==2.2.2 - # via - # dulwich - # requests -uvicorn[standard]==0.30.1 - # via - # bloom (pyproject.toml) - # fastapi -uvloop==0.19.0 - # via uvicorn -virtualenv==20.26.3 - # via poetry -watchfiles==0.22.0 - # via uvicorn -websockets==12.0 - # via uvicorn -xattr==1.1.0 - # via poetry -yarl==1.9.4 - # via gql -zipp==3.19.2 - # via importlib-metadata
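
Note on the caching pattern repeated throughout the routers in this series: every endpoint performs the same read-through sequence against Redis (get the key named after the endpoint, return the parsed payload on a hit, otherwise query the repository, serialize each result with model_dump_json, then set the key and expire it after settings.redis_cache_expiration). The sketch below is illustrative only and not code from the repository: the helper name cached_json and its wiring are hypothetical, while settings, logger and the synchronous redis-py client mirror bloom/services/api.py. One detail worth keeping in mind: redis.Redis (unlike redis.asyncio.Redis) is a blocking client, so its set/expire calls are not awaitable, which is consistent with services/api.py calling them without await.

import json
import time
from typing import Any, Callable

import redis

from bloom.config import settings
from bloom.logger import logger

# Same synchronous client as bloom/services/api.py.
rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0)


def cached_json(endpoint: str, compute: Callable[[], Any], nocache: bool = False) -> Any:
    # Hypothetical helper factoring out the read-through cache used by each endpoint.
    start = time.time()
    cache = rd.get(endpoint)
    if cache and not nocache:
        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
        payload = json.loads(cache)
    else:
        # compute() would be a closure that opens a session and returns the
        # JSON-serializable repository results (as the endpoints above do inline).
        payload = compute()
        rd.set(endpoint, json.dumps(payload))
        rd.expire(endpoint, settings.redis_cache_expiration)
    logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
    return payload

For example, list_vessels could call cached_json("/vessels", lambda: [...serialized vessels...], nocache) instead of repeating the get/set/expire boilerplate; the lambda and call site here are an assumption for illustration, not part of the patches above.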