From 0612667b22869aff18f83e04029253ddb0ad337f Mon Sep 17 00:00:00 2001 From: adish-rmr Date: Sun, 8 Feb 2026 19:29:22 +0100 Subject: [PATCH] update: new class --- .gitignore | 3 +- CLAUDE.md | 61 ++++- data/db_schema.sql | 82 +++++++ data/insert.sql | 11 + marimo/debug_cosing.py | 29 ++- marimo/test_obj.py | 45 +--- src/pif_compiler/api/routes/api_esposition.py | 104 +++++++++ .../api/routes/api_ingredients.py | 102 +++++++++ src/pif_compiler/classes/__init__.py | 48 ++-- src/pif_compiler/classes/main_cls.py | 209 ++++++++++++++++++ src/pif_compiler/classes/models.py | 114 ++++++++-- src/pif_compiler/functions/db_utils.py | 101 +++++++++ src/pif_compiler/main.py | 14 +- 13 files changed, 836 insertions(+), 87 deletions(-) create mode 100644 data/db_schema.sql create mode 100644 data/insert.sql create mode 100644 src/pif_compiler/api/routes/api_esposition.py create mode 100644 src/pif_compiler/api/routes/api_ingredients.py create mode 100644 src/pif_compiler/classes/main_cls.py diff --git a/.gitignore b/.gitignore index 3f72cd3..6c8c66f 100644 --- a/.gitignore +++ b/.gitignore @@ -208,4 +208,5 @@ __marimo__/ # other -pdfs/ \ No newline at end of file +pdfs/ +streamlit/ \ No newline at end of file diff --git a/CLAUDE.md b/CLAUDE.md index d60a399..35f6c54 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -26,10 +26,15 @@ src/pif_compiler/ │ └── routes/ │ ├── api_echa.py # ECHA endpoints (single + batch search) │ ├── api_cosing.py # COSING endpoints (single + batch search) +│ ├── api_ingredients.py # Ingredient search by CAS + list all ingested +│ ├── api_esposition.py # Esposition preset creation + list all presets │ └── common.py # PDF generation, PubChem, CIR search endpoints ├── classes/ -│ └── models.py # Pydantic models: Ingredient, DapInfo, CosingInfo, -│ # ToxIndicator, Toxicity, Esposition, RetentionFactors +│ ├── __init__.py # Re-exports all models from models.py and main_cls.py +│ ├── models.py # Pydantic models: Ingredient, DapInfo, CosingInfo, +│ │ # ToxIndicator, 
Toxicity, Esposition, RetentionFactors, StatoOrdine +│ └── main_cls.py # Orchestrator classes: Order (raw input layer), +│ # Project (processed layer), IngredientInput ├── functions/ │ ├── common_func.py # PDF generation with Playwright │ ├── common_log.py # Centralized logging configuration @@ -44,9 +49,10 @@ src/pif_compiler/ ### Other directories -- `data/` - Input data files (`input.json` with sample INCI/CAS/percentage lists), old CSV data +- `data/` - Input data files (`input.json` with sample INCI/CAS/percentage lists), DB schema reference (`db_schema.sql`), old CSV data - `logs/` - Rotating log files (debug.log, error.log) - auto-generated - `pdfs/` - Generated PDF files from ECHA dossier pages +- `streamlit/` - Streamlit UI pages (`ingredients_page.py`, `exposition_page.py`) - `marimo/` - **Ignore this folder.** Debug/test notebooks, not part of the main application ## Architecture & Data Flow @@ -60,10 +66,32 @@ src/pif_compiler/ 6. **Toxicity ranking** (`Toxicity` model): Best toxicological indicator selection with priority (NOAEL > LOAEL > LD50) and safety factors ### Caching strategy -- ECHA results are cached in MongoDB (`toxinfo.substance_index` collection) keyed by `substance.rmlCas` +- **ECHA results** are cached in MongoDB (`toxinfo.substance_index` collection) keyed by `substance.rmlCas` +- **Ingredients** are cached in MongoDB (`toxinfo.ingredients` collection) keyed by `cas`, with PostgreSQL `ingredienti` table as index (stores `mongo_id` + enrichment flags `dap`, `cosing`, `tox`) +- **Orders** are cached in MongoDB (`toxinfo.orders` collection) keyed by `uuid_ordine` +- **Projects** are cached in MongoDB (`toxinfo.projects` collection) keyed by `uuid_progetto` - The orchestrator checks local cache before making external requests +- `Ingredient.get_or_create(cas)` checks PostgreSQL -> MongoDB cache, returns cached if not older than 365 days, otherwise re-scrapes - Search history is logged to PostgreSQL (`logs.search_history` table) +### Order / 
Project architecture +- **Order** (`main_cls.py`): Raw input layer. Receives JSON with client, compiler, product type, ingredients list (CAS + percentage). Cleans CAS numbers (strips `\n`, splits by `;`). Saves to MongoDB `orders` collection. Registers client/compiler in PostgreSQL. +- **Project** (`main_cls.py`): Processed layer. Created from an Order via `Project.from_order()`. Holds enriched `Ingredient` objects, percentages mapping (CAS -> %), and `Esposition` preset. `process_ingredients()` calls `Ingredient.get_or_create()` for each CAS. Saves to MongoDB `projects` collection. +- An order can update an older project — they are decoupled. + +### PostgreSQL schema (see `data/db_schema.sql`) + +- **`clienti`** - Customers (`id_cliente`, `nome_cliente`) +- **`compilatori`** - PIF compilers/assessors (`id_compilatore`, `nome_compilatore`) +- **`ordini`** - Orders linking a client + compiler to a project (`uuid_ordine`, `uuid_progetto`, `data_ordine`, `stato_ordine`). FK to `clienti`, `compilatori`, `stati_ordini` +- **`stati_ordini`** - Order status lookup table (`id_stato`, `nome_stato`). Values mapped to `StatoOrdine` IntEnum in `models.py` +- **`ingredienti`** - Ingredient registry keyed by CAS. Tracks enrichment status via boolean flags (`dap`, `cosing`, `tox`) and links to MongoDB document (`mongo_id`) +- **`inci`** - INCI name to CAS mapping. FK to `ingredienti(cas)` +- **`progetti`** - Projects linked to an order (`mongo_id` -> `ordini.uuid_progetto`) and a product type preset (`preset_tipo_prodotto` -> `tipi_prodotti`) +- **`ingredients_lineage`** - Many-to-many join between `progetti` and `ingredienti`, tracking which ingredients belong to which project +- **`tipi_prodotti`** - Product type presets with exposure parameters (`preset_name`, `tipo_prodotto`, `luogo_applicazione`, exposure routes, `sup_esposta`, `freq_applicazione`, `qta_giornaliera`, `ritenzione`). 
Maps to the `Esposition` Pydantic model +- **`logs.search_history`** - Search audit log (`cas_ricercato`, `target`, `esito`) + ## API Endpoints All routes are under `/api/v1`: @@ -74,6 +102,10 @@ All routes are under `/api/v1`: | POST | `/echa/batch-search` | Batch ECHA search for multiple CAS numbers | | POST | `/cosing/search` | COSING search (by name, CAS, EC, or ID) | | POST | `/cosing/batch-search` | Batch COSING search | +| POST | `/ingredients/search` | Get full ingredient by CAS (cached or scraped) | +| GET | `/ingredients/list` | List all ingested ingredients from PostgreSQL | +| POST | `/esposition/create` | Create a new esposition preset | +| GET | `/esposition/presets` | List all esposition presets | | POST | `/common/pubchem` | PubChem property lookup by CAS | | POST | `/common/generate-pdf` | Generate PDF from URL via Playwright | | GET | `/common/download-pdf/{name}` | Download a generated PDF | @@ -110,10 +142,31 @@ uv run uvicorn pif_compiler.main:app --reload --host 0.0.0.0 --port 8000 ### Key conventions - Services in `services/` handle external API calls and data extraction - Models in `classes/models.py` use Pydantic `@model_validator` and `@classmethod` builders for construction from raw API data +- Orchestrator classes in `classes/main_cls.py` handle Order (raw input) and Project (processed) layers - The `orchestrator` pattern (see `srv_echa.py`) handles: validate input -> check local cache -> fetch from external -> store locally -> return +- `Ingredient.ingredient_builder(cas)` calls scraping functions directly (`pubchem_dap`, `cosing_entry`, `orchestrator`) +- `Ingredient.save()` upserts to both MongoDB and PostgreSQL, `Ingredient.from_cas()` retrieves via PostgreSQL index -> MongoDB +- `Ingredient.get_or_create(cas)` is the main entry point: checks cache freshness (365 days), scrapes if needed - All modules use the shared logger from `common_log.get_logger()` - API routes define Pydantic request/response models inline in each route file 
+### db_utils.py functions
+- `db_connect(db_name, collection_name)` - MongoDB collection accessor
+- `postgres_connect()` - PostgreSQL connection
+- `upsert_ingrediente(cas, mongo_id, dap, cosing, tox)` - Upsert ingredient in PostgreSQL
+- `get_ingrediente_by_cas(cas)` - Get ingredient row by CAS
+- `get_all_ingredienti()` - List all ingredients from PostgreSQL
+- `upsert_cliente(nome_cliente)` - Upsert client, returns `id_cliente`
+- `upsert_compilatore(nome_compilatore)` - Upsert compiler, returns `id_compilatore`
+- `aggiorna_stato_ordine(id_ordine, nuovo_stato)` - Update order status
+- `log_ricerche(cas, target, esito)` - Log search history
+
+### Streamlit UI
+- `streamlit/ingredients_page.py` - Ingredient search by CAS + result display + inventory of ingested ingredients
+- `streamlit/exposition_page.py` - Esposition preset creation form + list of existing presets
+- Both pages call the FastAPI endpoints via `requests` (API must be running on `localhost:8000`)
+- Run with: `streamlit run streamlit/ingredients_page.py` (or `streamlit run streamlit/exposition_page.py`)
+
 ### Important domain concepts
 - **CAS number**: Chemical Abstracts Service identifier (e.g., "50-00-0")
 - **INCI**: International Nomenclature of Cosmetic Ingredients
diff --git a/data/db_schema.sql b/data/db_schema.sql
new file mode 100644
index 0000000..badc937
--- /dev/null
+++ b/data/db_schema.sql
@@ -0,0 +1,82 @@
+-- WARNING: This schema is for context only and is not meant to be run.
+-- Table order and constraints may not be valid for execution.
+ +CREATE TABLE public.clienti ( + id_cliente integer NOT NULL DEFAULT nextval('clienti_id_cliente_seq'::regclass), + nome_cliente character varying NOT NULL UNIQUE, + CONSTRAINT clienti_pkey PRIMARY KEY (id_cliente) +); +CREATE TABLE public.compilatori ( + id_compilatore integer NOT NULL DEFAULT nextval('compilatori_id_compilatore_seq'::regclass), + nome_compilatore character varying NOT NULL UNIQUE, + CONSTRAINT compilatori_pkey PRIMARY KEY (id_compilatore) +); +CREATE TABLE public.inci ( + id bigint GENERATED ALWAYS AS IDENTITY NOT NULL, + created_at timestamp with time zone NOT NULL DEFAULT now(), + inci text NOT NULL UNIQUE, + cas text NOT NULL, + CONSTRAINT inci_pkey PRIMARY KEY (id), + CONSTRAINT inci_cas_fkey FOREIGN KEY (cas) REFERENCES public.ingredienti(cas) +); +CREATE TABLE public.ingredienti ( + id bigint GENERATED ALWAYS AS IDENTITY NOT NULL, + created_at timestamp with time zone NOT NULL DEFAULT now(), + cas text NOT NULL UNIQUE, + mongo_id text, + dap boolean DEFAULT false, + cosing boolean DEFAULT false, + tox boolean DEFAULT false, + CONSTRAINT ingredienti_pkey PRIMARY KEY (id) +); +CREATE TABLE public.ingredients_lineage ( + id bigint GENERATED ALWAYS AS IDENTITY NOT NULL, + created_at timestamp with time zone NOT NULL DEFAULT now(), + id_progetto integer, + id_ingrediente bigint, + CONSTRAINT ingredients_lineage_pkey PRIMARY KEY (id), + CONSTRAINT ingredients_lineage_id_ingrediente_fkey FOREIGN KEY (id_ingrediente) REFERENCES public.ingredienti(id), + CONSTRAINT ingredients_lineage_id_progetto_fkey FOREIGN KEY (id_progetto) REFERENCES public.progetti(id) +); +CREATE TABLE public.ordini ( + id_ordine integer NOT NULL DEFAULT nextval('ordini_id_ordine_seq'::regclass), + id_cliente integer, + id_compilatore integer, + uuid_ordine character varying NOT NULL, + uuid_progetto character varying NOT NULL UNIQUE, + data_ordine timestamp without time zone NOT NULL, + stato_ordine integer DEFAULT 1, + note text, + CONSTRAINT ordini_pkey PRIMARY KEY 
(id_ordine), + CONSTRAINT ordini_id_cliente_fkey FOREIGN KEY (id_cliente) REFERENCES public.clienti(id_cliente), + CONSTRAINT ordini_id_compilatore_fkey FOREIGN KEY (id_compilatore) REFERENCES public.compilatori(id_compilatore), + CONSTRAINT ordini_stato_ordine_fkey FOREIGN KEY (stato_ordine) REFERENCES public.stati_ordini(id_stato) +); +CREATE TABLE public.progetti ( + id bigint GENERATED ALWAYS AS IDENTITY NOT NULL, + created_at timestamp with time zone NOT NULL DEFAULT now(), + mongo_id character varying, + preset_tipo_prodotto integer, + CONSTRAINT progetti_pkey PRIMARY KEY (id), + CONSTRAINT progetti_mongo_id_fkey FOREIGN KEY (mongo_id) REFERENCES public.ordini(uuid_progetto), + CONSTRAINT progetti_preset_tipo_prodotto_fkey FOREIGN KEY (preset_tipo_prodotto) REFERENCES public.tipi_prodotti(id_preset) +); +CREATE TABLE public.stati_ordini ( + id_stato integer NOT NULL DEFAULT nextval('stati_ordini_id_stato_seq'::regclass), + nome_stato character varying NOT NULL, + CONSTRAINT stati_ordini_pkey PRIMARY KEY (id_stato) +); +CREATE TABLE public.tipi_prodotti ( + id_preset integer NOT NULL DEFAULT nextval('tipi_prodotti_id_tipo_seq'::regclass), + preset_name text NOT NULL UNIQUE, + tipo_prodotto text, + luogo_applicazione text, + esp_normali text, + esp_secondarie text, + esp_nano text NOT NULL, + sup_esposta integer, + freq_applicazione integer, + qta_giornaliera double precision, + ritenzione double precision, + CONSTRAINT tipi_prodotti_pkey PRIMARY KEY (id_preset) +); \ No newline at end of file diff --git a/data/insert.sql b/data/insert.sql new file mode 100644 index 0000000..95d269b --- /dev/null +++ b/data/insert.sql @@ -0,0 +1,11 @@ +INSERT INTO public.stati_ordini (id_stato, nome_stato) VALUES + (1, 'RICEVUTO'), + (2, 'VALIDATO'), + (3, 'ARRICCHIMENTO'), + (4, 'ARRICCHIMENTO_PARZIALE'), + (5, 'ARRICCHITO'), + (6, 'CALCOLO'), + (7, 'IN_REVISIONE'), + (8, 'COMPLETATO'), + (9, 'ERRORE'), + (10, 'ANNULLATO'); \ No newline at end of file diff --git 
a/marimo/debug_cosing.py b/marimo/debug_cosing.py index 1145640..974cedd 100644 --- a/marimo/debug_cosing.py +++ b/marimo/debug_cosing.py @@ -12,19 +12,38 @@ def _(): @app.cell def _(): - from pif_compiler.services.srv_cosing import cosing_search - return (cosing_search,) + from pif_compiler.services.srv_cosing import cosing_entry + from pif_compiler.classes.models import CosingInfo + return CosingInfo, cosing_entry @app.cell def _(): - cas = ' 9006-65-9 ' + cas = '64-17-5' return (cas,) @app.cell -def _(cas, cosing_search): - cosing_search(cas, mode='cas') +def _(cas, cosing_entry): + data = cosing_entry(cas) + return (data,) + + +@app.cell +def _(data): + data + return + + +@app.cell +def _(CosingInfo, data): + test = CosingInfo(**data) + return (test,) + + +@app.cell +def _(test): + test return diff --git a/marimo/test_obj.py b/marimo/test_obj.py index 3296ce6..9d7a813 100644 --- a/marimo/test_obj.py +++ b/marimo/test_obj.py @@ -6,42 +6,19 @@ app = marimo.App(width="medium") @app.cell def _(): - from pif_compiler.classes.models import Esposition - return (Esposition,) + from pif_compiler.classes.models import Ingredient + # Costruisce l'ingrediente da scraping e lo salva + ing = Ingredient.ingredient_builder("64-17-5", inci=["ALCOHOL"]) + print(f"CAS: {ing.cas}") + print(f"DAP: {ing.dap_info}") + print(f"COSING: {ing.cosing_info is not None}") + print(f"TOX: {ing.toxicity}") + print(f"Stats: {ing.get_stats()}") -@app.cell -def _(Esposition): - it = Esposition( - preset_name="Test xzzx 0 else StatoOrdine.ARRICCHITO + self.save() + return self.ingredients + + def set_esposition(self, preset_name: str): + """Carica un preset di esposizione da PostgreSQL per nome.""" + presets = Esposition.get_presets() + for p in presets: + if p.preset_name == preset_name: + self.esposition = p + return p + logger.warning(f"Preset '{preset_name}' non trovato") + return None + + def save(self): + """Salva il progetto su MongoDB (collection 'projects'). 
Ritorna il mongo_id.""" + collection = db_connect(collection_name='projects') + mongo_dict = self.model_dump() + + result = collection.replace_one( + {"uuid_progetto": self.uuid_progetto}, + mongo_dict, + upsert=True + ) + + if result.upserted_id: + mongo_id = str(result.upserted_id) + else: + doc = collection.find_one({"uuid_progetto": self.uuid_progetto}, {"_id": 1}) + mongo_id = str(doc["_id"]) + + logger.info(f"Progetto {self.uuid_progetto} salvato su MongoDB: {mongo_id}") + return mongo_id + + def get_stats(self): + """Ritorna statistiche sul progetto e sullo stato di arricchimento.""" + stats = { + "uuid_progetto": self.uuid_progetto, + "uuid_ordine": self.uuid_ordine, + "stato": self.stato.name, + "has_esposition": self.esposition is not None, + "num_ingredients": len(self.ingredients), + "num_cas_input": len(self.percentages), + } + + if self.ingredients: + stats["enrichment"] = { + "with_dap": sum(1 for i in self.ingredients if i.dap_info is not None), + "with_cosing": sum(1 for i in self.ingredients if i.cosing_info is not None), + "with_tox": sum(1 for i in self.ingredients if i.toxicity is not None), + "with_noael": sum( + 1 for i in self.ingredients + if i.toxicity and any(ind.indicator == 'NOAEL' for ind in i.toxicity.indicators) + ), + } + + return stats diff --git a/src/pif_compiler/classes/models.py b/src/pif_compiler/classes/models.py index 491f96d..554d070 100644 --- a/src/pif_compiler/classes/models.py +++ b/src/pif_compiler/classes/models.py @@ -1,10 +1,24 @@ from pydantic import BaseModel, Field, field_validator, ConfigDict, model_validator, computed_field import re +from enum import IntEnum from typing import List, Optional from datetime import datetime as dt +class StatoOrdine(IntEnum): + """Stati ordine per orchestrare il flusso di elaborazione PIF.""" + RICEVUTO = 1 # Input grezzo ricevuto, caricato su MongoDB + VALIDATO = 2 # Input validato (compilatore, cliente, tipo cosmetico ok) + ARRICCHIMENTO = 3 # Arricchimento in corso (COSING, 
PubChem, ECHA) + ARRICCHIMENTO_PARZIALE = 4 # Arricchimento completato ma con dati mancanti + ARRICCHITO = 5 # Arricchimento completato con successo + CALCOLO = 6 # Calcolo DAP, SED, MoS in corso + IN_REVISIONE = 7 # Calcoli completati, in attesa di revisione umana + COMPLETATO = 8 # PIF finalizzato + ERRORE = 9 # Errore durante l'elaborazione + ANNULLATO = 10 # Ordine annullato + from pif_compiler.services.srv_echa import extract_levels, at_extractor, rdt_extractor, orchestrator -from pif_compiler.functions.db_utils import postgres_connect +from pif_compiler.functions.db_utils import postgres_connect, upsert_ingrediente, get_ingrediente_by_cas from pif_compiler.services.srv_pubchem import pubchem_dap from pif_compiler.services.srv_cosing import cosing_entry @@ -160,8 +174,9 @@ class CosingInfo(BaseModel): def cycle_identified(cls, cosing_data : dict): cosing_entries = [] if 'identifiedIngredient' in cosing_data.keys(): - identified_cosing = cls.cosing_builder(cosing_data['identifiedIngredient']) - cosing_entries.append(identified_cosing) + for each_entry in cosing_data['identifiedIngredient']: + identified_cosing = cls.cosing_builder(each_entry) + cosing_entries.append(identified_cosing) main = cls.cosing_builder(cosing_data) cosing_entries.append(main) @@ -246,18 +261,19 @@ class Ingredient(BaseModel): creation_date: Optional[str] = None @classmethod - def ingredient_builder( - cls, - cas: str, - inci: Optional[List[str]] = None, - dap_data: Optional[dict] = None, - cosing_data: Optional[dict] = None, - toxicity_data: Optional[dict] = None): - - dap_info = DapInfo.dap_builder(dap_data) if dap_data else None + def ingredient_builder(cls, cas: str, inci: Optional[List[str]] = None): + # Recupera dati DAP da PubChem + dap_data = pubchem_dap(cas) + dap_info = DapInfo.dap_builder(dap_data) if isinstance(dap_data, dict) else None + + # Recupera dati COSING + cosing_data = cosing_entry(cas) cosing_info = CosingInfo.cycle_identified(cosing_data) if cosing_data else None 
+ + # Recupera dati tossicologici da ECHA + toxicity_data = orchestrator(cas) toxicity = Toxicity.from_result(cas, toxicity_data) if toxicity_data else None - + return cls( cas=cas, inci=inci, @@ -268,7 +284,8 @@ class Ingredient(BaseModel): @model_validator(mode='after') def set_creation_date(self) -> 'Ingredient': - self.creation_date = dt.now().isoformat() + if self.creation_date is None: + self.creation_date = dt.now().isoformat() return self def update_ingredient(self, attr : str, data : dict): @@ -277,7 +294,74 @@ class Ingredient(BaseModel): def to_mongo_dict(self): mongo_dict = self.model_dump() return mongo_dict - + + def save(self): + """Salva l'ingrediente su MongoDB (collection 'ingredients') e crea/aggiorna la riga in PostgreSQL.""" + from pif_compiler.functions.db_utils import db_connect + + collection = db_connect(collection_name='ingredients') + mongo_dict = self.to_mongo_dict() + + # Upsert su MongoDB usando il CAS come chiave + result = collection.replace_one( + {"cas": self.cas}, + mongo_dict, + upsert=True + ) + + # Recupera l'ObjectId del documento (inserito o esistente) + if result.upserted_id: + mongo_id = str(result.upserted_id) + else: + doc = collection.find_one({"cas": self.cas}, {"_id": 1}) + mongo_id = str(doc["_id"]) + + # Segna i flag di arricchimento + has_dap = self.dap_info is not None + has_cosing = self.cosing_info is not None + has_tox = self.toxicity is not None + + # Upsert su PostgreSQL + upsert_ingrediente(self.cas, mongo_id, dap=has_dap, cosing=has_cosing, tox=has_tox) + + return mongo_id + + @classmethod + def from_cas(cls, cas: str): + """Recupera un ingrediente da MongoDB tramite il CAS, usando PostgreSQL come indice.""" + from pif_compiler.functions.db_utils import db_connect + from bson import ObjectId + + # Cerca in PostgreSQL per ottenere il mongo_id + pg_entry = get_ingrediente_by_cas(cas) + if not pg_entry: + return None + + _, _, mongo_id, _, _, _ = pg_entry + if not mongo_id: + return None + + # Recupera il 
documento da MongoDB + collection = db_connect(collection_name='ingredients') + doc = collection.find_one({"_id": ObjectId(mongo_id)}) + if not doc: + return None + + doc.pop("_id", None) + return cls(**doc) + + @classmethod + def get_or_create(cls, cas: str, inci: Optional[List[str]] = None): + """Restituisce l'ingrediente dalla cache se esiste e non è vecchio, altrimenti lo ricrea.""" + cached = cls.from_cas(cas) + if cached and not cached.is_old(): + return cached + + # Crea un nuovo ingrediente (scraping) e lo salva + ingredient = cls.ingredient_builder(cas, inci=inci) + ingredient.save() + return ingredient + def get_stats(self): stats = { "has_dap_info": self.dap_info is not None, diff --git a/src/pif_compiler/functions/db_utils.py b/src/pif_compiler/functions/db_utils.py index 0b7cce1..fb5667d 100644 --- a/src/pif_compiler/functions/db_utils.py +++ b/src/pif_compiler/functions/db_utils.py @@ -56,6 +56,107 @@ def insert_compilatore(nome_compilatore): except Exception as e: logger.error(f"Error: {e}") +def aggiorna_stato_ordine(id_ordine, nuovo_stato): + try: + conn = postgres_connect() + with conn.cursor() as cur: + cur.execute( + "UPDATE ordini SET stato_ordine = %s WHERE id_ordine = %s", + (int(nuovo_stato), id_ordine) + ) + conn.commit() + conn.close() + except Exception as e: + logger.error(f"Error updating stato ordine {id_ordine}: {e}") + +def upsert_ingrediente(cas, mongo_id, dap=False, cosing=False, tox=False): + """Inserisce o aggiorna un ingrediente nella tabella ingredienti di PostgreSQL.""" + try: + conn = postgres_connect() + with conn.cursor() as cur: + cur.execute(""" + INSERT INTO ingredienti (cas, mongo_id, dap, cosing, tox) + VALUES (%s, %s, %s, %s, %s) + ON CONFLICT (cas) DO UPDATE SET + mongo_id = EXCLUDED.mongo_id, + dap = EXCLUDED.dap, + cosing = EXCLUDED.cosing, + tox = EXCLUDED.tox + RETURNING id; + """, (cas, mongo_id, dap, cosing, tox)) + result = cur.fetchone() + conn.commit() + conn.close() + return result[0] if result else None + 
except Exception as e: + logger.error(f"Errore upsert ingrediente {cas}: {e}") + return None + +def get_ingrediente_by_cas(cas): + """Recupera un ingrediente dalla tabella ingredienti di PostgreSQL tramite CAS.""" + try: + conn = postgres_connect() + with conn.cursor() as cur: + cur.execute( + "SELECT id, cas, mongo_id, dap, cosing, tox FROM ingredienti WHERE cas = %s", + (cas,) + ) + result = cur.fetchone() + conn.close() + return result + except Exception as e: + logger.error(f"Errore recupero ingrediente {cas}: {e}") + return None + +def get_all_ingredienti(): + """Recupera tutti gli ingredienti dalla tabella ingredienti di PostgreSQL.""" + try: + conn = postgres_connect() + with conn.cursor() as cur: + cur.execute("SELECT id, cas, mongo_id, dap, cosing, tox, created_at FROM ingredienti ORDER BY created_at DESC") + results = cur.fetchall() + conn.close() + return results if results else [] + except Exception as e: + logger.error(f"Errore recupero ingredienti: {e}") + return [] + +def upsert_cliente(nome_cliente): + """Inserisce o recupera un cliente. Ritorna id_cliente.""" + try: + conn = postgres_connect() + with conn.cursor() as cur: + cur.execute( + "INSERT INTO clienti (nome_cliente) VALUES (%s) ON CONFLICT (nome_cliente) DO NOTHING", + (nome_cliente,) + ) + conn.commit() + cur.execute("SELECT id_cliente FROM clienti WHERE nome_cliente = %s", (nome_cliente,)) + result = cur.fetchone() + conn.close() + return result[0] if result else None + except Exception as e: + logger.error(f"Errore upsert cliente {nome_cliente}: {e}") + return None + +def upsert_compilatore(nome_compilatore): + """Inserisce o recupera un compilatore. 
Ritorna id_compilatore.""" + try: + conn = postgres_connect() + with conn.cursor() as cur: + cur.execute( + "INSERT INTO compilatori (nome_compilatore) VALUES (%s) ON CONFLICT (nome_compilatore) DO NOTHING", + (nome_compilatore,) + ) + conn.commit() + cur.execute("SELECT id_compilatore FROM compilatori WHERE nome_compilatore = %s", (nome_compilatore,)) + result = cur.fetchone() + conn.close() + return result[0] if result else None + except Exception as e: + logger.error(f"Errore upsert compilatore {nome_compilatore}: {e}") + return None + def log_ricerche(cas, target, esito): try: conn = postgres_connect() diff --git a/src/pif_compiler/main.py b/src/pif_compiler/main.py index ffd2ef1..a573c74 100644 --- a/src/pif_compiler/main.py +++ b/src/pif_compiler/main.py @@ -8,7 +8,7 @@ import time from pif_compiler.functions.common_log import get_logger # Import dei tuoi router -from pif_compiler.api.routes import api_echa, api_cosing, common +from pif_compiler.api.routes import api_echa, api_cosing, common, api_ingredients, api_esposition # Configurazione logging logger = get_logger() @@ -135,6 +135,18 @@ app.include_router( tags=["Common"] ) +app.include_router( + api_ingredients.router, + prefix="/api/v1", + tags=["Ingredients"] +) + +app.include_router( + api_esposition.router, + prefix="/api/v1", + tags=["Esposition"] +) + # ==================== ROOT ENDPOINTS ==================== @app.get("/", tags=["Root"])