diff --git a/backend/.env b/backend/.env index 81da84d..dfff369 100644 --- a/backend/.env +++ b/backend/.env @@ -4,4 +4,8 @@ DB_HOST=34.155.247.19 DB_USERNAME=postgres DB_PASSWORD=3cd70e73f0b73a2411c595f3a2f8cf125ee3937498f82628906ed946e8499a00f7733c6ec588cdff4aec386de3c3cc51d0a166369c6a99e73b9c95d2a1bd115a API_TOKEN=B0uKZQN+qIsLc0yNR/t9xCOkgP6Keg0oarLUiZkO2Mo= -FLUXIMMO_API_KEY=a +# FLUXIMMO_API_KEY=trial_default_real-immo_8b9cbce3-8636-4c88-894c-031359db0630 +FLUXIMMO_API_KEY=prod_default_immonator-1_a9261cc6-c229-409f-ade6-0dabcdeff36f +GOOGLE_CREDENTIALS_PATH=/home/vitrix/.config/gcloud/extraction-gemini-461016-42184c10da74.json +GOOGLE_PROJECT_ID=extraction-gemini-461016 + diff --git a/backend/__pycache__/schema_docs.cpython-313.pyc b/backend/__pycache__/schema_docs.cpython-313.pyc new file mode 100644 index 0000000..8116706 Binary files /dev/null and b/backend/__pycache__/schema_docs.cpython-313.pyc differ diff --git a/backend/app.py b/backend/app.py index f6cff6a..55425b3 100644 --- a/backend/app.py +++ b/backend/app.py @@ -18,6 +18,10 @@ from dotenv import load_dotenv from flask import Flask, abort, jsonify, request, g from flask_cors import CORS import requests +from google import genai +from google.genai import types + +from schema_docs import SCRAPER_SCHEMA_DOC, PROFILE_SCHEMA_DOC load_dotenv() @@ -25,8 +29,11 @@ API_TOKEN = os.getenv("API_TOKEN") FLUXIMMO_API_KEY = os.getenv("FLUXIMMO_API_KEY") FLUXIMMO_COUNT_URL = os.getenv( "FLUXIMMO_COUNT_URL", - "https://analytics.fluximmo.io/properties/count", + "https://api.fluximmo.io/v2/protected/analytics/property/count", ) +GOOGLE_CREDENTIALS_PATH = os.getenv("GOOGLE_CREDENTIALS_PATH") +GOOGLE_PROJECT_ID = os.getenv("GOOGLE_PROJECT_ID") +GOOGLE_LOCATION = os.getenv("GOOGLE_LOCATION", "europe-west1") REQUIRED_DB_SETTINGS = { @@ -61,6 +68,12 @@ INVESTMENT_PROFILE_TABLE = os.getenv( "DB_TABLE_INVESTMENT_PROFILES", "users_investmentprofile" ) SCRAPER_TABLE = os.getenv("DB_TABLE_SCRAPERS", "scraper") +SUBSCRIPTION_TABLE = os.getenv( + "DB_TABLE_SUBSCRIPTIONS", "engagements_investorprofilesubscription" +) +PROPERTY_MATCH_TABLE = os.getenv( + "DB_TABLE_PROPERTY_MATCHES", "engagements_investorpropertymatch" +) if not API_TOKEN: raise RuntimeError( @@ -242,12 +255,35 @@ def _validate_property_types(value: str | None) -> str | None: abort( 400, description=f"Invalid property types: {', '.join(invalid_types)}. 
" - f"Allowed values: {', '.join(sorted(valid_types))}" + f"Allowed values: {', '.join(sorted(valid_types))}", ) return value.strip() +def _cleanup_empty_values(data: Any) -> Any: + """Recursively remove empty lists, empty dicts, and None values from data structure.""" + if isinstance(data, dict): + cleaned = {} + for key, value in data.items(): + cleaned_value = _cleanup_empty_values(value) + # Only include non-empty values + if ( + cleaned_value is not None + and cleaned_value != [] + and cleaned_value != {} + ): + cleaned[key] = cleaned_value + return cleaned if cleaned else None + elif isinstance(data, list): + cleaned = [_cleanup_empty_values(item) for item in data] + # Remove None values from list + cleaned = [item for item in cleaned if item is not None] + return cleaned if cleaned else None + else: + return data + + def build_scraper_params( params: Dict[str, Any], first_seen_days: int | None, @@ -281,7 +317,10 @@ def build_scraper_params( base = deepcopy(params) merged = always_merger.merge(base, dynamic_params) - return merged + + # Clean up empty values before returning + cleaned = _cleanup_empty_values(merged) + return cleaned if cleaned else {} def _require_bearer_token(header_value: str | None) -> str: @@ -420,6 +459,49 @@ def _abort_for_integrity_error(exc: psycopg.IntegrityError) -> None: abort(409, description=detail or "Database constraint violation") +def _hash_django_password(password: str, iterations: int = 260000) -> str: + """Hash a password using Django's PBKDF2 format: pbkdf2_sha256$$$""" + import hashlib + import base64 + import secrets + + # Generate random salt + salt = secrets.token_urlsafe(12) + + # Hash the password + hash_bytes = hashlib.pbkdf2_hmac( + "sha256", password.encode("utf-8"), salt.encode("utf-8"), iterations + ) + + # Encode to base64 + hash_b64 = base64.b64encode(hash_bytes).decode("ascii") + + return f"pbkdf2_sha256${iterations}${salt}${hash_b64}" + + +def _get_user_profiles(user_id: int) -> List[Dict[str, Any]]: + """Fetch all profiles subscribed by a user.""" + query = sql.SQL( + """ + SELECT {profile_columns} + FROM {profile_table} + INNER JOIN {subscription_table} + ON {profile_table}.profile_id = {subscription_table}.investment_profile_id + WHERE {subscription_table}.investor_id = {user_id} + ORDER BY {subscription_table}.subscribed_at DESC + """ + ).format( + profile_columns=_columns_sql(PROFILE_RESPONSE_FIELDS), + profile_table=sql.Identifier(INVESTMENT_PROFILE_TABLE), + subscription_table=sql.Identifier(SUBSCRIPTION_TABLE), + user_id=sql.Placeholder("user_id"), + ) + profiles = _fetch_all(query, {"user_id": user_id}) + return [ + _serialize_row(p, datetime_fields=PROFILE_DATETIME_FIELDS) for p in profiles + ] + + USER_RESPONSE_FIELDS = ( "id", "username", @@ -464,9 +546,7 @@ SCRAPER_RESPONSE_FIELDS = ( "once", ) -SCRAPER_BOOL_FIELDS = ( - "once", -) +SCRAPER_BOOL_FIELDS = ("once",) SCRAPER_INT_FIELDS = ( "last_seen_days", @@ -616,6 +696,26 @@ def update_profile(profile_id: str): @app.delete("/profiles/") def delete_profile(profile_id: str): profile_uuid = _parse_uuid(profile_id, "profile_id") + + # Delete all property matches for this profile first + delete_matches_query = sql.SQL( + "DELETE FROM {table} WHERE investment_profile_id = {profile_id}" + ).format( + table=sql.Identifier(PROPERTY_MATCH_TABLE), + profile_id=sql.Placeholder("profile_id"), + ) + _execute(delete_matches_query, {"profile_id": profile_uuid}) + + # Delete all subscriptions for this profile + delete_subscriptions_query = sql.SQL( + "DELETE FROM {table} WHERE 
investment_profile_id = {profile_id}" + ).format( + table=sql.Identifier(SUBSCRIPTION_TABLE), + profile_id=sql.Placeholder("profile_id"), + ) + _execute(delete_subscriptions_query, {"profile_id": profile_uuid}) + + # Now delete the profile deleted = _delete_row(INVESTMENT_PROFILE_TABLE, "profile_id", profile_uuid) if not deleted: abort(404, description="Profile not found") @@ -630,9 +730,11 @@ def get_users(): table=sql.Identifier(USER_TABLE), ) ) - payload = [ - _serialize_row(row, datetime_fields=USER_DATETIME_FIELDS) for row in rows - ] + payload = [] + for row in rows: + user = _serialize_row(row, datetime_fields=USER_DATETIME_FIELDS) + user["profiles"] = _get_user_profiles(row["id"]) + payload.append(user) return jsonify(payload) @@ -648,7 +750,9 @@ def get_user(user_id: int): ) if row is None: abort(404, description="User not found") - return jsonify(_serialize_row(row, datetime_fields=USER_DATETIME_FIELDS)) + user = _serialize_row(row, datetime_fields=USER_DATETIME_FIELDS) + user["profiles"] = _get_user_profiles(user_id) + return jsonify(user) @app.post("/users") @@ -656,7 +760,8 @@ def create_user(): payload = _get_json_body() user_data: Dict[str, Any] = {} - user_data["password"] = _parse_string(payload.get("password"), "password") + raw_password = _parse_string(payload.get("password"), "password") + user_data["password"] = _hash_django_password(raw_password) user_data["username"] = _parse_string(payload.get("username"), "username") user_data["first_name"] = _parse_string(payload.get("first_name"), "first_name") user_data["last_name"] = _parse_string(payload.get("last_name"), "last_name") @@ -676,15 +781,62 @@ def create_user(): user_data["last_login"] = _parse_datetime(payload.get("last_login"), "last_login") + # Parse profile_ids if provided + profile_ids: List[UUID] = [] + if "profile_ids" in payload: + raw_profile_ids = payload["profile_ids"] + if not isinstance(raw_profile_ids, list): + abort(400, description="Field 'profile_ids' must be a list") + for idx, pid in enumerate(raw_profile_ids): + profile_ids.append(_parse_uuid(pid, f"profile_ids[{idx}]")) + try: row = _insert_row(USER_TABLE, user_data, USER_RESPONSE_FIELDS) except psycopg.IntegrityError as exc: _abort_for_integrity_error(exc) - return ( - jsonify(_serialize_row(row, datetime_fields=USER_DATETIME_FIELDS)), - 201, - ) + user_id = row["id"] + + # Create profile subscriptions + for profile_id in profile_ids: + subscription_data = { + "subscription_id": uuid4(), + "investor_id": user_id, + "investment_profile_id": profile_id, + "subscribed_at": datetime.now(timezone.utc), + } + try: + _insert_row( + SUBSCRIPTION_TABLE, + subscription_data, + ( + "subscription_id", + "investor_id", + "investment_profile_id", + "subscribed_at", + ), + ) + except psycopg.IntegrityError as exc: + _abort_for_integrity_error(exc) + + # Fetch profiles for response + profiles = [] + if profile_ids: + profiles_query = sql.SQL( + "SELECT {columns} FROM {table} WHERE profile_id = ANY({profile_ids})" + ).format( + columns=_columns_sql(PROFILE_RESPONSE_FIELDS), + table=sql.Identifier(INVESTMENT_PROFILE_TABLE), + profile_ids=sql.Placeholder("profile_ids"), + ) + profiles = _fetch_all(profiles_query, {"profile_ids": profile_ids}) + + response = _serialize_row(row, datetime_fields=USER_DATETIME_FIELDS) + response["profiles"] = [ + _serialize_row(p, datetime_fields=PROFILE_DATETIME_FIELDS) for p in profiles + ] + + return jsonify(response), 201 @app.put("/users/") @@ -693,7 +845,8 @@ def update_user(user_id: int): updates: Dict[str, Any] = {} if 
"password" in payload: - updates["password"] = _parse_string(payload["password"], "password") + raw_password = _parse_string(payload["password"], "password") + updates["password"] = _hash_django_password(raw_password) if "username" in payload: updates["username"] = _parse_string(payload["username"], "username") if "first_name" in payload: @@ -712,22 +865,91 @@ def update_user(user_id: int): if "last_login" in payload: updates["last_login"] = _parse_datetime(payload["last_login"], "last_login") - if not updates: + # Handle profile_ids update + update_profiles = False + profile_ids: List[UUID] = [] + if "profile_ids" in payload: + update_profiles = True + raw_profile_ids = payload["profile_ids"] + if not isinstance(raw_profile_ids, list): + abort(400, description="Field 'profile_ids' must be a list") + for idx, pid in enumerate(raw_profile_ids): + profile_ids.append(_parse_uuid(pid, f"profile_ids[{idx}]")) + + if not updates and not update_profiles: abort(400, description="No updatable fields provided") - try: - row = _update_row(USER_TABLE, "id", user_id, updates, USER_RESPONSE_FIELDS) - except psycopg.IntegrityError as exc: - _abort_for_integrity_error(exc) + if updates: + try: + row = _update_row(USER_TABLE, "id", user_id, updates, USER_RESPONSE_FIELDS) + except psycopg.IntegrityError as exc: + _abort_for_integrity_error(exc) - if row is None: - abort(404, description="User not found") + if row is None: + abort(404, description="User not found") + else: + # Verify user exists + row = _fetch_one( + sql.SQL("SELECT {columns} FROM {table} WHERE id = {identifier}").format( + columns=_columns_sql(USER_RESPONSE_FIELDS), + table=sql.Identifier(USER_TABLE), + identifier=sql.Placeholder("user_id"), + ), + {"user_id": user_id}, + ) + if row is None: + abort(404, description="User not found") - return jsonify(_serialize_row(row, datetime_fields=USER_DATETIME_FIELDS)) + # Update profile subscriptions if requested + if update_profiles: + # Delete existing subscriptions + delete_query = sql.SQL( + "DELETE FROM {table} WHERE investor_id = {user_id}" + ).format( + table=sql.Identifier(SUBSCRIPTION_TABLE), + user_id=sql.Placeholder("user_id"), + ) + _execute(delete_query, {"user_id": user_id}) + + # Create new subscriptions + for profile_id in profile_ids: + subscription_data = { + "subscription_id": uuid4(), + "investor_id": user_id, + "investment_profile_id": profile_id, + "subscribed_at": datetime.now(timezone.utc), + } + try: + _insert_row( + SUBSCRIPTION_TABLE, + subscription_data, + ( + "subscription_id", + "investor_id", + "investment_profile_id", + "subscribed_at", + ), + ) + except psycopg.IntegrityError as exc: + _abort_for_integrity_error(exc) + + user = _serialize_row(row, datetime_fields=USER_DATETIME_FIELDS) + user["profiles"] = _get_user_profiles(user_id) + return jsonify(user) @app.delete("/users/") def delete_user(user_id: int): + # Delete all profile subscriptions for this user first + delete_subscriptions_query = sql.SQL( + "DELETE FROM {table} WHERE investor_id = {user_id}" + ).format( + table=sql.Identifier(SUBSCRIPTION_TABLE), + user_id=sql.Placeholder("user_id"), + ) + _execute(delete_subscriptions_query, {"user_id": user_id}) + + # Now delete the user deleted = _delete_row(USER_TABLE, "id", user_id) if not deleted: abort(404, description="User not found") @@ -778,7 +1000,11 @@ def create_scraper(): # Validation spéciale pour property_types if "property_types" in payload: value = payload["property_types"] - parsed_value = None if value is None else _parse_string(value, 
"property_types", allow_empty=True) + parsed_value = ( + None + if value is None + else _parse_string(value, "property_types", allow_empty=True) + ) data["property_types"] = _validate_property_types(parsed_value) for field in SCRAPER_INT_FIELDS: @@ -814,7 +1040,11 @@ def update_scraper(scraper_id: str): # Validation spéciale pour property_types if "property_types" in payload: value = payload["property_types"] - parsed_value = None if value is None else _parse_string(value, "property_types", allow_empty=True) + parsed_value = ( + None + if value is None + else _parse_string(value, "property_types", allow_empty=True) + ) updates["property_types"] = _validate_property_types(parsed_value) for field in SCRAPER_INT_FIELDS: @@ -858,17 +1088,26 @@ def delete_scraper(scraper_id: str): @app.post("/scrapers/count") def count_scraper_properties(): payload = _get_json_body() + print(f"[COUNT] Received payload: {payload}") base_params = _load_scraper_params(payload.get("params")) + print(f"[COUNT] Base params: {base_params}") + first_seen_days = _parse_optional_int( payload.get("first_seen_days"), "first_seen_days" ) last_seen_days = _parse_optional_int( payload.get("last_seen_days"), "last_seen_days" ) + print( + f"[COUNT] first_seen_days: {first_seen_days}, last_seen_days: {last_seen_days}" + ) query_filters = build_scraper_params(base_params, first_seen_days, last_seen_days) - flux_payload = {"query": query_filters} + print(f"[COUNT] Query filters after build: {query_filters}") + + flux_payload = {"query": {"filterProperty": query_filters}} + print(f"[COUNT] Fluximmo payload: {json.dumps(flux_payload, indent=2)}") headers = { "x-api-key": FLUXIMMO_API_KEY, @@ -879,7 +1118,10 @@ def count_scraper_properties(): response = requests.post( FLUXIMMO_COUNT_URL, json=flux_payload, headers=headers, timeout=15 ) - except requests.RequestException: + print(f"[COUNT] Fluximmo response status: {response.status_code}") + print(f"[COUNT] Fluximmo response body: {response.text}") + except requests.RequestException as e: + print(f"[COUNT] Request exception: {e}") abort(502, description="Fluximmo request failed") if response.status_code >= 400: @@ -906,5 +1148,100 @@ def count_scraper_properties(): return jsonify({"count": count_value}) +def _get_genai_client(): + """Get or create a Gemini client using Vertex AI.""" + if not GOOGLE_PROJECT_ID: + abort(503, description="GOOGLE_PROJECT_ID not configured") + + # Set credentials path if provided + if GOOGLE_CREDENTIALS_PATH: + os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = GOOGLE_CREDENTIALS_PATH + + client = genai.Client( + vertexai=True, + project=GOOGLE_PROJECT_ID, + location=GOOGLE_LOCATION, + ) + return client + + +def _call_gemini(system_prompt: str, user_prompt: str) -> str: + """Call Gemini API via Vertex AI and return the generated text.""" + try: + client = _get_genai_client() + + response = client.models.generate_content( + model="gemini-2.5-flash", + contents=user_prompt, + config=types.GenerateContentConfig( + system_instruction=system_prompt, + temperature=0.2, + top_k=40, + top_p=0.95, + max_output_tokens=8192, + ), + ) + + print( + f"[GEMINI] Response: {response.text[:500] if response.text else 'No text'}" + ) + + if not response.text: + abort(502, description="Gemini returned no content") + + return response.text + + except Exception as e: + print(f"[GEMINI] Error: {e}") + abort(502, description=f"Gemini request failed: {str(e)}") + + +def _extract_json_from_response(text: str) -> Dict[str, Any]: + """Extract JSON from Gemini response, handling markdown 
code blocks.""" + # Remove markdown code blocks if present + text = text.strip() + if text.startswith("```json"): + text = text[7:] + elif text.startswith("```"): + text = text[3:] + if text.endswith("```"): + text = text[:-3] + text = text.strip() + + try: + return json.loads(text) + except json.JSONDecodeError as e: + abort(400, description=f"Failed to parse Gemini response as JSON: {e}") + + +@app.post("/ai/generate-scraper") +def generate_scraper(): + """Generate scraper JSON from natural language prompt using Gemini.""" + payload = _get_json_body() + user_prompt = payload.get("prompt") + if not user_prompt: + abort(400, description="Field 'prompt' is required") + + generated_text = _call_gemini(SCRAPER_SCHEMA_DOC, user_prompt) + result = _extract_json_from_response(generated_text) + + return jsonify({"params": result}) + + +@app.post("/ai/generate-profile") +def generate_profile(): + """Generate profile JSON from natural language prompt using Gemini.""" + payload = _get_json_body() + user_prompt = payload.get("prompt") + if not user_prompt: + abort(400, description="Field 'prompt' is required") + + generated_text = _call_gemini(PROFILE_SCHEMA_DOC, user_prompt) + result = _extract_json_from_response(generated_text) + + # Return the result directly at root level (not wrapped in criteria) + return jsonify(result) + + if __name__ == "__main__": app.run(host="0.0.0.0", port=int(os.getenv("PORT", "3000")), debug=False) diff --git a/backend/requirements.txt b/backend/requirements.txt index e1fc536..4672fd9 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -4,3 +4,4 @@ psycopg[binary]>=3.1,<4.0 Flask-Cors>=4.0.0,<5.0.0 requests>=2.31.0,<3.0.0 deepmerge>=1.1.0,<2.0.0 +google-genai>=1.0.0 diff --git a/backend/schema_docs.py b/backend/schema_docs.py new file mode 100644 index 0000000..5cc7899 --- /dev/null +++ b/backend/schema_docs.py @@ -0,0 +1,488 @@ +"""Documentation détaillée des schémas pour l'assistant IA.""" + +SCRAPER_SCHEMA_DOC = """ +# Schéma Scraper Fluximmo + +Tu dois générer un JSON valide pour configurer un scraper de recherche immobilière. +Le JSON doit respecter exactement la structure suivante. + +## Structure principale + +```json +{ + "location": [...], // Obligatoire: liste des localisations + "type": [...], // Obligatoire: types de biens + "offer": [...], // Obligatoire: type d'offre + "meta": {...}, // Optionnel: filtres temporels + "price": {...}, // Optionnel: filtres de prix + "habitation": {...}, // Optionnel: caractéristiques du bien + "land": {...}, // Optionnel: caractéristiques du terrain + "adverts": [...], // Optionnel: filtres sur les annonces + "process": [...], // Optionnel: statut des annonces + "tags": [...] // Optionnel: tags personnalisés +} +``` + +## Champs détaillés + +### location (OBLIGATOIRE) +Liste d'objets de localisation. UTILISE UNIQUEMENT les codes suivants: + +1. **Pour une région/département entier**: utilise `department` avec le code département à 2 chiffres + - Bretagne: "22" (Côtes-d'Armor), "29" (Finistère), "35" (Ille-et-Vilaine), "56" (Morbihan) + - Autres exemples: "44" (Loire-Atlantique), "75" (Paris), "13" (Bouches-du-Rhône) + +2. **Pour des communes spécifiques**: utilise `inseeCode` avec le code INSEE à 5 chiffres + - Exemples: "35238" (Rennes), "56260" (Vannes), "29019" (Brest), "22278" (Saint-Brieuc) + +IMPORTANT: N'utilise JAMAIS de codes postaux ! Utilise uniquement `department` ou `inseeCode`. 
+ +Exemple pour toute la Bretagne: +```json +"location": [ + { "department": "22" }, + { "department": "29" }, + { "department": "35" }, + { "department": "56" } +] +``` + +Exemple pour des communes spécifiques: +```json +"location": [ + { "inseeCode": "35238" }, + { "inseeCode": "56260" } +] +``` + +### type (OBLIGATOIRE) +Liste des types de biens. Valeurs possibles: +- "CLASS_HOUSE" - Maison +- "CLASS_FLAT" - Appartement +- "CLASS_BUILDING" - Immeuble +- "CLASS_LAND" - Terrain +- "CLASS_PARKING" - Parking +- "CLASS_SHOP" - Commerce +- "CLASS_OFFICE" - Bureau +- "CLASS_PREMISES" - Local commercial +- "CLASS_PROGRAM" - Programme neuf + +Exemple: +```json +"type": ["CLASS_HOUSE", "CLASS_FLAT"] +``` + +### offer (OBLIGATOIRE) +Liste d'objets d'offre. Chaque objet contient: +- `type`: string - Type de transaction: + - "OFFER_BUY" - Achat + - "OFFER_RENT" - Location + - "OFFER_LIFE_ANNUITY_SALE" - Viager + - "OFFER_BUSINESS_TAKE_OVER" - Reprise de commerce + - "OFFER_HOLIDAYS" - Location vacances + +Exemple: +```json +"offer": [ + { "type": "OFFER_BUY" } +] +``` + +### meta (optionnel) +Filtres temporels sur les annonces: +- `firstSeenAt`: { "min": "date ISO", "max": "date ISO" } - Date de première apparition +- `lastSeenAt`: { "min": "date ISO", "max": "date ISO" } - Date de dernière vue +- `lastPublishedAt`: { "min": "date ISO", "max": "date ISO" } - Date de publication +- `isTotallyOffline`: boolean - Annonces retirées + +### price (optionnel) +Filtres de prix: +- `latest.value`: { "min": number, "max": number } - Fourchette de prix en euros +- `latest.valuePerArea`: { "min": number, "max": number } - Prix au m² +- `isAuction`: boolean - Vente aux enchères +- `scope`: ["PRICING_ONE_OFF"] pour achat, ["PRICING_MENSUAL"] pour location + +Exemple: +```json +"price": { + "latest": { + "value": { "min": 50000, "max": 200000 } + } +} +``` + +### habitation (optionnel) +Caractéristiques du bien: +- `roomCount`: { "min": number, "max": number } - Nombre de pièces +- `bedroomCount`: { "min": number, "max": number } - Nombre de chambres +- `bathroomCount`: { "min": number, "max": number } - Salles de bain +- `surface.total`: { "min": number, "max": number } - Surface totale m² +- `surface.livingSpace`: { "min": number, "max": number } - Surface habitable m² +- `characteristics`: objet avec booléens: + - `hasGarden`, `hasPool`, `hasGarage`, `hasParking`, `hasTerrace`, `hasBalcony` + - `hasCellar`, `hasLift`, `hasFireplace`, etc. +- `climate.epcEnergy`: Liste de classes DPE ["ENERGY_CLASSIFICATION_A", ..., "ENERGY_CLASSIFICATION_G"] +- `climate.epcClimate`: Liste de classes GES + +Exemple: +```json +"habitation": { + "bedroomCount": { "min": 3 }, + "surface": { + "total": { "min": 80, "max": 150 } + }, + "characteristics": { + "hasGarden": true + } +} +``` + +### land (optionnel) +Pour les terrains: +- `surface`: { "min": number, "max": number } - Surface en m² +- `canConstruct`: boolean - Constructible +- `isServiced`: boolean - Viabilisé +- `type`: ["LAND_BUILDING_PLOT", "LAND_AGRICULTURAL", ...] + +### adverts (optionnel) +Filtres sur les annonces: +- `isOnline`: boolean - Annonce en ligne +- `isPro`: boolean - Annonce professionnelle +- `isExclusive`: boolean - Exclusivité + +Exemple: +```json +"adverts": [ + { "isOnline": true } +] +``` + +### process (optionnel) +Statut de l'annonce. 
Valeurs possibles: +- "PROCESS_AVAILABLE_ON_MARKET" - Disponible +- "PROCESS_UNDER_COMPROMISE" - Sous compromis +- "PROCESS_RENTED_SOLD" - Vendu/Loué +- "PROCESS_RESERVED" - Réservé + +## VALEURS PAR DÉFAUT OBLIGATOIRES + +IMPORTANT: Tous les scrapers générés DOIVENT inclure la section habitation.climate avec toutes les classes DPE et GES: + +```json +"habitation": { + "climate": { + "epcClimate": [ + "GREENHOUSE_CLASSIFICATION_A", + "GREENHOUSE_CLASSIFICATION_B", + "GREENHOUSE_CLASSIFICATION_C", + "GREENHOUSE_CLASSIFICATION_D", + "GREENHOUSE_CLASSIFICATION_E", + "GREENHOUSE_CLASSIFICATION_F", + "GREENHOUSE_CLASSIFICATION_G" + ], + "epcEnergy": [ + "ENERGY_CLASSIFICATION_A", + "ENERGY_CLASSIFICATION_B", + "ENERGY_CLASSIFICATION_C", + "ENERGY_CLASSIFICATION_D", + "ENERGY_CLASSIFICATION_E", + "ENERGY_CLASSIFICATION_F", + "ENERGY_CLASSIFICATION_G" + ] + } +} +``` + +## Exemples complets + +### Maisons en Bretagne à acheter +```json +{ + "location": [ + { "department": "22" }, + { "department": "29" }, + { "department": "35" }, + { "department": "56" } + ], + "type": ["CLASS_HOUSE"], + "offer": [ + { "type": "OFFER_BUY" } + ], + "habitation": { + "climate": { + "epcClimate": [ + "GREENHOUSE_CLASSIFICATION_A", + "GREENHOUSE_CLASSIFICATION_B", + "GREENHOUSE_CLASSIFICATION_C", + "GREENHOUSE_CLASSIFICATION_D", + "GREENHOUSE_CLASSIFICATION_E", + "GREENHOUSE_CLASSIFICATION_F", + "GREENHOUSE_CLASSIFICATION_G" + ], + "epcEnergy": [ + "ENERGY_CLASSIFICATION_A", + "ENERGY_CLASSIFICATION_B", + "ENERGY_CLASSIFICATION_C", + "ENERGY_CLASSIFICATION_D", + "ENERGY_CLASSIFICATION_E", + "ENERGY_CLASSIFICATION_F", + "ENERGY_CLASSIFICATION_G" + ] + } + } +} +``` + +### Appartements à louer à Rennes avec 2+ chambres +```json +{ + "location": [ + { "inseeCode": "35238" } + ], + "type": ["CLASS_FLAT"], + "offer": [ + { "type": "OFFER_RENT" } + ], + "habitation": { + "bedroomCount": { "min": 2 }, + "climate": { + "epcClimate": [ + "GREENHOUSE_CLASSIFICATION_A", + "GREENHOUSE_CLASSIFICATION_B", + "GREENHOUSE_CLASSIFICATION_C", + "GREENHOUSE_CLASSIFICATION_D", + "GREENHOUSE_CLASSIFICATION_E", + "GREENHOUSE_CLASSIFICATION_F", + "GREENHOUSE_CLASSIFICATION_G" + ], + "epcEnergy": [ + "ENERGY_CLASSIFICATION_A", + "ENERGY_CLASSIFICATION_B", + "ENERGY_CLASSIFICATION_C", + "ENERGY_CLASSIFICATION_D", + "ENERGY_CLASSIFICATION_E", + "ENERGY_CLASSIFICATION_F", + "ENERGY_CLASSIFICATION_G" + ] + } + } +} +``` + +### Immeubles de rapport en Bretagne +```json +{ + "location": [ + { "department": "35" }, + { "department": "56" } + ], + "type": ["CLASS_BUILDING"], + "offer": [ + { "type": "OFFER_BUY" } + ], + "price": { + "latest": { + "value": { "max": 500000 } + } + }, + "habitation": { + "climate": { + "epcClimate": [ + "GREENHOUSE_CLASSIFICATION_A", + "GREENHOUSE_CLASSIFICATION_B", + "GREENHOUSE_CLASSIFICATION_C", + "GREENHOUSE_CLASSIFICATION_D", + "GREENHOUSE_CLASSIFICATION_E", + "GREENHOUSE_CLASSIFICATION_F", + "GREENHOUSE_CLASSIFICATION_G" + ], + "epcEnergy": [ + "ENERGY_CLASSIFICATION_A", + "ENERGY_CLASSIFICATION_B", + "ENERGY_CLASSIFICATION_C", + "ENERGY_CLASSIFICATION_D", + "ENERGY_CLASSIFICATION_E", + "ENERGY_CLASSIFICATION_F", + "ENERGY_CLASSIFICATION_G" + ] + } + } +} +``` + +IMPORTANT: Réponds UNIQUEMENT avec le JSON, sans texte avant ou après. Le JSON doit être valide et parsable. N'oublie JAMAIS d'inclure la section habitation.climate avec toutes les classes DPE et GES! 
+""" + +PROFILE_SCHEMA_DOC = """ +# Schéma Profile d'Investissement + +Tu dois générer un JSON valide pour configurer un profil d'investissement immobilier. +Le JSON doit respecter exactement la structure suivante. + +## Structure principale + +```json +{ + "transaction_type": "sale", // Type de transaction + "property_type": [...], // Types de biens + "codes_insee": [...], // Codes INSEE des communes + "min_price": number, // Prix minimum + "max_price": number, // Prix maximum + "min_size": number, // Surface minimale m² + "max_size": number, // Surface maximale m² + "min_bedrooms": number, // Chambres minimum + "max_bedrooms": number, // Chambres maximum + "dpe_classes": [...], // Classes DPE acceptées + "ges_classes": [...], // Classes GES acceptées + "characteristics": [...], // Caractéristiques requises + "target_yield": number, // Rendement cible % + "require_address": boolean // Adresse requise +} +``` + +## Champs détaillés + +### transaction_type +Type de transaction: +- "sale" - Achat/Vente +- "rent" - Location +- "viager" - Viager + +### property_type +Liste des types de biens recherchés: +- "house" - Maison +- "apartment" - Appartement +- "building" - Immeuble +- "land" - Terrain +- "parking" - Parking + +Exemple: +```json +"property_type": ["house", "apartment"] +``` + +### codes_insee +Liste des codes de localisation. UTILISE UNIQUEMENT: +- **Codes département** (2 chiffres) pour une région entière: "22", "29", "35", "56" +- **Codes INSEE** (5 chiffres) pour des communes spécifiques: "35238", "56260" + +IMPORTANT: N'utilise JAMAIS de codes postaux ! + +Exemple pour toute la Bretagne: +```json +"codes_insee": ["22", "29", "35", "56"] +``` + +Exemple pour des communes spécifiques: +```json +"codes_insee": ["35238", "56260", "22168", "29039"] +``` + +### Filtres de prix +- `min_price`: number - Prix minimum en euros +- `max_price`: number - Prix maximum en euros + +### Filtres de surface +- `min_size`: number - Surface minimale en m² +- `max_size`: number - Surface maximale en m² + +### Filtres de chambres +- `min_bedrooms`: number - Nombre minimum de chambres +- `max_bedrooms`: number - Nombre maximum de chambres +- `min_bathrooms`: number - Nombre minimum de salles de bain + +### dpe_classes / ges_classes +Classes énergétiques acceptées: ["A", "B", "C", "D", "E", "F", "G", "NC"] + +Exemple: +```json +"dpe_classes": ["A", "B", "C", "D"] +``` + +### characteristics +Liste de groupes de caractéristiques. Chaque groupe: +```json +{ + "type": "any" | "all" | "none", + "description": "Description du groupe", + "items": ["has_garden", "has_pool", ...] 
+} +``` + +Caractéristiques disponibles: +- has_alarm, has_balcony, has_cellar, has_lift, has_pool +- has_garage, has_garden, has_terrace, has_parking, has_fireplace +- has_mezzanine, has_concierge, has_digicode, has_interphone +- has_jacuzzi, has_land, has_land_division, has_grenier +- has_vis_a_vis, is_peaceful, has_two_doors_at_entrance, is_squatted + +Exemple: +```json +"characteristics": [ + { + "type": "any", + "items": ["has_garden", "has_terrace"] + } +] +``` + +### target_yield +Rendement locatif cible en pourcentage (ex: 7.5 pour 7.5%) + +### require_address +Boolean - Exiger une adresse complète pour les biens + +## Exemples complets + +### Maisons en Bretagne à acheter (toute la région) +```json +{ + "transaction_type": "sale", + "property_type": ["house"], + "codes_insee": ["22", "29", "35", "56"], + "max_price": 200000 +} +``` + +### Appartements pour investissement locatif à Rennes +```json +{ + "transaction_type": "sale", + "property_type": ["apartment"], + "codes_insee": ["35238"], + "min_size": 40, + "max_size": 80, + "min_bedrooms": 2, + "target_yield": 6, + "dpe_classes": ["A", "B", "C", "D"] +} +``` + +### Immeubles de rapport en Ille-et-Vilaine et Morbihan +```json +{ + "transaction_type": "sale", + "property_type": ["building"], + "codes_insee": ["35", "56"], + "max_price": 500000, + "target_yield": 8 +} +``` + +### Maisons avec terrain divisible en Bretagne +```json +{ + "transaction_type": "sale", + "property_type": ["house"], + "codes_insee": ["22", "29", "35", "56"], + "characteristics": [ + { + "type": "all", + "items": ["has_land_division"] + } + ] +} +``` + +IMPORTANT: Réponds UNIQUEMENT avec le JSON, sans texte avant ou après. Le JSON doit être valide et parsable. N'utilise JAMAIS de codes postaux, uniquement des codes département (2 chiffres) ou INSEE (5 chiffres)! 
+""" diff --git a/frontend/bun.lock b/frontend/bun.lock index e4c9bfa..5bb2e88 100644 --- a/frontend/bun.lock +++ b/frontend/bun.lock @@ -6,15 +6,18 @@ "dependencies": { "@radix-ui/react-checkbox": "^1.3.3", "@radix-ui/react-dialog": "^1.1.15", + "@radix-ui/react-popover": "^1.1.15", "@radix-ui/react-select": "^2.2.6", - "@radix-ui/react-slot": "^1.2.3", + "@radix-ui/react-slot": "^1.2.4", "@radix-ui/react-tabs": "^1.1.13", "@tailwindcss/vite": "^4.1.14", "@tanstack/react-query": "^5.90.3", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", + "date-fns": "^4.1.0", "lucide-react": "^0.545.0", "react": "^19.1.1", + "react-day-picker": "^9.11.1", "react-dom": "^19.1.1", "react-hook-form": "^7.65.0", "tailwind-merge": "^3.3.1", @@ -80,6 +83,8 @@ "@babel/types": ["@babel/types@7.28.4", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.27.1" } }, "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q=="], + "@date-fns/tz": ["@date-fns/tz@1.4.1", "", {}, "sha512-P5LUNhtbj6YfI3iJjw5EL9eUAG6OitD0W3fWQcpQjDRc/QIsL0tRNuO1PcDvPccWL1fSTXXdE1ds+l95DV/OFA=="], + "@emnapi/core": ["@emnapi/core@1.5.0", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" } }, "sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg=="], "@emnapi/runtime": ["@emnapi/runtime@1.5.0", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ=="], @@ -170,6 +175,8 @@ "@radix-ui/react-id": ["@radix-ui/react-id@1.1.1", "", { "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg=="], + "@radix-ui/react-popover": ["@radix-ui/react-popover@1.1.15", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-focus-guards": "1.1.3", "@radix-ui/react-focus-scope": "1.1.7", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-popper": "1.2.8", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-controllable-state": "1.2.2", "aria-hidden": "^1.2.4", "react-remove-scroll": "^2.6.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-kr0X2+6Yy/vJzLYJUPCZEc8SfQcf+1COFoAqauJm74umQhta9M7lNJHP7QQS3vkvcGLQUbWpMzwrXYwrYztHKA=="], + "@radix-ui/react-popper": ["@radix-ui/react-popper@1.2.8", "", { "dependencies": { "@floating-ui/react-dom": "^2.0.0", "@radix-ui/react-arrow": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-layout-effect": "1.1.1", "@radix-ui/react-use-rect": "1.1.1", "@radix-ui/react-use-size": "1.1.1", "@radix-ui/rect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", 
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw=="], "@radix-ui/react-portal": ["@radix-ui/react-portal@1.1.9", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ=="], @@ -182,7 +189,7 @@ "@radix-ui/react-select": ["@radix-ui/react-select@2.2.6", "", { "dependencies": { "@radix-ui/number": "1.1.1", "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-focus-guards": "1.1.3", "@radix-ui/react-focus-scope": "1.1.7", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-popper": "1.2.8", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-layout-effect": "1.1.1", "@radix-ui/react-use-previous": "1.1.1", "@radix-ui/react-visually-hidden": "1.2.3", "aria-hidden": "^1.2.4", "react-remove-scroll": "^2.6.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-I30RydO+bnn2PQztvo25tswPH+wFBjehVGtmagkU78yMdwTwVf12wnAOF+AeP8S2N8xD+5UPbGhkUfPyvT+mwQ=="], - "@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], + "@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.4", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA=="], "@radix-ui/react-tabs": ["@radix-ui/react-tabs@1.1.13", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-roving-focus": "1.1.11", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-7xdcatg7/U+7+Udyoj2zodtI9H/IIopqo+YOIcZOq1nJwXWBZ9p8xiu5llXlekDbZkca79a/fozEYQXIA4sW6A=="], @@ -362,6 +369,10 @@ "csstype": ["csstype@3.1.3", "", {}, 
"sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="], + "date-fns": ["date-fns@4.1.0", "", {}, "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg=="], + + "date-fns-jalali": ["date-fns-jalali@4.1.0-0", "", {}, "sha512-hTIP/z+t+qKwBDcmmsnmjWTduxCg+5KfdqWQvb2X/8C9+knYY6epN/pfxdDuyVlSVeFz0sM5eEfwIUQ70U4ckg=="], + "debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="], "deep-is": ["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="], @@ -548,6 +559,8 @@ "react": ["react@19.2.0", "", {}, "sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ=="], + "react-day-picker": ["react-day-picker@9.11.1", "", { "dependencies": { "@date-fns/tz": "^1.4.1", "date-fns": "^4.1.0", "date-fns-jalali": "^4.1.0-0" }, "peerDependencies": { "react": ">=16.8.0" } }, "sha512-l3ub6o8NlchqIjPKrRFUCkTUEq6KwemQlfv3XZzzwpUeGwmDJ+0u0Upmt38hJyd7D/vn2dQoOoLV/qAp0o3uUw=="], + "react-dom": ["react-dom@19.2.0", "", { "dependencies": { "scheduler": "^0.27.0" }, "peerDependencies": { "react": "^19.2.0" } }, "sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ=="], "react-hook-form": ["react-hook-form@7.65.0", "", { "peerDependencies": { "react": "^16.8.0 || ^17 || ^18 || ^19" } }, "sha512-xtOzDz063WcXvGWaHgLNrNzlsdFgtUWcb32E6WFaGTd7kPZG3EeDusjdZfUsPwKCKVXy1ZlntifaHZ4l8pAsmw=="], @@ -630,6 +643,16 @@ "@eslint/eslintrc/globals": ["globals@14.0.0", "", {}, "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ=="], + "@radix-ui/react-collection/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], + + "@radix-ui/react-dialog/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], + + "@radix-ui/react-popover/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], + + "@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], + + "@radix-ui/react-select/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", 
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], + "@tailwindcss/node/lightningcss": ["lightningcss@1.30.1", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-darwin-arm64": "1.30.1", "lightningcss-darwin-x64": "1.30.1", "lightningcss-freebsd-x64": "1.30.1", "lightningcss-linux-arm-gnueabihf": "1.30.1", "lightningcss-linux-arm64-gnu": "1.30.1", "lightningcss-linux-arm64-musl": "1.30.1", "lightningcss-linux-x64-gnu": "1.30.1", "lightningcss-linux-x64-musl": "1.30.1", "lightningcss-win32-arm64-msvc": "1.30.1", "lightningcss-win32-x64-msvc": "1.30.1" } }, "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg=="], "@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.5.0", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" }, "bundled": true }, "sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg=="], diff --git a/frontend/index.html b/frontend/index.html index 072a57e..dc24e25 100644 --- a/frontend/index.html +++ b/frontend/index.html @@ -6,7 +6,7 @@ frontend - +
diff --git a/frontend/package.json b/frontend/package.json index c76c0ae..f435f02 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -12,15 +12,18 @@ "dependencies": { "@radix-ui/react-checkbox": "^1.3.3", "@radix-ui/react-dialog": "^1.1.15", + "@radix-ui/react-popover": "^1.1.15", "@radix-ui/react-select": "^2.2.6", - "@radix-ui/react-slot": "^1.2.3", + "@radix-ui/react-slot": "^1.2.4", "@radix-ui/react-tabs": "^1.1.13", "@tailwindcss/vite": "^4.1.14", "@tanstack/react-query": "^5.90.3", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", + "date-fns": "^4.1.0", "lucide-react": "^0.545.0", "react": "^19.1.1", + "react-day-picker": "^9.11.1", "react-dom": "^19.1.1", "react-hook-form": "^7.65.0", "tailwind-merge": "^3.3.1", diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index a9a3f9b..133c7dd 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -1,81 +1,16 @@ -import { useEffect, useState } from "react"; - -import { clearSessionToken, getSessionToken, setSessionToken } from "@/lib/api"; import { ProfilesTab } from "@/features/profiles/ProfilesTab"; import { ScrapersTab } from "@/features/scrapers/ScrapersTab"; -import { Button } from "@/components/ui/button"; -import { - Card, - CardContent, - CardDescription, - CardHeader, - CardTitle, -} from "@/components/ui/card"; -import { Input } from "@/components/ui/input"; +import { UsersTab } from "@/features/users/UsersTab"; import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"; -function SessionTokenPanel() { - const [token, setToken] = useState(() => getSessionToken() ?? ""); - const [feedback, setFeedback] = useState(null); - - useEffect(() => { - setToken(getSessionToken() ?? ""); - }, []); - - const handleSave = () => { - setSessionToken(token.trim()); - setFeedback("Token sauvegarde."); - }; - - const handleClear = () => { - clearSessionToken(); - setToken(""); - setFeedback("Token efface."); - }; - - return ( - - - Session API - - Fournissez le jeton recu depuis /auth/login pour authentifier les appels. - - - -
- { - setToken(event.target.value); - setFeedback(null); - }} - /> -
- - -
-
- {feedback ? ( -
{feedback}
- ) : null} -
-
- ); -} - function App() { return (
- Profils Scrapers + Utilisateurs @@ -83,6 +18,9 @@ function App() { + + +
); diff --git a/frontend/src/components/ui/calendar.tsx b/frontend/src/components/ui/calendar.tsx new file mode 100644 index 0000000..04fbb7b --- /dev/null +++ b/frontend/src/components/ui/calendar.tsx @@ -0,0 +1,214 @@ +import * as React from "react" +import { + ChevronDownIcon, + ChevronLeftIcon, + ChevronRightIcon, +} from "lucide-react" +import { DayButton, DayPicker, getDefaultClassNames } from "react-day-picker" + +import { cn } from "@/lib/utils" +import { Button, buttonVariants } from "@/components/ui/button" + +function Calendar({ + className, + classNames, + showOutsideDays = true, + captionLayout = "label", + buttonVariant = "ghost", + formatters, + components, + ...props +}: React.ComponentProps & { + buttonVariant?: React.ComponentProps["variant"] +}) { + const defaultClassNames = getDefaultClassNames() + + return ( + svg]:rotate-180`, + String.raw`rtl:**:[.rdp-button\_previous>svg]:rotate-180`, + className + )} + captionLayout={captionLayout} + formatters={{ + formatMonthDropdown: (date) => + date.toLocaleString("default", { month: "short" }), + ...formatters, + }} + classNames={{ + root: cn("w-fit", defaultClassNames.root), + months: cn( + "flex gap-4 flex-col md:flex-row relative", + defaultClassNames.months + ), + month: cn("flex flex-col w-full gap-4", defaultClassNames.month), + nav: cn( + "flex items-center gap-1 w-full absolute top-0 inset-x-0 justify-between", + defaultClassNames.nav + ), + button_previous: cn( + buttonVariants({ variant: buttonVariant }), + "size-(--cell-size) aria-disabled:opacity-50 p-0 select-none", + defaultClassNames.button_previous + ), + button_next: cn( + buttonVariants({ variant: buttonVariant }), + "size-(--cell-size) aria-disabled:opacity-50 p-0 select-none", + defaultClassNames.button_next + ), + month_caption: cn( + "flex items-center justify-center h-(--cell-size) w-full px-(--cell-size)", + defaultClassNames.month_caption + ), + dropdowns: cn( + "w-full flex items-center text-sm font-medium justify-center h-(--cell-size) gap-1.5", + defaultClassNames.dropdowns + ), + dropdown_root: cn( + "relative has-focus:border-ring border border-input shadow-xs has-focus:ring-ring/50 has-focus:ring-[3px] rounded-md", + defaultClassNames.dropdown_root + ), + dropdown: cn( + "absolute bg-popover inset-0 opacity-0", + defaultClassNames.dropdown + ), + caption_label: cn( + "select-none font-medium", + captionLayout === "label" + ? "text-sm" + : "rounded-md pl-2 pr-1 flex items-center gap-1 text-sm h-8 [&>svg]:text-muted-foreground [&>svg]:size-3.5", + defaultClassNames.caption_label + ), + table: "w-full border-collapse", + weekdays: cn("flex", defaultClassNames.weekdays), + weekday: cn( + "text-muted-foreground rounded-md flex-1 font-normal text-[0.8rem] select-none", + defaultClassNames.weekday + ), + week: cn("flex w-full mt-2", defaultClassNames.week), + week_number_header: cn( + "select-none w-(--cell-size)", + defaultClassNames.week_number_header + ), + week_number: cn( + "text-[0.8rem] select-none text-muted-foreground", + defaultClassNames.week_number + ), + day: cn( + "relative w-full h-full p-0 text-center [&:last-child[data-selected=true]_button]:rounded-r-md group/day aspect-square select-none", + props.showWeekNumber + ? 
"[&:nth-child(2)[data-selected=true]_button]:rounded-l-md" + : "[&:first-child[data-selected=true]_button]:rounded-l-md", + defaultClassNames.day + ), + range_start: cn( + "rounded-l-md bg-accent", + defaultClassNames.range_start + ), + range_middle: cn("rounded-none", defaultClassNames.range_middle), + range_end: cn("rounded-r-md bg-accent", defaultClassNames.range_end), + today: cn( + "bg-accent text-accent-foreground rounded-md data-[selected=true]:rounded-none", + defaultClassNames.today + ), + outside: cn( + "text-muted-foreground aria-selected:text-muted-foreground", + defaultClassNames.outside + ), + disabled: cn( + "text-muted-foreground opacity-50", + defaultClassNames.disabled + ), + hidden: cn("invisible", defaultClassNames.hidden), + ...classNames, + }} + components={{ + Root: ({ className, rootRef, ...props }) => { + return ( +
+ ) + }, + Chevron: ({ className, orientation, ...props }) => { + if (orientation === "left") { + return ( + + ) + } + + if (orientation === "right") { + return ( + + ) + } + + return ( + + ) + }, + DayButton: CalendarDayButton, + WeekNumber: ({ children, ...props }) => { + return ( + +
+ {children} +
+ + ) + }, + ...components, + }} + {...props} + /> + ) +} + +function CalendarDayButton({ + className, + day, + modifiers, + ...props +}: React.ComponentProps) { + const defaultClassNames = getDefaultClassNames() + + const ref = React.useRef(null) + React.useEffect(() => { + if (modifiers.focused) ref.current?.focus() + }, [modifiers.focused]) + + return ( + + +
+ - Creer un profil - - Renseignez les informations du profil ainsi que les criteres JSON a utiliser. - +
+
+ Creer un profil + + Renseignez les informations du profil ainsi que les criteres JSON a utiliser. + +
+ +
@@ -398,16 +475,34 @@ export function ProfilesTab() { - + - Profils existants - - {profilesQuery.isLoading - ? "Chargement des profils..." - : profiles.length === 0 - ? "Aucun profil enregistre." - : "Liste recue depuis l'API."} - +
+
+ Profils existants + + {profilesQuery.isLoading + ? "Chargement des profils..." + : profiles.length === 0 + ? "Aucun profil enregistre." + : "Liste recue depuis l'API."} + +
+ {profiles.length > 0 && ( + + )} +
{profilesQuery.isError ? ( @@ -434,9 +529,23 @@ export function ProfilesTab() { {profile.description ?? "-"} -
-                        {JSON.stringify(profile.criteria, null, 2)}
-                      
+
+
+                          {JSON.stringify(profile.criteria, null, 2)}
+                        
+ +
+ + { + setIsAiDialogOpen(open); + if (!open) { + aiGenerateMutation.reset(); + setAiPrompt(""); + } + }} + > + + + Generer un profil avec l'IA + + Decrivez en langage naturel le type de profil d'investissement que vous souhaitez creer. + L'IA generera les criteres JSON correspondants. + + + +
+
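
The diff above adds two Gemini-backed routes, `POST /ai/generate-scraper` and `POST /ai/generate-profile`, which take a natural-language `prompt` and return generated JSON (wrapped under `params` for scrapers, at the root level for profiles). Below is a minimal client sketch; the local base URL, the bearer-token header, and the token value are assumptions not shown in the excerpt, while the paths, request field, and response shapes come from the diff.

```python
# Minimal sketch of calling the new AI generation endpoints.
# Assumptions: local dev server on the default port 3000 and bearer-token auth
# as for the other routes; replace <API_TOKEN> with a real token.
import requests

BASE_URL = "http://localhost:3000"
HEADERS = {"Authorization": "Bearer <API_TOKEN>"}

# Returns {"params": {...}} with Fluximmo-style query filters.
scraper = requests.post(
    f"{BASE_URL}/ai/generate-scraper",
    json={"prompt": "Maisons a acheter en Bretagne, budget 200 000 euros"},
    headers=HEADERS,
    timeout=30,
)
scraper.raise_for_status()
print(scraper.json()["params"])

# Returns the generated investment-profile criteria directly at the root level.
profile = requests.post(
    f"{BASE_URL}/ai/generate-profile",
    json={"prompt": "Appartements locatifs a Rennes, 2 chambres minimum"},
    headers=HEADERS,
    timeout=30,
)
profile.raise_for_status()
print(profile.json())
```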
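`_hash_django_password` stores passwords in Django's default PBKDF2-SHA256 encoding, `pbkdf2_sha256$<iterations>$<salt>$<base64 hash>`. The sketch below checks a plaintext password against such a value using only the standard library; `verify_django_password` is an illustrative helper, not part of the diff.

```python
# Sketch of verifying a password against the format produced by
# _hash_django_password: "pbkdf2_sha256$<iterations>$<salt>$<base64 hash>".
# verify_django_password is an illustrative helper, not part of the codebase.
import base64
import hashlib
import hmac


def verify_django_password(password: str, encoded: str) -> bool:
    algorithm, iterations, salt, hash_b64 = encoded.split("$", 3)
    if algorithm != "pbkdf2_sha256":
        raise ValueError(f"unsupported algorithm: {algorithm}")
    expected = base64.b64decode(hash_b64)
    candidate = hashlib.pbkdf2_hmac(
        "sha256", password.encode("utf-8"), salt.encode("utf-8"), int(iterations)
    )
    # Constant-time comparison, as Django's own check_password does.
    return hmac.compare_digest(candidate, expected)
```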
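`build_scraper_params` now runs the merged filters through `_cleanup_empty_values`, and the count endpoint wraps the result as `{"query": {"filterProperty": ...}}` before posting to the new Fluximmo URL. The standalone illustration below shows that pruning and wrapping; the function body mirrors `_cleanup_empty_values` from the diff, while the filter values are made up for the example.

```python
# Standalone illustration of the empty-value pruning added in build_scraper_params
# and of the new count payload shape {"query": {"filterProperty": ...}}.
from typing import Any


def _cleanup_empty_values(data: Any) -> Any:
    """Recursively drop None, empty lists, and empty dicts."""
    if isinstance(data, dict):
        cleaned = {}
        for key, value in data.items():
            cleaned_value = _cleanup_empty_values(value)
            if cleaned_value is not None and cleaned_value != [] and cleaned_value != {}:
                cleaned[key] = cleaned_value
        return cleaned if cleaned else None
    if isinstance(data, list):
        cleaned = [item for item in (_cleanup_empty_values(i) for i in data) if item is not None]
        return cleaned if cleaned else None
    return data


raw_filters = {
    "location": [{"department": "35"}],
    "type": ["CLASS_HOUSE"],
    "offer": [{"type": "OFFER_BUY"}],
    "price": {"latest": {"value": {}}},  # empty range -> pruned
    "tags": [],                          # empty list  -> pruned
}

flux_payload = {"query": {"filterProperty": _cleanup_empty_values(raw_filters) or {}}}
print(flux_payload)
# {'query': {'filterProperty': {'location': [{'department': '35'}],
#                               'type': ['CLASS_HOUSE'],
#                               'offer': [{'type': 'OFFER_BUY'}]}}}
```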
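`POST /users` and `PUT /users/<id>` now hash the incoming plaintext password server-side and accept an optional `profile_ids` list, which is turned into rows in the subscription table and echoed back as a `profiles` array. A hedged request sketch follows; the excerpt does not show the full set of required user fields, so the JSON body, the UUID, the host, and the token are placeholders.

```python
# Hedged sketch of creating a user with profile subscriptions via the updated API.
# The required field list is not fully visible in the diff, so the body, UUID,
# host, and token below are illustrative only.
import requests

resp = requests.post(
    "http://localhost:3000/users",
    headers={"Authorization": "Bearer <API_TOKEN>"},
    json={
        "username": "jane.doe",
        "password": "plain-text-password",  # hashed server-side to pbkdf2_sha256$...
        "first_name": "Jane",
        "last_name": "Doe",
        # Each UUID must reference an existing investment profile.
        "profile_ids": ["11111111-1111-1111-1111-111111111111"],
    },
    timeout=15,
)
print(resp.status_code)         # 201 when creation succeeds
print(resp.json()["profiles"])  # subscribed profiles returned with the user
```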