Update app.py
app.py
@@ -1,81 +1,37 @@
 import os
 import logging
-import yaml
 from flask import Flask, request, jsonify, make_response
-# Import the Presidio classes we need
-from presidio_analyzer import AnalyzerEngine, RecognizerRegistry, PatternRecognizer, Pattern
-from presidio_analyzer.nlp_engine import NlpEngineProvider
+from presidio_analyzer import AnalyzerEngine
 
 # Logging configuration
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 logger = logging.getLogger(__name__)
 
-# --- MANUAL, EXPLICIT CONFIGURATION LOADING ---
-CONFIG_FILE_PATH = os.environ.get("PRESIDIO_ANALYZER_CONFIG_FILE", "conf/default.yaml")
-logger.info(f"Loading configuration from: {CONFIG_FILE_PATH}")
-
-config = {}
-try:
-    with open(CONFIG_FILE_PATH, 'r', encoding='utf-8') as f:
-        config = yaml.safe_load(f)
-    logger.info("Configuration file loaded successfully.")
-except Exception as e:
-    logger.exception(f"Could not load or parse configuration file at {CONFIG_FILE_PATH}")
-    # On failure, keep going with an empty config so we do not crash, but the service will be limited.
-    config = {}
-
-# Read the supported languages from the config so they can be used everywhere
-supported_languages_from_config = config.get("supported_languages", ["en"])
-logger.info(f"Languages supported according to config: {supported_languages_from_config}")
-
-# Create the NLP engine provider
-logger.info("Creating NLP engine provider...")
-nlp_engine_provider = NlpEngineProvider(nlp_configuration=config.get("nlp_engine_configuration"))
-nlp_engine = nlp_engine_provider.create_engine()
-logger.info(f"NLP engine created with models for: {nlp_engine.get_supported_languages()}")
-
-# Create the recognizer registry
-logger.info("Creating and populating recognizer registry...")
-registry = RecognizerRegistry()
-# Initialize the registry with ALL supported languages
-registry.load_predefined_recognizers(languages=supported_languages_from_config)
-
-# Add the custom recognizers defined in the YAML
-custom_recognizers_conf = config.get("recognizers", [])
-for recognizer_conf in custom_recognizers_conf:
-    patterns = [Pattern(name=p['name'], regex=p['regex'], score=p['score']) for p in recognizer_conf['patterns']]
-    custom_recognizer = PatternRecognizer(
-        supported_entity=recognizer_conf['entity_name'],
-        name=recognizer_conf['name'],
-        supported_language=recognizer_conf['supported_language'],
-        patterns=patterns,
-        context=recognizer_conf.get('context')
-    )
-    registry.add_recognizer(custom_recognizer)
-    logger.info(f"Loaded custom recognizer: {custom_recognizer.name}")
-
-# Prepare the allow_list (a plain list of words)
-allow_list_config = config.get("allow_list", [])
-allow_list_terms = [item if isinstance(item, str) else item.get('text') for item in allow_list_config if item]
-if allow_list_terms:
-    logger.info(f"Prepared {len(allow_list_terms)} terms for the allow list.")
-
 # Initialize the Flask application
 app = Flask(__name__)
 
-# Initialize the Presidio Analyzer engine
-logger.info("Initializing AnalyzerEngine with custom configuration...")
-analyzer = AnalyzerEngine(
-    nlp_engine=nlp_engine,
-    registry=registry,
-    supported_languages=supported_languages_from_config,  # make sure the languages are consistent here as well
-    default_score_threshold=config.get("ner_model_configuration", {}).get("confidence_threshold", {}).get("default", 0.35)
-)
-logger.info("AnalyzerEngine initialized successfully.")
+# --- LET PRESIDIO HANDLE THE INITIALIZATION ---
+# When initialized without arguments, the AnalyzerEngine will automatically:
+# 1. Look up the PRESIDIO_ANALYZER_CONFIG_FILE environment variable.
+# 2. Read the configuration file (your default.yaml).
+# 3. Create the NLP engine and the recognizer registry, and load the custom
+#    recognizers and the allow_list, keeping the languages consistent.
+try:
+    logger.info("Initializing AnalyzerEngine using configuration from environment variable...")
+    analyzer = AnalyzerEngine()
+    logger.info("AnalyzerEngine initialized successfully.")
+    # For debugging, list the recognizers loaded for a specific language
+    logger.info(f"Loaded recognizers for 'fr': {[rec.name for rec in analyzer.get_recognizers(language='fr')]}")
+except Exception as e:
+    logger.exception("FATAL: Error initializing AnalyzerEngine from configuration.")
+    analyzer = None
 
 @app.route('/analyze', methods=['POST'])
 def analyze_text():
+    if not analyzer:
+        return jsonify({"error": "Analyzer engine not initialized"}), 500
 
     try:
         data = request.get_json(force=True)
         text_to_analyze = data.get("text", "")
@@ -84,11 +40,10 @@ def analyze_text():
         if not text_to_analyze:
             return jsonify({"error": "text field is missing or empty"}), 400
 
-        # Pass the list of words to ignore directly via the 'allow_list' parameter
+        # No need to pass the allow_list here any more; the Analyzer has already loaded it
         results = analyzer.analyze(
             text=text_to_analyze,
-            language=language,
-            allow_list=allow_list_terms
+            language=language
         )
 
         response_data = [res.to_dict() for res in results]
|
|||||||
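
Because AnalyzerEngine() is now constructed at module import time, the configuration must be resolvable before app.py is imported; per the comment block in the new code, the engine finds it through the PRESIDIO_ANALYZER_CONFIG_FILE environment variable (the removed loader defaulted to conf/default.yaml). A minimal smoke-test sketch using Flask's built-in test client, assuming the module is importable as app and that the elided portion of analyze_text() reads a "language" field from the JSON body:

import os

# Hypothetical local path; in the deployed container this is expected to be set already.
os.environ.setdefault("PRESIDIO_ANALYZER_CONFIG_FILE", "conf/default.yaml")

import app as service  # the module-level AnalyzerEngine() runs during this import

client = service.app.test_client()

# Empty text should hit the 400 branch shown in the diff (assuming the engine initialized).
resp = client.post("/analyze", json={"text": "", "language": "fr"})
assert resp.status_code == 400

# A normal request; each item in the response mirrors RecognizerResult.to_dict().
resp = client.post("/analyze", json={"text": "Contact : jean.dupont@example.com", "language": "fr"})
print(resp.status_code, resp.get_json())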
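
For completeness, a sketch of calling the running service over HTTP. The host and port are placeholders (Flask's development server defaults to 5000, but the actual port depends on how this service is started or containerized), and the "language" field is again an assumption about the elided request parsing:

import requests

resp = requests.post(
    "http://localhost:5000/analyze",  # placeholder host/port
    json={"text": "Jean Dupont habite à Paris.", "language": "fr"},
    timeout=10,
)
resp.raise_for_status()
for entity in resp.json():
    # Typical RecognizerResult.to_dict() fields include entity_type, start, end and score.
    print(entity)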