Update app.py
Browse files
app.py
CHANGED
|
@@ -15,8 +15,10 @@ load_dotenv()
|
|
| 15 |
UPLOAD_DIR = "uploaded_data"
|
| 16 |
os.makedirs(UPLOAD_DIR, exist_ok=True)
|
| 17 |
|
| 18 |
-
|
|
|
|
| 19 |
SQL_DB_PATH = "data.db"
|
|
|
|
| 20 |
HUGGINGFACE_API_KEY = os.getenv("HUGGINGFACE_API_KEY")
|
| 21 |
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
|
| 22 |
|
|
@@ -43,53 +45,46 @@ query_cache = {}
|
|
| 43 |
history_log = []
|
| 44 |
recent_history = []
|
| 45 |
show_history_flag = False
|
|
|
|
| 46 |
|
| 47 |
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
| 48 |
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
|
|
|
|
|
|
| 53 |
else:
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
def load_uploaded_csv_and_create_db(uploaded_file):
|
| 63 |
-
if uploaded_file is None:
|
| 64 |
-
return None
|
| 65 |
-
|
| 66 |
-
print(f"[UPLOAD] CSV recebido: {uploaded_file}")
|
| 67 |
-
|
| 68 |
-
engine = create_engine(f"sqlite:///{SQL_DB_PATH}")
|
| 69 |
-
df = pd.read_csv(uploaded_file, sep=";", on_bad_lines="skip")
|
| 70 |
df.to_sql("anomalia_vendas", engine, index=False, if_exists="replace")
|
| 71 |
-
|
| 72 |
-
print(f"CSV carregado: {len(df)} linhas, {len(df.columns)} colunas")
|
| 73 |
-
print(f"[DEBUG] Novo engine criado: {engine}")
|
| 74 |
return engine
|
| 75 |
|
| 76 |
-
|
| 77 |
-
|
| 78 |
-
|
| 79 |
-
|
| 80 |
-
|
| 81 |
-
|
| 82 |
-
|
| 83 |
-
|
| 84 |
-
|
| 85 |
-
|
| 86 |
-
|
| 87 |
-
|
| 88 |
-
)
|
| 89 |
-
print("[SQL_AGENT] Atualizado com novo banco de dados.")
|
| 90 |
-
|
| 91 |
-
refresh_sql_agent()
|
| 92 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 93 |
|
| 94 |
def generate_initial_context(db_sample):
|
| 95 |
return (
|
|
@@ -224,15 +219,6 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
|
|
| 224 |
|
| 225 |
history_output = gr.JSON()
|
| 226 |
history_btn.click(toggle_history, inputs=[], outputs=history_output)
|
| 227 |
-
|
| 228 |
-
def handle_csv_upload(file):
|
| 229 |
-
global engine
|
| 230 |
-
try:
|
| 231 |
-
engine = load_uploaded_csv_and_create_db(file)
|
| 232 |
-
if engine is not None:
|
| 233 |
-
refresh_sql_agent()
|
| 234 |
-
except Exception as e:
|
| 235 |
-
print(f"[ERRO] Falha ao processar novo CSV: {e}")
|
| 236 |
|
| 237 |
csv_file.change(handle_csv_upload, inputs=csv_file, outputs=csv_file)
|
| 238 |
|
|
|
|
# Directory where user-uploaded CSV files are persisted.
UPLOAD_DIR = "uploaded_data"
os.makedirs(UPLOAD_DIR, exist_ok=True)

# Bundled default dataset, and the fixed path an uploaded CSV is saved under.
DEFAULT_CSV_PATH = "anomalia_vendas.csv"
UPLOADED_CSV_PATH = os.path.join(UPLOAD_DIR, "tabela.csv")
# SQLite file the active CSV is materialised into.
SQL_DB_PATH = "data.db"

# API credentials read from the environment (populated by load_dotenv above).
HUGGINGFACE_API_KEY = os.getenv("HUGGINGFACE_API_KEY")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
|
| 24 |
|
|
|
|
# In-memory chat/history state; populated elsewhere in the file.
history_log = []
recent_history = []
show_history_flag = False
# Global SQLAlchemy engine: initialised at startup, rebound by handle_csv_upload.
engine = None

logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
| 51 |
|
| 52 |
+
# === Active CSV selection ===
def get_active_csv_path():
    """Return the path of the CSV to load: the uploaded file if one exists,
    otherwise the bundled default dataset."""
    # Guard clause: no uploaded file on disk -> fall back to the default CSV.
    if not os.path.exists(UPLOADED_CSV_PATH):
        logging.info(f"[CSV] Usando arquivo CSV padrão: {DEFAULT_CSV_PATH}")
        return DEFAULT_CSV_PATH
    logging.info(f"[CSV] Usando arquivo CSV carregado: {UPLOADED_CSV_PATH}")
    return UPLOADED_CSV_PATH
|
| 61 |
+
|
| 62 |
+
def create_engine_and_load_db(csv_path, sql_db_path, table_name="anomalia_vendas", sep=";"):
    """Load a CSV file into a SQLite database and return the SQLAlchemy engine.

    The CSV is parsed *before* the engine is created, so a malformed file
    aborts without touching the database. The destination table is replaced
    wholesale on every call.

    Args:
        csv_path: Path of the CSV file to import.
        sql_db_path: Path of the SQLite database file to (re)populate.
        table_name: Destination table name (default matches the app's table).
        sep: CSV field separator (default ';', as produced upstream).

    Returns:
        The SQLAlchemy engine bound to ``sql_db_path``.
    """
    logging.info(f"[ENGINE] Criando engine com CSV: {csv_path}")
    # Parse first so read errors leave no side effects on the DB file.
    df = pd.read_csv(csv_path, sep=sep, on_bad_lines="skip")
    engine = create_engine(f"sqlite:///{sql_db_path}")
    df.to_sql(table_name, engine, index=False, if_exists="replace")
    logging.info(f"[ENGINE] Banco criado com sucesso com {len(df)} linhas e {len(df.columns)} colunas")
    return engine
|
| 69 |
|
| 70 |
+
# === CSV upload from the UI ===
def handle_csv_upload(file):
    """Persist an uploaded CSV, rebuild the SQLite DB and rebind the globals.

    Args:
        file: The value delivered by the Gradio ``change`` event — either a
            file-like object or a temp-file path string, depending on the
            Gradio version; may be ``None`` when the upload is cleared.
    """
    global engine, db
    # The change event also fires with None when the user clears the upload.
    if file is None:
        return
    try:
        # Gradio may hand over a file-like object or just a filepath string
        # (NOTE(review): depends on the installed Gradio version — confirm).
        if hasattr(file, "read"):
            data = file.read()
        else:
            with open(file, "rb") as src:
                data = src.read()
        with open(UPLOADED_CSV_PATH, "wb") as f:
            f.write(data)
        logging.info(f"[UPLOAD] CSV salvo como: {UPLOADED_CSV_PATH}")
        engine = create_engine_and_load_db(UPLOADED_CSV_PATH, SQL_DB_PATH)
        db = SQLDatabase(engine=engine)
        logging.info("[UPLOAD] Novo banco carregado e DB atualizado.")
    except Exception as e:
        # Best-effort: keep serving the previous dataset on failure.
        logging.error(f"[ERRO] Falha ao processar novo CSV: {e}")
|
|
|
|
|
|
|
|
|
|
|
|
|
| 82 |
|
| 83 |
+
# === Initialization ===
# Build the database from the active CSV (an uploaded file wins over the
# default) and wire the LangChain SQL agent on top of it.
engine = create_engine_and_load_db(get_active_csv_path(), SQL_DB_PATH)
db = SQLDatabase(engine=engine)
llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
# NOTE(review): handle_csv_upload rebinds `engine`/`db` but not `sql_agent`,
# so the agent keeps the SQLDatabase it was built with; it still reads fresh
# data only because both engines point at the same SQLite file — confirm.
sql_agent = create_sql_agent(llm, db=db, agent_type="openai-tools", verbose=True, max_iterations=40, return_intermediate_steps=True)
|
| 88 |
|
| 89 |
def generate_initial_context(db_sample):
|
| 90 |
return (
|
|
|
|
| 219 |
|
| 220 |
history_output = gr.JSON()
|
| 221 |
history_btn.click(toggle_history, inputs=[], outputs=history_output)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 222 |
|
| 223 |
csv_file.change(handle_csv_upload, inputs=csv_file, outputs=csv_file)
|
| 224 |
|