|
import argparse
import os

import numpy as np
import requests
from fastapi import FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from fastapi.staticfiles import StaticFiles
from huggingface_hub import InferenceClient
from sentence_transformers import SentenceTransformer
|
|
|
# Runtime configuration: environment variables supply defaults, CLI flags override.
# NOTE(review): the env var is named API_URL but is used as a bind host — confirm intent.
HOST = os.environ.get("API_URL", "0.0.0.0")

# Convert explicitly: os.environ values are strings, and relying on argparse to
# coerce a string default through `type=int` is subtle and easy to break.
PORT = int(os.environ.get("PORT", 7860))

parser = argparse.ArgumentParser()
parser.add_argument("--host", default=HOST)
parser.add_argument("--port", type=int, default=PORT)
# Bug fix: `action="store_true"` combined with `default=True` made the flag a
# no-op (reload could never be turned off). BooleanOptionalAction keeps the
# default and the `--reload` spelling while also providing `--no-reload`.
parser.add_argument("--reload", action=argparse.BooleanOptionalAction, default=True)
parser.add_argument("--ssl_certfile")
parser.add_argument("--ssl_keyfile")
args = parser.parse_args()
|
|
|
# Application instance plus permissive CORS so a separately-hosted frontend can
# call the API endpoints below.
# NOTE(review): allow_origins=["*"] together with allow_credentials=True is
# rejected by browsers for credentialed requests — consider listing explicit
# origins if cookies/auth headers are actually needed.
app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
|
|
|
|
# Bug fix: a second `app = FastAPI()` here used to rebind the name and silently
# discard the CORS-configured application created above; removed so the
# middleware registered earlier stays active.

# Multilingual sentence-embedding model, loaded once at startup and shared by
# the /api/embed endpoint.
embedder = SentenceTransformer('sentence-transformers/distiluse-base-multilingual-cased-v1')
|
|
|
@app.post("/api/embed")
async def embed(request: Request):
    """Encode a batch of texts with the module-level SentenceTransformer.

    Expects a JSON body of the form ``{"texts": [...]}`` (missing key is
    treated as an empty batch) and returns ``{"embeddings": [[...], ...]}``.
    """
    payload = await request.json()
    vectors = embedder.encode(payload.get("texts", []))
    return {"embeddings": vectors.tolist()}
|
|
|
@app.get("/invert")
async def invert(text: str):
    """Return the query string alongside its character-wise reversal."""
    reversed_text = "".join(reversed(text))
    return {"original": text, "inverted": reversed_text}
|
|
|
# Hugging Face API token. Bug fix: the code read REACT_APP_HF_TOKEN while the
# error message told operators to set HF_TOKEN. Prefer HF_TOKEN (matching the
# message) and keep the legacy frontend-style name for backward compatibility.
HF_TOKEN = os.getenv("HF_TOKEN") or os.getenv("REACT_APP_HF_TOKEN")
if not HF_TOKEN:
    raise RuntimeError("Le token Hugging Face (HF_TOKEN) n'est pas défini dans les variables d'environnement.")

# Inference client used by /api/chat; routed through the "novita" provider.
hf_client = InferenceClient(
    provider="novita",
    api_key=HF_TOKEN,
)
|
|
|
@app.post("/api/chat")
async def chat(request: Request):
    """Answer a user message with a hosted Mistral chat model.

    Expects a JSON body ``{"message": "..."}`` and returns
    ``{"response": "..."}``.

    Raises:
        HTTPException 400: when 'message' is missing or blank.
        HTTPException 502: when the upstream inference call fails.
    """
    data = await request.json()
    user_message = data.get("message", "").strip()
    if not user_message:
        # Bug fix: HTTPException was referenced without being imported, so this
        # path raised NameError (a 500) instead of a clean 400. The import is
        # now provided at the top of the file.
        raise HTTPException(status_code=400, detail="Le champ 'message' est requis.")

    try:
        completion = hf_client.chat.completions.create(
            model="mistralai/Mistral-7B-Instruct-v0.3",
            messages=[
                {"role": "system", "content": "Tu es un assistant médical spécialisé en schizophrénie."},
                {"role": "user", "content": user_message},
            ],
            max_tokens=512,
            temperature=0.7,
        )
        return {"response": completion.choices[0].message.content}
    except Exception as e:
        # Surface upstream inference failures as 502 Bad Gateway, preserving
        # the original exception as the cause for debugging.
        raise HTTPException(status_code=502, detail=f"Erreur d'inférence HF : {e}") from e
|
|
|
|
|
@app.get("/data")
async def get_data():
    """Return 100 uniform random samples in [0, 1) as a JSON payload."""
    samples = np.random.rand(100)
    return JSONResponse({"data": samples.tolist()})
|
|
|
|
|
# Serve the frontend from ./static last, so the API routes registered above
# take precedence over the catch-all static mount at "/".
app.mount("/", StaticFiles(directory="static", html=True), name="static")
|
|
|
if __name__ == "__main__":
    import uvicorn

    # Echo the effective configuration before starting the server.
    print(args)
    # The import string "app:app" assumes this module is saved as app.py;
    # uvicorn requires the string form (not the app object) for --reload to work.
    uvicorn.run(
        "app:app",
        host=args.host,
        port=args.port,
        reload=args.reload,
        ssl_certfile=args.ssl_certfile,
        ssl_keyfile=args.ssl_keyfile,
    )