Spaces:
Sleeping
Sleeping
Upload app.py with huggingface_hub
Browse files
app.py
CHANGED
@@ -498,13 +498,13 @@ def agentic_rag(query: str):
|
|
498 |
#================================ Guardrails ===========================#
|
499 |
llama_guard_client = Groq(api_key=llama_api_key)
|
500 |
# Function to filter user input with Llama Guard
|
501 |
- def filter_input_with_llama_guard(user_input, model="llama-guard-4-12b"):
|
502 |
"""
|
503 |
Filters user input using Llama Guard to ensure it is safe.
|
504 |
|
505 |
Parameters:
|
506 |
- user_input: The input provided by the user.
|
507 |
-     - model: The Llama Guard model to be used for filtering (default is "llama-guard-4-12b").
|
508 |
|
509 |
Returns:
|
510 |
- The filtered and safe input.
|
@@ -964,13 +964,13 @@ def agentic_rag(query: str):
|
|
964 |
#================================ Guardrails ===========================#
|
965 |
llama_guard_client = Groq(api_key=llama_api_key)
|
966 |
# Function to filter user input with Llama Guard
|
967 |
- def filter_input_with_llama_guard(user_input, model="llama-guard-4-12b"):
|
968 |
"""
|
969 |
Filters user input using Llama Guard to ensure it is safe.
|
970 |
|
971 |
Parameters:
|
972 |
- user_input: The input provided by the user.
|
973 |
-     - model: The Llama Guard model to be used for filtering (default is "llama-guard-4-12b").
|
974 |
|
975 |
Returns:
|
976 |
- The filtered and safe input.
|
|
|
498 |
#================================ Guardrails ===========================#
|
499 |
llama_guard_client = Groq(api_key=llama_api_key)
|
500 |
# Function to filter user input with Llama Guard
|
501 |
+ def filter_input_with_llama_guard(user_input, model="meta-llama/llama-guard-4-12b"):
|
502 |
"""
|
503 |
Filters user input using Llama Guard to ensure it is safe.
|
504 |
|
505 |
Parameters:
|
506 |
- user_input: The input provided by the user.
|
507 |
+     - model: The Llama Guard model to be used for filtering (default is "meta-llama/llama-guard-4-12b").
|
508 |
|
509 |
Returns:
|
510 |
- The filtered and safe input.
|
|
|
964 |
#================================ Guardrails ===========================#
|
965 |
llama_guard_client = Groq(api_key=llama_api_key)
|
966 |
# Function to filter user input with Llama Guard
|
967 |
+ def filter_input_with_llama_guard(user_input, model="meta-llama/llama-guard-4-12b"):
|
968 |
"""
|
969 |
Filters user input using Llama Guard to ensure it is safe.
|
970 |
|
971 |
Parameters:
|
972 |
- user_input: The input provided by the user.
|
973 |
+     - model: The Llama Guard model to be used for filtering (default is "meta-llama/llama-guard-4-12b").
|
974 |
|
975 |
Returns:
|
976 |
- The filtered and safe input.
|