6Genix committed on
Commit
945ae37
·
1 Parent(s): ebf362e

Refined Multi-Agent XAI Demo by hiding explicit prompt references, improving final plan clarity, and enhancing conversational flow with contextual variations.

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -66,7 +66,7 @@ def summarize_conversation(conversation):
66
  if speaker == "Engineer" or speaker == "Analyst":
67
  key_points.append(f"- {speaker}: {text}")
68
  summary += "\n".join(key_points[-6:]) # Include only the last 3 turns each
69
- summary += "\n\nThis collaborative plan integrates technical and analytical insights."
70
  return summary
71
 
72
  ##############################################################################
@@ -88,8 +88,8 @@ if st.button("Generate Responses"):
88
  user_text = st.session_state.user_input
89
  st.session_state.conversation = [("User", user_text)] # Clear and restart conversation
90
 
91
- engineer_prompt_base = f"The user asked: {user_text}. Provide a concise technical solution."
92
- analyst_prompt_base = "Respond with complementary data-driven insights."
93
 
94
  for turn in range(3):
95
  # Engineer generates a response
@@ -107,7 +107,7 @@ if st.button("Generate Responses"):
107
  # Analyst generates a response based on engineer's output
108
  with st.spinner(f"Analyst is formulating response {turn + 1}..."):
109
  analyst_resp = generate_response(
110
- prompt=f"Engineer said: {engineer_resp}. {analyst_prompt_base}",
111
  tokenizer=tokenizerA,
112
  model=modelA
113
  )
 
66
  if speaker == "Engineer" or speaker == "Analyst":
67
  key_points.append(f"- {speaker}: {text}")
68
  summary += "\n".join(key_points[-6:]) # Include only the last 3 turns each
69
+ summary += "\n\nThis collaborative plan integrates technical and analytical insights into an actionable framework."
70
  return summary
71
 
72
  ##############################################################################
 
88
  user_text = st.session_state.user_input
89
  st.session_state.conversation = [("User", user_text)] # Clear and restart conversation
90
 
91
+ engineer_prompt_base = f"Given the problem: {user_text}, provide a concise and actionable technical solution."
92
+ analyst_prompt_base = "Based on the engineer's suggestion, provide complementary data-driven recommendations."
93
 
94
  for turn in range(3):
95
  # Engineer generates a response
 
107
  # Analyst generates a response based on engineer's output
108
  with st.spinner(f"Analyst is formulating response {turn + 1}..."):
109
  analyst_resp = generate_response(
110
+ prompt=f"Engineer suggested: {engineer_resp}. {analyst_prompt_base}",
111
  tokenizer=tokenizerA,
112
  model=modelA
113
  )