lunarflu HF Staff committed on
Commit a12ef2a · 1 Parent(s): 9b11c94

Update app.py

Files changed (1)
  1. app.py +97 -3
app.py CHANGED
@@ -170,9 +170,103 @@ async def on_voice_state_update(member, before, after):
     await bot.log_channel.send(embed=embed)
 
 # bot stuff ---------------------------------------------------------------------------------------------------------------------------------------------------------------
-
-# send messages
-#
+#----------------------------------------------------------------------------------------------------------------------------
+chathistory = None
+falcon_users = []
+#falcon_threads = []
+falcon_dictionary = {}
+falcon_userid_threadid_dictionary = {}
+
+@bot.command()
+async def falcon(ctx, *, prompt: str):
+    # todo: need to be careful with these, rework into something simpler
+    try:
+        global falcon_users
+        #global falcon_threads # deprecated
+        global falcon_dictionary
+        global falcon_userid_threadid_dictionary
+
+        # dict[generation, authorid]
+        # dict[threadid, authorid]
+
+        if not ctx.author.bot:
+            if ctx.channel.id != 1119313248056004729: # initial thread creation inside #falcon
+                #if ctx.author.id not in falcon_users: # deprecated
+                if ctx.author.id not in falcon_userid_threadid_dictionary:
+                    #await ctx.message.add_reaction('<a:loading:1114111677990981692>')
+                    thread = await ctx.message.create_thread(name=f'{ctx.author}')
+                    #falcon_users = [ctx.author.id] + falcon_users # deprecated
+                    #falcon_threads = [thread.id] + falcon_threads # deprecated
+                    await thread.send(f"**(HuggingBot is a highly experimental beta; The Falcon model can be found here: https://huggingface.co/spaces/HuggingFaceH4/falcon-chat)**")
+
+                    # initial generation here
+                    chathistory = falconclient.predict(
+                        fn_index=5
+                    ) # []
+                    instructions = "The following is a conversation between a highly knowledgeable and intelligent AI assistant, called Falcon, and a human user, called User. In the following interactions, User and Falcon will converse in natural language, and Falcon will answer User's questions. Falcon was built to be respectful, polite and inclusive. Falcon was built by the Technology Innovation Institute in Abu Dhabi. Falcon will never decline to answer a question, and always attempts to give an answer that User would be satisfied with. It knows a lot, and always tells the truth. The conversation begins."
+                    job = falconclient.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1) # This is not blocking, similar to run_in_executor (but better)
+                    while job.done() == False:
+                        status = job.status() # could be spammy, let's test anyways
+                        #print(status)
+                    else:
+                        file_paths = job.outputs()
+                        full_generation = file_paths[-1] # tmp12345678.json
+                        with open(full_generation, 'r') as file:
+                            data = json.load(file)
+                            output_text = data[-1][-1] # we output this as the bot
+
+                    falcon_dictionary[ctx.author.id] = full_generation # 1234567890: tmp12345678.json
+                    falcon_userid_threadid_dictionary[ctx.author.id] = thread.id
+
+                    print(output_text)
+                    await thread.send(f"{output_text}")
+                    #await ctx.message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
+                #elif ctx.author.id in falcon_users: # deprecated
+                elif ctx.author.id in falcon_userid_threadid_dictionary:
+                    # get the threadid, grab the last message in that thread, link to that message
+                    thread_id = falcon_userid_threadid_dictionary[ctx.author.id]
+                    thread_link = f"https://discord.com/channels/879548962464493619/1119313248056004729/{thread_id}"
+                    await ctx.reply(f"{ctx.author.mention}, you already have an existing conversation here {thread_link}! Use !falconclear in the #falcon channel to start a new one.")
+            #------------------------------------
+            # post all other generations here
+            #if ctx.channel.id in falcon_threads: # deprecated
+            if ctx.channel.id in falcon_userid_threadid_dictionary.values():
+                if ctx.channel.id == falcon_userid_threadid_dictionary[ctx.author.id]:
+                    await ctx.message.add_reaction('<a:loading:1114111677990981692>')
+                    chathistory = falcon_dictionary[ctx.author.id]
+
+                    instructions = "The following is a conversation between a highly knowledgeable and intelligent AI assistant, called Falcon, and a human user, called User. In the following interactions, User and Falcon will converse in natural language, and Falcon will answer User's questions. Falcon was built to be respectful, polite and inclusive. Falcon was built by the Technology Innovation Institute in Abu Dhabi. Falcon will never decline to answer a question, and always attempts to give an answer that User would be satisfied with. It knows a lot, and always tells the truth. The conversation begins."
+                    job = falconclient.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1) # This is not blocking, similar to run_in_executor (but better)
+                    while job.done() == False:
+                        status = job.status() # could be spammy, let's test anyways
+                        #print(status)
+                    else:
+                        file_paths = job.outputs()
+                        full_generation = file_paths[-1] # tmp12345678.json
+                        with open(full_generation, 'r') as file:
+                            data = json.load(file)
+                            output_text = data[-1][-1] # we output this as the bot
+                    falcon_dictionary[ctx.author.id] = full_generation
+                    print(output_text)
+                    await ctx.reply(f"{output_text}")
+                    await ctx.message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
+
+    except Exception as e:
+        print(f"Error: {e}")
+        await ctx.reply(f"{e} cc <@811235357663297546> (falconprivate error)")
+        await ctx.message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
+        await ctx.message.add_reaction('<:disagree:1098628957521313892>')
+#----------------------------------------------------------------------------------------------------------------------------
+@bot.command()
+async def falconclear(ctx):
+    if not ctx.author.bot:
+        if ctx.channel.id == 1119313248056004729:
+            if ctx.author.id in falcon_userid_threadid_dictionary:
+                if ctx.author.id in falcon_dictionary:
+                    del falcon_userid_threadid_dictionary[ctx.author.id]
+                    del falcon_dictionary[ctx.author.id]
+                    await ctx.reply(f"{ctx.author.mention}'s conversation has been cleared. Feel free to start a new one!")
+#----------------------------------------------------------------------------------------------------------------------------
 
 @bot.event
 async def on_ready():
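
Note on the wait loop added in this commit: `while job.done() == False: status = job.status()` spins on the current thread, so while a Falcon generation is in flight the discord.py event loop gets no chance to heartbeat or serve other commands. A minimal sketch of an alternative, assuming the same gradio_client `falconclient` the commit uses and a hypothetical helper named `wait_for_job` (not part of this commit), would yield control between polls:

import asyncio
import json

from gradio_client import Client

# Assumption: same Space endpoint the committed code talks to via `falconclient`.
falconclient = Client("HuggingFaceH4/falcon-chat")

async def wait_for_job(job, poll_seconds: float = 0.5):
    # Hypothetical helper: poll the gradio_client Job without blocking the event loop.
    while not job.done():
        await asyncio.sleep(poll_seconds)  # yield to discord.py between checks
    return job.outputs()

async def generate(prompt: str, chathistory, instructions: str):
    # Same submit() call as in the commit; only the waiting strategy differs.
    job = falconclient.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1)
    file_paths = await wait_for_job(job)
    full_generation = file_paths[-1]      # path to the temporary JSON chat history
    with open(full_generation, 'r') as file:
        data = json.load(file)
    return full_generation, data[-1][-1]  # (new history file, latest bot reply)

The rest of the command (thread creation, the per-user dictionaries, the loading reaction) would stay as committed; only the busy-wait changes.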