lunarflu HF Staff committed on
Commit 2a46d18 · 1 Parent(s): bb586e3

Update app.py

Files changed (1)
  1. app.py +207 -136
app.py CHANGED
@@ -23,9 +23,7 @@ import concurrent.futures
import multiprocessing

import glob
- #-------------------------------------------------------------------------------------------------------------------------------------------------------------
-
-
+ #-------------------------------------------------------------------------------------------------------------------------------------
MY_GUILD = discord.Object(id=1077674588122648679) # replace with your guild id
HF_TOKEN = os.getenv('HF_TOKEN')
DISCORD_TOKEN = os.environ.get("DISCORD_TOKEN", None)
@@ -48,7 +46,7 @@ instructions = "The following is a conversation between a highly knowledgeable "
"with. It knows a lot, and always tells the truth. The conversation begins."


- #-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ #-------------------------------------------------------------------------------------------------------------------------------------
# myclient class so we can run this in thread
# sets up commands and the tree stuff for slash commands
# Allows commands to work instantly
@@ -66,122 +64,44 @@ class MyClient(discord.Client):
intents = discord.Intents.all() # could be default maybe
client = MyClient(intents=intents)

- #-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ #-------------------------------------------------------------------------------------------------------------------------------------
@client.event
async def on_ready():
    print(f'Logged in as {client.user} (ID: {client.user.id})')
    print('------')
- #-------------------------------------------------------------------------------------------------------------------------------------------------------------
-
-
-
- # /falcon -> create thread, store it
- # on_message -> if channelid in falcon_userid_threadid_dictionary
- # -> if userid in falcon_userid_threadid_dictionary
- # -> if falcon_userid_threadid_dictionary[userid] == thread_id
-
-
- @client.tree.command()
- @app_commands.describe(
-     prompt='Enter some text to chat with the bot! Like this: /falcon Hello, how are you?')
- async def falcon(interaction: discord.Interaction, prompt: str):
-     try:
-         global chathistory # ?
-         global falcon_userid_threadid_dictionary # tracks userid-thread existence
-         global instructions
-         global threadid_conversation
-         global FALCON_CHANNEL_ID
-
-         if not interaction.user.bot:
-             if interaction.channel.id == FALCON_CHANNEL_ID: # 1079459939405279232= test
-                 await interaction.response.send_message(f"Working on it!")
-                 channel = interaction.channel
-                 # 1
-                 message = await channel.send(f"Creating thread...")
-                 thread = await message.create_thread(name=f'{interaction.user}') # interaction.user
-                 await thread.send(f"[DISCLAIMER: HuggingBot is a **highly experimental** beta feature; The Falcon " \
-                     f"model and system prompt can be found here: https://huggingface.co/spaces/HuggingFaceH4/falcon-chat]")
-                 # generation
-                 chathistory = falconclient.predict(
-                     fn_index=5
-                 ) # []
-                 job = falconclient.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1) # This is not blocking, similar to run_in_executor (but better)
-                 while job.done() == False:
-                     status = job.status()
-                     #print(status)
-                 else:
-                     file_paths = job.outputs()
-                     full_generation = file_paths[-1] # tmp12345678.json
-                     with open(full_generation, 'r') as file:
-                         data = json.load(file)
-                         output_text = data[-1][-1] # we output this as the bot
-
-                     threadid_conversation[thread.id] = full_generation
-
-                     falcon_userid_threadid_dictionary[thread.id] = interaction.user.id
-
-                     print(output_text)
-                     await thread.send(f"{output_text}")
-
-     except Exception as e:
-         print(f"Error: {e}")
-         #await thread.send(f"{e} cc <@811235357663297546> (falconprivate error)")
- #--------------------------------------------------------------------------------
- @client.event
- async def on_message(message):
-     try:
-         if not message.author.bot:
-             global falcon_userid_threadid_dictionary # tracks userid-thread existence
-             if message.channel.id in falcon_userid_threadid_dictionary: # is this a valid thread?
-                 if falcon_userid_threadid_dictionary[message.channel.id] == message.author.id: # more than that - is this specifically the right user for this thread?
-                     # call job for falcon
-                     #await message.reply("checks succeeded, calling continue_falcon")
-                     await continue_falcon(message)
-
-     except Exception as e:
-         print(f"Error: {e}")
- #----------------------------------------------------------------------------------------------------------------------------
- async def continue_falcon(message):
-     try:
-         global chathistory # ?
-         global instructions
-         global threadid_conversation
-         await message.add_reaction('<a:loading:1121820108189339738>') # test=<a:loading:1121820108189339738> hf=<a:loading:1114111677990981692>
-         chathistory = threadid_conversation[message.channel.id]
-         prompt = message.content
-         # generation
-         job = falconclient.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1) # This is not blocking, similar to run_in_executor (but better)
-         while job.done() == False:
-             status = job.status()
-             #print(status)
-         else:
-             file_paths = job.outputs()
-             full_generation = file_paths[-1] # tmp12345678.json
-             with open(full_generation, 'r') as file:
-                 data = json.load(file)
-                 output_text = data[-1][-1] # we output this as the bot

-             threadid_conversation[message.channel.id] = full_generation # overwrite the old file
-             falcon_userid_threadid_dictionary[message.channel.id] = message.author.id
-
-             print(output_text)
-             await message.reply(f"{output_text}")
-             await message.remove_reaction('<a:loading:1121820108189339738>', client.user) # test=<a:loading:1121820108189339738> hf=<a:loading:1114111677990981692>
-
-     except Exception as e:
-         print(f"Error: {e}")
-         await message.reply(f"Error: {e} <@811235357663297546> (continue_falcon error)")
- #----------------------------------------------------------------------------------------------------------------------------
- '''
- @bot.command()
- async def falconclear(ctx):
-     if not ctx.author.bot:
-         if ctx.channel.id == 1079459939405279232: # 1079459939405279232 = test in lunar server
-             if ctx.author.id in falcon_userid_threadid_dictionary:
-                 del falcon_userid_threadid_dictionary[ctx.author.id]
-                 await ctx.reply(f"{ctx.author.mention}'s conversation has been cleared. Feel free to start a new one!")
- '''
- #----------------------------------------------------------------------------------------------------------------------------
+ #-------------------------------------------------------------------------------------------------------------------------------------
+ # deepfloydif stage 1 generation
+ def inference(prompt):
+     negative_prompt = ''
+     seed = random.randint(0, 1000)
+     #seed = 1
+     number_of_images = 4
+     guidance_scale = 7
+     custom_timesteps_1 = 'smart50'
+     number_of_inference_steps = 50
+
+     stage_1_results, stage_1_param_path, stage_1_result_path = df.predict(
+         prompt, negative_prompt, seed, number_of_images, guidance_scale, custom_timesteps_1, number_of_inference_steps, api_name='/generate64')
+
+     return [stage_1_results, stage_1_param_path, stage_1_result_path]
+ #-------------------------------------------------------------------------------------------------------------------------------------
+ # deepfloydif stage 2 upscaling
+ def inference2(index, stage_1_result_path):
+     selected_index_for_stage_2 = index
+     seed_2 = 0
+     guidance_scale_2 = 4
+     custom_timesteps_2 = 'smart50'
+     number_of_inference_steps_2 = 50
+     result_path = df.predict(stage_1_result_path, selected_index_for_stage_2, seed_2,
+         guidance_scale_2, custom_timesteps_2, number_of_inference_steps_2, api_name='/upscale256')
+
+     return result_path
+ #-------------------------------------------------------------------------------------------------------------------------------------
+ async def react1234(reaction_emojis, combined_image_dfif):
+     for emoji in reaction_emojis:
+         await combined_image_dfif.add_reaction(emoji)
+ #-------------------------------------------------------------------------------------------------------------------------------------
@client.tree.command()
@app_commands.describe(
    prompt='Enter a prompt to generate an image! Can generate realistic text, too!')
@@ -284,29 +204,180 @@ async def dfif(interaction: discord.Interaction, prompt: str):
    except Exception as e:
        print(f"Error: {e}")
        #await thread.send(f"Error: {e} <@811235357663297546> (continue_falcon error)")
- #----------------------------------------------------------------------------------------------------------------------------
- async def react1234(reaction_emojis, combined_image_dfif):
-     for emoji in reaction_emojis:
-         await combined_image_dfif.add_reaction(emoji)
+ #-------------------------------------------------------------------------------------------------------------------------------------
+ async def dfif2(index: int, stage_1_result_path, thread, dfif_command_message_id): # add safetychecks
+     try:
+         parent_channel = thread.parent
+         dfif_command_message = await parent_channel.fetch_message(dfif_command_message_id)
+         await dfif_command_message.remove_reaction('✅', bot.user)
+         await dfif_command_message.add_reaction('<a:loading:1121820108189339738>')
+
+         number = index + 1
+         if number == 1:
+             position = "top left"
+         elif number == 2:
+             position = "top right"
+         elif number == 3:
+             position = "bottom left"
+         elif number == 4:
+             position = "bottom right"
+         await thread.send(f"Upscaling the {position} image...")
+
+         # run blocking function in executor
+         loop = asyncio.get_running_loop()
+         result_path = await loop.run_in_executor(None, inference2, index, stage_1_result_path)
+
+         #await thread.send(f"✅upscale done")
+         with open(result_path, 'rb') as f:
+             await thread.send(f'Here is the upscaled image! :) ', file=discord.File(f, 'result.png'))
+
+         await dfif_command_message.remove_reaction('<a:loading:1121820108189339738>', bot.user)
+         await dfif_command_message.add_reaction('✅')
+         await thread.edit(archived=True)

+     except Exception as e:
+         print(f"Error: {e}")
+         parent_channel = thread.parent
+         dfif_command_message = await parent_channel.fetch_message(dfif_command_message_id)
+         await dfif_command_message.remove_reaction('<a:loading:1121820108189339738>', bot.user)
+         await dfif_command_message.add_reaction('❌')
+         await thread.send(f"Error during stage 2 upscaling, {e}")
+         await fullqueue(e, thread)
+         await thread.edit(archived=True)
+ #-------------------------------------------------------------------------------------------------------------------------------------
+ @bot.event
+ async def on_reaction_add(reaction, user): # ctx = await bot.get_context(reaction.message)? could try later, might simplify
+     try:
+         global DEEPFLOYD_CHANNEL_ID
+         if not user.bot:
+             channel_id = reaction.message.channel.id
+             if channel_id == DEEPFLOYD_CHANNEL_ID: # should be whatever the deepfloydif channel is
+                 if reaction.message.attachments:
+                     if user.id == reaction.message.mentions[0].id: # if user.id == reaction.message.mentions[0].id:
+                         #await reaction.message.channel.send("✅reaction detected")
+                         attachment = reaction.message.attachments[0]
+                         image_name = attachment.filename # named something like: tmpgtv4qjix1111269940599738479.png
+                         # remove .png first
+                         partialpathmessageid = image_name[:-4] # should be tmpgtv4qjix1111269940599738479
+                         # extract partialpath, messageid
+                         partialpath = partialpathmessageid[:11] # tmpgtv4qjix
+                         messageid = partialpathmessageid[11:] # 1111269940599738479
+                         # add /tmp/ to partialpath, save as new variable
+                         fullpath = "/tmp/" + partialpath # should be /tmp/tmpgtv4qjix
+                         #await reaction.message.channel.send(f"✅fullpath extracted, {fullpath}")
+                         emoji = reaction.emoji
+
+                         if emoji == "↖️":
+                             index = 0
+                         elif emoji == "↗️":
+                             index = 1
+                         elif emoji == "↙️":
+                             index = 2
+                         elif emoji == "↘️":
+                             index = 3
+
+                         #await reaction.message.channel.send(f"✅index extracted, {index}")
+                         index = index
+                         stage_1_result_path = fullpath
+                         thread = reaction.message.channel
+                         dfif_command_message_id = messageid
+                         #await reaction.message.channel.send(f"✅calling dfif2")
+                         await dfif2(index, stage_1_result_path, thread, dfif_command_message_id)

- #----------------------------------------------------------------------------------------------------------------------------
- #----------------------------------------------------------------------------------------------------------------------------------------------
- # deepfloydif stage 1 generation ✅
- def inference(prompt):
-     negative_prompt = ''
-     seed = random.randint(0, 1000)
-     #seed = 1
-     number_of_images = 4
-     guidance_scale = 7
-     custom_timesteps_1 = 'smart50'
-     number_of_inference_steps = 50
-
-     stage_1_results, stage_1_param_path, stage_1_result_path = df.predict(
-         prompt, negative_prompt, seed, number_of_images, guidance_scale, custom_timesteps_1, number_of_inference_steps, api_name='/generate64')
-
-     return [stage_1_results, stage_1_param_path, stage_1_result_path]
- #----------------------------------------------------------------------------------------------------------------------------
+     except Exception as e:
+         print(f"Error: {e} (known error, does not cause issues, fix later)")
+ #-------------------------------------------------------------------------------------------------------------------------------------
+ @client.tree.command()
+ @app_commands.describe(
+     prompt='Enter some text to chat with the bot! Like this: /falcon Hello, how are you?')
+ async def falcon(interaction: discord.Interaction, prompt: str):
+     try:
+         global chathistory # ?
+         global falcon_userid_threadid_dictionary # tracks userid-thread existence
+         global instructions
+         global threadid_conversation
+         global FALCON_CHANNEL_ID
+
+         if not interaction.user.bot:
+             if interaction.channel.id == FALCON_CHANNEL_ID: # 1079459939405279232= test
+                 await interaction.response.send_message(f"Working on it!")
+                 channel = interaction.channel
+                 # 1
+                 message = await channel.send(f"Creating thread...")
+                 thread = await message.create_thread(name=f'{interaction.user}') # interaction.user
+                 await thread.send(f"[DISCLAIMER: HuggingBot is a **highly experimental** beta feature; The Falcon " \
+                     f"model and system prompt can be found here: https://huggingface.co/spaces/HuggingFaceH4/falcon-chat]")
+                 # generation
+                 chathistory = falconclient.predict(
+                     fn_index=5
+                 ) # []
+                 job = falconclient.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1) # This is not blocking, similar to run_in_executor (but better)
+                 while job.done() == False:
+                     status = job.status()
+                     #print(status)
+                 else:
+                     file_paths = job.outputs()
+                     full_generation = file_paths[-1] # tmp12345678.json
+                     with open(full_generation, 'r') as file:
+                         data = json.load(file)
+                         output_text = data[-1][-1] # we output this as the bot
+
+                     threadid_conversation[thread.id] = full_generation
+
+                     falcon_userid_threadid_dictionary[thread.id] = interaction.user.id
+
+                     print(output_text)
+                     await thread.send(f"{output_text}")
+
+     except Exception as e:
+         print(f"Error: {e}")
+         #await thread.send(f"{e} cc <@811235357663297546> (falconprivate error)")
+ #-------------------------------------------------------------------------------------------------------------------------------------
+ async def continue_falcon(message):
+     try:
+         global chathistory # ?
+         global instructions
+         global threadid_conversation
+         await message.add_reaction('<a:loading:1121820108189339738>') # test=<a:loading:1121820108189339738> hf=<a:loading:1114111677990981692>
+         chathistory = threadid_conversation[message.channel.id]
+         prompt = message.content
+         # generation
+         job = falconclient.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1) # This is not blocking, similar to run_in_executor (but better)
+         while job.done() == False:
+             status = job.status()
+             #print(status)
+         else:
+             file_paths = job.outputs()
+             full_generation = file_paths[-1] # tmp12345678.json
+             with open(full_generation, 'r') as file:
+                 data = json.load(file)
+                 output_text = data[-1][-1] # we output this as the bot
+
+             threadid_conversation[message.channel.id] = full_generation # overwrite the old file
+             falcon_userid_threadid_dictionary[message.channel.id] = message.author.id
+
+             print(output_text)
+             await message.reply(f"{output_text}")
+             await message.remove_reaction('<a:loading:1121820108189339738>', client.user) # test=<a:loading:1121820108189339738> hf=<a:loading:1114111677990981692>
+
+     except Exception as e:
+         print(f"Error: {e}")
+         await message.reply(f"Error: {e} <@811235357663297546> (continue_falcon error)")
+ #-------------------------------------------------------------------------------------------------------------------------------------
+ @client.event
+ async def on_message(message):
+     try:
+         if not message.author.bot:
+             global falcon_userid_threadid_dictionary # tracks userid-thread existence
+             if message.channel.id in falcon_userid_threadid_dictionary: # is this a valid thread?
+                 if falcon_userid_threadid_dictionary[message.channel.id] == message.author.id: # more than that - is this specifically the right user for this thread?
+                     # call job for falcon
+                     #await message.reply("checks succeeded, calling continue_falcon")
+                     await continue_falcon(message)
+
+     except Exception as e:
+         print(f"Error: {e}")
+ #-------------------------------------------------------------------------------------------------------------------------------------
# running in thread
DISCORD_TOKEN = os.environ.get("DISCORD_TOKEN", None)

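
Note on the reaction-to-upscale flow added in this commit: the new on_reaction_add handler assumes the stage-1 attachment filename is an 11-character temp-file stem concatenated with the id of the originating /dfif command message (for example tmpgtv4qjix1111269940599738479.png), and that the four arrow reactions pick a quadrant of the 2x2 grid passed to dfif2. A minimal, self-contained sketch of that decoding follows; the helper names are illustrative only, since app.py inlines this logic directly in the handler.

# Sketch only: helper and constant names below are hypothetical, not part of app.py.
EMOJI_TO_INDEX = {"↖️": 0, "↗️": 1, "↙️": 2, "↘️": 3}  # quadrant order expected by dfif2

def decode_attachment_name(image_name):
    # "tmpgtv4qjix1111269940599738479.png" -> ("/tmp/tmpgtv4qjix", "1111269940599738479")
    stem = image_name[:-4]        # drop the ".png" suffix
    partialpath = stem[:11]       # 11-character tempfile stem, e.g. "tmpgtv4qjix"
    messageid = stem[11:]         # id of the original /dfif command message
    return "/tmp/" + partialpath, messageid

print(decode_attachment_name("tmpgtv4qjix1111269940599738479.png"))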
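
The re-added falcon and continue_falcon handlers lean on gradio_client's submit/Job API so generation does not block: submit returns a Job immediately, the handler polls job.done(), and the newest temp JSON file from job.outputs() holds the whole conversation. A rough sketch of that pattern under the same assumptions as the diff (Space name taken from the disclaimer link, fn_index values and sampling arguments copied from app.py, placeholder prompt and instructions):

# Sketch of the non-blocking generation pattern used by falcon()/continue_falcon().
import json
from gradio_client import Client

falconclient = Client("HuggingFaceH4/falcon-chat")   # Space named in the thread disclaimer
chathistory = falconclient.predict(fn_index=5)       # fresh (empty) conversation state
job = falconclient.submit("Hello!", chathistory, "Placeholder system prompt.", 0.8, 0.9, fn_index=1)  # returns a Job at once
while not job.done():                                # poll instead of blocking on the result
    _ = job.status()
with open(job.outputs()[-1], "r") as f:              # newest tmp*.json holds the chat history
    print(json.load(f)[-1][-1])                      # last bot reply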