File size: 5,482 Bytes
75e8fca
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7495138
75e8fca
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
815a27e
 
36d0f40
fa98bc1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7fd36a0
 
 
815a27e
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
import discord
from discord import app_commands
import gradio as gr
from gradio_client import Client
import os
import threading 
import json
from PIL import Image
import asyncio

# HF GUILD SETTINGS
MY_GUILD_ID = 1077674588122648679 if os.getenv("TEST_ENV", False) else 879548962464493619
MY_GUILD = discord.Object(id=MY_GUILD_ID)
HF_TOKEN = os.getenv('HF_TOKEN')
DISCORD_TOKEN = os.environ.get("DISCORD_TOKEN", None)

# thread.id -> user.id of the user who owns that Falcon conversation thread
falcon_userid_threadid_dictionary = {}
# thread.id -> filesystem path of the json file holding the conversation so far
# (fix: this dict was previously initialized twice; the duplicate is removed)
threadid_conversation = {}

# System prompt handed to the Falcon Space on every generation call.
instructions = "The following is a conversation between a highly knowledgeable " \
"and intelligent AI assistant, called Falcon, and a human user, called User. In the " \
"following interactions, User and Falcon will converse in natural language, and Falcon " \
"will answer User's questions. Falcon was built to be respectful, polite and inclusive. " \
"Falcon was built by the Technology Innovation Institute in Abu Dhabi. Falcon will never " \
"decline to answer a question, and always attempts to give an answer that User would be satisfied " \
"with. It knows a lot, and always tells the truth. The conversation begins."

BOT_USER_ID = 1086256910572986469 # 1086256910572986469 = test
FALCON_CHANNEL_ID = 1079459939405279232 #  1079459939405279232 = test

# Client for the hosted Falcon chat Space; HF_TOKEN grants access.
falconclient = Client("HuggingFaceH4/falcon-chat", HF_TOKEN)

async def try_falcon(interaction, prompt):
    """Start a new Falcon conversation from a slash-command interaction.

    Creates a Discord thread named after *prompt* in FALCON_CHANNEL_ID, runs the
    first Falcon generation, posts the answer in the thread, and registers the
    thread in the module-level tracking dicts so continue_falcon can pick it up.

    Parameters:
        interaction: the discord Interaction that triggered the command.
        prompt: the user's first message to Falcon.

    Side effects: sends Discord messages, creates a thread, and writes to
    falcon_userid_threadid_dictionary and threadid_conversation.
    Errors are printed and swallowed (best-effort bot behavior).
    """
    try:
        global falcon_userid_threadid_dictionary  # tracks userid-thread existence
        global instructions
        global threadid_conversation
        global BOT_USER_ID
        global FALCON_CHANNEL_ID

        # Guard clauses: ignore the bot's own interactions and wrong channels.
        if interaction.user.id == BOT_USER_ID:
            return
        if interaction.channel.id != FALCON_CHANNEL_ID:
            return

        await interaction.response.send_message(f"Working on it!")
        channel = interaction.channel
        message = await channel.send(f"Creating thread...")
        thread = await message.create_thread(name=f'{prompt}', auto_archive_duration=60)  # interaction.user
        await thread.send(f"[DISCLAIMER: HuggingBot is a **highly experimental** beta feature; The Falcon " \
        f"model and system prompt can be found here: https://huggingface.co/spaces/HuggingFaceH4/falcon-chat]")

        # fn_index=5 resets/fetches an empty chat history ([]) from the Space.
        chathistory = falconclient.predict(fn_index=5)
        # submit() is non-blocking; poll the Job until the generation finishes.
        job = falconclient.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1)
        while not job.done():
            # Yield to the event loop while waiting; the original busy-spin
            # blocked the entire asyncio loop (and the whole bot) here.
            await asyncio.sleep(0.5)
        file_paths = job.outputs()
        full_generation = file_paths[-1]  # path to tmp12345678.json with the full history
        with open(full_generation, 'r') as file:
            data = json.load(file)
        output_text = data[-1][-1]  # last bot turn; we output this as the bot

        threadid_conversation[thread.id] = full_generation
        falcon_userid_threadid_dictionary[thread.id] = interaction.user.id

        print(output_text)
        await thread.send(f"{output_text}")

    except Exception as e:
        # Deliberate best-effort boundary: log and keep the bot alive.
        print(f"Error: {e}")
        #await thread.send(f"{e} cc <@811235357663297546> (falconprivate error)")
#------------------------------------------------------------------------------------------------------------------------------------- 
async def continue_falcon(message):
    """Continue an existing Falcon conversation when its owner posts in the thread.

    Only reacts when *message* was sent in a thread registered in
    falcon_userid_threadid_dictionary AND by the user who started that thread.
    Feeds the stored conversation file plus the new message to the Falcon Space,
    then replies with the generated answer and updates the stored history.

    Parameters:
        message: the discord Message that may continue a conversation.

    Side effects: adds a reaction, sends a reply, updates the module-level
    tracking dicts. Errors are printed and reported in-thread.
    """
    try:
        global falcon_userid_threadid_dictionary  # tracks userid-thread existence
        global instructions
        global threadid_conversation

        # Guard clauses: skip bot authors, unknown threads, and non-owners.
        if message.author.bot:
            return
        thread_id = message.channel.id
        if thread_id not in falcon_userid_threadid_dictionary:  # is this a valid thread?
            return
        if falcon_userid_threadid_dictionary[thread_id] != message.author.id:
            return  # only the user who opened the thread may continue it

        await message.add_reaction('🔁')
        chathistory = threadid_conversation[thread_id]
        prompt = message.content

        # submit() is non-blocking; poll the Job until the generation finishes.
        job = falconclient.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1)
        while not job.done():
            # Yield to the event loop while waiting; the original busy-spin
            # blocked the entire asyncio loop (and the whole bot) here.
            await asyncio.sleep(0.5)
        file_paths = job.outputs()
        full_generation = file_paths[-1]  # path to tmp12345678.json with the full history
        with open(full_generation, 'r') as file:
            data = json.load(file)
        output_text = data[-1][-1]  # last bot turn; we output this as the bot

        threadid_conversation[thread_id] = full_generation  # overwrite the old file
        falcon_userid_threadid_dictionary[thread_id] = message.author.id

        print(output_text)
        await message.reply(f"{output_text}")

    except Exception as e:
        print(f"Error: {e}")
        await message.reply(f"Error: {e} <@811235357663297546> (continue_falcon error)")