feat: v2

commit 11e76ff837
parent 21d4ad64f3

.gitignore (vendored): 2 changed lines

@@ -1,3 +1,5 @@
+tokens.db
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
@@ -76,6 +76,8 @@ def text_to_tokens(string_or_messages: str | list[str | dict | list] | Conversat
     messages = []
     if isinstance(string_or_messages, str):
         messages = [{"role": "user", "content": string_or_messages}]
+    elif isinstance(string_or_messages, Conversation):
+        messages = string_or_messages.messages
     else:
         messages = string_or_messages
 
@@ -99,4 +101,4 @@ def text_to_tokens(string_or_messages: str | list[str | dict | list] | Conversat
         num_tokens += text_to_tokens(message["content"])
     num_tokens += 2  # every reply is primed with <im_start>assistant
 
-        return num_tokens
+    return num_tokens
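
For context, the dispatch these two added lines complete: a plain string is wrapped as a single user message, a Conversation contributes its .messages list, and anything else is assumed to already be a list of message dicts. Below is a standalone sketch of that normalization step only; the Conversation stub merely stands in for the real copeai_backend class.

from dataclasses import dataclass, field

@dataclass
class Conversation:
    # Stub standing in for copeai_backend.conversation.Conversation; only the
    # .messages attribute read by text_to_tokens is modelled here.
    messages: list = field(default_factory=list)

def normalize(string_or_messages):
    """Mirror of the input handling in text_to_tokens after this commit."""
    if isinstance(string_or_messages, str):
        return [{"role": "user", "content": string_or_messages}]
    elif isinstance(string_or_messages, Conversation):  # branch added in this commit
        return string_or_messages.messages
    return string_or_messages  # assumed to already be a message list

print(normalize("hello"))
print(normalize(Conversation(messages=[{"role": "user", "content": "hello"}])))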

main.py: 35 changed lines
@@ -116,8 +116,8 @@ async def on_message(message: discord.Message):
         total_tokens = copeai_backend.conversation.text_to_tokens(cached_conversations[message.author])
 
         cached_conversations[message.author].add_message(
-            role=copeai_backend.conversation.Role.user,
-            content=message.content
+            role=copeai_backend.conversation.Role.USER,
+            message=message.content
         )
 
         await message.channel.typing()
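
This hunk moves the call to the v2 backend surface: the enum member is now Role.USER and the text keyword is message rather than content. A hypothetical mirror of that surface, just to show why the old call shape no longer works; only the names visible in the hunk are taken from the commit, the rest is illustrative.

from enum import Enum

class Role(Enum):
    # Only USER appears in the diff; the other members are illustrative.
    SYSTEM = "system"
    USER = "user"
    ASSISTANT = "assistant"

class Conversation:
    def __init__(self):
        self.messages = []

    def add_message(self, role: Role, message: str):
        # v2 keyword is `message`; calling with `content=` would raise a TypeError.
        self.messages.append({"role": role.value, "content": message})

conv = Conversation()
conv.add_message(role=Role.USER, message="hello")  # new call shape used in main.py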

@@ -132,12 +132,31 @@ async def on_message(message: discord.Message):
         )
 
         typing.remove(message.channel)
-        response = req['choices'][0]['message']['content']
-        prompt_used_tokens = req['usage']['prompt_tokens']
-        completion_used_tokens = req['usage']['completion_tokens']
-        r=await message.reply(response, allowed_mentions=discord.AllowedMentions.none())
-        c.execute('INSERT INTO message_history VALUES (?, ?, ?, ?, ?, ?)', (message.id, message.author.id, message.content, prompt_used_tokens, 'user', int(message.created_at.timestamp())))
-        c.execute('INSERT INTO message_history VALUES (?, ?, ?, ?, ?, ?)', (r.id, message.author.id, response, completion_used_tokens, 'assistant', int(time.time())))
+        last_generation = 0
+        MSG = await message.reply('** **', view=views.GenerationState.GenerationStateView(views.GenerationState.GenerationState.GENERATING))
+        all_generated = []
+        async for response in req:
+            print(response.text)
+            if isinstance(response, copeai_backend.ConversationResponse):
+                response = ''.join(response.text)
+            else:
+                all_generated.append(response.text)
+
+            if last_generation < time.time():
+                compiled = ''.join(all_generated)
+                last_generation = time.time() + 1.5
+                if len(compiled) > 2000:
+                    await MSG.edit(content=None, embed=discord.Embed(description=compiled, color=0xfce75d))
+                else:
+                    await MSG.edit(content=compiled)
+
+        if len(response) > 2000:
+            await MSG.edit(content=None, embed=discord.Embed(description=response, color=0xfce75d), view=views.GenerationState.GenerationStateView(views.GenerationState.GenerationState.FINISHED))
+        else:
+            await MSG.edit(content=response, view=views.GenerationState.GenerationStateView(views.GenerationState.GenerationState.FINISHED))
+
+        c.execute('INSERT INTO message_history VALUES (?, ?, ?, ?, ?, ?)', (message.id, message.author.id, message.content, copeai_backend.conversation.text_to_tokens(message.content), 'user', int(message.created_at.timestamp())))
+        c.execute('INSERT INTO message_history VALUES (?, ?, ?, ?, ?, ?)', (MSG.id, message.author.id, response, copeai_backend.conversation.text_to_tokens(response), 'assistant', int(time.time())))
         db.commit()
     except Exception as e:
         traceback.print_exc()
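
The new loop streams the reply and edits the placeholder message as chunks arrive, but no more often than roughly every 1.5 seconds so Discord edit rate limits are respected, then performs one final edit with the complete text and the FINISHED view. A minimal standalone sketch of that throttling pattern; stream_chunks and edit_message are hypothetical stand-ins for the backend stream (`req`) and discord.Message.edit used here.

import asyncio, time

async def relay(stream_chunks, edit_message):
    # stream_chunks: async iterator yielding text fragments.
    # edit_message: coroutine taking the text accumulated so far.
    parts = []
    next_edit = 0.0  # earliest moment the next edit is allowed
    async for chunk in stream_chunks:
        parts.append(chunk)
        if time.time() >= next_edit:
            next_edit = time.time() + 1.5  # same 1.5 s spacing as the commit
            await edit_message(''.join(parts))
    await edit_message(''.join(parts))  # final edit with the complete text

async def demo():
    async def fake_stream():
        for word in "streamed reply goes here".split():
            yield word + ' '
            await asyncio.sleep(0.4)
    async def fake_edit(text):
        print('edit ->', text)
    await relay(fake_stream(), fake_edit)

asyncio.run(demo())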