Spaces:
Sleeping
Sleeping
| import os | |
| import gradio as gr | |
| from openai import OpenAI | |
| import pprint | |
| import chromadb | |
| from chromadb.utils.embedding_functions import OpenAIEmbeddingFunction | |
# --- Module-level setup --------------------------------------------------
# OpenAI client for chat completions; the API key is read from the
# OPENAI_KEY environment variable (must be set before launch).
client = OpenAI(api_key=os.getenv("OPENAI_KEY"))

# Pretty-printer used to log outgoing requests in generate_response().
pp = pprint.PrettyPrinter(indent=4)

# In-memory conversation state shared across chat_interface() calls.
current_id = 0      # monotonically increasing counter used to build document ids
chat_history = []   # raw message texts, alternating user/assistant
chat_metadata = []  # per-message metadata ({"role": ...}), parallel to chat_history
history_ids = []    # ChromaDB document ids, parallel to chat_history

# ChromaDB vector store: embeds past messages with OpenAI embeddings so
# earlier conversation turns can be retrieved as context for new prompts.
chroma_client = chromadb.Client()
embedding_function = OpenAIEmbeddingFunction(
    api_key=os.getenv("OPENAI_KEY"),
    model_name=os.getenv("EMBEDDING_MODEL"),
)
# get_or_create_collection avoids a crash when the module is re-run or
# re-imported in the same process (create_collection raises on a name clash).
collection = chroma_client.get_or_create_collection(
    name="conversations",
    embedding_function=embedding_function,
)

# Running prompt sent to the chat model; grows as the conversation proceeds.
messages = [{"role": "system", "content": "You are a kind and friendly chatbot"}]
def generate_response(messages):
    """Send *messages* to the configured chat model and return the reply message.

    The model name comes from the MODEL_NAME environment variable. The
    outgoing request and the token usage are logged to stdout.
    """
    completion = client.chat.completions.create(
        model=os.getenv("MODEL_NAME"),
        messages=messages,
        temperature=0.5,
        max_tokens=250,
    )
    print("Request:")
    pp.pprint(messages)
    usage = completion.usage
    print(
        f"Completion tokens: {usage.completion_tokens}, "
        f"Prompt tokens: {usage.prompt_tokens}, "
        f"Total tokens: {usage.total_tokens}"
    )
    return completion.choices[0].message
def chat_interface(user_input):
    """Handle one chat turn: retrieve context, query the model, store the exchange.

    Pulls up to two semantically similar past messages from the vector store
    and injects them as extra context, sends the running conversation to the
    model, records the new user/assistant pair in the parallel history lists
    and in the ChromaDB collection, and returns the assistant's reply text.
    """
    global current_id

    # Retrieve the two most similar prior messages as additional context.
    results = collection.query(query_texts=[user_input], n_results=2)
    for res in results['documents'][0]:
        messages.append({"role": "user", "content": f"previous chat: {res}"})

    messages.append({"role": "user", "content": user_input})
    response = generate_response(messages)

    # Build only the NEW user/assistant pair with fresh ids.
    new_documents = [user_input, response.content]
    new_metadatas = [{"role": "user"}, {"role": "assistant"}]
    new_ids = []
    for _ in range(2):
        current_id += 1
        new_ids.append(f"id_{current_id}")

    chat_history.extend(new_documents)
    chat_metadata.extend(new_metadatas)
    history_ids.extend(new_ids)

    # Bug fix: add only the new pair to the collection. The original passed
    # the full chat_history/history_ids on every call, re-submitting documents
    # whose ids already exist in the collection.
    collection.add(
        documents=new_documents,
        metadatas=new_metadatas,
        ids=new_ids,
    )
    return response.content
def main():
    """Build and launch the Gradio text-in/text-out chatbot UI."""
    gr.Interface(
        fn=chat_interface,
        inputs="text",
        outputs="text",
        title="Chatbot Interface",
    ).launch()


if __name__ == "__main__":
    main()