Commit 86168f90 authored by Stergios Papadopoulos's avatar Stergios Papadopoulos
Browse files

- Modified generate_answer() to be compatible with the ui (streaming). Also in...

- Modified generate_answer() to be compatible with the UI (streaming). Also, only the LLM's answer is now saved in the conversation and later provided back to the LLM, to save context-window space.
- Created uicontroller module that can create a simple ui which can be shared.
parent 2f7bd07b
Loading
Loading
Loading
Loading
+21 −15
Original line number Diff line number Diff line
@@ -46,7 +46,7 @@ class Generator:
        for chunk in self._embedder.search_similar(self._collection_name, question, n_results=self._n_results):
            prompt += chunk + "\n\n"

        print(prompt)
        print(prompt) # TODO Delete this line
        return prompt

    def _fetch_conversation(self) -> list[dict[str, str]]:
@@ -109,12 +109,12 @@ class Generator:
        return True if not self._conversation else False

    ## ====== CALLABLE METHODS ====== ##
    def generate_answer(self, question, model) -> None:
    def generate_answer(self, question, model):
        """
        Generates an answer to the given question using the given model.
        :param model: The model to use for generating the answer.
        :param question: The question to answer.
        :return: None
        :return: generator of strings. Each string represents a chunk of the answer.
        """
        # Set the model to the specified one
        self._model = model
@@ -130,9 +130,12 @@ class Generator:

        answer = ""
        for chunk in answering_fn():
            print(chunk, end="")
            yield chunk
            answer += chunk

        # Save only the user's question
        self._conversation[-1]["content"] = question

        # Save answer to the conversation
        self._update_conversation("assistant", answer)

@@ -140,16 +143,19 @@ class Generator:
        pass


embedder = Embedder()
embedder.load_docs(directory="aiani dedomena/*", chunking_type=Embedder.ByChar)

if not embedder.collection_exists("Mycollection"):
    embedder.add_data("Mycollection")

gen = Generator(embedder=embedder, collection_name="Mycollection", n_results=5)

while True:
    gen.generate_answer(input("Ask a question: "), model="gpt-4o-mini")
    print("\n")
# embedder = Embedder()
# embedder.load_docs(directory="aiani dedomena/*", chunking_type=Embedder.ByChar)
#
# if not embedder.collection_exists("Mycollection"):
#     embedder.add_data("Mycollection")
#
# gen = Generator(embedder=embedder, collection_name="Mycollection", n_results=5)
#
# while True:
#     print("==================================================")
#     gen.generate_answer(input("Ask a question: "), model="gpt-4o-mini")
#     print("\n")
#     print("==================================================")
#     print(f"conversation:\n{gen._fetch_conversation()}\n\n")

# embedder.visualize("Mycollection", dimensions=["2d", "3d"])

uicontroller.py

0 → 100644
+74 −0
Original line number Diff line number Diff line
import gradio as gr  # Ensure the Gradio library is installed by running: pip install gradio
from generator import Generator
from embedder import Embedder

class UIController:
    """Wires an Embedder + Generator pair into a simple streaming Gradio chat UI."""

    def __init__(self):
        # Use ONE Embedder instance and prepare it BEFORE building the
        # Generator. The original created two separate Embedder objects and
        # constructed the Generator first, so the Generator could end up
        # querying an embedder whose collection was never prepared.
        self.embedder = Embedder()
        self._prepare_embedder()
        self.gen = Generator(self.embedder, "Mycollection", n_results=5)

    # ===   PRIVATE METHODS   === #
    def _prepare_embedder(self):
        """
        Loads the documents and creates the collection if it does not exist yet.
        :return: None
        """
        if not self.embedder.collection_exists("Mycollection"):
            self.embedder.load_docs(directory="aiani dedomena/*", chunking_type=Embedder.ByChar)
            self.embedder.add_data("Mycollection")

    def _user(self, user_message, history: list):
        """
        Appends the user's message to the conversation history.
        :param user_message: The user's message to add to the history.
        :param history: The history of the conversation (list of role/content dicts).
        :return: A tuple of ("", updated history). The blank string clears the
        textbox; the updated history is pushed to the chatbot component.
        """
        return "", history + [{"role": "user", "content": user_message}]

    def _bot(self, history: list):
        """
        Streams the assistant's answer into the last history entry.
        :param history: The conversation history; the last entry must be the
        user's question.
        :return: A generator yielding the history after each received chunk,
        so the chatbot component re-renders incrementally.
        """
        question = history[-1]["content"]
        answer_stream = self.gen.generate_answer(question, model="gpt-4o-mini")
        history.append({"role": "assistant", "content": ""})
        for chunk in answer_stream:
            history[-1]["content"] += chunk
            yield history

    # ====  CALLABLE METHODS   === #
    def create_ui(self, share=False):
        """
        Builds and launches the chat UI.
        :param share: If True, a public link will be generated for the UI, else the host will be localhost.
        :return: None.
        """
        with gr.Blocks() as demo:
            with gr.Row():
                gr.Markdown("AI Ξεναγός!")

            bot = gr.Chatbot(type="messages")
            msg = gr.Textbox()

            # Submitting the textbox first records the user turn (and clears
            # the box), then streams the bot's reply into the chatbot.
            msg.submit(self._user, [msg, bot], [msg, bot], queue=False).then(
                self._bot, bot, bot
            )

        demo.launch(share=share)


# Launch the shared chat UI only when executed as a script — importing this
# module (e.g. for testing) must not start a public Gradio server.
if __name__ == "__main__":
    ui = UIController()
    ui.create_ui(share=True)
 No newline at end of file