From 14877238e0fd1ae0de2568cf096e8761ff1275e8 Mon Sep 17 00:00:00 2001
From: Martin Jaros <37882816+JRoshthen1@users.noreply.github.com>
Date: Thu, 25 Apr 2024 20:43:23 +0200
Subject: [PATCH] Update llm_page.py

---
 lsb_package/llm_page.py | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/lsb_package/llm_page.py b/lsb_package/llm_page.py
index 203c30f..f276180 100644
--- a/lsb_package/llm_page.py
+++ b/lsb_package/llm_page.py
@@ -29,10 +29,8 @@ class LlmPage(tk.Frame):
     def run_llama_operation(self, llmTopics):
         try:
-            print("##### Started Llama...")
-            print(llmTopics)
             # Example: Llama class must be imported correctly here
-            llm = Llama(model_path="/home/jrosh/.local/share/nomic.ai/GPT4All/gpt4all-falcon-newbpe-q4_0.gguf", n_ctx=2048, )
+            llm = Llama(model_path="##############", n_ctx=2048, )
             # output = llm.create_chat_completion(
             #     messages=[
             #         {"role": "system", "content": "You are a teacher explaining in great detail given topics divided by new line."},
@@ -79,10 +77,7 @@ class LlmPage(tk.Frame):
             Returns: Response object containing the generated text.
             """
-            print(output)
-            #print(output['choices'][0]['message']['content'])
             self.text_widget.after(0, self.update_text_widget, output['choices'][0])
-            print("##### Llama Finished")
         except Exception as e:
             print(f"Error during Llama operation: {e}")
             self.text_widget.after(0, self.update_text_widget, "An error occurred, please try again.")
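
For reference, a minimal standalone sketch (not part of the commit) of the flow the patched run_llama_operation relies on: load a GGUF model with llama-cpp-python, run the chat completion that is still commented out in llm_page.py, and push the result into a tkinter Text widget via after(). The generate_explanation helper, the topic string, and path/to/model.gguf are hypothetical placeholders; the redacted model_path in the patch stays redacted.

    import tkinter as tk
    from llama_cpp import Llama


    def generate_explanation(llm_topics: str, model_path: str) -> str:
        """Run a chat completion over newline-separated topics and return the text."""
        llm = Llama(model_path=model_path, n_ctx=2048)
        output = llm.create_chat_completion(
            messages=[
                {"role": "system",
                 "content": "You are a teacher explaining in great detail given topics divided by new line."},
                {"role": "user", "content": llm_topics},
            ]
        )
        # create_chat_completion returns an OpenAI-style dict; the generated
        # text lives under choices[0]["message"]["content"].
        return output["choices"][0]["message"]["content"]


    if __name__ == "__main__":
        root = tk.Tk()
        text_widget = tk.Text(root)
        text_widget.pack()

        def show(result: str) -> None:
            text_widget.insert(tk.END, result)

        try:
            answer = generate_explanation("photosynthesis\nthe water cycle",
                                          "path/to/model.gguf")  # placeholder path
        except Exception as exc:  # mirror the patch's broad error handling
            answer = f"Error during Llama operation: {exc}"

        # Schedule the UI update on the Tk event loop, as the patch does with
        # self.text_widget.after(0, self.update_text_widget, ...).
        text_widget.after(0, show, answer)
        root.mainloop()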