From 7659fe57765b75cf215ae77002752d9d17efd01b Mon Sep 17 00:00:00 2001
From: Sivert2101 <89571140+Sivert2101@users.noreply.github.com>
Date: Fri, 25 Oct 2024 11:58:56 +0200
Subject: [PATCH] added back the for loop

---
 llama/main_automatic.go | 64 ++++++++++++++++++++++++-----------------
 1 file changed, 38 insertions(+), 26 deletions(-)

diff --git a/llama/main_automatic.go b/llama/main_automatic.go
index a4b3365..906d6ef 100644
--- a/llama/main_automatic.go
+++ b/llama/main_automatic.go
@@ -43,42 +43,54 @@ func main() {
 
 	// break
 	//}
 
-	var modifiedPrompt = userPrompt + extraction.GoPrompt
+	for {
 
-	fmt.Println("Prompt received. Generating response...")
+		if userPrompt == "exit" {
+			fmt.Println("Exiting the program.")
+			break
+		}
 
-	// Start a go routine to display a waiting indicator while the response is being generated
-	done := make(chan bool)
-	go displayindicator.DisplayLoadingIndicator(done)
+		var modifiedPrompt = userPrompt + extraction.GoPrompt
 
-	// Generate response using Ollama API, passing the context
-	response, updatedContext, err := ollamaimplementation.GetOllamaResponse(modifiedPrompt, conversationContext)
+		fmt.Println("Prompt received. Generating response...")
 
-	// Signal the waiting indicator to stop
-	done <- true
+		// Start a go routine to display a waiting indicator while the response is being generated
+		done := make(chan bool)
+		go displayindicator.DisplayLoadingIndicator(done)
 
-	if err != nil {
-		fmt.Println("Error generating response:", err)
-		continue
-	}
+		// Generate response using Ollama API, passing the context
+		response, updatedContext, err := ollamaimplementation.GetOllamaResponse(modifiedPrompt, conversationContext)
 
-	// Update the conversation context with the response
-	conversationContext = updatedContext
-	//fmt.Println(updatedContext)
+		// Signal the waiting indicator to stop
+		done <- true
 
-	generatedCode, _ := extraction.Extract(response) // Handle error with string
+		if err != nil {
+			fmt.Println("Error generating response:", err)
+			continue
+		}
 
-	fmt.Println("Ollama's response:", generatedCode)
+		// Update the conversation context with the response
+		conversationContext = updatedContext
 
-	output, err := compiler.CompileStringToGo(generatedCode)
+		generatedCode, err_extract := extraction.Extract(response) // Handle error with string
 
-	if err != nil {
-		fmt.Println("There were an error in the code that was compiled")
-		userPrompt = output + "\nFollowing are the errors, please fix the code. Write it again, and write only source code along with same test cases with no further explanation. The format should be ```rust <yourcode + testcases> ```"
-	} else {
-		fmt.Println("Compiled successfully. Here is the output: %v", output)
-		//userPrompt = "exit"
-	}
+		if err_extract != nil {
+			fmt.Printf("The LLM gave a improper string in response: %v", response)
+			userPrompt = "exit"
+			continue
+		}
+
+		fmt.Println("Ollama's response:", generatedCode)
+		output, err := compiler.CompileStringToGo(generatedCode)
+
+		if err != nil {
+			userPrompt = output + "\nFollowing are the errors, please fix the code. Write it again, and write only source code along with same test cases with no further explanation. The format should be ```rust <yourcode + testcases> ```"
+		} else {
+			fmt.Printf("Compiled successfully. Here is the output: %v", output)
+			userPrompt = "exit"
+		}
+
+	}
 
 	}
 }
-- 
GitLab