diff --git a/llama/compiler/compiler.go b/llama/compiler/compiler.go
new file mode 100644
index 0000000000000000000000000000000000000000..41da418e7fa23cbd5c67567d8f64ac47852b154e
--- /dev/null
+++ b/llama/compiler/compiler.go
@@ -0,0 +1,10 @@
+package compiler
+
+// CompileStringToGo tries to compile a string of Go code into a Go executable and returns the compiler output and an error.
+// The function does not keep any executables; build artifacts are meant to be deleted before it returns.
+// NOTE: this is currently a placeholder that always reports success with empty output.
+func CompileStringToGo(code string) (string, error) {
+	var output string
+	// TODO: set up the build environment and invoke the Go compiler here.
+	return output, nil
+}
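+
+// One possible implementation (a sketch only, assuming the standard library packages
+// os, os/exec and path/filepath are imported): write the code to a temporary directory,
+// run `go build` there, and return the combined compiler output. Nothing is kept on disk.
+//
+//	dir, err := os.MkdirTemp("", "llama-build")
+//	if err != nil {
+//		return "", err
+//	}
+//	defer os.RemoveAll(dir) // delete all build artifacts when the function returns
+//	src := filepath.Join(dir, "main.go")
+//	if err := os.WriteFile(src, []byte(code), 0644); err != nil {
+//		return "", err
+//	}
+//	out, err := exec.Command("go", "build", "-o", filepath.Join(dir, "main"), src).CombinedOutput()
+//	return string(out), err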
diff --git a/llama/display-indicator/indicator.go b/llama/display-indicator/indicator.go
new file mode 100644
index 0000000000000000000000000000000000000000..566b0f9c03a593e1ee132e69e5799aba8582f656
--- /dev/null
+++ b/llama/display-indicator/indicator.go
@@ -0,0 +1,23 @@
+package displayindicator
+
+import (
+	"fmt"
+	"time"
+)
+
+// DisplayLoadingIndicator prints a spinning loading indicator until a value is received on the done channel.
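+// Typical usage:
+//
+//	done := make(chan bool)
+//	go displayindicator.DisplayLoadingIndicator(done)
+//	// ... long-running work ...
+//	done <- true // stops the indicator and clears the line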
+func DisplayLoadingIndicator(done chan bool) {
+	indicator := []string{"|", "/", "-", "\\"}
+	i := 0
+	for {
+		select {
+		case <-done:
+			fmt.Print("\r") // Clear the waiting indicator when done
+			return
+		default:
+			fmt.Printf("\r%s Generating...", indicator[i%len(indicator)])
+			i++
+			time.Sleep(200 * time.Millisecond)
+		}
+	}
+}
diff --git a/llama/extraction/extract.go b/llama/extraction/extract.go
new file mode 100644
index 0000000000000000000000000000000000000000..025fcba865ce0e6c1426a2800cab012c26afaf51
--- /dev/null
+++ b/llama/extraction/extract.go
@@ -0,0 +1,37 @@
+package extraction
+
+import (
+	"fmt"
+	"strings"
+)
+
+var GoPrompt = "The code should be in the Go programming language. There should also be 3 robust test cases within the same code. There should also be a main function inside of which all the execution takes place. Please only provide the source code and no further explanation, The format should be ```go <yourcode + testcases> ```"
+
+var RustPrompt = "The code should be in the Rust programming language. There should also be 3 robust test cases within the same code. There should also be a main function inside of which all the execution takes place. Please only provide the source code and no further explanation, The format should be ```rust <yourcode + testcases> ```"
+
+// func Extract(output string) string {
+// 	parts := strings.Split(output, "```")
+// 	var extracted = ""
+// 	if strings.Contains(parts[1], "rust") {
+// 		extracted = strings.TrimLeft(parts[1], "rust")
+// 	} else {
+// 		extracted = strings.TrimLeft(parts[1], "go")
+// 	}
+// 	return extracted
+// }
+
+// Extract extracts the code snippet between ``` and removes the language identifier.
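+// For example, "```go\nfunc main() {}\n```" yields "\nfunc main() {}\n".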
+func Extract(output string) (string, error) {
+	parts := strings.Split(output, "```")
+	if len(parts) < 2 {
+		return "", fmt.Errorf("the string wasn't in a proper format") // Handle the case if format is incorrect: Return empty string
+	}
+
+	// Trim the language identifier like `go` or `rust` from the code
+	code := parts[1]
+	lines := strings.SplitN(code, "\n", 2)
+	if len(lines) > 1 {
+		return "\n" + lines[1], nil // Return the code without the first line (language identifier)
+	}
+	return "", fmt.Errorf("the string doesn't contain any lines")
+}
diff --git a/llama/extraction/extract_test.go b/llama/extraction/extract_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..d2d7f3fc6c9799d4a249986d4274e20edb481682
--- /dev/null
+++ b/llama/extraction/extract_test.go
@@ -0,0 +1,82 @@
+package extraction
+
+import (
+	"testing"
+)
+
+// var GoInputs []string = []string{"```go\nfunc main() {}\n```", "```go\nfmt.Println('Hello World')\n```"}
+// var expectedGoOuputs []string = []string{"\nfunc main() {}\n", "\nfmt.Println('Hello World')\n"}
+
+// var RustInputs []string = []string{"```go\nfn main() {}\n```", "```go\nprintln!('Hello World')\n```"}
+// var expectedRustOuputs []string = []string{"\nfn main() {}\n", "\nprintln!('Hello World')\n"}
+
+// func TestExtraction(t *testing.T) {
+
+// 	t.Run("Golang Extraction 1", func(t *testing.T) {
+// 		var output = Extract(GoInputs[0])
+// 		if output != expectedGoOuputs[0] {
+// 			t.Error(output)
+// 		}
+// 	})
+
+// 	t.Run("Golang Extraction 2", func(t *testing.T) {
+// 		var output = Extract(GoInputs[1])
+// 		if output != expectedGoOuputs[1] {
+// 			t.Error(output)
+// 		}
+// 	})
+
+// 	t.Run("Rust Extraction 1", func(t *testing.T) {
+// 		var output = Extract(RustInputs[0])
+// 		if output != expectedRustOuputs[0] {
+// 			t.Error(output)
+// 		}
+// 	})
+
+// 	t.Run("Rust Extraction 2", func(t *testing.T) {
+// 		var output = Extract(RustInputs[1])
+// 		if output != expectedRustOuputs[1] {
+// 			t.Error(output)
+// 		}
+// 	})
+
+// }
+
+// Inputs and expected outputs for the test cases.
+// This is a table-driven test; the cases also serve as equivalence partitions over Go and Rust snippets.
+var testCases = []struct {
+	name     string
+	input    string
+	expected string
+}{
+	// Go Test Cases
+	{"Go Extraction 1 - Main", "```go\nfunc main() {}\n```", "\nfunc main() {}\n"},
+	{"Go Extraction 2 - Print", "```go\nfmt.Println('Hello World')\n```", "\nfmt.Println('Hello World')\n"},
+	{"Go Extraction 3 - Loop", "```go\nfor i := 0; i < 10; i++ {\nfmt.Println(i)\n}\n```", "\nfor i := 0; i < 10; i++ {\nfmt.Println(i)\n}\n"},
+	{"Go Extraction 4 - If Else", "```go\nif x > 10 {\nfmt.Println('Greater than 10')\n} else {\nfmt.Println('Less than or equal to 10')\n}\n```", "\nif x > 10 {\nfmt.Println('Greater than 10')\n} else {\nfmt.Println('Less than or equal to 10')\n}\n"},
+	{"Go Extraction 5 - Function with Parameters", "```go\nfunc add(a int, b int) int {\nreturn a + b\n}\n```", "\nfunc add(a int, b int) int {\nreturn a + b\n}\n"},
+	{"Go Extraction 6 - Nested Loops", "```go\nfor i := 0; i < 3; i++ {\nfor j := 0; j < 3; j++ {\nfmt.Printf('(%d, %d)', i, j)\n}\n}\n```", "\nfor i := 0; i < 3; i++ {\nfor j := 0; j < 3; j++ {\nfmt.Printf('(%d, %d)', i, j)\n}\n}\n"},
+	{"Go Extraction 7 - Invalid", "```go```", ""},
+
+	// Rust Test Cases
+	{"Rust Extraction 1 - Main", "```rust\nfn main() {}\n```", "\nfn main() {}\n"},
+	{"Rust Extraction 2 - Print", "```rust\nprintln!('Hello World')\n```", "\nprintln!('Hello World')\n"},
+	{"Rust Extraction 3 - Loop", "```rust\nfor i in 0..10 {\nprintf!(\"{}\", i);\n}\n```", "\nfor i in 0..10 {\nprintf!(\"{}\", i);\n}\n"},
+	{"Rust Extraction 4 - If Else", "```rust\nif x > 10 {\nprintln!(\"Greater than 10\");\n} else {\nprintln!(\"Less than or equal to 10\");\n}\n```", "\nif x > 10 {\nprintln!(\"Greater than 10\");\n} else {\nprintln!(\"Less than or equal to 10\");\n}\n"},
+	{"Rust Extraction 5 - Function with Parameters", "```rust\nfn add(a: i32, b: i32) -> i32 {\nreturn a + b;\n}\n```", "\nfn add(a: i32, b: i32) -> i32 {\nreturn a + b;\n}\n"},
+	{"Rust Extraction 6 - Nested Loops", "```rust\nfor i in 0..3 {\nfor j in 0..3 {\nprintf!(\"({},{})\", i, j);\n}\n}\n```", "\nfor i in 0..3 {\nfor j in 0..3 {\nprintf!(\"({},{})\", i, j);\n}\n}\n"},
+}
+
+// TestExtraction runs the table-driven cases defined above.
+func TestExtraction(t *testing.T) {
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			output, err := Extract(tc.input)
+			if output != tc.expected {
+				t.Errorf("Test %s failed: Expected %q, got %q", tc.name, tc.expected, output)
+				if err != nil {
+					t.Log(err) // err may be nil when only the extracted text differs
+				}
+			}
+		})
+	}
+}
diff --git a/src/go.mod b/llama/go.mod
similarity index 100%
rename from src/go.mod
rename to llama/go.mod
diff --git a/llama/main.go b/llama/main.go
new file mode 100644
index 0000000000000000000000000000000000000000..7e2b774cc682571797c15c259fadb9a2f46f6500
--- /dev/null
+++ b/llama/main.go
@@ -0,0 +1,71 @@
+package main
+
+import (
+	"bufio"
+	"fmt"
+	"llama/compiler"
+	displayindicator "llama/display-indicator"
+	"llama/extraction"
+	ollamaimplementation "llama/ollama-implementation"
+	"os"
+	"strings"
+)
+
+func main() {
+	reader := bufio.NewReader(os.Stdin)
+	var conversationContext []int // Variable to store conversation context
+
+	fmt.Print("Enter your prompt (or type 'exit' to quit): ")
+	userPrompt, _ := reader.ReadString('\n')
+	userPrompt = strings.TrimSpace(userPrompt)
+
+	for {
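+		// Each iteration sends the current prompt to Ollama; on a compile failure the compiler
+		// output is fed back as the next prompt, while success or a malformed response ends the loop.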
+
+		if userPrompt == "exit" {
+			fmt.Println("Exiting the program.")
+			break
+		}
+
+		var modifiedPrompt = userPrompt + extraction.GoPrompt
+
+		fmt.Println("Prompt received. Generating response...")
+
+		// Start a go routine to display a waiting indicator while the response is being generated
+		done := make(chan bool)
+		go displayindicator.DisplayLoadingIndicator(done)
+
+		// Generate response using Ollama API, passing the context
+		response, updatedContext, err := ollamaimplementation.GetOllamaResponse(modifiedPrompt, conversationContext)
+
+		// Signal the waiting indicator to stop
+		done <- true
+
+		if err != nil {
+			fmt.Println("Error generating response:", err)
+			continue
+		}
+
+		// Update the conversation context with the response
+		conversationContext = updatedContext
+
+		generatedCode, extractErr := extraction.Extract(response) // Extract the fenced code block from the response
+
+		if extractErr != nil {
+			fmt.Printf("The LLM gave an improper string in response: %v\n", response)
+			userPrompt = "exit"
+			continue
+		}
+
+		fmt.Println("Ollama's response:", generatedCode)
+
+		output, err := compiler.CompileStringToGo(generatedCode)
+
+		if err != nil {
+			userPrompt = output + "\nFollowing are the errors, please fix the code. Write it again, and write only the source code along with the same test cases and no further explanation. The format should be ```go <yourcode + testcases> ```"
+		} else {
+			fmt.Printf("Compiled successfully. Here is the output: %v\n", output)
+			userPrompt = "exit"
+		}
+
+	}
+}
diff --git a/llama/ollama-implementation/ollama.go b/llama/ollama-implementation/ollama.go
new file mode 100644
index 0000000000000000000000000000000000000000..820c374d2da7f1c9b2c4e137da7d6a4b9d21f5f8
--- /dev/null
+++ b/llama/ollama-implementation/ollama.go
@@ -0,0 +1,72 @@
+package ollamaimplementation
+
+import (
+	"bytes"
+	"encoding/json"
+	"net/http"
+)
+
+var OllamaEndpoint = "http://localhost:11434/api/generate" // The local endpoint for the Ollama API
+
+// Struct for request to Ollama API
+type OllamaRequest struct {
+	Prompt  string `json:"prompt"`
+	Model   string `json:"model"`
+	Context []int  `json:"context,omitempty"` // Context to maintain conversation
+}
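+
+// With the json tags above, a request body is serialized as, for example (illustrative values):
+//	{"prompt":"Write hello world in Go","model":"llama3.1","context":[1,2,3]}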
+
+// Struct for response from Ollama API
+type OllamaResponse struct {
+	Model              string `json:"model"`
+	CreatedAt          string `json:"created_at"`
+	Response           string `json:"response"`
+	Done               bool   `json:"done"`
+	DoneReason         string `json:"done_reason,omitempty"`
+	Context            []int  `json:"context,omitempty"` // Updated context
+	TotalDuration      int64  `json:"total_duration,omitempty"`
+	LoadDuration       int64  `json:"load_duration,omitempty"`
+	PromptEvalCount    int    `json:"prompt_eval_count,omitempty"`
+	PromptEvalDuration int64  `json:"prompt_eval_duration,omitempty"`
+	EvalCount          int    `json:"eval_count,omitempty"`
+	EvalDuration       int64  `json:"eval_duration,omitempty"`
+}
+
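+// GetOllamaResponse sends the prompt together with the previous conversation context to the
+// local Ollama /api/generate endpoint, accumulates the streamed JSON chunks, and returns the
+// complete response text along with the updated context for the next request.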
+func GetOllamaResponse(prompt string, context []int) (string, []int, error) {
+	// Create request payload with the model specified and context
+	requestBody, err := json.Marshal(OllamaRequest{
+		Prompt:  prompt,
+		Model:   "llama3.1",
+		Context: context, // Pass the conversation context
+	})
+	if err != nil {
+		return "", nil, err
+	}
+
+	// Send HTTP POST request to Ollama API
+	resp, err := http.Post(OllamaEndpoint, "application/json", bytes.NewBuffer(requestBody))
+	if err != nil {
+		return "", nil, err
+	}
+	defer resp.Body.Close()
+
+	// Read and accumulate response body in chunks
+	var completeResponse string
+	var updatedContext []int
+	decoder := json.NewDecoder(resp.Body)
+	for decoder.More() {
+		var chunk OllamaResponse
+		if err := decoder.Decode(&chunk); err != nil {
+			return "", nil, err
+		}
+		completeResponse += chunk.Response
+
+		// Capture the updated context from the response
+		updatedContext = chunk.Context
+
+		if chunk.Done {
+			break
+		}
+	}
+
+	return completeResponse, updatedContext, nil
+}
diff --git a/llama/ollama-implementation/ollama_test.go b/llama/ollama-implementation/ollama_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..1ff5f018322fee8803b0081584a5e388667af357
--- /dev/null
+++ b/llama/ollama-implementation/ollama_test.go
@@ -0,0 +1,111 @@
+package ollamaimplementation
+
+import (
+	"encoding/json"
+	"io/ioutil"
+	"llama/extraction"
+	"net/http"
+	"net/http/httptest"
+	"strings"
+	"testing"
+)
+
+// Mock OllamaResponse for testing
+var mockResponse = OllamaResponse{
+	Model:    "llama3.1",
+	Response: "This is a mock response.",
+	Done:     true,
+	Context:  []int{1, 2, 3},
+}
+
+// TestGetOllamaResponse verifies the request payload and response handling against a mock HTTP server.
+func TestGetOllamaResponse(t *testing.T) {
+	// Create a mock HTTP server
+	mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		// Check if the request method is POST
+		if r.Method != http.MethodPost {
+			t.Errorf("Expected POST request, got %s", r.Method)
+		}
+
+		// Read and verify the request body
+		body, err := ioutil.ReadAll(r.Body)
+		if err != nil {
+			t.Fatalf("Failed to read request body: %v", err)
+		}
+
+		// Verify the request payload contains the expected prompt and model
+		if !strings.Contains(string(body), `"prompt":"Test prompt"`) || !strings.Contains(string(body), `"model":"llama3.1"`) {
+			t.Errorf("Request body is not as expected: %s", string(body))
+		}
+
+		// Send a mock response
+		responseData, _ := json.Marshal(mockResponse)
+		w.Header().Set("Content-Type", "application/json")
+		w.Write(responseData)
+	}))
+	defer mockServer.Close()
+
+	// Temporarily replace the ollamaEndpoint with the mock server's URL
+	originalEndpoint := OllamaEndpoint
+	OllamaEndpoint = mockServer.URL
+	defer func() { OllamaEndpoint = originalEndpoint }() // Restore the original endpoint after the test
+
+	// Call the function to be tested
+	prompt := "Test prompt"
+	context := []int{}
+	response, updatedContext, err := GetOllamaResponse(prompt, context)
+
+	if err != nil {
+		t.Fatalf("Expected no error, got %v", err)
+	}
+
+	// Verify the response content
+	expectedResponse := "This is a mock response."
+	if response != expectedResponse {
+		t.Errorf("Expected response %q, got %q", expectedResponse, response)
+	}
+
+	// Verify the updated context
+	expectedContext := []int{1, 2, 3}
+	if len(updatedContext) != len(expectedContext) {
+		t.Fatalf("Expected context %v, got %v", expectedContext, updatedContext)
+	}
+	for i, val := range updatedContext {
+		if val != expectedContext[i] {
+			t.Errorf("Expected context %v, got %v", expectedContext, updatedContext)
+		}
+	}
+}
+
+// Test for prompts.
+var promptTestCases = []struct {
+	name          string
+	prompt        string
+	suffixStr     string
+	shouldContain []string
+}{
+	{"5 Even Integers GO", "Write a program that generates 5 random integers.", extraction.GoPrompt, []string{"```go", "```"}},
+	{"Sort the array using mergesort GO", "Write a program that sorts the array [23, 2, 0, -1, 89, 500] using mergesort.", extraction.GoPrompt, []string{"```go", "```"}},
+	{"Reverse the string GO.", "Reverse the string 'ammar'", extraction.GoPrompt, []string{"```go", "```"}},
+
+	{"5 Even Integers rust", "Write a program that generates 5 random integers.", extraction.RustPrompt, []string{"```rust", "```"}},
+	{"Sort the array using mergesort rust", "Write a program that sorts the array [23, 2, 0, -1, 89, 500] using mergesort.", extraction.RustPrompt, []string{"```rust", "```"}},
+	{"Reverse the string rust.", "Reverse the string 'ammar'", extraction.RustPrompt, []string{"```rust", "```"}},
+}
+
+// Test function to verify the prefix and suffix of responses from GetOllamaResponse
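+// Note: unlike TestGetOllamaResponse above, these cases call the real OllamaEndpoint, so they
+// require a locally running Ollama server with the llama3.1 model available.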
+func TestGetOllamaResponsePrompts(t *testing.T) {
+	for _, tc := range promptTestCases {
+		t.Run(tc.name, func(t *testing.T) {
+			// Call the function to get the response
+			response, _, _ := GetOllamaResponse(tc.prompt+tc.suffixStr, []int{})
+
+			// Check if the response starts with the expected prefix
+			if strings.HasPrefix(response, tc.shouldContain[0]) {
+				// Check if the response ends with "```"
+				if !strings.HasSuffix(response, "```") {
+					t.Errorf("Test %s failed: expected response to end with ```; got %q", tc.name, response)
+				}
+			} else {
+				t.Errorf("Test %s failed: expected response to start with %q; got %q", tc.name, tc.shouldContain[0], response)
+			}
+		})
+	}
+}
diff --git a/src/preliminary-implementation/main.go b/src/preliminary-implementation/main.go
deleted file mode 100644
index e0227fb971df4914bed3d55b9aae8a7ae094bedc..0000000000000000000000000000000000000000
--- a/src/preliminary-implementation/main.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package main
-
-import (
-	"fmt"
-)
-
-const ollamaEndpoint = "http://localhost:11434/api/generate" // The local endpoint for the Ollama API
-
-// Struct for request to Ollama API
-type OllamaRequest struct {
-	Prompt string `json:"prompt"`
-	Model  string `json:"model"`
-}
-
-func main() {
-	fmt.Println(ollamaEndpoint)
-}
diff --git a/src_old/compiler/compiler.go b/src_old/compiler/compiler.go
new file mode 100644
index 0000000000000000000000000000000000000000..a1d3b5dc3de2f37b60e6d4401d834ec1c434dfbf
--- /dev/null
+++ b/src_old/compiler/compiler.go
@@ -0,0 +1,255 @@
+package compiler
+
+import (
+	"log"
+	"os"
+	"os/exec"
+	"runtime"
+	"strings"
+)
+
+const TempOutputDir = "tempOutput/"
+const TempModuleName = "tempOutput"
+
+type Language string
+
+// Supported languages
+const (
+	Go   Language = "go"
+	Rust Language = "rust"
+)
+
+type OS string
+
+// Supported OS
+const (
+	Windows OS = "windows"
+	Linux   OS = "linux"
+	MacOS   OS = "darwin" // Darwin is the kernel of macOS
+)
+
+// TODO: I want to make an interface for a compilable language, so that we can add more languages in the future
+// TODO: The cmd might also be an interface or a struct, so that it can build itself based on the OS and language
+// TODO: A cleanup and panic might be needed in setup because if it panics the temp folders should be removed
+// TODO: I am not sure that the setup should panic, maybe it should return an error instead so its easier to clean up
+
+type Compiler struct {
+	OS            OS
+	Language      Language
+	languageEnv   ILanguageEnvironment
+	SourceCode    string
+	Filename      string
+	cmdPrefix     string // For example "cmd /c" on Windows
+	Dependencies  []string
+	tempOutputDir string
+}
+
+type ICompiler interface {
+	Compile() (string, error)
+}
+
+type GoEnvironment struct {
+}
+
+// SetupEnvironment initializes the Go environment by creating a go module and running go mod tidy. Exits the program if it fails.
+func (ge *GoEnvironment) SetupEnvironment(cmdPrefix string) {
+	// One string
+	cmdString := cmdPrefix + " go mod init " + TempModuleName + " && go mod tidy"
+	// Split the string into a slice
+	cmdSlice := strings.Fields(cmdString) // Fields splits the strings around each instance of one or more consecutive white space characters
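+	// NOTE: exec.Command does not invoke a shell, so the "&&" is only honoured when a shell prefix such as
+	// "cmd /c" is present; with an empty prefix (Linux/macOS) it is passed to "go mod init" as a literal argument.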
+
+	// Make the command
+	cmd := exec.Command(cmdSlice[0], cmdSlice[1:]...)
+	// Set its target directory
+	cmd.Dir = TempOutputDir
+	// Execute the command
+	err := cmd.Run()
+	if err != nil {
+		removeTempFolders(TempOutputDir)
+		log.Fatalf("Error initializing go module: %v", err)
+	}
+}
+
+func (ge *GoEnvironment) CheckCompileErrors(filename string, language Language, cmdPrefix string) (string, error) {
+
+	srcCodeFilename := appendSuffix(filename, language)
+	compiledFilename := filename
+
+	cmdString := cmdPrefix + " go build -o " + compiledFilename + " " + srcCodeFilename
+	cmdSlice := strings.Fields(cmdString) // Fields splits the string on white space of variable length
+
+	cmd := exec.Command(cmdSlice[0], cmdSlice[1:]...)
+	cmd.Dir = TempOutputDir
+	output, err := cmd.CombinedOutput()
+	return string(output), err
+}
+
+func InitCompiler(OS OS, language Language, sourceCode string, filename string, dependencies []string) ICompiler {
+	compiler := &Compiler{}
+	compiler.OS = OS
+	compiler.Language = language
+	compiler.SourceCode = sourceCode
+	compiler.Filename = filename
+	compiler.Dependencies = dependencies
+	compiler.cmdPrefix = getOsPrefix(OS)
+	compiler.languageEnv = getLanguageEnv(language)
+	return compiler
+
+}
+
+func getOsPrefix(OS OS) string {
+	// Set the cmd prefix based on the OS
+	switch OS {
+	case Windows:
+		return "cmd /c "
+	case Linux, MacOS:
+		return ""
+	default:
+		panic("Unsupported OS")
+	}
+}
+
+func getLanguageEnv(language Language) ILanguageEnvironment {
+	switch language {
+	case Go:
+		return &GoEnvironment{}
+	case Rust:
+		return &RustEnvironment{}
+	default:
+		panic("Unsupported language")
+	}
+}
+
+type ILanguageEnvironment interface {
+	SetupEnvironment(cmdPrefix string)
+	CheckCompileErrors(filename string, language Language, cmdPrefix string) (string, error)
+}
+
+type RustEnvironment struct {
+	Compiler
+}
+
+// SetupEnvironment initializes the Rust environment by creating a cargo project and adding dependencies. Exits the program if it fails.
+func (re *RustEnvironment) SetupEnvironment(cmdPrefix string) {
+	// Initialize the rust cargo project--------------------------------------------------------------------------------
+	// Command to initialize a cargo project
+	cmdString := cmdPrefix + " cargo init --bin " + TempModuleName
+	// Split the string into a slice
+	cmdSlice := strings.Fields(cmdString)
+	// Make the command
+	cmd := exec.Command(cmdSlice[0], cmdSlice[1:]...)
+	// Set its target directory
+	cmd.Dir = TempOutputDir
+	// Execute the command
+	err := cmd.Run()
+	if err != nil {
+		log.Fatalf("Error initializing rust project: %v", err)
+	}
+
+	// Write the source code to a file----------------------------------------------------------------------------------
+
+	// Create a new file with the source code in the src folder
+	// 0644 is the file permission where the user can read and write the file, and the group and others can only read the file.
+	err = os.WriteFile(TempOutputDir+TempModuleName+"/src/"+re.Filename, []byte(re.SourceCode), 0644)
+	if err != nil {
+		log.Fatalf("Error writing source code to file: %v", err)
+	}
+
+	// Update rust dependencies in cargo.toml file using cargo add (cargo-edit)-----------------------------------------
+	addCommand := re.cmdPrefix + " cargo add"
+	addSlice := strings.Fields(addCommand) // Fields avoids an empty leading element when the prefix is empty
+	addSlice = append(addSlice, re.Dependencies...)
+	addCmd := exec.Command(addSlice[0], addSlice[1:]...)
+	addCmd.Dir = TempOutputDir + TempModuleName // cargo add must run inside the generated cargo project
+	err = addCmd.Run()
+	if err != nil {
+		log.Fatalf("Error adding dependencies: %v", err)
+	}
+}
+
+func (re *RustEnvironment) CheckCompileErrors(filename string, language Language, cmdPrefix string) (string, error) {
+
+	srcCodeFilename := TempOutputDir + appendSuffix(filename, language)
+	cmdSlice := strings.Fields(cmdPrefix + " cargo check " + srcCodeFilename)
+	output, err := exec.Command(cmdSlice[0], cmdSlice[1:]...).CombinedOutput()
+	return string(output), err
+}
+
+/*
+The compiler pipeline:
+1. Set up the OS and language
+2. Set up the temp folders
+3. Set up the language environment
+4. Write the source code to a file
+5. Check the code for compile errors and return the output and error
+*/
+
+func (c *Compiler) Compile() (string, error) {
+	// Set up temp folders
+	setupTempFolders(TempOutputDir)
+	defer removeTempFolders(TempOutputDir)
+
+	srcCodeFilename := TempOutputDir + appendSuffix(c.Filename, c.Language)
+	//compiledFilename := TempOutputDir + c.Filename
+
+	// Set up either the Go or Rust environment through the language-environment interface
+	c.languageEnv.SetupEnvironment(c.cmdPrefix)
+
+	// Write the source code to a file
+	err := os.WriteFile(srcCodeFilename, []byte(c.SourceCode), 0644)
+	if err != nil {
+		log.Fatalf("Error writing source code to file: %v", err)
+	}
+
+	// Check the code for compile errors and return the compiler output
+	return c.languageEnv.CheckCompileErrors(c.Filename, c.Language, c.cmdPrefix)
+}
+
+// appendSuffix appends the suffix to the filename if it is not already there depending on the language, panics if the language is not supported
+func appendSuffix(filename string, language Language) string {
+
+	suffix := ""
+	switch language {
+	case Go:
+		suffix = ".go"
+	case Rust:
+		suffix = ".rs"
+	default:
+		panic("Unsupported language")
+	}
+
+	// We check if the filename already has the suffix, if not we append it
+	if !strings.HasSuffix(filename, suffix) {
+		filename += suffix
+	}
+	return filename
+}
+
+// setupTempFolders creates the temp output directory for compiled files, panics if it fails
+func setupTempFolders(tempOutputDir string) {
+	// 0777 are the permissions for the directory, everyone can read, write and execute
+	err := os.MkdirAll(tempOutputDir, os.ModePerm)
+	if err != nil {
+		panic("Error creating temp output directory:\n\n" + err.Error())
+	}
+}
+
+// removeTempFolders removes the temp output directory for compiled files, panics if it fails
+func removeTempFolders(tempOutputDir string) {
+	err := os.RemoveAll(tempOutputDir)
+	if err != nil {
+		panic("Error removing temp output directory:\n\n" + err.Error())
+	}
+}
+
+func CompileStringToGo(code string, filename string, dependencies []string) (string, error) {
+
+	// Get the OS
+	os := runtime.GOOS
+
+	// SetupEnvironment
+	return InitCompiler(OS(os), Go, code, filename, dependencies).Compile()
+}
+
+func CompileStringToRust(code string, filename string, dependencies []string) (string, error) {
+
+	// Get the OS
+	os := runtime.GOOS
+
+	// SetupEnvironment
+	return InitCompiler(OS(os), Rust, code, filename, dependencies).Compile()
+}
diff --git a/src_old/context-holder/context.go b/src_old/context-holder/context.go
new file mode 100644
index 0000000000000000000000000000000000000000..ae422e6edd34cac6aa68c90ec45692687ffbca00
--- /dev/null
+++ b/src_old/context-holder/context.go
@@ -0,0 +1,106 @@
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"os"
+	"strings"
+)
+
+const ollamaEndpoint = "http://localhost:11434/api/generate" // The local endpoint for the Ollama API
+
+// Struct for request to Ollama API
+type OllamaRequest struct {
+	Prompt  string `json:"prompt"`
+	Model   string `json:"model"`
+	Context []int  `json:"context,omitempty"` // Context to maintain conversation
+}
+
+// Struct for response from Ollama API
+type OllamaResponse struct {
+	Model              string `json:"model"`
+	CreatedAt          string `json:"created_at"`
+	Response           string `json:"response"`
+	Done               bool   `json:"done"`
+	DoneReason         string `json:"done_reason,omitempty"`
+	Context            []int  `json:"context,omitempty"` // Updated context
+	TotalDuration      int64  `json:"total_duration,omitempty"`
+	LoadDuration       int64  `json:"load_duration,omitempty"`
+	PromptEvalCount    int    `json:"prompt_eval_count,omitempty"`
+	PromptEvalDuration int64  `json:"prompt_eval_duration,omitempty"`
+	EvalCount          int    `json:"eval_count,omitempty"`
+	EvalDuration       int64  `json:"eval_duration,omitempty"`
+}
+
+func main() {
+	reader := bufio.NewReader(os.Stdin)
+	var conversationContext []int // Variable to store conversation context
+
+	for {
+
+		fmt.Print("Enter your prompt (or type 'exit' to quit): ")
+		userPrompt, _ := reader.ReadString('\n')
+		userPrompt = strings.TrimSpace(userPrompt)
+
+		if userPrompt == "exit" {
+			fmt.Println("Exiting the program.")
+			break
+		}
+
+		// Generate response using Ollama API, passing the context
+		response, updatedContext, err := getOllamaResponse(userPrompt, conversationContext)
+		if err != nil {
+			fmt.Println("Error generating response:", err)
+			continue
+		}
+
+		// Update the conversation context with the response
+		conversationContext = updatedContext
+
+		fmt.Println("Ollama's response:", response)
+	}
+}
+
+// Function to make a POST request to Ollama API
+func getOllamaResponse(prompt string, context []int) (string, []int, error) {
+	// Create request payload with the model specified and context
+	requestBody, err := json.Marshal(OllamaRequest{
+		Prompt:  prompt,
+		Model:   "llama3.1",
+		Context: context, // Pass the conversation context
+	})
+	if err != nil {
+		return "", nil, err
+	}
+
+	// Send HTTP POST request to Ollama API
+	resp, err := http.Post(ollamaEndpoint, "application/json", bytes.NewBuffer(requestBody))
+	if err != nil {
+		return "", nil, err
+	}
+	defer resp.Body.Close()
+
+	// Read and accumulate response body in chunks
+	var completeResponse string
+	var updatedContext []int
+	decoder := json.NewDecoder(resp.Body)
+	for decoder.More() {
+		var chunk OllamaResponse
+		if err := decoder.Decode(&chunk); err != nil {
+			return "", nil, err
+		}
+		completeResponse += chunk.Response
+
+		// Capture the updated context from the response
+		updatedContext = chunk.Context
+
+		if chunk.Done {
+			break
+		}
+	}
+
+	return completeResponse, updatedContext, nil
+}
diff --git a/src_old/context-with-indicators/context-with-indicators.go b/src_old/context-with-indicators/context-with-indicators.go
new file mode 100644
index 0000000000000000000000000000000000000000..4d7ea215a3a32f5aedcde73e27cde36f0ea9ad68
--- /dev/null
+++ b/src_old/context-with-indicators/context-with-indicators.go
@@ -0,0 +1,134 @@
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"os"
+	"strings"
+	"time"
+)
+
+const ollamaEndpoint = "http://localhost:11434/api/generate" // The local endpoint for the Ollama API
+
+// Struct for request to Ollama API
+type OllamaRequest struct {
+	Prompt  string `json:"prompt"`
+	Model   string `json:"model"`
+	Context []int  `json:"context,omitempty"` // Context to maintain conversation
+}
+
+// Struct for response from Ollama API
+type OllamaResponse struct {
+	Model              string `json:"model"`
+	CreatedAt          string `json:"created_at"`
+	Response           string `json:"response"`
+	Done               bool   `json:"done"`
+	DoneReason         string `json:"done_reason,omitempty"`
+	Context            []int  `json:"context,omitempty"` // Updated context
+	TotalDuration      int64  `json:"total_duration,omitempty"`
+	LoadDuration       int64  `json:"load_duration,omitempty"`
+	PromptEvalCount    int    `json:"prompt_eval_count,omitempty"`
+	PromptEvalDuration int64  `json:"prompt_eval_duration,omitempty"`
+	EvalCount          int    `json:"eval_count,omitempty"`
+	EvalDuration       int64  `json:"eval_duration,omitempty"`
+}
+
+func main() {
+	reader := bufio.NewReader(os.Stdin)
+	var conversationContext []int // Variable to store conversation context
+
+	for {
+
+		fmt.Print("Enter your prompt (or type 'exit' to quit): ")
+		userPrompt, _ := reader.ReadString('\n')
+		userPrompt = strings.TrimSpace(userPrompt)
+
+		if userPrompt == "exit" {
+			fmt.Println("Exiting the program.")
+			break
+		}
+
+		fmt.Println("Prompt received. Generating response...")
+
+		// Start a go routine to display a waiting indicator while the response is being generated
+		done := make(chan bool)
+		go displayLoadingIndicator(done)
+
+		// Generate response using Ollama API, passing the context
+		response, updatedContext, err := getOllamaResponse(userPrompt, conversationContext)
+
+		// Signal the waiting indicator to stop
+		done <- true
+
+		if err != nil {
+			fmt.Println("Error generating response:", err)
+			continue
+		}
+
+		// Update the conversation context with the response
+		conversationContext = updatedContext
+
+		fmt.Println("Ollama's response:", response)
+	}
+}
+
+// Function to display a waiting/loading indicator
+func displayLoadingIndicator(done chan bool) {
+	indicator := []string{"|", "/", "-", "\\"}
+	i := 0
+	for {
+		select {
+		case <-done:
+			fmt.Print("\r") // Clear the waiting indicator when done
+			return
+		default:
+			fmt.Printf("\r%s Generating...", indicator[i%len(indicator)])
+			i++
+			time.Sleep(200 * time.Millisecond)
+		}
+	}
+}
+
+// Function to make a POST request to Ollama API
+func getOllamaResponse(prompt string, context []int) (string, []int, error) {
+	// Create request payload with the model specified and context
+	requestBody, err := json.Marshal(OllamaRequest{
+		Prompt:  prompt,
+		Model:   "llama3.1",
+		Context: context, // Pass the conversation context
+	})
+	if err != nil {
+		return "", nil, err
+	}
+
+	// Send HTTP POST request to Ollama API
+	resp, err := http.Post(ollamaEndpoint, "application/json", bytes.NewBuffer(requestBody))
+	if err != nil {
+		return "", nil, err
+	}
+	defer resp.Body.Close()
+
+	// Read and accumulate response body in chunks
+	var completeResponse string
+	var updatedContext []int
+	decoder := json.NewDecoder(resp.Body)
+	for decoder.More() {
+		var chunk OllamaResponse
+		if err := decoder.Decode(&chunk); err != nil {
+			return "", nil, err
+		}
+		completeResponse += chunk.Response
+
+		// Capture the updated context from the response
+		updatedContext = chunk.Context
+
+		if chunk.Done {
+			break
+		}
+	}
+
+	return completeResponse, updatedContext, nil
+}
diff --git a/src_old/extraction/extract.go b/src_old/extraction/extract.go
new file mode 100644
index 0000000000000000000000000000000000000000..d15d87a773a61ede023fdbceb7b30e3495b96ddf
--- /dev/null
+++ b/src_old/extraction/extract.go
@@ -0,0 +1,91 @@
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"os"
+	"strings"
+)
+
+const ollamaEndpoint = "http://localhost:11434/api/generate" // The local endpoint for the Ollama API
+
+// Struct for request to Ollama API
+type OllamaRequest struct {
+	Prompt string `json:"prompt"`
+	Model  string `json:"model"`
+}
+
+// Struct for response from Ollama API
+type OllamaResponse struct {
+	Response string `json:"response"`
+	Done     bool   `json:"done"`
+}
+
+func main() {
+	reader := bufio.NewReader(os.Stdin)
+	fmt.Print("Enter your prompt: ")
+	userPrompt, _ := reader.ReadString('\n')
+
+	// Modify the user prompt so the generated code is in Rust or Go; for now it is always Go.
+	var userPromptModified string = userPrompt + "The code should be in the Go programming language. There should also be 3 robust test cases within the same code. There should also be a main function inside of which all the execution takes place. Please only provide the source code and no further explanation. The format should be ```go <yourcode + testcases> ```"
+
+	// if true {
+	// 	userPromptModified = userPrompt + "The code should be in the Go programming language. There should also be 3 robust test cases within the same code. There should also be a main function inside of which all the execution takes place. Please only provide the source code and no further explanation, The format should be ```go <yourcode + testcases> ```"
+	// } else {
+	// 	userPromptModified = userPrompt + "The code should be in the Rust programming language. There should also be 3 robust test cases within the same code. There should also be a main function inside of which all the execution takes place. Please only provide the source code and no further explanation, The format should be ```rust <yourcode + testcases> ```"
+	// }
+
+	fmt.Println(userPromptModified)
+
+	// Generate response using Ollama API
+	requestBody, err := json.Marshal(OllamaRequest{
+		Prompt: userPromptModified,
+		Model:  "llama3.1",
+	})
+	if err != nil {
+		fmt.Println("Error generating response:", err)
+		return
+	}
+
+	// Send HTTP POST request to Ollama API
+	resp, err := http.Post(ollamaEndpoint, "application/json", bytes.NewBuffer(requestBody))
+	if err != nil {
+		fmt.Println("Error generating response:", err)
+		return
+	}
+	defer resp.Body.Close()
+
+	// Read and accumulate response body in chunks
+	var completeResponse string
+	decoder := json.NewDecoder(resp.Body)
+	for decoder.More() {
+		var chunk OllamaResponse
+		if err := decoder.Decode(&chunk); err != nil {
+			return
+		}
+		completeResponse += chunk.Response
+
+		if chunk.Done {
+			break
+		}
+	}
+
+	//fmt.Println("Ollama's response:", completeResponse)
+	var out = extract(completeResponse)
+	fmt.Println(out)
+}
+
+// extract splits the output on ``` and returns the first fenced block with its language identifier trimmed.
+func extract(output string) string {
+	parts := strings.Split(output, "```")
+	var extracted = ""
+	if strings.Contains(parts[1], "rust") {
+		extracted = strings.TrimLeft(parts[1], "rust")
+	} else {
+		extracted = strings.TrimLeft(parts[1], "go")
+	}
+	return extracted
+}
diff --git a/src_old/go.mod b/src_old/go.mod
new file mode 100644
index 0000000000000000000000000000000000000000..11ec06cebd377f31fe77133de40ea82b9594caa0
--- /dev/null
+++ b/src_old/go.mod
@@ -0,0 +1,3 @@
+module src
+
+go 1.23.1
diff --git a/src_old/loop-implementation/loop.go b/src_old/loop-implementation/loop.go
new file mode 100644
index 0000000000000000000000000000000000000000..179c63ec956b2129d47afe13660465cf0d5d806f
--- /dev/null
+++ b/src_old/loop-implementation/loop.go
@@ -0,0 +1,74 @@
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"os"
+)
+
+const ollamaEndpoint = "http://localhost:11434/api/generate" // The local endpoint for the Ollama API
+
+// Struct for request to Ollama API
+type OllamaRequest struct {
+	Prompt string `json:"prompt"`
+	Model  string `json:"model"`
+}
+
+// Struct for response from Ollama API
+type OllamaResponse struct {
+	Response string `json:"response"`
+	Done     bool   `json:"done"`
+}
+
+func main() {
+
+	for {
+		reader := bufio.NewReader(os.Stdin)
+		fmt.Print("Enter your prompt: ")
+		userPrompt, _ := reader.ReadString('\n')
+		userPrompt = strings.TrimSpace(userPrompt) // Trim the trailing newline so the "exit" check below can match
+
+		// Generate response using Ollama API
+
+		if userPrompt == "exit" {
+			fmt.Println("Exiting the program.")
+			break
+		}
+
+		requestBody, err := json.Marshal(OllamaRequest{
+			Prompt: userPrompt,
+			Model:  "llama3.1",
+		})
+		if err != nil {
+			fmt.Println("Error generating response:", err)
+			return
+		}
+
+		// Send HTTP POST request to Ollama API
+		resp, err := http.Post(ollamaEndpoint, "application/json", bytes.NewBuffer(requestBody))
+		if err != nil {
+			fmt.Println("Error generating response:", err)
+			return
+		}
+		defer resp.Body.Close()
+
+		// Read and accumulate response body in chunks
+		var completeResponse string
+		decoder := json.NewDecoder(resp.Body)
+		for decoder.More() {
+			var chunk OllamaResponse
+			if err := decoder.Decode(&chunk); err != nil {
+				return
+			}
+			completeResponse += chunk.Response
+
+			if chunk.Done {
+				break
+			}
+		}
+
+		fmt.Println("Ollama's response:", completeResponse)
+	}
+}
diff --git a/src_old/preliminary-implementation/preliminary.go b/src_old/preliminary-implementation/preliminary.go
new file mode 100644
index 0000000000000000000000000000000000000000..a38ede5676bf34ae9ea9877eb8fceb8fdd2e6878
--- /dev/null
+++ b/src_old/preliminary-implementation/preliminary.go
@@ -0,0 +1,66 @@
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"os"
+)
+
+const ollamaEndpoint = "http://localhost:11434/api/generate" // The local endpoint for the Ollama API
+
+// Struct for request to Ollama API
+type OllamaRequest struct {
+	Prompt string `json:"prompt"`
+	Model  string `json:"model"`
+}
+
+// Struct for response from Ollama API
+type OllamaResponse struct {
+	Response string `json:"response"`
+	Done     bool   `json:"done"`
+}
+
+func main() {
+	reader := bufio.NewReader(os.Stdin)
+	fmt.Print("Enter your prompt: ")
+	userPrompt, _ := reader.ReadString('\n')
+
+	// Generate response using Ollama API
+
+	requestBody, err := json.Marshal(OllamaRequest{
+		Prompt: userPrompt,
+		Model:  "llama3.1",
+	})
+	if err != nil {
+		fmt.Println("Error generating response:", err)
+		return
+	}
+
+	// Send HTTP POST request to Ollama API
+	resp, err := http.Post(ollamaEndpoint, "application/json", bytes.NewBuffer(requestBody))
+	if err != nil {
+		fmt.Println("Error generating response:", err)
+		return
+	}
+	defer resp.Body.Close()
+
+	// Read and accumulate response body in chunks
+	var completeResponse string
+	decoder := json.NewDecoder(resp.Body)
+	for decoder.More() {
+		var chunk OllamaResponse
+		if err := decoder.Decode(&chunk); err != nil {
+			return
+		}
+		completeResponse += chunk.Response
+
+		if chunk.Done {
+			break
+		}
+	}
+
+	fmt.Println("Ollama's response:", completeResponse)
+}