diff --git a/src/main/java/com/application/Credentials.java b/src/main/java/com/application/Credentials.java
deleted file mode 100644
index e0898e751c8a0f41e4fb8688bd7bf94a4246e12b..0000000000000000000000000000000000000000
--- a/src/main/java/com/application/Credentials.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package com.application;
-
-public class Credentials {
-    String projectId = "sf-drying-optimization";
-}
diff --git a/src/main/java/com/application/DB/DB.java b/src/main/java/com/application/DB/DB.java
new file mode 100644
index 0000000000000000000000000000000000000000..4eed14a8108065de116f9d494258e4eaebb7c9ec
--- /dev/null
+++ b/src/main/java/com/application/DB/DB.java
@@ -0,0 +1,78 @@
+package com.application.DB;
+
+import com.google.auth.oauth2.GoogleCredentials;
+import com.google.auth.oauth2.ServiceAccountCredentials;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.FieldValueList;
+import com.google.cloud.bigquery.Job;
+import com.google.cloud.bigquery.JobInfo;
+import com.google.cloud.bigquery.QueryJobConfiguration;
+import com.google.cloud.bigquery.TableResult;
+
+import java.io.File;
+import java.io.FileInputStream;
+
+public class DB {
+
+    private static GoogleCredentials getCredentials() throws Exception {
+        File credentialsPath = new File("src/main/resources/com.application/sf-drying-optimization-1e234ad2b0f4.json");
+
+        // Load credentials from JSON key file. If you can't set the GOOGLE_APPLICATION_CREDENTIALS
+        // environment variable, you can explicitly load the credentials file to construct the
+        // credentials.
+        GoogleCredentials credentials;
+        try (FileInputStream serviceAccountStream = new FileInputStream(credentialsPath)) {
+            credentials = ServiceAccountCredentials.fromStream(serviceAccountStream);
+        }
+        return credentials;
+    }
+
+    public static void getFromExistingTable() throws Exception {
+
+        // Step 1: Initialize BigQuery service
+        // Here we set our project ID and get the `BigQuery` service object
+        // this is the interface to our BigQuery instance that
+        // we use to execute jobs on
+        BigQuery bigquery = BigQueryOptions.newBuilder().
+                setCredentials(getCredentials()).
+                setProjectId("sf-drying-optimization")
+                .build().getService();
+
+        // Step 2: Prepare query job
+        // A "QueryJob" is a type of job that executes SQL queries
+        // we create a new job configuration from our SQL query and
+        final String GET_SENSOR_DATA = "SELECT VariantValue, TimeStamp FROM sf-drying-optimization.124.int_sd_winccsensordata WHERE TimeStamp BETWEEN \"2020-06-09\" AND \"2020-06-29\" ORDER BY TimeStamp";
+
+        QueryJobConfiguration queryConfig =
+                QueryJobConfiguration.newBuilder(GET_SENSOR_DATA).build();
+
+        // Step 3: Run the job on BigQuery
+        // create a `Job` instance from the job configuration using the BigQuery service
+        // the job starts executing once the `create` method executes
+        Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).build());
+        queryJob = queryJob.waitFor();
+        // the waitFor method blocks until the job completes
+        // and returns `null` if the job doesn't exist anymore
+        if (queryJob == null) {
+            throw new Exception("job no longer exists");
+        }
+        // once the job is done, check if any error occurred
+        if (queryJob.getStatus().getError() != null) {
+            throw new Exception(queryJob.getStatus().getError().toString());
+        }
+
+        // Step 4: Display results
+        // Print out a header line, and iterate through the
+        // query results to print each result in a new line
+        System.out.println("Timestamp\tVariant value");
+        TableResult result = queryJob.getQueryResults();
+        for (FieldValueList row : result.iterateAll()) {
+            // We can use the `get` method along with the column
+            // name to get the corresponding row entry
+            int variantValue = row.get("VariantValue").getNumericValue().intValue();
+            String timeStamp = row.get("TimeStamp").getStringValue();
+            System.out.printf("%s\t%d%n", timeStamp, variantValue);
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/main/java/com/application/DataBase/DB.java b/src/main/java/com/application/DataBase/DB.java
deleted file mode 100644
index 3bbdaf59ab67b66b71095057fcc1f0d2648b9a67..0000000000000000000000000000000000000000
--- a/src/main/java/com/application/DataBase/DB.java
+++ /dev/null
@@ -1,179 +0,0 @@
-package com.application.DataBase;
-
-import com.application.Credentials;
-
-import com.google.auth.oauth2.GoogleCredentials;
-import com.google.auth.oauth2.ServiceAccountCredentials;
-import com.google.cloud.bigquery.BigQuery;
-import com.google.cloud.bigquery.BigQueryError;
-import com.google.cloud.bigquery.BigQueryOptions;
-import com.google.cloud.bigquery.FieldValueList;
-import com.google.cloud.bigquery.InsertAllRequest;
-import com.google.cloud.bigquery.InsertAllResponse;
-import com.google.cloud.bigquery.Job;
-import com.google.cloud.bigquery.JobInfo;
-import com.google.cloud.bigquery.QueryJobConfiguration;
-import com.google.cloud.bigquery.JobStatistics;
-import com.google.cloud.bigquery.TableResult;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.List;
-
-
-public class DB {
-
-    Credentials credentialsObject;
-
-    private static void insertSampleData() {
-        // Step 1: Initialize BigQuery service
-        BigQuery bigquery = BigQueryOptions.newBuilder().setProjectId("sf-drying-optimization")
-                .build().getService();
-
-        // Step 2: Create insertAll (streaming) request
-        InsertAllRequest insertAllRequest = getInsertRequest();
-
-        // Step 3: Insert data into table
-        InsertAllResponse response = bigquery.insertAll(insertAllRequest);
-
-        // Step 4: Check for errors and print results
-        if (response.hasErrors()) {
-            for (Map.Entry<Long, List<BigQueryError>> entry : response.getInsertErrors()
-                    .entrySet()) {
-                System.out.printf("error in entry %d: %s", entry.getKey(),
-                        entry.getValue().toString());
-            }
-            return;
-        }
-        System.out.println("inserted successfully");
-    }
-
-    // To create a streaming insert request, we need to specify the table and dataset id
-    // and create the rows we want to insert
-    private static InsertAllRequest getInsertRequest() {
-        String datasetId = "sample_dataset";
-        String tableId = "vegetables";
-        return InsertAllRequest.newBuilder(datasetId, tableId).addRow(getRow(1, "carrot"))
-                .addRow(getRow(2, "beans")).build();
-
-    }
-
-    // each row is a map with the row name as the key and row value as the value
-    // since the value type is "Object" it can take any arbitrary type, based on
-    // the datatype of the row defined on BigQuery
-    private static Map<String, Object> getRow(int id, String vegetableName) {
-        Map<String, Object> rowMap = new HashMap<String, Object>();
-        rowMap.put("id", id);
-        rowMap.put("name", vegetableName);
-        return rowMap;
-    }
-
-    public static void main(String... args) throws Exception {
-        getFromExistingTable();
-    }
-
-    public static GoogleCredentials getCredentials() throws Exception {
-        File credentialsPath = new File(".\\src\\main\\resources\\com.application\\sf-drying-optimization-1e234ad2b0f4.json");
-
-        // Load credentials from JSON key file. If you can't set the GOOGLE_APPLICATION_CREDENTIALS
-        // environment variable, you can explicitly load the credentials file to construct the
-        // credentials.
-        GoogleCredentials credentials;
-        try (FileInputStream serviceAccountStream = new FileInputStream(credentialsPath)) {
-            credentials = ServiceAccountCredentials.fromStream(serviceAccountStream);
-        }
-        return credentials;
-    }
-
-    private static BigQuery getBuilder() throws Exception {
-        BigQuery bigquery = BigQueryOptions.newBuilder().
-                setCredentials(getCredentials()).
-                setProjectId("sf-drying-optimization")
-                .build().getService();
-        return bigquery;
-    }
-
-    private static void getFromExistingTable() throws Exception {
-
-        // Step 1: Initialize BigQuery service
-        // Here we set our project ID and get the `BigQuery` service object
-        // this is the interface to our BigQuery instance that
-        // we use to execute jobs on
-        BigQuery bigquery = getBuilder();
-
-        // Step 2: Prepare query job
-        // A "QueryJob" is a type of job that executes SQL queries
-        // we create a new job configuration from our SQL query and
-        final String GET_WORD_COUNT = "SELECT VariantValue, TimeStamp FROM sf-drying-optimization.124.int_sd_winccsensordata " +
-                "WHERE TimeStamp BETWEEN \"2020-06-09\" AND \"2020-06-29\" ORDER BY TimeStamp";
-
-
-
-
-                        QueryJobConfiguration queryConfig =
-                QueryJobConfiguration.newBuilder(GET_WORD_COUNT).build();
-
-        // Step 3: Run the job on BigQuery
-        // create a `Job` instance from the job configuration using the BigQuery service
-        // the job starts executing once the `create` method executes
-        Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).build());
-        queryJob = queryJob.waitFor();
-        // the waitFor method blocks until the job completes
-        // and returns `null` if the job doesn't exist anymore
-        if (queryJob == null) {
-            throw new Exception("job no longer exists");
-        }
-        // once the job is done, check if any error occured
-        if (queryJob.getStatus().getError() != null) {
-            throw new Exception(queryJob.getStatus().getError().toString());
-        }
-
-        // Step 4: Display results
-        // Print out a header line, and iterate through the
-        // query results to print each result in a new line
-        System.out.println("Timestamp\tVarient value");
-        TableResult result = queryJob.getQueryResults();
-        for (FieldValueList row : result.iterateAll()) {
-            // We can use the `get` method along with the column
-            // name to get the corresponding row entry
-            int variantValue = row.get("VariantValue").getNumericValue().intValue();
-            String timeStamp = row.get("TimeStamp").getStringValue();
-            System.out.printf("%s\t%d\n", timeStamp, variantValue);
-        }
-    }
-
-    private static void insertViaQuery() throws Exception {
-
-        // Step 1: Initialize BigQuery service
-        BigQuery bigquery = BigQueryOptions.newBuilder().setProjectId("sample-project-330313")
-                .build().getService();
-
-        // Step 2: Prepare query job
-        final String INSERT_VEGETABLES =
-                "INSERT INTO `sample-project-330313.sample_dataset.vegetables` (id, name) VALUES (1, 'carrot'), (2, 'beans');";
-        QueryJobConfiguration queryConfig =
-                QueryJobConfiguration.newBuilder(INSERT_VEGETABLES).build();
-
-
-        // Step 3: Run the job on BigQuery
-        Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).build());
-        queryJob = queryJob.waitFor();
-        if (queryJob == null) {
-            throw new Exception("job no longer exists");
-        }
-        // once the job is done, check if any error occured
-        if (queryJob.getStatus().getError() != null) {
-            throw new Exception(queryJob.getStatus().getError().toString());
-        }
-
-        // Step 4: Display results
-        // Here, we will print the total number of rows that were inserted
-        JobStatistics.QueryStatistics stats = queryJob.getStatistics();
-        Long rowsInserted = stats.getDmlStats().getInsertedRowCount();
-        System.out.printf("%d rows inserted\n", rowsInserted);
-    }
-
-
-}
\ No newline at end of file
diff --git a/src/main/java/com/application/Main.java b/src/main/java/com/application/Main.java
index b20c57f4afd0ca45b3e8a24437c48fca7e3514ac..2fd00b723766fc1e7f4e08304f49b3140821b39a 100644
--- a/src/main/java/com/application/Main.java
+++ b/src/main/java/com/application/Main.java
@@ -1,39 +1,44 @@
 package com.application;
 
+import com.application.DB.DB;
 import javafx.application.Application;
 import javafx.fxml.FXMLLoader;
 import javafx.scene.Parent;
 import javafx.scene.Scene;
-import javafx.scene.control.Button;
 import javafx.stage.Stage;
 
 import java.io.IOException;
 import java.util.Objects;
 
-
+/**
+ * This class launches the application
+ *
+ * @author Eilert Tunheim, Karin Pettersen, Mads Arnesen
+ * @version 1.0.0
+ */
 public class Main extends Application {
-    Button button;
+
+    /**
+     * Starts the application
+     * @param args
+     * @throws IOException
+     */
     public static void main(String[] args) throws IOException {
-        //System.out.println("Hello world!");
-        //Authentication test = explicit();
         launch(args);
     }
 
     /**
-     * Main javafx code
-     * Stage = The whole window
-     * Scene = Content inside the stage. Put buttons and dropdown stuff
+     * Sets the primaryStage and sets the scene for the window.
      * @param primaryStage
      * @throws Exception
      */
     @Override
     public void start(Stage primaryStage) throws Exception {
 
-        // Loading the GUI-fxml file from resources
-        Parent root = FXMLLoader.load(Objects.requireNonNull(getClass().getResource("/com.application/GUI/graphical_user_interface.fxml")));
+        DB.getFromExistingTable(); // NOTE(review): blocking network/DB call on the JavaFX Application Thread — move to a background thread (e.g. javafx.concurrent.Task)
 
         // Loading the GUI-fxml file from resources
-         root = FXMLLoader.load(Objects.requireNonNull(getClass().getResource("/com.application/GUI/input.fxml")));
+        Parent root = FXMLLoader.load(Objects.requireNonNull(getClass().getResource("/com.application/GUI/graphical_user_interface.fxml")));
 
         // Sets the scene and defines boundaries
         Scene scene = new Scene(root, 1200, 600);
diff --git a/target/classes/com/application/Credentials.class b/target/classes/com/application/Credentials.class
deleted file mode 100644
index 4b4b4931f691818234e43e1beff85814d2969ca7..0000000000000000000000000000000000000000
Binary files a/target/classes/com/application/Credentials.class and /dev/null differ
diff --git a/target/classes/com/application/DB/DB.class b/target/classes/com/application/DB/DB.class
new file mode 100644
index 0000000000000000000000000000000000000000..358567fe506315da7ec92c447ff21861a8c7bb65
Binary files /dev/null and b/target/classes/com/application/DB/DB.class differ
diff --git a/target/classes/com/application/DataBase/DB.class b/target/classes/com/application/DataBase/DB.class
deleted file mode 100644
index ea482c914d92beae15296c409392fc8bc7a0745c..0000000000000000000000000000000000000000
Binary files a/target/classes/com/application/DataBase/DB.class and /dev/null differ
diff --git a/target/classes/com/application/Main.class b/target/classes/com/application/Main.class
index b2242fb72f009f60d45919e5191f2e44c76b8575..72d339a777587e42e0b4cf66b835e33ec457f966 100644
Binary files a/target/classes/com/application/Main.class and b/target/classes/com/application/Main.class differ