diff --git a/src/main/java/com/application/DataBase/DB.java b/src/main/java/com/application/DataBase/DB.java
index fa8c89b0ab44983f71eaa4f0746439338ae363e9..37d348420877287bfd66cfe562eb618da6c46541 100644
--- a/src/main/java/com/application/DataBase/DB.java
+++ b/src/main/java/com/application/DataBase/DB.java
@@ -25,6 +25,8 @@ import java.util.List;
 
 public class DB {
+
+/*
 
     Credentials credentialsObject;
 
     private static void insertSampleData() {
@@ -75,7 +77,9 @@ public class DB {
     }
 
     public static GoogleCredentials getCredentials() throws Exception {
-        File credentialsPath = new File(".\\src\\main\\resources\\com.application\\sf-drying-optimization-1e234ad2b0f4.json");
+        //File credentialsPath = new File(".\\src\\main\\resources\\com.application\\sf-drying-optimization-1e234ad2b0f4.json");
+
+        File credentialsPath = new File("/Users/Madzarn/Desktop/bacheloroppgave_2022/src/main/resources/com.application/sf-drying-optimization-1e234ad2b0f4.json");
 
         // Load credentials from JSON key file. If you can't set the GOOGLE_APPLICATION_CREDENTIALS
         // environment variable, you can explicitly load the credentials file to construct the
@@ -98,10 +102,17 @@ public class DB {
                 setProjectId("sf-drying-optimization")
                 .build().getService();
 
+        //final String GET_WORD_COUNT = "SELECT InTidTork, UtTidTork FROM sf-drying-optimization.124.int_gs_ds_sipalpackages WHERE InTidTork BETWEEN \"2020-05-14 12:51:03\" AND \"2020-05-17 16:10:09\" ORDER BY InTidTork";
+
         // Step 2: Prepare query job
         // A "QueryJob" is a type of job that executes SQL queries
         // we create a new job configuration from our SQL query and
-        final String GET_WORD_COUNT = "SELECT VariantValue, TimeStamp FROM sf-drying-optimization.124.int_sd_winccsensordata WHERE TimeStamp BETWEEN \"2020-06-09\" AND \"2020-06-29\" ORDER BY TimeStamp";
+        final String GET_WORD_COUNT = "SELECT VariantValue, TimeStamp FROM sf-drying-optimization.124.int_sd_winccsensordata " +
+                "WHERE TimeStamp BETWEEN \"" +
+                inTidTork +
+                "\" AND \"" +
+                utTidTork +
+                "\" ORDER BY TimeStamp";
 
         QueryJobConfiguration queryConfig =
                 QueryJobConfiguration.newBuilder(GET_WORD_COUNT).build();
@@ -167,4 +178,169 @@ public class DB {
 
     }
 
+
+*/
+
+
+    Credentials credentialsObject2;
+
+    private static void insertSampleData2() {
+        // Step 1: Initialize BigQuery service
+        BigQuery bigquery = BigQueryOptions.newBuilder().setProjectId("sf-drying-optimization")
+                .build().getService();
+
+        // Step 2: Create insertAll (streaming) request
+        InsertAllRequest insertAllRequest = getInsertRequest2();
+
+        // Step 3: Insert data into table
+        InsertAllResponse response = bigquery.insertAll(insertAllRequest);
+
+        // Step 4: Check for errors and print results
+        if (response.hasErrors()) {
+            for (Map.Entry<Long, List<BigQueryError>> entry : response.getInsertErrors()
+                    .entrySet()) {
+                System.out.printf("error in entry %d: %s", entry.getKey(),
+                        entry.getValue().toString());
+            }
+            return;
+        }
+        System.out.println("inserted successfully");
+    }
+
+    // To create a streaming insert request, we need to specify the table and dataset id
+    // and create the rows we want to insert
+    private static InsertAllRequest getInsertRequest2() {
+        String datasetId = "sample_dataset";
+        String tableId = "vegetables";
+        return InsertAllRequest.newBuilder(datasetId, tableId).addRow(getRow(1, "carrot"))
+                .addRow(getRow(2, "beans")).build();
+    }
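+
+    // Illustrative sketch, not part of this change: the streaming insert above can
+    // also attach a per-row insert id, which BigQuery uses for best-effort
+    // deduplication if a request is retried. The "veg-1"/"veg-2" ids below are
+    // hypothetical:
+    //
+    //   InsertAllRequest.newBuilder(datasetId, tableId)
+    //           .addRow("veg-1", getRow(1, "carrot"))
+    //           .addRow("veg-2", getRow(2, "beans"))
+    //           .build();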
rowMap.put("name", vegetableName); + return rowMap; + } + + // each row is a map with the row name as the key and row value as the value + // since the value type is "Object" it can take any arbitrary type, based on + // the datatype of the row defined on BigQuery + + + public static void main(String... args) throws Exception { + getFromExistingTable2(); + } + + public static GoogleCredentials getCredentials2() throws Exception { + //File credentialsPath = new File(".\\src\\main\\resources\\com.application\\sf-drying-optimization-1e234ad2b0f4.json"); + + File credentialsPath = new File("/Users/Madzarn/Desktop/bacheloroppgave_2022/src/main/resources/com.application/sf-drying-optimization-1e234ad2b0f4.json"); + + // Load credentials from JSON key file. If you can't set the GOOGLE_APPLICATION_CREDENTIALS + // environment variable, you can explicitly load the credentials file to construct the + // credentials. + GoogleCredentials credentials; + try (FileInputStream serviceAccountStream = new FileInputStream(credentialsPath)) { + credentials = ServiceAccountCredentials.fromStream(serviceAccountStream); + } + return credentials; + } + + private static void getFromExistingTable2() throws Exception { + + // Step 1: Initialize BigQuery service + // Here we set our project ID and get the `BigQuery` service object + // this is the interface to our BigQuery instance that + // we use to execute jobs on + BigQuery bigquery = BigQueryOptions.newBuilder(). + setCredentials(getCredentials2()). + setProjectId("sf-drying-optimization") + .build().getService(); + + + + // Step 2: Prepare query job + // A "QueryJob" is a type of job that executes SQL queries + // we create a new job configuration from our SQL query and + final String GET_WORD_COUNT = "SELECT CalculatedStart, CalculatedStop FROM sf-drying-optimization.124.int_dk_valmaticsdryingbatches WHERE CalculatedStart BETWEEN \"2021-08-17 07:53:21\" AND \"2021-10-02 14:42:39\" ORDER BY CalculatedStart"; + + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(GET_WORD_COUNT).build(); + + // Step 3: Run the job on BigQuery + // create a `Job` instance from the job configuration using the BigQuery service + // the job starts executing once the `create` method executes + Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).build()); + queryJob = queryJob.waitFor(); + // the waitFor method blocks until the job completes + // and returns `null` if the job doesn't exist anymore + if (queryJob == null) { + throw new Exception("job no longer exists"); + } + // once the job is done, check if any error occured + if (queryJob.getStatus().getError() != null) { + throw new Exception(queryJob.getStatus().getError().toString()); + } + + // Step 4: Display results + // Print out a header line, and iterate through the + // query results to print each result in a new line + System.out.println("CalculatedStart\tCalculatedStop"); + TableResult result = queryJob.getQueryResults(); + for (FieldValueList row : result.iterateAll()) { + // We can use the `get` method along with the column + // name to get the corresponding row entry + String calculatedStart = row.get("CalculatedStart").getStringValue(); + String calculatedStop = row.get("CalculatedStop").getStringValue(); + System.out.printf("%s\t%s\n", calculatedStart, calculatedStop); + } + } + + /* private static void insertViaQuery2() throws Exception { + + // Step 1: Initialize BigQuery service + BigQuery bigquery = BigQueryOptions.newBuilder().setProjectId("sample-project-330313") + 
+    /* private static void insertViaQuery2() throws Exception {
+
+        // Step 1: Initialize BigQuery service
+        BigQuery bigquery = BigQueryOptions.newBuilder().setProjectId("sample-project-330313")
+                .build().getService();
+
+        // Step 2: Prepare query job
+        final String INSERT_VEGETABLES =
+                "INSERT INTO `sample-project-330313.sample_dataset.vegetables` (id, name) VALUES (1, 'carrot'), (2, 'beans');";
+        QueryJobConfiguration queryConfig =
+                QueryJobConfiguration.newBuilder(INSERT_VEGETABLES).build();
+
+        // Step 3: Run the job on BigQuery
+        Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).build());
+        queryJob = queryJob.waitFor();
+        if (queryJob == null) {
+            throw new Exception("job no longer exists");
+        }
+        // once the job is done, check if any error occurred
+        if (queryJob.getStatus().getError() != null) {
+            throw new Exception(queryJob.getStatus().getError().toString());
+        }
+
+        // Step 4: Display results
+        // Here, we will print the total number of rows that were inserted
+        JobStatistics.QueryStatistics stats = queryJob.getStatistics();
+        Long rowsInserted = stats.getDmlStats().getInsertedRowCount();
+        System.out.printf("%d rows inserted\n", rowsInserted);
+    }
+    */
+
 }
\ No newline at end of file
diff --git a/src/main/java/com/application/Main.java b/src/main/java/com/application/Main.java
index b20c57f4afd0ca45b3e8a24437c48fca7e3514ac..40b56ce83304e7df8dd71f3706ec22188220c0a6 100644
--- a/src/main/java/com/application/Main.java
+++ b/src/main/java/com/application/Main.java
@@ -32,8 +32,6 @@ public class Main extends Application {
 
         // Loading the GUI-fxml file from resources
         Parent root = FXMLLoader.load(Objects.requireNonNull(getClass().getResource("/com.application/GUI/graphical_user_interface.fxml")));
-        // Loading the GUI-fxml file from resources
-        root = FXMLLoader.load(Objects.requireNonNull(getClass().getResource("/com.application/GUI/input.fxml")));
 
         // Sets the scene and defines boundaries
         Scene scene = new Scene(root, 1200, 600);
diff --git a/target/classes/com/application/Credentials.class b/target/classes/com/application/Credentials.class
index 68f97c5ee7aaa1dc208d9fbcad0e9cbcdee30942..4b4b4931f691818234e43e1beff85814d2969ca7 100644
Binary files a/target/classes/com/application/Credentials.class and b/target/classes/com/application/Credentials.class differ
diff --git a/target/classes/com/application/DataBase/DB.class b/target/classes/com/application/DataBase/DB.class
index bcdfb2c47b692138349417763b3102b135156c6f..97e133c7c78f7e2a47f84ce51ac1f3a8c9074bb6 100644
Binary files a/target/classes/com/application/DataBase/DB.class and b/target/classes/com/application/DataBase/DB.class differ
diff --git a/target/classes/com/application/Main.class b/target/classes/com/application/Main.class
index 69c4841e149a06ce1a54f8eb35273e6fd9cbd1cd..3505afcd047704f5fc9996beaba9d4649bf885a2 100644
Binary files a/target/classes/com/application/Main.class and b/target/classes/com/application/Main.class differ