Commit 06c91822 authored by Mads Greni Arnesen

Added import for CalculatedStart and CalculatedStop

parent cb307d46
@@ -25,6 +25,8 @@ import java.util.List;
public class DB {
/*
Credentials credentialsObject;
private static void insertSampleData() {
@@ -75,7 +77,9 @@ public class DB {
}
public static GoogleCredentials getCredentials() throws Exception {
File credentialsPath = new File(".\\src\\main\\resources\\com.application\\sf-drying-optimization-1e234ad2b0f4.json");
//File credentialsPath = new File(".\\src\\main\\resources\\com.application\\sf-drying-optimization-1e234ad2b0f4.json");
File credentialsPath = new File("/Users/Madzarn/Desktop/bacheloroppgave_2022/src/main/resources/com.application/sf-drying-optimization-1e234ad2b0f4.json");
// Load credentials from JSON key file. If you can't set the GOOGLE_APPLICATION_CREDENTIALS
// environment variable, you can explicitly load the credentials file to construct the
@@ -98,10 +102,17 @@ public class DB {
setProjectId("sf-drying-optimization")
.build().getService();
//final String GET_WORD_COUNT = "SELECT InTidTork, UtTidTork FROM sf-drying-optimization.124.int_gs_ds_sipalpackages WHERE InTidTork BETWEEN \"2020-05-14 12:51:03\" AND \"2020-05-17 16:10:09\" ORDER BY InTidTork";
// Step 2: Prepare query job
// A "QueryJob" is a type of job that executes SQL queries
// we create a new job configuration from our SQL query and build a job from it
final String GET_WORD_COUNT = "SELECT VariantValue, TimeStamp FROM sf-drying-optimization.124.int_sd_winccsensordata WHERE TimeStamp BETWEEN \"2020-06-09\" AND \"2020-06-29\" ORDER BY TimeStamp";
final String GET_WORD_COUNT = "SELECT VariantValue, TimeStamp FROM sf-drying-optimization.124.int_sd_winccsensordata " +
"WHERE TimeStamp BETWEEN \""
+ inTidTork +
"\" AND \""
+ utTidTork +
"\" ORDER BY TimeStamp";
QueryJobConfiguration queryConfig =
QueryJobConfiguration.newBuilder(GET_WORD_COUNT).build();
@@ -167,4 +178,169 @@ public class DB {
}
*/
Credentials credentialsObject2;
private static void insertSampleData2() {
// Step 1: Initialize BigQuery service
BigQuery bigquery = BigQueryOptions.newBuilder().setProjectId("sf-drying-optimization")
.build().getService();
// Step 2: Create insertAll (streaming) request
InsertAllRequest insertAllRequest = getInsertRequest2();
// Step 3: Insert data into table
InsertAllResponse response = bigquery.insertAll(insertAllRequest);
// Step 4: Check for errors and print results
if (response.hasErrors()) {
for (Map.Entry<Long, List<BigQueryError>> entry : response.getInsertErrors()
.entrySet()) {
System.out.printf("error in entry %d: %s", entry.getKey(),
entry.getValue().toString());
}
return;
}
System.out.println("inserted successfully");
}
// To create a streaming insert request, we need to specify the table and dataset id
// and create the rows we want to insert
private static InsertAllRequest getInsertRequest2() {
String datasetId = "sample_dataset";
String tableId = "vegetables";
return InsertAllRequest.newBuilder(datasetId, tableId).addRow(getRow(1, "carrot"))
.addRow(getRow(2, "beans")).build();
}
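// A hedged variant (illustration only, not part of this commit): each row can
// also carry an insert id, which lets BigQuery de-duplicate retried streaming
// inserts on a best-effort basis. The "veg-1"/"veg-2" ids are made up here.
private static InsertAllRequest getInsertRequestWithIds() {
return InsertAllRequest.newBuilder("sample_dataset", "vegetables")
.addRow(InsertAllRequest.RowToInsert.of("veg-1", getRow(1, "carrot")))
.addRow(InsertAllRequest.RowToInsert.of("veg-2", getRow(2, "beans")))
.build();
}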
private static Map<String, Object> getRow(int id, String vegetableName) {
Map<String, Object> rowMap = new HashMap<String, Object>();
rowMap.put("id", id);
rowMap.put("name", vegetableName);
return rowMap;
}
// each row is a map with the column name as the key and the row value as the value
// since the value type is "Object" it can take any arbitrary type, based on
// the datatype of the column defined on BigQuery
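// Illustration only (the columns besides id/name are hypothetical and not part
// of this commit's schema): values are plain Java objects matching the BigQuery
// column types, e.g. doubles for FLOAT64 and timestamp strings for TIMESTAMP.
private static Map<String, Object> getMixedTypeRow() {
Map<String, Object> rowMap = new HashMap<>();
rowMap.put("id", 3);                            // INT64
rowMap.put("name", "leek");                     // STRING
rowMap.put("weightKg", 0.25);                   // FLOAT64 (hypothetical column)
rowMap.put("harvested", "2022-04-01 08:00:00"); // TIMESTAMP (hypothetical column)
return rowMap;
}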
public static void main(String... args) throws Exception {
getFromExistingTable2();
}
public static GoogleCredentials getCredentials2() throws Exception {
//File credentialsPath = new File(".\\src\\main\\resources\\com.application\\sf-drying-optimization-1e234ad2b0f4.json");
File credentialsPath = new File("/Users/Madzarn/Desktop/bacheloroppgave_2022/src/main/resources/com.application/sf-drying-optimization-1e234ad2b0f4.json");
// Load credentials from JSON key file. If you can't set the GOOGLE_APPLICATION_CREDENTIALS
// environment variable, you can explicitly load the credentials file to construct the
// credentials.
GoogleCredentials credentials;
try (FileInputStream serviceAccountStream = new FileInputStream(credentialsPath)) {
credentials = ServiceAccountCredentials.fromStream(serviceAccountStream);
}
return credentials;
}
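// A hedged alternative (not part of this commit): when the
// GOOGLE_APPLICATION_CREDENTIALS environment variable points at the JSON key,
// Application Default Credentials avoid the hard-coded absolute path above.
public static GoogleCredentials getDefaultCredentials() throws Exception {
return GoogleCredentials.getApplicationDefault();
}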
private static void getFromExistingTable2() throws Exception {
// Step 1: Initialize BigQuery service
// Here we set our project ID and get the `BigQuery` service object
// this is the interface to our BigQuery instance that
// we use to execute jobs on
BigQuery bigquery = BigQueryOptions.newBuilder()
.setCredentials(getCredentials2())
.setProjectId("sf-drying-optimization")
.build().getService();
// Step 2: Prepare query job
// A "QueryJob" is a type of job that executes SQL queries
// we create a new job configuration from our SQL query and build a job from it
final String GET_WORD_COUNT = "SELECT CalculatedStart, CalculatedStop FROM sf-drying-optimization.124.int_dk_valmaticsdryingbatches WHERE CalculatedStart BETWEEN \"2021-08-17 07:53:21\" AND \"2021-10-02 14:42:39\" ORDER BY CalculatedStart";
QueryJobConfiguration queryConfig =
QueryJobConfiguration.newBuilder(GET_WORD_COUNT).build();
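// A hedged sketch (illustration only, not part of this commit): the timestamp
// window could be bound as named query parameters instead of string literals;
// the @start/@stop names and the explicit UTC offsets are assumptions for the
// example (requires the com.google.cloud.bigquery.QueryParameterValue import).
// final String PARAM_QUERY = "SELECT CalculatedStart, CalculatedStop "
//         + "FROM `sf-drying-optimization.124.int_dk_valmaticsdryingbatches` "
//         + "WHERE CalculatedStart BETWEEN @start AND @stop ORDER BY CalculatedStart";
// QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(PARAM_QUERY)
//         .addNamedParameter("start", QueryParameterValue.timestamp("2021-08-17 07:53:21.000000+00:00"))
//         .addNamedParameter("stop", QueryParameterValue.timestamp("2021-10-02 14:42:39.000000+00:00"))
//         .build();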
// Step 3: Run the job on BigQuery
// create a `Job` instance from the job configuration using the BigQuery service
// the job starts executing once the `create` method executes
Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).build());
queryJob = queryJob.waitFor();
// the waitFor method blocks until the job completes
// and returns `null` if the job doesn't exist anymore
if (queryJob == null) {
throw new Exception("job no longer exists");
}
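// A hedged variant of the create call above (not part of this commit): an
// explicit random JobId makes the call safe to retry, since BigQuery rejects
// duplicate job ids (assumes com.google.cloud.bigquery.JobId and java.util.UUID imports).
// Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig)
//         .setJobId(JobId.of(UUID.randomUUID().toString()))
//         .build());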
// once the job is done, check if any error occurred
if (queryJob.getStatus().getError() != null) {
throw new Exception(queryJob.getStatus().getError().toString());
}
// Step 4: Display results
// Print out a header line, and iterate through the
// query results to print each result in a new line
System.out.println("CalculatedStart\tCalculatedStop");
TableResult result = queryJob.getQueryResults();
for (FieldValueList row : result.iterateAll()) {
// We can use the `get` method along with the column
// name to get the corresponding row entry
String calculatedStart = row.get("CalculatedStart").getStringValue();
String calculatedStop = row.get("CalculatedStop").getStringValue();
System.out.printf("%s\t%s\n", calculatedStart, calculatedStop);
}
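// Illustration (assumes CalculatedStart/CalculatedStop are TIMESTAMP columns):
// inside the loop above, a value can also be read as microseconds since the
// epoch, which is easier to compare and sort than the raw string:
// long startMicros = row.get("CalculatedStart").getTimestampValue();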
}
/* private static void insertViaQuery2() throws Exception {
// Step 1: Initialize BigQuery service
BigQuery bigquery = BigQueryOptions.newBuilder().setProjectId("sample-project-330313")
.build().getService();
// Step 2: Prepare query job
final String INSERT_VEGETABLES =
"INSERT INTO `sample-project-330313.sample_dataset.vegetables` (id, name) VALUES (1, 'carrot'), (2, 'beans');";
QueryJobConfiguration queryConfig =
QueryJobConfiguration.newBuilder(INSERT_VEGETABLES).build();
// Step 3: Run the job on BigQuery
Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).build());
queryJob = queryJob.waitFor();
if (queryJob == null) {
throw new Exception("job no longer exists");
}
// once the job is done, check if any error occurred
if (queryJob.getStatus().getError() != null) {
throw new Exception(queryJob.getStatus().getError().toString());
}
// Step 4: Display results
// Here, we will print the total number of rows that were inserted
JobStatistics.QueryStatistics stats = queryJob.getStatistics();
Long rowsInserted = stats.getDmlStats().getInsertedRowCount();
System.out.printf("%d rows inserted\n", rowsInserted);
}
*/
}
\ No newline at end of file
@@ -32,8 +32,6 @@ public class Main extends Application {
// Loading the GUI-fxml file from resources
Parent root = FXMLLoader.load(Objects.requireNonNull(getClass().getResource("/com.application/GUI/graphical_user_interface.fxml")));
// Loading the GUI-fxml file from resources
root = FXMLLoader.load(Objects.requireNonNull(getClass().getResource("/com.application/GUI/input.fxml")));
// Sets the scene and defines boundaries
Scene scene = new Scene(root, 1200, 600);