Skip to content
Snippets Groups Projects
Commit bde961ad authored by Eilert Tunheim's avatar Eilert Tunheim
Browse files

Potential connection with BigQuery

parent fc2bacb2
No related branches found
No related tags found
No related merge requests found
Showing
with 201 additions and 68 deletions
<component name="libraryTable">
<library name="Maven: com.google.apis:google-api-services-storage:v1-rev20211201-1.32.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/google/apis/google-api-services-storage/v1-rev20211201-1.32.1/google-api-services-storage-v1-rev20211201-1.32.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/google/apis/google-api-services-storage/v1-rev20211201-1.32.1/google-api-services-storage-v1-rev20211201-1.32.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/google/apis/google-api-services-storage/v1-rev20211201-1.32.1/google-api-services-storage-v1-rev20211201-1.32.1-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.google.auto.value:auto-value-annotations:1.9">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/google/auto/value/auto-value-annotations/1.9/auto-value-annotations-1.9.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/google/auto/value/auto-value-annotations/1.9/auto-value-annotations-1.9-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/google/auto/value/auto-value-annotations/1.9/auto-value-annotations-1.9-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.google.cloud:google-cloud-storage:2.4.0">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/google/cloud/google-cloud-storage/2.4.0/google-cloud-storage-2.4.0.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/google/cloud/google-cloud-storage/2.4.0/google-cloud-storage-2.4.0-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/google/cloud/google-cloud-storage/2.4.0/google-cloud-storage-2.4.0-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
......@@ -49,5 +49,8 @@
<orderEntry type="library" name="Maven: org.threeten:threetenbp:1.5.2" level="project" />
<orderEntry type="library" name="Maven: com.google.code.gson:gson:2.8.9" level="project" />
<orderEntry type="library" name="Maven: com.google.errorprone:error_prone_annotations:2.11.0" level="project" />
<orderEntry type="library" name="Maven: com.google.cloud:google-cloud-storage:2.4.0" level="project" />
<orderEntry type="library" name="Maven: com.google.apis:google-api-services-storage:v1-rev20211201-1.32.1" level="project" />
<orderEntry type="library" name="Maven: com.google.auto.value:auto-value-annotations:1.9" level="project" />
</component>
</module>
\ No newline at end of file
......@@ -25,6 +25,10 @@
<groupId>com.google.cloud</groupId>
<artifactId>google-cloud-bigquery</artifactId>
</dependency>
<dependency>
<groupId>com.google.cloud</groupId>
<artifactId>google-cloud-storage</artifactId>
</dependency>
</dependencies>
<properties>
......
package com.application.DataBase;
import com.application.Credentials;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Dataset;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
public class Authentication {

    /**
     * Authenticates against Google BigQuery by explicitly loading a service-account
     * key file, then lists every dataset in the project as a connectivity smoke test.
     *
     * <p>Use this when the {@code GOOGLE_APPLICATION_CREDENTIALS} environment variable
     * cannot be set and the credentials file must be loaded programmatically.
     *
     * @throws IOException if the service-account key file cannot be read
     */
    public static void explicit() throws IOException {
        // NOTE(review): this relies on Credentials.toString() yielding the GCP
        // project id — confirm that is the intended contract of Credentials.
        String projectId = String.valueOf(new Credentials());

        // Forward slashes are accepted by java.io.File on all platforms; the
        // original backslash-escaped path only worked on Windows.
        File credentialsPath = new File(
                "src/main/resources/com.application/sf-drying-optimization-1e234ad2b0f4.json");

        // Explicitly load credentials from the JSON key file; try-with-resources
        // guarantees the stream is closed even if parsing fails.
        GoogleCredentials credentials;
        try (FileInputStream serviceAccountStream = new FileInputStream(credentialsPath)) {
            credentials = ServiceAccountCredentials.fromStream(serviceAccountStream);
        }

        // Build a BigQuery client bound to the loaded credentials and project id.
        BigQuery bigquery =
                BigQueryOptions.newBuilder()
                        .setCredentials(credentials)
                        .setProjectId(projectId)
                        .build()
                        .getService();

        // Smoke test: enumerate every dataset visible to the service account.
        System.out.println("Datasets:");
        for (Dataset dataset : bigquery.listDatasets().iterateAll()) {
            System.out.printf("%s%n", dataset.getDatasetId().getDataset());
        }
    }
}
package com.application.DataBase;
import com.google.cloud.bigquery.*;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryError;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.InsertAllRequest;
import com.google.cloud.bigquery.InsertAllResponse;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.JobStatistics;
import com.google.cloud.bigquery.TableResult;
import java.io.File;
import java.io.FileInputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.List;
public class DB {

    /**
     * Path to the service-account JSON key. Forward slashes are portable across
     * Windows and Unix for {@link java.io.File}, unlike the original backslash path.
     */
    private static final String CREDENTIALS_FILE =
            "src/main/resources/com.application/sf-drying-optimization-1e234ad2b0f4.json";

    /**
     * Streams two sample rows into the {@code sample_dataset.vegetables} table
     * using the BigQuery streaming-insert API, printing any per-row errors.
     */
    private static void insertSampleData() {
        // Step 1: Initialize BigQuery service for the sample project.
        BigQuery bigquery = BigQueryOptions.newBuilder()
                .setProjectId("sample-project-330313")
                .build().getService();

        // Step 2: Create insertAll (streaming) request.
        InsertAllRequest insertAllRequest = getInsertRequest();

        // Step 3: Insert data into the table.
        InsertAllResponse response = bigquery.insertAll(insertAllRequest);

        // Step 4: Check for errors and print results; the map key is the
        // zero-based index of the failed row in the request.
        if (response.hasErrors()) {
            for (Map.Entry<Long, List<BigQueryError>> entry
                    : response.getInsertErrors().entrySet()) {
                System.out.printf("error in entry %d: %s", entry.getKey(),
                        entry.getValue().toString());
            }
            return;
        }
        System.out.println("inserted successfully");
    }

    /**
     * Builds the streaming-insert request: the target dataset/table ids plus the
     * sample rows to insert.
     *
     * @return an {@link InsertAllRequest} targeting {@code sample_dataset.vegetables}
     */
    private static InsertAllRequest getInsertRequest() {
        String datasetId = "sample_dataset";
        String tableId = "vegetables";
        return InsertAllRequest.newBuilder(datasetId, tableId)
                .addRow(getRow(1, "carrot"))
                .addRow(getRow(2, "beans"))
                .build();
    }

    /**
     * Builds one streaming-insert row. Each row is a column-name → value map;
     * the value type is {@code Object} so it can carry whatever type matches the
     * column's datatype as defined on BigQuery.
     *
     * @param id            value for the {@code id} column
     * @param vegetableName value for the {@code name} column
     * @return a mutable map representing one row
     */
    private static Map<String, Object> getRow(int id, String vegetableName) {
        Map<String, Object> rowMap = new HashMap<>();
        rowMap.put("id", id);
        rowMap.put("name", vegetableName);
        return rowMap;
    }

    /** Entry point: runs the sample SELECT query against an existing table. */
    public static void main(String... args) throws Exception {
        getFromExistingTable();
    }

    /**
     * Loads service-account credentials explicitly from the JSON key file, for
     * environments where {@code GOOGLE_APPLICATION_CREDENTIALS} cannot be set.
     *
     * @return credentials parsed from {@link #CREDENTIALS_FILE}
     * @throws Exception if the key file cannot be read or parsed
     */
    public static GoogleCredentials getCredentials() throws Exception {
        File credentialsPath = new File(CREDENTIALS_FILE);
        GoogleCredentials credentials;
        // try-with-resources closes the key-file stream even on parse failure.
        try (FileInputStream serviceAccountStream = new FileInputStream(credentialsPath)) {
            credentials = ServiceAccountCredentials.fromStream(serviceAccountStream);
        }
        return credentials;
    }

    /**
     * Runs a sample SELECT against the public Shakespeare dataset and prints the
     * ten most frequent words in "Julius Caesar" as tab-separated lines.
     *
     * @throws Exception if the query job disappears or finishes with an error
     */
    private static void getFromExistingTable() throws Exception {
        // Step 1: Initialize the BigQuery service — the interface we use to
        // execute jobs on our BigQuery instance.
        BigQuery bigquery = BigQueryOptions.newBuilder()
                .setCredentials(getCredentials())
                .setProjectId("sf-drying-optimization")
                .build().getService();

        // Step 2: Prepare the query job. A "QueryJob" executes SQL queries;
        // we build its configuration from the SQL text.
        final String GET_WORD_COUNT =
                "SELECT word, word_count FROM `bigquery-public-data.samples.shakespeare` WHERE corpus='juliuscaesar' ORDER BY word_count DESC limit 10;";
        QueryJobConfiguration queryConfig =
                QueryJobConfiguration.newBuilder(GET_WORD_COUNT).build();

        // Step 3: Run the job on BigQuery. The job starts executing once
        // `create` returns; waitFor() blocks until completion and returns null
        // if the job no longer exists.
        Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).build());
        queryJob = queryJob.waitFor();
        if (queryJob == null) {
            throw new Exception("job no longer exists");
        }
        // Once the job is done, surface any server-side error.
        if (queryJob.getStatus().getError() != null) {
            throw new Exception(queryJob.getStatus().getError().toString());
        }

        // Step 4: Display results — header line, then one row per line.
        System.out.println("word\tword_count");
        TableResult result = queryJob.getQueryResults();
        for (FieldValueList row : result.iterateAll()) {
            // `get` with the column name returns the corresponding cell.
            String word = row.get("word").getStringValue();
            int wordCount = row.get("word_count").getNumericValue().intValue();
            System.out.printf("%s\t%d\n", word, wordCount);
        }
    }

    /**
     * Inserts the two sample rows via a DML INSERT query job (an alternative to
     * the streaming API used by {@link #insertSampleData()}).
     *
     * @throws Exception if the query job disappears or finishes with an error
     */
    private static void insertViaQuery() throws Exception {
        // Step 1: Initialize BigQuery service.
        // (Fixed: the original declared a second local `bigquery` further down,
        // which was a duplicate-variable compile error.)
        BigQuery bigquery = BigQueryOptions.newBuilder()
                .setProjectId("sample-project-330313")
                .build().getService();

        // Step 2: Prepare the DML INSERT query job.
        final String INSERT_VEGETABLES =
                "INSERT INTO `sample-project-330313.sample_dataset.vegetables` (id, name) VALUES (1, 'carrot'), (2, 'beans');";
        QueryJobConfiguration queryConfig =
                QueryJobConfiguration.newBuilder(INSERT_VEGETABLES).build();

        // Step 3: Run the job on BigQuery and wait for completion.
        Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).build());
        queryJob = queryJob.waitFor();
        if (queryJob == null) {
            throw new Exception("job no longer exists");
        }
        // Once the job is done, surface any server-side error.
        if (queryJob.getStatus().getError() != null) {
            throw new Exception(queryJob.getStatus().getError().toString());
        }
    }
}
\ No newline at end of file
package com.application;
import com.application.DataBase.Authentication;
import javafx.application.Application;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
......@@ -9,10 +8,8 @@ import javafx.scene.control.Button;
import javafx.stage.Stage;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Objects;
import static com.application.DataBase.Authentication.explicit;
public class Main extends Application {
Button button;
......
File added
No preview for this file type
No preview for this file type
No preview for this file type
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment