package com.application.DataBase;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryError;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.InsertAllRequest;
import com.google.cloud.bigquery.InsertAllResponse;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.JobStatistics;
import com.google.cloud.bigquery.TableResult;
import java.io.File;
import java.io.FileInputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.List;
public class DB {
private static void insertSampleData() {
// Step 1: Initialize BigQuery service
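// Since no credentials are set explicitly here, the client falls back to
// Application Default Credentials (for example the GOOGLE_APPLICATION_CREDENTIALS
// environment variable). To reuse the key file loaded by getCredentials() below,
// add setCredentials(getCredentials()) and declare this method to throw Exception.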
BigQuery bigquery = BigQueryOptions.newBuilder().setProjectId("sf-drying-optimization")
.build().getService();
// Step 2: Create insertAll (streaming) request
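// insertAll streams the rows straight into the table without running a load
// or query job; rows that fail are reported individually in the response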
InsertAllRequest insertAllRequest = getInsertRequest();
// Step 3: Insert data into table
InsertAllResponse response = bigquery.insertAll(insertAllRequest);
// Step 4: Check for errors and print results
if (response.hasErrors()) {
for (Map.Entry<Long, List<BigQueryError>> entry : response.getInsertErrors()
.entrySet()) {
System.out.printf("error in entry %d: %s", entry.getKey(),
entry.getValue().toString());
}
return;
}
System.out.println("inserted successfully");
}
// To create a streaming insert request, we need to specify the dataset and table IDs
// and build the rows we want to insert
private static InsertAllRequest getInsertRequest() {
String datasetId = "sample_dataset";
String tableId = "vegetables";
return InsertAllRequest.newBuilder(datasetId, tableId).addRow(getRow(1, "carrot"))
.addRow(getRow(2, "beans")).build();
}
// each row is a map with the column name as the key and the column value as the value
// since the value type is "Object" it can take any arbitrary type, based on
// the datatype of the column defined on BigQuery
private static Map<String, Object> getRow(int id, String vegetableName) {
Map<String, Object> rowMap = new HashMap<String, Object>();
rowMap.put("id", id);
rowMap.put("name", vegetableName);
return rowMap;
}
public static void main(String... args) throws Exception {
getFromExistingTable();
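// insertSampleData() and insertViaQuery() can be called here as well,
// once the dataset and table they reference exist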
}
public static GoogleCredentials getCredentials() throws Exception {
File credentialsPath = new File(".\\src\\main\\resources\\com.application\\sf-drying-optimization-1e234ad2b0f4.json");
// Load credentials from JSON key file. If you can't set the GOOGLE_APPLICATION_CREDENTIALS
// environment variable, you can explicitly load the credentials file to construct the
// credentials.
GoogleCredentials credentials;
try (FileInputStream serviceAccountStream = new FileInputStream(credentialsPath)) {
credentials = ServiceAccountCredentials.fromStream(serviceAccountStream);
}
return credentials;
}
private static void getFromExistingTable() throws Exception {
// Step 1: Initialize BigQuery service
// Here we set our project ID and get the `BigQuery` service object
// this is the interface to our BigQuery instance that
// we use to execute jobs on
BigQuery bigquery = BigQueryOptions.newBuilder()
.setCredentials(getCredentials())
.setProjectId("sf-drying-optimization")
.build().getService();
// Step 2: Prepare query job
// A "QueryJob" is a type of job that executes SQL queries
// we create a new job configuration from our SQL query string and
// use it below to create the query job
final String GET_SENSOR_DATA =
"SELECT VariantValue, TimeStamp "
+ "FROM `sf-drying-optimization.124.int_sd_winccsensordata` "
+ "WHERE TimeStamp BETWEEN \"2020-06-09\" AND \"2020-06-29\" "
+ "ORDER BY TimeStamp";
QueryJobConfiguration queryConfig =
QueryJobConfiguration.newBuilder(GET_SENSOR_DATA).build();
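// note: the Java client runs queries with standard SQL by default
// (useLegacySql is false), which is why the fully qualified table name
// above is wrapped in backticks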
// Step 3: Run the job on BigQuery
// create a `Job` instance from the job configuration using the BigQuery service
// the job starts executing once the `create` method executes
Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).build());
queryJob = queryJob.waitFor();
// the waitFor method blocks until the job completes
// and returns `null` if the job doesn't exist anymore
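// (waitFor can also be given RetryOption arguments to adjust how often and
// how long the job status is polled, if the defaults are not suitable)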
if (queryJob == null) {
throw new Exception("job no longer exists");
}
// once the job is done, check if any error occurred
if (queryJob.getStatus().getError() != null) {
throw new Exception(queryJob.getStatus().getError().toString());
}
// Step 4: Display results
// Print out a header line, and iterate through the
// query results to print each result in a new line
System.out.println("Timestamp\tVarient value");
TableResult result = queryJob.getQueryResults();
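// iterateAll() pages through the full result set, fetching additional
// result pages from BigQuery as needed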
for (FieldValueList row : result.iterateAll()) {
// We can use the `get` method along with the column
// name to get the corresponding row entry
int variantValue = row.get("VariantValue").getNumericValue().intValue();
String timeStamp = row.get("TimeStamp").getStringValue();
System.out.printf("%s\t%d\n", timeStamp, variantValue);
}
}
private static void insertViaQuery() throws Exception {
// Step 1: Initialize BigQuery service
BigQuery bigquery = BigQueryOptions.newBuilder().setProjectId("sample-project-330313")
.build().getService();
// Step 2: Prepare query job
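// unlike the streaming insertAll() used in insertSampleData(), this INSERT
// is a DML statement executed as a regular query job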
final String INSERT_VEGETABLES =
"INSERT INTO `sample-project-330313.sample_dataset.vegetables` (id, name) VALUES (1, 'carrot'), (2, 'beans');";
QueryJobConfiguration queryConfig =
QueryJobConfiguration.newBuilder(INSERT_VEGETABLES).build();
// Step 3: Run the job on BigQuery
Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).build());
queryJob = queryJob.waitFor();
if (queryJob == null) {
throw new Exception("job no longer exists");
}
// once the job is done, check if any error occurred
if (queryJob.getStatus().getError() != null) {
throw new Exception(queryJob.getStatus().getError().toString());
}
// Step 4: Display results
// Here, we will print the total number of rows that were inserted
JobStatistics.QueryStatistics stats = queryJob.getStatistics();
Long rowsInserted = stats.getDmlStats().getInsertedRowCount();
System.out.printf("%d rows inserted\n", rowsInserted);
}
}