package com.application.DB;

import com.google.auth.oauth2.GoogleCredentials;
import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.TableResult;

import java.io.File;
import java.io.FileInputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class DB {
    private static GoogleCredentials getCredentials() throws Exception {
        File credentialsPath = new File("./src/main/resources/com.application/sf-drying-optimization-1e234ad2b0f4.json");

        // Load credentials from the JSON key file. If you can't set the GOOGLE_APPLICATION_CREDENTIALS
        // environment variable, you can explicitly load the credentials file to construct the
        // credentials.
        GoogleCredentials credentials;
        try (FileInputStream serviceAccountStream = new FileInputStream(credentialsPath)) {
            credentials = ServiceAccountCredentials.fromStream(serviceAccountStream);
        }
        return credentials;
    }
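
    // Note (an assumption, not exercised in this class): if the GOOGLE_APPLICATION_CREDENTIALS
    // environment variable points at the key file, the explicit loading above can be skipped and
    // the application default credentials used instead, e.g.
    //   BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
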
    // Step 1: Initialize BigQuery service
    // Here we set our project ID and get the `BigQuery` service object;
    // this is the interface to our BigQuery instance that
    // we use to execute jobs on.
    private static BigQuery getBuilder() throws Exception {
        return BigQueryOptions.newBuilder()
                .setCredentials(getCredentials())
                .setProjectId("sf-drying-optimization")
                .build()
                .getService();
    }
    public static void getFromExistingTable() throws Exception {
        // Step 2: Prepare query job
        // A "QueryJob" is a type of job that executes SQL queries.
        // We create a new job configuration from our SQL query, which selects the
        // InTidTork and UtTidTork timestamps within the given date range.
        final String GET_DRYING_TIMES = "SELECT InTidTork, UtTidTork " +
                "FROM sf-drying-optimization.124.int_gs_ds_sipalpackages " +
                "WHERE Tork LIKE \"%5%\" AND InTidTork BETWEEN \"2020-05-14 12:51:03\" " +
                "AND \"2022-03-03 16:10:09\" ORDER BY InTidTork";
        QueryJobConfiguration queryConfig =
                QueryJobConfiguration.newBuilder(GET_DRYING_TIMES).build();
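
        // Design note (a sketch, not used above): the filter values could instead be supplied as
        // named query parameters rather than concatenated into the SQL string (this assumes an
        // additional import of com.google.cloud.bigquery.QueryParameterValue):
        //   QueryJobConfiguration.newBuilder(
        //                   "SELECT InTidTork, UtTidTork FROM ... WHERE InTidTork BETWEEN @start AND @end")
        //           .addNamedParameter("start", QueryParameterValue.timestamp("2020-05-14 12:51:03+00:00"))
        //           .addNamedParameter("end", QueryParameterValue.timestamp("2022-03-03 16:10:09+00:00"))
        //           .build();
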
        // Step 3: Run the job on BigQuery
        // Create a `Job` instance from the job configuration using the BigQuery service;
        // the job starts executing once the `create` method executes.
        Job queryJob = getBuilder().create(JobInfo.newBuilder(queryConfig).build());
        queryJob = queryJob.waitFor();

        // The waitFor method blocks until the job completes
        // and returns `null` if the job doesn't exist anymore.
        if (queryJob == null) {
            throw new Exception("job no longer exists");
        }
        // Once the job is done, check if any error occurred.
        if (queryJob.getStatus().getError() != null) {
            throw new Exception(queryJob.getStatus().getError().toString());
        }
        // Step 4: Display results
        // Print out a header line, and iterate through the
        // query results to print each result on a new line.
        System.out.println("InTidTork \t UtTidTork");
        TableResult result = queryJob.getQueryResults();

        // Use a 24-hour clock (HH) so afternoon timestamps are not rendered as 12-hour values.
        SimpleDateFormat newFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        newFormat.setTimeZone(TimeZone.getTimeZone("UTC"));

        for (FieldValueList row : result.iterateAll()) {
            // We can use the `get` method along with the column name to get the corresponding
            // row entry. BigQuery returns timestamps in microseconds since the epoch, so divide
            // by 1000 to get the milliseconds that java.util.Date expects.
            long inTidTork = row.get("InTidTork").getTimestampValue() / 1000;
            long utTidTork = row.get("UtTidTork").getTimestampValue() / 1000;
            String formattedInTidTork = newFormat.format(new Date(inTidTork));
            String formattedUtTidTork = newFormat.format(new Date(utTidTork));
            System.out.printf("%s\t\t\t\t\t%s\n", formattedInTidTork, formattedUtTidTork);
        }
    }
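
    // Minimal usage sketch (an assumption, not part of the original class): a standalone entry
    // point that runs the query job and prints the drying intervals.
    public static void main(String[] args) throws Exception {
        getFromExistingTable();
    }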
}