Skip to content
Snippets Groups Projects
Commit 6d55e154 authored by Mads Greni Arnesen's avatar Mads Greni Arnesen
Browse files

Delete

parent 691d3159
No related branches found
No related tags found
No related merge requests found
package com.application.DataBase;
import com.application.Credentials;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Dataset;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
public class Authentication {
    /**
     * Authenticates against BigQuery by explicitly loading a service-account
     * JSON key file (instead of relying on the GOOGLE_APPLICATION_CREDENTIALS
     * environment variable), then lists the project's datasets as a smoke test.
     *
     * @throws IOException if the credentials file cannot be found or parsed
     */
    public static void explicit() throws IOException {
        // TODO(developer): Replace these variables before running the sample.
        // NOTE(review): this relies on Credentials.toString() yielding the
        // project id — confirm against com.application.Credentials.
        String projectId = String.valueOf(new Credentials());
        // Forward slashes are portable: java.io.File accepts them on Windows
        // as well, whereas the original backslash-only path failed on
        // Linux/macOS.
        File credentialsPath =
            new File("./src/main/resources/com.application/sf-drying-optimization-1e234ad2b0f4.json");
        // Load credentials from the JSON key file. try-with-resources
        // guarantees the stream is closed even if parsing throws.
        GoogleCredentials credentials;
        try (FileInputStream serviceAccountStream = new FileInputStream(credentialsPath)) {
            credentials = ServiceAccountCredentials.fromStream(serviceAccountStream);
        }
        // Instantiate a client bound to the explicit credentials and project.
        BigQuery bigquery =
            BigQueryOptions.newBuilder()
                .setCredentials(credentials)
                .setProjectId(projectId)
                .build()
                .getService();
        // Use the client: print every dataset id in the project.
        System.out.println("Datasets:");
        for (Dataset dataset : bigquery.listDatasets().iterateAll()) {
            System.out.printf("%s%n", dataset.getDatasetId().getDataset());
        }
    }
}
package com.application.DataBase;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryException;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.FormatOptions;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.LoadJobConfiguration;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardSQLTypeName;
import com.google.cloud.bigquery.TableId;
import java.io.File;
// Sample to load JSON data from Cloud Storage into a new BigQuery table
public class LoadJson {
    /**
     * Sample entry point: loads JSON data into the
     * {@code 124.int_sd_winccsensordata} table.
     */
    public static void runLoadJsonFromGCS() {
        // TODO(developer): Replace these variables before running the sample.
        String datasetName = "124";
        String tableName = "int_sd_winccsensordata";
        // NOTE(review): loadJsonFromGCS expects a Cloud Storage URI
        // ("gs://bucket/file.json"), but this value is a local path to a
        // service-account key file — the load job cannot succeed with it.
        // Replace with the gs:// URI of the JSON data to load.
        String credentialsPath =
            (".\\src\\main\\resources\\com.application\\sf-drying-optimization-1e234ad2b0f4.json");
        Schema schema =
            Schema.of(
                Field.of("VariantValue", StandardSQLTypeName.INT64),
                Field.of("TimeStamp", StandardSQLTypeName.INT64));
        loadJsonFromGCS(datasetName, tableName, credentialsPath, schema);
    }

    /**
     * Loads newline-delimited JSON from a Cloud Storage URI into the given
     * BigQuery table, blocking until the load job finishes.
     *
     * @param datasetName destination dataset id
     * @param tableName   destination table name
     * @param sourceUri   Cloud Storage URI of the data ("gs://bucket/file.json")
     * @param schema      schema applied to the destination table
     */
    public static void loadJsonFromGCS(
        String datasetName, String tableName, String sourceUri, Schema schema) {
        try {
            // Initialize client that will be used to send requests. This client
            // only needs to be created once, and can be reused for multiple
            // requests.
            BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
            TableId tableId = TableId.of(datasetName, tableName);
            LoadJobConfiguration loadConfig =
                LoadJobConfiguration.newBuilder(tableId, sourceUri)
                    .setFormatOptions(FormatOptions.json())
                    .setSchema(schema)
                    .build();
            // Start the load job and block until it completes, either failing
            // or succeeding.
            Job job = bigquery.create(JobInfo.of(loadConfig));
            job = job.waitFor();
            // isDone() alone is NOT success: a job that completed WITH an error
            // is also "done". Distinguish the outcomes via getError().
            if (job.isDone() && job.getStatus().getError() == null) {
                System.out.println("Json from GCS successfully loaded in a table");
            } else {
                System.out.println(
                    "BigQuery was unable to load into the table due to an error:"
                        + job.getStatus().getError());
            }
        } catch (BigQueryException e) {
            System.out.println("JSON load from GCS failed \n" + e);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the
            // interruption instead of silently swallowing it.
            Thread.currentThread().interrupt();
            System.out.println("JSON load from GCS was interrupted \n" + e);
        }
    }
}
package com.application.DataBase;
import com.google.cloud.bigquery.*;
// Sample to run query total rows
public class LoadTable {
    /** Sample entry point: fetches and prints metadata for one table. */
    public static void main(String[] args) {
        // TODO(developer): Replace these variables before running the sample.
        // Fixed typo: the project elsewhere in this codebase is
        // "sf-drying-optimization" (see the service-account key filename);
        // "f-drying-optimization" would make every lookup fail.
        String projectId = "sf-drying-optimization";
        String datasetName = "124";
        String tableName = "int_sd_winccsensordata";
        getTable(projectId, datasetName, tableName);
    }

    /**
     * Retrieves a table's metadata and prints its description.
     *
     * @param projectId   GCP project containing the dataset
     * @param datasetName dataset containing the table
     * @param tableName   table to look up
     */
    public static void getTable(String projectId, String datasetName, String tableName) {
        try {
            // Initialize client that will be used to send requests. This client
            // only needs to be created once, and can be reused for multiple
            // requests.
            BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
            TableId tableId = TableId.of(projectId, datasetName, tableName);
            Table table = bigquery.getTable(tableId);
            // getTable returns null when the table does not exist; the original
            // would then NPE on table.getDescription().
            if (table == null) {
                System.out.println("Table not found: " + tableId);
            } else {
                System.out.println("Table info: " + table.getDescription());
            }
        } catch (BigQueryException e) {
            System.out.println("Table not retrieved. \n" + e);
        }
    }
}
\ No newline at end of file
package com.application;
import com.application.DataBase.Authentication;
import com.application.DataBase.LoadJson;
import com.google.cloud.bigquery.Schema;
import javafx.application.Application;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
......@@ -11,18 +8,14 @@ import javafx.scene.control.Button;
import javafx.stage.Stage;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Objects;
import static com.application.DataBase.Authentication.explicit;
public class Main extends Application {
Button button;
public static void main(String[] args) throws IOException {
//System.out.println("Hello world!");
//Authentication test = explicit();
launch(args);
new LoadJson().runLoadJsonFromGCS();
}
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment