Commit 627cdab

fix(samples): removed env variables and buckets from creating bq. (#526)
* Removed env variables and buckets from creating BQ.
* Fix: removed buckets from BQ import
* pr fix: imports.
* 🦉 Updates from OwlBot post-processor. See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
* pr fix: fixed test.
* pr fix: added comment.

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
1 parent d5084c5 commit 627cdab

4 files changed: +34 -19 lines changed

retail/interactive-tutorials/src/main/java/events/setup/EventsCreateBigQueryTable.java (+6 -4)

@@ -27,6 +27,7 @@
 import java.io.FileReader;
 import java.io.IOException;
 import java.util.stream.Collectors;
+import product.setup.ProductsCreateBigqueryTable;
 
 public class EventsCreateBigQueryTable {
 
@@ -35,10 +36,11 @@ public static void main(String[] args) throws IOException {
     String validEventsTable = "events";
     String invalidEventsTable = "events_some_invalid";
     String eventsSchemaFilePath = "src/main/resources/events_schema.json";
+    // user_events.json and user_events_some_invalid.json are located in the resources folder
     String validEventsSourceFile =
-        String.format("gs://%s/user_events.json", System.getenv("EVENTS_BUCKET_NAME"));
+        ProductsCreateBigqueryTable.class.getResource("/user_events.json").getPath();
     String invalidEventsSourceFile =
-        String.format("gs://%s/user_events_some_invalid.json", System.getenv("EVENTS_BUCKET_NAME"));
+        ProductsCreateBigqueryTable.class.getResource("/user_events_some_invalid.json").getPath();
 
     BufferedReader bufferedReader = new BufferedReader(new FileReader(eventsSchemaFilePath));
     String jsonToString = bufferedReader.lines().collect(Collectors.joining());
@@ -48,8 +50,8 @@ public static void main(String[] args) throws IOException {
 
     createBqDataset(dataset);
     createBqTable(dataset, validEventsTable, eventsSchema);
-    uploadDataToBqTable(dataset, validEventsTable, validEventsSourceFile, eventsSchema);
+    uploadDataToBqTable(dataset, validEventsTable, validEventsSourceFile);
     createBqTable(dataset, invalidEventsTable, eventsSchema);
-    uploadDataToBqTable(dataset, invalidEventsTable, invalidEventsSourceFile, eventsSchema);
+    uploadDataToBqTable(dataset, invalidEventsTable, invalidEventsSourceFile);
   }
 }
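
The change above swaps a gs:// URI built from EVENTS_BUCKET_NAME for a file shipped in the sample's resources folder and resolved through the classpath. A minimal sketch of that lookup pattern, where the class name is illustrative and the resource is assumed to live at the classpath root (getResource(...).getPath() only yields a usable filesystem path when resources sit on disk rather than inside a jar):

import java.net.URL;

public class ResourcePathSketch {
  public static void main(String[] args) {
    // Leading "/" means: look the resource up from the classpath root.
    URL resource = ResourcePathSketch.class.getResource("/user_events.json");
    if (resource == null) {
      throw new IllegalStateException("user_events.json is not on the classpath");
    }
    // Fine for exploded classes/resources on disk; a jar entry would need getResourceAsStream.
    String localPath = resource.getPath();
    System.out.println("Local source file: " + localPath);
  }
}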

retail/interactive-tutorials/src/main/java/product/setup/ProductsCreateBigqueryTable.java (+4 -4)

@@ -36,9 +36,9 @@ public static void main(String[] args) throws IOException {
     String invalidProductsTable = "products_some_invalid";
     String productSchemaFilePath = "src/main/resources/product_schema.json";
     String validProductsSourceFile =
-        String.format("gs://%s/products.json", System.getenv("BUCKET_NAME"));
+        ProductsCreateBigqueryTable.class.getResource("/products.json").getPath();
     String invalidProductsSourceFile =
-        String.format("gs://%s/products_some_invalid.json", System.getenv("BUCKET_NAME"));
+        ProductsCreateBigqueryTable.class.getResource("products_some_invalid.json").getPath();
 
     BufferedReader bufferedReader = new BufferedReader(new FileReader(productSchemaFilePath));
     String jsonToString = bufferedReader.lines().collect(Collectors.joining());
@@ -48,8 +48,8 @@ public static void main(String[] args) throws IOException {
 
     createBqDataset(dataset);
     createBqTable(dataset, validProductsTable, productSchema);
-    uploadDataToBqTable(dataset, validProductsTable, validProductsSourceFile, productSchema);
+    uploadDataToBqTable(dataset, validProductsTable, validProductsSourceFile);
     createBqTable(dataset, invalidProductsTable, productSchema);
-    uploadDataToBqTable(dataset, invalidProductsTable, invalidProductsSourceFile, productSchema);
+    uploadDataToBqTable(dataset, invalidProductsTable, invalidProductsSourceFile);
   }
 }
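
One detail worth noting in the first hunk: the lookup for products_some_invalid.json has no leading slash, and Class.getResource resolves such names relative to the class's own package, while the other lookups use a leading slash and resolve from the classpath root. A quick illustration of that difference, using a hypothetical resource name:

package product.setup;

import java.net.URL;

public class ResourceLookupDemo {
  public static void main(String[] args) {
    // "data.json" is a hypothetical resource name used only for this demo.
    // Package-relative: resolves to /product/setup/data.json on the classpath.
    URL packageRelative = ResourceLookupDemo.class.getResource("data.json");
    // Classpath-root: resolves to /data.json regardless of this class's package.
    URL classpathRoot = ResourceLookupDemo.class.getResource("/data.json");
    System.out.println("package-relative: " + packageRelative);
    System.out.println("classpath-root:   " + classpathRoot);
  }
}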

retail/interactive-tutorials/src/main/java/setup/SetupCleanup.java (+23 -10)

@@ -33,14 +33,15 @@
 import com.google.cloud.bigquery.FieldList;
 import com.google.cloud.bigquery.FormatOptions;
 import com.google.cloud.bigquery.Job;
-import com.google.cloud.bigquery.JobInfo;
+import com.google.cloud.bigquery.JobId;
 import com.google.cloud.bigquery.LegacySQLTypeName;
-import com.google.cloud.bigquery.LoadJobConfiguration;
 import com.google.cloud.bigquery.Schema;
 import com.google.cloud.bigquery.StandardTableDefinition;
+import com.google.cloud.bigquery.TableDataWriteChannel;
 import com.google.cloud.bigquery.TableDefinition;
 import com.google.cloud.bigquery.TableId;
 import com.google.cloud.bigquery.TableInfo;
+import com.google.cloud.bigquery.WriteChannelConfiguration;
 import com.google.cloud.retail.v2.CreateProductRequest;
 import com.google.cloud.retail.v2.DeleteProductRequest;
 import com.google.cloud.retail.v2.FulfillmentInfo;
@@ -71,6 +72,8 @@
 import com.google.protobuf.Int32Value;
 import com.google.protobuf.Timestamp;
 import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.channels.Channels;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.time.Instant;
@@ -349,27 +352,37 @@ public static void createBqTable(String datasetName, String tableName, Schema sc
     }
   }
 
-  public static void uploadDataToBqTable(
-      String datasetName, String tableName, String sourceUri, Schema schema) {
+  public static void uploadDataToBqTable(String datasetName, String tableName, String sourceUri) {
     try {
       BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
       TableId tableId = TableId.of(datasetName, tableName);
-      LoadJobConfiguration loadConfig =
-          LoadJobConfiguration.newBuilder(tableId, sourceUri)
+
+      WriteChannelConfiguration writeChannelConfiguration =
+          WriteChannelConfiguration.newBuilder(tableId)
               .setFormatOptions(FormatOptions.json())
-              .setSchema(schema)
               .build();
-      Job job = bigquery.create(JobInfo.of(loadConfig));
-      job = job.waitFor();
+
+      String jobName = "jobId_" + UUID.randomUUID();
+      JobId jobId = JobId.newBuilder().setLocation("us").setJob(jobName).build();
+
+      try (TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
+          OutputStream stream = Channels.newOutputStream(writer)) {
+        Files.copy(Paths.get(sourceUri), stream);
+      }
+
+      Job job = bigquery.getJob(jobId);
+      Job completedJob = job.waitFor();
       if (job.isDone()) {
-        System.out.printf("Json from GCS successfully loaded in a table '%s'.%n", tableName);
+        System.out.printf("Json successfully loaded in a table '%s'.%n", tableName);
       } else {
         System.out.println(
            "BigQuery was unable to load into the table due to an error:"
                + job.getStatus().getError());
       }
     } catch (BigQueryException | InterruptedException e) {
       System.out.printf("Column not added during load append: %s%n", e.getMessage());
+    } catch (IOException e) {
+      System.out.printf("Error copying file: %s%n", e.getMessage());
     }
   }
 
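
For readers who want the new upload path in one self-contained piece: the rewritten uploadDataToBqTable streams a local newline-delimited JSON file into BigQuery through a TableDataWriteChannel and then waits on the resulting load job, instead of submitting a LoadJobConfiguration pointing at a gs:// URI. A sketch of that pattern follows; the dataset, table, job-id prefix, and file path are placeholders, not values from the sample:

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.FormatOptions;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobId;
import com.google.cloud.bigquery.TableDataWriteChannel;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.WriteChannelConfiguration;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.channels.Channels;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.UUID;

public class LocalJsonLoadSketch {
  public static void main(String[] args) throws IOException, InterruptedException {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    // Placeholder dataset/table; the target table is assumed to exist already.
    TableId tableId = TableId.of("my_dataset", "my_table");

    // Describe the load: newline-delimited JSON written through a channel.
    WriteChannelConfiguration config =
        WriteChannelConfiguration.newBuilder(tableId)
            .setFormatOptions(FormatOptions.json())
            .build();

    // A unique job id lets us fetch the load job again after the upload finishes.
    JobId jobId =
        JobId.newBuilder().setLocation("us").setJob("local_load_" + UUID.randomUUID()).build();

    // Stream the file's bytes through the write channel; closing the channel
    // (via try-with-resources) hands the data off to the load job.
    try (TableDataWriteChannel writer = bigquery.writer(jobId, config);
        OutputStream stream = Channels.newOutputStream(writer)) {
      Files.copy(Paths.get("/tmp/rows.json"), stream); // placeholder local file
    }

    // Look the job up and block until it settles, then report the outcome.
    Job job = bigquery.getJob(jobId);
    Job completed = job == null ? null : job.waitFor();
    if (completed != null && completed.getStatus().getError() == null) {
      System.out.println("Local JSON loaded into " + tableId.getTable());
    } else {
      System.out.println("Load did not complete cleanly: "
          + (completed == null ? "job not found" : completed.getStatus().getError()));
    }
  }
}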

retail/interactive-tutorials/src/test/java/product/AddFulfillmentPlacesTest.java (+1 -1)

@@ -64,7 +64,7 @@ public void setUp() throws IOException, InterruptedException, ExecutionException
   public void testAddFulfillment() {
     String outputResult = bout.toString();
 
-    assertThat(outputResult).contains("Add fulfilment places with current date");
+    assertThat(outputResult).contains("Add fulfilment places");
     assertThat(outputResult).contains("Add fulfillment places, wait 45 seconds");
   }
 
