diff --git a/bigquery/pom.xml b/bigquery/pom.xml
index 8c2deafcd37..47f38c9298b 100644
--- a/bigquery/pom.xml
+++ b/bigquery/pom.xml
@@ -41,18 +41,20 @@
      <artifactId>google-oauth-client-jetty</artifactId>
      <version>${project.oauth.version}</version>
    </dependency>
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+      <version>2.3.1</version>
+    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
+      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>com.jcabi</groupId>
      <artifactId>jcabi-matchers</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>com.google.code.gson</groupId>
-      <artifactId>gson</artifactId>
-      <version>2.3.1</version>
+      <scope>test</scope>
    </dependency>
diff --git a/bigquery/src/main/java/com/google/cloud/bigquery/samples/AsyncQuerySample.java b/bigquery/src/main/java/com/google/cloud/bigquery/samples/AsyncQuerySample.java
index ca62512377a..a76b9e03c51 100644
--- a/bigquery/src/main/java/com/google/cloud/bigquery/samples/AsyncQuerySample.java
+++ b/bigquery/src/main/java/com/google/cloud/bigquery/samples/AsyncQuerySample.java
@@ -33,17 +33,16 @@
* Example of authorizing with BigQuery and reading from a public dataset.
*/
public class AsyncQuerySample extends BigqueryUtils {
-
-
// [START main]
/**
+ * Prompts for all the parameters required to make a query.
+ *
* @param args Command line args
* @throws IOException IOException
* @throws InterruptedException InterruptedException
*/
public static void main(final String[] args)
throws IOException, InterruptedException {
-
Scanner scanner = new Scanner(System.in);
System.out.println("Enter your project id: ");
String projectId = scanner.nextLine();
@@ -52,20 +51,20 @@ public static void main(final String[] args)
System.out.println("Run query in batch mode? [true|false] ");
boolean batch = Boolean.valueOf(scanner.nextLine());
System.out.println("Enter how often to check if your job is complete "
- + "(milliseconds): ");
+ + "(milliseconds): ");
long waitTime = scanner.nextLong();
scanner.close();
Iterator<GetQueryResultsResponse> pages = run(projectId, queryString,
- batch, waitTime);
+ batch, waitTime);
while (pages.hasNext()) {
printRows(pages.next().getRows(), System.out);
}
-
}
// [END main]
- // [START run]
+ // [START run]
/**
+ * Run the query.
*
* @param projectId Get this from Google Developers console
* @param queryString Query we want to run against BigQuery
@@ -75,7 +74,7 @@ public static void main(final String[] args)
* @throws IOException Thrown if there's an IOException
* @throws InterruptedException Thrown if there's an Interrupted Exception
*/
- public static Iterator<GetQueryResultsResponse> run(final String projectId,
+ public static Iterator<GetQueryResultsResponse> run(final String projectId,
final String queryString,
final boolean batch,
final long waitTime)
@@ -94,7 +93,7 @@ public static Iterator<GetQueryResultsResponse> run(final String projectId,
GetQueryResults resultsRequest = bigquery.jobs().getQueryResults(
projectId, query.getJobReference().getJobId());
- return getPages(resultsRequest);
+ return getPages(resultsRequest);
}
// [END run]
@@ -115,7 +114,7 @@ public static Job asyncQuery(final Bigquery bigquery,
final boolean batch) throws IOException {
JobConfigurationQuery queryConfig = new JobConfigurationQuery()
- .setQuery(querySql);
+ .setQuery(querySql);
if (batch) {
queryConfig.setPriority("BATCH");
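For reviewers unfamiliar with this sample, here is a minimal sketch of how the reformatted run(...) entry point is driven outside of main; the project id and query string are placeholders, not values from this change:

```java
import com.google.api.services.bigquery.model.GetQueryResultsResponse;
import com.google.cloud.bigquery.samples.AsyncQuerySample;
import com.google.cloud.bigquery.samples.BigqueryUtils;
import java.util.Iterator;

public class AsyncQueryDriver {
  public static void main(String[] args) throws Exception {
    // run() inserts the query job, polls until it is DONE, then pages through results.
    Iterator<GetQueryResultsResponse> pages = AsyncQuerySample.run(
        "my-project-id",                                               // placeholder project id
        "SELECT TOP(corpus, 10) as title, COUNT(*) as unique_words "
            + "FROM [publicdata:samples.shakespeare]",                 // placeholder query
        false,                                                         // interactive, not batch
        1000);                                                         // poll every second
    while (pages.hasNext()) {
      BigqueryUtils.printRows(pages.next().getRows(), System.out);
    }
  }
}
```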
diff --git a/bigquery/src/main/java/com/google/cloud/bigquery/samples/BigqueryUtils.java b/bigquery/src/main/java/com/google/cloud/bigquery/samples/BigqueryUtils.java
index bab223f7c72..743c29eac79 100644
--- a/bigquery/src/main/java/com/google/cloud/bigquery/samples/BigqueryUtils.java
+++ b/bigquery/src/main/java/com/google/cloud/bigquery/samples/BigqueryUtils.java
@@ -12,6 +12,7 @@
See the License for the specific language governing permissions and
limitations under the License.
*/
+
package com.google.cloud.bigquery.samples;
import com.google.api.client.json.GenericJson;
@@ -44,7 +45,6 @@ public class BigqueryUtils {
* static helper methods.
*/
protected BigqueryUtils() {
-
}
/**
@@ -53,12 +53,11 @@ protected BigqueryUtils() {
* @param out Output stream we want to print to
*/
// [START print_rows]
- public static void printRows(final List<TableRow> rows, final PrintStream
- out) {
+ public static void printRows(final List<TableRow> rows, final PrintStream out) {
for (TableRow row : rows) {
for (TableCell field : row.getF()) {
out.printf("%-50s", field.getV());
- }
+ }
out.println();
}
}
@@ -73,14 +72,13 @@ public static void printRows(final List<TableRow> rows, final PrintStream
* @throws InterruptedException InterruptedException
*/
// [START poll_job]
- public static Job pollJob(final Bigquery.Jobs.Get request, final long
- interval)
+ public static Job pollJob(final Bigquery.Jobs.Get request, final long interval)
throws IOException, InterruptedException {
Job job = request.execute();
while (!job.getStatus().getState().equals("DONE")) {
System.out.println("Job is "
- + job.getStatus().getState()
- + " waiting " + interval + " milliseconds...");
+ + job.getStatus().getState()
+ + " waiting " + interval + " milliseconds...");
Thread.sleep(interval);
job = request.execute();
}
@@ -97,7 +95,7 @@ public static Job pollJob(final Bigquery.Jobs.Get request, final long
*/
// [START paging]
public static <T extends GenericJson> Iterator<T> getPages(
- final BigqueryRequest<T> requestTemplate) {
+ final BigqueryRequest<T> requestTemplate) {
/**
* An iterator class that pages through a Bigquery request.
@@ -168,8 +166,8 @@ public static TableSchema loadSchema(final Reader schemaSource) {
TableSchema sourceSchema = new TableSchema();
List<TableFieldSchema> fields = (new Gson())
- .<List<TableFieldSchema>>fromJson(schemaSource,
- (new ArrayList<TableFieldSchema>()).getClass());
+ .<List<TableFieldSchema>>fromJson(schemaSource,
+ (new ArrayList<TableFieldSchema>()).getClass());
sourceSchema.setFields(fields);
@@ -186,8 +184,7 @@ public static TableSchema loadSchema(final Reader schemaSource) {
* @throws IOException Thrown if there is a network error connecting to
* Bigquery.
*/
- public static void listDatasets(final Bigquery bigquery, final String
- projectId)
+ public static void listDatasets(final Bigquery bigquery, final String projectId)
throws IOException {
Datasets.List datasetRequest = bigquery.datasets().list(projectId);
DatasetList datasetList = datasetRequest.execute();
@@ -201,5 +198,4 @@ public static void listDatasets(final Bigquery bigquery, final String
}
}
// [END list_datasets]
-
}
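The polling helper reformatted above pairs with a Bigquery.Jobs.Get request built from a job id; a short sketch, assuming an already-authorized client (the waitForJob wrapper and the five-second interval are illustrative, not part of this change):

```java
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Job;
import com.google.cloud.bigquery.samples.BigqueryUtils;
import java.io.IOException;

public class PollJobExample {
  /** Blocks until the given BigQuery job reports the "DONE" state. */
  public static Job waitForJob(Bigquery bigquery, String projectId, String jobId)
      throws IOException, InterruptedException {
    // pollJob re-executes this request until job.getStatus().getState() equals "DONE".
    Bigquery.Jobs.Get request = bigquery.jobs().get(projectId, jobId);
    return BigqueryUtils.pollJob(request, 5000);
  }
}
```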
diff --git a/bigquery/src/main/java/com/google/cloud/bigquery/samples/ExportDataCloudStorageSample.java b/bigquery/src/main/java/com/google/cloud/bigquery/samples/ExportDataCloudStorageSample.java
index 67fee35b185..736e5c31bbb 100644
--- a/bigquery/src/main/java/com/google/cloud/bigquery/samples/ExportDataCloudStorageSample.java
+++ b/bigquery/src/main/java/com/google/cloud/bigquery/samples/ExportDataCloudStorageSample.java
@@ -27,13 +27,12 @@
* Sample of how to Export Cloud Data.
*/
public class ExportDataCloudStorageSample {
-
- /**
- * Protected constructor since this is a collection of static functions.
- */
- protected ExportDataCloudStorageSample() {
- super();
- }
+ /**
+ * Protected constructor since this is a collection of static functions.
+ */
+ protected ExportDataCloudStorageSample() {
+ super();
+ }
/**
* This program can be run to demonstrate running a Bigquery query from the
@@ -43,8 +42,8 @@ protected ExportDataCloudStorageSample() {
* @throws InterruptedException Should never be thrown.
*/
// [START main]
- public static void main(final String[] args) throws IOException,
- InterruptedException {
+ public static void main(final String[] args)
+ throws IOException, InterruptedException {
Scanner scanner = new Scanner(System.in);
System.out.println("Enter your project id: ");
String projectId = scanner.nextLine();
@@ -53,15 +52,14 @@ public static void main(final String[] args) throws IOException,
System.out.println("Enter your table id: ");
String tableId = scanner.nextLine();
System.out.println("Enter the Google Cloud Storage Path to which you'd "
- + "like to export: ");
+ + "like to export: ");
String cloudStoragePath = scanner.nextLine();
System.out.println("Enter how often to check if your job is complete "
- + "(milliseconds): ");
+ + "(milliseconds): ");
long interval = scanner.nextLong();
scanner.close();
run(cloudStoragePath, projectId, datasetId, tableId, interval);
-
}
// [END main]
@@ -120,16 +118,12 @@ public static Job extractJob(
final TableReference table) throws IOException {
JobConfigurationExtract extract = new JobConfigurationExtract()
- .setSourceTable(table)
- .setDestinationUri(cloudStoragePath);
+ .setSourceTable(table)
+ .setDestinationUri(cloudStoragePath);
return bigquery.jobs().insert(table.getProjectId(),
new Job().setConfiguration(new JobConfiguration().setExtract(extract)))
.execute();
}
// [END extract_job]
-
-
-
-
}
diff --git a/bigquery/src/main/java/com/google/cloud/bigquery/samples/GettingStarted.java b/bigquery/src/main/java/com/google/cloud/bigquery/samples/GettingStarted.java
index 654769951bb..91c5a6f9c38 100644
--- a/bigquery/src/main/java/com/google/cloud/bigquery/samples/GettingStarted.java
+++ b/bigquery/src/main/java/com/google/cloud/bigquery/samples/GettingStarted.java
@@ -11,47 +11,24 @@
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
-// [START all]
-package com.google.cloud.bigquery.samples;
-
+package com.google.cloud.bigquery.samples;
-import com.google.api.client.auth.oauth2.Credential;
-import com.google.api.client.extensions.java6.auth.oauth2.AuthorizationCodeInstalledApp;
-import com.google.api.client.extensions.jetty.auth.oauth2.LocalServerReceiver;
-import com.google.api.client.googleapis.auth.oauth2.GoogleAuthorizationCodeFlow;
-import com.google.api.client.googleapis.auth.oauth2.GoogleAuthorizationCodeRequestUrl;
-import com.google.api.client.googleapis.auth.oauth2.GoogleClientSecrets;
+// [START all]
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
-import com.google.api.client.googleapis.auth.oauth2.GoogleTokenResponse;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
-import com.google.api.client.util.store.DataStoreFactory;
-import com.google.api.client.util.store.FileDataStoreFactory;
-import com.google.api.services.bigquery.Bigquery.Datasets;
-import com.google.api.services.bigquery.Bigquery.Jobs.Insert;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.BigqueryScopes;
-import com.google.api.services.bigquery.model.DatasetList;
import com.google.api.services.bigquery.model.GetQueryResultsResponse;
-import com.google.api.services.bigquery.model.Job;
-import com.google.api.services.bigquery.model.JobConfiguration;
-import com.google.api.services.bigquery.model.JobConfigurationQuery;
-import com.google.api.services.bigquery.model.JobReference;
import com.google.api.services.bigquery.model.QueryRequest;
import com.google.api.services.bigquery.model.QueryResponse;
import com.google.api.services.bigquery.model.TableCell;
import com.google.api.services.bigquery.model.TableRow;
-import java.io.BufferedReader;
-import java.io.FileInputStream;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.util.Arrays;
import java.util.List;
import java.util.Scanner;
@@ -144,7 +121,7 @@ private static void printResults(List<TableRow> rows) {
*/
public static void main(String[] args) throws IOException {
Scanner sc;
- if(args.length == 0) {
+ if (args.length == 0) {
// Prompt the user to enter the id of the project to run the queries under
System.out.print("Enter the project ID: ");
sc = new Scanner(System.in);
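The trimmed import list keeps only what is needed to build an authorized client with Application Default Credentials; a sketch of how those pieces are typically wired together (the application name is a placeholder, and this is not code from the change itself):

```java
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.BigqueryScopes;
import java.io.IOException;

public class BigqueryClientSketch {
  public static Bigquery createClient() throws IOException {
    HttpTransport transport = new NetHttpTransport();
    JsonFactory jsonFactory = new JacksonFactory();
    // Application Default Credentials, scoped for BigQuery.
    GoogleCredential credential =
        GoogleCredential.getApplicationDefault(transport, jsonFactory)
            .createScoped(BigqueryScopes.all());
    return new Bigquery.Builder(transport, jsonFactory, credential)
        .setApplicationName("bigquery-samples") // placeholder name
        .build();
  }
}
```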
diff --git a/bigquery/src/main/java/com/google/cloud/bigquery/samples/LoadDataCSVSample.java b/bigquery/src/main/java/com/google/cloud/bigquery/samples/LoadDataCsvSample.java
similarity index 68%
rename from bigquery/src/main/java/com/google/cloud/bigquery/samples/LoadDataCSVSample.java
rename to bigquery/src/main/java/com/google/cloud/bigquery/samples/LoadDataCsvSample.java
index 922ba82bda6..28367529a68 100644
--- a/bigquery/src/main/java/com/google/cloud/bigquery/samples/LoadDataCSVSample.java
+++ b/bigquery/src/main/java/com/google/cloud/bigquery/samples/LoadDataCsvSample.java
@@ -12,6 +12,7 @@
See the License for the specific language governing permissions and
limitations under the License.
*/
+
package com.google.cloud.bigquery.samples;
import com.google.api.services.bigquery.Bigquery;
@@ -31,24 +32,23 @@
/**
* Cli tool to load data from a CSV into Bigquery.
*/
-public class LoadDataCSVSample {
-
- /**
- * Protected constructor since this is a collection of static methods.
- */
- protected LoadDataCSVSample() {
- }
+public class LoadDataCsvSample {
+ /**
+ * Protected constructor since this is a collection of static methods.
+ */
+ protected LoadDataCsvSample() {
+ }
- /**
- * Cli tool to load data from a CSV into Bigquery.
- * @param args Command line args, should be empty
- * @throws IOException IOException
- * @throws InterruptedException InterruptedException
- */
+ /**
+ * Cli tool to load data from a CSV into Bigquery.
+ * @param args Command line args, should be empty
+ * @throws IOException IOException
+ * @throws InterruptedException InterruptedException
+ */
// [START main]
- public static void main(final String[] args) throws IOException,
- InterruptedException {
+ public static void main(final String[] args)
+ throws IOException, InterruptedException {
Scanner scanner = new Scanner(System.in);
System.out.println("Enter your project id: ");
String projectId = scanner.nextLine();
@@ -57,14 +57,14 @@ public static void main(final String[] args) throws IOException,
System.out.println("Enter your table id: ");
String tableId = scanner.nextLine();
System.out.println("Enter the Google Cloud Storage Path to the data "
- + "you'd like to load: ");
+ + "you'd like to load: ");
String cloudStoragePath = scanner.nextLine();
System.out.println("Enter the filepath to your schema: ");
String sourceSchemaPath = scanner.nextLine();
System.out.println("Enter how often to check if your job is complete "
- + "(milliseconds): ");
+ + "(milliseconds): ");
long interval = scanner.nextLong();
scanner.close();
@@ -74,21 +74,20 @@ public static void main(final String[] args) throws IOException,
tableId,
new FileReader(new File(sourceSchemaPath)),
interval);
-
}
// [END main]
- /**
- * Run the bigquery ClI.
- * @param cloudStoragePath The bucket we are using
- * @param projectId Project id
- * @param datasetId datasetid
- * @param tableId tableid
- * @param schemaSource Source of the schema
- * @param interval interval to wait between polling in milliseconds
- * @throws IOException Thrown if there is an error connecting to Bigquery.
- * @throws InterruptedException Should never be thrown
- */
+ /**
+ * Run the Bigquery CLI.
+ * @param cloudStoragePath The bucket we are using
+ * @param projectId Project id
+ * @param datasetId datasetid
+ * @param tableId tableid
+ * @param schemaSource Source of the schema
+ * @param interval interval to wait between polling in milliseconds
+ * @throws IOException Thrown if there is an error connecting to Bigquery.
+ * @throws InterruptedException Should never be thrown
+ */
// [START run]
public static void run(
final String cloudStoragePath,
@@ -121,15 +120,15 @@ public static void run(
}
// [END run]
- /**
- * A job that extracts data from a table.
- * @param bigquery Bigquery service to use
- * @param cloudStoragePath Cloud storage bucket we are inserting into
- * @param table Table to extract from
- * @param schema The schema of the table we are loading into
- * @return The job to extract data from the table
- * @throws IOException Thrown if error connceting to Bigtable
- */
+ /**
+ * A job that loads data from Cloud Storage into a table.
+ * @param bigquery Bigquery service to use
+ * @param cloudStoragePath Cloud Storage path to the CSV data we are loading
+ * @param table Table to load the data into
+ * @param schema The schema of the table we are loading into
+ * @return The job that loads the data into the table
+ * @throws IOException Thrown if there is an error connecting to Bigquery
+ */
// [START load_job]
public static Job loadJob(
final Bigquery bigquery,
@@ -138,15 +137,13 @@ public static Job loadJob(
final TableSchema schema) throws IOException {
JobConfigurationLoad load = new JobConfigurationLoad()
- .setDestinationTable(table)
- .setSchema(schema)
- .setSourceUris(Collections.singletonList(cloudStoragePath));
+ .setDestinationTable(table)
+ .setSchema(schema)
+ .setSourceUris(Collections.singletonList(cloudStoragePath));
return bigquery.jobs().insert(table.getProjectId(),
new Job().setConfiguration(new JobConfiguration().setLoad(load)))
.execute();
}
// [END load_job]
-
-
}
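To show how the renamed class's helpers compose, a sketch that loads a CSV from Cloud Storage and polls the job to completion; the client, bucket, table ids, and schema path are placeholders, and the parameter order follows the Javadoc above rather than code visible in this hunk:

```java
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Job;
import com.google.api.services.bigquery.model.TableReference;
import com.google.api.services.bigquery.model.TableSchema;
import com.google.cloud.bigquery.samples.BigqueryUtils;
import com.google.cloud.bigquery.samples.LoadDataCsvSample;
import java.io.FileReader;
import java.io.IOException;

public class LoadCsvSketch {
  public static void loadCsv(Bigquery bigquery) throws IOException, InterruptedException {
    TableReference table = new TableReference()
        .setProjectId("my-project-id")   // placeholder ids
        .setDatasetId("my_dataset")
        .setTableId("my_table");
    TableSchema schema = BigqueryUtils.loadSchema(new FileReader("schema.json"));
    Job job = LoadDataCsvSample.loadJob(
        bigquery, "gs://my-bucket/data.csv", table, schema);
    // Poll the inserted load job until it reports DONE.
    BigqueryUtils.pollJob(
        bigquery.jobs().get(table.getProjectId(), job.getJobReference().getJobId()),
        5000);
  }
}
```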
diff --git a/bigquery/src/main/java/com/google/cloud/bigquery/samples/StreamingSample.java b/bigquery/src/main/java/com/google/cloud/bigquery/samples/StreamingSample.java
index b2d01c76562..3f2aed4b747 100644
--- a/bigquery/src/main/java/com/google/cloud/bigquery/samples/StreamingSample.java
+++ b/bigquery/src/main/java/com/google/cloud/bigquery/samples/StreamingSample.java
@@ -33,21 +33,21 @@
/**
* Example of Bigquery Streaming.
*/
-public class StreamingSample {
- /**
- * Empty constructor since this is just a collection of static methods.
- */
- protected StreamingSample() {
-
- }
+public class StreamingSample {
+ /**
+ * Empty constructor since this is just a collection of static methods.
+ */
+ protected StreamingSample() {
+ }
- /**
- * Command line that demonstrates Bigquery streaming.
- * @param args Command line args, should be empty
- * @throws IOException IOexception
- */
+ /**
+ * Command line that demonstrates Bigquery streaming.
+ *
+ * @param args Command line args, should be empty
+ * @throws IOException IOException
+ */
// [START main]
public static void main(final String[] args) throws IOException {
final Scanner scanner = new Scanner(System.in);
@@ -62,24 +62,25 @@ public static void main(final String[] args) throws IOException {
System.out.println("Enter JSON to stream to BigQuery: \n"
+ "Press End-of-stream (CTRL-D) to stop");
- JsonReader fromCLI = new JsonReader(new InputStreamReader(System.in));
+ JsonReader fromCli = new JsonReader(new InputStreamReader(System.in));
Iterator<TableDataInsertAllResponse> responses = run(projectId,
datasetId,
tableId,
- fromCLI);
+ fromCli);
while (responses.hasNext()) {
System.out.println(responses.next());
}
- fromCLI.close();
+ fromCli.close();
}
// [END main]
/**
* Run the bigquery ClI.
+ *
* @param projectId Project id
* @param datasetId datasetid
* @param tableId tableid
@@ -101,10 +102,11 @@ public static Iterator<TableDataInsertAllResponse> run(final String projectId,
return new Iterator<TableDataInsertAllResponse>() {
- /**
- * Get the next row in the stream
- * @return True if there is another row in the stream
- */
+ /**
+ * Check whether there is another row to stream.
+ *
+ * @return True if there is another row in the stream
+ */
public boolean hasNext() {
try {
return rows.hasNext();
@@ -114,10 +116,11 @@ public boolean hasNext() {
return false;
}
- /**
- *
- * @return Next page of data
- */
+ /**
+ * Insert the next row, and return the response.
+ *
+ * @return Next page of data
+ */
public TableDataInsertAllResponse next() {
try {
Map rowData = gson.