Skip to content

Commit 2b7479d

Browse files
steffnay and JustinBeckwith
authored and committed
docs(samples): adds more samples (#492)
1 parent ef05ab4 commit 2b7479d

33 files changed

Lines changed: 1220 additions & 34 deletions
Lines changed: 80 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,80 @@
1+
/**
2+
* Copyright 2019 Google LLC
3+
*
4+
* Licensed under the Apache License, Version 2.0 (the "License");
5+
* you may not use this file except in compliance with the License.
6+
* You may obtain a copy of the License at
7+
*
8+
* https://www.apache.org/licenses/LICENSE-2.0
9+
*
10+
* Unless required by applicable law or agreed to in writing, software
11+
* distributed under the License is distributed on an "AS IS" BASIS,
12+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
* See the License for the specific language governing permissions and
14+
* limitations under the License.
15+
*/
16+
17+
'use strict';
18+
19+
function main(
  datasetId = 'my_dataset',
  tableId = 'my_table',
  fileName = '/path/to/file.csv'
) {
  // [START bigquery_add_column_load_append]
  // Import the Google Cloud client libraries
  const {BigQuery} = require('@google-cloud/bigquery');

  // Instantiate client
  const bigquery = new BigQuery();

  async function addColumnLoadAppend() {
    // Adds a new column to a BigQuery table while appending rows via a load job.

    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const fileName = '/path/to/file.csv';
    // const datasetId = 'my_dataset';
    // const tableId = 'my_table';

    // In this example, the existing table contains only the 'Name', 'Age',
    // & 'Weight' columns. 'REQUIRED' fields cannot be added to an existing
    // schema, so the additional column must be 'NULLABLE'.
    const schema = 'Name:STRING, Age:INTEGER, Weight:FLOAT, IsMagic:BOOLEAN';

    // Retrieve destination table reference
    const [table] = await bigquery
      .dataset(datasetId)
      .table(tableId)
      .get();
    const destinationTableRef = table.metadata.tableReference;

    // Set load job options
    const options = {
      schema,
      schemaUpdateOptions: ['ALLOW_FIELD_ADDITION'],
      writeDisposition: 'WRITE_APPEND',
      destinationTable: destinationTableRef,
    };

    // Load data from a local file into the table.
    // load() waits for the job to finish.
    const [job] = await bigquery
      .dataset(datasetId)
      .table(tableId)
      .load(fileName, options);

    // Surface any job errors before reporting success.
    const errors = job.status.errors;
    if (errors && errors.length > 0) {
      throw errors;
    }

    console.log(`Job ${job.id} completed.`);
    console.log('New Schema:');
    console.log(job.configuration.load.schema.fields);
  }
  // [END bigquery_add_column_load_append]
  addColumnLoadAppend();
}
main(...process.argv.slice(2));
Lines changed: 75 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,75 @@
1+
/**
2+
* Copyright 2019 Google LLC
3+
*
4+
* Licensed under the Apache License, Version 2.0 (the "License");
5+
* you may not use this file except in compliance with the License.
6+
* You may obtain a copy of the License at
7+
*
8+
* https://www.apache.org/licenses/LICENSE-2.0
9+
*
10+
* Unless required by applicable law or agreed to in writing, software
11+
* distributed under the License is distributed on an "AS IS" BASIS,
12+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
* See the License for the specific language governing permissions and
14+
* limitations under the License.
15+
*/
16+
17+
'use strict';
18+
19+
function main(datasetId = 'my_dataset', tableId = 'my_table') {
  // [START bigquery_add_column_query_append]
  // Import the Google Cloud client libraries
  const {BigQuery} = require('@google-cloud/bigquery');

  // Instantiate client
  const bigquery = new BigQuery();

  async function addColumnQueryAppend() {
    // Adds a new column to a BigQuery table while appending rows via a query job.

    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const datasetId = 'my_dataset';
    // const tableId = 'my_table';

    // Retrieve destination table reference
    const [table] = await bigquery
      .dataset(datasetId)
      .table(tableId)
      .get();
    const destinationTableRef = table.metadata.tableReference;

    // In this example, the existing table contains only the 'name' column.
    // 'REQUIRED' fields cannot be added to an existing schema,
    // so the additional column must be 'NULLABLE'.
    const query = `SELECT name, year
      FROM \`bigquery-public-data.usa_names.usa_1910_2013\`
      WHERE state = 'TX'
      LIMIT 10`;

    // Set query job options
    const options = {
      query,
      schemaUpdateOptions: ['ALLOW_FIELD_ADDITION'],
      writeDisposition: 'WRITE_APPEND',
      destinationTable: destinationTableRef,
      // Location must match that of the dataset(s) referenced in the query.
      location: 'US',
    };

    const [job] = await bigquery.createQueryJob(options);
    console.log(`Job ${job.id} started.`);

    // Wait for the query to finish
    const [rows] = await job.getQueryResults();
    console.log(`Job ${job.id} completed.`);

    // Print the results
    console.log('Rows:');
    rows.forEach(row => console.log(row));
  }
  // [END bigquery_add_column_query_append]
  addColumnQueryAppend();
}
main(...process.argv.slice(2));
Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
/**
2+
* Copyright 2019 Google LLC
3+
*
4+
* Licensed under the Apache License, Version 2.0 (the "License");
5+
* you may not use this file except in compliance with the License.
6+
* You may obtain a copy of the License at
7+
*
8+
* https://www.apache.org/licenses/LICENSE-2.0
9+
*
10+
* Unless required by applicable law or agreed to in writing, software
11+
* distributed under the License is distributed on an "AS IS" BASIS,
12+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
* See the License for the specific language governing permissions and
14+
* limitations under the License.
15+
*/
16+
17+
'use strict';
18+
19+
function main(datasetId = 'my_dataset', tableId = 'my_table') {
  // [START bigquery_add_empty_column]

  // Import the Google Cloud client library and create a client
  const {BigQuery} = require('@google-cloud/bigquery');
  const bigquery = new BigQuery();

  async function addEmptyColumn() {
    // Adds an empty column to the schema.

    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const datasetId = 'my_dataset';
    // const tableId = 'my_table';
    const column = {name: 'size', type: 'STRING'};

    // Retrieve current table metadata
    const table = bigquery.dataset(datasetId).table(tableId);
    const [metadata] = await table.getMetadata();

    // Update table schema by appending the new column to the existing fields.
    metadata.schema.fields.push(column);

    const [result] = await table.setMetadata(metadata);
    console.log(result.schema.fields);
  }
  // [END bigquery_add_empty_column]
  addEmptyColumn();
}

main(...process.argv.slice(2));
Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
/**
2+
* Copyright 2019 Google LLC
3+
*
4+
* Licensed under the Apache License, Version 2.0 (the "License");
5+
* you may not use this file except in compliance with the License.
6+
* You may obtain a copy of the License at
7+
*
8+
* https://www.apache.org/licenses/LICENSE-2.0
9+
*
10+
* Unless required by applicable law or agreed to in writing, software
11+
* distributed under the License is distributed on an "AS IS" BASIS,
12+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
* See the License for the specific language governing permissions and
14+
* limitations under the License.
15+
*/
16+
17+
'use strict';
18+
19+
function main() {
  // [START bigquery_client_json_credentials]
  // Create a BigQuery client explicitly using service account credentials
  // by specifying the private key file.
  const {BigQuery} = require('@google-cloud/bigquery');

  const options = {
    keyFilename: 'path/to/service_account.json',
    projectId: 'my_project',
  };

  const bigquery = new BigQuery(options);
  // [END bigquery_client_json_credentials]
  async function query() {
    // Queries the U.S. given names dataset for the state of Texas.

    const query = `SELECT name
      FROM \`bigquery-public-data.usa_names.usa_1910_2013\`
      WHERE state = 'TX'
      LIMIT 100`;

    // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query
    // Named distinctly so it does not shadow the credentials `options` above.
    const queryOptions = {
      query,
      // Location must match that of the dataset(s) referenced in the query.
      location: 'US',
    };

    // Run the query as a job
    const [job] = await bigquery.createQueryJob(queryOptions);
    console.log(`Job ${job.id} started.`);

    // Wait for the query to finish
    const [rows] = await job.getQueryResults();

    // Print the results
    console.log('Rows:');
    rows.forEach(row => console.log(row));
  }
  query();
}

main(...process.argv.slice(2));

handwritten/bigquery/samples/createTable.js

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,13 @@
1818

1919
function main(
2020
datasetId = 'my_dataset', // Existing dataset
21-
tableId = 'my_new_table' // Table to be created
21+
tableId = 'my_new_table', // Table to be created
22+
schema = [
23+
{name: 'Name', type: 'STRING', mode: 'REQUIRED'},
24+
{name: 'Age', type: 'INTEGER'},
25+
{name: 'Weight', type: 'FLOAT'},
26+
{name: 'IsMagic', type: 'BOOLEAN'},
27+
]
2228
) {
2329
// [START bigquery_create_table]
2430
// Import the Google Cloud client library and create a client
@@ -33,8 +39,7 @@ function main(
3339
*/
3440
// const datasetId = "my_dataset";
3541
// const tableId = "my_table";
36-
37-
const schema = 'Name:string, Age:integer, Weight:float, IsMagic:boolean';
42+
// const schema = 'Name:string, Age:integer, Weight:float, IsMagic:boolean';
3843

3944
// For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource
4045
const options = {
Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
/**
2+
* Copyright 2019 Google LLC
3+
*
4+
* Licensed under the Apache License, Version 2.0 (the "License");
5+
* you may not use this file except in compliance with the License.
6+
* You may obtain a copy of the License at
7+
*
8+
* https://www.apache.org/licenses/LICENSE-2.0
9+
*
10+
* Unless required by applicable law or agreed to in writing, software
11+
* distributed under the License is distributed on an "AS IS" BASIS,
12+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
* See the License for the specific language governing permissions and
14+
* limitations under the License.
15+
*/
16+
17+
'use strict';
18+
19+
function main(
  datasetId = 'my_dataset',
  tableId = 'my_table',
  bucketName = 'my-bucket',
  filename = 'file.csv'
) {
  // [START bigquery_extract_table_compressed]
  // Import the Google Cloud client libraries
  const {BigQuery} = require('@google-cloud/bigquery');
  const {Storage} = require('@google-cloud/storage');

  const bigquery = new BigQuery();
  const storage = new Storage();

  async function extractTableCompressed() {
    // Exports my_dataset:my_table to gcs://my-bucket/my-file as a compressed file.

    /**
     * TODO(developer): Uncomment the following lines before running the sample.
     */
    // const datasetId = "my_dataset";
    // const tableId = "my_table";
    // const bucketName = "my-bucket";
    // const filename = "file.csv";

    // Location must match that of the source table.
    const options = {
      location: 'US',
      gzip: true,
    };

    // Export data from the table into a Google Cloud Storage file.
    // extract() waits for the job to finish.
    const [job] = await bigquery
      .dataset(datasetId)
      .table(tableId)
      .extract(storage.bucket(bucketName).file(filename), options);

    // Surface any job errors before reporting success.
    const errors = job.status.errors;
    if (errors && errors.length > 0) {
      throw errors;
    }

    console.log(`Job ${job.id} completed.`);
  }
  // [END bigquery_extract_table_compressed]
  extractTableCompressed();
}
main(...process.argv.slice(2));

0 commit comments

Comments
 (0)