From 7b5c50de60c7ef0f6fe0f014594c55a6ba1a6b37 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Mon, 19 Nov 2018 10:58:59 -0800 Subject: [PATCH 1/5] chore: add synth.metadata chore: add synth.metadata --- synth.metadata | 1 + 1 file changed, 1 insertion(+) create mode 100644 synth.metadata diff --git a/synth.metadata b/synth.metadata new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/synth.metadata @@ -0,0 +1 @@ +{} \ No newline at end of file From 8f4d555300c07657a569122e37f09faffbc442f9 Mon Sep 17 00:00:00 2001 From: Praveen Kumar Singh Date: Tue, 20 Nov 2018 20:44:13 +0530 Subject: [PATCH 2/5] replace promise with async await --- samples/tables.js | 714 +++++++++++++++++++++------------------------- 1 file changed, 328 insertions(+), 386 deletions(-) diff --git a/samples/tables.js b/samples/tables.js index 46690dfd..317423e2 100644 --- a/samples/tables.js +++ b/samples/tables.js @@ -352,7 +352,7 @@ function loadCSVFromGCS(datasetId, tableId, projectId) { // [END bigquery_load_table_gcs_csv] } -function loadJSONFromGCS(datasetId, tableId, projectId) { +async function loadJSONFromGCS(datasetId, tableId, projectId) { // [START bigquery_load_table_gcs_json] // Imports the Google Cloud client libraries const {BigQuery} = require('@google-cloud/bigquery'); @@ -396,29 +396,22 @@ function loadJSONFromGCS(datasetId, tableId, projectId) { }; // Loads data from a Google Cloud Storage file into the table - bigquery + const [job] = await bigquery .dataset(datasetId) .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata) - .then(results => { - const job = results[0]; - - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); + .load(storage.bucket(bucketName).file(filename), metadata); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - }) - .catch(err => { - console.error('ERROR:', err); - }); + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } // [END bigquery_load_table_gcs_json] } -function loadCSVFromGCSAutodetect(datasetId, tableId, projectId) { +async function loadCSVFromGCSAutodetect(datasetId, tableId, projectId) { // [START bigquery_load_table_gcs_csv_autodetect] // Imports the Google Cloud client libraries const {BigQuery} = require('@google-cloud/bigquery'); @@ -458,29 +451,22 @@ function loadCSVFromGCSAutodetect(datasetId, tableId, projectId) { }; // Loads data from a Google Cloud Storage file into the table - bigquery + const [job] = await bigquery .dataset(datasetId) .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata) - .then(results => { - const job = results[0]; - - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); + .load(storage.bucket(bucketName).file(filename), metadata); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - }) - .catch(err => { - console.error('ERROR:', err); - }); + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } // [END bigquery_load_table_gcs_csv_autodetect] } -function 
loadJSONFromGCSAutodetect(datasetId, tableId, projectId) { +async function loadJSONFromGCSAutodetect(datasetId, tableId, projectId) { // [START bigquery_load_table_gcs_json_autodetect] // Imports the Google Cloud client libraries const {BigQuery} = require('@google-cloud/bigquery'); @@ -519,29 +505,22 @@ function loadJSONFromGCSAutodetect(datasetId, tableId, projectId) { }; // Loads data from a Google Cloud Storage file into the table - bigquery + const [job] = await bigquery .dataset(datasetId) .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata) - .then(results => { - const job = results[0]; - - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); + .load(storage.bucket(bucketName).file(filename), metadata); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - }) - .catch(err => { - console.error('ERROR:', err); - }); + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } // [END bigquery_load_table_gcs_json_autodetect] } -function loadCSVFromGCSTruncate(datasetId, tableId, projectId) { +async function loadCSVFromGCSTruncate(datasetId, tableId, projectId) { // [START bigquery_load_table_gcs_csv_truncate] // Imports the Google Cloud client libraries const {BigQuery} = require('@google-cloud/bigquery'); @@ -588,29 +567,22 @@ function loadCSVFromGCSTruncate(datasetId, tableId, projectId) { }; // Loads data from a Google Cloud Storage file into the table - bigquery + const [job] = await bigquery .dataset(datasetId) .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata) - .then(results => { - const job = results[0]; - - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); + .load(storage.bucket(bucketName).file(filename), metadata); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - }) - .catch(err => { - console.error('ERROR:', err); - }); + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } // [END bigquery_load_table_gcs_csv_truncate] } -function loadJSONFromGCSTruncate(datasetId, tableId, projectId) { +async function loadJSONFromGCSTruncate(datasetId, tableId, projectId) { // [START bigquery_load_table_gcs_json_truncate] // Imports the Google Cloud client libraries const {BigQuery} = require('@google-cloud/bigquery'); @@ -656,29 +628,22 @@ function loadJSONFromGCSTruncate(datasetId, tableId, projectId) { }; // Loads data from a Google Cloud Storage file into the table - bigquery + const [job] = await bigquery .dataset(datasetId) .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata) - .then(results => { - const job = results[0]; - - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); + .load(storage.bucket(bucketName).file(filename), metadata); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - }) - .catch(err => { - console.error('ERROR:', err); - }); + // Check the job's status for errors + const 
errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } // [END bigquery_load_table_gcs_json_truncate] } -function loadParquetFromGCSTruncate(datasetId, tableId, projectId) { +async function loadParquetFromGCSTruncate(datasetId, tableId, projectId) { // [START bigquery_load_table_gcs_parquet_truncate] // Imports the Google Cloud client libraries const {BigQuery} = require('@google-cloud/bigquery'); @@ -718,29 +683,22 @@ function loadParquetFromGCSTruncate(datasetId, tableId, projectId) { }; // Loads data from a Google Cloud Storage file into the table - bigquery + const [job] = await bigquery .dataset(datasetId) .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata) - .then(results => { - const job = results[0]; - - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); + .load(storage.bucket(bucketName).file(filename), metadata); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - }) - .catch(err => { - console.error('ERROR:', err); - }); + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } // [END bigquery_load_table_gcs_parquet_truncate] } -function loadOrcFromGCSTruncate(datasetId, tableId, projectId) { +async function loadOrcFromGCSTruncate(datasetId, tableId, projectId) { // [START bigquery_load_table_gcs_orc_truncate] // Imports the Google Cloud client libraries const {BigQuery} = require('@google-cloud/bigquery'); @@ -780,29 +738,22 @@ function loadOrcFromGCSTruncate(datasetId, tableId, projectId) { }; // Loads data from a Google Cloud Storage file into the table - bigquery + const [job] = await bigquery .dataset(datasetId) .table(tableId) - .load(storage.bucket(bucketName).file(filename), metadata) - .then(results => { - const job = results[0]; - - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); + .load(storage.bucket(bucketName).file(filename), metadata); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - }) - .catch(err => { - console.error('ERROR:', err); - }); + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } // [END bigquery_load_table_gcs_orc_truncate] } -function extractTableToGCS( +async function extractTableToGCS( datasetId, tableId, bucketName, @@ -833,29 +784,22 @@ function extractTableToGCS( }); // Exports data from the table into a Google Cloud Storage file - bigquery + const [job] = await bigquery .dataset(datasetId) .table(tableId) - .extract(storage.bucket(bucketName).file(filename)) - .then(results => { - const job = results[0]; - - // load() waits for the job to finish - console.log(`Job ${job.id} completed.`); + .extract(storage.bucket(bucketName).file(filename)); + // load() waits for the job to finish + console.log(`Job ${job.id} completed.`); - // Check the job's status for errors - const errors = job.status.errors; - if (errors && errors.length > 0) { - throw errors; - } - }) - .catch(err => { - console.error('ERROR:', err); - }); + // Check the job's status for errors + const errors = job.status.errors; + if (errors && errors.length > 0) { + throw errors; + } // [END 
bigquery_extract_table] } -function insertRowsAsStream(datasetId, tableId, rows, projectId) { +async function insertRowsAsStream(datasetId, tableId, rows, projectId) { // [START bigquery_table_insert_rows] // Imports the Google Cloud client library const {BigQuery} = require('@google-cloud/bigquery'); @@ -874,264 +818,262 @@ function insertRowsAsStream(datasetId, tableId, rows, projectId) { }); // Inserts data into a table - bigquery + await bigquery .dataset(datasetId) .table(tableId) - .insert(rows) - .then(() => { - console.log(`Inserted ${rows.length} rows`); - }) - .catch(err => { - if (err && err.name === 'PartialFailureError') { - if (err.errors && err.errors.length > 0) { - console.log('Insert errors:'); - err.errors.forEach(err => console.error(err)); - } - } else { - console.error('ERROR:', err); - } - }); + .insert(rows); + console.log(`Inserted ${rows.length} rows`); // [END bigquery_table_insert_rows] } -const fs = require(`fs`); - -require(`yargs`) - .demand(1) - .command( - `create `, - `Creates a new table.`, - {}, - opts => { - createTable(opts.datasetId, opts.tableId, opts.schema, opts.projectId); - } - ) - .command( - `list `, - `Lists all tables in a dataset.`, - {}, - opts => { - listTables(opts.datasetId, opts.projectId); - } - ) - .command( - `delete `, - `Deletes a table.`, - {}, - opts => { - deleteTable(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `copy `, - `Makes a copy of a table.`, - {}, - opts => { - copyTable( - opts.srcDatasetId, - opts.srcTableId, - opts.destDatasetId, - opts.destTableId, - opts.projectId - ); - } - ) - .command( - `browse `, - `Lists rows in a table.`, - {}, - opts => { - browseRows(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-local-csv `, - `Loads data from a local file into a table.`, - {}, - opts => { - loadLocalFile( - opts.datasetId, - opts.tableId, - opts.fileName, - opts.projectId - ); - } - ) - .command( - `load-gcs-orc `, - `Loads sample ORC data from a Google Cloud Storage file into a table.`, - {}, - opts => { - loadORCFromGCS(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-parquet `, - `Loads sample Parquet data from a Google Cloud Storage file into a table.`, - {}, - opts => { - loadParquetFromGCS(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-csv `, - `Loads sample CSV data from a Google Cloud Storage file into a table.`, - {}, - opts => { - loadCSVFromGCS(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-json `, - `Loads sample JSON data from a Google Cloud Storage file into a table.`, - {}, - opts => { - loadJSONFromGCS(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-csv-autodetect `, - `Loads sample CSV data from a Google Cloud Storage file into a table.`, - {}, - opts => { - loadCSVFromGCSAutodetect(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-json-autodetect `, - `Loads sample JSON data from a Google Cloud Storage file into a table.`, - {}, - opts => { - loadJSONFromGCSAutodetect(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-csv-truncate `, - `Loads sample CSV data from GCS, replacing an existing table.`, - {}, - opts => { - loadCSVFromGCSTruncate(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-json-truncate `, - `Loads sample JSON data from GCS, replacing an existing table.`, - {}, - opts => { - loadJSONFromGCSTruncate(opts.datasetId, opts.tableId, 
opts.projectId); - } - ) - .command( - `load-gcs-parquet-truncate `, - `Loads sample Parquet data from GCS, replacing an existing table.`, - {}, - opts => { - loadParquetFromGCSTruncate(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `load-gcs-orc-truncate `, - `Loads sample Orc data from GCS, replacing an existing table.`, - {}, - opts => { - loadOrcFromGCSTruncate(opts.datasetId, opts.tableId, opts.projectId); - } - ) - .command( - `extract `, - `Extract a table from BigQuery to Google Cloud Storage.`, - {}, - opts => { - extractTableToGCS( - opts.datasetId, - opts.tableId, - opts.bucketName, - opts.fileName, - opts.projectId - ); - } - ) - .command( - `insert `, - `Insert a JSON array (as a string or newline-delimited file) into a BigQuery table.`, - {}, - opts => { - let content; - try { - content = fs.readFileSync(opts.json_or_file); - } catch (err) { - content = opts.json_or_file; +async function main() { + const fs = require(`fs`); + + require(`yargs`) + .demand(1) + .command( + `create `, + `Creates a new table.`, + {}, + opts => { + createTable(opts.datasetId, opts.tableId, opts.schema, opts.projectId); } - - let rows = null; - try { - rows = JSON.parse(content); - } catch (err) { - throw new Error( - `"json_or_file" (or the file it points to) is not a valid JSON array.` + ) + .command( + `list `, + `Lists all tables in a dataset.`, + {}, + opts => { + listTables(opts.datasetId, opts.projectId); + } + ) + .command( + `delete `, + `Deletes a table.`, + {}, + opts => { + deleteTable(opts.datasetId, opts.tableId, opts.projectId); + } + ) + .command( + `copy `, + `Makes a copy of a table.`, + {}, + opts => { + copyTable( + opts.srcDatasetId, + opts.srcTableId, + opts.destDatasetId, + opts.destTableId, + opts.projectId + ); + } + ) + .command( + `browse `, + `Lists rows in a table.`, + {}, + opts => { + browseRows(opts.datasetId, opts.tableId, opts.projectId); + } + ) + .command( + `load-local-csv `, + `Loads data from a local file into a table.`, + {}, + opts => { + loadLocalFile( + opts.datasetId, + opts.tableId, + opts.fileName, + opts.projectId + ); + } + ) + .command( + `load-gcs-orc `, + `Loads sample ORC data from a Google Cloud Storage file into a table.`, + {}, + opts => { + loadORCFromGCS(opts.datasetId, opts.tableId, opts.projectId); + } + ) + .command( + `load-gcs-parquet `, + `Loads sample Parquet data from a Google Cloud Storage file into a table.`, + {}, + opts => { + loadParquetFromGCS(opts.datasetId, opts.tableId, opts.projectId); + } + ) + .command( + `load-gcs-csv `, + `Loads sample CSV data from a Google Cloud Storage file into a table.`, + {}, + opts => { + loadCSVFromGCS(opts.datasetId, opts.tableId, opts.projectId); + } + ) + .command( + `load-gcs-json `, + `Loads sample JSON data from a Google Cloud Storage file into a table.`, + {}, + opts => { + loadJSONFromGCS(opts.datasetId, opts.tableId, opts.projectId); + } + ) + .command( + `load-gcs-csv-autodetect `, + `Loads sample CSV data from a Google Cloud Storage file into a table.`, + {}, + opts => { + loadCSVFromGCSAutodetect(opts.datasetId, opts.tableId, opts.projectId); + } + ) + .command( + `load-gcs-json-autodetect `, + `Loads sample JSON data from a Google Cloud Storage file into a table.`, + {}, + opts => { + loadJSONFromGCSAutodetect(opts.datasetId, opts.tableId, opts.projectId); + } + ) + .command( + `load-gcs-csv-truncate `, + `Loads sample CSV data from GCS, replacing an existing table.`, + {}, + opts => { + loadCSVFromGCSTruncate(opts.datasetId, opts.tableId, opts.projectId); + } + 
) + .command( + `load-gcs-json-truncate `, + `Loads sample JSON data from GCS, replacing an existing table.`, + {}, + opts => { + loadJSONFromGCSTruncate(opts.datasetId, opts.tableId, opts.projectId); + } + ) + .command( + `load-gcs-parquet-truncate `, + `Loads sample Parquet data from GCS, replacing an existing table.`, + {}, + opts => { + loadParquetFromGCSTruncate( + opts.datasetId, + opts.tableId, + opts.projectId + ); + } + ) + .command( + `load-gcs-orc-truncate `, + `Loads sample Orc data from GCS, replacing an existing table.`, + {}, + opts => { + loadOrcFromGCSTruncate(opts.datasetId, opts.tableId, opts.projectId); + } + ) + .command( + `extract `, + `Extract a table from BigQuery to Google Cloud Storage.`, + {}, + opts => { + extractTableToGCS( + opts.datasetId, + opts.tableId, + opts.bucketName, + opts.fileName, + opts.projectId ); } + ) + .command( + `insert `, + `Insert a JSON array (as a string or newline-delimited file) into a BigQuery table.`, + {}, + opts => { + let content; + try { + content = fs.readFileSync(opts.json_or_file); + } catch (err) { + content = opts.json_or_file; + } + + let rows = null; + try { + rows = JSON.parse(content); + } catch (err) { + throw new Error( + `"json_or_file" (or the file it points to) is not a valid JSON array.` + ); + } - if (!Array.isArray(rows)) { - throw new Error( - `"json_or_file" (or the file it points to) is not a valid JSON array.` + if (!Array.isArray(rows)) { + throw new Error( + `"json_or_file" (or the file it points to) is not a valid JSON array.` + ); + } + + insertRowsAsStream( + opts.datasetId, + opts.tableId, + rows, + opts.projectId || process.env.GCLOUD_PROJECT ); } + ) + .example( + `node $0 create my-project-id my_dataset my_table "Name:string, Age:integer, Weight:float, IsMagic:boolean"`, + `Creates a new table named "my_table" in "my_dataset".` + ) + .example( + `node $0 list my-project-id my_dataset`, + `Lists tables in "my_dataset".` + ) + .example( + `node $0 browse my-project-id my_dataset my_table`, + `Displays rows from "my_table" in "my_dataset".` + ) + .example( + `node $0 delete my-project-id my_dataset my_table`, + `Deletes "my_table" from "my_dataset".` + ) + .example( + `node $0 load my-project-id my_dataset my_table ./data.csv`, + `Imports a local file into a table.` + ) + .example( + `node $0 load-gcs my-project-id my_dataset my_table my-bucket data.csv`, + `Imports a GCS file into a table.` + ) + .example( + `node $0 extract my-project-id my_dataset my_table my-bucket my-file`, + `Exports my_dataset:my_table to gcs://my-bucket/my-file as raw CSV.` + ) + .example( + `node $0 extract my-project-id my_dataset my_table my-bucket my-file -f JSON --gzip`, + `Exports my_dataset:my_table to gcs://my-bucket/my-file as gzipped JSON.` + ) + .example( + `node $0 insert my-project-id my_dataset my_table json_string`, + `Inserts the JSON array represented by json_string into my_dataset:my_table.` + ) + .example( + `node $0 insert my-project-id my_dataset my_table json_file`, + `Inserts the JSON objects contained in json_file (one per line) into my_dataset:my_table.` + ) + .example( + `node $0 copy my-project-id src_dataset src_table dest_dataset dest_table`, + `Copies src_dataset:src_table to dest_dataset:dest_table.` + ) + .wrap(120) + .recommendCommands() + .epilogue( + `For more information, see https://cloud.google.com/bigquery/docs` + ) + .help() + .strict().argv; +} - insertRowsAsStream( - opts.datasetId, - opts.tableId, - rows, - opts.projectId || process.env.GCLOUD_PROJECT - ); - } - ) - .example( - `node 
$0 create my-project-id my_dataset my_table "Name:string, Age:integer, Weight:float, IsMagic:boolean"`, - `Creates a new table named "my_table" in "my_dataset".` - ) - .example( - `node $0 list my-project-id my_dataset`, - `Lists tables in "my_dataset".` - ) - .example( - `node $0 browse my-project-id my_dataset my_table`, - `Displays rows from "my_table" in "my_dataset".` - ) - .example( - `node $0 delete my-project-id my_dataset my_table`, - `Deletes "my_table" from "my_dataset".` - ) - .example( - `node $0 load my-project-id my_dataset my_table ./data.csv`, - `Imports a local file into a table.` - ) - .example( - `node $0 load-gcs my-project-id my_dataset my_table my-bucket data.csv`, - `Imports a GCS file into a table.` - ) - .example( - `node $0 extract my-project-id my_dataset my_table my-bucket my-file`, - `Exports my_dataset:my_table to gcs://my-bucket/my-file as raw CSV.` - ) - .example( - `node $0 extract my-project-id my_dataset my_table my-bucket my-file -f JSON --gzip`, - `Exports my_dataset:my_table to gcs://my-bucket/my-file as gzipped JSON.` - ) - .example( - `node $0 insert my-project-id my_dataset my_table json_string`, - `Inserts the JSON array represented by json_string into my_dataset:my_table.` - ) - .example( - `node $0 insert my-project-id my_dataset my_table json_file`, - `Inserts the JSON objects contained in json_file (one per line) into my_dataset:my_table.` - ) - .example( - `node $0 copy my-project-id src_dataset src_table dest_dataset dest_table`, - `Copies src_dataset:src_table to dest_dataset:dest_table.` - ) - .wrap(120) - .recommendCommands() - .epilogue(`For more information, see https://cloud.google.com/bigquery/docs`) - .help() - .strict().argv; +main().catch(console.error); From 2a8284ebac2061248f4cb6e19f01f49eec7a60b5 Mon Sep 17 00:00:00 2001 From: Stephen Date: Tue, 20 Nov 2018 11:21:24 -0500 Subject: [PATCH 3/5] fix: Pin @types/sinon to last compatible version (#267) * fix: Pin @types/sinon to last compatible version * chore: Add trailing whitespace --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ebb8aae5..c763b671 100644 --- a/package.json +++ b/package.json @@ -70,7 +70,7 @@ "@types/mocha": "^5.2.5", "@types/proxyquire": "^1.3.28", "@types/request": "^2.48.0", - "@types/sinon": "^5.0.5", + "@types/sinon": "5.0.5", "@types/uuid": "^3.4.4", "codecov": "^3.0.0", "eslint": "^5.0.0", From a04562ceac53340efaf30c9fffc08d77cacbc5a9 Mon Sep 17 00:00:00 2001 From: negatron99 Date: Tue, 20 Nov 2018 22:08:35 +0000 Subject: [PATCH 4/5] fix: Changing import of Big from big.js so it doesn't use default (#270) --- src/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/index.ts b/src/index.ts index 7d4d0df2..fa09b5f3 100644 --- a/src/index.ts +++ b/src/index.ts @@ -18,7 +18,7 @@ import * as common from '@google-cloud/common'; import {paginator} from '@google-cloud/paginator'; import {promisifyAll} from '@google-cloud/promisify'; import * as arrify from 'arrify'; -import Big from 'big.js'; +import {Big} from 'big.js'; import * as extend from 'extend'; const format = require('string-format-obj'); From 834b18f7e2906f595d32cf2398478207532f5bda Mon Sep 17 00:00:00 2001 From: vijay-qlogic <36055624+vijay-qlogic@users.noreply.github.com> Date: Fri, 23 Nov 2018 02:15:58 +0530 Subject: [PATCH 5/5] refactor(tests): convert samples tests from ava to mocha (#248) --- samples/package.json | 5 +- samples/system-test/.eslintrc.yml | 3 +- samples/system-test/datasets.test.js | 67 ++- 
samples/system-test/queries.test.js | 34 +- samples/system-test/quickstart.test.js | 75 ++- samples/system-test/tables.test.js | 642 +++++++++++-------------- 6 files changed, 377 insertions(+), 449 deletions(-) diff --git a/samples/package.json b/samples/package.json index 7a5b5a28..3b91ef73 100644 --- a/samples/package.json +++ b/samples/package.json @@ -10,8 +10,7 @@ }, "scripts": { "test": "npm run cover", - "ava": "ava -T 3m --verbose test/*.test.js system-test/*.test.js", - "cover": "nyc --reporter=lcov --cache ava -T 3m --verbose test/*.test.js system-test/*.test.js && nyc report" + "cover": "nyc --reporter=lcov --cache mocha system-test/*.test.js --timeout 1800000 && nyc report" }, "dependencies": { "@google-cloud/bigquery": "^2.0.1", @@ -20,7 +19,7 @@ }, "devDependencies": { "@google-cloud/nodejs-repo-tools": "^3.0.0", - "ava": "^0.25.0", + "mocha": "^5.0.0", "nyc": "^13.0.0", "proxyquire": "^2.0.0", "sinon": "^7.0.0", diff --git a/samples/system-test/.eslintrc.yml b/samples/system-test/.eslintrc.yml index c0289282..0ab526f5 100644 --- a/samples/system-test/.eslintrc.yml +++ b/samples/system-test/.eslintrc.yml @@ -1,5 +1,6 @@ --- +env: + mocha: true rules: node/no-unpublished-require: off - node/no-unsupported-features: off no-empty: off diff --git a/samples/system-test/datasets.test.js b/samples/system-test/datasets.test.js index 8624c0f1..f544d0a9 100644 --- a/samples/system-test/datasets.test.js +++ b/samples/system-test/datasets.test.js @@ -17,7 +17,7 @@ const {BigQuery} = require(`@google-cloud/bigquery`); const path = require(`path`); -const test = require(`ava`); +const assert = require(`assert`); const tools = require(`@google-cloud/nodejs-repo-tools`); const uuid = require(`uuid`); @@ -27,42 +27,37 @@ const datasetId = `nodejs-docs-samples-test-${uuid.v4()}`.replace(/-/gi, '_'); const projectId = process.env.GCLOUD_PROJECT; const bigquery = new BigQuery(); -test.before(tools.checkCredentials); -test.beforeEach(tools.stubConsole); -test.afterEach.always(tools.restoreConsole); -test.after.always(async () => { - try { - await bigquery.dataset(datasetId).delete({force: true}); - } catch (err) {} // ignore error -}); +describe(`Datasets`, () => { + before(tools.checkCredentials); + after(async () => { + try { + await bigquery.dataset(datasetId).delete({force: true}); + } catch (err) {} // ignore error + }); -test.serial(`should create a dataset`, async t => { - const output = await tools.runAsync( - `${cmd} create ${projectId} ${datasetId}`, - cwd - ); - t.is(output, `Dataset ${datasetId} created.`); - const [exists] = await bigquery.dataset(datasetId).exists(); - t.true(exists); -}); + it(`should create a dataset`, async () => { + const output = await tools.runAsync( + `${cmd} create ${projectId} ${datasetId}`, + cwd + ); + assert.strictEqual(output, `Dataset ${datasetId} created.`); + const [exists] = await bigquery.dataset(datasetId).exists(); + assert.ok(exists); + }); -test.serial(`should list datasets`, async t => { - t.plan(0); - await tools - .tryTest(async assert => { - const output = await tools.runAsync(`${cmd} list ${projectId}`, cwd); - assert(output.includes(`Datasets:`)); - assert(output.includes(datasetId)); - }) - .start(); -}); + it(`should list datasets`, async () => { + const output = await tools.runAsync(`${cmd} list ${projectId}`, cwd); + assert.ok(output.includes(`Datasets:`)); + assert.ok(output.includes(datasetId)); + }); -test.serial(`should delete a dataset`, async t => { - const output = await tools.runAsync( - `${cmd} delete ${projectId} 
${datasetId}`, - cwd - ); - t.is(output, `Dataset ${datasetId} deleted.`); - const [exists] = await bigquery.dataset(datasetId).exists(); - t.false(exists); + it(`should delete a dataset`, async () => { + const output = await tools.runAsync( + `${cmd} delete ${projectId} ${datasetId}`, + cwd + ); + assert.strictEqual(output, `Dataset ${datasetId} deleted.`); + const [exists] = await bigquery.dataset(datasetId).exists(); + assert.strictEqual(exists, false); + }); }); diff --git a/samples/system-test/queries.test.js b/samples/system-test/queries.test.js index 37bc11b3..e0547549 100644 --- a/samples/system-test/queries.test.js +++ b/samples/system-test/queries.test.js @@ -16,28 +16,30 @@ 'use strict'; const path = require(`path`); -const test = require(`ava`); +const assert = require(`assert`); const tools = require(`@google-cloud/nodejs-repo-tools`); const cwd = path.join(__dirname, `..`); const cmd = `node queries.js`; -test.before(tools.checkCredentials); +describe(`Queries`, () => { + before(tools.checkCredentials); -test(`should query stackoverflow`, async t => { - const output = await tools.runAsync(`${cmd} stackoverflow`, cwd); - t.true(output.includes(`Query Results:`)); - t.true(output.includes(`views`)); -}); + it(`should query stackoverflow`, async () => { + const output = await tools.runAsync(`${cmd} stackoverflow`, cwd); + assert.ok(output.includes(`Query Results:`)); + assert.ok(output.includes(`views`)); + }); -test(`should run a query`, async t => { - const output = await tools.runAsync(`${cmd} query`, cwd); - t.true(output.includes(`Rows:`)); - t.true(output.includes(`name`)); -}); + it(`should run a query`, async () => { + const output = await tools.runAsync(`${cmd} query`, cwd); + assert.ok(output.includes(`Rows:`)); + assert.ok(output.includes(`name`)); + }); -test(`should run a query with the cache disabled`, async t => { - const output = await tools.runAsync(`${cmd} disable-cache`, cwd); - t.true(output.includes(`Rows:`)); - t.true(output.includes(`corpus`)); + it(`should run a query with the cache disabled`, async () => { + const output = await tools.runAsync(`${cmd} disable-cache`, cwd); + assert.ok(output.includes(`Rows:`)); + assert.ok(output.includes(`corpus`)); + }); }); diff --git a/samples/system-test/quickstart.test.js b/samples/system-test/quickstart.test.js index 02976a50..4b1fa9b6 100644 --- a/samples/system-test/quickstart.test.js +++ b/samples/system-test/quickstart.test.js @@ -17,10 +17,9 @@ const proxyquire = require(`proxyquire`).noPreserveCache(); const sinon = require(`sinon`); -const test = require(`ava`); -const tools = require(`@google-cloud/nodejs-repo-tools`); +const assert = require(`assert`); const uuid = require(`uuid`); - +const tools = require(`@google-cloud/nodejs-repo-tools`); const {BigQuery} = proxyquire(`@google-cloud/bigquery`, {}); const bigquery = new BigQuery(); @@ -28,48 +27,46 @@ const expectedDatasetId = `my_new_dataset`; let datasetId = `nodejs-docs-samples-test-${uuid.v4()}`; datasetId = datasetId.replace(/-/gi, `_`); -test.after.always(async () => { - try { - bigquery.dataset(datasetId).delete({force: true}); - } catch (err) {} // ignore error -}); +describe(`Quickstart`, () => { + beforeEach(tools.stubConsole); + afterEach(tools.restoreConsole); -test.beforeEach(tools.stubConsole); -test.afterEach.always(tools.restoreConsole); + after(async () => { + try { + await bigquery.dataset(datasetId).delete({force: true}); + } catch (err) {} // ignore error + }); -test(`quickstart should create a dataset`, async t => { - await new 
Promise((resolve, reject) => { - const bigqueryMock = { - createDataset: _datasetId => { - t.is(_datasetId, expectedDatasetId); + it(`quickstart should create a dataset`, async () => { + await new Promise((resolve, reject) => { + const bigqueryMock = { + createDataset: async _datasetId => { + assert.strictEqual(_datasetId, expectedDatasetId); - return bigquery - .createDataset(datasetId) - .then(([dataset]) => { - t.not(dataset, undefined); + const [dataset] = await bigquery.createDataset(datasetId); + assert.notStrictEqual(dataset, undefined); - setTimeout(() => { - try { - t.true(console.log.calledOnce); - t.deepEqual(console.log.firstCall.args, [ - `Dataset ${dataset.id} created.`, - ]); - resolve(); - } catch (err) { - reject(err); - } - }, 200); + setTimeout(() => { + try { + assert.ok(console.log.calledOnce); + assert.deepStrictEqual(console.log.firstCall.args, [ + `Dataset ${dataset.id} created.`, + ]); + resolve(); + } catch (err) { + reject(err); + } + }, 200); - return [dataset]; - }) - .catch(reject); - }, - }; + return [dataset]; + }, + }; - proxyquire(`../quickstart`, { - '@google-cloud/bigquery': { - BigQuery: sinon.stub().returns(bigqueryMock), - }, + proxyquire(`../quickstart`, { + '@google-cloud/bigquery': { + BigQuery: sinon.stub().returns(bigqueryMock), + }, + }); }); }); }); diff --git a/samples/system-test/tables.test.js b/samples/system-test/tables.test.js index 97963625..dc916bfe 100644 --- a/samples/system-test/tables.test.js +++ b/samples/system-test/tables.test.js @@ -15,7 +15,7 @@ 'use strict'; -const test = require(`ava`); +const assert = require(`assert`); const path = require(`path`); const uuid = require(`uuid`); const tools = require(`@google-cloud/nodejs-repo-tools`); @@ -48,362 +48,296 @@ const rows = [ const bigquery = new BigQuery(); -test.before(tools.checkCredentials); -test.before(async () => { - const [bucket] = await storage.createBucket(bucketName); - await Promise.all([ - bucket.upload(localFilePath), - bigquery.createDataset(srcDatasetId), - bigquery.createDataset(destDatasetId), - ]); -}); -test.beforeEach(tools.stubConsole); -test.afterEach.always(tools.restoreConsole); -test.after.always(async () => { - try { - await bigquery.dataset(srcDatasetId).delete({force: true}); - } catch (err) {} // ignore error - try { - await bigquery.dataset(destDatasetId).delete({force: true}); - } catch (err) {} // ignore error - try { - await storage.bucket(bucketName).deleteFiles({force: true}); - } catch (err) {} // ignore error - try { - // Try deleting files a second time - await storage.bucket(bucketName).deleteFiles({force: true}); - } catch (err) {} // ignore error - try { - await bigquery.dataset(srcDatasetId).delete({force: true}); - } catch (err) {} // ignore error - try { - await storage.bucket(bucketName).delete(); - } catch (err) {} // ignore error -}); - -test.serial(`should create a table`, async t => { - const output = await tools.runAsync( - `${cmd} create ${projectId} ${datasetId} ${tableId} "${schema}"`, - cwd - ); - t.is(output, `Table ${tableId} created.`); - const [exists] = await bigquery - .dataset(datasetId) - .table(tableId) - .exists(); - t.true(exists); -}); - -test.serial(`should list tables`, async t => { - t.plan(0); - await tools - .tryTest(async assert => { - const output = await tools.runAsync( - `${cmd} list ${projectId} ${datasetId}`, - cwd - ); - assert(output.includes(`Tables:`)); - assert(output.includes(tableId)); - }) - .start(); -}); - -test.serial(`should load a local CSV file`, async t => { - t.plan(1); - const output 
= await tools.runAsync( - `${cmd} load-local-csv ${projectId} ${datasetId} ${tableId} ${localFilePath}`, - cwd - ); - t.regex(output, /completed\./); - await tools - .tryTest(async assert => { - const [rows] = await bigquery - .dataset(datasetId) - .table(tableId) - .getRows(); - assert.strictEqual(rows.length, 1); - }) - .start(); -}); - -test.serial(`should browse table rows`, async t => { - const output = await tools.runAsync( - `${cmd} browse ${projectId} ${datasetId} ${tableId}`, - cwd - ); - t.is( - output, - `Rows:\n{ Name: 'Gandalf', Age: 2000, Weight: 140, IsMagic: true }` - ); -}); - -test.serial(`should extract a table to GCS`, async t => { - t.plan(1); - const output = await tools.runAsync( - `${cmd} extract ${projectId} ${datasetId} ${tableId} ${bucketName} ${exportFileName}`, - cwd - ); - t.regex(output, /completed\./); - await tools - .tryTest(async assert => { - const [exists] = await storage - .bucket(bucketName) - .file(exportFileName) - .exists(); - assert(exists); - }) - .start(); -}); - -test(`should load a GCS ORC file`, async t => { - t.plan(1); - const tableId = generateUuid(); - - const output = await tools.runAsync( - `${cmd} load-gcs-orc ${projectId} ${datasetId} ${tableId}`, - cwd - ); - t.regex(output, /completed\./); - await tools - .tryTest(async assert => { - const [rows] = await bigquery - .dataset(datasetId) - .table(tableId) - .getRows(); - assert(rows.length > 0); - }) - .start(); -}); - -test(`should load a GCS Parquet file`, async t => { - t.plan(1); - const tableId = generateUuid(); - - const output = await tools.runAsync( - `${cmd} load-gcs-parquet ${projectId} ${datasetId} ${tableId}`, - cwd - ); - t.regex(output, /completed\./); - await tools - .tryTest(async assert => { - const [rows] = await bigquery - .dataset(datasetId) - .table(tableId) - .getRows(); - assert(rows.length > 0); - }) - .start(); -}); - -test(`should load a GCS CSV file with explicit schema`, async t => { - t.plan(1); - const tableId = generateUuid(); - - const output = await tools.runAsync( - `${cmd} load-gcs-csv ${projectId} ${datasetId} ${tableId}`, - cwd - ); - t.regex(output, /completed\./); - await tools - .tryTest(async assert => { - const [rows] = await bigquery - .dataset(datasetId) - .table(tableId) - .getRows(); - assert(rows.length > 0); - }) - .start(); -}); - -test(`should load a GCS JSON file with explicit schema`, async t => { - t.plan(1); - const tableId = generateUuid(); - - const output = await tools.runAsync( - `${cmd} load-gcs-json ${projectId} ${datasetId} ${tableId}`, - cwd - ); - t.regex(output, /completed\./); - await tools - .tryTest(async assert => { - const [rows] = await bigquery - .dataset(datasetId) - .table(tableId) - .getRows(); - assert(rows.length > 0); - }) - .start(); -}); - -test(`should load a GCS CSV file with autodetected schema`, async t => { - t.plan(1); - const tableId = generateUuid(); - - const output = await tools.runAsync( - `${cmd} load-gcs-csv-autodetect ${projectId} ${datasetId} ${tableId}`, - cwd - ); - t.regex(output, /completed\./); - await tools - .tryTest(async assert => { - const [rows] = await bigquery - .dataset(datasetId) - .table(tableId) - .getRows(); - assert(rows.length > 0); - }) - .start(); -}); - -test(`should load a GCS JSON file with autodetected schema`, async t => { - t.plan(1); - const tableId = generateUuid(); - - const output = await tools.runAsync( - `${cmd} load-gcs-json-autodetect ${projectId} ${datasetId} ${tableId}`, - cwd - ); - t.regex(output, /completed\./); - await tools - .tryTest(async assert => 
{ - const [rows] = await bigquery - .dataset(datasetId) - .table(tableId) - .getRows(); - assert(rows.length > 0); - }) - .start(); -}); - -test(`should load a GCS CSV file truncate table`, async t => { - t.plan(1); - const tableId = generateUuid(); - - const output = await tools.runAsync( - `${cmd} load-gcs-csv-truncate ${projectId} ${datasetId} ${tableId}`, - cwd - ); - t.regex(output, /completed\./); - await tools - .tryTest(async assert => { - const [rows] = await bigquery - .dataset(datasetId) - .table(tableId) - .getRows(); - assert(rows.length > 0); - }) - .start(); -}); - -test(`should load a GCS JSON file truncate table`, async t => { - t.plan(1); - const tableId = generateUuid(); - - const output = await tools.runAsync( - `${cmd} load-gcs-json-truncate ${projectId} ${datasetId} ${tableId}`, - cwd - ); - t.regex(output, /completed\./); - await tools - .tryTest(async assert => { - const [rows] = await bigquery - .dataset(datasetId) - .table(tableId) - .getRows(); - assert(rows.length > 0); - }) - .start(); -}); - -test(`should load a GCS parquet file truncate table`, async t => { - t.plan(1); - const tableId = generateUuid(); - - const output = await tools.runAsync( - `${cmd} load-gcs-parquet-truncate ${projectId} ${datasetId} ${tableId}`, - cwd - ); - t.regex(output, /completed\./); - await tools - .tryTest(async assert => { - const [rows] = await bigquery - .dataset(datasetId) - .table(tableId) - .getRows(); - assert(rows.length > 0); - }) - .start(); -}); +describe(`Tables`, () => { + before(tools.checkCredentials); + + before(async () => { + const [bucket] = await storage.createBucket(bucketName); + await Promise.all([ + bucket.upload(localFilePath), + bigquery.createDataset(srcDatasetId), + bigquery.createDataset(destDatasetId), + ]); + }); + + after(async () => { + try { + await bigquery.dataset(srcDatasetId).delete({force: true}); + } catch (err) {} // ignore error + try { + await bigquery.dataset(destDatasetId).delete({force: true}); + } catch (err) {} // ignore error + try { + await storage.bucket(bucketName).deleteFiles({force: true}); + } catch (err) {} // ignore error + try { + // Try deleting files a second time + await storage.bucket(bucketName).deleteFiles({force: true}); + } catch (err) {} // ignore error + try { + await bigquery.dataset(srcDatasetId).delete({force: true}); + } catch (err) {} // ignore error + try { + await storage.bucket(bucketName).delete(); + } catch (err) {} // ignore error + }); + + it(`should create a table`, async () => { + const output = await tools.runAsync( + `${cmd} create ${projectId} ${datasetId} ${tableId} "${schema}"`, + cwd + ); + assert.strictEqual(output, `Table ${tableId} created.`); + const [exists] = await bigquery + .dataset(datasetId) + .table(tableId) + .exists(); + assert.ok(exists); + }); + + it(`should list tables`, async () => { + const output = await tools.runAsync( + `${cmd} list ${projectId} ${datasetId}`, + cwd + ); + assert.ok(output.includes(`Tables:`)); + assert.ok(output.includes(tableId)); + }); + + it(`should load a local CSV file`, async () => { + const output = await tools.runAsync( + `${cmd} load-local-csv ${projectId} ${datasetId} ${tableId} ${localFilePath}`, + cwd + ); + assert.ok(new RegExp(/completed\./).test(output)); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.strictEqual(rows.length, 1); + }); + + it(`should browse table rows`, async () => { + const output = await tools.runAsync( + `${cmd} browse ${projectId} ${datasetId} ${tableId}`, + cwd + ); + 
assert.strictEqual( + output, + `Rows:\n{ Name: 'Gandalf', Age: 2000, Weight: 140, IsMagic: true }` + ); + }); + + it(`should extract a table to GCS`, async () => { + const output = await tools.runAsync( + `${cmd} extract ${projectId} ${datasetId} ${tableId} ${bucketName} ${exportFileName}`, + cwd + ); -test(`should load a GCS ORC file truncate table`, async t => { - t.plan(1); - const tableId = generateUuid(); - - const output = await tools.runAsync( - `${cmd} load-gcs-orc-truncate ${projectId} ${datasetId} ${tableId}`, - cwd - ); - t.regex(output, /completed\./); - await tools - .tryTest(async assert => { - const [rows] = await bigquery - .dataset(datasetId) - .table(tableId) - .getRows(); - assert(rows.length > 0); - }) - .start(); -}); + assert.ok(new RegExp(/completed\./).test(output)); + const [exists] = await storage + .bucket(bucketName) + .file(exportFileName) + .exists(); + assert.ok(exists); + }); -test.serial(`should copy a table`, async t => { - t.plan(1); - const output = await tools.runAsync( - `${cmd} copy ${projectId} ${srcDatasetId} ${srcTableId} ${destDatasetId} ${destTableId}`, - cwd - ); - t.regex(output, /completed\./); - await tools - .tryTest(async assert => { - const [rows] = await bigquery - .dataset(destDatasetId) - .table(destTableId) - .getRows(); - assert(rows.length > 0); - }) - .start(); -}); + it(`should load a GCS ORC file`, async () => { + const tableId = generateUuid(); -test.serial(`should insert rows`, async t => { - t.plan(3); - const err = await t.throws( - tools.runAsync( - `${cmd} insert ${projectId} ${datasetId} ${tableId} 'foo.bar'`, + const output = await tools.runAsync( + `${cmd} load-gcs-orc ${projectId} ${datasetId} ${tableId}`, cwd - ) - ); - t.true( - err.message.includes( - `"json_or_file" (or the file it points to) is not a valid JSON array.` - ) - ); - const output = await tools.runAsync( - `${cmd} insert ${projectId} ${datasetId} ${tableId} '${JSON.stringify( - rows - )}'`, - cwd - ); - t.is(output.includes(`Inserted 2 rows`), true); -}); + ); + assert.ok(new RegExp(/completed\./).test(output)); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it(`should load a GCS Parquet file`, async () => { + const tableId = generateUuid(); + + const output = await tools.runAsync( + `${cmd} load-gcs-parquet ${projectId} ${datasetId} ${tableId}`, + cwd + ); + assert.ok(new RegExp(/completed\./).test(output)); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it(`should load a GCS CSV file with explicit schema`, async () => { + const tableId = generateUuid(); + + const output = await tools.runAsync( + `${cmd} load-gcs-csv ${projectId} ${datasetId} ${tableId}`, + cwd + ); + assert.ok(new RegExp(/completed\./).test(output)); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it(`should load a GCS JSON file with explicit schema`, async () => { + const tableId = generateUuid(); + + const output = await tools.runAsync( + `${cmd} load-gcs-json ${projectId} ${datasetId} ${tableId}`, + cwd + ); + assert.ok(new RegExp(/completed\./).test(output)); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it(`should load a GCS CSV file with autodetected schema`, async () => { + const tableId = generateUuid(); + + const output = await tools.runAsync( + `${cmd} 
load-gcs-csv-autodetect ${projectId} ${datasetId} ${tableId}`, + cwd + ); + assert.ok(new RegExp(/completed\./).test(output)); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it(`should load a GCS JSON file with autodetected schema`, async () => { + const tableId = generateUuid(); + + const output = await tools.runAsync( + `${cmd} load-gcs-json-autodetect ${projectId} ${datasetId} ${tableId}`, + cwd + ); + assert.ok(new RegExp(/completed\./).test(output)); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it(`should load a GCS CSV file truncate table`, async () => { + const tableId = generateUuid(); + + const output = await tools.runAsync( + `${cmd} load-gcs-csv-truncate ${projectId} ${datasetId} ${tableId}`, + cwd + ); + assert.ok(new RegExp(/completed\./).test(output)); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it(`should load a GCS JSON file truncate table`, async () => { + const tableId = generateUuid(); + + const output = await tools.runAsync( + `${cmd} load-gcs-json-truncate ${projectId} ${datasetId} ${tableId}`, + cwd + ); + assert.ok(new RegExp(/completed\./).test(output)); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it(`should load a GCS parquet file truncate table`, async () => { + const tableId = generateUuid(); + + const output = await tools.runAsync( + `${cmd} load-gcs-parquet-truncate ${projectId} ${datasetId} ${tableId}`, + cwd + ); + assert.ok(new RegExp(/completed\./).test(output)); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it(`should load a GCS ORC file truncate table`, async () => { + const tableId = generateUuid(); + + const output = await tools.runAsync( + `${cmd} load-gcs-orc-truncate ${projectId} ${datasetId} ${tableId}`, + cwd + ); + assert.ok(new RegExp(/completed\./).test(output)); + const [rows] = await bigquery + .dataset(datasetId) + .table(tableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it(`should copy a table`, async () => { + const output = await tools.runAsync( + `${cmd} copy ${projectId} ${srcDatasetId} ${srcTableId} ${destDatasetId} ${destTableId}`, + cwd + ); + assert.ok(new RegExp(/completed\./).test(output)); + const [rows] = await bigquery + .dataset(destDatasetId) + .table(destTableId) + .getRows(); + assert.ok(rows.length > 0); + }); + + it(`should insert rows`, async () => { + tools + .runAsync( + `${cmd} insert ${projectId} ${datasetId} ${tableId} 'foo.bar'`, + cwd + ) + .catch(err => { + assert.ok( + err.message.includes( + `"json_or_file" (or the file it points to) is not a valid JSON array.` + ) + ); + }); + + const output = await tools.runAsync( + `${cmd} insert ${projectId} ${datasetId} ${tableId} '${JSON.stringify( + rows + )}'`, + cwd + ); + assert.ok(output.includes(`Inserted 2 rows`)); + }); -test.serial(`should delete a table`, async t => { - const output = await tools.runAsync( - `${cmd} delete ${projectId} ${datasetId} ${tableId}`, - cwd - ); - t.is(output, `Table ${tableId} deleted.`); - const [exists] = await bigquery - .dataset(datasetId) - .table(tableId) - .exists(); - t.false(exists); + it(`should delete a table`, async () => { + const output = await tools.runAsync( + `${cmd} delete ${projectId} ${datasetId} 
${tableId}`, + cwd + ); + assert.strictEqual(output, `Table ${tableId} deleted.`); + const [exists] = await bigquery + .dataset(datasetId) + .table(tableId) + .exists(); + assert.strictEqual(exists, false); + }); });
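
Note on the ava-to-mocha conversion in PATCH 5/5: the change is mechanical and follows one pattern throughout the sample tests. As a minimal sketch (the test body and the helper names setup, cleanup, runSample and tableExists are illustrative placeholders, not code from these files), an ava test of the form

    // placeholders for this sketch: setup(), cleanup(), runSample(), tableExists
    const test = require(`ava`);

    test.before(setup);
    test.after.always(cleanup);

    test.serial(`should create a table`, async t => {
      const output = await runSample();
      t.is(output, `Table created.`);
      t.true(tableExists);
      t.regex(output, /completed\./);
    });

becomes a mocha test that uses Node's built-in assert module, with the module-level hooks moved inside a describe block:

    // same placeholders as above: setup(), cleanup(), runSample(), tableExists
    const assert = require(`assert`);

    describe(`Tables`, () => {
      before(setup);
      after(cleanup);

      it(`should create a table`, async () => {
        const output = await runSample();
        assert.strictEqual(output, `Table created.`);
        assert.ok(tableExists);
        assert.ok(new RegExp(/completed\./).test(output));
      });
    });

The same substitutions (test.serial -> it, t.is -> assert.strictEqual, t.true -> assert.ok, t.regex -> assert.ok(new RegExp(...).test(output))) account for the bulk of the tables.test.js diff above.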