diff --git a/samples/queries.js b/samples/queries.js
index 9aef368d..d8a27b3d 100644
--- a/samples/queries.js
+++ b/samples/queries.js
@@ -142,7 +142,7 @@ function asyncQuery(sqlQuery, projectId) {
// Runs the query as a job
bigquery
- .startQuery(options)
+ .createQueryJob(options)
.then(results => {
job = results[0];
console.log(`Job ${job.id} started.`);
diff --git a/src/dataset.js b/src/dataset.js
index 9078cc45..3c3f0efc 100644
--- a/src/dataset.js
+++ b/src/dataset.js
@@ -221,6 +221,34 @@ function Dataset(bigQuery, id) {
util.inherits(Dataset, common.ServiceObject);
+/**
+ * Run a query scoped to your dataset as a job. No results are immediately returned. Instead, your
+ * callback will be executed with a {@link Job} object that you must
+ * ping for the results. See the Job documentation for explanations of how to
+ * check on the status of the job.
+ *
+ * See {@link BigQuery#createQueryJob} for full documentation of this method.
+ *
+ * @param {object} options See {@link BigQuery#createQueryJob} for full documentation of this method.
+ * @param {function} [callback] See {@link BigQuery#createQueryJob} for full documentation of this method.
+ * @returns {Promise} See {@link BigQuery#createQueryJob} for full documentation of this method.
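+ *
+ * @example
+ * // A minimal sketch of a dataset-scoped query job; the dataset name and
+ * // query below are placeholders.
+ * const BigQuery = require('@google-cloud/bigquery');
+ * const bigquery = new BigQuery();
+ * const dataset = bigquery.dataset('institutions');
+ *
+ * dataset.createQueryJob('SELECT * FROM my_table LIMIT 100', function(err, job) {
+ * if (!err) {
+ * job.getQueryResults(function(err, rows) {});
+ * }
+ * });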
+ */
+Dataset.prototype.createQueryJob = function(options, callback) {
+ if (is.string(options)) {
+ options = {
+ query: options,
+ };
+ }
+
+ options = extend(true, {}, options, {
+ defaultDataset: {
+ datasetId: this.id,
+ },
+ });
+
+ return this.bigQuery.createQueryJob(options, callback);
+};
+
/**
* Run a query scoped to your dataset as a readable object stream.
*
@@ -522,31 +550,6 @@ Dataset.prototype.query = function(options, callback) {
return this.bigQuery.query(options, callback);
};
-/**
- * Start running a query scoped to your dataset.
- *
- * See {@link BigQuery#startQuery} for full documentation of this method.
- *
- * @param {object} options See {@link BigQuery#startQuery} for full documentation of this method.
- * @param {function} [callback] See {@link BigQuery#startQuery} for full documentation of this method.
- * @returns {Promise} See {@link BigQuery#startQuery} for full documentation of this method.
- */
-Dataset.prototype.startQuery = function(options, callback) {
- if (is.string(options)) {
- options = {
- query: options,
- };
- }
-
- options = extend(true, {}, options, {
- defaultDataset: {
- datasetId: this.id,
- },
- });
-
- return this.bigQuery.startQuery(options, callback);
-};
-
/**
* Create a Table object.
*
diff --git a/src/index.js b/src/index.js
index d4429006..61c280ca 100644
--- a/src/index.js
+++ b/src/index.js
@@ -634,6 +634,149 @@ BigQuery.prototype.createDataset = function(id, options, callback) {
);
};
+/**
+ * Run a query as a job. No results are immediately returned. Instead, your
+ * callback will be executed with a {@link Job} object that you must
+ * ping for the results. See the Job documentation for explanations of how to
+ * check on the status of the job.
+ *
+ * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert}
+ *
+ * @param {object|string} options The configuration object. This must be in
+ * the format of the [`configuration.query`](http://goo.gl/wRpHvR) property
+ * of a Jobs resource. If a string is provided, this is used as the query
+ * string, and all other options are defaulted.
+ * @param {Table} [options.destination] The table to save the
+ * query's results to. If omitted, a new table will be created.
+ * @param {boolean} [options.dryRun] If set, don't actually run this job. A
+ * valid query will update the job with processing statistics. These can be
+ * accessed via `job.metadata`.
+ * @param {string} options.query A query string, following the BigQuery query
+ * syntax, of the query to execute.
+ * @param {boolean} [options.useLegacySql=false] Option to use legacy SQL syntax.
+ * @param {function} [callback] The callback function.
+ * @param {?error} callback.err An error returned while making this request.
+ * @param {Job} callback.job The newly created job for your
+ *     query.
+ * @param {object} callback.apiResponse The full API response.
+ * @returns {Promise}
+ *
+ * @throws {Error} If a query is not specified.
+ * @throws {Error} If a Table is not provided as a destination.
+ *
+ * @example
+ * const BigQuery = require('@google-cloud/bigquery');
+ * const bigquery = new BigQuery();
+ *
+ * const query = 'SELECT url FROM `publicdata:samples.github_nested` LIMIT 100';
+ *
+ * //-
+ * // You may pass only a query string, having a new table created to store the
+ * // results of the query.
+ * //-
+ * bigquery.createQueryJob(query, function(err, job) {});
+ *
+ * //-
+ * // You can also control the destination table by providing a
+ * // {@link Table} object.
+ * //-
+ * bigquery.createQueryJob({
+ * destination: bigquery.dataset('higher_education').table('institutions'),
+ * query: query
+ * }, function(err, job) {});
+ *
+ * //-
+ * // After you have run `createQueryJob`, your query will execute in a job. Your
+ * // callback is executed with a {@link Job} object so that you may
+ * // check for the results.
+ * //-
+ * bigquery.createQueryJob(query, function(err, job) {
+ * if (!err) {
+ * job.getQueryResults(function(err, rows, apiResponse) {});
+ * }
+ * });
+ *
+ * //-
+ * // If the callback is omitted, we'll return a Promise.
+ * //-
+ * bigquery.createQueryJob(query).then(function(data) {
+ * var job = data[0];
+ * var apiResponse = data[1];
+ *
+ * return job.getQueryResults();
+ * });
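+ *
+ * //-
+ * // The `params` option may be used for parameterized queries: an array maps
+ * // to positional (`?`) parameters and an object to named (`@name`)
+ * // parameters. A sketch with placeholder table and parameter names:
+ * //-
+ * bigquery.createQueryJob({
+ * query: 'SELECT name FROM my_dataset.my_table WHERE id = @id',
+ * params: {id: 123}
+ * }, function(err, job) {});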
+ */
+BigQuery.prototype.createQueryJob = function(options, callback) {
+ if (is.string(options)) {
+ options = {
+ query: options,
+ };
+ }
+
+ if (!options || !options.query) {
+ throw new Error('A SQL query string is required.');
+ }
+
+ var query = extend(
+ true,
+ {
+ useLegacySql: false,
+ },
+ options
+ );
+
+ if (options.destination) {
+ if (!(options.destination instanceof Table)) {
+ throw new Error('Destination must be a Table object.');
+ }
+
+ query.destinationTable = {
+ datasetId: options.destination.dataset.id,
+ projectId: options.destination.dataset.bigQuery.projectId,
+ tableId: options.destination.id,
+ };
+
+ delete query.destination;
+ }
+
+ if (query.params) {
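+ // Arrays are treated as positional (`?`) parameters and plain objects as
+ // named (`@name`) parameters.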
+ query.parameterMode = is.array(query.params) ? 'positional' : 'named';
+
+ if (query.parameterMode === 'named') {
+ query.queryParameters = [];
+
+ for (var namedParameter in query.params) {
+ var value = query.params[namedParameter];
+ var queryParameter = BigQuery.valueToQueryParameter_(value);
+ queryParameter.name = namedParameter;
+ query.queryParameters.push(queryParameter);
+ }
+ } else {
+ query.queryParameters = query.params.map(BigQuery.valueToQueryParameter_);
+ }
+
+ delete query.params;
+ }
+
+ var reqOpts = {
+ configuration: {
+ query: query,
+ },
+ };
+
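+ // `dryRun` belongs on the top-level job configuration rather than on
+ // `configuration.query`, and `jobPrefix` is consumed by `createJob`, so both
+ // are lifted off of the query options.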
+ if (query.dryRun) {
+ reqOpts.configuration.dryRun = query.dryRun;
+ delete query.dryRun;
+ }
+
+ if (query.jobPrefix) {
+ reqOpts.jobPrefix = query.jobPrefix;
+ delete query.jobPrefix;
+ }
+
+ this.createJob(reqOpts, callback);
+};
+
/**
* Run a query scoped to your project as a readable object stream.
*
@@ -671,11 +814,11 @@ BigQuery.prototype.createQueryStream = common.paginator.streamify('query');
* Creates a job. Typically when creating a job you'll have a very specific task
* in mind. For this we recommend one of the following methods:
*
- * - {@link BigQuery#startQuery}
- * - {@link BigQuery/table#startCopy}
- * - {@link BigQuery/table#startCopyFrom}
- * - {@link BigQuery/table#startExport}
- * - {@link BigQuery/table#startImport}
+ * - {@link BigQuery#createQueryJob}
+ * - {@link BigQuery/table#createCopyJob}
+ * - {@link BigQuery/table#createCopyFromJob}
+ * - {@link BigQuery/table#createExtractJob}
+ * - {@link BigQuery/table#createLoadJob}
*
* However in the event you need a finer level of control over the job creation,
* you can use this method to pass in a raw [Job resource](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs)
@@ -1043,7 +1186,7 @@ BigQuery.prototype.job = function(id) {
/**
* Run a query scoped to your project. For manual pagination please refer to
- * {@link BigQuery#startQuery}.
+ * {@link BigQuery#createQueryJob}.
*
* @see [Jobs: query API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/query}
*
@@ -1131,7 +1274,7 @@ BigQuery.prototype.query = function(query, options, callback) {
options = {};
}
- this.startQuery(query, function(err, job, resp) {
+ this.createQueryJob(query, function(err, job, resp) {
if (err) {
callback(err, null, resp);
return;
@@ -1141,149 +1284,6 @@ BigQuery.prototype.query = function(query, options, callback) {
});
};
-/**
- * Run a query as a job. No results are immediately returned. Instead, your
- * callback will be executed with a {@link Job} object that you must
- * ping for the results. See the Job documentation for explanations of how to
- * check on the status of the job.
- *
- * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert}
- *
- * @param {object|string} options The configuration object. This must be in
- * the format of the [`configuration.query`](http://goo.gl/wRpHvR) property
- * of a Jobs resource. If a string is provided, this is used as the query
- * string, and all other options are defaulted.
- * @param {Table} [options.destination] The table to save the
- * query's results to. If omitted, a new table will be created.
- * @param {boolean} [options.dryRun] If set, don't actually run this job. A
- * valid query will update the job with processing statistics. These can be
- * accessed via `job.metadata`.
- * @param {string} options.query A query string, following the BigQuery query
- * syntax, of the query to execute.
- * @param {boolean} [options.useLegacySql=false] Option to use legacy sql syntax.
- * @param {function} [callback] The callback function.
- * @param {?error} callback.err An error returned while making this request.
- * @param {Job} callback.job The newly created job for your
- query.
- * @param {object} callback.apiResponse The full API response.
- * @returns {Promise}
- *
- * @throws {Error} If a query is not specified.
- * @throws {Error} If a Table is not provided as a destination.
- *
- * @example
- * const BigQuery = require('@google-cloud/bigquery');
- * const bigquery = new BigQuery();
- *
- * const query = 'SELECT url FROM `publicdata:samples.github_nested` LIMIT 100';
- *
- * //-
- * // You may pass only a query string, having a new table created to store the
- * // results of the query.
- * //-
- * bigquery.startQuery(query, function(err, job) {});
- *
- * //-
- * // You can also control the destination table by providing a
- * // {@link Table} object.
- * //-
- * bigquery.startQuery({
- * destination: bigquery.dataset('higher_education').table('institutions'),
- * query: query
- * }, function(err, job) {});
- *
- * //-
- * // After you have run `startQuery`, your query will execute in a job. Your
- * // callback is executed with a {@link Job} object so that you may
- * // check for the results.
- * //-
- * bigquery.startQuery(query, function(err, job) {
- * if (!err) {
- * job.getQueryResults(function(err, rows, apiResponse) {});
- * }
- * });
- *
- * //-
- * // If the callback is omitted, we'll return a Promise.
- * //-
- * bigquery.startQuery(query).then(function(data) {
- * var job = data[0];
- * var apiResponse = data[1];
- *
- * return job.getQueryResults();
- * });
- */
-BigQuery.prototype.startQuery = function(options, callback) {
- if (is.string(options)) {
- options = {
- query: options,
- };
- }
-
- if (!options || !options.query) {
- throw new Error('A SQL query string is required.');
- }
-
- var query = extend(
- true,
- {
- useLegacySql: false,
- },
- options
- );
-
- if (options.destination) {
- if (!(options.destination instanceof Table)) {
- throw new Error('Destination must be a Table object.');
- }
-
- query.destinationTable = {
- datasetId: options.destination.dataset.id,
- projectId: options.destination.dataset.bigQuery.projectId,
- tableId: options.destination.id,
- };
-
- delete query.destination;
- }
-
- if (query.params) {
- query.parameterMode = is.array(query.params) ? 'positional' : 'named';
-
- if (query.parameterMode === 'named') {
- query.queryParameters = [];
-
- for (var namedParamater in query.params) {
- var value = query.params[namedParamater];
- var queryParameter = BigQuery.valueToQueryParameter_(value);
- queryParameter.name = namedParamater;
- query.queryParameters.push(queryParameter);
- }
- } else {
- query.queryParameters = query.params.map(BigQuery.valueToQueryParameter_);
- }
-
- delete query.params;
- }
-
- var reqOpts = {
- configuration: {
- query: query,
- },
- };
-
- if (query.dryRun) {
- reqOpts.configuration.dryRun = query.dryRun;
- delete query.dryRun;
- }
-
- if (query.jobPrefix) {
- reqOpts.jobPrefix = query.jobPrefix;
- delete query.jobPrefix;
- }
-
- this.createJob(reqOpts, callback);
-};
-
/*! Developer Documentation
*
* These methods can be auto-paginated.
diff --git a/src/job.js b/src/job.js
index d6e0adb9..30661478 100644
--- a/src/job.js
+++ b/src/job.js
@@ -31,11 +31,11 @@ var util = require('util');
* - {@link BigQuery#getJobs}
* - {@link BigQuery#job}
* - {@link BigQuery#query}
- * - {@link BigQuery#startJob}
+ * - {@link BigQuery#createJob}
* - {@link BigQuery/table#copy}
* - {@link BigQuery/table#createWriteStream}
- * - {@link BigQuery/table#export}
- * - {@link BigQuery/table#import}
+ * - {@link BigQuery/table#extract}
+ * - {@link BigQuery/table#load}
*
* They can be used to check the status of a running job or fetching the results
* of a previously-executed one.
diff --git a/src/table.js b/src/table.js
index 398e58d7..c6bd8ac0 100644
--- a/src/table.js
+++ b/src/table.js
@@ -459,7 +459,7 @@ Table.prototype.copy = function(destination, metadata, callback) {
metadata = {};
}
- this.startCopy(destination, metadata, function(err, job, resp) {
+ this.createCopyJob(destination, metadata, function(err, job, resp) {
if (err) {
callback(err, resp);
return;
@@ -523,7 +523,7 @@ Table.prototype.copyFrom = function(sourceTables, metadata, callback) {
metadata = {};
}
- this.startCopyFrom(sourceTables, metadata, function(err, job, resp) {
+ this.createCopyFromJob(sourceTables, metadata, function(err, job, resp) {
if (err) {
callback(err, resp);
return;
@@ -536,65 +536,21 @@ Table.prototype.copyFrom = function(sourceTables, metadata, callback) {
};
/**
- * Run a query scoped to your dataset as a readable object stream.
- *
- * See {@link BigQuery#createQueryStream} for full documentation of this
- * method.
- *
- * @param {object} query See {@link BigQuery#createQueryStream} for full
- * documentation of this method.
- * @returns {stream} See {@link BigQuery#createQueryStream} for full
- * documentation of this method.
- */
-Table.prototype.createQueryStream = function(query) {
- return this.dataset.createQueryStream(query);
-};
-
-/**
- * Create a readable stream of the rows of data in your table. This method is
- * simply a wrapper around {@link Table#getRows}.
- *
- * @see [Tabledata: list API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list}
- *
- * @returns {ReadableStream}
- *
- * @example
- * const BigQuery = require('@google-cloud/bigquery');
- * const bigquery = new BigQuery();
- * const dataset = bigquery.dataset('my-dataset');
- * const table = bigquery.table('my-table');
- *
- * table.createReadStream(options)
- * .on('error', console.error)
- * .on('data', function(row) {})
- * .on('end', function() {
- * // All rows have been retrieved.
- * });
- *
- * //-
- * // If you anticipate many results, you can end a stream early to prevent
- * // unnecessary processing and API requests.
- * //-
- * table.createReadStream()
- * .on('data', function(row) {
- * this.end();
- * });
- */
-Table.prototype.createReadStream = common.paginator.streamify('getRows');
-
-/**
- * Load data into your table from a readable stream of JSON, CSV, or
- * AVRO data.
+ * Copy data from one table to another, optionally creating that table.
*
* @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert}
*
- * @param {string|object} [metadata] Metadata to set with the load operation.
- * The metadata object should be in the format of the
- * [`configuration.load`](http://goo.gl/BVcXk4) property of a Jobs resource.
- * If a string is given, it will be used as the filetype.
- * @returns {WritableStream}
+ * @param {Table} destination The destination table.
+ * @param {object} [metadata] Metadata to set with the copy operation. The
+ * metadata object should be in the format of the
+ * [`configuration.copy`](http://goo.gl/dKWIyS) property of a Jobs resource.
+ * @param {function} [callback] The callback function.
+ * @param {?error} callback.err An error returned while making this request
+ * @param {Job} callback.job The job used to copy your table.
+ * @param {object} callback.apiResponse The full API response.
+ * @returns {Promise}
*
- * @throws {Error} If source format isn't recognized.
+ * @throws {Error} If a destination other than a Table object is provided.
*
* @example
* const BigQuery = require('@google-cloud/bigquery');
@@ -602,554 +558,493 @@ Table.prototype.createReadStream = common.paginator.streamify('getRows');
* const dataset = bigquery.dataset('my-dataset');
* const table = bigquery.table('my-table');
*
+ * const yourTable = dataset.table('your-table');
+ * table.createCopyJob(yourTable, function(err, job, apiResponse) {
+ * // `job` is a Job object that can be used to check the status of the
+ * // request.
+ * });
+ *
* //-
- * // Load data from a CSV file.
+ * // See the `configuration.copy` object for
+ * // all available options.
* //-
- * const request = require('request');
- *
- * const csvUrl = 'http://goo.gl/kSE7z6';
- *
* const metadata = {
- * allowJaggedRows: true,
- * skipLeadingRows: 1
+ * createDisposition: 'CREATE_NEVER',
+ * writeDisposition: 'WRITE_TRUNCATE'
* };
*
- * request.get(csvUrl)
- * .pipe(table.createWriteStream(metadata))
- * .on('complete', function(job) {
- * // `job` is a Job object that can be used to check the status of the
- * // request.
- * });
+ * table.createCopyJob(yourTable, metadata, function(err, job, apiResponse) {});
*
* //-
- * // Load data from a JSON file.
+ * // If the callback is omitted, we'll return a Promise.
* //-
- * const fs = require('fs');
- *
- * fs.createReadStream('./test/testdata/testfile.json')
- * .pipe(table.createWriteStream('json'))
- * .on('complete', function(job) {});
+ * table.createCopyJob(yourTable, metadata).then(function(data) {
+ * const job = data[0];
+ * const apiResponse = data[1];
+ * });
*/
-Table.prototype.createWriteStream = function(metadata) {
- var self = this;
-
- metadata = metadata || {};
-
- var fileTypes = Object.keys(FORMATS).map(function(key) {
- return FORMATS[key];
- });
-
- if (is.string(metadata)) {
- metadata = {
- sourceFormat: FORMATS[metadata.toLowerCase()],
- };
+Table.prototype.createCopyJob = function(destination, metadata, callback) {
+ if (!(destination instanceof Table)) {
+ throw new Error('Destination must be a Table object.');
}
- if (is.string(metadata.schema)) {
- metadata.schema = Table.createSchemaFromString_(metadata.schema);
+ if (is.fn(metadata)) {
+ callback = metadata;
+ metadata = {};
}
- extend(true, metadata, {
- destinationTable: {
- projectId: self.bigQuery.projectId,
- datasetId: self.dataset.id,
- tableId: self.id,
+ var body = {
+ configuration: {
+ copy: extend(true, metadata, {
+ destinationTable: {
+ datasetId: destination.dataset.id,
+ projectId: destination.bigQuery.projectId,
+ tableId: destination.id,
+ },
+ sourceTable: {
+ datasetId: this.dataset.id,
+ projectId: this.bigQuery.projectId,
+ tableId: this.id,
+ },
+ }),
},
- });
-
- var jobId = uuid.v4();
+ };
if (metadata.jobPrefix) {
- jobId = metadata.jobPrefix + jobId;
+ body.jobPrefix = metadata.jobPrefix;
delete metadata.jobPrefix;
}
- if (
- metadata.hasOwnProperty('sourceFormat') &&
- fileTypes.indexOf(metadata.sourceFormat) < 0
- ) {
- throw new Error('Source format not recognized: ' + metadata.sourceFormat);
- }
-
- var dup = streamEvents(duplexify());
-
- dup.once('writing', function() {
- common.util.makeWritableStream(
- dup,
- {
- makeAuthenticatedRequest: self.bigQuery.makeAuthenticatedRequest,
- metadata: {
- configuration: {
- load: metadata,
- },
- jobReference: {
- jobId: jobId,
- projectId: self.bigQuery.projectId,
- },
- },
- request: {
- uri: format('{base}/{projectId}/jobs', {
- base: 'https://www.googleapis.com/upload/bigquery/v2/projects',
- projectId: self.bigQuery.projectId,
- }),
- },
- },
- function(data) {
- var job = self.bigQuery.job(data.jobReference.jobId);
- job.metadata = data;
-
- dup.emit('complete', job);
- }
- );
- });
-
- return dup;
+ this.bigQuery.createJob(body, callback);
};
/**
- * Export table to Cloud Storage.
+ * Copy data from multiple tables into this table.
*
- * @param {string|File} destination Where the file should be exported
- * to. A string or a {@link https://cloud.google.com/nodejs/docs/reference/storage/latest/File File}.
- * @param {object} [options] The configuration object.
- * @param {string} [options.format] The format to export the data in. Allowed
- * options are "CSV", "JSON", or "AVRO". Default: "CSV".
- * @param {boolean} [options.gzip] Specify if you would like the file compressed
- * with GZIP. Default: false.
+ * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert}
+ *
+ * @param {Table|Table[]} sourceTables The
+ * source table(s) to copy data from.
+ * @param {object} [metadata] Metadata to set with the copy operation. The
+ * metadata object should be in the format of the
+ * [`configuration.copy`](http://goo.gl/dKWIyS) property of a Jobs resource.
* @param {function} [callback] The callback function.
* @param {?error} callback.err An error returned while making this request
+ * @param {Job} callback.job The job used to copy your table.
* @param {object} callback.apiResponse The full API response.
* @returns {Promise}
*
- * @throws {Error} If destination isn't a File object.
- * @throws {Error} If destination format isn't recongized.
+ * @throws {Error} If a source other than a Table object is provided.
*
* @example
- * const Storage = require('@google-cloud/storage');
* const BigQuery = require('@google-cloud/bigquery');
* const bigquery = new BigQuery();
* const dataset = bigquery.dataset('my-dataset');
* const table = bigquery.table('my-table');
*
- * const storage = new Storage({
- * projectId: 'grape-spaceship-123'
- * });
- * var extractedFile = storage.bucket('institutions').file('2014.csv');
- *
- * //-
- * // To use the default options, just pass a {@link https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} object.
- * //
- * // Note: The exported format type will be inferred by the file's extension.
- * // If you wish to override this, or provide an array of destination files,
- * // you must provide an `options` object.
- * //-
- * table.extract(extractedFile, function(err, apiResponse) {});
+ * const sourceTables = [
+ * dataset.table('your-table'),
+ * dataset.table('your-second-table')
+ * ];
*
- * //-
- * // If you need more customization, pass an `options` object.
- * //-
- * var options = {
- * format: 'json',
- * gzip: true
+ * const callback = function(err, job, apiResponse) {
+ * // `job` is a Job object that can be used to check the status of the
+ * // request.
* };
*
- * table.extract(extractedFile, options, function(err, apiResponse) {});
+ * table.createCopyFromJob(sourceTables, callback);
*
* //-
- * // You can also specify multiple destination files.
+ * // See the `configuration.copy` object for
+ * // all available options.
* //-
- * table.extract([
- * storage.bucket('institutions').file('2014.json'),
- * storage.bucket('institutions-copy').file('2014.json')
- * ], options, function(err, apiResponse) {});
+ * const metadata = {
+ * createDisposition: 'CREATE_NEVER',
+ * writeDisposition: 'WRITE_TRUNCATE'
+ * };
+ *
+ * table.createCopyFromJob(sourceTables, metadata, callback);
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
- * table.extract(extractedFile, options).then(function(data) {
- * var apiResponse = data[0];
+ * table.createCopyFromJob(sourceTables, metadata).then(function(data) {
+ * const job = data[0];
+ * const apiResponse = data[1];
* });
*/
-Table.prototype.extract = function(destination, options, callback) {
- if (is.fn(options)) {
- callback = options;
- options = {};
- }
+Table.prototype.createCopyFromJob = function(sourceTables, metadata, callback) {
+ sourceTables = arrify(sourceTables);
- this.startExtract(destination, options, function(err, job, resp) {
- if (err) {
- callback(err, resp);
- return;
+ sourceTables.forEach(function(sourceTable) {
+ if (!(sourceTable instanceof Table)) {
+ throw new Error('Source must be a Table object.');
}
-
- job.on('error', callback).on('complete', function(metadata) {
- callback(null, metadata);
- });
});
+
+ if (is.fn(metadata)) {
+ callback = metadata;
+ metadata = {};
+ }
+
+ var body = {
+ configuration: {
+ copy: extend(true, metadata, {
+ destinationTable: {
+ datasetId: this.dataset.id,
+ projectId: this.bigQuery.projectId,
+ tableId: this.id,
+ },
+
+ sourceTables: sourceTables.map(function(sourceTable) {
+ return {
+ datasetId: sourceTable.dataset.id,
+ projectId: sourceTable.bigQuery.projectId,
+ tableId: sourceTable.id,
+ };
+ }),
+ }),
+ },
+ };
+
+ if (metadata.jobPrefix) {
+ body.jobPrefix = metadata.jobPrefix;
+ delete metadata.jobPrefix;
+ }
+
+ this.bigQuery.createJob(body, callback);
};
/**
- * Retrieves table data from a specified set of rows. The rows are returned to
- * your callback as an array of objects matching your table's schema.
+ * Export table to Cloud Storage.
*
- * @see [Tabledata: list API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list}
+ * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert}
*
- * @param {object} [options] The configuration object.
- * @param {boolean} [options.autoPaginate=true] Have pagination handled
- * automatically.
- * @param {number} [options.maxApiCalls] Maximum number of API calls to make.
- * @param {number} [options.maxResults] Maximum number of results to return.
- * @param {function} [callback] The callback function.
- * @param {?error} callback.err An error returned while making this request
- * @param {array} callback.rows The table data from specified set of rows.
- * @returns {Promise}
+ * @param {string|File} destination Where the file should be exported
+ * to. A string or a {@link https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} object.
+ * @param {object} [options] The configuration object.
+ * @param {string} [options.format] The format to export the data in. Allowed
+ *     options are "CSV", "JSON", or "AVRO". Default: "CSV".
+ * @param {boolean} [options.gzip] Specify if you would like the file compressed
+ *     with GZIP. Default: false.
+ * @param {function} [callback] The callback function.
+ * @param {?error} callback.err An error returned while making this request
+ * @param {Job} callback.job The job used to export the table.
+ * @param {object} callback.apiResponse The full API response.
+ * @returns {Promise}
+ *
+ * @throws {Error} If destination isn't a File object.
+ * @throws {Error} If destination format isn't recognized.
*
* @example
+ * const Storage = require('@google-cloud/storage');
* const BigQuery = require('@google-cloud/bigquery');
* const bigquery = new BigQuery();
* const dataset = bigquery.dataset('my-dataset');
* const table = bigquery.table('my-table');
*
- * table.getRows(function(err, rows) {
- * if (!err) {
- * // rows is an array of results.
- * }
+ * const storage = new Storage({
+ * projectId: 'grape-spaceship-123'
* });
+ * const extractedFile = storage.bucket('institutions').file('2014.csv');
+ *
+ * function callback(err, job, apiResponse) {
+ * // `job` is a Job object that can be used to check the status of the
+ * // request.
+ * }
*
* //-
- * // To control how many API requests are made and page through the results
- * // manually, set `autoPaginate` to `false`.
+ * // To use the default options, just pass a {@link https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} object.
+ * //
+ * // Note: The exported format type will be inferred by the file's extension.
+ * // If you wish to override this, or provide an array of destination files,
+ * // you must provide an `options` object.
* //-
- * function manualPaginationCallback(err, rows, nextQuery, apiResponse) {
- * if (nextQuery) {
- * // More results exist.
- * table.getRows(nextQuery, manualPaginationCallback);
- * }
- * }
+ * table.createExtractJob(extractedFile, callback);
*
- * table.getRows({
- * autoPaginate: false
- * }, manualPaginationCallback);
+ * //-
+ * // If you need more customization, pass an `options` object.
+ * //-
+ * const options = {
+ * format: 'json',
+ * gzip: true
+ * };
+ *
+ * table.createExtractJob(extractedFile, options, callback);
+ *
+ * //-
+ * // You can also specify multiple destination files.
+ * //-
+ * table.createExtractJob([
+ * storage.bucket('institutions').file('2014.json'),
+ * storage.bucket('institutions-copy').file('2014.json')
+ * ], options, callback);
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
- * table.getRows().then(function(data) {
- * const rows = data[0];
-});
+ * table.createExtractJob(extractedFile, options).then(function(data) {
+ * const job = data[0];
+ * const apiResponse = data[1];
+ * });
*/
-Table.prototype.getRows = function(options, callback) {
- var self = this;
-
+Table.prototype.createExtractJob = function(destination, options, callback) {
if (is.fn(options)) {
callback = options;
options = {};
}
- this.request(
- {
- uri: '/data',
- qs: options,
- },
- function(err, resp) {
- if (err) {
- onComplete(err, null, null, resp);
- return;
+ options = extend(true, options, {
+ destinationUris: arrify(destination).map(function(dest) {
+ if (!common.util.isCustomType(dest, 'storage/file')) {
+ throw new Error('Destination must be a File object.');
}
- var nextQuery = null;
-
- if (resp.pageToken) {
- nextQuery = extend({}, options, {
- pageToken: resp.pageToken,
- });
+ // If no explicit format was provided, attempt to find a match from the
+ // file's extension. If no match, don't set, and default upstream to CSV.
+ var format = path
+ .extname(dest.name)
+ .substr(1)
+ .toLowerCase();
+ if (!options.destinationFormat && !options.format && FORMATS[format]) {
+ options.destinationFormat = FORMATS[format];
}
- if (resp.rows && resp.rows.length > 0 && !self.metadata.schema) {
- // We don't know the schema for this table yet. Do a quick stat.
- self.getMetadata(function(err, metadata, apiResponse) {
- if (err) {
- onComplete(err, null, null, apiResponse);
- return;
- }
-
- onComplete(null, resp.rows, nextQuery, resp);
- });
+ return 'gs://' + dest.bucket.name + '/' + dest.name;
+ }),
+ });
- return;
- }
+ if (options.format) {
+ options.format = options.format.toLowerCase();
- onComplete(null, resp.rows, nextQuery, resp);
+ if (FORMATS[options.format]) {
+ options.destinationFormat = FORMATS[options.format];
+ delete options.format;
+ } else {
+ throw new Error('Destination format not recognized: ' + options.format);
}
- );
+ }
- function onComplete(err, rows, nextQuery, resp) {
- if (err) {
- callback(err, null, null, resp);
- return;
- }
+ if (options.gzip) {
+ options.compression = 'GZIP';
+ delete options.gzip;
+ }
- rows = self.bigQuery.mergeSchemaWithRows_(self.metadata.schema, rows || []);
- callback(null, rows, nextQuery, resp);
+ var body = {
+ configuration: {
+ extract: extend(true, options, {
+ sourceTable: {
+ datasetId: this.dataset.id,
+ projectId: this.bigQuery.projectId,
+ tableId: this.id,
+ },
+ }),
+ },
+ };
+
+ if (options.jobPrefix) {
+ body.jobPrefix = options.jobPrefix;
+ delete options.jobPrefix;
}
+
+ this.bigQuery.createJob(body, callback);
};
/**
- * Stream data into BigQuery one record at a time without running a load job.
+ * Load data from a local file or Storage {@link https://cloud.google.com/nodejs/docs/reference/storage/latest/File File}.
*
- * There are more strict quota limits using this method so it is highly
- * recommended that you load data into BigQuery using
- * {@link Table#import} instead.
+ * By loading data this way, you create a load job that will run your data load
+ * asynchronously. If you would like instantaneous access to your data, insert
+ * it using {@link Table#insert}.
*
- * @see [Tabledata: insertAll API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll}
- * @see [Troubleshooting Errors]{@link https://developers.google.com/bigquery/troubleshooting-errors}
+ * Note: The file type will be inferred by the given file's extension. If you
+ * wish to override this, you must provide `metadata.format`.
*
- * @param {object|object[]} rows The rows to insert into the table.
- * @param {object} [options] Configuration object.
- * @param {boolean} [options.autoCreate] Automatically create the table if it
- * doesn't already exist. In order for this to succeed the `schema` option
- * must also be set. Note that this can take longer than 2 minutes to
- * complete.
- * @param {boolean} [options.ignoreUnknownValues=false] Accept rows that contain
- * values that do not match the schema. The unknown values are ignored.
- * @param {boolean} [options.raw] If `true`, the `rows` argument is expected to
- * be formatted as according to the
- * [specification](https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll).
- * @param {string|object} [options.schema] A comma-separated list of name:type
- * pairs. Valid types are "string", "integer", "float", "boolean", and
- * "timestamp". If the type is omitted, it is assumed to be "string".
- * Example: "name:string, age:integer". Schemas can also be specified as a
- * JSON array of fields, which allows for nested and repeated fields. See
- * a [Table resource](http://goo.gl/sl8Dmg) for more detailed information.
- * @param {boolean} [options.skipInvalidRows=false] Insert all valid rows of a
- * request, even if invalid rows exist.
- * @param {string} [options.templateSuffix] Treat the destination table as a
- * base template, and insert the rows into an instance table named
- * "{destination}{templateSuffix}". BigQuery will manage creation of
- * the instance table, using the schema of the base template table. See
- * [Automatic table creation using template tables](https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables)
- * for considerations when working with templates tables.
+ * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert}
+ *
+ * @param {string|File} source The source file to import. A string or a
+ * {@link https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} object.
+ * @param {object} [metadata] Metadata to set with the load operation. The
+ * metadata object should be in the format of the
+ * [`configuration.load`](http://goo.gl/BVcXk4) property of a Jobs resource.
+ * @param {string} [metadata.format] The format the data being imported is in.
+ * Allowed options are "CSV", "JSON", or "AVRO".
* @param {function} [callback] The callback function.
- * @param {?error} callback.err An error returned while making this request.
- * @param {object[]} callback.err.errors If present, these represent partial
- * failures. It's possible for part of your request to be completed
- * successfully, while the other part was not.
+ * @param {?error} callback.err An error returned while making this request
+ * @param {Job} callback.job The job used to import your data.
* @param {object} callback.apiResponse The full API response.
* @returns {Promise}
*
+ * @throws {Error} If the source isn't a string file name or a File instance.
+ *
* @example
+ * const Storage = require('@google-cloud/storage');
* const BigQuery = require('@google-cloud/bigquery');
* const bigquery = new BigQuery();
* const dataset = bigquery.dataset('my-dataset');
* const table = bigquery.table('my-table');
*
* //-
- * // Insert a single row.
- * //-
- * table.insert({
- * INSTNM: 'Motion Picture Institute of Michigan',
- * CITY: 'Troy',
- * STABBR: 'MI'
- * }, insertHandler);
- *
- * //-
- * // Insert multiple rows at a time.
+ * // Load data from a local file.
* //-
- * var rows = [
- * {
- * INSTNM: 'Motion Picture Institute of Michigan',
- * CITY: 'Troy',
- * STABBR: 'MI'
- * },
- * // ...
- * ];
+ * const callback = function(err, job, apiResponse) {
+ * // `job` is a Job object that can be used to check the status of the
+ * // request.
+ * };
*
- * table.insert(rows, insertHandler);
+ * table.createLoadJob('./institutions.csv', callback);
*
* //-
- * // Insert a row as according to the
- * // specification.
+ * // You may also pass in metadata in the format of a Jobs resource. See
+ * // (http://goo.gl/BVcXk4) for a full list of supported values.
* //-
- * var row = {
- * insertId: '1',
- * json: {
- * INSTNM: 'Motion Picture Institute of Michigan',
- * CITY: 'Troy',
- * STABBR: 'MI'
- * }
- * };
- *
- * var options = {
- * raw: true
+ * const metadata = {
+ * encoding: 'ISO-8859-1',
+ * sourceFormat: 'NEWLINE_DELIMITED_JSON'
* };
*
- * table.insert(row, options, insertHandler);
+ * table.createLoadJob('./my-data.csv', metadata, callback);
*
* //-
- * // Handling the response. See
- * // Troubleshooting Errors for best practices on how to handle errors.
+ * // Load data from a file in your Cloud Storage bucket.
* //-
- * function insertHandler(err, apiResponse) {
- * if (err) {
- * // An API error or partial failure occurred.
- *
- * if (err.name === 'PartialFailureError') {
- * // Some rows failed to insert, while others may have succeeded.
+ * const storage = new Storage({
+ * projectId: 'grape-spaceship-123'
+ * });
+ * const data = storage.bucket('institutions').file('data.csv');
+ * table.createLoadJob(data, callback);
*
- * // err.errors (object[]):
- * // err.errors[].row (original row object passed to `insert`)
- * // err.errors[].errors[].reason
- * // err.errors[].errors[].message
- * }
- * }
- * }
+ * //-
+ * // Load data from multiple files in your Cloud Storage bucket(s).
+ * //-
+ * table.createLoadJob([
+ * storage.bucket('institutions').file('2011.csv'),
+ * storage.bucket('institutions').file('2012.csv')
+ * ], callback);
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
- * table.insert(rows)
- * .then(function(data) {
- * var apiResponse = data[0];
- * })
- * .catch(function(err) {
- * // An API error or partial failure occurred.
- *
- * if (err.name === 'PartialFailureError') {
- * // Some rows failed to insert, while others may have succeeded.
- *
- * // err.errors (object[]):
- * // err.errors[].row (original row object passed to `insert`)
- * // err.errors[].errors[].reason
- * // err.errors[].errors[].message
- * }
- * });
+ * table.createLoadJob(data).then(function(data) {
+ * const job = data[0];
+ * const apiResponse = data[1];
+ * });
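+ *
+ * //-
+ * // `metadata.format` is a shorthand for `sourceFormat`; a sketch with a
+ * // placeholder file name:
+ * //-
+ * table.createLoadJob('./my-data.json', {format: 'json'}, callback);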
*/
-Table.prototype.insert = function(rows, options, callback) {
- var self = this;
-
- if (is.fn(options)) {
- callback = options;
- options = {};
+Table.prototype.createLoadJob = function(source, metadata, callback) {
+ if (is.fn(metadata)) {
+ callback = metadata;
+ metadata = {};
}
- rows = arrify(rows);
+ callback = callback || common.util.noop;
+ metadata = metadata || {};
- if (!rows.length) {
- throw new Error('You must provide at least 1 row to be inserted.');
+ if (metadata.format) {
+ metadata.sourceFormat = FORMATS[metadata.format.toLowerCase()];
+ delete metadata.format;
}
- var json = extend(true, {}, options, {
- rows: rows,
- });
+ if (is.string(source)) {
+ // A path to a file was given. If a sourceFormat wasn't specified, try to
+ // find a match from the file's extension.
+ var detectedFormat =
+ FORMATS[
+ path
+ .extname(source)
+ .substr(1)
+ .toLowerCase()
+ ];
+ if (!metadata.sourceFormat && detectedFormat) {
+ metadata.sourceFormat = detectedFormat;
+ }
- if (!options.raw) {
- json.rows = arrify(rows).map(function(row) {
- return {
- insertId: uuid.v4(),
- json: Table.encodeValue_(row),
- };
- });
+ // Read the file into a new write stream.
+ return fs
+ .createReadStream(source)
+ .pipe(this.createWriteStream(metadata))
+ .on('error', callback)
+ .on('complete', function(job) {
+ callback(null, job, job.metadata);
+ });
}
- delete json.raw;
-
- var autoCreate = !!options.autoCreate;
- var schema;
-
- delete json.autoCreate;
-
- if (autoCreate) {
- if (!options.schema) {
- throw new Error('Schema must be provided in order to auto-create Table.');
- }
+ var body = {
+ configuration: {
+ load: {
+ destinationTable: {
+ projectId: this.bigQuery.projectId,
+ datasetId: this.dataset.id,
+ tableId: this.id,
+ },
+ },
+ },
+ };
- schema = options.schema;
- delete json.schema;
+ if (metadata.jobPrefix) {
+ body.jobPrefix = metadata.jobPrefix;
+ delete metadata.jobPrefix;
}
- this.request(
- {
- method: 'POST',
- uri: '/insertAll',
- json: json,
- },
- function(err, resp) {
- if (err) {
- if (err.code === 404 && autoCreate) {
- setTimeout(createTableAndRetry, Math.random() * 60000);
- } else {
- callback(err, resp);
- }
- return;
+ extend(true, body.configuration.load, metadata, {
+ sourceUris: arrify(source).map(function(src) {
+ if (!common.util.isCustomType(src, 'storage/file')) {
+ throw new Error('Source must be a File object.');
}
- var partialFailures = (resp.insertErrors || []).map(function(
- insertError
- ) {
- return {
- errors: insertError.errors.map(function(error) {
- return {
- message: error.message,
- reason: error.reason,
- };
- }),
- row: rows[insertError.index],
- };
- });
-
- if (partialFailures.length > 0) {
- err = new common.util.PartialFailureError({
- errors: partialFailures,
- response: resp,
- });
+ // If no explicit format was provided, attempt to find a match from
+ // the file's extension. If no match, don't set, and default upstream
+ // to CSV.
+ var format =
+ FORMATS[
+ path
+ .extname(src.name)
+ .substr(1)
+ .toLowerCase()
+ ];
+ if (!metadata.sourceFormat && format) {
+ body.configuration.load.sourceFormat = format;
}
- callback(err, resp);
- }
- );
+ return 'gs://' + src.bucket.name + '/' + src.name;
+ }),
+ });
- function createTableAndRetry() {
- self.create(
- {
- schema: schema,
- },
- function(err, table, resp) {
- if (err && err.code !== 409) {
- callback(err, resp);
- return;
- }
+ this.bigQuery.createJob(body, callback);
+};
- setTimeout(function() {
- self.insert(rows, options, callback);
- }, 60000);
- }
- );
- }
+/**
+ * Run a query as a job. No results are immediately returned. Instead, your
+ * callback will be executed with a {@link Job} object that you must
+ * ping for the results. See the Job documentation for explanations of how to
+ * check on the status of the job.
+ *
+ * See {@link BigQuery#createQueryJob} for full documentation of this method.
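+ *
+ * @param {object} options See {@link BigQuery#createQueryJob} for full documentation of this method.
+ * @param {function} [callback] See {@link BigQuery#createQueryJob} for full documentation of this method.
+ * @returns {Promise} See {@link BigQuery#createQueryJob} for full documentation of this method.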
+ */
+Table.prototype.createQueryJob = function(options, callback) {
+ return this.dataset.createQueryJob(options, callback);
};
/**
- * Load data from a local file or Storage {@link https://cloud.google.com/nodejs/docs/reference/storage/latest/File File}.
+ * Run a query scoped to your dataset as a readable object stream.
*
- * By loading data this way, you create a load job that will run your data load
- * asynchronously. If you would like instantaneous access to your data, insert
- * it using {@link Table#insert}.
+ * See {@link BigQuery#createQueryStream} for full documentation of this
+ * method.
*
- * Note: The file type will be inferred by the given file's extension. If you
- * wish to override this, you must provide `metadata.format`.
+ * @param {object} query See {@link BigQuery#createQueryStream} for full
+ * documentation of this method.
+ * @returns {stream} See {@link BigQuery#createQueryStream} for full
+ * documentation of this method.
+ */
+Table.prototype.createQueryStream = function(query) {
+ return this.dataset.createQueryStream(query);
+};
+
+/**
+ * Create a readable stream of the rows of data in your table. This method is
+ * simply a wrapper around {@link Table#getRows}.
*
- * @param {string|File} source The source file to import. A string or a
- * {@link https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} object.
- * @param {object} [metadata] Metadata to set with the load operation. The
- * metadata object should be in the format of the
- * [`configuration.load`](http://goo.gl/BVcXk4) property of a Jobs resource.
- * @param {string} [metadata.format] The format the data being imported is in.
- * Allowed options are "CSV", "JSON", or "AVRO".
- * @param {function} [callback] The callback function.
- * @param {?error} callback.err An error returned while making this request
- * @param {object} callback.apiResponse The full API response.
- * @returns {Promise}
+ * @see [Tabledata: list API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list}
*
- * @throws {Error} If the source isn't a string file name or a File instance.
+ * @returns {ReadableStream}
*
* @example
* const BigQuery = require('@google-cloud/bigquery');
@@ -1157,96 +1052,37 @@ Table.prototype.insert = function(rows, options, callback) {
* const dataset = bigquery.dataset('my-dataset');
* const table = bigquery.table('my-table');
*
- * //-
- * // Load data from a local file.
- * //-
- * table.load('./institutions.csv', function(err, apiResponse) {});
- *
- * //-
- * // You may also pass in metadata in the format of a Jobs resource. See
- * // (http://goo.gl/BVcXk4) for a full list of supported values.
- * //-
- * var metadata = {
- * encoding: 'ISO-8859-1',
- * sourceFormat: 'NEWLINE_DELIMITED_JSON'
- * };
- *
- * table.load('./my-data.csv', metadata, function(err, apiResponse) {});
- *
- * //-
- * // Load data from a file in your Cloud Storage bucket.
- * //-
- * var gcs = require('@google-cloud/storage')({
- * projectId: 'grape-spaceship-123'
- * });
- * var data = gcs.bucket('institutions').file('data.csv');
- * table.load(data, function(err, apiResponse) {});
- *
- * //-
- * // Load data from multiple files in your Cloud Storage bucket(s).
- * //-
- * table.load([
- * gcs.bucket('institutions').file('2011.csv'),
- * gcs.bucket('institutions').file('2012.csv')
- * ], function(err, apiResponse) {});
+ * table.createReadStream(options)
+ * .on('error', console.error)
+ * .on('data', function(row) {})
+ * .on('end', function() {
+ * // All rows have been retrieved.
+ * });
*
* //-
- * // If the callback is omitted, we'll return a Promise.
+ * // If you anticipate many results, you can end a stream early to prevent
+ * // unnecessary processing and API requests.
* //-
- * table.load(data).then(function(data) {
- * var apiResponse = data[0];
- * });
+ * table.createReadStream()
+ * .on('data', function(row) {
+ * this.end();
+ * });
*/
-Table.prototype.load = function(source, metadata, callback) {
- if (is.fn(metadata)) {
- callback = metadata;
- metadata = {};
- }
-
- this.startLoad(source, metadata, function(err, job, resp) {
- if (err) {
- callback(err, resp);
- return;
- }
-
- job.on('error', callback).on('complete', function(metadata) {
- callback(null, metadata);
- });
- });
-};
+Table.prototype.createReadStream = common.paginator.streamify('getRows');
/**
- * Run a query scoped to your dataset.
+ * Load data into your table from a readable stream of JSON, CSV, or
+ * AVRO data.
*
- * See {@link BigQuery#query} for full documentation of this method.
- * @param {object} query See {@link BigQuery#query} for full documentation of this method.
- * @param {function} [callback] See {@link BigQuery#query} for full documentation of this method.
- * @returns {Promise}
- */
-Table.prototype.query = function(query, callback) {
- this.dataset.query(query, callback);
-};
-
-/**
- * Set the metadata on the table.
+ * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert}
*
- * @see [Tables: update API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/tables/update}
+ * @param {string|object} [metadata] Metadata to set with the load operation.
+ * The metadata object should be in the format of the
+ * [`configuration.load`](http://goo.gl/BVcXk4) property of a Jobs resource.
+ * If a string is given, it will be used as the filetype.
+ * @returns {WritableStream}
*
- * @param {object} metadata The metadata key/value object to set.
- * @param {string} metadata.description A user-friendly description of the
- * table.
- * @param {string} metadata.name A descriptive name for the table.
- * @param {string|object} metadata.schema A comma-separated list of name:type
- * pairs. Valid types are "string", "integer", "float", "boolean", "bytes",
- * "record", and "timestamp". If the type is omitted, it is assumed to be
- * "string". Example: "name:string, age:integer". Schemas can also be
- * specified as a JSON array of fields, which allows for nested and repeated
- * fields. See a [Table resource](http://goo.gl/sl8Dmg) for more detailed
- * information.
- * @param {function} [callback] The callback function.
- * @param {?error} callback.err An error returned while making this request.
- * @param {object} callback.apiResponse The full API response.
- * @returns {Promise}
+ * @throws {Error} If source format isn't recognized.
*
* @example
* const BigQuery = require('@google-cloud/bigquery');
@@ -1254,340 +1090,529 @@ Table.prototype.query = function(query, callback) {
* const dataset = bigquery.dataset('my-dataset');
* const table = bigquery.table('my-table');
*
+ * //-
+ * // Load data from a CSV file.
+ * //-
+ * const request = require('request');
+ *
+ * const csvUrl = 'http://goo.gl/kSE7z6';
+ *
* const metadata = {
- * name: 'My recipes',
- * description: 'A table for storing my recipes.',
- * schema: 'name:string, servings:integer, cookingTime:float, quick:boolean'
+ * allowJaggedRows: true,
+ * skipLeadingRows: 1
* };
*
- * table.setMetadata(metadata, function(err, metadata, apiResponse) {});
+ * request.get(csvUrl)
+ * .pipe(table.createWriteStream(metadata))
+ * .on('complete', function(job) {
+ * // `job` is a Job object that can be used to check the status of the
+ * // request.
+ * });
*
* //-
- * // If the callback is omitted, we'll return a Promise.
+ * // Load data from a JSON file.
* //-
- * table.setMetadata(metadata).then(function(data) {
- * const metadata = data[0];
- * const apiResponse = data[1];
- * });
+ * const fs = require('fs');
+ *
+ * fs.createReadStream('./test/testdata/testfile.json')
+ * .pipe(table.createWriteStream('json'))
+ * .on('complete', function(job) {});
*/
-Table.prototype.setMetadata = function(metadata, callback) {
- var body = Table.formatMetadata_(metadata);
+Table.prototype.createWriteStream = function(metadata) {
+ var self = this;
- common.ServiceObject.prototype.setMetadata.call(this, body, callback);
+ metadata = metadata || {};
+
+ var fileTypes = Object.keys(FORMATS).map(function(key) {
+ return FORMATS[key];
+ });
+
+ if (is.string(metadata)) {
+ metadata = {
+ sourceFormat: FORMATS[metadata.toLowerCase()],
+ };
+ }
+
+ if (is.string(metadata.schema)) {
+ metadata.schema = Table.createSchemaFromString_(metadata.schema);
+ }
+
+ extend(true, metadata, {
+ destinationTable: {
+ projectId: self.bigQuery.projectId,
+ datasetId: self.dataset.id,
+ tableId: self.id,
+ },
+ });
+
+ var jobId = uuid.v4();
+
+ if (metadata.jobPrefix) {
+ jobId = metadata.jobPrefix + jobId;
+ delete metadata.jobPrefix;
+ }
+
+ if (
+ metadata.hasOwnProperty('sourceFormat') &&
+ fileTypes.indexOf(metadata.sourceFormat) < 0
+ ) {
+ throw new Error('Source format not recognized: ' + metadata.sourceFormat);
+ }
+
+ var dup = streamEvents(duplexify());
+
+ dup.once('writing', function() {
+ common.util.makeWritableStream(
+ dup,
+ {
+ makeAuthenticatedRequest: self.bigQuery.makeAuthenticatedRequest,
+ metadata: {
+ configuration: {
+ load: metadata,
+ },
+ jobReference: {
+ jobId: jobId,
+ projectId: self.bigQuery.projectId,
+ },
+ },
+ request: {
+ uri: format('{base}/{projectId}/jobs', {
+ base: 'https://www.googleapis.com/upload/bigquery/v2/projects',
+ projectId: self.bigQuery.projectId,
+ }),
+ },
+ },
+ function(data) {
+ var job = self.bigQuery.job(data.jobReference.jobId);
+ job.metadata = data;
+
+ dup.emit('complete', job);
+ }
+ );
+ });
+
+ return dup;
};
/**
- * Copy data from one table to another, optionally creating that table.
- *
- * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert}
+ * Export table to Cloud Storage.
*
- * @param {Table} destination The destination table.
- * @param {object} [metadata] Metadata to set with the copy operation. The
- * metadata object should be in the format of the
- * [`configuration.copy`](http://goo.gl/dKWIyS) property of a Jobs resource.
+ * @param {string|File} destination Where the file should be exported
+ * to. A string or a {@link https://cloud.google.com/nodejs/docs/reference/storage/latest/File File}.
+ * @param {object} [options] The configuration object.
+ * @param {string} [options.format] The format to export the data in. Allowed
+ * options are "CSV", "JSON", or "AVRO". Default: "CSV".
+ * @param {boolean} [options.gzip] Specify if you would like the file compressed
+ * with GZIP. Default: false.
* @param {function} [callback] The callback function.
* @param {?error} callback.err An error returned while making this request
- * @param {Job} callback.job The job used to copy your table.
* @param {object} callback.apiResponse The full API response.
* @returns {Promise}
*
- * @throws {Error} If a destination other than a Table object is provided.
+ * @throws {Error} If destination isn't a File object.
+ * @throws {Error} If destination format isn't recognized.
*
* @example
+ * const Storage = require('@google-cloud/storage');
* const BigQuery = require('@google-cloud/bigquery');
* const bigquery = new BigQuery();
* const dataset = bigquery.dataset('my-dataset');
* const table = bigquery.table('my-table');
*
- * const yourTable = dataset.table('your-table');
- * table.startCopy(yourTable, function(err, job, apiResponse) {
- * // `job` is a Job object that can be used to check the status of the
- * // request.
+ * const storage = new Storage({
+ * projectId: 'grape-spaceship-123'
* });
+ * var extractedFile = storage.bucket('institutions').file('2014.csv');
*
* //-
- * // See the `configuration.copy` object for
- * // all available options.
+ * // To use the default options, just pass a {@link https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} object.
+ * //
+ * // Note: The exported format type will be inferred by the file's extension.
+ * // If you wish to override this, or provide an array of destination files,
+ * // you must provide an `options` object.
* //-
- * const metadata = {
- * createDisposition: 'CREATE_NEVER',
- * writeDisposition: 'WRITE_TRUNCATE'
+ * table.extract(extractedFile, function(err, apiResponse) {});
+ *
+ * //-
+ * // If you need more customization, pass an `options` object.
+ * //-
+ * var options = {
+ * format: 'json',
+ * gzip: true
* };
*
- * table.startCopy(yourTable, metadata, function(err, job, apiResponse) {});
+ * table.extract(extractedFile, options, function(err, apiResponse) {});
+ *
+ * //-
+ * // You can also specify multiple destination files.
+ * //-
+ * table.extract([
+ * storage.bucket('institutions').file('2014.json'),
+ * storage.bucket('institutions-copy').file('2014.json')
+ * ], options, function(err, apiResponse) {});
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
- * table.startCopy(yourTable, metadata).then(function(data) {
- * const job = data[0];
- * const apiResponse = data[1];
+ * table.extract(extractedFile, options).then(function(data) {
+ * var apiResponse = data[0];
* });
*/
-Table.prototype.startCopy = function(destination, metadata, callback) {
- if (!(destination instanceof Table)) {
- throw new Error('Destination must be a Table object.');
- }
-
- if (is.fn(metadata)) {
- callback = metadata;
- metadata = {};
+Table.prototype.extract = function(destination, options, callback) {
+ if (is.fn(options)) {
+ callback = options;
+ options = {};
}
- var body = {
- configuration: {
- copy: extend(true, metadata, {
- destinationTable: {
- datasetId: destination.dataset.id,
- projectId: destination.bigQuery.projectId,
- tableId: destination.id,
- },
- sourceTable: {
- datasetId: this.dataset.id,
- projectId: this.bigQuery.projectId,
- tableId: this.id,
- },
- }),
- },
- };
-
- if (metadata.jobPrefix) {
- body.jobPrefix = metadata.jobPrefix;
- delete metadata.jobPrefix;
- }
+ this.createExtractJob(destination, options, function(err, job, resp) {
+ if (err) {
+ callback(err, resp);
+ return;
+ }
- this.bigQuery.createJob(body, callback);
+ job.on('error', callback).on('complete', function(metadata) {
+ callback(null, metadata);
+ });
+ });
};
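For reference, a minimal usage sketch of the two extract paths above; the bucket, file, and table names are hypothetical:

const Storage = require('@google-cloud/storage');
const BigQuery = require('@google-cloud/bigquery');

const storage = new Storage({projectId: 'grape-spaceship-123'});
const bigquery = new BigQuery();
const table = bigquery.dataset('my-dataset').table('my-table');
const file = storage.bucket('institutions').file('2014.json');

// extract() waits for the underlying job to finish before calling back.
table.extract(file, {gzip: true}, function(err, apiResponse) {});

// createExtractJob() hands back the Job immediately so you can monitor it.
table.createExtractJob(file, {gzip: true}, function(err, job) {
  if (err) return;
  job
    .on('error', function(err) {})
    .on('complete', function(metadata) {
      // Export finished; `metadata` is the job's final metadata.
    });
});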
/**
- * Copy data from multiple tables into this table.
+ * Retrieves table data from a specified set of rows. The rows are returned to
+ * your callback as an array of objects matching your table's schema.
*
- * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert}
+ * @see [Tabledata: list API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list}
*
- * @param {Table|Table[]} sourceTables The
- * source table(s) to copy data from.
- * @param {object} [metadata] Metadata to set with the copy operation. The
- * metadata object should be in the format of the
- * [`configuration.copy`](http://goo.gl/dKWIyS) property of a Jobs resource.
+ * @param {object} [options] The configuration object.
+ * @param {boolean} [options.autoPaginate=true] Have pagination handled
+ * automatically.
+ * @param {number} [options.maxApiCalls] Maximum number of API calls to make.
+ * @param {number} [options.maxResults] Maximum number of results to return.
* @param {function} [callback] The callback function.
* @param {?error} callback.err An error returned while making this request
- * @param {Job} callback.job The job used to copy your table.
- * @param {object} callback.apiResponse The full API response.
+ * @param {array} callback.rows The table data from the specified set of rows.
* @returns {Promise}
*
- * @throws {Error} If a source other than a Table object is provided.
- *
* @example
* const BigQuery = require('@google-cloud/bigquery');
* const bigquery = new BigQuery();
* const dataset = bigquery.dataset('my-dataset');
 * const table = dataset.table('my-table');
*
- * const sourceTables = [
- * dataset.table('your-table'),
- * dataset.table('your-second-table')
- * ];
- *
- * const callback = function(err, job, apiResponse) {
- * // `job` is a Job object that can be used to check the status of the
- * // request.
- * };
- *
- * table.startCopyFrom(sourceTables, callback);
+ * table.getRows(function(err, rows) {
+ * if (!err) {
+ * // rows is an array of results.
+ * }
+ * });
*
* //-
- * // See the `configuration.copy` object for
- * // all available options.
+ * // To control how many API requests are made and page through the results
+ * // manually, set `autoPaginate` to `false`.
* //-
- * const metadata = {
- * createDisposition: 'CREATE_NEVER',
- * writeDisposition: 'WRITE_TRUNCATE'
- * };
+ * function manualPaginationCallback(err, rows, nextQuery, apiResponse) {
+ * if (nextQuery) {
+ * // More results exist.
+ * table.getRows(nextQuery, manualPaginationCallback);
+ * }
+ * }
*
- * table.startCopyFrom(sourceTables, metadata, callback);
+ * table.getRows({
+ * autoPaginate: false
+ * }, manualPaginationCallback);
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
- * table.startCopyFrom(sourceTables, metadata).then(function(data) {
- * const job = data[0];
- * const apiResponse = data[1];
- * });
- */
-Table.prototype.startCopyFrom = function(sourceTables, metadata, callback) {
- sourceTables = arrify(sourceTables);
-
- sourceTables.forEach(function(sourceTable) {
- if (!(sourceTable instanceof Table)) {
- throw new Error('Source must be a Table object.');
- }
- });
+ * table.getRows().then(function(data) {
+ * const rows = data[0];
+ * });
+ */
+Table.prototype.getRows = function(options, callback) {
+ var self = this;
- if (is.fn(metadata)) {
- callback = metadata;
- metadata = {};
+ if (is.fn(options)) {
+ callback = options;
+ options = {};
}
- var body = {
- configuration: {
- copy: extend(true, metadata, {
- destinationTable: {
- datasetId: this.dataset.id,
- projectId: this.bigQuery.projectId,
- tableId: this.id,
- },
-
- sourceTables: sourceTables.map(function(sourceTable) {
- return {
- datasetId: sourceTable.dataset.id,
- projectId: sourceTable.bigQuery.projectId,
- tableId: sourceTable.id,
- };
- }),
- }),
+ this.request(
+ {
+ uri: '/data',
+ qs: options,
},
- };
+ function(err, resp) {
+ if (err) {
+ onComplete(err, null, null, resp);
+ return;
+ }
- if (metadata.jobPrefix) {
- body.jobPrefix = metadata.jobPrefix;
- delete metadata.jobPrefix;
- }
+ var nextQuery = null;
- this.bigQuery.createJob(body, callback);
+ if (resp.pageToken) {
+ nextQuery = extend({}, options, {
+ pageToken: resp.pageToken,
+ });
+ }
+
+ if (resp.rows && resp.rows.length > 0 && !self.metadata.schema) {
+ // We don't know the schema for this table yet. Do a quick stat.
+ self.getMetadata(function(err, metadata, apiResponse) {
+ if (err) {
+ onComplete(err, null, null, apiResponse);
+ return;
+ }
+
+ onComplete(null, resp.rows, nextQuery, resp);
+ });
+
+ return;
+ }
+
+ onComplete(null, resp.rows, nextQuery, resp);
+ }
+ );
+
+ function onComplete(err, rows, nextQuery, resp) {
+ if (err) {
+ callback(err, null, null, resp);
+ return;
+ }
+
+ rows = self.bigQuery.mergeSchemaWithRows_(self.metadata.schema, rows || []);
+ callback(null, rows, nextQuery, resp);
+ }
};
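As a rough sketch of what the schema merge above does, using the sample row and schema that appear in the unit tests further down (values are illustrative):

// Raw row returned by tabledata.list:  {f: [{v: 'stephen'}]}
// Table schema:                        {fields: [{name: 'name', type: 'string'}]}
// Rows handed to the getRows callback: [{name: 'stephen'}]
table.getRows({maxResults: 10}, function(err, rows) {
  if (!err) {
    // e.g. rows[0].name === 'stephen'
  }
});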
/**
- * Export table to Cloud Storage.
+ * Stream data into BigQuery one record at a time without running a load job.
*
- * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert}
+ * Stricter quota limits apply to this method, so it is highly
+ * recommended that you load data into BigQuery using
+ * {@link Table#load} instead.
*
- * @param {string|File} destination Where the file should be exported
- * to. A string or a {@link https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} object.
- * @param {object=} options - The configuration object.
- * @param {string} options.format - The format to export the data in. Allowed
- * options are "CSV", "JSON", or "AVRO". Default: "CSV".
- * @param {boolean} options.gzip - Specify if you would like the file compressed
- * with GZIP. Default: false.
- * @param {function} callback - The callback function.
- * @param {?error} callback.err - An error returned while making this request
- * @param {Job} callback.job - The job used to export the table.
- * @param {object} callback.apiResponse - The full API response.
+ * @see [Tabledata: insertAll API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll}
+ * @see [Troubleshooting Errors]{@link https://developers.google.com/bigquery/troubleshooting-errors}
*
- * @throws {Error} If destination isn't a File object.
- * @throws {Error} If destination format isn't recongized.
+ * @param {object|object[]} rows The rows to insert into the table.
+ * @param {object} [options] Configuration object.
+ * @param {boolean} [options.autoCreate] Automatically create the table if it
+ * doesn't already exist. In order for this to succeed the `schema` option
+ * must also be set. Note that this can take longer than 2 minutes to
+ * complete.
+ * @param {boolean} [options.ignoreUnknownValues=false] Accept rows that contain
+ * values that do not match the schema. The unknown values are ignored.
+ * @param {boolean} [options.raw] If `true`, the `rows` argument is expected to
+ *     be formatted according to the
+ * [specification](https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll).
+ * @param {string|object} [options.schema] A comma-separated list of name:type
+ * pairs. Valid types are "string", "integer", "float", "boolean", and
+ * "timestamp". If the type is omitted, it is assumed to be "string".
+ * Example: "name:string, age:integer". Schemas can also be specified as a
+ * JSON array of fields, which allows for nested and repeated fields. See
+ * a [Table resource](http://goo.gl/sl8Dmg) for more detailed information.
+ * @param {boolean} [options.skipInvalidRows=false] Insert all valid rows of a
+ * request, even if invalid rows exist.
+ * @param {string} [options.templateSuffix] Treat the destination table as a
+ * base template, and insert the rows into an instance table named
+ * "{destination}{templateSuffix}". BigQuery will manage creation of
+ * the instance table, using the schema of the base template table. See
+ * [Automatic table creation using template tables](https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables)
+ *     for considerations when working with template tables.
+ * @param {function} [callback] The callback function.
+ * @param {?error} callback.err An error returned while making this request.
+ * @param {object[]} callback.err.errors If present, these represent partial
+ * failures. It's possible for part of your request to be completed
+ * successfully, while the other part was not.
+ * @param {object} callback.apiResponse The full API response.
+ * @returns {Promise}
*
* @example
- * const Storage = require('@google-cloud/storage');
* const BigQuery = require('@google-cloud/bigquery');
* const bigquery = new BigQuery();
* const dataset = bigquery.dataset('my-dataset');
 * const table = dataset.table('my-table');
*
- * const storage = new Storage({
- * projectId: 'grape-spaceship-123'
- * });
- * const extractedFile = storage.bucket('institutions').file('2014.csv');
- *
- * function callback(err, job, apiResponse) {
- * // `job` is a Job object that can be used to check the status of the
- * // request.
- * }
+ * //-
+ * // Insert a single row.
+ * //-
+ * table.insert({
+ * INSTNM: 'Motion Picture Institute of Michigan',
+ * CITY: 'Troy',
+ * STABBR: 'MI'
+ * }, insertHandler);
*
* //-
- * // To use the default options, just pass a {@link https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} object.
- * //
- * // Note: The exported format type will be inferred by the file's extension.
- * // If you wish to override this, or provide an array of destination files,
- * // you must provide an `options` object.
+ * // Insert multiple rows at a time.
* //-
- * table.startExtract(extractedFile, callback);
+ * var rows = [
+ * {
+ * INSTNM: 'Motion Picture Institute of Michigan',
+ * CITY: 'Troy',
+ * STABBR: 'MI'
+ * },
+ * // ...
+ * ];
+ *
+ * table.insert(rows, insertHandler);
*
* //-
- * // If you need more customization, pass an `options` object.
+ * // Insert a row according to the
+ * // specification.
* //-
- * const options = {
- * format: 'json',
- * gzip: true
+ * var row = {
+ * insertId: '1',
+ * json: {
+ * INSTNM: 'Motion Picture Institute of Michigan',
+ * CITY: 'Troy',
+ * STABBR: 'MI'
+ * }
+ * };
+ *
+ * var options = {
+ * raw: true
* };
*
- * table.startExtract(extractedFile, options, callback);
+ * table.insert(row, options, insertHandler);
*
* //-
- * // You can also specify multiple destination files.
+ * // Handling the response. See
+ * // Troubleshooting Errors for best practices on how to handle errors.
* //-
- * table.startExtract([
- * storage.bucket('institutions').file('2014.json'),
- * storage.bucket('institutions-copy').file('2014.json')
- * ], options, callback);
+ * function insertHandler(err, apiResponse) {
+ * if (err) {
+ * // An API error or partial failure occurred.
+ *
+ * if (err.name === 'PartialFailureError') {
+ * // Some rows failed to insert, while others may have succeeded.
+ *
+ * // err.errors (object[]):
+ * // err.errors[].row (original row object passed to `insert`)
+ * // err.errors[].errors[].reason
+ * // err.errors[].errors[].message
+ * }
+ * }
+ * }
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
- * table.startExtract(extractedFile, options).then(function(data) {
- * const job = data[0];
- * const apiResponse = data[1];
- * });
+ * table.insert(rows)
+ * .then(function(data) {
+ * var apiResponse = data[0];
+ * })
+ * .catch(function(err) {
+ * // An API error or partial failure occurred.
+ *
+ * if (err.name === 'PartialFailureError') {
+ * // Some rows failed to insert, while others may have succeeded.
+ *
+ * // err.errors (object[]):
+ * // err.errors[].row (original row object passed to `insert`)
+ * // err.errors[].errors[].reason
+ * // err.errors[].errors[].message
+ * }
+ * });
*/
-Table.prototype.startExtract = function(destination, options, callback) {
+Table.prototype.insert = function(rows, options, callback) {
+ var self = this;
+
if (is.fn(options)) {
callback = options;
options = {};
}
- options = extend(true, options, {
- destinationUris: arrify(destination).map(function(dest) {
- if (!common.util.isCustomType(dest, 'storage/file')) {
- throw new Error('Destination must be a File object.');
- }
+ rows = arrify(rows);
- // If no explicit format was provided, attempt to find a match from the
- // file's extension. If no match, don't set, and default upstream to CSV.
- var format = path
- .extname(dest.name)
- .substr(1)
- .toLowerCase();
- if (!options.destinationFormat && !options.format && FORMATS[format]) {
- options.destinationFormat = FORMATS[format];
- }
+ if (!rows.length) {
+ throw new Error('You must provide at least 1 row to be inserted.');
+ }
+
+ var json = extend(true, {}, options, {
+ rows: rows,
+ });
+
+ if (!options.raw) {
+ json.rows = arrify(rows).map(function(row) {
+ return {
+ insertId: uuid.v4(),
+ json: Table.encodeValue_(row),
+ };
+ });
+ }
+
+ delete json.raw;
- return 'gs://' + dest.bucket.name + '/' + dest.name;
- }),
- });
+ var autoCreate = !!options.autoCreate;
+ var schema;
- if (options.format) {
- options.format = options.format.toLowerCase();
+ delete json.autoCreate;
- if (FORMATS[options.format]) {
- options.destinationFormat = FORMATS[options.format];
- delete options.format;
- } else {
- throw new Error('Destination format not recognized: ' + options.format);
+ if (autoCreate) {
+ if (!options.schema) {
+ throw new Error('Schema must be provided in order to auto-create Table.');
}
- }
- if (options.gzip) {
- options.compression = 'GZIP';
- delete options.gzip;
+ schema = options.schema;
+ delete json.schema;
}
- var body = {
- configuration: {
- extract: extend(true, options, {
- sourceTable: {
- datasetId: this.dataset.id,
- projectId: this.bigQuery.projectId,
- tableId: this.id,
- },
- }),
+ this.request(
+ {
+ method: 'POST',
+ uri: '/insertAll',
+ json: json,
},
- };
+ function(err, resp) {
+ if (err) {
+ if (err.code === 404 && autoCreate) {
+ setTimeout(createTableAndRetry, Math.random() * 60000);
+ } else {
+ callback(err, resp);
+ }
+ return;
+ }
- if (options.jobPrefix) {
- body.jobPrefix = options.jobPrefix;
- delete options.jobPrefix;
- }
+ var partialFailures = (resp.insertErrors || []).map(function(
+ insertError
+ ) {
+ return {
+ errors: insertError.errors.map(function(error) {
+ return {
+ message: error.message,
+ reason: error.reason,
+ };
+ }),
+ row: rows[insertError.index],
+ };
+ });
- this.bigQuery.createJob(body, callback);
+ if (partialFailures.length > 0) {
+ err = new common.util.PartialFailureError({
+ errors: partialFailures,
+ response: resp,
+ });
+ }
+
+ callback(err, resp);
+ }
+ );
+
+ function createTableAndRetry() {
+ self.create(
+ {
+ schema: schema,
+ },
+ function(err, table, resp) {
+ if (err && err.code !== 409) {
+ callback(err, resp);
+ return;
+ }
+
+ setTimeout(function() {
+ self.insert(rows, options, callback);
+ }, 60000);
+ }
+ );
+ }
};
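A small usage sketch of the autoCreate path implemented above, assuming the table does not exist yet (row values and schema are hypothetical):

const options = {
  autoCreate: true,
  // Required whenever autoCreate is set; same format as documented above.
  schema: 'name:string, age:integer',
};

table.insert({name: 'Stephen', age: 30}, options, function(err, apiResponse) {
  // On a 404 the table is created with the given schema and the insert is
  // retried once it is available, so this can take longer than two minutes.
});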
/**
@@ -1595,13 +1620,11 @@ Table.prototype.startExtract = function(destination, options, callback) {
*
* By loading data this way, you create a load job that will run your data load
* asynchronously. If you would like instantaneous access to your data, insert
- * it using {@liink Table#insert}.
+ * it using {@link Table#insert}.
*
* Note: The file type will be inferred by the given file's extension. If you
* wish to override this, you must provide `metadata.format`.
*
- * @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert}
- *
* @param {string|File} source The source file to import. A string or a
* {@link https://cloud.google.com/nodejs/docs/reference/storage/latest/File File} object.
* @param {object} [metadata] Metadata to set with the load operation. The
@@ -1611,14 +1634,12 @@ Table.prototype.startExtract = function(destination, options, callback) {
* Allowed options are "CSV", "JSON", or "AVRO".
* @param {function} [callback] The callback function.
* @param {?error} callback.err An error returned while making this request
- * @param {Job} callback.job The job used to import your data.
* @param {object} callback.apiResponse The full API response.
* @returns {Promise}
*
* @throws {Error} If the source isn't a string file name or a File instance.
*
* @example
- * const Storage = require('@google-cloud/storage');
* const BigQuery = require('@google-cloud/bigquery');
* const bigquery = new BigQuery();
* const dataset = bigquery.dataset('my-dataset');
@@ -1627,138 +1648,120 @@ Table.prototype.startExtract = function(destination, options, callback) {
* //-
* // Load data from a local file.
* //-
- * const callback = function(err, job, apiResponse) {
- * // `job` is a Job object that can be used to check the status of the
- * // request.
- * };
- *
- * table.startLoad('./institutions.csv', callback);
+ * table.load('./institutions.csv', function(err, apiResponse) {});
*
* //-
* // You may also pass in metadata in the format of a Jobs resource. See
* // (http://goo.gl/BVcXk4) for a full list of supported values.
* //-
- * const metadata = {
+ * var metadata = {
* encoding: 'ISO-8859-1',
* sourceFormat: 'NEWLINE_DELIMITED_JSON'
* };
*
- * table.startLoad('./my-data.csv', metadata, callback);
+ * table.load('./my-data.csv', metadata, function(err, apiResponse) {});
*
* //-
* // Load data from a file in your Cloud Storage bucket.
* //-
- * const storage = new Storage({
+ * var gcs = require('@google-cloud/storage')({
* projectId: 'grape-spaceship-123'
* });
- * const data = storage.bucket('institutions').file('data.csv');
- * table.startLoad(data, callback);
+ * var data = gcs.bucket('institutions').file('data.csv');
+ * table.load(data, function(err, apiResponse) {});
*
* //-
* // Load data from multiple files in your Cloud Storage bucket(s).
* //-
- * table.startLoad([
- * storage.bucket('institutions').file('2011.csv'),
- * storage.bucket('institutions').file('2012.csv')
- * ], callback);
+ * table.load([
+ * gcs.bucket('institutions').file('2011.csv'),
+ * gcs.bucket('institutions').file('2012.csv')
+ * ], function(err, apiResponse) {});
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
- * table.startLoad(data).then(function(data) {
- * const job = data[0];
- * const apiResponse = data[1];
+ * table.load(data).then(function(data) {
+ * var apiResponse = data[0];
* });
*/
-Table.prototype.startLoad = function(source, metadata, callback) {
+Table.prototype.load = function(source, metadata, callback) {
if (is.fn(metadata)) {
callback = metadata;
metadata = {};
}
- callback = callback || common.util.noop;
- metadata = metadata || {};
-
- if (metadata.format) {
- metadata.sourceFormat = FORMATS[metadata.format.toLowerCase()];
- delete metadata.format;
- }
-
- if (is.string(source)) {
- // A path to a file was given. If a sourceFormat wasn't specified, try to
- // find a match from the file's extension.
- var detectedFormat =
- FORMATS[
- path
- .extname(source)
- .substr(1)
- .toLowerCase()
- ];
- if (!metadata.sourceFormat && detectedFormat) {
- metadata.sourceFormat = detectedFormat;
+ this.createLoadJob(source, metadata, function(err, job, resp) {
+ if (err) {
+ callback(err, resp);
+ return;
}
- // Read the file into a new write stream.
- return fs
- .createReadStream(source)
- .pipe(this.createWriteStream(metadata))
- .on('error', callback)
- .on('complete', function(job) {
- callback(null, job, job.metadata);
- });
- }
-
- var body = {
- configuration: {
- load: {
- destinationTable: {
- projectId: this.bigQuery.projectId,
- datasetId: this.dataset.id,
- tableId: this.id,
- },
- },
- },
- };
-
- if (metadata.jobPrefix) {
- body.jobPrefix = metadata.jobPrefix;
- delete metadata.jobPrefix;
- }
-
- extend(true, body.configuration.load, metadata, {
- sourceUris: arrify(source).map(function(src) {
- if (!common.util.isCustomType(src, 'storage/file')) {
- throw new Error('Source must be a File object.');
- }
-
- // If no explicit format was provided, attempt to find a match from
- // the file's extension. If no match, don't set, and default upstream
- // to CSV.
- var format =
- FORMATS[
- path
- .extname(src.name)
- .substr(1)
- .toLowerCase()
- ];
- if (!metadata.sourceFormat && format) {
- body.configuration.load.sourceFormat = format;
- }
-
- return 'gs://' + src.bucket.name + '/' + src.name;
- }),
+ job.on('error', callback).on('complete', function(metadata) {
+ callback(null, metadata);
+ });
});
+};
- this.bigQuery.createJob(body, callback);
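As with extract(), load() above is a thin wrapper that waits for the job created by createLoadJob() to finish. A quick sketch of the two call styles (file paths are hypothetical):

// load() calls back once the load job has completed (or failed).
table.load('./institutions.csv', function(err, apiResponse) {});

// createLoadJob() returns the Job right away for manual monitoring.
table.createLoadJob('./institutions.csv', function(err, job) {
  if (err) return;
  job
    .on('error', function(err) {})
    .on('complete', function(metadata) {});
});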
+/**
+ * Run a query scoped to your dataset.
+ *
+ * See {@link BigQuery#query} for full documentation of this method.
+ * @param {object} query See {@link BigQuery#query} for full documentation of this method.
+ * @param {function} [callback] See {@link BigQuery#query} for full documentation of this method.
+ * @returns {Promise}
+ */
+Table.prototype.query = function(query, callback) {
+ this.dataset.query(query, callback);
};
/**
- * Start running a query scoped to your dataset.
+ * Set the metadata on the table.
+ *
+ * @see [Tables: update API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/tables/update}
+ *
+ * @param {object} metadata The metadata key/value object to set.
+ * @param {string} metadata.description A user-friendly description of the
+ * table.
+ * @param {string} metadata.name A descriptive name for the table.
+ * @param {string|object} metadata.schema A comma-separated list of name:type
+ * pairs. Valid types are "string", "integer", "float", "boolean", "bytes",
+ * "record", and "timestamp". If the type is omitted, it is assumed to be
+ * "string". Example: "name:string, age:integer". Schemas can also be
+ * specified as a JSON array of fields, which allows for nested and repeated
+ * fields. See a [Table resource](http://goo.gl/sl8Dmg) for more detailed
+ * information.
+ * @param {function} [callback] The callback function.
+ * @param {?error} callback.err An error returned while making this request.
+ * @param {object} callback.apiResponse The full API response.
+ * @returns {Promise}
+ *
+ * @example
+ * const BigQuery = require('@google-cloud/bigquery');
+ * const bigquery = new BigQuery();
+ * const dataset = bigquery.dataset('my-dataset');
+ * const table = dataset.table('my-table');
+ *
+ * const metadata = {
+ * name: 'My recipes',
+ * description: 'A table for storing my recipes.',
+ * schema: 'name:string, servings:integer, cookingTime:float, quick:boolean'
+ * };
+ *
+ * table.setMetadata(metadata, function(err, metadata, apiResponse) {});
*
- * See {@link BigQuery#startQuery} for full documentation of this method.
+ * //-
+ * // If the callback is omitted, we'll return a Promise.
+ * //-
+ * table.setMetadata(metadata).then(function(data) {
+ * const metadata = data[0];
+ * const apiResponse = data[1];
+ * });
*/
-Table.prototype.startQuery = function(options, callback) {
- return this.dataset.startQuery(options, callback);
+Table.prototype.setMetadata = function(metadata, callback) {
+ var body = Table.formatMetadata_(metadata);
+
+ common.ServiceObject.prototype.setMetadata.call(this, body, callback);
};
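Beyond the comma-separated string shown in the example above, the JSDoc also allows the schema to be given as a JSON array of fields; a sketch assuming that form (field names are hypothetical):

const metadata = {
  name: 'My recipes',
  description: 'A table for storing my recipes.',
  // Array form of the schema, which also allows nested and repeated fields.
  schema: [
    {name: 'name', type: 'STRING'},
    {name: 'ingredients', type: 'STRING', mode: 'REPEATED'},
    {name: 'servings', type: 'INTEGER'},
    {name: 'quick', type: 'BOOLEAN'},
  ],
};

table.setMetadata(metadata, function(err, metadata, apiResponse) {});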
/*! Developer Documentation
diff --git a/system-test/bigquery.js b/system-test/bigquery.js
index c58cb69a..3a30fef6 100644
--- a/system-test/bigquery.js
+++ b/system-test/bigquery.js
@@ -215,7 +215,7 @@ describe('BigQuery', function() {
});
it('should run a query job, then get results', function(done) {
- bigquery.startQuery(query, function(err, job) {
+ bigquery.createQueryJob(query, function(err, job) {
assert.ifError(err);
assert(job instanceof Job);
@@ -232,7 +232,7 @@ describe('BigQuery', function() {
var job;
return bigquery
- .startQuery(query)
+ .createQueryJob(query)
.then(function(response) {
job = response[0];
return job.promise();
@@ -248,7 +248,7 @@ describe('BigQuery', function() {
});
it('should get query results as a stream', function(done) {
- bigquery.startQuery(query, function(err, job) {
+ bigquery.createQueryJob(query, function(err, job) {
assert.ifError(err);
var rowsEmitted = [];
@@ -273,7 +273,7 @@ describe('BigQuery', function() {
jobPrefix: 'hi-im-a-prefix',
};
- bigquery.startQuery(options, function(err, job) {
+ bigquery.createQueryJob(options, function(err, job) {
assert.ifError(err);
assert.strictEqual(job.id.indexOf(options.jobPrefix), 0);
@@ -292,7 +292,7 @@ describe('BigQuery', function() {
dryRun: true,
};
- bigquery.startQuery(options, function(err, job) {
+ bigquery.createQueryJob(options, function(err, job) {
assert.ifError(err);
assert(job.metadata.statistics);
done();
@@ -364,7 +364,7 @@ describe('BigQuery', function() {
it('should cancel a job', function(done) {
var query = 'SELECT url FROM `publicdata.samples.github_nested` LIMIT 10';
- bigquery.startQuery(query, function(err, job) {
+ bigquery.createQueryJob(query, function(err, job) {
assert.ifError(err);
job.cancel(function(err) {
@@ -612,7 +612,7 @@ describe('BigQuery', function() {
var table2 = TABLES[2];
var table2Instance = table2.table;
- table1Instance.startCopy(table2Instance, function(err, job) {
+ table1Instance.createCopyJob(table2Instance, function(err, job) {
assert.ifError(err);
job.on('error', done).on('complete', function() {
@@ -645,7 +645,7 @@ describe('BigQuery', function() {
var table2 = TABLES[2];
var table2Instance = table2.table;
- table2Instance.startCopyFrom(table1Instance, function(err, job) {
+ table2Instance.createCopyFromJob(table1Instance, function(err, job) {
assert.ifError(err);
job.on('error', done).on('complete', function() {
@@ -688,7 +688,7 @@ describe('BigQuery', function() {
});
it('should start to load data from a storage file', function(done) {
- table.startLoad(file, function(err, job) {
+ table.createLoadJob(file, function(err, job) {
assert.ifError(err);
job.on('error', done).on('complete', function() {
@@ -1201,7 +1201,7 @@ describe('BigQuery', function() {
it('should start extracting data to a storage file', function(done) {
var file = bucket.file('kitten-test-data-backup.json');
- table.startExtract(file, function(err, job) {
+ table.createExtractJob(file, function(err, job) {
assert.ifError(err);
job.on('error', done).on('complete', function() {
diff --git a/test/dataset.js b/test/dataset.js
index 84f53c2b..b0c132f2 100644
--- a/test/dataset.js
+++ b/test/dataset.js
@@ -180,6 +180,40 @@ describe('BigQuery/Dataset', function() {
});
});
+ describe('createQueryJob', function() {
+ var FAKE_QUERY = 'SELECT * FROM `table`';
+
+ it('should extend the options', function(done) {
+ var fakeOptions = {
+ query: FAKE_QUERY,
+ a: {b: 'c'},
+ };
+
+ var expectedOptions = extend(true, {}, fakeOptions, {
+ defaultDataset: {
+ datasetId: ds.id,
+ },
+ });
+
+ ds.bigQuery.createQueryJob = function(options, callback) {
+ assert.deepEqual(options, expectedOptions);
+ assert.notStrictEqual(fakeOptions, options);
+ callback(); // the done fn
+ };
+
+ ds.createQueryJob(fakeOptions, done);
+ });
+
+ it('should accept a query string', function(done) {
+ ds.bigQuery.createQueryJob = function(options, callback) {
+ assert.strictEqual(options.query, FAKE_QUERY);
+ callback(); // the done fn
+ };
+
+ ds.createQueryJob(FAKE_QUERY, done);
+ });
+ });
+
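In user-facing terms, the behaviour pinned down above is that the dataset-scoped method forwards to BigQuery#createQueryJob with defaultDataset filled in, so unqualified table names resolve against this dataset. A sketch with hypothetical names:

const dataset = bigquery.dataset('my-dataset');

dataset.createQueryJob('SELECT * FROM `my-table`', function(err, job) {
  if (err) return;
  job.on('error', console.error).on('complete', function(metadata) {});
});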
describe('createQueryStream', function() {
var options = {
a: 'b',
@@ -648,40 +682,6 @@ describe('BigQuery/Dataset', function() {
});
});
- describe('startQuery', function() {
- var FAKE_QUERY = 'SELECT * FROM `table`';
-
- it('should extend the options', function(done) {
- var fakeOptions = {
- query: FAKE_QUERY,
- a: {b: 'c'},
- };
-
- var expectedOptions = extend(true, {}, fakeOptions, {
- defaultDataset: {
- datasetId: ds.id,
- },
- });
-
- ds.bigQuery.startQuery = function(options, callback) {
- assert.deepEqual(options, expectedOptions);
- assert.notStrictEqual(fakeOptions, options);
- callback(); // the done fn
- };
-
- ds.startQuery(fakeOptions, done);
- });
-
- it('should accept a query string', function(done) {
- ds.bigQuery.startQuery = function(options, callback) {
- assert.strictEqual(options.query, FAKE_QUERY);
- callback(); // the done fn
- };
-
- ds.startQuery(FAKE_QUERY, done);
- });
- });
-
describe('table', function() {
it('should return a Table object', function() {
var tableId = 'tableId';
diff --git a/test/index.js b/test/index.js
index 448f3ab0..7bc8b6d9 100644
--- a/test/index.js
+++ b/test/index.js
@@ -893,6 +893,216 @@ describe('BigQuery', function() {
});
});
+ describe('createQueryJob', function() {
+ var QUERY_STRING = 'SELECT * FROM [dataset.table]';
+
+ it('should throw if a query is not provided', function() {
+ assert.throws(function() {
+ bq.createQueryJob();
+ }, /SQL query string is required/);
+
+ assert.throws(function() {
+ bq.createQueryJob({noQuery: 'here'});
+ }, /SQL query string is required/);
+ });
+
+ describe('with destination', function() {
+ var dataset;
+ var TABLE_ID = 'table-id';
+
+ beforeEach(function() {
+ dataset = {
+ bigQuery: bq,
+ id: 'dataset-id',
+ createTable: util.noop,
+ };
+ });
+
+ it('should throw if a destination is not a table', function() {
+ assert.throws(function() {
+ bq.createQueryJob({
+ query: 'query',
+ destination: 'not a table',
+ });
+ }, /Destination must be a Table/);
+ });
+
+ it('should assign destination table to request body', function(done) {
+ bq.request = function(reqOpts) {
+ assert.deepEqual(reqOpts.json.configuration.query.destinationTable, {
+ datasetId: dataset.id,
+ projectId: dataset.bigQuery.projectId,
+ tableId: TABLE_ID,
+ });
+
+ done();
+ };
+
+ bq.createQueryJob({
+ query: 'query',
+ destination: new FakeTable(dataset, TABLE_ID),
+ });
+ });
+
+ it('should delete `destination` prop from request body', function(done) {
+ bq.request = function(reqOpts) {
+ var body = reqOpts.json;
+ assert.strictEqual(body.configuration.query.destination, undefined);
+ done();
+ };
+
+ bq.createQueryJob({
+ query: 'query',
+ destination: new FakeTable(dataset, TABLE_ID),
+ });
+ });
+ });
+
+ describe('SQL parameters', function() {
+ var NAMED_PARAMS = {
+ key: 'value',
+ };
+
+ var POSITIONAL_PARAMS = ['value'];
+
+ it('should delete the params option', function(done) {
+ bq.createJob = function(reqOpts) {
+ assert.strictEqual(reqOpts.params, undefined);
+ done();
+ };
+
+ bq.createQueryJob(
+ {
+ query: QUERY_STRING,
+ params: NAMED_PARAMS,
+ },
+ assert.ifError
+ );
+ });
+
+ describe('named', function() {
+ it('should set the correct parameter mode', function(done) {
+ bq.createJob = function(reqOpts) {
+ var query = reqOpts.configuration.query;
+ assert.strictEqual(query.parameterMode, 'named');
+ done();
+ };
+
+ bq.createQueryJob(
+ {
+ query: QUERY_STRING,
+ params: NAMED_PARAMS,
+ },
+ assert.ifError
+ );
+ });
+
+      it('should set the correct query parameters', function(done) {
+ var queryParameter = {};
+
+ BigQuery.valueToQueryParameter_ = function(value) {
+ assert.strictEqual(value, NAMED_PARAMS.key);
+ return queryParameter;
+ };
+
+ bq.createJob = function(reqOpts) {
+ var query = reqOpts.configuration.query;
+ assert.strictEqual(query.queryParameters[0], queryParameter);
+ assert.strictEqual(query.queryParameters[0].name, 'key');
+ done();
+ };
+
+ bq.createQueryJob(
+ {
+ query: QUERY_STRING,
+ params: NAMED_PARAMS,
+ },
+ assert.ifError
+ );
+ });
+ });
+
+ describe('positional', function() {
+ it('should set the correct parameter mode', function(done) {
+ bq.createJob = function(reqOpts) {
+ var query = reqOpts.configuration.query;
+ assert.strictEqual(query.parameterMode, 'positional');
+ done();
+ };
+
+ bq.createQueryJob(
+ {
+ query: QUERY_STRING,
+ params: POSITIONAL_PARAMS,
+ },
+ assert.ifError
+ );
+ });
+
+      it('should set the correct query parameters', function(done) {
+ var queryParameter = {};
+
+ BigQuery.valueToQueryParameter_ = function(value) {
+ assert.strictEqual(value, POSITIONAL_PARAMS[0]);
+ return queryParameter;
+ };
+
+ bq.createJob = function(reqOpts) {
+ var query = reqOpts.configuration.query;
+ assert.strictEqual(query.queryParameters[0], queryParameter);
+ done();
+ };
+
+ bq.createQueryJob(
+ {
+ query: QUERY_STRING,
+ params: POSITIONAL_PARAMS,
+ },
+ assert.ifError
+ );
+ });
+ });
+ });
+
+ it('should accept the dryRun options', function(done) {
+ var options = {
+ query: QUERY_STRING,
+ dryRun: true,
+ };
+
+ bq.createJob = function(reqOpts) {
+ assert.strictEqual(reqOpts.configuration.query.dryRun, undefined);
+ assert.strictEqual(reqOpts.configuration.dryRun, options.dryRun);
+ done();
+ };
+
+ bq.createQueryJob(options, assert.ifError);
+ });
+
+ it('should accept a job prefix', function(done) {
+ var options = {
+ query: QUERY_STRING,
+ jobPrefix: 'hi',
+ };
+
+ bq.createJob = function(reqOpts) {
+ assert.strictEqual(reqOpts.configuration.query.jobPrefix, undefined);
+ assert.strictEqual(reqOpts.jobPrefix, options.jobPrefix);
+ done();
+ };
+
+ bq.createQueryJob(options, assert.ifError);
+ });
+
+ it('should pass the callback to createJob', function(done) {
+ bq.createJob = function(reqOpts, callback) {
+ callback(); // the done fn
+ };
+
+ bq.createQueryJob(QUERY_STRING, done);
+ });
+ });
+
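In user-facing terms, the parameter handling exercised above works like this (query text and values are hypothetical):

// Named parameters: `params` is an object; parameterMode becomes 'named'.
bigquery.createQueryJob({
  query: 'SELECT * FROM `my-dataset.my-table` WHERE name = @name',
  params: {name: 'stephen'},
}, function(err, job) {});

// Positional parameters: `params` is an array; parameterMode is 'positional'.
bigquery.createQueryJob({
  query: 'SELECT * FROM `my-dataset.my-table` WHERE name = ?',
  params: ['stephen'],
}, function(err, job) {});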
describe('dataset', function() {
var DATASET_ID = 'dataset-id';
@@ -1172,10 +1382,10 @@ describe('BigQuery', function() {
var FAKE_RESPONSE = {};
var QUERY_STRING = 'SELECT * FROM [dataset.table]';
- it('should return any errors from startQuery', function(done) {
+ it('should return any errors from createQueryJob', function(done) {
var error = new Error('err');
- bq.startQuery = function(query, callback) {
+ bq.createQueryJob = function(query, callback) {
callback(error, null, FAKE_RESPONSE);
};
@@ -1195,7 +1405,7 @@ describe('BigQuery', function() {
},
};
- bq.startQuery = function(query, callback) {
+ bq.createQueryJob = function(query, callback) {
callback(null, fakeJob, FAKE_RESPONSE);
};
@@ -1216,221 +1426,11 @@ describe('BigQuery', function() {
},
};
- bq.startQuery = function(query, callback) {
+ bq.createQueryJob = function(query, callback) {
callback(null, fakeJob, FAKE_RESPONSE);
};
bq.query(QUERY_STRING, fakeOptions, assert.ifError);
});
});
-
- describe('startQuery', function() {
- var QUERY_STRING = 'SELECT * FROM [dataset.table]';
-
- it('should throw if a query is not provided', function() {
- assert.throws(function() {
- bq.startQuery();
- }, /SQL query string is required/);
-
- assert.throws(function() {
- bq.startQuery({noQuery: 'here'});
- }, /SQL query string is required/);
- });
-
- describe('with destination', function() {
- var dataset;
- var TABLE_ID = 'table-id';
-
- beforeEach(function() {
- dataset = {
- bigQuery: bq,
- id: 'dataset-id',
- createTable: util.noop,
- };
- });
-
- it('should throw if a destination is not a table', function() {
- assert.throws(function() {
- bq.startQuery({
- query: 'query',
- destination: 'not a table',
- });
- }, /Destination must be a Table/);
- });
-
- it('should assign destination table to request body', function(done) {
- bq.request = function(reqOpts) {
- assert.deepEqual(reqOpts.json.configuration.query.destinationTable, {
- datasetId: dataset.id,
- projectId: dataset.bigQuery.projectId,
- tableId: TABLE_ID,
- });
-
- done();
- };
-
- bq.startQuery({
- query: 'query',
- destination: new FakeTable(dataset, TABLE_ID),
- });
- });
-
- it('should delete `destination` prop from request body', function(done) {
- bq.request = function(reqOpts) {
- var body = reqOpts.json;
- assert.strictEqual(body.configuration.query.destination, undefined);
- done();
- };
-
- bq.startQuery({
- query: 'query',
- destination: new FakeTable(dataset, TABLE_ID),
- });
- });
- });
-
- describe('SQL parameters', function() {
- var NAMED_PARAMS = {
- key: 'value',
- };
-
- var POSITIONAL_PARAMS = ['value'];
-
- it('should delete the params option', function(done) {
- bq.createJob = function(reqOpts) {
- assert.strictEqual(reqOpts.params, undefined);
- done();
- };
-
- bq.startQuery(
- {
- query: QUERY_STRING,
- params: NAMED_PARAMS,
- },
- assert.ifError
- );
- });
-
- describe('named', function() {
- it('should set the correct parameter mode', function(done) {
- bq.createJob = function(reqOpts) {
- var query = reqOpts.configuration.query;
- assert.strictEqual(query.parameterMode, 'named');
- done();
- };
-
- bq.startQuery(
- {
- query: QUERY_STRING,
- params: NAMED_PARAMS,
- },
- assert.ifError
- );
- });
-
- it('should get set the correct query parameters', function(done) {
- var queryParameter = {};
-
- BigQuery.valueToQueryParameter_ = function(value) {
- assert.strictEqual(value, NAMED_PARAMS.key);
- return queryParameter;
- };
-
- bq.createJob = function(reqOpts) {
- var query = reqOpts.configuration.query;
- assert.strictEqual(query.queryParameters[0], queryParameter);
- assert.strictEqual(query.queryParameters[0].name, 'key');
- done();
- };
-
- bq.startQuery(
- {
- query: QUERY_STRING,
- params: NAMED_PARAMS,
- },
- assert.ifError
- );
- });
- });
-
- describe('positional', function() {
- it('should set the correct parameter mode', function(done) {
- bq.createJob = function(reqOpts) {
- var query = reqOpts.configuration.query;
- assert.strictEqual(query.parameterMode, 'positional');
- done();
- };
-
- bq.startQuery(
- {
- query: QUERY_STRING,
- params: POSITIONAL_PARAMS,
- },
- assert.ifError
- );
- });
-
- it('should get set the correct query parameters', function(done) {
- var queryParameter = {};
-
- BigQuery.valueToQueryParameter_ = function(value) {
- assert.strictEqual(value, POSITIONAL_PARAMS[0]);
- return queryParameter;
- };
-
- bq.createJob = function(reqOpts) {
- var query = reqOpts.configuration.query;
- assert.strictEqual(query.queryParameters[0], queryParameter);
- done();
- };
-
- bq.startQuery(
- {
- query: QUERY_STRING,
- params: POSITIONAL_PARAMS,
- },
- assert.ifError
- );
- });
- });
- });
-
- it('should accept the dryRun options', function(done) {
- var options = {
- query: QUERY_STRING,
- dryRun: true,
- };
-
- bq.createJob = function(reqOpts) {
- assert.strictEqual(reqOpts.configuration.query.dryRun, undefined);
- assert.strictEqual(reqOpts.configuration.dryRun, options.dryRun);
- done();
- };
-
- bq.startQuery(options, assert.ifError);
- });
-
- it('should accept a job prefix', function(done) {
- var options = {
- query: QUERY_STRING,
- jobPrefix: 'hi',
- };
-
- bq.createJob = function(reqOpts) {
- assert.strictEqual(reqOpts.configuration.query.jobPrefix, undefined);
- assert.strictEqual(reqOpts.jobPrefix, options.jobPrefix);
- done();
- };
-
- bq.startQuery(options, assert.ifError);
- });
-
- it('should pass the callback to createJob', function(done) {
- bq.createJob = function(reqOpts, callback) {
- callback(); // the done fn
- };
-
- bq.startQuery(QUERY_STRING, done);
- });
- });
});
diff --git a/test/table.js b/test/table.js
index e0f0831c..390648fa 100644
--- a/test/table.js
+++ b/test/table.js
@@ -400,16 +400,16 @@ describe('BigQuery/Table', function() {
beforeEach(function() {
fakeJob = new events.EventEmitter();
- table.startCopy = function(destination, metadata, callback) {
+ table.createCopyJob = function(destination, metadata, callback) {
callback(null, fakeJob);
};
});
- it('should pass the arguments to startCopy', function(done) {
+ it('should pass the arguments to createCopyJob', function(done) {
var fakeDestination = {};
var fakeMetadata = {};
- table.startCopy = function(destination, metadata) {
+ table.createCopyJob = function(destination, metadata) {
assert.strictEqual(destination, fakeDestination);
assert.strictEqual(metadata, fakeMetadata);
done();
@@ -419,7 +419,7 @@ describe('BigQuery/Table', function() {
});
it('should optionally accept metadata', function(done) {
- table.startCopy = function(destination, metadata) {
+ table.createCopyJob = function(destination, metadata) {
assert.deepEqual(metadata, {});
done();
};
@@ -427,11 +427,11 @@ describe('BigQuery/Table', function() {
table.copy({}, assert.ifError);
});
- it('should return any startCopy errors', function(done) {
+ it('should return any createCopyJob errors', function(done) {
var error = new Error('err');
var response = {};
- table.startCopy = function(destination, metadata, callback) {
+ table.createCopyJob = function(destination, metadata, callback) {
callback(error, null, response);
};
@@ -471,16 +471,16 @@ describe('BigQuery/Table', function() {
beforeEach(function() {
fakeJob = new events.EventEmitter();
- table.startCopyFrom = function(sourceTables, metadata, callback) {
+ table.createCopyFromJob = function(sourceTables, metadata, callback) {
callback(null, fakeJob);
};
});
- it('should pass the arguments to startCopyFrom', function(done) {
+ it('should pass the arguments to createCopyFromJob', function(done) {
var fakeSourceTables = {};
var fakeMetadata = {};
- table.startCopyFrom = function(sourceTables, metadata) {
+ table.createCopyFromJob = function(sourceTables, metadata) {
assert.strictEqual(sourceTables, fakeSourceTables);
assert.strictEqual(metadata, fakeMetadata);
done();
@@ -490,7 +490,7 @@ describe('BigQuery/Table', function() {
});
it('should optionally accept metadata', function(done) {
- table.startCopyFrom = function(sourceTables, metadata) {
+ table.createCopyFromJob = function(sourceTables, metadata) {
assert.deepEqual(metadata, {});
done();
};
@@ -498,11 +498,11 @@ describe('BigQuery/Table', function() {
table.copyFrom({}, assert.ifError);
});
- it('should return any startCopyFrom errors', function(done) {
+ it('should return any createCopyFromJob errors', function(done) {
var error = new Error('err');
var response = {};
- table.startCopyFrom = function(sourceTables, metadata, callback) {
+ table.createCopyFromJob = function(sourceTables, metadata, callback) {
callback(error, null, response);
};
@@ -537,776 +537,859 @@ describe('BigQuery/Table', function() {
});
});
- describe('createQueryStream', function() {
- it('should call datasetInstance.createQueryStream()', function(done) {
- table.dataset.createQueryStream = function(a) {
- assert.equal(a, 'a');
+ describe('createCopyJob', function() {
+ var DEST_TABLE;
+
+ before(function() {
+ DEST_TABLE = new Table(DATASET, 'destination-table');
+ });
+
+ it('should throw if a destination is not a Table', function() {
+ assert.throws(function() {
+ table.createCopyJob();
+ }, /Destination must be a Table/);
+
+ assert.throws(function() {
+ table.createCopyJob({});
+ }, /Destination must be a Table/);
+
+ assert.throws(function() {
+ table.createCopyJob(function() {});
+ }, /Destination must be a Table/);
+ });
+
+ it('should send correct request to the API', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ assert.deepEqual(reqOpts, {
+ configuration: {
+ copy: {
+ a: 'b',
+ c: 'd',
+ destinationTable: {
+ datasetId: DEST_TABLE.dataset.id,
+ projectId: DEST_TABLE.bigQuery.projectId,
+ tableId: DEST_TABLE.id,
+ },
+ sourceTable: {
+ datasetId: table.dataset.id,
+ projectId: table.bigQuery.projectId,
+ tableId: table.id,
+ },
+ },
+ },
+ });
+
done();
};
- table.createQueryStream('a');
+ table.createCopyJob(DEST_TABLE, {a: 'b', c: 'd'}, assert.ifError);
});
- it('should return whatever dataset.createQueryStream returns', function() {
- var fakeValue = 123;
+ it('should accept a job prefix', function(done) {
+ var fakeJobPrefix = 'abc-';
+ var options = {
+ jobPrefix: fakeJobPrefix,
+ };
- table.dataset.createQueryStream = function() {
- return fakeValue;
+ table.bigQuery.createJob = function(reqOpts, callback) {
+ assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix);
+ assert.strictEqual(reqOpts.configuration.copy.jobPrefix, undefined);
+ callback(); // the done fn
};
- var val = table.createQueryStream();
+ table.createCopyJob(DEST_TABLE, options, done);
+ });
- assert.strictEqual(val, fakeValue);
+ it('should pass the callback to createJob', function(done) {
+ table.bigQuery.createJob = function(reqOpts, callback) {
+ assert.strictEqual(done, callback);
+ callback(); // the done fn
+ };
+
+ table.createCopyJob(DEST_TABLE, {}, done);
+ });
+
+ it('should optionally accept metadata', function(done) {
+ table.bigQuery.createJob = function(reqOpts, callback) {
+ assert.strictEqual(done, callback);
+ callback(); // the done fn
+ };
+
+ table.createCopyJob(DEST_TABLE, done);
});
});
- describe('createWriteStream', function() {
- describe('formats', function() {
- it('should accept csv', function(done) {
- makeWritableStreamOverride = function(stream, options) {
- var load = options.metadata.configuration.load;
- assert.equal(load.sourceFormat, 'CSV');
- done();
- };
+ describe('createCopyFromJob', function() {
+ var SOURCE_TABLE;
- table.createWriteStream('csv').emit('writing');
- });
+ before(function() {
+ SOURCE_TABLE = new Table(DATASET, 'source-table');
+ });
- it('should accept json', function(done) {
- makeWritableStreamOverride = function(stream, options) {
- var load = options.metadata.configuration.load;
- assert.equal(load.sourceFormat, 'NEWLINE_DELIMITED_JSON');
- done();
- };
+ it('should throw if a source is not a Table', function() {
+ assert.throws(function() {
+ table.createCopyFromJob(['table']);
+ }, /Source must be a Table/);
- table.createWriteStream('json').emit('writing');
- });
+ assert.throws(function() {
+ table.createCopyFromJob([SOURCE_TABLE, 'table']);
+ }, /Source must be a Table/);
- it('should accept avro', function(done) {
- makeWritableStreamOverride = function(stream, options) {
- var load = options.metadata.configuration.load;
- assert.equal(load.sourceFormat, 'AVRO');
- done();
- };
+ assert.throws(function() {
+ table.createCopyFromJob({});
+ }, /Source must be a Table/);
- table.createWriteStream('avro').emit('writing');
- });
+ assert.throws(function() {
+ table.createCopyFromJob(function() {});
+ }, /Source must be a Table/);
});
- it('should format a schema', function(done) {
- var expectedSchema = {};
+ it('should send correct request to the API', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ assert.deepEqual(reqOpts, {
+ configuration: {
+ copy: {
+ a: 'b',
+ c: 'd',
+ destinationTable: {
+ datasetId: table.dataset.id,
+ projectId: table.bigQuery.projectId,
+ tableId: table.id,
+ },
+ sourceTables: [
+ {
+ datasetId: SOURCE_TABLE.dataset.id,
+ projectId: SOURCE_TABLE.bigQuery.projectId,
+ tableId: SOURCE_TABLE.id,
+ },
+ ],
+ },
+ },
+ });
- tableOverrides.createSchemaFromString_ = function(string) {
- assert.strictEqual(string, SCHEMA_STRING);
- return expectedSchema;
+ done();
};
- makeWritableStreamOverride = function(stream, options) {
- var load = options.metadata.configuration.load;
- assert.deepEqual(load.schema, expectedSchema);
+ table.createCopyFromJob(SOURCE_TABLE, {a: 'b', c: 'd'}, assert.ifError);
+ });
+
+ it('should accept multiple source tables', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ assert.deepEqual(reqOpts.configuration.copy.sourceTables, [
+ {
+ datasetId: SOURCE_TABLE.dataset.id,
+ projectId: SOURCE_TABLE.bigQuery.projectId,
+ tableId: SOURCE_TABLE.id,
+ },
+ {
+ datasetId: SOURCE_TABLE.dataset.id,
+ projectId: SOURCE_TABLE.bigQuery.projectId,
+ tableId: SOURCE_TABLE.id,
+ },
+ ]);
+
done();
};
- table.createWriteStream({schema: SCHEMA_STRING}).emit('writing');
+ table.createCopyFromJob([SOURCE_TABLE, SOURCE_TABLE], assert.ifError);
});
- it('should throw if a given source format is not recognized', function() {
- assert.throws(function() {
- table.createWriteStream('zip');
- }, /Source format not recognized/);
+ it('should accept a job prefix', function(done) {
+ var fakeJobPrefix = 'abc-';
+ var options = {
+ jobPrefix: fakeJobPrefix,
+ };
- assert.throws(function() {
- table.createWriteStream({
- sourceFormat: 'zip',
- });
- }, /Source format not recognized/);
+ table.bigQuery.createJob = function(reqOpts, callback) {
+ assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix);
+ assert.strictEqual(reqOpts.configuration.copy.jobPrefix, undefined);
+ callback(); // the done fn
+ };
- assert.doesNotThrow(function() {
- table.createWriteStream();
- table.createWriteStream({});
- });
+ table.createCopyFromJob(SOURCE_TABLE, options, done);
});
- it('should return a stream', function() {
- assert(table.createWriteStream() instanceof stream.Stream);
+ it('should pass the callback to createJob', function(done) {
+ table.bigQuery.createJob = function(reqOpts, callback) {
+ assert.strictEqual(done, callback);
+ callback(); // the done fn
+ };
+
+ table.createCopyFromJob(SOURCE_TABLE, {}, done);
});
- describe('writable stream', function() {
- var fakeJobId;
+ it('should optionally accept options', function(done) {
+ table.bigQuery.createJob = function(reqOpts, callback) {
+ assert.strictEqual(done, callback);
+ callback(); // the done fn
+ };
- beforeEach(function() {
- fakeJobId = uuid.v4();
+ table.createCopyFromJob(SOURCE_TABLE, done);
+ });
+ });
- fakeUuid.v4 = function() {
- return fakeJobId;
- };
- });
+ describe('createExtractJob', function() {
+ var FILE = {
+ name: 'file-name.json',
+ bucket: {
+ name: 'bucket-name',
+ },
+ };
- it('should make a writable stream when written to', function(done) {
- var stream;
+ beforeEach(function() {
+ isCustomTypeOverride = function() {
+ return true;
+ };
- makeWritableStreamOverride = function(s) {
- assert.equal(s, stream);
- done();
- };
+ table.bigQuery.job = function(id) {
+ return {id: id};
+ };
- stream = table.createWriteStream();
- stream.emit('writing');
- });
+ table.bigQuery.createJob = function() {};
+ });
- it('should pass the connection', function(done) {
- makeWritableStreamOverride = function(stream, options) {
- assert.deepEqual(options.connection, table.connection);
+ it('should call createJob correctly', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ assert.deepEqual(reqOpts.configuration.extract.sourceTable, {
+ datasetId: table.dataset.id,
+ projectId: table.bigQuery.projectId,
+ tableId: table.id,
+ });
+
+ done();
+ };
+
+ table.createExtractJob(FILE, assert.ifError);
+ });
+
+ it('should accept just a destination and a callback', function(done) {
+ table.bigQuery.createJob = function(reqOpts, callback) {
+ callback(null, {jobReference: {jobId: 'job-id'}});
+ };
+
+ table.createExtractJob(FILE, done);
+ });
+
+ describe('formats', function() {
+ it('should accept csv', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ var extract = reqOpts.configuration.extract;
+ assert.equal(extract.destinationFormat, 'CSV');
done();
};
- table.createWriteStream().emit('writing');
+ table.createExtractJob(FILE, {format: 'csv'}, assert.ifError);
});
- it('should pass extended metadata', function(done) {
- makeWritableStreamOverride = function(stream, options) {
- assert.deepEqual(options.metadata, {
- configuration: {
- load: {
- a: 'b',
- c: 'd',
- destinationTable: {
- projectId: table.bigQuery.projectId,
- datasetId: table.dataset.id,
- tableId: table.id,
- },
- },
- },
- jobReference: {
- projectId: table.bigQuery.projectId,
- jobId: fakeJobId,
- },
- });
+ it('should accept json', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ var extract = reqOpts.configuration.extract;
+ assert.equal(extract.destinationFormat, 'NEWLINE_DELIMITED_JSON');
done();
};
- table.createWriteStream({a: 'b', c: 'd'}).emit('writing');
+ table.createExtractJob(FILE, {format: 'json'}, assert.ifError);
});
- it('should pass the correct request uri', function(done) {
- makeWritableStreamOverride = function(stream, options) {
- var uri =
- 'https://www.googleapis.com/upload/bigquery/v2/projects/' +
- table.bigQuery.projectId +
- '/jobs';
- assert.equal(options.request.uri, uri);
+ it('should accept avro', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ var extract = reqOpts.configuration.extract;
+ assert.equal(extract.destinationFormat, 'AVRO');
done();
};
- table.createWriteStream().emit('writing');
+ table.createExtractJob(FILE, {format: 'avro'}, assert.ifError);
});
+ });
- it('should respect the jobPrefix option', function(done) {
- var jobPrefix = 'abc-';
- var expectedJobId = jobPrefix + fakeJobId;
+ it('should parse out full gs:// urls from files', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ assert.deepEqual(reqOpts.configuration.extract.destinationUris, [
+ 'gs://' + FILE.bucket.name + '/' + FILE.name,
+ ]);
+ done();
+ };
- makeWritableStreamOverride = function(stream, options) {
- var jobId = options.metadata.jobReference.jobId;
- assert.strictEqual(jobId, expectedJobId);
+ table.createExtractJob(FILE, assert.ifError);
+ });
- var config = options.metadata.configuration.load;
- assert.strictEqual(config.jobPrefix, undefined);
+ it('should check if a destination is a File', function(done) {
+ isCustomTypeOverride = function(dest, type) {
+ assert.strictEqual(dest, FILE);
+ assert.strictEqual(type, 'storage/file');
+ setImmediate(done);
+ return true;
+ };
- done();
- };
+ table.createExtractJob(FILE, assert.ifError);
+ });
- table.createWriteStream({jobPrefix: jobPrefix}).emit('writing');
- });
+ it('should throw if a destination is not a File', function() {
+ isCustomTypeOverride = function() {
+ return false;
+ };
- it('should create a job and emit it with complete', function(done) {
- var jobId = 'job-id';
- var metadata = {jobReference: {jobId: jobId}, a: 'b', c: 'd'};
+ assert.throws(function() {
+ table.createExtractJob({}, util.noop);
+ }, /Destination must be a File object/);
- table.bigQuery.job = function(id) {
- return {id: id};
- };
+ assert.throws(function() {
+ table.createExtractJob([FILE, {}], util.noop);
+ }, /Destination must be a File object/);
+ });
- makeWritableStreamOverride = function(stream, options, callback) {
- callback(metadata);
- };
+ it('should detect file format if a format is not provided', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ var destFormat = reqOpts.configuration.extract.destinationFormat;
+ assert.equal(destFormat, 'NEWLINE_DELIMITED_JSON');
+ done();
+ };
- table
- .createWriteStream()
- .on('complete', function(job) {
- assert.equal(job.id, jobId);
- assert.deepEqual(job.metadata, metadata);
- done();
- })
- .emit('writing');
- });
+ table.createExtractJob(FILE, assert.ifError);
});
- });
- describe('extract', function() {
- var fakeJob;
+ it('should assign the provided format if matched', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ var extract = reqOpts.configuration.extract;
+ assert.equal(extract.destinationFormat, 'CSV');
+ assert.strictEqual(extract.format, undefined);
+ done();
+ };
- beforeEach(function() {
- fakeJob = new events.EventEmitter();
- table.startExtract = function(destination, metadata, callback) {
- callback(null, fakeJob);
+ table.createExtractJob(FILE, {format: 'csv'}, assert.ifError);
+ });
+
+ it('should throw if a provided format is not recognized', function() {
+ assert.throws(function() {
+ table.createExtractJob(FILE, {format: 'zip'}, util.noop);
+ }, /Destination format not recognized/);
+ });
+
+ it('should assign GZIP compression with gzip: true', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ assert.equal(reqOpts.configuration.extract.compression, 'GZIP');
+ assert.strictEqual(reqOpts.configuration.extract.gzip, undefined);
+ done();
};
+
+ table.createExtractJob(FILE, {gzip: true}, util.noop);
});
- it('should pass the arguments to startExtract', function(done) {
- var fakeDestination = {};
- var fakeMetadata = {};
+ it('should accept a job prefix', function(done) {
+ var fakeJobPrefix = 'abc-';
+ var options = {
+ jobPrefix: fakeJobPrefix,
+ };
- table.startExtract = function(destination, metadata) {
- assert.strictEqual(destination, fakeDestination);
- assert.strictEqual(metadata, fakeMetadata);
- done();
+ table.bigQuery.createJob = function(reqOpts, callback) {
+ assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix);
+ assert.strictEqual(reqOpts.configuration.extract.jobPrefix, undefined);
+ callback(); // the done fn
};
- table.extract(fakeDestination, fakeMetadata, assert.ifError);
+ table.createExtractJob(FILE, options, done);
});
- it('should optionally accept metadata', function(done) {
- table.startExtract = function(destination, metadata) {
- assert.deepEqual(metadata, {});
- done();
+ it('should pass the callback to createJob', function(done) {
+ table.bigQuery.createJob = function(reqOpts, callback) {
+ assert.strictEqual(done, callback);
+ callback(); // the done fn
};
- table.extract({}, assert.ifError);
+ table.createExtractJob(FILE, {}, done);
});
- it('should return any startExtract errors', function(done) {
- var error = new Error('err');
- var response = {};
-
- table.startExtract = function(destination, metadata, callback) {
- callback(error, null, response);
+ it('should optionally accept options', function(done) {
+ table.bigQuery.createJob = function(reqOpts, callback) {
+ assert.strictEqual(done, callback);
+ callback(); // the done fn
};
- table.extract({}, function(err, resp) {
- assert.strictEqual(err, error);
- assert.strictEqual(resp, response);
- done();
- });
+ table.createExtractJob(FILE, done);
});
+ });
- it('should return any job errors', function(done) {
- var error = new Error('err');
+ describe('createLoadJob', function() {
+ var FILEPATH = require.resolve('./testdata/testfile.json');
+ var FILE = {
+ name: 'file-name.json',
+ bucket: {
+ name: 'bucket-name',
+ },
+ };
- table.extract({}, function(err) {
- assert.strictEqual(err, error);
- done();
- });
+ var JOB = {
+ id: 'foo',
+ metadata: {},
+ };
- fakeJob.emit('error', error);
+ beforeEach(function() {
+ isCustomTypeOverride = function() {
+ return true;
+ };
});
- it('should return the metadata on complete', function(done) {
- var metadata = {};
+ it('should accept just a File and a callback', function(done) {
+ table.createWriteStream = function() {
+ var ws = new stream.Writable();
+ setImmediate(function() {
+ ws.emit('complete', JOB);
+ ws.end();
+ });
+ return ws;
+ };
- table.extract({}, function(err, resp) {
- assert.ifError(err);
- assert.strictEqual(resp, metadata);
+ table.createLoadJob(FILEPATH, function(err, job, resp) {
+ assert.strictEqual(err, null);
+ assert.strictEqual(job, JOB);
+ assert.strictEqual(resp, JOB.metadata);
done();
});
-
- fakeJob.emit('complete', metadata);
});
- });
- describe('getRows', function() {
- it('should accept just a callback', function(done) {
- table.request = function(reqOpts, callback) {
- callback(null, {});
+ it('should return a stream when a string is given', function() {
+ table.createWriteStream = function() {
+ return new stream.Writable();
};
- table.getRows(done);
- });
- it('should make correct API request', function(done) {
- var options = {a: 'b', c: 'd'};
+ assert(table.createLoadJob(FILEPATH) instanceof stream.Stream);
+ });
- table.request = function(reqOpts, callback) {
- assert.strictEqual(reqOpts.uri, '/data');
- assert.strictEqual(reqOpts.qs, options);
- callback(null, {});
+ it('should infer the file format from the given filepath', function(done) {
+ table.createWriteStream = function(metadata) {
+ assert.equal(metadata.sourceFormat, 'NEWLINE_DELIMITED_JSON');
+ var ws = new stream.Writable();
+ setImmediate(function() {
+ ws.emit('complete', JOB);
+ ws.end();
+ });
+ return ws;
};
- table.getRows(options, done);
+ table.createLoadJob(FILEPATH, done);
});
- it('should execute callback with error & API response', function(done) {
- var apiResponse = {};
+ it('should execute callback with error from writestream', function(done) {
var error = new Error('Error.');
- table.request = function(reqOpts, callback) {
- callback(error, apiResponse);
+ table.createWriteStream = function(metadata) {
+ assert.equal(metadata.sourceFormat, 'NEWLINE_DELIMITED_JSON');
+ var ws = new stream.Writable();
+ setImmediate(function() {
+ ws.emit('error', error);
+ ws.end();
+ });
+ return ws;
};
- table.getRows(function(err, rows, nextQuery, apiResponse_) {
+ table.createLoadJob(FILEPATH, function(err) {
assert.strictEqual(err, error);
- assert.strictEqual(rows, null);
- assert.strictEqual(nextQuery, null);
- assert.strictEqual(apiResponse_, apiResponse);
-
done();
});
});
- describe('refreshing metadata', function() {
- // Using "Stephen" so you know who to blame for these tests.
- var rows = [{f: [{v: 'stephen'}]}];
- var schema = {fields: [{name: 'name', type: 'string'}]};
- var mergedRows = [{name: 'stephen'}];
-
- beforeEach(function() {
- table.request = function(reqOpts, callback) {
- // Respond with a row, so it grabs the schema.
- // Use setImmediate to let our getMetadata overwrite process.
- setImmediate(callback, null, {rows: rows});
- };
-
- table.bigQuery.mergeSchemaWithRows_ = function(schema_, rows_) {
- assert.strictEqual(schema_, schema);
- assert.strictEqual(rows_, rows);
- return mergedRows;
- };
- });
-
- it('should refresh', function(done) {
- // Step 1: makes the request.
- table.getRows(responseHandler);
-
- // Step 2: refreshes the metadata to pull down the schema.
- table.getMetadata = function(callback) {
- table.metadata = {schema: schema};
- callback();
- };
+ it('should not infer the file format if one is given', function(done) {
+ table.createWriteStream = function(metadata) {
+ assert.equal(metadata.sourceFormat, 'CSV');
+ var ws = new stream.Writable();
+ setImmediate(function() {
+ ws.emit('complete', JOB);
+ ws.end();
+ });
+ return ws;
+ };
- // Step 3: execute original complete handler with schema-merged rows.
- function responseHandler(err, rows) {
- assert.ifError(err);
- assert.strictEqual(rows, mergedRows);
- done();
- }
- });
+ table.createLoadJob(FILEPATH, {sourceFormat: 'CSV'}, done);
+ });
- it('should execute callback from refreshing metadata', function(done) {
- var apiResponse = {};
- var error = new Error('Error.');
+ it('should check if the source is a File', function(done) {
+ isCustomTypeOverride = function(source, type) {
+ assert.strictEqual(source, FILE);
+ assert.strictEqual(type, 'storage/file');
+ setImmediate(done);
+ return true;
+ };
- // Step 1: makes the request.
- table.getRows(responseHandler);
+ table.createLoadJob(FILE, assert.ifError);
+ });
- // Step 2: refreshes the metadata to pull down the schema.
- table.getMetadata = function(callback) {
- callback(error, {}, apiResponse);
- };
+ it('should throw if a File object is not provided', function() {
+ isCustomTypeOverride = function() {
+ return false;
+ };
- // Step 3: execute original complete handler with schema-merged rows.
- function responseHandler(err, rows, nextQuery, apiResponse_) {
- assert.strictEqual(err, error);
- assert.strictEqual(rows, null);
- assert.strictEqual(nextQuery, null);
- assert.strictEqual(apiResponse_, apiResponse);
- done();
- }
- });
+ assert.throws(function() {
+ table.createLoadJob({});
+ }, /Source must be a File object/);
});
- it('should return schema-merged rows', function(done) {
- var rows = [{f: [{v: 'stephen'}]}];
- var schema = {fields: [{name: 'name', type: 'string'}]};
- var merged = [{name: 'stephen'}];
+ it('should convert File objects to gs:// urls', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ var sourceUri = reqOpts.configuration.load.sourceUris[0];
+ assert.equal(sourceUri, 'gs://' + FILE.bucket.name + '/' + FILE.name);
+ done();
+ };
- table.metadata = {schema: schema};
+ table.createLoadJob(FILE, assert.ifError);
+ });
- table.request = function(reqOpts, callback) {
- callback(null, {rows: rows});
+ it('should infer the file format from a File object', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ var sourceFormat = reqOpts.configuration.load.sourceFormat;
+ assert.equal(sourceFormat, 'NEWLINE_DELIMITED_JSON');
+ done();
};
- table.bigQuery.mergeSchemaWithRows_ = function(schema_, rows_) {
- assert.strictEqual(schema_, schema);
- assert.strictEqual(rows_, rows);
- return merged;
- };
+ table.createLoadJob(FILE, assert.ifError);
+ });
- table.getRows(function(err, rows) {
- assert.ifError(err);
- assert.strictEqual(rows, merged);
+ it('should not override a provided format with a File', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ var sourceFormat = reqOpts.configuration.load.sourceFormat;
+ assert.equal(sourceFormat, 'NEWLINE_DELIMITED_JSON');
done();
- });
- });
+ };
- it('should return apiResponse in callback', function(done) {
- var rows = [{f: [{v: 'stephen'}]}];
- var schema = {fields: [{name: 'name', type: 'string'}]};
- table.metadata = {schema: schema};
+ table.createLoadJob(
+ FILE,
+ {
+ sourceFormat: 'NEWLINE_DELIMITED_JSON',
+ },
+ assert.ifError
+ );
+ });
- table.request = function(reqOpts, callback) {
- callback(null, {rows: rows});
+ it('should pass the callback to createJob', function(done) {
+ table.bigQuery.createJob = function(reqOpts, callback) {
+ assert.strictEqual(done, callback);
+ callback(); // the done fn
};
- table.getRows(function(err, rows, nextQuery, apiResponse) {
- assert.ifError(err);
- assert.deepEqual(apiResponse, {rows: [{f: [{v: 'stephen'}]}]});
- done();
- });
+ table.createLoadJob(FILE, {}, done);
});
- it('should pass nextQuery if pageToken is returned', function(done) {
- var options = {a: 'b', c: 'd'};
- var pageToken = 'token';
+ it('should optionally accept options', function(done) {
+ table.bigQuery.createJob = function(reqOpts, callback) {
+ assert.strictEqual(done, callback);
+ callback(); // the done fn
+ };
- // Set a schema so it doesn't try to refresh the metadata.
- table.metadata = {schema: {}};
+ table.createLoadJob(FILE, done);
+ });
- table.request = function(reqOpts, callback) {
- callback(null, {pageToken: pageToken});
- };
+ it('should set the job prefix', function(done) {
+ var fakeJobPrefix = 'abc';
- table.getRows(options, function(err, rows, nextQuery) {
- assert.ifError(err);
- assert.deepEqual(nextQuery, {a: 'b', c: 'd', pageToken: pageToken});
- // Original object isn't affected.
- assert.deepEqual(options, {a: 'b', c: 'd'});
+ table.bigQuery.createJob = function(reqOpts) {
+ assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix);
+ assert.strictEqual(reqOpts.configuration.load.jobPrefix, undefined);
done();
- });
+ };
+
+ table.createLoadJob(
+ FILE,
+ {
+ jobPrefix: fakeJobPrefix,
+ },
+ assert.ifError
+ );
});
- });
- describe('insert', function() {
- var fakeInsertId = 'fake-insert-id';
+ describe('formats', function() {
+ it('should accept csv', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ var load = reqOpts.configuration.load;
+ assert.strictEqual(load.sourceFormat, 'CSV');
+ done();
+ };
- var data = [
- {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'},
- {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'},
- {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'},
- {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'},
- {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'},
- ];
+ table.createLoadJob(FILE, {format: 'csv'}, assert.ifError);
+ });
- var rawData = [
- {insertId: 1, json: data[0]},
- {insertId: 2, json: data[1]},
- {insertId: 3, json: data[2]},
- {insertId: 4, json: data[3]},
- {insertId: 5, json: data[4]},
- ];
+ it('should accept json', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ var load = reqOpts.configuration.load;
+ assert.strictEqual(load.sourceFormat, 'NEWLINE_DELIMITED_JSON');
+ done();
+ };
- var dataApiFormat = {
- rows: data.map(function(row) {
- return {
- insertId: fakeInsertId,
- json: row,
+ table.createLoadJob(FILE, {format: 'json'}, assert.ifError);
+ });
+
+ it('should accept avro', function(done) {
+ table.bigQuery.createJob = function(reqOpts) {
+ var load = reqOpts.configuration.load;
+ assert.strictEqual(load.sourceFormat, 'AVRO');
+ done();
};
- }),
- };
- beforeEach(function() {
- fakeUuid.v4 = function() {
- return fakeInsertId;
- };
+ table.createLoadJob(FILE, {format: 'avro'}, assert.ifError);
+ });
});
+ });
- it('should throw an error if rows is empty', function() {
- assert.throws(function() {
- table.insert([]);
- }, /You must provide at least 1 row to be inserted\./);
- });
+ describe('createQueryJob', function() {
+ it('should call through to dataset#createQueryJob', function(done) {
+ var fakeOptions = {};
+ var fakeReturnValue = {};
- it('should save data', function(done) {
- table.request = function(reqOpts) {
- assert.equal(reqOpts.method, 'POST');
- assert.equal(reqOpts.uri, '/insertAll');
- assert.deepEqual(reqOpts.json, dataApiFormat);
- done();
+ table.dataset.createQueryJob = function(options, callback) {
+ assert.strictEqual(options, fakeOptions);
+ setImmediate(callback);
+ return fakeReturnValue;
};
- table.insert(data, done);
+ var returnVal = table.createQueryJob(fakeOptions, done);
+ assert.strictEqual(returnVal, fakeReturnValue);
});
+ });
- it('should generate insertId', function(done) {
- table.request = function(reqOpts) {
- assert.strictEqual(reqOpts.json.rows[0].insertId, fakeInsertId);
+ describe('createQueryStream', function() {
+ it('should call datasetInstance.createQueryStream()', function(done) {
+ table.dataset.createQueryStream = function(a) {
+ assert.equal(a, 'a');
done();
};
- table.insert([data[0]], done);
+ table.createQueryStream('a');
});
- it('should execute callback with API response', function(done) {
- var apiResponse = {insertErrors: []};
+ it('should return whatever dataset.createQueryStream returns', function() {
+ var fakeValue = 123;
- table.request = function(reqOpts, callback) {
- callback(null, apiResponse);
+ table.dataset.createQueryStream = function() {
+ return fakeValue;
};
- table.insert(data, function(err, apiResponse_) {
- assert.ifError(err);
- assert.strictEqual(apiResponse_, apiResponse);
- done();
- });
- });
+ var val = table.createQueryStream();
- it('should execute callback with error & API response', function(done) {
- var error = new Error('Error.');
- var apiResponse = {};
+ assert.strictEqual(val, fakeValue);
+ });
+ });
- table.request = function(reqOpts, callback) {
- callback(error, apiResponse);
- };
+ describe('createWriteStream', function() {
+ describe('formats', function() {
+ it('should accept csv', function(done) {
+ makeWritableStreamOverride = function(stream, options) {
+ var load = options.metadata.configuration.load;
+ assert.equal(load.sourceFormat, 'CSV');
+ done();
+ };
- table.insert(data, function(err, apiResponse_) {
- assert.strictEqual(err, error);
- assert.strictEqual(apiResponse_, apiResponse);
- done();
+ table.createWriteStream('csv').emit('writing');
});
- });
-
- it('should return partial failures', function(done) {
- var row0Error = {message: 'Error.', reason: 'notFound'};
- var row1Error = {message: 'Error.', reason: 'notFound'};
- table.request = function(reqOpts, callback) {
- callback(null, {
- insertErrors: [
- {index: 0, errors: [row0Error]},
- {index: 1, errors: [row1Error]},
- ],
- });
- };
+ it('should accept json', function(done) {
+ makeWritableStreamOverride = function(stream, options) {
+ var load = options.metadata.configuration.load;
+ assert.equal(load.sourceFormat, 'NEWLINE_DELIMITED_JSON');
+ done();
+ };
- table.insert(data, function(err) {
- assert.strictEqual(err.name, 'PartialFailureError');
+ table.createWriteStream('json').emit('writing');
+ });
- assert.deepEqual(err.errors, [
- {
- row: dataApiFormat.rows[0].json,
- errors: [row0Error],
- },
- {
- row: dataApiFormat.rows[1].json,
- errors: [row1Error],
- },
- ]);
+ it('should accept avro', function(done) {
+ makeWritableStreamOverride = function(stream, options) {
+ var load = options.metadata.configuration.load;
+ assert.equal(load.sourceFormat, 'AVRO');
+ done();
+ };
- done();
+ table.createWriteStream('avro').emit('writing');
});
});
- it('should insert raw data', function(done) {
- table.request = function(reqOpts) {
- assert.equal(reqOpts.method, 'POST');
- assert.equal(reqOpts.uri, '/insertAll');
- assert.deepEqual(reqOpts.json, {rows: rawData});
- assert.strictEqual(reqOpts.json.raw, undefined);
- done();
- };
-
- var opts = {raw: true};
- table.insert(rawData, opts, done);
- });
+ it('should format a schema', function(done) {
+ var expectedSchema = {};
- it('should accept options', function(done) {
- var opts = {
- ignoreUnknownValues: true,
- skipInvalidRows: true,
- templateSuffix: 'test',
+ tableOverrides.createSchemaFromString_ = function(string) {
+ assert.strictEqual(string, SCHEMA_STRING);
+ return expectedSchema;
};
- table.request = function(reqOpts) {
- assert.equal(reqOpts.method, 'POST');
- assert.equal(reqOpts.uri, '/insertAll');
-
- assert.strictEqual(
- reqOpts.json.ignoreUnknownValues,
- opts.ignoreUnknownValues
- );
- assert.strictEqual(reqOpts.json.skipInvalidRows, opts.skipInvalidRows);
- assert.strictEqual(reqOpts.json.templateSuffix, opts.templateSuffix);
-
- assert.deepEqual(reqOpts.json.rows, dataApiFormat.rows);
+ makeWritableStreamOverride = function(stream, options) {
+ var load = options.metadata.configuration.load;
+ assert.deepEqual(load.schema, expectedSchema);
done();
};
- table.insert(data, opts, done);
+ table.createWriteStream({schema: SCHEMA_STRING}).emit('writing');
});
- describe('create table and retry', function() {
- var OPTIONS = {
- autoCreate: true,
- schema: SCHEMA_STRING,
- };
+ it('should throw if a given source format is not recognized', function() {
+ assert.throws(function() {
+ table.createWriteStream('zip');
+ }, /Source format not recognized/);
- var _setTimeout;
- var _random;
+ assert.throws(function() {
+ table.createWriteStream({
+ sourceFormat: 'zip',
+ });
+ }, /Source format not recognized/);
- before(function() {
- _setTimeout = global.setTimeout;
- _random = Math.random;
+ assert.doesNotThrow(function() {
+ table.createWriteStream();
+ table.createWriteStream({});
});
+ });
- beforeEach(function() {
- global.setTimeout = function(callback) {
- callback();
- };
+ it('should return a stream', function() {
+ assert(table.createWriteStream() instanceof stream.Stream);
+ });
- Math.random = _random;
+ describe('writable stream', function() {
+ var fakeJobId;
- table.request = function(reqOpts, callback) {
- callback({code: 404});
- };
+ beforeEach(function() {
+ fakeJobId = uuid.v4();
- table.create = function(reqOpts, callback) {
- callback(null);
+ fakeUuid.v4 = function() {
+ return fakeJobId;
};
});
- after(function() {
- global.setTimeout = _setTimeout;
- Math.random = _random;
- });
+ it('should make a writable stream when written to', function(done) {
+ var stream;
- it('should throw if autoCreate is set with no schema', function() {
- var options = {
- autoCreate: true,
+ makeWritableStreamOverride = function(s) {
+ assert.equal(s, stream);
+ done();
};
- assert.throws(function() {
- table.insert(data, options);
- }, /Schema must be provided in order to auto-create Table\./);
+ stream = table.createWriteStream();
+ stream.emit('writing');
});
- it('should not include the schema in the insert request', function(done) {
- table.request = function(reqOpts) {
- assert.strictEqual(reqOpts.json.schema, undefined);
- assert.strictEqual(reqOpts.json.autoCreate, undefined);
+ it('should pass the connection', function(done) {
+ makeWritableStreamOverride = function(stream, options) {
+ assert.deepEqual(options.connection, table.connection);
done();
};
- table.insert(data, OPTIONS, assert.ifError);
+ table.createWriteStream().emit('writing');
});
- it('should set a timeout to create the table', function(done) {
- var fakeRandomValue = Math.random();
-
- Math.random = function() {
- return fakeRandomValue;
- };
-
- global.setTimeout = function(callback, delay) {
- assert.strictEqual(delay, fakeRandomValue * 60000);
- callback();
- };
-
- table.create = function(reqOpts) {
- assert.strictEqual(reqOpts.schema, SCHEMA_STRING);
+ it('should pass extended metadata', function(done) {
+ makeWritableStreamOverride = function(stream, options) {
+ assert.deepEqual(options.metadata, {
+ configuration: {
+ load: {
+ a: 'b',
+ c: 'd',
+ destinationTable: {
+ projectId: table.bigQuery.projectId,
+ datasetId: table.dataset.id,
+ tableId: table.id,
+ },
+ },
+ },
+ jobReference: {
+ projectId: table.bigQuery.projectId,
+ jobId: fakeJobId,
+ },
+ });
done();
};
- table.insert(data, OPTIONS, assert.ifError);
+ table.createWriteStream({a: 'b', c: 'd'}).emit('writing');
});
- it('should return table creation errors', function(done) {
- var error = new Error('err.');
- var response = {};
-
- table.create = function(reqOpts, callback) {
- callback(error, null, response);
+ it('should pass the correct request uri', function(done) {
+ makeWritableStreamOverride = function(stream, options) {
+ var uri =
+ 'https://www.googleapis.com/upload/bigquery/v2/projects/' +
+ table.bigQuery.projectId +
+ '/jobs';
+ assert.equal(options.request.uri, uri);
+ done();
};
- table.insert(data, OPTIONS, function(err, resp) {
- assert.strictEqual(err, error);
- assert.strictEqual(resp, response);
- done();
- });
+ table.createWriteStream().emit('writing');
});
- it('should ignore 409 errors', function(done) {
- table.create = function(reqOpts, callback) {
- callback({code: 409});
- };
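+ // The prefix should be prepended to the generated job ID and stripped from the
+ // load configuration before the metadata is assembled.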
+ it('should respect the jobPrefix option', function(done) {
+ var jobPrefix = 'abc-';
+ var expectedJobId = jobPrefix + fakeJobId;
- var timeouts = 0;
- global.setTimeout = function(callback, delay) {
- if (++timeouts === 2) {
- assert.strictEqual(delay, 60000);
- done();
- return;
- }
+ makeWritableStreamOverride = function(stream, options) {
+ var jobId = options.metadata.jobReference.jobId;
+ assert.strictEqual(jobId, expectedJobId);
- callback(null);
+ var config = options.metadata.configuration.load;
+ assert.strictEqual(config.jobPrefix, undefined);
+
+ done();
};
- table.insert(data, OPTIONS, assert.ifError);
+ table.createWriteStream({jobPrefix: jobPrefix}).emit('writing');
});
- it('should retry the insert', function(done) {
- var response = {};
- var attempts = 0;
-
- table.request = function(reqOpts, callback) {
- assert.equal(reqOpts.method, 'POST');
- assert.equal(reqOpts.uri, '/insertAll');
- assert.deepEqual(reqOpts.json, dataApiFormat);
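+ // When the underlying upload completes, the stream should emit a Job built
+ // from the jobId in the returned metadata, with that metadata attached.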
+ it('should create a job and emit it with complete', function(done) {
+ var jobId = 'job-id';
+ var metadata = {jobReference: {jobId: jobId}, a: 'b', c: 'd'};
- if (++attempts === 2) {
- callback(null, response);
- return;
- }
+ table.bigQuery.job = function(id) {
+ return {id: id};
+ };
- callback({code: 404});
+ makeWritableStreamOverride = function(stream, options, callback) {
+ callback(metadata);
};
- table.insert(data, OPTIONS, function(err, resp) {
- assert.ifError(err);
- assert.strictEqual(resp, response);
- done();
- });
+ table
+ .createWriteStream()
+ .on('complete', function(job) {
+ assert.equal(job.id, jobId);
+ assert.deepEqual(job.metadata, metadata);
+ done();
+ })
+ .emit('writing');
});
});
});
- describe('load', function() {
+ describe('extract', function() {
var fakeJob;
beforeEach(function() {
fakeJob = new events.EventEmitter();
- table.startLoad = function(source, metadata, callback) {
+ table.createExtractJob = function(destination, metadata, callback) {
callback(null, fakeJob);
};
});
- it('should pass the arguments to startLoad', function(done) {
- var fakeSource = {};
+ it('should pass the arguments to createExtractJob', function(done) {
+ var fakeDestination = {};
var fakeMetadata = {};
- table.startLoad = function(source, metadata) {
- assert.strictEqual(source, fakeSource);
+ table.createExtractJob = function(destination, metadata) {
+ assert.strictEqual(destination, fakeDestination);
assert.strictEqual(metadata, fakeMetadata);
done();
};
- table.load(fakeSource, fakeMetadata, assert.ifError);
+ table.extract(fakeDestination, fakeMetadata, assert.ifError);
});
it('should optionally accept metadata', function(done) {
- table.startLoad = function(source, metadata) {
+ table.createExtractJob = function(destination, metadata) {
assert.deepEqual(metadata, {});
done();
};
- table.load({}, assert.ifError);
+ table.extract({}, assert.ifError);
});
- it('should return any startLoad errors', function(done) {
+ it('should return any createExtractJob errors', function(done) {
var error = new Error('err');
var response = {};
- table.startLoad = function(source, metadata, callback) {
+ table.createExtractJob = function(destination, metadata, callback) {
callback(error, null, response);
};
- table.load({}, function(err, resp) {
+ table.extract({}, function(err, resp) {
assert.strictEqual(err, error);
assert.strictEqual(resp, response);
done();
@@ -1316,7 +1399,7 @@ describe('BigQuery/Table', function() {
it('should return any job errors', function(done) {
var error = new Error('err');
- table.load({}, function(err) {
+ table.extract({}, function(err) {
assert.strictEqual(err, error);
done();
});
@@ -1327,653 +1410,570 @@ describe('BigQuery/Table', function() {
it('should return the metadata on complete', function(done) {
var metadata = {};
- table.load({}, function(err, resp) {
+ table.extract({}, function(err, resp) {
assert.ifError(err);
- assert.strictEqual(resp, metadata);
- done();
- });
-
- fakeJob.emit('complete', metadata);
- });
- });
-
- describe('query', function() {
- it('should pass args through to datasetInstance.query()', function(done) {
- table.dataset.query = function(a, b) {
- assert.equal(a, 'a');
- assert.equal(b, 'b');
- done();
- };
-
- table.query('a', 'b');
- });
- });
-
- describe('setMetadata', function() {
- it('should call ServiceObject#setMetadata', function(done) {
- var fakeMetadata = {};
- var formattedMetadata = {};
-
- Table.formatMetadata_ = function(data) {
- assert.strictEqual(data, fakeMetadata);
- return formattedMetadata;
- };
-
- FakeServiceObject.prototype.setMetadata = function(metadata, callback) {
- assert.strictEqual(this, table);
- assert.strictEqual(metadata, formattedMetadata);
- assert.strictEqual(callback, done);
- callback(); // the done fn
- };
-
- table.setMetadata(fakeMetadata, done);
- });
- });
-
- describe('startCopy', function() {
- var DEST_TABLE;
-
- before(function() {
- DEST_TABLE = new Table(DATASET, 'destination-table');
- });
-
- it('should throw if a destination is not a Table', function() {
- assert.throws(function() {
- table.startCopy();
- }, /Destination must be a Table/);
-
- assert.throws(function() {
- table.startCopy({});
- }, /Destination must be a Table/);
-
- assert.throws(function() {
- table.startCopy(function() {});
- }, /Destination must be a Table/);
- });
-
- it('should send correct request to the API', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- assert.deepEqual(reqOpts, {
- configuration: {
- copy: {
- a: 'b',
- c: 'd',
- destinationTable: {
- datasetId: DEST_TABLE.dataset.id,
- projectId: DEST_TABLE.bigQuery.projectId,
- tableId: DEST_TABLE.id,
- },
- sourceTable: {
- datasetId: table.dataset.id,
- projectId: table.bigQuery.projectId,
- tableId: table.id,
- },
- },
- },
- });
-
+ assert.strictEqual(resp, metadata);
done();
- };
+ });
- table.startCopy(DEST_TABLE, {a: 'b', c: 'd'}, assert.ifError);
+ fakeJob.emit('complete', metadata);
});
+ });
- it('should accept a job prefix', function(done) {
- var fakeJobPrefix = 'abc-';
- var options = {
- jobPrefix: fakeJobPrefix,
- };
-
- table.bigQuery.createJob = function(reqOpts, callback) {
- assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix);
- assert.strictEqual(reqOpts.configuration.copy.jobPrefix, undefined);
- callback(); // the done fn
+ describe('getRows', function() {
+ it('should accept just a callback', function(done) {
+ table.request = function(reqOpts, callback) {
+ callback(null, {});
};
-
- table.startCopy(DEST_TABLE, options, done);
+ table.getRows(done);
});
- it('should pass the callback to createJob', function(done) {
- table.bigQuery.createJob = function(reqOpts, callback) {
- assert.strictEqual(done, callback);
- callback(); // the done fn
+ it('should make correct API request', function(done) {
+ var options = {a: 'b', c: 'd'};
+
+ table.request = function(reqOpts, callback) {
+ assert.strictEqual(reqOpts.uri, '/data');
+ assert.strictEqual(reqOpts.qs, options);
+ callback(null, {});
};
- table.startCopy(DEST_TABLE, {}, done);
+ table.getRows(options, done);
});
- it('should optionally accept metadata', function(done) {
- table.bigQuery.createJob = function(reqOpts, callback) {
- assert.strictEqual(done, callback);
- callback(); // the done fn
- };
+ it('should execute callback with error & API response', function(done) {
+ var apiResponse = {};
+ var error = new Error('Error.');
- table.startCopy(DEST_TABLE, done);
- });
- });
+ table.request = function(reqOpts, callback) {
+ callback(error, apiResponse);
+ };
- describe('startCopyFrom', function() {
- var SOURCE_TABLE;
+ table.getRows(function(err, rows, nextQuery, apiResponse_) {
+ assert.strictEqual(err, error);
+ assert.strictEqual(rows, null);
+ assert.strictEqual(nextQuery, null);
+ assert.strictEqual(apiResponse_, apiResponse);
- before(function() {
- SOURCE_TABLE = new Table(DATASET, 'source-table');
+ done();
+ });
});
- it('should throw if a source is not a Table', function() {
- assert.throws(function() {
- table.startCopyFrom(['table']);
- }, /Source must be a Table/);
+ describe('refreshing metadata', function() {
+ // Using "Stephen" so you know who to blame for these tests.
+ var rows = [{f: [{v: 'stephen'}]}];
+ var schema = {fields: [{name: 'name', type: 'string'}]};
+ var mergedRows = [{name: 'stephen'}];
- assert.throws(function() {
- table.startCopyFrom([SOURCE_TABLE, 'table']);
- }, /Source must be a Table/);
+ beforeEach(function() {
+ table.request = function(reqOpts, callback) {
+ // Respond with a row, so it grabs the schema.
+ // Use setImmediate so the test can overwrite getMetadata before the response is handled.
+ setImmediate(callback, null, {rows: rows});
+ };
- assert.throws(function() {
- table.startCopyFrom({});
- }, /Source must be a Table/);
+ table.bigQuery.mergeSchemaWithRows_ = function(schema_, rows_) {
+ assert.strictEqual(schema_, schema);
+ assert.strictEqual(rows_, rows);
+ return mergedRows;
+ };
+ });
- assert.throws(function() {
- table.startCopyFrom(function() {});
- }, /Source must be a Table/);
- });
+ it('should refresh', function(done) {
+ // Step 1: makes the request.
+ table.getRows(responseHandler);
- it('should send correct request to the API', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- assert.deepEqual(reqOpts, {
- configuration: {
- copy: {
- a: 'b',
- c: 'd',
- destinationTable: {
- datasetId: table.dataset.id,
- projectId: table.bigQuery.projectId,
- tableId: table.id,
- },
- sourceTables: [
- {
- datasetId: SOURCE_TABLE.dataset.id,
- projectId: SOURCE_TABLE.bigQuery.projectId,
- tableId: SOURCE_TABLE.id,
- },
- ],
- },
- },
- });
+ // Step 2: refreshes the metadata to pull down the schema.
+ table.getMetadata = function(callback) {
+ table.metadata = {schema: schema};
+ callback();
+ };
- done();
- };
+ // Step 3: execute original complete handler with schema-merged rows.
+ function responseHandler(err, rows) {
+ assert.ifError(err);
+ assert.strictEqual(rows, mergedRows);
+ done();
+ }
+ });
- table.startCopyFrom(SOURCE_TABLE, {a: 'b', c: 'd'}, assert.ifError);
- });
+ it('should execute callback from refreshing metadata', function(done) {
+ var apiResponse = {};
+ var error = new Error('Error.');
- it('should accept multiple source tables', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- assert.deepEqual(reqOpts.configuration.copy.sourceTables, [
- {
- datasetId: SOURCE_TABLE.dataset.id,
- projectId: SOURCE_TABLE.bigQuery.projectId,
- tableId: SOURCE_TABLE.id,
- },
- {
- datasetId: SOURCE_TABLE.dataset.id,
- projectId: SOURCE_TABLE.bigQuery.projectId,
- tableId: SOURCE_TABLE.id,
- },
- ]);
+ // Step 1: makes the request.
+ table.getRows(responseHandler);
- done();
- };
+ // Step 2: refreshes the metadata to pull down the schema.
+ table.getMetadata = function(callback) {
+ callback(error, {}, apiResponse);
+ };
- table.startCopyFrom([SOURCE_TABLE, SOURCE_TABLE], assert.ifError);
+ // Step 3: execute original complete handler with schema-merged rows.
+ function responseHandler(err, rows, nextQuery, apiResponse_) {
+ assert.strictEqual(err, error);
+ assert.strictEqual(rows, null);
+ assert.strictEqual(nextQuery, null);
+ assert.strictEqual(apiResponse_, apiResponse);
+ done();
+ }
+ });
});
- it('should accept a job prefix', function(done) {
- var fakeJobPrefix = 'abc-';
- var options = {
- jobPrefix: fakeJobPrefix,
+ it('should return schema-merged rows', function(done) {
+ var rows = [{f: [{v: 'stephen'}]}];
+ var schema = {fields: [{name: 'name', type: 'string'}]};
+ var merged = [{name: 'stephen'}];
+
+ table.metadata = {schema: schema};
+
+ table.request = function(reqOpts, callback) {
+ callback(null, {rows: rows});
};
- table.bigQuery.createJob = function(reqOpts, callback) {
- assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix);
- assert.strictEqual(reqOpts.configuration.copy.jobPrefix, undefined);
- callback(); // the done fn
+ table.bigQuery.mergeSchemaWithRows_ = function(schema_, rows_) {
+ assert.strictEqual(schema_, schema);
+ assert.strictEqual(rows_, rows);
+ return merged;
};
- table.startCopyFrom(SOURCE_TABLE, options, done);
+ table.getRows(function(err, rows) {
+ assert.ifError(err);
+ assert.strictEqual(rows, merged);
+ done();
+ });
});
- it('should pass the callback to createJob', function(done) {
- table.bigQuery.createJob = function(reqOpts, callback) {
- assert.strictEqual(done, callback);
- callback(); // the done fn
+ it('should return apiResponse in callback', function(done) {
+ var rows = [{f: [{v: 'stephen'}]}];
+ var schema = {fields: [{name: 'name', type: 'string'}]};
+ table.metadata = {schema: schema};
+
+ table.request = function(reqOpts, callback) {
+ callback(null, {rows: rows});
};
- table.startCopyFrom(SOURCE_TABLE, {}, done);
+ table.getRows(function(err, rows, nextQuery, apiResponse) {
+ assert.ifError(err);
+ assert.deepEqual(apiResponse, {rows: [{f: [{v: 'stephen'}]}]});
+ done();
+ });
});
- it('should optionally accept options', function(done) {
- table.bigQuery.createJob = function(reqOpts, callback) {
- assert.strictEqual(done, callback);
- callback(); // the done fn
+ it('should pass nextQuery if pageToken is returned', function(done) {
+ var options = {a: 'b', c: 'd'};
+ var pageToken = 'token';
+
+ // Set a schema so it doesn't try to refresh the metadata.
+ table.metadata = {schema: {}};
+
+ table.request = function(reqOpts, callback) {
+ callback(null, {pageToken: pageToken});
};
- table.startCopyFrom(SOURCE_TABLE, done);
+ table.getRows(options, function(err, rows, nextQuery) {
+ assert.ifError(err);
+ assert.deepEqual(nextQuery, {a: 'b', c: 'd', pageToken: pageToken});
+ // Original object isn't affected.
+ assert.deepEqual(options, {a: 'b', c: 'd'});
+ done();
+ });
});
});
- describe('startExtract', function() {
- var FILE = {
- name: 'file-name.json',
- bucket: {
- name: 'bucket-name',
- },
- };
+ describe('insert', function() {
+ var fakeInsertId = 'fake-insert-id';
- beforeEach(function() {
- isCustomTypeOverride = function() {
- return true;
- };
+ var data = [
+ {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'},
+ {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'},
+ {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'},
+ {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'},
+ {state: 'MI', gender: 'M', year: '2015', name: 'Berkley', count: '0'},
+ ];
- table.bigQuery.job = function(id) {
- return {id: id};
- };
+ var rawData = [
+ {insertId: 1, json: data[0]},
+ {insertId: 2, json: data[1]},
+ {insertId: 3, json: data[2]},
+ {insertId: 4, json: data[3]},
+ {insertId: 5, json: data[4]},
+ ];
+
+ var dataApiFormat = {
+ rows: data.map(function(row) {
+ return {
+ insertId: fakeInsertId,
+ json: row,
+ };
+ }),
+ };
- table.bigQuery.createJob = function() {};
+ beforeEach(function() {
+ fakeUuid.v4 = function() {
+ return fakeInsertId;
+ };
});
- it('should call createJob correctly', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- assert.deepEqual(reqOpts.configuration.extract.sourceTable, {
- datasetId: table.dataset.id,
- projectId: table.bigQuery.projectId,
- tableId: table.id,
- });
+ it('should throw an error if rows is empty', function() {
+ assert.throws(function() {
+ table.insert([]);
+ }, /You must provide at least 1 row to be inserted\./);
+ });
+ it('should save data', function(done) {
+ table.request = function(reqOpts) {
+ assert.equal(reqOpts.method, 'POST');
+ assert.equal(reqOpts.uri, '/insertAll');
+ assert.deepEqual(reqOpts.json, dataApiFormat);
done();
};
- table.startExtract(FILE, assert.ifError);
+ table.insert(data, done);
});
- it('should accept just a destination and a callback', function(done) {
- table.bigQuery.createJob = function(reqOpts, callback) {
- callback(null, {jobReference: {jobId: 'job-id'}});
+ it('should generate insertId', function(done) {
+ table.request = function(reqOpts) {
+ assert.strictEqual(reqOpts.json.rows[0].insertId, fakeInsertId);
+ done();
};
- table.startExtract(FILE, done);
+ table.insert([data[0]], done);
});
- describe('formats', function() {
- it('should accept csv', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- var extract = reqOpts.configuration.extract;
- assert.equal(extract.destinationFormat, 'CSV');
- done();
- };
-
- table.startExtract(FILE, {format: 'csv'}, assert.ifError);
- });
-
- it('should accept json', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- var extract = reqOpts.configuration.extract;
- assert.equal(extract.destinationFormat, 'NEWLINE_DELIMITED_JSON');
- done();
- };
-
- table.startExtract(FILE, {format: 'json'}, assert.ifError);
- });
+ it('should execute callback with API response', function(done) {
+ var apiResponse = {insertErrors: []};
- it('should accept avro', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- var extract = reqOpts.configuration.extract;
- assert.equal(extract.destinationFormat, 'AVRO');
- done();
- };
+ table.request = function(reqOpts, callback) {
+ callback(null, apiResponse);
+ };
- table.startExtract(FILE, {format: 'avro'}, assert.ifError);
+ table.insert(data, function(err, apiResponse_) {
+ assert.ifError(err);
+ assert.strictEqual(apiResponse_, apiResponse);
+ done();
});
});
- it('should parse out full gs:// urls from files', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- assert.deepEqual(reqOpts.configuration.extract.destinationUris, [
- 'gs://' + FILE.bucket.name + '/' + FILE.name,
- ]);
- done();
- };
-
- table.startExtract(FILE, assert.ifError);
- });
+ it('should execute callback with error & API response', function(done) {
+ var error = new Error('Error.');
+ var apiResponse = {};
- it('should check if a destination is a File', function(done) {
- isCustomTypeOverride = function(dest, type) {
- assert.strictEqual(dest, FILE);
- assert.strictEqual(type, 'storage/file');
- setImmediate(done);
- return true;
+ table.request = function(reqOpts, callback) {
+ callback(error, apiResponse);
};
- table.startExtract(FILE, assert.ifError);
+ table.insert(data, function(err, apiResponse_) {
+ assert.strictEqual(err, error);
+ assert.strictEqual(apiResponse_, apiResponse);
+ done();
+ });
});
- it('should throw if a destination is not a File', function() {
- isCustomTypeOverride = function() {
- return false;
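+ // Row-level failures reported in insertErrors should surface as a
+ // PartialFailureError that maps each error back to its original row.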
+ it('should return partial failures', function(done) {
+ var row0Error = {message: 'Error.', reason: 'notFound'};
+ var row1Error = {message: 'Error.', reason: 'notFound'};
+
+ table.request = function(reqOpts, callback) {
+ callback(null, {
+ insertErrors: [
+ {index: 0, errors: [row0Error]},
+ {index: 1, errors: [row1Error]},
+ ],
+ });
};
- assert.throws(function() {
- table.startExtract({}, util.noop);
- }, /Destination must be a File object/);
+ table.insert(data, function(err) {
+ assert.strictEqual(err.name, 'PartialFailureError');
- assert.throws(function() {
- table.startExtract([FILE, {}], util.noop);
- }, /Destination must be a File object/);
+ assert.deepEqual(err.errors, [
+ {
+ row: dataApiFormat.rows[0].json,
+ errors: [row0Error],
+ },
+ {
+ row: dataApiFormat.rows[1].json,
+ errors: [row1Error],
+ },
+ ]);
+
+ done();
+ });
});
- it('should detect file format if a format is not provided', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- var destFormat = reqOpts.configuration.extract.destinationFormat;
- assert.equal(destFormat, 'NEWLINE_DELIMITED_JSON');
+ it('should insert raw data', function(done) {
+ table.request = function(reqOpts) {
+ assert.equal(reqOpts.method, 'POST');
+ assert.equal(reqOpts.uri, '/insertAll');
+ assert.deepEqual(reqOpts.json, {rows: rawData});
+ assert.strictEqual(reqOpts.json.raw, undefined);
done();
};
- table.startExtract(FILE, assert.ifError);
+ var opts = {raw: true};
+ table.insert(rawData, opts, done);
});
- it('should assign the provided format if matched', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- var extract = reqOpts.configuration.extract;
- assert.equal(extract.destinationFormat, 'CSV');
- assert.strictEqual(extract.format, undefined);
- done();
+ it('should accept options', function(done) {
+ var opts = {
+ ignoreUnknownValues: true,
+ skipInvalidRows: true,
+ templateSuffix: 'test',
};
- table.startExtract(FILE, {format: 'csv'}, assert.ifError);
- });
+ table.request = function(reqOpts) {
+ assert.equal(reqOpts.method, 'POST');
+ assert.equal(reqOpts.uri, '/insertAll');
- it('should throw if a provided format is not recognized', function() {
- assert.throws(function() {
- table.startExtract(FILE, {format: 'zip'}, util.noop);
- }, /Destination format not recognized/);
- });
+ assert.strictEqual(
+ reqOpts.json.ignoreUnknownValues,
+ opts.ignoreUnknownValues
+ );
+ assert.strictEqual(reqOpts.json.skipInvalidRows, opts.skipInvalidRows);
+ assert.strictEqual(reqOpts.json.templateSuffix, opts.templateSuffix);
- it('should assign GZIP compression with gzip: true', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- assert.equal(reqOpts.configuration.extract.compression, 'GZIP');
- assert.strictEqual(reqOpts.configuration.extract.gzip, undefined);
+ assert.deepEqual(reqOpts.json.rows, dataApiFormat.rows);
done();
};
- table.startExtract(FILE, {gzip: true}, util.noop);
+ table.insert(data, opts, done);
});
- it('should accept a job prefix', function(done) {
- var fakeJobPrefix = 'abc-';
- var options = {
- jobPrefix: fakeJobPrefix,
+ describe('create table and retry', function() {
+ var OPTIONS = {
+ autoCreate: true,
+ schema: SCHEMA_STRING,
};
- table.bigQuery.createJob = function(reqOpts, callback) {
- assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix);
- assert.strictEqual(reqOpts.configuration.extract.jobPrefix, undefined);
- callback(); // the done fn
- };
+ var _setTimeout;
+ var _random;
- table.startExtract(FILE, options, done);
- });
+ before(function() {
+ _setTimeout = global.setTimeout;
+ _random = Math.random;
+ });
- it('should pass the callback to createJob', function(done) {
- table.bigQuery.createJob = function(reqOpts, callback) {
- assert.strictEqual(done, callback);
- callback(); // the done fn
- };
+ beforeEach(function() {
+ global.setTimeout = function(callback) {
+ callback();
+ };
- table.startExtract(FILE, {}, done);
- });
+ Math.random = _random;
- it('should optionally accept options', function(done) {
- table.bigQuery.createJob = function(reqOpts, callback) {
- assert.strictEqual(done, callback);
- callback(); // the done fn
- };
+ table.request = function(reqOpts, callback) {
+ callback({code: 404});
+ };
- table.startExtract(FILE, done);
- });
- });
+ table.create = function(reqOpts, callback) {
+ callback(null);
+ };
+ });
- describe('startLoad', function() {
- var FILEPATH = require.resolve('./testdata/testfile.json');
- var FILE = {
- name: 'file-name.json',
- bucket: {
- name: 'bucket-name',
- },
- };
+ after(function() {
+ global.setTimeout = _setTimeout;
+ Math.random = _random;
+ });
- var JOB = {
- id: 'foo',
- metadata: {},
- };
+ it('should throw if autoCreate is set with no schema', function() {
+ var options = {
+ autoCreate: true,
+ };
- beforeEach(function() {
- isCustomTypeOverride = function() {
- return true;
- };
- });
+ assert.throws(function() {
+ table.insert(data, options);
+ }, /Schema must be provided in order to auto-create Table\./);
+ });
+
+ it('should not include the schema in the insert request', function(done) {
+ table.request = function(reqOpts) {
+ assert.strictEqual(reqOpts.json.schema, undefined);
+ assert.strictEqual(reqOpts.json.autoCreate, undefined);
+ done();
+ };
+
+ table.insert(data, OPTIONS, assert.ifError);
+ });
+
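+ // After the stubbed 404 from insertAll (see beforeEach), the table should be
+ // created with the provided schema after a randomized delay of up to 60 seconds.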
+ it('should set a timeout to create the table', function(done) {
+ var fakeRandomValue = Math.random();
+
+ Math.random = function() {
+ return fakeRandomValue;
+ };
- it('should accept just a File and a callback', function(done) {
- table.createWriteStream = function() {
- var ws = new stream.Writable();
- setImmediate(function() {
- ws.emit('complete', JOB);
- ws.end();
- });
- return ws;
- };
+ global.setTimeout = function(callback, delay) {
+ assert.strictEqual(delay, fakeRandomValue * 60000);
+ callback();
+ };
- table.startLoad(FILEPATH, function(err, job, resp) {
- assert.strictEqual(err, null);
- assert.strictEqual(job, JOB);
- assert.strictEqual(resp, JOB.metadata);
- done();
+ table.create = function(reqOpts) {
+ assert.strictEqual(reqOpts.schema, SCHEMA_STRING);
+ done();
+ };
+
+ table.insert(data, OPTIONS, assert.ifError);
});
- });
- it('should return a stream when a string is given', function() {
- table.createWriteStream = function() {
- return new stream.Writable();
- };
+ it('should return table creation errors', function(done) {
+ var error = new Error('err.');
+ var response = {};
- assert(table.startLoad(FILEPATH) instanceof stream.Stream);
- });
+ table.create = function(reqOpts, callback) {
+ callback(error, null, response);
+ };
- it('should infer the file format from the given filepath', function(done) {
- table.createWriteStream = function(metadata) {
- assert.equal(metadata.sourceFormat, 'NEWLINE_DELIMITED_JSON');
- var ws = new stream.Writable();
- setImmediate(function() {
- ws.emit('complete', JOB);
- ws.end();
+ table.insert(data, OPTIONS, function(err, resp) {
+ assert.strictEqual(err, error);
+ assert.strictEqual(resp, response);
+ done();
});
- return ws;
- };
+ });
- table.startLoad(FILEPATH, done);
- });
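+ // A 409 from table.create means the table already exists; the error should be
+ // ignored and the next attempt scheduled with a flat 60 second delay.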
+ it('should ignore 409 errors', function(done) {
+ table.create = function(reqOpts, callback) {
+ callback({code: 409});
+ };
- it('should execute callback with error from writestream', function(done) {
- var error = new Error('Error.');
+ var timeouts = 0;
+ global.setTimeout = function(callback, delay) {
+ if (++timeouts === 2) {
+ assert.strictEqual(delay, 60000);
+ done();
+ return;
+ }
- table.createWriteStream = function(metadata) {
- assert.equal(metadata.sourceFormat, 'NEWLINE_DELIMITED_JSON');
- var ws = new stream.Writable();
- setImmediate(function() {
- ws.emit('error', error);
- ws.end();
- });
- return ws;
- };
+ callback(null);
+ };
- table.startLoad(FILEPATH, function(err) {
- assert.strictEqual(err, error);
- done();
+ table.insert(data, OPTIONS, assert.ifError);
});
- });
-
- it('should not infer the file format if one is given', function(done) {
- table.createWriteStream = function(metadata) {
- assert.equal(metadata.sourceFormat, 'CSV');
- var ws = new stream.Writable();
- setImmediate(function() {
- ws.emit('complete', JOB);
- ws.end();
- });
- return ws;
- };
- table.startLoad(FILEPATH, {sourceFormat: 'CSV'}, done);
- });
+ it('should retry the insert', function(done) {
+ var response = {};
+ var attempts = 0;
- it('should check if a destination is a File', function(done) {
- isCustomTypeOverride = function(dest, type) {
- assert.strictEqual(dest, FILE);
- assert.strictEqual(type, 'storage/file');
- setImmediate(done);
- return true;
- };
+ table.request = function(reqOpts, callback) {
+ assert.equal(reqOpts.method, 'POST');
+ assert.equal(reqOpts.uri, '/insertAll');
+ assert.deepEqual(reqOpts.json, dataApiFormat);
- table.startLoad(FILE, assert.ifError);
- });
+ if (++attempts === 2) {
+ callback(null, response);
+ return;
+ }
- it('should throw if a File object is not provided', function() {
- isCustomTypeOverride = function() {
- return false;
- };
+ callback({code: 404});
+ };
- assert.throws(function() {
- table.startLoad({});
- }, /Source must be a File object/);
+ table.insert(data, OPTIONS, function(err, resp) {
+ assert.ifError(err);
+ assert.strictEqual(resp, response);
+ done();
+ });
+ });
});
+ });
- it('should convert File objects to gs:// urls', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- var sourceUri = reqOpts.configuration.load.sourceUris[0];
- assert.equal(sourceUri, 'gs://' + FILE.bucket.name + '/' + FILE.name);
- done();
- };
+ describe('load', function() {
+ var fakeJob;
- table.startLoad(FILE, assert.ifError);
+ beforeEach(function() {
+ fakeJob = new events.EventEmitter();
+ table.createLoadJob = function(source, metadata, callback) {
+ callback(null, fakeJob);
+ };
});
- it('should infer the file format from a File object', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- var sourceFormat = reqOpts.configuration.load.sourceFormat;
- assert.equal(sourceFormat, 'NEWLINE_DELIMITED_JSON');
+ it('should pass the arguments to createLoadJob', function(done) {
+ var fakeSource = {};
+ var fakeMetadata = {};
+
+ table.createLoadJob = function(source, metadata) {
+ assert.strictEqual(source, fakeSource);
+ assert.strictEqual(metadata, fakeMetadata);
done();
};
- table.startLoad(FILE, assert.ifError);
+ table.load(fakeSource, fakeMetadata, assert.ifError);
});
- it('should not override a provided format with a File', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- var sourceFormat = reqOpts.configuration.load.sourceFormat;
- assert.equal(sourceFormat, 'NEWLINE_DELIMITED_JSON');
+ it('should optionally accept metadata', function(done) {
+ table.createLoadJob = function(source, metadata) {
+ assert.deepEqual(metadata, {});
done();
};
- table.startLoad(
- FILE,
- {
- sourceFormat: 'NEWLINE_DELIMITED_JSON',
- },
- assert.ifError
- );
+ table.load({}, assert.ifError);
});
- it('should pass the callback to createJob', function(done) {
- table.bigQuery.createJob = function(reqOpts, callback) {
- assert.strictEqual(done, callback);
- callback(); // the done fn
- };
-
- table.startLoad(FILE, {}, done);
- });
+ it('should return any createLoadJob errors', function(done) {
+ var error = new Error('err');
+ var response = {};
- it('should optionally accept options', function(done) {
- table.bigQuery.createJob = function(reqOpts, callback) {
- assert.strictEqual(done, callback);
- callback(); // the done fn
+ table.createLoadJob = function(source, metadata, callback) {
+ callback(error, null, response);
};
- table.startLoad(FILE, done);
+ table.load({}, function(err, resp) {
+ assert.strictEqual(err, error);
+ assert.strictEqual(resp, response);
+ done();
+ });
});
- it('should set the job prefix', function(done) {
- var fakeJobPrefix = 'abc';
+ it('should return any job errors', function(done) {
+ var error = new Error('err');
- table.bigQuery.createJob = function(reqOpts) {
- assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix);
- assert.strictEqual(reqOpts.configuration.load.jobPrefix, undefined);
+ table.load({}, function(err) {
+ assert.strictEqual(err, error);
done();
- };
+ });
- table.startLoad(
- FILE,
- {
- jobPrefix: fakeJobPrefix,
- },
- assert.ifError
- );
+ fakeJob.emit('error', error);
});
- describe('formats', function() {
- it('should accept csv', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- var load = reqOpts.configuration.load;
- assert.strictEqual(load.sourceFormat, 'CSV');
- done();
- };
+ it('should return the metadata on complete', function(done) {
+ var metadata = {};
- table.startLoad(FILE, {format: 'csv'}, assert.ifError);
+ table.load({}, function(err, resp) {
+ assert.ifError(err);
+ assert.strictEqual(resp, metadata);
+ done();
});
- it('should accept json', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- var load = reqOpts.configuration.load;
- assert.strictEqual(load.sourceFormat, 'NEWLINE_DELIMITED_JSON');
- done();
- };
-
- table.startLoad(FILE, {format: 'json'}, assert.ifError);
- });
+ fakeJob.emit('complete', metadata);
+ });
+ });
- it('should accept avro', function(done) {
- table.bigQuery.createJob = function(reqOpts) {
- var load = reqOpts.configuration.load;
- assert.strictEqual(load.sourceFormat, 'AVRO');
- done();
- };
+ describe('query', function() {
+ it('should pass args through to datasetInstance.query()', function(done) {
+ table.dataset.query = function(a, b) {
+ assert.equal(a, 'a');
+ assert.equal(b, 'b');
+ done();
+ };
- table.startLoad(FILE, {format: 'avro'}, assert.ifError);
- });
+ table.query('a', 'b');
});
});
- describe('startQuery', function() {
- it('should call through to dataset#startQuery', function(done) {
- var fakeOptions = {};
- var fakeReturnValue = {};
+ describe('setMetadata', function() {
+ it('should call ServiceObject#setMetadata', function(done) {
+ var fakeMetadata = {};
+ var formattedMetadata = {};
- table.dataset.startQuery = function(options, callback) {
- assert.strictEqual(options, fakeOptions);
- setImmediate(callback);
- return fakeReturnValue;
+ Table.formatMetadata_ = function(data) {
+ assert.strictEqual(data, fakeMetadata);
+ return formattedMetadata;
};
- var returnVal = table.startQuery(fakeOptions, done);
- assert.strictEqual(returnVal, fakeReturnValue);
+ FakeServiceObject.prototype.setMetadata = function(metadata, callback) {
+ assert.strictEqual(this, table);
+ assert.strictEqual(metadata, formattedMetadata);
+ assert.strictEqual(callback, done);
+ callback(); // the done fn
+ };
+
+ table.setMetadata(fakeMetadata, done);
});
});
});