index.js
const crypto = require('crypto'); // Used to generate random insertIds (see `insert()`).
const fs = require('fs');
const request = require('./request');
const { getBearer } = require('./auth');
const BearerCache = require('./bearer-cache');

const SCOPES = [
  'https://www.googleapis.com/auth/bigquery',
  'https://www.googleapis.com/auth/bigquery.insertdata',
];

class MinQuery {
  /**
   * Constructor.
   *
   * @param {string} options.key A PEM-encoded private key to authenticate to
   *   the BigQuery API. Must be specified if `options.keyFile` is not
   *   specified.
   * @param {string} options.keyFile Path to a PEM-encoded private key, which
   *   will be read synchronously in the constructor. Must be specified if
   *   `options.key` is not specified.
   * @param {string} options.email Google auth e-mail address. Required.
   * @param {string} options.projectId Google Cloud project ID. Required.
   */
  constructor(options) {
    options = options || {};

    if (!options.key && !options.keyFile) {
      throw new Error('Must specify `options.key` or `options.keyFile`');
    } else if (options.key && options.keyFile) {
      throw new Error('Specify only one of `options.key` or `options.keyFile`');
    }
    if (!options.email) {
      throw new Error('Must specify `options.email`');
    }
    if (!options.projectId) {
      throw new Error('Must specify `options.projectId`');
    }

    if (options.keyFile) {
      this.key = fs.readFileSync(options.keyFile);
    } else {
      this.key = options.key;
    }

    this.email = options.email;
    this.projectId = options.projectId;
    this.bearerCache = new BearerCache();
  }
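  // Usage sketch (illustrative only): constructing a client. The key file
  // path, service-account e-mail, and project ID below are placeholders, not
  // values from this repository.
  //
  //   const MinQuery = require('./index');
  //   const bigquery = new MinQuery({
  //     keyFile: '/path/to/service-account-key.pem',
  //     email: 'service-account@example-project.iam.gserviceaccount.com',
  //     projectId: 'example-project',
  //   });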
  /**
   * Issue an authenticated request against the BigQuery v2 REST API for this
   * project, resolving a (cached) bearer token first. `path` is appended to
   * the project's base URL.
   */
  async _request(method, path, data) {
    const url = `https://www.googleapis.com/bigquery/v2/projects/${this.projectId}${path}`;
    const bearer = await getBearer({
      bearerCache: this.bearerCache,
      key: this.key,
      email: this.email,
      scopes: SCOPES,
    });
    return request({
      method,
      body: data,
      json: true,
      url,
      auth: { bearer },
    });
  }
  /**
   * Create a table, returning the response on success.
   *
   * Ref: https://cloud.google.com/bigquery/docs/reference/v2/tables/insert
   * Ref: https://cloud.google.com/bigquery/docs/reference/v2/tables#resource
   *
   * @param {string} dataset dataset name
   * @param {string} tableName table name
   * @param {object[]} fields table schema
   * @param {Date} options.expirationDate If set, a Date object specifying when
   *   the table should expire.
   * @param {object} options.timePartitioning If set, an object containing
   *   fields `expirationMs` and `type` which will be passed through to the
   *   BigQuery API.
   */
  createTable(dataset, tableName, fields, options) {
    options = options || {};

    const data = {
      schema: {
        fields,
      },
      tableReference: {
        projectId: this.projectId,
        datasetId: dataset,
        tableId: tableName,
      },
    };
    if (options.expirationDate) {
      data.expirationTime = options.expirationDate.getTime();
    }
    if (options.timePartitioning) {
      data.timePartitioning = options.timePartitioning;
    }

    return this._request('POST', `/datasets/${dataset}/tables`, data);
  }
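  // Usage sketch (illustrative only): creating a day-partitioned table that
  // expires a week from now. The dataset name, table name, and schema are
  // hypothetical; `expirationDate` and `timePartitioning` are passed through
  // as documented above.
  //
  //   await bigquery.createTable('example_dataset', 'events', [
  //     { name: 'ts', type: 'TIMESTAMP', mode: 'REQUIRED' },
  //     { name: 'message', type: 'STRING' },
  //   ], {
  //     expirationDate: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000),
  //     timePartitioning: { type: 'DAY', expirationMs: '604800000' },
  //   });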
  /**
   * Insert one or more rows, returning the response on success.
   *
   * Ref: https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll
   *
   * @param {string} dataset dataset name
   * @param {string} tableName table name
   * @param {object[]} rows row data
   * @param {boolean} options.skipInvalidRows Passed through to the BigQuery
   *   API: don't fail the whole request if a row is invalid (default true).
   * @param {boolean} options.ignoreUnknownValues Passed through to the
   *   BigQuery API: ignore row values that don't match the table schema
   *   instead of failing the whole request (default true).
   * @param {boolean} options.addInsertId Add a random `insertId` to each row
   *   so BigQuery can de-duplicate retried inserts (default false).
   */
  insert(dataset, tableName, rows, options) {
    options = options || {};

    const rowData = rows.map((row) => {
      const entry = { json: row };
      if (options.addInsertId) {
        // Best-effort de-duplication key for streaming inserts; a random hex
        // string is used here.
        entry.insertId = crypto.randomBytes(16).toString('hex');
      }
      return entry;
    });

    const data = {
      kind: 'bigquery#tableDataInsertAllRequest',
      skipInvalidRows: options.skipInvalidRows !== undefined ?
        !!options.skipInvalidRows : true,
      ignoreUnknownValues: options.ignoreUnknownValues !== undefined ?
        !!options.ignoreUnknownValues : true,
      rows: rowData,
    };

    return this._request('POST', `/datasets/${dataset}/tables/${tableName}/insertAll`, data);
  }
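  // Usage sketch (illustrative only): streaming two rows into the table
  // created above, letting each row get a random insertId for best-effort
  // de-duplication. The row contents are hypothetical.
  //
  //   await bigquery.insert('example_dataset', 'events', [
  //     { ts: new Date().toISOString(), message: 'hello' },
  //     { ts: new Date().toISOString(), message: 'world' },
  //   ], { addInsertId: true });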
}
module.exports = MinQuery;