diff --git a/db-service/lib/cqn2sql.js b/db-service/lib/cqn2sql.js index e99c613eb..624f00dfe 100644 --- a/db-service/lib/cqn2sql.js +++ b/db-service/lib/cqn2sql.js @@ -142,13 +142,15 @@ class CQN2SQLRenderer { */ CREATE_elements(elements) { let sql = '' + let keys = '' for (let e in elements) { const definition = elements[e] if (definition.isAssociation) continue + if (definition.key) keys = `${keys}, ${this.quote(definition.name)}` const s = this.CREATE_element(definition) - if (s) sql += `${s}, ` + if (s) sql += `, ${s}` } - return sql.slice(0, -2) + return `${sql.slice(2)}${keys && `, PRIMARY KEY(${keys.slice(2)})`}` } /** @@ -491,8 +493,6 @@ class CQN2SQLRenderer { */ INSERT_entries(q) { const { INSERT } = q - const entity = this.name(q.target?.name || INSERT.into.ref[0]) - const alias = INSERT.into.as const elements = q.elements || q.target?.elements if (!elements && !INSERT.entries?.length) { return // REVISIT: mtx sends an insert statement without entries and no reference entity @@ -504,19 +504,14 @@ class CQN2SQLRenderer { /** @type {string[]} */ this.columns = columns + const alias = INSERT.into.as + const entity = this.name(q.target?.name || INSERT.into.ref[0]) if (!elements) { this.entries = INSERT.entries.map(e => columns.map(c => e[c])) const param = this.param.bind(this, { ref: ['?'] }) return (this.sql = `INSERT INTO ${this.quote(entity)}${alias ? ' as ' + this.quote(alias) : ''} (${this.columns.map(c => this.quote(c))}) VALUES (${columns.map(param)})`) } - const extractions = this.managed( - columns.map(c => ({ name: c })), - elements, - !!q.UPSERT, - ) - const extraction = extractions.map(c => c.sql) - // Include this.values for placeholders /** @type {unknown[][]} */ this.entries = [] @@ -530,8 +525,9 @@ class CQN2SQLRenderer { this.entries = [[...this.values, stream]] } + const extractions = this._managed = this.managed(columns.map(c => ({ name: c })), elements) return (this.sql = `INSERT INTO ${this.quote(entity)}${alias ? ' as ' + this.quote(alias) : ''} (${this.columns.map(c => this.quote(c)) - }) SELECT ${extraction} FROM json_each(?)`) + }) SELECT ${extractions.map(c => c.insert)} FROM json_each(?)`) } async *INSERT_entries_stream(entries, binaryEncoding = 'base64') { @@ -646,18 +642,7 @@ class CQN2SQLRenderer { const entity = this.name(q.target?.name || INSERT.into.ref[0]) const alias = INSERT.into.as const elements = q.elements || q.target?.elements - const columns = INSERT.columns - || cds.error`Cannot insert rows without columns or elements` - - const inputConverter = this.class._convertInput - const extraction = columns.map((c, i) => { - const extract = `value->>'$[${i}]'` - const element = elements?.[c] - const converter = element?.[inputConverter] - return converter?.(extract, element) || extract - }) - - this.columns = columns + const columns = this.columns = INSERT.columns || cds.error`Cannot insert rows without columns or elements` if (!elements) { this.entries = INSERT.rows @@ -675,6 +660,10 @@ class CQN2SQLRenderer { this.entries = [[...this.values, stream]] } + const extraction = (this._managed = this.managed(columns.map(c => ({ name: c })), elements)) + .slice(0, columns.length) + .map(c => c.converter(c.extract)) + return (this.sql = `INSERT INTO ${this.quote(entity)}${alias ? 
' as ' + this.quote(alias) : ''} (${this.columns.map(c => this.quote(c)) }) SELECT ${extraction} FROM json_each(?)`) } @@ -686,7 +675,7 @@ class CQN2SQLRenderer { */ INSERT_values(q) { let { columns, values } = q.INSERT - return this.INSERT_rows({ __proto__: q, INSERT: { __proto__: q.INSERT, columns, rows: [values] } }) + return this.render({ __proto__: q, INSERT: { __proto__: q.INSERT, columns, rows: [values] } }) } /** @@ -737,14 +726,37 @@ class CQN2SQLRenderer { */ UPSERT(q) { const { UPSERT } = q - const elements = q.target?.elements || {} + let sql = this.INSERT({ __proto__: q, INSERT: UPSERT }) - let keys = q.target?.keys - if (!keys) return this.sql = sql - keys = Object.keys(keys).filter(k => !keys[k].isAssociation && !keys[k].virtual) + if (!q.target?.keys) return sql + const keys = [] + for (const k of ObjectKeys(q.target?.keys)) { + const element = q.target.keys[k] + if (element.isAssociation || element.virtual) continue + keys.push(k) + } + + const elements = q.target?.elements || {} + // temporal data + for (const k of ObjectKeys(elements)) { + if (elements[k]['@cds.valid.from']) keys.push(k) + } - let updateColumns = q.UPSERT.entries ? Object.keys(q.UPSERT.entries[0]) : this.columns - updateColumns = updateColumns.filter(c => { + const keyCompare = keys + .map(k => `NEW.${this.quote(k)}=OLD.${this.quote(k)}`) + .join(' AND ') + + const columns = this.columns // this.columns is computed as part of this.INSERT + const managed = this._managed.slice(0, columns.length) + + const extractkeys = managed + .filter(c => keys.includes(c.name)) + .map(c => `${c.onInsert || c.sql} as ${this.quote(c.name)}`) + + const entity = this.name(q.target?.name || UPSERT.into.ref[0]) + sql = `SELECT ${managed.map(c => c.upsert)} FROM (SELECT value, ${extractkeys} from json_each(?)) as NEW LEFT JOIN ${this.quote(entity)} AS OLD ON ${keyCompare}` + + const updateColumns = columns.filter(c => { if (keys.includes(c)) return false //> keys go into ON CONFLICT clause let e = elements[c] if (!e) return true //> pass through to native SQL columns not in CDS model @@ -754,14 +766,8 @@ class CQN2SQLRenderer { else return true }).map(c => `${this.quote(c)} = excluded.${this.quote(c)}`) - // temporal data - keys.push(...Object.values(q.target.elements).filter(e => e['@cds.valid.from']).map(e => e.name)) - - keys = keys.map(k => this.quote(k)) - const conflict = updateColumns.length - ? `ON CONFLICT(${keys}) DO UPDATE SET ` + updateColumns - : `ON CONFLICT(${keys}) DO NOTHING` - return (this.sql = `${sql} WHERE true ${conflict}`) + return (this.sql = `INSERT INTO ${this.quote(entity)} (${columns.map(c => this.quote(c))}) ${sql + } WHERE TRUE ON CONFLICT(${keys.map(c => this.quote(c))}) DO ${updateColumns.length ? `UPDATE SET ${updateColumns}` : 'NOTHING'}`) } // UPDATE Statements ------------------------------------------------ @@ -790,7 +796,9 @@ class CQN2SQLRenderer { } } - const extraction = this.managed(columns, elements, true).map(c => `${this.quote(c.name)}=${c.sql}`) + const extraction = this.managed(columns, elements) + .filter((c, i) => columns[i] || c.onUpdate) + .map((c, i) => `${this.quote(c.name)}=${!columns[i] ? 
c.onUpdate : c.sql}`) sql += ` SET ${extraction}` if (where) sql += ` WHERE ${this.where(where)}` @@ -1042,56 +1050,104 @@ class CQN2SQLRenderer { } /** - * Convers the columns array into an array of SQL expressions that extract the correct value from inserted JSON data + * Converts the columns array into an array of SQL expressions that extract the correct value from inserted JSON data * @param {object[]} columns * @param {import('./infer/cqn').elements} elements * @param {Boolean} isUpdate * @returns {string[]} Array of SQL expressions for processing input JSON data */ - managed(columns, elements, isUpdate = false) { - const annotation = isUpdate ? '@cds.on.update' : '@cds.on.insert' + managed(columns, elements) { + const cdsOnInsert = '@cds.on.insert' + const cdsOnUpdate = '@cds.on.update' + const { _convertInput } = this.class // Ensure that missing managed columns are added const requiredColumns = !elements ? [] - : Object.keys(elements) - .filter( - e => - (elements[e]?.[annotation] || (!isUpdate && elements[e]?.default && !elements[e].virtual && !elements[e].isAssociation)) && - !columns.find(c => c.name === e), - ) + : ObjectKeys(elements) + .filter(e => { + const element = elements[e] + // Actual mandatory check + if (!(element.default || element[cdsOnInsert] || element[cdsOnUpdate])) return false + // Physical column check + if (!element || element.virtual || element.isAssociation) return false + // Existence check + if (columns.find(c => c.name === e)) return false + return true + }) .map(name => ({ name, sql: 'NULL' })) + const keys = ObjectKeys(elements).filter(e => elements[e].key) + const keyZero = keys[0] && this.quote(keys[0]) + return [...columns, ...requiredColumns].map(({ name, sql }) => { - let element = elements?.[name] || {} - if (!sql) sql = `value->>'$."${name}"'` - - let converter = element[_convertInput] - if (converter && sql[0] !== '$') sql = converter(sql, element) - - let val = _managed[element[annotation]?.['=']] - if (val) sql = `coalesce(${sql}, ${this.func({ func: 'session_context', args: [{ val, param: false }] })})` - else if (!isUpdate && element.default) { - const d = element.default - if (d.val !== undefined || d.ref?.[0] === '$now') { - // REVISIT: d.ref is not used afterwards - sql = `(CASE WHEN json_type(value,'$."${name}"') IS NULL THEN ${this.defaultValue(d.val) // REVISIT: this.defaultValue is a strange function - } ELSE ${sql} END)` - } + const element = elements?.[name] || {} + + const converter = a => element[_convertInput]?.(a, element) || a + let extract + if (!sql) { + ({ sql, extract } = this.managed_extract(name, element, converter)) + } else { + extract = sql = converter(sql) } + // if (sql[0] !== '$') sql = converter(sql, element) + + let onInsert = this.managed_session_context(element[cdsOnInsert]?.['=']) + || this.managed_session_context(element.default?.ref?.[0]) + || (element.default?.val !== undefined && { val: element.default.val, param: false }) + let onUpdate = this.managed_session_context(element[cdsOnUpdate]?.['=']) + + if (onInsert) onInsert = this.expr(onInsert) + if (onUpdate) onUpdate = this.expr(onUpdate) + + const qname = this.quote(name) + + const insert = onInsert ? this.managed_default(name, converter(onInsert), sql) : sql + const update = onUpdate ? 
this.managed_default(name, converter(onUpdate), sql) : sql + const upsert = keyZero && ( + // upsert requires the keys to be provided for the existence join (default values optional) + element.key + // If both insert and update have the same managed definition exclude the old value check + || (onInsert && onUpdate && insert === update) + ? `${insert} as ${qname}` + : `CASE WHEN OLD.${keyZero} IS NULL THEN ${ + // If key of old is null execute insert + insert + } ELSE ${ + // Else execute managed update or keep the old value if no new data is provided + onUpdate ? update : this.managed_default(name, `OLD.${qname}`, update) + } END as ${qname}` + ) - return { name, sql } + return { + name, // Element name + sql, // Reference SQL + extract, // Source SQL + converter, // Converter logic + // action specific full logic + insert, update, upsert, + // action specific isolated logic + onInsert, onUpdate + } }) } - /** - * Returns the default value - * @param {string} defaultValue - * @returns {string} - */ - // REVISIT: This is a strange method, also overridden inconsistently in postgres - defaultValue(defaultValue = this.context.timestamp.toISOString()) { - return typeof defaultValue === 'string' ? this.string(defaultValue) : defaultValue + managed_extract(name, element, converter) { + const { UPSERT, INSERT } = this.cqn + const extract = !(INSERT?.entries || UPSERT?.entries) && (INSERT?.rows || UPSERT?.rows) + ? `value->>'$[${this.columns.indexOf(name)}]'` + : `value->>'$."${name.replace(/"/g, '""')}"'` + const sql = converter?.(extract) || extract + return { extract, sql } + } + + managed_session_context(src) { + const val = _managed[src] + return val && { func: 'session_context', args: [{ val, param: false }] } + } + + managed_default(name, managed, src) { + return `(CASE WHEN json_type(value,${this.managed_extract(name).extract.slice(8)}) IS NULL THEN ${managed} ELSE ${src} END)` } } diff --git a/hana/lib/HANAService.js b/hana/lib/HANAService.js index caff2042d..801a11253 100644 --- a/hana/lib/HANAService.js +++ b/hana/lib/HANAService.js @@ -653,14 +653,11 @@ class HANAService extends SQLService { : ObjectKeys(INSERT.entries[0]) this.columns = columns - const extractions = this.managed( - columns.map(c => ({ name: c })), - elements, - !!q.UPSERT, - ) + const extractions = this.managed(columns.map(c => ({ name: c })), elements) - const extraction = extractions.map(c => c.column) - const converter = extractions.map(c => c.convert) + // REVISIT: @cds.extension required + const extraction = extractions.map(c => c.extract) + const converter = extractions.map(c => c.insert) const _stream = entries => { const stream = Readable.from(this.INSERT_entries_stream(entries, 'hex'), { objectMode: false }) @@ -696,7 +693,7 @@ class HANAService extends SQLService { return (this.sql = `INSERT INTO ${this.quote(entity)} (${this.columns.map(c => this.quote(c), )}) WITH SRC AS (SELECT ?
AS JSON FROM DUMMY UNION ALL SELECT TO_NCLOB(NULL) AS JSON FROM DUMMY) - SELECT ${converter} FROM JSON_TABLE(SRC.JSON, '$' COLUMNS(${extraction}) ERROR ON ERROR)`) + SELECT ${converter} FROM JSON_TABLE(SRC.JSON, '$' COLUMNS(${extraction}) ERROR ON ERROR) AS NEW`) } INSERT_rows(q) { @@ -715,7 +712,11 @@ class HANAService extends SQLService { return super.INSERT_rows(q) } - const columns = INSERT.columns || (elements && ObjectKeys(elements)) + const columns = INSERT.columns || [] + for (const col of ObjectKeys(elements)) { + if (!columns.includes(col)) columns.push(col) + } + const entries = new Array(INSERT.rows.length) const rows = INSERT.rows for (let x = 0; x < rows.length; x++) { @@ -723,6 +724,8 @@ class HANAService extends SQLService { const entry = {} for (let y = 0; y < columns.length; y++) { entry[columns[y]] = row[y] + // Include explicit null values for managed fields + ?? (elements[columns[y]]['@cds.on.insert'] && null) } entries[x] = entry } @@ -732,37 +735,35 @@ class HANAService extends SQLService { UPSERT(q) { const { UPSERT } = q - const sql = this.INSERT({ __proto__: q, INSERT: UPSERT }) + // REVISIT: should @cds.persistence.name be considered ? + const entity = q.target?.['@cds.persistence.name'] || this.name(q.target?.name || UPSERT.into.ref[0]) + const elements = q.target?.elements || {} + const insert = this.INSERT({ __proto__: q, INSERT: UPSERT }) - // If no definition is available fallback to INSERT statement - const elements = q.elements || q.target?.elements - if (!elements) { - return (this.sql = sql) - } + let keys = q.target?.keys + if (!keys) return insert + keys = Object.keys(keys).filter(k => !keys[k].isAssociation && !keys[k].virtual) - // REVISIT: should @cds.persistence.name be considered ? - const entity = q.target?.['@cds.persistence.name'] || this.name(q.target?.name || INSERT.into.ref[0]) - const dataSelect = sql.substring(sql.indexOf('WITH')) + // temporal data + keys.push(...ObjectKeys(q.target.elements).filter(e => q.target.elements[e]['@cds.valid.from'])) - // Calculate @cds.on.insert - const collations = this.managed( - this.columns.map(c => ({ name: c, sql: `NEW.${this.quote(c)}` })), - elements, - false, + const managed = this.managed( + this.columns.map(c => ({ name: c })), + elements ) - let keys = q.target?.keys - const keyCompare = - keys && - Object.keys(keys) - .filter(k => !keys[k].isAssociation && !keys[k].virtual) - .map(k => `NEW.${this.quote(k)}=OLD.${this.quote(k)}`) - .join(' AND ') - - return (this.sql = `UPSERT ${this.quote(entity)} (${this.columns.map(c => - this.quote(c), - )}) SELECT ${collations.map(keyCompare ? c => c.switch : c => c.sql)} FROM (${dataSelect}) AS NEW ${keyCompare ? ` LEFT JOIN ${this.quote(entity)} AS OLD ON ${keyCompare}` : '' - }`) + const keyCompare = managed + .filter(c => keys.includes(c.name)) + .map(c => `${c.insert}=OLD.${this.quote(c.name)}`) + .join(' AND ') + + const mixing = managed.map(c => c.upsert) + const extraction = managed.map(c => c.extract) + + const sql = `WITH SRC AS (SELECT ? AS JSON FROM DUMMY UNION ALL SELECT TO_NCLOB(NULL) AS JSON FROM DUMMY) +SELECT ${mixing} FROM JSON_TABLE(SRC.JSON, '$' COLUMNS(${extraction})) AS NEW LEFT JOIN ${this.quote(entity)} AS OLD ON ${keyCompare}` + + return (this.sql = `UPSERT ${this.quote(entity)} (${this.columns.map(c => this.quote(c))}) ${sql}`) } DROP(q) { @@ -1034,71 +1035,16 @@ class HANAService extends SQLService { ) } - managed(columns, elements, isUpdate = false) { - const annotation = isUpdate ? 
'@cds.on.update' : '@cds.on.insert' - const inputConverterKey = this.class._convertInput - // Ensure that missing managed columns are added - const requiredColumns = !elements - ? [] - : Object.keys(elements) - .filter(e => { - if (elements[e]?.virtual) return false - if (elements[e]?.isAssociation) return false - if (columns.find(c => c.name === e)) return false - if (elements[e]?.[annotation]) return true - if (!isUpdate && elements[e]?.default) return true - return false - }) - .map(name => ({ name, sql: 'NULL' })) - - const keyZero = this.quote( - ObjectKeys(elements).find(e => { - const el = elements[e] - return el.key && !el.isAssociation - }) || '', - ) - - return [...columns, ...requiredColumns].map(({ name, sql }) => { - const element = elements?.[name] || {} - // Don't apply input converters for place holders - const converter = (sql !== '?' && element[inputConverterKey]) || (e => e) - const val = _managed[element[annotation]?.['=']] - let managed - if (val) managed = this.func({ func: 'session_context', args: [{ val, param: false }] }) - let extract = sql ?? `${this.quote(name)} ${this.insertType4(element)} PATH '$.${name}'` - if (!isUpdate) { - const d = element.default - if (d && ('val' in d || d.ref?.[0] === '$now')) { - const defaultValue = 'val' in d ? d.val : (cds.context?.timestamp || new Date()).toISOString() - managed = typeof defaultValue === 'string' ? this.string(defaultValue) : defaultValue - } - } + managed_extract(name, element, converter) { + // TODO: test property names with single and double quotes + return { + extract: `${this.quote(name)} ${this.insertType4(element)} PATH '$.${name}', ${this.quote('$.' + name)} NVARCHAR(2147483647) FORMAT JSON PATH '$.${name}'`, + sql: converter(`NEW.${this.quote(name)}`), + } + } - // Switch between OLD and NEW based upon the existence of the column in the NEW dataset - // Coalesce is not good enough as it would not allow for setting a value to NULL using UPSERT - const oldOrNew = - element['@cds.on.update']?.['='] !== undefined - ? extract - : `CASE WHEN ${this.quote('$.' + name)} IS NULL THEN OLD.${this.quote(name)} ELSE ${extract} END` - - const notManged = managed === undefined - return { - name, - column: `${extract}, ${this.quote('$.' + name)} NVARCHAR(2147483647) FORMAT JSON PATH '$.${name}'`, - // For @cds.on.insert ensure that there was no entry yet before setting managed in UPSERT - switch: notManged - ? oldOrNew - : `CASE WHEN OLD.${keyZero} IS NULL THEN COALESCE(${extract},${managed}) ELSE ${oldOrNew} END`, - convert: - (notManged - ? `${converter(this.quote(name), element)} AS ${this.quote(name)}` - : `CASE WHEN ${this.quote('$.' + name)} IS NULL THEN ${managed} ELSE ${converter( - this.quote(name), - element, - )} END AS ${this.quote(name)}`) + (isUpdate ? `,${this.quote('$.' + name)}` : ''), - sql: converter(notManged ? extract : `COALESCE(${extract}, ${managed})`, element), - } - }) + managed_default(name, managed, src) { + return `(CASE WHEN ${this.quote('$.' 
+ name)} IS NULL THEN ${managed} ELSE ${src} END)` } // Loads a static result from the query `SELECT * FROM RESERVED_KEYWORDS` @@ -1142,16 +1088,18 @@ class HANAService extends SQLService { // HANA JSON_TABLE function does not support BOOLEAN types static InputConverters = { ...super.InputConverters, + // REVISIT: BASE64_DECODE has stopped working // Unable to convert NVARCHAR to UTF8 // Not encoded string with CESU-8 or some UTF-8 except a surrogate pair at "base64_decode" function - Binary: e => `HEXTOBIN(${e})`, - Boolean: e => `CASE WHEN ${e} = 'true' OR ${e} = '1' THEN TRUE WHEN ${e} = 'false' OR ${e} = '0' THEN FALSE END`, - Vector: e => `TO_REAL_VECTOR(${e})`, + Binary: e => e === '?' ? e : `HEXTOBIN(${e})`, + Boolean: e => e === '?' ? e : `CASE WHEN ${e} = 'true' OR ${e} = '1' THEN TRUE WHEN ${e} = 'false' OR ${e} = '0' THEN FALSE END`, // TODO: Decimal: (expr, element) => element.precision ? `TO_DECIMAL(${expr},${element.precision},${element.scale})` : expr + // Types that require input converters for placeholders as well + Vector: e => `TO_REAL_VECTOR(${e})`, // HANA types - 'cds.hana.ST_POINT': e => `CASE WHEN ${e} IS NOT NULL THEN NEW ST_POINT(TO_DOUBLE(JSON_VALUE(${e}, '$.x')), TO_DOUBLE(JSON_VALUE(${e}, '$.y'))) END`, + 'cds.hana.ST_POINT': e => `TO_POINT(${e})`, 'cds.hana.ST_GEOMETRY': e => `TO_GEOMETRY(${e})`, } @@ -1169,11 +1117,11 @@ class HANAService extends SQLService { Int64: expr => `TO_NVARCHAR(${expr})`, // Reading decimal as string to not loose precision Decimal: (expr, elem) => elem?.scale - ? `TO_NVARCHAR(${expr}, '9.${''.padEnd(elem.scale, '0')}')` + ? `TO_NVARCHAR(${expr}, '0.${''.padEnd(elem.scale, '0')}')` : `TO_NVARCHAR(${expr})`, // HANA types - 'cds.hana.ST_POINT': e => `(SELECT NEW ST_POINT(TO_NVARCHAR(${e})).ST_X() as "x", NEW ST_POINT(TO_NVARCHAR(${e})).ST_Y() as "y" FROM DUMMY WHERE (${e}) IS NOT NULL FOR JSON ('format'='no', 'omitnull'='no', 'arraywrap'='no') RETURNS NVARCHAR(2147483647))`, + 'cds.hana.ST_POINT': e => `TO_NVARCHAR(${e})`, 'cds.hana.ST_GEOMETRY': e => `TO_NVARCHAR(${e})`, } } @@ -1381,11 +1329,6 @@ function _not_unique(err, code) { const is_regexp = x => x?.constructor?.name === 'RegExp' // NOTE: x instanceof RegExp doesn't work in repl const ObjectKeys = o => (o && [...ObjectKeys(o.__proto__), ...Object.keys(o)]) || [] -const _managed = { - '$user.id': '$user.id', - $user: '$user.id', - $now: '$now', -} const caseOperators = { 'CASE': 1, diff --git a/hana/lib/scripts/container-tenant.sql b/hana/lib/scripts/container-tenant.sql index ae256221a..2adeac02d 100644 --- a/hana/lib/scripts/container-tenant.sql +++ b/hana/lib/scripts/container-tenant.sql @@ -62,7 +62,7 @@ BEGIN SEQUENTIAL EXECUTION CALL _SYS_DI#{{{GROUP}}}.GRANT_CONTAINER_SCHEMA_PRIVILEGES(:SCHEMANAME, :SCHEMA_PRIV, :NO_PARAMS, :RETURN_CODE, :REQUEST_ID, :MESSAGES); ALL_MESSAGES = SELECT * FROM :ALL_MESSAGES UNION ALL SELECT * FROM :MESSAGES; COMMIT; - - SELECT * FROM :ALL_MESSAGES; END IF; + + SELECT * FROM :ALL_MESSAGES; END; diff --git a/hana/test/spatial.test.js b/hana/test/spatial.test.js new file mode 100644 index 000000000..c37e49785 --- /dev/null +++ b/hana/test/spatial.test.js @@ -0,0 +1,25 @@ +const cds = require('../../test/cds.js') + +describe('Spatial Types', () => { + const { data, expect } = cds.test(__dirname + '/../../test/compliance/resources') + data.autoIsolation(true) + data.autoReset() + + test('point', async () => { + const { HANA_ST } = cds.entities('edge.hana.literals') + const point = 'POINT(1 1)' + await INSERT({ point: null }).into(HANA_ST) + await 
UPDATE(HANA_ST).data({ point }) + const result = await SELECT.one.from(HANA_ST) + expect(result.point).to.contain('POINT') + }) + + test('geometry', async () => { + const { HANA_ST } = cds.entities('edge.hana.literals') + const geometry = 'POINT(1 1)' + await INSERT({ geometry: null }).into(HANA_ST) + await UPDATE(HANA_ST).data({ geometry }) + const result = await SELECT.one.from(HANA_ST) + expect(result.geometry).to.contain('POINT') + }) +}) diff --git a/postgres/lib/PostgresService.js b/postgres/lib/PostgresService.js index 465d2e1bc..ecfa302a0 100644 --- a/postgres/lib/PostgresService.js +++ b/postgres/lib/PostgresService.js @@ -424,11 +424,32 @@ GROUP BY k // REVISIT: this should probably be made a bit easier to adopt return (this.sql = this.sql - // Adjusts json path expressions to be postgres specific - .replace(/->>'\$(?:(?:\."(.*?)")|(?:\[(\d*)\]))'/g, (a, b, c) => (b ? `->>'${b}'` : `->>${c}`)) // Adjusts json function to be postgres specific .replace('json_each(?)', 'json_array_elements($1::json)') - .replace(/json_type\((\w+),'\$\."(\w+)"'\)/g, (_a, b, c) => `json_typeof(${b}->'${c}')`)) + ) + } + + UPSERT(q, isUpsert = false) { + super.UPSERT(q, isUpsert) + + // REVISIT: this should probably be made a bit easier to adopt + return (this.sql = this.sql + // Adjusts json function to be postgres specific + .replace('json_each(?)', 'json_array_elements($1::json)') + ) + } + + managed_extract(name, element, converter) { + const { UPSERT, INSERT } = this.cqn + const extract = !(INSERT?.entries || UPSERT?.entries) && (INSERT?.rows || UPSERT?.rows) + ? `value->>${this.columns.indexOf(name)}` + : `value->>'${name.replace(/'/g, "''")}'` + const sql = converter?.(extract) || extract + return { extract, sql } + } + + managed_default(name, managed, src) { + return `(CASE WHEN json_typeof(value->${this.managed_extract(name).extract.slice(8)}) IS NULL THEN ${managed} ELSE ${src} END)` } param({ ref }) { @@ -503,27 +524,31 @@ GROUP BY k // Used for INSERT statements static InputConverters = { ...super.InputConverters, - // UUID: (e) => `CAST(${e} as UUID)`, // UUID is strict in formatting sflight does not comply - boolean: e => `CASE ${e} WHEN 'true' THEN true WHEN 'false' THEN false END`, - Float: (e, t) => `CAST(${e} as decimal${t.precision && t.scale ? `(${t.precision},${t.scale})` : ''})`, - Decimal: (e, t) => `CAST(${e} as decimal${t.precision && t.scale ? `(${t.precision},${t.scale})` : ''})`, - Integer: e => `CAST(${e} as integer)`, - Int64: e => `CAST(${e} as bigint)`, - Date: e => `CAST(${e} as DATE)`, - Time: e => `CAST(${e} as TIME)`, - DateTime: e => `CAST(${e} as TIMESTAMP)`, - Timestamp: e => `CAST(${e} as TIMESTAMP)`, + // UUID: (e) => e[0] === '$' ? e : `CAST(${e} as UUID)`, // UUID is strict in formatting sflight does not comply + boolean: e => e[0] === '$' ? e : `CASE ${e} WHEN 'true' THEN true WHEN 'false' THEN false END`, + // REVISIT: Postgres and HANA round Decimal numbers differently therefore precision and scale are removed + // Float: (e, t) => e[0] === '$' ? e : `CAST(${e} as decimal${t.precision && t.scale ? `(${t.precision},${t.scale})` : ''})`, + // Decimal: (e, t) => e[0] === '$' ? e : `CAST(${e} as decimal${t.precision && t.scale ? `(${t.precision},${t.scale})` : ''})`, + Float: e => e[0] === '$' ? e : `CAST(${e} as decimal)`, + Decimal: e => e[0] === '$' ? e : `CAST(${e} as decimal)`, + Integer: e => e[0] === '$' ? e : `CAST(${e} as integer)`, + Int64: e => e[0] === '$' ? e : `CAST(${e} as bigint)`, + Date: e => e[0] === '$' ? 
e : `CAST(${e} as DATE)`, + Time: e => e[0] === '$' ? e : `CAST(${e} as TIME)`, + DateTime: e => e[0] === '$' ? e : `CAST(${e} as TIMESTAMP)`, + Timestamp: e => e[0] === '$' ? e : `CAST(${e} as TIMESTAMP)`, // REVISIT: Remove that with upcoming fixes in cds.linked - Double: (e, t) => `CAST(${e} as decimal${t.precision && t.scale ? `(${t.precision},${t.scale})` : ''})`, - DecimalFloat: (e, t) => `CAST(${e} as decimal${t.precision && t.scale ? `(${t.precision},${t.scale})` : ''})`, - Binary: e => `DECODE(${e},'base64')`, - LargeBinary: e => `DECODE(${e},'base64')`, + Double: (e, t) => e[0] === '$' ? e : `CAST(${e} as decimal${t.precision && t.scale ? `(${t.precision},${t.scale})` : ''})`, + DecimalFloat: (e, t) => e[0] === '$' ? e : `CAST(${e} as decimal${t.precision && t.scale ? `(${t.precision},${t.scale})` : ''})`, + Binary: e => e[0] === '$' ? e : `DECODE(${e},'base64')`, + LargeBinary: e => e[0] === '$' ? e : `DECODE(${e},'base64')`, // HANA Types - 'cds.hana.CLOB': e => `DECODE(${e},'base64')`, - 'cds.hana.BINARY': e => `DECODE(${e},'base64')`, - 'cds.hana.ST_POINT': e => `POINT(((${e})::json->>'x')::float, ((${e})::json->>'y')::float)`, - 'cds.hana.ST_GEOMETRY': e => `POLYGON(${e})`, + 'cds.hana.CLOB': e => e[0] === '$' ? e : `DECODE(${e},'base64')`, + 'cds.hana.BINARY': e => e[0] === '$' ? e : `DECODE(${e},'base64')`, + // REVISIT: have someone take a look at how this syntax exactly works in postgres with postgis + 'cds.hana.ST_POINT': e => `(${e})::point`, + 'cds.hana.ST_GEOMETRY': e => `(${e})::polygon`, } static OutputConverters = { @@ -548,8 +573,9 @@ GROUP BY k : `cast(${expr} as varchar)` : undefined, - // Convert point back to json format - 'cds.hana.ST_POINT': expr => `CASE WHEN (${expr}) IS NOT NULL THEN json_object('x':(${expr})[0],'y':(${expr})[1])::varchar END`, + // Convert ST types back to WKT format + 'cds.hana.ST_POINT': expr => `ST_AsText(${expr})`, + 'cds.hana.ST_GEOMETRY': expr => `ST_AsText(${expr})`, } } diff --git a/sqlite/lib/SQLiteService.js b/sqlite/lib/SQLiteService.js index 0f6f6ed65..35a4b85ad 100644 --- a/sqlite/lib/SQLiteService.js +++ b/sqlite/lib/SQLiteService.js @@ -192,12 +192,12 @@ class SQLiteService extends SQLService { ...super.InputConverters, // The following allows passing in ISO strings with non-zulu // timezones and converts them into zulu dates and times - Date: e => `strftime('%Y-%m-%d',${e})`, - Time: e => `strftime('%H:%M:%S',${e})`, + Date: e => e === '?' ? e : `strftime('%Y-%m-%d',${e})`, + Time: e => e === '?' ? e : `strftime('%H:%M:%S',${e})`, // Both, DateTimes and Timestamps are canonicalized to ISO strings with // ms precision to allow safe comparisons, also to query {val}s in where clauses - DateTime: e => `ISO(${e})`, - Timestamp: e => `ISO(${e})`, + DateTime: e => e === '?' ? e : `ISO(${e})`, + Timestamp: e => e === '?' ?
e : `ISO(${e})`, } static OutputConverters = { diff --git a/sqlite/test/general/managed.test.js b/sqlite/test/general/managed.test.js index 1cfab31fe..b913738ec 100644 --- a/sqlite/test/general/managed.test.js +++ b/sqlite/test/general/managed.test.js @@ -1,7 +1,7 @@ const cds = require('../../../test/cds.js') describe('Managed thingies', () => { - const { POST, PUT, sleep, expect } = cds.test(__dirname, 'model.cds') + const { POST, PUT, PATCH, expect } = cds.test(__dirname, 'model.cds') test('INSERT execute on db only', async () => { const db = await cds.connect.to('db') @@ -15,7 +15,8 @@ describe('Managed thingies', () => { ID: 2, // createdAt: expect.any(String), createdBy: 'anonymous', - // modifiedAt: expect.any(String), + defaultValue: 100, + modifiedAt: expect.any(String), modifiedBy: 'samuel', }, ]) @@ -25,7 +26,9 @@ describe('Managed thingies', () => { test('UPSERT execute on db only', async () => { // UPSERT behaves like UPDATE for managed, so insert annotated fields should not be filled const db = await cds.connect.to('db') - return db.run(async () => { + + let modifications = [] + await db.tx(async () => { // REVISIT: Why do we allow overriding managed elements here? // provided values for managed annotated fields should be kept on DB level if provided await UPSERT.into('test.foo').entries({ ID: 3, modifiedBy: 'samuel' }) @@ -34,22 +37,41 @@ describe('Managed thingies', () => { expect(result).to.containSubset([ { ID: 3, - createdAt: null, - createdBy: null, - // modifiedAt: expect.any(String), + createdAt: expect.any(String), + createdBy: "anonymous", + defaultValue: 100, + modifiedAt: expect.any(String), modifiedBy: 'samuel', }, ]) - const { modifiedAt } = result[0] - expect(modifiedAt).to.equal(cds.context.timestamp.toISOString()) + const row = result.at(-1) + modifications.push(row) + const { modifiedAt } = row + expect(modifiedAt).toEqual(cds.context.timestamp.toISOString()) + }) - await sleep(11) // ensure some ms are passed - const modified = new Date(modifiedAt).getTime() - const now = Date.now() + // Ensure that a second UPSERT updates the managed fields + await db.tx(async () => { + await UPSERT.into('test.foo').entries({ ID: 3 }) - expect(now - modified).to.be.greaterThan(0) - expect(now - modified).to.be.lessThan(10 * 1000) // 10s + const result = await SELECT.from('test.foo').where({ ID: 3 }) + expect(result).toEqual([ + { + ID: 3, + createdAt: expect.any(String), + createdBy: "anonymous", + defaultValue: 100, + modifiedAt: expect.any(String), + modifiedBy: 'anonymous', + }, + ]) + + const row = result.at(-1) + modifications.push(row) + const { modifiedAt } = row + expect(modifiedAt).toEqual(cds.context.timestamp.toISOString()) + expect(modifiedAt).not.toEqual(modifications.at(-2).modifiedAt) }) }) @@ -62,38 +84,48 @@ describe('Managed thingies', () => { ID: 4, // createdAt: expect.any(String), createdBy: 'anonymous', - // modifiedAt: expect.any(String), + defaultValue: 100, + modifiedAt: expect.any(String), modifiedBy: 'anonymous', }) const { createdAt, modifiedAt } = resPost.data - expect(createdAt).to.equal(modifiedAt) - - await sleep(11) // ensure some ms are passed - const now = Date.now() - const created = new Date(createdAt).getTime() - - expect(now - created).to.be.greaterThan(0) - expect(now - created).to.be.lessThan(10 * 1000) // 10s + expect(createdAt).toEqual(modifiedAt) }) test('on update is filled', async () => { - const resPost = await POST('/test/foo', { ID: 5 }) + const resPost = await POST('/test/foo', { ID: 5, defaultValue: 50 }) + + // patch 
keeps old defaults + const resUpdate1 = await PATCH('/test/foo(5)', {}) + expect(resUpdate1.status).toBe(200) + + expect(resUpdate1.data).toEqual({ + '@odata.context': '$metadata#foo/$entity', + ID: 5, + createdAt: resPost.data.createdAt, + createdBy: resPost.data.createdBy, + defaultValue: 50, // not defaulted to 100 on update + modifiedAt: expect.any(String), + modifiedBy: 'anonymous', + }) - const resUpdate = await PUT('/test/foo(5)', {}) - expect(resUpdate.status).to.equal(200) + // PUT overwrites not-provided fields with their defaults + const resUpdate2 = await PUT('/test/foo(5)', {}) + expect(resUpdate2.status).toBe(200) - expect(resUpdate.data).to.containSubset({ + expect(resUpdate2.data).toEqual({ '@odata.context': '$metadata#foo/$entity', ID: 5, createdAt: resPost.data.createdAt, createdBy: resPost.data.createdBy, - // modifiedAt: expect.any(String), + defaultValue: 100, + modifiedAt: expect.any(String), modifiedBy: 'anonymous', }) - const { createdAt, modifiedAt } = resUpdate.data - expect(createdAt).not.to.equal(modifiedAt) + const { createdAt, modifiedAt } = resUpdate1.data + expect(createdAt).not.toEqual(modifiedAt) const insertTime = new Date(createdAt).getTime() const updateTime = new Date(modifiedAt).getTime() diff --git a/sqlite/test/general/model.cds b/sqlite/test/general/model.cds index 9b470e24c..17dd32d38 100644 --- a/sqlite/test/general/model.cds +++ b/sqlite/test/general/model.cds @@ -11,6 +11,7 @@ entity db.fooTemporal : managed, temporal { service test { entity foo : managed { key ID : Integer; + defaultValue: Integer default 100; } entity bar { diff --git a/test/compliance/CREATE.test.js b/test/compliance/CREATE.test.js index d3f824983..cd79accbe 100644 --- a/test/compliance/CREATE.test.js +++ b/test/compliance/CREATE.test.js @@ -6,6 +6,166 @@ const fspath = require('path') // Add the test names you want to run as only const only = [] +const toTitle = obj => + JSON.stringify( + obj, + (_, b) => { + if (Buffer.isBuffer(b) || b?.type === 'Buffer') { + return `Buffer(${b.byteLength || b.data?.length})` + } + if (b instanceof Readable) { + return 'Readable' + } + if (typeof b === 'function') return `${b}` + return b + }, + Object.keys(obj).length === 1 ?
undefined : '\t ', ) // Super hacky way to make the jest report look nice .replace(/\n}/g, '\n\t }') + +const dataTest = async function (entity, table, type, obj) { + const data = {} + const transforms = {} + const expect = {} + Object.setPrototypeOf(expect, transforms) + Object.setPrototypeOf(transforms, data) + let throws = false + + const assign = (t, p, v) => { + if (typeof v === 'function') { + Object.defineProperty(t, p, { + get: v, + set(v) { + Object.defineProperty(t, p, { value: v }) + }, + enumerable: true, + configurable: true, + }) + } else { + t[p] = v + } + } + Object.keys(obj).forEach(k => { + const cur = obj[k] + if (k === '!') { + throws = obj[k] + return + } + if (k[0] === '=') { + assign(transforms, k.substring(1), cur) + } else { + assign(data, k, cur) + } + }) + + const keys = [] + for (const e in entity.elements) { + if (entity.elements[e].key) keys.push(e) + } + + let cuid = false + if (entity.elements.ID) { + const ID = entity.elements.ID + cuid = ID.key && ID.type === 'cds.UUID' + if (!data.ID && cuid) { + data.ID = '00000000-0000-0000-0000-000000000000' + } + } + + // Postgres requires the transaction to be reset: + // once a query in the transaction throws, the transaction is poisoned, + // making all follow-up queries throw. + // This includes COMMIT, so all previously successful changes are lost + let tx = await cds.tx() + const commit = async () => { + await tx.commit() + tx = await cds.tx() + } + + try { + await tx.run(cds.ql.DELETE.from(table)) + try { + await tx.run(cds.ql[type](data).into(table)) + } catch (e) { + if (throws === false) throw e + // Check for error test cases + assert.equal(e.message, throws, 'Ensure that the correct error message is being thrown.') + return + } + + await commit() + + // Execute the query an extra time if the entity has an ID key column + if (cuid) { + let error + try { + await tx.run(cds.ql[type](data).into(table)) + if (type === 'INSERT') error = new Error('Ensure that INSERT queries fail when executed twice') + } catch (e) { + // Ensure that UPSERT does not throw when executed twice + if (type === 'UPSERT') throw e + } + await commit() + + try { + const keysOnly = keys.reduce((l, c) => { l[c] = data[c]; return l }, {}) + await tx.run(cds.ql[type](keysOnly).into(table)) + if (type === 'INSERT') error = new Error('Ensure that INSERT queries fail when executed twice') + } catch (e) { + // Ensure that UPSERT does not throw when executed twice + if (type === 'UPSERT') throw e + } + + if (error) throw error + + await commit() + } + + if (throws !== false) + assert.equal('resolved', throws, 'Ensure that the correct error message is being thrown.') + + const columns = [] + for (let col in entity.elements) { + columns.push({ ref: [col] }) + } + + // Extract data set + const sel = await tx.run({ + SELECT: { + from: { ref: [table] }, + columns + }, + }) + + // TODO: Can we expect all databases to respond in insert order ?
+ const result = sel[sel.length - 1] + + let checks = 0 + for (const k in expect) { + const msg = `Ensure that the Database echoes correct data back, property ${k} does not match expected result.` + if (result[k] instanceof Readable && expect[k] instanceof Readable) { + result[k] = await buffer(result[k]) + expect[k] = await buffer(expect[k]) + } + if (result[k] instanceof Buffer && expect[k] instanceof Buffer) { + assert.equal(result[k].compare(expect[k]), 0, `${msg} (Buffer contents are different)`) + } else if (expect[k] instanceof RegExp) { + assert.match(result[k], expect[k], msg) + } else if (typeof expect[k] === 'object' && expect[k]) { + assert.deepEqual(result[k], expect[k], msg) + } else { + assert.strictEqual(result[k], expect[k], msg) + } + checks++ + } + assert.notEqual(checks, 0, 'Ensure that the test has expectations') + } finally { + await tx.commit() + } +} + describe('CREATE', () => { // TODO: reference to ./definitions.test.js @@ -25,7 +185,11 @@ describe('CREATE', () => { // Load model before test suite to generate test suite from model definition const model = cds.load(__dirname + '/resources/db', { sync: true }) - const literals = Object.keys(model.definitions).filter(n => model.definitions[n].kind === 'entity') + const literals = Object.keys(model.definitions) + .filter(n => + n.indexOf('sap.') !== 0 && // Skip all entities in sap namespace + model.definitions[n].kind === 'entity' + ) describe('custom entites', () => { const entityName = 'custom.entity' @@ -82,7 +246,7 @@ describe('CREATE', () => { const path = table.split('.') const type = path[path.length - 1] const entity = model.definitions[table] - const desc = !only.length || only.includes(type) ? describe : describe.skip + const desc = !only.length || only.includes(type) ? describe : () => {} if (entity.query) return // Skip complex view as cqn4sql does not allow union views desc(`${entity.projection ? 'View' : 'Type'}: ${type}`, () => { @@ -173,92 +337,16 @@ describe('CREATE', () => { beforeAll(() => deploy) data.forEach(obj => { - test( - JSON.stringify( - obj, - (_, b) => { - if (Buffer.isBuffer(b) || b?.type === 'Buffer') { - return `Buffer(${b.byteLength || b.data?.length})` - } - if (b instanceof Readable) { - return 'Readable' - } - if (typeof b === 'function') return `${b}` - return b - }, - Object.keys(obj).length === 1 ? undefined : '\t ', // TODO: adjust for new reporter rendering - ) - // Super hacky way to make the jest report look nice - .replace(/\n}/g, '\n\t }'), - async () => { - const data = {} - const transforms = {} - let throws = false - - Object.keys(obj).forEach(k => { - const cur = obj[k] - const val = typeof cur === 'function' ?
cur() : cur - if (k === '!') { - throws = obj[k] - return - } - if (k[0] === '=') { - transforms[k.substring(1)] = val - } else { - data[k] = val - } - }) - - const expect = Object.assign({}, data, transforms) - - await db.run(async tx => { - try { - await tx.run(cds.ql.INSERT(data).into(table)) - } catch (e) { - if (throws === false) throw e - // Check for error test cases - assert.match(e.message, throws, 'Ensure that the correct error message is being thrown.') - return - } - - if (throws !== false) - assert.equal('did_not_throw', throws, 'Ensure that the correct error message is being thrown.') - - const columns = [] - for (let col in entity.elements) { - columns.push({ ref: [col] }) - } - - // Extract data set - const sel = await tx.run({ - SELECT: { - from: { ref: [table] }, - columns - }, - }) - - // TODO: Can we expect all Database to respond in insert order ? - const result = sel[sel.length - 1] - - await Promise.all(Object.keys(expect).map(async k => { - const msg = `Ensure that the Database echos correct data back, property ${k} does not match expected result.` - if (result[k] instanceof Readable && expect[k] instanceof Readable) { - result[k] = await buffer(result[k]) - expect[k] = await buffer(expect[k]) - } - if (result[k] instanceof Buffer && expect[k] instanceof Buffer) { - assert.equal(result[k].compare(expect[k]), 0, `${msg} (Buffer contents are different)`) - } else if (expect[k] instanceof RegExp) { - assert.match(result[k], expect[k], msg) - } else if (typeof expect[k] === 'object' && expect[k]) { - assert.deepEqual(result[k], expect[k], msg) - } else { - assert.strictEqual(result[k], expect[k], msg) - } - })) - }) - }, - ) + test(toTitle(obj), dataTest.bind(null, entity, table, 'INSERT', obj)) + }) + }) + + describe('UPSERT', () => { + // Prevent UPSERT tests from running when CREATE fails + beforeAll(() => deploy) + + data.forEach(obj => { + test(toTitle(obj), dataTest.bind(null, entity, table, 'UPSERT', obj)) }) }) } catch (e) { diff --git a/test/compliance/UPSERT.test.js b/test/compliance/UPSERT.test.js new file mode 100644 index 000000000..3f2e848fd --- /dev/null +++ b/test/compliance/UPSERT.test.js @@ -0,0 +1,53 @@ +const cds = require('../cds.js') + +describe('UPSERT', () => { + const { data, expect } = cds.test(__dirname + '/resources') + data.autoIsolation(true) + + describe('into', () => { + test('Apply default for keys before join to existing data', async () => { + const { keys } = cds.entities('basic.common') + // HXE cannot handle the default key logic + await INSERT([/*{ id: 0, data: 'insert' },*/ { id: 0, default: 'overwritten', data: 'insert' }]).into(keys) + const insert = await SELECT.from(keys) + + await UPSERT([/*{ id: 0, data: 'upsert' },*/ { id: 0, default: 'overwritten', data: 'upsert' }]).into(keys) + const upsert = await SELECT.from(keys) + + for (let i = 0; i < insert.length; i++) { + const ins = insert[i] + const ups = upsert[i] + expect(ups.id).to.eq(ins.id) + expect(ups.default).to.eq(ins.default) + expect(ins.data).to.eq('insert') + expect(ups.data).to.eq('upsert') + } + }) + }) + + describe('entries', () => { + test.skip('missing', () => { + throw new Error('not supported') + }) + }) + + describe('columns', () => { + describe('values', () => { + test.skip('missing', () => { + throw new Error('not supported') + }) + }) + + describe('rows', () => { + test.skip('missing', () => { + throw new Error('not supported') + }) + }) + }) + + describe('as', () => { + test.skip('missing', () => { + throw new Error('not supported') + }) + }) +}) diff
--git a/test/compliance/index.js b/test/compliance/index.js index 075335791..9b1832574 100644 --- a/test/compliance/index.js +++ b/test/compliance/index.js @@ -2,6 +2,7 @@ require('./CREATE.test') require('./DELETE.test') require('./DROP.test') require('./INSERT.test') +require('./UPSERT.test') require('./SELECT.test') require('./UPDATE.test') require('./definitions.test') @@ -9,3 +10,4 @@ require('./functions.test') require('./literals.test') require('./timestamps.test') require('./api.test') +require('./keywords.test') diff --git a/test/compliance/resources/db/basic/common.cds b/test/compliance/resources/db/basic/common.cds new file mode 100644 index 000000000..3e8d37f6a --- /dev/null +++ b/test/compliance/resources/db/basic/common.cds @@ -0,0 +1,46 @@ +namespace basic.common; + +using { + cuid as _cuid, + managed as _managed, + temporal as _temporal +} from '@sap/cds/common'; + +entity cuid : _cuid {} +entity managed : _cuid, _managed {} +entity temporal : _cuid, _temporal {} + +// Set default values for all literals from ./literals.cds +entity ![default] : _cuid { + bool : Boolean default false; + integer8 : UInt8 default 8; + integer16 : Int16 default 9; + integer32 : Int32 default 10; + integer64 : Int64 default 11; + double : cds.Double default 1.1; + float : cds.Decimal default 1.1; + decimal : cds.Decimal(5, 4) default 1.11111; + string : String default 'default'; + char : String(1) default 'd'; + short : String(10) default 'default'; + medium : String(100) default 'default'; + large : String(5000) default 'default'; + // HANA does not support default values on BLOB types + // default value cannot be created on column of data type NCLOB: BLOB + // blob : LargeString default 'default'; + date : Date default '1970-01-01'; + time : Time default '01:02:03'; + dateTime : DateTime default '1970-01-01T01:02:03Z'; + timestamp : Timestamp default '1970-01-01T01:02:03.123456789Z'; +// Binary default values don't make sense, while technically possible +// binary : Binary default 'YmluYXJ5'; // base64 encoded 'binary'; +// largebinary : LargeBinary default 'YmluYXJ5'; // base64 encoded 'binary'; +// Vector default values probably also don't make sense +// vector : Vector default '[1.0,0.5,0.0,...]'; +} + +entity keys { + key id : Integer; + key default : String default 'defaulted'; + data : String; +} diff --git a/test/compliance/resources/db/basic/common/basic.common.default.js b/test/compliance/resources/db/basic/common/basic.common.default.js new file mode 100644 index 000000000..338865eea --- /dev/null +++ b/test/compliance/resources/db/basic/common/basic.common.default.js @@ -0,0 +1,38 @@ +const dstring = size => ({ d: 'default'.slice(0, size), o: 'not default'.slice(0, size) }) + +const columns = { + bool: { d: false, o: true }, + integer8: { d: 8, o: 18 }, + integer16: { d: 9, o: 19 }, + integer32: { d: 10, o: 20 }, + integer64: { d: '11', o: '21' }, + double: { d: 1.1, o: 2.2 }, + float: { d: '1.1', o: '2.2' }, + decimal: { d: '1.1111', o: '2.1111' }, + string: dstring(255), + char: dstring(1), + short: dstring(10), + medium: dstring(100), + large: dstring(5000), + // blob: dstring(5001), + date: { d: '1970-01-01', o: '2000-01-01' }, + time: { d: '01:02:03', o: '21:02:03' }, + dateTime: { d: '1970-01-01T01:02:03Z', o: '2000-01-01T21:02:03Z' }, + timestamp: { d: '1970-01-01T01:02:03.123Z', o: '2000-01-01T21:02:03.123Z' }, + // Binary default values don't make sense,
while technically possible + // binary: { d: Buffer.from('binary'), o: Buffer.from('...') }, + // largebinary: { d: Buffer.from('binary'), o: Buffer.from('...') }, +} + +module.exports = Object.keys(columns).map(c => { + const vals = columns[c] + return [{ + [c]: null // Make sure that null still works + }, { + [c]: vals.o // Make sure that overwriting the default works + }, { + [c]: vals.d // Make sure that the default can also be written + }, { + [`=${c}`]: vals.d // Make sure when excluded in the data that default is returned + }] +}).flat() \ No newline at end of file diff --git a/test/compliance/resources/db/basic/index.cds b/test/compliance/resources/db/basic/index.cds index dc9389f8c..c24beaa5b 100644 --- a/test/compliance/resources/db/basic/index.cds +++ b/test/compliance/resources/db/basic/index.cds @@ -2,3 +2,4 @@ namespace basic; using from './projection'; using from './literals'; +using from './common'; diff --git a/test/compliance/resources/db/basic/literals.cds b/test/compliance/resources/db/basic/literals.cds index 41e1943c0..fd6aa5ed6 100644 --- a/test/compliance/resources/db/basic/literals.cds +++ b/test/compliance/resources/db/basic/literals.cds @@ -1,66 +1,66 @@ namespace basic.literals; entity globals { - bool : Boolean; + bool : Boolean; } entity uuid { - uuid : UUID; + uuid : UUID; } entity number { - integer8 : UInt8; - integer16 : Int16; - integer32 : Int32; - integer64 : Int64; - double : cds.Double; - // Decimal: (p,s) p = 1 - 38, s = 0 - p - // p = number of total decimal digits - // s = number of decimal digits after decimal seperator - float : cds.Decimal; // implied float - decimal : cds.Decimal(5, 4); // 𝝅 -> 3.1415 + integer8 : UInt8; + integer16 : Int16; + integer32 : Int32; + integer64 : Int64; + double : cds.Double; + // Decimal: (p,s) p = 1 - 38, s = 0 - p + // p = number of total decimal digits + // s = number of decimal digits after decimal separator + float : cds.Decimal; // implied float + decimal : cds.Decimal(5, 4); // 𝝅 -> 3.1415 } // NVARCHAR: Unicode string between 1 and 5000 length (default: 5000) entity string { - string : String; - char : String(1); - short : String(10); - medium : String(100); - large : String(5000); // TODO: should be broken on HANA || switch to Binary - blob : LargeString; // NCLOB: Unicode binary (max size 2 GiB) + string : String; + char : String(1); + short : String(10); + medium : String(100); + large : String(5000); // TODO: should be broken on HANA || switch to Binary + blob : LargeString; // NCLOB: Unicode binary (max size 2 GiB) } // ISO Date format (1970-01-01) entity date { - date : Date; + date : Date; } // ISO Time format (00:00:00) entity time { - time : Time; + time : Time; } // ISO DateTime format (1970-1-1T00:00:00Z) entity dateTime { - dateTime : DateTime; + dateTime : DateTime; } // TODO: Verify that everyone agrees to only allow UTC timestamps // ISO timestamp format (1970-1-1T00:00:00.000Z) // HANA timestamp format (1970-1-1T00:00:00.0000000Z) entity timestamp { - timestamp : Timestamp; + timestamp : Timestamp; } entity array { - string : array of String; - integer : array of Integer; + string : array of String; + integer : array of Integer; } entity binaries { - binary : Binary; - largebinary : LargeBinary; + binary : Binary; + largebinary : LargeBinary; } entity defaults { diff --git a/test/compliance/resources/db/basic/literals/basic.literals.binaries.js b/test/compliance/resources/db/basic/literals/basic.literals.binaries.js index 52684ad29..7b8a18c59 100644 ---
a/test/compliance/resources/db/basic/literals/basic.literals.binaries.js +++ b/test/compliance/resources/db/basic/literals/basic.literals.binaries.js @@ -1,6 +1,6 @@ -const { Readable } = require('stream') +const { Readable } = require('stream') -const generator = function*() { +const generator = function* () { yield Buffer.from('Simple Large Binary') } @@ -18,14 +18,14 @@ module.exports = [ }, { largebinary: Buffer.from('Simple Large Binary'), - '=largebinary': () => Readable.from(generator()) + '=largebinary': () => Readable.from(generator()) }, { largebinary: Buffer.from('Simple Large Binary').toString('base64'), - '=largebinary': () => Readable.from(generator()) + '=largebinary': () => Readable.from(generator()) }, { - largebinary: Readable.from(generator()), - '=largebinary': () => Readable.from(generator()) + largebinary: () => Readable.from(generator()), + '=largebinary': () => Readable.from(generator()) } ] diff --git a/test/compliance/resources/db/basic/literals/basic.literals.number.js b/test/compliance/resources/db/basic/literals/basic.literals.number.js index cd25e4f67..d942b63bc 100644 --- a/test/compliance/resources/db/basic/literals/basic.literals.number.js +++ b/test/compliance/resources/db/basic/literals/basic.literals.number.js @@ -55,6 +55,17 @@ module.exports = [ { integer64: '-9223372036854775808', }, + { + decimal: null + }, + { + decimal: 0, + '=decimal': '0.0000' + }, + { + decimal: 1, + '=decimal': '1.0000' + }, { decimal: '3.14153', '=decimal': '3.1415' diff --git a/test/compliance/resources/db/hana/literals/edge.hana.literals.HANA_ST.js b/test/compliance/resources/db/hana/literals/edge.hana.literals.HANA_ST.js index fd600f48f..7e8f6e138 100644 --- a/test/compliance/resources/db/hana/literals/edge.hana.literals.HANA_ST.js +++ b/test/compliance/resources/db/hana/literals/edge.hana.literals.HANA_ST.js @@ -1,17 +1,17 @@ // TODO: Add HANA TYPE EXPECTATIONS -module.exports = [ +module.exports = [/* { point: null, - },/* + }, { point: 'POINT(1 1)', }, { point: '0101000000000000000000F03F000000000000F03F', - },*/ + }, { // GeoJSON specification: https://www.rfc-editor.org/rfc/rfc7946 point: '{"x":1,"y":1,"spatialReference":{"wkid":4326}}', '=point': /\{\W*"x"\W*:\W*1\W*,\W*"y"\W*:\W*1(,.*)?\}/, - }, + },*/ ] diff --git a/test/scenarios/bookshop/update.test.js b/test/scenarios/bookshop/update.test.js index f784b9ca9..131967b60 100644 --- a/test/scenarios/bookshop/update.test.js +++ b/test/scenarios/bookshop/update.test.js @@ -145,6 +145,40 @@ describe('Bookshop - Update', () => { expect(afterUpdate[0]).to.have.property('foo').that.equals(42) }) + test('Upsert behavior validation', async () => { + const { Books } = cds.entities('sap.capire.bookshop') + + const entries = { + ID: 482, + descr: 'CREATED' + } + + const read = SELECT.one.from(Books).where(`ID = `, entries.ID) + + await UPSERT.into(Books).entries(entries) + const onInsert = await read.clone() + + entries.descr = 'UPDATED' + await UPSERT.into(Books).entries(entries) + + const onUpdate = await read.clone() + + // Ensure that the @cds.on.insert and @cds.on.update are being applied + expect(onInsert.createdAt).to.be.not.undefined + expect(onInsert.modifiedAt).to.be.not.undefined + expect(onUpdate.createdAt).to.be.not.undefined + expect(onUpdate.modifiedAt).to.be.not.undefined + + // Ensure that the @cds.on.insert and @cds.on.update are correctly applied + expect(onInsert.createdAt).to.be.eq(onInsert.modifiedAt) + expect(onInsert.createdAt).to.be.eq(onUpdate.createdAt) + 
expect(onInsert.modifiedAt).to.be.not.eq(onUpdate.modifiedAt) + + // Ensure that the actual update happened + expect(onInsert.descr).to.be.eq('CREATED') + expect(onUpdate.descr).to.be.eq('UPDATED') + }) + test('Upsert draft enabled entity', async () => { const res = await UPSERT.into('DraftService.DraftEnabledBooks').entries({ ID: 42, title: 'Foo' }) expect(res).to.equal(1)