Skip to content

Commit

Permalink
Revert "Experimental native deep insert"
Browse files Browse the repository at this point in the history
This reverts commit 124d8b7.
  • Loading branch information
BobdenOs committed Oct 18, 2024
1 parent 0704a64 commit a128494
Show file tree
Hide file tree
Showing 3 changed files with 46 additions and 107 deletions.
2 changes: 1 addition & 1 deletion db-service/lib/SQLService.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ const BINARY_TYPES = {
class SQLService extends DatabaseService {
init() {
this.on(['INSERT', 'UPSERT', 'UPDATE'], require('./fill-in-keys')) // REVISIT should be replaced by correct input processing eventually
this.on(cds.env.features.HANA_DEEP_INSERT ? ['UPSERT', 'UPDATE'] : ['INSERT', 'UPSERT', 'UPDATE'], require('./deep-queries').onDeep)
this.on(['INSERT', 'UPSERT', 'UPDATE'], require('./deep-queries').onDeep)
if (cds.env.features.db_strict) {
this.before(['INSERT', 'UPSERT', 'UPDATE'], ({ query }) => {
const elements = query.target?.elements; if (!elements) return
Expand Down
15 changes: 7 additions & 8 deletions db-service/lib/cqn2sql.js
Original file line number Diff line number Diff line change
Expand Up @@ -1080,15 +1080,15 @@ class CQN2SQLRenderer {
const keys = ObjectKeys(elements).filter(e => elements[e].key)
const keyZero = keys[0] && this.quote(keys[0])

return [...columns, ...requiredColumns].map(({ name, sql, extract, as }) => {
return [...columns, ...requiredColumns].map(({ name, sql }) => {
const element = elements?.[name] || {}

const converter = a => element[_convertInput]?.(a, element) || a
let extract
if (!sql) {
({ sql, extract } = this.managed_extract(name, element, converter, as))
({ sql, extract } = this.managed_extract(name, element, converter))
} else {
sql = converter(sql)
extract ??= sql
extract = sql = converter(sql)
}
// if (sql[0] !== '$') sql = converter(sql, element)

Expand All @@ -1100,10 +1100,10 @@ class CQN2SQLRenderer {
if (onInsert) onInsert = this.expr(onInsert)
if (onUpdate) onUpdate = this.expr(onUpdate)

const qname = this.quote(as || name)
const qname = this.quote(name)

const insert = onInsert ? this.managed_default(as || name, converter(onInsert), sql) : sql
const update = onUpdate ? this.managed_default(as || name, converter(onUpdate), sql) : sql
const insert = onInsert ? this.managed_default(name, converter(onInsert), sql) : sql
const update = onUpdate ? this.managed_default(name, converter(onUpdate), sql) : sql
const upsert = keyZero && (
// upsert requires the keys to be provided for the existence join (default values optional)
element.key
Expand All @@ -1121,7 +1121,6 @@ class CQN2SQLRenderer {

return {
name, // Element name
as, // Output element name
sql, // Reference SQL
extract, // Source SQL
converter, // Converter logic
Expand Down
136 changes: 38 additions & 98 deletions hana/lib/HANAService.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@ const hanaKeywords = keywords.reduce((prev, curr) => {
const DEBUG = cds.debug('sql|db')
let HANAVERSION = 0

cds.env.features.HANA_DEEP_INSERT = true
/**
* @implements SQLService
*/
Expand Down Expand Up @@ -649,75 +648,35 @@ class HANAService extends SQLService {
return super.INSERT_entries(q)
}

const columns = ObjectKeys(elements)
.filter(c => c in elements && !elements[c].virtual && !elements[c].value && !elements[c].isAssociation)

const columns = elements
? ObjectKeys(elements).filter(c => c in elements && !elements[c].virtual && !elements[c].value && !elements[c].isAssociation)
: ObjectKeys(INSERT.entries[0])
this.columns = columns

let extractions, extraction = [], converter = []

const exists = `${this.quote('$$EXISTS$$')} NVARCHAR(2147483647) FORMAT JSON PATH '$'`
if (INSERT.into.ref.length > 1) {
extraction = ''
let closing = ''

for (let i = 1; i < INSERT.into.ref.length; i++) {
const comp = INSERT.into.ref[i]
const cols = []

for (const fk of comp._foreignKeys) {
if (!fk.fillChild) continue
if (fk.deep) { debugger }
if (i === INSERT.into.ref.length - 1) {
columns.splice(columns.findIndex(c => c === fk.childElement.name), 1)
cols.push({
name: fk.parentElement.name,
as: fk.childElement.name,
})
}
}

extractions = this.managed(cols, comp.parent.elements).slice(0, cols.length)
converter = [...converter, ...extractions]
const extractions = this.managed(columns.map(c => ({ name: c })), elements)

extraction = `${extraction}${extractions.map(c => c.extract)}${extractions.length ? ',' : ''} NESTED PATH '$.${comp.name}' COLUMNS(`
closing = `${closing})`
}
extractions = this.managed(columns.map(c => ({ name: c })), elements)
converter = [...converter, ...extractions]
// REVISIT: @cds.extension required
const extraction = extractions.map(c => c.extract)
const converter = extractions.map(c => c.insert)

converter = converter.map((c, i) => {
columns[i] = c.as || c.name
return c.insert
})
const _stream = entries => {
const stream = Readable.from(this.INSERT_entries_stream(entries, 'hex'), { objectMode: false })
stream._raw = entries
return stream
}

extraction = `${extraction}${exists}${extraction.length ? ',' : ''}${extractions.map(c => c.extract)}${closing}`
// HANA Express does not process large JSON documents
// The limit is somewhere between 64KB and 128KB
if (HANAVERSION <= 2) {
this.entries = INSERT.entries.map(e => (e instanceof Readable
? [e]
: [_stream([e])]))
} else {
extractions = this.managed(columns.map(c => ({ name: c })), elements)

// REVISIT: @cds.extension required
extraction = [exists, ...extractions.map(c => c.extract)]
converter = extractions.map(c => c.insert)

const _stream = entries => {
const stream = Readable.from(this.INSERT_entries_stream(entries, 'hex'), { objectMode: false })
stream._raw = entries
return stream
}

// HANA Express does not process large JSON documents
// The limit is somewhere between 64KB and 128KB
if (HANAVERSION <= 2) {
this.entries = INSERT.entries.map(e => (e instanceof Readable
? [e]
: [_stream([e])]))
} else {
this.entries = [[
INSERT.entries[0] instanceof Readable
? INSERT.entries[0]
: _stream(INSERT.entries)
]]
}
this.entries = [[
INSERT.entries[0] instanceof Readable
? INSERT.entries[0]
: _stream(INSERT.entries)
]]
}

// WITH SRC is used to force HANA to interpret the ? as a NCLOB allowing for streaming of the data
Expand All @@ -731,29 +690,10 @@ class HANAService extends SQLService {
// With the buffer table approach the data is processed in chunks of a configurable size
// Which allows even smaller HANA systems to process large datasets
// But the chunk size determines the maximum size of a single row
const withSrc = `WITH SRC AS (SELECT ? AS JSON FROM DUMMY UNION ALL SELECT TO_NCLOB(NULL) AS JSON FROM DUMMY)`
this.sqls ??= []
this.sqls.push(`INSERT INTO ${this.quote(entity)} (${columns.map(c =>
return (this.sql = `INSERT INTO ${this.quote(entity)} (${this.columns.map(c =>
this.quote(c),
)}) ${withSrc}
SELECT ${converter} FROM JSON_TABLE(SRC.JSON, '$' COLUMNS(${extraction}) ERROR ON ERROR) AS NEW WHERE ${this.quote('$$EXISTS$$')} IS NOT NULL`)

if (cds.env.features.HANA_DEEP_INSERT && q.target.compositions && INSERT.into.ref.length < 5) for (const comp of q.target.compositions) {
const deeper = cds.ql.clone(q)
deeper.target = comp._target
deeper.INSERT.into = { ref: [...q.INSERT.into.ref, comp] }
deeper.INSERT.entries = []
this.INSERT_entries(deeper)
}

return (this.sql = this.sqls.length === 1
? this.sqls[0]
: `DO (IN JSON BLOB => ?) BEGIN\n${this.sqls.map(
sql => sql
.replace(withSrc, '')
.replace('SRC.JSON', ':JSON')
).join(';\n')} ;END;`
)
)}) WITH SRC AS (SELECT ? AS JSON FROM DUMMY UNION ALL SELECT TO_NCLOB(NULL) AS JSON FROM DUMMY)
SELECT ${converter} FROM JSON_TABLE(SRC.JSON, '$' COLUMNS(${extraction}) ERROR ON ERROR) AS NEW`)
}

INSERT_rows(q) {
Expand Down Expand Up @@ -1041,31 +981,31 @@ SELECT ${mixing} FROM JSON_TABLE(SRC.JSON, '$' COLUMNS(${extraction})) AS NEW LE
list(list) {
const first = list.list[0]
// If the list only contains of lists it is replaced with a json function and a placeholder
if (this.values && first.list && !first.list.find(v => v.val == null)) {
const listMapped = []
if (this.values && first.list && !first.list.find(v => v.val == null)) {
const listMapped = []
for (let l of list.list) {
const obj = {}
for (let i = 0; i < l.list.length; i++) {
const obj ={}
for (let i = 0; i< l.list.length; i++) {
const c = l.list[i]
if (Buffer.isBuffer(c.val)) {
return super.list(list)
}
}
obj[`V${i}`] = c.val
}
listMapped.push(obj)
}
}
this.values.push(JSON.stringify(listMapped))
const extraction = first.list.map((v, i) => `"${i}" ${this.constructor.InsertTypeMap[typeof v.val]()} PATH '$.V${i}'`)
const extraction = first.list.map((v, i) => `"${i}" ${this.constructor.InsertTypeMap[typeof v.val]()} PATH '$.V${i}'`)
return `(SELECT * FROM JSON_TABLE(?, '$' COLUMNS(${extraction})))`
}
// If the list only contains of vals it is replaced with a json function and a placeholder
if (this.values && first.val != null) {
for (let c of list.list) {
if (Buffer.isBuffer(c.val)) {
return super.list(list)
}
}
}
const v = first
const v = first
const extraction = `"val" ${this.constructor.InsertTypeMap[typeof v.val]()} PATH '$.val'`
this.values.push(JSON.stringify(list.list))
return `(SELECT * FROM JSON_TABLE(?, '$' COLUMNS(${extraction})))`
Expand Down Expand Up @@ -1095,11 +1035,11 @@ SELECT ${mixing} FROM JSON_TABLE(SRC.JSON, '$' COLUMNS(${extraction})) AS NEW LE
)
}

managed_extract(name, element, converter, rename) {
managed_extract(name, element, converter) {
// TODO: test property names with single and double quotes
return {
extract: `${this.quote(rename || name)} ${this.insertType4(element)} PATH '$.${name}', ${this.quote('$.' + (rename || name))} NVARCHAR(2147483647) FORMAT JSON PATH '$.${name}'`,
sql: converter(`NEW.${this.quote(rename || name)}`),
extract: `${this.quote(name)} ${this.insertType4(element)} PATH '$.${name}', ${this.quote('$.' + name)} NVARCHAR(2147483647) FORMAT JSON PATH '$.${name}'`,
sql: converter(`NEW.${this.quote(name)}`),
}
}

Expand Down

0 comments on commit a128494

Please sign in to comment.