diff --git a/.build/build_and_prepare.ts b/.build/build_and_prepare.ts
new file mode 100644
index 00000000..bdb4ffdf
--- /dev/null
+++ b/.build/build_and_prepare.ts
@@ -0,0 +1,58 @@
+import { execSync } from 'child_process'
+import fs from 'fs'
+import * as process from 'process'
+;(() => {
+ const [tag] = process.argv.slice(2)
+ if (!tag) {
+ console.error(`Expected a tag as an argument`)
+ process.exit(1)
+ }
+
+ let packageName = ''
+ if (tag.endsWith('-browser')) {
+ packageName = 'client-browser'
+ } else if (tag.endsWith('-node')) {
+ packageName = 'client-node'
+ } else if (tag.endsWith('-common')) {
+ packageName = 'client-common'
+ } else {
+ console.error(`Provided tag ${tag} does not match any packages`)
+ process.exit(1)
+ }
+
+ fs.copyFileSync(`./packages/${packageName}/package.json`, './package.json')
+
+ const packageJson = require('../package.json')
+ const version = require(`../packages/${packageName}/src/version.ts`).default
+ console.log(`Current ${packageName} package version is: ${version}`)
+ packageJson.version = version
+
+ if (packageJson['dependencies']['@clickhouse/client-common']) {
+ const commonVersion =
+ require(`../packages/client-common/src/version.ts`).default
+ console.log(`Updating client-common dependency to ${commonVersion}`)
+ packageJson['dependencies']['@clickhouse/client-common'] = commonVersion
+ }
+
+ console.log('Updated package json:')
+ console.log(packageJson)
+
+ try {
+ execSync(`./.scripts/build.sh ${packageName}`, { cwd: process.cwd() })
+ } catch (err) {
+ console.error(err)
+ process.exit(1)
+ }
+
+ try {
+ fs.writeFileSync(
+ './package.json',
+ JSON.stringify(packageJson, null, 2) + '\n',
+ 'utf-8'
+ )
+ } catch (err) {
+ console.error(err)
+ process.exit(1)
+ }
+ process.exit(0)
+})()
diff --git a/.build/update_version.ts b/.build/update_version.ts
deleted file mode 100644
index a361db10..00000000
--- a/.build/update_version.ts
+++ /dev/null
@@ -1,20 +0,0 @@
-import version from '../src/version'
-import packageJson from '../package.json'
-import fs from 'fs'
-;(async () => {
- console.log(`Current package version is: ${version}`)
- packageJson.version = version
- console.log('Updated package json:')
- console.log(packageJson)
- try {
- fs.writeFileSync(
- './package.json',
- JSON.stringify(packageJson, null, 2) + '\n',
- 'utf-8'
- )
- } catch (err) {
- console.error(err)
- process.exit(1)
- }
- process.exit(0)
-})()
diff --git a/.docker/clickhouse/single_node/config.xml b/.docker/clickhouse/single_node/config.xml
index 3ef3abd5..d28f21e1 100644
--- a/.docker/clickhouse/single_node/config.xml
+++ b/.docker/clickhouse/single_node/config.xml
@@ -32,4 +32,23 @@
1000
+
+
+ Access-Control-Allow-Origin
+ *
+
+
+ Access-Control-Allow-Headers
+ origin, x-requested-with, content-type, authorization
+
+
+ Access-Control-Allow-Methods
+ POST, GET, OPTIONS
+
+
+ Access-Control-Max-Age
+ 86400
+
+
+
diff --git a/.eslintignore b/.eslintignore
new file mode 100644
index 00000000..bd862fdb
--- /dev/null
+++ b/.eslintignore
@@ -0,0 +1,3 @@
+dist
+node_modules
+webpack
diff --git a/.eslintrc.json b/.eslintrc.json
index 87ccabdf..feb32493 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -3,7 +3,7 @@
"parser": "@typescript-eslint/parser",
"parserOptions": {
"sourceType": "module",
- "project": ["./tsconfig.dev.json"]
+ "project": ["./tsconfig.all.json"]
},
"env": {
"node": true
@@ -25,10 +25,12 @@
},
"overrides": [
{
- "files": ["./__tests__/**/*.ts"],
+ "files": ["./**/__tests__/**/*.ts"],
"rules": {
"@typescript-eslint/no-explicit-any": "off",
- "@typescript-eslint/no-non-null-assertion": "off"
+ "@typescript-eslint/no-non-null-assertion": "off",
+ "@typescript-eslint/ban-ts-comment": "off",
+ "no-constant-condition": "off"
}
}
]
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index bc0ed6c0..41dac272 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -2,8 +2,14 @@
name: release
on:
workflow_dispatch:
- release:
- types: [created]
+ inputs:
+ version:
+ type: string
+ required: true
+ description: 'Version to release. Released package is based on the version suffix: -browser, -common, -node'
+# TODO: trigger on release, currently it's just manual dispatch
+# release:
+# types: [created]
jobs:
build:
runs-on: ubuntu-latest
@@ -15,9 +21,8 @@ jobs:
node-version: '16.x'
registry-url: 'https://registry.npmjs.org'
- run: npm i --ignore-scripts
- - name: Update package.json version
- run: NODE_OPTIONS="-r ts-node/register" node .build/update_version.ts
- - run: npm run build
- - run: npm publish
+ - name: Build package and prepare package.json
+ run: NODE_OPTIONS="-r ts-node/register" node .build/build_and_prepare.ts ${{ github.event.inputs.version }}
+ - run: npm publish --dry-run
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index abb28ab0..b112512d 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -2,15 +2,6 @@ name: 'tests'
on:
workflow_dispatch:
- inputs:
- push-coverage-report:
- type: choice
- required: true
- description: Push coverage
- options:
- - yes
- - no
- default: no
push:
branches:
- main
@@ -20,10 +11,8 @@ on:
- 'benchmarks/**'
- 'examples/**'
pull_request:
- branches:
- - main
paths-ignore:
- - 'README.md'
+ - '**/*.md'
- 'LICENSE'
- 'benchmarks/**'
- 'examples/**'
@@ -32,12 +21,12 @@ on:
- cron: '0 9 * * *'
jobs:
- build:
+ node-unit-tests:
runs-on: ubuntu-latest
strategy:
fail-fast: true
matrix:
- node: [ 16, 18, 20 ]
+ node: [16, 18, 20]
steps:
- uses: actions/checkout@main
@@ -60,16 +49,47 @@ jobs:
- name: Run unit tests
run: |
- npm run test:unit
+ npm run test:node:unit
- integration-tests-local-single-node:
- needs: build
+ browser-all-tests-local-single-node:
runs-on: ubuntu-latest
+ needs: node-unit-tests
strategy:
fail-fast: true
matrix:
- node: [ 16, 18, 20 ]
- clickhouse: [ head, latest ]
+ clickhouse: [head, latest]
+ steps:
+ - uses: actions/checkout@main
+
+ - name: Start ClickHouse (version - ${{ matrix.clickhouse }}) in Docker
+ uses: isbang/compose-action@v1.1.0
+ env:
+ CLICKHOUSE_VERSION: ${{ matrix.clickhouse }}
+ with:
+ compose-file: 'docker-compose.yml'
+ down-flags: '--volumes'
+
+ - name: Setup NodeJS
+ uses: actions/setup-node@v3
+ with:
+ node-version: 16
+
+ - name: Install dependencies
+ run: |
+ npm install
+
+ - name: Run all browser tests
+ run: |
+ npm run test:browser
+
+ node-integration-tests-local-single-node:
+ needs: node-unit-tests
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: true
+ matrix:
+ node: [16, 18, 20]
+ clickhouse: [head, latest]
steps:
- uses: actions/checkout@main
@@ -95,35 +115,27 @@ jobs:
run: |
sudo echo "127.0.0.1 server.clickhouseconnect.test" | sudo tee -a /etc/hosts
- # Includes TLS integration tests run
- # Will also run unit tests, but that's almost free.
- # Otherwise, we need to set up a separate job,
- # which will also run the integration tests for the second time,
- # and that's more time-consuming.
- - name: Run all tests
+ - name: Run integration tests
run: |
- npm t -- --coverage
+ npm run test:node:integration
- - name: Upload coverage report
- uses: actions/upload-artifact@v3
- with:
- name: coverage
- path: coverage
- retention-days: 1
+ - name: Run TLS tests
+ run: |
+ npm run test:node:tls
- integration-tests-local-cluster:
- needs: build
+ node-integration-tests-local-cluster:
+ needs: node-unit-tests
runs-on: ubuntu-latest
strategy:
fail-fast: true
matrix:
- node: [ 16, 18, 20 ]
- clickhouse: [ head, latest ]
+ node: [16, 18, 20]
+ clickhouse: [head, latest]
steps:
- uses: actions/checkout@main
- - name: Start ClickHouse (version - ${{ matrix.clickhouse }}) in Docker
+ - name: Start ClickHouse cluster (version - ${{ matrix.clickhouse }}) in Docker
uses: isbang/compose-action@v1.1.0
env:
CLICKHOUSE_VERSION: ${{ matrix.clickhouse }}
@@ -142,15 +154,46 @@ jobs:
- name: Run integration tests
run: |
- npm run test:integration:local_cluster
+ npm run test:node:integration:local_cluster
- integration-tests-cloud:
- needs: build
+ browser-integration-tests-local-cluster:
runs-on: ubuntu-latest
+ needs: node-unit-tests
strategy:
fail-fast: true
matrix:
- node: [ 16, 18, 20 ]
+ clickhouse: [head, latest]
+ steps:
+ - uses: actions/checkout@main
+
+ - name: Start ClickHouse cluster (version - ${{ matrix.clickhouse }}) in Docker
+ uses: isbang/compose-action@v1.1.0
+ env:
+ CLICKHOUSE_VERSION: ${{ matrix.clickhouse }}
+ with:
+ compose-file: 'docker-compose.cluster.yml'
+ down-flags: '--volumes'
+
+ - name: Setup NodeJS
+ uses: actions/setup-node@v3
+ with:
+ node-version: 16
+
+ - name: Install dependencies
+ run: |
+ npm install
+
+ - name: Run all browser tests
+ run: |
+ npm run test:browser:integration:local_cluster
+
+ node-integration-tests-cloud:
+ needs: node-unit-tests
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: true
+ matrix:
+ node: [16, 18, 20]
steps:
- uses: actions/checkout@main
@@ -169,37 +212,27 @@ jobs:
CLICKHOUSE_CLOUD_HOST: ${{ secrets.INTEGRATIONS_TEAM_TESTS_CLOUD_HOST }}
CLICKHOUSE_CLOUD_PASSWORD: ${{ secrets.INTEGRATIONS_TEAM_TESTS_CLOUD_PASSWORD }}
run: |
- npm run test:integration:cloud
+ npm run test:node:integration:cloud
- upload-coverage-and-badge:
- if: github.ref == 'refs/heads/main' && github.event.inputs.push-coverage-report != 'no'
- needs:
- - integration-tests-local-single-node
- - integration-tests-local-cluster
- - integration-tests-cloud
+ browser-integration-tests-cloud:
+ needs: node-unit-tests
runs-on: ubuntu-latest
permissions: write-all
steps:
- - uses: actions/checkout@v2
- with:
- repository: ${{ github.event.pull_request.head.repo.full_name }}
- ref: ${{ github.event.pull_request.head.ref }}
+ - uses: actions/checkout@main
+
- name: Setup NodeJS
uses: actions/setup-node@v3
with:
node-version: 16
- - name: Download coverage report
- uses: actions/download-artifact@v3
- with:
- name: coverage
- path: coverage
- - name: Install packages
- run: npm i -G make-coverage-badge
- - name: Generate badge
- run: npx make-coverage-badge
- - name: Make "Coverage" lowercase for style points
- run: sed -i 's/Coverage/coverage/g' coverage/badge.svg
- - uses: stefanzweifel/git-auto-commit-action@v4
- with:
- file_pattern: 'coverage'
- commit_message: '[skip ci] Update coverage report'
+
+ - name: Install dependencies
+ run: |
+ npm install
+
+ - name: Run integration tests
+ env:
+ CLICKHOUSE_CLOUD_HOST: ${{ secrets.INTEGRATIONS_TEAM_TESTS_CLOUD_HOST }}
+ CLICKHOUSE_CLOUD_PASSWORD: ${{ secrets.INTEGRATIONS_TEAM_TESTS_CLOUD_PASSWORD }}
+ run: |
+ npm run test:browser:integration:cloud
diff --git a/.gitignore b/.gitignore
index 1af59cc9..c3ebb5bb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,3 +5,5 @@ node_modules
benchmarks/leaks/input
*.tgz
.npmrc
+webpack
+out
diff --git a/.scripts/build.sh b/.scripts/build.sh
new file mode 100755
index 00000000..84177d53
--- /dev/null
+++ b/.scripts/build.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+rm -rf out dist
+tsc
+mkdir -p dist
+mv out/$1/src/* dist/
diff --git a/.scripts/jasmine.sh b/.scripts/jasmine.sh
new file mode 100755
index 00000000..dca0989e
--- /dev/null
+++ b/.scripts/jasmine.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+ts-node -r tsconfig-paths/register --project=tsconfig.dev.json node_modules/jasmine/bin/jasmine --config=$1
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 588b8a14..c61389ce 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,12 +1,42 @@
+## 0.2.0-beta1 (browser support)
+
+Introduces browser client (using native [fetch](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API)
+and [WebStream](https://developer.mozilla.org/en-US/docs/Web/API/Streams_API) APIs)
+with no Node.js modules in the common interfaces. No polyfills are required.
+
+It is now possible to implement new custom connections on top of `@clickhouse/client-common`.
+
+The client was refactored into three packages:
+
+- `@clickhouse/client-common`: all possible platform-independent code, types and interfaces
+- `@clickhouse/client-browser`: new "browser" (or non-Node.js env) connection, uses native fetch.
+- `@clickhouse/client`: Node.js connection as it was before.
+
+### Node.js client breaking changes
+
+- Log level configuration parameter is now explicit instead of `CLICKHOUSE_LOG_LEVEL` environment variable.
+ Default is `OFF`.
+- `query` return type signature changed to `BaseResultSet` (no functional changes)
+- `exec` return type signature changed to `ExecResult` (no functional changes)
+- `insert` params argument type changed to `InsertParams` (no functional changes)
+- Experimental `schema` module is removed
+
+### Browser client known limitations
+
+- Streaming for select queries works, but it is disabled for inserts (on the type level as well).
+- KeepAlive is disabled and not configurable yet.
+- Request compression is disabled and ignored.
+- No logging support yet.
+
## 0.1.1
## New features
-* Expired socket detection on the client side when using Keep-Alive. If a potentially expired socket is detected,
-and retry is enabled in the configuration, both socket and request will be immediately destroyed (before sending the data),
-and the client will recreate the request. See `ClickHouseClientConfigOptions.keep_alive` for more details. Disabled by default.
-* Allow disabling Keep-Alive feature entirely.
-* `TRACE` log level.
+- Expired socket detection on the client side when using Keep-Alive. If a potentially expired socket is detected,
+ and retry is enabled in the configuration, both socket and request will be immediately destroyed (before sending the data),
+ and the client will recreate the request. See `ClickHouseClientConfigOptions.keep_alive` for more details. Disabled by default.
+- Allow disabling Keep-Alive feature entirely.
+- `TRACE` log level.
## Examples
@@ -39,14 +69,14 @@ const client = createClient({
## Breaking changes
-* `connect_timeout` client setting is removed, as it was unused in the code.
+- `connect_timeout` client setting is removed, as it was unused in the code.
## New features
-* `command` method is introduced as an alternative to `exec`.
-`command` does not expect user to consume the response stream, and it is destroyed immediately.
-Essentially, this is a shortcut to `exec` that destroys the stream under the hood.
-Consider using `command` instead of `exec` for DDLs and other custom commands which do not provide any valuable output.
+- `command` method is introduced as an alternative to `exec`.
+ `command` does not expect user to consume the response stream, and it is destroyed immediately.
+ Essentially, this is a shortcut to `exec` that destroys the stream under the hood.
+ Consider using `command` instead of `exec` for DDLs and other custom commands which do not provide any valuable output.
Example:
@@ -55,7 +85,9 @@ Example:
await client.exec('CREATE TABLE foo (id String) ENGINE Memory')
// correct: stream does not contain any information and just destroyed
-const { stream } = await client.exec('CREATE TABLE foo (id String) ENGINE Memory')
+const { stream } = await client.exec(
+ 'CREATE TABLE foo (id String) ENGINE Memory'
+)
stream.destroy()
// correct: same as exec + stream.destroy()
@@ -64,80 +96,102 @@ await client.command('CREATE TABLE foo (id String) ENGINE Memory')
### Bug fixes
-* Fixed delays on subsequent requests after calling `insert` that happened due to unclosed stream instance when using low number of `max_open_connections`. See [#161](https://github.com/ClickHouse/clickhouse-js/issues/161) for more details.
-* Request timeouts internal logic rework (see [#168](https://github.com/ClickHouse/clickhouse-js/pull/168))
+- Fixed delays on subsequent requests after calling `insert` that happened due to unclosed stream instance when using low number of `max_open_connections`. See [#161](https://github.com/ClickHouse/clickhouse-js/issues/161) for more details.
+- Request timeouts internal logic rework (see [#168](https://github.com/ClickHouse/clickhouse-js/pull/168))
## 0.0.16
-* Fix NULL parameter binding.
-As HTTP interface expects `\N` instead of `'NULL'` string, it is now correctly handled for both `null`
-and _explicitly_ `undefined` parameters. See the [test scenarios](https://github.com/ClickHouse/clickhouse-js/blob/f1500e188600d85ddd5ee7d2a80846071c8cf23e/__tests__/integration/select_query_binding.test.ts#L273-L303) for more details.
+
+- Fix NULL parameter binding.
+ As HTTP interface expects `\N` instead of `'NULL'` string, it is now correctly handled for both `null`
+ and _explicitly_ `undefined` parameters. See the [test scenarios](https://github.com/ClickHouse/clickhouse-js/blob/f1500e188600d85ddd5ee7d2a80846071c8cf23e/__tests__/integration/select_query_binding.test.ts#L273-L303) for more details.
## 0.0.15
### Bug fixes
-* Fix Node.JS 19.x/20.x timeout error (@olexiyb)
+
+- Fix Node.JS 19.x/20.x timeout error (@olexiyb)
## 0.0.14
### New features
-* Added support for `JSONStrings`, `JSONCompact`, `JSONCompactStrings`, `JSONColumnsWithMetadata` formats (@andrewzolotukhin).
+
+- Added support for `JSONStrings`, `JSONCompact`, `JSONCompactStrings`, `JSONColumnsWithMetadata` formats (@andrewzolotukhin).
## 0.0.13
### New features
-* `query_id` can be now overridden for all main client's methods: `query`, `exec`, `insert`.
+
+- `query_id` can be now overridden for all main client's methods: `query`, `exec`, `insert`.
## 0.0.12
### New features
-* `ResultSet.query_id` contains a unique query identifier that might be useful for retrieving query metrics from `system.query_log`
-* `User-Agent` HTTP header is set according to the [language client spec](https://docs.google.com/document/d/1924Dvy79KXIhfqKpi1EBVY3133pIdoMwgCQtZ-uhEKs/edit#heading=h.ah33hoz5xei2).
-For example, for client version 0.0.12 and Node.js runtime v19.0.4 on Linux platform, it will be `clickhouse-js/0.0.12 (lv:nodejs/19.0.4; os:linux)`.
-If `ClickHouseClientConfigOptions.application` is set, it will be prepended to the generated `User-Agent`.
+
+- `ResultSet.query_id` contains a unique query identifier that might be useful for retrieving query metrics from `system.query_log`
+- `User-Agent` HTTP header is set according to the [language client spec](https://docs.google.com/document/d/1924Dvy79KXIhfqKpi1EBVY3133pIdoMwgCQtZ-uhEKs/edit#heading=h.ah33hoz5xei2).
+ For example, for client version 0.0.12 and Node.js runtime v19.0.4 on Linux platform, it will be `clickhouse-js/0.0.12 (lv:nodejs/19.0.4; os:linux)`.
+ If `ClickHouseClientConfigOptions.application` is set, it will be prepended to the generated `User-Agent`.
### Breaking changes
-* `client.insert` now returns `{ query_id: string }` instead of `void`
-* `client.exec` now returns `{ stream: Stream.Readable, query_id: string }` instead of just `Stream.Readable`
+
+- `client.insert` now returns `{ query_id: string }` instead of `void`
+- `client.exec` now returns `{ stream: Stream.Readable, query_id: string }` instead of just `Stream.Readable`
## 0.0.11, 2022-12-08
+
### Breaking changes
-* `log.enabled` flag was removed from the client configuration.
-* Use `CLICKHOUSE_LOG_LEVEL` environment variable instead. Possible values: `OFF`, `TRACE`, `DEBUG`, `INFO`, `WARN`, `ERROR`.
-Currently, there are only debug messages, but we will log more in the future.
+
+- `log.enabled` flag was removed from the client configuration.
+- Use `CLICKHOUSE_LOG_LEVEL` environment variable instead. Possible values: `OFF`, `TRACE`, `DEBUG`, `INFO`, `WARN`, `ERROR`.
+ Currently, there are only debug messages, but we will log more in the future.
For more details, see PR [#110](https://github.com/ClickHouse/clickhouse-js/pull/110)
## 0.0.10, 2022-11-14
+
### New features
+
- Remove request listeners synchronously.
-[#123](https://github.com/ClickHouse/clickhouse-js/issues/123)
+ [#123](https://github.com/ClickHouse/clickhouse-js/issues/123)
## 0.0.9, 2022-10-25
+
### New features
+
- Added ClickHouse session_id support.
-[#121](https://github.com/ClickHouse/clickhouse-js/pull/121)
+ [#121](https://github.com/ClickHouse/clickhouse-js/pull/121)
## 0.0.8, 2022-10-18
+
### New features
+
- Added SSL/TLS support (basic and mutual).
-[#52](https://github.com/ClickHouse/clickhouse-js/issues/52)
+ [#52](https://github.com/ClickHouse/clickhouse-js/issues/52)
## 0.0.7, 2022-10-18
+
### Bug fixes
+
- Allow semicolons in select clause.
-[#116](https://github.com/ClickHouse/clickhouse-js/issues/116)
+ [#116](https://github.com/ClickHouse/clickhouse-js/issues/116)
## 0.0.6, 2022-10-07
+
### New features
+
- Add JSONObjectEachRow input/output and JSON input formats.
-[#113](https://github.com/ClickHouse/clickhouse-js/pull/113)
+ [#113](https://github.com/ClickHouse/clickhouse-js/pull/113)
## 0.0.5, 2022-10-04
+
### Breaking changes
- - Rows abstraction was renamed to ResultSet.
- - now, every iteration over `ResultSet.stream()` yields `Row[]` instead of a single `Row`.
-Please check out [an example](https://github.com/ClickHouse/clickhouse-js/blob/c86c31dada8f4845cd4e6843645177c99bc53a9d/examples/select_streaming_on_data.ts)
-and [this PR](https://github.com/ClickHouse/clickhouse-js/pull/109) for more details.
-These changes allowed us to significantly reduce overhead on select result set streaming.
+
+- Rows abstraction was renamed to ResultSet.
+- now, every iteration over `ResultSet.stream()` yields `Row[]` instead of a single `Row`.
+ Please check out [an example](https://github.com/ClickHouse/clickhouse-js/blob/c86c31dada8f4845cd4e6843645177c99bc53a9d/examples/select_streaming_on_data.ts)
+ and [this PR](https://github.com/ClickHouse/clickhouse-js/pull/109) for more details.
+ These changes allowed us to significantly reduce overhead on select result set streaming.
+
### New features
+
- [split2](https://www.npmjs.com/package/split2) is no longer a package dependency.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index c0c1f029..5933971d 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,21 +1,26 @@
## Getting started
+
ClickHouse js client is an open-source project,
and we welcome any contributions from the community.
Please share your ideas, contribute to the codebase,
and help us maintain up-to-date documentation.
### Set up environment
+
You have installed:
+
- a compatible LTS version of nodejs: `v14.x`, `v16.x` or `v18.x`
- NPM >= `6.x`
### Create a fork of the repository and clone it
+
```bash
git clone https://github.com/[YOUR_USERNAME]/clickhouse-js
cd clickhouse-js
```
### Install dependencies
+
```bash
npm i
```
@@ -29,13 +34,14 @@ sudo -- sh -c "echo 127.0.0.1 server.clickhouseconnect.test >> /etc/hosts"
```
## Testing
+
Whenever you add a new feature to the package or fix a bug,
we strongly encourage you to add appropriate tests to ensure
everyone in the community can safely benefit from your contribution.
### Tooling
-We use [jest](https://jestjs.io/) as a test runner.
-All the testing scripts are run with `jest-silent-reporter`.
+
+We use [Jasmine](https://jasmine.github.io/index.html) as a test runner.
### Type check and linting
@@ -43,6 +49,7 @@ All the testing scripts are run with `jest-silent-reporter`.
npm run typecheck
npm run lint:fix
```
+
We use [Husky](https://typicode.github.io/husky) for pre-commit hooks,
so it will be executed before every commit.
@@ -61,6 +68,7 @@ Integration tests use a running ClickHouse server in Docker or the Cloud.
`CLICKHOUSE_TEST_ENVIRONMENT` environment variable is used to switch between testing modes.
There are three possible options:
+
- `local_single_node` (default)
- `local_cluster`
- `cloud`
@@ -138,6 +146,7 @@ npm run test:integration:cloud
```
## CI
+
GitHub Actions should execute integration test jobs in parallel
after we complete the TypeScript type check, lint check, and unit tests.
@@ -149,9 +158,11 @@ Build + Unit tests
```
## Style Guide
+
We use an automatic code formatting with `prettier` and `eslint`.
## Test Coverage
+
We try to aim for at least 90% tests coverage.
Coverage is collected and pushed to the repo automatically
@@ -171,6 +182,7 @@ npm t -- --coverage
Please don't commit the coverage reports manually.
## Update package version
+
Don't forget to change the package version in `src/version.ts` before the release.
`release` GitHub action will pick it up and replace `package.json` version automatically.
diff --git a/README.md b/README.md
index 275f4e1f..49e17d89 100644
--- a/README.md
+++ b/README.md
@@ -1,22 +1,26 @@
-
ClickHouse Node.JS client
+ClickHouse JS client
-
-
-
## About
-Official Node.js client for [ClickHouse](https://clickhouse.com/), written purely in TypeScript, thoroughly tested with actual ClickHouse versions.
+Official JS client for [ClickHouse](https://clickhouse.com/), written purely in TypeScript,
+thoroughly tested with actual ClickHouse versions.
+
+The repository consists of three packages:
-It is focused on data streaming for both inserts and selects using standard [Node.js Streaming API](https://nodejs.org/docs/latest-v14.x/api/stream.html).
+- `@clickhouse/client` - Node.js client, built on top of [HTTP](https://nodejs.org/api/http.html)
+ and [Stream](https://nodejs.org/api/stream.html) APIs; supports streaming for both selects and inserts.
+- `@clickhouse/client-browser` - browser client, built on top of [Fetch](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API)
+ and [Web Streams](https://developer.mozilla.org/en-US/docs/Web/API/Streams_API) APIs; supports streaming for selects.
+- `@clickhouse/client-common` - shared common types and the base framework for building a custom client implementation.
## Documentation
diff --git a/__tests__/global.integration.ts b/__tests__/global.integration.ts
deleted file mode 100644
index 8971d548..00000000
--- a/__tests__/global.integration.ts
+++ /dev/null
@@ -1 +0,0 @@
-export const TestDatabaseEnvKey = 'CLICKHOUSE_TEST_DATABASE'
diff --git a/__tests__/integration/abort_request.test.ts b/__tests__/integration/abort_request.test.ts
deleted file mode 100644
index 62dbf1a9..00000000
--- a/__tests__/integration/abort_request.test.ts
+++ /dev/null
@@ -1,335 +0,0 @@
-import type { Row } from '../../src'
-import { type ClickHouseClient, type ResponseJSON } from '../../src'
-import { createTestClient, guid, makeObjectStream } from '../utils'
-import { createSimpleTable } from './fixtures/simple_table'
-import type Stream from 'stream'
-import { jsonValues } from './fixtures/test_data'
-
-describe('abort request', () => {
- let client: ClickHouseClient
-
- beforeEach(() => {
- client = createTestClient()
- })
-
- afterEach(async () => {
- await client.close()
- })
-
- describe('select', () => {
- it('cancels a select query before it is sent', async () => {
- const controller = new AbortController()
- const selectPromise = client.query({
- query: 'SELECT sleep(3)',
- format: 'CSV',
- abort_signal: controller.signal,
- })
- controller.abort()
-
- await expect(selectPromise).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringMatching('The request was aborted'),
- })
- )
- })
-
- it('cancels a select query after it is sent', async () => {
- const controller = new AbortController()
- const selectPromise = client.query({
- query: 'SELECT sleep(3)',
- format: 'CSV',
- abort_signal: controller.signal,
- })
-
- await new Promise((resolve) => {
- setTimeout(() => {
- controller.abort()
- resolve(undefined)
- }, 50)
- })
-
- await expect(selectPromise).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringMatching('The request was aborted'),
- })
- )
- })
-
- it('should not throw an error when aborted the second time', async () => {
- const controller = new AbortController()
- const selectPromise = client.query({
- query: 'SELECT sleep(3)',
- format: 'CSV',
- abort_signal: controller.signal,
- })
-
- await new Promise((resolve) => {
- setTimeout(() => {
- controller.abort()
- resolve(undefined)
- }, 50)
- })
-
- controller.abort('foo bar') // no-op, does not throw here
-
- await expect(selectPromise).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringMatching('The request was aborted'),
- })
- )
- })
-
- it('cancels a select query while reading response', async () => {
- const controller = new AbortController()
- const selectPromise = client
- .query({
- query: 'SELECT * from system.numbers',
- format: 'JSONCompactEachRow',
- abort_signal: controller.signal,
- })
- .then(async (rows) => {
- const stream = rows.stream()
- for await (const chunk of stream) {
- const [[number]] = chunk.json()
- // abort when reach number 3
- if (number === '3') {
- controller.abort()
- }
- }
- })
-
- // There is no assertion against an error message.
- // A race condition on events might lead to
- // Request Aborted or ERR_STREAM_PREMATURE_CLOSE errors.
- await expect(selectPromise).rejects.toThrowError()
- })
-
- it('cancels a select query while reading response by closing response stream', async () => {
- const selectPromise = client
- .query({
- query: 'SELECT * from system.numbers',
- format: 'JSONCompactEachRow',
- })
- .then(async function (rows) {
- const stream = rows.stream()
- for await (const rows of stream) {
- rows.forEach((row: Row) => {
- const [[number]] = row.json<[[string]]>()
- // abort when reach number 3
- if (number === '3') {
- stream.destroy()
- }
- })
- }
- })
- // There was a breaking change in Node.js 18.x+ behavior
- if (
- process.version.startsWith('v18') ||
- process.version.startsWith('v20')
- ) {
- await expect(selectPromise).rejects.toMatchObject({
- message: 'Premature close',
- })
- } else {
- expect(await selectPromise).toEqual(undefined)
- }
- })
-
- // FIXME: it does not work with ClickHouse Cloud.
- // Active queries never contain the long-running query unlike local setup.
- it.skip('ClickHouse server must cancel query on abort', async () => {
- const controller = new AbortController()
-
- const longRunningQuery = `SELECT sleep(3), '${guid()}'`
- console.log(`Long running query: ${longRunningQuery}`)
- void client.query({
- query: longRunningQuery,
- abort_signal: controller.signal,
- format: 'JSONCompactEachRow',
- })
-
- await assertActiveQueries(client, (queries) => {
- console.log(`Active queries: ${JSON.stringify(queries, null, 2)}`)
- return queries.some((q) => q.query.includes(longRunningQuery))
- })
-
- controller.abort()
-
- await assertActiveQueries(client, (queries) =>
- queries.every((q) => !q.query.includes(longRunningQuery))
- )
- })
-
- it('should cancel of the select queries while keeping the others', async () => {
- type Res = Array<{ foo: number }>
-
- const controller = new AbortController()
- const results: number[] = []
-
- const selectPromises = Promise.all(
- [...Array(5)].map((_, i) => {
- const shouldAbort = i === 3
- const requestPromise = client
- .query({
- query: `SELECT sleep(0.5), ${i} AS foo`,
- format: 'JSONEachRow',
- abort_signal:
- // we will cancel the request that should've yielded '3'
- shouldAbort ? controller.signal : undefined,
- })
- .then((r) => r.json())
- .then((r) => results.push(r[0].foo))
- // this way, the cancelled request will not cancel the others
- if (shouldAbort) {
- return requestPromise.catch(() => {
- // ignored
- })
- }
- return requestPromise
- })
- )
-
- controller.abort()
- await selectPromises
-
- expect(results.sort((a, b) => a - b)).toEqual([0, 1, 2, 4])
- })
- })
-
- describe('insert', () => {
- let tableName: string
- beforeEach(async () => {
- tableName = `abort_request_insert_test_${guid()}`
- await createSimpleTable(client, tableName)
- })
-
- it('cancels an insert query before it is sent', async () => {
- const controller = new AbortController()
- const stream = makeObjectStream()
- const insertPromise = client.insert({
- table: tableName,
- values: stream,
- abort_signal: controller.signal,
- })
- controller.abort()
-
- await expect(insertPromise).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringMatching('The request was aborted'),
- })
- )
- })
-
- it('cancels an insert query before it is sent by closing a stream', async () => {
- const stream = makeObjectStream()
- stream.push(null)
-
- expect(
- await client.insert({
- table: tableName,
- values: stream,
- })
- ).toEqual(
- expect.objectContaining({
- query_id: expect.any(String),
- })
- )
- })
-
- it('cancels an insert query after it is sent', async () => {
- const controller = new AbortController()
- const stream = makeObjectStream()
- const insertPromise = client.insert({
- table: tableName,
- values: stream,
- abort_signal: controller.signal,
- })
-
- setTimeout(() => {
- controller.abort()
- }, 50)
-
- await expect(insertPromise).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringMatching('The request was aborted'),
- })
- )
- })
-
- it('should cancel one insert while keeping the others', async () => {
- function shouldAbort(i: number) {
- // we will cancel the request
- // that should've inserted a value at index 3
- return i === 3
- }
-
- const controller = new AbortController()
- const streams: Stream.Readable[] = Array(jsonValues.length)
- const insertStreamPromises = Promise.all(
- jsonValues.map((value, i) => {
- const stream = makeObjectStream()
- streams[i] = stream
- stream.push(value)
- const insertPromise = client.insert({
- values: stream,
- format: 'JSONEachRow',
- table: tableName,
- abort_signal: shouldAbort(i) ? controller.signal : undefined,
- })
- if (shouldAbort(i)) {
- return insertPromise.catch(() => {
- // ignored
- })
- }
- return insertPromise
- })
- )
-
- setTimeout(() => {
- streams.forEach((stream, i) => {
- if (shouldAbort(i)) {
- controller.abort()
- }
- stream.push(null)
- })
- }, 100)
-
- await insertStreamPromises
-
- const result = await client
- .query({
- query: `SELECT * FROM ${tableName} ORDER BY id ASC`,
- format: 'JSONEachRow',
- })
- .then((r) => r.json())
-
- expect(result).toEqual([
- jsonValues[0],
- jsonValues[1],
- jsonValues[2],
- jsonValues[4],
- ])
- })
- })
-})
-
-async function assertActiveQueries(
- client: ClickHouseClient,
- assertQueries: (queries: Array<{ query: string }>) => boolean
-) {
- // eslint-disable-next-line no-constant-condition
- while (true) {
- const rs = await client.query({
- query: 'SELECT query FROM system.processes',
- format: 'JSON',
- })
-
- const queries = await rs.json>()
-
- if (assertQueries(queries.data)) {
- break
- }
-
- await new Promise((res) => setTimeout(res, 100))
- }
-}
diff --git a/__tests__/integration/config.test.ts b/__tests__/integration/config.test.ts
deleted file mode 100644
index 16b05bc1..00000000
--- a/__tests__/integration/config.test.ts
+++ /dev/null
@@ -1,229 +0,0 @@
-import type { Logger } from '../../src'
-import { type ClickHouseClient } from '../../src'
-import { createTestClient, guid, retryOnFailure } from '../utils'
-import type { RetryOnFailureOptions } from '../utils/retry'
-import type { ErrorLogParams, LogParams } from '../../src/logger'
-import { createSimpleTable } from './fixtures/simple_table'
-
-describe('config', () => {
- let client: ClickHouseClient
- let logs: {
- message: string
- err?: Error
- args?: Record
- }[] = []
-
- afterEach(async () => {
- await client.close()
- logs = []
- })
-
- it('should set request timeout with "request_timeout" setting', async () => {
- client = createTestClient({
- request_timeout: 100,
- })
-
- await expect(
- client.query({
- query: 'SELECT sleep(3)',
- })
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringMatching('Timeout error'),
- })
- )
- })
-
- it('should specify the default database name on creation', async () => {
- client = createTestClient({
- database: 'system',
- })
- const result = await client.query({
- query: 'SELECT * FROM numbers LIMIT 2',
- format: 'TabSeparated',
- })
- expect(await result.text()).toEqual('0\n1\n')
- })
-
- describe('Logger support', () => {
- const logLevelKey = 'CLICKHOUSE_LOG_LEVEL'
- let defaultLogLevel: string | undefined
- beforeEach(() => {
- defaultLogLevel = process.env[logLevelKey]
- })
- afterEach(() => {
- if (defaultLogLevel === undefined) {
- delete process.env[logLevelKey]
- } else {
- process.env[logLevelKey] = defaultLogLevel
- }
- })
-
- it('should use the default logger implementation', async () => {
- process.env[logLevelKey] = 'DEBUG'
- client = createTestClient()
- const consoleSpy = jest.spyOn(console, 'log')
- await client.ping()
- // logs[0] are about current log level
- expect(consoleSpy).toHaveBeenNthCalledWith(
- 1,
- expect.stringContaining('Got a response from ClickHouse'),
- expect.objectContaining({
- request_headers: {
- 'user-agent': expect.any(String),
- },
- request_method: 'GET',
- request_params: '',
- request_path: '/ping',
- response_headers: expect.objectContaining({
- connection: expect.stringMatching(/Keep-Alive/i),
- 'content-type': 'text/html; charset=UTF-8',
- 'transfer-encoding': 'chunked',
- }),
- response_status: 200,
- })
- )
- expect(consoleSpy).toHaveBeenCalledTimes(1)
- })
-
- it('should provide a custom logger implementation', async () => {
- process.env[logLevelKey] = 'DEBUG'
- client = createTestClient({
- log: {
- LoggerClass: TestLogger,
- },
- })
- await client.ping()
- // logs[0] are about current log level
- expect(logs[1]).toEqual({
- module: 'Connection',
- message: 'Got a response from ClickHouse',
- args: expect.objectContaining({
- request_path: '/ping',
- request_method: 'GET',
- }),
- })
- })
-
- it('should provide a custom logger implementation (but logs are disabled)', async () => {
- process.env[logLevelKey] = 'OFF'
- client = createTestClient({
- log: {
- // enable: false,
- LoggerClass: TestLogger,
- },
- })
- await client.ping()
- expect(logs).toHaveLength(0)
- })
- })
-
- describe('max_open_connections', () => {
- let results: number[] = []
- afterEach(() => {
- results = []
- })
-
- const retryOpts: RetryOnFailureOptions = {
- maxAttempts: 20,
- }
-
- function select(query: string) {
- return client
- .query({
- query,
- format: 'JSONEachRow',
- })
- .then((r) => r.json<[{ x: number }]>())
- .then(([{ x }]) => results.push(x))
- }
-
- it('should use only one connection', async () => {
- client = createTestClient({
- max_open_connections: 1,
- })
- void select('SELECT 1 AS x, sleep(0.3)')
- void select('SELECT 2 AS x, sleep(0.3)')
- await retryOnFailure(async () => {
- expect(results).toEqual([1])
- }, retryOpts)
- await retryOnFailure(async () => {
- expect(results.sort()).toEqual([1, 2])
- }, retryOpts)
- })
-
- it('should use only one connection for insert', async () => {
- const tableName = `config_single_connection_insert_${guid()}`
- client = createTestClient({
- max_open_connections: 1,
- request_timeout: 3000,
- })
- await createSimpleTable(client, tableName)
-
- const timeout = setTimeout(() => {
- throw new Error('Timeout was triggered')
- }, 3000).unref()
-
- const value1 = { id: '42', name: 'hello', sku: [0, 1] }
- const value2 = { id: '43', name: 'hello', sku: [0, 1] }
- function insert(value: object) {
- return client.insert({
- table: tableName,
- values: [value],
- format: 'JSONEachRow',
- })
- }
- await insert(value1)
- await insert(value2) // if previous call holds the socket, the test will time out
- clearTimeout(timeout)
-
- const result = await client.query({
- query: `SELECT * FROM ${tableName}`,
- format: 'JSONEachRow',
- })
-
- const json = await result.json()
- expect(json).toContainEqual(value1)
- expect(json).toContainEqual(value2)
- expect(json.length).toEqual(2)
- })
-
- it('should use several connections', async () => {
- client = createTestClient({
- max_open_connections: 2,
- })
- void select('SELECT 1 AS x, sleep(0.3)')
- void select('SELECT 2 AS x, sleep(0.3)')
- void select('SELECT 3 AS x, sleep(0.3)')
- void select('SELECT 4 AS x, sleep(0.3)')
- await retryOnFailure(async () => {
- expect(results).toContain(1)
- expect(results).toContain(2)
- expect(results.sort()).toEqual([1, 2])
- }, retryOpts)
- await retryOnFailure(async () => {
- expect(results).toContain(3)
- expect(results).toContain(4)
- expect(results.sort()).toEqual([1, 2, 3, 4])
- }, retryOpts)
- })
- })
-
- class TestLogger implements Logger {
- trace(params: LogParams) {
- logs.push(params)
- }
- debug(params: LogParams) {
- logs.push(params)
- }
- info(params: LogParams) {
- logs.push(params)
- }
- warn(params: LogParams) {
- logs.push(params)
- }
- error(params: ErrorLogParams) {
- logs.push(params)
- }
- }
-})
diff --git a/__tests__/integration/schema_e2e.test.ts b/__tests__/integration/schema_e2e.test.ts
deleted file mode 100644
index 31a9a997..00000000
--- a/__tests__/integration/schema_e2e.test.ts
+++ /dev/null
@@ -1,215 +0,0 @@
-import type { ClickHouseClient } from '../../src'
-import { createTableWithSchema, createTestClient, guid } from '../utils'
-import * as ch from '../../src/schema'
-import { And, Eq, Or } from '../../src/schema'
-
-describe('schema e2e test', () => {
- let client: ClickHouseClient
- let tableName: string
-
- beforeEach(async () => {
- client = await createTestClient()
- tableName = `schema_e2e_test_${guid()}`
- })
- afterEach(async () => {
- await client.close()
- })
-
- const shape = {
- id: ch.UUID,
- name: ch.String,
- sku: ch.Array(ch.UInt8),
- active: ch.Bool,
- }
- let table: ch.Table
- type Value = ch.Infer
-
- const value1: Value = {
- id: '8dbb28f7-4da0-4e49-af71-e830aee422eb',
- name: 'foo',
- sku: [1, 2],
- active: true,
- }
- const value2: Value = {
- id: '314f5ac4-fe93-4c39-b26c-0cb079be0767',
- name: 'bar',
- sku: [3, 4],
- active: false,
- }
-
- beforeEach(async () => {
- table = await createTableWithSchema(
- client,
- new ch.Schema(shape),
- tableName,
- ['id']
- )
- })
-
- it('should insert and select data using arrays', async () => {
- await table.insert({
- values: [value1, value2],
- })
- const result = await (await table.select()).json()
- expect(result).toEqual([value1, value2])
- })
-
- it('should insert and select data using streams', async () => {
- const values = new ch.InsertStream()
- values.add(value1)
- values.add(value2)
- setTimeout(() => values.complete(), 100)
-
- await table.insert({
- values,
- })
-
- const result: Value[] = []
- const { asyncGenerator } = await table.select()
-
- for await (const value of asyncGenerator()) {
- result.push(value)
- }
-
- expect(result).toEqual([value1, value2])
- })
-
- // FIXME: find a way to disallow default values
- it.skip('should not swallow generic insert errors using arrays', async () => {
- await expect(
- table.insert({
- values: [{ foobar: 'qaz' } as any],
- })
- ).rejects.toEqual(
- expect.objectContaining({
- error: 'asdfsdaf',
- })
- )
- })
-
- // FIXME: find a way to disallow default values
- it.skip('should not swallow generic insert errors using streams', async () => {
- const values = new ch.InsertStream()
- values.add(value1)
- values.add({ foobar: 'qaz' } as any)
- setTimeout(() => values.complete(), 100)
-
- await table.insert({
- values,
- })
- const result = await (await table.select()).json()
- expect(result).toEqual([value1, value2])
- })
-
- it('should not swallow generic select errors', async () => {
- await expect(
- table.select({
- order_by: [['non_existing_column' as any, 'ASC']],
- })
- ).rejects.toMatchObject({
- message: expect.stringContaining('Missing columns'),
- })
- })
-
- it('should use order by / where statements', async () => {
- const value3: Value = {
- id: '7640bde3-cdc5-4d63-a47e-66c6a16629df',
- name: 'qaz',
- sku: [6, 7],
- active: true,
- }
- await table.insert({
- values: [value1, value2, value3],
- })
-
- expect(
- await table
- .select({
- where: Eq('name', 'bar'),
- })
- .then((r) => r.json())
- ).toEqual([value2])
-
- expect(
- await table
- .select({
- where: Or(Eq('name', 'foo'), Eq('name', 'qaz')),
- order_by: [['name', 'DESC']],
- })
- .then((r) => r.json())
- ).toEqual([value3, value1])
-
- expect(
- await table
- .select({
- where: And(Eq('active', true), Eq('name', 'foo')),
- })
- .then((r) => r.json())
- ).toEqual([value1])
-
- expect(
- await table
- .select({
- where: Eq('sku', [3, 4]),
- })
- .then((r) => r.json())
- ).toEqual([value2])
-
- expect(
- await table
- .select({
- where: And(Eq('active', true), Eq('name', 'quuux')),
- })
- .then((r) => r.json())
- ).toEqual([])
-
- expect(
- await table
- .select({
- order_by: [
- ['active', 'DESC'],
- ['name', 'DESC'],
- ],
- })
- .then((r) => r.json())
- ).toEqual([value3, value1, value2])
-
- expect(
- await table
- .select({
- order_by: [
- ['active', 'DESC'],
- ['name', 'ASC'],
- ],
- })
- .then((r) => r.json())
- ).toEqual([value1, value3, value2])
- })
-
- it('should be able to select only specific columns', async () => {
- await table.insert({
- values: [value1, value2],
- })
-
- expect(
- await table
- .select({
- columns: ['id'],
- order_by: [['name', 'ASC']],
- })
- .then((r) => r.json())
- ).toEqual([{ id: value2.id }, { id: value1.id }])
-
- expect(
- await table
- .select({
- columns: ['id', 'active'],
- order_by: [['name', 'ASC']],
- })
- .then((r) => r.json())
- ).toEqual([
- { id: value2.id, active: value2.active },
- { id: value1.id, active: value1.active },
- ])
- })
-})
diff --git a/__tests__/integration/schema_types.test.ts b/__tests__/integration/schema_types.test.ts
deleted file mode 100644
index 272e0743..00000000
--- a/__tests__/integration/schema_types.test.ts
+++ /dev/null
@@ -1,388 +0,0 @@
-import type { ClickHouseClient } from '../../src'
-import { createTableWithSchema, createTestClient, guid } from '../utils'
-
-import * as ch from '../../src/schema'
-
-describe('schema types', () => {
- let client: ClickHouseClient
- let tableName: string
-
- beforeEach(async () => {
- client = await createTestClient()
- tableName = `schema_test_${guid()}`
- })
- afterEach(async () => {
- await client.close()
- })
-
- describe('(U)Int', () => {
- const shape = {
- i1: ch.Int8,
- i2: ch.Int16,
- i3: ch.Int32,
- i4: ch.Int64,
- i5: ch.Int128,
- i6: ch.Int256,
- u1: ch.UInt8,
- u2: ch.UInt16,
- u3: ch.UInt32,
- u4: ch.UInt64,
- u5: ch.UInt128,
- u6: ch.UInt256,
- }
- const value: ch.Infer = {
- i1: 127,
- i2: 32767,
- i3: 2147483647,
- i4: '9223372036854775807',
- i5: '170141183460469231731687303715884105727',
- i6: '57896044618658097711785492504343953926634992332820282019728792003956564819967',
- u1: 255,
- u2: 65535,
- u3: 4294967295,
- u4: '18446744073709551615',
- u5: '340282366920938463463374607431768211455',
- u6: '115792089237316195423570985008687907853269984665640564039457584007913129639935',
- }
-
- let table: ch.Table
- beforeEach(async () => {
- table = await createTableWithSchema(
- client,
- new ch.Schema(shape),
- tableName,
- ['i1']
- )
- })
-
- it('should insert and select it back', async () => {
- await assertInsertAndSelect(table, value)
- })
- })
-
- describe('Float', () => {
- const shape = {
- f1: ch.Float32,
- f2: ch.Float64,
- }
- // TODO: figure out better values for this test
- const value: ch.Infer = {
- f1: 1.2345,
- f2: 2.2345,
- }
-
- let table: ch.Table
- beforeEach(async () => {
- table = await createTableWithSchema(
- client,
- new ch.Schema(shape),
- tableName,
- ['f1']
- )
- })
-
- it('should insert and select it back', async () => {
- await assertInsertAndSelect(table, value)
- })
- })
-
- describe('String', () => {
- const shape = {
- s1: ch.String,
- s2: ch.FixedString(255),
- }
- const value: ch.Infer = {
- s1: 'foo',
- s2: 'bar',
- }
-
- let table: ch.Table
- beforeEach(async () => {
- table = await createTableWithSchema(
- client,
- new ch.Schema(shape),
- tableName,
- ['s1']
- )
- })
-
- it('should insert and select it back', async () => {
- await table.insert({
- values: [value],
- })
- const result = await (await table.select()).json()
- expect(result).toEqual([
- {
- s1: value.s1,
- s2: value.s2.padEnd(255, '\x00'),
- },
- ])
- expect(result[0].s2.length).toEqual(255)
- })
- })
-
- describe('IP', () => {
- const shape = {
- ip1: ch.IPv4,
- ip2: ch.IPv6,
- }
- const value: ch.Infer = {
- ip1: '127.0.0.116',
- ip2: '2001:db8:85a3::8a2e:370:7334',
- }
-
- let table: ch.Table
- beforeEach(async () => {
- table = await createTableWithSchema(
- client,
- new ch.Schema(shape),
- tableName,
- ['ip1']
- )
- })
-
- it('should insert and select it back', async () => {
- await assertInsertAndSelect(table, value)
- })
- })
-
- describe('Array', () => {
- const shape = {
- arr1: ch.Array(ch.UInt32),
- arr2: ch.Array(ch.String),
- arr3: ch.Array(ch.Array(ch.Array(ch.Int32))),
- arr4: ch.Array(ch.Nullable(ch.String)),
- }
- // TODO: better values for this test
- const value: ch.Infer = {
- arr1: [1, 2],
- arr2: ['foo', 'bar'],
- arr3: [[[12345]]],
- arr4: ['qux', null, 'qaz'],
- }
-
- let table: ch.Table
- beforeEach(async () => {
- table = await createTableWithSchema(
- client,
- new ch.Schema(shape),
- tableName,
- ['arr2']
- )
- })
-
- it('should insert and select it back', async () => {
- await assertInsertAndSelect(table, value)
- })
- })
-
- describe('Map', () => {
- const shape = {
- m1: ch.Map(ch.String, ch.String),
- m2: ch.Map(ch.Int32, ch.Map(ch.Date, ch.Array(ch.Int32))),
- }
- const value: ch.Infer = {
- m1: { foo: 'bar' },
- m2: {
- 42: {
- '2022-04-25': [1, 2, 3],
- },
- },
- }
-
- let table: ch.Table
- beforeEach(async () => {
- table = await createTableWithSchema(
- client,
- new ch.Schema(shape),
- tableName,
- ['m1']
- )
- })
-
- it('should insert and select it back', async () => {
- await assertInsertAndSelect(table, value)
- })
- })
-
- describe('Nullable', () => {
- const shape = {
- id: ch.Int32, // nullable order by is prohibited
- n1: ch.Nullable(ch.String),
- n2: ch.Nullable(ch.Date),
- }
- const value1: ch.Infer = {
- id: 1,
- n1: 'foo',
- n2: null,
- }
- const value2: ch.Infer = {
- id: 2,
- n1: null,
- n2: '2022-04-30',
- }
-
- let table: ch.Table
- beforeEach(async () => {
- table = await createTableWithSchema(
- client,
- new ch.Schema(shape),
- tableName,
- ['id']
- )
- })
-
- it('should insert and select it back', async () => {
- await assertInsertAndSelect(table, value1, value2)
- })
- })
-
- describe('Enum', () => {
- enum MyEnum {
- Foo = 'Foo',
- Bar = 'Bar',
- Qaz = 'Qaz',
- Qux = 'Qux',
- }
-
- const shape = {
- id: ch.Int32, // to preserve the order of values
- e: ch.Enum(MyEnum),
- }
- const values: ch.Infer[] = [
- { id: 1, e: MyEnum.Bar },
- { id: 2, e: MyEnum.Qux },
- { id: 3, e: MyEnum.Foo },
- { id: 4, e: MyEnum.Qaz },
- ]
-
- let table: ch.Table
- beforeEach(async () => {
- table = await createTableWithSchema(
- client,
- new ch.Schema(shape),
- tableName,
- ['id']
- )
- })
-
- it('should insert and select it back', async () => {
- await assertInsertAndSelect(table, ...values)
- })
-
- it('should fail in case of an invalid value', async () => {
- await expect(
- table.insert({
- values: [{ id: 4, e: 'NonExistingValue' as MyEnum }],
- })
- ).rejects.toMatchObject(
- expect.objectContaining({
- message: expect.stringContaining(
- `Unknown element 'NonExistingValue' for enum`
- ),
- })
- )
- })
- })
-
- describe('Date(Time)', () => {
- const shape = {
- d1: ch.Date,
- d2: ch.Date32,
- dt1: ch.DateTime(),
- dt2: ch.DateTime64(3),
- dt3: ch.DateTime64(6),
- dt4: ch.DateTime64(9),
- }
- const value: ch.Infer = {
- d1: '2149-06-06',
- d2: '2178-04-16',
- dt1: '2106-02-07 06:28:15',
- dt2: '2106-02-07 06:28:15.123',
- dt3: '2106-02-07 06:28:15.123456',
- dt4: '2106-02-07 06:28:15.123456789',
- }
-
- let table: ch.Table
- beforeEach(async () => {
- table = await createTableWithSchema(
- client,
- new ch.Schema(shape),
- tableName,
- ['d1']
- )
- })
-
- it('should insert and select it back', async () => {
- await assertInsertAndSelect(table, value)
- })
- })
-
- // FIXME: uncomment and extend the test
- // once Decimal is re-implemented properly
-
- // describe('Decimal', () => {
- // const shape = {
- // d1: ch.Decimal({
- // precision: 9,
- // scale: 2,
- // }), // Decimal32
- // d2: ch.Decimal({
- // precision: 18,
- // scale: 3,
- // }), // Decimal64
- // }
- // const value: ch.Infer = {
- // d1: 1234567.89,
- // d2: 123456789123456.789,
- // }
- //
- // let table: ch.Table
- // beforeEach(async () => {
- // table = await createTableWithSchema(
- // client,
- // new ch.Schema(shape),
- // tableName,
- // ['d1']
- // )
- // })
- //
- // it('should insert and select it back', async () => {
- // await assertInsertAndSelect(table, value)
- // })
- // })
-
- describe('LowCardinality', () => {
- const shape = {
- lc1: ch.LowCardinality(ch.String),
- }
- const value: ch.Infer = {
- lc1: 'foobar',
- }
-
- let table: ch.Table
- beforeEach(async () => {
- table = await createTableWithSchema(
- client,
- new ch.Schema(shape),
- tableName,
- ['lc1']
- )
- })
-
- it('should insert and select it back', async () => {
- await assertInsertAndSelect(table, value)
- })
- })
-})
-
-async function assertInsertAndSelect(
- table: ch.Table,
- ...value: ch.Infer[]
-) {
- await table.insert({
- values: value,
- })
- const result = await (await table.select()).json()
- expect(result).toEqual(value)
-}
diff --git a/__tests__/integration/select.test.ts b/__tests__/integration/select.test.ts
deleted file mode 100644
index d1480635..00000000
--- a/__tests__/integration/select.test.ts
+++ /dev/null
@@ -1,524 +0,0 @@
-import type Stream from 'stream'
-import { type ClickHouseClient, type ResponseJSON, type Row } from '../../src'
-import { createTestClient, guid } from '../utils'
-import * as uuid from 'uuid'
-
-async function rowsValues(stream: Stream.Readable): Promise {
- const result: any[] = []
- for await (const rows of stream) {
- rows.forEach((row: Row) => {
- result.push(row.json())
- })
- }
- return result
-}
-
-async function rowsText(stream: Stream.Readable): Promise {
- const result: string[] = []
- for await (const rows of stream) {
- rows.forEach((row: Row) => {
- result.push(row.text)
- })
- }
- return result
-}
-
-describe('select', () => {
- let client: ClickHouseClient
- afterEach(async () => {
- await client.close()
- })
- beforeEach(async () => {
- client = createTestClient()
- })
-
- it('gets query_id back', async () => {
- const resultSet = await client.query({
- query: 'SELECT * FROM system.numbers LIMIT 1',
- format: 'JSONEachRow',
- })
- expect(await resultSet.json()).toEqual([{ number: '0' }])
- expect(uuid.validate(resultSet.query_id)).toBeTruthy()
- })
-
- it('can override query_id', async () => {
- const query_id = guid()
- const resultSet = await client.query({
- query: 'SELECT * FROM system.numbers LIMIT 1',
- format: 'JSONEachRow',
- query_id,
- })
- expect(await resultSet.json()).toEqual([{ number: '0' }])
- expect(resultSet.query_id).toEqual(query_id)
- })
-
- it('can process an empty response', async () => {
- expect(
- await client
- .query({
- query: 'SELECT * FROM system.numbers LIMIT 0',
- format: 'JSONEachRow',
- })
- .then((r) => r.json())
- ).toEqual([])
- expect(
- await client
- .query({
- query: 'SELECT * FROM system.numbers LIMIT 0',
- format: 'TabSeparated',
- })
- .then((r) => r.text())
- ).toEqual('')
- })
-
- describe('consume the response only once', () => {
- async function assertAlreadyConsumed$(fn: () => Promise) {
- await expect(fn()).rejects.toMatchObject(
- expect.objectContaining({
- message: 'Stream has been already consumed',
- })
- )
- }
- function assertAlreadyConsumed(fn: () => T) {
- expect(fn).toThrow(
- expect.objectContaining({
- message: 'Stream has been already consumed',
- })
- )
- }
- it('should consume a JSON response only once', async () => {
- const rs = await client.query({
- query: 'SELECT * FROM system.numbers LIMIT 1',
- format: 'JSONEachRow',
- })
- expect(await rs.json()).toEqual([{ number: '0' }])
- // wrap in a func to avoid changing inner "this"
- await assertAlreadyConsumed$(() => rs.json())
- await assertAlreadyConsumed$(() => rs.text())
- await assertAlreadyConsumed(() => rs.stream())
- })
-
- it('should consume a text response only once', async () => {
- const rs = await client.query({
- query: 'SELECT * FROM system.numbers LIMIT 1',
- format: 'TabSeparated',
- })
- expect(await rs.text()).toEqual('0\n')
- // wrap in a func to avoid changing inner "this"
- await assertAlreadyConsumed$(() => rs.json())
- await assertAlreadyConsumed$(() => rs.text())
- await assertAlreadyConsumed(() => rs.stream())
- })
-
- it('should consume a stream response only once', async () => {
- const rs = await client.query({
- query: 'SELECT * FROM system.numbers LIMIT 1',
- format: 'TabSeparated',
- })
- let result = ''
- for await (const rows of rs.stream()) {
- rows.forEach((row: Row) => {
- result += row.text
- })
- }
- expect(result).toEqual('0')
- // wrap in a func to avoid changing inner "this"
- await assertAlreadyConsumed$(() => rs.json())
- await assertAlreadyConsumed$(() => rs.text())
- await assertAlreadyConsumed(() => rs.stream())
- })
- })
-
- it('can send a multiline query', async () => {
- const rs = await client.query({
- query: `
- SELECT number
- FROM system.numbers
- LIMIT 2
- `,
- format: 'CSV',
- })
-
- const response = await rs.text()
- expect(response).toBe('0\n1\n')
- })
-
- it('can send a query with an inline comment', async () => {
- const rs = await client.query({
- query: `
- SELECT number
- -- a comment
- FROM system.numbers
- LIMIT 2
- `,
- format: 'CSV',
- })
-
- const response = await rs.text()
- expect(response).toBe('0\n1\n')
- })
-
- it('can send a query with a multiline comment', async () => {
- const rs = await client.query({
- query: `
- SELECT number
- /* This is:
- a multiline comment
- */
- FROM system.numbers
- LIMIT 2
- `,
- format: 'CSV',
- })
-
- const response = await rs.text()
- expect(response).toBe('0\n1\n')
- })
-
- it('can send a query with a trailing comment', async () => {
- const rs = await client.query({
- query: `
- SELECT number
- FROM system.numbers
- LIMIT 2
- -- comment`,
- format: 'JSON',
- })
-
- const response = await rs.json>()
- expect(response.data).toEqual([{ number: '0' }, { number: '1' }])
- })
-
- it('can specify settings in select', async () => {
- const rs = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'CSV',
- clickhouse_settings: {
- limit: '2',
- },
- })
-
- const response = await rs.text()
- expect(response).toBe('0\n1\n')
- })
-
- it('does not swallow a client error', async () => {
- await expect(client.query({ query: 'SELECT number FR' })).rejects.toEqual(
- expect.objectContaining({
- type: 'UNKNOWN_IDENTIFIER',
- })
- )
- })
-
- it('returns an error details provided by ClickHouse', async () => {
- await expect(client.query({ query: 'foobar' })).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Syntax error'),
- code: '62',
- type: 'SYNTAX_ERROR',
- })
- )
- })
-
- it('should provide error details when sending a request with an unknown clickhouse settings', async () => {
- await expect(
- client.query({
- query: 'SELECT * FROM system.numbers',
- clickhouse_settings: { foobar: 1 } as any,
- })
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Unknown setting foobar'),
- code: '115',
- type: 'UNKNOWN_SETTING',
- })
- )
- })
-
- it('can send multiple simultaneous requests', async () => {
- type Res = Array<{ sum: number }>
- const results: number[] = []
- await Promise.all(
- [...Array(5)].map((_, i) =>
- client
- .query({
- query: `SELECT toInt32(sum(*)) AS sum FROM numbers(0, ${i + 2});`,
- format: 'JSONEachRow',
- })
- .then((r) => r.json())
- .then((json: Res) => results.push(json[0].sum))
- )
- )
- expect(results.sort((a, b) => a - b)).toEqual([1, 3, 6, 10, 15])
- })
-
- describe('select result', () => {
- describe('text()', function () {
- it('returns values from SELECT query in specified format', async () => {
- const rs = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 3',
- format: 'CSV',
- })
-
- expect(await rs.text()).toBe('0\n1\n2\n')
- })
- it('returns values from SELECT query in specified format', async () => {
- const rs = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 3',
- format: 'JSONEachRow',
- })
-
- expect(await rs.text()).toBe(
- '{"number":"0"}\n{"number":"1"}\n{"number":"2"}\n'
- )
- })
- })
-
- describe('json()', () => {
- it('returns an array of values in data property', async () => {
- const rs = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'JSON',
- })
-
- const { data: nums } = await rs.json>()
- expect(Array.isArray(nums)).toBe(true)
- expect(nums).toHaveLength(5)
- const values = nums.map((i) => i.number)
- expect(values).toEqual(['0', '1', '2', '3', '4'])
- })
-
- it('returns columns data in response', async () => {
- const rs = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'JSON',
- })
-
- const { meta } = await rs.json>()
-
- expect(meta?.length).toBe(1)
- const column = meta ? meta[0] : undefined
- expect(column).toEqual({
- name: 'number',
- type: 'UInt64',
- })
- })
-
- it('returns number of rows in response', async () => {
- const rs = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'JSON',
- })
-
- const response = await rs.json>()
-
- expect(response.rows).toBe(5)
- })
-
- it('returns statistics in response', async () => {
- const rs = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'JSON',
- })
-
- const response = await rs.json>()
- expect(response).toEqual(
- expect.objectContaining({
- statistics: {
- elapsed: expect.any(Number),
- rows_read: expect.any(Number),
- bytes_read: expect.any(Number),
- },
- })
- )
- })
- })
- })
-
- describe('select result asStream()', () => {
- it('throws an exception if format is not stream-able', async () => {
- const result = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'JSON',
- })
- try {
- expect(() => result.stream()).toThrowError(
- 'JSON format is not streamable'
- )
- } finally {
- result.close()
- }
- })
-
- it('can pause response stream', async () => {
- const result = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 10000',
- format: 'CSV',
- })
-
- const stream = result.stream()
-
- let last = null
- let i = 0
- for await (const rows of stream) {
- rows.forEach((row: Row) => {
- last = row.text
- i++
- if (i % 1000 === 0) {
- stream.pause()
- setTimeout(() => stream.resume(), 100)
- }
- })
- }
- expect(last).toBe('9999')
- })
-
- describe('text()', () => {
- it('returns stream of rows in CSV format', async () => {
- const result = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'CSV',
- })
-
- const rs = await rowsText(result.stream())
- expect(rs).toEqual(['0', '1', '2', '3', '4'])
- })
-
- it('returns stream of rows in TabSeparated format', async () => {
- const result = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'TabSeparated',
- })
-
- const rs = await rowsText(result.stream())
- expect(rs).toEqual(['0', '1', '2', '3', '4'])
- })
- })
-
- describe('json()', () => {
- it('returns stream of objects in JSONEachRow format', async () => {
- const result = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'JSONEachRow',
- })
-
- const rs = await rowsValues(result.stream())
- expect(rs).toEqual([
- { number: '0' },
- { number: '1' },
- { number: '2' },
- { number: '3' },
- { number: '4' },
- ])
- })
-
- it('returns stream of objects in JSONStringsEachRow format', async () => {
- const result = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'JSONStringsEachRow',
- })
-
- const rs = await rowsValues(result.stream())
- expect(rs).toEqual([
- { number: '0' },
- { number: '1' },
- { number: '2' },
- { number: '3' },
- { number: '4' },
- ])
- })
-
- it('returns stream of objects in JSONCompactEachRow format', async () => {
- const result = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'JSONCompactEachRow',
- })
-
- const rs = await rowsValues(result.stream())
- expect(rs).toEqual([['0'], ['1'], ['2'], ['3'], ['4']])
- })
-
- it('returns stream of objects in JSONCompactEachRowWithNames format', async () => {
- const result = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'JSONCompactEachRowWithNames',
- })
-
- const rs = await rowsValues(result.stream())
- expect(rs).toEqual([['number'], ['0'], ['1'], ['2'], ['3'], ['4']])
- })
-
- it('returns stream of objects in JSONCompactEachRowWithNamesAndTypes format', async () => {
- const result = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'JSONCompactEachRowWithNamesAndTypes',
- })
-
- const rs = await rowsValues(result.stream())
- expect(rs).toEqual([
- ['number'],
- ['UInt64'],
- ['0'],
- ['1'],
- ['2'],
- ['3'],
- ['4'],
- ])
- })
-
- it('returns stream of objects in JSONCompactStringsEachRowWithNames format', async () => {
- const result = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'JSONCompactStringsEachRowWithNames',
- })
-
- const rs = await rowsValues(result.stream())
- expect(rs).toEqual([['number'], ['0'], ['1'], ['2'], ['3'], ['4']])
- })
-
- it('returns stream of objects in JSONCompactStringsEachRowWithNamesAndTypes format', async () => {
- const result = await client.query({
- query: 'SELECT number FROM system.numbers LIMIT 5',
- format: 'JSONCompactStringsEachRowWithNamesAndTypes',
- })
-
- const rs = await rowsValues(result.stream())
- expect(rs).toEqual([
- ['number'],
- ['UInt64'],
- ['0'],
- ['1'],
- ['2'],
- ['3'],
- ['4'],
- ])
- })
- })
- })
-
- describe('trailing semi', () => {
- it('should allow queries with trailing semicolon', async () => {
- const numbers = await client.query({
- query: 'SELECT * FROM system.numbers LIMIT 3;',
- format: 'CSV',
- })
- expect(await numbers.text()).toEqual('0\n1\n2\n')
- })
-
- it('should allow queries with multiple trailing semicolons', async () => {
- const numbers = await client.query({
- query: 'SELECT * FROM system.numbers LIMIT 3;;;;;;;;;;;;;;;;;',
- format: 'CSV',
- })
- expect(await numbers.text()).toEqual('0\n1\n2\n')
- })
-
- it('should allow semi in select clause', async () => {
- const resultSet = await client.query({
- query: `SELECT ';'`,
- format: 'CSV',
- })
- expect(await resultSet.text()).toEqual('";"\n')
- })
- })
-})
diff --git a/__tests__/setup.integration.ts b/__tests__/setup.integration.ts
deleted file mode 100644
index 70ad1315..00000000
--- a/__tests__/setup.integration.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-import { createRandomDatabase, createTestClient } from './utils'
-import { TestDatabaseEnvKey } from './global.integration'
-
-export default async () => {
- const client = createTestClient()
- const databaseName = await createRandomDatabase(client)
- await client.close()
- process.env[TestDatabaseEnvKey] = databaseName
-}
diff --git a/__tests__/unit/client.test.ts b/__tests__/unit/client.test.ts
deleted file mode 100644
index 00c6d314..00000000
--- a/__tests__/unit/client.test.ts
+++ /dev/null
@@ -1,32 +0,0 @@
-import type { ClickHouseClientConfigOptions } from '../../src'
-import { createClient } from '../../src'
-
-describe('createClient', () => {
- it('throws on incorrect "host" config value', () => {
- expect(() => createClient({ host: 'foo' })).toThrowError(
- 'Configuration parameter "host" contains malformed url.'
- )
- })
-
- it('should not mutate provided configuration', async () => {
- const config: ClickHouseClientConfigOptions = {
- host: 'http://localhost',
- }
- createClient(config)
- // none of the initial configuration settings are overridden
- // by the defaults we assign when we normalize the specified config object
- expect(config).toEqual({
- host: 'http://localhost',
- request_timeout: undefined,
- max_open_connections: undefined,
- tls: undefined,
- compression: undefined,
- username: undefined,
- password: undefined,
- application: undefined,
- database: undefined,
- clickhouse_settings: undefined,
- log: undefined,
- })
- })
-})
diff --git a/__tests__/unit/connection.test.ts b/__tests__/unit/connection.test.ts
deleted file mode 100644
index c420454b..00000000
--- a/__tests__/unit/connection.test.ts
+++ /dev/null
@@ -1,44 +0,0 @@
-import { createConnection } from '../../src/connection'
-import { HttpAdapter, HttpsAdapter } from '../../src/connection/adapter'
-
-describe('connection', () => {
- it('should create HTTP adapter', async () => {
- const adapter = createConnection(
- {
- url: new URL('http://localhost'),
- keep_alive: {
- enabled: true,
- },
- } as any,
- {} as any
- )
- expect(adapter).toBeInstanceOf(HttpAdapter)
- })
-
- it('should create HTTPS adapter', async () => {
- const adapter = createConnection(
- {
- url: new URL('https://localhost'),
- keep_alive: {
- enabled: true,
- },
- } as any,
- {} as any
- )
- expect(adapter).toBeInstanceOf(HttpsAdapter)
- })
-
- it('should throw if the supplied protocol is unknown', async () => {
- expect(() =>
- createConnection(
- {
- url: new URL('tcp://localhost'),
- keep_alive: {
- enabled: true,
- },
- } as any,
- {} as any
- )
- ).toThrowError('Only HTTP(s) adapters are supported')
- })
-})
diff --git a/__tests__/unit/encode_values.test.ts b/__tests__/unit/encode_values.test.ts
deleted file mode 100644
index 2c3f494d..00000000
--- a/__tests__/unit/encode_values.test.ts
+++ /dev/null
@@ -1,106 +0,0 @@
-import Stream from 'stream'
-import { encodeValues } from '../../src/client'
-import type { DataFormat, InputJSON, InputJSONObjectEachRow } from '../../src'
-
-describe('encodeValues', () => {
- const rawFormats = [
- 'CSV',
- 'CSVWithNames',
- 'CSVWithNamesAndTypes',
- 'TabSeparated',
- 'TabSeparatedRaw',
- 'TabSeparatedWithNames',
- 'TabSeparatedWithNamesAndTypes',
- 'CustomSeparated',
- 'CustomSeparatedWithNames',
- 'CustomSeparatedWithNamesAndTypes',
- ]
- const jsonFormats = [
- 'JSON',
- 'JSONStrings',
- 'JSONCompact',
- 'JSONCompactStrings',
- 'JSONColumnsWithMetadata',
- 'JSONObjectEachRow',
- 'JSONEachRow',
- 'JSONStringsEachRow',
- 'JSONCompactEachRow',
- 'JSONCompactEachRowWithNames',
- 'JSONCompactEachRowWithNamesAndTypes',
- 'JSONCompactStringsEachRowWithNames',
- 'JSONCompactStringsEachRowWithNamesAndTypes',
- ]
-
- it('should not do anything for raw formats streams', async () => {
- const values = Stream.Readable.from('foo,bar\n', {
- objectMode: false,
- })
- rawFormats.forEach((format) => {
- // should be exactly the same object (no duplicate instances)
- expect(encodeValues(values, format as DataFormat)).toEqual(values)
- })
- })
-
- it('should encode JSON streams per line', async () => {
- for (const format of jsonFormats) {
- const values = Stream.Readable.from(['foo', 'bar'], {
- objectMode: true,
- })
- const result = encodeValues(values, format as DataFormat)
- let encoded = ''
- for await (const chunk of result) {
- encoded += chunk
- }
- expect(encoded).toEqual('"foo"\n"bar"\n')
- }
- })
-
- it('should encode JSON arrays', async () => {
- for (const format of jsonFormats) {
- const values = ['foo', 'bar']
- const result = encodeValues(values, format as DataFormat)
- let encoded = ''
- for await (const chunk of result) {
- encoded += chunk
- }
- expect(encoded).toEqual('"foo"\n"bar"\n')
- }
- })
-
- it('should encode JSON input', async () => {
- const values: InputJSON = {
- meta: [
- {
- name: 'name',
- type: 'string',
- },
- ],
- data: [{ name: 'foo' }, { name: 'bar' }],
- }
- const result = encodeValues(values, 'JSON')
- let encoded = ''
- for await (const chunk of result) {
- encoded += chunk
- }
- expect(encoded).toEqual(JSON.stringify(values) + '\n')
- })
-
- it('should encode JSONObjectEachRow input', async () => {
- const values: InputJSONObjectEachRow = {
- a: { name: 'foo' },
- b: { name: 'bar' },
- }
- const result = encodeValues(values, 'JSON')
- let encoded = ''
- for await (const chunk of result) {
- encoded += chunk
- }
- expect(encoded).toEqual(JSON.stringify(values) + '\n')
- })
-
- it('should fail when we try to encode an unknown type of input', async () => {
- expect(() => encodeValues(1 as any, 'JSON')).toThrow(
- 'Cannot encode values of type number with JSON format'
- )
- })
-})
diff --git a/__tests__/unit/query_formatter.test.ts b/__tests__/unit/query_formatter.test.ts
deleted file mode 100644
index 81b4c978..00000000
--- a/__tests__/unit/query_formatter.test.ts
+++ /dev/null
@@ -1,56 +0,0 @@
-import * as ch from '../../src/schema'
-import { QueryFormatter } from '../../src/schema/query_formatter'
-
-describe('QueryFormatter', () => {
- it('should render a simple CREATE TABLE statement', async () => {
- const schema = new ch.Schema({
- foo: ch.String,
- bar: ch.UInt8,
- })
- const tableOptions = {
- name: 'my_table',
- schema,
- }
- expect(
- QueryFormatter.createTable(tableOptions, {
- engine: ch.MergeTree(),
- order_by: ['foo'],
- })
- ).toEqual(
- 'CREATE TABLE my_table (foo String, bar UInt8) ENGINE MergeTree() ORDER BY (foo)'
- )
- })
-
- it('should render a complex CREATE TABLE statement', async () => {
- const schema = new ch.Schema({
- foo: ch.String,
- bar: ch.UInt8,
- })
- const tableOptions = {
- name: 'my_table',
- schema,
- }
- expect(
- QueryFormatter.createTable(tableOptions, {
- engine: ch.MergeTree(),
- if_not_exists: true,
- on_cluster: '{cluster}',
- order_by: ['foo', 'bar'],
- partition_by: ['foo'],
- primary_key: ['bar'],
- settings: {
- merge_max_block_size: '16384',
- enable_mixed_granularity_parts: 1,
- },
- })
- ).toEqual(
- `CREATE TABLE IF NOT EXISTS my_table ON CLUSTER '{cluster}' ` +
- '(foo String, bar UInt8) ' +
- 'ENGINE MergeTree() ' +
- 'ORDER BY (foo, bar) ' +
- 'PARTITION BY (foo) ' +
- 'PRIMARY KEY (bar) ' +
- `SETTINGS merge_max_block_size = '16384', enable_mixed_granularity_parts = 1`
- )
- })
-})
diff --git a/__tests__/unit/schema_select_result.test.ts b/__tests__/unit/schema_select_result.test.ts
deleted file mode 100644
index 1eb1b311..00000000
--- a/__tests__/unit/schema_select_result.test.ts
+++ /dev/null
@@ -1,52 +0,0 @@
-import type { ClickHouseClient } from '../../src'
-import { ResultSet } from '../../src'
-import * as ch from '../../src/schema'
-import { QueryFormatter } from '../../src/schema/query_formatter'
-import { Readable } from 'stream'
-import { guid } from '../utils'
-
-describe('schema select result', () => {
- const client: ClickHouseClient = {
- query: () => {
- // stub
- },
- } as any
- const schema = new ch.Schema({
- id: ch.UInt32,
- name: ch.String,
- })
- const table = new ch.Table(client, {
- name: 'data_table',
- schema,
- })
-
- beforeEach(() => {
- jest
- .spyOn(QueryFormatter, 'select')
- .mockReturnValueOnce('SELECT * FROM data_table')
- jest
- .spyOn(client, 'query')
- .mockResolvedValueOnce(
- new ResultSet(
- Readable.from(['{"valid":"json"}\n', 'invalid_json}\n']),
- 'JSONEachRow',
- guid()
- )
- )
- })
-
- it('should not swallow error during select stream consumption', async () => {
- const { asyncGenerator } = await table.select()
-
- expect((await asyncGenerator().next()).value).toEqual({ valid: 'json' })
- await expect(asyncGenerator().next()).rejects.toMatchObject({
- message: expect.stringContaining('Unexpected token'),
- })
- })
-
- it('should not swallow error while converting stream to json', async () => {
- await expect(table.select().then((r) => r.json())).rejects.toMatchObject({
- message: expect.stringContaining('Unexpected token'),
- })
- })
-})
diff --git a/__tests__/unit/user_agent.test.ts b/__tests__/unit/user_agent.test.ts
deleted file mode 100644
index 7f6103d2..00000000
--- a/__tests__/unit/user_agent.test.ts
+++ /dev/null
@@ -1,37 +0,0 @@
-import * as p from '../../src/utils/process'
-import { getProcessVersion } from '../../src/utils/process'
-import * as os from 'os'
-import { getUserAgent } from '../../src/utils/user_agent'
-
-jest.mock('os')
-jest.mock('../../src/version', () => {
- return '0.0.42'
-})
-describe('user_agent', () => {
- describe('process util', () => {
- it('should get correct process version by default', async () => {
- expect(getProcessVersion()).toEqual(process.version)
- })
- })
-
- it('should generate a user agent without app id', async () => {
- setupMocks()
- const userAgent = getUserAgent()
- expect(userAgent).toEqual(
- 'clickhouse-js/0.0.42 (lv:nodejs/v16.144; os:freebsd)'
- )
- })
-
- it('should generate a user agent with app id', async () => {
- setupMocks()
- const userAgent = getUserAgent()
- expect(userAgent).toEqual(
- 'clickhouse-js/0.0.42 (lv:nodejs/v16.144; os:freebsd)'
- )
- })
-
- function setupMocks() {
- jest.spyOn(os, 'platform').mockReturnValueOnce('freebsd')
- jest.spyOn(p, 'getProcessVersion').mockReturnValueOnce('v16.144')
- }
-})
diff --git a/__tests__/unit/validate_insert_values.test.ts b/__tests__/unit/validate_insert_values.test.ts
deleted file mode 100644
index 53e6e0f5..00000000
--- a/__tests__/unit/validate_insert_values.test.ts
+++ /dev/null
@@ -1,55 +0,0 @@
-import Stream from 'stream'
-import type { DataFormat } from '../../src'
-import { validateInsertValues } from '../../src/client'
-
-describe('validateInsertValues', () => {
- it('should allow object mode stream for JSON* and raw for Tab* or CSV*', async () => {
- const objectModeStream = Stream.Readable.from('foo,bar\n', {
- objectMode: true,
- })
- const rawStream = Stream.Readable.from('foo,bar\n', {
- objectMode: false,
- })
-
- const objectFormats = [
- 'JSON',
- 'JSONObjectEachRow',
- 'JSONEachRow',
- 'JSONStringsEachRow',
- 'JSONCompactEachRow',
- 'JSONCompactEachRowWithNames',
- 'JSONCompactEachRowWithNamesAndTypes',
- 'JSONCompactStringsEachRowWithNames',
- 'JSONCompactStringsEachRowWithNamesAndTypes',
- ]
- objectFormats.forEach((format) => {
- expect(() =>
- validateInsertValues(objectModeStream, format as DataFormat)
- ).not.toThrow()
- expect(() =>
- validateInsertValues(rawStream, format as DataFormat)
- ).toThrow('with enabled object mode')
- })
-
- const rawFormats = [
- 'CSV',
- 'CSVWithNames',
- 'CSVWithNamesAndTypes',
- 'TabSeparated',
- 'TabSeparatedRaw',
- 'TabSeparatedWithNames',
- 'TabSeparatedWithNamesAndTypes',
- 'CustomSeparated',
- 'CustomSeparatedWithNames',
- 'CustomSeparatedWithNamesAndTypes',
- ]
- rawFormats.forEach((format) => {
- expect(() =>
- validateInsertValues(objectModeStream, format as DataFormat)
- ).toThrow('disabled object mode')
- expect(() =>
- validateInsertValues(rawStream, format as DataFormat)
- ).not.toThrow()
- })
- })
-})
diff --git a/__tests__/utils/guid.ts b/__tests__/utils/guid.ts
deleted file mode 100644
index e042fb25..00000000
--- a/__tests__/utils/guid.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-import { v4 as uuid_v4 } from 'uuid'
-
-export function guid() {
- return uuid_v4().replace(/-/g, '')
-}
diff --git a/__tests__/utils/retry.test.ts b/__tests__/utils/retry.test.ts
deleted file mode 100644
index 3b966473..00000000
--- a/__tests__/utils/retry.test.ts
+++ /dev/null
@@ -1,54 +0,0 @@
-import { retryOnFailure } from './index'
-import type { RetryOnFailureOptions } from './retry'
-
-describe('retryOnFailure', () => {
- it('should resolve after some failures', async () => {
- let result = 0
- setTimeout(() => {
- result = 42
- }, 100)
- await retryOnFailure(async () => {
- expect(result).toEqual(42)
- })
- })
-
- it('should throw after final fail', async () => {
- let result = 0
- setTimeout(() => {
- result = 42
- }, 1000).unref()
- await expect(
- retryOnFailure(
- async () => {
- expect(result).toEqual(42)
- },
- {
- maxAttempts: 2,
- waitBetweenAttemptsMs: 1,
- }
- )
- ).rejects.toThrowError()
- })
-
- it('should not allow invalid options values', async () => {
- const assertThrows = async (options: RetryOnFailureOptions) => {
- await expect(
- retryOnFailure(async () => {
- expect(1).toEqual(1)
- }, options)
- ).rejects.toThrowError()
- }
-
- for (const [maxAttempts, waitBetweenAttempts] of [
- [-1, 1],
- [1, -1],
- [0, 1],
- [1, 0],
- ]) {
- await assertThrows({
- maxAttempts,
- waitBetweenAttemptsMs: waitBetweenAttempts,
- })
- }
- })
-})
diff --git a/__tests__/utils/retry.ts b/__tests__/utils/retry.ts
deleted file mode 100644
index 53f805db..00000000
--- a/__tests__/utils/retry.ts
+++ /dev/null
@@ -1,53 +0,0 @@
-export type RetryOnFailureOptions = {
- maxAttempts?: number
- waitBetweenAttemptsMs?: number
- logRetries?: boolean
-}
-
-export async function retryOnFailure(
- fn: () => Promise,
- options?: RetryOnFailureOptions
-): Promise {
- const maxAttempts = validate(options?.maxAttempts) ?? 200
- const waitBetweenAttempts = validate(options?.waitBetweenAttemptsMs) ?? 50
- const logRetries = options?.logRetries ?? false
-
- let attempts = 0
-
- const attempt: () => Promise = async () => {
- try {
- return await fn()
- } catch (e: any) {
- if (++attempts === maxAttempts) {
- console.error(
- `Final fail after ${attempts} attempt(s) every ${waitBetweenAttempts} ms\n`,
- e.message
- )
- throw e
- }
- if (logRetries) {
- console.error(
- `Failure after ${attempts} attempt(s), will retry\n`,
- e.message
- )
- }
- await sleep(waitBetweenAttempts)
- return await attempt()
- }
- }
-
- return await attempt()
-}
-
-export function sleep(ms: number): Promise {
- return new Promise((resolve) => {
- setTimeout(resolve, ms).unref()
- })
-}
-
-function validate(value: undefined | number): typeof value {
- if (value !== undefined && value < 1) {
- throw new Error(`Expect maxTries to be at least 1`)
- }
- return value
-}
diff --git a/__tests__/utils/schema.ts b/__tests__/utils/schema.ts
deleted file mode 100644
index 68030f44..00000000
--- a/__tests__/utils/schema.ts
+++ /dev/null
@@ -1,49 +0,0 @@
-import { getClickHouseTestEnvironment, TestEnv } from './test_env'
-import * as ch from '../../src/schema'
-import type { ClickHouseClient } from '../../src'
-import type { NonEmptyArray } from '../../src/schema'
-
-export async function createTableWithSchema(
- client: ClickHouseClient,
- schema: ch.Schema,
- tableName: string,
- orderBy: NonEmptyArray
-) {
- const table = new ch.Table(client, {
- name: tableName,
- schema,
- })
- const env = getClickHouseTestEnvironment()
- switch (env) {
- case TestEnv.Cloud:
- await table.create({
- engine: ch.MergeTree(),
- order_by: orderBy,
- clickhouse_settings: {
- wait_end_of_query: 1,
- },
- })
- break
- case TestEnv.LocalCluster:
- await table.create({
- engine: ch.ReplicatedMergeTree({
- zoo_path: '/clickhouse/{cluster}/tables/{database}/{table}/{shard}',
- replica_name: '{replica}',
- }),
- on_cluster: '{cluster}',
- order_by: orderBy,
- clickhouse_settings: {
- wait_end_of_query: 1,
- },
- })
- break
- case TestEnv.LocalSingleNode:
- await table.create({
- engine: ch.MergeTree(),
- order_by: orderBy,
- })
- break
- }
- console.log(`Created table ${tableName}`)
- return table
-}
diff --git a/__tests__/utils/test_env.test.ts b/__tests__/utils/test_env.test.ts
deleted file mode 100644
index ce15979c..00000000
--- a/__tests__/utils/test_env.test.ts
+++ /dev/null
@@ -1,44 +0,0 @@
-import { getClickHouseTestEnvironment, TestEnv } from './index'
-
-describe('TestEnv environment variable parsing', () => {
- const key = 'CLICKHOUSE_TEST_ENVIRONMENT'
- let previousValue = process.env[key]
- beforeAll(() => {
- previousValue = process.env[key]
- })
- beforeEach(() => {
- delete process.env[key]
- })
- afterAll(() => {
- process.env[key] = previousValue
- })
-
- it('should fall back to local_single_node env if unset', async () => {
- expect(getClickHouseTestEnvironment()).toBe(TestEnv.LocalSingleNode)
- })
-
- it('should be able to set local_single_node env explicitly', async () => {
- process.env[key] = 'local_single_node'
- expect(getClickHouseTestEnvironment()).toBe(TestEnv.LocalSingleNode)
- })
-
- it('should be able to set local_cluster env', async () => {
- process.env[key] = 'local_cluster'
- expect(getClickHouseTestEnvironment()).toBe(TestEnv.LocalCluster)
- })
-
- it('should be able to set cloud env', async () => {
- process.env[key] = 'cloud'
- expect(getClickHouseTestEnvironment()).toBe(TestEnv.Cloud)
- })
-
- it('should throw in case of an empty string', async () => {
- process.env[key] = ''
- expect(getClickHouseTestEnvironment).toThrowError()
- })
-
- it('should throw in case of malformed enum value', async () => {
- process.env[key] = 'foobar'
- expect(getClickHouseTestEnvironment).toThrowError()
- })
-})
diff --git a/benchmarks/leaks/README.md b/benchmarks/leaks/README.md
index 9e736eeb..68de8625 100644
--- a/benchmarks/leaks/README.md
+++ b/benchmarks/leaks/README.md
@@ -39,7 +39,7 @@ See [official examples](https://clickhouse.com/docs/en/getting-started/example-d
#### Run the test
```sh
-tsc --project tsconfig.dev.json \
+tsc --project tsconfig.json \
&& node --expose-gc --max-old-space-size=256 \
build/benchmarks/leaks/memory_leak_brown.js
```
@@ -61,7 +61,7 @@ Configuration can be done via env variables:
With default configuration:
```sh
-tsc --project tsconfig.dev.json \
+tsc --project tsconfig.json \
&& node --expose-gc --max-old-space-size=256 \
build/benchmarks/leaks/memory_leak_random_integers.js
```
@@ -69,7 +69,7 @@ build/benchmarks/leaks/memory_leak_random_integers.js
With custom configuration via env variables:
```sh
-tsc --project tsconfig.dev.json \
+tsc --project tsconfig.json \
&& BATCH_SIZE=100000000 ITERATIONS=1000 LOG_INTERVAL=100 \
node --expose-gc --max-old-space-size=256 \
build/benchmarks/leaks/memory_leak_random_integers.js
@@ -90,7 +90,7 @@ Configuration is the same as the previous test, but with different default value
With default configuration:
```sh
-tsc --project tsconfig.dev.json \
+tsc --project tsconfig.json \
&& node --expose-gc --max-old-space-size=256 \
build/benchmarks/leaks/memory_leak_arrays.js
```
@@ -98,8 +98,8 @@ build/benchmarks/leaks/memory_leak_arrays.js
With custom configuration via env variables and different max heap size:
```sh
-tsc --project tsconfig.dev.json \
+tsc --project tsconfig.json \
&& BATCH_SIZE=10000 ITERATIONS=1000 LOG_INTERVAL=100 \
node --expose-gc --max-old-space-size=1024 \
build/benchmarks/leaks/memory_leak_arrays.js
-```
\ No newline at end of file
+```
diff --git a/benchmarks/leaks/memory_leak_arrays.ts b/benchmarks/leaks/memory_leak_arrays.ts
index 6722588f..d845080b 100644
--- a/benchmarks/leaks/memory_leak_arrays.ts
+++ b/benchmarks/leaks/memory_leak_arrays.ts
@@ -1,4 +1,3 @@
-import { createClient } from '../../src'
import { v4 as uuid_v4 } from 'uuid'
import { randomInt } from 'crypto'
import {
@@ -10,6 +9,7 @@ import {
randomArray,
randomStr,
} from './shared'
+import { createClient } from '@clickhouse/client'
const program = async () => {
const client = createClient({})
diff --git a/benchmarks/leaks/memory_leak_brown.ts b/benchmarks/leaks/memory_leak_brown.ts
index 052c6732..b346c520 100644
--- a/benchmarks/leaks/memory_leak_brown.ts
+++ b/benchmarks/leaks/memory_leak_brown.ts
@@ -1,4 +1,3 @@
-import { createClient } from '../../src'
import { v4 as uuid_v4 } from 'uuid'
import Path from 'path'
import Fs from 'fs'
@@ -9,6 +8,7 @@ import {
logMemoryUsage,
logMemoryUsageDiff,
} from './shared'
+import { createClient } from '@clickhouse/client'
const program = async () => {
const client = createClient({})
diff --git a/benchmarks/leaks/memory_leak_random_integers.ts b/benchmarks/leaks/memory_leak_random_integers.ts
index 1683172e..cb875f01 100644
--- a/benchmarks/leaks/memory_leak_random_integers.ts
+++ b/benchmarks/leaks/memory_leak_random_integers.ts
@@ -1,5 +1,5 @@
import Stream from 'stream'
-import { createClient } from '../../src'
+import { createClient } from '@clickhouse/client'
import { v4 as uuid_v4 } from 'uuid'
import { randomInt } from 'crypto'
import {
diff --git a/benchmarks/tsconfig.json b/benchmarks/tsconfig.json
new file mode 100644
index 00000000..cc899888
--- /dev/null
+++ b/benchmarks/tsconfig.json
@@ -0,0 +1,17 @@
+{
+ "extends": "../tsconfig.json",
+ "include": ["leaks/**/*.ts"],
+ "compilerOptions": {
+ "noUnusedLocals": false,
+ "noUnusedParameters": false,
+ "outDir": "dist",
+ "baseUrl": "./",
+ "paths": {
+ "@clickhouse/client": ["../packages/client-node/src/index.ts"],
+ "@clickhouse/client/*": ["../packages/client-node/src/*"]
+ }
+ },
+ "ts-node": {
+ "require": ["tsconfig-paths/register"]
+ }
+}
diff --git a/coverage/badge.svg b/coverage/badge.svg
deleted file mode 100644
index d5c9fe7e..00000000
--- a/coverage/badge.svg
+++ /dev/null
@@ -1 +0,0 @@
-coverage: 92.29% coverage coverage 92.29% 92.29%
\ No newline at end of file
diff --git a/coverage/coverage-summary.json b/coverage/coverage-summary.json
deleted file mode 100644
index 6019888f..00000000
--- a/coverage/coverage-summary.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{"total": {"lines":{"total":633,"covered":586,"skipped":0,"pct":92.57},"statements":{"total":675,"covered":623,"skipped":0,"pct":92.29},"functions":{"total":190,"covered":168,"skipped":0,"pct":88.42},"branches":{"total":334,"covered":294,"skipped":0,"pct":88.02},"branchesTrue":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/client.ts": {"lines":{"total":76,"covered":74,"skipped":0,"pct":97.36},"functions":{"total":19,"covered":19,"skipped":0,"pct":100},"statements":{"total":78,"covered":76,"skipped":0,"pct":97.43},"branches":{"total":107,"covered":104,"skipped":0,"pct":97.19}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/index.ts": {"lines":{"total":5,"covered":5,"skipped":0,"pct":100},"functions":{"total":3,"covered":3,"skipped":0,"pct":100},"statements":{"total":7,"covered":7,"skipped":0,"pct":100},"branches":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/logger.ts": {"lines":{"total":46,"covered":38,"skipped":0,"pct":82.6},"functions":{"total":14,"covered":8,"skipped":0,"pct":57.14},"statements":{"total":46,"covered":38,"skipped":0,"pct":82.6},"branches":{"total":14,"covered":14,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/result.ts": {"lines":{"total":33,"covered":33,"skipped":0,"pct":100},"functions":{"total":8,"covered":8,"skipped":0,"pct":100},"statements":{"total":33,"covered":33,"skipped":0,"pct":100},"branches":{"total":7,"covered":7,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/settings.ts": {"lines":{"total":4,"covered":4,"skipped":0,"pct":100},"functions":{"total":4,"covered":4,"skipped":0,"pct":100},"statements":{"total":4,"covered":4,"skipped":0,"pct":100},"branches":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/version.ts": {"lines":{"total":1,"covered":1,"skipped":0,"pct":100},"functions":{"total":0,"covered":0,"skipped":0,"pct":100},"statements":{"total":1,"covered":1,"skipped":0,"pct":100},"branches":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/connection/connection.ts": {"lines":{"total":6,"covered":6,"skipped":0,"pct":100},"functions":{"total":1,"covered":1,"skipped":0,"pct":100},"statements":{"total":6,"covered":6,"skipped":0,"pct":100},"branches":{"total":3,"covered":3,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/connection/index.ts": {"lines":{"total":1,"covered":1,"skipped":0,"pct":100},"functions":{"total":0,"covered":0,"skipped":0,"pct":100},"statements":{"total":1,"covered":1,"skipped":0,"pct":100},"branches":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/connection/adapter/base_http_adapter.ts": {"lines":{"total":126,"covered":122,"skipped":0,"pct":96.82},"functions":{"total":28,"covered":28,"skipped":0,"pct":100},"statements":{"total":127,"covered":123,"skipped":0,"pct":96.85},"branches":{"total":47,"covered":45,"skipped":0,"pct":95.74}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/connection/adapter/http_adapter.ts": {"lines":{"total":6,"covered":6,"skipped":0,"pct":100},"functions":{"total":2,"covered":2,"skipped":0,"pct":100},"statements":{"total":6,"covered":6,"skipped":0,"pct":100},"branches":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/connection/adapter/http_search_params.ts": {"lines":{"total":21,"covered":21,"skipped":0,"pct":100},"functions":{"total":1,"covered":1,"skipped":0,"pct":100},"statements":{"total":21,"covered":21,"skipped":0,"pct":100},"branches":{"total":12,"covered":12,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/connection/adapter/https_adapter.ts": {"lines":{"total":11,"covered":11,"skipped":0,"pct":100},"functions":{"total":3,"covered":3,"skipped":0,"pct":100},"statements":{"total":11,"covered":11,"skipped":0,"pct":100},"branches":{"total":26,"covered":26,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/connection/adapter/index.ts": {"lines":{"total":2,"covered":2,"skipped":0,"pct":100},"functions":{"total":2,"covered":2,"skipped":0,"pct":100},"statements":{"total":4,"covered":4,"skipped":0,"pct":100},"branches":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/connection/adapter/transform_url.ts": {"lines":{"total":7,"covered":7,"skipped":0,"pct":100},"functions":{"total":1,"covered":1,"skipped":0,"pct":100},"statements":{"total":7,"covered":7,"skipped":0,"pct":100},"branches":{"total":6,"covered":5,"skipped":0,"pct":83.33}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/data_formatter/format_query_params.ts": {"lines":{"total":35,"covered":34,"skipped":0,"pct":97.14},"functions":{"total":4,"covered":4,"skipped":0,"pct":100},"statements":{"total":43,"covered":42,"skipped":0,"pct":97.67},"branches":{"total":21,"covered":21,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/data_formatter/format_query_settings.ts": {"lines":{"total":8,"covered":8,"skipped":0,"pct":100},"functions":{"total":1,"covered":1,"skipped":0,"pct":100},"statements":{"total":11,"covered":11,"skipped":0,"pct":100},"branches":{"total":6,"covered":6,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/data_formatter/formatter.ts": {"lines":{"total":26,"covered":22,"skipped":0,"pct":84.61},"functions":{"total":7,"covered":7,"skipped":0,"pct":100},"statements":{"total":26,"covered":22,"skipped":0,"pct":84.61},"branches":{"total":5,"covered":4,"skipped":0,"pct":80}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/data_formatter/index.ts": {"lines":{"total":3,"covered":3,"skipped":0,"pct":100},"functions":{"total":2,"covered":2,"skipped":0,"pct":100},"statements":{"total":5,"covered":5,"skipped":0,"pct":100},"branches":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/error/index.ts": {"lines":{"total":1,"covered":1,"skipped":0,"pct":100},"functions":{"total":0,"covered":0,"skipped":0,"pct":100},"statements":{"total":1,"covered":1,"skipped":0,"pct":100},"branches":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/error/parse_error.ts": {"lines":{"total":14,"covered":13,"skipped":0,"pct":92.85},"functions":{"total":2,"covered":2,"skipped":0,"pct":100},"statements":{"total":14,"covered":13,"skipped":0,"pct":92.85},"branches":{"total":6,"covered":4,"skipped":0,"pct":66.66}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/schema/engines.ts": {"lines":{"total":20,"covered":9,"skipped":0,"pct":45},"functions":{"total":16,"covered":2,"skipped":0,"pct":12.5},"statements":{"total":34,"covered":18,"skipped":0,"pct":52.94},"branches":{"total":6,"covered":0,"skipped":0,"pct":0}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/schema/index.ts": {"lines":{"total":7,"covered":7,"skipped":0,"pct":100},"functions":{"total":0,"covered":0,"skipped":0,"pct":100},"statements":{"total":7,"covered":7,"skipped":0,"pct":100},"branches":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/schema/query_formatter.ts": {"lines":{"total":21,"covered":21,"skipped":0,"pct":100},"functions":{"total":5,"covered":5,"skipped":0,"pct":100},"statements":{"total":21,"covered":21,"skipped":0,"pct":100},"branches":{"total":24,"covered":22,"skipped":0,"pct":91.66}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/schema/schema.ts": {"lines":{"total":3,"covered":3,"skipped":0,"pct":100},"functions":{"total":3,"covered":3,"skipped":0,"pct":100},"statements":{"total":3,"covered":3,"skipped":0,"pct":100},"branches":{"total":4,"covered":3,"skipped":0,"pct":75}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/schema/stream.ts": {"lines":{"total":5,"covered":5,"skipped":0,"pct":100},"functions":{"total":4,"covered":4,"skipped":0,"pct":100},"statements":{"total":5,"covered":5,"skipped":0,"pct":100},"branches":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/schema/table.ts": {"lines":{"total":20,"covered":19,"skipped":0,"pct":95},"functions":{"total":6,"covered":6,"skipped":0,"pct":100},"statements":{"total":20,"covered":19,"skipped":0,"pct":95},"branches":{"total":11,"covered":8,"skipped":0,"pct":72.72}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/schema/types.ts": {"lines":{"total":84,"covered":70,"skipped":0,"pct":83.33},"functions":{"total":40,"covered":38,"skipped":0,"pct":95},"statements":{"total":92,"covered":78,"skipped":0,"pct":84.78},"branches":{"total":20,"covered":2,"skipped":0,"pct":10}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/schema/where.ts": {"lines":{"total":16,"covered":15,"skipped":0,"pct":93.75},"functions":{"total":7,"covered":7,"skipped":0,"pct":100},"statements":{"total":16,"covered":15,"skipped":0,"pct":93.75},"branches":{"total":5,"covered":4,"skipped":0,"pct":80}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/utils/index.ts": {"lines":{"total":2,"covered":2,"skipped":0,"pct":100},"functions":{"total":0,"covered":0,"skipped":0,"pct":100},"statements":{"total":2,"covered":2,"skipped":0,"pct":100},"branches":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/utils/process.ts": {"lines":{"total":2,"covered":2,"skipped":0,"pct":100},"functions":{"total":1,"covered":1,"skipped":0,"pct":100},"statements":{"total":2,"covered":2,"skipped":0,"pct":100},"branches":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/utils/stream.ts": {"lines":{"total":13,"covered":13,"skipped":0,"pct":100},"functions":{"total":4,"covered":4,"skipped":0,"pct":100},"statements":{"total":13,"covered":13,"skipped":0,"pct":100},"branches":{"total":2,"covered":2,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/utils/string.ts": {"lines":{"total":2,"covered":2,"skipped":0,"pct":100},"functions":{"total":1,"covered":1,"skipped":0,"pct":100},"statements":{"total":2,"covered":2,"skipped":0,"pct":100},"branches":{"total":0,"covered":0,"skipped":0,"pct":100}}
-,"/home/runner/work/clickhouse-js/clickhouse-js/src/utils/user_agent.ts": {"lines":{"total":6,"covered":6,"skipped":0,"pct":100},"functions":{"total":1,"covered":1,"skipped":0,"pct":100},"statements":{"total":6,"covered":6,"skipped":0,"pct":100},"branches":{"total":2,"covered":2,"skipped":0,"pct":100}}
-}
diff --git a/examples/README.md b/examples/README.md
index ce6bc12d..a7c70752 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -2,20 +2,24 @@
## How to run
-All commands are written with an assumption that you are in the root project folder.
-
### Any example except `create_table_*`
-Start a local ClickHouse first:
+Start a local ClickHouse first (from the root project folder):
```sh
docker-compose up -d
```
-then you can run some sample program:
+Change the working directory to examples:
+
+```sh
+cd examples
+```
+
+Then, you should be able to run the sample programs:
```sh
-ts-node --transpile-only --project tsconfig.dev.json examples/array_json_each_row.ts
+ts-node --transpile-only --project tsconfig.json array_json_each_row.ts
```
### TLS examples
@@ -29,14 +33,13 @@ sudo -- sh -c "echo 127.0.0.1 server.clickhouseconnect.test >> /etc/hosts"
After that, you should be able to run the examples:
```bash
-ts-node --transpile-only --project tsconfig.dev.json examples/basic_tls.ts
-ts-node --transpile-only --project tsconfig.dev.json examples/mutual_tls.ts
+ts-node --transpile-only --project tsconfig.json basic_tls.ts
+ts-node --transpile-only --project tsconfig.json mutual_tls.ts
```
### Create table examples
-- for `create_table_local_cluster.ts`,
- you will need to start a local cluster first:
+- for `create_table_local_cluster.ts`, you will need to start a local cluster first:
```sh
docker-compose -f docker-compose.cluster.yml up -d
@@ -45,16 +48,16 @@ docker-compose -f docker-compose.cluster.yml up -d
then run the example:
```
-ts-node --transpile-only --project tsconfig.dev.json examples/create_table_local_cluster.ts
+ts-node --transpile-only --project tsconfig.json create_table_local_cluster.ts
```
-- for `create_table_cloud.ts`, Docker containers are not required,
- but you need to set some environment variables first:
+- for `create_table_cloud.ts`, Docker containers are not required, but you need to set some environment variables first:
```sh
export CLICKHOUSE_HOST=https://<your-clickhouse-cloud-hostname>:8443
export CLICKHOUSE_PASSWORD=<your-clickhouse-cloud-password>
```
+
You can obtain these credentials in the Cloud console.
This example assumes that you do not add any users or databases
to your Cloud instance, so it is `default` for both.
@@ -62,5 +65,5 @@ to your Cloud instance, so it is `default` for both.
Run the example:
```
-ts-node --transpile-only --project tsconfig.dev.json examples/create_table_cloud.ts
+ts-node --transpile-only --project tsconfig.json create_table_cloud.ts
```
diff --git a/examples/abort_request.ts b/examples/abort_request.ts
index 9624fcea..f6ce64f3 100644
--- a/examples/abort_request.ts
+++ b/examples/abort_request.ts
@@ -9,7 +9,7 @@ void (async () => {
format: 'CSV',
abort_signal: controller.signal,
})
- .catch((e) => {
+ .catch((e: unknown) => {
console.info('Select was aborted')
console.info('This is the underlying error message')
console.info('------------------------------------')
diff --git a/examples/clickhouse_settings.ts b/examples/clickhouse_settings.ts
index 389b9737..5f409628 100644
--- a/examples/clickhouse_settings.ts
+++ b/examples/clickhouse_settings.ts
@@ -1,4 +1,5 @@
import { createClient } from '@clickhouse/client'
+
void (async () => {
const client = createClient()
const rows = await client.query({
diff --git a/examples/ping_cloud.ts b/examples/ping_cloud.ts
index cec98b6f..f4c97d04 100644
--- a/examples/ping_cloud.ts
+++ b/examples/ping_cloud.ts
@@ -1,4 +1,5 @@
import { createClient } from '@clickhouse/client'
+
void (async () => {
const client = createClient({
host: getFromEnv('CLICKHOUSE_HOST'),
diff --git a/examples/query_with_parameter_binding.ts b/examples/query_with_parameter_binding.ts
index 77c91a51..7f4cc60e 100644
--- a/examples/query_with_parameter_binding.ts
+++ b/examples/query_with_parameter_binding.ts
@@ -1,4 +1,5 @@
import { createClient } from '@clickhouse/client'
+
void (async () => {
const client = createClient()
const rows = await client.query({
diff --git a/examples/schema/simple_schema.ts b/examples/schema/simple_schema.ts
deleted file mode 100644
index 122704ba..00000000
--- a/examples/schema/simple_schema.ts
+++ /dev/null
@@ -1,61 +0,0 @@
-import * as ch from '../../src/schema'
-import type { Infer } from '../../src/schema'
-import { InsertStream } from '../../src/schema'
-import { createClient } from '../../src'
-// If you found this example,
-// consider it as a highly experimental WIP development :)
-void (async () => {
- const client = createClient()
-
- enum UserRole {
- User = 'User',
- Admin = 'Admin',
- }
- const userSchema = new ch.Schema({
- id: ch.UInt64,
- name: ch.String,
- externalIds: ch.Array(ch.UInt32),
- settings: ch.Map(ch.String, ch.String),
- role: ch.Enum(UserRole),
- registeredAt: ch.DateTime64(3, 'Europe/Amsterdam'),
- })
-
- type Data = Infer<typeof userSchema>
-
- const usersTable = new ch.Table(client, {
- name: 'users',
- schema: userSchema,
- })
-
- await usersTable.create({
- engine: ch.MergeTree(),
- order_by: ['id'],
- })
-
- const insertStream = new InsertStream()
- insertStream.add({
- // NB: (U)Int64/128/256 are represented as strings
- // since their max value > Number.MAX_SAFE_INTEGER
- id: '42',
- name: 'foo',
- externalIds: [1, 2],
- settings: { foo: 'bar' },
- role: UserRole.Admin,
- registeredAt: '2021-04-30 08:05:37.123',
- })
- insertStream.complete()
- await usersTable.insert({
- values: insertStream,
- clickhouse_settings: {
- insert_quorum: '2',
- },
- })
-
- const { asyncGenerator } = await usersTable.select({
- columns: ['id', 'name', 'registeredAt'], // or omit to select *
- order_by: [['name', 'DESC']],
- })
- for await (const value of asyncGenerator()) {
- console.log(value.id)
- }
-})()
diff --git a/examples/select_json_with_metadata.ts b/examples/select_json_with_metadata.ts
index 2dfd2517..1e0fad33 100644
--- a/examples/select_json_with_metadata.ts
+++ b/examples/select_json_with_metadata.ts
@@ -1,5 +1,5 @@
-import type { ResponseJSON } from '@clickhouse/client'
-import { createClient } from '@clickhouse/client'
+import { createClient, type ResponseJSON } from '@clickhouse/client'
+
void (async () => {
const client = createClient()
const rows = await client.query({
diff --git a/examples/select_streaming_for_await.ts b/examples/select_streaming_for_await.ts
index 3db2cc33..46961a98 100644
--- a/examples/select_streaming_for_await.ts
+++ b/examples/select_streaming_for_await.ts
@@ -1,5 +1,4 @@
-import type { Row } from '@clickhouse/client'
-import { createClient } from '@clickhouse/client'
+import { createClient, type Row } from '@clickhouse/client'
/**
* NB: `for await const` has quite significant overhead
diff --git a/examples/select_streaming_on_data.ts b/examples/select_streaming_on_data.ts
index f71587cb..e28d4bb0 100644
--- a/examples/select_streaming_on_data.ts
+++ b/examples/select_streaming_on_data.ts
@@ -1,5 +1,4 @@
-import type { Row } from '@clickhouse/client'
-import { createClient } from '@clickhouse/client'
+import { createClient, type Row } from '@clickhouse/client'
/**
* Can be used for consuming large datasets for reducing memory overhead,
@@ -12,7 +11,6 @@ import { createClient } from '@clickhouse/client'
* As `for await const` has quite significant overhead (up to 2 times worse)
* vs old school `on(data)` approach, this example covers `on(data)` usage
*/
-
void (async () => {
const client = createClient()
const rows = await client.query({
@@ -20,7 +18,7 @@ void (async () => {
format: 'CSV',
})
const stream = rows.stream()
- stream.on('data', (rows) => {
+ stream.on('data', (rows: Row[]) => {
rows.forEach((row: Row) => {
console.log(row.text)
})
diff --git a/examples/tsconfig.json b/examples/tsconfig.json
new file mode 100644
index 00000000..324dde9b
--- /dev/null
+++ b/examples/tsconfig.json
@@ -0,0 +1,17 @@
+{
+ "extends": "../tsconfig.json",
+ "include": ["./*.ts"],
+ "compilerOptions": {
+ "noUnusedLocals": false,
+ "noUnusedParameters": false,
+ "outDir": "dist",
+ "baseUrl": "./",
+ "paths": {
+ "@clickhouse/client": ["../packages/client-node/src/index.ts"],
+ "@clickhouse/client/*": ["../packages/client-node/src/*"]
+ }
+ },
+ "ts-node": {
+ "require": ["tsconfig-paths/register"]
+ }
+}
diff --git a/jasmine.all.json b/jasmine.all.json
new file mode 100644
index 00000000..5910e0ba
--- /dev/null
+++ b/jasmine.all.json
@@ -0,0 +1,17 @@
+{
+ "spec_dir": ".",
+ "spec_files": [
+ "packages/client-common/__tests__/utils/*.test.ts",
+ "packages/client-common/__tests__/unit/*.test.ts",
+ "packages/client-common/__tests__/integration/*.test.ts",
+ "packages/client-node/__tests__/unit/*.test.ts",
+ "packages/client-node/__tests__/integration/*.test.ts",
+ "packages/client-node/__tests__/tls/*.test.ts"
+ ],
+ "env": {
+ "failSpecWithNoExpectations": true,
+ "stopSpecOnExpectationFailure": true,
+ "stopOnSpecFailure": false,
+ "random": false
+ }
+}
diff --git a/jasmine.common.integration.json b/jasmine.common.integration.json
new file mode 100644
index 00000000..22c983ee
--- /dev/null
+++ b/jasmine.common.integration.json
@@ -0,0 +1,10 @@
+{
+ "spec_dir": "packages/client-common/__tests__",
+ "spec_files": ["integration/*.test.ts"],
+ "env": {
+ "failSpecWithNoExpectations": true,
+ "stopSpecOnExpectationFailure": true,
+ "stopOnSpecFailure": false,
+ "random": false
+ }
+}
diff --git a/jasmine.common.unit.json b/jasmine.common.unit.json
new file mode 100644
index 00000000..e146713a
--- /dev/null
+++ b/jasmine.common.unit.json
@@ -0,0 +1,10 @@
+{
+ "spec_dir": "packages/client-common/__tests__",
+ "spec_files": ["utils/*.test.ts", "unit/*.test.ts"],
+ "env": {
+ "failSpecWithNoExpectations": true,
+ "stopSpecOnExpectationFailure": true,
+ "stopOnSpecFailure": false,
+ "random": false
+ }
+}
diff --git a/jasmine.node.integration.json b/jasmine.node.integration.json
new file mode 100644
index 00000000..4122efd1
--- /dev/null
+++ b/jasmine.node.integration.json
@@ -0,0 +1,10 @@
+{
+ "spec_dir": "packages/client-node/__tests__",
+ "spec_files": ["integration/*.test.ts"],
+ "env": {
+ "failSpecWithNoExpectations": true,
+ "stopSpecOnExpectationFailure": true,
+ "stopOnSpecFailure": false,
+ "random": false
+ }
+}
diff --git a/jasmine.node.tls.json b/jasmine.node.tls.json
new file mode 100644
index 00000000..5f27d29a
--- /dev/null
+++ b/jasmine.node.tls.json
@@ -0,0 +1,10 @@
+{
+ "spec_dir": "packages/client-node/__tests__",
+ "spec_files": ["tls/*.test.ts"],
+ "env": {
+ "failSpecWithNoExpectations": true,
+ "stopSpecOnExpectationFailure": true,
+ "stopOnSpecFailure": false,
+ "random": false
+ }
+}
diff --git a/jasmine.node.unit.json b/jasmine.node.unit.json
new file mode 100644
index 00000000..140a29c4
--- /dev/null
+++ b/jasmine.node.unit.json
@@ -0,0 +1,10 @@
+{
+ "spec_dir": "packages/client-node/__tests__",
+ "spec_files": ["unit/*.test.ts", "utils/*.test.ts"],
+ "env": {
+ "failSpecWithNoExpectations": true,
+ "stopSpecOnExpectationFailure": true,
+ "stopOnSpecFailure": false,
+ "random": false
+ }
+}
diff --git a/jest.config.js b/jest.config.js
deleted file mode 100644
index d691ca6b..00000000
--- a/jest.config.js
+++ /dev/null
@@ -1,11 +0,0 @@
-/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
-module.exports = {
- testEnvironment: 'node',
- preset: 'ts-jest',
- clearMocks: true,
- collectCoverageFrom: ['<rootDir>/src/**/*.ts'],
- testMatch: ['<rootDir>/__tests__/**/*.test.{js,mjs,ts,tsx}'],
- testTimeout: 30000,
- coverageReporters: ['json-summary'],
- reporters: ['<rootDir>/jest.reporter.js'],
-}
diff --git a/jest.reporter.js b/jest.reporter.js
deleted file mode 100644
index aceeae50..00000000
--- a/jest.reporter.js
+++ /dev/null
@@ -1,22 +0,0 @@
-// see https://github.com/facebook/jest/issues/4156#issuecomment-757376195
-const { DefaultReporter } = require('@jest/reporters')
-
-class Reporter extends DefaultReporter {
- constructor() {
- super(...arguments)
- }
-
- // Print console logs only for __failed__ test __files__
- // Unfortunately, it does not seem possible to extract logs
- // from a particular test __case__ in a clean way without too much hacks
- printTestFileHeader(_testPath, config, result) {
- const console = result.console
- if (result.numFailingTests === 0 && !result.testExecError) {
- result.console = null
- }
- super.printTestFileHeader(...arguments)
- result.console = console
- }
-}
-
-module.exports = Reporter
diff --git a/karma.config.cjs b/karma.config.cjs
new file mode 100644
index 00000000..9c30dc01
--- /dev/null
+++ b/karma.config.cjs
@@ -0,0 +1,64 @@
+const webpackConfig = require('./webpack.dev.js')
+
+module.exports = function (config) {
+ config.set({
+ // base path that will be used to resolve all patterns (eg. files, exclude)
+ basePath: '',
+ frameworks: ['webpack', 'jasmine'],
+ // list of files / patterns to load in the browser
+ files: [
+ 'packages/client-common/__tests__/unit/*.test.ts',
+ 'packages/client-common/__tests__/utils/*.ts',
+ 'packages/client-common/__tests__/integration/*.test.ts',
+ 'packages/client-browser/__tests__/integration/*.test.ts',
+ 'packages/client-browser/__tests__/unit/*.test.ts',
+ ],
+ exclude: [],
+ webpack: webpackConfig,
+ preprocessors: {
+ 'packages/client-common/**/*.ts': ['webpack', 'sourcemap'],
+ 'packages/client-browser/**/*.ts': ['webpack', 'sourcemap'],
+ 'packages/client-common/__tests__/unit/*.test.ts': [
+ 'webpack',
+ 'sourcemap',
+ ],
+ 'packages/client-common/__tests__/integration/*.ts': [
+ 'webpack',
+ 'sourcemap',
+ ],
+ 'packages/client-common/__tests__/utils/*.ts': ['webpack', 'sourcemap'],
+ 'packages/client-browser/__tests__/unit/*.test.ts': [
+ 'webpack',
+ 'sourcemap',
+ ],
+ 'packages/client-browser/__tests__/integration/*.ts': [
+ 'webpack',
+ 'sourcemap',
+ ],
+ },
+ reporters: ['progress'],
+ port: 9876,
+ colors: true,
+ logLevel: config.LOG_INFO,
+ autoWatch: false,
+ // available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
+ browsers: ['Chrome_without_security'],
+ customLaunchers: {
+ Chrome_without_security: {
+ base: 'ChromeHeadless',
+ // to disable CORS
+ flags: ['--disable-web-security'],
+ },
+ },
+ // if true, Karma captures browsers, runs the tests and exits
+ singleRun: true,
+ client: {
+ jasmine: {
+ random: false,
+ stopOnSpecFailure: false,
+ stopSpecOnExpectationFailure: true,
+ timeoutInterval: 30000,
+ },
+ },
+ })
+}
diff --git a/package.json b/package.json
index 0b8fc2df..e30fd616 100644
--- a/package.json
+++ b/package.json
@@ -1,69 +1,100 @@
{
- "name": "@clickhouse/client",
- "version": "0.0.0",
+ "name": "clickhouse-js",
"description": "Official JS client for ClickHouse DB",
+ "homepage": "https://clickhouse.com",
+ "version": "0.0.0",
"license": "Apache-2.0",
"keywords": [
"clickhouse",
"sql",
"client"
],
- "engines": {
- "node": ">=16"
- },
- "private": false,
"repository": {
"type": "git",
"url": "https://github.com/ClickHouse/clickhouse-js.git"
},
- "homepage": "https://clickhouse.com",
+ "private": false,
+ "engines": {
+ "node": ">=16"
+ },
"scripts": {
- "build": "rm -rf dist; tsc",
- "build:all": "rm -rf dist; tsc --project tsconfig.dev.json",
- "typecheck": "tsc --project tsconfig.dev.json --noEmit",
+ "build:node:all": "rm -rf out; tsc --project tsconfig.all.json",
+ "build:common:package": ".scripts/build.sh client-common",
+ "build:node:package": ".scripts/build.sh client-node",
+ "build:browser:package": ".scripts/build.sh client-browser",
+ "build:browser:minjs": "webpack --config webpack.release.js",
+ "typecheck": "tsc --project tsconfig.all.json --noEmit",
"lint": "eslint . --ext .ts",
"lint:fix": "eslint --fix . --ext .ts",
- "test": "jest --testPathPattern=__tests__ --globalSetup='<rootDir>/__tests__/setup.integration.ts'",
- "test:tls": "jest --testMatch='**/__tests__/tls/*.test.ts'",
- "test:unit": "jest --testMatch='**/__tests__/{unit,utils}/*.test.ts'",
- "test:integration": "jest --runInBand --testPathPattern=__tests__/integration --globalSetup='<rootDir>/__tests__/setup.integration.ts'",
- "test:integration:local_cluster": "CLICKHOUSE_TEST_ENVIRONMENT=local_cluster jest --runInBand --testPathPattern=__tests__/integration --globalSetup='<rootDir>/__tests__/setup.integration.ts'",
- "test:integration:cloud": "CLICKHOUSE_TEST_ENVIRONMENT=cloud jest --runInBand --testPathPattern=__tests__/integration --globalSetup='<rootDir>/__tests__/setup.integration.ts'",
+ "test": ".scripts/jasmine.sh jasmine.all.json",
+ "test:common:unit": ".scripts/jasmine.sh jasmine.common.unit.json",
+ "test:common:integration": ".scripts/jasmine.sh jasmine.common.integration.json",
+ "test:node:unit": ".scripts/jasmine.sh jasmine.node.unit.json",
+ "test:node:tls": ".scripts/jasmine.sh jasmine.node.tls.json",
+ "test:node:integration": ".scripts/jasmine.sh jasmine.node.integration.json",
+ "test:node:integration:local_cluster": "CLICKHOUSE_TEST_ENVIRONMENT=local_cluster npm run test:node:integration",
+ "test:node:integration:cloud": "CLICKHOUSE_TEST_ENVIRONMENT=cloud npm run test:node:integration",
+ "test:browser": "karma start karma.config.cjs",
+ "test:browser:integration:local_cluster": "CLICKHOUSE_TEST_ENVIRONMENT=local_cluster npm run test:browser",
+ "test:browser:integration:cloud": "CLICKHOUSE_TEST_ENVIRONMENT=cloud npm run test:browser",
"prepare": "husky install"
},
- "main": "dist/index.js",
- "types": "dist/index.d.ts",
- "files": [
- "dist"
- ],
- "dependencies": {
- "uuid": "^9.0.0"
- },
"devDependencies": {
- "@jest/reporters": "^29.4.0",
- "@types/jest": "^29.4.0",
+ "@types/jasmine": "^4.3.2",
"@types/node": "^18.11.18",
+ "@types/sinon": "^10.0.15",
"@types/split2": "^3.2.1",
- "@types/uuid": "^9.0.0",
+ "@types/uuid": "^9.0.2",
"@typescript-eslint/eslint-plugin": "^5.49.0",
"@typescript-eslint/parser": "^5.49.0",
"eslint": "^8.32.0",
"eslint-config-prettier": "^8.6.0",
"eslint-plugin-prettier": "^4.2.1",
"husky": "^8.0.2",
- "jest": "^29.4.0",
+ "jasmine": "^5.0.0",
+ "jasmine-core": "^5.0.0",
+ "jasmine-expect": "^5.0.0",
+ "karma": "^6.4.2",
+ "karma-chrome-launcher": "^3.2.0",
+ "karma-jasmine": "^5.1.0",
+ "karma-sourcemap-loader": "^0.4.0",
+ "karma-typescript": "^5.5.4",
+ "karma-webpack": "^5.0.0",
"lint-staged": "^13.1.0",
"prettier": "2.8.3",
+ "sinon": "^15.2.0",
"split2": "^4.1.0",
- "ts-jest": "^29.0.5",
+ "terser-webpack-plugin": "^5.3.9",
+ "ts-jest": "^29.1.0",
+ "ts-loader": "^9.4.3",
"ts-node": "^10.9.1",
- "tsconfig-paths": "^4.1.2",
- "typescript": "^4.9.4"
+ "tsconfig-paths": "^4.2.0",
+ "tsconfig-paths-webpack-plugin": "^4.0.1",
+ "typescript": "^4.9.4",
+ "uuid": "^9.0.0",
+ "webpack": "^5.84.1",
+ "webpack-cli": "^5.1.4",
+ "webpack-merge": "^5.9.0"
},
+ "workspaces": [
+ "./packages/*"
+ ],
+ "files": [
+ "dist"
+ ],
"lint-staged": {
"*.ts": [
"prettier --write",
"eslint --fix"
+ ],
+ "*.json": [
+ "prettier --write"
+ ],
+ "*.yml": [
+ "prettier --write"
+ ],
+ "*.md": [
+ "prettier --write"
]
}
}
diff --git a/packages/client-browser/__tests__/integration/browser_abort_request.test.ts b/packages/client-browser/__tests__/integration/browser_abort_request.test.ts
new file mode 100644
index 00000000..3c05d60e
--- /dev/null
+++ b/packages/client-browser/__tests__/integration/browser_abort_request.test.ts
@@ -0,0 +1,72 @@
+import type { ClickHouseClient, Row } from '@clickhouse/client-common'
+import { createTestClient } from '@test/utils'
+
+describe('Browser abort request streaming', () => {
+ let client: ClickHouseClient
+
+ beforeEach(() => {
+ client = createTestClient()
+ })
+
+ afterEach(async () => {
+ await client.close()
+ })
+
+ it('cancels a select query while reading response', async () => {
+ const controller = new AbortController()
+ const selectPromise = client
+ .query({
+ query: 'SELECT * from system.numbers',
+ format: 'JSONCompactEachRow',
+ abort_signal: controller.signal,
+ })
+ .then(async (rs) => {
+ const reader = rs.stream().getReader()
+ while (true) {
+ const { done, value: rows } = await reader.read()
+ if (done) break
+ ;(rows as Row[]).forEach((row: Row) => {
+ const [[number]] = row.json<[[string]]>()
+ // abort when reach number 3
+ if (number === '3') {
+ controller.abort()
+ }
+ })
+ }
+ })
+
+ await expectAsync(selectPromise).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('The user aborted a request'),
+ })
+ )
+ })
+
+ it('cancels a select query while reading response by closing response stream', async () => {
+ const selectPromise = client
+ .query({
+ query: 'SELECT * from system.numbers',
+ format: 'JSONCompactEachRow',
+ })
+ .then(async function (rs) {
+ const reader = rs.stream().getReader()
+ while (true) {
+ const { done, value: rows } = await reader.read()
+ if (done) break
+ for (const row of rows as Row[]) {
+ const [[number]] = row.json<[[string]]>()
+ // abort when reach number 3
+ if (number === '3') {
+ await reader.releaseLock()
+ await rs.close()
+ }
+ }
+ }
+ })
+ await expectAsync(selectPromise).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Stream has been already consumed'),
+ })
+ )
+ })
+})
diff --git a/packages/client-browser/__tests__/integration/browser_error_parsing.test.ts b/packages/client-browser/__tests__/integration/browser_error_parsing.test.ts
new file mode 100644
index 00000000..b8dbe67d
--- /dev/null
+++ b/packages/client-browser/__tests__/integration/browser_error_parsing.test.ts
@@ -0,0 +1,18 @@
+import { createClient } from '../../src'
+
+describe('Browser errors parsing', () => {
+ it('should return an error when URL is unreachable', async () => {
+ const client = createClient({
+ host: 'http://localhost:1111',
+ })
+ await expectAsync(
+ client.query({
+ query: 'SELECT * FROM system.numbers LIMIT 3',
+ })
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: 'Failed to fetch',
+ })
+ )
+ })
+})
diff --git a/packages/client-browser/__tests__/integration/browser_exec.test.ts b/packages/client-browser/__tests__/integration/browser_exec.test.ts
new file mode 100644
index 00000000..2cacfcde
--- /dev/null
+++ b/packages/client-browser/__tests__/integration/browser_exec.test.ts
@@ -0,0 +1,47 @@
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createTestClient } from '@test/utils'
+import { getAsText } from '../../src/utils'
+
+describe('Browser exec result streaming', () => {
+ let client: ClickHouseClient
+ beforeEach(() => {
+ client = createTestClient()
+ })
+ afterEach(async () => {
+ await client.close()
+ })
+
+ it('should send a parametrized query', async () => {
+ const result = await client.exec({
+ query: 'SELECT plus({val1: Int32}, {val2: Int32})',
+ query_params: {
+ val1: 10,
+ val2: 20,
+ },
+ })
+ expect(await getAsText(result.stream)).toEqual('30\n')
+ })
+
+ describe('trailing semi', () => {
+ it('should allow commands with semi in select clause', async () => {
+ const result = await client.exec({
+ query: `SELECT ';' FORMAT CSV`,
+ })
+ expect(await getAsText(result.stream)).toEqual('";"\n')
+ })
+
+ it('should allow commands with trailing semi', async () => {
+ const result = await client.exec({
+ query: 'EXISTS system.databases;',
+ })
+ expect(await getAsText(result.stream)).toEqual('1\n')
+ })
+
+ it('should allow commands with multiple trailing semi', async () => {
+ const result = await client.exec({
+ query: 'EXISTS system.foobar;;;;;;',
+ })
+ expect(await getAsText(result.stream)).toEqual('0\n')
+ })
+ })
+})
diff --git a/packages/client-browser/__tests__/integration/browser_ping.test.ts b/packages/client-browser/__tests__/integration/browser_ping.test.ts
new file mode 100644
index 00000000..9fff8aa8
--- /dev/null
+++ b/packages/client-browser/__tests__/integration/browser_ping.test.ts
@@ -0,0 +1,18 @@
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createTestClient } from '@test/utils'
+
+describe('Browser ping', () => {
+ let client: ClickHouseClient
+ afterEach(async () => {
+ await client.close()
+ })
+ it('does not swallow a client error', async () => {
+ client = createTestClient({
+ host: 'http://localhost:3333',
+ })
+
+ await expectAsync(client.ping()).toBeRejectedWith(
+ jasmine.objectContaining({ message: 'Failed to fetch' })
+ )
+ })
+})
diff --git a/packages/client-browser/__tests__/integration/browser_select_streaming.test.ts b/packages/client-browser/__tests__/integration/browser_select_streaming.test.ts
new file mode 100644
index 00000000..dad9c3d6
--- /dev/null
+++ b/packages/client-browser/__tests__/integration/browser_select_streaming.test.ts
@@ -0,0 +1,230 @@
+import type { ClickHouseClient, Row } from '@clickhouse/client-common'
+import { createTestClient } from '@test/utils'
+
+describe('Browser SELECT streaming', () => {
+ let client: ClickHouseClient<ReadableStream<Row[]>>
+ afterEach(async () => {
+ await client.close()
+ })
+ beforeEach(async () => {
+ client = createTestClient()
+ })
+
+ describe('consume the response only once', () => {
+ async function assertAlreadyConsumed$<T>(fn: () => Promise<T>) {
+ await expectAsync(fn()).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: 'Stream has been already consumed',
+ })
+ )
+ }
+ function assertAlreadyConsumed<T>(fn: () => T) {
+ expect(fn).toThrow(
+ jasmine.objectContaining({
+ message: 'Stream has been already consumed',
+ })
+ )
+ }
+ it('should consume a JSON response only once', async () => {
+ const rs = await client.query({
+ query: 'SELECT * FROM system.numbers LIMIT 1',
+ format: 'JSONEachRow',
+ })
+ expect(await rs.json()).toEqual([{ number: '0' }])
+ // wrap in a func to avoid changing inner "this"
+ await assertAlreadyConsumed$(() => rs.json())
+ await assertAlreadyConsumed$(() => rs.text())
+ await assertAlreadyConsumed(() => rs.stream())
+ })
+
+ it('should consume a text response only once', async () => {
+ const rs = await client.query({
+ query: 'SELECT * FROM system.numbers LIMIT 1',
+ format: 'TabSeparated',
+ })
+ expect(await rs.text()).toEqual('0\n')
+ // wrap in a func to avoid changing inner "this"
+ await assertAlreadyConsumed$(() => rs.json())
+ await assertAlreadyConsumed$(() => rs.text())
+ await assertAlreadyConsumed(() => rs.stream())
+ })
+
+ it('should consume a stream response only once', async () => {
+ const rs = await client.query({
+ query: 'SELECT * FROM system.numbers LIMIT 1',
+ format: 'TabSeparated',
+ })
+ const result = await rowsText(rs.stream())
+ expect(result).toEqual(['0'])
+ // wrap in a func to avoid changing inner "this"
+ await assertAlreadyConsumed$(() => rs.json())
+ await assertAlreadyConsumed$(() => rs.text())
+ assertAlreadyConsumed(() => rs.stream())
+ })
+ })
+
+ describe('select result asStream()', () => {
+ it('throws an exception if format is not stream-able', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSON',
+ })
+ // wrap in a func to avoid changing inner "this"
+ expect(() => result.stream()).toThrow(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('JSON format is not streamable'),
+ })
+ )
+ })
+ })
+
+ describe('text()', () => {
+ it('returns stream of rows in CSV format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'CSV',
+ })
+
+ const rs = await rowsText(result.stream())
+ expect(rs).toEqual(['0', '1', '2', '3', '4'])
+ })
+
+ it('returns stream of rows in TabSeparated format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'TabSeparated',
+ })
+
+ const rs = await rowsText(result.stream())
+ expect(rs).toEqual(['0', '1', '2', '3', '4'])
+ })
+ })
+
+ describe('json()', () => {
+ it('returns stream of objects in JSONEachRow format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONEachRow',
+ })
+
+ const rs = await rowsJsonValues<{ number: string }>(result.stream())
+ expect(rs).toEqual([
+ { number: '0' },
+ { number: '1' },
+ { number: '2' },
+ { number: '3' },
+ { number: '4' },
+ ])
+ })
+
+ it('returns stream of objects in JSONStringsEachRow format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONStringsEachRow',
+ })
+
+ const rs = await rowsJsonValues<{ number: string }>(result.stream())
+ expect(rs).toEqual([
+ { number: '0' },
+ { number: '1' },
+ { number: '2' },
+ { number: '3' },
+ { number: '4' },
+ ])
+ })
+
+ it('returns stream of objects in JSONCompactEachRow format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONCompactEachRow',
+ })
+
+ const rs = await rowsJsonValues<[string]>(result.stream())
+ expect(rs).toEqual([['0'], ['1'], ['2'], ['3'], ['4']])
+ })
+
+ it('returns stream of objects in JSONCompactEachRowWithNames format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONCompactEachRowWithNames',
+ })
+
+ const rs = await rowsJsonValues<[string]>(result.stream())
+ expect(rs).toEqual([['number'], ['0'], ['1'], ['2'], ['3'], ['4']])
+ })
+
+ it('returns stream of objects in JSONCompactEachRowWithNamesAndTypes format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONCompactEachRowWithNamesAndTypes',
+ })
+
+ const rs = await rowsJsonValues<[string]>(result.stream())
+ expect(rs).toEqual([
+ ['number'],
+ ['UInt64'],
+ ['0'],
+ ['1'],
+ ['2'],
+ ['3'],
+ ['4'],
+ ])
+ })
+
+ it('returns stream of objects in JSONCompactStringsEachRowWithNames format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONCompactStringsEachRowWithNames',
+ })
+
+ const rs = await rowsJsonValues<[string]>(result.stream())
+ expect(rs).toEqual([['number'], ['0'], ['1'], ['2'], ['3'], ['4']])
+ })
+
+ it('returns stream of objects in JSONCompactStringsEachRowWithNamesAndTypes format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONCompactStringsEachRowWithNamesAndTypes',
+ })
+
+ const rs = await rowsJsonValues<[string]>(result.stream())
+ expect(rs).toEqual([
+ ['number'],
+ ['UInt64'],
+ ['0'],
+ ['1'],
+ ['2'],
+ ['3'],
+ ['4'],
+ ])
+ })
+ })
+})
+
+async function rowsJsonValues<T>(
+ stream: ReadableStream<Row[]>
+): Promise<T[]> {
+ const result: T[] = []
+ const reader = stream.getReader()
+ while (true) {
+ const { done, value } = await reader.read()
+ if (done) break
+ value.forEach((row) => {
+ result.push(row.json<T>())
+ })
+ }
+ return result
+}
+
+async function rowsText(stream: ReadableStream<Row[]>): Promise<string[]> {
+ const result: string[] = []
+ const reader = stream.getReader()
+ while (true) {
+ const { done, value } = await reader.read()
+ if (done) break
+ value.forEach((row) => {
+ result.push(row.text)
+ })
+ }
+ return result
+}
diff --git a/packages/client-browser/__tests__/integration/browser_watch_stream.test.ts b/packages/client-browser/__tests__/integration/browser_watch_stream.test.ts
new file mode 100644
index 00000000..c00d2780
--- /dev/null
+++ b/packages/client-browser/__tests__/integration/browser_watch_stream.test.ts
@@ -0,0 +1,66 @@
+import type { Row } from '@clickhouse/client-common'
+import { type ClickHouseClient } from '@clickhouse/client-common'
+import {
+ createTable,
+ createTestClient,
+ guid,
+ TestEnv,
+ whenOnEnv,
+} from '@test/utils'
+
+describe('Browser WATCH stream', () => {
+ let client: ClickHouseClient
+ let viewName: string
+
+ beforeEach(async () => {
+ client = await createTestClient({
+ compression: {
+ response: false, // WATCH won't work with response compression
+ },
+ clickhouse_settings: {
+ allow_experimental_live_view: 1,
+ },
+ })
+ viewName = `browser_watch_stream_test_${guid()}`
+ await createTable(
+ client,
+ () => `CREATE LIVE VIEW ${viewName} WITH REFRESH 1 AS SELECT now()`
+ )
+ })
+
+ afterEach(async () => {
+ await client.exec({
+ query: `DROP VIEW ${viewName}`,
+ clickhouse_settings: { wait_end_of_query: 1 },
+ })
+ await client.close()
+ })
+
+ /**
+ * "Does not work with replicated or distributed tables where inserts are performed on different nodes"
+ * @see https://clickhouse.com/docs/en/sql-reference/statements/create/view#live-view-experimental
+ */
+ whenOnEnv(TestEnv.LocalSingleNode).it(
+ 'should eventually get several events using WATCH',
+ async () => {
+ const resultSet = await client.query({
+ query: `WATCH ${viewName} EVENTS`,
+ format: 'JSONEachRow',
+ })
+ const stream = resultSet.stream()
+ const data = new Array<{ version: string }>()
+ let i = 0
+ const reader = stream.getReader()
+ while (i < 2) {
+ const result: ReadableStreamReadResult<Row[]> = await reader.read()
+ result.value!.forEach((row) => {
+ data.push(row.json())
+ })
+ i++
+ }
+ await reader.releaseLock()
+ await stream.cancel()
+ expect(data).toEqual([{ version: '1' }, { version: '2' }])
+ }
+ )
+})
diff --git a/packages/client-browser/__tests__/unit/browser_client.test.ts b/packages/client-browser/__tests__/unit/browser_client.test.ts
new file mode 100644
index 00000000..7b7f4bd8
--- /dev/null
+++ b/packages/client-browser/__tests__/unit/browser_client.test.ts
@@ -0,0 +1,22 @@
+import type { BaseClickHouseClientConfigOptions } from '@clickhouse/client-common'
+import { createClient } from '../../src'
+
+describe('Browser createClient', () => {
+ it('throws on incorrect "host" config value', () => {
+ expect(() => createClient({ host: 'foo' })).toThrowError(
+ 'Configuration parameter "host" contains malformed url.'
+ )
+ })
+
+ it('should not mutate provided configuration', async () => {
+ const config: BaseClickHouseClientConfigOptions = {
+ host: 'http://localhost',
+ }
+ createClient(config)
+ // initial configuration is not overridden by the defaults we assign
+ // when we transform the specified config object to the connection params
+ expect(config).toEqual({
+ host: 'http://localhost',
+ })
+ })
+})
diff --git a/packages/client-browser/__tests__/unit/browser_result_set.test.ts b/packages/client-browser/__tests__/unit/browser_result_set.test.ts
new file mode 100644
index 00000000..5dc6c31b
--- /dev/null
+++ b/packages/client-browser/__tests__/unit/browser_result_set.test.ts
@@ -0,0 +1,92 @@
+import type { Row } from '@clickhouse/client-common'
+import { guid } from '@test/utils'
+import { ResultSet } from '../../src'
+
+describe('Browser ResultSet', () => {
+ const expectedText = `{"foo":"bar"}\n{"qaz":"qux"}\n`
+ const expectedJson = [{ foo: 'bar' }, { qaz: 'qux' }]
+
+ const errMsg = 'Stream has been already consumed'
+ const err = jasmine.objectContaining({
+ message: jasmine.stringContaining(errMsg),
+ })
+
+ it('should consume the response as text only once', async () => {
+ const rs = makeResultSet()
+
+ expect(await rs.text()).toEqual(expectedText)
+ await expectAsync(rs.text()).toBeRejectedWith(err)
+ await expectAsync(rs.json()).toBeRejectedWith(err)
+ })
+
+ it('should consume the response as JSON only once', async () => {
+ const rs = makeResultSet()
+
+ expect(await rs.json()).toEqual(expectedJson)
+ await expectAsync(rs.json()).toBeRejectedWith(err)
+ await expectAsync(rs.text()).toBeRejectedWith(err)
+ })
+
+ it('should consume the response as a stream of Row instances', async () => {
+ const rs = makeResultSet()
+ const stream = rs.stream()
+
+ const result: unknown[] = []
+ const reader = stream.getReader()
+ while (true) {
+ const { done, value } = await reader.read()
+ if (done) break
+ value.forEach((row) => {
+ result.push(row.json())
+ })
+ }
+
+ expect(result).toEqual(expectedJson)
+ expect(() => rs.stream()).toThrow(new Error(errMsg))
+ await expectAsync(rs.json()).toBeRejectedWith(err)
+ await expectAsync(rs.text()).toBeRejectedWith(err)
+ })
+
+ it('should be able to call Row.text and Row.json multiple times', async () => {
+ const rs = new ResultSet(
+ new ReadableStream({
+ start(controller) {
+ controller.enqueue(new TextEncoder().encode('{"foo":"bar"}\n'))
+ controller.close()
+ },
+ }),
+ 'JSONEachRow',
+ guid()
+ )
+
+ const allRows: Row[] = []
+ const reader = rs.stream().getReader()
+ while (true) {
+ const { done, value } = await reader.read()
+ if (done) break
+ allRows.push(...value)
+ }
+ expect(allRows.length).toEqual(1)
+
+ const [row] = allRows
+ expect(row.text).toEqual('{"foo":"bar"}')
+ expect(row.text).toEqual('{"foo":"bar"}')
+ expect(row.json()).toEqual({ foo: 'bar' })
+ expect(row.json()).toEqual({ foo: 'bar' })
+ })
+
+ function makeResultSet() {
+ return new ResultSet(
+ new ReadableStream({
+ start(controller) {
+ const encoder = new TextEncoder()
+ controller.enqueue(encoder.encode('{"foo":"bar"}\n'))
+ controller.enqueue(encoder.encode('{"qaz":"qux"}\n'))
+ controller.close()
+ },
+ }),
+ 'JSONEachRow',
+ guid()
+ )
+ }
+})
diff --git a/packages/client-browser/package.json b/packages/client-browser/package.json
new file mode 100644
index 00000000..1b7f7caf
--- /dev/null
+++ b/packages/client-browser/package.json
@@ -0,0 +1,25 @@
+{
+ "name": "@clickhouse/client-browser",
+ "description": "Official JS client for ClickHouse DB - browser implementation",
+ "homepage": "https://clickhouse.com",
+ "version": "0.0.0",
+ "license": "Apache-2.0",
+ "keywords": [
+ "clickhouse",
+ "sql",
+ "client"
+ ],
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/ClickHouse/clickhouse-js.git"
+ },
+ "private": false,
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "files": [
+ "dist"
+ ],
+ "dependencies": {
+ "@clickhouse/client-common": "*"
+ }
+}
diff --git a/packages/client-browser/src/client.ts b/packages/client-browser/src/client.ts
new file mode 100644
index 00000000..2726607e
--- /dev/null
+++ b/packages/client-browser/src/client.ts
@@ -0,0 +1,49 @@
+import type {
+ BaseClickHouseClientConfigOptions,
+ ConnectionParams,
+ DataFormat,
+ InputJSON,
+ InputJSONObjectEachRow,
+ InsertParams,
+ InsertResult,
+ BaseResultSet,
+ QueryParams,
+ Row,
+} from '@clickhouse/client-common'
+import { ClickHouseClient } from '@clickhouse/client-common'
+import { BrowserConnection } from './connection'
+import { ResultSet } from './result_set'
+import { BrowserValuesEncoder } from './utils'
+
+export type BrowserClickHouseClient = Omit<
+ ClickHouseClient,
+ 'insert' | 'query'
+> & {
+ // restrict ReadableStream as a possible insert value
+ insert<T>(
+ params: Omit<InsertParams<ReadableStream, T>, 'values'> & {
+ values: ReadonlyArray<T> | InputJSON<T> | InputJSONObjectEachRow<T>
+ }
+ ): Promise<InsertResult>
+ // narrow down the return type here for better type-hinting
+ query(params: QueryParams): Promise<BaseResultSet<ReadableStream<Row[]>>>
+}
+
+export function createClient(
+ config?: BaseClickHouseClientConfigOptions
+): BrowserClickHouseClient {
+ return new ClickHouseClient({
+ impl: {
+ make_connection: (params: ConnectionParams) =>
+ new BrowserConnection(params),
+ make_result_set: (
+ stream: ReadableStream,
+ format: DataFormat,
+ query_id: string
+ ) => new ResultSet(stream, format, query_id),
+ values_encoder: new BrowserValuesEncoder(),
+ close_stream: (stream) => stream.cancel(),
+ },
+ ...(config || {}),
+ })
+}
diff --git a/packages/client-browser/src/connection/browser_connection.ts b/packages/client-browser/src/connection/browser_connection.ts
new file mode 100644
index 00000000..a85a8e4d
--- /dev/null
+++ b/packages/client-browser/src/connection/browser_connection.ts
@@ -0,0 +1,200 @@
+import type {
+ ConnBaseQueryParams,
+ Connection,
+ ConnectionParams,
+ ConnInsertParams,
+ ConnInsertResult,
+ ConnQueryResult,
+} from '@clickhouse/client-common'
+import {
+ isSuccessfulResponse,
+ parseError,
+ toSearchParams,
+ transformUrl,
+ withCompressionHeaders,
+ withHttpSettings,
+} from '@clickhouse/client-common'
+import { getAsText } from '../utils'
+
+type BrowserInsertParams = Omit<
+ ConnInsertParams<ReadableStream>,
+ 'values'
+> & {
+ values: string
+}
+
+export class BrowserConnection implements Connection {
+ private readonly defaultHeaders: Record<string, string>
+ constructor(private readonly params: ConnectionParams) {
+ this.defaultHeaders = {
+ Authorization: `Basic ${btoa(`${params.username}:${params.password}`)}`,
+ }
+ }
+
+ async query(
+ params: ConnBaseQueryParams
+ ): Promise<ConnQueryResult<ReadableStream<Uint8Array>>> {
+ const query_id = getQueryId(params.query_id)
+ const clickhouse_settings = withHttpSettings(
+ params.clickhouse_settings,
+ this.params.compression.decompress_response
+ )
+ const searchParams = toSearchParams({
+ database: this.params.database,
+ clickhouse_settings,
+ query_params: params.query_params,
+ session_id: params.session_id,
+ query_id,
+ })
+ const response = await this.request({
+ values: params.query,
+ params,
+ searchParams,
+ })
+ return {
+ query_id,
+ stream: response.body || new ReadableStream(),
+ }
+ }
+
+ async exec(
+ params: ConnBaseQueryParams
+ ): Promise<ConnQueryResult<ReadableStream>> {
+ const query_id = getQueryId(params.query_id)
+ const searchParams = toSearchParams({
+ database: this.params.database,
+ clickhouse_settings: params.clickhouse_settings,
+ query_params: params.query_params,
+ session_id: params.session_id,
+ query_id,
+ })
+ const response = await this.request({
+ values: params.query,
+ params,
+ searchParams,
+ })
+ return {
+ stream: response.body || new ReadableStream(),
+ query_id,
+ }
+ }
+
+ async insert(
+ params: BrowserInsertParams
+ ): Promise<ConnInsertResult> {
+ const query_id = getQueryId(params.query_id)
+ const searchParams = toSearchParams({
+ database: this.params.database,
+ clickhouse_settings: params.clickhouse_settings,
+ query_params: params.query_params,
+ query: params.query,
+ session_id: params.session_id,
+ query_id,
+ })
+ await this.request({
+ values: params.values,
+ params,
+ searchParams,
+ })
+ return {
+ query_id,
+ }
+ }
+
+ async ping(): Promise<boolean> {
+ // TODO: catch an error and just log it, returning false?
+ const response = await this.request({
+ method: 'GET',
+ values: null,
+ pathname: '/ping',
+ searchParams: undefined,
+ })
+ if (response.body !== null) {
+ await response.body.cancel()
+ }
+ return true
+ }
+
+ async close(): Promise<void> {
+ return
+ }
+
+ private async request({
+ values,
+ params,
+ searchParams,
+ pathname,
+ method,
+ }: {
+ values: string | null
+ params?: ConnBaseQueryParams
+ searchParams: URLSearchParams | undefined
+ pathname?: string
+ method?: 'GET' | 'POST'
+ }): Promise<Response> {
+ const url = transformUrl({
+ url: this.params.url,
+ pathname: pathname ?? '/',
+ searchParams,
+ }).toString()
+
+ const abortController = new AbortController()
+
+ let isTimedOut = false
+ const timeout = setTimeout(() => {
+ isTimedOut = true
+ abortController.abort()
+ }, this.params.request_timeout)
+
+ let isAborted = false
+ if (params?.abort_signal !== undefined) {
+ params.abort_signal.onabort = () => {
+ isAborted = true
+ abortController.abort()
+ }
+ }
+
+ try {
+ const headers = withCompressionHeaders({
+ headers: this.defaultHeaders,
+ compress_request: false,
+ decompress_response: this.params.compression.decompress_response,
+ })
+ const response = await fetch(url, {
+ body: values,
+ headers,
+ keepalive: false,
+ method: method ?? 'POST',
+ signal: abortController.signal,
+ })
+ clearTimeout(timeout)
+ if (isSuccessfulResponse(response.status)) {
+ return response
+ } else {
+ return Promise.reject(
+ parseError(
+ await getAsText(response.body || new ReadableStream())
+ )
+ )
+ }
+ } catch (err) {
+ clearTimeout(timeout)
+ if (err instanceof Error) {
+ if (isAborted) {
+ return Promise.reject(new Error('The user aborted a request.'))
+ }
+ if (isTimedOut) {
+ return Promise.reject(new Error('Timeout error.'))
+ }
+ // maybe it's a ClickHouse error
+ return Promise.reject(parseError(err))
+ }
+ // shouldn't happen
+ throw err
+ }
+ }
+}
+
+function getQueryId(query_id: string | undefined): string {
+ return query_id || crypto.randomUUID()
+}
diff --git a/packages/client-browser/src/connection/index.ts b/packages/client-browser/src/connection/index.ts
new file mode 100644
index 00000000..8527105b
--- /dev/null
+++ b/packages/client-browser/src/connection/index.ts
@@ -0,0 +1 @@
+export * from './browser_connection'
diff --git a/packages/client-browser/src/index.ts b/packages/client-browser/src/index.ts
new file mode 100644
index 00000000..ba7e0c9e
--- /dev/null
+++ b/packages/client-browser/src/index.ts
@@ -0,0 +1,32 @@
+export { createClient } from './client'
+export { ResultSet } from './result_set'
+
+/** Re-export @clickhouse/client-common types */
+export {
+ type BaseClickHouseClientConfigOptions,
+ type ClickHouseClientConfigOptions,
+ type BaseQueryParams,
+ type QueryParams,
+ type ExecParams,
+ type InsertParams,
+ type InsertValues,
+ type CommandParams,
+ type CommandResult,
+ type ExecResult,
+ type InsertResult,
+ type DataFormat,
+ type ErrorLogParams,
+ type Logger,
+ type LogParams,
+ type ClickHouseSettings,
+ type MergeTreeSettings,
+ type Row,
+ type ResponseJSON,
+ type InputJSON,
+ type InputJSONObjectEachRow,
+ type BaseResultSet,
+ ClickHouseError,
+ ClickHouseLogLevel,
+ ClickHouseClient,
+ SettingsMap,
+} from '@clickhouse/client-common'
diff --git a/packages/client-browser/src/result_set.ts b/packages/client-browser/src/result_set.ts
new file mode 100644
index 00000000..9052afe0
--- /dev/null
+++ b/packages/client-browser/src/result_set.ts
@@ -0,0 +1,84 @@
+import type { BaseResultSet, DataFormat, Row } from '@clickhouse/client-common'
+import { decode, validateStreamFormat } from '@clickhouse/client-common'
+import { getAsText } from './utils'
+
+export class ResultSet implements BaseResultSet<ReadableStream<Row[]>> {
+ private isAlreadyConsumed = false
+ constructor(
+ private _stream: ReadableStream,
+ private readonly format: DataFormat,
+ public readonly query_id: string
+ ) {}
+
+ async text(): Promise<string> {
+ this.markAsConsumed()
+ return getAsText(this._stream)
+ }
+
+ async json<T>(): Promise<T> {
+ const text = await this.text()
+ return decode(text, this.format)
+ }
+
+ stream(): ReadableStream {
+ this.markAsConsumed()
+ validateStreamFormat(this.format)
+
+ let decodedChunk = ''
+ const decoder = new TextDecoder('utf-8')
+ const transform = new TransformStream({
+ start() {
+ //
+ },
+ transform: (chunk, controller) => {
+ if (chunk === null) {
+ controller.terminate()
+ }
+ decodedChunk += decoder.decode(chunk)
+ const rows: Row[] = []
+ // eslint-disable-next-line no-constant-condition
+ while (true) {
+ const idx = decodedChunk.indexOf('\n')
+ if (idx !== -1) {
+ const text = decodedChunk.slice(0, idx)
+ decodedChunk = decodedChunk.slice(idx + 1)
+ rows.push({
+ text,
+ json<T>(): T {
+ return decode(text, 'JSON')
+ },
+ })
+ } else {
+ if (rows.length) {
+ controller.enqueue(rows)
+ }
+ break
+ }
+ }
+ },
+ flush() {
+ decodedChunk = ''
+ },
+ })
+
+ return this._stream.pipeThrough(transform, {
+ preventClose: false,
+ preventAbort: false,
+ preventCancel: false,
+ })
+ }
+
+ async close(): Promise<void> {
+ this.markAsConsumed()
+ await this._stream.cancel()
+ }
+
+ private markAsConsumed() {
+ if (this.isAlreadyConsumed) {
+ throw new Error(streamAlreadyConsumedMessage)
+ }
+ this.isAlreadyConsumed = true
+ }
+}
+
+const streamAlreadyConsumedMessage = 'Stream has been already consumed'
diff --git a/packages/client-browser/src/utils/encoder.ts b/packages/client-browser/src/utils/encoder.ts
new file mode 100644
index 00000000..54530cb3
--- /dev/null
+++ b/packages/client-browser/src/utils/encoder.ts
@@ -0,0 +1,41 @@
+import type {
+ DataFormat,
+ InsertValues,
+ ValuesEncoder,
+} from '@clickhouse/client-common'
+import { encodeJSON } from '@clickhouse/client-common'
+import { isStream } from './stream'
+
+export class BrowserValuesEncoder implements ValuesEncoder<ReadableStream> {
+ encodeValues<T = unknown>(
+ values: InsertValues<ReadableStream, T>,
+ format: DataFormat
+ ): string | ReadableStream {
+ if (isStream(values)) {
+ throw new Error('Streaming is not supported for inserts in browser')
+ }
+ // JSON* arrays
+ if (Array.isArray(values)) {
+ return values.map((value) => encodeJSON(value, format)).join('')
+ }
+ // JSON & JSONObjectEachRow format input
+ if (typeof values === 'object') {
+ return encodeJSON(values, format)
+ }
+ throw new Error(
+ `Cannot encode values of type ${typeof values} with ${format} format`
+ )
+ }
+
+ validateInsertValues<T = unknown>(values: InsertValues<ReadableStream, T>): void {
+ if (isStream(values)) {
+ throw new Error('Streaming is not supported for inserts in browser')
+ }
+ if (!Array.isArray(values) && typeof values !== 'object') {
+ throw new Error(
+ 'Insert expected "values" to be an array or a JSON object, ' +
+ `got: ${typeof values}`
+ )
+ }
+ }
+}
diff --git a/packages/client-browser/src/utils/index.ts b/packages/client-browser/src/utils/index.ts
new file mode 100644
index 00000000..99083b36
--- /dev/null
+++ b/packages/client-browser/src/utils/index.ts
@@ -0,0 +1,2 @@
+export * from './stream'
+export * from './encoder'
diff --git a/packages/client-browser/src/utils/stream.ts b/packages/client-browser/src/utils/stream.ts
new file mode 100644
index 00000000..242923b4
--- /dev/null
+++ b/packages/client-browser/src/utils/stream.ts
@@ -0,0 +1,23 @@
+export function isStream(obj: any): obj is ReadableStream {
+ return (
+ obj !== null && obj !== undefined && typeof obj.pipeThrough === 'function'
+ )
+}
+
+export async function getAsText(stream: ReadableStream): Promise<string> {
+ let result = ''
+ let isDone = false
+
+ const textDecoder = new TextDecoder()
+ const reader = stream.getReader()
+
+ while (!isDone) {
+ const { done, value } = await reader.read()
+ result += textDecoder.decode(value, { stream: true })
+ isDone = done
+ }
+
+ // flush
+ result += textDecoder.decode()
+ return result
+}
diff --git a/packages/client-browser/src/version.ts b/packages/client-browser/src/version.ts
new file mode 100644
index 00000000..27b4abf4
--- /dev/null
+++ b/packages/client-browser/src/version.ts
@@ -0,0 +1 @@
+export default '0.2.0-beta1'
diff --git a/packages/client-common/__tests__/README.md b/packages/client-common/__tests__/README.md
new file mode 100644
index 00000000..2626153d
--- /dev/null
+++ b/packages/client-common/__tests__/README.md
@@ -0,0 +1,4 @@
+### Common tests and utilities
+
+This folder contains unit and integration test scenarios that we expect to be compatible to every connection,
+as well as the shared utilities for effective tests writing.
diff --git a/__tests__/integration/fixtures/read_only_user.ts b/packages/client-common/__tests__/fixtures/read_only_user.ts
similarity index 94%
rename from __tests__/integration/fixtures/read_only_user.ts
rename to packages/client-common/__tests__/fixtures/read_only_user.ts
index bac3b1a3..d727bceb 100644
--- a/__tests__/integration/fixtures/read_only_user.ts
+++ b/packages/client-common/__tests__/fixtures/read_only_user.ts
@@ -1,10 +1,10 @@
+import type { ClickHouseClient } from '@clickhouse/client-common'
import {
getClickHouseTestEnvironment,
getTestDatabaseName,
guid,
TestEnv,
-} from '../../utils'
-import type { ClickHouseClient } from '../../../src'
+} from '../utils'
export async function createReadOnlyUser(client: ClickHouseClient) {
const username = `clickhousejs__read_only_user_${guid()}`
diff --git a/__tests__/integration/fixtures/simple_table.ts b/packages/client-common/__tests__/fixtures/simple_table.ts
similarity index 89%
rename from __tests__/integration/fixtures/simple_table.ts
rename to packages/client-common/__tests__/fixtures/simple_table.ts
index 9ee58b76..b8627b31 100644
--- a/__tests__/integration/fixtures/simple_table.ts
+++ b/packages/client-common/__tests__/fixtures/simple_table.ts
@@ -1,9 +1,11 @@
-import { createTable, TestEnv } from '../../utils'
-import type { ClickHouseClient } from '../../../src'
-import type { MergeTreeSettings } from '../../../src/settings'
+import type {
+ ClickHouseClient,
+ MergeTreeSettings,
+} from '@clickhouse/client-common'
+import { createTable, TestEnv } from '../utils'
-export function createSimpleTable(
- client: ClickHouseClient,
+export function createSimpleTable<Stream = unknown>(
+ client: ClickHouseClient<Stream>,
tableName: string,
settings: MergeTreeSettings = {}
) {
@@ -39,7 +41,7 @@ export function createSimpleTable(
CREATE TABLE ${tableName} ON CLUSTER '{cluster}'
(id UInt64, name String, sku Array(UInt8))
ENGINE ReplicatedMergeTree(
- '/clickhouse/{cluster}/tables/{database}/{table}/{shard}',
+ '/clickhouse/{cluster}/tables/{database}/{table}/{shard}',
'{replica}'
)
ORDER BY (id) ${_settings}
diff --git a/__tests__/integration/fixtures/streaming_e2e_data.ndjson b/packages/client-common/__tests__/fixtures/streaming_e2e_data.ndjson
similarity index 100%
rename from __tests__/integration/fixtures/streaming_e2e_data.ndjson
rename to packages/client-common/__tests__/fixtures/streaming_e2e_data.ndjson
diff --git a/__tests__/integration/fixtures/table_with_fields.ts b/packages/client-common/__tests__/fixtures/table_with_fields.ts
similarity index 88%
rename from __tests__/integration/fixtures/table_with_fields.ts
rename to packages/client-common/__tests__/fixtures/table_with_fields.ts
index 36fabd49..13bda0fe 100644
--- a/__tests__/integration/fixtures/table_with_fields.ts
+++ b/packages/client-common/__tests__/fixtures/table_with_fields.ts
@@ -1,5 +1,8 @@
-import { createTable, guid, TestEnv } from '../../utils'
-import type { ClickHouseClient, ClickHouseSettings } from '../../../src'
+import type {
+ ClickHouseClient,
+ ClickHouseSettings,
+} from '@clickhouse/client-common'
+import { createTable, guid, TestEnv } from '../utils'
export async function createTableWithFields(
client: ClickHouseClient,
@@ -31,7 +34,7 @@ export async function createTableWithFields(
CREATE TABLE ${tableName} ON CLUSTER '{cluster}'
(id UInt32, ${fields})
ENGINE ReplicatedMergeTree(
- '/clickhouse/{cluster}/tables/{database}/{table}/{shard}',
+ '/clickhouse/{cluster}/tables/{database}/{table}/{shard}',
'{replica}'
)
ORDER BY (id)
diff --git a/__tests__/integration/fixtures/test_data.ts b/packages/client-common/__tests__/fixtures/test_data.ts
similarity index 89%
rename from __tests__/integration/fixtures/test_data.ts
rename to packages/client-common/__tests__/fixtures/test_data.ts
index e7ad3d0a..448201b1 100644
--- a/__tests__/integration/fixtures/test_data.ts
+++ b/packages/client-common/__tests__/fixtures/test_data.ts
@@ -1,4 +1,4 @@
-import type { ClickHouseClient } from '../../../src'
+import type { ClickHouseClient } from '@clickhouse/client-common'
export const jsonValues = [
{ id: '42', name: 'hello', sku: [0, 1] },
diff --git a/packages/client-common/__tests__/integration/abort_request.test.ts b/packages/client-common/__tests__/integration/abort_request.test.ts
new file mode 100644
index 00000000..268dabcb
--- /dev/null
+++ b/packages/client-common/__tests__/integration/abort_request.test.ts
@@ -0,0 +1,167 @@
+import type { ClickHouseClient, ResponseJSON } from '@clickhouse/client-common'
+import { createTestClient, guid, sleep } from '../utils'
+
+describe('abort request', () => {
+ let client: ClickHouseClient
+
+ beforeEach(() => {
+ client = createTestClient()
+ })
+
+ afterEach(async () => {
+ await client.close()
+ })
+
+ describe('select', () => {
+ it('cancels a select query before it is sent', async () => {
+ const controller = new AbortController()
+ const selectPromise = client.query({
+ query: 'SELECT sleep(3)',
+ format: 'CSV',
+ abort_signal: controller.signal,
+ })
+ controller.abort()
+
+ await expectAsync(selectPromise).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringMatching('The user aborted a request'),
+ })
+ )
+ })
+
+ it('cancels a select query after it is sent', async () => {
+ const controller = new AbortController()
+ const selectPromise = client.query({
+ query: 'SELECT sleep(3)',
+ format: 'CSV',
+ abort_signal: controller.signal,
+ })
+
+ await new Promise((resolve) => {
+ setTimeout(() => {
+ controller.abort()
+ resolve(undefined)
+ }, 50)
+ })
+
+ await expectAsync(selectPromise).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringMatching('The user aborted a request'),
+ })
+ )
+ })
+
+ it('should not throw an error when aborted the second time', async () => {
+ const controller = new AbortController()
+ const selectPromise = client.query({
+ query: 'SELECT sleep(3)',
+ format: 'CSV',
+ abort_signal: controller.signal,
+ })
+
+ await new Promise((resolve) => {
+ setTimeout(() => {
+ controller.abort()
+ resolve(undefined)
+ }, 50)
+ })
+
+ controller.abort('foo bar') // no-op, does not throw here
+
+ await expectAsync(selectPromise).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringMatching('The user aborted a request'),
+ })
+ )
+ })
+
+ // FIXME: It does not work with ClickHouse Cloud.
+ // Active queries never contain the long-running query unlike local setup.
+ // To be revisited in https://github.com/ClickHouse/clickhouse-js/issues/177
+ xit('ClickHouse server must cancel query on abort', async () => {
+ const controller = new AbortController()
+
+ const longRunningQuery = `SELECT sleep(3), '${guid()}'`
+ console.log(`Long running query: ${longRunningQuery}`)
+ void client
+ .query({
+ query: longRunningQuery,
+ abort_signal: controller.signal,
+ format: 'JSONCompactEachRow',
+ })
+ .catch(() => {
+ // ignore aborted query exception
+ })
+
+ // Long-running query should be there
+ await assertActiveQueries(client, (queries) => {
+ console.log(`Active queries: ${JSON.stringify(queries, null, 2)}`)
+ return queries.some((q) => q.query.includes(longRunningQuery))
+ })
+
+ controller.abort()
+
+ // Long-running query should be cancelled on the server
+ await assertActiveQueries(client, (queries) =>
+ queries.every((q) => {
+ console.log(`${q.query} VS ${longRunningQuery}`)
+ return !q.query.includes(longRunningQuery)
+ })
+ )
+ })
+
+ it('should cancel of the select queries while keeping the others', async () => {
+ type Res = Array<{ foo: number }>
+
+ const controller = new AbortController()
+ const results: number[] = []
+
+ const selectPromises = Promise.all(
+ [...Array(5)].map((_, i) => {
+ const shouldAbort = i === 3
+ const requestPromise = client
+ .query({
+ query: `SELECT sleep(0.5), ${i} AS foo`,
+ format: 'JSONEachRow',
+ abort_signal:
+ // we will cancel the request that should've yielded '3'
+ shouldAbort ? controller.signal : undefined,
+ })
+ .then((r) => r.json<Res>())
+ .then((r) => results.push(r[0].foo))
+ // this way, the cancelled request will not cancel the others
+ if (shouldAbort) {
+ return requestPromise.catch(() => {
+ // ignored
+ })
+ }
+ return requestPromise
+ })
+ )
+
+ controller.abort()
+ await selectPromises
+
+ expect(results.sort((a, b) => a - b)).toEqual([0, 1, 2, 4])
+ })
+ })
+})
+
+async function assertActiveQueries(
+ client: ClickHouseClient,
+ assertQueries: (queries: Array<{ query: string }>) => boolean
+) {
+ let isRunning = true
+ while (isRunning) {
+ const rs = await client.query({
+ query: 'SELECT query FROM system.processes',
+ format: 'JSON',
+ })
+ const queries = await rs.json<ResponseJSON<{ query: string }>>()
+ if (assertQueries(queries.data)) {
+ isRunning = false
+ } else {
+ await sleep(100)
+ }
+ }
+}
diff --git a/__tests__/integration/auth.test.ts b/packages/client-common/__tests__/integration/auth.test.ts
similarity index 70%
rename from __tests__/integration/auth.test.ts
rename to packages/client-common/__tests__/integration/auth.test.ts
index dcdafe12..0c350cf0 100644
--- a/__tests__/integration/auth.test.ts
+++ b/packages/client-common/__tests__/integration/auth.test.ts
@@ -1,4 +1,4 @@
-import { type ClickHouseClient } from '../../src'
+import { type ClickHouseClient } from '@clickhouse/client-common'
import { createTestClient } from '../utils'
describe('authentication', () => {
@@ -13,15 +13,15 @@ describe('authentication', () => {
password: 'gibberish',
})
- await expect(
+ await expectAsync(
client.query({
query: 'SELECT number FROM system.numbers LIMIT 3',
})
- ).rejects.toEqual(
- expect.objectContaining({
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
code: '516',
type: 'AUTHENTICATION_FAILED',
- message: expect.stringMatching('Authentication failed'),
+ message: jasmine.stringMatching('Authentication failed'),
})
)
})
diff --git a/__tests__/integration/clickhouse_settings.test.ts b/packages/client-common/__tests__/integration/clickhouse_settings.test.ts
similarity index 91%
rename from __tests__/integration/clickhouse_settings.test.ts
rename to packages/client-common/__tests__/integration/clickhouse_settings.test.ts
index c8d440d4..2fee6caf 100644
--- a/__tests__/integration/clickhouse_settings.test.ts
+++ b/packages/client-common/__tests__/integration/clickhouse_settings.test.ts
@@ -1,7 +1,7 @@
-import type { ClickHouseClient, InsertParams } from '../../src'
-import { SettingsMap } from '../../src'
+import type { ClickHouseClient, InsertParams } from '@clickhouse/client-common'
+import { SettingsMap } from '@clickhouse/client-common'
+import { createSimpleTable } from '../fixtures/simple_table'
import { createTestClient, guid } from '../utils'
-import { createSimpleTable } from './fixtures/simple_table'
// TODO: cover at least all enum settings
describe('ClickHouse settings', () => {
diff --git a/packages/client-common/__tests__/integration/config.test.ts b/packages/client-common/__tests__/integration/config.test.ts
new file mode 100644
index 00000000..3bad6c3d
--- /dev/null
+++ b/packages/client-common/__tests__/integration/config.test.ts
@@ -0,0 +1,37 @@
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createTestClient } from '../utils'
+
+describe('config', () => {
+ let client: ClickHouseClient
+
+ afterEach(async () => {
+ await client.close()
+ })
+
+ it('should set request timeout with "request_timeout" setting', async () => {
+ client = createTestClient({
+ request_timeout: 100,
+ })
+
+ await expectAsync(
+ client.query({
+ query: 'SELECT sleep(3)',
+ })
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringMatching('Timeout error.'),
+ })
+ )
+ })
+
+ it('should specify the default database name on creation', async () => {
+ client = createTestClient({
+ database: 'system',
+ })
+ const result = await client.query({
+ query: 'SELECT * FROM numbers LIMIT 2',
+ format: 'TabSeparated',
+ })
+ expect(await result.text()).toEqual('0\n1\n')
+ })
+})
diff --git a/__tests__/integration/data_types.test.ts b/packages/client-common/__tests__/integration/data_types.test.ts
similarity index 87%
rename from __tests__/integration/data_types.test.ts
rename to packages/client-common/__tests__/integration/data_types.test.ts
index 9cfbe5c4..6b69e1c1 100644
--- a/__tests__/integration/data_types.test.ts
+++ b/packages/client-common/__tests__/integration/data_types.test.ts
@@ -1,9 +1,7 @@
-import type { ClickHouseClient } from '../../src'
-import { createTestClient } from '../utils'
-import { v4 } from 'uuid'
-import { randomInt } from 'crypto'
-import Stream from 'stream'
-import { createTableWithFields } from './fixtures/table_with_fields'
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { randomUUID } from '@test/utils/guid'
+import { createTableWithFields } from '../fixtures/table_with_fields'
+import { createTestClient, getRandomInt } from '../utils'
describe('data types', () => {
let client: ClickHouseClient
@@ -82,35 +80,40 @@ describe('data types', () => {
it('should throw if a value is too large for a FixedString field', async () => {
const table = await createTableWithFields(client, 'fs FixedString(3)')
- await expect(
+ await expectAsync(
client.insert({
table,
values: [{ fs: 'foobar' }],
format: 'JSONEachRow',
})
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Too large value for FixedString(3)'),
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Too large value for FixedString(3)'),
})
)
})
it('should work with decimals', async () => {
- const stream = new Stream.Readable({
- objectMode: false,
- read() {
- //
- },
- })
- const row1 =
+ const row1 = {
+ id: 1,
+ d1: '1234567.89',
+ d2: '123456789123456.789',
+ d3: '1234567891234567891234567891.1234567891',
+ d4: '12345678912345678912345678911234567891234567891234567891.12345678911234567891',
+ }
+ const row2 = {
+ id: 2,
+ d1: '12.01',
+ d2: '5000000.405',
+ d3: '1.0000000004',
+ d4: '42.00000000000000013007',
+ }
+ const stringRow1 =
'1\t1234567.89\t123456789123456.789\t' +
'1234567891234567891234567891.1234567891\t' +
'12345678912345678912345678911234567891234567891234567891.12345678911234567891\n'
- const row2 =
+ const stringRow2 =
'2\t12.01\t5000000.405\t1.0000000004\t42.00000000000000013007\n'
- stream.push(row1)
- stream.push(row2)
- stream.push(null)
const table = await createTableWithFields(
client,
'd1 Decimal(9, 2), d2 Decimal(18, 3), ' +
@@ -118,8 +121,8 @@ describe('data types', () => {
)
await client.insert({
table,
- values: stream,
- format: 'TabSeparated',
+ values: [row1, row2],
+ format: 'JSONEachRow',
})
const result = await client
.query({
@@ -127,11 +130,11 @@ describe('data types', () => {
format: 'TabSeparated',
})
.then((r) => r.text())
- expect(result).toEqual(row1 + row2)
+ expect(result).toEqual(stringRow1 + stringRow2)
})
it('should work with UUID', async () => {
- const values = [{ u: v4() }, { u: v4() }]
+ const values = [{ u: randomUUID() }, { u: randomUUID() }]
const table = await createTableWithFields(client, 'u UUID')
await insertAndAssert(table, values)
})
@@ -255,15 +258,17 @@ describe('data types', () => {
// it's the largest reasonable nesting value (data is generated within 50 ms);
// 25 here can already tank the performance to ~500ms only to generate the data;
// 50 simply times out :)
- const maxNestingLevel = 20
+ // FIXME: investigate fetch max body length
+ // (reduced 20 to 10 because the body was too large and fetch failed)
+ const maxNestingLevel = 10
function genNestedArray(level: number): unknown {
if (level === 1) {
- return [...Array(randomInt(2, 4))].map(() =>
+ return [...Array(getRandomInt(2, 4))].map(() =>
Math.random().toString(36).slice(2)
)
}
- return [...Array(randomInt(1, 3))].map(() => genNestedArray(level - 1))
+ return [...Array(getRandomInt(1, 3))].map(() => genNestedArray(level - 1))
}
function genArrayType(level: number): string {
@@ -303,11 +308,10 @@ describe('data types', () => {
a3: genNestedArray(maxNestingLevel),
},
]
- const table = await createTableWithFields(
- client,
+ const fields =
'a1 Array(Int32), a2 Array(Array(Tuple(String, Int32))), ' +
- `a3 ${genArrayType(maxNestingLevel)}`
- )
+ `a3 ${genArrayType(maxNestingLevel)}`
+ const table = await createTableWithFields(client, fields)
await insertAndAssert(table, values)
})
@@ -317,13 +321,14 @@ describe('data types', () => {
function genNestedMap(level: number): unknown {
const obj: Record = {}
if (level === 1) {
- ;[...Array(randomInt(2, 4))].forEach(
- () => (obj[randomInt(1, 1000)] = Math.random().toString(36).slice(2))
+ ;[...Array(getRandomInt(2, 4))].forEach(
+ () =>
+ (obj[getRandomInt(1, 1000)] = Math.random().toString(36).slice(2))
)
return obj
}
- ;[...Array(randomInt(1, 3))].forEach(
- () => (obj[randomInt(1, 1000)] = genNestedMap(level - 1))
+ ;[...Array(getRandomInt(1, 3))].forEach(
+ () => (obj[getRandomInt(1, 1000)] = genNestedMap(level - 1))
)
return obj
}
@@ -469,7 +474,8 @@ describe('data types', () => {
await insertAndAssert(table, values)
})
- it.skip('should work with nested', async () => {
+ /** @see https://github.com/ClickHouse/clickhouse-js/issues/89 */
+ xit('should work with nested', async () => {
const values = [
{
id: 1,
diff --git a/__tests__/integration/date_time.test.ts b/packages/client-common/__tests__/integration/date_time.test.ts
similarity index 97%
rename from __tests__/integration/date_time.test.ts
rename to packages/client-common/__tests__/integration/date_time.test.ts
index 73d5ccaa..1ab5a25c 100644
--- a/__tests__/integration/date_time.test.ts
+++ b/packages/client-common/__tests__/integration/date_time.test.ts
@@ -1,5 +1,5 @@
-import { createTableWithFields } from './fixtures/table_with_fields'
-import type { ClickHouseClient } from '../../src'
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createTableWithFields } from '../fixtures/table_with_fields'
import { createTestClient } from '../utils'
describe('DateTime', () => {
diff --git a/__tests__/integration/error_parsing.test.ts b/packages/client-common/__tests__/integration/error_parsing.test.ts
similarity index 59%
rename from __tests__/integration/error_parsing.test.ts
rename to packages/client-common/__tests__/integration/error_parsing.test.ts
index 6acff633..785d1c2c 100644
--- a/__tests__/integration/error_parsing.test.ts
+++ b/packages/client-common/__tests__/integration/error_parsing.test.ts
@@ -1,7 +1,7 @@
-import { type ClickHouseClient, createClient } from '../../src'
+import type { ClickHouseClient } from '@clickhouse/client-common'
import { createTestClient, getTestDatabaseName } from '../utils'
-describe('error', () => {
+describe('ClickHouse server errors parsing', () => {
let client: ClickHouseClient
beforeEach(() => {
client = createTestClient()
@@ -11,12 +11,12 @@ describe('error', () => {
})
it('returns "unknown identifier" error', async () => {
- await expect(
+ await expectAsync(
client.query({
query: 'SELECT number FR',
})
- ).rejects.toEqual(
- expect.objectContaining({
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
message: `Missing columns: 'number' while processing query: 'SELECT number AS FR', required columns: 'number'. `,
code: '47',
type: 'UNKNOWN_IDENTIFIER',
@@ -25,12 +25,12 @@ describe('error', () => {
})
it('returns "unknown table" error', async () => {
- await expect(
+ await expectAsync(
client.query({
query: 'SELECT * FROM unknown_table',
})
- ).rejects.toEqual(
- expect.objectContaining({
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
message: `Table ${getTestDatabaseName()}.unknown_table doesn't exist. `,
code: '60',
type: 'UNKNOWN_TABLE',
@@ -39,13 +39,13 @@ describe('error', () => {
})
it('returns "syntax error" error', async () => {
- await expect(
+ await expectAsync(
client.query({
query: 'SELECT * FRON unknown_table',
})
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Syntax error: failed at position'),
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Syntax error: failed at position'),
code: '62',
type: 'SYNTAX_ERROR',
})
@@ -53,7 +53,7 @@ describe('error', () => {
})
it('returns "syntax error" error in a multiline query', async () => {
- await expect(
+ await expectAsync(
client.query({
query: `
SELECT *
@@ -63,28 +63,12 @@ describe('error', () => {
FRON unknown_table
`,
})
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Syntax error: failed at position'),
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Syntax error: failed at position'),
code: '62',
type: 'SYNTAX_ERROR',
})
)
})
-
- it('should return an error when URL is unreachable', async () => {
- await client.close()
- client = createClient({
- host: 'http://localhost:1111',
- })
- await expect(
- client.query({
- query: 'SELECT * FROM system.numbers LIMIT 3',
- })
- ).rejects.toEqual(
- expect.objectContaining({
- code: 'ECONNREFUSED',
- })
- )
- })
})
diff --git a/__tests__/integration/exec.test.ts b/packages/client-common/__tests__/integration/exec.test.ts
similarity index 64%
rename from __tests__/integration/exec.test.ts
rename to packages/client-common/__tests__/integration/exec.test.ts
index 761947c7..ce6eae97 100644
--- a/__tests__/integration/exec.test.ts
+++ b/packages/client-common/__tests__/integration/exec.test.ts
@@ -1,14 +1,13 @@
-import type { ExecParams, ResponseJSON } from '../../src'
-import { type ClickHouseClient } from '../../src'
+import type { ExecParams, ResponseJSON } from '@clickhouse/client-common'
+import { type ClickHouseClient } from '@clickhouse/client-common'
import {
createTestClient,
getClickHouseTestEnvironment,
getTestDatabaseName,
guid,
TestEnv,
+ validateUUID,
} from '../utils'
-import { getAsText } from '../../src/utils'
-import * as uuid from 'uuid'
describe('exec', () => {
let client: ClickHouseClient
@@ -27,7 +26,7 @@ describe('exec', () => {
})
// generated automatically
- expect(uuid.validate(query_id)).toBeTruthy()
+ expect(validateUUID(query_id)).toBeTruthy()
await checkCreatedTable({
tableName,
@@ -54,58 +53,25 @@ describe('exec', () => {
it('does not swallow ClickHouse error', async () => {
const { ddl, tableName } = getDDL()
- await expect(async () => {
- const exec = () =>
+ const commands = async () => {
+ const command = () =>
runExec({
query: ddl,
})
- await exec()
- await exec()
- }).rejects.toEqual(
- expect.objectContaining({
+ await command()
+ await command()
+ }
+ await expectAsync(commands()).toBeRejectedWith(
+ jasmine.objectContaining({
code: '57',
type: 'TABLE_ALREADY_EXISTS',
- message: expect.stringContaining(
+ message: jasmine.stringContaining(
`Table ${getTestDatabaseName()}.${tableName} already exists. `
),
})
)
})
- it('should send a parametrized query', async () => {
- const result = await client.exec({
- query: 'SELECT plus({val1: Int32}, {val2: Int32})',
- query_params: {
- val1: 10,
- val2: 20,
- },
- })
- expect(await getAsText(result.stream)).toEqual('30\n')
- })
-
- describe('trailing semi', () => {
- it('should allow commands with semi in select clause', async () => {
- const result = await client.exec({
- query: `SELECT ';' FORMAT CSV`,
- })
- expect(await getAsText(result.stream)).toEqual('";"\n')
- })
-
- it('should allow commands with trailing semi', async () => {
- const result = await client.exec({
- query: 'EXISTS system.databases;',
- })
- expect(await getAsText(result.stream)).toEqual('1\n')
- })
-
- it('should allow commands with multiple trailing semi', async () => {
- const result = await client.exec({
- query: 'EXISTS system.foobar;;;;;;',
- })
- expect(await getAsText(result.stream)).toEqual('0\n')
- })
- })
-
describe('sessions', () => {
let sessionClient: ClickHouseClient
beforeEach(() => {
@@ -119,34 +85,27 @@ describe('exec', () => {
it('should allow the use of a session', async () => {
// Temporary tables cannot be used without a session
- const { stream } = await sessionClient.exec({
- query: 'CREATE TEMPORARY TABLE test_temp (val Int32)',
- })
- stream.destroy()
+ const tableName = `temp_table_${guid()}`
+ await expectAsync(
+ sessionClient.exec({
+ query: `CREATE TEMPORARY TABLE ${tableName} (val Int32)`,
+ })
+ ).toBeResolved()
})
})
- it.skip('can specify a parameterized query', async () => {
- await runExec({
- query: '',
- query_params: {
- table_name: 'example',
- },
- })
-
- // FIXME: use different DDL based on the TestEnv
+ it('can specify a parameterized query', async () => {
const result = await client.query({
- query: `SELECT * from system.tables where name = 'example'`,
+ query: `SELECT * from system.tables where name = 'numbers'`,
format: 'JSON',
})
- const { data, rows } = await result.json<
- ResponseJSON<{ name: string; engine: string; create_table_query: string }>
- >()
-
- expect(rows).toBe(1)
- const table = data[0]
- expect(table.name).toBe('example')
+ const json = await result.json<{
+ rows: number
+ data: Array<{ name: string }>
+ }>()
+ expect(json.rows).toBe(1)
+ expect(json.data[0].name).toBe('numbers')
})
async function checkCreatedTable({
@@ -176,14 +135,13 @@ describe('exec', () => {
console.log(
`Running command with query_id ${params.query_id}:\n${params.query}`
)
- const { stream, query_id } = await client.exec({
+ const { query_id } = await client.exec({
...params,
clickhouse_settings: {
// ClickHouse responds to a command when it's completely finished
wait_end_of_query: 1,
},
})
- stream.destroy()
return { query_id }
}
})
diff --git a/__tests__/integration/insert.test.ts b/packages/client-common/__tests__/integration/insert.test.ts
similarity index 73%
rename from __tests__/integration/insert.test.ts
rename to packages/client-common/__tests__/integration/insert.test.ts
index a1c4b5a1..1d5f1571 100644
--- a/__tests__/integration/insert.test.ts
+++ b/packages/client-common/__tests__/integration/insert.test.ts
@@ -1,10 +1,7 @@
-import type { ResponseJSON } from '../../src'
-import { type ClickHouseClient } from '../../src'
-import { createTestClient, guid } from '../utils'
-import { createSimpleTable } from './fixtures/simple_table'
-import { assertJsonValues, jsonValues } from './fixtures/test_data'
-import Stream from 'stream'
-import * as uuid from 'uuid'
+import { type ClickHouseClient } from '@clickhouse/client-common'
+import { createSimpleTable } from '../fixtures/simple_table'
+import { assertJsonValues, jsonValues } from '../fixtures/test_data'
+import { createTestClient, guid, validateUUID } from '../utils'
describe('insert', () => {
let client: ClickHouseClient
@@ -42,7 +39,7 @@ describe('insert', () => {
format: 'JSON',
})
await assertJsonValues(client, tableName)
- expect(uuid.validate(query_id)).toBeTruthy()
+ expect(validateUUID(query_id)).toBeTruthy()
})
it('should use provide query_id', async () => {
@@ -104,7 +101,7 @@ describe('insert', () => {
format: 'JSONEachRow',
})
- const result = await rs.json()
+ const result = await rs.json()
expect(result).toEqual(values)
})
@@ -122,37 +119,19 @@ describe('insert', () => {
})
it('should provide error details when sending a request with an unknown clickhouse settings', async () => {
- await expect(
+ await expectAsync(
client.insert({
table: tableName,
values: jsonValues,
format: 'JSONEachRow',
clickhouse_settings: { foobar: 1 } as any,
})
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Unknown setting foobar'),
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Unknown setting foobar'),
code: '115',
type: 'UNKNOWN_SETTING',
})
)
})
-
- it('should provide error details about a dataset with an invalid type', async () => {
- await expect(
- client.insert({
- table: tableName,
- values: Stream.Readable.from(['42,foobar,"[1,2]"'], {
- objectMode: false,
- }),
- format: 'TabSeparated',
- })
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Cannot parse input'),
- code: '27',
- type: 'CANNOT_PARSE_INPUT_ASSERTION_FAILED',
- })
- )
- })
})
diff --git a/__tests__/integration/multiple_clients.test.ts b/packages/client-common/__tests__/integration/multiple_clients.test.ts
similarity index 75%
rename from __tests__/integration/multiple_clients.test.ts
rename to packages/client-common/__tests__/integration/multiple_clients.test.ts
index 1f3acc8a..6fa89a7f 100644
--- a/__tests__/integration/multiple_clients.test.ts
+++ b/packages/client-common/__tests__/integration/multiple_clients.test.ts
@@ -1,7 +1,6 @@
-import type { ClickHouseClient } from '../../src'
-import { createSimpleTable } from './fixtures/simple_table'
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createSimpleTable } from '../fixtures/simple_table'
import { createTestClient, guid } from '../utils'
-import Stream from 'stream'
const CLIENTS_COUNT = 5
@@ -90,25 +89,5 @@ describe('multiple clients', () => {
})
expect(await result.json()).toEqual(expected)
})
-
- it('should be able to send parallel inserts (streams)', async () => {
- const id = guid()
- const tableName = `multiple_clients_insert_streams_test__${id}`
- await createSimpleTable(clients[0], tableName)
- await Promise.all(
- clients.map((client, i) =>
- client.insert({
- table: tableName,
- values: Stream.Readable.from([getValue(i)]),
- format: 'JSONEachRow',
- })
- )
- )
- const result = await clients[0].query({
- query: `SELECT * FROM ${tableName} ORDER BY id ASC`,
- format: 'JSONEachRow',
- })
- expect(await result.json()).toEqual(expected)
- })
})
})
diff --git a/__tests__/integration/ping.test.ts b/packages/client-common/__tests__/integration/ping.test.ts
similarity index 51%
rename from __tests__/integration/ping.test.ts
rename to packages/client-common/__tests__/integration/ping.test.ts
index 9f42c9f8..f4d9fb5e 100644
--- a/__tests__/integration/ping.test.ts
+++ b/packages/client-common/__tests__/integration/ping.test.ts
@@ -1,4 +1,4 @@
-import { type ClickHouseClient } from '../../src'
+import { type ClickHouseClient } from '@clickhouse/client-common'
import { createTestClient } from '../utils'
describe('ping', () => {
@@ -12,14 +12,4 @@ describe('ping', () => {
const response = await client.ping()
expect(response).toBe(true)
})
-
- it('does not swallow a client error', async () => {
- client = createTestClient({
- host: 'http://localhost:3333',
- })
-
- await expect(client.ping()).rejects.toEqual(
- expect.objectContaining({ code: 'ECONNREFUSED' })
- )
- })
})
diff --git a/__tests__/integration/query_log.test.ts b/packages/client-common/__tests__/integration/query_log.test.ts
similarity index 59%
rename from __tests__/integration/query_log.test.ts
rename to packages/client-common/__tests__/integration/query_log.test.ts
index 8d86043c..66b5c2c3 100644
--- a/__tests__/integration/query_log.test.ts
+++ b/packages/client-common/__tests__/integration/query_log.test.ts
@@ -1,17 +1,12 @@
-import { type ClickHouseClient } from '../../src'
-import {
- createTestClient,
- guid,
- retryOnFailure,
- TestEnv,
- whenOnEnv,
-} from '../utils'
-import { createSimpleTable } from './fixtures/simple_table'
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createSimpleTable } from '../fixtures/simple_table'
+import { createTestClient, guid, TestEnv, whenOnEnv } from '../utils'
+import { sleep } from '../utils/sleep'
// these tests are very flaky in the Cloud environment
-// likely due flushing the query_log not too often
+// likely due to the fact that flushing the query_log there happens not too often
// it's better to execute only with the local single node or cluster
-const testEnvs = [TestEnv.LocalSingleNode, TestEnv.LocalCluster]
+const testEnvs = [TestEnv.LocalSingleNode]
describe('query_log', () => {
let client: ClickHouseClient
@@ -76,41 +71,35 @@ describe('query_log', () => {
}) {
// query_log is flushed every ~1000 milliseconds
// so this might fail a couple of times
- await retryOnFailure(
- async () => {
- const logResultSet = await client.query({
- query: `
- SELECT * FROM system.query_log
- WHERE query_id = {query_id: String}
- `,
- query_params: {
- query_id,
- },
- format: 'JSONEachRow',
- })
- expect(await logResultSet.json()).toEqual([
- expect.objectContaining({
- type: 'QueryStart',
- query: formattedQuery,
- initial_query_id: query_id,
- query_duration_ms: expect.any(String),
- read_rows: expect.any(String),
- read_bytes: expect.any(String),
- }),
- expect.objectContaining({
- type: 'QueryFinish',
- query: formattedQuery,
- initial_query_id: query_id,
- query_duration_ms: expect.any(String),
- read_rows: expect.any(String),
- read_bytes: expect.any(String),
- }),
- ])
+ // FIXME: jasmine expectations do not throw, so retryOnFailure does not work here
+ await sleep(1200)
+ const logResultSet = await client.query({
+ query: `
+ SELECT * FROM system.query_log
+ WHERE query_id = {query_id: String}
+ `,
+ query_params: {
+ query_id,
},
- {
- maxAttempts: 30,
- waitBetweenAttemptsMs: 100,
- }
- )
+ format: 'JSONEachRow',
+ })
+ expect(await logResultSet.json()).toEqual([
+ jasmine.objectContaining({
+ type: 'QueryStart',
+ query: formattedQuery,
+ initial_query_id: query_id,
+ query_duration_ms: jasmine.any(String),
+ read_rows: jasmine.any(String),
+ read_bytes: jasmine.any(String),
+ }),
+ jasmine.objectContaining({
+ type: 'QueryFinish',
+ query: formattedQuery,
+ initial_query_id: query_id,
+ query_duration_ms: jasmine.any(String),
+ read_rows: jasmine.any(String),
+ read_bytes: jasmine.any(String),
+ }),
+ ])
}
})
diff --git a/__tests__/integration/read_only_user.test.ts b/packages/client-common/__tests__/integration/read_only_user.test.ts
similarity index 76%
rename from __tests__/integration/read_only_user.test.ts
rename to packages/client-common/__tests__/integration/read_only_user.test.ts
index 28f48945..dbb66c28 100644
--- a/__tests__/integration/read_only_user.test.ts
+++ b/packages/client-common/__tests__/integration/read_only_user.test.ts
@@ -1,7 +1,7 @@
-import type { ClickHouseClient } from '../../src'
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createReadOnlyUser } from '../fixtures/read_only_user'
+import { createSimpleTable } from '../fixtures/simple_table'
import { createTestClient, getTestDatabaseName, guid } from '../utils'
-import { createSimpleTable } from './fixtures/simple_table'
-import { createReadOnlyUser } from './fixtures/read_only_user'
describe('read only user', () => {
let client: ClickHouseClient
@@ -52,24 +52,24 @@ describe('read only user', () => {
})
it('should fail to create a table', async () => {
- await expect(
+ await expectAsync(
createSimpleTable(client, `should_not_be_created_${guid()}`)
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Not enough privileges'),
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Not enough privileges'),
})
)
})
it('should fail to insert', async () => {
- await expect(
+ await expectAsync(
client.insert({
table: tableName,
values: [[43, 'foobar', [5, 25]]],
})
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Not enough privileges'),
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Not enough privileges'),
})
)
})
@@ -77,9 +77,9 @@ describe('read only user', () => {
// TODO: find a way to restrict all the system tables access
it('should fail to query system tables', async () => {
const query = `SELECT * FROM system.users LIMIT 5`
- await expect(client.query({ query })).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Not enough privileges'),
+ await expectAsync(client.query({ query })).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Not enough privileges'),
})
)
})
diff --git a/__tests__/integration/request_compression.test.ts b/packages/client-common/__tests__/integration/request_compression.test.ts
similarity index 85%
rename from __tests__/integration/request_compression.test.ts
rename to packages/client-common/__tests__/integration/request_compression.test.ts
index a6193f74..690aa9e4 100644
--- a/__tests__/integration/request_compression.test.ts
+++ b/packages/client-common/__tests__/integration/request_compression.test.ts
@@ -1,6 +1,9 @@
-import { type ClickHouseClient, type ResponseJSON } from '../../src'
+import {
+ type ClickHouseClient,
+ type ResponseJSON,
+} from '@clickhouse/client-common'
+import { createSimpleTable } from '../fixtures/simple_table'
import { createTestClient, guid } from '../utils'
-import { createSimpleTable } from './fixtures/simple_table'
describe('insert compression', () => {
let client: ClickHouseClient
diff --git a/__tests__/integration/response_compression.test.ts b/packages/client-common/__tests__/integration/response_compression.test.ts
similarity index 90%
rename from __tests__/integration/response_compression.test.ts
rename to packages/client-common/__tests__/integration/response_compression.test.ts
index ca1002de..ed06a28b 100644
--- a/__tests__/integration/response_compression.test.ts
+++ b/packages/client-common/__tests__/integration/response_compression.test.ts
@@ -1,4 +1,4 @@
-import { type ClickHouseClient } from '../../src'
+import { type ClickHouseClient } from '@clickhouse/client-common'
import { createTestClient } from '../utils'
describe('response compression', () => {
diff --git a/packages/client-common/__tests__/integration/select.test.ts b/packages/client-common/__tests__/integration/select.test.ts
new file mode 100644
index 00000000..41b03fd8
--- /dev/null
+++ b/packages/client-common/__tests__/integration/select.test.ts
@@ -0,0 +1,205 @@
+import {
+ type ClickHouseClient,
+ type ResponseJSON,
+} from '@clickhouse/client-common'
+import { createTestClient, guid, validateUUID } from '../utils'
+
+describe('select', () => {
+ let client: ClickHouseClient
+ afterEach(async () => {
+ await client.close()
+ })
+ beforeEach(async () => {
+ client = createTestClient()
+ })
+
+ it('gets query_id back', async () => {
+ const resultSet = await client.query({
+ query: 'SELECT * FROM system.numbers LIMIT 1',
+ format: 'JSONEachRow',
+ })
+ expect(await resultSet.json()).toEqual([{ number: '0' }])
+ expect(validateUUID(resultSet.query_id)).toBeTruthy()
+ })
+
+ it('can override query_id', async () => {
+ const query_id = guid()
+ const resultSet = await client.query({
+ query: 'SELECT * FROM system.numbers LIMIT 1',
+ format: 'JSONEachRow',
+ query_id,
+ })
+ expect(await resultSet.json()).toEqual([{ number: '0' }])
+ expect(resultSet.query_id).toEqual(query_id)
+ })
+
+ it('can process an empty response', async () => {
+ expect(
+ await client
+ .query({
+ query: 'SELECT * FROM system.numbers LIMIT 0',
+ format: 'JSONEachRow',
+ })
+ .then((r) => r.json())
+ ).toEqual([])
+ expect(
+ await client
+ .query({
+ query: 'SELECT * FROM system.numbers LIMIT 0',
+ format: 'TabSeparated',
+ })
+ .then((r) => r.text())
+ ).toEqual('')
+ })
+
+ it('can send a multiline query', async () => {
+ const rs = await client.query({
+ query: `
+ SELECT number
+ FROM system.numbers
+ LIMIT 2
+ `,
+ format: 'CSV',
+ })
+
+ const response = await rs.text()
+ expect(response).toBe('0\n1\n')
+ })
+
+ it('can send a query with an inline comment', async () => {
+ const rs = await client.query({
+ query: `
+ SELECT number
+ -- a comment
+ FROM system.numbers
+ LIMIT 2
+ `,
+ format: 'CSV',
+ })
+
+ const response = await rs.text()
+ expect(response).toBe('0\n1\n')
+ })
+
+ it('can send a query with a multiline comment', async () => {
+ const rs = await client.query({
+ query: `
+ SELECT number
+ /* This is:
+ a multiline comment
+ */
+ FROM system.numbers
+ LIMIT 2
+ `,
+ format: 'CSV',
+ })
+
+ const response = await rs.text()
+ expect(response).toBe('0\n1\n')
+ })
+
+ it('can send a query with a trailing comment', async () => {
+ const rs = await client.query({
+ query: `
+ SELECT number
+ FROM system.numbers
+ LIMIT 2
+ -- comment`,
+ format: 'JSON',
+ })
+
+ const response = await rs.json>()
+ expect(response.data).toEqual([{ number: '0' }, { number: '1' }])
+ })
+
+ it('can specify settings in select', async () => {
+ const rs = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'CSV',
+ clickhouse_settings: {
+ limit: '2',
+ },
+ })
+
+ const response = await rs.text()
+ expect(response).toBe('0\n1\n')
+ })
+
+ it('does not swallow a client error', async () => {
+ await expectAsync(
+ client.query({ query: 'SELECT number FR' })
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ type: 'UNKNOWN_IDENTIFIER',
+ })
+ )
+ })
+
+ it('returns an error details provided by ClickHouse', async () => {
+ await expectAsync(client.query({ query: 'foobar' })).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Syntax error'),
+ code: '62',
+ type: 'SYNTAX_ERROR',
+ })
+ )
+ })
+
+ it('should provide error details when sending a request with an unknown clickhouse settings', async () => {
+ await expectAsync(
+ client.query({
+ query: 'SELECT * FROM system.numbers',
+ clickhouse_settings: { foobar: 1 } as any,
+ })
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Unknown setting foobar'),
+ code: '115',
+ type: 'UNKNOWN_SETTING',
+ })
+ )
+ })
+
+ it('can send multiple simultaneous requests', async () => {
+ type Res = Array<{ sum: number }>
+ const results: number[] = []
+ await Promise.all(
+ [...Array(5)].map((_, i) =>
+ client
+ .query({
+ query: `SELECT toInt32(sum(*)) AS sum FROM numbers(0, ${i + 2});`,
+ format: 'JSONEachRow',
+ })
+ .then((r) => r.json())
+ .then((json: Res) => results.push(json[0].sum))
+ )
+ )
+ expect(results.sort((a, b) => a - b)).toEqual([1, 3, 6, 10, 15])
+ })
+
+ describe('trailing semi', () => {
+ it('should allow queries with trailing semicolon', async () => {
+ const numbers = await client.query({
+ query: 'SELECT * FROM system.numbers LIMIT 3;',
+ format: 'CSV',
+ })
+ expect(await numbers.text()).toEqual('0\n1\n2\n')
+ })
+
+ it('should allow queries with multiple trailing semicolons', async () => {
+ const numbers = await client.query({
+ query: 'SELECT * FROM system.numbers LIMIT 3;;;;;;;;;;;;;;;;;',
+ format: 'CSV',
+ })
+ expect(await numbers.text()).toEqual('0\n1\n2\n')
+ })
+
+ it('should allow semi in select clause', async () => {
+ const resultSet = await client.query({
+ query: `SELECT ';'`,
+ format: 'CSV',
+ })
+ expect(await resultSet.text()).toEqual('";"\n')
+ })
+ })
+})
diff --git a/__tests__/integration/select_query_binding.test.ts b/packages/client-common/__tests__/integration/select_query_binding.test.ts
similarity index 96%
rename from __tests__/integration/select_query_binding.test.ts
rename to packages/client-common/__tests__/integration/select_query_binding.test.ts
index 895ff387..1ccb3dbd 100644
--- a/__tests__/integration/select_query_binding.test.ts
+++ b/packages/client-common/__tests__/integration/select_query_binding.test.ts
@@ -1,5 +1,5 @@
-import type { QueryParams } from '../../src'
-import { type ClickHouseClient } from '../../src'
+import type { QueryParams } from '@clickhouse/client-common'
+import { type ClickHouseClient } from '@clickhouse/client-common'
import { createTestClient } from '../utils'
describe('select with query binding', () => {
@@ -251,16 +251,16 @@ describe('select with query binding', () => {
})
it('should provide error details when sending a request with missing parameter', async () => {
- await expect(
+ await expectAsync(
client.query({
query: `
SELECT * FROM system.numbers
WHERE number > {min_limit: UInt64} LIMIT 3
`,
})
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining(
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining(
'Query parameter `min_limit` was not set'
),
code: '456',
diff --git a/packages/client-common/__tests__/integration/select_result.test.ts b/packages/client-common/__tests__/integration/select_result.test.ts
new file mode 100644
index 00000000..2699154a
--- /dev/null
+++ b/packages/client-common/__tests__/integration/select_result.test.ts
@@ -0,0 +1,93 @@
+import type { ClickHouseClient, ResponseJSON } from '@clickhouse/client-common'
+import { createTestClient } from '../utils'
+
+describe('Select ResultSet', () => {
+ let client: ClickHouseClient
+ afterEach(async () => {
+ await client.close()
+ })
+ beforeEach(async () => {
+ client = createTestClient()
+ })
+
+ describe('text()', function () {
+ it('returns values from SELECT query in specified format', async () => {
+ const rs = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 3',
+ format: 'CSV',
+ })
+
+ expect(await rs.text()).toBe('0\n1\n2\n')
+ })
+ it('returns values from SELECT query in specified format', async () => {
+ const rs = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 3',
+ format: 'JSONEachRow',
+ })
+
+ expect(await rs.text()).toBe(
+ '{"number":"0"}\n{"number":"1"}\n{"number":"2"}\n'
+ )
+ })
+ })
+
+ describe('json()', () => {
+ it('returns an array of values in data property', async () => {
+ const rs = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSON',
+ })
+
+ const { data: nums } = await rs.json>()
+ expect(Array.isArray(nums)).toBe(true)
+ expect(nums.length).toEqual(5)
+ const values = nums.map((i) => i.number)
+ expect(values).toEqual(['0', '1', '2', '3', '4'])
+ })
+
+ it('returns columns data in response', async () => {
+ const rs = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSON',
+ })
+
+ const { meta } = await rs.json>()
+
+ expect(meta?.length).toBe(1)
+ const column = meta ? meta[0] : undefined
+ expect(column).toEqual({
+ name: 'number',
+ type: 'UInt64',
+ })
+ })
+
+ it('returns number of rows in response', async () => {
+ const rs = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSON',
+ })
+
+ const response = await rs.json>()
+
+ expect(response.rows).toBe(5)
+ })
+
+ it('returns statistics in response', async () => {
+ const rs = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSON',
+ })
+
+ const response = await rs.json>()
+ expect(response).toEqual(
+ jasmine.objectContaining({
+ statistics: {
+ elapsed: jasmine.any(Number),
+ rows_read: jasmine.any(Number),
+ bytes_read: jasmine.any(Number),
+ },
+ })
+ )
+ })
+ })
+})
diff --git a/__tests__/unit/format_query_params.test.ts b/packages/client-common/__tests__/unit/format_query_params.test.ts
similarity index 97%
rename from __tests__/unit/format_query_params.test.ts
rename to packages/client-common/__tests__/unit/format_query_params.test.ts
index 97ef1230..903c3912 100644
--- a/__tests__/unit/format_query_params.test.ts
+++ b/packages/client-common/__tests__/unit/format_query_params.test.ts
@@ -1,4 +1,4 @@
-import { formatQueryParams } from '../../src/data_formatter'
+import { formatQueryParams } from '@clickhouse/client-common'
// JS always creates Date object in local timezone,
// so we might need to convert the date to another timezone
diff --git a/__tests__/unit/format_query_settings.test.ts b/packages/client-common/__tests__/unit/format_query_settings.test.ts
similarity index 88%
rename from __tests__/unit/format_query_settings.test.ts
rename to packages/client-common/__tests__/unit/format_query_settings.test.ts
index ac16231a..133206f6 100644
--- a/__tests__/unit/format_query_settings.test.ts
+++ b/packages/client-common/__tests__/unit/format_query_settings.test.ts
@@ -1,5 +1,4 @@
-import { formatQuerySettings } from '../../src/data_formatter'
-import { SettingsMap } from '../../src'
+import { formatQuerySettings, SettingsMap } from '@clickhouse/client-common'
describe('formatQuerySettings', () => {
it('formats boolean', () => {
diff --git a/__tests__/unit/parse_error.test.ts b/packages/client-common/__tests__/unit/parse_error.test.ts
similarity index 96%
rename from __tests__/unit/parse_error.test.ts
rename to packages/client-common/__tests__/unit/parse_error.test.ts
index 856fa4dc..e3b95f4c 100644
--- a/__tests__/unit/parse_error.test.ts
+++ b/packages/client-common/__tests__/unit/parse_error.test.ts
@@ -1,4 +1,4 @@
-import { parseError, ClickHouseError } from '../../src/error'
+import { ClickHouseError, parseError } from '@clickhouse/client-common'
describe('parseError', () => {
it('parses a single line error', () => {
@@ -77,9 +77,9 @@ describe('parseError', () => {
})
})
- describe('Cluster mode errors', () => {
+ xdescribe('Cluster mode errors', () => {
// FIXME: https://github.com/ClickHouse/clickhouse-js/issues/39
- it.skip('should work with TABLE_ALREADY_EXISTS', async () => {
+ it('should work with TABLE_ALREADY_EXISTS', async () => {
const message = `Code: 57. DB::Exception: There was an error on [clickhouse2:9000]: Code: 57. DB::Exception: Table default.command_test_2a751694160745f5aebe586c90b27515 already exists. (TABLE_ALREADY_EXISTS) (version 22.6.5.22 (official build)). (TABLE_ALREADY_EXISTS) (version 22.6.5.22 (official build))`
const error = parseError(message) as ClickHouseError
diff --git a/__tests__/unit/to_search_params.test.ts b/packages/client-common/__tests__/unit/to_search_params.test.ts
similarity index 96%
rename from __tests__/unit/to_search_params.test.ts
rename to packages/client-common/__tests__/unit/to_search_params.test.ts
index fa64a6c8..a327cb57 100644
--- a/__tests__/unit/to_search_params.test.ts
+++ b/packages/client-common/__tests__/unit/to_search_params.test.ts
@@ -1,4 +1,4 @@
-import { toSearchParams } from '../../src/connection/adapter/http_search_params'
+import { toSearchParams } from '@clickhouse/client-common'
import type { URLSearchParams } from 'url'
describe('toSearchParams', () => {
diff --git a/__tests__/unit/transform_url.test.ts b/packages/client-common/__tests__/unit/transform_url.test.ts
similarity index 94%
rename from __tests__/unit/transform_url.test.ts
rename to packages/client-common/__tests__/unit/transform_url.test.ts
index 78711be1..524f7815 100644
--- a/__tests__/unit/transform_url.test.ts
+++ b/packages/client-common/__tests__/unit/transform_url.test.ts
@@ -1,4 +1,4 @@
-import { transformUrl } from '../../src/connection/adapter/transform_url'
+import { transformUrl } from '@clickhouse/client-common'
describe('transformUrl', () => {
it('attaches pathname and search params to the url', () => {
diff --git a/__tests__/utils/client.ts b/packages/client-common/__tests__/utils/client.ts
similarity index 58%
rename from __tests__/utils/client.ts
rename to packages/client-common/__tests__/utils/client.ts
index 5f47db5f..003126bc 100644
--- a/__tests__/utils/client.ts
+++ b/packages/client-common/__tests__/utils/client.ts
@@ -1,23 +1,34 @@
+/* eslint @typescript-eslint/no-var-requires: 0 */
import type {
+ BaseClickHouseClientConfigOptions,
ClickHouseClient,
- ClickHouseClientConfigOptions,
ClickHouseSettings,
-} from '../../src'
-import { createClient } from '../../src'
+} from '@clickhouse/client-common'
+import { getFromEnv } from './env'
import { guid } from './guid'
-import { TestLogger } from './test_logger'
import { getClickHouseTestEnvironment, TestEnv } from './test_env'
-import { getFromEnv } from './env'
-import { TestDatabaseEnvKey } from '../global.integration'
+import { TestLogger } from './test_logger'
+
+let databaseName: string
+beforeAll(async () => {
+ jasmine.DEFAULT_TIMEOUT_INTERVAL = 60000
+ if (
+ getClickHouseTestEnvironment() === TestEnv.Cloud &&
+ databaseName === undefined
+ ) {
+ const client = createTestClient()
+ databaseName = await createRandomDatabase(client)
+ await client.close()
+ }
+})
-export function createTestClient(
- config: ClickHouseClientConfigOptions = {}
-): ClickHouseClient {
+export function createTestClient(
+ config: BaseClickHouseClientConfigOptions = {}
+): ClickHouseClient {
const env = getClickHouseTestEnvironment()
- const database = process.env[TestDatabaseEnvKey]
console.log(
`Using ${env} test environment to create a Client instance for database ${
- database || 'default'
+ databaseName || 'default'
}`
)
const clickHouseSettings: ClickHouseSettings = {}
@@ -36,21 +47,42 @@ export function createTestClient(
},
}
if (env === TestEnv.Cloud) {
- return createClient({
+ const cloudConfig: BaseClickHouseClientConfigOptions = {
host: `https://${getFromEnv('CLICKHOUSE_CLOUD_HOST')}:8443`,
password: getFromEnv('CLICKHOUSE_CLOUD_PASSWORD'),
- database,
+ database: databaseName,
...logging,
...config,
clickhouse_settings: clickHouseSettings,
- })
+ }
+ if (process.env.browser) {
+ return require('../../../client-browser/src/client').createClient(
+ cloudConfig
+ )
+ } else {
+ // props to https://stackoverflow.com/a/41063795/4575540
+ // @ts-expect-error
+ return eval('require')('../../../client-node/src/client').createClient(
+ cloudConfig
+ ) as ClickHouseClient
+ }
} else {
- return createClient({
- database,
+ const localConfig: BaseClickHouseClientConfigOptions = {
+ database: databaseName,
...logging,
...config,
clickhouse_settings: clickHouseSettings,
- })
+ }
+ if (process.env.browser) {
+ return require('../../../client-browser/src/client').createClient(
+ localConfig
+ ) // eslint-disable-line @typescript-eslint/no-var-requires
+ } else {
+ // @ts-expect-error
+ return eval('require')('../../../client-node/src/client').createClient(
+ localConfig
+ ) as ClickHouseClient
+ }
}
}
@@ -72,8 +104,8 @@ export async function createRandomDatabase(
return databaseName
}
-export async function createTable(
- client: ClickHouseClient,
+export async function createTable(
+ client: ClickHouseClient,
definition: (environment: TestEnv) => string,
clickhouse_settings?: ClickHouseSettings
) {
@@ -93,5 +125,5 @@ export async function createTable(
}
export function getTestDatabaseName(): string {
- return process.env[TestDatabaseEnvKey] || 'default'
+ return databaseName || 'default'
}
diff --git a/__tests__/utils/env.ts b/packages/client-common/__tests__/utils/env.ts
similarity index 100%
rename from __tests__/utils/env.ts
rename to packages/client-common/__tests__/utils/env.ts
diff --git a/packages/client-common/__tests__/utils/guid.ts b/packages/client-common/__tests__/utils/guid.ts
new file mode 100644
index 00000000..2da20c64
--- /dev/null
+++ b/packages/client-common/__tests__/utils/guid.ts
@@ -0,0 +1,13 @@
+import * as uuid from 'uuid'
+
+export function guid(): string {
+ return uuid.v4().replace(/-/g, '')
+}
+
+export function randomUUID(): string {
+ return uuid.v4()
+}
+
+export function validateUUID(s: string): boolean {
+ return uuid.validate(s)
+}
diff --git a/__tests__/utils/index.ts b/packages/client-common/__tests__/utils/index.ts
similarity index 53%
rename from __tests__/utils/index.ts
rename to packages/client-common/__tests__/utils/index.ts
index c8532e67..849fd37f 100644
--- a/__tests__/utils/index.ts
+++ b/packages/client-common/__tests__/utils/index.ts
@@ -5,10 +5,9 @@ export {
createTable,
getTestDatabaseName,
} from './client'
-export { guid } from './guid'
+export { guid, validateUUID } from './guid'
export { getClickHouseTestEnvironment } from './test_env'
export { TestEnv } from './test_env'
-export { retryOnFailure } from './retry'
-export { createTableWithSchema } from './schema'
-export { makeObjectStream, makeRawStream } from './stream'
-export { whenOnEnv } from './jest'
+export { sleep } from './sleep'
+export { whenOnEnv } from './jasmine'
+export { getRandomInt } from './random'
diff --git a/__tests__/utils/jest.ts b/packages/client-common/__tests__/utils/jasmine.ts
similarity index 73%
rename from __tests__/utils/jest.ts
rename to packages/client-common/__tests__/utils/jasmine.ts
index c5af9044..a30e85fd 100644
--- a/__tests__/utils/jest.ts
+++ b/packages/client-common/__tests__/utils/jasmine.ts
@@ -4,12 +4,12 @@ import { getClickHouseTestEnvironment } from './test_env'
export const whenOnEnv = (...envs: TestEnv[]) => {
const currentEnv = getClickHouseTestEnvironment()
return {
- it: (...args: Parameters) =>
+ it: (...args: Parameters) =>
envs.includes(currentEnv) ? it(...args) : logAndSkip(currentEnv, ...args),
}
}
-function logAndSkip(currentEnv: TestEnv, ...args: Parameters) {
+function logAndSkip(currentEnv: TestEnv, ...args: Parameters) {
console.info(`Test "${args[0]}" is skipped for ${currentEnv} environment`)
- return it.skip(...args)
+ return xit(...args)
}
diff --git a/packages/client-common/__tests__/utils/random.ts b/packages/client-common/__tests__/utils/random.ts
new file mode 100644
index 00000000..c08815e8
--- /dev/null
+++ b/packages/client-common/__tests__/utils/random.ts
@@ -0,0 +1,6 @@
+/** @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/random#getting_a_random_integer_between_two_values */
+export function getRandomInt(min: number, max: number): number {
+ min = Math.ceil(min)
+ max = Math.floor(max)
+ return Math.floor(Math.random() * (max - min) + min) // The maximum is exclusive and the minimum is inclusive
+}
diff --git a/packages/client-common/__tests__/utils/sleep.ts b/packages/client-common/__tests__/utils/sleep.ts
new file mode 100644
index 00000000..adf71b01
--- /dev/null
+++ b/packages/client-common/__tests__/utils/sleep.ts
@@ -0,0 +1,5 @@
+export function sleep(ms: number): Promise {
+ return new Promise((resolve) => {
+ setTimeout(resolve, ms)
+ })
+}
diff --git a/packages/client-common/__tests__/utils/test_connection_type.ts b/packages/client-common/__tests__/utils/test_connection_type.ts
new file mode 100644
index 00000000..8e433c00
--- /dev/null
+++ b/packages/client-common/__tests__/utils/test_connection_type.ts
@@ -0,0 +1,23 @@
+export enum TestConnectionType {
+ Node = 'node',
+ Browser = 'browser',
+}
+export function getTestConnectionType(): TestConnectionType {
+ let connectionType
+ switch (process.env['CLICKHOUSE_TEST_CONNECTION_TYPE']) {
+ case 'browser':
+ connectionType = TestConnectionType.Browser
+ break
+ case 'node':
+ case undefined:
+ connectionType = TestConnectionType.Node
+ break
+ default:
+ throw new Error(
+ 'Unexpected CLICKHOUSE_TEST_CONNECTION_TYPE value. ' +
+ 'Possible options: `node`, `browser` ' +
+ 'or keep it unset to fall back to `node`'
+ )
+ }
+ return connectionType
+}
diff --git a/__tests__/utils/test_env.ts b/packages/client-common/__tests__/utils/test_env.ts
similarity index 78%
rename from __tests__/utils/test_env.ts
rename to packages/client-common/__tests__/utils/test_env.ts
index 2cb17dfd..1c7b340d 100644
--- a/__tests__/utils/test_env.ts
+++ b/packages/client-common/__tests__/utils/test_env.ts
@@ -6,7 +6,8 @@ export enum TestEnv {
export function getClickHouseTestEnvironment(): TestEnv {
let env
- switch (process.env['CLICKHOUSE_TEST_ENVIRONMENT']) {
+ const value = process.env['CLICKHOUSE_TEST_ENVIRONMENT']
+ switch (value) {
case 'cloud':
env = TestEnv.Cloud
break
@@ -14,12 +15,13 @@ export function getClickHouseTestEnvironment(): TestEnv {
env = TestEnv.LocalCluster
break
case 'local_single_node':
+ case 'undefined':
case undefined:
env = TestEnv.LocalSingleNode
break
default:
throw new Error(
- 'Unexpected CLICKHOUSE_TEST_ENVIRONMENT value. ' +
+ `Unexpected CLICKHOUSE_TEST_ENVIRONMENT value: ${value}. ` +
'Possible options: `local_single_node`, `local_cluster`, `cloud` ' +
'or keep it unset to fall back to `local_single_node`'
)
diff --git a/__tests__/utils/test_logger.ts b/packages/client-common/__tests__/utils/test_logger.ts
similarity index 65%
rename from __tests__/utils/test_logger.ts
rename to packages/client-common/__tests__/utils/test_logger.ts
index c9ddf7c9..c9e35835 100644
--- a/__tests__/utils/test_logger.ts
+++ b/packages/client-common/__tests__/utils/test_logger.ts
@@ -1,5 +1,8 @@
-import type { Logger } from '../../src'
-import type { ErrorLogParams, LogParams } from '../../src/logger'
+import type {
+ ErrorLogParams,
+ Logger,
+ LogParams,
+} from '@clickhouse/client-common'
export class TestLogger implements Logger {
trace({ module, message, args }: LogParams) {
@@ -32,16 +35,5 @@ function formatMessage({
module: string
message: string
}): string {
- return `[${level}][${module}][${getTestName()}] ${message}`
-}
-
-function getTestName() {
- try {
- return expect.getState().currentTestName || 'Unknown'
- } catch (e) {
- // ReferenceError can happen here cause `expect`
- // is not yet available during globalSetup phase,
- // and we are not allowed to import it explicitly
- return 'Global Setup'
- }
+ return `[${level}][${module}] ${message}`
}
diff --git a/packages/client-common/package.json b/packages/client-common/package.json
new file mode 100644
index 00000000..5c80024d
--- /dev/null
+++ b/packages/client-common/package.json
@@ -0,0 +1,24 @@
+{
+ "name": "@clickhouse/client-common",
+ "description": "Official JS client for ClickHouse DB - common types",
+ "homepage": "https://clickhouse.com",
+ "version": "0.0.0",
+ "license": "Apache-2.0",
+ "keywords": [
+ "clickhouse",
+ "sql",
+ "client"
+ ],
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/ClickHouse/clickhouse-js.git"
+ },
+ "private": false,
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "files": [
+ "dist"
+ ],
+ "dependencies": {},
+ "devDependencies": {}
+}
diff --git a/src/clickhouse_types.ts b/packages/client-common/src/clickhouse_types.ts
similarity index 100%
rename from src/clickhouse_types.ts
rename to packages/client-common/src/clickhouse_types.ts
diff --git a/packages/client-common/src/client.ts b/packages/client-common/src/client.ts
new file mode 100644
index 00000000..341c5400
--- /dev/null
+++ b/packages/client-common/src/client.ts
@@ -0,0 +1,318 @@
+import type {
+ ClickHouseLogLevel,
+ ClickHouseSettings,
+ Connection,
+ ConnectionParams,
+ ConnInsertResult,
+ ConnQueryResult,
+ Logger,
+} from '@clickhouse/client-common'
+import {
+ type DataFormat,
+ DefaultLogger,
+ LogWriter,
+} from '@clickhouse/client-common'
+import type { InputJSON, InputJSONObjectEachRow } from './clickhouse_types'
+import type { BaseResultSet } from './result'
+
+export type MakeConnection = (
+ params: ConnectionParams
+) => Connection
+
+export type MakeResultSet = (
+ stream: Stream,
+ format: DataFormat,
+ session_id: string
+) => BaseResultSet
+
+export interface ValuesEncoder {
+ validateInsertValues(
+ values: InsertValues,
+ format: DataFormat
+ ): void
+
+ /**
+ * A function encodes an array or a stream of JSON objects to a format compatible with ClickHouse.
+ * If values are provided as an array of JSON objects, the function encodes it in place.
+ * If values are provided as a stream of JSON objects, the function sets up the encoding of each chunk.
+ * If values are provided as a raw non-object stream, the function does nothing.
+ *
+ * @param values a set of values to send to ClickHouse.
+ * @param format a format to encode value to.
+ */
+ encodeValues(
+ values: InsertValues,
+ format: DataFormat
+ ): string | Stream
+}
+
+export type CloseStream = (stream: Stream) => Promise
+
+export interface ClickHouseClientConfigOptions {
+ impl: {
+ make_connection: MakeConnection
+ make_result_set: MakeResultSet
+ values_encoder: ValuesEncoder
+ close_stream: CloseStream
+ }
+ /** A ClickHouse instance URL. Default value: `http://localhost:8123`. */
+ host?: string
+ /** The request timeout in milliseconds. Default value: `30_000`. */
+ request_timeout?: number
+ /** Maximum number of sockets to allow per host. Default value: `Infinity`. */
+ max_open_connections?: number
+
+ compression?: {
+ /** `response: true` instructs ClickHouse server to respond with
+ * compressed response body. Default: true. */
+ response?: boolean
+ /** `request: true` enabled compression on the client request body.
+ * Default: false. */
+ request?: boolean
+ }
+ /** The name of the user on whose behalf requests are made.
+ * Default: 'default'. */
+ username?: string
+ /** The user password. Default: ''. */
+ password?: string
+ /** The name of the application using the nodejs client.
+ * Default: empty. */
+ application?: string
+ /** Database name to use. Default value: `default`. */
+ database?: string
+ /** ClickHouse settings to apply to all requests. Default value: {} */
+ clickhouse_settings?: ClickHouseSettings
+ log?: {
+ /** A class to instantiate a custom logger implementation.
+ * Default: {@link DefaultLogger} */
+ LoggerClass?: new () => Logger
+ /** Default: OFF */
+ level?: ClickHouseLogLevel
+ }
+ session_id?: string
+}
+
+export type BaseClickHouseClientConfigOptions = Omit<
+ ClickHouseClientConfigOptions,
+ 'impl'
+>
+
+export interface BaseQueryParams {
+ /** ClickHouse's settings that can be applied on query level. */
+ clickhouse_settings?: ClickHouseSettings
+ /** Parameters for query binding. https://clickhouse.com/docs/en/interfaces/http/#cli-queries-with-parameters */
+ query_params?: Record
+ /** AbortSignal instance to cancel a request in progress. */
+ abort_signal?: AbortSignal
+ /** A specific `query_id` that will be sent with this request.
+ * If it is not set, a random identifier will be generated automatically by the client. */
+ query_id?: string
+ session_id?: string
+}
+
+export interface QueryParams extends BaseQueryParams {
+ /** Statement to execute. */
+ query: string
+ /** Format of the resulting dataset. */
+ format?: DataFormat
+}
+
+export interface ExecParams extends BaseQueryParams {
+ /** Statement to execute. */
+ query: string
+}
+
+export type CommandParams = ExecParams
+export interface CommandResult {
+ query_id: string
+}
+
+export type InsertResult = ConnInsertResult
+export type ExecResult = ConnQueryResult
+
+export type InsertValues =
+ | ReadonlyArray
+ | Stream
+ | InputJSON
+ | InputJSONObjectEachRow
+
+export interface InsertParams
+ extends BaseQueryParams {
+ /** Name of a table to insert into. */
+ table: string
+ /** A dataset to insert. */
+ values: InsertValues
+ /** Format of the dataset to insert. */
+ format?: DataFormat
+}
+
+function validateConnectionParams({ url }: ConnectionParams): void {
+ if (url.protocol !== 'http:' && url.protocol !== 'https:') {
+ throw new Error(
+ `Only http(s) protocol is supported, but given: [${url.protocol}]`
+ )
+ }
+}
+
+function createUrl(host: string): URL {
+ try {
+ return new URL(host)
+ } catch (err) {
+ throw new Error('Configuration parameter "host" contains malformed url.')
+ }
+}
+
+function getConnectionParams(
+ config: ClickHouseClientConfigOptions
+): ConnectionParams {
+ return {
+ application_id: config.application,
+ url: createUrl(config.host ?? 'http://localhost:8123'),
+ request_timeout: config.request_timeout ?? 300_000,
+ max_open_connections: config.max_open_connections ?? Infinity,
+ compression: {
+ decompress_response: config.compression?.response ?? true,
+ compress_request: config.compression?.request ?? false,
+ },
+ username: config.username ?? 'default',
+ password: config.password ?? '',
+ database: config.database ?? 'default',
+ clickhouse_settings: config.clickhouse_settings ?? {},
+ logWriter: new LogWriter(
+ config?.log?.LoggerClass
+ ? new config.log.LoggerClass()
+ : new DefaultLogger(),
+ config.log?.level
+ ),
+ }
+}
+
+/**
+ * Transport-agnostic ClickHouse client. All environment-specific behavior
+ * (connection, result sets, value encoding, stream closing) is injected via
+ * the `impl` hooks of {@link ClickHouseClientConfigOptions}, so the same
+ * class backs both the Node.js and the browser implementations.
+ */
+export class ClickHouseClient {
+  private readonly connectionParams: ConnectionParams
+  private readonly connection: Connection
+  private readonly makeResultSet: MakeResultSet
+  private readonly valuesEncoder: ValuesEncoder
+  private readonly closeStream: CloseStream
+  private readonly sessionId?: string
+
+  constructor(config: ClickHouseClientConfigOptions) {
+    // Defaults are resolved first, then the resolved params are validated.
+    this.connectionParams = getConnectionParams(config)
+    this.sessionId = config.session_id
+    validateConnectionParams(this.connectionParams)
+    this.connection = config.impl.make_connection(this.connectionParams)
+    this.makeResultSet = config.impl.make_result_set
+    this.valuesEncoder = config.impl.values_encoder
+    this.closeStream = config.impl.close_stream
+  }
+
+  // Merges client-level and per-query settings; per-query values win
+  // because they are spread last.
+  // NOTE(review): params.session_id is ignored here — the client-level
+  // session_id always takes precedence. Confirm this is intended, since
+  // BaseQueryParams exposes a session_id field.
+  private getQueryParams(params: BaseQueryParams) {
+    return {
+      clickhouse_settings: {
+        ...this.connectionParams.clickhouse_settings,
+        ...params.clickhouse_settings,
+      },
+      query_params: params.query_params,
+      abort_signal: params.abort_signal,
+      query_id: params.query_id,
+      session_id: this.sessionId,
+    }
+  }
+
+  /**
+   * Used for most statements that can have a response, such as SELECT.
+   * FORMAT clause should be specified separately via {@link QueryParams.format} (default is JSON)
+   * Consider using {@link ClickHouseClient.insert} for data insertion,
+   * or {@link ClickHouseClient.command} for DDLs.
+   */
+  async query(params: QueryParams): Promise> {
+    const format = params.format ?? 'JSON'
+    // The FORMAT clause is appended here; params.query must not contain one.
+    const query = formatQuery(params.query, format)
+    const { stream, query_id } = await this.connection.query({
+      query,
+      ...this.getQueryParams(params),
+    })
+    return this.makeResultSet(stream, format, query_id)
+  }
+
+  /**
+   * It should be used for statements that do not have any output,
+   * when the format clause is not applicable, or when you are not interested in the response at all.
+   * Response stream is destroyed immediately as we do not expect useful information there.
+   * Examples of such statements are DDLs or custom inserts.
+   * If you are interested in the response data, consider using {@link ClickHouseClient.exec}
+   */
+  async command(params: CommandParams): Promise {
+    const { stream, query_id } = await this.exec(params)
+    await this.closeStream(stream)
+    return { query_id }
+  }
+
+  /**
+   * Similar to {@link ClickHouseClient.command}, but for the cases where the output is expected,
+   * but format clause is not applicable. The caller of this method is expected to consume the stream,
+   * otherwise, the request will eventually be timed out.
+   */
+  async exec(params: ExecParams): Promise> {
+    // No FORMAT clause here; only trailing semicolons are stripped.
+    const query = removeTrailingSemi(params.query.trim())
+    return await this.connection.exec({
+      query,
+      ...this.getQueryParams(params),
+    })
+  }
+
+  /**
+   * The primary method for data insertion. It is recommended to avoid arrays in case of large inserts
+   * to reduce application memory consumption and consider streaming for most of such use cases.
+   * As the insert operation does not provide any output, the response stream is immediately destroyed.
+   * In case of a custom insert operation, such as, for example, INSERT FROM SELECT,
+   * consider using {@link ClickHouseClient.command}, passing the entire raw query there (including FORMAT clause).
+   */
+  async insert(params: InsertParams): Promise {
+    const format = params.format || 'JSONCompactEachRow'
+
+    // Values are validated against the format before the request is built.
+    this.valuesEncoder.validateInsertValues(params.values, format)
+    const query = `INSERT INTO ${params.table.trim()} FORMAT ${format}`
+
+    return await this.connection.insert({
+      query,
+      values: this.valuesEncoder.encodeValues(params.values, format),
+      ...this.getQueryParams(params),
+    })
+  }
+
+  /**
+   * Health-check request. Can throw an error if the connection is refused.
+   */
+  async ping(): Promise {
+    return await this.connection.ping()
+  }
+
+  /**
+   * Shuts down the underlying connection.
+   * This method should ideally be called only once per application lifecycle,
+   * for example, during the graceful shutdown phase.
+   */
+  async close(): Promise {
+    return await this.connection.close()
+  }
+}
+
+function formatQuery(query: string, format: DataFormat): string {
+ query = query.trim()
+ query = removeTrailingSemi(query)
+ return query + ' \nFORMAT ' + format
+}
+
+function removeTrailingSemi(query: string) {
+ let lastNonSemiIdx = query.length
+ for (let i = lastNonSemiIdx; i > 0; i--) {
+ if (query[i - 1] !== ';') {
+ lastNonSemiIdx = i
+ break
+ }
+ }
+ if (lastNonSemiIdx !== query.length) {
+ return query.slice(0, lastNonSemiIdx)
+ }
+ return query
+}
diff --git a/packages/client-common/src/connection.ts b/packages/client-common/src/connection.ts
new file mode 100644
index 00000000..4449b80b
--- /dev/null
+++ b/packages/client-common/src/connection.ts
@@ -0,0 +1,51 @@
+import type { LogWriter } from './logger'
+import type { ClickHouseSettings } from './settings'
+
+// NOTE(review): generic type parameters appear to have been stripped from
+// this patch text (e.g. `Promise>`, bare `Stream`, bare `Record`);
+// restore them before applying.
+
+/** Fully-resolved configuration shared by every connection implementation. */
+export interface ConnectionParams {
+  url: URL
+  request_timeout: number
+  max_open_connections: number
+  compression: {
+    decompress_response: boolean
+    compress_request: boolean
+  }
+  username: string
+  password: string
+  database: string
+  clickhouse_settings: ClickHouseSettings
+  logWriter: LogWriter
+  application_id?: string
+}
+
+/** Per-request parameters common to query/exec/insert calls. */
+export interface ConnBaseQueryParams {
+  query: string
+  clickhouse_settings?: ClickHouseSettings
+  query_params?: Record
+  abort_signal?: AbortSignal
+  session_id?: string
+  query_id?: string
+}
+
+/** Insert requests additionally carry the (possibly streamed) values payload. */
+export interface ConnInsertParams extends ConnBaseQueryParams {
+  values: string | Stream
+}
+
+/** Every operation result reports the server-side query id. */
+export interface ConnBaseResult {
+  query_id: string
+}
+
+/** Query/exec results expose the raw response stream to the caller. */
+export interface ConnQueryResult extends ConnBaseResult {
+  stream: Stream
+  query_id: string
+}
+
+export type ConnInsertResult = ConnBaseResult
+export type ConnExecResult = ConnQueryResult
+
+/** Contract implemented by the Node.js and browser HTTP connections. */
+export interface Connection {
+  ping(): Promise
+  close(): Promise
+  query(params: ConnBaseQueryParams): Promise>
+  exec(params: ConnBaseQueryParams): Promise>
+  insert(params: ConnInsertParams): Promise
+}
diff --git a/src/data_formatter/format_query_params.ts b/packages/client-common/src/data_formatter/format_query_params.ts
similarity index 100%
rename from src/data_formatter/format_query_params.ts
rename to packages/client-common/src/data_formatter/format_query_params.ts
diff --git a/src/data_formatter/format_query_settings.ts b/packages/client-common/src/data_formatter/format_query_settings.ts
similarity index 100%
rename from src/data_formatter/format_query_settings.ts
rename to packages/client-common/src/data_formatter/format_query_settings.ts
diff --git a/src/data_formatter/formatter.ts b/packages/client-common/src/data_formatter/formatter.ts
similarity index 100%
rename from src/data_formatter/formatter.ts
rename to packages/client-common/src/data_formatter/formatter.ts
diff --git a/src/data_formatter/index.ts b/packages/client-common/src/data_formatter/index.ts
similarity index 100%
rename from src/data_formatter/index.ts
rename to packages/client-common/src/data_formatter/index.ts
diff --git a/src/error/index.ts b/packages/client-common/src/error/index.ts
similarity index 100%
rename from src/error/index.ts
rename to packages/client-common/src/error/index.ts
diff --git a/src/error/parse_error.ts b/packages/client-common/src/error/parse_error.ts
similarity index 75%
rename from src/error/parse_error.ts
rename to packages/client-common/src/error/parse_error.ts
index 28d07854..ad692702 100644
--- a/src/error/parse_error.ts
+++ b/packages/client-common/src/error/parse_error.ts
@@ -20,12 +20,14 @@ export class ClickHouseError extends Error {
}
}
-export function parseError(input: string): ClickHouseError | Error {
- const match = input.match(errorRe)
+export function parseError(input: string | Error): ClickHouseError | Error {
+ const inputIsError = input instanceof Error
+ const message = inputIsError ? input.message : input
+ const match = message.match(errorRe)
const groups = match?.groups as ParsedClickHouseError | undefined
if (groups) {
return new ClickHouseError(groups)
} else {
- return new Error(input)
+ return inputIsError ? input : new Error(input)
}
}
diff --git a/packages/client-common/src/index.ts b/packages/client-common/src/index.ts
new file mode 100644
index 00000000..b7392331
--- /dev/null
+++ b/packages/client-common/src/index.ts
@@ -0,0 +1,72 @@
+/** Should be re-exported by the implementation */
+export {
+ type BaseClickHouseClientConfigOptions,
+ type ClickHouseClientConfigOptions,
+ type BaseQueryParams,
+ type QueryParams,
+ type ExecParams,
+ type InsertParams,
+ type InsertValues,
+ ClickHouseClient,
+ type CommandParams,
+ type CommandResult,
+ type ExecResult,
+ type InsertResult,
+} from './client'
+export type { Row, BaseResultSet } from './result'
+export { type DataFormat } from './data_formatter'
+export { ClickHouseError } from './error'
+export {
+ ClickHouseLogLevel,
+ type ErrorLogParams,
+ type Logger,
+ type LogParams,
+} from './logger'
+export type {
+ ResponseJSON,
+ InputJSON,
+ InputJSONObjectEachRow,
+} from './clickhouse_types'
+export {
+ type ClickHouseSettings,
+ type MergeTreeSettings,
+ SettingsMap,
+} from './settings'
+
+/** For implementations usage only */
+export {
+ encodeJSON,
+ isSupportedRawFormat,
+ decode,
+ validateStreamFormat,
+} from './data_formatter'
+export {
+ type ValuesEncoder,
+ type MakeResultSet,
+ type MakeConnection,
+} from './client'
+export {
+ withCompressionHeaders,
+ isSuccessfulResponse,
+ toSearchParams,
+ transformUrl,
+ withHttpSettings,
+} from './utils'
+export { LogWriter, DefaultLogger } from './logger'
+export { parseError } from './error'
+export type {
+ Connection,
+ ConnectionParams,
+ ConnInsertResult,
+ ConnExecResult,
+ ConnQueryResult,
+ ConnBaseQueryParams,
+ ConnBaseResult,
+ ConnInsertParams,
+} from './connection'
+export {
+ type RawDataFormat,
+ type JSONDataFormat,
+ formatQuerySettings,
+ formatQueryParams,
+} from './data_formatter'
diff --git a/src/logger.ts b/packages/client-common/src/logger.ts
similarity index 69%
rename from src/logger.ts
rename to packages/client-common/src/logger.ts
index 3ceb4801..dbfa8090 100644
--- a/src/logger.ts
+++ b/packages/client-common/src/logger.ts
@@ -35,8 +35,8 @@ export class DefaultLogger implements Logger {
}
export class LogWriter {
private readonly logLevel: ClickHouseLogLevel
- constructor(private readonly logger: Logger) {
- this.logLevel = this.getClickHouseLogLevel()
+ constructor(private readonly logger: Logger, logLevel?: ClickHouseLogLevel) {
+ this.logLevel = logLevel ?? ClickHouseLogLevel.OFF
this.info({
module: 'Logger',
message: `Log level is set to ${ClickHouseLogLevel[this.logLevel]}`,
@@ -72,39 +72,9 @@ export class LogWriter {
this.logger.error(params)
}
}
-
- private getClickHouseLogLevel(): ClickHouseLogLevel {
- const logLevelFromEnv = process.env['CLICKHOUSE_LOG_LEVEL']
- if (!logLevelFromEnv) {
- return ClickHouseLogLevel.OFF
- }
- const logLevel = logLevelFromEnv.toLocaleLowerCase()
- if (logLevel === 'info') {
- return ClickHouseLogLevel.INFO
- }
- if (logLevel === 'warn') {
- return ClickHouseLogLevel.WARN
- }
- if (logLevel === 'error') {
- return ClickHouseLogLevel.ERROR
- }
- if (logLevel === 'debug') {
- return ClickHouseLogLevel.DEBUG
- }
- if (logLevel === 'trace') {
- return ClickHouseLogLevel.TRACE
- }
- if (logLevel === 'off') {
- return ClickHouseLogLevel.OFF
- }
- console.error(
- `Unknown CLICKHOUSE_LOG_LEVEL value: ${logLevelFromEnv}, logs are disabled`
- )
- return ClickHouseLogLevel.OFF
- }
}
-enum ClickHouseLogLevel {
+export enum ClickHouseLogLevel {
TRACE = 0, // unused at the moment
DEBUG = 1,
INFO = 2,
diff --git a/packages/client-common/src/result.ts b/packages/client-common/src/result.ts
new file mode 100644
index 00000000..a86b1f9b
--- /dev/null
+++ b/packages/client-common/src/result.ts
@@ -0,0 +1,52 @@
+export interface Row {
+ /** A string representation of a row. */
+ text: string
+
+ /**
+ * Returns a JSON representation of a row.
+ * The method will throw if called on a response in JSON incompatible format.
+ * It is safe to call this method multiple times.
+ */
+ json<T>(): T
+}
+
+export interface BaseResultSet<Stream> {
+ /**
+ * The method waits for all the rows to be fully loaded
+ * and returns the result as a string.
+ *
+ * The method should throw if the underlying stream was already consumed
+ * by calling the other methods.
+ */
+ text(): Promise<string>
+
+ /**
+ * The method waits for the all the rows to be fully loaded.
+ * When the response is received in full, it will be decoded to return JSON.
+ *
+ * The method should throw if the underlying stream was already consumed
+ * by calling the other methods.
+ */
+ json<T>(): Promise<T>
+
+ /**
+ * Returns a readable stream for responses that can be streamed
+ * (i.e. all except JSON).
+ *
+ * Every iteration provides an array of {@link Row} instances
+ * for {@link StreamableDataFormat} format.
+ *
+ * Should be called only once.
+ *
+ * The method should throw if called on a response in non-streamable format,
+ * and if the underlying stream was already consumed
+ * by calling the other methods.
+ */
+ stream(): Stream
+
+ /** Close the underlying stream. */
+ close(): void
+
+ /** ClickHouse server QueryID. */
+ query_id: string
+}
diff --git a/src/settings.ts b/packages/client-common/src/settings.ts
similarity index 100%
rename from src/settings.ts
rename to packages/client-common/src/settings.ts
diff --git a/packages/client-common/src/utils/connection.ts b/packages/client-common/src/utils/connection.ts
new file mode 100644
index 00000000..8fe7f2a7
--- /dev/null
+++ b/packages/client-common/src/utils/connection.ts
@@ -0,0 +1,38 @@
+import type { ClickHouseSettings } from '../settings'
+
+export type HttpHeader = number | string | string[]
+export type HttpHeaders = Record<string, HttpHeader>
+
+export function withCompressionHeaders({
+ headers,
+ compress_request,
+ decompress_response,
+}: {
+ headers: HttpHeaders
+ compress_request: boolean | undefined
+ decompress_response: boolean | undefined
+}): Record<string, string> {
+ return {
+ ...headers,
+ ...(decompress_response ? { 'Accept-Encoding': 'gzip' } : {}),
+ ...(compress_request ? { 'Content-Encoding': 'gzip' } : {}),
+ }
+}
+
+export function withHttpSettings(
+ clickhouse_settings?: ClickHouseSettings,
+ compression?: boolean
+): ClickHouseSettings {
+ return {
+ ...(compression
+ ? {
+ enable_http_compression: 1,
+ }
+ : {}),
+ ...clickhouse_settings,
+ }
+}
+
+export function isSuccessfulResponse(statusCode?: number): boolean {
+ return Boolean(statusCode && 200 <= statusCode && statusCode < 300)
+}
diff --git a/packages/client-common/src/utils/index.ts b/packages/client-common/src/utils/index.ts
new file mode 100644
index 00000000..8793b362
--- /dev/null
+++ b/packages/client-common/src/utils/index.ts
@@ -0,0 +1,3 @@
+export * from './connection'
+export * from './string'
+export * from './url'
diff --git a/src/utils/string.ts b/packages/client-common/src/utils/string.ts
similarity index 76%
rename from src/utils/string.ts
rename to packages/client-common/src/utils/string.ts
index 5ee7e457..fd61e4d0 100644
--- a/src/utils/string.ts
+++ b/packages/client-common/src/utils/string.ts
@@ -1,4 +1,3 @@
-// string.replaceAll supported in nodejs v15+
export function replaceAll(
input: string,
replace_char: string,
diff --git a/src/connection/adapter/http_search_params.ts b/packages/client-common/src/utils/url.ts
similarity index 75%
rename from src/connection/adapter/http_search_params.ts
rename to packages/client-common/src/utils/url.ts
index ed913dba..53315569 100644
--- a/src/connection/adapter/http_search_params.ts
+++ b/packages/client-common/src/utils/url.ts
@@ -1,5 +1,27 @@
-import { formatQueryParams, formatQuerySettings } from '../../data_formatter/'
-import type { ClickHouseSettings } from '../../settings'
+import type { ClickHouseSettings } from '../settings'
+import { formatQueryParams, formatQuerySettings } from '../data_formatter'
+
+export function transformUrl({
+ url,
+ pathname,
+ searchParams,
+}: {
+ url: URL
+ pathname?: string
+ searchParams?: URLSearchParams
+}): URL {
+ const newUrl = new URL(url)
+
+ if (pathname) {
+ newUrl.pathname = pathname
+ }
+
+ if (searchParams) {
+ newUrl.search = searchParams?.toString()
+ }
+
+ return newUrl
+}
type ToSearchParamsOptions = {
database: string
diff --git a/packages/client-common/src/version.ts b/packages/client-common/src/version.ts
new file mode 100644
index 00000000..27b4abf4
--- /dev/null
+++ b/packages/client-common/src/version.ts
@@ -0,0 +1 @@
+export default '0.2.0-beta1'
diff --git a/packages/client-node/__tests__/integration/node_abort_request.test.ts b/packages/client-node/__tests__/integration/node_abort_request.test.ts
new file mode 100644
index 00000000..1a39fdfa
--- /dev/null
+++ b/packages/client-node/__tests__/integration/node_abort_request.test.ts
@@ -0,0 +1,189 @@
+import type { ClickHouseClient, Row } from '@clickhouse/client-common'
+import { createSimpleTable } from '@test/fixtures/simple_table'
+import { jsonValues } from '@test/fixtures/test_data'
+import { createTestClient, guid } from '@test/utils'
+import type Stream from 'stream'
+import { makeObjectStream } from '../utils/stream'
+
+describe('Node.js abort request streaming', () => {
+ let client: ClickHouseClient
+
+ beforeEach(() => {
+ client = createTestClient()
+ })
+
+ afterEach(async () => {
+ await client.close()
+ })
+
+ it('cancels a select query while reading response', async () => {
+ const controller = new AbortController()
+ const selectPromise = client
+ .query({
+ query: 'SELECT * from system.numbers',
+ format: 'JSONCompactEachRow',
+ abort_signal: controller.signal,
+ })
+ .then(async (rows) => {
+ const stream = rows.stream()
+ for await (const chunk of stream) {
+ const [[number]] = chunk.json()
+ // abort when reach number 3
+ if (number === '3') {
+ controller.abort()
+ }
+ }
+ })
+
+ // There is no assertion against an error message.
+ // A race condition on events might lead to
+ // Request Aborted or ERR_STREAM_PREMATURE_CLOSE errors.
+ await expectAsync(selectPromise).toBeRejectedWithError()
+ })
+
+ it('cancels a select query while reading response by closing response stream', async () => {
+ const selectPromise = client
+ .query({
+ query: 'SELECT * from system.numbers',
+ format: 'JSONCompactEachRow',
+ })
+ .then(async function (rows) {
+ const stream = rows.stream()
+ for await (const rows of stream) {
+ rows.forEach((row: Row) => {
+ const [[number]] = row.json<[[string]]>()
+ // abort when reach number 3
+ if (number === '3') {
+ stream.destroy()
+ }
+ })
+ }
+ })
+ // There was a breaking change in Node.js 18.x+ behavior
+ if (
+ process.version.startsWith('v18') ||
+ process.version.startsWith('v20')
+ ) {
+ // FIXME: add proper error message matching (does not work on Node.js 18/20)
+ await expectAsync(selectPromise).toBeRejectedWithError()
+ } else {
+ expect(await selectPromise).toEqual(undefined)
+ }
+ })
+
+ describe('insert', () => {
+ let tableName: string
+ beforeEach(async () => {
+ tableName = `abort_request_insert_test_${guid()}`
+ await createSimpleTable(client, tableName)
+ })
+
+ it('should cancel one insert while keeping the others', async () => {
+ function shouldAbort(i: number) {
+ // we will cancel the request
+ // that should've inserted a value at index 3
+ return i === 3
+ }
+
+ const controller = new AbortController()
+ const streams: Stream.Readable[] = Array(jsonValues.length)
+ const insertStreamPromises = Promise.all(
+ jsonValues.map((value, i) => {
+ const stream = makeObjectStream()
+ streams[i] = stream
+ stream.push(value)
+ const insertPromise = client.insert({
+ values: stream,
+ format: 'JSONEachRow',
+ table: tableName,
+ abort_signal: shouldAbort(i) ? controller.signal : undefined,
+ })
+ if (shouldAbort(i)) {
+ return insertPromise.catch(() => {
+ // ignored
+ })
+ }
+ return insertPromise
+ })
+ )
+
+ setTimeout(() => {
+ streams.forEach((stream, i) => {
+ if (shouldAbort(i)) {
+ controller.abort()
+ }
+ stream.push(null)
+ })
+ }, 100)
+
+ await insertStreamPromises
+
+ const result = await client
+ .query({
+ query: `SELECT * FROM ${tableName} ORDER BY id ASC`,
+ format: 'JSONEachRow',
+ })
+ .then((r) => r.json())
+
+ expect(result).toEqual([
+ jsonValues[0],
+ jsonValues[1],
+ jsonValues[2],
+ jsonValues[4],
+ ])
+ })
+
+ it('cancels an insert query before it is sent', async () => {
+ const controller = new AbortController()
+ const stream = makeObjectStream()
+ const insertPromise = client.insert({
+ table: tableName,
+ values: stream,
+ abort_signal: controller.signal,
+ })
+ controller.abort()
+
+ await expectAsync(insertPromise).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringMatching('The user aborted a request'),
+ })
+ )
+ })
+
+ it('cancels an insert query before it is sent by closing a stream', async () => {
+ const stream = makeObjectStream()
+ stream.push(null)
+
+ expect(
+ await client.insert({
+ table: tableName,
+ values: stream,
+ })
+ ).toEqual(
+ jasmine.objectContaining({
+ query_id: jasmine.any(String),
+ })
+ )
+ })
+
+ it('cancels an insert query after it is sent', async () => {
+ const controller = new AbortController()
+ const stream = makeObjectStream()
+ const insertPromise = client.insert({
+ table: tableName,
+ values: stream,
+ abort_signal: controller.signal,
+ })
+
+ setTimeout(() => {
+ controller.abort()
+ }, 50)
+
+ await expectAsync(insertPromise).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringMatching('The user aborted a request'),
+ })
+ )
+ })
+ })
+})
diff --git a/__tests__/integration/command.test.ts b/packages/client-node/__tests__/integration/node_command.test.ts
similarity index 81%
rename from __tests__/integration/command.test.ts
rename to packages/client-node/__tests__/integration/node_command.test.ts
index e339df2c..4a66b297 100644
--- a/__tests__/integration/command.test.ts
+++ b/packages/client-node/__tests__/integration/node_command.test.ts
@@ -1,5 +1,5 @@
-import { createTestClient } from '../utils'
-import type { ClickHouseClient } from '../../src/client'
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createTestClient } from '@test/utils'
/**
* {@link ClickHouseClient.command} re-introduction is the result of
@@ -8,7 +8,7 @@ import type { ClickHouseClient } from '../../src/client'
*
* This test makes sure that the consequent requests are not blocked by command calls
*/
-describe('command', () => {
+describe('Node.js command', () => {
let client: ClickHouseClient
beforeEach(() => {
client = createTestClient({
@@ -32,5 +32,6 @@ describe('command', () => {
await command()
await command() // if previous call holds the socket, the test will time out
clearTimeout(timeout)
+ expect(1).toEqual(1) // Jasmine needs at least 1 assertion
})
})
diff --git a/packages/client-node/__tests__/integration/node_errors_parsing.test.ts b/packages/client-node/__tests__/integration/node_errors_parsing.test.ts
new file mode 100644
index 00000000..02992031
--- /dev/null
+++ b/packages/client-node/__tests__/integration/node_errors_parsing.test.ts
@@ -0,0 +1,18 @@
+import { createClient } from '../../src'
+
+describe('Node.js errors parsing', () => {
+ it('should return an error when URL is unreachable', async () => {
+ const client = createClient({
+ host: 'http://localhost:1111',
+ })
+ await expectAsync(
+ client.query({
+ query: 'SELECT * FROM system.numbers LIMIT 3',
+ })
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ code: 'ECONNREFUSED',
+ })
+ )
+ })
+})
diff --git a/packages/client-node/__tests__/integration/node_exec.test.ts b/packages/client-node/__tests__/integration/node_exec.test.ts
new file mode 100644
index 00000000..9827594d
--- /dev/null
+++ b/packages/client-node/__tests__/integration/node_exec.test.ts
@@ -0,0 +1,48 @@
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createTestClient } from '@test/utils'
+import type Stream from 'stream'
+import { getAsText } from '../../src/utils'
+
+describe('Node.js exec result streaming', () => {
+ let client: ClickHouseClient
+ beforeEach(() => {
+ client = createTestClient()
+ })
+ afterEach(async () => {
+ await client.close()
+ })
+
+ it('should send a parametrized query', async () => {
+ const result = await client.exec({
+ query: 'SELECT plus({val1: Int32}, {val2: Int32})',
+ query_params: {
+ val1: 10,
+ val2: 20,
+ },
+ })
+ expect(await getAsText(result.stream)).toEqual('30\n')
+ })
+
+ describe('trailing semi', () => {
+ it('should allow commands with semi in select clause', async () => {
+ const result = await client.exec({
+ query: `SELECT ';' FORMAT CSV`,
+ })
+ expect(await getAsText(result.stream)).toEqual('";"\n')
+ })
+
+ it('should allow commands with trailing semi', async () => {
+ const result = await client.exec({
+ query: 'EXISTS system.databases;',
+ })
+ expect(await getAsText(result.stream)).toEqual('1\n')
+ })
+
+ it('should allow commands with multiple trailing semi', async () => {
+ const result = await client.exec({
+ query: 'EXISTS system.foobar;;;;;;',
+ })
+ expect(await getAsText(result.stream)).toEqual('0\n')
+ })
+ })
+})
diff --git a/packages/client-node/__tests__/integration/node_insert.test.ts b/packages/client-node/__tests__/integration/node_insert.test.ts
new file mode 100644
index 00000000..211d1a47
--- /dev/null
+++ b/packages/client-node/__tests__/integration/node_insert.test.ts
@@ -0,0 +1,35 @@
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createSimpleTable } from '@test/fixtures/simple_table'
+import { createTestClient, guid } from '@test/utils'
+import Stream from 'stream'
+
+describe('Node.js insert', () => {
+ let client: ClickHouseClient
+ let tableName: string
+
+ beforeEach(async () => {
+ client = await createTestClient()
+ tableName = `insert_test_${guid()}`
+ await createSimpleTable(client, tableName)
+ })
+ afterEach(async () => {
+ await client.close()
+ })
+ it('should provide error details about a dataset with an invalid type', async () => {
+ await expectAsync(
+ client.insert({
+ table: tableName,
+ values: Stream.Readable.from(['42,foobar,"[1,2]"'], {
+ objectMode: false,
+ }),
+ format: 'TabSeparated',
+ })
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Cannot parse input'),
+ code: '27',
+ type: 'CANNOT_PARSE_INPUT_ASSERTION_FAILED',
+ })
+ )
+ })
+})
diff --git a/__tests__/integration/keep_alive.test.ts b/packages/client-node/__tests__/integration/node_keep_alive.test.ts
similarity index 83%
rename from __tests__/integration/keep_alive.test.ts
rename to packages/client-node/__tests__/integration/node_keep_alive.test.ts
index 77fe551a..a7de9acb 100644
--- a/__tests__/integration/keep_alive.test.ts
+++ b/packages/client-node/__tests__/integration/node_keep_alive.test.ts
@@ -1,10 +1,16 @@
-import type { ClickHouseClient } from '../../src/client'
-import { createTestClient, guid } from '../utils'
-import { sleep } from '../utils/retry'
-import { createSimpleTable } from './fixtures/simple_table'
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createSimpleTable } from '@test/fixtures/simple_table'
+import { createTestClient, guid, sleep } from '@test/utils'
+import type Stream from 'stream'
+import type { NodeClickHouseClientConfigOptions } from '../../src/client'
-describe('Node.js Keep Alive', () => {
- let client: ClickHouseClient
+/**
+ * FIXME: Works fine during the local runs, but it is flaky on GHA,
+ * maybe because of Jasmine test runner vs Jest and tests isolation
+ * To be revisited in https://github.com/ClickHouse/clickhouse-js/issues/177
+ */
+xdescribe('Node.js Keep Alive', () => {
+ let client: ClickHouseClient
const socketTTL = 2500 // seems to be a sweet spot for testing Keep-Alive socket hangups with 3s in config.xml
afterEach(async () => {
await client.close()
@@ -19,7 +25,7 @@ describe('Node.js Keep Alive', () => {
socket_ttl: socketTTL,
retry_on_expired_socket: true,
},
- })
+ } as NodeClickHouseClientConfigOptions)
expect(await query(0)).toEqual(1)
await sleep(socketTTL)
// this one will fail without retries
@@ -32,7 +38,7 @@ describe('Node.js Keep Alive', () => {
keep_alive: {
enabled: false,
},
- })
+ } as NodeClickHouseClientConfigOptions)
expect(await query(0)).toEqual(1)
await sleep(socketTTL)
// this one won't fail cause a new socket will be assigned
@@ -46,7 +52,7 @@ describe('Node.js Keep Alive', () => {
socket_ttl: socketTTL,
retry_on_expired_socket: true,
},
- })
+ } as NodeClickHouseClientConfigOptions)
const results = await Promise.all(
[...Array(4).keys()].map((n) => query(n))
@@ -81,7 +87,7 @@ describe('Node.js Keep Alive', () => {
socket_ttl: socketTTL,
retry_on_expired_socket: true,
},
- })
+ } as NodeClickHouseClientConfigOptions)
tableName = `keep_alive_single_connection_insert_${guid()}`
await createSimpleTable(client, tableName)
await insert(0)
@@ -106,7 +112,7 @@ describe('Node.js Keep Alive', () => {
socket_ttl: socketTTL,
retry_on_expired_socket: true,
},
- })
+ } as NodeClickHouseClientConfigOptions)
tableName = `keep_alive_multiple_connection_insert_${guid()}`
await createSimpleTable(client, tableName)
await Promise.all([...Array(3).keys()].map((n) => insert(n)))
diff --git a/packages/client-node/__tests__/integration/node_logger.ts b/packages/client-node/__tests__/integration/node_logger.ts
new file mode 100644
index 00000000..e469e828
--- /dev/null
+++ b/packages/client-node/__tests__/integration/node_logger.ts
@@ -0,0 +1,111 @@
+import type {
+ ClickHouseClient,
+ ErrorLogParams,
+ Logger,
+ LogParams,
+} from '@clickhouse/client-common'
+import { createTestClient } from '@test/utils'
+
+describe('config', () => {
+ let client: ClickHouseClient
+ let logs: {
+ message: string
+ err?: Error
+ args?: Record<string, unknown>
+ }[] = []
+
+ afterEach(async () => {
+ await client.close()
+ logs = []
+ })
+
+ describe('Logger support', () => {
+ const logLevelKey = 'CLICKHOUSE_LOG_LEVEL'
+ let defaultLogLevel: string | undefined
+ beforeEach(() => {
+ defaultLogLevel = process.env[logLevelKey]
+ })
+ afterEach(() => {
+ if (defaultLogLevel === undefined) {
+ delete process.env[logLevelKey]
+ } else {
+ process.env[logLevelKey] = defaultLogLevel
+ }
+ })
+
+ it('should use the default logger implementation', async () => {
+ process.env[logLevelKey] = 'DEBUG'
+ client = createTestClient()
+ const consoleSpy = spyOn(console, 'log')
+ await client.ping()
+ // logs[0] are about current log level
+ expect(consoleSpy).toHaveBeenCalledOnceWith(
+ jasmine.stringContaining('Got a response from ClickHouse'),
+ jasmine.objectContaining({
+ request_headers: {
+ 'user-agent': jasmine.any(String),
+ },
+ request_method: 'GET',
+ request_params: '',
+ request_path: '/ping',
+ response_headers: jasmine.objectContaining({
+ connection: jasmine.stringMatching(/Keep-Alive/i),
+ 'content-type': 'text/html; charset=UTF-8',
+ 'transfer-encoding': 'chunked',
+ }),
+ response_status: 200,
+ })
+ )
+ })
+
+ it('should provide a custom logger implementation', async () => {
+ process.env[logLevelKey] = 'DEBUG'
+ client = createTestClient({
+ log: {
+ LoggerClass: TestLogger,
+ },
+ })
+ await client.ping()
+ // logs[0] are about current log level
+ expect(logs[1]).toEqual(
+ jasmine.objectContaining({
+ message: 'Got a response from ClickHouse',
+ args: jasmine.objectContaining({
+ request_path: '/ping',
+ request_method: 'GET',
+ }),
+ })
+ )
+ })
+
+ it('should provide a custom logger implementation (but logs are disabled)', async () => {
+ process.env[logLevelKey] = 'OFF'
+ client = createTestClient({
+ log: {
+ // enable: false,
+ LoggerClass: TestLogger,
+ },
+ })
+ await client.ping()
+ expect(logs.length).toEqual(0)
+ })
+ })
+
+ class TestLogger implements Logger {
+ trace(params: LogParams) {
+ logs.push(params)
+ }
+ debug(params: LogParams) {
+ logs.push(params)
+ }
+ info(params: LogParams) {
+ logs.push(params)
+ }
+ warn(params: LogParams) {
+ logs.push(params)
+ }
+ error(params: ErrorLogParams) {
+ logs.push(params)
+ }
+ }
+})
diff --git a/packages/client-node/__tests__/integration/node_max_open_connections.test.ts b/packages/client-node/__tests__/integration/node_max_open_connections.test.ts
new file mode 100644
index 00000000..4f88d145
--- /dev/null
+++ b/packages/client-node/__tests__/integration/node_max_open_connections.test.ts
@@ -0,0 +1,93 @@
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createSimpleTable } from '@test/fixtures/simple_table'
+import { createTestClient, guid, sleep } from '@test/utils'
+
+describe('Node.js max_open_connections config', () => {
+ let client: ClickHouseClient
+ let results: number[] = []
+
+ afterEach(async () => {
+ await client.close()
+ results = []
+ })
+
+ function select(query: string) {
+ return client
+ .query({
+ query,
+ format: 'JSONEachRow',
+ })
+ .then((r) => r.json<[{ x: number }]>())
+ .then(([{ x }]) => results.push(x))
+ }
+
+ it('should use only one connection', async () => {
+ client = createTestClient({
+ max_open_connections: 1,
+ })
+ void select('SELECT 1 AS x, sleep(0.3)')
+ void select('SELECT 2 AS x, sleep(0.3)')
+ while (results.length !== 1) {
+ await sleep(100)
+ }
+ expect(results).toEqual([1])
+ while (results.length === 1) {
+ await sleep(100)
+ }
+ expect(results.sort()).toEqual([1, 2])
+ })
+
+ it('should use only one connection for insert', async () => {
+ const tableName = `node_connections_single_connection_insert_${guid()}`
+ client = createTestClient({
+ max_open_connections: 1,
+ request_timeout: 3000,
+ })
+ await createSimpleTable(client, tableName)
+
+ const timeout = setTimeout(() => {
+ throw new Error('Timeout was triggered')
+ }, 3000).unref()
+
+ const value1 = { id: '42', name: 'hello', sku: [0, 1] }
+ const value2 = { id: '43', name: 'hello', sku: [0, 1] }
+ function insert(value: object) {
+ return client.insert({
+ table: tableName,
+ values: [value],
+ format: 'JSONEachRow',
+ })
+ }
+ await insert(value1)
+ await insert(value2) // if previous call holds the socket, the test will time out
+ clearTimeout(timeout)
+
+ const result = await client.query({
+ query: `SELECT * FROM ${tableName}`,
+ format: 'JSONEachRow',
+ })
+
+ const json = await result.json()
+ expect(json).toContain(value1)
+ expect(json).toContain(value2)
+ expect(json.length).toEqual(2)
+ })
+
+ it('should use several connections', async () => {
+ client = createTestClient({
+ max_open_connections: 2,
+ })
+ void select('SELECT 1 AS x, sleep(0.3)')
+ void select('SELECT 2 AS x, sleep(0.3)')
+ void select('SELECT 3 AS x, sleep(0.3)')
+ void select('SELECT 4 AS x, sleep(0.3)')
+ while (results.length < 2) {
+ await sleep(100)
+ }
+ expect(results.sort()).toEqual([1, 2])
+ while (results.length < 4) {
+ await sleep(100)
+ }
+ expect(results.sort()).toEqual([1, 2, 3, 4])
+ })
+})
diff --git a/packages/client-node/__tests__/integration/node_multiple_clients.test.ts b/packages/client-node/__tests__/integration/node_multiple_clients.test.ts
new file mode 100644
index 00000000..0967b735
--- /dev/null
+++ b/packages/client-node/__tests__/integration/node_multiple_clients.test.ts
@@ -0,0 +1,60 @@
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createSimpleTable } from '@test/fixtures/simple_table'
+import { createTestClient, guid } from '@test/utils'
+import Stream from 'stream'
+
+const CLIENTS_COUNT = 5
+
+describe('Node.js multiple clients', () => {
+ const clients: ClickHouseClient[] = Array(CLIENTS_COUNT)
+
+ beforeEach(() => {
+ for (let i = 0; i < CLIENTS_COUNT; i++) {
+ clients[i] = createTestClient()
+ }
+ })
+
+ afterEach(async () => {
+ for (const c of clients) {
+ await c.close()
+ }
+ })
+
+ const names = ['foo', 'bar', 'baz', 'qaz', 'qux']
+
+ function getValue(i: number) {
+ return {
+ id: i,
+ name: names[i],
+ sku: [i, i + 1],
+ }
+ }
+
+ const expected = [
+ { id: '0', name: 'foo', sku: [0, 1] },
+ { id: '1', name: 'bar', sku: [1, 2] },
+ { id: '2', name: 'baz', sku: [2, 3] },
+ { id: '3', name: 'qaz', sku: [3, 4] },
+ { id: '4', name: 'qux', sku: [4, 5] },
+ ]
+
+ it('should be able to send parallel inserts (streams)', async () => {
+ const id = guid()
+ const tableName = `multiple_clients_insert_streams_test__${id}`
+ await createSimpleTable(clients[0], tableName)
+ await Promise.all(
+ clients.map((client, i) =>
+ client.insert({
+ table: tableName,
+ values: Stream.Readable.from([getValue(i)]),
+ format: 'JSONEachRow',
+ })
+ )
+ )
+ const result = await clients[0].query({
+ query: `SELECT * FROM ${tableName} ORDER BY id ASC`,
+ format: 'JSONEachRow',
+ })
+ expect(await result.json()).toEqual(expected)
+ })
+})
diff --git a/packages/client-node/__tests__/integration/node_ping.test.ts b/packages/client-node/__tests__/integration/node_ping.test.ts
new file mode 100644
index 00000000..b51facd2
--- /dev/null
+++ b/packages/client-node/__tests__/integration/node_ping.test.ts
@@ -0,0 +1,18 @@
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createTestClient } from '@test/utils'
+
+describe('Node.js ping', () => {
+ let client: ClickHouseClient
+ afterEach(async () => {
+ await client.close()
+ })
+ it('does not swallow a client error', async () => {
+ client = createTestClient({
+ host: 'http://localhost:3333',
+ })
+
+ await expectAsync(client.ping()).toBeRejectedWith(
+ jasmine.objectContaining({ code: 'ECONNREFUSED' })
+ )
+ })
+})
diff --git a/packages/client-node/__tests__/integration/node_select_streaming.test.ts b/packages/client-node/__tests__/integration/node_select_streaming.test.ts
new file mode 100644
index 00000000..bfc33533
--- /dev/null
+++ b/packages/client-node/__tests__/integration/node_select_streaming.test.ts
@@ -0,0 +1,254 @@
+import type { ClickHouseClient, Row } from '@clickhouse/client-common'
+import { createTestClient } from '@test/utils'
+import type Stream from 'stream'
+
+describe('Node.js SELECT streaming', () => {
+ let client: ClickHouseClient
+ afterEach(async () => {
+ await client.close()
+ })
+ beforeEach(async () => {
+ client = createTestClient()
+ })
+
+ describe('consume the response only once', () => {
+ async function assertAlreadyConsumed$<T>(fn: () => Promise<T>) {
+ await expectAsync(fn()).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: 'Stream has been already consumed',
+ })
+ )
+ }
+ function assertAlreadyConsumed<T>(fn: () => T) {
+ expect(fn).toThrow(
+ jasmine.objectContaining({
+ message: 'Stream has been already consumed',
+ })
+ )
+ }
+ it('should consume a JSON response only once', async () => {
+ const rs = await client.query({
+ query: 'SELECT * FROM system.numbers LIMIT 1',
+ format: 'JSONEachRow',
+ })
+ expect(await rs.json()).toEqual([{ number: '0' }])
+ // wrap in a func to avoid changing inner "this"
+ await assertAlreadyConsumed$(() => rs.json())
+ await assertAlreadyConsumed$(() => rs.text())
+ await assertAlreadyConsumed(() => rs.stream())
+ })
+
+ it('should consume a text response only once', async () => {
+ const rs = await client.query({
+ query: 'SELECT * FROM system.numbers LIMIT 1',
+ format: 'TabSeparated',
+ })
+ expect(await rs.text()).toEqual('0\n')
+ // wrap in a func to avoid changing inner "this"
+ await assertAlreadyConsumed$(() => rs.json())
+ await assertAlreadyConsumed$(() => rs.text())
+ await assertAlreadyConsumed(() => rs.stream())
+ })
+
+ it('should consume a stream response only once', async () => {
+ const rs = await client.query({
+ query: 'SELECT * FROM system.numbers LIMIT 1',
+ format: 'TabSeparated',
+ })
+ let result = ''
+ for await (const rows of rs.stream()) {
+ rows.forEach((row: Row) => {
+ result += row.text
+ })
+ }
+ expect(result).toEqual('0')
+ // wrap in a func to avoid changing inner "this"
+ await assertAlreadyConsumed$(() => rs.json())
+ await assertAlreadyConsumed$(() => rs.text())
+ await assertAlreadyConsumed(() => rs.stream())
+ })
+ })
+
+ describe('select result asStream()', () => {
+ it('throws an exception if format is not stream-able', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSON',
+ })
+ try {
+ await expectAsync((async () => result.stream())()).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('JSON format is not streamable'),
+ })
+ )
+ } finally {
+ result.close()
+ }
+ })
+
+ it('can pause response stream', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 10000',
+ format: 'CSV',
+ })
+
+ const stream = result.stream()
+
+ let last = ''
+ let i = 0
+ for await (const rows of stream) {
+ rows.forEach((row: Row) => {
+ last = row.text
+ i++
+ if (i % 1000 === 0) {
+ stream.pause()
+ setTimeout(() => stream.resume(), 100)
+ }
+ })
+ }
+ expect(last).toBe('9999')
+ })
+
+ describe('text()', () => {
+ it('returns stream of rows in CSV format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'CSV',
+ })
+
+ const rs = await rowsText(result.stream())
+ expect(rs).toEqual(['0', '1', '2', '3', '4'])
+ })
+
+ it('returns stream of rows in TabSeparated format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'TabSeparated',
+ })
+
+ const rs = await rowsText(result.stream())
+ expect(rs).toEqual(['0', '1', '2', '3', '4'])
+ })
+ })
+
+ describe('json()', () => {
+ it('returns stream of objects in JSONEachRow format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONEachRow',
+ })
+
+ const rs = await rowsValues(result.stream())
+ expect(rs).toEqual([
+ { number: '0' },
+ { number: '1' },
+ { number: '2' },
+ { number: '3' },
+ { number: '4' },
+ ])
+ })
+
+ it('returns stream of objects in JSONStringsEachRow format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONStringsEachRow',
+ })
+
+ const rs = await rowsValues(result.stream())
+ expect(rs).toEqual([
+ { number: '0' },
+ { number: '1' },
+ { number: '2' },
+ { number: '3' },
+ { number: '4' },
+ ])
+ })
+
+ it('returns stream of objects in JSONCompactEachRow format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONCompactEachRow',
+ })
+
+ const rs = await rowsValues(result.stream())
+ expect(rs).toEqual([['0'], ['1'], ['2'], ['3'], ['4']])
+ })
+
+ it('returns stream of objects in JSONCompactEachRowWithNames format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONCompactEachRowWithNames',
+ })
+
+ const rs = await rowsValues(result.stream())
+ expect(rs).toEqual([['number'], ['0'], ['1'], ['2'], ['3'], ['4']])
+ })
+
+ it('returns stream of objects in JSONCompactEachRowWithNamesAndTypes format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONCompactEachRowWithNamesAndTypes',
+ })
+
+ const rs = await rowsValues(result.stream())
+ expect(rs).toEqual([
+ ['number'],
+ ['UInt64'],
+ ['0'],
+ ['1'],
+ ['2'],
+ ['3'],
+ ['4'],
+ ])
+ })
+
+ it('returns stream of objects in JSONCompactStringsEachRowWithNames format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONCompactStringsEachRowWithNames',
+ })
+
+ const rs = await rowsValues(result.stream())
+ expect(rs).toEqual([['number'], ['0'], ['1'], ['2'], ['3'], ['4']])
+ })
+
+ it('returns stream of objects in JSONCompactStringsEachRowWithNamesAndTypes format', async () => {
+ const result = await client.query({
+ query: 'SELECT number FROM system.numbers LIMIT 5',
+ format: 'JSONCompactStringsEachRowWithNamesAndTypes',
+ })
+
+ const rs = await rowsValues(result.stream())
+ expect(rs).toEqual([
+ ['number'],
+ ['UInt64'],
+ ['0'],
+ ['1'],
+ ['2'],
+ ['3'],
+ ['4'],
+ ])
+ })
+ })
+ })
+})
+
+async function rowsValues(stream: Stream.Readable): Promise<any[]> {
+ const result: any[] = []
+ for await (const rows of stream) {
+ rows.forEach((row: Row) => {
+ result.push(row.json())
+ })
+ }
+ return result
+}
+
+async function rowsText(stream: Stream.Readable): Promise<string[]> {
+ const result: string[] = []
+ for await (const rows of stream) {
+ rows.forEach((row: Row) => {
+ result.push(row.text)
+ })
+ }
+ return result
+}
diff --git a/__tests__/integration/stream_json_formats.test.ts b/packages/client-node/__tests__/integration/node_stream_json_formats.test.ts
similarity index 92%
rename from __tests__/integration/stream_json_formats.test.ts
rename to packages/client-node/__tests__/integration/node_stream_json_formats.test.ts
index deacd4fb..a11fa251 100644
--- a/__tests__/integration/stream_json_formats.test.ts
+++ b/packages/client-node/__tests__/integration/node_stream_json_formats.test.ts
@@ -1,10 +1,11 @@
-import { type ClickHouseClient } from '../../src'
+import { type ClickHouseClient } from '@clickhouse/client-common'
+import { createSimpleTable } from '@test/fixtures/simple_table'
+import { assertJsonValues, jsonValues } from '@test/fixtures/test_data'
+import { createTestClient, guid } from '@test/utils'
import Stream from 'stream'
-import { createTestClient, guid, makeObjectStream } from '../utils'
-import { createSimpleTable } from './fixtures/simple_table'
-import { assertJsonValues, jsonValues } from './fixtures/test_data'
+import { makeObjectStream } from '../utils/stream'
-describe('stream JSON formats', () => {
+describe('Node.js stream JSON formats', () => {
let client: ClickHouseClient
let tableName: string
@@ -174,9 +175,9 @@ describe('stream JSON formats', () => {
values: stream,
format: 'JSONCompactEachRowWithNamesAndTypes',
})
- await expect(insertPromise).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringMatching(
+ await expectAsync(insertPromise).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringMatching(
`Type of 'name' must be String, not UInt64`
),
})
@@ -238,10 +239,12 @@ describe('stream JSON formats', () => {
},
})
- await client.insert({
- table: tableName,
- values: stream,
- })
+ await expectAsync(
+ client.insert({
+ table: tableName,
+ values: stream,
+ })
+ ).toBeResolved()
})
it('waits for stream of values to be closed', async () => {
@@ -291,15 +294,15 @@ describe('stream JSON formats', () => {
const stream = makeObjectStream()
stream.push({ id: 'baz', name: 'foo', sku: '[0,1]' })
stream.push(null)
- await expect(
+ await expectAsync(
client.insert({
table: tableName,
values: stream,
format: 'JSONEachRow',
})
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Cannot parse input'),
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Cannot parse input'),
})
)
})
diff --git a/__tests__/integration/stream_raw_formats.test.ts b/packages/client-node/__tests__/integration/node_stream_raw_formats.test.ts
similarity index 89%
rename from __tests__/integration/stream_raw_formats.test.ts
rename to packages/client-node/__tests__/integration/node_stream_raw_formats.test.ts
index d1e0b425..591410c8 100644
--- a/__tests__/integration/stream_raw_formats.test.ts
+++ b/packages/client-node/__tests__/integration/node_stream_raw_formats.test.ts
@@ -1,11 +1,15 @@
-import { createTestClient, guid, makeRawStream } from '../utils'
-import type { ClickHouseClient, ClickHouseSettings } from '../../src'
-import { createSimpleTable } from './fixtures/simple_table'
+import type {
+ ClickHouseClient,
+ ClickHouseSettings,
+ RawDataFormat,
+} from '@clickhouse/client-common'
+import { createSimpleTable } from '@test/fixtures/simple_table'
+import { assertJsonValues, jsonValues } from '@test/fixtures/test_data'
+import { createTestClient, guid } from '@test/utils'
import Stream from 'stream'
-import { assertJsonValues, jsonValues } from './fixtures/test_data'
-import type { RawDataFormat } from '../../src/data_formatter'
+import { makeRawStream } from '../utils/stream'
-describe('stream raw formats', () => {
+describe('Node.js stream raw formats', () => {
let client: ClickHouseClient
let tableName: string
@@ -25,15 +29,15 @@ describe('stream raw formats', () => {
objectMode: false,
}
)
- await expect(
+ await expectAsync(
client.insert({
table: tableName,
values: stream,
format: 'CSV',
})
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Cannot parse input'),
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Cannot parse input'),
})
)
})
@@ -95,15 +99,15 @@ describe('stream raw formats', () => {
const stream = Stream.Readable.from(`foobar\t42\n`, {
objectMode: false,
})
- await expect(
+ await expectAsync(
client.insert({
table: tableName,
values: stream,
format: 'TabSeparated',
})
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Cannot parse input'),
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Cannot parse input'),
})
)
})
@@ -199,15 +203,15 @@ describe('stream raw formats', () => {
objectMode: false,
}
)
- await expect(
+ await expectAsync(
client.insert({
table: tableName,
values: stream,
format: 'CSVWithNamesAndTypes',
})
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining(
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining(
`Type of 'name' must be String, not UInt64`
),
})
@@ -218,15 +222,15 @@ describe('stream raw formats', () => {
const stream = Stream.Readable.from(`"foobar","42",,\n`, {
objectMode: false,
})
- await expect(
+ await expectAsync(
client.insert({
table: tableName,
values: stream,
format: 'CSV',
})
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Cannot parse input'),
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Cannot parse input'),
})
)
})
@@ -313,16 +317,16 @@ describe('stream raw formats', () => {
const stream = Stream.Readable.from(`"foobar"^"42"^^\n`, {
objectMode: false,
})
- await expect(
+ await expectAsync(
client.insert({
table: tableName,
values: stream,
format: 'CustomSeparated',
clickhouse_settings,
})
- ).rejects.toEqual(
- expect.objectContaining({
- message: expect.stringContaining('Cannot parse input'),
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('Cannot parse input'),
})
)
})
@@ -350,9 +354,9 @@ describe('stream raw formats', () => {
})
})
- async function assertInsertedValues<T>(
+ async function assertInsertedValues(
format: RawDataFormat,
- expected: T,
+ expected: string,
clickhouse_settings?: ClickHouseSettings
) {
const result = await client.query({
diff --git a/__tests__/integration/streaming_e2e.test.ts b/packages/client-node/__tests__/integration/node_streaming_e2e.test.ts
similarity index 70%
rename from __tests__/integration/streaming_e2e.test.ts
rename to packages/client-node/__tests__/integration/node_streaming_e2e.test.ts
index 28ea9345..f9a2866c 100644
--- a/__tests__/integration/streaming_e2e.test.ts
+++ b/packages/client-node/__tests__/integration/node_streaming_e2e.test.ts
@@ -1,21 +1,14 @@
+import type { Row } from '@clickhouse/client-common'
+import { type ClickHouseClient } from '@clickhouse/client-common'
+import { createSimpleTable } from '@test/fixtures/simple_table'
+import { createTestClient, guid } from '@test/utils'
import Fs from 'fs'
-import Path from 'path'
-import Stream from 'stream'
import split from 'split2'
-import type { Row } from '../../src'
-import { type ClickHouseClient } from '../../src'
-import { createTestClient, guid } from '../utils'
-import { createSimpleTable } from './fixtures/simple_table'
-
-const expected = [
- ['0', 'a', [1, 2]],
- ['1', 'b', [3, 4]],
- ['2', 'c', [5, 6]],
-]
+import Stream from 'stream'
-describe('streaming e2e', () => {
+describe('Node.js streaming e2e', () => {
let tableName: string
- let client: ClickHouseClient
+ let client: ClickHouseClient<Stream.Readable>
beforeEach(async () => {
client = createTestClient()
@@ -27,13 +20,16 @@ describe('streaming e2e', () => {
await client.close()
})
+ const expected: Array<Array<string | number[]>> = [
+ ['0', 'a', [1, 2]],
+ ['1', 'b', [3, 4]],
+ ['2', 'c', [5, 6]],
+ ]
+
it('should stream a file', async () => {
// contains id as numbers in JSONCompactEachRow format ["0"]\n["1"]\n...
- const filename = Path.resolve(
- __dirname,
- './fixtures/streaming_e2e_data.ndjson'
- )
-
+ const filename =
+ 'packages/client-common/__tests__/fixtures/streaming_e2e_data.ndjson'
await client.insert({
table: tableName,
values: Fs.createReadStream(filename).pipe(
@@ -48,7 +44,7 @@ describe('streaming e2e', () => {
format: 'JSONCompactEachRow',
})
- const actual: string[] = []
+ const actual: unknown[] = []
for await (const rows of rs.stream()) {
rows.forEach((row: Row) => {
actual.push(row.json())
@@ -69,7 +65,7 @@ describe('streaming e2e', () => {
format: 'JSONCompactEachRow',
})
- const actual: string[] = []
+ const actual: unknown[] = []
for await (const rows of rs.stream()) {
rows.forEach((row: Row) => {
actual.push(row.json())
diff --git a/__tests__/integration/watch_stream.test.ts b/packages/client-node/__tests__/integration/node_watch_stream.test.ts
similarity index 77%
rename from __tests__/integration/watch_stream.test.ts
rename to packages/client-node/__tests__/integration/node_watch_stream.test.ts
index 0034a845..b5fa3d66 100644
--- a/__tests__/integration/watch_stream.test.ts
+++ b/packages/client-node/__tests__/integration/node_watch_stream.test.ts
@@ -1,16 +1,17 @@
-import type { Row } from '../../src'
-import { type ClickHouseClient } from '../../src'
+import type { Row } from '@clickhouse/client-common'
+import { type ClickHouseClient } from '@clickhouse/client-common'
import {
createTable,
createTestClient,
guid,
- retryOnFailure,
+ sleep,
TestEnv,
whenOnEnv,
-} from '../utils'
+} from '@test/utils'
+import type Stream from 'stream'
-describe('watch stream', () => {
- let client: ClickHouseClient
+describe('Node.js WATCH stream', () => {
+ let client: ClickHouseClient<Stream.Readable>
let viewName: string
beforeEach(async () => {
@@ -55,15 +56,8 @@ describe('watch stream', () => {
data.push(row.json())
})
})
- await retryOnFailure(
- async () => {
- expect(data).toEqual([{ version: '1' }, { version: '2' }])
- },
- {
- maxAttempts: 5,
- waitBetweenAttemptsMs: 1000,
- }
- )
+ await sleep(1500)
+ expect(data).toEqual([{ version: '1' }, { version: '2' }])
stream.destroy()
}
)
diff --git a/__tests__/tls/tls.test.ts b/packages/client-node/__tests__/tls/tls.test.ts
similarity index 79%
rename from __tests__/tls/tls.test.ts
rename to packages/client-node/__tests__/tls/tls.test.ts
index 1cb6c6e2..d677d4cd 100644
--- a/__tests__/tls/tls.test.ts
+++ b/packages/client-node/__tests__/tls/tls.test.ts
@@ -1,10 +1,11 @@
-import type { ClickHouseClient } from '../../src'
-import { createClient } from '../../src'
-import { createTestClient } from '../utils'
+import type { ClickHouseClient } from '@clickhouse/client-common'
+import { createTestClient } from '@test/utils'
import * as fs from 'fs'
+import type Stream from 'stream'
+import { createClient } from '../../src'
describe('TLS connection', () => {
- let client: ClickHouseClient
+ let client: ClickHouseClient<Stream.Readable>
beforeEach(() => {
client = createTestClient()
})
@@ -58,12 +59,18 @@ describe('TLS connection', () => {
key,
},
})
- await expect(
+ await expectAsync(
client.query({
query: 'SELECT number FROM system.numbers LIMIT 3',
format: 'CSV',
})
- ).rejects.toThrowError('Hostname/IP does not match certificate')
+ ).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining(
+ 'Hostname/IP does not match certificate'
+ ),
+ })
+ )
})
it('should fail with invalid certificates', async () => {
@@ -76,15 +83,12 @@ describe('TLS connection', () => {
key: fs.readFileSync(`${certsPath}/server.key`),
},
})
- const errorMessage =
- process.version.startsWith('v18') || process.version.startsWith('v20')
- ? 'unsupported certificate'
- : 'socket hang up'
- await expect(
+ // FIXME: add proper error message matching (does not work on Node.js 18/20)
+ await expectAsync(
client.query({
query: 'SELECT number FROM system.numbers LIMIT 3',
format: 'CSV',
})
- ).rejects.toThrowError(errorMessage)
+ ).toBeRejectedWithError()
})
})
diff --git a/packages/client-node/__tests__/unit/node_client.test.ts b/packages/client-node/__tests__/unit/node_client.test.ts
new file mode 100644
index 00000000..e032de2f
--- /dev/null
+++ b/packages/client-node/__tests__/unit/node_client.test.ts
@@ -0,0 +1,22 @@
+import type { BaseClickHouseClientConfigOptions } from '@clickhouse/client-common'
+import { createClient } from '../../src'
+
+describe('Node.js createClient', () => {
+ it('throws on incorrect "host" config value', () => {
+ expect(() => createClient({ host: 'foo' })).toThrowError(
+ 'Configuration parameter "host" contains malformed url.'
+ )
+ })
+
+ it('should not mutate provided configuration', async () => {
+ const config: BaseClickHouseClientConfigOptions = {
+ host: 'http://localhost',
+ }
+ createClient(config)
+ // initial configuration is not overridden by the defaults we assign
+ // when we transform the specified config object to the connection params
+ expect(config).toEqual({
+ host: 'http://localhost',
+ })
+ })
+})
diff --git a/packages/client-node/__tests__/unit/node_connection.test.ts b/packages/client-node/__tests__/unit/node_connection.test.ts
new file mode 100644
index 00000000..26471630
--- /dev/null
+++ b/packages/client-node/__tests__/unit/node_connection.test.ts
@@ -0,0 +1,41 @@
+import { createConnection } from '../../src'
+import {
+ type NodeConnectionParams,
+ NodeHttpConnection,
+ NodeHttpsConnection,
+} from '../../src/connection'
+
+describe('Node.js connection', () => {
+ const baseParams = {
+ keep_alive: {
+ enabled: true,
+ retry_on_expired_socket: false,
+ socket_ttl: 2500,
+ },
+ } as NodeConnectionParams
+
+ it('should create HTTP adapter', async () => {
+ const adapter = createConnection({
+ ...baseParams,
+ url: new URL('http://localhost'),
+ })
+ expect(adapter).toBeInstanceOf(NodeHttpConnection)
+ })
+
+ it('should create HTTPS adapter', async () => {
+ const adapter = createConnection({
+ ...baseParams,
+ url: new URL('https://localhost'),
+ })
+ expect(adapter).toBeInstanceOf(NodeHttpsConnection)
+ })
+
+ it('should throw if the supplied protocol is unknown', async () => {
+ expect(() =>
+ createConnection({
+ ...baseParams,
+ url: new URL('tcp://localhost'),
+ })
+ ).toThrowError('Only HTTP(s) adapters are supported')
+ })
+})
diff --git a/__tests__/unit/http_adapter.test.ts b/packages/client-node/__tests__/unit/node_http_adapter.test.ts
similarity index 71%
rename from __tests__/unit/http_adapter.test.ts
rename to packages/client-node/__tests__/unit/node_http_adapter.test.ts
index a23ae735..85041211 100644
--- a/__tests__/unit/http_adapter.test.ts
+++ b/packages/client-node/__tests__/unit/node_http_adapter.test.ts
@@ -1,24 +1,28 @@
+import type {
+ ConnectionParams,
+ ConnQueryResult,
+} from '@clickhouse/client-common'
+import { LogWriter } from '@clickhouse/client-common'
+import { guid, sleep, TestLogger, validateUUID } from '@test/utils'
+import { randomUUID } from '@test/utils/guid'
import type { ClientRequest } from 'http'
import Http from 'http'
import Stream from 'stream'
import Util from 'util'
import Zlib from 'zlib'
-import type { ConnectionParams, QueryResult } from '../../src/connection'
-import { HttpAdapter } from '../../src/connection/adapter'
-import { guid, retryOnFailure, TestLogger } from '../utils'
+import type { NodeConnectionParams } from '../../src/connection'
+import { NodeBaseConnection, NodeHttpConnection } from '../../src/connection'
import { getAsText } from '../../src/utils'
-import { LogWriter } from '../../src/logger'
-import * as uuid from 'uuid'
-import { v4 as uuid_v4 } from 'uuid'
-import { BaseHttpAdapter } from '../../src/connection/adapter/base_http_adapter'
-describe('HttpAdapter', () => {
+describe('Node.js HttpAdapter', () => {
const gzip = Util.promisify(Zlib.gzip)
- const httpRequestStub = jest.spyOn(Http, 'request')
describe('compression', () => {
describe('response decompression', () => {
it('hints ClickHouse server to send a gzip compressed response if compress_request: true', async () => {
+ const request = stubClientRequest()
+ const httpRequestStub = spyOn(Http, 'request').and.returnValue(request)
+
const adapter = buildHttpAdapter({
compression: {
decompress_response: true,
@@ -26,8 +30,6 @@ describe('HttpAdapter', () => {
},
})
- const request = stubRequest()
-
const selectPromise = adapter.query({
query: 'SELECT * FROM system.numbers LIMIT 5',
})
@@ -36,17 +38,21 @@ describe('HttpAdapter', () => {
await emitCompressedBody(request, responseBody)
await selectPromise
- assertStub('gzip')
+
+ expect(httpRequestStub).toHaveBeenCalledTimes(1)
+ const calledWith = httpRequestStub.calls.mostRecent().args[1]
+ expect(calledWith.headers!['Accept-Encoding']).toBe('gzip')
})
it('does not send a compression algorithm hint if compress_request: false', async () => {
+ const request = stubClientRequest()
+ const httpRequestStub = spyOn(Http, 'request').and.returnValue(request)
const adapter = buildHttpAdapter({
compression: {
decompress_response: false,
compress_request: false,
},
})
- const request = stubRequest()
const selectPromise = adapter.query({
query: 'SELECT * FROM system.numbers LIMIT 5',
@@ -62,17 +68,21 @@ describe('HttpAdapter', () => {
const queryResult = await selectPromise
await assertQueryResult(queryResult, responseBody)
- assertStub(undefined)
+
+ expect(httpRequestStub).toHaveBeenCalledTimes(1)
+ const calledWith = httpRequestStub.calls.mostRecent().args[1]
+ expect(calledWith.headers!['Accept-Encoding']).toBeUndefined()
})
it('uses request-specific settings over config settings', async () => {
+ const request = stubClientRequest()
+ const httpRequestStub = spyOn(Http, 'request').and.returnValue(request)
const adapter = buildHttpAdapter({
compression: {
decompress_response: false,
compress_request: false,
},
})
- const request = stubRequest()
const selectPromise = adapter.query({
query: 'SELECT * FROM system.numbers LIMIT 5',
@@ -86,17 +96,21 @@ describe('HttpAdapter', () => {
const queryResult = await selectPromise
await assertQueryResult(queryResult, responseBody)
- assertStub('gzip')
+
+ expect(httpRequestStub).toHaveBeenCalledTimes(1)
+ const calledWith = httpRequestStub.calls.mostRecent().args[1]
+ expect(calledWith.headers!['Accept-Encoding']).toBe('gzip')
})
it('decompresses a gzip response', async () => {
+ const request = stubClientRequest()
+ spyOn(Http, 'request').and.returnValue(request)
const adapter = buildHttpAdapter({
compression: {
decompress_response: true,
compress_request: false,
},
})
- const request = stubRequest()
const selectPromise = adapter.query({
query: 'SELECT * FROM system.numbers LIMIT 5',
@@ -110,13 +124,14 @@ describe('HttpAdapter', () => {
})
it('throws on an unexpected encoding', async () => {
+ const request = stubClientRequest()
+ spyOn(Http, 'request').and.returnValue(request)
const adapter = buildHttpAdapter({
compression: {
decompress_response: true,
compress_request: false,
},
})
- const request = stubRequest()
const selectPromise = adapter.query({
query: 'SELECT * FROM system.numbers LIMIT 5',
@@ -124,19 +139,22 @@ describe('HttpAdapter', () => {
await emitCompressedBody(request, 'abc', 'br')
- await expect(selectPromise).rejects.toMatchObject({
- message: 'Unexpected encoding: br',
- })
+ await expectAsync(selectPromise).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: 'Unexpected encoding: br',
+ })
+ )
})
it('provides decompression error to a stream consumer', async () => {
+ const request = stubClientRequest()
+ spyOn(Http, 'request').and.returnValue(request)
const adapter = buildHttpAdapter({
compression: {
decompress_response: true,
compress_request: false,
},
})
- const request = stubRequest()
const selectPromise = adapter.query({
query: 'SELECT * FROM system.numbers LIMIT 5',
@@ -153,22 +171,20 @@ describe('HttpAdapter', () => {
})
)
- await expect(async () => {
+ const readStream = async () => {
const { stream } = await selectPromise
for await (const chunk of stream) {
void chunk // stub
}
- }).rejects.toMatchObject({
- message: 'incorrect header check',
- code: 'Z_DATA_ERROR',
- })
- })
+ }
- function assertStub(encoding: string | undefined) {
- expect(httpRequestStub).toBeCalledTimes(1)
- const calledWith = httpRequestStub.mock.calls[0][1]
- expect(calledWith.headers!['Accept-Encoding']).toBe(encoding)
- }
+ await expectAsync(readStream()).toBeRejectedWith(
+ jasmine.objectContaining({
+ message: 'incorrect header check',
+ code: 'Z_DATA_ERROR',
+ })
+ )
+ })
})
describe('request compression', () => {
@@ -196,7 +212,7 @@ describe('HttpAdapter', () => {
},
}) as ClientRequest
- httpRequestStub.mockReturnValueOnce(request)
+ const httpRequestStub = spyOn(Http, 'request').and.returnValue(request)
void adapter.insert({
query: 'INSERT INTO insert_compression_table',
@@ -210,17 +226,12 @@ describe('HttpAdapter', () => {
},
})
- await retryOnFailure(async () => {
- expect(finalResult!.toString('utf8')).toEqual(values)
- })
- assertStub('gzip')
+ await sleep(100)
+ expect(finalResult!.toString('utf8')).toEqual(values)
+ expect(httpRequestStub).toHaveBeenCalledTimes(1)
+ const calledWith = httpRequestStub.calls.mostRecent().args[1]
+ expect(calledWith.headers!['Content-Encoding']).toBe('gzip')
})
-
- function assertStub(encoding: string | undefined) {
- expect(httpRequestStub).toBeCalledTimes(1)
- const calledWith = httpRequestStub.mock.calls[0][1]
- expect(calledWith.headers!['Content-Encoding']).toBe(encoding)
- }
})
async function emitCompressedBody(
@@ -246,7 +257,7 @@ describe('HttpAdapter', () => {
const myHttpAdapter = new MyTestHttpAdapter()
const headers = myHttpAdapter.getDefaultHeaders()
expect(headers['User-Agent']).toMatch(
- /^clickhouse-js\/[0-9\\.]+? \(lv:nodejs\/v[0-9\\.]+?; os:(?:linux|darwin|win32)\)$/
+ /^clickhouse-js\/[0-9\\.]+(?:-(?:alpha|beta)\d*)? \(lv:nodejs\/v[0-9\\.]+?; os:(?:linux|darwin|win32)\)$/
)
})
@@ -254,7 +265,7 @@ describe('HttpAdapter', () => {
const myHttpAdapter = new MyTestHttpAdapter('MyFancyApp')
const headers = myHttpAdapter.getDefaultHeaders()
expect(headers['User-Agent']).toMatch(
- /^MyFancyApp clickhouse-js\/[0-9\\.]+? \(lv:nodejs\/v[0-9\\.]+?; os:(?:linux|darwin|win32)\)$/
+ /^MyFancyApp clickhouse-js\/[0-9\\.]+(?:-(?:alpha|beta)\d*)? \(lv:nodejs\/v[0-9\\.]+?; os:(?:linux|darwin|win32)\)$/
)
})
})
@@ -273,7 +284,11 @@ describe('HttpAdapter', () => {
compress_request: false,
},
})
- const request1 = stubRequest()
+
+ const httpRequestStub = spyOn(Http, 'request')
+
+ const request1 = stubClientRequest()
+ httpRequestStub.and.returnValue(request1)
const selectPromise1 = adapter.query({
query: 'SELECT * FROM system.numbers LIMIT 5',
@@ -287,7 +302,9 @@ describe('HttpAdapter', () => {
)
const queryResult1 = await selectPromise1
- const request2 = stubRequest()
+ const request2 = stubClientRequest()
+ httpRequestStub.and.returnValue(request2)
+
const selectPromise2 = adapter.query({
query: 'SELECT * FROM system.numbers LIMIT 5',
})
@@ -304,10 +321,10 @@ describe('HttpAdapter', () => {
await assertQueryResult(queryResult2, responseBody2)
expect(queryResult1.query_id).not.toEqual(queryResult2.query_id)
- const url1 = httpRequestStub.mock.calls[0][0]
+ const url1 = httpRequestStub.calls.all()[0].args[0]
expect(url1.search).toContain(`&query_id=${queryResult1.query_id}`)
- const url2 = httpRequestStub.mock.calls[1][0]
+ const url2 = httpRequestStub.calls.all()[1].args[0]
expect(url2.search).toContain(`&query_id=${queryResult2.query_id}`)
})
@@ -318,7 +335,9 @@ describe('HttpAdapter', () => {
compress_request: false,
},
})
- const request = stubRequest()
+
+ const request = stubClientRequest()
+ const httpRequestStub = spyOn(Http, 'request').and.returnValue(request)
const query_id = guid()
const selectPromise = adapter.query({
@@ -335,8 +354,8 @@ describe('HttpAdapter', () => {
const { stream } = await selectPromise
expect(await getAsText(stream)).toBe(responseBody)
- expect(httpRequestStub).toBeCalledTimes(1)
- const [url] = httpRequestStub.mock.calls[0]
+ expect(httpRequestStub).toHaveBeenCalledTimes(1)
+ const [url] = httpRequestStub.calls.mostRecent().args
expect(url.search).toContain(`&query_id=${query_id}`)
})
@@ -347,7 +366,11 @@ describe('HttpAdapter', () => {
compress_request: false,
},
})
- const request1 = stubRequest()
+
+ const httpRequestStub = spyOn(Http, 'request')
+
+ const request1 = stubClientRequest()
+ httpRequestStub.and.returnValue(request1)
const execPromise1 = adapter.exec({
query: 'SELECT * FROM system.numbers LIMIT 5',
@@ -361,7 +384,9 @@ describe('HttpAdapter', () => {
)
const queryResult1 = await execPromise1
- const request2 = stubRequest()
+ const request2 = stubClientRequest()
+ httpRequestStub.and.returnValue(request2)
+
const execPromise2 = adapter.exec({
query: 'SELECT * FROM system.numbers LIMIT 5',
})
@@ -378,10 +403,10 @@ describe('HttpAdapter', () => {
await assertQueryResult(queryResult2, responseBody2)
expect(queryResult1.query_id).not.toEqual(queryResult2.query_id)
- const url1 = httpRequestStub.mock.calls[0][0]
+ const [url1] = httpRequestStub.calls.all()[0].args
expect(url1.search).toContain(`&query_id=${queryResult1.query_id}`)
- const url2 = httpRequestStub.mock.calls[1][0]
+ const [url2] = httpRequestStub.calls.all()[1].args
expect(url2.search).toContain(`&query_id=${queryResult2.query_id}`)
})
@@ -392,7 +417,10 @@ describe('HttpAdapter', () => {
compress_request: false,
},
})
- const request = stubRequest()
+
+ const httpRequestStub = spyOn(Http, 'request')
+ const request = stubClientRequest()
+ httpRequestStub.and.returnValue(request)
const query_id = guid()
const execPromise = adapter.exec({
@@ -409,8 +437,8 @@ describe('HttpAdapter', () => {
const { stream } = await execPromise
expect(await getAsText(stream)).toBe(responseBody)
- expect(httpRequestStub).toBeCalledTimes(1)
- const [url] = httpRequestStub.mock.calls[0]
+ expect(httpRequestStub).toHaveBeenCalledTimes(1)
+ const [url] = httpRequestStub.calls.mostRecent().args
expect(url.search).toContain(`&query_id=${query_id}`)
})
@@ -421,7 +449,11 @@ describe('HttpAdapter', () => {
compress_request: false,
},
})
- const request1 = stubRequest()
+
+ const httpRequestStub = spyOn(Http, 'request')
+
+ const request1 = stubClientRequest()
+ httpRequestStub.and.returnValue(request1)
const insertPromise1 = adapter.insert({
query: 'INSERT INTO default.foo VALUES (42)',
@@ -436,7 +468,9 @@ describe('HttpAdapter', () => {
)
const { query_id: queryId1 } = await insertPromise1
- const request2 = stubRequest()
+ const request2 = stubClientRequest()
+ httpRequestStub.and.returnValue(request2)
+
const insertPromise2 = adapter.insert({
query: 'INSERT INTO default.foo VALUES (42)',
values: 'foobar',
@@ -454,10 +488,10 @@ describe('HttpAdapter', () => {
assertQueryId(queryId2)
expect(queryId1).not.toEqual(queryId2)
- const url1 = httpRequestStub.mock.calls[0][0]
+ const [url1] = httpRequestStub.calls.all()[0].args
expect(url1.search).toContain(`&query_id=${queryId1}`)
- const url2 = httpRequestStub.mock.calls[1][0]
+ const [url2] = httpRequestStub.calls.all()[1].args
expect(url2.search).toContain(`&query_id=${queryId2}`)
})
@@ -468,7 +502,9 @@ describe('HttpAdapter', () => {
compress_request: false,
},
})
- const request1 = stubRequest()
+
+ const request = stubClientRequest()
+ const httpRequestStub = spyOn(Http, 'request').and.returnValue(request)
const query_id = guid()
const insertPromise1 = adapter.insert({
@@ -477,7 +513,7 @@ describe('HttpAdapter', () => {
query_id,
})
const responseBody1 = 'foobar'
- request1.emit(
+ request.emit(
'response',
buildIncomingMessage({
body: responseBody1,
@@ -485,7 +521,7 @@ describe('HttpAdapter', () => {
)
await insertPromise1
- const [url] = httpRequestStub.mock.calls[0]
+ const [url] = httpRequestStub.calls.mostRecent().args
expect(url.search).toContain(`&query_id=${query_id}`)
})
})
@@ -508,54 +544,53 @@ describe('HttpAdapter', () => {
response.statusCode = statusCode
response.headers = {
- 'x-clickhouse-query-id': uuid_v4(),
+ 'x-clickhouse-query-id': randomUUID(),
...headers,
}
return response
}
- function stubRequest() {
+ function stubClientRequest() {
const request = new Stream.Writable({
write() {
/** stub */
},
}) as ClientRequest
request.getHeaders = () => ({})
- httpRequestStub.mockReturnValueOnce(request)
return request
}
function buildHttpAdapter(config: Partial) {
- return new HttpAdapter(
- {
- ...{
- url: new URL('http://localhost:8132'),
+ return new NodeHttpConnection({
+ ...{
+ url: new URL('http://localhost:8132'),
- connect_timeout: 10_000,
- request_timeout: 30_000,
- compression: {
- decompress_response: true,
- compress_request: false,
- },
- max_open_connections: Infinity,
-
- username: '',
- password: '',
- database: '',
- keep_alive: {
- enabled: true,
- socket_ttl: 2500,
- retry_on_expired_socket: false,
- },
+ connect_timeout: 10_000,
+ request_timeout: 30_000,
+ compression: {
+ decompress_response: true,
+ compress_request: false,
+ },
+ max_open_connections: Infinity,
+
+ username: '',
+ password: '',
+ database: '',
+ clickhouse_settings: {},
+
+ logWriter: new LogWriter(new TestLogger()),
+ keep_alive: {
+ enabled: true,
+ socket_ttl: 2500,
+ retry_on_expired_socket: false,
},
- ...config,
},
- new LogWriter(new TestLogger())
- )
+ ...config,
+ })
}
async function assertQueryResult(
- { stream, query_id }: QueryResult,
+ { stream, query_id }: ConnQueryResult<Stream.Readable>,
expectedResponseBody: any
) {
expect(await getAsText(stream)).toBe(expectedResponseBody)
@@ -564,22 +599,22 @@ describe('HttpAdapter', () => {
function assertQueryId(query_id: string) {
expect(typeof query_id).toBe('string')
- expect(uuid.validate(query_id)).toBeTruthy()
+ expect(validateUUID(query_id)).toBeTruthy()
}
})
-class MyTestHttpAdapter extends BaseHttpAdapter {
+class MyTestHttpAdapter extends NodeBaseConnection {
constructor(application_id?: string) {
super(
{
application_id,
+ logWriter: new LogWriter(new TestLogger()),
keep_alive: {
enabled: true,
socket_ttl: 2500,
retry_on_expired_socket: true,
},
- } as ConnectionParams,
- new TestLogger(),
+ } as NodeConnectionParams,
{} as Http.Agent
)
}
diff --git a/__tests__/unit/logger.test.ts b/packages/client-node/__tests__/unit/node_logger.test.ts
similarity index 78%
rename from __tests__/unit/logger.test.ts
rename to packages/client-node/__tests__/unit/node_logger.test.ts
index 87643c30..8b0b1adb 100644
--- a/__tests__/unit/logger.test.ts
+++ b/packages/client-node/__tests__/unit/node_logger.test.ts
@@ -1,26 +1,20 @@
-import type { ErrorLogParams, Logger, LogParams } from '../../src/logger'
-import { LogWriter } from '../../src/logger'
+import type {
+ ErrorLogParams,
+ Logger,
+ LogParams,
+} from '@clickhouse/client-common'
+import { ClickHouseLogLevel, LogWriter } from '@clickhouse/client-common'
-describe('Logger', () => {
+describe('Node.js Logger', () => {
type LogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error'
- const logLevelKey = 'CLICKHOUSE_LOG_LEVEL'
const module = 'LoggerUnitTest'
const message = 'very informative'
const err = new Error('boo')
- let logs: Array = []
- let defaultLogLevel: string | undefined
+ let logs: Array = []
- beforeEach(() => {
- defaultLogLevel = process.env[logLevelKey]
- })
afterEach(() => {
- if (defaultLogLevel === undefined) {
- delete process.env[logLevelKey]
- } else {
- process.env[logLevelKey] = defaultLogLevel
- }
logs = []
})
@@ -31,8 +25,7 @@ describe('Logger', () => {
})
it('should explicitly use TRACE', async () => {
- process.env[logLevelKey] = 'TRACE'
- const logWriter = new LogWriter(new TestLogger())
+ const logWriter = new LogWriter(new TestLogger(), ClickHouseLogLevel.TRACE)
checkLogLevelSet('TRACE')
logEveryLogLevel(logWriter)
expect(logs[0]).toEqual({
@@ -65,8 +58,7 @@ describe('Logger', () => {
})
it('should explicitly use DEBUG', async () => {
- process.env[logLevelKey] = 'DEBUG'
- const logWriter = new LogWriter(new TestLogger())
+ const logWriter = new LogWriter(new TestLogger(), ClickHouseLogLevel.DEBUG)
checkLogLevelSet('DEBUG')
logEveryLogLevel(logWriter)
expect(logs[0]).toEqual({
@@ -94,8 +86,7 @@ describe('Logger', () => {
})
it('should explicitly use INFO', async () => {
- process.env[logLevelKey] = 'INFO'
- const logWriter = new LogWriter(new TestLogger())
+ const logWriter = new LogWriter(new TestLogger(), ClickHouseLogLevel.INFO)
checkLogLevelSet('INFO')
logEveryLogLevel(logWriter)
expect(logs[0]).toEqual({
@@ -118,8 +109,7 @@ describe('Logger', () => {
})
it('should explicitly use WARN', async () => {
- process.env[logLevelKey] = 'WARN'
- const logWriter = new LogWriter(new TestLogger())
+ const logWriter = new LogWriter(new TestLogger(), ClickHouseLogLevel.WARN)
logEveryLogLevel(logWriter)
expect(logs[0]).toEqual({
level: 'warn',
@@ -136,8 +126,7 @@ describe('Logger', () => {
})
it('should explicitly use ERROR', async () => {
- process.env[logLevelKey] = 'ERROR'
- const logWriter = new LogWriter(new TestLogger())
+ const logWriter = new LogWriter(new TestLogger(), ClickHouseLogLevel.ERROR)
logEveryLogLevel(logWriter)
expect(logs[0]).toEqual({
level: 'error',
diff --git a/__tests__/unit/result.test.ts b/packages/client-node/__tests__/unit/node_result_set.test.ts
similarity index 70%
rename from __tests__/unit/result.test.ts
rename to packages/client-node/__tests__/unit/node_result_set.test.ts
index c4c6e97b..cd387937 100644
--- a/__tests__/unit/result.test.ts
+++ b/packages/client-node/__tests__/unit/node_result_set.test.ts
@@ -1,28 +1,31 @@
-import type { Row } from '../../src'
-import { ResultSet } from '../../src'
+import type { Row } from '@clickhouse/client-common'
+import { guid } from '@test/utils'
import Stream, { Readable } from 'stream'
-import { guid } from '../utils'
+import { ResultSet } from '../../src'
-describe('rows', () => {
+describe('Node.js ResultSet', () => {
const expectedText = `{"foo":"bar"}\n{"qaz":"qux"}\n`
const expectedJson = [{ foo: 'bar' }, { qaz: 'qux' }]
- const err = 'Stream has been already consumed'
+ const errMsg = 'Stream has been already consumed'
+ const err = jasmine.objectContaining({
+ message: jasmine.stringContaining(errMsg),
+ })
it('should consume the response as text only once', async () => {
const rs = makeResultSet()
expect(await rs.text()).toEqual(expectedText)
- await expect(rs.text()).rejects.toThrowError(err)
- await expect(rs.json()).rejects.toThrowError(err)
+ await expectAsync(rs.text()).toBeRejectedWith(err)
+ await expectAsync(rs.json()).toBeRejectedWith(err)
})
it('should consume the response as JSON only once', async () => {
const rs = makeResultSet()
expect(await rs.json()).toEqual(expectedJson)
- await expect(rs.json()).rejects.toThrowError(err)
- await expect(rs.text()).rejects.toThrowError(err)
+ await expectAsync(rs.json()).toBeRejectedWith(err)
+ await expectAsync(rs.text()).toBeRejectedWith(err)
})
it('should consume the response as a stream of Row instances', async () => {
@@ -41,9 +44,9 @@ describe('rows', () => {
expect(result).toEqual(expectedJson)
expect(stream.readableEnded).toBeTruthy()
- expect(() => rs.stream()).toThrowError(err)
- await expect(rs.json()).rejects.toThrowError(err)
- await expect(rs.text()).rejects.toThrowError(err)
+ expect(() => rs.stream()).toThrow(new Error(errMsg))
+ await expectAsync(rs.json()).toBeRejectedWith(err)
+ await expectAsync(rs.text()).toBeRejectedWith(err)
})
it('should be able to call Row.text and Row.json multiple times', async () => {
@@ -56,7 +59,7 @@ describe('rows', () => {
for await (const rows of rs.stream()) {
allRows.push(...rows)
}
- expect(allRows).toHaveLength(1)
+ expect(allRows.length).toEqual(1)
const [row] = allRows
expect(row.text).toEqual('{"foo":"bar"}')
expect(row.text).toEqual('{"foo":"bar"}')
diff --git a/packages/client-node/__tests__/unit/node_user_agent.test.ts b/packages/client-node/__tests__/unit/node_user_agent.test.ts
new file mode 100644
index 00000000..ec05a375
--- /dev/null
+++ b/packages/client-node/__tests__/unit/node_user_agent.test.ts
@@ -0,0 +1,27 @@
+import sinon from 'sinon'
+import { getUserAgent } from '../../src/utils'
+import * as version from '../../src/version'
+
+describe('Node.js User-Agent', () => {
+ const sandbox = sinon.createSandbox()
+ beforeEach(() => {
+ // Jasmine's spyOn won't work here: 'platform' property is not configurable
+ sandbox.stub(process, 'platform').value('freebsd')
+ sandbox.stub(process, 'version').value('v16.144')
+ sandbox.stub(version, 'default').value('0.0.42')
+ })
+
+ it('should generate a user agent without app id', async () => {
+ const userAgent = getUserAgent()
+ expect(userAgent).toEqual(
+ 'clickhouse-js/0.0.42 (lv:nodejs/v16.144; os:freebsd)'
+ )
+ })
+
+ it('should generate a user agent with app id', async () => {
+ const userAgent = getUserAgent()
+ expect(userAgent).toEqual(
+ 'clickhouse-js/0.0.42 (lv:nodejs/v16.144; os:freebsd)'
+ )
+ })
+})
diff --git a/packages/client-node/__tests__/unit/node_values_encoder.test.ts b/packages/client-node/__tests__/unit/node_values_encoder.test.ts
new file mode 100644
index 00000000..1ad40de1
--- /dev/null
+++ b/packages/client-node/__tests__/unit/node_values_encoder.test.ts
@@ -0,0 +1,162 @@
+import type {
+ DataFormat,
+ InputJSON,
+ InputJSONObjectEachRow,
+} from '@clickhouse/client-common'
+import Stream from 'stream'
+import { NodeValuesEncoder } from '../../src/utils'
+
+describe('NodeValuesEncoder', () => {
+ const rawFormats = [
+ 'CSV',
+ 'CSVWithNames',
+ 'CSVWithNamesAndTypes',
+ 'TabSeparated',
+ 'TabSeparatedRaw',
+ 'TabSeparatedWithNames',
+ 'TabSeparatedWithNamesAndTypes',
+ 'CustomSeparated',
+ 'CustomSeparatedWithNames',
+ 'CustomSeparatedWithNamesAndTypes',
+ ]
+ const objectFormats = [
+ 'JSON',
+ 'JSONObjectEachRow',
+ 'JSONEachRow',
+ 'JSONStringsEachRow',
+ 'JSONCompactEachRow',
+ 'JSONCompactEachRowWithNames',
+ 'JSONCompactEachRowWithNamesAndTypes',
+ 'JSONCompactStringsEachRowWithNames',
+ 'JSONCompactStringsEachRowWithNamesAndTypes',
+ ]
+ const jsonFormats = [
+ 'JSON',
+ 'JSONStrings',
+ 'JSONCompact',
+ 'JSONCompactStrings',
+ 'JSONColumnsWithMetadata',
+ 'JSONObjectEachRow',
+ 'JSONEachRow',
+ 'JSONStringsEachRow',
+ 'JSONCompactEachRow',
+ 'JSONCompactEachRowWithNames',
+ 'JSONCompactEachRowWithNamesAndTypes',
+ 'JSONCompactStringsEachRowWithNames',
+ 'JSONCompactStringsEachRowWithNamesAndTypes',
+ ]
+
+ const encoder = new NodeValuesEncoder()
+
+ describe('Node.js validateInsertValues', () => {
+ it('should allow object mode stream for JSON* and raw for Tab* or CSV*', async () => {
+ const objectModeStream = Stream.Readable.from('foo,bar\n', {
+ objectMode: true,
+ })
+ const rawStream = Stream.Readable.from('foo,bar\n', {
+ objectMode: false,
+ })
+
+ objectFormats.forEach((format) => {
+ expect(() =>
+ encoder.validateInsertValues(objectModeStream, format as DataFormat)
+ ).not.toThrow()
+ expect(() =>
+ encoder.validateInsertValues(rawStream, format as DataFormat)
+ ).toThrow(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('with enabled object mode'),
+ })
+ )
+ })
+ rawFormats.forEach((format) => {
+ expect(() =>
+ encoder.validateInsertValues(objectModeStream, format as DataFormat)
+ ).toThrow(
+ jasmine.objectContaining({
+ message: jasmine.stringContaining('with disabled object mode'),
+ })
+ )
+ expect(() =>
+ encoder.validateInsertValues(rawStream, format as DataFormat)
+ ).not.toThrow()
+ })
+ })
+ })
+ describe('encodeValues', () => {
+ it('should not do anything for raw formats streams', async () => {
+ const values = Stream.Readable.from('foo,bar\n', {
+ objectMode: false,
+ })
+ rawFormats.forEach((format) => {
+ // should be exactly the same object (no duplicate instances)
+ expect(encoder.encodeValues(values, format as DataFormat)).toEqual(
+ values
+ )
+ })
+ })
+
+ it('should encode JSON streams per line', async () => {
+ for (const format of jsonFormats) {
+ const values = Stream.Readable.from(['foo', 'bar'], {
+ objectMode: true,
+ })
+ const result = encoder.encodeValues(values, format as DataFormat)
+ let encoded = ''
+ for await (const chunk of result) {
+ encoded += chunk
+ }
+ expect(encoded).toEqual('"foo"\n"bar"\n')
+ }
+ })
+
+ it('should encode JSON arrays', async () => {
+ for (const format of jsonFormats) {
+ const values = ['foo', 'bar']
+ const result = encoder.encodeValues(values, format as DataFormat)
+ let encoded = ''
+ for await (const chunk of result) {
+ encoded += chunk
+ }
+ expect(encoded).toEqual('"foo"\n"bar"\n')
+ }
+ })
+
+ it('should encode JSON input', async () => {
+ const values: InputJSON = {
+ meta: [
+ {
+ name: 'name',
+ type: 'string',
+ },
+ ],
+ data: [{ name: 'foo' }, { name: 'bar' }],
+ }
+ const result = encoder.encodeValues(values, 'JSON')
+ let encoded = ''
+ for await (const chunk of result) {
+ encoded += chunk
+ }
+ expect(encoded).toEqual(JSON.stringify(values) + '\n')
+ })
+
+ it('should encode JSONObjectEachRow input', async () => {
+ const values: InputJSONObjectEachRow = {
+ a: { name: 'foo' },
+ b: { name: 'bar' },
+ }
+ const result = encoder.encodeValues(values, 'JSON')
+ let encoded = ''
+ for await (const chunk of result) {
+ encoded += chunk
+ }
+ expect(encoded).toEqual(JSON.stringify(values) + '\n')
+ })
+
+ it('should fail when we try to encode an unknown type of input', async () => {
+ expect(() => encoder.encodeValues(1 as any, 'JSON')).toThrowError(
+ 'Cannot encode values of type number with JSON format'
+ )
+ })
+ })
+})
diff --git a/packages/client-node/__tests__/utils/env.test.ts b/packages/client-node/__tests__/utils/env.test.ts
new file mode 100644
index 00000000..eb0b0aea
--- /dev/null
+++ b/packages/client-node/__tests__/utils/env.test.ts
@@ -0,0 +1,84 @@
+import {
+ getTestConnectionType,
+ TestConnectionType,
+} from '@test/utils/test_connection_type'
+import { getClickHouseTestEnvironment, TestEnv } from '@test/utils/test_env'
+
+/** Ideally, should've been in common, but it does not work with Karma well */
+describe('Test env variables parsing', () => {
+ describe('CLICKHOUSE_TEST_ENVIRONMENT', () => {
+ const key = 'CLICKHOUSE_TEST_ENVIRONMENT'
+ addHooks(key)
+
+ it('should fall back to local_single_node env if unset', async () => {
+ expect(getClickHouseTestEnvironment()).toBe(TestEnv.LocalSingleNode)
+ })
+
+ it('should be able to set local_single_node env explicitly', async () => {
+ process.env[key] = 'local_single_node'
+ expect(getClickHouseTestEnvironment()).toBe(TestEnv.LocalSingleNode)
+ })
+
+ it('should be able to set local_cluster env', async () => {
+ process.env[key] = 'local_cluster'
+ expect(getClickHouseTestEnvironment()).toBe(TestEnv.LocalCluster)
+ })
+
+ it('should be able to set cloud env', async () => {
+ process.env[key] = 'cloud'
+ expect(getClickHouseTestEnvironment()).toBe(TestEnv.Cloud)
+ })
+
+ it('should throw in case of an empty string', async () => {
+ process.env[key] = ''
+ expect(getClickHouseTestEnvironment).toThrowError()
+ })
+
+ it('should throw in case of malformed enum value', async () => {
+ process.env[key] = 'foobar'
+ expect(getClickHouseTestEnvironment).toThrowError()
+ })
+ })
+
+ describe('CLICKHOUSE_TEST_CONNECTION_TYPE', () => {
+ const key = 'CLICKHOUSE_TEST_CONNECTION_TYPE'
+ addHooks(key)
+
+ it('should fall back to Node.js if unset', async () => {
+ expect(getTestConnectionType()).toBe(TestConnectionType.Node)
+ })
+
+ it('should be able to set Node.js explicitly', async () => {
+ process.env[key] = 'node'
+ expect(getTestConnectionType()).toBe(TestConnectionType.Node)
+ })
+
+ it('should be able to set Browser explicitly', async () => {
+ process.env[key] = 'browser'
+ expect(getTestConnectionType()).toBe(TestConnectionType.Browser)
+ })
+
+ it('should throw in case of an empty string', async () => {
+ process.env[key] = ''
+ expect(getTestConnectionType).toThrowError()
+ })
+
+ it('should throw in case of malformed enum value', async () => {
+ process.env[key] = 'foobar'
+ expect(getTestConnectionType).toThrowError()
+ })
+ })
+
+ function addHooks(key: string) {
+ let previousValue = process.env[key]
+ beforeAll(() => {
+ previousValue = process.env[key]
+ })
+ beforeEach(() => {
+ delete process.env[key]
+ })
+ afterAll(() => {
+ process.env[key] = previousValue
+ })
+ }
+})
diff --git a/__tests__/utils/stream.ts b/packages/client-node/__tests__/utils/stream.ts
similarity index 100%
rename from __tests__/utils/stream.ts
rename to packages/client-node/__tests__/utils/stream.ts
diff --git a/packages/client-node/package.json b/packages/client-node/package.json
new file mode 100644
index 00000000..490847a4
--- /dev/null
+++ b/packages/client-node/package.json
@@ -0,0 +1,28 @@
+{
+ "name": "@clickhouse/client",
+ "description": "Official JS client for ClickHouse DB - Node.js implementation",
+ "homepage": "https://clickhouse.com",
+ "version": "0.0.0",
+ "license": "Apache-2.0",
+ "keywords": [
+ "clickhouse",
+ "sql",
+ "client"
+ ],
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/ClickHouse/clickhouse-js.git"
+ },
+ "private": false,
+ "engines": {
+ "node": ">=16"
+ },
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "files": [
+ "dist"
+ ],
+ "dependencies": {
+ "@clickhouse/client-common": "*"
+ }
+}
diff --git a/packages/client-node/src/client.ts b/packages/client-node/src/client.ts
new file mode 100644
index 00000000..7d46112d
--- /dev/null
+++ b/packages/client-node/src/client.ts
@@ -0,0 +1,108 @@
+import type {
+ BaseClickHouseClientConfigOptions,
+ Connection,
+ ConnectionParams,
+ DataFormat,
+} from '@clickhouse/client-common'
+import { ClickHouseClient } from '@clickhouse/client-common'
+import type Stream from 'stream'
+import type { NodeConnectionParams, TLSParams } from './connection'
+import { NodeHttpConnection, NodeHttpsConnection } from './connection'
+import { ResultSet } from './result_set'
+import { NodeValuesEncoder } from './utils'
+
+export type NodeClickHouseClientConfigOptions =
+ BaseClickHouseClientConfigOptions & {
+ tls?: BasicTLSOptions | MutualTLSOptions
+ /** HTTP Keep-Alive related settings */
+ keep_alive?: {
+ /** Enable or disable HTTP Keep-Alive mechanism. Default: true */
+ enabled?: boolean
+ /** How long to keep a particular open socket alive
+ * on the client side (in milliseconds).
+ * Should be less than the server setting
+ * (see `keep_alive_timeout` in server's `config.xml`).
+ * Currently, has no effect if {@link retry_on_expired_socket}
+ * is unset or false. Default value: 2500
+ * (based on the default ClickHouse server setting, which is 3000) */
+ socket_ttl?: number
+ /** If the client detects a potentially expired socket based on the
+ * {@link socket_ttl}, this socket will be immediately destroyed
+ * before sending the request, and this request will be retried
+ * with a new socket up to 3 times. Default: false (no retries) */
+ retry_on_expired_socket?: boolean
+ }
+ }
+
+interface BasicTLSOptions {
+ ca_cert: Buffer
+}
+
+interface MutualTLSOptions {
+ ca_cert: Buffer
+ cert: Buffer
+ key: Buffer
+}
+
+export function createClient(
+ config?: NodeClickHouseClientConfigOptions
+): ClickHouseClient {
+ let tls: TLSParams | undefined = undefined
+ if (config?.tls) {
+ if ('cert' in config.tls && 'key' in config.tls) {
+ tls = {
+ type: 'Mutual',
+ ...config.tls,
+ }
+ } else {
+ tls = {
+ type: 'Basic',
+ ...config.tls,
+ }
+ }
+ }
+ const keep_alive = {
+ enabled: config?.keep_alive?.enabled ?? true,
+ socket_ttl: config?.keep_alive?.socket_ttl ?? 2500,
+ retry_on_expired_socket:
+ config?.keep_alive?.retry_on_expired_socket ?? false,
+ }
+ return new ClickHouseClient({
+ impl: {
+ make_connection: (params: ConnectionParams) => {
+ switch (params.url.protocol) {
+ case 'http:':
+ return new NodeHttpConnection({ ...params, keep_alive })
+ case 'https:':
+ return new NodeHttpsConnection({ ...params, tls, keep_alive })
+ default:
+ throw new Error('Only HTTP(s) adapters are supported')
+ }
+ },
+ make_result_set: (
+ stream: Stream.Readable,
+ format: DataFormat,
+ session_id: string
+ ) => new ResultSet(stream, format, session_id),
+ values_encoder: new NodeValuesEncoder(),
+ close_stream: async (stream) => {
+ stream.destroy()
+ },
+ },
+ ...(config || {}),
+ })
+}
+
+export function createConnection(
+ params: NodeConnectionParams
+): Connection {
+ // TODO throw ClickHouseClient error
+ switch (params.url.protocol) {
+ case 'http:':
+ return new NodeHttpConnection(params)
+ case 'https:':
+ return new NodeHttpsConnection(params)
+ default:
+ throw new Error('Only HTTP(s) adapters are supported')
+ }
+}
diff --git a/packages/client-node/src/connection/index.ts b/packages/client-node/src/connection/index.ts
new file mode 100644
index 00000000..029ae367
--- /dev/null
+++ b/packages/client-node/src/connection/index.ts
@@ -0,0 +1,3 @@
+export * from './node_base_connection'
+export * from './node_http_connection'
+export * from './node_https_connection'
diff --git a/src/connection/adapter/base_http_adapter.ts b/packages/client-node/src/connection/node_base_connection.ts
similarity index 78%
rename from src/connection/adapter/base_http_adapter.ts
rename to packages/client-node/src/connection/node_base_connection.ts
index 3ecf1ac0..38517dd6 100644
--- a/src/connection/adapter/base_http_adapter.ts
+++ b/packages/client-node/src/connection/node_base_connection.ts
@@ -1,28 +1,47 @@
-import Stream from 'stream'
-import type Http from 'http'
-import Zlib from 'zlib'
-import { parseError } from '../../error'
-
-import type { Logger } from '../../logger'
-
import type {
- BaseParams,
+ ConnBaseQueryParams,
Connection,
ConnectionParams,
- ExecParams,
- ExecResult,
- InsertParams,
- InsertResult,
- QueryParams,
- QueryResult,
-} from '../connection'
-import { toSearchParams } from './http_search_params'
-import { transformUrl } from './transform_url'
-import { getAsText, isStream } from '../../utils'
-import type { ClickHouseSettings } from '../../settings'
-import { getUserAgent } from '../../utils/user_agent'
-import * as uuid from 'uuid'
+ ConnExecResult,
+ ConnInsertParams,
+ ConnInsertResult,
+ ConnQueryResult,
+ LogWriter,
+} from '@clickhouse/client-common'
+import {
+ isSuccessfulResponse,
+ parseError,
+ toSearchParams,
+ transformUrl,
+ withHttpSettings,
+} from '@clickhouse/client-common'
+import crypto from 'crypto'
+import type Http from 'http'
import type * as net from 'net'
+import Stream from 'stream'
+import Zlib from 'zlib'
+import { getAsText, getUserAgent, isStream } from '../utils'
+
+export type NodeConnectionParams = ConnectionParams & {
+ tls?: TLSParams
+ keep_alive: {
+ enabled: boolean
+ socket_ttl: number
+ retry_on_expired_socket: boolean
+ }
+}
+
+export type TLSParams =
+ | {
+ ca_cert: Buffer
+ type: 'Basic'
+ }
+ | {
+ ca_cert: Buffer
+ cert: Buffer
+ key: Buffer
+ type: 'Mutual'
+ }
export interface RequestParams {
method: 'GET' | 'POST'
@@ -33,60 +52,13 @@ export interface RequestParams {
compress_request?: boolean
}
-function isSuccessfulResponse(statusCode?: number): boolean {
- return Boolean(statusCode && 200 <= statusCode && statusCode < 300)
-}
-
-function withHttpSettings(
- clickhouse_settings?: ClickHouseSettings,
- compression?: boolean
-): ClickHouseSettings {
- return {
- ...(compression
- ? {
- enable_http_compression: 1,
- }
- : {}),
- ...clickhouse_settings,
- }
-}
-
-function decompressResponse(response: Http.IncomingMessage):
- | {
- response: Stream.Readable
- }
- | { error: Error } {
- const encoding = response.headers['content-encoding']
-
- if (encoding === 'gzip') {
- return {
- response: Stream.pipeline(
- response,
- Zlib.createGunzip(),
- function pipelineCb(err) {
- if (err) {
- console.error(err)
- }
- }
- ),
- }
- } else if (encoding !== undefined) {
- return {
- error: new Error(`Unexpected encoding: ${encoding}`),
- }
- }
-
- return { response }
-}
-
-function isDecompressionError(result: any): result is { error: Error } {
- return result.error !== undefined
-}
-
const expiredSocketMessage = 'expired socket'
-export abstract class BaseHttpAdapter implements Connection {
+export abstract class NodeBaseConnection
+ implements Connection
+{
protected readonly headers: Http.OutgoingHttpHeaders
+ private readonly logger: LogWriter
private readonly retry_expired_sockets: boolean
private readonly known_sockets = new WeakMap<
net.Socket,
@@ -96,14 +68,13 @@ export abstract class BaseHttpAdapter implements Connection {
}
>()
protected constructor(
- protected readonly config: ConnectionParams,
- private readonly logger: Logger,
+ protected readonly params: NodeConnectionParams,
protected readonly agent: Http.Agent
) {
- this.headers = this.buildDefaultHeaders(config.username, config.password)
+ this.logger = params.logWriter
this.retry_expired_sockets =
- this.config.keep_alive.enabled &&
- this.config.keep_alive.retry_on_expired_socket
+ params.keep_alive.enabled && params.keep_alive.retry_on_expired_socket
+ this.headers = this.buildDefaultHeaders(params.username, params.password)
}
protected buildDefaultHeaders(
@@ -114,13 +85,12 @@ export abstract class BaseHttpAdapter implements Connection {
Authorization: `Basic ${Buffer.from(`${username}:${password}`).toString(
'base64'
)}`,
- 'User-Agent': getUserAgent(this.config.application_id),
+ 'User-Agent': getUserAgent(this.params.application_id),
}
}
protected abstract createClientRequest(
- params: RequestParams,
- abort_signal?: AbortSignal
+ params: RequestParams
): Http.ClientRequest
private async request(
@@ -148,7 +118,7 @@ export abstract class BaseHttpAdapter implements Connection {
private async _request(params: RequestParams): Promise {
return new Promise((resolve, reject) => {
const start = Date.now()
- const request = this.createClientRequest(params, params.abort_signal)
+ const request = this.createClientRequest(params)
function onError(err: Error): void {
removeRequestListeners()
@@ -183,7 +153,7 @@ export abstract class BaseHttpAdapter implements Connection {
* see the full sequence of events https://nodejs.org/api/http.html#httprequesturl-options-callback
* */
})
- reject(new Error('The request was aborted.'))
+ reject(new Error('The user aborted a request.'))
}
function onClose(): void {
@@ -230,7 +200,7 @@ export abstract class BaseHttpAdapter implements Connection {
// and is likely about to expire
const isPossiblyExpired =
Date.now() - socketInfo.last_used_time >
- this.config.keep_alive.socket_ttl
+ this.params.keep_alive.socket_ttl
if (isPossiblyExpired) {
this.logger.trace({
module: 'Connection',
@@ -252,7 +222,7 @@ export abstract class BaseHttpAdapter implements Connection {
pipeStream()
}
} else {
- const socketId = uuid.v4()
+ const socketId = crypto.randomUUID()
this.logger.trace({
module: 'Connection',
message: `Using a new socket ${socketId}`,
@@ -273,13 +243,13 @@ export abstract class BaseHttpAdapter implements Connection {
// The socket won't be actually destroyed,
// and it will be returned to the pool.
// TODO: investigate if can actually remove the idle sockets properly
- socket.setTimeout(this.config.request_timeout, onTimeout)
+ socket.setTimeout(this.params.request_timeout, onTimeout)
}
function onTimeout(): void {
removeRequestListeners()
request.destroy()
- reject(new Error('Timeout error'))
+ reject(new Error('Timeout error.'))
}
function removeRequestListeners(): void {
@@ -313,20 +283,22 @@ export abstract class BaseHttpAdapter implements Connection {
// TODO add status code check
const stream = await this.request({
method: 'GET',
- url: transformUrl({ url: this.config.url, pathname: '/ping' }),
+ url: transformUrl({ url: this.params.url, pathname: '/ping' }),
})
stream.destroy()
return true
}
- async query(params: QueryParams): Promise {
- const query_id = this.getQueryId(params)
+ async query(
+ params: ConnBaseQueryParams
+ ): Promise> {
+ const query_id = getQueryId(params.query_id)
const clickhouse_settings = withHttpSettings(
params.clickhouse_settings,
- this.config.compression.decompress_response
+ this.params.compression.decompress_response
)
const searchParams = toSearchParams({
- database: this.config.database,
+ database: this.params.database,
clickhouse_settings,
query_params: params.query_params,
session_id: params.session_id,
@@ -335,7 +307,7 @@ export abstract class BaseHttpAdapter implements Connection {
const stream = await this.request({
method: 'POST',
- url: transformUrl({ url: this.config.url, pathname: '/', searchParams }),
+ url: transformUrl({ url: this.params.url, pathname: '/', searchParams }),
body: params.query,
abort_signal: params.abort_signal,
decompress_response: clickhouse_settings.enable_http_compression === 1,
@@ -347,10 +319,12 @@ export abstract class BaseHttpAdapter implements Connection {
}
}
- async exec(params: ExecParams): Promise {
- const query_id = this.getQueryId(params)
+ async exec(
+ params: ConnBaseQueryParams
+ ): Promise> {
+ const query_id = getQueryId(params.query_id)
const searchParams = toSearchParams({
- database: this.config.database,
+ database: this.params.database,
clickhouse_settings: params.clickhouse_settings,
query_params: params.query_params,
session_id: params.session_id,
@@ -359,7 +333,7 @@ export abstract class BaseHttpAdapter implements Connection {
const stream = await this.request({
method: 'POST',
- url: transformUrl({ url: this.config.url, pathname: '/', searchParams }),
+ url: transformUrl({ url: this.params.url, pathname: '/', searchParams }),
body: params.query,
abort_signal: params.abort_signal,
})
@@ -370,10 +344,12 @@ export abstract class BaseHttpAdapter implements Connection {
}
}
- async insert(params: InsertParams): Promise {
- const query_id = this.getQueryId(params)
+ async insert(
+ params: ConnInsertParams
+ ): Promise {
+ const query_id = getQueryId(params.query_id)
const searchParams = toSearchParams({
- database: this.config.database,
+ database: this.params.database,
clickhouse_settings: params.clickhouse_settings,
query_params: params.query_params,
query: params.query,
@@ -383,10 +359,10 @@ export abstract class BaseHttpAdapter implements Connection {
const stream = await this.request({
method: 'POST',
- url: transformUrl({ url: this.config.url, pathname: '/', searchParams }),
+ url: transformUrl({ url: this.params.url, pathname: '/', searchParams }),
body: params.values,
abort_signal: params.abort_signal,
- compress_request: this.config.compression.compress_request,
+ compress_request: this.params.compression.compress_request,
})
stream.destroy()
@@ -399,10 +375,6 @@ export abstract class BaseHttpAdapter implements Connection {
}
}
- private getQueryId(params: BaseParams): string {
- return params.query_id || uuid.v4()
- }
-
private logResponse(
request: Http.ClientRequest,
params: RequestParams,
@@ -412,8 +384,8 @@ export abstract class BaseHttpAdapter implements Connection {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { authorization, host, ...headers } = request.getHeaders()
const duration = Date.now() - startTimestamp
- this.logger.debug({
- module: 'Connection',
+ this.params.logWriter.debug({
+ module: 'HTTP Adapter',
message: 'Got a response from ClickHouse',
args: {
request_method: params.method,
@@ -426,12 +398,40 @@ export abstract class BaseHttpAdapter implements Connection {
},
})
}
+}
- protected getHeaders(params: RequestParams) {
+function decompressResponse(response: Http.IncomingMessage):
+ | {
+ response: Stream.Readable
+ }
+ | { error: Error } {
+ const encoding = response.headers['content-encoding']
+
+ if (encoding === 'gzip') {
return {
- ...this.headers,
- ...(params.decompress_response ? { 'Accept-Encoding': 'gzip' } : {}),
- ...(params.compress_request ? { 'Content-Encoding': 'gzip' } : {}),
+ response: Stream.pipeline(
+ response,
+ Zlib.createGunzip(),
+ function pipelineCb(err) {
+ if (err) {
+ console.error(err)
+ }
+ }
+ ),
+ }
+ } else if (encoding !== undefined) {
+ return {
+ error: new Error(`Unexpected encoding: ${encoding}`),
}
}
+
+ return { response }
+}
+
+function isDecompressionError(result: any): result is { error: Error } {
+ return result.error !== undefined
+}
+
+function getQueryId(query_id: string | undefined): string {
+ return query_id || crypto.randomUUID()
}
diff --git a/packages/client-node/src/connection/node_http_connection.ts b/packages/client-node/src/connection/node_http_connection.ts
new file mode 100644
index 00000000..5a2d33c6
--- /dev/null
+++ b/packages/client-node/src/connection/node_http_connection.ts
@@ -0,0 +1,35 @@
+import type { Connection } from '@clickhouse/client-common'
+import { withCompressionHeaders } from '@clickhouse/client-common'
+import Http from 'http'
+import type Stream from 'stream'
+import type {
+ NodeConnectionParams,
+ RequestParams,
+} from './node_base_connection'
+import { NodeBaseConnection } from './node_base_connection'
+
+export class NodeHttpConnection
+ extends NodeBaseConnection
+ implements Connection
+{
+ constructor(params: NodeConnectionParams) {
+ const agent = new Http.Agent({
+ keepAlive: params.keep_alive.enabled,
+ maxSockets: params.max_open_connections,
+ })
+ super(params, agent)
+ }
+
+ protected createClientRequest(params: RequestParams): Http.ClientRequest {
+ return Http.request(params.url, {
+ method: params.method,
+ agent: this.agent,
+ headers: withCompressionHeaders({
+ headers: this.headers,
+ compress_request: params.compress_request,
+ decompress_response: params.decompress_response,
+ }),
+ signal: params.abort_signal,
+ })
+ }
+}
diff --git a/packages/client-node/src/connection/node_https_connection.ts b/packages/client-node/src/connection/node_https_connection.ts
new file mode 100644
index 00000000..34320074
--- /dev/null
+++ b/packages/client-node/src/connection/node_https_connection.ts
@@ -0,0 +1,59 @@
+import type { Connection } from '@clickhouse/client-common'
+import { withCompressionHeaders } from '@clickhouse/client-common'
+import type Http from 'http'
+import Https from 'https'
+import type Stream from 'stream'
+import type {
+ NodeConnectionParams,
+ RequestParams,
+} from './node_base_connection'
+import { NodeBaseConnection } from './node_base_connection'
+
+export class NodeHttpsConnection
+ extends NodeBaseConnection
+ implements Connection
+{
+ constructor(params: NodeConnectionParams) {
+ const agent = new Https.Agent({
+ keepAlive: params.keep_alive.enabled,
+ maxSockets: params.max_open_connections,
+ ca: params.tls?.ca_cert,
+ key: params.tls?.type === 'Mutual' ? params.tls.key : undefined,
+ cert: params.tls?.type === 'Mutual' ? params.tls.cert : undefined,
+ })
+ super(params, agent)
+ }
+
+ protected override buildDefaultHeaders(
+ username: string,
+ password: string
+ ): Http.OutgoingHttpHeaders {
+ if (this.params.tls?.type === 'Mutual') {
+ return {
+ 'X-ClickHouse-User': username,
+ 'X-ClickHouse-Key': password,
+ 'X-ClickHouse-SSL-Certificate-Auth': 'on',
+ }
+ }
+ if (this.params.tls?.type === 'Basic') {
+ return {
+ 'X-ClickHouse-User': username,
+ 'X-ClickHouse-Key': password,
+ }
+ }
+ return super.buildDefaultHeaders(username, password)
+ }
+
+ protected createClientRequest(params: RequestParams): Http.ClientRequest {
+ return Https.request(params.url, {
+ method: params.method,
+ agent: this.agent,
+ headers: withCompressionHeaders({
+ headers: this.headers,
+ compress_request: params.compress_request,
+ decompress_response: params.decompress_response,
+ }),
+ signal: params.abort_signal,
+ })
+ }
+}
diff --git a/packages/client-node/src/index.ts b/packages/client-node/src/index.ts
new file mode 100644
index 00000000..67161543
--- /dev/null
+++ b/packages/client-node/src/index.ts
@@ -0,0 +1,32 @@
+export { createConnection, createClient } from './client'
+export { ResultSet } from './result_set'
+
+/** Re-export @clickhouse/client-common types */
+export {
+ type BaseClickHouseClientConfigOptions,
+ type ClickHouseClientConfigOptions,
+ type BaseQueryParams,
+ type QueryParams,
+ type ExecParams,
+ type InsertParams,
+ type InsertValues,
+ type CommandParams,
+ type CommandResult,
+ type ExecResult,
+ type InsertResult,
+ type DataFormat,
+ type ErrorLogParams,
+ type Logger,
+ type LogParams,
+ type ClickHouseSettings,
+ type MergeTreeSettings,
+ type Row,
+ type ResponseJSON,
+ type InputJSON,
+ type InputJSONObjectEachRow,
+ type BaseResultSet,
+ ClickHouseError,
+ ClickHouseLogLevel,
+ ClickHouseClient,
+ SettingsMap,
+} from '@clickhouse/client-common'
diff --git a/src/result.ts b/packages/client-node/src/result_set.ts
similarity index 61%
rename from src/result.ts
rename to packages/client-node/src/result_set.ts
index f9c68185..9eeb2ab2 100644
--- a/src/result.ts
+++ b/packages/client-node/src/result_set.ts
@@ -1,23 +1,16 @@
+import type { BaseResultSet, DataFormat, Row } from '@clickhouse/client-common'
+import { decode, validateStreamFormat } from '@clickhouse/client-common'
import type { TransformCallback } from 'stream'
import Stream, { Transform } from 'stream'
-
import { getAsText } from './utils'
-import { type DataFormat, decode, validateStreamFormat } from './data_formatter'
-export class ResultSet {
+export class ResultSet implements BaseResultSet {
constructor(
private _stream: Stream.Readable,
private readonly format: DataFormat,
public readonly query_id: string
) {}
- /**
- * The method waits for all the rows to be fully loaded
- * and returns the result as a string.
- *
- * The method will throw if the underlying stream was already consumed
- * by calling the other methods.
- */
async text(): Promise {
if (this._stream.readableEnded) {
throw Error(streamAlreadyConsumedMessage)
@@ -25,13 +18,6 @@ export class ResultSet {
return (await getAsText(this._stream)).toString()
}
- /**
- * The method waits for the all the rows to be fully loaded.
- * When the response is received in full, it will be decoded to return JSON.
- *
- * The method will throw if the underlying stream was already consumed
- * by calling the other methods.
- */
async json(): Promise {
if (this._stream.readableEnded) {
throw Error(streamAlreadyConsumedMessage)
@@ -39,19 +25,6 @@ export class ResultSet {
return decode(await this.text(), this.format)
}
- /**
- * Returns a readable stream for responses that can be streamed
- * (i.e. all except JSON).
- *
- * Every iteration provides an array of {@link Row} instances
- * for {@link StreamableDataFormat} format.
- *
- * Should be called only once.
- *
- * The method will throw if called on a response in non-streamable format,
- * and if the underlying stream was already consumed
- * by calling the other methods.
- */
stream(): Stream.Readable {
// If the underlying stream has already ended by calling `text` or `json`,
// Stream.pipeline will create a new empty stream
@@ -108,18 +81,4 @@ export class ResultSet {
}
}
-export interface Row {
- /**
- * A string representation of a row.
- */
- text: string
-
- /**
- * Returns a JSON representation of a row.
- * The method will throw if called on a response in JSON incompatible format.
- * It is safe to call this method multiple times.
- */
- json(): T
-}
-
const streamAlreadyConsumedMessage = 'Stream has been already consumed'
diff --git a/packages/client-node/src/utils/encoder.ts b/packages/client-node/src/utils/encoder.ts
new file mode 100644
index 00000000..7c12bc4a
--- /dev/null
+++ b/packages/client-node/src/utils/encoder.ts
@@ -0,0 +1,75 @@
+import type {
+ DataFormat,
+ InsertValues,
+ ValuesEncoder,
+} from '@clickhouse/client-common'
+import { encodeJSON, isSupportedRawFormat } from '@clickhouse/client-common'
+import Stream from 'stream'
+import { isStream, mapStream } from './stream'
+
+export class NodeValuesEncoder implements ValuesEncoder {
+ encodeValues(
+ values: InsertValues,
+ format: DataFormat
+ ): string | Stream.Readable {
+ if (isStream(values)) {
+ // TSV/CSV/CustomSeparated formats don't require additional serialization
+ if (!values.readableObjectMode) {
+ return values
+ }
+ // Streams in JSON* formats: each object is serialized per-row via encodeJSON
+ return Stream.pipeline(
+ values,
+ mapStream((value) => encodeJSON(value, format)),
+ pipelineCb
+ )
+ }
+ // JSON* arrays
+ if (Array.isArray(values)) {
+ return values.map((value) => encodeJSON(value, format)).join('')
+ }
+ // JSON & JSONObjectEachRow format input
+ if (typeof values === 'object') {
+ return encodeJSON(values, format)
+ }
+ throw new Error(
+ `Cannot encode values of type ${typeof values} with ${format} format`
+ )
+ }
+
+ validateInsertValues(
+ values: InsertValues,
+ format: DataFormat
+ ): void {
+ if (
+ !Array.isArray(values) &&
+ !isStream(values) &&
+ typeof values !== 'object'
+ ) {
+ throw new Error(
+ 'Insert expected "values" to be an array, a stream of values or a JSON object, ' +
+ `got: ${typeof values}`
+ )
+ }
+
+ if (isStream(values)) {
+ if (isSupportedRawFormat(format)) {
+ if (values.readableObjectMode) {
+ throw new Error(
+ `Insert for ${format} expected Readable Stream with disabled object mode.`
+ )
+ }
+ } else if (!values.readableObjectMode) {
+ throw new Error(
+ `Insert for ${format} expected Readable Stream with enabled object mode.`
+ )
+ }
+ }
+ }
+}
+
+function pipelineCb(err: NodeJS.ErrnoException | null) {
+ if (err) {
+ console.error(err)
+ }
+}
diff --git a/packages/client-node/src/utils/index.ts b/packages/client-node/src/utils/index.ts
new file mode 100644
index 00000000..d9fa4870
--- /dev/null
+++ b/packages/client-node/src/utils/index.ts
@@ -0,0 +1,4 @@
+export * from './stream'
+export * from './encoder'
+export * from './process'
+export * from './user_agent'
diff --git a/src/utils/process.ts b/packages/client-node/src/utils/process.ts
similarity index 100%
rename from src/utils/process.ts
rename to packages/client-node/src/utils/process.ts
diff --git a/src/utils/stream.ts b/packages/client-node/src/utils/stream.ts
similarity index 87%
rename from src/utils/stream.ts
rename to packages/client-node/src/utils/stream.ts
index a6708dcf..65dcb552 100644
--- a/src/utils/stream.ts
+++ b/packages/client-node/src/utils/stream.ts
@@ -17,7 +17,9 @@ export async function getAsText(stream: Stream.Readable): Promise {
return result
}
-export function mapStream(mapper: (input: any) => any): Stream.Transform {
+export function mapStream(
+ mapper: (input: unknown) => string
+): Stream.Transform {
return new Stream.Transform({
objectMode: true,
transform(chunk, encoding, callback) {
diff --git a/src/utils/user_agent.ts b/packages/client-node/src/utils/user_agent.ts
similarity index 82%
rename from src/utils/user_agent.ts
rename to packages/client-node/src/utils/user_agent.ts
index 3dc07e6e..9a04e685 100644
--- a/src/utils/user_agent.ts
+++ b/packages/client-node/src/utils/user_agent.ts
@@ -1,6 +1,5 @@
import * as os from 'os'
import packageVersion from '../version'
-import { getProcessVersion } from './process'
/**
* Generate a user agent string like
@@ -9,7 +8,9 @@ import { getProcessVersion } from './process'
* MyApplicationName clickhouse-js/0.0.11 (lv:nodejs/19.0.4; os:linux)
*/
export function getUserAgent(application_id?: string): string {
- const defaultUserAgent = `clickhouse-js/${packageVersion} (lv:nodejs/${getProcessVersion()}; os:${os.platform()})`
+ const defaultUserAgent = `clickhouse-js/${packageVersion} (lv:nodejs/${
+ process.version
+ }; os:${os.platform()})`
return application_id
? `${application_id} ${defaultUserAgent}`
: defaultUserAgent
diff --git a/packages/client-node/src/version.ts b/packages/client-node/src/version.ts
new file mode 100644
index 00000000..d836ffc8
--- /dev/null
+++ b/packages/client-node/src/version.ts
@@ -0,0 +1,2 @@
+const version = '0.2.0-beta1'
+export default version
diff --git a/src/client.ts b/src/client.ts
deleted file mode 100644
index 3a805afa..00000000
--- a/src/client.ts
+++ /dev/null
@@ -1,393 +0,0 @@
-import Stream from 'stream'
-import type { ExecResult, InsertResult, TLSParams } from './connection'
-import { type Connection, createConnection } from './connection'
-import type { Logger } from './logger'
-import { DefaultLogger, LogWriter } from './logger'
-import { isStream, mapStream } from './utils'
-import {
- type DataFormat,
- encodeJSON,
- isSupportedRawFormat,
-} from './data_formatter'
-import { ResultSet } from './result'
-import type { ClickHouseSettings } from './settings'
-import type { InputJSON, InputJSONObjectEachRow } from './clickhouse_types'
-
-export interface ClickHouseClientConfigOptions {
- /** A ClickHouse instance URL.
- * Default value: `http://localhost:8123`. */
- host?: string
- /** The request timeout in milliseconds.
- * Default value: `30_000`. */
- request_timeout?: number
- /** Maximum number of sockets to allow per host.
- * Default value: `Infinity`. */
- max_open_connections?: number
-
- compression?: {
- /** `response: true` instructs ClickHouse server to respond with
- * compressed response body. Default: true. */
- response?: boolean
- /** `request: true` enabled compression on the client request body.
- * Default: false. */
- request?: boolean
- }
- /** The name of the user on whose behalf requests are made.
- * Default: 'default'. */
- username?: string
- /** The user password. Default: ''. */
- password?: string
- /** The name of the application using the nodejs client.
- * Default: empty. */
- application?: string
- /** Database name to use. Default value: `default`. */
- database?: string
- /** ClickHouse settings to apply to all requests. Default value: {} */
- clickhouse_settings?: ClickHouseSettings
- log?: {
- /** A class to instantiate a custom logger implementation.
- * Default: {@link DefaultLogger} */
- LoggerClass?: new () => Logger
- }
- tls?: BasicTLSOptions | MutualTLSOptions
- session_id?: string
- /** HTTP Keep-Alive related settings */
- keep_alive?: {
- /** Enable or disable HTTP Keep-Alive mechanism. Default: true */
- enabled?: boolean
- /** How long to keep a particular open socket alive
- * on the client side (in milliseconds).
- * Should be less than the server setting
- * (see `keep_alive_timeout` in server's `config.xml`).
- * Currently, has no effect if {@link retry_on_expired_socket}
- * is unset or false. Default value: 2500
- * (based on the default ClickHouse server setting, which is 3000) */
- socket_ttl?: number
- /** If the client detects a potentially expired socket based on the
- * {@link socket_ttl}, this socket will be immediately destroyed
- * before sending the request, and this request will be retried
- * with a new socket up to 3 times.
- * * Default: false (no retries) */
- retry_on_expired_socket?: boolean
- }
-}
-
-interface BasicTLSOptions {
- ca_cert: Buffer
-}
-
-interface MutualTLSOptions {
- ca_cert: Buffer
- cert: Buffer
- key: Buffer
-}
-
-export interface BaseParams {
- /** ClickHouse settings that can be applied on query level. */
- clickhouse_settings?: ClickHouseSettings
- /** Parameters for query binding. https://clickhouse.com/docs/en/interfaces/http/#cli-queries-with-parameters */
- query_params?: Record
- /** AbortSignal instance to cancel a request in progress. */
- abort_signal?: AbortSignal
- /** A specific `query_id` that will be sent with this request.
- * If it is not set, a random identifier will be generated automatically by the client. */
- query_id?: string
-}
-
-export interface QueryParams extends BaseParams {
- /** Statement to execute. */
- query: string
- /** Format of the resulting dataset. */
- format?: DataFormat
-}
-
-export interface ExecParams extends BaseParams {
- /** Statement to execute. */
- query: string
-}
-
-export type CommandParams = ExecParams
-export interface CommandResult {
- query_id: string
-}
-
-type InsertValues =
- | ReadonlyArray
- | Stream.Readable
- | InputJSON