From 936909cb145d3ee80faea4ae741b908e0d4455ee Mon Sep 17 00:00:00 2001 From: hanzala-Databrain Date: Thu, 28 Aug 2025 17:17:39 +0530 Subject: [PATCH] fix: publishing pkg --- .../workflows/DuckDBNodeBindingsAndAPI.yml | 181 ++- .prettierrc | 3 + README.md | 50 +- api/.gitignore | 2 +- api/package.json | 4 +- api/pkgs/@duckdb/node-api/README.md | 224 +-- api/pkgs/@duckdb/node-api/package.json | 8 +- api/src/DuckDBAppender.ts | 18 +- api/src/DuckDBConnection.ts | 48 +- api/src/DuckDBDataChunk.ts | 54 +- api/src/DuckDBExtractedStatements.ts | 8 +- api/src/DuckDBFunctionInfo.ts | 2 +- api/src/DuckDBInstance.ts | 6 +- api/src/DuckDBInstanceCache.ts | 4 +- api/src/DuckDBLogicalType.ts | 70 +- api/src/DuckDBMaterializedResult.ts | 6 +- api/src/DuckDBPendingResult.ts | 6 +- api/src/DuckDBPreparedStatement.ts | 50 +- ...uckDBPreparedStatementWeakRefCollection.ts | 2 +- api/src/DuckDBResult.ts | 20 +- api/src/DuckDBResultReader.ts | 14 +- api/src/DuckDBScalarFunction.ts | 14 +- api/src/DuckDBType.ts | 50 +- api/src/DuckDBValueConverter.ts | 2 +- api/src/DuckDBValueConverters.ts | 26 +- api/src/DuckDBVector.ts | 527 ++++---- api/src/JS.ts | 1 - api/src/JsonDuckDBValueConverter.ts | 10 +- api/src/configurationOptionDescriptions.ts | 2 +- .../conversion/dateTimeStringConversion.ts | 46 +- api/src/conversion/stringFromBlob.ts | 2 +- api/src/convertColumnsFromChunks.ts | 4 +- api/src/convertColumnsObjectFromChunks.ts | 6 +- api/src/convertRowObjectsFromChunks.ts | 2 +- api/src/convertRowsFromChunks.ts | 2 +- api/src/createConfig.ts | 2 +- api/src/createDuckDBValueConverter.ts | 2 +- api/src/createResult.ts | 2 +- api/src/createValue.ts | 32 +- api/src/duckdb.ts | 4 +- api/src/enums.ts | 2 +- api/src/getColumnsFromChunks.ts | 2 +- api/src/getColumnsObjectFromChunks.ts | 2 +- api/src/getRowObjectsFromChunks.ts | 2 +- api/src/getRowsFromChunks.ts | 2 +- api/src/tsconfig.json | 2 +- api/src/typeForValue.ts | 2 +- api/src/values/DuckDBBitValue.ts | 28 +- api/src/values/DuckDBBlobValue.ts | 1 - api/src/values/DuckDBDateValue.ts | 2 +- api/src/values/DuckDBDecimalValue.ts | 14 +- api/src/values/DuckDBIntervalValue.ts | 8 +- api/src/values/DuckDBMapValue.ts | 9 +- api/src/values/DuckDBStructValue.ts | 8 +- api/src/values/DuckDBTimeTZValue.ts | 18 +- api/src/values/DuckDBTimeValue.ts | 6 +- .../DuckDBTimestampMillisecondsValue.ts | 22 +- .../values/DuckDBTimestampNanosecondsValue.ts | 22 +- api/src/values/DuckDBTimestampSecondsValue.ts | 16 +- api/src/values/DuckDBTimestampTZValue.ts | 16 +- api/src/values/DuckDBTimestampValue.ts | 12 +- api/src/values/DuckDBUUIDValue.ts | 16 +- api/src/values/DuckDBValue.ts | 3 +- api/src/version.ts | 2 +- api/test/api.test.ts | 318 +++-- api/test/bench/prepare.bench.ts | 24 +- api/test/bench/read.bench.ts | 54 +- api/test/bench/types_bit.bench.ts | 24 +- api/test/bench/types_datetime.bench.ts | 62 +- api/test/bench/types_enum.bench.ts | 46 +- api/test/bench/types_list.bench.ts | 18 +- api/test/bench/types_misc.bench.ts | 6 +- api/test/bench/types_numeric.bench.ts | 80 +- api/test/bench/types_struct.bench.ts | 24 +- api/test/bench/types_varchar.bench.ts | 24 +- api/test/bench/util/benchUtils.ts | 16 +- api/test/bench/util/runSql.ts | 5 +- api/test/bench/validity.bench.ts | 8 +- api/test/bench/write.bench.ts | 10 +- api/test/util/replaceSqlNullWithInteger.ts | 8 +- api/test/util/testAllTypes.ts | 192 +-- api/test/util/testJS.ts | 148 +- bindings/binding.gyp | 396 +++--- bindings/package.json | 4 +- .../node-bindings-darwin-arm64/.npmrc | 0 .../node-bindings-darwin-arm64/README.md 
| 5 + .../node-bindings-darwin-arm64/package.json | 15 + .../node-bindings-darwin-x64/.npmrc | 0 .../node-bindings-darwin-x64/README.md | 5 + .../node-bindings-darwin-x64/package.json | 15 + .../node-bindings-linux-arm64/.npmrc | 0 .../node-bindings-linux-arm64/README.md | 5 + .../node-bindings-linux-arm64/package.json | 15 + .../node-bindings-linux-x64/.npmrc | 0 .../node-bindings-linux-x64/README.md | 5 + .../node-bindings-linux-x64/package.json | 15 + .../node-bindings-win32-x64/.npmrc | 0 .../node-bindings-win32-x64/README.md | 5 + .../node-bindings-win32-x64/package.json | 15 + .../node-bindings/.npmrc | 0 .../node-bindings/README.md | 2 +- .../node-bindings/duckdb.d.ts | 520 +++++-- .../pkgs/@databrainhq/node-bindings/duckdb.js | 26 + .../@databrainhq/node-bindings/package.json | 18 + .../node-bindings-darwin-arm64/README.md | 5 - .../node-bindings-darwin-arm64/package.json | 15 - .../node-bindings-darwin-x64/README.md | 5 - .../node-bindings-darwin-x64/package.json | 15 - .../node-bindings-linux-arm64/README.md | 5 - .../node-bindings-linux-arm64/package.json | 15 - .../@duckdb/node-bindings-linux-x64/README.md | 5 - .../node-bindings-linux-x64/package.json | 15 - .../@duckdb/node-bindings-win32-x64/README.md | 5 - .../node-bindings-win32-x64/package.json | 15 - bindings/pkgs/@duckdb/node-bindings/duckdb.js | 24 - .../pkgs/@duckdb/node-bindings/package.json | 18 - bindings/scripts/checkFunctionSignatures.mjs | 25 +- .../scripts/fetch_libduckdb_linux_amd64.py | 4 +- .../scripts/fetch_libduckdb_linux_arm64.py | 4 +- .../scripts/fetch_libduckdb_osx_universal.py | 4 +- .../scripts/fetch_libduckdb_windows_amd64.py | 6 +- bindings/test/appender.test.ts | 47 +- bindings/test/config.test.ts | 85 +- bindings/test/connection.test.ts | 58 +- bindings/test/constants.test.ts | 2 +- bindings/test/conversion.test.ts | 424 ++++-- bindings/test/data_chunk.test.ts | 85 +- bindings/test/enums.test.ts | 36 +- bindings/test/errors.test.ts | 6 +- bindings/test/extracted_statements.test.ts | 81 +- bindings/test/instance_cache.test.ts | 2 +- bindings/test/logical_type.test.ts | 32 +- bindings/test/open.test.ts | 4 +- bindings/test/pending.test.ts | 77 +- bindings/test/prepared_statements.test.ts | 56 +- bindings/test/query.test.ts | 757 +++++++++-- bindings/test/scalar_functions.test.ts | 40 +- bindings/test/utils/ExpectedLogicalType.ts | 8 +- bindings/test/utils/ExpectedResult.ts | 2 +- bindings/test/utils/ExpectedVector.ts | 3 +- bindings/test/utils/expectChunk.ts | 14 +- bindings/test/utils/expectLogicalType.ts | 86 +- bindings/test/utils/expectResult.ts | 37 +- bindings/test/utils/expectValidity.ts | 19 +- bindings/test/utils/expectVector.ts | 191 ++- bindings/test/utils/expectedLogicalTypes.ts | 14 +- bindings/test/utils/expectedVectors.ts | 10 +- bindings/test/utils/getValue.ts | 57 +- bindings/test/utils/withConnection.ts | 6 +- bindings/test/values.test.ts | 48 +- package-lock.json | 6 + pnpm-lock.yaml | 1204 ++++++++++++----- pnpm-workspace.yaml | 4 +- tsconfig.library.json | 2 +- tsconfig.test.json | 2 +- 155 files changed, 4976 insertions(+), 2580 deletions(-) create mode 100644 .prettierrc rename bindings/pkgs/{@duckdb => @databrainhq}/node-bindings-darwin-arm64/.npmrc (100%) create mode 100644 bindings/pkgs/@databrainhq/node-bindings-darwin-arm64/README.md create mode 100644 bindings/pkgs/@databrainhq/node-bindings-darwin-arm64/package.json rename bindings/pkgs/{@duckdb => @databrainhq}/node-bindings-darwin-x64/.npmrc (100%) create mode 100644 
bindings/pkgs/@databrainhq/node-bindings-darwin-x64/README.md create mode 100644 bindings/pkgs/@databrainhq/node-bindings-darwin-x64/package.json rename bindings/pkgs/{@duckdb => @databrainhq}/node-bindings-linux-arm64/.npmrc (100%) create mode 100644 bindings/pkgs/@databrainhq/node-bindings-linux-arm64/README.md create mode 100644 bindings/pkgs/@databrainhq/node-bindings-linux-arm64/package.json rename bindings/pkgs/{@duckdb => @databrainhq}/node-bindings-linux-x64/.npmrc (100%) create mode 100644 bindings/pkgs/@databrainhq/node-bindings-linux-x64/README.md create mode 100644 bindings/pkgs/@databrainhq/node-bindings-linux-x64/package.json rename bindings/pkgs/{@duckdb => @databrainhq}/node-bindings-win32-x64/.npmrc (100%) create mode 100644 bindings/pkgs/@databrainhq/node-bindings-win32-x64/README.md create mode 100644 bindings/pkgs/@databrainhq/node-bindings-win32-x64/package.json rename bindings/pkgs/{@duckdb => @databrainhq}/node-bindings/.npmrc (100%) rename bindings/pkgs/{@duckdb => @databrainhq}/node-bindings/README.md (66%) rename bindings/pkgs/{@duckdb => @databrainhq}/node-bindings/duckdb.d.ts (84%) create mode 100644 bindings/pkgs/@databrainhq/node-bindings/duckdb.js create mode 100644 bindings/pkgs/@databrainhq/node-bindings/package.json delete mode 100644 bindings/pkgs/@duckdb/node-bindings-darwin-arm64/README.md delete mode 100644 bindings/pkgs/@duckdb/node-bindings-darwin-arm64/package.json delete mode 100644 bindings/pkgs/@duckdb/node-bindings-darwin-x64/README.md delete mode 100644 bindings/pkgs/@duckdb/node-bindings-darwin-x64/package.json delete mode 100644 bindings/pkgs/@duckdb/node-bindings-linux-arm64/README.md delete mode 100644 bindings/pkgs/@duckdb/node-bindings-linux-arm64/package.json delete mode 100644 bindings/pkgs/@duckdb/node-bindings-linux-x64/README.md delete mode 100644 bindings/pkgs/@duckdb/node-bindings-linux-x64/package.json delete mode 100644 bindings/pkgs/@duckdb/node-bindings-win32-x64/README.md delete mode 100644 bindings/pkgs/@duckdb/node-bindings-win32-x64/package.json delete mode 100644 bindings/pkgs/@duckdb/node-bindings/duckdb.js delete mode 100644 bindings/pkgs/@duckdb/node-bindings/package.json create mode 100644 package-lock.json diff --git a/.github/workflows/DuckDBNodeBindingsAndAPI.yml b/.github/workflows/DuckDBNodeBindingsAndAPI.yml index 275cd949..5397bf17 100644 --- a/.github/workflows/DuckDBNodeBindingsAndAPI.yml +++ b/.github/workflows/DuckDBNodeBindingsAndAPI.yml @@ -75,105 +75,130 @@ concurrency: cancel-in-progress: false jobs: - linux_x64: name: Linux x64 runs-on: ubuntu-latest - if: ${{ github.event_name != 'workflow_dispatch' || inputs.linux_x64 }} + if: ${{ inputs.linux_x64 }} steps: - - uses: actions/checkout@v4 - + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + registry-url: 'https://npm.pkg.github.com/' - uses: pnpm/action-setup@v3 with: version: 9 - + - name: Workspace - Install run: pnpm install --ignore-scripts - name: Bindings - Build working-directory: bindings run: pnpm run build - + - name: Bindings - Test working-directory: bindings run: pnpm test - + - name: API - Build working-directory: api run: pnpm run build - - - name: API - Test - working-directory: api - run: pnpm test - + + # - name: API - Test + # working-directory: api + # run: pnpm test + - name: Git Status if: ${{ inputs.publish }} run: git status - + - name: Publish - Bindings - Linux x64 if: ${{ inputs.publish }} - working-directory: 
bindings/pkgs/@duckdb/node-bindings-linux-x64 - run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ github.ref_name }} --access public + working-directory: bindings/pkgs/@databrainhq/node-bindings-linux-x64 + run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ github.ref_name }} env: NPM_AUTH_TOKEN: ${{ secrets.DUCKDB_NPM_TOKEN }} - + YOUR_USERNAME: ${{ secrets.YOUR_USERNAME }} + - name: Publish - Bindings if: ${{ inputs.publish }} - working-directory: bindings/pkgs/@duckdb/node-bindings - run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ github.ref_name }} --access public + working-directory: bindings/pkgs/@databrainhq/node-bindings + run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ github.ref_name }} env: NPM_AUTH_TOKEN: ${{ secrets.DUCKDB_NPM_TOKEN }} + YOUR_USERNAME: ${{ secrets.YOUR_USERNAME }} - name: Publish - API if: ${{ inputs.publish }} - working-directory: api/pkgs/@duckdb/node-api - run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ github.ref_name }} --access public + working-directory: api/pkgs/@databrainhq/node-api + run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ github.ref_name }} env: NPM_AUTH_TOKEN: ${{ secrets.DUCKDB_NPM_TOKEN }} + YOUR_USERNAME: ${{ secrets.YOUR_USERNAME }} linux_arm64: name: Linux arm64 runs-on: ubuntu-latest - if: ${{ github.event_name != 'workflow_dispatch' || inputs.linux_arm64 }} + if: ${{ inputs.linux_arm64 }} env: TARGET_ARCH: arm64 CC: aarch64-linux-gnu-gcc CXX: aarch64-linux-gnu-g++ + YOUR_USERNAME: ${{ secrets.YOUR_USERNAME }} + steps: - name: Install aarch64 compilers run: sudo apt-get update && sudo apt install binutils-aarch64-linux-gnu gcc-aarch64-linux-gnu g++-aarch64-linux-gnu - - - uses: actions/checkout@v4 - + + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + registry-url: 'https://npm.pkg.github.com/' + - uses: pnpm/action-setup@v3 with: version: 9 - + - name: Workspace - Install run: pnpm install --ignore-scripts - name: Bindings - Build working-directory: bindings run: pnpm run build - + - name: Git Status if: ${{ inputs.publish }} run: git status - + - name: Publish - Bindings - Linux arm64 if: ${{ inputs.publish }} - working-directory: bindings/pkgs/@duckdb/node-bindings-linux-arm64 - run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ github.ref_name }} --access public + working-directory: bindings/pkgs/@databrainhq/node-bindings-linux-arm64 + run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ github.ref_name }} env: NPM_AUTH_TOKEN: ${{ secrets.DUCKDB_NPM_TOKEN }} + YOUR_USERNAME: ${{ secrets.YOUR_USERNAME }} macos_arm64: name: Mac OS X arm64 runs-on: macos-latest - if: ${{ github.event_name != 'workflow_dispatch' || inputs.macos_arm64 }} + if: ${{ inputs.macos_arm64 }} steps: - - uses: actions/checkout@v4 - + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + registry-url: 'https://npm.pkg.github.com/' + - uses: pnpm/action-setup@v3 with: version: 9 @@ -184,37 +209,44 @@ jobs: - name: Bindings - Build working-directory: bindings run: pnpm run build - + - name: Bindings - Test working-directory: bindings run: pnpm test - + - name: API - Build 
working-directory: api run: pnpm run build - - - name: API - Test - working-directory: api - run: pnpm test - + + # - name: API - Test + # working-directory: api + # run: pnpm test + - name: Git Status if: ${{ inputs.publish }} run: git status - + - name: Publish - Bindings - Darwin arm64 if: ${{ inputs.publish }} - working-directory: bindings/pkgs/@duckdb/node-bindings-darwin-arm64 - run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ github.ref_name }} --access public + working-directory: bindings/pkgs/@databrainhq/node-bindings-darwin-arm64 + run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ github.ref_name }} env: NPM_AUTH_TOKEN: ${{ secrets.DUCKDB_NPM_TOKEN }} - + YOUR_USERNAME: ${{ secrets.YOUR_USERNAME }} + macos_x64: name: Mac OS X x64 runs-on: macos-13 - if: ${{ github.event_name != 'workflow_dispatch' || inputs.macos_x64 }} + if: ${{ inputs.macos_x64 }} steps: - - uses: actions/checkout@v4 - + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + registry-url: 'https://npm.pkg.github.com/' - uses: pnpm/action-setup@v3 with: version: 9 @@ -225,67 +257,76 @@ jobs: - name: Bindings - Build working-directory: bindings run: pnpm run build - + - name: Bindings - Test working-directory: bindings run: pnpm test - + - name: API - Build working-directory: api run: pnpm run build - - - name: API - Test - working-directory: api - run: pnpm test - + + # - name: API - Test + # working-directory: api + # run: pnpm test + - name: Git Status if: ${{ inputs.publish }} run: git status - + - name: Publish - Bindings - Darwin x64 if: ${{ inputs.publish }} - working-directory: bindings/pkgs/@duckdb/node-bindings-darwin-x64 - run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ github.ref_name }} --access public + working-directory: bindings/pkgs/@databrainhq/node-bindings-darwin-x64 + run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ github.ref_name }} env: NPM_AUTH_TOKEN: ${{ secrets.DUCKDB_NPM_TOKEN }} + YOUR_USERNAME: ${{ secrets.YOUR_USERNAME }} windows_x64: name: Windows x64 runs-on: windows-latest - if: ${{ github.event_name != 'workflow_dispatch' || inputs.windows_x64 }} + if: ${{ inputs.windows_x64 }} steps: - - uses: actions/checkout@v4 - + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + registry-url: 'https://npm.pkg.github.com/' + - uses: pnpm/action-setup@v3 with: version: 9 - + - name: Workspace - Install run: pnpm install --ignore-scripts - name: Bindings - Build working-directory: bindings run: pnpm run build - + - name: Bindings - Test working-directory: bindings run: pnpm test - + - name: API - Build working-directory: api run: pnpm run build - - - name: API - Test - working-directory: api - run: pnpm test - + + # - name: API - Test + # working-directory: api + # run: pnpm test + - name: Git Status if: ${{ inputs.publish }} run: git status - + - name: Publish - Bindings - Win32 x64 if: ${{ inputs.publish }} - working-directory: bindings/pkgs/@duckdb/node-bindings-win32-x64 - run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ github.ref_name }} --access public + working-directory: bindings/pkgs/@databrainhq/node-bindings-win32-x64 + run: pnpm publish ${{ inputs.publish_dry_run && '--dry-run' || '' }} --publish-branch ${{ 
github.ref_name }} env: NPM_AUTH_TOKEN: ${{ secrets.DUCKDB_NPM_TOKEN }} + YOUR_USERNAME: ${{ secrets.YOUR_USERNAME }} diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000..544138be --- /dev/null +++ b/.prettierrc @@ -0,0 +1,3 @@ +{ + "singleQuote": true +} diff --git a/README.md b/README.md index 21254031..e133b39a 100644 --- a/README.md +++ b/README.md @@ -6,58 +6,64 @@ ### Documentation -- [@duckdb/node-api](api/pkgs/@duckdb/node-api/README.md) -- [@duckdb/node-bindings](bindings/pkgs/@duckdb/node-bindings/README.md) -- [@duckdb/node-bindings-darwin-arm64](bindings/pkgs/@duckdb/node-bindings-darwin-arm64/README.md) -- [@duckdb/node-bindings-darwin-x64](bindings/pkgs/@duckdb/node-bindings-darwin-x64/README.md) -- [@duckdb/node-bindings-linux-arm64](bindings/pkgs/@duckdb/node-bindings-linux-arm64/README.md) -- [@duckdb/node-bindings-linux-x64](bindings/pkgs/@duckdb/node-bindings-linux-x64/README.md) -- [@duckdb/node-bindings-win32-x64](bindings/pkgs/@duckdb/node-bindings-win32-x64/README.md) +- [@databrainhq/node-api](api/pkgs/@databrainhq/node-api/README.md) +- [@databrainhq/node-bindings](bindings/pkgs/@databrainhq/node-bindings/README.md) +- [@databrainhq/node-bindings-darwin-arm64](bindings/pkgs/@databrainhq/node-bindings-darwin-arm64/README.md) +- [@databrainhq/node-bindings-darwin-x64](bindings/pkgs/@databrainhq/node-bindings-darwin-x64/README.md) +- [@databrainhq/node-bindings-linux-arm64](bindings/pkgs/@databrainhq/node-bindings-linux-arm64/README.md) +- [@databrainhq/node-bindings-linux-x64](bindings/pkgs/@databrainhq/node-bindings-linux-x64/README.md) +- [@databrainhq/node-bindings-win32-x64](bindings/pkgs/@databrainhq/node-bindings-win32-x64/README.md) ### Published -- [@duckdb/node-api](https://www.npmjs.com/package/@duckdb/node-api) -- [@duckdb/node-bindings](https://www.npmjs.com/package/@duckdb/node-bindings) -- [@duckdb/node-bindings-darwin-arm64](https://www.npmjs.com/package/@duckdb/node-bindings-darwin-arm64) -- [@duckdb/node-bindings-darwin-x64](https://www.npmjs.com/package/@duckdb/node-bindings-darwin-x64) -- [@duckdb/node-bindings-linux-arm64](https://www.npmjs.com/package/@duckdb/node-bindings-linux-arm64) -- [@duckdb/node-bindings-linux-x64](https://www.npmjs.com/package/@duckdb/node-bindings-linux-x64) -- [@duckdb/node-bindings-win32-x64](https://www.npmjs.com/package/@duckdb/node-bindings-win32-x64) +- [@databrainhq/node-api](https://www.npmjs.com/package/@databrainhq/node-api) +- [@databrainhq/node-bindings](https://www.npmjs.com/package/@databrainhq/node-bindings) +- [@databrainhq/node-bindings-darwin-arm64](https://www.npmjs.com/package/@databrainhq/node-bindings-darwin-arm64) +- [@databrainhq/node-bindings-darwin-x64](https://www.npmjs.com/package/@databrainhq/node-bindings-darwin-x64) +- [@databrainhq/node-bindings-linux-arm64](https://www.npmjs.com/package/@databrainhq/node-bindings-linux-arm64) +- [@databrainhq/node-bindings-linux-x64](https://www.npmjs.com/package/@databrainhq/node-bindings-linux-x64) +- [@databrainhq/node-bindings-win32-x64](https://www.npmjs.com/package/@databrainhq/node-bindings-win32-x64) ## Development ### Setup + - [Install pnpm](https://pnpm.io/installation) - `pnpm install` ### Build & Test Bindings + - `cd bindings` - `pnpm run build` - `pnpm test` ### Build & Test API + - `cd api` - `pnpm run build` - `pnpm test` ### Run API Benchmarks + - `cd api` - `pnpm bench` ### Update Package Versions Change version in: -- `api/pkgs/@duckdb/node-api/package.json` -- `bindings/pkgs/@duckdb/node-bindings/package.json` 
-- `bindings/pkgs/@duckdb/node-bindings-darwin-arm64/package.json` -- `bindings/pkgs/@duckdb/node-bindings-darwin-x64/package.json` -- `bindings/pkgs/@duckdb/node-bindings-linux-arm64/package.json` -- `bindings/pkgs/@duckdb/node-bindings-linux-x64/package.json` -- `bindings/pkgs/@duckdb/node-bindings-win32-x64/package.json` + +- `api/pkgs/@databrainhq/node-api/package.json` +- `bindings/pkgs/@databrainhq/node-bindings/package.json` +- `bindings/pkgs/@databrainhq/node-bindings-darwin-arm64/package.json` +- `bindings/pkgs/@databrainhq/node-bindings-darwin-x64/package.json` +- `bindings/pkgs/@databrainhq/node-bindings-linux-arm64/package.json` +- `bindings/pkgs/@databrainhq/node-bindings-linux-x64/package.json` +- `bindings/pkgs/@databrainhq/node-bindings-win32-x64/package.json` ### Upgrade DuckDB Version Change version in: + - `bindings/scripts/fetch_libduckdb_linux_amd64.py` - `bindings/scripts/fetch_libduckdb_linux_arm64.py` - `bindings/scripts/fetch_libduckdb_osx_universal.py` @@ -79,6 +85,6 @@ Useful when upgrading the DuckDB version to detect changes to the C API. ### Publish Packages - Update package versions (as above). -- Use the workflow dispatch for the [DuckDB Node Bindings & API GitHub action](https://github.com/duckdb/duckdb-node-neo/actions/workflows/DuckDBNodeBindingsAndAPI.yml). +- Use the workflow dispatch for the [DuckDB Node Bindings & API GitHub action](https://github.com/databrainhq/duckdb-node-neo/actions/workflows/DuckDBNodeBindingsAndAPI.yml). - Select all initially-unchecked checkboxes to build on all platforms and publish all packages. - Uncheck "Publish Dry Run" to actually publish. diff --git a/api/.gitignore b/api/.gitignore index e0935a62..e5e59073 100644 --- a/api/.gitignore +++ b/api/.gitignore @@ -1,2 +1,2 @@ -pkgs/@duckdb/node-api/lib +pkgs/@databrainhq/node-api/lib test/tsconfig.tsbuildinfo \ No newline at end of file diff --git a/api/package.json b/api/package.json index 0e5b6c12..a670f76a 100644 --- a/api/package.json +++ b/api/package.json @@ -4,14 +4,14 @@ "build": "tsc -b src test", "build:src": "tsc -b src", "build:test": "tsc -b test", - "clean": "rimraf pkgs/@duckdb/node-api/lib", + "clean": "rimraf pkgs/@databrainhq/node-api/lib", "test": "vitest run", "test:watch": "vitest", "bench": "vitest bench --run", "bench:watch": "vitest bench" }, "dependencies": { - "@duckdb/node-bindings": "workspace:*" + "@databrainhq/node-bindings": "workspace:*" }, "devDependencies": { "@types/node": "^20.17.10", diff --git a/api/pkgs/@duckdb/node-api/README.md b/api/pkgs/@duckdb/node-api/README.md index 290c728f..86665011 100644 --- a/api/pkgs/@duckdb/node-api/README.md +++ b/api/pkgs/@duckdb/node-api/README.md @@ -4,20 +4,21 @@ An API for using [DuckDB](https://duckdb.org/) in [Node](https://nodejs.org/). This is a high-level API meant for applications. It depends on low-level bindings that adhere closely to [DuckDB's C API](https://duckdb.org/docs/api/c/overview), -available separately as [@duckdb/node-bindings](https://www.npmjs.com/package/@duckdb/node-bindings). +available separately as [@databrainhq/node-bindings](https://www.npmjs.com/package/@databrainhq/node-bindings). ## Features ### Main Differences from [duckdb-node](https://www.npmjs.com/package/duckdb) -* Native support for Promises; no need for separate [duckdb-async](https://www.npmjs.com/package/duckdb-async) wrapper. -* DuckDB-specific API; not based on the [SQLite Node API](https://www.npmjs.com/package/sqlite3). 
-* Lossless & efficent support for values of all [DuckDB data types](https://duckdb.org/docs/sql/data_types/overview). -* Wraps [released DuckDB binaries](https://github.com/duckdb/duckdb/releases) instead of rebuilding DuckDB. -* Built on [DuckDB's C API](https://duckdb.org/docs/api/c/overview); exposes more functionality. + +- Native support for Promises; no need for separate [duckdb-async](https://www.npmjs.com/package/duckdb-async) wrapper. +- DuckDB-specific API; not based on the [SQLite Node API](https://www.npmjs.com/package/sqlite3). +- Lossless & efficient support for values of all [DuckDB data types](https://duckdb.org/docs/sql/data_types/overview). +- Wraps [released DuckDB binaries](https://github.com/duckdb/duckdb/releases) instead of rebuilding DuckDB. +- Built on [DuckDB's C API](https://duckdb.org/docs/api/c/overview); exposes more functionality. ### Roadmap -Some features are not yet complete. See the [issues list on GitHub](https://github.com/duckdb/duckdb-node-neo/issues) +Some features are not yet complete. See the [issues list on GitHub](https://github.com/databrainhq/duckdb-node-neo/issues) for the most up-to-date roadmap. ### Supported Platforms @@ -33,7 +34,7 @@ for the most up-to-date roadmap. ### Get Basic Information ```ts -import duckdb from '@duckdb/node-api'; +import duckdb from '@databrainhq/node-api'; console.log(duckdb.version()); @@ -43,7 +44,7 @@ console.log(duckdb.configurationOptionDescriptions()); ### Connect ```ts -import { DuckDBConnection } from '@duckdb/node-api'; +import { DuckDBConnection } from '@databrainhq/node-api'; const connection = await DuckDBConnection.create(); ``` @@ -54,28 +55,32 @@ For advanced usage, you can create instances explicitly. ### Create Instance ```ts -import { DuckDBInstance } from '@duckdb/node-api'; +import { DuckDBInstance } from '@databrainhq/node-api'; ``` Create with an in-memory database: + ```ts const instance = await DuckDBInstance.create(':memory:'); ``` Equivalent to the above: + ```ts const instance = await DuckDBInstance.create(); ``` Read from and write to a database file, which is created if needed: + ```ts const instance = await DuckDBInstance.create('my_duckdb.db'); ``` Set [configuration options](https://duckdb.org/docs/stable/configuration/overview.html#configuration-reference): + ```ts const instance = await DuckDBInstance.create('my_duckdb.db', { - threads: '4' + threads: '4', }); ``` @@ -85,14 +90,16 @@ Multiple instances in the same process should not attach the same database. To prevent this, an instance cache can be used: + ```ts const instance = await DuckDBInstance.fromCache('my_duckdb.db'); ``` This uses the default instance cache.
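As an illustration (a minimal sketch, assuming the default cache is keyed by path; both creations below resolve through one shared cached database rather than attaching the file twice):

```ts
import { DuckDBInstance } from '@databrainhq/node-api';

// Both calls go through the default instance cache, so 'my_duckdb.db'
// is attached only once within this process.
const first = await DuckDBInstance.fromCache('my_duckdb.db');
const second = await DuckDBInstance.fromCache('my_duckdb.db');
```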
For advanced usage, you can create instance caches explicitly: + ```ts -import { DuckDBInstanceCache } from '@duckdb/node-api'; +import { DuckDBInstanceCache } from '@databrainhq/node-api'; const cache = new DuckDBInstanceCache(); const instance = await cache.getOrCreateInstance('my_duckdb.db'); @@ -140,39 +147,46 @@ or: ```ts const prepared = await connection.prepare('select $a, $b, $c'); -prepared.bind({ - 'a': 'duck', - 'b': 42, - 'c': listValue([10, 11, 12]), -}, { - 'a': VARCHAR, - 'b': INTEGER, - 'c': LIST(INTEGER), -}); +prepared.bind( + { + a: 'duck', + b: 42, + c: listValue([10, 11, 12]), + }, + { + a: VARCHAR, + b: INTEGER, + c: LIST(INTEGER), + }, +); const result = await prepared.run(); ``` or even: ```ts -const result = await connection.run('select $a, $b, $c', { - 'a': 'duck', - 'b': 42, - 'c': listValue([10, 11, 12]), -}, { - 'a': VARCHAR, - 'b': INTEGER, - 'c': LIST(INTEGER), -}); +const result = await connection.run( + 'select $a, $b, $c', + { + a: 'duck', + b: 42, + c: listValue([10, 11, 12]), + }, + { + a: VARCHAR, + b: INTEGER, + c: LIST(INTEGER), + }, +); ``` Unspecified types will be inferred: ```ts const result = await connection.run('select $a, $b, $c', { - 'a': 'duck', - 'b': 42, - 'c': listValue([10, 11, 12]), + a: 'duck', + b: 42, + c: listValue([10, 11, 12]), }); ``` @@ -185,26 +199,26 @@ Also, any type can have `null` values. Values of some data types need to be constructed using special functions. These are: -| Type | Function | -| ---- | -------- | -| `ARRAY` | `arrayValue` | -| `BIT` | `bitValue` | -| `BLOB` | `blobValue` | -| `DATE` | `dateValue` | -| `DECIMAL` | `decimalValue` | -| `INTERVAL` | `intervalValue` | -| `LIST` | `listValue` | -| `MAP` | `mapValue` | -| `STRUCT` | `structValue` | -| `TIME` | `timeValue` | -| `TIMETZ` | `timeTZValue` | -| `TIMESTAMP` | `timestampValue` | -| `TIMESTAMPTZ` | `timestampTZValue` | -| `TIMESTAMP_S` | `timestampSecondsValue` | -| `TIMESTAMP_MS` | `timestampMillisValue` | -| `TIMESTAMP_NS` | `timestampNanosValue` | -| `UNION` | `unionValue` | -| `UUID` | `uuidValue` | +| Type | Function | +| -------------- | ----------------------- | +| `ARRAY` | `arrayValue` | +| `BIT` | `bitValue` | +| `BLOB` | `blobValue` | +| `DATE` | `dateValue` | +| `DECIMAL` | `decimalValue` | +| `INTERVAL` | `intervalValue` | +| `LIST` | `listValue` | +| `MAP` | `mapValue` | +| `STRUCT` | `structValue` | +| `TIME` | `timeValue` | +| `TIMETZ` | `timeTZValue` | +| `TIMESTAMP` | `timestampValue` | +| `TIMESTAMPTZ` | `timestampTZValue` | +| `TIMESTAMP_S` | `timestampSecondsValue` | +| `TIMESTAMP_MS` | `timestampMillisValue` | +| `TIMESTAMP_NS` | `timestampNanosValue` | +| `UNION` | `unionValue` | +| `UUID` | `uuidValue` | ### Stream Results @@ -217,6 +231,7 @@ const result = await connection.stream('from range(10_000)'); ### Inspect Result Metadata Get column names and types: + ```ts const columnNames = result.columnNames(); const columnTypes = result.columnTypes(); @@ -225,6 +240,7 @@ const columnTypes = result.columnTypes(); ### Read Result Data Run and read all data: + ```ts const reader = await connection.runAndReadAll('from test_all_types()'); const rows = reader.getRows(); @@ -232,16 +248,15 @@ const rows = reader.getRows(); ``` Stream and read up to (at least) some number of rows: + ```ts -const reader = await connection.streamAndReadUntil( - 'from range(5000)', - 1000 -); +const reader = await connection.streamAndReadUntil('from range(5000)', 1000); const rows = reader.getRows(); // rows.length === 2048. (Rows are read in chunks of 2048.) 
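// Note: reads are chunk-aligned. A target of 1000 rounds up to one full
// 2048-row chunk, so a target of 3000 would yield two chunks (4096 rows).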
``` Read rows incrementally: + ```ts const reader = await connection.streamAndRead('from range(5000)'); reader.readUntil(2000); @@ -261,7 +276,7 @@ Result data can be retrieved in a variety of forms: ```ts const reader = await connection.runAndReadAll( - 'from range(3) select range::int as i, 10 + i as n' + 'from range(3) select range::int as i, 10 + i as n', ); const rows = reader.getRows(); @@ -287,15 +302,15 @@ can be losslessly serialized to JSON, use the `JS` or `Json` forms of the above result data methods. Custom converters can be supplied as well. See the implementations of -[JSDuckDBValueConverter](https://github.com/duckdb/duckdb-node-neo/blob/main/api/src/JSDuckDBValueConverter.ts) -and [JsonDuckDBValueConverters](https://github.com/duckdb/duckdb-node-neo/blob/main/api/src/JsonDuckDBValueConverter.ts) +[JSDuckDBValueConverter](https://github.com/databrainhq/duckdb-node-neo/blob/main/api/src/JSDuckDBValueConverter.ts) +and [JsonDuckDBValueConverter](https://github.com/databrainhq/duckdb-node-neo/blob/main/api/src/JsonDuckDBValueConverter.ts) for how to do this. Examples (using the `Json` forms): ```ts const reader = await connection.runAndReadAll( - 'from test_all_types() select bigint, date, interval limit 2' + 'from test_all_types() select bigint, date, interval limit 2', ); const rows = reader.getRowsJson(); @@ -351,7 +366,7 @@ These methods handle nested types as well: ```ts const reader = await connection.runAndReadAll( - 'from test_all_types() select int_array, struct, map, "union" limit 2' + 'from test_all_types() select int_array, struct, map, "union" limit 2', ); const rows = reader.getRowsJson(); @@ -440,6 +455,7 @@ const columnsObject = reader.getColumnsObjectJson(); ``` Column names and types can also be serialized to JSON: + ```ts const columnNamesAndTypes = reader.columnNamesAndTypesJson(); // { @@ -563,11 +579,13 @@ const columnNameAndTypeObjects = reader.columnNameAndTypeObjectsJson(); ### Fetch Chunks Fetch all chunks: + ```ts const chunks = await result.fetchAllChunks(); ``` Fetch one chunk at a time: + ```ts const chunks = []; while (true) { @@ -581,6 +599,7 @@ while (true) { ``` For materialized (non-streaming) results, chunks can be read by index: + ```ts const rowCount = result.rowCount; const chunkCount = result.chunkCount; @@ -591,6 +610,7 @@ for (let i = 0; i < chunkCount; i++) { ``` Get chunk data: + ```ts const rows = chunk.getRows(); const rowObjects = chunk.getRowObjects(result.deduplicatedColumnNames()); const columns = chunk.getColumns(); -const columnsObject = - chunk.getColumnsObject(result.deduplicatedColumnNames()); +const columnsObject = chunk.getColumnsObject(result.deduplicatedColumnNames()); ``` Get chunk data (one value at a time) + ```ts const columns = []; const columnCount = chunk.columnCount; @@ -621,7 +641,7 @@ for (let columnIndex = 0; columnIndex < columnCount; columnIndex++) { ### Inspect Data Types ```ts -import { DuckDBTypeId } from '@duckdb/node-api'; +import { DuckDBTypeId } from '@databrainhq/node-api'; if (columnType.typeId === DuckDBTypeId.ARRAY) { const arrayValueType = columnType.valueType; @@ -672,7 +692,7 @@ const typeString = columnType.toString(); ### Inspect Data Values ```ts -import { DuckDBTypeId } from '@duckdb/node-api'; +import { DuckDBTypeId } from '@databrainhq/node-api'; if (columnType.typeId === DuckDBTypeId.ARRAY) { const arrayItems = columnValue.items; // array of values @@ -817,17 +837,16 @@ The following sets this offset to match the `TimeZone` setting of DuckDB: ```ts const reader =
await connection.runAndReadAll( - `select (timezone(current_timestamp) / 60)::int` + `select (timezone(current_timestamp) / 60)::int`, ); -DuckDBTimestampTZValue.timezoneOffsetInMinutes = - reader.getColumns()[0][0]; +DuckDBTimestampTZValue.timezoneOffsetInMinutes = reader.getColumns()[0][0]; ``` ### Append To Table ```ts await connection.run( - `create or replace table target_table(i integer, v varchar)` + `create or replace table target_table(i integer, v varchar)`, ); const appender = await connection.createAppender('target_table'); @@ -853,7 +872,7 @@ appender.closeSync(); // also flushes ```ts await connection.run( - `create or replace table target_table(i integer, v varchar)` + `create or replace table target_table(i integer, v varchar)`, ); const appender = await connection.createAppender('target_table'); @@ -886,16 +905,13 @@ connection.registerScalarFunction( const v0 = input.getColumnVector(0); const v1 = input.getColumnVector(1); for (let rowIndex = 0; rowIndex < input.rowCount; rowIndex++) { - output.setItem( - rowIndex, - v0.getItem(rowIndex) + v1.getItem(rowIndex) - ); + output.setItem(rowIndex, v0.getItem(rowIndex) + v1.getItem(rowIndex)); } output.flush(); }, returnType: INTEGER, parameterTypes: [INTEGER, INTEGER], - }) + }), ); const reader = await connection.runAndReadAll('select my_add(2, 3)'); const rows = reader.getRows(); @@ -926,7 +942,7 @@ for (let stmtIndex = 0; stmtIndex < statementCount; stmtIndex++) { ### Control Evaluation of Tasks ```ts -import { DuckDBPendingResultState } from '@duckdb/node-api'; +import { DuckDBPendingResultState } from '@databrainhq/node-api'; async function sleep(ms) { return new Promise((resolve) => { @@ -972,10 +988,13 @@ const reader = await connection.runAndReadAll(sql, values, types); // the given number of rows. (Rows are read in chunks, so more than // the target may be read.) const reader = await connection.runAndReadUntil(sql, targetRowCount); -const reader = - await connection.runAndReadAll(sql, targetRowCount, values); -const reader = - await connection.runAndReadAll(sql, targetRowCount, values, types); +const reader = await connection.runAndReadUntil(sql, targetRowCount, values); +const reader = await connection.runAndReadUntil( + sql, + targetRowCount, + values, + types, +); // Create a streaming result and don't yet retrieve any rows. const result = await connection.stream(sql); @@ -996,10 +1015,13 @@ const reader = await connection.streamAndReadAll(sql, values, types); // Create a streaming result, wrap in a reader, and read at least // the given number of rows. const reader = await connection.streamAndReadUntil(sql, targetRowCount); -const reader = - await connection.streamAndReadUntil(sql, targetRowCount, values); +const reader = await connection.streamAndReadUntil(sql, targetRowCount, values); +const reader = await connection.streamAndReadUntil( + sql, + targetRowCount, + values, + types, +); // Prepared Statements @@ -1099,20 +1121,24 @@ const rowValues = chunk.getRowValues(rowIndex); const rows = chunk.getRows(); // Or, values can be visited: -chunk.visitColumnValues(columnIndex, - (value, rowIndex, columnIndex, type) => { /* ... */ } -); -chunk.visitColumns((column, columnIndex, type) => { /* ... */ }); -chunk.visitColumnMajor( - (value, rowIndex, columnIndex, type) => { /* ... */ } -); -chunk.visitRowValues(rowIndex, - (value, rowIndex, columnIndex, type) => { /* ... */ } -); -chunk.visitRows((row, rowIndex) => { /* ...
*/ }); -chunk.visitRowMajor( - (value, rowIndex, columnIndex, type) => { /* ... */ } -); +chunk.visitColumnValues(columnIndex, (value, rowIndex, columnIndex, type) => { + /* ... */ +}); +chunk.visitColumns((column, columnIndex, type) => { + /* ... */ +}); +chunk.visitColumnMajor((value, rowIndex, columnIndex, type) => { + /* ... */ +}); +chunk.visitRowValues(rowIndex, (value, rowIndex, columnIndex, type) => { + /* ... */ +}); +chunk.visitRows((row, rowIndex) => { + /* ... */ +}); +chunk.visitRowMajor((value, rowIndex, columnIndex, type) => { + /* ... */ +}); // Or converted: // The `converter` argument implements `DuckDBValueConverter`, diff --git a/api/pkgs/@duckdb/node-api/package.json b/api/pkgs/@duckdb/node-api/package.json index 63d3cebc..1681af01 100644 --- a/api/pkgs/@duckdb/node-api/package.json +++ b/api/pkgs/@duckdb/node-api/package.json @@ -1,14 +1,14 @@ { - "name": "@duckdb/node-api", - "version": "1.3.2-alpha.26", + "name": "@databrainhq/node-api", + "version": "1.3.2-alpha.26.5", "license": "MIT", "main": "./lib/index.js", "types": "./lib/index.d.ts", "dependencies": { - "@duckdb/node-bindings": "workspace:*" + "@databrainhq/node-bindings": "workspace:*" }, "repository": { "type": "git", - "url": "https://github.com/duckdb/duckdb-node-neo.git" + "url": "https://github.com/databrainhq/duckdb-node-neo.git" } } diff --git a/api/src/DuckDBAppender.ts b/api/src/DuckDBAppender.ts index 058b9e25..9c43ad97 100644 --- a/api/src/DuckDBAppender.ts +++ b/api/src/DuckDBAppender.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { createValue } from './createValue'; import { DuckDBDataChunk } from './DuckDBDataChunk'; import { DuckDBLogicalType } from './DuckDBLogicalType'; @@ -61,7 +61,7 @@ export class DuckDBAppender { } public columnType(columnIndex: number): DuckDBType { return DuckDBLogicalType.create( - duckdb.appender_column_type(this.appender, columnIndex) + duckdb.appender_column_type(this.appender, columnIndex), ).asType(); } public endRow() { @@ -151,20 +151,20 @@ export class DuckDBAppender { } public appendList( value: DuckDBListValue | readonly DuckDBValue[], - type?: DuckDBListType + type?: DuckDBListType, ) { this.appendValue( value instanceof DuckDBListValue ? value : listValue(value), - type + type, ); } public appendStruct( value: DuckDBStructValue | Readonly>, - type?: DuckDBStructType + type?: DuckDBStructType, ) { this.appendValue( value instanceof DuckDBStructValue ? value : structValue(value), - type + type, ); } public appendMap(value: DuckDBMapValue, type?: DuckDBMapType) { @@ -172,11 +172,11 @@ export class DuckDBAppender { } public appendArray( value: DuckDBArrayValue | readonly DuckDBValue[], - type?: DuckDBArrayType + type?: DuckDBArrayType, ) { this.appendValue( value instanceof DuckDBArrayValue ? value : arrayValue(value), - type + type, ); } public appendUnion(value: DuckDBUnionValue, type?: DuckDBUnionType) { @@ -197,7 +197,7 @@ export class DuckDBAppender { public appendValue(value: DuckDBValue, type?: DuckDBType) { duckdb.append_value( this.appender, - createValue(type ? type : typeForValue(value), value) + createValue(type ? 
type : typeForValue(value), value), ); } public appendDataChunk(dataChunk: DuckDBDataChunk) { diff --git a/api/src/DuckDBConnection.ts b/api/src/DuckDBConnection.ts index eddc433d..99934da7 100644 --- a/api/src/DuckDBConnection.ts +++ b/api/src/DuckDBConnection.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { DuckDBAppender } from './DuckDBAppender'; import { DuckDBExtractedStatements } from './DuckDBExtractedStatements'; import { DuckDBInstance } from './DuckDBInstance'; @@ -20,7 +20,7 @@ export class DuckDBConnection { this.preparedStatements = new DuckDBPreparedStatementWeakRefCollection(); } public static async create( - instance?: DuckDBInstance + instance?: DuckDBInstance, ): Promise { if (instance) { return instance.connect(); @@ -44,7 +44,7 @@ export class DuckDBConnection { public async run( sql: string, values?: DuckDBValue[] | Record, - types?: DuckDBType[] | Record + types?: DuckDBType[] | Record, ): Promise { if (values) { const prepared = await this.runUntilLast(sql); @@ -57,21 +57,21 @@ export class DuckDBConnection { } } else { return new DuckDBMaterializedResult( - await duckdb.query(this.connection, sql) + await duckdb.query(this.connection, sql), ); } } public async runAndRead( sql: string, values?: DuckDBValue[] | Record, - types?: DuckDBType[] | Record + types?: DuckDBType[] | Record, ): Promise { return new DuckDBResultReader(await this.run(sql, values, types)); } public async runAndReadAll( sql: string, values?: DuckDBValue[] | Record, - types?: DuckDBType[] | Record + types?: DuckDBType[] | Record, ): Promise { const reader = new DuckDBResultReader(await this.run(sql, values, types)); await reader.readAll(); @@ -81,7 +81,7 @@ export class DuckDBConnection { sql: string, targetRowCount: number, values?: DuckDBValue[] | Record, - types?: DuckDBType[] | Record + types?: DuckDBType[] | Record, ): Promise { const reader = new DuckDBResultReader(await this.run(sql, values, types)); await reader.readUntil(targetRowCount); @@ -90,7 +90,7 @@ export class DuckDBConnection { public async stream( sql: string, values?: DuckDBValue[] | Record, - types?: DuckDBType[] | Record + types?: DuckDBType[] | Record, ): Promise { const prepared = await this.runUntilLast(sql); try { @@ -106,17 +106,17 @@ export class DuckDBConnection { public async streamAndRead( sql: string, values?: DuckDBValue[] | Record, - types?: DuckDBType[] | Record + types?: DuckDBType[] | Record, ): Promise { return new DuckDBResultReader(await this.stream(sql, values, types)); } public async streamAndReadAll( sql: string, values?: DuckDBValue[] | Record, - types?: DuckDBType[] | Record + types?: DuckDBType[] | Record, ): Promise { const reader = new DuckDBResultReader( - await this.stream(sql, values, types) + await this.stream(sql, values, types), ); await reader.readAll(); return reader; @@ -125,10 +125,10 @@ export class DuckDBConnection { sql: string, targetRowCount: number, values?: DuckDBValue[] | Record, - types?: DuckDBType[] | Record + types?: DuckDBType[] | Record, ): Promise { const reader = new DuckDBResultReader( - await this.stream(sql, values, types) + await this.stream(sql, values, types), ); await reader.readUntil(targetRowCount); return reader; @@ -136,7 +136,7 @@ export class DuckDBConnection { public async start( sql: string, values?: DuckDBValue[] | Record, - types?: DuckDBType[] | Record + types?: DuckDBType[] | Record, ): Promise { const prepared = await this.runUntilLast(sql); try { @@ -151,7 +151,7 @@ export class 
DuckDBConnection { public async startStream( sql: string, values?: DuckDBValue[] | Record, - types?: DuckDBType[] | Record + types?: DuckDBType[] | Record, ): Promise { const prepared = await this.runUntilLast(sql); try { @@ -170,26 +170,26 @@ export class DuckDBConnection { } private async createPrepared(sql: string): Promise { return new DuckDBPreparedStatement( - await duckdb.prepare(this.connection, sql) + await duckdb.prepare(this.connection, sql), ); } public async extractStatements( - sql: string + sql: string, ): Promise { const { extracted_statements, statement_count } = await duckdb.extract_statements(this.connection, sql); if (statement_count === 0) { throw new Error( `Failed to extract statements: ${duckdb.extract_statements_error( - extracted_statements - )}` + extracted_statements, + )}`, ); } return new DuckDBExtractedStatements( this.connection, extracted_statements, statement_count, - this.preparedStatements + this.preparedStatements, ); } private async runUntilLast(sql: string): Promise { @@ -210,21 +210,21 @@ export class DuckDBConnection { public async createAppender( table: string, schema?: string | null, - catalog?: string | null + catalog?: string | null, ): Promise { return new DuckDBAppender( duckdb.appender_create_ext( this.connection, catalog ?? null, schema ?? null, - table - ) + table, + ), ); } public registerScalarFunction(scalarFunction: DuckDBScalarFunction) { duckdb.register_scalar_function( this.connection, - scalarFunction.scalar_function + scalarFunction.scalar_function, ); } } diff --git a/api/src/DuckDBDataChunk.ts b/api/src/DuckDBDataChunk.ts index 878aaedf..b3bde74d 100644 --- a/api/src/DuckDBDataChunk.ts +++ b/api/src/DuckDBDataChunk.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { DuckDBType } from './DuckDBType'; import { DuckDBValueConverter } from './DuckDBValueConverter'; import { DuckDBVector } from './DuckDBVector'; @@ -12,10 +12,12 @@ export class DuckDBDataChunk { } public static create( types: readonly DuckDBType[], - rowCount?: number + rowCount?: number, ): DuckDBDataChunk { const chunk = new DuckDBDataChunk( - duckdb.create_data_chunk(types.map((t) => t.toLogicalType().logical_type)) + duckdb.create_data_chunk( + types.map((t) => t.toLogicalType().logical_type), + ), ); if (rowCount != undefined) { chunk.rowCount = rowCount; @@ -44,7 +46,7 @@ export class DuckDBDataChunk { } const vector = DuckDBVector.create( duckdb.data_chunk_get_vector(this.chunk, columnIndex), - this.rowCount + this.rowCount, ); this.vectors[columnIndex] = vector; return vector; @@ -55,8 +57,8 @@ export class DuckDBDataChunk { value: DuckDBValue, rowIndex: number, columnIndex: number, - type: DuckDBType - ) => void + type: DuckDBType, + ) => void, ) { const vector = this.getColumnVector(columnIndex); const type = vector.type; @@ -74,11 +76,11 @@ export class DuckDBDataChunk { } public convertColumnValues( columnIndex: number, - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ): (T | null)[] { const convertedValues: (T | null)[] = []; this.visitColumnValues(columnIndex, (value, _r, _c, type) => - convertedValues.push(converter(value, type, converter)) + convertedValues.push(converter(value, type, converter)), ); return convertedValues; } @@ -96,15 +98,15 @@ export class DuckDBDataChunk { visitColumn: ( column: DuckDBValue[], columnIndex: number, - type: DuckDBType - ) => void + type: DuckDBType, + ) => void, ) { const columnCount = this.columnCount; for (let columnIndex = 0; 
columnIndex < columnCount; columnIndex++) { visitColumn( this.getColumnValues(columnIndex), columnIndex, - this.getColumnVector(columnIndex).type + this.getColumnVector(columnIndex).type, ); } } @@ -142,12 +144,12 @@ export class DuckDBDataChunk { } public appendToColumnsObject( columnNames: readonly string[], - columnsObject: Record + columnsObject: Record, ) { const columnCount = this.columnCount; if (columnNames.length !== columnCount) { throw new Error( - `Provided number of column names (${columnNames.length}) does not match column count (${this.columnCount})` + `Provided number of column names (${columnNames.length}) does not match column count (${this.columnCount})`, ); } for (let columnIndex = 0; columnIndex < columnCount; columnIndex++) { @@ -161,7 +163,7 @@ export class DuckDBDataChunk { } } public getColumnsObject( - columnNames: readonly string[] + columnNames: readonly string[], ): Record { const columnsObject: Record = {}; this.appendToColumnsObject(columnNames, columnsObject); @@ -172,8 +174,8 @@ export class DuckDBDataChunk { value: DuckDBValue, rowIndex: number, columnIndex: number, - type: DuckDBType - ) => void + type: DuckDBType, + ) => void, ) { const columnCount = this.columnCount; for (let columnIndex = 0; columnIndex < columnCount; columnIndex++) { @@ -186,8 +188,8 @@ export class DuckDBDataChunk { value: DuckDBValue, rowIndex: number, columnIndex: number, - type: DuckDBType - ) => void + type: DuckDBType, + ) => void, ) { const columnCount = this.columnCount; for (let columnIndex = 0; columnIndex < columnCount; columnIndex++) { @@ -205,13 +207,13 @@ export class DuckDBDataChunk { } public convertRowValues( rowIndex: number, - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ): (T | null)[] { const convertedValues: (T | null)[] = []; this.visitRowValues(rowIndex, (value, _, columnIndex) => convertedValues.push( - converter(value, this.getColumnVector(columnIndex).type, converter) - ) + converter(value, this.getColumnVector(columnIndex).type, converter), + ), ); return convertedValues; } @@ -250,12 +252,12 @@ export class DuckDBDataChunk { } public appendToRowObjects( columnNames: readonly string[], - rowObjects: Record[] + rowObjects: Record[], ) { const columnCount = this.columnCount; if (columnNames.length !== columnCount) { throw new Error( - `Provided number of column names (${columnNames.length}) does not match column count (${this.columnCount})` + `Provided number of column names (${columnNames.length}) does not match column count (${this.columnCount})`, ); } const rowCount = this.rowCount; @@ -277,8 +279,8 @@ export class DuckDBDataChunk { value: DuckDBValue, rowIndex: number, columnIndex: number, - type: DuckDBType - ) => void + type: DuckDBType, + ) => void, ) { const rowCount = this.rowCount; const columnCount = this.columnCount; @@ -289,7 +291,7 @@ export class DuckDBDataChunk { vector.getItem(rowIndex), rowIndex, columnIndex, - vector.type + vector.type, ); } } diff --git a/api/src/DuckDBExtractedStatements.ts b/api/src/DuckDBExtractedStatements.ts index c2bc00c6..9e3896f9 100644 --- a/api/src/DuckDBExtractedStatements.ts +++ b/api/src/DuckDBExtractedStatements.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { DuckDBPreparedStatement } from './DuckDBPreparedStatement'; import { DuckDBPreparedStatementCollection } from './DuckDBPreparedStatementCollection'; @@ -11,7 +11,7 @@ export class DuckDBExtractedStatements { connection: duckdb.Connection, 
extracted_statements: duckdb.ExtractedStatements, statement_count: number, - preparedStatements?: DuckDBPreparedStatementCollection + preparedStatements?: DuckDBPreparedStatementCollection, ) { this.connection = connection; this.extracted_statements = extracted_statements; @@ -26,8 +26,8 @@ export class DuckDBExtractedStatements { await duckdb.prepare_extracted_statement( this.connection, this.extracted_statements, - index - ) + index, + ), ); if (this.preparedStatements) { this.preparedStatements.add(prepared); diff --git a/api/src/DuckDBFunctionInfo.ts b/api/src/DuckDBFunctionInfo.ts index 5ac90173..f4dfaeb6 100644 --- a/api/src/DuckDBFunctionInfo.ts +++ b/api/src/DuckDBFunctionInfo.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; export class DuckDBFunctionInfo { private readonly function_info: duckdb.FunctionInfo; diff --git a/api/src/DuckDBInstance.ts b/api/src/DuckDBInstance.ts index 72c3d9e3..17f0c7df 100644 --- a/api/src/DuckDBInstance.ts +++ b/api/src/DuckDBInstance.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { createConfig } from './createConfig'; import { DuckDBConnection } from './DuckDBConnection'; import { DuckDBInstanceCache } from './DuckDBInstanceCache'; @@ -12,7 +12,7 @@ export class DuckDBInstance { public static async create( path?: string, - options?: Record + options?: Record, ): Promise { const config = createConfig(options); return new DuckDBInstance(await duckdb.open(path, config)); @@ -20,7 +20,7 @@ export class DuckDBInstance { public static async fromCache( path?: string, - options?: Record + options?: Record, ): Promise { return DuckDBInstanceCache.singleton.getOrCreateInstance(path, options); } diff --git a/api/src/DuckDBInstanceCache.ts b/api/src/DuckDBInstanceCache.ts index 84f4809c..a886aa6c 100644 --- a/api/src/DuckDBInstanceCache.ts +++ b/api/src/DuckDBInstanceCache.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { DuckDBInstance } from './DuckDBInstance'; import { createConfig } from './createConfig'; @@ -11,7 +11,7 @@ export class DuckDBInstanceCache { public async getOrCreateInstance( path?: string, - options?: Record + options?: Record, ): Promise { const config = createConfig(options); const db = await duckdb.get_or_create_from_cache(this.cache, path, config); diff --git a/api/src/DuckDBLogicalType.ts b/api/src/DuckDBLogicalType.ts index 468d21be..0a4706ba 100644 --- a/api/src/DuckDBLogicalType.ts +++ b/api/src/DuckDBLogicalType.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { DuckDBAnyType, DuckDBArrayType, @@ -67,27 +67,27 @@ export class DuckDBLogicalType { } public static createDecimal( width: number, - scale: number + scale: number, ): DuckDBDecimalLogicalType { return new DuckDBDecimalLogicalType( - duckdb.create_decimal_type(width, scale) + duckdb.create_decimal_type(width, scale), ); } public static createEnum( - member_names: readonly string[] + member_names: readonly string[], ): DuckDBEnumLogicalType { return new DuckDBEnumLogicalType(duckdb.create_enum_type(member_names)); } public static createList( - valueType: DuckDBLogicalType + valueType: DuckDBLogicalType, ): DuckDBListLogicalType { return new DuckDBListLogicalType( - duckdb.create_list_type(valueType.logical_type) + duckdb.create_list_type(valueType.logical_type), ); } public static createStruct( 
entryNames: readonly string[], - entryLogicalTypes: readonly DuckDBLogicalType[], + entryLogicalTypes: readonly DuckDBLogicalType[], ): DuckDBStructLogicalType { const length = entryNames.length; if (length !== entryLogicalTypes.length) { @@ -101,28 +101,28 @@ export class DuckDBLogicalType { member_names.push(entryNames[i]); } return new DuckDBStructLogicalType( - duckdb.create_struct_type(member_types, member_names) + duckdb.create_struct_type(member_types, member_names), ); } public static createMap( keyType: DuckDBLogicalType, - valueType: DuckDBLogicalType + valueType: DuckDBLogicalType, ): DuckDBMapLogicalType { return new DuckDBMapLogicalType( - duckdb.create_map_type(keyType.logical_type, valueType.logical_type) + duckdb.create_map_type(keyType.logical_type, valueType.logical_type), ); } public static createArray( valueType: DuckDBLogicalType, - length: number + length: number, ): DuckDBArrayLogicalType { return new DuckDBArrayLogicalType( - duckdb.create_array_type(valueType.logical_type, length) + duckdb.create_array_type(valueType.logical_type, length), ); } public static createUnion( memberTags: readonly string[], - memberLogicalTypes: readonly DuckDBLogicalType[], + memberLogicalTypes: readonly DuckDBLogicalType[], ): DuckDBUnionLogicalType { const length = memberTags.length; if (length !== memberLogicalTypes.length) { @@ -136,7 +136,7 @@ export class DuckDBLogicalType { member_names.push(memberTags[i]); } return new DuckDBUnionLogicalType( - duckdb.create_union_type(member_types, member_names) + duckdb.create_union_type(member_types, member_names), ); } public get typeId(): DuckDBTypeId { @@ -152,7 +152,7 @@ export class DuckDBLogicalType { const alias = this.alias; switch (this.typeId) { case DuckDBTypeId.BOOLEAN: - return DuckDBBooleanType.create(alias); + return DuckDBBooleanType.create(alias); case DuckDBTypeId.TINYINT: return DuckDBTinyIntType.create(alias); case DuckDBTypeId.SMALLINT: @@ -238,7 +238,7 @@ export class DuckDBDecimalLogicalType extends DuckDBLogicalType { } public get internalTypeId(): DuckDBTypeId { return duckdb.decimal_internal_type( - this.logical_type + this.logical_type, ) as number as DuckDBTypeId; } public override asType(): DuckDBDecimalType { @@ -263,7 +263,7 @@ export class DuckDBEnumLogicalType extends DuckDBLogicalType { } public get internalTypeId(): DuckDBTypeId { return duckdb.enum_internal_type( - this.logical_type + this.logical_type, ) as number as DuckDBTypeId; } public override asType(): DuckDBEnumType { @@ -274,7 +274,7 @@ export class DuckDBEnumLogicalType extends DuckDBLogicalType { export class DuckDBListLogicalType extends DuckDBLogicalType { public get valueType(): DuckDBLogicalType { return DuckDBLogicalType.create( - duckdb.list_type_child_type(this.logical_type) + duckdb.list_type_child_type(this.logical_type), ); } public override asType(): DuckDBListType { @@ -291,7 +291,7 @@ export class DuckDBStructLogicalType extends DuckDBLogicalType { } public entryLogicalType(index: number): DuckDBLogicalType { return DuckDBLogicalType.create( - duckdb.struct_type_child_type(this.logical_type, index) + duckdb.struct_type_child_type(this.logical_type, index), ); } public entryType(index: number): DuckDBType { @@ -322,37 +322,49 @@ export class DuckDBStructLogicalType extends DuckDBLogicalType { return valueTypes; } public override asType(): DuckDBStructType { - return new DuckDBStructType(this.entryNames(), this.entryTypes(), this.alias); + return new DuckDBStructType( + this.entryNames(), + this.entryTypes(), + this.alias, + ); } } 
export class DuckDBMapLogicalType extends DuckDBLogicalType { public get keyType(): DuckDBLogicalType { return DuckDBLogicalType.create( - duckdb.map_type_key_type(this.logical_type) + duckdb.map_type_key_type(this.logical_type), ); } public get valueType(): DuckDBLogicalType { return DuckDBLogicalType.create( - duckdb.map_type_value_type(this.logical_type) + duckdb.map_type_value_type(this.logical_type), ); } public override asType(): DuckDBMapType { - return new DuckDBMapType(this.keyType.asType(), this.valueType.asType(), this.alias); + return new DuckDBMapType( + this.keyType.asType(), + this.valueType.asType(), + this.alias, + ); } } export class DuckDBArrayLogicalType extends DuckDBLogicalType { public get valueType(): DuckDBLogicalType { return DuckDBLogicalType.create( - duckdb.array_type_child_type(this.logical_type) + duckdb.array_type_child_type(this.logical_type), ); } public get length(): number { return duckdb.array_type_array_size(this.logical_type); } public override asType(): DuckDBArrayType { - return new DuckDBArrayType(this.valueType.asType(), this.length, this.alias); + return new DuckDBArrayType( + this.valueType.asType(), + this.length, + this.alias, + ); } } @@ -365,7 +377,7 @@ export class DuckDBUnionLogicalType extends DuckDBLogicalType { } public memberLogicalType(index: number): DuckDBLogicalType { return DuckDBLogicalType.create( - duckdb.union_type_member_type(this.logical_type, index) + duckdb.union_type_member_type(this.logical_type, index), ); } public memberType(index: number): DuckDBType { @@ -396,6 +408,10 @@ export class DuckDBUnionLogicalType extends DuckDBLogicalType { return valueTypes; } public override asType(): DuckDBUnionType { - return new DuckDBUnionType(this.memberTags(), this.memberTypes(), this.alias); + return new DuckDBUnionType( + this.memberTags(), + this.memberTypes(), + this.alias, + ); } } diff --git a/api/src/DuckDBMaterializedResult.ts b/api/src/DuckDBMaterializedResult.ts index d5e57269..bd47638e 100644 --- a/api/src/DuckDBMaterializedResult.ts +++ b/api/src/DuckDBMaterializedResult.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { DuckDBDataChunk } from './DuckDBDataChunk'; import { DuckDBResult } from './DuckDBResult'; @@ -13,6 +13,8 @@ export class DuckDBMaterializedResult extends DuckDBResult { return duckdb.result_chunk_count(this.result); } public getChunk(chunkIndex: number): DuckDBDataChunk { - return new DuckDBDataChunk(duckdb.result_get_chunk(this.result, chunkIndex)); + return new DuckDBDataChunk( + duckdb.result_get_chunk(this.result, chunkIndex), + ); } } diff --git a/api/src/DuckDBPendingResult.ts b/api/src/DuckDBPendingResult.ts index d81b55b5..8123d2b5 100644 --- a/api/src/DuckDBPendingResult.ts +++ b/api/src/DuckDBPendingResult.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { createResult } from './createResult'; import { DuckDBResult } from './DuckDBResult'; import { DuckDBResultReader } from './DuckDBResultReader'; @@ -25,8 +25,8 @@ export class DuckDBPendingResult { case duckdb.PendingState.ERROR: throw new Error( `Failure running pending result task: ${duckdb.pending_error( - this.pending_result - )}` + this.pending_result, + )}`, ); case duckdb.PendingState.NO_TASKS_AVAILABLE: return DuckDBPendingResultState.NO_TASKS_AVAILABLE; diff --git a/api/src/DuckDBPreparedStatement.ts b/api/src/DuckDBPreparedStatement.ts index 032ee6a6..516ea873 100644 --- 
a/api/src/DuckDBPreparedStatement.ts +++ b/api/src/DuckDBPreparedStatement.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { createValue } from './createValue'; import { DuckDBLogicalType } from './DuckDBLogicalType'; import { DuckDBMaterializedResult } from './DuckDBMaterializedResult'; @@ -69,12 +69,12 @@ export class DuckDBPreparedStatement { public parameterTypeId(parameterIndex: number): DuckDBTypeId { return duckdb.param_type( this.prepared_statement, - parameterIndex + parameterIndex, ) as number as DuckDBTypeId; } public parameterType(parameterIndex: number): DuckDBType { return DuckDBLogicalType.create( - duckdb.param_logical_type(this.prepared_statement, parameterIndex) + duckdb.param_logical_type(this.prepared_statement, parameterIndex), ).asType(); } public clearBindings() { @@ -142,25 +142,25 @@ export class DuckDBPreparedStatement { } public bindTimestampTZ( parameterIndex: number, - value: DuckDBTimestampTZValue + value: DuckDBTimestampTZValue, ) { this.bindValue(parameterIndex, value, TIMESTAMPTZ); } public bindTimestampSeconds( parameterIndex: number, - value: DuckDBTimestampSecondsValue + value: DuckDBTimestampSecondsValue, ) { this.bindValue(parameterIndex, value, TIMESTAMP_S); } public bindTimestampMilliseconds( parameterIndex: number, - value: DuckDBTimestampMillisecondsValue + value: DuckDBTimestampMillisecondsValue, ) { this.bindValue(parameterIndex, value, TIMESTAMP_MS); } public bindTimestampNanoseconds( parameterIndex: number, - value: DuckDBTimestampNanosecondsValue + value: DuckDBTimestampNanosecondsValue, ) { this.bindValue(parameterIndex, value, TIMESTAMP_NS); } @@ -179,47 +179,47 @@ export class DuckDBPreparedStatement { public bindArray( parameterIndex: number, value: DuckDBArrayValue | readonly DuckDBValue[], - type?: DuckDBArrayType + type?: DuckDBArrayType, ) { this.bindValue( parameterIndex, value instanceof DuckDBArrayValue ? value : arrayValue(value), - type + type, ); } public bindList( parameterIndex: number, value: DuckDBListValue | readonly DuckDBValue[], - type?: DuckDBListType + type?: DuckDBListType, ) { this.bindValue( parameterIndex, value instanceof DuckDBListValue ? value : listValue(value), - type + type, ); } public bindStruct( parameterIndex: number, value: DuckDBStructValue | Readonly>, - type?: DuckDBStructType + type?: DuckDBStructType, ) { this.bindValue( parameterIndex, value instanceof DuckDBStructValue ? value : structValue(value), - type + type, ); } public bindMap( parameterIndex: number, value: DuckDBMapValue, - type?: DuckDBMapType + type?: DuckDBMapType, ) { this.bindValue(parameterIndex, value, type); } public bindUnion( parameterIndex: number, value: DuckDBUnionValue, - type?: DuckDBUnionType + type?: DuckDBUnionType, ) { this.bindValue(parameterIndex, value, type); } @@ -235,17 +235,17 @@ export class DuckDBPreparedStatement { public bindValue( parameterIndex: number, value: DuckDBValue, - type?: DuckDBType + type?: DuckDBType, ) { duckdb.bind_value( this.prepared_statement, parameterIndex, - createValue(type ? type : typeForValue(value), value) + createValue(type ? type : typeForValue(value), value), ); } public bind( values: DuckDBValue[] | Record, - types?: DuckDBType[] | Record + types?: DuckDBType[] | Record, ) { if (Array.isArray(values)) { const typesIsArray = Array.isArray(types); @@ -258,14 +258,14 @@ export class DuckDBPreparedStatement { this.bindValue( this.parameterIndex(key), values[key], - typesIsRecord ? 
types[key] : undefined + typesIsRecord ? types[key] : undefined, ); } } } public async run(): Promise { return new DuckDBMaterializedResult( - await duckdb.execute_prepared(this.prepared_statement) + await duckdb.execute_prepared(this.prepared_statement), ); } public async runAndRead(): Promise { @@ -277,7 +277,7 @@ export class DuckDBPreparedStatement { return reader; } public async runAndReadUntil( - targetRowCount: number + targetRowCount: number, ): Promise { const reader = new DuckDBResultReader(await this.run()); await reader.readUntil(targetRowCount); @@ -285,7 +285,7 @@ export class DuckDBPreparedStatement { } public async stream(): Promise { return new DuckDBResult( - await duckdb.execute_prepared_streaming(this.prepared_statement) + await duckdb.execute_prepared_streaming(this.prepared_statement), ); } public async streamAndRead(): Promise { @@ -297,7 +297,7 @@ export class DuckDBPreparedStatement { return reader; } public async streamAndReadUntil( - targetRowCount: number + targetRowCount: number, ): Promise { const reader = new DuckDBResultReader(await this.stream()); await reader.readUntil(targetRowCount); @@ -305,12 +305,12 @@ export class DuckDBPreparedStatement { } public start(): DuckDBPendingResult { return new DuckDBPendingResult( - duckdb.pending_prepared(this.prepared_statement) + duckdb.pending_prepared(this.prepared_statement), ); } public startStream(): DuckDBPendingResult { return new DuckDBPendingResult( - duckdb.pending_prepared_streaming(this.prepared_statement) + duckdb.pending_prepared_streaming(this.prepared_statement), ); } } diff --git a/api/src/DuckDBPreparedStatementWeakRefCollection.ts b/api/src/DuckDBPreparedStatementWeakRefCollection.ts index ff5ed50f..19ac29b3 100644 --- a/api/src/DuckDBPreparedStatementWeakRefCollection.ts +++ b/api/src/DuckDBPreparedStatementWeakRefCollection.ts @@ -25,7 +25,7 @@ export class DuckDBPreparedStatementWeakRefCollection } private prune() { this.preparedStatements = this.preparedStatements.filter( - (ref) => !!ref.deref() + (ref) => !!ref.deref(), ); } } diff --git a/api/src/DuckDBResult.ts b/api/src/DuckDBResult.ts index 2f749fb3..80f97193 100644 --- a/api/src/DuckDBResult.ts +++ b/api/src/DuckDBResult.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { DuckDBDataChunk } from './DuckDBDataChunk'; import { DuckDBLogicalType } from './DuckDBLogicalType'; import { DuckDBType } from './DuckDBType'; @@ -71,19 +71,19 @@ export class DuckDBResult { public columnTypeId(columnIndex: number): DuckDBTypeId { return duckdb.column_type( this.result, - columnIndex + columnIndex, ) as number as DuckDBTypeId; } public columnLogicalType(columnIndex: number): DuckDBLogicalType { return DuckDBLogicalType.create( - duckdb.column_logical_type(this.result, columnIndex) + duckdb.column_logical_type(this.result, columnIndex), ); } public columnType(columnIndex: number): DuckDBType { return DuckDBLogicalType.create( - duckdb.column_logical_type(this.result, columnIndex) + duckdb.column_logical_type(this.result, columnIndex), ).asType(); } @@ -158,7 +158,7 @@ export class DuckDBResult { } public async convertColumns( - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ): Promise<(T | null)[][]> { const chunks = await this.fetchAllChunks(); return convertColumnsFromChunks(chunks, converter); @@ -178,13 +178,13 @@ export class DuckDBResult { } public async convertColumnsObject( - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ): Promise> { 
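The bind methods above accept either wrapped values or plain JS shapes (bindList takes a DuckDBListValue or a plain array). A sketch of binding and running a prepared statement, under the same assumed @databrainhq/node-api package name:

  import { DuckDBInstance, listValue } from '@databrainhq/node-api'; // assumed package name

  async function bindDemo() {
    const instance = await DuckDBInstance.create(':memory:');
    const connection = await instance.connect();
    const prepared = await connection.prepare('SELECT $1::INTEGER + 1 AS n, $2 AS tags');
    prepared.bindInteger(1, 41);
    prepared.bindList(2, listValue(['a', 'b'])); // a plain array works too
    const reader = await prepared.runAndReadAll();
    console.log(reader.getRowObjects()); // n: 42; tags: a LIST value
  }

  bindDemo();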
const chunks = await this.fetchAllChunks(); return convertColumnsObjectFromChunks( chunks, this.deduplicatedColumnNames(), - converter + converter, ); } @@ -202,7 +202,7 @@ } public async convertRows<T>( - converter: DuckDBValueConverter<T> + converter: DuckDBValueConverter<T>, ): Promise<(T | null)[][]> { const chunks = await this.fetchAllChunks(); return convertRowsFromChunks(chunks, converter); @@ -222,13 +222,13 @@ } public async convertRowObjects<T>( - converter: DuckDBValueConverter<T> + converter: DuckDBValueConverter<T>, ): Promise<Record<string, T | null>[]> { const chunks = await this.fetchAllChunks(); return convertRowObjectsFromChunks( chunks, this.deduplicatedColumnNames(), - converter + converter, ); } diff --git a/api/src/DuckDBResultReader.ts b/api/src/DuckDBResultReader.ts index 2ff4db22..6501b734 100644 --- a/api/src/DuckDBResultReader.ts +++ b/api/src/DuckDBResultReader.ts @@ -95,7 +95,7 @@ export class DuckDBResultReader { public columnNameAndTypeObjectsJson(): Json { return this.result.columnNameAndTypeObjectsJson(); } - + public get rowsChanged(): number { return this.result.rowsChanged; } @@ -139,7 +139,7 @@ } // We didn't find our row. It must have been out of range. throw Error( - `Row index ${rowIndex} requested, but only ${this.currentRowCount_} row have been read so far.` + `Row index ${rowIndex} requested, but only ${this.currentRowCount_} rows have been read so far.`, ); } @@ -213,17 +213,17 @@ public getColumnsObject(): Record<string, DuckDBValue[]> { return getColumnsObjectFromChunks( this.chunks, - this.deduplicatedColumnNames() + this.deduplicatedColumnNames(), ); } public convertColumnsObject<T>( - converter: DuckDBValueConverter<T> + converter: DuckDBValueConverter<T>, ): Record<string, (T | null)[]> { return convertColumnsObjectFromChunks( this.chunks, this.deduplicatedColumnNames(), - converter + converter, ); } @@ -256,12 +256,12 @@ } public convertRowObjects<T>( - converter: DuckDBValueConverter<T> + converter: DuckDBValueConverter<T>, ): Record<string, T | null>[] { return convertRowObjectsFromChunks( this.chunks, this.deduplicatedColumnNames(), - converter + converter, ); } diff --git a/api/src/DuckDBScalarFunction.ts b/api/src/DuckDBScalarFunction.ts index 5aa845ec..7c3735b8 100644 --- a/api/src/DuckDBScalarFunction.ts +++ b/api/src/DuckDBScalarFunction.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { DuckDBDataChunk } from './DuckDBDataChunk'; import { DuckDBFunctionInfo } from './DuckDBFunctionInfo'; import { DuckDBType } from './DuckDBType'; @@ -7,7 +7,7 @@ import { DuckDBVector } from './DuckDBVector'; export type DuckDBScalarMainFunction = ( functionInfo: DuckDBFunctionInfo, inputDataChunk: DuckDBDataChunk, - outputVector: DuckDBVector + outputVector: DuckDBVector, ) => void; export class DuckDBScalarFunction { @@ -76,31 +76,31 @@ const inputDataChunk = new DuckDBDataChunk(input); const outputVector = DuckDBVector.create( output, - inputDataChunk.rowCount + inputDataChunk.rowCount, ); mainFunction(functionInfo, inputDataChunk, outputVector); - } + }, ); } public setReturnType(returnType: DuckDBType) { duckdb.scalar_function_set_return_type( this.scalar_function, - returnType.toLogicalType().logical_type + returnType.toLogicalType().logical_type, ); } public addParameter(parameterType: DuckDBType) { duckdb.scalar_function_add_parameter( this.scalar_function, - parameterType.toLogicalType().logical_type
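runAndReadUntil and streamAndReadUntil above pair a statement with a DuckDBResultReader so rows can be materialized incrementally. A sketch; readUntil stops at a chunk boundary at or past the target, and the public currentRowCount getter name is an assumption based on the private currentRowCount_ field above:

  import { DuckDBInstance } from '@databrainhq/node-api'; // assumed package name

  async function readIncrementally() {
    const instance = await DuckDBInstance.create(':memory:');
    const connection = await instance.connect();
    const prepared = await connection.prepare('SELECT * FROM range(1000000)');
    const reader = await prepared.streamAndReadUntil(1000); // materialize ~1000 rows
    await reader.readUntil(2000); // pull further rows on demand
    console.log(reader.currentRowCount); // >= 2000, rounded up to a chunk boundary
  }

  readIncrementally();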
+ parameterType.toLogicalType().logical_type, ); } public setVarArgs(varArgsType: DuckDBType) { duckdb.scalar_function_set_varargs( this.scalar_function, - varArgsType.toLogicalType().logical_type + varArgsType.toLogicalType().logical_type, ); } diff --git a/api/src/DuckDBType.ts b/api/src/DuckDBType.ts index e1dfabb4..f7cdd153 100644 --- a/api/src/DuckDBType.ts +++ b/api/src/DuckDBType.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { DuckDBLogicalType } from './DuckDBLogicalType'; import { DuckDBTypeId } from './DuckDBTypeId'; import { Json } from './Json'; @@ -27,7 +27,7 @@ export abstract class BaseDuckDBType { } public toLogicalType(): DuckDBLogicalType { const logicalType = DuckDBLogicalType.create( - duckdb.create_logical_type(this.typeId as number as duckdb.Type) + duckdb.create_logical_type(this.typeId as number as duckdb.Type), ); if (this.alias) { logicalType.alias = this.alias; @@ -421,7 +421,7 @@ export class DuckDBDecimalType extends BaseDuckDBType { export function DECIMAL( width?: number, scale?: number, - alias?: string + alias?: string, ): DuckDBDecimalType { if (width === undefined) { return DuckDBDecimalType.default; @@ -523,7 +523,7 @@ export class DuckDBEnumType extends BaseDuckDBType { public constructor( values: readonly string[], internalTypeId: DuckDBTypeId, - alias?: string + alias?: string, ) { super(DuckDBTypeId.ENUM, alias); this.values = values; @@ -558,25 +558,25 @@ export class DuckDBEnumType extends BaseDuckDBType { } export function ENUM8( values: readonly string[], - alias?: string + alias?: string, ): DuckDBEnumType { return new DuckDBEnumType(values, DuckDBTypeId.UTINYINT, alias); } export function ENUM16( values: readonly string[], - alias?: string + alias?: string, ): DuckDBEnumType { return new DuckDBEnumType(values, DuckDBTypeId.USMALLINT, alias); } export function ENUM32( values: readonly string[], - alias?: string + alias?: string, ): DuckDBEnumType { return new DuckDBEnumType(values, DuckDBTypeId.UINTEGER, alias); } export function ENUM( values: readonly string[], - alias?: string + alias?: string, ): DuckDBEnumType { if (values.length < 256) { return ENUM8(values, alias); @@ -586,7 +586,7 @@ export function ENUM( return ENUM32(values, alias); } else { throw new Error( - `ENUM types cannot have more than 4294967295 values; received ${values.length}` + `ENUM types cannot have more than 4294967295 values; received ${values.length}`, ); } } @@ -602,7 +602,7 @@ export class DuckDBListType extends BaseDuckDBType { } public override toLogicalType(): DuckDBLogicalType { const logicalType = DuckDBLogicalType.createList( - this.valueType.toLogicalType() + this.valueType.toLogicalType(), ); if (this.alias) { logicalType.alias = this.alias; @@ -628,7 +628,7 @@ export class DuckDBStructType extends BaseDuckDBType { public constructor( entryNames: readonly string[], entryTypes: readonly DuckDBType[], - alias?: string + alias?: string, ) { super(DuckDBTypeId.STRUCT, alias); if (entryNames.length !== entryTypes.length) { @@ -656,7 +656,7 @@ export class DuckDBStructType extends BaseDuckDBType { const parts: string[] = []; for (let i = 0; i < this.entryNames.length; i++) { parts.push( - `${quotedIdentifier(this.entryNames[i])} ${this.entryTypes[i]}` + `${quotedIdentifier(this.entryNames[i])} ${this.entryTypes[i]}`, ); } return `STRUCT(${parts.join(', ')})`; @@ -664,7 +664,7 @@ export class DuckDBStructType extends BaseDuckDBType { public override toLogicalType(): DuckDBLogicalType { const 
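The ENUM helper above picks the narrowest internal storage from the member count (fewer than 256 members gives ENUM8, and so on), and DECIMAL's internal storage follows from its declared width. For example, with the factory functions assumed to be exported from the API package:

  import { DECIMAL, ENUM } from '@databrainhq/node-api'; // assumed package name

  const mood = ENUM(['sad', 'ok', 'happy']); // 3 < 256, so UTINYINT-backed ENUM8
  const price = DECIMAL(18, 2); // width 18 fits 64-bit internal storage
  console.log(mood.toString()); // ENUM('sad', 'ok', 'happy')
  console.log(price.toString()); // DECIMAL(18,2)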
logicalType = DuckDBLogicalType.createStruct( this.entryNames, - this.entryTypes.map((t) => t.toLogicalType()) + this.entryTypes.map((t) => t.toLogicalType()), ); if (this.alias) { logicalType.alias = this.alias; @@ -675,14 +675,14 @@ export class DuckDBStructType extends BaseDuckDBType { return { typeId: this.typeId, entryNames: [...this.entryNames], - entryTypes: this.entryTypes.map(t => t.toJson()), + entryTypes: this.entryTypes.map((t) => t.toJson()), ...(this.alias ? { alias: this.alias } : {}), }; } } export function STRUCT( entries: Record, - alias?: string + alias?: string, ): DuckDBStructType { const entryNames = Object.keys(entries); const entryTypes = Object.values(entries); @@ -695,7 +695,7 @@ export class DuckDBMapType extends BaseDuckDBType { public constructor( keyType: DuckDBType, valueType: DuckDBType, - alias?: string + alias?: string, ) { super(DuckDBTypeId.MAP, alias); this.keyType = keyType; @@ -707,7 +707,7 @@ export class DuckDBMapType extends BaseDuckDBType { public override toLogicalType(): DuckDBLogicalType { const logicalType = DuckDBLogicalType.createMap( this.keyType.toLogicalType(), - this.valueType.toLogicalType() + this.valueType.toLogicalType(), ); if (this.alias) { logicalType.alias = this.alias; @@ -726,7 +726,7 @@ export class DuckDBMapType extends BaseDuckDBType { export function MAP( keyType: DuckDBType, valueType: DuckDBType, - alias?: string + alias?: string, ): DuckDBMapType { return new DuckDBMapType(keyType, valueType, alias); } @@ -745,7 +745,7 @@ export class DuckDBArrayType extends BaseDuckDBType { public override toLogicalType(): DuckDBLogicalType { const logicalType = DuckDBLogicalType.createArray( this.valueType.toLogicalType(), - this.length + this.length, ); if (this.alias) { logicalType.alias = this.alias; @@ -764,7 +764,7 @@ export class DuckDBArrayType extends BaseDuckDBType { export function ARRAY( valueType: DuckDBType, length: number, - alias?: string + alias?: string, ): DuckDBArrayType { return new DuckDBArrayType(valueType, length, alias); } @@ -793,7 +793,7 @@ export class DuckDBUnionType extends BaseDuckDBType { public constructor( memberTags: readonly string[], memberTypes: readonly DuckDBType[], - alias?: string + alias?: string, ) { super(DuckDBTypeId.UNION, alias); if (memberTags.length !== memberTypes.length) { @@ -821,7 +821,7 @@ export class DuckDBUnionType extends BaseDuckDBType { const parts: string[] = []; for (let i = 0; i < this.memberTags.length; i++) { parts.push( - `${quotedIdentifier(this.memberTags[i])} ${this.memberTypes[i]}` + `${quotedIdentifier(this.memberTags[i])} ${this.memberTypes[i]}`, ); } return `UNION(${parts.join(', ')})`; @@ -829,7 +829,7 @@ export class DuckDBUnionType extends BaseDuckDBType { public override toLogicalType(): DuckDBLogicalType { const logicalType = DuckDBLogicalType.createUnion( this.memberTags, - this.memberTypes.map((t) => t.toLogicalType()) + this.memberTypes.map((t) => t.toLogicalType()), ); if (this.alias) { logicalType.alias = this.alias; @@ -840,14 +840,14 @@ export class DuckDBUnionType extends BaseDuckDBType { return { typeId: this.typeId, memberTags: [...this.memberTags], - memberTypes: this.memberTypes.map(t => t.toJson()), + memberTypes: this.memberTypes.map((t) => t.toJson()), ...(this.alias ? 
{ alias: this.alias } : {}), }; } } export function UNION( members: Record, - alias?: string + alias?: string, ): DuckDBUnionType { const memberTags = Object.keys(members); const memberTypes = Object.values(members); diff --git a/api/src/DuckDBValueConverter.ts b/api/src/DuckDBValueConverter.ts index fc0bce70..055761ed 100644 --- a/api/src/DuckDBValueConverter.ts +++ b/api/src/DuckDBValueConverter.ts @@ -4,5 +4,5 @@ import { DuckDBValue } from './values'; export type DuckDBValueConverter = ( value: DuckDBValue, type: DuckDBType, - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ) => T | null; diff --git a/api/src/DuckDBValueConverters.ts b/api/src/DuckDBValueConverters.ts index ebceb9fa..22ab777f 100644 --- a/api/src/DuckDBValueConverters.ts +++ b/api/src/DuckDBValueConverters.ts @@ -60,7 +60,7 @@ export function jsonNumberFromValue(value: DuckDBValue): number | string { export function bigintFromBigIntValue( value: DuckDBValue, - type: DuckDBType + type: DuckDBType, ): bigint { if (typeof value === 'bigint') { return value; @@ -110,7 +110,7 @@ export function dateFromTimestampValue(value: DuckDBValue): Date { return new Date(Number(millis)); } throw new Error( - `TIMESTAMP value out of range for JS Date: ${value.micros} micros` + `TIMESTAMP value out of range for JS Date: ${value.micros} micros`, ); } throw new Error(`Expected DuckDBTimestampValue`); @@ -123,7 +123,7 @@ export function dateFromTimestampSecondsValue(value: DuckDBValue): Date { return new Date(Number(millis)); } throw new Error( - `TIMESTAMP_S value out of range for JS Date: ${value.seconds} seconds` + `TIMESTAMP_S value out of range for JS Date: ${value.seconds} seconds`, ); } throw new Error(`Expected DuckDBTimestampSecondsValue`); @@ -136,7 +136,7 @@ export function dateFromTimestampMillisecondsValue(value: DuckDBValue): Date { return new Date(Number(millis)); } throw new Error( - `TIMESTAMP_MS value out of range for JS Date: ${value.millis} millis` + `TIMESTAMP_MS value out of range for JS Date: ${value.millis} millis`, ); } throw new Error(`Expected DuckDBTimestampMillisecondsValue`); @@ -149,7 +149,7 @@ export function dateFromTimestampNanosecondsValue(value: DuckDBValue): Date { return new Date(Number(millis)); } throw new Error( - `TIMESTAMP_NS value out of range for JS Date: ${value.nanos} nanos` + `TIMESTAMP_NS value out of range for JS Date: ${value.nanos} nanos`, ); } throw new Error(`Expected DuckDBTimestampNanosecondsValue`); @@ -175,7 +175,7 @@ export function dateFromTimestampTZValue(value: DuckDBValue): Date { return new Date(Number(millis)); } throw new Error( - `TIMESTAMPTZ value out of range for JS Date: ${value.micros} micros` + `TIMESTAMPTZ value out of range for JS Date: ${value.micros} micros`, ); } throw new Error(`Expected DuckDBTimestampTZValue`); @@ -221,7 +221,7 @@ export function doubleFromDecimalValue(value: DuckDBValue): number { export function arrayFromListValue( value: DuckDBValue, type: DuckDBType, - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ): (T | null)[] { if (value instanceof DuckDBListValue && type instanceof DuckDBListType) { return value.items.map((v) => converter(v, type.valueType, converter)); @@ -232,7 +232,7 @@ export function arrayFromListValue( export function objectFromStructValue( value: DuckDBValue, type: DuckDBType, - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ): { [key: string]: T | null } { if (value instanceof DuckDBStructValue && type instanceof DuckDBStructType) { const result: { [key: string]: T | 
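The STRUCT, MAP, ARRAY, and UNION helpers above mirror the class constructors and quote entry names in toString. A short sketch, assuming the factories and the INTEGER/VARCHAR type singletons are exported from the API package:

  import { ARRAY, INTEGER, MAP, STRUCT, UNION, VARCHAR } from '@databrainhq/node-api'; // assumed exports

  const point = STRUCT({ x: INTEGER, y: INTEGER });
  const counts = MAP(VARCHAR, INTEGER);
  const triple = ARRAY(INTEGER, 3);
  const numOrText = UNION({ num: INTEGER, text: VARCHAR });
  console.log(point.toString()); // STRUCT("x" INTEGER, "y" INTEGER)
  console.log(numOrText.toString()); // UNION("num" INTEGER, "text" VARCHAR)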
null } = {}; @@ -240,7 +240,7 @@ export function objectFromStructValue( result[key] = converter( value.entries[key], type.typeForEntry(key), - converter + converter, ); } return result; @@ -251,7 +251,7 @@ export function objectFromStructValue( export function objectArrayFromMapValue( value: DuckDBValue, type: DuckDBType, - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ): { key: T | null; value: T | null }[] { if (value instanceof DuckDBMapValue && type instanceof DuckDBMapType) { return value.entries.map((entry) => ({ @@ -265,7 +265,7 @@ export function objectArrayFromMapValue( export function arrayFromArrayValue( value: DuckDBValue, type: DuckDBType, - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ): (T | null)[] { if (value instanceof DuckDBArrayValue && type instanceof DuckDBArrayType) { return value.items.map((v) => converter(v, type.valueType, converter)); @@ -276,7 +276,7 @@ export function arrayFromArrayValue( export function objectFromUnionValue( value: DuckDBValue, type: DuckDBType, - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ): { tag: string; value: T | null } { if (value instanceof DuckDBUnionValue && type instanceof DuckDBUnionType) { return { @@ -284,7 +284,7 @@ export function objectFromUnionValue( value: converter( value.value, type.memberTypeForTag(value.tag), - converter + converter, ), }; } diff --git a/api/src/DuckDBVector.ts b/api/src/DuckDBVector.ts index 265470f0..3e471a7c 100644 --- a/api/src/DuckDBVector.ts +++ b/api/src/DuckDBVector.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import os from 'os'; import { DuckDBLogicalType } from './DuckDBLogicalType'; import { @@ -124,13 +124,13 @@ function getStringBytes(dataView: DataView, offset: number): Uint8Array { return new Uint8Array( dataView.buffer, dataView.byteOffset + offset + 4, - lengthInBytes + lengthInBytes, ); } else { return duckdb.get_data_from_pointer( dataView.buffer as ArrayBuffer, dataView.byteOffset + offset + 8, - lengthInBytes + lengthInBytes, ); } } @@ -155,7 +155,7 @@ function getVarIntFromBytes(bytes: Uint8Array): bigint { const dv = new DataView( // bytes is big endian bytes.buffer, bytes.byteOffset + 3, - bytes.byteLength - 3 + bytes.byteLength - 3, ); const lastUint64Offset = dv.byteLength - 8; let offset = 0; @@ -296,7 +296,7 @@ function setBoolean8(dataView: DataView, offset: number, value: boolean) { function makeSetBoolean(): ( dataView: DataView, offset: number, - value: boolean + value: boolean, ) => void { switch (duckdb.sizeof_bool) { case 1: @@ -317,7 +317,7 @@ const setBoolean = makeSetBoolean(); function getDecimal16( dataView: DataView, offset: number, - type: DuckDBDecimalType + type: DuckDBDecimalType, ): DuckDBDecimalValue { const value = getInt16(dataView, offset); return new DuckDBDecimalValue(BigInt(value), type.width, type.scale); @@ -326,7 +326,7 @@ function getDecimal16( function getDecimal32( dataView: DataView, offset: number, - type: DuckDBDecimalType + type: DuckDBDecimalType, ): DuckDBDecimalValue { const value = getInt32(dataView, offset); return new DuckDBDecimalValue(BigInt(value), type.width, type.scale); @@ -335,7 +335,7 @@ function getDecimal32( function getDecimal64( dataView: DataView, offset: number, - type: DuckDBDecimalType + type: DuckDBDecimalType, ): DuckDBDecimalValue { const value = getInt64(dataView, offset); return new DuckDBDecimalValue(value, type.width, type.scale); @@ -344,7 +344,7 @@ function getDecimal64( 
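Every converter above shares the DuckDBValueConverter shape: it receives the value, its DuckDB type, and the converter itself, so container converters can recurse into their elements. A sketch of a custom converter; the type export from the API package is assumed:

  import { DuckDBValueConverter } from '@databrainhq/node-api'; // assumed export

  // Render any non-null value via String(); no explicit recursion is needed
  // here because the wrapped value classes stringify themselves.
  const toDisplayString: DuckDBValueConverter<string> = (value, _type, _convert) =>
    value === null ? null : String(value);

  // e.g. const rows = await result.convertRows(toDisplayString);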
function getDecimal128( dataView: DataView, offset: number, - type: DuckDBDecimalType + type: DuckDBDecimalType, ): DuckDBDecimalValue { const value = getInt128(dataView, offset); return new DuckDBDecimalValue(value, type.width, type.scale); @@ -381,7 +381,7 @@ class DuckDBValidity { private constructor( data: BigUint64Array | null, offset: number, - itemCount: number + itemCount: number, ) { this.data = data; this.offset = offset; @@ -389,7 +389,7 @@ class DuckDBValidity { } public static fromVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBValidity { const uint64Count = Math.ceil(itemCount / 64); const bytes = duckdb.vector_get_validity(vector, uint64Count * 8); @@ -399,7 +399,7 @@ class DuckDBValidity { const bigints = new BigUint64Array( bytes.buffer, bytes.byteOffset, - uint64Count + uint64Count, ); return new DuckDBValidity(bigints, 0, itemCount); } @@ -448,7 +448,7 @@ class DuckDBValidity { 0, this.data.buffer as ArrayBuffer, this.data.byteOffset, - this.data.byteLength + this.data.byteLength, ); } } @@ -464,12 +464,12 @@ export abstract class DuckDBVector { public static create( vector: duckdb.Vector, itemCount: number, - knownType?: DuckDBType + knownType?: DuckDBType, ): DuckDBVector { const vectorType = knownType ? knownType : DuckDBLogicalType.create( - duckdb.vector_get_column_type(vector) + duckdb.vector_get_column_type(vector), ).asType(); switch (vectorType.typeId) { case DuckDBTypeId.BOOLEAN: @@ -519,44 +519,44 @@ export abstract class DuckDBVector { return DuckDBDecimal16Vector.fromRawVector( vectorType, vector, - itemCount + itemCount, ); } else if (width <= 9) { return DuckDBDecimal32Vector.fromRawVector( vectorType, vector, - itemCount + itemCount, ); } else if (width <= 18) { return DuckDBDecimal64Vector.fromRawVector( vectorType, vector, - itemCount + itemCount, ); } else if (width <= 38) { return DuckDBDecimal128Vector.fromRawVector( vectorType, vector, - itemCount + itemCount, ); } else { throw new Error(`DECIMAL width too large: ${width}`); } } throw new Error( - 'DuckDBType has DECIMAL type id but is not an instance of DuckDBDecimalType' + 'DuckDBType has DECIMAL type id but is not an instance of DuckDBDecimalType', ); case DuckDBTypeId.TIMESTAMP_S: return DuckDBTimestampSecondsVector.fromRawVector(vector, itemCount); case DuckDBTypeId.TIMESTAMP_MS: return DuckDBTimestampMillisecondsVector.fromRawVector( vector, - itemCount + itemCount, ); case DuckDBTypeId.TIMESTAMP_NS: return DuckDBTimestampNanosecondsVector.fromRawVector( vector, - itemCount + itemCount, ); case DuckDBTypeId.ENUM: if (vectorType instanceof DuckDBEnumType) { @@ -566,60 +566,60 @@ export abstract class DuckDBVector { return DuckDBEnum8Vector.fromRawVector( vectorType, vector, - itemCount + itemCount, ); case DuckDBTypeId.USMALLINT: return DuckDBEnum16Vector.fromRawVector( vectorType, vector, - itemCount + itemCount, ); case DuckDBTypeId.UINTEGER: return DuckDBEnum32Vector.fromRawVector( vectorType, vector, - itemCount + itemCount, ); default: throw new Error( - `unsupported ENUM internal type: ${internalTypeId}` + `unsupported ENUM internal type: ${internalTypeId}`, ); } } throw new Error( - 'DuckDBType has ENUM type id but is not an instance of DuckDBEnumType' + 'DuckDBType has ENUM type id but is not an instance of DuckDBEnumType', ); case DuckDBTypeId.LIST: if (vectorType instanceof DuckDBListType) { return DuckDBListVector.fromRawVector(vectorType, vector, itemCount); } throw new Error( - 'DuckDBType has LIST type id but is not an instance of DuckDBListType' + 
'DuckDBType has LIST type id but is not an instance of DuckDBListType', ); case DuckDBTypeId.STRUCT: if (vectorType instanceof DuckDBStructType) { return DuckDBStructVector.fromRawVector( vectorType, vector, - itemCount + itemCount, ); } throw new Error( - 'DuckDBType has STRUCT type id but is not an instance of DuckDBStructType' + 'DuckDBType has STRUCT type id but is not an instance of DuckDBStructType', ); case DuckDBTypeId.MAP: if (vectorType instanceof DuckDBMapType) { return DuckDBMapVector.fromRawVector(vectorType, vector, itemCount); } throw new Error( - 'DuckDBType has MAP type id but is not an instance of DuckDBMapType' + 'DuckDBType has MAP type id but is not an instance of DuckDBMapType', ); case DuckDBTypeId.ARRAY: if (vectorType instanceof DuckDBArrayType) { return DuckDBArrayVector.fromRawVector(vectorType, vector, itemCount); } throw new Error( - 'DuckDBType has ARRAY type id but is not an instance of DuckDBArrayType' + 'DuckDBType has ARRAY type id but is not an instance of DuckDBArrayType', ); case DuckDBTypeId.UUID: return DuckDBUUIDVector.fromRawVector(vector, itemCount); @@ -628,7 +628,7 @@ export abstract class DuckDBVector { return DuckDBUnionVector.fromRawVector(vectorType, vector, itemCount); } throw new Error( - 'DuckDBType has UNION type id but is not an instance of DuckDBUnionType' + 'DuckDBType has UNION type id but is not an instance of DuckDBUnionType', ); case DuckDBTypeId.BIT: return DuckDBBitVector.fromRawVector(vector, itemCount); @@ -644,7 +644,7 @@ export abstract class DuckDBVector { throw new Error(`Invalid vector type: SQLNULL`); default: throw new Error( - `Invalid type id: ${(vectorType as DuckDBType).typeId}` + `Invalid type id: ${(vectorType as DuckDBType).typeId}`, ); } } @@ -672,7 +672,7 @@ export class DuckDBBooleanVector extends DuckDBVector { dataView: DataView, validity: DuckDBValidity, vector: duckdb.Vector, - itemCount: number + itemCount: number, ) { super(); this.dataView = dataView; @@ -682,13 +682,13 @@ export class DuckDBBooleanVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBBooleanVector { const data = vectorData(vector, itemCount * duckdb.sizeof_bool); const dataView = new DataView( data.buffer, data.byteOffset, - data.byteLength + data.byteLength, ); const validity = DuckDBValidity.fromVector(vector, itemCount); return new DuckDBBooleanVector(dataView, validity, vector, itemCount); @@ -718,7 +718,7 @@ export class DuckDBBooleanVector extends DuckDBVector { 0, this.dataView.buffer as ArrayBuffer, this.dataView.byteOffset, - this.dataView.byteLength + this.dataView.byteLength, ); this.validity.flush(this.vector); } @@ -727,11 +727,11 @@ export class DuckDBBooleanVector extends DuckDBVector { new DataView( this.dataView.buffer, this.dataView.byteOffset + offset * duckdb.sizeof_bool, - length * duckdb.sizeof_bool + length * duckdb.sizeof_bool, ), this.validity.slice(offset, length), this.vector, - length + length, ); } } @@ -743,7 +743,7 @@ export class DuckDBTinyIntVector extends DuckDBVector { constructor( items: Int8Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -752,7 +752,7 @@ export class DuckDBTinyIntVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBTinyIntVector { const data = vectorData(vector, itemCount * Int8Array.BYTES_PER_ELEMENT); const items = new Int8Array(data.buffer, data.byteOffset, 
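DuckDBVector.create above dispatches on the type id to the concrete vector classes that follow; application code usually reaches them through a result's data chunks. A sketch, with fetchChunk and getColumnVector assumed from the data-chunk API:

  import { DuckDBInstance } from '@databrainhq/node-api'; // assumed package name

  async function vectorDemo() {
    const instance = await DuckDBInstance.create(':memory:');
    const connection = await instance.connect();
    const result = await connection.run('SELECT * FROM range(5)');
    const chunk = await result.fetchChunk();
    if (chunk && chunk.rowCount > 0) {
      const vector = chunk.getColumnVector(0); // BIGINT column -> DuckDBBigIntVector
      for (let i = 0; i < vector.itemCount; i++) {
        console.log(vector.getItem(i)); // 0n .. 4n
      }
    }
  }

  vectorDemo();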
itemCount); @@ -782,7 +782,7 @@ export class DuckDBTinyIntVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -790,7 +790,7 @@ export class DuckDBTinyIntVector extends DuckDBVector { return new DuckDBTinyIntVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -802,7 +802,7 @@ export class DuckDBSmallIntVector extends DuckDBVector { constructor( items: Int16Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -811,7 +811,7 @@ export class DuckDBSmallIntVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBSmallIntVector { const data = vectorData(vector, itemCount * Int16Array.BYTES_PER_ELEMENT); const items = new Int16Array(data.buffer, data.byteOffset, itemCount); @@ -841,7 +841,7 @@ export class DuckDBSmallIntVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -849,7 +849,7 @@ export class DuckDBSmallIntVector extends DuckDBVector { return new DuckDBSmallIntVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -861,7 +861,7 @@ export class DuckDBIntegerVector extends DuckDBVector { constructor( items: Int32Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -870,7 +870,7 @@ export class DuckDBIntegerVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBIntegerVector { const data = vectorData(vector, itemCount * Int32Array.BYTES_PER_ELEMENT); const items = new Int32Array(data.buffer, data.byteOffset, itemCount); @@ -900,7 +900,7 @@ export class DuckDBIntegerVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -908,7 +908,7 @@ export class DuckDBIntegerVector extends DuckDBVector { return new DuckDBIntegerVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -920,7 +920,7 @@ export class DuckDBBigIntVector extends DuckDBVector { constructor( items: BigInt64Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -929,11 +929,11 @@ export class DuckDBBigIntVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBBigIntVector { const data = vectorData( vector, - itemCount * BigInt64Array.BYTES_PER_ELEMENT + itemCount * BigInt64Array.BYTES_PER_ELEMENT, ); const items = new BigInt64Array(data.buffer, data.byteOffset, itemCount); const validity = DuckDBValidity.fromVector(vector, itemCount); @@ -962,7 +962,7 @@ export class DuckDBBigIntVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -970,7 +970,7 @@ export class DuckDBBigIntVector extends DuckDBVector { return new DuckDBBigIntVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - 
this.vector + this.vector, ); } } @@ -982,7 +982,7 @@ export class DuckDBUTinyIntVector extends DuckDBVector { constructor( items: Uint8Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -991,7 +991,7 @@ export class DuckDBUTinyIntVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBUTinyIntVector { const data = vectorData(vector, itemCount * Uint8Array.BYTES_PER_ELEMENT); const items = new Uint8Array(data.buffer, data.byteOffset, itemCount); @@ -1021,7 +1021,7 @@ export class DuckDBUTinyIntVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -1029,7 +1029,7 @@ export class DuckDBUTinyIntVector extends DuckDBVector { return new DuckDBUTinyIntVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -1041,7 +1041,7 @@ export class DuckDBUSmallIntVector extends DuckDBVector { constructor( items: Uint16Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -1050,7 +1050,7 @@ export class DuckDBUSmallIntVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBUSmallIntVector { const data = vectorData(vector, itemCount * Uint16Array.BYTES_PER_ELEMENT); const items = new Uint16Array(data.buffer, data.byteOffset, itemCount); @@ -1080,7 +1080,7 @@ export class DuckDBUSmallIntVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -1088,7 +1088,7 @@ export class DuckDBUSmallIntVector extends DuckDBVector { return new DuckDBUSmallIntVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -1100,7 +1100,7 @@ export class DuckDBUIntegerVector extends DuckDBVector { constructor( items: Uint32Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -1109,7 +1109,7 @@ export class DuckDBUIntegerVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBUIntegerVector { const data = vectorData(vector, itemCount * Uint32Array.BYTES_PER_ELEMENT); const items = new Uint32Array(data.buffer, data.byteOffset, itemCount); @@ -1139,7 +1139,7 @@ export class DuckDBUIntegerVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -1147,7 +1147,7 @@ export class DuckDBUIntegerVector extends DuckDBVector { return new DuckDBUIntegerVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -1159,7 +1159,7 @@ export class DuckDBUBigIntVector extends DuckDBVector { constructor( items: BigUint64Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -1168,11 +1168,11 @@ export class DuckDBUBigIntVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBUBigIntVector { const data = vectorData( vector, - itemCount * 
BigUint64Array.BYTES_PER_ELEMENT + itemCount * BigUint64Array.BYTES_PER_ELEMENT, ); const items = new BigUint64Array(data.buffer, data.byteOffset, itemCount); const validity = DuckDBValidity.fromVector(vector, itemCount); @@ -1201,7 +1201,7 @@ export class DuckDBUBigIntVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -1209,7 +1209,7 @@ export class DuckDBUBigIntVector extends DuckDBVector { return new DuckDBUBigIntVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -1221,7 +1221,7 @@ export class DuckDBFloatVector extends DuckDBVector { constructor( items: Float32Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -1230,7 +1230,7 @@ export class DuckDBFloatVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBFloatVector { const data = vectorData(vector, itemCount * Float32Array.BYTES_PER_ELEMENT); const items = new Float32Array(data.buffer, data.byteOffset, itemCount); @@ -1260,7 +1260,7 @@ export class DuckDBFloatVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -1268,7 +1268,7 @@ export class DuckDBFloatVector extends DuckDBVector { return new DuckDBFloatVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -1280,7 +1280,7 @@ export class DuckDBDoubleVector extends DuckDBVector { constructor( items: Float64Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -1289,7 +1289,7 @@ export class DuckDBDoubleVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBDoubleVector { const data = vectorData(vector, itemCount * Float64Array.BYTES_PER_ELEMENT); const items = new Float64Array(data.buffer, data.byteOffset, itemCount); @@ -1319,7 +1319,7 @@ export class DuckDBDoubleVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -1327,7 +1327,7 @@ export class DuckDBDoubleVector extends DuckDBVector { return new DuckDBDoubleVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -1339,7 +1339,7 @@ export class DuckDBTimestampVector extends DuckDBVector { constructor( items: BigInt64Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -1348,11 +1348,11 @@ export class DuckDBTimestampVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBTimestampVector { const data = vectorData( vector, - itemCount * BigInt64Array.BYTES_PER_ELEMENT + itemCount * BigInt64Array.BYTES_PER_ELEMENT, ); const items = new BigInt64Array(data.buffer, data.byteOffset, itemCount); const validity = DuckDBValidity.fromVector(vector, itemCount); @@ -1371,7 +1371,7 @@ export class DuckDBTimestampVector extends DuckDBVector { } public override setItem( itemIndex: number, - value: DuckDBTimestampValue | null + value: 
DuckDBTimestampValue | null, ) { if (value != null) { this.items[itemIndex] = value.micros; @@ -1386,7 +1386,7 @@ export class DuckDBTimestampVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -1394,7 +1394,7 @@ export class DuckDBTimestampVector extends DuckDBVector { return new DuckDBTimestampVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -1406,7 +1406,7 @@ export class DuckDBDateVector extends DuckDBVector { constructor( items: Int32Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -1415,7 +1415,7 @@ export class DuckDBDateVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBDateVector { const data = vectorData(vector, itemCount * Int32Array.BYTES_PER_ELEMENT); const items = new Int32Array(data.buffer, data.byteOffset, itemCount); @@ -1447,7 +1447,7 @@ export class DuckDBDateVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -1455,7 +1455,7 @@ export class DuckDBDateVector extends DuckDBVector { return new DuckDBDateVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -1467,7 +1467,7 @@ export class DuckDBTimeVector extends DuckDBVector { constructor( items: BigInt64Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -1476,11 +1476,11 @@ export class DuckDBTimeVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBTimeVector { const data = vectorData( vector, - itemCount * BigInt64Array.BYTES_PER_ELEMENT + itemCount * BigInt64Array.BYTES_PER_ELEMENT, ); const items = new BigInt64Array(data.buffer, data.byteOffset, itemCount); const validity = DuckDBValidity.fromVector(vector, itemCount); @@ -1511,7 +1511,7 @@ export class DuckDBTimeVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -1519,7 +1519,7 @@ export class DuckDBTimeVector extends DuckDBVector { return new DuckDBTimeVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -1533,7 +1533,7 @@ export class DuckDBIntervalVector extends DuckDBVector { dataView: DataView, validity: DuckDBValidity, vector: duckdb.Vector, - itemCount: number + itemCount: number, ) { super(); this.dataView = dataView; @@ -1543,13 +1543,13 @@ export class DuckDBIntervalVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBIntervalVector { const data = vectorData(vector, itemCount * 16); const dataView = new DataView( data.buffer, data.byteOffset, - data.byteLength + data.byteLength, ); const validity = DuckDBValidity.fromVector(vector, itemCount); return new DuckDBIntervalVector(dataView, validity, vector, itemCount); @@ -1572,7 +1572,7 @@ export class DuckDBIntervalVector extends DuckDBVector { } public override setItem( itemIndex: number, - value: DuckDBIntervalValue | null + value: DuckDBIntervalValue | 
null, ) { if (value != null) { const itemStart = itemIndex * 16; @@ -1590,7 +1590,7 @@ export class DuckDBIntervalVector extends DuckDBVector { 0, this.dataView.buffer as ArrayBuffer, this.dataView.byteOffset, - this.dataView.byteLength + this.dataView.byteLength, ); this.validity.flush(this.vector); } @@ -1599,11 +1599,11 @@ export class DuckDBIntervalVector extends DuckDBVector { new DataView( this.dataView.buffer, this.dataView.byteOffset + offset * 16, - length * 16 + length * 16, ), this.validity.slice(offset, length), this.vector, - length + length, ); } } @@ -1617,7 +1617,7 @@ export class DuckDBHugeIntVector extends DuckDBVector { dataView: DataView, validity: DuckDBValidity, vector: duckdb.Vector, - itemCount: number + itemCount: number, ) { super(); this.dataView = dataView; @@ -1627,13 +1627,13 @@ export class DuckDBHugeIntVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBHugeIntVector { const data = vectorData(vector, itemCount * 16); const dataView = new DataView( data.buffer, data.byteOffset, - data.byteLength + data.byteLength, ); const validity = DuckDBValidity.fromVector(vector, itemCount); return new DuckDBHugeIntVector(dataView, validity, vector, itemCount); @@ -1668,7 +1668,7 @@ export class DuckDBHugeIntVector extends DuckDBVector { 0, this.dataView.buffer as ArrayBuffer, this.dataView.byteOffset, - this.dataView.byteLength + this.dataView.byteLength, ); this.validity.flush(this.vector); } @@ -1677,11 +1677,11 @@ export class DuckDBHugeIntVector extends DuckDBVector { new DataView( this.dataView.buffer, this.dataView.byteOffset + offset * 16, - length * 16 + length * 16, ), this.validity.slice(offset, length), this.vector, - length + length, ); } } @@ -1695,7 +1695,7 @@ export class DuckDBUHugeIntVector extends DuckDBVector { dataView: DataView, validity: DuckDBValidity, vector: duckdb.Vector, - itemCount: number + itemCount: number, ) { super(); this.dataView = dataView; @@ -1705,13 +1705,13 @@ export class DuckDBUHugeIntVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBUHugeIntVector { const data = vectorData(vector, itemCount * 16); const dataView = new DataView( data.buffer, data.byteOffset, - data.byteLength + data.byteLength, ); const validity = DuckDBValidity.fromVector(vector, itemCount); return new DuckDBUHugeIntVector(dataView, validity, vector, itemCount); @@ -1746,7 +1746,7 @@ export class DuckDBUHugeIntVector extends DuckDBVector { 0, this.dataView.buffer as ArrayBuffer, this.dataView.byteOffset, - this.dataView.byteLength + this.dataView.byteLength, ); this.validity.flush(this.vector); } @@ -1755,11 +1755,11 @@ export class DuckDBUHugeIntVector extends DuckDBVector { new DataView( this.dataView.buffer, this.dataView.byteOffset + offset * 16, - length * 16 + length * 16, ), this.validity.slice(offset, length), this.vector, - length + length, ); } } @@ -1777,7 +1777,7 @@ export class DuckDBVarCharVector extends DuckDBVector { validity: DuckDBValidity, vector: duckdb.Vector, itemOffset: number, - itemCount: number + itemCount: number, ) { super(); this.dataView = dataView; @@ -1790,13 +1790,13 @@ export class DuckDBVarCharVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBVarCharVector { const data = vectorData(vector, itemCount * 16); const dataView = new DataView( data.buffer, data.byteOffset, - data.byteLength + data.byteLength, 
); const validity = DuckDBValidity.fromVector(vector, itemCount); return new DuckDBVarCharVector(dataView, validity, vector, 0, itemCount); @@ -1831,7 +1831,7 @@ export class DuckDBVarCharVector extends DuckDBVector { duckdb.vector_assign_string_element( this.vector, this.itemOffset + itemIndex, - cachedItem + cachedItem, ); } this.itemCacheDirty[itemIndex] = false; @@ -1844,12 +1844,12 @@ export class DuckDBVarCharVector extends DuckDBVector { new DataView( this.dataView.buffer, this.dataView.byteOffset + offset * 16, - length * 16 + length * 16, ), this.validity.slice(offset, length), this.vector, offset, - length + length, ); } } @@ -1867,7 +1867,7 @@ export class DuckDBBlobVector extends DuckDBVector { validity: DuckDBValidity, vector: duckdb.Vector, itemOffset: number, - itemCount: number + itemCount: number, ) { super(); this.dataView = dataView; @@ -1880,13 +1880,13 @@ export class DuckDBBlobVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBBlobVector { const data = vectorData(vector, itemCount * 16); const dataView = new DataView( data.buffer, data.byteOffset, - data.byteLength + data.byteLength, ); const validity = DuckDBValidity.fromVector(vector, itemCount); return new DuckDBBlobVector(dataView, validity, vector, 0, itemCount); @@ -1915,7 +1915,7 @@ export class DuckDBBlobVector extends DuckDBVector { duckdb.vector_assign_string_element_len( this.vector, this.itemOffset + itemIndex, - cachedItem.bytes + cachedItem.bytes, ); } this.itemCacheDirty[itemIndex] = false; @@ -1928,12 +1928,12 @@ export class DuckDBBlobVector extends DuckDBVector { new DataView( this.dataView.buffer, this.dataView.byteOffset + offset * 16, - length * 16 + length * 16, ), this.validity.slice(offset, length), this.vector, offset, - length + length, ); } } @@ -1949,7 +1949,7 @@ export class DuckDBDecimal16Vector extends DuckDBVector { dataView: DataView, validity: DuckDBValidity, vector: duckdb.Vector, - itemCount: number + itemCount: number, ) { super(); this.decimalType = decimalType; @@ -1961,13 +1961,13 @@ export class DuckDBDecimal16Vector extends DuckDBVector { static fromRawVector( decimalType: DuckDBDecimalType, vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBDecimal16Vector { const data = vectorData(vector, itemCount * 2); const dataView = new DataView( data.buffer, data.byteOffset, - data.byteLength + data.byteLength, ); const validity = DuckDBValidity.fromVector(vector, itemCount); return new DuckDBDecimal16Vector( @@ -1975,7 +1975,7 @@ export class DuckDBDecimal16Vector extends DuckDBVector { dataView, validity, vector, - itemCount + itemCount, ); } public override get type(): DuckDBDecimalType { @@ -2008,7 +2008,7 @@ export class DuckDBDecimal16Vector extends DuckDBVector { 0, this.dataView.buffer as ArrayBuffer, this.dataView.byteOffset, - this.dataView.byteLength + this.dataView.byteLength, ); this.validity.flush(this.vector); } @@ -2018,11 +2018,11 @@ export class DuckDBDecimal16Vector extends DuckDBVector { new DataView( this.dataView.buffer, this.dataView.byteOffset + offset * 2, - length * 2 + length * 2, ), this.validity.slice(offset, length), this.vector, - length + length, ); } } @@ -2038,7 +2038,7 @@ export class DuckDBDecimal32Vector extends DuckDBVector { dataView: DataView, validity: DuckDBValidity, vector: duckdb.Vector, - itemCount: number + itemCount: number, ) { super(); this.decimalType = decimalType; @@ -2050,13 +2050,13 @@ export class DuckDBDecimal32Vector extends 
DuckDBVector { static fromRawVector( decimalType: DuckDBDecimalType, vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBDecimal32Vector { const data = vectorData(vector, itemCount * 4); const dataView = new DataView( data.buffer, data.byteOffset, - data.byteLength + data.byteLength, ); const validity = DuckDBValidity.fromVector(vector, itemCount); return new DuckDBDecimal32Vector( @@ -2064,7 +2064,7 @@ export class DuckDBDecimal32Vector extends DuckDBVector { dataView, validity, vector, - itemCount + itemCount, ); } public override get type(): DuckDBDecimalType { @@ -2097,7 +2097,7 @@ export class DuckDBDecimal32Vector extends DuckDBVector { 0, this.dataView.buffer as ArrayBuffer, this.dataView.byteOffset, - this.dataView.byteLength + this.dataView.byteLength, ); this.validity.flush(this.vector); } @@ -2107,11 +2107,11 @@ export class DuckDBDecimal32Vector extends DuckDBVector { new DataView( this.dataView.buffer, this.dataView.byteOffset + offset * 4, - length * 4 + length * 4, ), this.validity.slice(offset, length), this.vector, - length + length, ); } } @@ -2127,7 +2127,7 @@ export class DuckDBDecimal64Vector extends DuckDBVector { dataView: DataView, validity: DuckDBValidity, vector: duckdb.Vector, - itemCount: number + itemCount: number, ) { super(); this.decimalType = decimalType; @@ -2139,13 +2139,13 @@ export class DuckDBDecimal64Vector extends DuckDBVector { static fromRawVector( decimalType: DuckDBDecimalType, vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBDecimal64Vector { const data = vectorData(vector, itemCount * 8); const dataView = new DataView( data.buffer, data.byteOffset, - data.byteLength + data.byteLength, ); const validity = DuckDBValidity.fromVector(vector, itemCount); return new DuckDBDecimal64Vector( @@ -2153,7 +2153,7 @@ export class DuckDBDecimal64Vector extends DuckDBVector { dataView, validity, vector, - itemCount + itemCount, ); } public override get type(): DuckDBDecimalType { @@ -2186,7 +2186,7 @@ export class DuckDBDecimal64Vector extends DuckDBVector { 0, this.dataView.buffer as ArrayBuffer, this.dataView.byteOffset, - this.dataView.byteLength + this.dataView.byteLength, ); this.validity.flush(this.vector); } @@ -2196,11 +2196,11 @@ export class DuckDBDecimal64Vector extends DuckDBVector { new DataView( this.dataView.buffer, this.dataView.byteOffset + offset * 8, - length * 8 + length * 8, ), this.validity.slice(offset, length), this.vector, - length + length, ); } } @@ -2216,7 +2216,7 @@ export class DuckDBDecimal128Vector extends DuckDBVector { dataView: DataView, validity: DuckDBValidity, vector: duckdb.Vector, - itemCount: number + itemCount: number, ) { super(); this.decimalType = decimalType; @@ -2228,13 +2228,13 @@ export class DuckDBDecimal128Vector extends DuckDBVector { static fromRawVector( decimalType: DuckDBDecimalType, vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBDecimal128Vector { const data = vectorData(vector, itemCount * 16); const dataView = new DataView( data.buffer, data.byteOffset, - data.byteLength + data.byteLength, ); const validity = DuckDBValidity.fromVector(vector, itemCount); return new DuckDBDecimal128Vector( @@ -2242,7 +2242,7 @@ export class DuckDBDecimal128Vector extends DuckDBVector { dataView, validity, vector, - itemCount + itemCount, ); } public override get type(): DuckDBDecimalType { @@ -2275,24 +2275,24 @@ export class DuckDBDecimal128Vector extends DuckDBVector { 0, this.dataView.buffer as ArrayBuffer, this.dataView.byteOffset, - 
this.dataView.byteLength + this.dataView.byteLength, ); this.validity.flush(this.vector); } public override slice( offset: number, - length: number + length: number, ): DuckDBDecimal128Vector { return new DuckDBDecimal128Vector( this.decimalType, new DataView( this.dataView.buffer, this.dataView.byteOffset + offset * 16, - length * 16 + length * 16, ), this.validity.slice(offset, length), this.vector, - length + length, ); } } @@ -2304,7 +2304,7 @@ export class DuckDBTimestampSecondsVector extends DuckDBVector { enumType: DuckDBEnumType, items: Uint8Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.enumType = enumType; @@ -2533,7 +2533,7 @@ export class DuckDBEnum8Vector extends DuckDBVector { static fromRawVector( enumType: DuckDBEnumType, vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBEnum8Vector { const data = vectorData(vector, itemCount); const items = new Uint8Array(data.buffer, data.byteOffset, itemCount); @@ -2565,7 +2565,7 @@ export class DuckDBEnum8Vector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -2574,7 +2574,7 @@ export class DuckDBEnum8Vector extends DuckDBVector { this.enumType, this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -2588,7 +2588,7 @@ export class DuckDBEnum16Vector extends DuckDBVector { enumType: DuckDBEnumType, items: Uint16Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.enumType = enumType; @@ -2599,7 +2599,7 @@ export class DuckDBEnum16Vector extends DuckDBVector { static fromRawVector( enumType: DuckDBEnumType, vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBEnum16Vector { const data = vectorData(vector, itemCount * 2); const items = new Uint16Array(data.buffer, data.byteOffset, itemCount); @@ -2631,7 +2631,7 @@ export class DuckDBEnum16Vector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -2640,7 +2640,7 @@ export class DuckDBEnum16Vector extends DuckDBVector { this.enumType, this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -2654,7 +2654,7 @@ export class DuckDBEnum32Vector extends DuckDBVector { enumType: DuckDBEnumType, items: Uint32Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.enumType = enumType; @@ -2665,7 +2665,7 @@ export class DuckDBEnum32Vector extends DuckDBVector { static fromRawVector( enumType: DuckDBEnumType, vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBEnum32Vector { const data = vectorData(vector, itemCount * 4); const items = new Uint32Array(data.buffer, data.byteOffset, itemCount); @@ -2697,7 +2697,7 @@ export class DuckDBEnum32Vector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -2706,7 +2706,7 @@ export class DuckDBEnum32Vector extends DuckDBVector { this.enumType, this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -2729,7 +2729,7 @@ export class DuckDBListVector extends DuckDBVector { vector: duckdb.Vector, childData: 
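The Enum8/16/32 hunks here differ only in index width: items are dictionary indices held in a Uint8Array, Uint16Array, or Uint32Array at 1, 2, or 4 bytes per item. A sketch of the decode step they imply, with `labels` standing in for the enum type's value list:

// Sketch: an enum item is an index into the type's dictionary of labels.
function decodeEnumItem(
  items: Uint8Array | Uint16Array | Uint32Array,
  labels: readonly string[],
  itemIndex: number,
): string {
  return labels[items[itemIndex]];
}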
DuckDBVector, itemOffset: number, - itemCount: number + itemCount: number, ) { super(); this.parentList = parentList; @@ -2738,23 +2738,22 @@ export class DuckDBListVector extends DuckDBVector { this.validity = validity; this.vector = vector; this.childData = childData; - this.itemOffset = itemOffset, - this._itemCount = itemCount; + this.itemOffset = itemOffset; this._itemCount = itemCount; this.itemCache = []; } static fromRawVector( listType: DuckDBListType, vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBListVector { const data = vectorData( vector, - itemCount * BigUint64Array.BYTES_PER_ELEMENT * 2 + itemCount * BigUint64Array.BYTES_PER_ELEMENT * 2, ); const entryData = new BigUint64Array( data.buffer, data.byteOffset, - itemCount * 2 + itemCount * 2, ); const validity = DuckDBValidity.fromVector(vector, itemCount); @@ -2764,7 +2763,7 @@ export class DuckDBListVector extends DuckDBVector { const childData = DuckDBVector.create( child_vector, child_vector_size, - listType.valueType + listType.valueType, ); return new DuckDBListVector( @@ -2775,7 +2774,7 @@ export class DuckDBListVector extends DuckDBVector { vector, childData, 0, - itemCount + itemCount, ); } public override get type(): DuckDBListType { @@ -2846,7 +2845,7 @@ export class DuckDBListVector extends DuckDBVector { this.childData = DuckDBVector.create( child_vector, child_vector_size, - this.listType.valueType + this.listType.valueType, ); // set all childData items @@ -2861,7 +2860,7 @@ ) { this.childData.setItem( childItemAbsoluteIndex++, - list.items[childItemRelativeIndex] + list.items[childItemRelativeIndex], ); } } @@ -2876,7 +2875,7 @@ 0, this.entryData.buffer as ArrayBuffer, this.entryData.byteOffset, - this.entryData.byteLength + this.entryData.byteLength, ); // flush validity @@ -2890,13 +2889,13 @@ this.listType, this.entryData.slice( entryDataStartIndex, - entryDataStartIndex + length * 2 + entryDataStartIndex + length * 2, ), this.validity.slice(offset, length), this.vector, this.childData, offset, - length + length, ); } } @@ -2912,7 +2911,7 @@ export class DuckDBStructVector extends DuckDBVector { itemCount: number, entryVectors: readonly DuckDBVector[], validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.structType = structType; @@ -2924,14 +2923,14 @@ static fromRawVector( structType: DuckDBStructType, vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBStructVector { const entryCount = structType.entryCount; const entryVectors: DuckDBVector[] = []; for (let i = 0; i < entryCount; i++) { const child_vector = duckdb.struct_vector_get_child(vector, i); entryVectors.push( - DuckDBVector.create(child_vector, itemCount, structType.entryTypes[i]) + DuckDBVector.create(child_vector, itemCount, structType.entryTypes[i]), ); } const validity = DuckDBValidity.fromVector(vector, itemCount); @@ -2940,7 +2939,7 @@ itemCount, entryVectors, validity, - vector + vector, ); } public override get type(): DuckDBStructType { @@ -2963,7 +2962,7 @@ } public getItemValue( itemIndex: number, - entryIndex: number + entryIndex: number, ): DuckDBValue | null { if (!this.validity.itemValid(itemIndex)) { return null; } @@ 
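The list-vector hunks above size `entryData` at `itemCount * 2` unsigned 64-bit values because each list item stores an (offset, length) pair into the shared child vector, which is also why `slice` copies `length * 2` entries starting at `offset * 2`. A sketch of reading one entry:

// Sketch: list entry i is the (childOffset, childLength) pair at 2i and 2i + 1.
function listEntry(
  entryData: BigUint64Array,
  itemIndex: number,
): { childOffset: bigint; childLength: bigint } {
  return {
    childOffset: entryData[itemIndex * 2],
    childLength: entryData[itemIndex * 2 + 1],
  };
}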
-2976,7 +2975,7 @@ export class DuckDBStructVector extends DuckDBVector { for (let i = 0; i < entryCount; i++) { this.entryVectors[i].setItem( itemIndex, - value.entries[this.structType.entryNames[i]] + value.entries[this.structType.entryNames[i]], ); } this.validity.setItemValid(itemIndex, true); @@ -2991,7 +2990,7 @@ export class DuckDBStructVector extends DuckDBVector { public setItemValue( itemIndex: number, entryIndex: number, - value: DuckDBValue + value: DuckDBValue, ) { return this.entryVectors[entryIndex].setItem(itemIndex, value); } @@ -3007,7 +3006,7 @@ export class DuckDBStructVector extends DuckDBVector { length, this.entryVectors.map((entryVector) => entryVector.slice(offset, length)), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -3024,17 +3023,17 @@ export class DuckDBMapVector extends DuckDBVector { static fromRawVector( mapType: DuckDBMapType, vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBMapVector { const listVectorType = new DuckDBListType( new DuckDBStructType( ['key', 'value'], - [mapType.keyType, mapType.valueType] - ) + [mapType.keyType, mapType.valueType], + ), ); return new DuckDBMapVector( mapType, - DuckDBListVector.fromRawVector(listVectorType, vector, itemCount) + DuckDBListVector.fromRawVector(listVectorType, vector, itemCount), ); } public override get type(): DuckDBMapType { @@ -3066,9 +3065,9 @@ export class DuckDBMapVector extends DuckDBVector { itemIndex, listValue( value.entries.map((entry) => - structValue({ 'key': entry.key, 'value': entry.value }) - ) - ) + structValue({ key: entry.key, value: entry.value }), + ), + ), ); } else { this.listVector.setItem(itemIndex, null); @@ -3080,7 +3079,7 @@ export class DuckDBMapVector extends DuckDBVector { public override slice(offset: number, length: number): DuckDBMapVector { return new DuckDBMapVector( this.mapType, - this.listVector.slice(offset, length) + this.listVector.slice(offset, length), ); } } @@ -3096,7 +3095,7 @@ export class DuckDBArrayVector extends DuckDBVector { validity: DuckDBValidity, vector: duckdb.Vector, childData: DuckDBVector, - itemCount: number + itemCount: number, ) { super(); this.arrayType = arrayType; @@ -3108,7 +3107,7 @@ export class DuckDBArrayVector extends DuckDBVector { static fromRawVector( arrayType: DuckDBArrayType, vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBArrayVector { const validity = DuckDBValidity.fromVector(vector, itemCount); const child_vector = duckdb.array_vector_get_child(vector); @@ -3117,14 +3116,14 @@ export class DuckDBArrayVector extends DuckDBVector { const childData = DuckDBVector.create( child_vector, itemCount * childItemsPerArray, - arrayType.valueType + arrayType.valueType, ); return new DuckDBArrayVector( arrayType, validity, vector, childData, - itemCount + itemCount, ); } private static itemSize(arrayType: DuckDBArrayType): number { @@ -3147,7 +3146,7 @@ export class DuckDBArrayVector extends DuckDBVector { return new DuckDBArrayValue( this.childData .slice(itemIndex * this.arrayType.length, this.arrayType.length) - .toArray() + .toArray(), ); } public override setItem(itemIndex: number, value: DuckDBArrayValue | null) { @@ -3176,9 +3175,9 @@ export class DuckDBArrayVector extends DuckDBVector { this.vector, this.childData.slice( offset * this.arrayType.length, - length * this.arrayType.length + length * this.arrayType.length, ), - length + length, ); } } @@ -3192,7 +3191,7 @@ export class DuckDBUUIDVector extends DuckDBVector { dataView: DataView, validity: 
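As `DuckDBMapVector.fromRawVector` above shows, a MAP column is read through a LIST(STRUCT('key', 'value')) view, which is why `setItem` wraps each entry in `structValue({ key, value })`. A sketch of flattening a plain JS Map into that shape (the helper name is hypothetical):

// Sketch: a JS Map as the list-of-{key, value}-structs shape a MAP round-trips through.
function mapToEntries<K, V>(map: ReadonlyMap<K, V>): { key: K; value: V }[] {
  return Array.from(map, ([key, value]) => ({ key, value }));
}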
DuckDBValidity, vector: duckdb.Vector, - itemCount: number + itemCount: number, ) { super(); this.dataView = dataView; @@ -3202,13 +3201,13 @@ export class DuckDBUUIDVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBUUIDVector { const data = vectorData(vector, itemCount * 16); const dataView = new DataView( data.buffer, data.byteOffset, - data.byteLength + data.byteLength, ); const validity = DuckDBValidity.fromVector(vector, itemCount); return new DuckDBUUIDVector(dataView, validity, vector, itemCount); @@ -3221,7 +3220,9 @@ export class DuckDBUUIDVector extends DuckDBVector { } public override getItem(itemIndex: number): DuckDBUUIDValue | null { return this.validity.itemValid(itemIndex) - ? DuckDBUUIDValue.fromStoredHugeInt(getInt128(this.dataView, itemIndex * 16)) + ? DuckDBUUIDValue.fromStoredHugeInt( + getInt128(this.dataView, itemIndex * 16), + ) : null; } public override setItem(itemIndex: number, value: DuckDBUUIDValue | null) { @@ -3238,7 +3239,7 @@ export class DuckDBUUIDVector extends DuckDBVector { 0, this.dataView.buffer as ArrayBuffer, this.dataView.byteOffset, - this.dataView.byteLength + this.dataView.byteLength, ); this.validity.flush(this.vector); } @@ -3247,11 +3248,11 @@ export class DuckDBUUIDVector extends DuckDBVector { new DataView( this.dataView.buffer, this.dataView.byteOffset + offset * 16, - length * 16 + length * 16, ), this.validity.slice(offset, length), this.vector, - length + length, ); } } @@ -3268,7 +3269,7 @@ export class DuckDBUnionVector extends DuckDBVector { static fromRawVector( unionType: DuckDBUnionType, vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBUnionVector { const entryNames: string[] = ['tag']; const entryTypes: DuckDBType[] = [DuckDBUTinyIntType.instance]; @@ -3280,7 +3281,7 @@ export class DuckDBUnionVector extends DuckDBVector { const structVectorType = new DuckDBStructType(entryNames, entryTypes); return new DuckDBUnionVector( unionType, - DuckDBStructVector.fromRawVector(structVectorType, vector, itemCount) + DuckDBStructVector.fromRawVector(structVectorType, vector, itemCount), ); } public override get type(): DuckDBUnionType { @@ -3323,7 +3324,7 @@ export class DuckDBUnionVector extends DuckDBVector { public override slice(offset: number, length: number): DuckDBUnionVector { return new DuckDBUnionVector( this.unionType, - this.structVector.slice(offset, length) + this.structVector.slice(offset, length), ); } } @@ -3341,7 +3342,7 @@ export class DuckDBBitVector extends DuckDBVector { validity: DuckDBValidity, vector: duckdb.Vector, itemOffset: number, - itemCount: number + itemCount: number, ) { super(); this.dataView = dataView; @@ -3354,13 +3355,13 @@ export class DuckDBBitVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBBitVector { const data = vectorData(vector, itemCount * 16); const dataView = new DataView( data.buffer, data.byteOffset, - data.byteLength + data.byteLength, ); const validity = DuckDBValidity.fromVector(vector, itemCount); return new DuckDBBitVector(dataView, validity, vector, 0, itemCount); @@ -3391,7 +3392,7 @@ export class DuckDBBitVector extends DuckDBVector { duckdb.vector_assign_string_element_len( this.vector, this.itemOffset + itemIndex, - cachedItem.data + cachedItem.data, ); } this.itemCacheDirty[itemIndex] = false; @@ -3404,12 +3405,12 @@ export class DuckDBBitVector extends DuckDBVector { new DataView( this.dataView.buffer, 
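The `DuckDBUnionVector.fromRawVector` hunk above reads a union through a synthetic struct whose first entry is a UTINYINT 'tag' followed by one entry per member; the tag selects which member vector holds the live value. A sketch of that resolution under the same assumption (parallel arrays stand in for the struct's children):

// Sketch: pick the active member of a union item given its tag byte.
// memberNames/memberValues exclude the leading 'tag' entry of the backing struct.
function resolveUnion<T>(
  tag: number,
  memberNames: readonly string[],
  memberValues: readonly T[],
): { member: string; value: T } {
  return { member: memberNames[tag], value: memberValues[tag] };
}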
this.dataView.byteOffset + offset * 16, - length * 16 + length * 16, ), this.validity.slice(offset, length), this.vector, offset, - length + length, ); } } @@ -3421,7 +3422,7 @@ export class DuckDBTimeTZVector extends DuckDBVector { constructor( items: BigUint64Array, validity: DuckDBValidity, - vector: duckdb.Vector + vector: duckdb.Vector, ) { super(); this.items = items; @@ -3430,11 +3431,11 @@ export class DuckDBTimeTZVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBTimeTZVector { const data = vectorData( vector, - itemCount * BigUint64Array.BYTES_PER_ELEMENT + itemCount * BigUint64Array.BYTES_PER_ELEMENT, ); const items = new BigUint64Array(data.buffer, data.byteOffset, itemCount); const validity = DuckDBValidity.fromVector(vector, itemCount); @@ -3465,7 +3466,7 @@ export class DuckDBTimeTZVector extends DuckDBVector { 0, this.items.buffer as ArrayBuffer, this.items.byteOffset, - this.items.byteLength + this.items.byteLength, ); this.validity.flush(this.vector); } @@ -3473,7 +3474,7 @@ export class DuckDBTimeTZVector extends DuckDBVector { return new DuckDBTimeTZVector( this.items.slice(offset, offset + length), this.validity.slice(offset, length), - this.vector + this.vector, ); } } @@ -3485,7 +3486,7 @@ export class DuckDBTimestampTZVector extends DuckDBVector { validity: DuckDBValidity, vector: duckdb.Vector, itemOffset: number, - itemCount: number + itemCount: number, ) { super(); this.dataView = dataView; @@ -3574,13 +3575,13 @@ export class DuckDBVarIntVector extends DuckDBVector { } static fromRawVector( vector: duckdb.Vector, - itemCount: number + itemCount: number, ): DuckDBVarIntVector { const data = vectorData(vector, itemCount * 16); const dataView = new DataView( data.buffer, data.byteOffset, - data.byteLength + data.byteLength, ); const validity = DuckDBValidity.fromVector(vector, itemCount); return new DuckDBVarIntVector(dataView, validity, vector, 0, itemCount); @@ -3611,7 +3612,7 @@ export class DuckDBVarIntVector extends DuckDBVector { duckdb.vector_assign_string_element_len( this.vector, this.itemOffset + itemIndex, - getBytesFromVarInt(cachedItem) + getBytesFromVarInt(cachedItem), ); } this.itemCacheDirty[itemIndex] = false; @@ -3624,12 +3625,12 @@ export class DuckDBVarIntVector extends DuckDBVector { new DataView( this.dataView.buffer, this.dataView.byteOffset + offset * 16, - length * 16 + length * 16, ), this.validity.slice(offset, length), this.vector, offset, - length + length, ); } } diff --git a/api/src/JS.ts b/api/src/JS.ts index a31de4ab..6ac4eab7 100644 --- a/api/src/JS.ts +++ b/api/src/JS.ts @@ -8,4 +8,3 @@ export type JS = | Date | JS[] | { [key: string]: JS }; - \ No newline at end of file diff --git a/api/src/JsonDuckDBValueConverter.ts b/api/src/JsonDuckDBValueConverter.ts index eac588c6..541c8220 100644 --- a/api/src/JsonDuckDBValueConverter.ts +++ b/api/src/JsonDuckDBValueConverter.ts @@ -26,19 +26,19 @@ const JsonConvertersByTypeId: Record< [DuckDBTypeId.TINYINT]: numberFromValue, [DuckDBTypeId.SMALLINT]: numberFromValue, [DuckDBTypeId.INTEGER]: numberFromValue, - [DuckDBTypeId.BIGINT]: stringFromValue, + [DuckDBTypeId.BIGINT]: numberFromValue, [DuckDBTypeId.UTINYINT]: numberFromValue, [DuckDBTypeId.USMALLINT]: numberFromValue, [DuckDBTypeId.UINTEGER]: numberFromValue, - [DuckDBTypeId.UBIGINT]: stringFromValue, + [DuckDBTypeId.UBIGINT]: numberFromValue, [DuckDBTypeId.FLOAT]: jsonNumberFromValue, [DuckDBTypeId.DOUBLE]: jsonNumberFromValue, [DuckDBTypeId.TIMESTAMP]: 
stringFromValue, [DuckDBTypeId.DATE]: stringFromValue, [DuckDBTypeId.TIME]: stringFromValue, [DuckDBTypeId.INTERVAL]: jsonObjectFromIntervalValue, - [DuckDBTypeId.HUGEINT]: stringFromValue, - [DuckDBTypeId.UHUGEINT]: stringFromValue, + [DuckDBTypeId.HUGEINT]: numberFromValue, + [DuckDBTypeId.UHUGEINT]: numberFromValue, [DuckDBTypeId.VARCHAR]: stringFromValue, [DuckDBTypeId.BLOB]: stringFromValue, [DuckDBTypeId.DECIMAL]: stringFromValue, @@ -61,5 +61,5 @@ const JsonConvertersByTypeId: Record< }; export const JsonDuckDBValueConverter = createDuckDBValueConverter( - JsonConvertersByTypeId + JsonConvertersByTypeId, ); diff --git a/api/src/configurationOptionDescriptions.ts b/api/src/configurationOptionDescriptions.ts index 16cee45d..bd3fca1b 100644 --- a/api/src/configurationOptionDescriptions.ts +++ b/api/src/configurationOptionDescriptions.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; export function configurationOptionDescriptions(): Readonly< Record diff --git a/api/src/conversion/dateTimeStringConversion.ts b/api/src/conversion/dateTimeStringConversion.ts index 65051428..a2f72013 100644 --- a/api/src/conversion/dateTimeStringConversion.ts +++ b/api/src/conversion/dateTimeStringConversion.ts @@ -15,7 +15,7 @@ const POSITIVE_INFINITY_TIMESTAMP = 9223372036854775807n; // 2^63-1 export function getDuckDBDateStringFromYearMonthDay( year: number, month: number, - dayOfMonth: number + dayOfMonth: number, ): string { const yearStr = String(Math.abs(year)).padStart(4, '0'); const monthStr = String(month).padStart(2, '0'); @@ -49,7 +49,7 @@ export function getDuckDBDateStringFromDays(days: number): string { } export function getTimezoneOffsetString( - timezoneOffsetInMinutes?: number + timezoneOffsetInMinutes?: number, ): string | undefined { if (timezoneOffsetInMinutes === undefined) { return undefined; @@ -69,7 +69,7 @@ export function getTimezoneOffsetString( export function getAbsoluteOffsetStringFromParts( hoursPart: number, minutesPart: number, - secondsPart: number + secondsPart: number, ): string { const hoursStr = String(hoursPart).padStart(2, '0'); const minutesStr = @@ -89,7 +89,7 @@ export function getAbsoluteOffsetStringFromParts( } export function getOffsetStringFromAbsoluteSeconds( - absoluteOffsetInSeconds: number + absoluteOffsetInSeconds: number, ): string { const secondsPart = absoluteOffsetInSeconds % 60; const minutes = Math.floor(absoluteOffsetInSeconds / 60); @@ -102,7 +102,7 @@ export function getOffsetStringFromSeconds(offsetInSeconds: number): string { const negative = offsetInSeconds < 0; const absoluteOffsetInSeconds = negative ? -offsetInSeconds : offsetInSeconds; const absoluteString = getOffsetStringFromAbsoluteSeconds( - absoluteOffsetInSeconds + absoluteOffsetInSeconds, ); return `${negative ? 
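One behavioral note on the JsonDuckDBValueConverter change above: mapping BIGINT, UBIGINT, HUGEINT, and UHUGEINT to `numberFromValue` makes JSON output lossy for magnitudes beyond Number.MAX_SAFE_INTEGER (2^53 - 1), since a JS number cannot represent every 64- or 128-bit integer exactly. A hedged sketch of a guard a caller could apply if exact values matter (not part of this patch):

// Sketch: keep a bigint as a number only when the conversion is exact.
function jsonSafeInteger(value: bigint): number | string {
  return value >= BigInt(Number.MIN_SAFE_INTEGER) &&
    value <= BigInt(Number.MAX_SAFE_INTEGER)
    ? Number(value)
    : value.toString();
}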
'-' : '+'}${absoluteString}`; } @@ -111,7 +111,7 @@ export function getDuckDBTimeStringFromParts( hoursPart: bigint, minutesPart: bigint, secondsPart: bigint, - microsecondsPart: bigint + microsecondsPart: bigint, ): string { const hoursStr = String(hoursPart).padStart(2, '0'); const minutesStr = String(minutesPart).padStart(2, '0'); @@ -128,7 +128,7 @@ export function getDuckDBTimeStringFromPartsNS( hoursPart: bigint, minutesPart: bigint, secondsPart: bigint, - nanosecondsPart: bigint + nanosecondsPart: bigint, ): string { const hoursStr = String(hoursPart).padStart(2, '0'); const minutesStr = String(minutesPart).padStart(2, '0'); @@ -142,7 +142,7 @@ } export function getDuckDBTimeStringFromPositiveMicroseconds( - positiveMicroseconds: bigint + positiveMicroseconds: bigint, ): string { const microsecondsPart = positiveMicroseconds % MICROSECONDS_PER_SECOND; const seconds = positiveMicroseconds / MICROSECONDS_PER_SECOND; @@ -154,12 +154,12 @@ hoursPart, minutesPart, secondsPart, - microsecondsPart + microsecondsPart, ); } export function getDuckDBTimeStringFromPositiveNanoseconds( - positiveNanoseconds: bigint + positiveNanoseconds: bigint, ): string { const nanosecondsPart = positiveNanoseconds % NANOSECONDS_PER_SECOND; const seconds = positiveNanoseconds / NANOSECONDS_PER_SECOND; @@ -171,12 +171,12 @@ hoursPart, minutesPart, secondsPart, - nanosecondsPart + nanosecondsPart, ); } export function getDuckDBTimeStringFromMicrosecondsInDay( - microsecondsInDay: bigint + microsecondsInDay: bigint, ): string { const positiveMicroseconds = microsecondsInDay < 0 @@ -186,7 +186,7 @@ } export function getDuckDBTimeStringFromNanosecondsInDay( - nanosecondsInDay: bigint + nanosecondsInDay: bigint, ): string { const positiveNanoseconds = nanosecondsInDay < 0 @@ -196,7 +196,7 @@ } export function getDuckDBTimeStringFromMicroseconds( - microseconds: bigint + microseconds: bigint, ): string { const negative = microseconds < 0; const positiveMicroseconds = negative ? -microseconds : microseconds; @@ -208,7 +208,7 @@ export function getDuckDBTimestampStringFromDaysAndMicroseconds( days: bigint, microsecondsInDay: bigint, - timezonePart?: string + timezonePart?: string, ): string { // This conversion of BigInt to Number is safe, because the largest absolute value that `days` can have is 106751991, // which fits without loss of precision in a JS Number. (106751991 = (2^63-1) / MICROSECONDS_PER_DAY) @@ -219,7 +219,7 @@ export function getDuckDBTimestampStringFromDaysAndNanoseconds( days: bigint, - nanosecondsInDay: bigint + nanosecondsInDay: bigint, ): string { // This conversion of BigInt to Number is safe, because the largest absolute value that `days` can have is 106751 // which fits without loss of precision in a JS Number. 
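The two `...FromPositive*` functions above share one decomposition: strip the sub-second remainder with a modulus, then split whole seconds into hours, minutes, and seconds. A condensed sketch of that arithmetic (the constant mirrors the module's own):

// Sketch of the split performed by getDuckDBTimeStringFromPositiveMicroseconds.
const MICROSECONDS_PER_SECOND = 1_000_000n;
function timePartsFromMicros(positiveMicros: bigint) {
  const microsecondsPart = positiveMicros % MICROSECONDS_PER_SECOND;
  const seconds = positiveMicros / MICROSECONDS_PER_SECOND;
  return {
    hoursPart: seconds / 3600n,
    minutesPart: (seconds % 3600n) / 60n,
    secondsPart: seconds % 60n,
    microsecondsPart,
  };
}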
(106751 = (2^63-1) / NANOSECONDS_PER_DAY) @@ -230,7 +230,7 @@ export function getDuckDBTimestampStringFromDaysAndNanoseconds( export function getDuckDBTimestampStringFromMicroseconds( microseconds: bigint, - timezoneOffsetInMinutes?: number + timezoneOffsetInMinutes?: number, ): string { // Note that -infinity and infinity are only representable in TIMESTAMP (and TIMESTAMPTZ), not the other timestamp // variants. This is by-design and matches DuckDB. @@ -256,26 +256,26 @@ export function getDuckDBTimestampStringFromMicroseconds( return getDuckDBTimestampStringFromDaysAndMicroseconds( days, microsecondsPart, - getTimezoneOffsetString(timezoneOffsetInMinutes) + getTimezoneOffsetString(timezoneOffsetInMinutes), ); } export function getDuckDBTimestampStringFromSeconds(seconds: bigint): string { return getDuckDBTimestampStringFromMicroseconds( - seconds * MICROSECONDS_PER_SECOND + seconds * MICROSECONDS_PER_SECOND, ); } export function getDuckDBTimestampStringFromMilliseconds( - milliseconds: bigint + milliseconds: bigint, ): string { return getDuckDBTimestampStringFromMicroseconds( - milliseconds * MICROSECONDS_PER_MILLISECOND + milliseconds * MICROSECONDS_PER_MILLISECOND, ); } export function getDuckDBTimestampStringFromNanoseconds( - nanoseconds: bigint + nanoseconds: bigint, ): string { let days = nanoseconds / NANOSECONDS_PER_DAY; let nanosecondsPart = nanoseconds % NANOSECONDS_PER_DAY; @@ -294,7 +294,7 @@ function numberAndUnit(value: number, baseUnit: string): string { export function getDuckDBIntervalString( months: number, days: number, - microseconds: bigint + microseconds: bigint, ): string { const parts: string[] = []; if (months !== 0) { diff --git a/api/src/conversion/stringFromBlob.ts b/api/src/conversion/stringFromBlob.ts index fce81638..c916e36f 100644 --- a/api/src/conversion/stringFromBlob.ts +++ b/api/src/conversion/stringFromBlob.ts @@ -38,7 +38,7 @@ export function stringFromBlobArrayJoin(bytes: Uint8Array): string { byte >= 0x7f ) { byteStrings.push( - `\\x${byte.toString(16).toUpperCase().padStart(2, '0')}` + `\\x${byte.toString(16).toUpperCase().padStart(2, '0')}`, ); } else { byteStrings.push(String.fromCharCode(byte)); diff --git a/api/src/convertColumnsFromChunks.ts b/api/src/convertColumnsFromChunks.ts index 334eede6..70f9929f 100644 --- a/api/src/convertColumnsFromChunks.ts +++ b/api/src/convertColumnsFromChunks.ts @@ -3,7 +3,7 @@ import { DuckDBValueConverter } from './DuckDBValueConverter'; export function convertColumnsFromChunks( chunks: readonly DuckDBDataChunk[], - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ): (T | null)[][] { if (chunks.length === 0) { return []; @@ -19,7 +19,7 @@ export function convertColumnsFromChunks( chunk.visitColumnValues( columnIndex, (value, _rowIndex, _columnIndex, type) => - convertedColumns[columnIndex].push(converter(value, type, converter)) + convertedColumns[columnIndex].push(converter(value, type, converter)), ); } } diff --git a/api/src/convertColumnsObjectFromChunks.ts b/api/src/convertColumnsObjectFromChunks.ts index 57f4904a..98a0ee2d 100644 --- a/api/src/convertColumnsObjectFromChunks.ts +++ b/api/src/convertColumnsObjectFromChunks.ts @@ -4,7 +4,7 @@ import { DuckDBValueConverter } from './DuckDBValueConverter'; export function convertColumnsObjectFromChunks( chunks: readonly DuckDBDataChunk[], columnNames: readonly string[], - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ): Record { const convertedColumnsObject: Record = {}; for (const columnName of columnNames) { @@ -20,8 
+20,8 @@ export function convertColumnsObjectFromChunks( columnIndex, (value, _rowIndex, _columnIndex, type) => convertedColumnsObject[columnNames[columnIndex]].push( - converter(value, type, converter) - ) + converter(value, type, converter), + ), ); } } diff --git a/api/src/convertRowObjectsFromChunks.ts b/api/src/convertRowObjectsFromChunks.ts index f8539eea..0d3e9065 100644 --- a/api/src/convertRowObjectsFromChunks.ts +++ b/api/src/convertRowObjectsFromChunks.ts @@ -4,7 +4,7 @@ import { DuckDBValueConverter } from './DuckDBValueConverter'; export function convertRowObjectsFromChunks( chunks: readonly DuckDBDataChunk[], columnNames: readonly string[], - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ): Record[] { const rowObjects: Record[] = []; for (const chunk of chunks) { diff --git a/api/src/convertRowsFromChunks.ts b/api/src/convertRowsFromChunks.ts index f4f521ca..c1c05e56 100644 --- a/api/src/convertRowsFromChunks.ts +++ b/api/src/convertRowsFromChunks.ts @@ -3,7 +3,7 @@ import { DuckDBValueConverter } from './DuckDBValueConverter'; export function convertRowsFromChunks( chunks: readonly DuckDBDataChunk[], - converter: DuckDBValueConverter + converter: DuckDBValueConverter, ): (T | null)[][] { const rows: (T | null)[][] = []; for (const chunk of chunks) { diff --git a/api/src/createConfig.ts b/api/src/createConfig.ts index c2b99269..5f44ed19 100644 --- a/api/src/createConfig.ts +++ b/api/src/createConfig.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; export function createConfig(options?: Record): duckdb.Config { const config = duckdb.create_config(); diff --git a/api/src/createDuckDBValueConverter.ts b/api/src/createDuckDBValueConverter.ts index cae35a77..4b678db4 100644 --- a/api/src/createDuckDBValueConverter.ts +++ b/api/src/createDuckDBValueConverter.ts @@ -2,7 +2,7 @@ import { DuckDBTypeId } from './DuckDBTypeId'; import { DuckDBValueConverter } from './DuckDBValueConverter'; export function createDuckDBValueConverter( - convertersByTypeId: Record | undefined> + convertersByTypeId: Record | undefined>, ): DuckDBValueConverter { return (value, type, converter) => { if (value == null) { diff --git a/api/src/createResult.ts b/api/src/createResult.ts index 2c51651f..aac37f8c 100644 --- a/api/src/createResult.ts +++ b/api/src/createResult.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { DuckDBMaterializedResult } from './DuckDBMaterializedResult'; import { DuckDBResult } from './DuckDBResult'; diff --git a/api/src/createValue.ts b/api/src/createValue.ts index 1d453447..a6fbe446 100644 --- a/api/src/createValue.ts +++ b/api/src/createValue.ts @@ -1,4 +1,4 @@ -import duckdb, { Value } from '@duckdb/node-bindings'; +import duckdb, { Value } from '@databrainhq/node-bindings'; import { DuckDBType } from './DuckDBType'; import { DuckDBTypeId } from './DuckDBTypeId'; import { @@ -148,7 +148,7 @@ export function createValue(type: DuckDBType, input: DuckDBValue): Value { if (typeof input === 'string') { return duckdb.create_enum_value( type.toLogicalType().logical_type, - type.indexForValue(input) + type.indexForValue(input), ); } throw new Error(`input is not a string`); @@ -156,12 +156,12 @@ export function createValue(type: DuckDBType, input: DuckDBValue): Value { if (input instanceof DuckDBListValue) { if (type.valueType.typeId === DuckDBTypeId.ANY) { throw new Error( - 'Cannot create lists with item type of ANY. 
Specify a specific type.' + 'Cannot create lists with item type of ANY. Specify a specific type.', ); } return duckdb.create_list_value( type.valueType.toLogicalType().logical_type, - input.items.map((item) => createValue(type.valueType, item)) + input.items.map((item) => createValue(type.valueType, item)), ); } throw new Error(`input is not a DuckDBListValue`); @@ -169,18 +169,18 @@ export function createValue(type: DuckDBType, input: DuckDBValue): Value { if (input instanceof DuckDBStructValue) { if ( type.entryTypes.find( - (entryType) => entryType.typeId === DuckDBTypeId.ANY + (entryType) => entryType.typeId === DuckDBTypeId.ANY, ) ) { throw new Error( - 'Cannot create structs with an entry type of ANY. Specify a specific type.' + 'Cannot create structs with an entry type of ANY. Specify a specific type.', ); } return duckdb.create_struct_value( type.toLogicalType().logical_type, Object.values(input.entries).map((value, i) => - createValue(type.entryTypes[i], value) - ) + createValue(type.entryTypes[i], value), + ), ); } throw new Error(`input is not a DuckDBStructValue`); @@ -188,18 +188,20 @@ export function createValue(type: DuckDBType, input: DuckDBValue): Value { if (input instanceof DuckDBMapValue) { if (type.keyType.typeId === DuckDBTypeId.ANY) { throw new Error( - 'Cannot create maps with key type of ANY. Specify a specific type.' + 'Cannot create maps with key type of ANY. Specify a specific type.', ); } if (type.valueType.typeId === DuckDBTypeId.ANY) { throw new Error( - 'Cannot create maps with value type of ANY. Specify a specific type.' + 'Cannot create maps with value type of ANY. Specify a specific type.', ); } return duckdb.create_map_value( type.toLogicalType().logical_type, input.entries.map((entry) => createValue(type.keyType, entry.key)), - input.entries.map((entry) => createValue(type.valueType, entry.value)) + input.entries.map((entry) => + createValue(type.valueType, entry.value), + ), ); } throw new Error(`input is not a DuckDBMapValue`); @@ -207,12 +209,12 @@ export function createValue(type: DuckDBType, input: DuckDBValue): Value { if (input instanceof DuckDBArrayValue) { if (type.valueType.typeId === DuckDBTypeId.ANY) { throw new Error( - 'Cannot create arrays with item type of ANY. Specify a specific type.' + 'Cannot create arrays with item type of ANY. Specify a specific type.', ); } return duckdb.create_array_value( type.valueType.toLogicalType().logical_type, - input.items.map((item) => createValue(type.valueType, item)) + input.items.map((item) => createValue(type.valueType, item)), ); } throw new Error(`input is not a DuckDBArrayValue`); @@ -231,7 +233,7 @@ export function createValue(type: DuckDBType, input: DuckDBValue): Value { return duckdb.create_union_value( type.toLogicalType().logical_type, tagIndex, - createValue(memberType, input.value) + createValue(memberType, input.value), ); } throw new Error(`input is not a DuckDBUnionValue`); @@ -252,7 +254,7 @@ export function createValue(type: DuckDBType, input: DuckDBValue): Value { throw new Error(`input is not a DuckDBTimestampTZValue`); case DuckDBTypeId.ANY: throw new Error( - `Cannot create values of type ANY. Specify a specific type.` + `Cannot create values of type ANY. 
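The repeated ANY checks in `createValue` above exist because each nested constructor needs a concrete child logical type before any element value can be built; rejecting ANY up front replaces a failure in the native layer with a clear error. A sketch of the guard these branches effectively inline (the function name is hypothetical):

// Sketch: fail fast when a nested value type is unresolved.
function assertConcreteType(typeId: number, anyTypeId: number, what: string): void {
  if (typeId === anyTypeId) {
    throw new Error(
      `Cannot create ${what} with item type of ANY. Specify a specific type.`,
    );
  }
}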
Specify a specific type.`, ); case DuckDBTypeId.VARINT: if (typeof input === 'bigint') { diff --git a/api/src/duckdb.ts b/api/src/duckdb.ts index 4d3ddbb6..991b58ec 100644 --- a/api/src/duckdb.ts +++ b/api/src/duckdb.ts @@ -2,8 +2,8 @@ export { double_to_hugeint, double_to_uhugeint, hugeint_to_double, - uhugeint_to_double -} from '@duckdb/node-bindings'; + uhugeint_to_double, +} from '@databrainhq/node-bindings'; export * from './configurationOptionDescriptions'; export * from './createDuckDBValueConverter'; export * from './DuckDBAppender'; diff --git a/api/src/enums.ts b/api/src/enums.ts index 48e93182..4ac45e66 100644 --- a/api/src/enums.ts +++ b/api/src/enums.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; export type ResultReturnType = duckdb.ResultType; export const ResultReturnType = duckdb.ResultType; diff --git a/api/src/getColumnsFromChunks.ts b/api/src/getColumnsFromChunks.ts index f3e70082..ea1494ee 100644 --- a/api/src/getColumnsFromChunks.ts +++ b/api/src/getColumnsFromChunks.ts @@ -2,7 +2,7 @@ import { DuckDBDataChunk } from './DuckDBDataChunk'; import { DuckDBValue } from './values'; export function getColumnsFromChunks( - chunks: readonly DuckDBDataChunk[] + chunks: readonly DuckDBDataChunk[], ): DuckDBValue[][] { const columns: DuckDBValue[][] = []; for (const chunk of chunks) { diff --git a/api/src/getColumnsObjectFromChunks.ts b/api/src/getColumnsObjectFromChunks.ts index 04c4c995..fff8120b 100644 --- a/api/src/getColumnsObjectFromChunks.ts +++ b/api/src/getColumnsObjectFromChunks.ts @@ -3,7 +3,7 @@ import { DuckDBValue } from './values'; export function getColumnsObjectFromChunks( chunks: readonly DuckDBDataChunk[], - columnNames: readonly string[] + columnNames: readonly string[], ): Record { const columnsObject: Record = {}; for (const chunk of chunks) { diff --git a/api/src/getRowObjectsFromChunks.ts b/api/src/getRowObjectsFromChunks.ts index 3f512c76..f095f8ed 100644 --- a/api/src/getRowObjectsFromChunks.ts +++ b/api/src/getRowObjectsFromChunks.ts @@ -3,7 +3,7 @@ import { DuckDBValue } from './values'; export function getRowObjectsFromChunks( chunks: readonly DuckDBDataChunk[], - columnNames: readonly string[] + columnNames: readonly string[], ): Record[] { const rowObjects: Record[] = []; for (const chunk of chunks) { diff --git a/api/src/getRowsFromChunks.ts b/api/src/getRowsFromChunks.ts index 8a09309c..754f657c 100644 --- a/api/src/getRowsFromChunks.ts +++ b/api/src/getRowsFromChunks.ts @@ -2,7 +2,7 @@ import { DuckDBDataChunk } from './DuckDBDataChunk'; import { DuckDBValue } from './values'; export function getRowsFromChunks( - chunks: readonly DuckDBDataChunk[] + chunks: readonly DuckDBDataChunk[], ): DuckDBValue[][] { const rows: DuckDBValue[][] = []; for (const chunk of chunks) { diff --git a/api/src/tsconfig.json b/api/src/tsconfig.json index d2e0778b..61901c60 100644 --- a/api/src/tsconfig.json +++ b/api/src/tsconfig.json @@ -1,6 +1,6 @@ { "extends": "../../tsconfig.library.json", "compilerOptions": { - "outDir": "../pkgs/@duckdb/node-api/lib" + "outDir": "../pkgs/@databrainhq/node-api/lib" } } diff --git a/api/src/typeForValue.ts b/api/src/typeForValue.ts index 540bd212..e7c27290 100644 --- a/api/src/typeForValue.ts +++ b/api/src/typeForValue.ts @@ -83,7 +83,7 @@ export function typeForValue(value: DuckDBValue): DuckDBType { } else if (value instanceof DuckDBMapValue) { return MAP( typeForValue(value.entries[0].key), - typeForValue(value.entries[0].value) + 
typeForValue(value.entries[0].value), ); } else if (value instanceof DuckDBStructValue) { const entryTypes: Record = {}; diff --git a/api/src/values/DuckDBBitValue.ts b/api/src/values/DuckDBBitValue.ts index 782bf5cd..be75e2ee 100644 --- a/api/src/values/DuckDBBitValue.ts +++ b/api/src/values/DuckDBBitValue.ts @@ -52,18 +52,30 @@ export class DuckDBBitValue { } public static fromString(str: string, on: string = '1'): DuckDBBitValue { - return DuckDBBitValue.fromLengthAndPredicate(str.length, i => str[i] === on); + return DuckDBBitValue.fromLengthAndPredicate( + str.length, + (i) => str[i] === on, + ); } - public static fromBits(bits: readonly number[], on: number = 1): DuckDBBitValue { - return DuckDBBitValue.fromLengthAndPredicate(bits.length, i => bits[i] === on); + public static fromBits( + bits: readonly number[], + on: number = 1, + ): DuckDBBitValue { + return DuckDBBitValue.fromLengthAndPredicate( + bits.length, + (i) => bits[i] === on, + ); } public static fromBools(bools: readonly boolean[]): DuckDBBitValue { - return DuckDBBitValue.fromLengthAndPredicate(bools.length, i => bools[i]); + return DuckDBBitValue.fromLengthAndPredicate(bools.length, (i) => bools[i]); } - public static fromLengthAndPredicate(length: number, predicate: (index: number) => boolean): DuckDBBitValue { + public static fromLengthAndPredicate( + length: number, + predicate: (index: number) => boolean, + ): DuckDBBitValue { const byteCount = Math.ceil(length / 8) + 1; const paddingBitCount = (8 - (length % 8)) % 8; @@ -102,7 +114,9 @@ export class DuckDBBitValue { } } -export function bitValue(input: string | readonly boolean[] | readonly number[]): DuckDBBitValue { +export function bitValue( + input: string | readonly boolean[] | readonly number[], +): DuckDBBitValue { if (typeof input === 'string') { return DuckDBBitValue.fromString(input); } @@ -113,5 +127,5 @@ export function bitValue(input: string | readonly boolean[] | readonly number[]) return DuckDBBitValue.fromBits(input as readonly number[]); } } - return DuckDBBitValue.fromLengthAndPredicate(0, _ => false); + return DuckDBBitValue.fromLengthAndPredicate(0, (_) => false); } diff --git a/api/src/values/DuckDBBlobValue.ts b/api/src/values/DuckDBBlobValue.ts index 1a928cdd..b75567d3 100644 --- a/api/src/values/DuckDBBlobValue.ts +++ b/api/src/values/DuckDBBlobValue.ts @@ -24,4 +24,3 @@ export function blobValue(input: Uint8Array | string): DuckDBBlobValue { } return new DuckDBBlobValue(input); } - diff --git a/api/src/values/DuckDBDateValue.ts b/api/src/values/DuckDBDateValue.ts index 18c9d2b9..8d97b8a2 100644 --- a/api/src/values/DuckDBDateValue.ts +++ b/api/src/values/DuckDBDateValue.ts @@ -1,4 +1,4 @@ -import duckdb, { Date_, DateParts } from '@duckdb/node-bindings'; +import duckdb, { Date_, DateParts } from '@databrainhq/node-bindings'; import { getDuckDBDateStringFromDays } from '../conversion/dateTimeStringConversion'; export type { DateParts }; diff --git a/api/src/values/DuckDBDecimalValue.ts b/api/src/values/DuckDBDecimalValue.ts index 0eac34d4..bd662353 100644 --- a/api/src/values/DuckDBDecimalValue.ts +++ b/api/src/values/DuckDBDecimalValue.ts @@ -1,4 +1,4 @@ -import duckdb, { Decimal } from '@duckdb/node-bindings'; +import duckdb, { Decimal } from '@databrainhq/node-bindings'; import { stringFromDecimal } from '../conversion/stringFromDecimal'; export class DuckDBDecimalValue implements Decimal { @@ -30,13 +30,21 @@ export class DuckDBDecimalValue implements Decimal { return duckdb.decimal_to_double(this); } - public static fromDouble(double: 
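In `fromLengthAndPredicate` above, the payload is sized `Math.ceil(length / 8) + 1` bytes and `(8 - (length % 8)) % 8` computes the pad; my understanding is that the extra byte records how many high bits of the first data byte are padding. A sketch of just that sizing math:

// Sketch: BIT payload sizing as used above.
function bitSizing(length: number): { byteCount: number; paddingBitCount: number } {
  return {
    byteCount: Math.ceil(length / 8) + 1, // +1 byte assumed to hold the padding count
    paddingBitCount: (8 - (length % 8)) % 8,
  };
}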
number, width: number, scale: number): DuckDBDecimalValue { + public static fromDouble( + double: number, + width: number, + scale: number, + ): DuckDBDecimalValue { const decimal = duckdb.double_to_decimal(double, width, scale); return new DuckDBDecimalValue(decimal.value, decimal.width, decimal.scale); } } -export function decimalValue(value: bigint | number, width: number, scale: number): DuckDBDecimalValue { +export function decimalValue( + value: bigint | number, + width: number, + scale: number, +): DuckDBDecimalValue { if (typeof value === 'number') { return DuckDBDecimalValue.fromDouble(value, width, scale); } diff --git a/api/src/values/DuckDBIntervalValue.ts b/api/src/values/DuckDBIntervalValue.ts index 3a78aed1..cd7d2594 100644 --- a/api/src/values/DuckDBIntervalValue.ts +++ b/api/src/values/DuckDBIntervalValue.ts @@ -1,4 +1,4 @@ -import { Interval } from '@duckdb/node-bindings'; +import { Interval } from '@databrainhq/node-bindings'; import { getDuckDBIntervalString } from '../conversion/dateTimeStringConversion'; export class DuckDBIntervalValue implements Interval { @@ -17,6 +17,10 @@ export class DuckDBIntervalValue implements Interval { } } -export function intervalValue(months: number, days: number, micros: bigint): DuckDBIntervalValue { +export function intervalValue( + months: number, + days: number, + micros: bigint, +): DuckDBIntervalValue { return new DuckDBIntervalValue(months, days, micros); } diff --git a/api/src/values/DuckDBMapValue.ts b/api/src/values/DuckDBMapValue.ts index ab347662..fab5d8db 100644 --- a/api/src/values/DuckDBMapValue.ts +++ b/api/src/values/DuckDBMapValue.ts @@ -14,9 +14,12 @@ export class DuckDBMapValue { } public toString(): string { - return `{${this.entries.map(({ key, value }) => - `${displayStringForDuckDBValue(key)}: ${displayStringForDuckDBValue(value)}` - ).join(', ')}}`; + return `{${this.entries + .map( + ({ key, value }) => + `${displayStringForDuckDBValue(key)}: ${displayStringForDuckDBValue(value)}`, + ) + .join(', ')}}`; } } diff --git a/api/src/values/DuckDBStructValue.ts b/api/src/values/DuckDBStructValue.ts index 4f1ca95d..ecc405b1 100644 --- a/api/src/values/DuckDBStructValue.ts +++ b/api/src/values/DuckDBStructValue.ts @@ -11,12 +11,16 @@ export class DuckDBStructValue { public toString(): string { const parts: string[] = []; for (const name in this.entries) { - parts.push(`${displayStringForDuckDBValue(name)}: ${displayStringForDuckDBValue(this.entries[name])}`); + parts.push( + `${displayStringForDuckDBValue(name)}: ${displayStringForDuckDBValue(this.entries[name])}`, + ); } return `{${parts.join(', ')}}`; } } -export function structValue(entries: Readonly>): DuckDBStructValue { +export function structValue( + entries: Readonly>, +): DuckDBStructValue { return new DuckDBStructValue(entries); } diff --git a/api/src/values/DuckDBTimeTZValue.ts b/api/src/values/DuckDBTimeTZValue.ts index 2279d916..5dbe15b8 100644 --- a/api/src/values/DuckDBTimeTZValue.ts +++ b/api/src/values/DuckDBTimeTZValue.ts @@ -1,7 +1,7 @@ -import duckdb, { TimeTZ, TimeTZParts } from '@duckdb/node-bindings'; +import duckdb, { TimeTZ, TimeTZParts } from '@databrainhq/node-bindings'; import { getDuckDBTimeStringFromMicrosecondsInDay, - getOffsetStringFromSeconds + getOffsetStringFromSeconds, } from '../conversion/dateTimeStringConversion'; export type { TimeTZParts }; @@ -34,7 +34,7 @@ export class DuckDBTimeTZValue implements TimeTZ { public toString(): string { return `${getDuckDBTimeStringFromMicrosecondsInDay( - this.micros + this.micros, 
)}${getOffsetStringFromSeconds(this.offset)}`; } @@ -52,7 +52,7 @@ export class DuckDBTimeTZValue implements TimeTZ { public static fromBits(bits: bigint): DuckDBTimeTZValue { const micros = BigInt.asUintN( DuckDBTimeTZValue.TimeBits, - bits >> BigInt(DuckDBTimeTZValue.OffsetBits) + bits >> BigInt(DuckDBTimeTZValue.OffsetBits), ); const offset = DuckDBTimeTZValue.MaxOffset - @@ -62,14 +62,14 @@ export class DuckDBTimeTZValue implements TimeTZ { public static fromMicrosAndOffset( micros: bigint, - offset: number + offset: number, ): DuckDBTimeTZValue { const bits = (BigInt.asUintN(DuckDBTimeTZValue.TimeBits, micros) << BigInt(DuckDBTimeTZValue.OffsetBits)) | BigInt.asUintN( DuckDBTimeTZValue.OffsetBits, - BigInt(DuckDBTimeTZValue.MaxOffset - offset) + BigInt(DuckDBTimeTZValue.MaxOffset - offset), ); return new DuckDBTimeTZValue(bits, micros, offset); } @@ -77,17 +77,17 @@ export class DuckDBTimeTZValue implements TimeTZ { public static fromParts(parts: TimeTZParts): DuckDBTimeTZValue { return DuckDBTimeTZValue.fromMicrosAndOffset( duckdb.to_time(parts.time).micros, - parts.offset + parts.offset, ); } public static readonly Max = DuckDBTimeTZValue.fromMicrosAndOffset( DuckDBTimeTZValue.MaxMicros, - DuckDBTimeTZValue.MinOffset + DuckDBTimeTZValue.MinOffset, ); public static readonly Min = DuckDBTimeTZValue.fromMicrosAndOffset( DuckDBTimeTZValue.MinMicros, - DuckDBTimeTZValue.MaxOffset + DuckDBTimeTZValue.MaxOffset, ); } diff --git a/api/src/values/DuckDBTimeValue.ts b/api/src/values/DuckDBTimeValue.ts index 42e403c8..06f9715a 100644 --- a/api/src/values/DuckDBTimeValue.ts +++ b/api/src/values/DuckDBTimeValue.ts @@ -1,4 +1,4 @@ -import duckdb, { Time, TimeParts } from '@duckdb/node-bindings'; +import duckdb, { Time, TimeParts } from '@databrainhq/node-bindings'; import { getDuckDBTimeStringFromMicrosecondsInDay } from '../conversion/dateTimeStringConversion'; export type { TimeParts }; @@ -22,7 +22,9 @@ export class DuckDBTimeValue implements Time { return new DuckDBTimeValue(duckdb.to_time(parts).micros); } - public static readonly Max = new DuckDBTimeValue(24n * 60n * 60n * 1000n * 1000n); + public static readonly Max = new DuckDBTimeValue( + 24n * 60n * 60n * 1000n * 1000n, + ); public static readonly Min = new DuckDBTimeValue(0n); } diff --git a/api/src/values/DuckDBTimestampMillisecondsValue.ts b/api/src/values/DuckDBTimestampMillisecondsValue.ts index c4a7cf36..659030c4 100644 --- a/api/src/values/DuckDBTimestampMillisecondsValue.ts +++ b/api/src/values/DuckDBTimestampMillisecondsValue.ts @@ -1,4 +1,4 @@ -import duckdb, { TimestampMilliseconds } from '@duckdb/node-bindings'; +import duckdb, { TimestampMilliseconds } from '@databrainhq/node-bindings'; import { getDuckDBTimestampStringFromMilliseconds } from '../conversion/dateTimeStringConversion'; import { DuckDBTimestampSecondsValue } from './DuckDBTimestampSecondsValue'; @@ -18,12 +18,22 @@ export class DuckDBTimestampMillisecondsValue implements TimestampMilliseconds { } public static readonly Epoch = new DuckDBTimestampMillisecondsValue(0n); - public static readonly Max = new DuckDBTimestampMillisecondsValue((2n ** 63n - 2n) / 1000n); - public static readonly Min = new DuckDBTimestampMillisecondsValue(DuckDBTimestampSecondsValue.Min.seconds * 1000n); - public static readonly PosInf = new DuckDBTimestampMillisecondsValue(2n ** 63n - 1n); - public static readonly NegInf = new DuckDBTimestampMillisecondsValue(-(2n ** 63n - 1n)); + public static readonly Max = new DuckDBTimestampMillisecondsValue( + (2n ** 63n - 2n) / 1000n, + ); + 
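The `fromBits`/`fromMicrosAndOffset` pair above packs a TIMETZ into a single 64-bit value: micros in the high `TimeBits` bits and `MaxOffset - offset` in the low `OffsetBits` bits, so larger offsets order earlier. A round-trip sketch assuming the usual 40/24 bit split and a +/-15:59:59 offset range (both assumptions, not stated in this hunk):

// Sketch: TIMETZ bit packing, assuming TimeBits = 40 and OffsetBits = 24.
const TIME_BITS = 40;
const OFFSET_BITS = 24;
const MAX_OFFSET = 16 * 60 * 60 - 1; // assumed +/-15:59:59

function packTimeTZ(micros: bigint, offsetSeconds: number): bigint {
  return (
    (BigInt.asUintN(TIME_BITS, micros) << BigInt(OFFSET_BITS)) |
    BigInt.asUintN(OFFSET_BITS, BigInt(MAX_OFFSET - offsetSeconds))
  );
}

function unpackTimeTZ(bits: bigint): { micros: bigint; offsetSeconds: number } {
  return {
    micros: BigInt.asUintN(TIME_BITS, bits >> BigInt(OFFSET_BITS)),
    offsetSeconds: MAX_OFFSET - Number(BigInt.asUintN(OFFSET_BITS, bits)),
  };
}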
public static readonly Min = new DuckDBTimestampMillisecondsValue( + DuckDBTimestampSecondsValue.Min.seconds * 1000n, + ); + public static readonly PosInf = new DuckDBTimestampMillisecondsValue( + 2n ** 63n - 1n, + ); + public static readonly NegInf = new DuckDBTimestampMillisecondsValue( + -(2n ** 63n - 1n), + ); } -export function timestampMillisValue(millis: bigint): DuckDBTimestampMillisecondsValue { +export function timestampMillisValue( + millis: bigint, +): DuckDBTimestampMillisecondsValue { return new DuckDBTimestampMillisecondsValue(millis); } diff --git a/api/src/values/DuckDBTimestampNanosecondsValue.ts b/api/src/values/DuckDBTimestampNanosecondsValue.ts index f4536b7a..080d62c7 100644 --- a/api/src/values/DuckDBTimestampNanosecondsValue.ts +++ b/api/src/values/DuckDBTimestampNanosecondsValue.ts @@ -1,4 +1,4 @@ -import duckdb, { TimestampNanoseconds } from '@duckdb/node-bindings'; +import duckdb, { TimestampNanoseconds } from '@databrainhq/node-bindings'; import { getDuckDBTimestampStringFromNanoseconds } from '../conversion/dateTimeStringConversion'; export class DuckDBTimestampNanosecondsValue implements TimestampNanoseconds { @@ -17,12 +17,22 @@ export class DuckDBTimestampNanosecondsValue implements TimestampNanoseconds { } public static readonly Epoch = new DuckDBTimestampNanosecondsValue(0n); - public static readonly Max = new DuckDBTimestampNanosecondsValue(2n ** 63n - 2n); - public static readonly Min = new DuckDBTimestampNanosecondsValue(-9223286400000000000n); - public static readonly PosInf = new DuckDBTimestampNanosecondsValue(2n ** 63n - 1n); - public static readonly NegInf = new DuckDBTimestampNanosecondsValue(-(2n ** 63n - 1n)); + public static readonly Max = new DuckDBTimestampNanosecondsValue( + 2n ** 63n - 2n, + ); + public static readonly Min = new DuckDBTimestampNanosecondsValue( + -9223286400000000000n, + ); + public static readonly PosInf = new DuckDBTimestampNanosecondsValue( + 2n ** 63n - 1n, + ); + public static readonly NegInf = new DuckDBTimestampNanosecondsValue( + -(2n ** 63n - 1n), + ); } -export function timestampNanosValue(nanos: bigint): DuckDBTimestampNanosecondsValue { +export function timestampNanosValue( + nanos: bigint, +): DuckDBTimestampNanosecondsValue { return new DuckDBTimestampNanosecondsValue(nanos); } diff --git a/api/src/values/DuckDBTimestampSecondsValue.ts b/api/src/values/DuckDBTimestampSecondsValue.ts index 7dccf3aa..0b970bde 100644 --- a/api/src/values/DuckDBTimestampSecondsValue.ts +++ b/api/src/values/DuckDBTimestampSecondsValue.ts @@ -1,4 +1,4 @@ -import duckdb, { TimestampSeconds } from '@duckdb/node-bindings'; +import duckdb, { TimestampSeconds } from '@databrainhq/node-bindings'; import { getDuckDBTimestampStringFromSeconds } from '../conversion/dateTimeStringConversion'; export class DuckDBTimestampSecondsValue implements TimestampSeconds { @@ -17,12 +17,18 @@ export class DuckDBTimestampSecondsValue implements TimestampSeconds { } public static readonly Epoch = new DuckDBTimestampSecondsValue(0n); - public static readonly Max = new DuckDBTimestampSecondsValue( 9223372036854n); + public static readonly Max = new DuckDBTimestampSecondsValue(9223372036854n); public static readonly Min = new DuckDBTimestampSecondsValue(-9223372022400n); // from test_all_types() select epoch(timestamp_s)::bigint; - public static readonly PosInf = new DuckDBTimestampSecondsValue(2n ** 63n - 1n); - public static readonly NegInf = new DuckDBTimestampSecondsValue(-(2n ** 63n - 1n)); + public static readonly PosInf = new 
DuckDBTimestampSecondsValue( + 2n ** 63n - 1n, + ); + public static readonly NegInf = new DuckDBTimestampSecondsValue( + -(2n ** 63n - 1n), + ); } -export function timestampSecondsValue(seconds: bigint): DuckDBTimestampSecondsValue { +export function timestampSecondsValue( + seconds: bigint, +): DuckDBTimestampSecondsValue { return new DuckDBTimestampSecondsValue(seconds); } diff --git a/api/src/values/DuckDBTimestampTZValue.ts b/api/src/values/DuckDBTimestampTZValue.ts index 663a78af..9e7194c1 100644 --- a/api/src/values/DuckDBTimestampTZValue.ts +++ b/api/src/values/DuckDBTimestampTZValue.ts @@ -1,4 +1,4 @@ -import duckdb, { Timestamp, TimestampParts } from '@duckdb/node-bindings'; +import duckdb, { Timestamp, TimestampParts } from '@databrainhq/node-bindings'; import { getDuckDBTimestampStringFromMicroseconds } from '../conversion/dateTimeStringConversion'; import { DuckDBTimestampValue } from './DuckDBTimestampValue'; @@ -19,7 +19,7 @@ export class DuckDBTimestampTZValue implements Timestamp { public toString(): string { return getDuckDBTimestampStringFromMicroseconds( this.micros, - DuckDBTimestampTZValue.timezoneOffsetInMinutes + DuckDBTimestampTZValue.timezoneOffsetInMinutes, ); } @@ -33,20 +33,22 @@ export class DuckDBTimestampTZValue implements Timestamp { public static readonly Epoch = new DuckDBTimestampTZValue(0n); public static readonly Max = new DuckDBTimestampTZValue( - DuckDBTimestampValue.Max.micros + DuckDBTimestampValue.Max.micros, ); public static readonly Min = new DuckDBTimestampTZValue( - DuckDBTimestampValue.Min.micros + DuckDBTimestampValue.Min.micros, ); public static readonly PosInf = new DuckDBTimestampTZValue( - DuckDBTimestampValue.PosInf.micros + DuckDBTimestampValue.PosInf.micros, ); public static readonly NegInf = new DuckDBTimestampTZValue( - DuckDBTimestampValue.NegInf.micros + DuckDBTimestampValue.NegInf.micros, ); } -export function timestampTZValue(microsOrParts: bigint | TimestampParts): DuckDBTimestampTZValue { +export function timestampTZValue( + microsOrParts: bigint | TimestampParts, +): DuckDBTimestampTZValue { if (typeof microsOrParts === 'bigint') { return new DuckDBTimestampTZValue(microsOrParts); } diff --git a/api/src/values/DuckDBTimestampValue.ts b/api/src/values/DuckDBTimestampValue.ts index c1b537cb..aae37f97 100644 --- a/api/src/values/DuckDBTimestampValue.ts +++ b/api/src/values/DuckDBTimestampValue.ts @@ -1,4 +1,4 @@ -import duckdb, { Timestamp, TimestampParts } from '@duckdb/node-bindings'; +import duckdb, { Timestamp, TimestampParts } from '@databrainhq/node-bindings'; import { getDuckDBTimestampStringFromMicroseconds } from '../conversion/dateTimeStringConversion'; import { DuckDBTimestampMillisecondsValue } from './DuckDBTimestampMillisecondsValue'; @@ -26,10 +26,12 @@ export class DuckDBTimestampValue implements Timestamp { public static fromParts(parts: TimestampParts): DuckDBTimestampValue { return new DuckDBTimestampValue(duckdb.to_timestamp(parts).micros); } - + public static readonly Epoch = new DuckDBTimestampValue(0n); public static readonly Max = new DuckDBTimestampValue(2n ** 63n - 2n); - public static readonly Min = new DuckDBTimestampValue(DuckDBTimestampMillisecondsValue.Min.millis * 1000n); + public static readonly Min = new DuckDBTimestampValue( + DuckDBTimestampMillisecondsValue.Min.millis * 1000n, + ); public static readonly PosInf = new DuckDBTimestampValue(2n ** 63n - 1n); public static readonly NegInf = new DuckDBTimestampValue(-(2n ** 63n - 1n)); } @@ -37,7 +39,9 @@ export class DuckDBTimestampValue implements 
Timestamp { export type DuckDBTimestampMicrosecondsValue = DuckDBTimestampValue; export const DuckDBTimestampMicrosecondsValue = DuckDBTimestampValue; -export function timestampValue(microsOrParts: bigint | TimestampParts): DuckDBTimestampValue { +export function timestampValue( + microsOrParts: bigint | TimestampParts, +): DuckDBTimestampValue { if (typeof microsOrParts === 'bigint') { return new DuckDBTimestampValue(microsOrParts); } diff --git a/api/src/values/DuckDBUUIDValue.ts b/api/src/values/DuckDBUUIDValue.ts index bbb160a4..130dcaf9 100644 --- a/api/src/values/DuckDBUUIDValue.ts +++ b/api/src/values/DuckDBUUIDValue.ts @@ -5,22 +5,30 @@ export class DuckDBUUIDValue { this.hugeint = hugeint; } - /** Return the UUID as an unsigned 128-bit integer in a JS BigInt. */ + /** Return the UUID as an unsigned 128-bit integer in a JS BigInt. */ public toUint128(): bigint { // UUID values are stored with their MSB flipped so their numeric ordering matches their string ordering. - return (this.hugeint ^ 0x80000000000000000000000000000000n) & 0xffffffffffffffffffffffffffffffffn; + return ( + (this.hugeint ^ 0x80000000000000000000000000000000n) & + 0xffffffffffffffffffffffffffffffffn + ); } public toString(): string { // Prepend with a (hex) 1 before converting to a hex string. // This ensures the trailing 32 characters are the hex digits we want, left padded with zeros as needed. - const hex = (this.toUint128() | 0x100000000000000000000000000000000n).toString(16); + const hex = ( + this.toUint128() | 0x100000000000000000000000000000000n + ).toString(16); return `${hex.substring(1, 9)}-${hex.substring(9, 13)}-${hex.substring(13, 17)}-${hex.substring(17, 21)}-${hex.substring(21, 33)}`; } /** Create a DuckDBUUIDValue from an unsigned 128-bit integer in a JS BigInt. 
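Two details in DuckDBUUIDValue above are easy to miss: stored hugeints have their most significant bit flipped so numeric ordering matches string ordering, and `toString` ORs in a 33rd hex digit so the low 32 hex digits come out zero-padded before being sliced into 8-4-4-4-12 groups. A sketch of that formatting step alone:

// Sketch: format an unsigned 128-bit integer as a canonical UUID string.
function uuidStringFromUint128(uint128: bigint): string {
  // The extra leading 1 nibble guarantees 33 hex digits, i.e. zero padding.
  const hex = (uint128 | (1n << 128n)).toString(16);
  return `${hex.substring(1, 9)}-${hex.substring(9, 13)}-${hex.substring(13, 17)}-${hex.substring(17, 21)}-${hex.substring(21, 33)}`;
}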
*/ public static fromUint128(uint128: bigint): DuckDBUUIDValue { - return new DuckDBUUIDValue((uint128 ^ 0x80000000000000000000000000000000n) & 0xffffffffffffffffffffffffffffffffn); + return new DuckDBUUIDValue( + (uint128 ^ 0x80000000000000000000000000000000n) & + 0xffffffffffffffffffffffffffffffffn, + ); } /** diff --git a/api/src/values/DuckDBValue.ts b/api/src/values/DuckDBValue.ts index 3f9edb4d..7b1bf67f 100644 --- a/api/src/values/DuckDBValue.ts +++ b/api/src/values/DuckDBValue.ts @@ -40,5 +40,4 @@ export type DuckDBValue = | DuckDBTimeTZValue | DuckDBTimeValue | DuckDBUnionValue - | DuckDBUUIDValue - ; + | DuckDBUUIDValue; diff --git a/api/src/version.ts b/api/src/version.ts index b302740f..71c206e1 100644 --- a/api/src/version.ts +++ b/api/src/version.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; export function version(): string { return duckdb.library_version(); diff --git a/api/test/api.test.ts b/api/test/api.test.ts index 69391a96..1f46cd32 100644 --- a/api/test/api.test.ts +++ b/api/test/api.test.ts @@ -142,7 +142,7 @@ async function sleep(ms: number): Promise<void> { } async function withConnection( - fn: (connection: DuckDBConnection) => Promise<void> + fn: (connection: DuckDBConnection) => Promise<void>, ) { const instance = await DuckDBInstance.create(); const connection = await instance.connect(); @@ -151,12 +151,12 @@ function assertColumns( result: DuckDBResult, - expectedColumns: readonly ColumnNameAndType[] + expectedColumns: readonly ColumnNameAndType[], ) { assert.strictEqual( result.columnCount, expectedColumns.length, - 'column count' + 'column count', ); for (let i = 0; i < expectedColumns.length; i++) { const { name, type } = expectedColumns[i]; @@ -164,33 +164,33 @@ assert.strictEqual( result.columnTypeId(i), type.typeId, - `column type id (column: ${name})` + `column type id (column: ${name})`, ); assert.deepStrictEqual( result.columnType(i), type, - `column type (column: ${name})` + `column type (column: ${name})`, ); } } function isVectorType< TValue extends DuckDBValue, - TVector extends DuckDBVector<TValue> + TVector extends DuckDBVector<TValue>, >( vector: DuckDBVector<TValue> | null, - vectorType: new (...args: any[]) => TVector + vectorType: new (...args: any[]) => TVector, ): vector is TVector { return vector instanceof vectorType; } function getColumnVector< TValue extends DuckDBValue, - TVector extends DuckDBVector<TValue> + TVector extends DuckDBVector<TValue>, >( chunk: DuckDBDataChunk, columnIndex: number, - vectorType: new (...args: any[]) => TVector + vectorType: new (...args: any[]) => TVector, ): TVector { const columnVector = chunk.getColumnVector(columnIndex); if (!isVectorType(columnVector, vectorType)) { @@ -202,7 +202,7 @@ function assertVectorValues<TValue extends DuckDBValue>( vector: DuckDBVector<TValue> | null | undefined, values: readonly TValue[], - vectorName: string + vectorName: string, ) { if (!vector) { assert.fail(`${vectorName} unexpectedly null or undefined`); @@ -210,7 +210,7 @@ assert.strictEqual( vector.itemCount, values.length, - `expected vector ${vectorName} item count to be ${values.length} but found ${vector.itemCount}` + `expected vector ${vectorName} item count to be ${values.length} but found ${vector.itemCount}`, ); for (let i = 0; i < values.length; i++) { const actual: TValue | null = vector.getItem(i); @@ -218,19 +218,19 @@ assert.deepStrictEqual( actual, expected, - `expected vector 
${vectorName}[${i}] to be ${expected} but found ${actual}` + `expected vector ${vectorName}[${i}] to be ${expected} but found ${actual}`, ); } } function assertValues< TValue extends DuckDBValue, - TVector extends DuckDBVector<TValue> + TVector extends DuckDBVector<TValue>, >( chunk: DuckDBDataChunk, columnIndex: number, vectorType: new (...args: any[]) => TVector, - values: readonly (TValue | null)[] + values: readonly (TValue | null)[], ) { const vector = getColumnVector(chunk, columnIndex, vectorType); assertVectorValues(vector, values, `${columnIndex}`); @@ -238,7 +238,7 @@ function bigints(start: bigint, end: bigint) { return Array.from({ length: Number(end - start) + 1 }).map( - (_, i) => start + BigInt(i) + (_, i) => start + BigInt(i), ); } @@ -264,12 +264,12 @@ describe('api', () => { assert.equal(ResultReturnType[ResultReturnType.INVALID], 'INVALID'); assert.equal( ResultReturnType[ResultReturnType.CHANGED_ROWS], - 'CHANGED_ROWS' + 'CHANGED_ROWS', ); assert.equal(ResultReturnType[ResultReturnType.NOTHING], 'NOTHING'); assert.equal( ResultReturnType[ResultReturnType.QUERY_RESULT], - 'QUERY_RESULT' + 'QUERY_RESULT', ); }); test('StatementType enum', () => { @@ -357,19 +357,19 @@ assert.equal(TIMESTAMP_NS.toString(), 'TIMESTAMP_NS'); assert.equal( ENUM(['fly', 'swim', 'walk']).toString(), - `ENUM('fly', 'swim', 'walk')` + `ENUM('fly', 'swim', 'walk')`, ); assert.equal(LIST(INTEGER).toString(), 'INTEGER[]'); assert.equal( - STRUCT({ 'id': VARCHAR, 'ts': TIMESTAMP }).toString(), - 'STRUCT("id" VARCHAR, "ts" TIMESTAMP)' + STRUCT({ id: VARCHAR, ts: TIMESTAMP }).toString(), + 'STRUCT("id" VARCHAR, "ts" TIMESTAMP)', ); assert.equal(MAP(INTEGER, VARCHAR).toString(), 'MAP(INTEGER, VARCHAR)'); assert.equal(ARRAY(INTEGER, 3).toString(), 'INTEGER[3]'); assert.equal(UUID.toString(), 'UUID'); assert.equal( - UNION({ 'str': VARCHAR, 'num': INTEGER }).toString(), - 'UNION("str" VARCHAR, "num" INTEGER)' + UNION({ str: VARCHAR, num: INTEGER }).toString(), + 'UNION("str" VARCHAR, "num" INTEGER)', ); assert.equal(BIT.toString(), 'BIT'); assert.equal(TIMETZ.toString(), 'TIME WITH TIME ZONE'); @@ -407,7 +407,7 @@ } catch (err) { assert.deepEqual( err, - new Error('Failed to prepare: connection disconnected') + new Error('Failed to prepare: connection disconnected'), ); } // ensure double-disconnect doesn't break anything @@ -426,10 +426,10 @@ { name: 'list_int', type: LIST(INTEGER) }, { name: 'list_dec', type: LIST(DECIMAL(4, 1)) }, { name: 'list_null', type: LIST(SQLNULL) }, - { name: 'struct', type: STRUCT({ 'a': INTEGER, 'b': VARCHAR }) }, + { name: 'struct', type: STRUCT({ a: INTEGER, b: VARCHAR }) }, { name: 'array', type: ARRAY(INTEGER, 3) }, { name: 'map', type: MAP(INTEGER, VARCHAR) }, - { name: 'union', type: UNION({ 'name': VARCHAR, 'age': SMALLINT }) }, + { name: 'union', type: UNION({ name: VARCHAR, age: SMALLINT }) }, { name: 'uuid', type: UUID }, { name: 'bit', type: BIT }, { name: 'timetz', type: TIMETZ }, @@ -448,7 +448,7 @@ assert.strictEqual( prepared.parameterName(i + 1), params[i].name, - `param ${i} name mismatch` + `param ${i} name mismatch`, ); } @@ -464,10 +464,10 @@ prepared.bindList( i++, [decimalValue(9876n, 4, 1), decimalValue(5432n, 4, 1)], - LIST(DECIMAL(4, 1)) + LIST(DECIMAL(4, 1)), ); prepared.bindList(i++, [null]); - prepared.bindStruct(i++, { 'a': 42, 'b': 'duck' }); + prepared.bindStruct(i++, { a: 42, b: 'duck' }); prepared.bindArray(i++, [100, 
200, 300]); prepared.bindMap( i++, @@ -475,14 +475,14 @@ { key: 100, value: 'swim' }, { key: 101, value: 'walk' }, { key: 102, value: 'fly' }, - ]) + ]), ); prepared.bindUnion( i++, unionValue('age', 42), - UNION({ 'name': VARCHAR, 'age': SMALLINT }) - ), + UNION({ name: VARCHAR, age: SMALLINT }), + ); prepared.bindUUID(i++, uuidValue(0xf0e1d2c3b4a596870123456789abcdefn)); prepared.bindBit(i++, bitValue('0010001001011100010101011010111')); prepared.bindTimeTZ(i++, TIMETZ.max); prepared.bindTimestampTZ(i++, TIMESTAMPTZ.max); @@ -492,18 +492,18 @@ for (let i = 0; i < params.length; i++) { let type = params[i].type; if (type.typeId === DuckDBTypeId.VARCHAR) { // VARCHAR type is reported incorrectly; see https://github.com/duckdb/duckdb/issues/16137 continue; } assert.equal( prepared.parameterTypeId(i + 1), type.typeId, - `param ${i} type id mismatch` + `param ${i} type id mismatch`, ); assert.deepEqual( prepared.parameterType(i + 1), type, - `param ${i} type mismatch` + `param ${i} type mismatch`, ); } @@ -531,19 +531,19 @@ chunk, i++, DuckDBBooleanVector, - [true] + [true], ); assertValues( chunk, i++, DuckDBIntegerVector, - [10] + [10], ); assertValues( chunk, i++, DuckDBVarCharVector, - ['abc'] + ['abc'], ); assertValues(chunk, i++, DuckDBTimestampSecondsVector, [ TIMESTAMP_S.max, @@ -565,7 +565,7 @@ ]); assertValues(chunk, i++, DuckDBListVector, [listValue([null])]); assertValues(chunk, i++, DuckDBStructVector, [ - structValue({ 'a': 42, 'b': 'duck' }), + structValue({ a: 42, b: 'duck' }), ]); assertValues(chunk, i++, DuckDBArrayVector, [ arrayValue([100, 200, 300]), @@ -591,7 +591,7 @@ chunk, i++, DuckDBIntegerVector, - [null] + [null], ); } }); @@ -599,7 +599,7 @@ test('should support prepare statement bind with list', async () => { await withConnection(async (connection) => { const prepared = await connection.prepare( - 'select $1 as a, $2 as b, $3 as c' + 'select $1 as a, $2 as b, $3 as c', ); prepared.bind([42, 'duck', listValue([10, 11, 12])]); const result = await prepared.run(); @@ -617,13 +617,13 @@ chunk, 0, DuckDBIntegerVector, - [42] + [42], ); assertValues( chunk, 1, DuckDBVarCharVector, - ['duck'] + ['duck'], ); assertValues(chunk, 2, DuckDBListVector, [listValue([10, 11, 12])]); } @@ -632,7 +632,7 @@ test('should support prepare statement bind with object', async () => { await withConnection(async (connection) => { const prepared = await connection.prepare( - 'select $a as a, $b as b, $c as c, $d as d, $e as e, $f as f' + 'select $a as a, $b as b, $c as c, $d as d, $e as e, $f as f', ); prepared.bind( { @@ -645,7 +645,7 @@ }, { f: ARRAY(FLOAT, 2), - } + }, ); const result = await prepared.run(); assertColumns(result, [ @@ -665,7 +665,7 @@ chunk, 0, DuckDBIntegerVector, - [42] + [42], ); assertValues(chunk, 1, DuckDBDoubleVector, [ 42.3, @@ -674,7 +674,7 @@ chunk, 2, DuckDBVarCharVector, - ['duck'] + ['duck'], ); assertValues(chunk, 3, DuckDBListVector, [listValue([10, 11, 12])]); assertValues(chunk, 4, DuckDBArrayVector, [ @@ -688,18 +688,14 @@ await withConnection(async (connection) => { const
prepared = await connection.prepare('select ?'); try { - prepared.bindStruct( - 0, - structValue({ 'a': null }), - STRUCT({ 'a': ANY }) - ); + prepared.bindStruct(0, structValue({ a: null }), STRUCT({ a: ANY })); assert.fail('should throw'); } catch (err) { assert.deepEqual( err, new Error( - 'Cannot create structs with an entry type of ANY. Specify a specific type.' - ) + 'Cannot create structs with an entry type of ANY. Specify a specific type.', + ), ); } }); @@ -714,8 +710,8 @@ describe('api', () => { assert.deepEqual( err, new Error( - 'Cannot create lists with item type of ANY. Specify a specific type.' - ) + 'Cannot create lists with item type of ANY. Specify a specific type.', + ), ); } }); @@ -730,8 +726,8 @@ describe('api', () => { assert.deepEqual( err, new Error( - 'Cannot create arrays with item type of ANY. Specify a specific type.' - ) + 'Cannot create arrays with item type of ANY. Specify a specific type.', + ), ); } }); @@ -739,7 +735,7 @@ describe('api', () => { test('should support starting prepared statements and running them incrementally', async () => { await withConnection(async (connection) => { const prepared = await connection.prepare( - 'select int from test_all_types()' + 'select int from test_all_types()', ); const pending = prepared.start(); let taskCount = 0; @@ -787,25 +783,25 @@ describe('api', () => { chunks[1], 0, DuckDBBigIntVector, - bigints(2048n, 2048n * 2n - 1n) + bigints(2048n, 2048n * 2n - 1n), ); assertValues( chunks[2], 0, DuckDBBigIntVector, - bigints(2048n * 2n, 2048n * 3n - 1n) + bigints(2048n * 2n, 2048n * 3n - 1n), ); assertValues( chunks[3], 0, DuckDBBigIntVector, - bigints(2048n * 3n, 2048n * 4n - 1n) + bigints(2048n * 3n, 2048n * 4n - 1n), ); assertValues( chunks[4], 0, DuckDBBigIntVector, - bigints(2048n * 4n, 9999n) + bigints(2048n * 4n, 9999n), ); }); }); @@ -878,7 +874,7 @@ FROM ( WHERE country = $country ORDER BY name `, - { country: 'US' } + { country: 'US' }, ); const columns = reader.getColumnsObject(); assert.deepEqual(columns, { @@ -893,7 +889,7 @@ ORDER BY name test('should support all data types', async () => { await withConnection(async (connection) => { const result = await connection.run( - 'from test_all_types(use_large_enum=true)' + 'from test_all_types(use_large_enum=true)', ); assertColumns(result, createTestAllTypesColumnNameAndTypeObjects()); @@ -924,26 +920,26 @@ ORDER BY name chunk, 15, DuckDBTimestampSecondsVector, - testAllTypesColumns[15] + testAllTypesColumns[15], ); assertValues( chunk, 16, DuckDBTimestampMillisecondsVector, - testAllTypesColumns[16] + testAllTypesColumns[16], ); assertValues( chunk, 17, DuckDBTimestampNanosecondsVector, - testAllTypesColumns[17] + testAllTypesColumns[17], ); assertValues(chunk, 18, DuckDBTimeTZVector, testAllTypesColumns[18]); assertValues( chunk, 19, DuckDBTimestampTZVector, - testAllTypesColumns[19] + testAllTypesColumns[19], ); assertValues(chunk, 20, DuckDBFloatVector, testAllTypesColumns[20]); assertValues(chunk, 21, DuckDBDoubleVector, testAllTypesColumns[21]); @@ -954,7 +950,7 @@ ORDER BY name chunk, 25, DuckDBDecimal128Vector, - testAllTypesColumns[25] + testAllTypesColumns[25], ); assertValues(chunk, 26, DuckDBUUIDVector, testAllTypesColumns[26]); assertValues(chunk, 27, DuckDBIntervalVector, testAllTypesColumns[27]); @@ -1015,14 +1011,14 @@ ORDER BY name assert.equal(bitValue('10101').toString(), '10101'); assert.equal( bitValue('0010001001011100010101011010111').toString(), - '0010001001011100010101011010111' + '0010001001011100010101011010111', ); // blob 
assert.equal(blobValue('').toString(), ''); assert.equal( blobValue('thisisalongblob\x00withnullbytes').toString(), - 'thisisalongblob\\x00withnullbytes' + 'thisisalongblob\\x00withnullbytes', ); assert.equal(blobValue('\x00\x00\x00a').toString(), '\\x00\\x00\\x00a'); @@ -1043,21 +1039,21 @@ ORDER BY name assert.equal(decimalValue(0n, 18, 6).toString(), '0.000000'); assert.equal( decimalValue(987654321098765432n, 18, 6).toString(), - '987654321098.765432' + '987654321098.765432', ); assert.equal( decimalValue(-987654321098765432n, 18, 6).toString(), - '-987654321098.765432' + '-987654321098.765432', ); assert.equal(decimalValue(0n, 38, 10).toString(), '0.0000000000'); assert.equal( decimalValue(98765432109876543210987654321098765432n, 38, 10).toString(), - '9876543210987654321098765432.1098765432' + '9876543210987654321098765432.1098765432', ); assert.equal( decimalValue(-98765432109876543210987654321098765432n, 38, 10).toString(), - '-9876543210987654321098765432.1098765432' + '-9876543210987654321098765432.1098765432', ); // interval @@ -1093,64 +1089,64 @@ ORDER BY name assert.equal(intervalValue(0, 0, -60n * 1000000n).toString(), '-00:01:00'); assert.equal( intervalValue(0, 0, 59n * 60n * 1000000n).toString(), - '00:59:00' + '00:59:00', ); assert.equal( intervalValue(0, 0, -59n * 60n * 1000000n).toString(), - '-00:59:00' + '-00:59:00', ); assert.equal( intervalValue(0, 0, 60n * 60n * 1000000n).toString(), - '01:00:00' + '01:00:00', ); assert.equal( intervalValue(0, 0, -60n * 60n * 1000000n).toString(), - '-01:00:00' + '-01:00:00', ); assert.equal( intervalValue(0, 0, 24n * 60n * 60n * 1000000n).toString(), - '24:00:00' + '24:00:00', ); assert.equal( intervalValue(0, 0, -24n * 60n * 60n * 1000000n).toString(), - '-24:00:00' + '-24:00:00', ); assert.equal( intervalValue(0, 0, 2147483647n * 60n * 60n * 1000000n).toString(), - '2147483647:00:00' + '2147483647:00:00', ); assert.equal( intervalValue(0, 0, -2147483647n * 60n * 60n * 1000000n).toString(), - '-2147483647:00:00' + '-2147483647:00:00', ); assert.equal( intervalValue(0, 0, 2147483647n * 60n * 60n * 1000000n + 1n).toString(), - '2147483647:00:00.000001' + '2147483647:00:00.000001', ); assert.equal( intervalValue( 0, 0, - -(2147483647n * 60n * 60n * 1000000n + 1n) + -(2147483647n * 60n * 60n * 1000000n + 1n), ).toString(), - '-2147483647:00:00.000001' + '-2147483647:00:00.000001', ); assert.equal( intervalValue( 2 * 12 + 3, 5, - (7n * 60n * 60n + 11n * 60n + 13n) * 1000000n + 17n + (7n * 60n * 60n + 11n * 60n + 13n) * 1000000n + 17n, ).toString(), - '2 years 3 months 5 days 07:11:13.000017' + '2 years 3 months 5 days 07:11:13.000017', ); assert.equal( intervalValue( -(2 * 12 + 3), -5, - -((7n * 60n * 60n + 11n * 60n + 13n) * 1000000n + 17n) + -((7n * 60n * 60n + 11n * 60n + 13n) * 1000000n + 17n), ).toString(), - '-2 years -3 months -5 days -07:11:13.000017' + '-2 years -3 months -5 days -07:11:13.000017', ); // list @@ -1165,7 +1161,7 @@ ORDER BY name { key: 1, value: 'a' }, { key: 2, value: 'b' }, ]).toString(), - `{1: 'a', 2: 'b'}` + `{1: 'a', 2: 'b'}`, ); // struct @@ -1191,11 +1187,11 @@ ORDER BY name assert.equal(TIMESTAMPTZ.epoch.toString(), '1969-12-31 18:30:00-05:30'); assert.equal( TIMESTAMPTZ.max.toString(), - '294247-01-09 22:30:54.775806-05:30' + '294247-01-09 22:30:54.775806-05:30', ); assert.equal( TIMESTAMPTZ.min.toString(), - '290309-12-21 (BC) 18:30:00-05:30' + '290309-12-21 (BC) 18:30:00-05:30', ); assert.equal(TIMESTAMPTZ.posInf.toString(), 'infinity'); assert.equal(TIMESTAMPTZ.negInf.toString(), '-infinity'); @@ 
-1212,9 +1208,9 @@ ORDER BY name assert.equal( timeTZValue( (((12n * 60n + 34n) * 60n + 56n) * 1000n + 789n) * 1000n, - -((7 * 60 + 9) * 60) + -((7 * 60 + 9) * 60), ).toString(), - '12:34:56.789-07:09' + '12:34:56.789-07:09', ); assert.equal(TIMETZ.max.toString(), '24:00:00-15:59:59'); assert.equal(TIMETZ.min.toString(), '00:00:00+15:59:59'); @@ -1224,9 +1220,9 @@ ORDER BY name assert.equal(TIME.min.toString(), '00:00:00'); assert.equal( timeValue( - (12n * 60n * 60n + 34n * 60n + 56n) * 1000000n + 987654n + (12n * 60n * 60n + 34n * 60n + 56n) * 1000000n + 987654n, ).toString(), - '12:34:56.987654' + '12:34:56.987654', ); // union @@ -1285,27 +1281,27 @@ ORDER BY name assert.deepEqual(DuckDBTimeValue.fromParts(timeParts).toParts(), timeParts); assert.deepEqual( DuckDBTimeTZValue.fromParts(timeTZParts).toParts(), - timeTZParts + timeTZParts, ); assert.deepEqual( DuckDBTimestampValue.fromParts(timestampParts).toParts(), - timestampParts + timestampParts, ); assert.deepEqual( DuckDBTimestampTZValue.fromParts(timestampParts).toParts(), - timestampParts + timestampParts, ); assert.deepEqual( DuckDBDecimalValue.fromDouble(3.14159, 6, 5), - decimalValue(314159n, 6, 5) + decimalValue(314159n, 6, 5), ); assert.deepEqual(decimalValue(314159n, 6, 5).toDouble(), 3.14159); }); test('result inspection conveniences', async () => { await withConnection(async (connection) => { const result = await connection.run( - 'select i::int as a, i::int + 10 as b from range(3) t(i)' + 'select i::int as a, i::int + 10 as b from range(3) t(i)', ); assert.deepEqual(result.columnNames(), ['a', 'b']); assert.deepEqual(result.columnTypes(), [INTEGER, INTEGER]); @@ -1330,19 +1326,19 @@ ORDER BY name test('row and column objects', async () => { await withConnection(async (connection) => { const reader = await connection.runAndReadAll( - 'select i::int as a, i::int + 10 as b, (i + 100)::varchar as a from range(3) t(i)' + 'select i::int as a, i::int + 10 as b, (i + 100)::varchar as a from range(3) t(i)', ); assert.deepEqual(reader.columnNames(), ['a', 'b', 'a']); assert.deepEqual(reader.deduplicatedColumnNames(), ['a', 'b', 'a:1']); assert.deepEqual(reader.columnTypes(), [INTEGER, INTEGER, VARCHAR]); assert.deepEqual(reader.getRowObjects(), [ - { 'a': 0, 'b': 10, 'a:1': '100' }, - { 'a': 1, 'b': 11, 'a:1': '101' }, - { 'a': 2, 'b': 12, 'a:1': '102' }, + { a: 0, b: 10, 'a:1': '100' }, + { a: 1, b: 11, 'a:1': '101' }, + { a: 2, b: 12, 'a:1': '102' }, ]); assert.deepEqual(reader.getColumnsObject(), { - 'a': [0, 1, 2], - 'b': [10, 11, 12], + a: [0, 1, 2], + b: [10, 11, 12], 'a:1': ['100', '101', '102'], }); }); @@ -1406,32 +1402,32 @@ ORDER BY name test('column names and types json', async () => { await withConnection(async (connection) => { const reader = await connection.runAndReadAll( - `from test_all_types(use_large_enum=true)` + `from test_all_types(use_large_enum=true)`, ); const columnNamesAndTypesJson = reader.columnNamesAndTypesJson(); assert.deepEqual( columnNamesAndTypesJson, - createTestAllTypesColumnNamesAndTypesJson() + createTestAllTypesColumnNamesAndTypesJson(), ); }); }); test('column name and type objects json', async () => { await withConnection(async (connection) => { const reader = await connection.runAndReadAll( - `from test_all_types(use_large_enum=true)` + `from test_all_types(use_large_enum=true)`, ); const columnNameAndTypeObjectsJson = reader.columnNameAndTypeObjectsJson(); assert.deepEqual( columnNameAndTypeObjectsJson, - createTestAllTypesColumnNameAndTypeObjectsJson() + 
createTestAllTypesColumnNameAndTypeObjectsJson(), ); }); }); test('result reader', async () => { await withConnection(async (connection) => { const reader = await connection.runAndReadAll( - 'select i::int as a, i::int + 10000 as b from range(5000) t(i)' + 'select i::int as a, i::int + 10000 as b from range(5000) t(i)', ); assert.deepEqual(reader.columnNames(), ['a', 'b']); assert.deepEqual(reader.columnTypes(), [INTEGER, INTEGER]); @@ -1451,7 +1447,7 @@ ORDER BY name const instance = await DuckDBInstance.create(); const connection = await instance.connect(); const result = await connection.run( - `select current_setting('duckdb_api') as duckdb_api` + `select current_setting('duckdb_api') as duckdb_api`, ); assertColumns(result, [{ name: 'duckdb_api', type: VARCHAR }]); const chunk = await result.fetchChunk(); @@ -1468,7 +1464,7 @@ ORDER BY name const instance = await DuckDBInstance.create(undefined, {}); const connection = await instance.connect(); const result = await connection.run( - `select current_setting('duckdb_api') as duckdb_api` + `select current_setting('duckdb_api') as duckdb_api`, ); assertColumns(result, [{ name: 'duckdb_api', type: VARCHAR }]); const chunk = await result.fetchChunk(); @@ -1483,11 +1479,11 @@ ORDER BY name }); test('overriding duckdb_api', async () => { const instance = await DuckDBInstance.create(undefined, { - 'duckdb_api': 'custom-duckdb-api', + duckdb_api: 'custom-duckdb-api', }); const connection = await instance.connect(); const result = await connection.run( - `select current_setting('duckdb_api') as duckdb_api` + `select current_setting('duckdb_api') as duckdb_api`, ); assertColumns(result, [{ name: 'duckdb_api', type: VARCHAR }]); const chunk = await result.fetchChunk(); @@ -1517,13 +1513,13 @@ ORDER BY name try { const cache = new DuckDBInstanceCache(); const instance1 = await cache.getOrCreateInstance( - 'instance_cache_test_a.db' + 'instance_cache_test_a.db', ); const connection1 = await instance1.connect(); await connection1.run(`attach ':memory:' as mem1`); const instance2 = await cache.getOrCreateInstance( - 'instance_cache_test_b.db' + 'instance_cache_test_b.db', ); const connection2 = await instance2.connect(); try { @@ -1532,7 +1528,7 @@ ORDER BY name } catch (err) { assert.deepEqual( err, - new Error(`Catalog Error: Catalog with name mem1 does not exist!`) + new Error(`Catalog Error: Catalog with name mem1 does not exist!`), ); } } finally { @@ -1558,8 +1554,8 @@ ORDER BY name assert.deepEqual( err, new Error( - `Connection Error: Can't open a connection to same database file with a different configuration than existing connections` - ) + `Connection Error: Can't open a connection to same database file with a different configuration than existing connections`, + ), ); } }); @@ -1579,7 +1575,7 @@ ORDER BY name } catch (err) { assert.deepEqual( err, - new Error('A data chunk cannot have more than 2048 rows') + new Error('A data chunk cannot have more than 2048 rows'), ); } }); @@ -1719,13 +1715,13 @@ ORDER BY name new Uint8Array([ 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, - ]) + ]), ), blobValue( new Uint8Array([ 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, - ]) + ]), ), null, ]; @@ -1788,13 +1784,13 @@ ORDER BY name const chunk = DuckDBDataChunk.create( [LIST(LIST(INTEGER))], - originalValues.length + originalValues.length, ); chunk.setColumnValues(0, originalValues); const outerListVector = chunk.getColumnVector(0) as DuckDBListVector; const innerListVector = 
outerListVector.getItemVector( - 2 + 2, ) as DuckDBListVector; innerListVector.setItem(1, listValue([350, 351, 352, 353, 354])); innerListVector.flush(); @@ -1878,20 +1874,20 @@ ORDER BY name test('create and append data chunk with structs', async () => { await withConnection(async (connection) => { const values = [ - structValue({ 'num': 10, 'str': 'xyz' }), - structValue({ 'num': 11, 'str': 'abcdefghijkl' }), - structValue({ 'num': 12, 'str': 'ABCDEFGHIJKLM' }), + structValue({ num: 10, str: 'xyz' }), + structValue({ num: 11, str: 'abcdefghijkl' }), + structValue({ num: 12, str: 'ABCDEFGHIJKLM' }), null, ]; const chunk = DuckDBDataChunk.create( - [STRUCT({ 'num': INTEGER, 'str': VARCHAR })], - values.length + [STRUCT({ num: INTEGER, str: VARCHAR })], + values.length, ); chunk.setColumnValues(0, values); await connection.run( - 'create table target(col0 struct(num integer, str varchar))' + 'create table target(col0 struct(num integer, str varchar))', ); const appender = await connection.createAppender('target'); appender.appendDataChunk(chunk); @@ -1931,7 +1927,7 @@ ORDER BY name tinyint tinyint,\ smallint smallint,\ int integer\ - )' + )', ); const appender = await connection.createAppender('target'); appender.appendDataChunk(chunk); @@ -1963,7 +1959,7 @@ ORDER BY name await connection.run( `create table target(${columnNameAndTypeObjects .map(({ name, type }) => `"${name.replace(`"`, `""`)}" ${type}`) - .join(', ')})` + .join(', ')})`, ); const appender = await connection.createAppender('target'); appender.appendDataChunk(chunk); @@ -1994,19 +1990,19 @@ ORDER BY name resultChunk, 15, DuckDBTimestampSecondsVector, - columns[15] + columns[15], ); assertValues( resultChunk, 16, DuckDBTimestampMillisecondsVector, - columns[16] + columns[16], ); assertValues( resultChunk, 17, DuckDBTimestampNanosecondsVector, - columns[17] + columns[17], ); assertValues(resultChunk, 18, DuckDBTimeTZVector, columns[18]); assertValues(resultChunk, 19, DuckDBTimestampTZVector, columns[19]); @@ -2057,7 +2053,7 @@ ORDER BY name await connection.run( `create table target(${columnNameAndTypeObjects .map(({ name, type }) => `"${name.replace(`"`, `""`)}" ${type}`) - .join(', ')})` + .join(', ')})`, ); const appender = await connection.createAppender('target'); @@ -2100,19 +2096,19 @@ ORDER BY name resultChunk, 15, DuckDBTimestampSecondsVector, - columns[15] + columns[15], ); assertValues( resultChunk, 16, DuckDBTimestampMillisecondsVector, - columns[16] + columns[16], ); assertValues( resultChunk, 17, DuckDBTimestampNanosecondsVector, - columns[17] + columns[17], ); assertValues(resultChunk, 18, DuckDBTimeTZVector, columns[18]); assertValues(resultChunk, 19, DuckDBTimestampTZVector, columns[19]); @@ -2180,7 +2176,7 @@ ORDER BY name const runIteration = async (i: number) => { const prepared = await connection.prepare( - 'SELECT * FROM test_table WHERE id = $1' + 'SELECT * FROM test_table WHERE id = $1', ); prepared.bindInteger(1, (i % 5) + 1); @@ -2220,7 +2216,7 @@ ORDER BY name output.flush(); }, returnType: VARCHAR, - }) + }), ); const reader = await connection.runAndReadAll('select my_func()'); const columns = reader.getColumnsObject(); @@ -2238,14 +2234,14 @@ ORDER BY name for (let rowIndex = 0; rowIndex < input.rowCount; rowIndex++) { output.setItem( rowIndex, - `my_output_${rowIndex}_${JSON.stringify(extraInfo)}` + `my_output_${rowIndex}_${JSON.stringify(extraInfo)}`, ); } output.flush(); }, returnType: VARCHAR, - extraInfo: { 'my_extra_info_key': 'my_extra_info_value' }, - }) + extraInfo: { my_extra_info_key: 
'my_extra_info_value' }, + }), ); const reader = await connection.runAndReadAll('select my_func()'); const columns = reader.getColumnsObject(); @@ -2266,7 +2262,7 @@ ORDER BY name throw new Error('my_error'); }, returnType: VARCHAR, - }) + }), ); try { await connection.run('select my_func()'); @@ -2286,7 +2282,7 @@ ORDER BY name info.setError('my_error'); }, returnType: VARCHAR, - }) + }), ); try { await connection.run('select my_func()'); @@ -2309,8 +2305,8 @@ ORDER BY name output.setItem( rowIndex, `my_output_${rowIndex}_${v0.getItem(rowIndex)}_${v1.getItem( - rowIndex - )}` + rowIndex, + )}`, ); } output.flush(); @@ -2318,14 +2314,14 @@ ORDER BY name returnType: VARCHAR, parameterTypes: [INTEGER, VARCHAR], volatile: true, - }) + }), ); const reader = await connection.runAndReadAll( - `select my_func(42, 'duck') as my_func_result from range(3)` + `select my_func(42, 'duck') as my_func_result from range(3)`, ); const columns = reader.getColumnsObject(); assert.deepEqual(columns, { - 'my_func_result': [ + my_func_result: [ 'my_output_0_42_duck', 'my_output_1_42_duck', 'my_output_2_42_duck', @@ -2350,12 +2346,12 @@ ORDER BY name columnIndex++ ) { argValues.push( - input.getColumnVector(columnIndex).getItem(rowIndex) + input.getColumnVector(columnIndex).getItem(rowIndex), ); } output.setItem( rowIndex, - `my_output_${rowIndex}_${argValues.join('_')}` + `my_output_${rowIndex}_${argValues.join('_')}`, ); } output.flush(); @@ -2363,14 +2359,14 @@ ORDER BY name returnType: VARCHAR, varArgsType: INTEGER, volatile: true, - }) + }), ); const reader = await connection.runAndReadAll( - `select my_func(11, 13, 17) as my_func_result from range(3)` + `select my_func(11, 13, 17) as my_func_result from range(3)`, ); const columns = reader.getColumnsObject(); assert.deepEqual(columns, { - 'my_func_result': [ + my_func_result: [ 'my_output_0_11_13_17', 'my_output_1_11_13_17', 'my_output_2_11_13_17', @@ -2393,7 +2389,7 @@ ORDER BY name returnType: VARCHAR, parameterTypes: [INTEGER], specialHandling: true, - }) + }), ); const reader = await connection.runAndReadAll(`select my_func(NULL)`); const columns = reader.getColumnsObject(); diff --git a/api/test/bench/prepare.bench.ts b/api/test/bench/prepare.bench.ts index 2128668c..3c07e77e 100644 --- a/api/test/bench/prepare.bench.ts +++ b/api/test/bench/prepare.bench.ts @@ -1,10 +1,10 @@ -import { bench, describe } from "vitest"; +import { bench, describe } from 'vitest'; import { DuckDBConnection, DuckDBInstance, DuckDBIntervalValue, DuckDBPreparedStatement, -} from "../../src"; +} from '../../src'; let instance: DuckDBInstance; let connection: DuckDBConnection; @@ -37,7 +37,7 @@ FROM */ function startMS() { return BigInt( - Math.floor(Math.random() * Number(TOTAL_SIZE - SELECTION_SIZE)) + Math.floor(Math.random() * Number(TOTAL_SIZE - SELECTION_SIZE)), ); } @@ -61,7 +61,7 @@ describe(`Parameterised queries`, () => { }, { setup, - } + }, ); bench( @@ -72,11 +72,11 @@ describe(`Parameterised queries`, () => { prepared.bindInterval( 1, - new DuckDBIntervalValue(0, 0, startInterval * 1000n) + new DuckDBIntervalValue(0, 0, startInterval * 1000n), ); prepared.bindInterval( 2, - new DuckDBIntervalValue(0, 0, endInterval * 1000n) + new DuckDBIntervalValue(0, 0, endInterval * 1000n), ); await prepared.runAndReadAll(); @@ -85,10 +85,10 @@ describe(`Parameterised queries`, () => { setup: async () => { await setup(); - const query = factory("$1", "$2"); + const query = factory('$1', '$2'); prepared = await connection.prepare(query); }, - } + }, ); bench( @@ -97,22 +97,22 @@ 
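For orientation, the variants benchmarked below compare inlining literal intervals, preparing once and rebinding per iteration, and re-preparing per iteration. A minimal sketch of the rebind variant, using only names defined in this file (factory, SELECTION_SIZE, connection); the endpoint values are illustrative, not the benchmark's:

    const prepared = await connection.prepare(factory('$1', '$2'));
    // Rebind new endpoints on each iteration; the statement itself is prepared once.
    prepared.bindInterval(1, new DuckDBIntervalValue(0, 0, 0n));
    prepared.bindInterval(2, new DuckDBIntervalValue(0, 0, SELECTION_SIZE * 1000n));
    await prepared.runAndReadAll();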
describe(`Parameterised queries`, () => { const startInterval = startMS(); const endInterval = startInterval + SELECTION_SIZE; - const query = factory("$1", "$2"); + const query = factory('$1', '$2'); prepared = await connection.prepare(query); prepared.bindInterval( 1, - new DuckDBIntervalValue(0, 0, startInterval * 1000n) + new DuckDBIntervalValue(0, 0, startInterval * 1000n), ); prepared.bindInterval( 2, - new DuckDBIntervalValue(0, 0, endInterval * 1000n) + new DuckDBIntervalValue(0, 0, endInterval * 1000n), ); await prepared.runAndReadAll(); }, { setup, - } + }, ); }); diff --git a/api/test/bench/read.bench.ts b/api/test/bench/read.bench.ts index c47400c2..54a80182 100644 --- a/api/test/bench/read.bench.ts +++ b/api/test/bench/read.bench.ts @@ -1,9 +1,9 @@ -import { bench, describe } from "vitest"; +import { bench, describe } from 'vitest'; import { DuckDBConnection, DuckDBInstance, DuckDBPendingResultState, -} from "../../src"; +} from '../../src'; let instance: DuckDBInstance; let connection: DuckDBConnection; @@ -36,7 +36,7 @@ type Example = { const examples: Example[] = [ { - name: "Row Fetching", + name: 'Row Fetching', factory: (start, end) => `SELECT * FROM test @@ -44,7 +44,7 @@ const examples: Example[] = [ AND TIMESTAMP '2025-01-01' + ${end};`, }, { - name: "Overall Aggregates", + name: 'Overall Aggregates', factory: (start, end) => `SELECT mean("value"), min("value"), max("value") FROM test @@ -52,7 +52,7 @@ const examples: Example[] = [ AND TIMESTAMP '2025-01-01' + ${end};`, }, { - name: "Rolling Aggregates", + name: 'Rolling Aggregates', factory: (start, end) => `SELECT mean("value") OVER previous_second, min("value") OVER previous_second, @@ -69,7 +69,7 @@ const examples: Example[] = [ function queryFactory(example: Example) { const s = BigInt( - Math.floor(Math.random() * Number(TOTAL_SIZE - SELECTION_SIZE)) + Math.floor(Math.random() * Number(TOTAL_SIZE - SELECTION_SIZE)), ); const e = s + SELECTION_SIZE; const startInterval = `INTERVAL ${s} MILLISECONDS`; @@ -83,11 +83,11 @@ function queryFactory(example: Example) { for (const full of [false, true]) { for (const example of examples) { describe(`${example.name} - ${ - full ? "Full Result" : "Time to First Row" + full ? 'Full Result' : 'Time to First Row' }`, () => { bench( `${example.name} - ${ - full ? "runAndReadAll()" : "runAndReadUntil(q, 1)" + full ? 'runAndReadAll()' : 'runAndReadUntil(q, 1)' }`, async () => { const query = queryFactory(example); @@ -101,13 +101,13 @@ for (const full of [false, true]) { { setup, iterations: 20, - } + }, ); bench( `${example.name} - ${ full - ? "start runTask pending.readAll()" - : "start runTask pending.readUntil(1)" + ? 'start runTask pending.readAll()' + : 'start runTask pending.readUntil(1)' }`, async () => { const query = queryFactory(example); @@ -127,13 +127,13 @@ for (const full of [false, true]) { { setup, iterations: 20, - } + }, ); bench( `${example.name} - ${ full - ? "start runTask fetchChunks loop" - : "start runTask single fetchChunk" + ? 'start runTask fetchChunks loop' + : 'start runTask single fetchChunk' }`, async () => { const query = queryFactory(example); @@ -161,12 +161,12 @@ for (const full of [false, true]) { { setup, iterations: 20, - } + }, ); bench( `${example.name} - ${ - full ? "streamAndReadAll()" : "streamAndReadUntil(q, 1)" + full ? 
'streamAndReadAll()' : 'streamAndReadUntil(q, 1)' }`, async () => { const query = queryFactory(example); @@ -180,13 +180,13 @@ for (const full of [false, true]) { { setup, iterations: 20, - } + }, ); bench( `${example.name} - ${ full - ? "startStream runTask pending.readAll()" - : "startStream runTask pending.readUntil(1)" + ? 'startStream runTask pending.readAll()' + : 'startStream runTask pending.readUntil(1)' }`, async () => { const query = queryFactory(example); @@ -206,13 +206,13 @@ for (const full of [false, true]) { { setup, iterations: 20, - } + }, ); bench( `${example.name} - ${ full - ? "startStream runTask fetchChunks loop" - : "startStream runTask single fetchChunk" + ? 'startStream runTask fetchChunks loop' + : 'startStream runTask single fetchChunk' }`, async () => { const query = queryFactory(example); @@ -240,11 +240,11 @@ for (const full of [false, true]) { { setup, iterations: 20, - } + }, ); bench( `${example.name} - ${ - full ? "run fetchChunks loop" : "run single fetchChunk" + full ? 'run fetchChunks loop' : 'run single fetchChunk' }`, async () => { const query = queryFactory(example); @@ -266,11 +266,11 @@ for (const full of [false, true]) { { setup, iterations: 20, - } + }, ); bench( `${example.name} - ${ - full ? "stream fetchChunks loop" : "stream single fetchChunk" + full ? 'stream fetchChunks loop' : 'stream single fetchChunk' }`, async () => { const query = queryFactory(example); @@ -292,7 +292,7 @@ for (const full of [false, true]) { { setup, iterations: 20, - } + }, ); }); } diff --git a/api/test/bench/types_bit.bench.ts b/api/test/bench/types_bit.bench.ts index 74df509d..72a0af2b 100644 --- a/api/test/bench/types_bit.bench.ts +++ b/api/test/bench/types_bit.bench.ts @@ -2,8 +2,24 @@ import { bench, describe } from 'vitest'; import { benchFn, benchOpts } from './util/benchUtils'; describe('types (bit)', () => { - bench('bit (small)', benchFn(`select '101010'::bit from range(1000000)`), benchOpts()); - bench('bit (short)', benchFn(`select bitstring('0101011', 11 * 8) from range(1000000)`), benchOpts()); - bench('bit (short + 1 = smallest long)', benchFn(`select bitstring('0101011', 11 * 8 + 1) from range(1000000)`), benchOpts()); - bench('bit (long)', benchFn(`select bitstring('0101011', 11 * 8 + 12 * 8) from range(1000000)`), benchOpts()); + bench( + 'bit (small)', + benchFn(`select '101010'::bit from range(1000000)`), + benchOpts(), + ); + bench( + 'bit (short)', + benchFn(`select bitstring('0101011', 11 * 8) from range(1000000)`), + benchOpts(), + ); + bench( + 'bit (short + 1 = smallest long)', + benchFn(`select bitstring('0101011', 11 * 8 + 1) from range(1000000)`), + benchOpts(), + ); + bench( + 'bit (long)', + benchFn(`select bitstring('0101011', 11 * 8 + 12 * 8) from range(1000000)`), + benchOpts(), + ); }); diff --git a/api/test/bench/types_datetime.bench.ts b/api/test/bench/types_datetime.bench.ts index 26b4ca41..73a744d6 100644 --- a/api/test/bench/types_datetime.bench.ts +++ b/api/test/bench/types_datetime.bench.ts @@ -2,22 +2,66 @@ import { bench, describe } from 'vitest'; import { benchFn, benchOpts } from './util/benchUtils'; describe('types (date)', () => { - bench('date', benchFn(`select '2123-12-31'::date from range(1000000)`), benchOpts()); + bench( + 'date', + benchFn(`select '2123-12-31'::date from range(1000000)`), + benchOpts(), + ); }); describe('types (time)', () => { - bench('time', benchFn(`select '12:34:56.789123'::time from range(1000000)`), benchOpts()); - bench('timetz', benchFn(`select '12:34:56-15:59:59'::timetz from 
range(1000000)`), benchOpts()); + bench( + 'time', + benchFn(`select '12:34:56.789123'::time from range(1000000)`), + benchOpts(), + ); + bench( + 'timetz', + benchFn(`select '12:34:56-15:59:59'::timetz from range(1000000)`), + benchOpts(), + ); }); describe('types (timestamp)', () => { - bench('timestamp', benchFn(`select '2123-12-31 12:34:56.789123'::timestamp from range(1000000)`), benchOpts()); - bench('timestamp_s', benchFn(`select '2123-12-31 12:34:56'::timestamp_s from range(1000000)`), benchOpts()); - bench('timestamp_ms', benchFn(`select '2123-12-31 12:34:56.789'::timestamp_ms from range(1000000)`), benchOpts()); - bench('timestamp_ns', benchFn(`select '2123-12-31 12:34:56.789123'::timestamp_ns from range(1000000)`), benchOpts()); - bench('timestamptz', benchFn(`select '2123-12-31 12:34:56.789123'::timestamptz from range(1000000)`), benchOpts()); + bench( + 'timestamp', + benchFn( + `select '2123-12-31 12:34:56.789123'::timestamp from range(1000000)`, + ), + benchOpts(), + ); + bench( + 'timestamp_s', + benchFn(`select '2123-12-31 12:34:56'::timestamp_s from range(1000000)`), + benchOpts(), + ); + bench( + 'timestamp_ms', + benchFn( + `select '2123-12-31 12:34:56.789'::timestamp_ms from range(1000000)`, + ), + benchOpts(), + ); + bench( + 'timestamp_ns', + benchFn( + `select '2123-12-31 12:34:56.789123'::timestamp_ns from range(1000000)`, + ), + benchOpts(), + ); + bench( + 'timestamptz', + benchFn( + `select '2123-12-31 12:34:56.789123'::timestamptz from range(1000000)`, + ), + benchOpts(), + ); }); describe('types (interval)', () => { - bench('interval', benchFn('select interval 1 minute from range(1000000)'), benchOpts()); + bench( + 'interval', + benchFn('select interval 1 minute from range(1000000)'), + benchOpts(), + ); }); diff --git a/api/test/bench/types_enum.bench.ts b/api/test/bench/types_enum.bench.ts index e15eef68..8ba4a6a8 100644 --- a/api/test/bench/types_enum.bench.ts +++ b/api/test/bench/types_enum.bench.ts @@ -2,19 +2,35 @@ import { bench, describe } from 'vitest'; import { benchFn, benchOpts } from './util/benchUtils'; describe('types (enum)', () => { - bench('enum (small)', benchFn(`select 'a'::small_enum from range(1000000)`), benchOpts({ - additionalSetup: async (connection) => { - await connection.run(`create type small_enum as enum ('a', 'b')`); - }, - })); - bench('enum (medium)', benchFn(`select 'enum_0'::medium_enum from range(1000000)`), benchOpts({ - additionalSetup: async (connection) => { - await connection.run(`create type medium_enum as enum (select 'enum_' || i from range(300) t(i))`); - } - })); - bench.skip('enum (large)', benchFn(`select 'enum_0'::large_enum from range(1000000)`), benchOpts({ - additionalSetup: async (connection) => { - await connection.run(`create type large_enum as enum (select 'enum_' || i from range(70000) t(i))`); - } - })); + bench( + 'enum (small)', + benchFn(`select 'a'::small_enum from range(1000000)`), + benchOpts({ + additionalSetup: async (connection) => { + await connection.run(`create type small_enum as enum ('a', 'b')`); + }, + }), + ); + bench( + 'enum (medium)', + benchFn(`select 'enum_0'::medium_enum from range(1000000)`), + benchOpts({ + additionalSetup: async (connection) => { + await connection.run( + `create type medium_enum as enum (select 'enum_' || i from range(300) t(i))`, + ); + }, + }), + ); + bench.skip( + 'enum (large)', + benchFn(`select 'enum_0'::large_enum from range(1000000)`), + benchOpts({ + additionalSetup: async (connection) => { + await connection.run( + `create type large_enum as enum 
(select 'enum_' || i from range(70000) t(i))`, + ); + }, + }), + ); }); diff --git a/api/test/bench/types_list.bench.ts b/api/test/bench/types_list.bench.ts index 6daeb7bf..138d14f5 100644 --- a/api/test/bench/types_list.bench.ts +++ b/api/test/bench/types_list.bench.ts @@ -3,7 +3,19 @@ import { benchFn, benchOpts } from './util/benchUtils'; describe('types (list & array)', () => { bench('list[int]', benchFn('select [1] from range(1000000)'), benchOpts()); - bench('list[varchar]', benchFn(`select ['a'] from range(1000000)`), benchOpts()); - bench('array[int]', benchFn('select array_value(1) from range(1000000)'), benchOpts()); - bench('array[varchar]', benchFn(`select array_value('a') from range(1000000)`), benchOpts()); + bench( + 'list[varchar]', + benchFn(`select ['a'] from range(1000000)`), + benchOpts(), + ); + bench( + 'array[int]', + benchFn('select array_value(1) from range(1000000)'), + benchOpts(), + ); + bench( + 'array[varchar]', + benchFn(`select array_value('a') from range(1000000)`), + benchOpts(), + ); }); diff --git a/api/test/bench/types_misc.bench.ts b/api/test/bench/types_misc.bench.ts index d3651a5c..d947c9c0 100644 --- a/api/test/bench/types_misc.bench.ts +++ b/api/test/bench/types_misc.bench.ts @@ -10,5 +10,9 @@ describe('types (uuid)', () => { }); describe('types (union)', () => { - bench('union', benchFn(`select union_value(t := 'a') from range(1000000)`), benchOpts()); + bench( + 'union', + benchFn(`select union_value(t := 'a') from range(1000000)`), + benchOpts(), + ); }); diff --git a/api/test/bench/types_numeric.bench.ts b/api/test/bench/types_numeric.bench.ts index d29c51aa..52f32ac3 100644 --- a/api/test/bench/types_numeric.bench.ts +++ b/api/test/bench/types_numeric.bench.ts @@ -2,23 +2,77 @@ import { bench, describe } from 'vitest'; import { benchFn, benchOpts } from './util/benchUtils'; describe('types (numeric)', () => { - bench('tinyint', benchFn('select 1::tinyint from range(1000000)'), benchOpts()); - bench('smallint', benchFn('select 1::smallint from range(1000000)'), benchOpts()); - bench('integer', benchFn('select 1::integer from range(1000000)'), benchOpts()); + bench( + 'tinyint', + benchFn('select 1::tinyint from range(1000000)'), + benchOpts(), + ); + bench( + 'smallint', + benchFn('select 1::smallint from range(1000000)'), + benchOpts(), + ); + bench( + 'integer', + benchFn('select 1::integer from range(1000000)'), + benchOpts(), + ); bench('bigint', benchFn('select 1::bigint from range(1000000)'), benchOpts()); - bench('hugeint', benchFn('select 1::hugeint from range(1000000)'), benchOpts()); + bench( + 'hugeint', + benchFn('select 1::hugeint from range(1000000)'), + benchOpts(), + ); - bench('utinyint', benchFn('select 1::utinyint from range(1000000)'), benchOpts()); - bench('usmallint', benchFn('select 1::usmallint from range(1000000)'), benchOpts()); - bench('uinteger', benchFn('select 1::uinteger from range(1000000)'), benchOpts()); - bench('ubigint', benchFn('select 1::ubigint from range(1000000)'), benchOpts()); - bench('uhugeint', benchFn('select 1::uhugeint from range(1000000)'), benchOpts()); + bench( + 'utinyint', + benchFn('select 1::utinyint from range(1000000)'), + benchOpts(), + ); + bench( + 'usmallint', + benchFn('select 1::usmallint from range(1000000)'), + benchOpts(), + ); + bench( + 'uinteger', + benchFn('select 1::uinteger from range(1000000)'), + benchOpts(), + ); + bench( + 'ubigint', + benchFn('select 1::ubigint from range(1000000)'), + benchOpts(), + ); + bench( + 'uhugeint', + benchFn('select 1::uhugeint from 
range(1000000)'), + benchOpts(), + ); bench('float', benchFn('select 1::float from range(1000000)'), benchOpts()); bench('double', benchFn('select 1::double from range(1000000)'), benchOpts()); - bench('decimal (2 bytes)', benchFn('select 999.9::decimal(4,1) from range(1000000)'), benchOpts()); - bench('decimal (4 bytes)', benchFn('select 99999.9999::decimal(9,4) from range(1000000)'), benchOpts()); - bench('decimal (8 bytes)', benchFn('select 999999999999.999999::decimal(18,6) from range(1000000)'), benchOpts()); - bench('decimal (16 bytes)', benchFn('select 9999999999999999999999999999.9999999999::decimal(38,10) from range(1000000)'), benchOpts()); + bench( + 'decimal (2 bytes)', + benchFn('select 999.9::decimal(4,1) from range(1000000)'), + benchOpts(), + ); + bench( + 'decimal (4 bytes)', + benchFn('select 99999.9999::decimal(9,4) from range(1000000)'), + benchOpts(), + ); + bench( + 'decimal (8 bytes)', + benchFn('select 999999999999.999999::decimal(18,6) from range(1000000)'), + benchOpts(), + ); + bench( + 'decimal (16 bytes)', + benchFn( + 'select 9999999999999999999999999999.9999999999::decimal(38,10) from range(1000000)', + ), + benchOpts(), + ); }); diff --git a/api/test/bench/types_struct.bench.ts b/api/test/bench/types_struct.bench.ts index 0e9c3e69..0085ffe9 100644 --- a/api/test/bench/types_struct.bench.ts +++ b/api/test/bench/types_struct.bench.ts @@ -2,8 +2,24 @@ import { bench, describe } from 'vitest'; import { benchFn, benchOpts } from './util/benchUtils'; describe('types (struct & map)', () => { - bench('struct[int]', benchFn('select {a:1} from range(1000000)'), benchOpts()); - bench('struct[varchar]', benchFn(`select {a:'a'} from range(1000000)`), benchOpts()); - bench('map[int,int]', benchFn('select map {1:1} from range(1000000)'), benchOpts()); - bench('map[varchar,varchar]', benchFn(`select map {'a':'a'} from range(1000000)`), benchOpts()); + bench( + 'struct[int]', + benchFn('select {a:1} from range(1000000)'), + benchOpts(), + ); + bench( + 'struct[varchar]', + benchFn(`select {a:'a'} from range(1000000)`), + benchOpts(), + ); + bench( + 'map[int,int]', + benchFn('select map {1:1} from range(1000000)'), + benchOpts(), + ); + bench( + 'map[varchar,varchar]', + benchFn(`select map {'a':'a'} from range(1000000)`), + benchOpts(), + ); }); diff --git a/api/test/bench/types_varchar.bench.ts b/api/test/bench/types_varchar.bench.ts index 593e4b4d..75496e73 100644 --- a/api/test/bench/types_varchar.bench.ts +++ b/api/test/bench/types_varchar.bench.ts @@ -2,8 +2,24 @@ import { bench, describe } from 'vitest'; import { benchFn, benchOpts } from './util/benchUtils'; describe('types (varchar & blob)', () => { - bench('varchar (short)', benchFn(`select 'abcdefghijkl' from range(1000000)`), benchOpts()); - bench('varchar (long)', benchFn(`select 'abcdefghijklmnopqrstuvwx' from range(1000000)`), benchOpts()); - bench('blob (short)', benchFn(`select 'abcdefghijkl'::blob from range(1000000)`), benchOpts()); - bench('blob (long)', benchFn(`select 'abcdefghijklmnopqrstuvwx'::blob from range(1000000)`), benchOpts()); + bench( + 'varchar (short)', + benchFn(`select 'abcdefghijkl' from range(1000000)`), + benchOpts(), + ); + bench( + 'varchar (long)', + benchFn(`select 'abcdefghijklmnopqrstuvwx' from range(1000000)`), + benchOpts(), + ); + bench( + 'blob (short)', + benchFn(`select 'abcdefghijkl'::blob from range(1000000)`), + benchOpts(), + ); + bench( + 'blob (long)', + benchFn(`select 'abcdefghijklmnopqrstuvwx'::blob from range(1000000)`), + benchOpts(), + ); }); diff --git 
a/api/test/bench/util/benchUtils.ts b/api/test/bench/util/benchUtils.ts index 92120b14..db282ec8 100644 --- a/api/test/bench/util/benchUtils.ts +++ b/api/test/bench/util/benchUtils.ts @@ -13,13 +13,15 @@ export function benchFn(sql: string) { return () => runSql(connection, sql); } -export function benchOpts(options?: { additionalSetup?: (connection: DuckDBConnection) => Promise<void> }) { +export function benchOpts(options?: { + additionalSetup?: (connection: DuckDBConnection) => Promise<void>; +}) { const additionalSetup = options?.additionalSetup; - const setup = additionalSetup ? ( - async () => { - await setupConnection(); - await additionalSetup(connection); - } - ) : setupConnection; + const setup = additionalSetup + ? async () => { + await setupConnection(); + await additionalSetup(connection); + } + : setupConnection; return { setup }; } diff --git a/api/test/bench/util/runSql.ts b/api/test/bench/util/runSql.ts index 885744f3..c937dbc8 100644 --- a/api/test/bench/util/runSql.ts +++ b/api/test/bench/util/runSql.ts @@ -1,6 +1,9 @@ import { DuckDBConnection } from '../../../src'; -export async function runSql(connection: DuckDBConnection, sql: string): Promise<void> { +export async function runSql( + connection: DuckDBConnection, + sql: string, +): Promise<void> { const result = await connection.run(sql); let valueCount = 0; let nullCount = 0; diff --git a/api/test/bench/validity.bench.ts b/api/test/bench/validity.bench.ts index df350acf..fb9e8631 100644 --- a/api/test/bench/validity.bench.ts +++ b/api/test/bench/validity.bench.ts @@ -2,5 +2,11 @@ import { bench, describe } from 'vitest'; import { benchFn, benchOpts } from './util/benchUtils'; describe('validity', () => { - bench('odds null', benchFn('SELECT CASE WHEN range % 2 = 0 THEN range ELSE NULL END asdf FROM range(1000000)'), benchOpts()); + bench( + 'odds null', + benchFn( + 'SELECT CASE WHEN range % 2 = 0 THEN range ELSE NULL END asdf FROM range(1000000)', + ), + benchOpts(), + ); }); diff --git a/api/test/bench/write.bench.ts b/api/test/bench/write.bench.ts index 51e8aa8f..b5774048 100644 --- a/api/test/bench/write.bench.ts +++ b/api/test/bench/write.bench.ts @@ -26,13 +26,13 @@ for (const batchSize of [1, 1000]) { `${batchSize} insert bind`, async () => { const query = await connection.prepare( - 'INSERT INTO test (timestamp, value) VALUES ($1, $2);' + 'INSERT INTO test (timestamp, value) VALUES ($1, $2);', ); for (let index = 0; index < batchSize; index++) { query.bindTimestamp( 1, - new DuckDBTimestampValue(BigInt(Date.now()) * 1000n) + new DuckDBTimestampValue(BigInt(Date.now()) * 1000n), ); query.bindFloat(2, Math.random() * 1_000_000); @@ -41,7 +41,7 @@ }, { setup, - } + }, ); bench( `${batchSize} row append`, async () => { @@ -50,7 +50,7 @@ for (let index = 0; index < batchSize; index++) { appender.appendTimestamp( - new DuckDBTimestampValue(BigInt(Date.now()) * 1000n) + new DuckDBTimestampValue(BigInt(Date.now()) * 1000n), ); appender.appendFloat(Math.random() * 1_000_000); appender.endRow(); @@ -59,7 +59,7 @@ }, { setup, - } + }, ); }); } diff --git a/api/test/util/replaceSqlNullWithInteger.ts b/api/test/util/replaceSqlNullWithInteger.ts index bf680dbe..8ab4a54e 100644 --- a/api/test/util/replaceSqlNullWithInteger.ts +++ b/api/test/util/replaceSqlNullWithInteger.ts @@ -19,7 +19,7 @@ export function replaceSqlNullWithInteger(input: DuckDBType): DuckDBType { const entries: Record<string, DuckDBType> = {}; for (let i = 0; i < input.entryCount; i++) {
entries[input.entryNames[i]] = replaceSqlNullWithInteger( - input.entryTypes[i] + input.entryTypes[i], ); } return STRUCT(entries, input.alias); @@ -28,19 +28,19 @@ export function replaceSqlNullWithInteger(input: DuckDBType): DuckDBType { return ARRAY( replaceSqlNullWithInteger(input.valueType), input.length, - input.alias + input.alias, ); case DuckDBTypeId.MAP: return MAP( replaceSqlNullWithInteger(input.keyType), replaceSqlNullWithInteger(input.valueType), - input.alias + input.alias, ); case DuckDBTypeId.UNION: { const members: Record<string, DuckDBType> = {}; for (let i = 0; i < input.memberCount; i++) { members[input.memberTags[i]] = replaceSqlNullWithInteger( - input.memberTypes[i] + input.memberTypes[i], ); } return UNION(members, input.alias); diff --git a/api/test/util/testAllTypes.ts b/api/test/util/testAllTypes.ts index aa21ff8c..8805dde6 100644 --- a/api/test/util/testAllTypes.ts +++ b/api/test/util/testAllTypes.ts @@ -60,7 +60,7 @@ const BI_38_9s = BI_10_8 * BI_10_10 * BI_10_10 * BI_10_10 - 1n; const smallEnumValues = ['DUCK_DUCK_ENUM', 'GOOSE']; const mediumEnumValues = Array.from({ length: 300 }).map((_, i) => `enum_${i}`); const largeEnumValues = Array.from({ length: 70000 }).map( - (_, i) => `enum_${i}` + (_, i) => `enum_${i}`, ); export interface ColumnNameAndType { @@ -81,7 +81,7 @@ function col( type: DuckDBType, typeJson: Json, values: readonly DuckDBValue[], - valuesJson: readonly Json[] + valuesJson: readonly Json[], ): ColumnNameTypeAndValues { return { name, type, typeJson, values, valuesJson }; } @@ -93,175 +93,175 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { BOOLEAN, { typeId: 1 }, [false, true, null], - [false, true, null] + [false, true, null], ), col( 'tinyint', TINYINT, { typeId: 2 }, [TINYINT.min, TINYINT.max, null], - [TINYINT.min, TINYINT.max, null] + [TINYINT.min, TINYINT.max, null], ), col( 'smallint', SMALLINT, { typeId: 3 }, [SMALLINT.min, SMALLINT.max, null], - [SMALLINT.min, SMALLINT.max, null] + [SMALLINT.min, SMALLINT.max, null], ), col( 'int', INTEGER, { typeId: 4 }, [INTEGER.min, INTEGER.max, null], - [INTEGER.min, INTEGER.max, null] + [INTEGER.min, INTEGER.max, null], ), col( 'bigint', BIGINT, { typeId: 5 }, [BIGINT.min, BIGINT.max, null], - [String(BIGINT.min), String(BIGINT.max), null] + [String(BIGINT.min), String(BIGINT.max), null], ), col( 'hugeint', HUGEINT, { typeId: 16 }, [HUGEINT.min, HUGEINT.max, null], - [String(HUGEINT.min), String(HUGEINT.max), null] + [String(HUGEINT.min), String(HUGEINT.max), null], ), col( 'uhugeint', UHUGEINT, { typeId: 32 }, [UHUGEINT.min, UHUGEINT.max, null], - [String(UHUGEINT.min), String(UHUGEINT.max), null] + [String(UHUGEINT.min), String(UHUGEINT.max), null], ), col( 'utinyint', UTINYINT, { typeId: 6 }, [UTINYINT.min, UTINYINT.max, null], - [UTINYINT.min, UTINYINT.max, null] + [UTINYINT.min, UTINYINT.max, null], ), col( 'usmallint', USMALLINT, { typeId: 7 }, [USMALLINT.min, USMALLINT.max, null], - [USMALLINT.min, USMALLINT.max, null] + [USMALLINT.min, USMALLINT.max, null], ), col( 'uint', UINTEGER, { typeId: 8 }, [UINTEGER.min, UINTEGER.max, null], - [UINTEGER.min, UINTEGER.max, null] + [UINTEGER.min, UINTEGER.max, null], ), col( 'ubigint', UBIGINT, { typeId: 9 }, [UBIGINT.min, UBIGINT.max, null], - [String(UBIGINT.min), String(UBIGINT.max), null] + [String(UBIGINT.min), String(UBIGINT.max), null], ), col( 'varint', VARINT, { typeId: 35 }, [VARINT.min, VARINT.max, null], - [String(VARINT.min), String(VARINT.max), null] + [String(VARINT.min), String(VARINT.max), null], ), col( 'date', DATE, {
typeId: 13 }, [DATE.min, DATE.max, null], - [String(DATE.min), String(DATE.max), null] + [String(DATE.min), String(DATE.max), null], ), col( 'time', TIME, { typeId: 14 }, [TIME.min, TIME.max, null], - [String(TIME.min), String(TIME.max), null] + [String(TIME.min), String(TIME.max), null], ), col( 'timestamp', TIMESTAMP, { typeId: 12 }, [TIMESTAMP.min, TIMESTAMP.max, null], - [String(TIMESTAMP.min), String(TIMESTAMP.max), null] + [String(TIMESTAMP.min), String(TIMESTAMP.max), null], ), col( 'timestamp_s', TIMESTAMP_S, { typeId: 20 }, [TIMESTAMP_S.min, TIMESTAMP_S.max, null], - [String(TIMESTAMP_S.min), String(TIMESTAMP_S.max), null] + [String(TIMESTAMP_S.min), String(TIMESTAMP_S.max), null], ), col( 'timestamp_ms', TIMESTAMP_MS, { typeId: 21 }, [TIMESTAMP_MS.min, TIMESTAMP_MS.max, null], - [String(TIMESTAMP_MS.min), String(TIMESTAMP_MS.max), null] + [String(TIMESTAMP_MS.min), String(TIMESTAMP_MS.max), null], ), col( 'timestamp_ns', TIMESTAMP_NS, { typeId: 22 }, [TIMESTAMP_NS.min, TIMESTAMP_NS.max, null], - [String(TIMESTAMP_NS.min), String(TIMESTAMP_NS.max), null] + [String(TIMESTAMP_NS.min), String(TIMESTAMP_NS.max), null], ), col( 'time_tz', TIMETZ, { typeId: 30 }, [TIMETZ.min, TIMETZ.max, null], - [String(TIMETZ.min), String(TIMETZ.max), null] + [String(TIMETZ.min), String(TIMETZ.max), null], ), col( 'timestamp_tz', TIMESTAMPTZ, { typeId: 31 }, [TIMESTAMPTZ.min, TIMESTAMPTZ.max, null], - [String(TIMESTAMPTZ.min), String(TIMESTAMPTZ.max), null] + [String(TIMESTAMPTZ.min), String(TIMESTAMPTZ.max), null], ), col( 'float', FLOAT, { typeId: 10 }, [FLOAT.min, FLOAT.max, null], - [FLOAT.min, FLOAT.max, null] + [FLOAT.min, FLOAT.max, null], ), col( 'double', DOUBLE, { typeId: 11 }, [DOUBLE.min, DOUBLE.max, null], - [DOUBLE.min, DOUBLE.max, null] + [DOUBLE.min, DOUBLE.max, null], ), col( 'dec_4_1', DECIMAL(4, 1), { typeId: 19, width: 4, scale: 1 }, [decimalValue(-9999n, 4, 1), decimalValue(9999n, 4, 1), null], - ['-999.9', '999.9', null] + ['-999.9', '999.9', null], ), col( 'dec_9_4', DECIMAL(9, 4), { typeId: 19, width: 9, scale: 4 }, [decimalValue(-999999999n, 9, 4), decimalValue(999999999n, 9, 4), null], - ['-99999.9999', '99999.9999', null] + ['-99999.9999', '99999.9999', null], ), col( 'dec_18_6', DECIMAL(18, 6), { typeId: 19, width: 18, scale: 6 }, [decimalValue(-BI_18_9s, 18, 6), decimalValue(BI_18_9s, 18, 6), null], - ['-999999999999.999999', '999999999999.999999', null] + ['-999999999999.999999', '999999999999.999999', null], ), col( 'dec38_10', @@ -272,14 +272,14 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { '-9999999999999999999999999999.9999999999', '9999999999999999999999999999.9999999999', null, - ] + ], ), col( 'uuid', UUID, { typeId: 27 }, [UUID.min, UUID.max, null], - [String(UUID.min), String(UUID.max), null] + [String(UUID.min), String(UUID.max), null], ), col( 'interval', @@ -290,14 +290,14 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { { months: 0, days: 0, micros: '0' }, { months: 999, days: 999, micros: '999999999' }, null, - ] + ], ), col( 'varchar', VARCHAR, { typeId: 17 }, ['🦆🦆🦆🦆🦆🦆', 'goo\0se', null], - ['🦆🦆🦆🦆🦆🦆', 'goo\0se', null] + ['🦆🦆🦆🦆🦆🦆', 'goo\0se', null], ), col( 'blob', @@ -308,21 +308,21 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { blobValue('\x00\x00\x00a'), null, ], - ['thisisalongblob\\x00withnullbytes', '\\x00\\x00\\x00a', null] + ['thisisalongblob\\x00withnullbytes', '\\x00\\x00\\x00a', null], ), col( 'bit', BIT, { typeId: 29 }, [bitValue('0010001001011100010101011010111'), 
bitValue('10101'), null], - ['0010001001011100010101011010111', '10101', null] + ['0010001001011100010101011010111', '10101', null], ), col( 'small_enum', ENUM8(smallEnumValues), { typeId: 23, values: smallEnumValues, internalTypeId: 6 }, [smallEnumValues[0], smallEnumValues[smallEnumValues.length - 1], null], - [smallEnumValues[0], smallEnumValues[smallEnumValues.length - 1], null] + [smallEnumValues[0], smallEnumValues[smallEnumValues.length - 1], null], ), col( 'medium_enum', @@ -333,21 +333,25 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { mediumEnumValues[mediumEnumValues.length - 1], null, ], - [mediumEnumValues[0], mediumEnumValues[mediumEnumValues.length - 1], null] + [ + mediumEnumValues[0], + mediumEnumValues[mediumEnumValues.length - 1], + null, + ], ), col( 'large_enum', ENUM32(largeEnumValues), { typeId: 23, values: largeEnumValues, internalTypeId: 8 }, [largeEnumValues[0], largeEnumValues[largeEnumValues.length - 1], null], - [largeEnumValues[0], largeEnumValues[largeEnumValues.length - 1], null] + [largeEnumValues[0], largeEnumValues[largeEnumValues.length - 1], null], ), col( 'int_array', LIST(INTEGER), { typeId: 24, valueType: { typeId: 4 } }, [listValue([]), listValue([42, 999, null, null, -42]), null], - [[], [42, 999, null, null, -42], null] + [[], [42, 999, null, null, -42], null], ), col( 'double_array', @@ -358,7 +362,7 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { listValue([42.0, NaN, Infinity, -Infinity, null, -42.0]), null, ], - [[], [42, 'NaN', 'Infinity', '-Infinity', null, -42], null] + [[], [42, 'NaN', 'Infinity', '-Infinity', null, -42], null], ), col( 'date_array', @@ -386,7 +390,7 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { '2022-05-12', ], null, - ] + ], ), col( 'timestamp_array', @@ -414,7 +418,7 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { '2022-05-12 16:23:45', ], null, - ] + ], ), col( 'timestamptz_array', @@ -443,7 +447,7 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { String(timestampTZValue(1652397825n * 1000n * 1000n)), ], null, - ] + ], ), col( 'varchar_array', @@ -455,7 +459,7 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { listValue(['🦆🦆🦆🦆🦆🦆', 'goose', null, '']), null, ], - [[], ['🦆🦆🦆🦆🦆🦆', 'goose', null, ''], null] + [[], ['🦆🦆🦆🦆🦆🦆', 'goose', null, ''], null], ), col( 'nested_int_array', @@ -476,26 +480,26 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { [], [[], [42, 999, null, null, -42], null, [], [42, 999, null, null, -42]], null, - ] + ], ), col( 'struct', - STRUCT({ 'a': INTEGER, 'b': VARCHAR }), + STRUCT({ a: INTEGER, b: VARCHAR }), { typeId: 25, entryNames: ['a', 'b'], entryTypes: [{ typeId: 4 }, { typeId: 17 }], }, [ - structValue({ 'a': null, 'b': null }), - structValue({ 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }), + structValue({ a: null, b: null }), + structValue({ a: 42, b: '🦆🦆🦆🦆🦆🦆' }), null, ], - [{ 'a': null, 'b': null }, { 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }, null] + [{ a: null, b: null }, { a: 42, b: '🦆🦆🦆🦆🦆🦆' }, null], ), col( 'struct_of_arrays', - STRUCT({ 'a': LIST(INTEGER), 'b': LIST(VARCHAR) }), + STRUCT({ a: LIST(INTEGER), b: LIST(VARCHAR) }), { typeId: 25, entryNames: ['a', 'b'], @@ -505,25 +509,25 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { ], }, [ - structValue({ 'a': null, 'b': null }), + structValue({ a: null, b: null }), structValue({ - 'a': listValue([42, 999, null, null, -42]), - 'b': listValue(['🦆🦆🦆🦆🦆🦆', 'goose', null, '']), + a: 
listValue([42, 999, null, null, -42]), + b: listValue(['🦆🦆🦆🦆🦆🦆', 'goose', null, '']), }), null, ], [ - { 'a': null, 'b': null }, + { a: null, b: null }, { - 'a': [42, 999, null, null, -42], - 'b': ['🦆🦆🦆🦆🦆🦆', 'goose', null, ''], + a: [42, 999, null, null, -42], + b: ['🦆🦆🦆🦆🦆🦆', 'goose', null, ''], }, null, - ] + ], ), col( 'array_of_structs', - LIST(STRUCT({ 'a': INTEGER, 'b': VARCHAR })), + LIST(STRUCT({ a: INTEGER, b: VARCHAR })), { typeId: 24, valueType: { @@ -535,17 +539,13 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { [ listValue([]), listValue([ - structValue({ 'a': null, 'b': null }), - structValue({ 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }), + structValue({ a: null, b: null }), + structValue({ a: 42, b: '🦆🦆🦆🦆🦆🦆' }), null, ]), null, ], - [ - [], - [{ 'a': null, 'b': null }, { 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }, null], - null, - ] + [[], [{ a: null, b: null }, { a: 42, b: '🦆🦆🦆🦆🦆🦆' }, null], null], ), col( 'map', @@ -562,36 +562,36 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { [ [], [ - { 'key': 'key1', 'value': '🦆🦆🦆🦆🦆🦆' }, - { 'key': 'key2', 'value': 'goose' }, + { key: 'key1', value: '🦆🦆🦆🦆🦆🦆' }, + { key: 'key2', value: 'goose' }, ], null, - ] + ], ), col( 'union', - UNION({ 'name': VARCHAR, 'age': SMALLINT }), + UNION({ name: VARCHAR, age: SMALLINT }), { typeId: 28, memberTags: ['name', 'age'], memberTypes: [{ typeId: 17 }, { typeId: 3 }], }, [unionValue('name', 'Frank'), unionValue('age', 5), null], - [{ 'tag': 'name', 'value': 'Frank' }, { 'tag': 'age', 'value': 5 }, null] + [{ tag: 'name', value: 'Frank' }, { tag: 'age', value: 5 }, null], ), col( 'fixed_int_array', ARRAY(INTEGER, 3), { typeId: 33, valueType: { typeId: 4 }, length: 3 }, [arrayValue([null, 2, 3]), arrayValue([4, 5, 6]), null], - [[null, 2, 3], [4, 5, 6], null] + [[null, 2, 3], [4, 5, 6], null], ), col( 'fixed_varchar_array', ARRAY(VARCHAR, 3), { typeId: 33, valueType: { typeId: 17 }, length: 3 }, [arrayValue(['a', null, 'c']), arrayValue(['d', 'e', 'f']), null], - [['a', null, 'c'], ['d', 'e', 'f'], null] + [['a', null, 'c'], ['d', 'e', 'f'], null], ), col( 'fixed_nested_int_array', @@ -618,7 +618,7 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { [4, 5, 6], ], null, - ] + ], ), col( 'fixed_nested_varchar_array', @@ -649,11 +649,11 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { ['d', 'e', 'f'], ], null, - ] + ], ), col( 'fixed_struct_array', - ARRAY(STRUCT({ 'a': INTEGER, 'b': VARCHAR }), 3), + ARRAY(STRUCT({ a: INTEGER, b: VARCHAR }), 3), { typeId: 33, valueType: { @@ -665,34 +665,34 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { }, [ arrayValue([ - structValue({ 'a': null, 'b': null }), - structValue({ 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }), - structValue({ 'a': null, 'b': null }), + structValue({ a: null, b: null }), + structValue({ a: 42, b: '🦆🦆🦆🦆🦆🦆' }), + structValue({ a: null, b: null }), ]), arrayValue([ - structValue({ 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }), - structValue({ 'a': null, 'b': null }), - structValue({ 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }), + structValue({ a: 42, b: '🦆🦆🦆🦆🦆🦆' }), + structValue({ a: null, b: null }), + structValue({ a: 42, b: '🦆🦆🦆🦆🦆🦆' }), ]), null, ], [ [ - { 'a': null, 'b': null }, - { 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }, - { 'a': null, 'b': null }, + { a: null, b: null }, + { a: 42, b: '🦆🦆🦆🦆🦆🦆' }, + { a: null, b: null }, ], [ - { 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }, - { 'a': null, 'b': null }, - { 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }, + { a: 42, b: '🦆🦆🦆🦆🦆🦆' }, + { a: null, b: null }, + { a: 42, b: '🦆🦆🦆🦆🦆🦆' }, ], null, - ] + ], ), col( 
'struct_of_fixed_array', - STRUCT({ 'a': ARRAY(INTEGER, 3), 'b': ARRAY(VARCHAR, 3) }), + STRUCT({ a: ARRAY(INTEGER, 3), b: ARRAY(VARCHAR, 3) }), { typeId: 25, entryNames: ['a', 'b'], @@ -703,20 +703,20 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { }, [ structValue({ - 'a': arrayValue([null, 2, 3]), - 'b': arrayValue(['a', null, 'c']), + a: arrayValue([null, 2, 3]), + b: arrayValue(['a', null, 'c']), }), structValue({ - 'a': arrayValue([4, 5, 6]), - 'b': arrayValue(['d', 'e', 'f']), + a: arrayValue([4, 5, 6]), + b: arrayValue(['d', 'e', 'f']), }), null, ], [ - { 'a': [null, 2, 3], 'b': ['a', null, 'c'] }, - { 'a': [4, 5, 6], 'b': ['d', 'e', 'f'] }, + { a: [null, 2, 3], b: ['a', null, 'c'] }, + { a: [4, 5, 6], b: ['d', 'e', 'f'] }, null, - ] + ], ), col( 'fixed_array_of_int_list', @@ -743,7 +743,7 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { [[], [42, 999, null, null, -42], []], [[42, 999, null, null, -42], [], [42, 999, null, null, -42]], null, - ] + ], ), col( 'list_of_fixed_int_array', @@ -777,7 +777,7 @@ export function createTestAllTypesData(): ColumnNameTypeAndValues[] { [4, 5, 6], ], null, - ] + ], ), ]; } diff --git a/api/test/util/testJS.ts b/api/test/util/testJS.ts index cb19f826..40ea28e3 100644 --- a/api/test/util/testJS.ts +++ b/api/test/util/testJS.ts @@ -52,7 +52,7 @@ function col( name: string, type: DuckDBType, valuesStr: string[], - valuesJS: readonly JS[] + valuesJS: readonly JS[], ): ColumnData { return { name, type, valuesStr, valuesJS }; } @@ -64,79 +64,79 @@ export function createTestJSData(): ColumnData[] { 'tinyint', TINYINT, [String(TINYINT.min), String(TINYINT.max), 'null'], - [TINYINT.min, TINYINT.max, null] + [TINYINT.min, TINYINT.max, null], ), col( 'smallint', SMALLINT, [String(SMALLINT.min), String(SMALLINT.max), 'null'], - [SMALLINT.min, SMALLINT.max, null] + [SMALLINT.min, SMALLINT.max, null], ), col( 'int', INTEGER, [String(INTEGER.min), String(INTEGER.max), 'null'], - [INTEGER.min, INTEGER.max, null] + [INTEGER.min, INTEGER.max, null], ), col( 'bigint', BIGINT, [String(BIGINT.min), String(BIGINT.max), 'null'], - [BIGINT.min, BIGINT.max, null] + [BIGINT.min, BIGINT.max, null], ), col( 'hugeint', HUGEINT, [String(HUGEINT.min), String(HUGEINT.max), 'null'], - [HUGEINT.min, HUGEINT.max, null] + [HUGEINT.min, HUGEINT.max, null], ), col( 'uhugeint', UHUGEINT, [String(UHUGEINT.min), String(UHUGEINT.max), 'null'], - [UHUGEINT.min, UHUGEINT.max, null] + [UHUGEINT.min, UHUGEINT.max, null], ), col( 'utinyint', UTINYINT, [String(UTINYINT.min), String(UTINYINT.max), 'null'], - [UTINYINT.min, UTINYINT.max, null] + [UTINYINT.min, UTINYINT.max, null], ), col( 'usmallint', USMALLINT, [String(USMALLINT.min), String(USMALLINT.max), 'null'], - [USMALLINT.min, USMALLINT.max, null] + [USMALLINT.min, USMALLINT.max, null], ), col( 'uint', UINTEGER, [String(UINTEGER.min), String(UINTEGER.max), 'null'], - [UINTEGER.min, UINTEGER.max, null] + [UINTEGER.min, UINTEGER.max, null], ), col( 'ubigint', UBIGINT, [String(UBIGINT.min), String(UBIGINT.max), 'null'], - [UBIGINT.min, UBIGINT.max, null] + [UBIGINT.min, UBIGINT.max, null], ), col( 'varint', VARINT, [String(VARINT.min), String(VARINT.max), 'null'], - [VARINT.min, VARINT.max, null] + [VARINT.min, VARINT.max, null], ), col( 'date', DATE, [`'-271821-04-20'`, `'275760-09-13'`, 'null'], - [new Date('-271821-04-20'), new Date('+275760-09-13'), null] + [new Date('-271821-04-20'), new Date('+275760-09-13'), null], ), col( 'time', TIME, [`'${TIME.min}'`, `'${TIME.max}'`, 'null'], - 
[TIME.min.micros, TIME.max.micros, null] + [TIME.min.micros, TIME.max.micros, null], ), col( 'timestamp', @@ -150,7 +150,7 @@ export function createTestJSData(): ColumnData[] { new Date('-271821-04-20T00:00:00.000Z'), new Date('+275760-09-13T00:00:00.000Z'), null, - ] + ], ), col( 'timestamp_s', @@ -160,7 +160,7 @@ export function createTestJSData(): ColumnData[] { new Date('-271821-04-20T00:00:00Z'), new Date('+275760-09-13T00:00:00Z'), null, - ] + ], ), col( 'timestamp_ms', @@ -170,7 +170,7 @@ export function createTestJSData(): ColumnData[] { new Date('-271821-04-20T00:00:00.000Z'), new Date('+275760-09-13T00:00:00.000Z'), null, - ] + ], ), col( 'timestamp_ns', @@ -184,7 +184,7 @@ export function createTestJSData(): ColumnData[] { new Date('1677-09-22T00:00:00.000Z'), new Date('+2262-04-11 23:47:16.854Z'), null, - ] + ], ), col( 'time_tz', @@ -194,7 +194,7 @@ export function createTestJSData(): ColumnData[] { { micros: TIMETZ.min.micros, offset: TIMETZ.min.offset }, { micros: TIMETZ.max.micros, offset: TIMETZ.max.offset }, null, - ] + ], ), col( 'timestamp_tz', @@ -208,37 +208,37 @@ export function createTestJSData(): ColumnData[] { new Date('-271821-04-20T00:00:00.000Z'), new Date('+275760-09-13T00:00:00.000Z'), null, - ] + ], ), col( 'float', FLOAT, [String(FLOAT.min), String(FLOAT.max), 'null'], - [FLOAT.min, FLOAT.max, null] + [FLOAT.min, FLOAT.max, null], ), col( 'double', DOUBLE, [String(DOUBLE.min), String(DOUBLE.max), 'null'], - [DOUBLE.min, DOUBLE.max, null] + [DOUBLE.min, DOUBLE.max, null], ), col( 'dec_4_1', DECIMAL(4, 1), ['-999.9', '999.9', 'null'], - [-999.9, 999.9, null] + [-999.9, 999.9, null], ), col( 'dec_9_4', DECIMAL(9, 4), ['-99999.9999', '99999.9999', 'null'], - [-99999.9999, 99999.9999, null] + [-99999.9999, 99999.9999, null], ), col( 'dec_18_6', DECIMAL(18, 6), ['-999999999999.999999', '999999999999.999999', 'null'], - [-999999999999.999999, 999999999999.999999, null] + [-999999999999.999999, 999999999999.999999, null], ), col( 'dec38_10', @@ -252,13 +252,13 @@ export function createTestJSData(): ColumnData[] { -9999999999999999999999999999.9999999999, 9999999999999999999999999999.9999999999, null, - ] + ], ), col( 'uuid', UUID, [`'${UUID.min}'`, `'${UUID.max}'`, 'null'], - [String(UUID.min), String(UUID.max), null] + [String(UUID.min), String(UUID.max), null], ), col( 'interval', @@ -272,13 +272,13 @@ export function createTestJSData(): ColumnData[] { { months: 0, days: 0, micros: 0n }, { months: 999, days: 999, micros: 999999999n }, null, - ] + ], ), col( 'varchar', VARCHAR, [`'🦆🦆🦆🦆🦆🦆'`, `'goo\\0se'`, 'null'], - ['🦆🦆🦆🦆🦆🦆', 'goo\\0se', null] + ['🦆🦆🦆🦆🦆🦆', 'goo\\0se', null], ), col( 'blob', @@ -288,7 +288,7 @@ export function createTestJSData(): ColumnData[] { bytesFromString('thisisalongblob\x00withnullbytes'), bytesFromString('\x00\x00\x00a'), null, - ] + ], ), col( 'bit', @@ -298,7 +298,7 @@ export function createTestJSData(): ColumnData[] { bitValue('0010001001011100010101011010111').data, bitValue('10101').data, null, - ] + ], ), col( 'small_enum', @@ -308,13 +308,13 @@ export function createTestJSData(): ColumnData[] { `'${smallEnumValues[smallEnumValues.length - 1]}'`, 'null', ], - [smallEnumValues[0], smallEnumValues[smallEnumValues.length - 1], null] + [smallEnumValues[0], smallEnumValues[smallEnumValues.length - 1], null], ), col( 'int_array', LIST(INTEGER), [`[]`, `[42, 999, null, null, -42]`, 'null'], - [[], [42, 999, null, null, -42], null] + [[], [42, 999, null, null, -42], null], ), col( 'double_array', @@ -324,7 +324,7 @@ export function 
createTestJSData(): ColumnData[] { `[42.0::double, 'NaN', 'Infinity', '-Infinity', null, -42.0]`, 'null', ], - [[], [42, NaN, Infinity, -Infinity, null, -42], null] + [[], [42, NaN, Infinity, -Infinity, null, -42], null], ), col( 'date_array', @@ -350,7 +350,7 @@ export function createTestJSData(): ColumnData[] { new Date('2022-05-12'), ], null, - ] + ], ), col( 'timestamp_array', @@ -376,7 +376,7 @@ export function createTestJSData(): ColumnData[] { new Date('2022-05-12T16:23:45.000000Z'), ], null, - ] + ], ), col( 'timestamptz_array', @@ -402,13 +402,13 @@ export function createTestJSData(): ColumnData[] { new Date('2022-05-12T16:23:45.000000Z'), ], null, - ] + ], ), col( 'varchar_array', LIST(VARCHAR), [`[]`, `['🦆🦆🦆🦆🦆🦆', 'goose', null, '']`, 'null'], - [[], ['🦆🦆🦆🦆🦆🦆', 'goose', null, ''], null] + [[], ['🦆🦆🦆🦆🦆🦆', 'goose', null, ''], null], ), col( 'nested_int_array', @@ -428,17 +428,17 @@ export function createTestJSData(): ColumnData[] { [], [[], [42, 999, null, null, -42], null, [], [42, 999, null, null, -42]], null, - ] + ], ), col( 'struct', - STRUCT({ 'a': INTEGER, 'b': VARCHAR }), + STRUCT({ a: INTEGER, b: VARCHAR }), [`{ 'a': null, 'b': null }`, `{ 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }`, 'null'], - [{ 'a': null, 'b': null }, { 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }, null] + [{ a: null, b: null }, { a: 42, b: '🦆🦆🦆🦆🦆🦆' }, null], ), col( 'struct_of_arrays', - STRUCT({ 'a': LIST(INTEGER), 'b': LIST(VARCHAR) }), + STRUCT({ a: LIST(INTEGER), b: LIST(VARCHAR) }), [ `{ 'a': null, 'b': null }`, `{ @@ -448,17 +448,17 @@ export function createTestJSData(): ColumnData[] { 'null', ], [ - { 'a': null, 'b': null }, + { a: null, b: null }, { - 'a': [42, 999, null, null, -42], - 'b': ['🦆🦆🦆🦆🦆🦆', 'goose', null, ''], + a: [42, 999, null, null, -42], + b: ['🦆🦆🦆🦆🦆🦆', 'goose', null, ''], }, null, - ] + ], ), col( 'array_of_structs', - LIST(STRUCT({ 'a': INTEGER, 'b': VARCHAR })), + LIST(STRUCT({ a: INTEGER, b: VARCHAR })), [ `[]`, `[ @@ -468,11 +468,7 @@ export function createTestJSData(): ColumnData[] { ]`, 'null', ], - [ - [], - [{ 'a': null, 'b': null }, { 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }, null], - null, - ] + [[], [{ a: null, b: null }, { a: 42, b: '🦆🦆🦆🦆🦆🦆' }, null], null], ), col( 'map', @@ -485,29 +481,29 @@ export function createTestJSData(): ColumnData[] { [ [], [ - { 'key': 'key1', 'value': '🦆🦆🦆🦆🦆🦆' }, - { 'key': 'key2', 'value': 'goose' }, + { key: 'key1', value: '🦆🦆🦆🦆🦆🦆' }, + { key: 'key2', value: 'goose' }, ], null, - ] + ], ), col( 'union', - UNION({ 'name': VARCHAR, 'age': SMALLINT }), + UNION({ name: VARCHAR, age: SMALLINT }), [`union_value(name => 'Frank')`, `union_value(age => 5)`, 'null'], - [{ tag: 'name', value: 'Frank' }, { tag: 'age', value: 5 }, null] + [{ tag: 'name', value: 'Frank' }, { tag: 'age', value: 5 }, null], ), col( 'fixed_int_array', ARRAY(INTEGER, 3), [`[null, 2, 3]`, `[4, 5, 6]`, 'null'], - [[null, 2, 3], [4, 5, 6], null] + [[null, 2, 3], [4, 5, 6], null], ), col( 'fixed_varchar_array', ARRAY(VARCHAR, 3), [`['a', null, 'c']`, `['d', 'e', 'f']`, 'null'], - [['a', null, 'c'], ['d', 'e', 'f'], null] + [['a', null, 'c'], ['d', 'e', 'f'], null], ), col( 'fixed_nested_int_array', @@ -525,7 +521,7 @@ export function createTestJSData(): ColumnData[] { [4, 5, 6], ], null, - ] + ], ), col( 'fixed_nested_varchar_array', @@ -551,11 +547,11 @@ export function createTestJSData(): ColumnData[] { ['d', 'e', 'f'], ], null, - ] + ], ), col( 'fixed_struct_array', - ARRAY(STRUCT({ 'a': INTEGER, 'b': VARCHAR }), 3), + ARRAY(STRUCT({ a: INTEGER, b: VARCHAR }), 3), [ `[ { 'a': null, 'b': null }, @@ -571,21 +567,21 @@ export 
function createTestJSData(): ColumnData[] { ], [ [ - { 'a': null, 'b': null }, - { 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }, - { 'a': null, 'b': null }, + { a: null, b: null }, + { a: 42, b: '🦆🦆🦆🦆🦆🦆' }, + { a: null, b: null }, ], [ - { 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }, - { 'a': null, 'b': null }, - { 'a': 42, 'b': '🦆🦆🦆🦆🦆🦆' }, + { a: 42, b: '🦆🦆🦆🦆🦆🦆' }, + { a: null, b: null }, + { a: 42, b: '🦆🦆🦆🦆🦆🦆' }, ], null, - ] + ], ), col( 'struct_of_fixed_array', - STRUCT({ 'a': ARRAY(INTEGER, 3), 'b': ARRAY(VARCHAR, 3) }), + STRUCT({ a: ARRAY(INTEGER, 3), b: ARRAY(VARCHAR, 3) }), [ `{ 'a': [null, 2, 3], @@ -598,10 +594,10 @@ export function createTestJSData(): ColumnData[] { 'null', ], [ - { 'a': [null, 2, 3], 'b': ['a', null, 'c'] }, - { 'a': [4, 5, 6], 'b': ['d', 'e', 'f'] }, + { a: [null, 2, 3], b: ['a', null, 'c'] }, + { a: [4, 5, 6], b: ['d', 'e', 'f'] }, null, - ] + ], ), col( 'fixed_array_of_int_list', @@ -623,7 +619,7 @@ export function createTestJSData(): ColumnData[] { [[], [42, 999, null, null, -42], []], [[42, 999, null, null, -42], [], [42, 999, null, null, -42]], null, - ] + ], ), col( 'list_of_fixed_int_array', @@ -653,7 +649,7 @@ export function createTestJSData(): ColumnData[] { [4, 5, 6], ], null, - ] + ], ), ]; } @@ -678,7 +674,7 @@ export function createTestJSColumnNames(): readonly string[] { } export function createTestJSQuery(): string { - return `select * from (${createTestJSValuesClause()}) t(${createTestJSColumnNames().join(',')})` + return `select * from (${createTestJSValuesClause()}) t(${createTestJSColumnNames().join(',')})`; } export function createTestJSColumnsJS(): readonly (readonly JS[])[] { diff --git a/bindings/binding.gyp b/bindings/binding.gyp index 754faf94..7392c0f7 100644 --- a/bindings/binding.gyp +++ b/bindings/binding.gyp @@ -1,178 +1,234 @@ { - 'targets': [ - { - 'target_name': 'fetch_libduckdb', - 'type': 'none', - 'conditions': [ - ['OS=="linux" and target_arch=="x64"', { - 'variables': { - 'script_path': '<(module_root_dir)/scripts/fetch_libduckdb_linux_amd64.py', - }, - }], - ['OS=="linux" and target_arch=="arm64"', { - 'variables': { - 'script_path': '<(module_root_dir)/scripts/fetch_libduckdb_linux_arm64.py', - }, - }], - ['OS=="mac"', { - 'variables': { - 'script_path': '<(module_root_dir)/scripts/fetch_libduckdb_osx_universal.py', - }, - }], - ['OS=="win" and target_arch=="x64"', { - 'variables': { - 'script_path': '<(module_root_dir)/scripts/fetch_libduckdb_windows_amd64.py', - }, - }], - ], - 'actions': [ + "targets": [ { - 'action_name': 'run_fetch_libduckdb_script', - 'message': 'Fetching and extracting libduckdb', - 'inputs': [], - 'action': ['python3', '<(script_path)'], - 'outputs': ['<(module_root_dir)/libduckdb'], - }, - ], - }, - { - 'target_name': 'duckdb', - 'dependencies': [ - 'fetch_libduckdb', - ' -57599 -> 0 encoded. - * - * Min unencoded offset = -57599 -> 57599 -> 115198 encoded. - */ + * 40 bits for micros, then 24 bits for encoded offset in seconds. + * + * Max absolute unencoded offset = 15:59:59 = 60 * (60 * 15 + 59) + 59 = 57599. + * + * Encoded offset is unencoded offset inverted then shifted (by +57599) to unsigned. + * + * Max unencoded offset = 57599 -> -57599 -> 0 encoded. + * + * Min unencoded offset = -57599 -> 57599 -> 115198 encoded. 
+ */ bits: bigint; } export interface TimeTZParts { @@ -256,7 +255,11 @@ export interface ExtractedStatementsAndCount { statement_count: number; } -export type ScalarFunctionMainFunction = (info: FunctionInfo, input: DataChunk, output: Vector) => void; +export type ScalarFunctionMainFunction = ( + info: FunctionInfo, + input: DataChunk, + output: Vector, +) => void; // Functions @@ -264,7 +267,11 @@ export type ScalarFunctionMainFunction = (info: FunctionInfo, input: DataChunk, export function create_instance_cache(): InstanceCache; // DUCKDB_C_API duckdb_state duckdb_get_or_create_from_cache(duckdb_instance_cache instance_cache, const char *path, duckdb_database *out_database, duckdb_config config, char **out_error); -export function get_or_create_from_cache(cache: InstanceCache, path?: string, config?: Config): Promise<Database>; +export function get_or_create_from_cache( + cache: InstanceCache, + path?: string, + config?: Config, +): Promise<Database>; // DUCKDB_C_API void duckdb_destroy_instance_cache(duckdb_instance_cache *instance_cache); // not exposed: destroyed in finalizer @@ -332,7 +339,10 @@ export function column_type(result: Result, column_index: number): Type; export function result_statement_type(result: Result): StatementType; // DUCKDB_C_API duckdb_logical_type duckdb_column_logical_type(duckdb_result *result, idx_t col); -export function column_logical_type(result: Result, column_index: number): LogicalType; +export function column_logical_type( + result: Result, + column_index: number, +): LogicalType; // DUCKDB_C_API idx_t duckdb_column_count(duckdb_result *result); export function column_count(result: Result): number; @@ -450,13 +460,19 @@ export function to_timestamp(parts: TimestampParts): Timestamp; export function is_finite_timestamp(timestamp: Timestamp): boolean; // DUCKDB_C_API bool duckdb_is_finite_timestamp_s(duckdb_timestamp_s ts); -export function is_finite_timestamp_s(timestampSeconds: TimestampSeconds): boolean; +export function is_finite_timestamp_s( + timestampSeconds: TimestampSeconds, +): boolean; // DUCKDB_C_API bool duckdb_is_finite_timestamp_ms(duckdb_timestamp_ms ts); -export function is_finite_timestamp_ms(timestampMilliseconds: TimestampMilliseconds): boolean; +export function is_finite_timestamp_ms( + timestampMilliseconds: TimestampMilliseconds, +): boolean; // DUCKDB_C_API bool duckdb_is_finite_timestamp_ns(duckdb_timestamp_ns ts); -export function is_finite_timestamp_ns(timestampNanoseconds: TimestampNanoseconds): boolean; +export function is_finite_timestamp_ns( + timestampNanoseconds: TimestampNanoseconds, +): boolean; // DUCKDB_C_API double duckdb_hugeint_to_double(duckdb_hugeint val); export function hugeint_to_double(hugeint: bigint): number; @@ -471,16 +487,25 @@ export function uhugeint_to_double(uhugeint: bigint): number; export function double_to_uhugeint(double: number): bigint; // DUCKDB_C_API duckdb_decimal duckdb_double_to_decimal(double val, uint8_t width, uint8_t scale); -export function double_to_decimal(double: number, width: number, scale: number): Decimal; +export function double_to_decimal( + double: number, + width: number, + scale: number, +): Decimal; // DUCKDB_C_API double duckdb_decimal_to_double(duckdb_decimal val); export function decimal_to_double(decimal: Decimal): number; // DUCKDB_C_API duckdb_state duckdb_prepare(duckdb_connection connection, const char *query, duckdb_prepared_statement *out_prepared_statement); -export function prepare(connection: Connection, query: string): Promise<PreparedStatement>; +export function prepare( + connection: 
Connection, + query: string, +): Promise<PreparedStatement>; // DUCKDB_C_API void duckdb_destroy_prepare(duckdb_prepared_statement *prepared_statement); -export function destroy_prepare_sync(prepared_statement: PreparedStatement): void; +export function destroy_prepare_sync( + prepared_statement: PreparedStatement, +): void; // DUCKDB_C_API const char *duckdb_prepare_error(duckdb_prepared_statement prepared_statement); // not exposed: prepare rejects promise with error @@ -489,124 +514,246 @@ export function destroy_prepare_sync(prepared_statement: PreparedStatement): voi export function nparams(prepared_statement: PreparedStatement): number; // DUCKDB_C_API const char *duckdb_parameter_name(duckdb_prepared_statement prepared_statement, idx_t index); -export function parameter_name(prepared_statement: PreparedStatement, index: number): string; +export function parameter_name( + prepared_statement: PreparedStatement, + index: number, +): string; // DUCKDB_C_API duckdb_type duckdb_param_type(duckdb_prepared_statement prepared_statement, idx_t param_idx); -export function param_type(prepared_statement: PreparedStatement, index: number): Type; +export function param_type( + prepared_statement: PreparedStatement, + index: number, +): Type; // DUCKDB_C_API duckdb_logical_type duckdb_param_logical_type(duckdb_prepared_statement prepared_statement, idx_t param_idx); -export function param_logical_type(prepared_statement: PreparedStatement, index: number): LogicalType; +export function param_logical_type( + prepared_statement: PreparedStatement, + index: number, +): LogicalType; // DUCKDB_C_API duckdb_state duckdb_clear_bindings(duckdb_prepared_statement prepared_statement); export function clear_bindings(prepared_statement: PreparedStatement): void; // DUCKDB_C_API duckdb_statement_type duckdb_prepared_statement_type(duckdb_prepared_statement statement); -export function prepared_statement_type(prepared_statement: PreparedStatement): StatementType; +export function prepared_statement_type( + prepared_statement: PreparedStatement, +): StatementType; // DUCKDB_C_API duckdb_state duckdb_bind_value(duckdb_prepared_statement prepared_statement, idx_t param_idx, duckdb_value val); -export function bind_value(prepared_statement: PreparedStatement, index: number, value: Value): void; +export function bind_value( + prepared_statement: PreparedStatement, + index: number, + value: Value, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_parameter_index(duckdb_prepared_statement prepared_statement, idx_t *param_idx_out, const char *name); -export function bind_parameter_index(prepared_statement: PreparedStatement, name: string): number; +export function bind_parameter_index( + prepared_statement: PreparedStatement, + name: string, +): number; // DUCKDB_C_API duckdb_state duckdb_bind_boolean(duckdb_prepared_statement prepared_statement, idx_t param_idx, bool val); -export function bind_boolean(prepared_statement: PreparedStatement, index: number, bool: boolean): void; +export function bind_boolean( + prepared_statement: PreparedStatement, + index: number, + bool: boolean, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_int8(duckdb_prepared_statement prepared_statement, idx_t param_idx, int8_t val); -export function bind_int8(prepared_statement: PreparedStatement, index: number, int8: number): void; +export function bind_int8( + prepared_statement: PreparedStatement, + index: number, + int8: number, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_int16(duckdb_prepared_statement prepared_statement, idx_t param_idx, int16_t 
val); -export function bind_int16(prepared_statement: PreparedStatement, index: number, int16: number): void; +export function bind_int16( + prepared_statement: PreparedStatement, + index: number, + int16: number, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_int32(duckdb_prepared_statement prepared_statement, idx_t param_idx, int32_t val); -export function bind_int32(prepared_statement: PreparedStatement, index: number, int32: number): void; +export function bind_int32( + prepared_statement: PreparedStatement, + index: number, + int32: number, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_int64(duckdb_prepared_statement prepared_statement, idx_t param_idx, int64_t val); -export function bind_int64(prepared_statement: PreparedStatement, index: number, int64: bigint): void; +export function bind_int64( + prepared_statement: PreparedStatement, + index: number, + int64: bigint, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_hugeint(duckdb_prepared_statement prepared_statement, idx_t param_idx, duckdb_hugeint val); -export function bind_hugeint(prepared_statement: PreparedStatement, index: number, hugeint: bigint): void; +export function bind_hugeint( + prepared_statement: PreparedStatement, + index: number, + hugeint: bigint, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_uhugeint(duckdb_prepared_statement prepared_statement, idx_t param_idx, duckdb_uhugeint val); -export function bind_uhugeint(prepared_statement: PreparedStatement, index: number, uhugeint: bigint): void; +export function bind_uhugeint( + prepared_statement: PreparedStatement, + index: number, + uhugeint: bigint, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_decimal(duckdb_prepared_statement prepared_statement, idx_t param_idx, duckdb_decimal val); -export function bind_decimal(prepared_statement: PreparedStatement, index: number, decimal: Decimal): void; +export function bind_decimal( + prepared_statement: PreparedStatement, + index: number, + decimal: Decimal, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_uint8(duckdb_prepared_statement prepared_statement, idx_t param_idx, uint8_t val); -export function bind_uint8(prepared_statement: PreparedStatement, index: number, uint8: number): void; +export function bind_uint8( + prepared_statement: PreparedStatement, + index: number, + uint8: number, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_uint16(duckdb_prepared_statement prepared_statement, idx_t param_idx, uint16_t val); -export function bind_uint16(prepared_statement: PreparedStatement, index: number, uint16: number): void; +export function bind_uint16( + prepared_statement: PreparedStatement, + index: number, + uint16: number, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_uint32(duckdb_prepared_statement prepared_statement, idx_t param_idx, uint32_t val); -export function bind_uint32(prepared_statement: PreparedStatement, index: number, uint32: number): void; +export function bind_uint32( + prepared_statement: PreparedStatement, + index: number, + uint32: number, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_uint64(duckdb_prepared_statement prepared_statement, idx_t param_idx, uint64_t val); -export function bind_uint64(prepared_statement: PreparedStatement, index: number, uint64: bigint): void; +export function bind_uint64( + prepared_statement: PreparedStatement, + index: number, + uint64: bigint, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_float(duckdb_prepared_statement prepared_statement, idx_t param_idx, float val); -export function bind_float(prepared_statement: 
PreparedStatement, index: number, float: number): void; +export function bind_float( + prepared_statement: PreparedStatement, + index: number, + float: number, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_double(duckdb_prepared_statement prepared_statement, idx_t param_idx, double val); -export function bind_double(prepared_statement: PreparedStatement, index: number, double: number): void; +export function bind_double( + prepared_statement: PreparedStatement, + index: number, + double: number, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_date(duckdb_prepared_statement prepared_statement, idx_t param_idx, duckdb_date val); -export function bind_date(prepared_statement: PreparedStatement, index: number, date: Date_): void; +export function bind_date( + prepared_statement: PreparedStatement, + index: number, + date: Date_, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_time(duckdb_prepared_statement prepared_statement, idx_t param_idx, duckdb_time val); -export function bind_time(prepared_statement: PreparedStatement, index: number, time: Time): void; +export function bind_time( + prepared_statement: PreparedStatement, + index: number, + time: Time, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_timestamp(duckdb_prepared_statement prepared_statement, idx_t param_idx, duckdb_timestamp val); -export function bind_timestamp(prepared_statement: PreparedStatement, index: number, timestamp: Timestamp): void; +export function bind_timestamp( + prepared_statement: PreparedStatement, + index: number, + timestamp: Timestamp, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_timestamp_tz(duckdb_prepared_statement prepared_statement, idx_t param_idx, duckdb_timestamp val); -export function bind_timestamp_tz(prepared_statement: PreparedStatement, index: number, timestamp: Timestamp): void; +export function bind_timestamp_tz( + prepared_statement: PreparedStatement, + index: number, + timestamp: Timestamp, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_interval(duckdb_prepared_statement prepared_statement, idx_t param_idx, duckdb_interval val); -export function bind_interval(prepared_statement: PreparedStatement, index: number, interval: Interval): void; +export function bind_interval( + prepared_statement: PreparedStatement, + index: number, + interval: Interval, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_varchar(duckdb_prepared_statement prepared_statement, idx_t param_idx, const char *val); -export function bind_varchar(prepared_statement: PreparedStatement, index: number, varchar: string): void; +export function bind_varchar( + prepared_statement: PreparedStatement, + index: number, + varchar: string, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_varchar_length(duckdb_prepared_statement prepared_statement, idx_t param_idx, const char *val, idx_t length); // not exposed: JS string includes length // DUCKDB_C_API duckdb_state duckdb_bind_blob(duckdb_prepared_statement prepared_statement, idx_t param_idx, const void *data, idx_t length); -export function bind_blob(prepared_statement: PreparedStatement, index: number, data: Uint8Array): void; +export function bind_blob( + prepared_statement: PreparedStatement, + index: number, + data: Uint8Array, +): void; // DUCKDB_C_API duckdb_state duckdb_bind_null(duckdb_prepared_statement prepared_statement, idx_t param_idx); -export function bind_null(prepared_statement: PreparedStatement, index: number): void; +export function bind_null( + prepared_statement: PreparedStatement, + index: number, +): void; // DUCKDB_C_API 
duckdb_state duckdb_execute_prepared(duckdb_prepared_statement prepared_statement, duckdb_result *out_result); -export function execute_prepared(prepared_statement: PreparedStatement): Promise<Result>; +export function execute_prepared( + prepared_statement: PreparedStatement, +): Promise<Result>; // #ifndef DUCKDB_API_NO_DEPRECATED // DUCKDB_C_API duckdb_state duckdb_execute_prepared_streaming(duckdb_prepared_statement prepared_statement, duckdb_result *out_result); -export function execute_prepared_streaming(prepared_statement: PreparedStatement): Promise<Result>; +export function execute_prepared_streaming( + prepared_statement: PreparedStatement, +): Promise<Result>; // #endif // DUCKDB_C_API idx_t duckdb_extract_statements(duckdb_connection connection, const char *query, duckdb_extracted_statements *out_extracted_statements); -export function extract_statements(connection: Connection, query: string): Promise<ExtractedStatementsAndCount>; +export function extract_statements( + connection: Connection, + query: string, +): Promise<ExtractedStatementsAndCount>; // DUCKDB_C_API duckdb_state duckdb_prepare_extracted_statement(duckdb_connection connection, duckdb_extracted_statements extracted_statements, idx_t index, duckdb_prepared_statement *out_prepared_statement); -export function prepare_extracted_statement(connection: Connection, extracted_statements: ExtractedStatements, index: number): Promise<PreparedStatement>; +export function prepare_extracted_statement( + connection: Connection, + extracted_statements: ExtractedStatements, + index: number, +): Promise<PreparedStatement>; // DUCKDB_C_API const char *duckdb_extract_statements_error(duckdb_extracted_statements extracted_statements); -export function extract_statements_error(extracted_statements: ExtractedStatements): string; +export function extract_statements_error( + extracted_statements: ExtractedStatements, +): string; // DUCKDB_C_API void duckdb_destroy_extracted(duckdb_extracted_statements *extracted_statements); // not exposed: destroyed in finalizer // DUCKDB_C_API duckdb_state duckdb_pending_prepared(duckdb_prepared_statement prepared_statement, duckdb_pending_result *out_result); -export function pending_prepared(prepared_statement: PreparedStatement): PendingResult; +export function pending_prepared( + prepared_statement: PreparedStatement, +): PendingResult; // #ifndef DUCKDB_API_NO_DEPRECATED // DUCKDB_C_API duckdb_state duckdb_pending_prepared_streaming(duckdb_prepared_statement prepared_statement, duckdb_pending_result *out_result); -export function pending_prepared_streaming(prepared_statement: PreparedStatement): PendingResult; +export function pending_prepared_streaming( + prepared_statement: PreparedStatement, +): PendingResult; // #endif @@ -617,16 +764,22 @@ export function pending_prepared_streaming(prepared_statement: PreparedStatement export function pending_error(pending_result: PendingResult): string; // DUCKDB_C_API duckdb_pending_state duckdb_pending_execute_task(duckdb_pending_result pending_result); -export function pending_execute_task(pending_result: PendingResult): PendingState; +export function pending_execute_task( + pending_result: PendingResult, +): PendingState; // DUCKDB_C_API duckdb_pending_state duckdb_pending_execute_check_state(duckdb_pending_result pending_result); -export function pending_execute_check_state(pending_resulit: PendingResult): PendingState; +export function pending_execute_check_state( + pending_result: PendingResult, +): PendingState; // DUCKDB_C_API duckdb_state duckdb_execute_pending(duckdb_pending_result pending_result, duckdb_result *out_result); export function 
execute_pending(pending_result: PendingResult): Promise<Result>; // DUCKDB_C_API bool duckdb_pending_execution_is_finished(duckdb_pending_state pending_state); -export function pending_execution_is_finished(pending_state: PendingState): boolean; +export function pending_execution_is_finished( + pending_state: PendingState, +): boolean; // DUCKDB_C_API void duckdb_destroy_value(duckdb_value *value); // not exposed: destroyed in finalizer @@ -806,19 +959,36 @@ export function get_uuid(value: Value): bigint; export function get_varchar(value: Value): string; // DUCKDB_C_API duckdb_value duckdb_create_struct_value(duckdb_logical_type type, duckdb_value *values); -export function create_struct_value(logical_type: LogicalType, values: readonly Value[]): Value; +export function create_struct_value( + logical_type: LogicalType, + values: readonly Value[], +): Value; // DUCKDB_C_API duckdb_value duckdb_create_list_value(duckdb_logical_type type, duckdb_value *values, idx_t value_count); -export function create_list_value(logical_type: LogicalType, values: readonly Value[]): Value; +export function create_list_value( + logical_type: LogicalType, + values: readonly Value[], +): Value; // DUCKDB_C_API duckdb_value duckdb_create_array_value(duckdb_logical_type type, duckdb_value *values, idx_t value_count); -export function create_array_value(logical_type: LogicalType, values: readonly Value[]): Value; +export function create_array_value( + logical_type: LogicalType, + values: readonly Value[], +): Value; // DUCKDB_C_API duckdb_value duckdb_create_map_value(duckdb_logical_type map_type, duckdb_value *keys, duckdb_value *values, idx_t entry_count); -export function create_map_value(map_type: LogicalType, keys: readonly Value[], values: readonly Value[]): Value; +export function create_map_value( + map_type: LogicalType, + keys: readonly Value[], + values: readonly Value[], +): Value; // DUCKDB_C_API duckdb_value duckdb_create_union_value(duckdb_logical_type union_type, idx_t tag_index, duckdb_value value); -export function create_union_value(union_type: LogicalType, tag_index: number, value: Value): Value; +export function create_union_value( + union_type: LogicalType, + tag_index: number, + value: Value, +): Value; // DUCKDB_C_API idx_t duckdb_get_map_size(duckdb_value value); export function get_map_size(value: Value): number; @@ -842,7 +1012,10 @@ export function get_list_size(value: Value): number; export function get_list_child(value: Value, index: number): Value; // DUCKDB_C_API duckdb_value duckdb_create_enum_value(duckdb_logical_type type, uint64_t value); -export function create_enum_value(logical_type: LogicalType, value: number): Value; +export function create_enum_value( + logical_type: LogicalType, + value: number, +): Value; // DUCKDB_C_API uint64_t duckdb_get_enum_value(duckdb_value value); export function get_enum_value(value: Value): number; @@ -856,25 +1029,42 @@ export function get_struct_child(value: Value, index: number): Value; export function create_logical_type(type: Type): LogicalType; // DUCKDB_C_API char *duckdb_logical_type_get_alias(duckdb_logical_type type); -export function logical_type_get_alias(logical_type: LogicalType): string | null; +export function logical_type_get_alias( + logical_type: LogicalType, +): string | null; // DUCKDB_C_API void duckdb_logical_type_set_alias(duckdb_logical_type type, const char *alias); -export function logical_type_set_alias(logical_type: LogicalType, alias: string): void; +export function logical_type_set_alias( + logical_type: LogicalType, + 
alias: string, +): void; // DUCKDB_C_API duckdb_logical_type duckdb_create_list_type(duckdb_logical_type type); export function create_list_type(logical_type: LogicalType): LogicalType; // DUCKDB_C_API duckdb_logical_type duckdb_create_array_type(duckdb_logical_type type, idx_t array_size); -export function create_array_type(logical_type: LogicalType, array_size: number): LogicalType; +export function create_array_type( + logical_type: LogicalType, + array_size: number, +): LogicalType; // DUCKDB_C_API duckdb_logical_type duckdb_create_map_type(duckdb_logical_type key_type, duckdb_logical_type value_type); -export function create_map_type(key_type: LogicalType, value_type: LogicalType): LogicalType; +export function create_map_type( + key_type: LogicalType, + value_type: LogicalType, +): LogicalType; // DUCKDB_C_API duckdb_logical_type duckdb_create_union_type(duckdb_logical_type *member_types, const char **member_names, idx_t member_count); -export function create_union_type(member_types: readonly LogicalType[], member_names: readonly string[]): LogicalType; +export function create_union_type( + member_types: readonly LogicalType[], + member_names: readonly string[], +): LogicalType; // DUCKDB_C_API duckdb_logical_type duckdb_create_struct_type(duckdb_logical_type *member_types, const char **member_names, idx_t member_count); -export function create_struct_type(member_types: readonly LogicalType[], member_names: readonly string[]): LogicalType; +export function create_struct_type( + member_types: readonly LogicalType[], + member_names: readonly string[], +): LogicalType; // DUCKDB_C_API duckdb_logical_type duckdb_create_enum_type(const char **member_names, idx_t member_count); export function create_enum_type(member_names: readonly string[]): LogicalType; @@ -901,7 +1091,10 @@ export function enum_internal_type(logical_type: LogicalType): Type; export function enum_dictionary_size(logical_type: LogicalType): number; // DUCKDB_C_API char *duckdb_enum_dictionary_value(duckdb_logical_type type, idx_t index); -export function enum_dictionary_value(logical_type: LogicalType, index: number): string; +export function enum_dictionary_value( + logical_type: LogicalType, + index: number, +): string; // DUCKDB_C_API duckdb_logical_type duckdb_list_type_child_type(duckdb_logical_type type); export function list_type_child_type(logical_type: LogicalType): LogicalType; @@ -922,19 +1115,31 @@ export function map_type_value_type(logical_type: LogicalType): LogicalType; export function struct_type_child_count(logical_type: LogicalType): number; // DUCKDB_C_API char *duckdb_struct_type_child_name(duckdb_logical_type type, idx_t index); -export function struct_type_child_name(logical_type: LogicalType, index: number): string; +export function struct_type_child_name( + logical_type: LogicalType, + index: number, +): string; // DUCKDB_C_API duckdb_logical_type duckdb_struct_type_child_type(duckdb_logical_type type, idx_t index); -export function struct_type_child_type(logical_type: LogicalType, index: number): LogicalType; +export function struct_type_child_type( + logical_type: LogicalType, + index: number, +): LogicalType; // DUCKDB_C_API idx_t duckdb_union_type_member_count(duckdb_logical_type type); export function union_type_member_count(logical_type: LogicalType): number; // DUCKDB_C_API char *duckdb_union_type_member_name(duckdb_logical_type type, idx_t index); -export function union_type_member_name(logical_type: LogicalType, index: number): string; +export function union_type_member_name( + 
logical_type: LogicalType, + index: number, +): string; // DUCKDB_C_API duckdb_logical_type duckdb_union_type_member_type(duckdb_logical_type type, idx_t index); -export function union_type_member_type(logical_type: LogicalType, index: number): LogicalType; +export function union_type_member_type( + logical_type: LogicalType, + index: number, +): LogicalType; // DUCKDB_C_API void duckdb_destroy_logical_type(duckdb_logical_type *type); // not exposed: destroyed in finalizer @@ -943,7 +1148,9 @@ export function union_type_member_type(logical_type: LogicalType, index: number) // export function register_logical_type(connection: Connection, logical_type: LogicalType, info: CreateTypeInfo): void; // DUCKDB_C_API duckdb_data_chunk duckdb_create_data_chunk(duckdb_logical_type *types, idx_t column_count); -export function create_data_chunk(logical_types: readonly LogicalType[]): DataChunk; +export function create_data_chunk( + logical_types: readonly LogicalType[], +): DataChunk; // DUCKDB_C_API void duckdb_destroy_data_chunk(duckdb_data_chunk *chunk); // not exposed: destroyed in finalizer @@ -955,7 +1162,10 @@ export function data_chunk_reset(chunk: DataChunk): void; export function data_chunk_get_column_count(chunk: DataChunk): number; // DUCKDB_C_API duckdb_vector duckdb_data_chunk_get_vector(duckdb_data_chunk chunk, idx_t col_idx); -export function data_chunk_get_vector(chunk: DataChunk, column_index: number): Vector; +export function data_chunk_get_vector( + chunk: DataChunk, + column_index: number, +): Vector; // DUCKDB_C_API idx_t duckdb_data_chunk_get_size(duckdb_data_chunk chunk); export function data_chunk_get_size(chunk: DataChunk): number; @@ -974,16 +1184,27 @@ export function vector_get_column_type(vector: Vector): LogicalType; export function vector_get_data(vector: Vector, byte_count: number): Uint8Array; // DUCKDB_C_API uint64_t *duckdb_vector_get_validity(duckdb_vector vector); -export function vector_get_validity(vector: Vector, byte_count: number): Uint8Array; +export function vector_get_validity( + vector: Vector, + byte_count: number, +): Uint8Array; // DUCKDB_C_API void duckdb_vector_ensure_validity_writable(duckdb_vector vector); export function vector_ensure_validity_writable(vector: Vector): void; // DUCKDB_C_API void duckdb_vector_assign_string_element(duckdb_vector vector, idx_t index, const char *str); -export function vector_assign_string_element(vector: Vector, index: number, str: string): void; +export function vector_assign_string_element( + vector: Vector, + index: number, + str: string, +): void; // DUCKDB_C_API void duckdb_vector_assign_string_element_len(duckdb_vector vector, idx_t index, const char *str, idx_t str_len); -export function vector_assign_string_element_len(vector: Vector, index: number, data: Uint8Array): void; +export function vector_assign_string_element_len( + vector: Vector, + index: number, + data: Uint8Array, +): void; // DUCKDB_C_API duckdb_vector duckdb_list_vector_get_child(duckdb_vector vector); export function list_vector_get_child(vector: Vector): Vector; @@ -995,7 +1216,10 @@ export function list_vector_get_size(vector: Vector): number; export function list_vector_set_size(vector: Vector, size: number): void; // DUCKDB_C_API duckdb_state duckdb_list_vector_reserve(duckdb_vector vector, idx_t required_capacity); -export function list_vector_reserve(vector: Vector, required_capacity: number): void; +export function list_vector_reserve( + vector: Vector, + required_capacity: number, +): void; // DUCKDB_C_API duckdb_vector 
duckdb_struct_vector_get_child(duckdb_vector vector, idx_t index); export function struct_vector_get_child(vector: Vector, index: number): Vector; @@ -1010,62 +1234,107 @@ export function array_vector_get_child(vector: Vector): Vector; // DUCKDB_C_API void duckdb_vector_reference_vector(duckdb_vector to_vector, duckdb_vector from_vector); // DUCKDB_C_API bool duckdb_validity_row_is_valid(uint64_t *validity, idx_t row); -export function validity_row_is_valid(validity: Uint8Array | null, row_index: number): boolean; +export function validity_row_is_valid( + validity: Uint8Array | null, + row_index: number, +): boolean; // DUCKDB_C_API void duckdb_validity_set_row_validity(uint64_t *validity, idx_t row, bool valid); -export function validity_set_row_validity(validity: Uint8Array, row_index: number, valid: boolean): void; +export function validity_set_row_validity( + validity: Uint8Array, + row_index: number, + valid: boolean, +): void; // DUCKDB_C_API void duckdb_validity_set_row_invalid(uint64_t *validity, idx_t row); -export function validity_set_row_invalid(validity: Uint8Array, row_index: number): void; +export function validity_set_row_invalid( + validity: Uint8Array, + row_index: number, +): void; // DUCKDB_C_API void duckdb_validity_set_row_valid(uint64_t *validity, idx_t row); -export function validity_set_row_valid(validity: Uint8Array, row_index: number): void; +export function validity_set_row_valid( + validity: Uint8Array, + row_index: number, +): void; // DUCKDB_C_API duckdb_scalar_function duckdb_create_scalar_function(); export function create_scalar_function(): ScalarFunction; // DUCKDB_C_API void duckdb_destroy_scalar_function(duckdb_scalar_function *scalar_function); -export function destroy_scalar_function_sync(scalar_function: ScalarFunction): void; +export function destroy_scalar_function_sync( + scalar_function: ScalarFunction, +): void; // DUCKDB_C_API void duckdb_scalar_function_set_name(duckdb_scalar_function scalar_function, const char *name); -export function scalar_function_set_name(scalar_function: ScalarFunction, name: string): void; +export function scalar_function_set_name( + scalar_function: ScalarFunction, + name: string, +): void; // DUCKDB_C_API void duckdb_scalar_function_set_varargs(duckdb_scalar_function scalar_function, duckdb_logical_type type); -export function scalar_function_set_varargs(scalar_function: ScalarFunction, logical_type: LogicalType): void; +export function scalar_function_set_varargs( + scalar_function: ScalarFunction, + logical_type: LogicalType, +): void; // DUCKDB_C_API void duckdb_scalar_function_set_special_handling(duckdb_scalar_function scalar_function); -export function scalar_function_set_special_handling(scalar_function: ScalarFunction): void; +export function scalar_function_set_special_handling( + scalar_function: ScalarFunction, +): void; // DUCKDB_C_API void duckdb_scalar_function_set_volatile(duckdb_scalar_function scalar_function); -export function scalar_function_set_volatile(scalar_function: ScalarFunction): void; +export function scalar_function_set_volatile( + scalar_function: ScalarFunction, +): void; // DUCKDB_C_API void duckdb_scalar_function_add_parameter(duckdb_scalar_function scalar_function, duckdb_logical_type type); -export function scalar_function_add_parameter(scalar_function: ScalarFunction, logical_type: LogicalType): void; +export function scalar_function_add_parameter( + scalar_function: ScalarFunction, + logical_type: LogicalType, +): void; // DUCKDB_C_API void 
duckdb_scalar_function_set_return_type(duckdb_scalar_function scalar_function, duckdb_logical_type type); -export function scalar_function_set_return_type(scalar_function: ScalarFunction, logical_type: LogicalType): void; +export function scalar_function_set_return_type( + scalar_function: ScalarFunction, + logical_type: LogicalType, +): void; // DUCKDB_C_API void duckdb_scalar_function_set_extra_info(duckdb_scalar_function scalar_function, void *extra_info, duckdb_delete_callback_t destroy); -export function scalar_function_set_extra_info(scalar_function: ScalarFunction, extra_info: object): void; +export function scalar_function_set_extra_info( + scalar_function: ScalarFunction, + extra_info: object, +): void; // DUCKDB_C_API void duckdb_scalar_function_set_bind(duckdb_scalar_function scalar_function, duckdb_scalar_function_bind_t bind); // DUCKDB_C_API void duckdb_scalar_function_set_bind_data(duckdb_bind_info info, void *bind_data, duckdb_delete_callback_t destroy); // DUCKDB_C_API void duckdb_scalar_function_bind_set_error(duckdb_bind_info info, const char *error); // DUCKDB_C_API void duckdb_scalar_function_set_function(duckdb_scalar_function scalar_function, duckdb_scalar_function_t function); -export function scalar_function_set_function(scalar_function: ScalarFunction, func: ScalarFunctionMainFunction): void; +export function scalar_function_set_function( + scalar_function: ScalarFunction, + func: ScalarFunctionMainFunction, +): void; // DUCKDB_C_API duckdb_state duckdb_register_scalar_function(duckdb_connection con, duckdb_scalar_function scalar_function); -export function register_scalar_function(connection: Connection, scalar_function: ScalarFunction): void; +export function register_scalar_function( + connection: Connection, + scalar_function: ScalarFunction, +): void; // DUCKDB_C_API void *duckdb_scalar_function_get_extra_info(duckdb_function_info info); -export function scalar_function_get_extra_info(function_info: FunctionInfo): object | undefined; +export function scalar_function_get_extra_info( + function_info: FunctionInfo, +): object | undefined; // DUCKDB_C_API void *duckdb_scalar_function_get_bind_data(duckdb_function_info info); // DUCKDB_C_API void duckdb_scalar_function_get_client_context(duckdb_bind_info info, duckdb_client_context *out_context); // DUCKDB_C_API void duckdb_scalar_function_set_error(duckdb_function_info info, const char *error); -export function scalar_function_set_error(function_info: FunctionInfo, error: string): void; +export function scalar_function_set_error( + function_info: FunctionInfo, + error: string, +): void; // DUCKDB_C_API duckdb_scalar_function_set duckdb_create_scalar_function_set(const char *name); // DUCKDB_C_API void duckdb_destroy_scalar_function_set(duckdb_scalar_function_set *scalar_function_set); @@ -1142,16 +1411,28 @@ export function scalar_function_set_error(function_info: FunctionInfo, error: st // DUCKDB_C_API duckdb_profiling_info duckdb_profiling_info_get_child(duckdb_profiling_info info, idx_t index); // DUCKDB_C_API duckdb_state duckdb_appender_create(duckdb_connection connection, const char *schema, const char *table, duckdb_appender *out_appender); -export function appender_create(connection: Connection, schema: string | null, table: string): Appender; +export function appender_create( + connection: Connection, + schema: string | null, + table: string, +): Appender; // DUCKDB_C_API duckdb_state duckdb_appender_create_ext(duckdb_connection connection, const char *catalog, const char *schema, const char *table, 
duckdb_appender *out_appender); -export function appender_create_ext(connection: Connection, catalog: string | null, schema: string | null, table: string): Appender; +export function appender_create_ext( + connection: Connection, + catalog: string | null, + schema: string | null, + table: string, +): Appender; // DUCKDB_C_API idx_t duckdb_appender_column_count(duckdb_appender appender); export function appender_column_count(appender: Appender): number; // DUCKDB_C_API duckdb_logical_type duckdb_appender_column_type(duckdb_appender appender, idx_t col_idx); -export function appender_column_type(appender: Appender, column_index: number): LogicalType; +export function appender_column_type( + appender: Appender, + column_index: number, +): LogicalType; // DUCKDB_C_API const char *duckdb_appender_error(duckdb_appender appender); // not exposed: other appender functions throw @@ -1225,7 +1506,10 @@ export function append_date(appender: Appender, date: Date_): void; export function append_time(appender: Appender, time: Time): void; // DUCKDB_C_API duckdb_state duckdb_append_timestamp(duckdb_appender appender, duckdb_timestamp value); -export function append_timestamp(appender: Appender, timestamp: Timestamp): void; +export function append_timestamp( + appender: Appender, + timestamp: Timestamp, +): void; // DUCKDB_C_API duckdb_state duckdb_append_interval(duckdb_appender appender, duckdb_interval value); export function append_interval(appender: Appender, interval: Interval): void; @@ -1302,29 +1586,45 @@ export function fetch_chunk(result: Result): Promise; // DUCKDB_C_API void duckdb_destroy_cast_function(duckdb_cast_function *cast_function); // ADDED -/** +/** * Read a pointer from `array_buffer` at `pointer_offset`, then read and return `byte_count` bytes from that pointer. - * + * * Used to read from `duckdb_string_t`s with non-inlined data that are embedded in VARCHAR, BLOB, and BIT vectors. */ -export function get_data_from_pointer(array_buffer: ArrayBuffer, pointer_offset: number, byte_count: number): Uint8Array; +export function get_data_from_pointer( + array_buffer: ArrayBuffer, + pointer_offset: number, + byte_count: number, +): Uint8Array; // ADDED -/** +/** * Copy `source_byte_count` bytes from `source_buffer` at `source_byte_offset` into `target_vector` at `target_byte_offset`. - * + * * Used to write to data chunks. * * Performs an efficient-but-unsafe memory copy. Use with care. */ -export function copy_data_to_vector(target_vector: Vector, target_byte_offset: number, source_buffer: ArrayBuffer, source_byte_offset: number, source_byte_count: number): void; +export function copy_data_to_vector( + target_vector: Vector, + target_byte_offset: number, + source_buffer: ArrayBuffer, + source_byte_offset: number, + source_byte_count: number, +): void; // ADDED -/** +/** * Copy `source_byte_count` bytes from `source_buffer` at `source_byte_offset` into the validity of `target_vector` at `target_byte_offset`. - * + * * Used to write to data chunks. * * Performs an efficient-but-unsafe memory copy. Use with care. 
*/ -export function copy_data_to_vector_validity(target_vector: Vector, target_byte_offset: number, source_buffer: ArrayBuffer, source_byte_offset: number, source_byte_count: number): void; +export function copy_data_to_vector_validity( + target_vector: Vector, + target_byte_offset: number, + source_buffer: ArrayBuffer, + source_byte_offset: number, + source_byte_count: number, +): void; diff --git a/bindings/pkgs/@databrainhq/node-bindings/duckdb.js b/bindings/pkgs/@databrainhq/node-bindings/duckdb.js new file mode 100644 index 00000000..cb96b719 --- /dev/null +++ b/bindings/pkgs/@databrainhq/node-bindings/duckdb.js @@ -0,0 +1,26 @@ +const getRuntimePlatformArch = () => `${process.platform}-${process.arch}`; + +/** + * @throws Error if there isn't any available native binding for the current platform/arch. + */ +const getNativeNodeBinding = (runtimePlatformArch) => { + switch (runtimePlatformArch) { + case `linux-x64`: + return require('@databrainhq/node-bindings-linux-x64/duckdb.node'); + case 'linux-arm64': + return require('@databrainhq/node-bindings-linux-arm64/duckdb.node'); + case 'darwin-arm64': + return require('@databrainhq/node-bindings-darwin-arm64/duckdb.node'); + case 'darwin-x64': + return require('@databrainhq/node-bindings-darwin-x64/duckdb.node'); + case 'win32-x64': + return require('@databrainhq/node-bindings-win32-x64/duckdb.node'); + default: + const [platform, arch] = runtimePlatformArch.split('-'); + throw new Error( + `Error loading duckdb native binding: unsupported arch '${arch}' for platform '${platform}'`, + ); + } +}; + +module.exports = getNativeNodeBinding(getRuntimePlatformArch()); diff --git a/bindings/pkgs/@databrainhq/node-bindings/package.json b/bindings/pkgs/@databrainhq/node-bindings/package.json new file mode 100644 index 00000000..a54d5427 --- /dev/null +++ b/bindings/pkgs/@databrainhq/node-bindings/package.json @@ -0,0 +1,18 @@ +{ + "name": "@databrainhq/node-bindings", + "version": "1.3.2-alpha.26.5", + "license": "MIT", + "main": "./duckdb.js", + "types": "./duckdb.d.ts", + "optionalDependencies": { + "@databrainhq/node-bindings-darwin-arm64": "workspace:*", + "@databrainhq/node-bindings-darwin-x64": "workspace:*", + "@databrainhq/node-bindings-linux-arm64": "workspace:*", + "@databrainhq/node-bindings-linux-x64": "workspace:*", + "@databrainhq/node-bindings-win32-x64": "workspace:*" + }, + "repository": { + "type": "git", + "url": "https://github.com/databrainhq/duckdb-node-neo.git" + } +} diff --git a/bindings/pkgs/@duckdb/node-bindings-darwin-arm64/README.md b/bindings/pkgs/@duckdb/node-bindings-darwin-arm64/README.md deleted file mode 100644 index c94752a6..00000000 --- a/bindings/pkgs/@duckdb/node-bindings-darwin-arm64/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# DuckDB Node Bindings: Darwin arm64 - -Binaries supporting [Node](https://nodejs.org/) bindings to the [DuckDB C API](https://duckdb.org/docs/api/c/overview) for Mac OS X (Darwin) arm64. - -See [@duckdb/node-bindings](https://www.npmjs.com/package/@duckdb/node-bindings) for a low-level API that uses these binaries, or [@duckdb/node-api](https://www.npmjs.com/package/@duckdb/node-api) for a high-level API built on those bindings.
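Note on the ADDED helpers above: get_data_from_pointer, copy_data_to_vector, and copy_data_to_vector_validity are the raw read/write path for vector memory. A minimal TypeScript sketch of how the write side composes, modeled directly on the data_chunk tests later in this patch; it uses only binding calls that appear elsewhere in this diff:

import duckdb from '@databrainhq/node-bindings';

// Build a single-column INTEGER data chunk with room for three rows.
const integer_type = duckdb.create_logical_type(duckdb.Type.INTEGER);
const chunk = duckdb.create_data_chunk([integer_type]);
duckdb.data_chunk_set_size(chunk, 3);
const vector = duckdb.data_chunk_get_vector(chunk, 0);

// copy_data_to_vector is an unchecked memcpy, so the source buffer must
// match the column's physical layout: 4 bytes per INTEGER row here.
const values = new Int32Array([42, 12345, 67890]);
duckdb.copy_data_to_vector(vector, 0, values.buffer, 0, values.byteLength);

// Validity is one bit per row: 0b011 marks rows 0 and 1 valid, row 2 NULL.
duckdb.vector_ensure_validity_writable(vector);
const validity = new Uint8Array([0b011]);
duckdb.copy_data_to_vector_validity(vector, 0, validity.buffer, 0, validity.byteLength);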
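The new duckdb.js entry point above dispatches on `${process.platform}-${process.arch}` to the matching per-platform package from optionalDependencies, so only the binary for the current machine needs to install successfully. Once the require resolves, consumers get the full bindings API; a short usage sketch (assuming an async context, and using only calls exercised by the tests in this patch):

import duckdb from '@databrainhq/node-bindings';

// The import itself throws for unsupported platform/arch combinations
// (e.g. 'win32-arm64'), per the loader's default case.
const db = await duckdb.open(); // in-memory database
const connection = await duckdb.connect(db);
const result = await duckdb.query(connection, 'select 42 as answer');
const chunk = await duckdb.fetch_chunk(result);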
diff --git a/bindings/pkgs/@duckdb/node-bindings-darwin-arm64/package.json b/bindings/pkgs/@duckdb/node-bindings-darwin-arm64/package.json deleted file mode 100644 index b1ec7e1e..00000000 --- a/bindings/pkgs/@duckdb/node-bindings-darwin-arm64/package.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "@duckdb/node-bindings-darwin-arm64", - "version": "1.3.2-alpha.26", - "license": "MIT", - "os": [ - "darwin" - ], - "cpu": [ - "arm64" - ], - "repository": { - "type": "git", - "url": "https://github.com/duckdb/duckdb-node-neo.git" - } -} diff --git a/bindings/pkgs/@duckdb/node-bindings-darwin-x64/README.md b/bindings/pkgs/@duckdb/node-bindings-darwin-x64/README.md deleted file mode 100644 index cae4fc67..00000000 --- a/bindings/pkgs/@duckdb/node-bindings-darwin-x64/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# DuckDB Node Bindings: Darwin x64 - -Binaries supporting [Node](https://nodejs.org/) bindings to the [DuckDB C API](https://duckdb.org/docs/api/c/overview) for Mac OS X (Darwin) x64. - -See [@duckdb/node-bindings](https://www.npmjs.com/package/@duckdb/node-bindings) for a low-level API that uses these binaries, or [@duckdb/node-api](https://www.npmjs.com/package/@duckdb/node-api) for a high-level API built on those bindings. diff --git a/bindings/pkgs/@duckdb/node-bindings-darwin-x64/package.json b/bindings/pkgs/@duckdb/node-bindings-darwin-x64/package.json deleted file mode 100644 index b3e6235f..00000000 --- a/bindings/pkgs/@duckdb/node-bindings-darwin-x64/package.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "@duckdb/node-bindings-darwin-x64", - "version": "1.3.2-alpha.26", - "license": "MIT", - "os": [ - "darwin" - ], - "cpu": [ - "x64" - ], - "repository": { - "type": "git", - "url": "https://github.com/duckdb/duckdb-node-neo.git" - } -} diff --git a/bindings/pkgs/@duckdb/node-bindings-linux-arm64/README.md b/bindings/pkgs/@duckdb/node-bindings-linux-arm64/README.md deleted file mode 100644 index d3096318..00000000 --- a/bindings/pkgs/@duckdb/node-bindings-linux-arm64/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# DuckDB Node Bindings: Linux arm64 - -Binaries supporting [Node](https://nodejs.org/) bindings to the [DuckDB C API](https://duckdb.org/docs/api/c/overview) for Linux arm64. - -See [@duckdb/node-bindings](https://www.npmjs.com/package/@duckdb/node-bindings) for a low-level API that uses these binaries, or [@duckdb/node-api](https://www.npmjs.com/package/@duckdb/node-api) for a high-level API built on those bindings. diff --git a/bindings/pkgs/@duckdb/node-bindings-linux-arm64/package.json b/bindings/pkgs/@duckdb/node-bindings-linux-arm64/package.json deleted file mode 100644 index 40d75323..00000000 --- a/bindings/pkgs/@duckdb/node-bindings-linux-arm64/package.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "@duckdb/node-bindings-linux-arm64", - "version": "1.3.2-alpha.26", - "license": "MIT", - "os": [ - "linux" - ], - "cpu": [ - "arm64" - ], - "repository": { - "type": "git", - "url": "https://github.com/duckdb/duckdb-node-neo.git" - } -} diff --git a/bindings/pkgs/@duckdb/node-bindings-linux-x64/README.md b/bindings/pkgs/@duckdb/node-bindings-linux-x64/README.md deleted file mode 100644 index dff1afa8..00000000 --- a/bindings/pkgs/@duckdb/node-bindings-linux-x64/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# DuckDB Node Bindings: Linux x64 - -Binaries supporting [Node](https://nodejs.org/) bindings to the [DuckDB C API](https://duckdb.org/docs/api/c/overview) for Linux x64. 
- -See [@duckdb/node-bindings](https://www.npmjs.com/package/@duckdb/node-bindings) for a low-level API that uses these binaries, or [@duckdb/node-api](https://www.npmjs.com/package/@duckdb/node-api) for a high-level API built on those bindings. diff --git a/bindings/pkgs/@duckdb/node-bindings-linux-x64/package.json b/bindings/pkgs/@duckdb/node-bindings-linux-x64/package.json deleted file mode 100644 index be13ca01..00000000 --- a/bindings/pkgs/@duckdb/node-bindings-linux-x64/package.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "@duckdb/node-bindings-linux-x64", - "version": "1.3.2-alpha.26", - "license": "MIT", - "os": [ - "linux" - ], - "cpu": [ - "x64" - ], - "repository": { - "type": "git", - "url": "https://github.com/duckdb/duckdb-node-neo.git" - } -} diff --git a/bindings/pkgs/@duckdb/node-bindings-win32-x64/README.md b/bindings/pkgs/@duckdb/node-bindings-win32-x64/README.md deleted file mode 100644 index 82ac7e76..00000000 --- a/bindings/pkgs/@duckdb/node-bindings-win32-x64/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# DuckDB Node Bindings: Win32 x64 - -Binaries supporting [Node](https://nodejs.org/) bindings to the [DuckDB C API](https://duckdb.org/docs/api/c/overview) for Windows (Win32) x64. - -See [@duckdb/node-bindings](https://www.npmjs.com/package/@duckdb/node-bindings) for a low-level API that uses these binaries, or [@duckdb/node-api](https://www.npmjs.com/package/@duckdb/node-api) for a high-level API built on those bindings. diff --git a/bindings/pkgs/@duckdb/node-bindings-win32-x64/package.json b/bindings/pkgs/@duckdb/node-bindings-win32-x64/package.json deleted file mode 100644 index cbf6e21d..00000000 --- a/bindings/pkgs/@duckdb/node-bindings-win32-x64/package.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "@duckdb/node-bindings-win32-x64", - "version": "1.3.2-alpha.26", - "license": "MIT", - "os": [ - "win32" - ], - "cpu": [ - "x64" - ], - "repository": { - "type": "git", - "url": "https://github.com/duckdb/duckdb-node-neo.git" - } -} diff --git a/bindings/pkgs/@duckdb/node-bindings/duckdb.js b/bindings/pkgs/@duckdb/node-bindings/duckdb.js deleted file mode 100644 index 6a730bd8..00000000 --- a/bindings/pkgs/@duckdb/node-bindings/duckdb.js +++ /dev/null @@ -1,24 +0,0 @@ -const getRuntimePlatformArch = () => `${process.platform}-${process.arch}`; - -/** - * @throw Error if there isn't any available native binding for the current platform/arch. 
- */ -const getNativeNodeBinding = (runtimePlatformArch) => { - switch(runtimePlatformArch) { - case `linux-x64`: - return require('@duckdb/node-bindings-linux-x64/duckdb.node'); - case 'linux-arm64': - return require('@duckdb/node-bindings-linux-arm64/duckdb.node'); - case 'darwin-arm64': - return require('@duckdb/node-bindings-darwin-arm64/duckdb.node'); - case 'darwin-x64': - return require('@duckdb/node-bindings-darwin-x64/duckdb.node'); - case 'win32-x64': - return require('@duckdb/node-bindings-win32-x64/duckdb.node'); - default: - const [platform, arch] = runtimePlatformArch.split('-') - throw new Error(`Error loading duckdb native binding: unsupported arch '${arch}' for platform '${platform}'`); - } -} - -module.exports = getNativeNodeBinding(getRuntimePlatformArch()); diff --git a/bindings/pkgs/@duckdb/node-bindings/package.json b/bindings/pkgs/@duckdb/node-bindings/package.json deleted file mode 100644 index 539223ed..00000000 --- a/bindings/pkgs/@duckdb/node-bindings/package.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "name": "@duckdb/node-bindings", - "version": "1.3.2-alpha.26", - "license": "MIT", - "main": "./duckdb.js", - "types": "./duckdb.d.ts", - "optionalDependencies": { - "@duckdb/node-bindings-darwin-arm64": "workspace:*", - "@duckdb/node-bindings-darwin-x64": "workspace:*", - "@duckdb/node-bindings-linux-arm64": "workspace:*", - "@duckdb/node-bindings-linux-x64": "workspace:*", - "@duckdb/node-bindings-win32-x64": "workspace:*" - }, - "repository": { - "type": "git", - "url": "https://github.com/duckdb/duckdb-node-neo.git" - } -} diff --git a/bindings/scripts/checkFunctionSignatures.mjs b/bindings/scripts/checkFunctionSignatures.mjs index 76cc1f35..2a5c8ae9 100644 --- a/bindings/scripts/checkFunctionSignatures.mjs +++ b/bindings/scripts/checkFunctionSignatures.mjs @@ -4,12 +4,19 @@ import path from 'path'; function getFunctionSignaturesFromHeader(headerFilePath) { const sigs = []; const headerContents = fs.readFileSync(headerFilePath, { encoding: 'utf-8' }); - const sigRegex = /^DUCKDB_C_API (?<sig>([^;]|[\r\n])*);$|^#ifndef (?<startif>DUCKDB_API_NO_DEPRECATED|DUCKDB_NO_EXTENSION_FUNCTIONS)$|^#endif$/gm; + const sigRegex = + /^DUCKDB_C_API (?<sig>([^;]|[\r\n])*);$|^#ifndef (?<startif>DUCKDB_API_NO_DEPRECATED|DUCKDB_NO_EXTENSION_FUNCTIONS)$|^#endif$/gm; var ifndef = undefined; var match; - while (match = sigRegex.exec(headerContents)) { + while ((match = sigRegex.exec(headerContents))) { if (match.groups.sig) { - sigs.push({ sig: match.groups.sig.replace(/\r\n/gm, ' ').replace(/\n/gm, ' ').replace(/ +/gm, ' '), ...(ifndef ? { ifndef } : {}) }); + sigs.push({ + sig: match.groups.sig + .replace(/\r\n/gm, ' ') + .replace(/\n/gm, ' ') + .replace(/ +/gm, ' '), + ...(ifndef ? { ifndef } : {}), + }); } else if (match.groups.startif) { ifndef = match.groups.startif; } else { @@ -22,10 +29,11 @@ function getFunctionSignaturesFromHeader(headerFilePath) { function getFunctionSignaturesFromComments(filePath) { const sigs = []; const fileContents = fs.readFileSync(filePath, { encoding: 'utf-8' }); - const sigRegex = /^\s*\/\/ DUCKDB_C_API (?<sig>([^;])*);$|^\s*\/\/ #ifndef (?<startif>DUCKDB_API_NO_DEPRECATED|DUCKDB_NO_EXTENSION_FUNCTIONS)$|^\s*\/\/ #endif$/gm; + const sigRegex = + /^\s*\/\/ DUCKDB_C_API (?<sig>([^;])*);$|^\s*\/\/ #ifndef (?<startif>DUCKDB_API_NO_DEPRECATED|DUCKDB_NO_EXTENSION_FUNCTIONS)$|^\s*\/\/ #endif$/gm; var ifndef = undefined; var match; - while (match = sigRegex.exec(fileContents)) { + while ((match = sigRegex.exec(fileContents))) { if (match.groups.sig) { sigs.push({ sig: match.groups.sig, ...(ifndef ?
{ ifndef } : {}) }); } else if (match.groups.startif) { @@ -47,7 +55,12 @@ function checkFunctionSignatures() { } const headerFilePath = path.join('libduckdb', 'duckdb.h'); - const typeDefsFilePath = path.join('pkgs', '@duckdb', 'node-bindings', 'duckdb.d.ts'); + const typeDefsFilePath = path.join( + 'pkgs', + '@databrainhq', + 'node-bindings', + 'duckdb.d.ts', + ); const bindingsFilePath = path.join('src', 'duckdb_node_bindings.cpp'); const headerSigs = getFunctionSignaturesFromHeader(headerFilePath); diff --git a/bindings/scripts/fetch_libduckdb_linux_amd64.py b/bindings/scripts/fetch_libduckdb_linux_amd64.py index 72a93f2e..c0051253 100644 --- a/bindings/scripts/fetch_libduckdb_linux_amd64.py +++ b/bindings/scripts/fetch_libduckdb_linux_amd64.py @@ -4,8 +4,8 @@ zip_url = "https://github.com/duckdb/duckdb/releases/download/v1.3.2/libduckdb-linux-amd64.zip" output_dir = os.path.join(os.path.dirname(__file__), "..", "libduckdb") files = [ - "duckdb.h", - "libduckdb.so", + "duckdb.h", + "libduckdb.so", ] fetch_libduckdb(zip_url, output_dir, files) diff --git a/bindings/scripts/fetch_libduckdb_linux_arm64.py b/bindings/scripts/fetch_libduckdb_linux_arm64.py index 6d3a3658..c6e183fc 100644 --- a/bindings/scripts/fetch_libduckdb_linux_arm64.py +++ b/bindings/scripts/fetch_libduckdb_linux_arm64.py @@ -4,8 +4,8 @@ zip_url = "https://github.com/duckdb/duckdb/releases/download/v1.3.2/libduckdb-linux-arm64.zip" output_dir = os.path.join(os.path.dirname(__file__), "..", "libduckdb") files = [ - "duckdb.h", - "libduckdb.so", + "duckdb.h", + "libduckdb.so", ] fetch_libduckdb(zip_url, output_dir, files) diff --git a/bindings/scripts/fetch_libduckdb_osx_universal.py b/bindings/scripts/fetch_libduckdb_osx_universal.py index 60b5a793..ef31623f 100644 --- a/bindings/scripts/fetch_libduckdb_osx_universal.py +++ b/bindings/scripts/fetch_libduckdb_osx_universal.py @@ -4,8 +4,8 @@ zip_url = "https://github.com/duckdb/duckdb/releases/download/v1.3.2/libduckdb-osx-universal.zip" output_dir = os.path.join(os.path.dirname(__file__), "..", "libduckdb") files = [ - "duckdb.h", - "libduckdb.dylib", + "duckdb.h", + "libduckdb.dylib", ] fetch_libduckdb(zip_url, output_dir, files) diff --git a/bindings/scripts/fetch_libduckdb_windows_amd64.py b/bindings/scripts/fetch_libduckdb_windows_amd64.py index 49cdf6ac..6989c48e 100644 --- a/bindings/scripts/fetch_libduckdb_windows_amd64.py +++ b/bindings/scripts/fetch_libduckdb_windows_amd64.py @@ -4,9 +4,9 @@ zip_url = "https://github.com/duckdb/duckdb/releases/download/v1.3.2/libduckdb-windows-amd64.zip" output_dir = os.path.join(os.path.dirname(__file__), "..", "libduckdb") files = [ - "duckdb.h", - "duckdb.lib", - "duckdb.dll", + "duckdb.h", + "duckdb.lib", + "duckdb.dll", ] fetch_libduckdb(zip_url, output_dir, files) diff --git a/bindings/test/appender.test.ts b/bindings/test/appender.test.ts index 2cf5a75e..6362d0d7 100644 --- a/bindings/test/appender.test.ts +++ b/bindings/test/appender.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; import { expectLogicalType } from './utils/expectLogicalType'; import { expectResult } from './utils/expectResult'; @@ -31,7 +31,7 @@ suite('appender', () => { test('error: no table', async () => { await withConnection(async (connection) => { expect(() => - duckdb.appender_create_ext(connection, 'memory', 'main', 'bogus_table') + duckdb.appender_create_ext(connection, 'memory', 'main', 'bogus_table'), ).toThrowError(`Table 
"memory.main.bogus_table" could not be found`); }); }); @@ -39,7 +39,7 @@ suite('appender', () => { await withConnection(async (connection) => { const createResult = await duckdb.query( connection, - 'create table appender_target(i integer)' + 'create table appender_target(i integer)', ); await expectResult(createResult, { statementType: duckdb.StatementType.CREATE, @@ -54,7 +54,7 @@ suite('appender', () => { connection, 'memory', 'main', - 'appender_target' + 'appender_target', ); expect(duckdb.appender_column_count(appender)).toBe(1); const column_type = duckdb.appender_column_type(appender, 0); @@ -109,7 +109,7 @@ suite('appender', () => { dec38_10 decimal(38,10), \ null_column integer, \ integer_with_default integer default 42\ - )' + )', ); await expectResult(createResult, { statementType: duckdb.StatementType.CREATE, @@ -124,7 +124,7 @@ suite('appender', () => { connection, 'memory', 'main', - 'appender_target' + 'appender_target', ); expect(duckdb.appender_column_count(appender)).toBe(25); @@ -172,7 +172,7 @@ suite('appender', () => { duckdb.append_uint64(appender, 18446744073709551615n); duckdb.append_uhugeint( appender, - 340282366920938463463374607431768211455n + 340282366920938463463374607431768211455n, ); duckdb.append_float(appender, 3.4028234663852886e38); duckdb.append_double(appender, 1.7976931348623157e308); @@ -187,23 +187,31 @@ suite('appender', () => { duckdb.append_varchar(appender, '🦆🦆🦆🦆🦆🦆'); duckdb.append_blob( appender, - Buffer.from('thisisalongblob\x00withnullbytes') + Buffer.from('thisisalongblob\x00withnullbytes'), ); duckdb.append_value( appender, - duckdb.create_decimal({ width: 4, scale: 1, value: 9999n }) + duckdb.create_decimal({ width: 4, scale: 1, value: 9999n }), ); duckdb.append_value( appender, - duckdb.create_decimal({ width: 9, scale: 4, value: 999999999n }) + duckdb.create_decimal({ width: 9, scale: 4, value: 999999999n }), ); duckdb.append_value( appender, - duckdb.create_decimal({ width: 18, scale: 6, value: 999999999999999999n }) + duckdb.create_decimal({ + width: 18, + scale: 6, + value: 999999999999999999n, + }), ); duckdb.append_value( appender, - duckdb.create_decimal({ width: 38, scale: 10, value: -99999999999999999999999999999999999999n }) + duckdb.create_decimal({ + width: 38, + scale: 10, + value: -99999999999999999999999999999999999999n, + }), ); duckdb.append_null(appender); duckdb.append_default(appender); @@ -240,7 +248,10 @@ suite('appender', () => { { name: 'dec4_1', logicalType: DECIMAL(4, 1, duckdb.Type.SMALLINT) }, { name: 'dec9_4', logicalType: DECIMAL(9, 4, duckdb.Type.INTEGER) }, { name: 'dec18_6', logicalType: DECIMAL(18, 6, duckdb.Type.BIGINT) }, - { name: 'dec38_10', logicalType: DECIMAL(38, 10, duckdb.Type.HUGEINT) }, + { + name: 'dec38_10', + logicalType: DECIMAL(38, 10, duckdb.Type.HUGEINT), + }, { name: 'null_column', logicalType: INTEGER }, { name: 'integer_with_default', logicalType: INTEGER }, ], @@ -267,13 +278,13 @@ suite('appender', () => { data( 16, [true], - [{ months: 999, days: 999, micros: 999999999n }] + [{ months: 999, days: 999, micros: 999999999n }], ), data(16, [true], ['🦆🦆🦆🦆🦆🦆']), data( 16, [true], - [Buffer.from('thisisalongblob\x00withnullbytes')] + [Buffer.from('thisisalongblob\x00withnullbytes')], ), data(2, [true], [9999]), data(4, [true], [999999999]), @@ -291,7 +302,7 @@ suite('appender', () => { await withConnection(async (connection) => { const createResult = await duckdb.query( connection, - 'create table appender_target(i integer, v varchar)' + 'create table appender_target(i integer, v 
varchar)', ); await expectResult(createResult, { statementType: duckdb.StatementType.CREATE, @@ -306,13 +317,13 @@ suite('appender', () => { connection, 'memory', 'main', - 'appender_target' + 'appender_target', ); expect(duckdb.appender_column_count(appender)).toBe(2); const source_result = await duckdb.query( connection, - 'select int, varchar from test_all_types()' + 'select int, varchar from test_all_types()', ); const source_chunk = await duckdb.fetch_chunk(source_result); expect(source_chunk).toBeDefined(); diff --git a/bindings/test/config.test.ts b/bindings/test/config.test.ts index d73a9d0b..52c9f7b5 100644 --- a/bindings/test/config.test.ts +++ b/bindings/test/config.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; import { expectResult } from './utils/expectResult'; import { data } from './utils/expectedVectors'; @@ -9,10 +9,14 @@ suite('config', () => { }); test('get_config_flag', () => { expect(duckdb.get_config_flag(0).name).toBe('access_mode'); - expect(duckdb.get_config_flag(duckdb.config_count() - 1).name).toBe('unsafe_enable_version_guessing'); + expect(duckdb.get_config_flag(duckdb.config_count() - 1).name).toBe( + 'unsafe_enable_version_guessing', + ); }); test('get_config_flag out of bounds', () => { - expect(() => duckdb.get_config_flag(-1)).toThrowError(/^Config option not found$/); + expect(() => duckdb.get_config_flag(-1)).toThrowError( + /^Config option not found$/, + ); }); test('create, set, and destroy', () => { const config = duckdb.create_config(); @@ -22,49 +26,58 @@ suite('config', () => { test('default duckdb_api without explicit config', async () => { const db = await duckdb.open(); const connection = await duckdb.connect(db); - const result = await duckdb.query(connection, `select current_setting('duckdb_api') as duckdb_api`); - await expectResult(result, { - chunkCount: 1, - rowCount: 1, - columns: [ - { name: 'duckdb_api', logicalType: { typeId: duckdb.Type.VARCHAR } }, - ], - chunks: [ - { rowCount: 1, vectors: [data(16, [true], ['node-neo-bindings'])]}, - ], - }); + const result = await duckdb.query( + connection, + `select current_setting('duckdb_api') as duckdb_api`, + ); + await expectResult(result, { + chunkCount: 1, + rowCount: 1, + columns: [ + { name: 'duckdb_api', logicalType: { typeId: duckdb.Type.VARCHAR } }, + ], + chunks: [ + { rowCount: 1, vectors: [data(16, [true], ['node-neo-bindings'])] }, + ], + }); }); test('default duckdb_api with explicit config', async () => { const config = duckdb.create_config(); const db = await duckdb.open(undefined, config); const connection = await duckdb.connect(db); - const result = await duckdb.query(connection, `select current_setting('duckdb_api') as duckdb_api`); - await expectResult(result, { - chunkCount: 1, - rowCount: 1, - columns: [ - { name: 'duckdb_api', logicalType: { typeId: duckdb.Type.VARCHAR } }, - ], - chunks: [ - { rowCount: 1, vectors: [data(16, [true], ['node-neo-bindings'])]}, - ], - }); + const result = await duckdb.query( + connection, + `select current_setting('duckdb_api') as duckdb_api`, + ); + await expectResult(result, { + chunkCount: 1, + rowCount: 1, + columns: [ + { name: 'duckdb_api', logicalType: { typeId: duckdb.Type.VARCHAR } }, + ], + chunks: [ + { rowCount: 1, vectors: [data(16, [true], ['node-neo-bindings'])] }, + ], + }); }); test('overriding duckdb_api', async () => { const config = duckdb.create_config(); duckdb.set_config(config, 'duckdb_api', 
'custom-duckdb-api'); const db = await duckdb.open(undefined, config); const connection = await duckdb.connect(db); - const result = await duckdb.query(connection, `select current_setting('duckdb_api') as duckdb_api`); - await expectResult(result, { - chunkCount: 1, - rowCount: 1, - columns: [ - { name: 'duckdb_api', logicalType: { typeId: duckdb.Type.VARCHAR } }, - ], - chunks: [ - { rowCount: 1, vectors: [data(16, [true], ['custom-duckdb-api'])]}, - ], - }); + const result = await duckdb.query( + connection, + `select current_setting('duckdb_api') as duckdb_api`, + ); + await expectResult(result, { + chunkCount: 1, + rowCount: 1, + columns: [ + { name: 'duckdb_api', logicalType: { typeId: duckdb.Type.VARCHAR } }, + ], + chunks: [ + { rowCount: 1, vectors: [data(16, [true], ['custom-duckdb-api'])] }, + ], + }); }); }); diff --git a/bindings/test/connection.test.ts b/bindings/test/connection.test.ts index ad499cfb..443867db 100644 --- a/bindings/test/connection.test.ts +++ b/bindings/test/connection.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; import { withConnection } from './utils/withConnection'; @@ -7,18 +7,45 @@ suite('connection', () => { await withConnection(async (connection) => { const prepared1 = await duckdb.prepare(connection, 'select 1'); expect(prepared1).toBeDefined(); - const { extracted_statements } = await duckdb.extract_statements(connection, 'select 10; select 20'); + const { extracted_statements } = await duckdb.extract_statements( + connection, + 'select 10; select 20', + ); duckdb.disconnect_sync(connection); - await expect(async () => await duckdb.prepare(connection, 'select 2')) - .rejects.toStrictEqual(new Error('Failed to prepare: connection disconnected')); - await expect(async () => await duckdb.query(connection, 'select 3')) - .rejects.toStrictEqual(new Error('Failed to query: connection disconnected')); - await expect(async () => await duckdb.extract_statements(connection, 'select 4; select 5')) - .rejects.toStrictEqual(new Error('Failed to extract statements: connection disconnected')); - await expect(async () => await duckdb.prepare_extracted_statement(connection, extracted_statements, 0)) - .rejects.toStrictEqual(new Error('Failed to prepare extracted statement: connection disconnected')); - await expect(async () => await duckdb.appender_create(connection, 'main', 'my_target')) - .rejects.toStrictEqual(new Error('Failed to create appender: connection disconnected')); + await expect( + async () => await duckdb.prepare(connection, 'select 2'), + ).rejects.toStrictEqual( + new Error('Failed to prepare: connection disconnected'), + ); + await expect( + async () => await duckdb.query(connection, 'select 3'), + ).rejects.toStrictEqual( + new Error('Failed to query: connection disconnected'), + ); + await expect( + async () => + await duckdb.extract_statements(connection, 'select 4; select 5'), + ).rejects.toStrictEqual( + new Error('Failed to extract statements: connection disconnected'), + ); + await expect( + async () => + await duckdb.prepare_extracted_statement( + connection, + extracted_statements, + 0, + ), + ).rejects.toStrictEqual( + new Error( + 'Failed to prepare extracted statement: connection disconnected', + ), + ); + await expect( + async () => + await duckdb.appender_create(connection, 'main', 'my_target'), + ).rejects.toStrictEqual( + new Error('Failed to create appender: connection disconnected'), + ); }); }); test('double 
disconnect', async () => { @@ -28,8 +55,11 @@ suite('connection', () => { duckdb.disconnect_sync(connection); // ensure a second disconnect is a no-op duckdb.disconnect_sync(connection); - await expect(async () => await duckdb.prepare(connection, 'select 3')) - .rejects.toStrictEqual(new Error('Failed to prepare: connection disconnected')); + await expect( + async () => await duckdb.prepare(connection, 'select 3'), + ).rejects.toStrictEqual( + new Error('Failed to prepare: connection disconnected'), + ); }); }); }); diff --git a/bindings/test/constants.test.ts b/bindings/test/constants.test.ts index 73c4d235..1d7dc33b 100644 --- a/bindings/test/constants.test.ts +++ b/bindings/test/constants.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; suite('constants', () => { diff --git a/bindings/test/conversion.test.ts b/bindings/test/conversion.test.ts index 2e1296c1..c7091040 100644 --- a/bindings/test/conversion.test.ts +++ b/bindings/test/conversion.test.ts @@ -1,28 +1,45 @@ - -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; suite('conversion', () => { suite('from_date', () => { test('mid-range', () => { - expect(duckdb.from_date({ days: 19877 })).toStrictEqual({ year: 2024, month: 6, day: 3 }); + expect(duckdb.from_date({ days: 19877 })).toStrictEqual({ + year: 2024, + month: 6, + day: 3, + }); }); test('max', () => { - expect(duckdb.from_date({ days: 0x7FFFFFFE })).toStrictEqual({ year: 5881580, month: 7, day: 10 }); + expect(duckdb.from_date({ days: 0x7ffffffe })).toStrictEqual({ + year: 5881580, + month: 7, + day: 10, + }); }); test('min', () => { - expect(duckdb.from_date({ days: -0x7FFFFFFE })).toStrictEqual({ year: -5877641, month: 6, day: 25 }); + expect(duckdb.from_date({ days: -0x7ffffffe })).toStrictEqual({ + year: -5877641, + month: 6, + day: 25, + }); }); }); suite('to_date', () => { test('mid-range', () => { - expect(duckdb.to_date({ year: 2024, month: 6, day: 3 })).toStrictEqual({ days: 19877 }); + expect(duckdb.to_date({ year: 2024, month: 6, day: 3 })).toStrictEqual({ + days: 19877, + }); }); test('max', () => { - expect(duckdb.to_date({ year: 5881580, month: 7, day: 10 })).toStrictEqual({ days: 0x7FFFFFFE }); + expect( + duckdb.to_date({ year: 5881580, month: 7, day: 10 }), + ).toStrictEqual({ days: 0x7ffffffe }); }); test('min', () => { - expect(duckdb.to_date({ year: -5877641, month: 6, day: 25 })).toStrictEqual({ days: -0x7FFFFFFE }); + expect( + duckdb.to_date({ year: -5877641, month: 6, day: 25 }), + ).toStrictEqual({ days: -0x7ffffffe }); }); }); suite('is_finite_date', () => { @@ -30,23 +47,38 @@ suite('conversion', () => { expect(duckdb.is_finite_date({ days: 19877 })).toBe(true); }); test('infinity', () => { - expect(duckdb.is_finite_date({ days: 0x7FFFFFFF })).toBe(false); + expect(duckdb.is_finite_date({ days: 0x7fffffff })).toBe(false); }); test('-infinity', () => { - expect(duckdb.is_finite_date({ days: -0x7FFFFFFF })).toBe(false); + expect(duckdb.is_finite_date({ days: -0x7fffffff })).toBe(false); }); }); suite('from_time', () => { test('mid-range', () => { // 45296789123 = 1000000 * (60 * (60 * 12 + 34) + 56) + 789123 = 12:34:56.789123 - expect(duckdb.from_time({ micros: 45296789123n })).toStrictEqual({ hour: 12, min: 34, sec: 56, micros: 789123 }); + expect(duckdb.from_time({ micros: 45296789123n })).toStrictEqual({ + hour: 12, + min: 34, + sec: 56, + 
micros: 789123, + }); }); test('min', () => { - expect(duckdb.from_time({ micros: 0n })).toStrictEqual({ hour: 0, min: 0, sec: 0, micros: 0 }); + expect(duckdb.from_time({ micros: 0n })).toStrictEqual({ + hour: 0, + min: 0, + sec: 0, + micros: 0, + }); }); test('max', () => { // 86400000000 = 1000000 * (60 * (60 * 24 + 0) + 0) + 0 = 24:00:00.000000 - expect(duckdb.from_time({ micros: 86400000000n })).toStrictEqual({ hour: 24, min: 0, sec: 0, micros: 0 }); + expect(duckdb.from_time({ micros: 86400000000n })).toStrictEqual({ + hour: 24, + min: 0, + sec: 0, + micros: 0, + }); }); }); suite('create_time_tz', () => { @@ -59,45 +91,67 @@ suite('conversion', () => { test('mid-range', () => { // 45296789123 = 1000000 * (60 * (60 * 12 + 34) + 56) + 789123 = 12:34:56.789123 // 759954015223079167n = (45296789123n << 24n) + 57599n - expect(duckdb.create_time_tz(45296789123, 0)).toStrictEqual({ bits: 759954015223079167n }); + expect(duckdb.create_time_tz(45296789123, 0)).toStrictEqual({ + bits: 759954015223079167n, + }); }); test('min', () => { expect(duckdb.create_time_tz(0, 57599)).toStrictEqual({ bits: 0n }); }); test('max', () => { // 1449551462400115198n = (86400000000n << 24n) + 2n * 57599n - expect(duckdb.create_time_tz(86400000000, -57599)).toStrictEqual({ bits: 1449551462400115198n }); + expect(duckdb.create_time_tz(86400000000, -57599)).toStrictEqual({ + bits: 1449551462400115198n, + }); }); }); suite('from_time_tz', () => { test('mid-range', () => { - expect(duckdb.from_time_tz({ bits: 759954015223079167n })).toStrictEqual({ time: { hour: 12, min: 34, sec: 56, micros: 789123 }, offset: 0 }); + expect(duckdb.from_time_tz({ bits: 759954015223079167n })).toStrictEqual({ + time: { hour: 12, min: 34, sec: 56, micros: 789123 }, + offset: 0, + }); }); test('min', () => { - expect(duckdb.from_time_tz({ bits: 0n })).toStrictEqual({ time: { hour: 0, min: 0, sec: 0, micros: 0 }, offset: 57599 }); + expect(duckdb.from_time_tz({ bits: 0n })).toStrictEqual({ + time: { hour: 0, min: 0, sec: 0, micros: 0 }, + offset: 57599, + }); }); test('max', () => { - expect(duckdb.from_time_tz({ bits: 1449551462400115198n })).toStrictEqual({ time: { hour: 24, min: 0, sec: 0, micros: 0 }, offset: -57599 }); + expect(duckdb.from_time_tz({ bits: 1449551462400115198n })).toStrictEqual( + { time: { hour: 24, min: 0, sec: 0, micros: 0 }, offset: -57599 }, + ); }); test('out of uint64 range', () => { - expect(() => duckdb.from_time_tz({ bits: 2n ** 64n })).toThrowError('bits out of uint64 range'); + expect(() => duckdb.from_time_tz({ bits: 2n ** 64n })).toThrowError( + 'bits out of uint64 range', + ); }); }); suite('to_time', () => { test('mid-range', () => { - expect(duckdb.to_time({ hour: 12, min: 34, sec: 56, micros: 789123 })).toStrictEqual({ micros: 45296789123n }); + expect( + duckdb.to_time({ hour: 12, min: 34, sec: 56, micros: 789123 }), + ).toStrictEqual({ micros: 45296789123n }); }); test('min', () => { - expect(duckdb.to_time({ hour: 0, min: 0, sec: 0, micros: 0 })).toStrictEqual({ micros: 0n }); + expect( + duckdb.to_time({ hour: 0, min: 0, sec: 0, micros: 0 }), + ).toStrictEqual({ micros: 0n }); }); test('max', () => { - expect(duckdb.to_time({ hour: 24, min: 0, sec: 0, micros: 0 })).toStrictEqual({ micros: 86400000000n }); + expect( + duckdb.to_time({ hour: 24, min: 0, sec: 0, micros: 0 }), + ).toStrictEqual({ micros: 86400000000n }); }); }); suite('from_timestamp', () => { test('mid-range', () => { // 1717418096789123n = 19877n * 86400000000n + 45296789123n - expect(duckdb.from_timestamp({ micros: 
1717418096789123n })).toStrictEqual({ + expect( + duckdb.from_timestamp({ micros: 1717418096789123n }), + ).toStrictEqual({ date: { year: 2024, month: 6, day: 3 }, time: { hour: 12, min: 34, sec: 56, micros: 789123 }, }); @@ -110,72 +164,98 @@ suite('conversion', () => { }); test('min', () => { // min timestamp = 290309-12-22 (BC) 00:00:00 - expect(duckdb.from_timestamp({ micros: -9223372022400000000n })).toStrictEqual({ + expect( + duckdb.from_timestamp({ micros: -9223372022400000000n }), + ).toStrictEqual({ date: { year: -290308, month: 12, day: 22 }, time: { hour: 0, min: 0, sec: 0, micros: 0 }, }); }); test('max', () => { // max timestamp = 294247-01-10 04:00:54.775806 - expect(duckdb.from_timestamp({ micros: 9223372036854775806n })).toStrictEqual({ + expect( + duckdb.from_timestamp({ micros: 9223372036854775806n }), + ).toStrictEqual({ date: { year: 294247, month: 1, day: 10 }, time: { hour: 4, min: 0, sec: 54, micros: 775806 }, }); }); test('out of int64 range (positive)', () => { - expect(() => duckdb.from_timestamp({ micros: 2n ** 63n })).toThrowError('micros out of int64 range'); + expect(() => duckdb.from_timestamp({ micros: 2n ** 63n })).toThrowError( + 'micros out of int64 range', + ); }); test('out of int64 range (negative)', () => { - expect(() => duckdb.from_timestamp({ micros: -(2n ** 63n + 1n) })).toThrowError('micros out of int64 range'); + expect(() => + duckdb.from_timestamp({ micros: -(2n ** 63n + 1n) }), + ).toThrowError('micros out of int64 range'); }); }); suite('to_timestamp', () => { test('mid-range', () => { // 1717418096789123n = 19877n * 86400000000n + 45296789123n - expect(duckdb.to_timestamp({ - date: { year: 2024, month: 6, day: 3 }, - time: { hour: 12, min: 34, sec: 56, micros: 789123 }, - })).toStrictEqual({ micros: 1717418096789123n }); + expect( + duckdb.to_timestamp({ + date: { year: 2024, month: 6, day: 3 }, + time: { hour: 12, min: 34, sec: 56, micros: 789123 }, + }), + ).toStrictEqual({ micros: 1717418096789123n }); }); test('epoch', () => { - expect(duckdb.to_timestamp({ - date: { year: 1970, month: 1, day: 1 }, - time: { hour: 0, min: 0, sec: 0, micros: 0 }, - })).toStrictEqual({ micros: 0n }); + expect( + duckdb.to_timestamp({ + date: { year: 1970, month: 1, day: 1 }, + time: { hour: 0, min: 0, sec: 0, micros: 0 }, + }), + ).toStrictEqual({ micros: 0n }); }); test('min', () => { // min timestamp = 290309-12-22 (BC) 00:00:00 - expect(duckdb.to_timestamp({ - date: { year: -290308, month: 12, day: 22 }, - time: { hour: 0, min: 0, sec: 0, micros: 0 }, - })).toStrictEqual({ micros: -9223372022400000000n }); + expect( + duckdb.to_timestamp({ + date: { year: -290308, month: 12, day: 22 }, + time: { hour: 0, min: 0, sec: 0, micros: 0 }, + }), + ).toStrictEqual({ micros: -9223372022400000000n }); }); test('max', () => { // max timestamp = 294247-01-10 04:00:54.775806 - expect(duckdb.to_timestamp({ - date: { year: 294247, month: 1, day: 10 }, - time: { hour: 4, min: 0, sec: 54, micros: 775806 }, - })).toStrictEqual({ micros: 9223372036854775806n }); + expect( + duckdb.to_timestamp({ + date: { year: 294247, month: 1, day: 10 }, + time: { hour: 4, min: 0, sec: 54, micros: 775806 }, + }), + ).toStrictEqual({ micros: 9223372036854775806n }); }); }); suite('is_finite_timestamp', () => { test('mid-range', () => { - expect(duckdb.is_finite_timestamp({ micros: 1717418096789123n })).toBe(true); + expect(duckdb.is_finite_timestamp({ micros: 1717418096789123n })).toBe( + true, + ); }); test('epoch', () => { expect(duckdb.is_finite_timestamp({ micros: 0n 
})).toBe(true); }); test('min', () => { - expect(duckdb.is_finite_timestamp({ micros: -9223372022400000000n })).toBe(true); + expect( + duckdb.is_finite_timestamp({ micros: -9223372022400000000n }), + ).toBe(true); }); test('max', () => { - expect(duckdb.is_finite_timestamp({ micros: 9223372036854775806n })).toBe(true); + expect(duckdb.is_finite_timestamp({ micros: 9223372036854775806n })).toBe( + true, + ); }); test('infinity', () => { - expect(duckdb.is_finite_timestamp({ micros: 2n ** 63n - 1n })).toBe(false); + expect(duckdb.is_finite_timestamp({ micros: 2n ** 63n - 1n })).toBe( + false, + ); }); test('-infinity', () => { - expect(duckdb.is_finite_timestamp({ micros: -(2n ** 63n - 1n) })).toBe(false); + expect(duckdb.is_finite_timestamp({ micros: -(2n ** 63n - 1n) })).toBe( + false, + ); }); }); suite('is_finite_timestamp_s', () => { @@ -186,56 +266,84 @@ suite('conversion', () => { expect(duckdb.is_finite_timestamp_s({ seconds: 0n })).toBe(true); }); test('min', () => { - expect(duckdb.is_finite_timestamp_s({ seconds: -9223372022400n })).toBe(true); + expect(duckdb.is_finite_timestamp_s({ seconds: -9223372022400n })).toBe( + true, + ); }); test('max', () => { - expect(duckdb.is_finite_timestamp_s({ seconds: 9223372036854n })).toBe(true); + expect(duckdb.is_finite_timestamp_s({ seconds: 9223372036854n })).toBe( + true, + ); }); test('infinity', () => { - expect(duckdb.is_finite_timestamp_s({ seconds: 2n ** 63n - 1n })).toBe(false); + expect(duckdb.is_finite_timestamp_s({ seconds: 2n ** 63n - 1n })).toBe( + false, + ); }); test('-infinity', () => { - expect(duckdb.is_finite_timestamp_s({ seconds: -(2n ** 63n - 1n) })).toBe(false); + expect(duckdb.is_finite_timestamp_s({ seconds: -(2n ** 63n - 1n) })).toBe( + false, + ); }); }); suite('is_finite_timestamp_ms', () => { test('mid-range', () => { - expect(duckdb.is_finite_timestamp_ms({ millis: 1717418096789n })).toBe(true); + expect(duckdb.is_finite_timestamp_ms({ millis: 1717418096789n })).toBe( + true, + ); }); test('epoch', () => { expect(duckdb.is_finite_timestamp_ms({ millis: 0n })).toBe(true); }); test('min', () => { - expect(duckdb.is_finite_timestamp_ms({ millis: -9223372022400000n })).toBe(true); + expect( + duckdb.is_finite_timestamp_ms({ millis: -9223372022400000n }), + ).toBe(true); }); test('max', () => { - expect(duckdb.is_finite_timestamp_ms({ millis: 9223372036854775n })).toBe(true); + expect(duckdb.is_finite_timestamp_ms({ millis: 9223372036854775n })).toBe( + true, + ); }); test('infinity', () => { - expect(duckdb.is_finite_timestamp_ms({ millis: 2n ** 63n - 1n })).toBe(false); + expect(duckdb.is_finite_timestamp_ms({ millis: 2n ** 63n - 1n })).toBe( + false, + ); }); test('-infinity', () => { - expect(duckdb.is_finite_timestamp_ms({ millis: -(2n ** 63n - 1n) })).toBe(false); + expect(duckdb.is_finite_timestamp_ms({ millis: -(2n ** 63n - 1n) })).toBe( + false, + ); }); }); suite('is_finite_timestamp_ns', () => { test('mid-range', () => { - expect(duckdb.is_finite_timestamp_ns({ nanos: 1717418096789123000n })).toBe(true); + expect( + duckdb.is_finite_timestamp_ns({ nanos: 1717418096789123000n }), + ).toBe(true); }); test('epoch', () => { expect(duckdb.is_finite_timestamp_ns({ nanos: 0n })).toBe(true); }); test('min', () => { - expect(duckdb.is_finite_timestamp_ns({ nanos: -9223286400000000000n })).toBe(true); + expect( + duckdb.is_finite_timestamp_ns({ nanos: -9223286400000000000n }), + ).toBe(true); }); test('max', () => { - expect(duckdb.is_finite_timestamp_ns({ nanos: 9223372036854775806n })).toBe(true); + expect( + 
duckdb.is_finite_timestamp_ns({ nanos: 9223372036854775806n }), + ).toBe(true); }); test('infinity', () => { - expect(duckdb.is_finite_timestamp_ns({ nanos: 2n ** 63n - 1n })).toBe(false); + expect(duckdb.is_finite_timestamp_ns({ nanos: 2n ** 63n - 1n })).toBe( + false, + ); }); test('-infinity', () => { - expect(duckdb.is_finite_timestamp_ns({ nanos: -(2n ** 63n - 1n) })).toBe(false); + expect(duckdb.is_finite_timestamp_ns({ nanos: -(2n ** 63n - 1n) })).toBe( + false, + ); }); }); suite('hugeint_to_double', () => { @@ -249,34 +357,50 @@ suite('conversion', () => { expect(duckdb.hugeint_to_double(-1n)).toBe(-1); }); test('one word', () => { - expect(duckdb.hugeint_to_double(2n ** 63n)).toBe(9.223372036854776e+18); + expect(duckdb.hugeint_to_double(2n ** 63n)).toBe(9.223372036854776e18); }); test('two words', () => { - expect(duckdb.hugeint_to_double(2n ** 65n)).toBe(3.6893488147419103e+19); + expect(duckdb.hugeint_to_double(2n ** 65n)).toBe(3.6893488147419103e19); }); test('negative one word', () => { - expect(duckdb.hugeint_to_double(-(2n ** 63n))).toBe(-9.223372036854776e+18); + expect(duckdb.hugeint_to_double(-(2n ** 63n))).toBe( + -9.223372036854776e18, + ); }); test('negative two words', () => { - expect(duckdb.hugeint_to_double(-(2n ** 65n))).toBe(-3.6893488147419103e+19); + expect(duckdb.hugeint_to_double(-(2n ** 65n))).toBe( + -3.6893488147419103e19, + ); }); test('min', () => { - expect(duckdb.hugeint_to_double(-(2n ** 127n - 1n))).toBe(-1.7014118346046923e+38); + expect(duckdb.hugeint_to_double(-(2n ** 127n - 1n))).toBe( + -1.7014118346046923e38, + ); }); test('max', () => { - expect(duckdb.hugeint_to_double(2n ** 127n - 1n)).toBe(1.7014118346046923e+38); + expect(duckdb.hugeint_to_double(2n ** 127n - 1n)).toBe( + 1.7014118346046923e38, + ); }); test('near min', () => { - expect(duckdb.hugeint_to_double(-(2n ** 127n - 2n ** 74n))).toBe(-1.7014118346046922e+38); + expect(duckdb.hugeint_to_double(-(2n ** 127n - 2n ** 74n))).toBe( + -1.7014118346046922e38, + ); }); test('near max', () => { - expect(duckdb.hugeint_to_double(2n ** 127n - 2n ** 74n)).toBe(1.7014118346046922e+38); + expect(duckdb.hugeint_to_double(2n ** 127n - 2n ** 74n)).toBe( + 1.7014118346046922e38, + ); }); test('out of range (positive)', () => { - expect(() => duckdb.hugeint_to_double(2n ** 129n)).toThrowError('bigint out of hugeint range'); + expect(() => duckdb.hugeint_to_double(2n ** 129n)).toThrowError( + 'bigint out of hugeint range', + ); }); test('out of range (negative)', () => { - expect(() => duckdb.hugeint_to_double(-(2n ** 129n))).toThrowError('bigint out of hugeint range'); + expect(() => duckdb.hugeint_to_double(-(2n ** 129n))).toThrowError( + 'bigint out of hugeint range', + ); }); }); suite('double_to_hugeint', () => { @@ -290,28 +414,36 @@ suite('conversion', () => { expect(duckdb.double_to_hugeint(-1)).toBe(-1n); }); test('one word', () => { - expect(duckdb.double_to_hugeint(9.223372036854776e+18)).toBe(2n ** 63n); + expect(duckdb.double_to_hugeint(9.223372036854776e18)).toBe(2n ** 63n); }); test('two words', () => { - expect(duckdb.double_to_hugeint(3.6893488147419103e+19)).toBe(2n ** 65n); + expect(duckdb.double_to_hugeint(3.6893488147419103e19)).toBe(2n ** 65n); }); test('negative one word', () => { - expect(duckdb.double_to_hugeint(-9.223372036854776e+18)).toBe(-(2n ** 63n)); + expect(duckdb.double_to_hugeint(-9.223372036854776e18)).toBe( + -(2n ** 63n), + ); }); test('negative two words', () => { - expect(duckdb.double_to_hugeint(-3.6893488147419103e+19)).toBe(-(2n ** 65n)); + 
expect(duckdb.double_to_hugeint(-3.6893488147419103e19)).toBe( + -(2n ** 65n), + ); }); test('near min', () => { - expect(duckdb.double_to_hugeint(-1.7014118346046922e+38)).toBe(-(2n ** 127n - 2n ** 74n)); + expect(duckdb.double_to_hugeint(-1.7014118346046922e38)).toBe( + -(2n ** 127n - 2n ** 74n), + ); }); test('near max', () => { - expect(duckdb.double_to_hugeint(1.7014118346046922e+38)).toBe(2n ** 127n - 2n ** 74n); + expect(duckdb.double_to_hugeint(1.7014118346046922e38)).toBe( + 2n ** 127n - 2n ** 74n, + ); }); test('out of range (positive)', () => { - expect(duckdb.double_to_hugeint(1.8e+38)).toBe(0n); + expect(duckdb.double_to_hugeint(1.8e38)).toBe(0n); }); test('out of range (negative)', () => { - expect(duckdb.double_to_hugeint(-1.8e+38)).toBe(0n); + expect(duckdb.double_to_hugeint(-1.8e38)).toBe(0n); }); }); suite('uhugeint_to_double', () => { @@ -322,22 +454,30 @@ suite('conversion', () => { expect(duckdb.uhugeint_to_double(1n)).toBe(1); }); test('one word', () => { - expect(duckdb.uhugeint_to_double(2n ** 63n)).toBe(9.223372036854776e+18); + expect(duckdb.uhugeint_to_double(2n ** 63n)).toBe(9.223372036854776e18); }); test('two words', () => { - expect(duckdb.uhugeint_to_double(2n ** 65n)).toBe(3.6893488147419103e+19); + expect(duckdb.uhugeint_to_double(2n ** 65n)).toBe(3.6893488147419103e19); }); test('max', () => { - expect(duckdb.uhugeint_to_double(2n ** 127n - 1n)).toBe(1.7014118346046923e+38); + expect(duckdb.uhugeint_to_double(2n ** 127n - 1n)).toBe( + 1.7014118346046923e38, + ); }); test('near max', () => { - expect(duckdb.uhugeint_to_double(2n ** 127n - 2n ** 74n)).toBe(1.7014118346046922e+38); + expect(duckdb.uhugeint_to_double(2n ** 127n - 2n ** 74n)).toBe( + 1.7014118346046922e38, + ); }); test('out of range (positive)', () => { - expect(() => duckdb.uhugeint_to_double(2n ** 129n)).toThrowError('bigint out of uhugeint range'); + expect(() => duckdb.uhugeint_to_double(2n ** 129n)).toThrowError( + 'bigint out of uhugeint range', + ); }); test('out of range (negative)', () => { - expect(() => duckdb.uhugeint_to_double(-1n)).toThrowError('bigint out of uhugeint range'); + expect(() => duckdb.uhugeint_to_double(-1n)).toThrowError( + 'bigint out of uhugeint range', + ); }); }); suite('double_to_uhugeint', () => { @@ -348,16 +488,18 @@ suite('conversion', () => { expect(duckdb.double_to_uhugeint(1)).toBe(1n); }); test('one word', () => { - expect(duckdb.double_to_uhugeint(9.223372036854776e+18)).toBe(2n ** 63n); + expect(duckdb.double_to_uhugeint(9.223372036854776e18)).toBe(2n ** 63n); }); test('two words', () => { - expect(duckdb.double_to_uhugeint(3.6893488147419103e+19)).toBe(2n ** 65n); + expect(duckdb.double_to_uhugeint(3.6893488147419103e19)).toBe(2n ** 65n); }); test('near max', () => { - expect(duckdb.double_to_uhugeint(1.7014118346046922e+38)).toBe(2n ** 127n - 2n ** 74n); + expect(duckdb.double_to_uhugeint(1.7014118346046922e38)).toBe( + 2n ** 127n - 2n ** 74n, + ); }); test('out of range (positive)', () => { - expect(duckdb.double_to_uhugeint(3.5e+38)).toBe(0n); + expect(duckdb.double_to_uhugeint(3.5e38)).toBe(0n); }); test('out of range (negative)', () => { expect(duckdb.double_to_uhugeint(-1)).toBe(0n); @@ -365,64 +507,98 @@ suite('conversion', () => { }); suite('double_to_decimal', () => { test('zero', () => { - expect(duckdb.double_to_decimal(0, 4, 1)).toStrictEqual({ width: 4, scale: 1, value: 0n }); + expect(duckdb.double_to_decimal(0, 4, 1)).toStrictEqual({ + width: 4, + scale: 1, + value: 0n, + }); }); test('one', () => { - 
expect(duckdb.double_to_decimal(1, 9, 4)).toStrictEqual({ width: 9, scale: 4, value: 10000n }); + expect(duckdb.double_to_decimal(1, 9, 4)).toStrictEqual({ + width: 9, + scale: 4, + value: 10000n, + }); }); test('negative one', () => { - expect(duckdb.double_to_decimal(-1, 9, 4)).toStrictEqual({ width: 9, scale: 4, value: -10000n }); + expect(duckdb.double_to_decimal(-1, 9, 4)).toStrictEqual({ + width: 9, + scale: 4, + value: -10000n, + }); }); test('one word', () => { expect(duckdb.double_to_decimal(123456789012.34568, 18, 6)).toStrictEqual( - { width: 18, scale: 6, value: 123456789012345680n } + { width: 18, scale: 6, value: 123456789012345680n }, ); }); test('two words', () => { - expect(duckdb.double_to_decimal(1.2345678901234567e+27, 38, 10)).toStrictEqual( - { width: 38, scale: 10, value: 12345678901234567525491324606797053952n } - ); + expect( + duckdb.double_to_decimal(1.2345678901234567e27, 38, 10), + ).toStrictEqual({ + width: 38, + scale: 10, + value: 12345678901234567525491324606797053952n, + }); }); test('negative one word', () => { - expect(duckdb.double_to_decimal(-123456789012.34568, 18, 6)).toStrictEqual( - { width: 18, scale: 6, value: -123456789012345680n } - ); + expect( + duckdb.double_to_decimal(-123456789012.34568, 18, 6), + ).toStrictEqual({ width: 18, scale: 6, value: -123456789012345680n }); }); test('negative two words', () => { - expect(duckdb.double_to_decimal(-1.2345678901234567e+27, 38, 10)).toStrictEqual( - { width: 38, scale: 10, value: -12345678901234567525491324606797053952n } - ); + expect( + duckdb.double_to_decimal(-1.2345678901234567e27, 38, 10), + ).toStrictEqual({ + width: 38, + scale: 10, + value: -12345678901234567525491324606797053952n, + }); }); test('out of range (positive)', () => { - expect(duckdb.double_to_decimal(1e+38, 38, 0)).toStrictEqual( - { width: 0, scale: 0, value: 0n } - ); + expect(duckdb.double_to_decimal(1e38, 38, 0)).toStrictEqual({ + width: 0, + scale: 0, + value: 0n, + }); }); test('out of range (negative)', () => { - expect(duckdb.double_to_decimal(-1e+38, 38, 0)).toStrictEqual( - { width: 0, scale: 0, value: 0n } - ); + expect(duckdb.double_to_decimal(-1e38, 38, 0)).toStrictEqual({ + width: 0, + scale: 0, + value: 0n, + }); }); test('out of range (width)', () => { - expect(duckdb.double_to_decimal(1, 39, 0)).toStrictEqual( - { width: 0, scale: 0, value: 0n } - ); + expect(duckdb.double_to_decimal(1, 39, 0)).toStrictEqual({ + width: 0, + scale: 0, + value: 0n, + }); }); test('out of range (scale)', () => { - expect(duckdb.double_to_decimal(1, 4, 4)).toStrictEqual( - { width: 0, scale: 0, value: 0n } - ); + expect(duckdb.double_to_decimal(1, 4, 4)).toStrictEqual({ + width: 0, + scale: 0, + value: 0n, + }); }); }); suite('decimal_to_double', () => { test('zero', () => { - expect(duckdb.decimal_to_double({ width: 4, scale: 1, value: 0n })).toBe(0); + expect(duckdb.decimal_to_double({ width: 4, scale: 1, value: 0n })).toBe( + 0, + ); }); test('one', () => { - expect(duckdb.decimal_to_double({ width: 9, scale: 4, value: 10000n })).toBe(1); + expect( + duckdb.decimal_to_double({ width: 9, scale: 4, value: 10000n }), + ).toBe(1); }); test('negative one', () => { - expect(duckdb.decimal_to_double({ width: 9, scale: 4, value: -10000n })).toBe(-1); + expect( + duckdb.decimal_to_double({ width: 9, scale: 4, value: -10000n }), + ).toBe(-1); }); test('one word', () => { expect( @@ -430,7 +606,7 @@ suite('conversion', () => { width: 18, scale: 6, value: 123456789012345680n, - }) + }), ).toBe(123456789012.34568); }); test('two words', () 
=> { @@ -439,8 +615,8 @@ suite('conversion', () => { width: 38, scale: 10, value: 12345678901234567525491324606797053952n, - }) - ).toBe(1.2345678901234567e+27); + }), + ).toBe(1.2345678901234567e27); }); test('negative one word', () => { expect( @@ -448,7 +624,7 @@ suite('conversion', () => { width: 18, scale: 6, value: -123456789012345680n, - }) + }), ).toBe(-123456789012.34568); }); test('negative two words', () => { @@ -457,8 +633,8 @@ suite('conversion', () => { width: 38, scale: 10, value: -12345678901234567525491324606797053952n, - }) - ).toBe(-1.2345678901234567e+27); + }), + ).toBe(-1.2345678901234567e27); }); }); }); diff --git a/bindings/test/data_chunk.test.ts b/bindings/test/data_chunk.test.ts index a05c5818..93e701d7 100644 --- a/bindings/test/data_chunk.test.ts +++ b/bindings/test/data_chunk.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; import { expectLogicalType } from './utils/expectLogicalType'; import { INTEGER, VARCHAR } from './utils/expectedLogicalTypes'; @@ -48,10 +48,20 @@ suite('data chunk', () => { duckdb.data_chunk_set_size(chunk, 3); const vector = duckdb.data_chunk_get_vector(chunk, 0); duckdb.vector_ensure_validity_writable(vector); - duckdb.copy_data_to_vector_validity(vector, 0, source_buffer, 0, source_buffer.byteLength); + duckdb.copy_data_to_vector_validity( + vector, + 0, + source_buffer, + 0, + source_buffer.byteLength, + ); const validity_bytes = duckdb.vector_get_validity(vector, 8); - const validity_array = new BigUint64Array(validity_bytes.buffer, validity_bytes.byteOffset, 1); + const validity_array = new BigUint64Array( + validity_bytes.buffer, + validity_bytes.byteOffset, + 1, + ); expect(validity_array[0]).toBe(0xfedcba9876543210n); }); test('write integer vector', () => { @@ -65,10 +75,20 @@ suite('data chunk', () => { const chunk = duckdb.create_data_chunk([integer_type]); duckdb.data_chunk_set_size(chunk, 3); const vector = duckdb.data_chunk_get_vector(chunk, 0); - duckdb.copy_data_to_vector(vector, 0, source_buffer, 0, source_buffer.byteLength); + duckdb.copy_data_to_vector( + vector, + 0, + source_buffer, + 0, + source_buffer.byteLength, + ); const vector_data = duckdb.vector_get_data(vector, 3 * 4); - const vector_dv = new DataView(vector_data.buffer, vector_data.byteOffset, vector_data.byteLength); + const vector_dv = new DataView( + vector_data.buffer, + vector_data.byteOffset, + vector_data.byteLength, + ); expect(vector_dv.getInt32(0, true)).toBe(42); expect(vector_dv.getInt32(4, true)).toBe(12345); expect(vector_dv.getInt32(8, true)).toBe(67890); @@ -80,7 +100,11 @@ suite('data chunk', () => { const vector = duckdb.data_chunk_get_vector(chunk, 0); duckdb.vector_assign_string_element(vector, 0, 'ABC'); duckdb.vector_assign_string_element(vector, 1, 'abcdefghijkl'); - duckdb.vector_assign_string_element(vector, 2, 'longer than twelve characters'); + duckdb.vector_assign_string_element( + vector, + 2, + 'longer than twelve characters', + ); const data = duckdb.vector_get_data(vector, 3 * 16); const dv = new DataView(data.buffer); expect(dv.getUint32(0, true)).toBe(3); @@ -88,40 +112,65 @@ suite('data chunk', () => { expect(dv.getUint32(16, true)).toBe(12); expect([data[20], data[31]]).toStrictEqual([0x61, 0x6c]); // a, l expect(dv.getUint32(32, true)).toBe('longer than twelve characters'.length); - expect([data[36], data[37], data[38], data[39]]).toStrictEqual([0x6c, 0x6f, 0x6e, 0x67]); // l, o, n, g + expect([data[36], 
data[37], data[38], data[39]]).toStrictEqual([ + 0x6c, 0x6f, 0x6e, 0x67, + ]); // l, o, n, g }); test('write blob vector', () => { const blob_type = duckdb.create_logical_type(duckdb.Type.BLOB); const chunk = duckdb.create_data_chunk([blob_type]); duckdb.data_chunk_set_size(chunk, 3); const vector = duckdb.data_chunk_get_vector(chunk, 0); - duckdb.vector_assign_string_element_len(vector, 0, new Uint8Array([0xAB, 0xCD, 0xEF])); - duckdb.vector_assign_string_element_len(vector, 1, - new Uint8Array([0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C])); - duckdb.vector_assign_string_element_len(vector, 2, - new Uint8Array([0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D]) + duckdb.vector_assign_string_element_len( + vector, + 0, + new Uint8Array([0xab, 0xcd, 0xef]), + ); + duckdb.vector_assign_string_element_len( + vector, + 1, + new Uint8Array([ + 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, + ]), + ); + duckdb.vector_assign_string_element_len( + vector, + 2, + new Uint8Array([ + 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, + 0x1d, + ]), ); const data = duckdb.vector_get_data(vector, 3 * 16); const dv = new DataView(data.buffer); expect(dv.getUint32(0, true)).toBe(3); - expect([data[4], data[5], data[6]]).toStrictEqual([0xAB, 0xCD, 0xEF]); + expect([data[4], data[5], data[6]]).toStrictEqual([0xab, 0xcd, 0xef]); expect(dv.getUint32(16, true)).toBe(12); - expect([data[20], data[31]]).toStrictEqual([0x01, 0x0C]); + expect([data[20], data[31]]).toStrictEqual([0x01, 0x0c]); expect(dv.getUint32(32, true)).toBe(13); - expect([data[36], data[37], data[38], data[39]]).toStrictEqual([0x11, 0x12, 0x13, 0x14]); + expect([data[36], data[37], data[38], data[39]]).toStrictEqual([ + 0x11, 0x12, 0x13, 0x14, + ]); }); - test.skip('write varint vector', () => { // See https://github.com/duckdb/duckdb/pull/15670 + test.skip('write varint vector', () => { + // See https://github.com/duckdb/duckdb/pull/15670 const varint_type = duckdb.create_logical_type(duckdb.Type.VARINT); const chunk = duckdb.create_data_chunk([varint_type]); duckdb.data_chunk_set_size(chunk, 1); const vector = duckdb.data_chunk_get_vector(chunk, 0); expect(vector).toBeDefined(); - duckdb.vector_assign_string_element_len(vector, 0, new Uint8Array([0x80, 0x00, 0x01, 0x2a])); // VARINT 42 + duckdb.vector_assign_string_element_len( + vector, + 0, + new Uint8Array([0x80, 0x00, 0x01, 0x2a]), + ); // VARINT 42 const data = duckdb.vector_get_data(vector, 1); expect(data).toBeDefined(); const dv = new DataView(data.buffer); expect(dv.getUint32(0, true)).toBe(4); - expect([data[4], data[5], data[6], data[7]]).toStrictEqual([0x80, 0x00, 0x01, 0x2a]); // VARINT 42 + expect([data[4], data[5], data[6], data[7]]).toStrictEqual([ + 0x80, 0x00, 0x01, 0x2a, + ]); // VARINT 42 }); test('set list vector size', () => { const int_type = duckdb.create_logical_type(duckdb.Type.INTEGER); diff --git a/bindings/test/enums.test.ts b/bindings/test/enums.test.ts index 81f2209c..c2c4cf51 100644 --- a/bindings/test/enums.test.ts +++ b/bindings/test/enums.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; suite('enums', () => { @@ -9,9 +9,13 @@ suite('enums', () => { expect(duckdb.ResultType.QUERY_RESULT).toBe(3); expect(duckdb.ResultType[duckdb.ResultType.INVALID]).toBe('INVALID'); -
expect(duckdb.ResultType[duckdb.ResultType.CHANGED_ROWS]).toBe('CHANGED_ROWS'); + expect(duckdb.ResultType[duckdb.ResultType.CHANGED_ROWS]).toBe( + 'CHANGED_ROWS', + ); expect(duckdb.ResultType[duckdb.ResultType.NOTHING]).toBe('NOTHING'); - expect(duckdb.ResultType[duckdb.ResultType.QUERY_RESULT]).toBe('QUERY_RESULT'); + expect(duckdb.ResultType[duckdb.ResultType.QUERY_RESULT]).toBe( + 'QUERY_RESULT', + ); }); test('StatementType', () => { expect(duckdb.StatementType.INVALID).toBe(0); @@ -53,11 +57,17 @@ suite('enums', () => { expect(duckdb.StatementType[duckdb.StatementType.CREATE]).toBe('CREATE'); expect(duckdb.StatementType[duckdb.StatementType.EXECUTE]).toBe('EXECUTE'); expect(duckdb.StatementType[duckdb.StatementType.ALTER]).toBe('ALTER'); - expect(duckdb.StatementType[duckdb.StatementType.TRANSACTION]).toBe('TRANSACTION'); + expect(duckdb.StatementType[duckdb.StatementType.TRANSACTION]).toBe( + 'TRANSACTION', + ); expect(duckdb.StatementType[duckdb.StatementType.COPY]).toBe('COPY'); expect(duckdb.StatementType[duckdb.StatementType.ANALYZE]).toBe('ANALYZE'); - expect(duckdb.StatementType[duckdb.StatementType.VARIABLE_SET]).toBe('VARIABLE_SET'); - expect(duckdb.StatementType[duckdb.StatementType.CREATE_FUNC]).toBe('CREATE_FUNC'); + expect(duckdb.StatementType[duckdb.StatementType.VARIABLE_SET]).toBe( + 'VARIABLE_SET', + ); + expect(duckdb.StatementType[duckdb.StatementType.CREATE_FUNC]).toBe( + 'CREATE_FUNC', + ); expect(duckdb.StatementType[duckdb.StatementType.DROP]).toBe('DROP'); expect(duckdb.StatementType[duckdb.StatementType.EXPORT]).toBe('EXPORT'); expect(duckdb.StatementType[duckdb.StatementType.PRAGMA]).toBe('PRAGMA'); @@ -65,9 +75,15 @@ suite('enums', () => { expect(duckdb.StatementType[duckdb.StatementType.CALL]).toBe('CALL'); expect(duckdb.StatementType[duckdb.StatementType.SET]).toBe('SET'); expect(duckdb.StatementType[duckdb.StatementType.LOAD]).toBe('LOAD'); - expect(duckdb.StatementType[duckdb.StatementType.RELATION]).toBe('RELATION'); - expect(duckdb.StatementType[duckdb.StatementType.EXTENSION]).toBe('EXTENSION'); - expect(duckdb.StatementType[duckdb.StatementType.LOGICAL_PLAN]).toBe('LOGICAL_PLAN'); + expect(duckdb.StatementType[duckdb.StatementType.RELATION]).toBe( + 'RELATION', + ); + expect(duckdb.StatementType[duckdb.StatementType.EXTENSION]).toBe( + 'EXTENSION', + ); + expect(duckdb.StatementType[duckdb.StatementType.LOGICAL_PLAN]).toBe( + 'LOGICAL_PLAN', + ); expect(duckdb.StatementType[duckdb.StatementType.ATTACH]).toBe('ATTACH'); expect(duckdb.StatementType[duckdb.StatementType.DETACH]).toBe('DETACH'); expect(duckdb.StatementType[duckdb.StatementType.MULTI]).toBe('MULTI'); @@ -110,7 +126,7 @@ suite('enums', () => { expect(duckdb.Type.VARINT).toBe(35); expect(duckdb.Type.SQLNULL).toBe(36); - expect(duckdb.Type[duckdb.Type.BOOLEAN]).toBe('BOOLEAN') + expect(duckdb.Type[duckdb.Type.BOOLEAN]).toBe('BOOLEAN'); expect(duckdb.Type[duckdb.Type.TINYINT]).toBe('TINYINT'); expect(duckdb.Type[duckdb.Type.SMALLINT]).toBe('SMALLINT'); expect(duckdb.Type[duckdb.Type.INTEGER]).toBe('INTEGER'); diff --git a/bindings/test/errors.test.ts b/bindings/test/errors.test.ts index 16a43cb3..13fd794f 100644 --- a/bindings/test/errors.test.ts +++ b/bindings/test/errors.test.ts @@ -1,9 +1,11 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; suite('errors', () => { test('wrong external type', async () => { const db = await duckdb.open(); - expect(() => duckdb.query(db as unknown as 
duckdb.Connection, 'select 1')).toThrowError(/^Invalid connection argument$/); + expect(() => + duckdb.query(db as unknown as duckdb.Connection, 'select 1'), + ).toThrowError(/^Invalid connection argument$/); }); }); diff --git a/bindings/test/extracted_statements.test.ts b/bindings/test/extracted_statements.test.ts index d6854ba5..a1641e6f 100644 --- a/bindings/test/extracted_statements.test.ts +++ b/bindings/test/extracted_statements.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; import { expectResult } from './utils/expectResult'; import { INTEGER } from './utils/expectedLogicalTypes'; @@ -8,79 +8,88 @@ import { withConnection } from './utils/withConnection'; suite('extracted statements', () => { test('no statements', async () => { await withConnection(async (connection) => { - const { statement_count } = await duckdb.extract_statements(connection, ''); + const { statement_count } = await duckdb.extract_statements( + connection, + '', + ); expect(statement_count).toBe(0); }); }); test('error', async () => { await withConnection(async (connection) => { - const { extracted_statements, statement_count } = await duckdb.extract_statements(connection, 'x'); + const { extracted_statements, statement_count } = + await duckdb.extract_statements(connection, 'x'); expect(statement_count).toBe(0); - expect(duckdb.extract_statements_error(extracted_statements)).toBe('Parser Error: syntax error at or near "x"'); + expect(duckdb.extract_statements_error(extracted_statements)).toBe( + 'Parser Error: syntax error at or near "x"', + ); }); }); test('one statement', async () => { await withConnection(async (connection) => { - const { extracted_statements, statement_count } = await duckdb.extract_statements(connection, 'select 11 as a'); + const { extracted_statements, statement_count } = + await duckdb.extract_statements(connection, 'select 11 as a'); expect(statement_count).toBe(1); - const prepared = await duckdb.prepare_extracted_statement(connection, extracted_statements, 0); + const prepared = await duckdb.prepare_extracted_statement( + connection, + extracted_statements, + 0, + ); const result = await duckdb.execute_prepared(prepared); await expectResult(result, { chunkCount: 1, rowCount: 1, - columns: [ - { name: 'a', logicalType: INTEGER }, - ], - chunks: [ - { rowCount: 1, vectors: [data(4, [true], [11])]}, - ], + columns: [{ name: 'a', logicalType: INTEGER }], + chunks: [{ rowCount: 1, vectors: [data(4, [true], [11])] }], }); }); }); test('multiple statements', async () => { await withConnection(async (connection) => { - const { extracted_statements, statement_count } = await duckdb.extract_statements(connection, - 'select 11 as a; select 22 as b; select 33 as c' - ); + const { extracted_statements, statement_count } = + await duckdb.extract_statements( + connection, + 'select 11 as a; select 22 as b; select 33 as c', + ); expect(statement_count).toBe(3); - const prepared0 = await duckdb.prepare_extracted_statement(connection, extracted_statements, 0); + const prepared0 = await duckdb.prepare_extracted_statement( + connection, + extracted_statements, + 0, + ); const result0 = await duckdb.execute_prepared(prepared0); await expectResult(result0, { chunkCount: 1, rowCount: 1, - columns: [ - { name: 'a', logicalType: INTEGER }, - ], - chunks: [ - { rowCount: 1, vectors: [data(4, [true], [11])]}, - ], + columns: [{ name: 'a', logicalType: INTEGER }], + chunks: [{ rowCount: 1, 
vectors: [data(4, [true], [11])] }], }); - const prepared1 = await duckdb.prepare_extracted_statement(connection, extracted_statements, 1); + const prepared1 = await duckdb.prepare_extracted_statement( + connection, + extracted_statements, + 1, + ); const result1 = await duckdb.execute_prepared(prepared1); await expectResult(result1, { chunkCount: 1, rowCount: 1, - columns: [ - { name: 'b', logicalType: INTEGER }, - ], - chunks: [ - { rowCount: 1, vectors: [data(4, [true], [22])]}, - ], + columns: [{ name: 'b', logicalType: INTEGER }], + chunks: [{ rowCount: 1, vectors: [data(4, [true], [22])] }], }); - const prepared2 = await duckdb.prepare_extracted_statement(connection, extracted_statements, 2); + const prepared2 = await duckdb.prepare_extracted_statement( + connection, + extracted_statements, + 2, + ); const result2 = await duckdb.execute_prepared(prepared2); await expectResult(result2, { chunkCount: 1, rowCount: 1, - columns: [ - { name: 'c', logicalType: INTEGER }, - ], - chunks: [ - { rowCount: 1, vectors: [data(4, [true], [33])]}, - ], + columns: [{ name: 'c', logicalType: INTEGER }], + chunks: [{ rowCount: 1, vectors: [data(4, [true], [33])] }], }); }); }); diff --git a/bindings/test/instance_cache.test.ts b/bindings/test/instance_cache.test.ts index 1b919ab5..3d065776 100644 --- a/bindings/test/instance_cache.test.ts +++ b/bindings/test/instance_cache.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; suite('instance_cache', () => { diff --git a/bindings/test/logical_type.test.ts b/bindings/test/logical_type.test.ts index 2fed89c4..73230f0f 100644 --- a/bindings/test/logical_type.test.ts +++ b/bindings/test/logical_type.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; suite('logical_type', () => { @@ -24,7 +24,9 @@ suite('logical_type', () => { expect(duckdb.logical_type_get_alias(decimal_type)).toBeNull(); expect(duckdb.decimal_width(decimal_type)).toBe(4); expect(duckdb.decimal_scale(decimal_type)).toBe(1); - expect(duckdb.decimal_internal_type(decimal_type)).toBe(duckdb.Type.SMALLINT); + expect(duckdb.decimal_internal_type(decimal_type)).toBe( + duckdb.Type.SMALLINT, + ); }); test('decimal (INTEGER)', () => { const decimal_type = duckdb.create_decimal_type(9, 4); @@ -32,7 +34,9 @@ suite('logical_type', () => { expect(duckdb.logical_type_get_alias(decimal_type)).toBeNull(); expect(duckdb.decimal_width(decimal_type)).toBe(9); expect(duckdb.decimal_scale(decimal_type)).toBe(4); - expect(duckdb.decimal_internal_type(decimal_type)).toBe(duckdb.Type.INTEGER); + expect(duckdb.decimal_internal_type(decimal_type)).toBe( + duckdb.Type.INTEGER, + ); }); test('decimal (BIGINT)', () => { const decimal_type = duckdb.create_decimal_type(18, 6); @@ -48,7 +52,9 @@ suite('logical_type', () => { expect(duckdb.logical_type_get_alias(decimal_type)).toBeNull(); expect(duckdb.decimal_width(decimal_type)).toBe(38); expect(duckdb.decimal_scale(decimal_type)).toBe(10); - expect(duckdb.decimal_internal_type(decimal_type)).toBe(duckdb.Type.HUGEINT); + expect(duckdb.decimal_internal_type(decimal_type)).toBe( + duckdb.Type.HUGEINT, + ); }); test('enum (small)', () => { const enum_type = duckdb.create_enum_type(['DUCK_DUCK_ENUM', 'GOOSE']); @@ -60,7 +66,9 @@ suite('logical_type', () => { expect(duckdb.enum_dictionary_value(enum_type, 1)).toBe('GOOSE'); }); test('enum (medium)', () 
=> { - const enum_type = duckdb.create_enum_type(Array.from({ length: 300 }).map((_, i) => `enum_${i}`)); + const enum_type = duckdb.create_enum_type( + Array.from({ length: 300 }).map((_, i) => `enum_${i}`), + ); expect(duckdb.get_type_id(enum_type)).toBe(duckdb.Type.ENUM); expect(duckdb.logical_type_get_alias(enum_type)).toBeNull(); expect(duckdb.enum_internal_type(enum_type)).toBe(duckdb.Type.USMALLINT); @@ -69,7 +77,9 @@ suite('logical_type', () => { expect(duckdb.enum_dictionary_value(enum_type, 299)).toBe('enum_299'); }); test('enum (large)', () => { - const enum_type = duckdb.create_enum_type(Array.from({ length: 70000 }).map((_, i) => `enum_${i}`)); + const enum_type = duckdb.create_enum_type( + Array.from({ length: 70000 }).map((_, i) => `enum_${i}`), + ); expect(duckdb.get_type_id(enum_type)).toBe(duckdb.Type.ENUM); expect(duckdb.logical_type_get_alias(enum_type)).toBeNull(); expect(duckdb.enum_internal_type(enum_type)).toBe(duckdb.Type.UINTEGER); @@ -106,7 +116,10 @@ suite('logical_type', () => { test('struct', () => { const int_type = duckdb.create_logical_type(duckdb.Type.INTEGER); const varchar_type = duckdb.create_logical_type(duckdb.Type.VARCHAR); - const struct_type = duckdb.create_struct_type([int_type, varchar_type], ['a', 'b']); + const struct_type = duckdb.create_struct_type( + [int_type, varchar_type], + ['a', 'b'], + ); expect(duckdb.get_type_id(struct_type)).toBe(duckdb.Type.STRUCT); expect(duckdb.logical_type_get_alias(struct_type)).toBeNull(); expect(duckdb.struct_type_child_count(struct_type)).toBe(2); @@ -126,7 +139,10 @@ suite('logical_type', () => { test('union', () => { const varchar_type = duckdb.create_logical_type(duckdb.Type.VARCHAR); const smallint_type = duckdb.create_logical_type(duckdb.Type.SMALLINT); - const union_type = duckdb.create_union_type([varchar_type, smallint_type], ['name', 'age']); + const union_type = duckdb.create_union_type( + [varchar_type, smallint_type], + ['name', 'age'], + ); expect(duckdb.get_type_id(union_type)).toBe(duckdb.Type.UNION); expect(duckdb.logical_type_get_alias(union_type)).toBeNull(); expect(duckdb.union_type_member_count(union_type)).toBe(2); diff --git a/bindings/test/open.test.ts b/bindings/test/open.test.ts index 3b8af0b4..3f450963 100644 --- a/bindings/test/open.test.ts +++ b/bindings/test/open.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; suite('open', () => { @@ -24,7 +24,7 @@ suite('open', () => { expect(db).toBeTruthy(); duckdb.close_sync(db); await expect(async () => await duckdb.connect(db)).rejects.toStrictEqual( - new Error('Failed to connect: instance closed') + new Error('Failed to connect: instance closed'), ); // double-close should be a no-op duckdb.close_sync(db); diff --git a/bindings/test/pending.test.ts b/bindings/test/pending.test.ts index 1981e667..2f1fd822 100644 --- a/bindings/test/pending.test.ts +++ b/bindings/test/pending.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; import { expectResult } from './utils/expectResult'; import { BIGINT, INTEGER } from './utils/expectedLogicalTypes'; @@ -15,36 +15,63 @@ suite('pending', () => { await expectResult(result, { chunkCount: 1, rowCount: 1, - columns: [ - { name: 'a', logicalType: INTEGER }, - ], - chunks: [ - { rowCount: 1, vectors: [data(4, [true], [11])]}, - ], + columns: [{ name: 'a', logicalType: INTEGER }], 
+ chunks: [{ rowCount: 1, vectors: [data(4, [true], [11])] }], }); }); }); test('streaming', async () => { await withConnection(async (connection) => { - const prepared = await duckdb.prepare(connection, 'select n::integer as int from range(5000) t(n)'); + const prepared = await duckdb.prepare( + connection, + 'select n::integer as int from range(5000) t(n)', + ); const pending = duckdb.pending_prepared_streaming(prepared); const result = await duckdb.execute_pending(pending); await expectResult(result, { isStreaming: true, - columns: [ - { name: 'int', logicalType: INTEGER }, - ], + columns: [{ name: 'int', logicalType: INTEGER }], chunks: [ - { rowCount: 2048, vectors: [data(4, null, Array.from({ length: 2048 }).map((_, i) => i))]}, - { rowCount: 2048, vectors: [data(4, null, Array.from({ length: 2048 }).map((_, i) => 2048 + i))]}, - { rowCount: 904, vectors: [data(4, null, Array.from({ length: 904 }).map((_, i) => 4096 + i))]}, + { + rowCount: 2048, + vectors: [ + data( + 4, + null, + Array.from({ length: 2048 }).map((_, i) => i), + ), + ], + }, + { + rowCount: 2048, + vectors: [ + data( + 4, + null, + Array.from({ length: 2048 }).map((_, i) => 2048 + i), + ), + ], + }, + { + rowCount: 904, + vectors: [ + data( + 4, + null, + Array.from({ length: 904 }).map((_, i) => 4096 + i), + ), + ], + }, ], }); }); }); test('tasks', async () => { await withConnection(async (connection) => { - const prepared = await duckdb.prepare(connection, 'select count(*) as count from range(10_000)'); + const prepared = await duckdb.prepare( + connection, + 'select count(*) as count from range(10_000)', + ); const pending = duckdb.pending_prepared(prepared); let pending_state = duckdb.pending_execute_check_state(pending); while (!duckdb.pending_execution_is_finished(pending_state)) { @@ -55,18 +82,18 @@ suite('pending', () => { await expectResult(result, { chunkCount: 1, rowCount: 1, - columns: [ - { name: 'count', logicalType: BIGINT }, - ], - chunks: [ - { rowCount: 1, vectors: [data(8, [true], [10_000n])]}, - ], + columns: [{ name: 'count', logicalType: BIGINT }], + chunks: [{ rowCount: 1, vectors: [data(8, [true], [10_000n])] }], }); }); }); - test.skip('interrupt', async () => { // interrupt does not appear to be entirely deterministic + test.skip('interrupt', async () => { + // interrupt does not appear to be entirely deterministic await withConnection(async (connection) => { - const prepared = await duckdb.prepare(connection, 'select count(*) as count from range(10_000)'); + const prepared = await duckdb.prepare( + connection, + 'select count(*) as count from range(10_000)', + ); const pending = duckdb.pending_prepared(prepared); duckdb.interrupt(connection); await sleep(0); // yield to allow progress @@ -78,7 +105,9 @@ suite('pending', () => { } expect(pending_state).toBe(duckdb.PendingState.ERROR); - expect(duckdb.pending_error(pending)).toBe('INTERRUPT Error: Interrupted!'); + expect(duckdb.pending_error(pending)).toBe( + 'INTERRUPT Error: Interrupted!', + ); }); }); // TODO: query progress? 
diff --git a/bindings/test/prepared_statements.test.ts b/bindings/test/prepared_statements.test.ts index 27c9c5bb..8e199307 100644 --- a/bindings/test/prepared_statements.test.ts +++ b/bindings/test/prepared_statements.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; import { ALT, @@ -39,11 +39,11 @@ suite('prepared statements', () => { await withConnection(async (connection) => { const prepared = await duckdb.prepare( connection, - 'select 17 as seventeen' + 'select 17 as seventeen', ); expect(duckdb.nparams(prepared)).toBe(0); expect(duckdb.prepared_statement_type(prepared)).toBe( - duckdb.StatementType.SELECT + duckdb.StatementType.SELECT, ); const result = await duckdb.execute_prepared(prepared); await expectResult(result, { @@ -58,10 +58,10 @@ suite('prepared statements', () => { await withConnection(async (connection) => { const prepared = await duckdb.prepare( connection, - 'select ? as a, ? as b' + 'select ? as a, ? as b', ); expect(duckdb.prepared_statement_type(prepared)).toBe( - duckdb.StatementType.SELECT + duckdb.StatementType.SELECT, ); expect(duckdb.nparams(prepared)).toBe(2); @@ -96,10 +96,10 @@ suite('prepared statements', () => { await withConnection(async (connection) => { const prepared = await duckdb.prepare( connection, - 'select $2 as two, $1 as one' + 'select $2 as two, $1 as one', ); expect(duckdb.prepared_statement_type(prepared)).toBe( - duckdb.StatementType.SELECT + duckdb.StatementType.SELECT, ); expect(duckdb.nparams(prepared)).toBe(2); @@ -134,10 +134,10 @@ suite('prepared statements', () => { await withConnection(async (connection) => { const prepared = await duckdb.prepare( connection, - 'select $x as a, $y as b' + 'select $x as a, $y as b', ); expect(duckdb.prepared_statement_type(prepared)).toBe( - duckdb.StatementType.SELECT + duckdb.StatementType.SELECT, ); expect(duckdb.nparams(prepared)).toBe(2); @@ -172,7 +172,7 @@ suite('prepared statements', () => { await withConnection(async (connection) => { const prepared = await duckdb.prepare( connection, - 'select ? as a, ? as b' + 'select ? as a, ? as b', ); duckdb.bind_int32(prepared, 1, 11); expect(duckdb.param_type(prepared, 1)).toBe(duckdb.Type.INTEGER); @@ -231,7 +231,7 @@ suite('prepared statements', () => { ? as interval, \ ? as varchar, \ ? as blob, \ - ? as null' + ? 
as null', ); duckdb.bind_boolean(prepared, 1, true); @@ -252,14 +252,14 @@ suite('prepared statements', () => { duckdb.bind_hugeint( prepared, 6, - 170141183460469231731687303715884105727n + 170141183460469231731687303715884105727n, ); expect(duckdb.param_type(prepared, 6)).toBe(duckdb.Type.HUGEINT); duckdb.bind_uhugeint( prepared, 7, - 340282366920938463463374607431768211455n + 340282366920938463463374607431768211455n, ); expect(duckdb.param_type(prepared, 7)).toBe(duckdb.Type.UHUGEINT); @@ -309,7 +309,7 @@ suite('prepared statements', () => { duckdb.bind_blob( prepared, 21, - Buffer.from('thisisalongblob\x00withnullbytes') + Buffer.from('thisisalongblob\x00withnullbytes'), ); expect(duckdb.param_type(prepared, 21)).toBe(duckdb.Type.BLOB); @@ -369,13 +369,13 @@ suite('prepared statements', () => { data( 16, [true], - [{ months: 999, days: 999, micros: 999999999n }] + [{ months: 999, days: 999, micros: 999999999n }], ), data(16, [true], ['🦆🦆🦆\x00🦆🦆🦆']), data( 16, [true], - [Buffer.from('thisisalongblob\x00withnullbytes')] + [Buffer.from('thisisalongblob\x00withnullbytes')], ), data(4, [false], [null]), ], @@ -393,18 +393,18 @@ suite('prepared statements', () => { ? as list, \ ? as array, \ ? as map, \ - ? as union' + ? as union', ); const int_type = duckdb.create_logical_type(duckdb.Type.INTEGER); const varchar_type = duckdb.create_logical_type(duckdb.Type.VARCHAR); const struct_type = duckdb.create_struct_type( [int_type, varchar_type], - ['a', 'b'] + ['a', 'b'], ); const map_type = duckdb.create_map_type(int_type, varchar_type); const union_type = duckdb.create_union_type( [int_type, varchar_type], - ['num', 'str'] + ['num', 'str'], ); const int_value = duckdb.create_int64(42n); @@ -428,7 +428,7 @@ suite('prepared statements', () => { const map_value = duckdb.create_map_value( map_type, [int_value], - [varchar_value] + [varchar_value], ); duckdb.bind_value(prepared, 4, map_value); expect(duckdb.param_type(prepared, 4)).toBe(duckdb.Type.MAP); @@ -436,7 +436,7 @@ suite('prepared statements', () => { const union_value = duckdb.create_union_value( union_type, 1, - varchar_value + varchar_value, ); duckdb.bind_value(prepared, 5, union_value); expect(duckdb.param_type(prepared, 5)).toBe(duckdb.Type.UNION); @@ -465,7 +465,7 @@ suite('prepared statements', () => { struct( 1, [true], - [data(4, [true], [42]), data(16, [true], ['🦆🦆🦆🦆🦆🦆'])] + [data(4, [true], [42]), data(16, [true], ['🦆🦆🦆🦆🦆🦆'])], ), list([true], [[0n, 1n]], 1, data(4, [true], [42])), array(1, [true], data(4, [true], [42])), @@ -474,7 +474,7 @@ suite('prepared statements', () => { [[0n, 1n]], 1, data(4, [true], [42]), - data(16, [true], ['🦆🦆🦆🦆🦆🦆']) + data(16, [true], ['🦆🦆🦆🦆🦆🦆']), ), union([ data(1, [true], [1]), // tags @@ -491,7 +491,7 @@ suite('prepared statements', () => { await withConnection(async (connection) => { const prepared = await duckdb.prepare( connection, - 'select n::integer as int from range(5000) t(n)' + 'select n::integer as int from range(5000) t(n)', ); const result = await duckdb.execute_prepared_streaming(prepared); await expectResult(result, { @@ -504,7 +504,7 @@ suite('prepared statements', () => { data( 4, null, - Array.from({ length: 2048 }).map((_, i) => i) + Array.from({ length: 2048 }).map((_, i) => i), ), ], }, @@ -514,7 +514,7 @@ suite('prepared statements', () => { data( 4, null, - Array.from({ length: 2048 }).map((_, i) => 2048 + i) + Array.from({ length: 2048 }).map((_, i) => 2048 + i), ), ], }, @@ -524,7 +524,7 @@ suite('prepared statements', () => { data( 4, null, - Array.from({ length: 904 
}).map((_, i) => 4096 + i) + Array.from({ length: 904 }).map((_, i) => 4096 + i), ), ], }, @@ -540,7 +540,7 @@ suite('prepared statements', () => { ? as struct, \ ? as list, \ ? as array, \ - ? as map' + ? as map', ); const int_type = duckdb.create_logical_type(duckdb.Type.INTEGER); const struct_type = duckdb.create_struct_type([], []); diff --git a/bindings/test/query.test.ts b/bindings/test/query.test.ts index 1ee2b8dd..b9d889f6 100644 --- a/bindings/test/query.test.ts +++ b/bindings/test/query.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; import { ALT, @@ -54,20 +54,23 @@ suite('query', () => { columns: [ { name: 'seventeen', logicalType: { typeId: duckdb.Type.INTEGER } }, ], - chunks: [ - { rowCount: 1, vectors: [data(4, [true], [17])]}, - ], + chunks: [{ rowCount: 1, vectors: [data(4, [true], [17])] }], }); }); }); test('basic error', async () => { await withConnection(async (connection) => { - await expect(duckdb.query(connection, 'selct 1')).rejects.toThrow('Parser Error'); + await expect(duckdb.query(connection, 'selct 1')).rejects.toThrow( + 'Parser Error', + ); }); }); test('test_all_types()', async () => { await withConnection(async (connection) => { - const result = await duckdb.query(connection, `from test_all_types(use_large_enum=${useLargeEnum})`); + const result = await duckdb.query( + connection, + `from test_all_types(use_large_enum=${useLargeEnum})`, + ); const validity = [true, true, false]; await expectResult(result, { chunkCount: 1, @@ -98,17 +101,37 @@ suite('query', () => { { name: 'dec_4_1', logicalType: DECIMAL(4, 1, duckdb.Type.SMALLINT) }, { name: 'dec_9_4', logicalType: DECIMAL(9, 4, duckdb.Type.INTEGER) }, { name: 'dec_18_6', logicalType: DECIMAL(18, 6, duckdb.Type.BIGINT) }, - { name: 'dec38_10', logicalType: DECIMAL(38, 10, duckdb.Type.HUGEINT) }, + { + name: 'dec38_10', + logicalType: DECIMAL(38, 10, duckdb.Type.HUGEINT), + }, { name: 'uuid', logicalType: UUID }, { name: 'interval', logicalType: INTERVAL }, { name: 'varchar', logicalType: VARCHAR }, { name: 'blob', logicalType: BLOB }, { name: 'bit', logicalType: BIT }, - { name: 'small_enum', logicalType: ENUM(['DUCK_DUCK_ENUM', 'GOOSE'], duckdb.Type.UTINYINT) }, - { name: 'medium_enum', logicalType: ENUM(Array.from({ length: 300}).map((_, i) => `enum_${i}`), duckdb.Type.USMALLINT) }, - { name: 'large_enum', logicalType: useLargeEnum - ? ENUM(Array.from({ length: 70000}).map((_, i) => `enum_${i}`), duckdb.Type.UINTEGER) - : ENUM(['enum_0', 'enum_69999'], duckdb.Type.UTINYINT) + { + name: 'small_enum', + logicalType: ENUM( + ['DUCK_DUCK_ENUM', 'GOOSE'], + duckdb.Type.UTINYINT, + ), + }, + { + name: 'medium_enum', + logicalType: ENUM( + Array.from({ length: 300 }).map((_, i) => `enum_${i}`), + duckdb.Type.USMALLINT, + ), + }, + { + name: 'large_enum', + logicalType: useLargeEnum + ? 
ENUM( + Array.from({ length: 70000 }).map((_, i) => `enum_${i}`), + duckdb.Type.UINTEGER, + ) + : ENUM(['enum_0', 'enum_69999'], duckdb.Type.UTINYINT), }, { name: 'int_array', logicalType: LIST(INTEGER) }, { name: 'double_array', logicalType: LIST(DOUBLE) }, @@ -117,19 +140,58 @@ suite('query', () => { { name: 'timestamptz_array', logicalType: LIST(TIMESTAMP_TZ) }, { name: 'varchar_array', logicalType: LIST(VARCHAR) }, { name: 'nested_int_array', logicalType: LIST(LIST(INTEGER)) }, - { name: 'struct', logicalType: STRUCT(ENTRY('a', INTEGER), ENTRY('b', VARCHAR)) }, - { name: 'struct_of_arrays', logicalType: STRUCT(ENTRY('a', LIST(INTEGER)), ENTRY('b', LIST(VARCHAR))) }, - { name: 'array_of_structs', logicalType: LIST(STRUCT(ENTRY('a', INTEGER), ENTRY('b', VARCHAR))) }, + { + name: 'struct', + logicalType: STRUCT(ENTRY('a', INTEGER), ENTRY('b', VARCHAR)), + }, + { + name: 'struct_of_arrays', + logicalType: STRUCT( + ENTRY('a', LIST(INTEGER)), + ENTRY('b', LIST(VARCHAR)), + ), + }, + { + name: 'array_of_structs', + logicalType: LIST(STRUCT(ENTRY('a', INTEGER), ENTRY('b', VARCHAR))), + }, { name: 'map', logicalType: MAP(VARCHAR, VARCHAR) }, - { name: 'union', logicalType: UNION(ALT('name', VARCHAR), ALT('age', SMALLINT)) }, + { + name: 'union', + logicalType: UNION(ALT('name', VARCHAR), ALT('age', SMALLINT)), + }, { name: 'fixed_int_array', logicalType: ARRAY(INTEGER, 3) }, { name: 'fixed_varchar_array', logicalType: ARRAY(VARCHAR, 3) }, - { name: 'fixed_nested_int_array', logicalType: ARRAY(ARRAY(INTEGER, 3), 3) }, - { name: 'fixed_nested_varchar_array', logicalType: ARRAY(ARRAY(VARCHAR, 3), 3) }, - { name: 'fixed_struct_array', logicalType: ARRAY(STRUCT(ENTRY('a', INTEGER), ENTRY('b', VARCHAR)), 3) }, - { name: 'struct_of_fixed_array', logicalType: STRUCT(ENTRY('a', ARRAY(INTEGER, 3)), ENTRY('b', ARRAY(VARCHAR, 3))) }, - { name: 'fixed_array_of_int_list', logicalType: ARRAY(LIST(INTEGER), 3) }, - { name: 'list_of_fixed_int_array', logicalType: LIST(ARRAY(INTEGER, 3)) }, + { + name: 'fixed_nested_int_array', + logicalType: ARRAY(ARRAY(INTEGER, 3), 3), + }, + { + name: 'fixed_nested_varchar_array', + logicalType: ARRAY(ARRAY(VARCHAR, 3), 3), + }, + { + name: 'fixed_struct_array', + logicalType: ARRAY( + STRUCT(ENTRY('a', INTEGER), ENTRY('b', VARCHAR)), + 3, + ), + }, + { + name: 'struct_of_fixed_array', + logicalType: STRUCT( + ENTRY('a', ARRAY(INTEGER, 3)), + ENTRY('b', ARRAY(VARCHAR, 3)), + ), + }, + { + name: 'fixed_array_of_int_list', + logicalType: ARRAY(LIST(INTEGER), 3), + }, + { + name: 'list_of_fixed_int_array', + logicalType: LIST(ARRAY(INTEGER, 3)), + }, ], chunks: [ { @@ -139,122 +201,581 @@ suite('query', () => { data(1, validity, [-128, 127, null]), // 1: tinyint data(2, validity, [-32768, 32767, null]), // 2: smallint data(4, validity, [-2147483648, 2147483647, null]), // 3: int - data(8, validity, [-9223372036854775808n, 9223372036854775807n, null]), // 4: bigint - data(16, validity, [-170141183460469231731687303715884105728n, 170141183460469231731687303715884105727n, null]), // 5: hugeint - data(16, validity, [0n, 340282366920938463463374607431768211455n, null]), // 6: uhugeint + data(8, validity, [ + -9223372036854775808n, + 9223372036854775807n, + null, + ]), // 4: bigint + data(16, validity, [ + -170141183460469231731687303715884105728n, + 170141183460469231731687303715884105727n, + null, + ]), // 5: hugeint + data(16, validity, [ + 0n, + 340282366920938463463374607431768211455n, + null, + ]), // 6: uhugeint data(1, validity, [0, 255, null]), // 7: utinyint data(2, 
validity, [0, 65535, null]), // 8: usmallint data(4, validity, [0, 4294967295, null]), // 9: uint data(8, validity, [0n, 18446744073709551615n, null]), // 10: ubigint data(16, validity, [ - Buffer.from(new Uint8Array([0x7F, 0xFF, 0x7F, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - ])), - Buffer.from(new Uint8Array([0x80, 0x00, 0x80, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - ])), - null] - ), // 11: varint + Buffer.from( + new Uint8Array([ + 0x7f, 0xff, 0x7f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, + ]), + ), + Buffer.from( + new Uint8Array([ + 0x80, 0x00, 0x80, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf8, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, + ]), + ), + null, + ]), // 11: varint data(4, validity, [-2147483646, 2147483646, null]), // 12: date data(8, validity, [0n, 86400000000n, null]), // 13: time - data(8, validity, [-9223372022400000000n, 9223372036854775806n, null]), // 14: timestamp + data(8, validity, [ + -9223372022400000000n, + 9223372036854775806n, + null, + ]), // 14: timestamp data(8, validity, [-9223372022400n, 9223372036854n, null]), // 15: timestamp_s data(8, validity, [-9223372022400000n, 9223372036854775n, null]), // 16: timestamp_ms - data(8, validity, [-9223286400000000000n, 9223372036854775806n, null]), // 17: timestamp_ns + data(8, validity, [ + -9223286400000000000n, + 9223372036854775806n, + null, + ]), // 17: timestamp_ns data(8, validity, [0n, 1449551462400115198n, null]), // 18: time_tz - data(8, validity, [-9223372022400000000n, 9223372036854775806n, null]), // 19: timestamp_tz - data(4, validity, [-3.4028234663852886e+38, 3.4028234663852886e+38, null]), // 20: float - data(8, validity, [-1.7976931348623157e+308, 1.7976931348623157e+308, null]), // 21: double + data(8, validity, [ + -9223372022400000000n, + 9223372036854775806n, + null, + ]), // 19: timestamp_tz + data(4, validity, [ + -3.4028234663852886e38, + 3.4028234663852886e38, + null, + ]), // 20: float + data(8, validity, [ + -1.7976931348623157e308, + 1.7976931348623157e308, + null, + ]), // 21: double data(2, validity, [-9999, 9999, null]), // 22: dec_4_1 data(4, validity, [-999999999, 999999999, null]), // 23: dec_9_4 - data(8, validity, [-999999999999999999n, 999999999999999999n, null]), // 24: dec_18_6 - data(16, validity, [-99999999999999999999999999999999999999n, 99999999999999999999999999999999999999n, null]), // 25: dec38_10 - data(16, validity, [-170141183460469231731687303715884105728n, 170141183460469231731687303715884105727n, null]), // 26: uuid - data(16, validity, [{ months: 0, days: 0, micros: 0n }, { months: 999, days: 999, micros: 999999999n }, null]), // 27: interval + data(8, validity, [ + -999999999999999999n, + 999999999999999999n, + null, + ]), // 24: dec_18_6 + data(16, validity, [ + -99999999999999999999999999999999999999n, + 99999999999999999999999999999999999999n, + null, + ]), // 25: dec38_10 + data(16, validity, [ + -170141183460469231731687303715884105728n, + 170141183460469231731687303715884105727n, + null, + ]), // 26: uuid + data(16, validity, [ + { months: 0, days: 0, micros: 0n }, + { months: 999, days: 999, micros: 999999999n }, + null, + ]), // 27: interval data(16, validity, ['🦆🦆🦆🦆🦆🦆', 'goo\0se', null]), // 28: varchar - data(16, validity, [Buffer.from('thisisalongblob\x00withnullbytes'), Buffer.from('\x00\x00\x00a'), null]), // 29: blob - data(16, validity, [Buffer.from([1, 0b10010001, 0b00101110, 0b00101010, 0b11010111]), Buffer.from([3, 0b11110101]), null]), // 30: bit (x0010001 00101110 00101010 11010111, xxx10101) - data(1, validity, [0, 1, null]), // 31: small_enum + data(16, validity, [ + Buffer.from('thisisalongblob\x00withnullbytes'), + Buffer.from('\x00\x00\x00a'), + null, + ]), // 29: blob + data(16, validity, [ + Buffer.from([ + 1, 0b10010001, 0b00101110, 0b00101010, 0b11010111, + ]), + Buffer.from([3, 0b11110101]), + null, + ]), // 30: bit (x0010001 00101110 00101010 11010111, xxx10101) + data(1, validity, [0, 1, null]), // 31: small_enum data(2, validity, [0, 299, null]), // 32: medium_enum data(4, validity, [0, useLargeEnum ? 
69999 : 1, null]), // 33: large_enum - list(validity, [[0n, 0n], [0n, 5n], null], 5, data(4, [true, true, false, false, true], [42, 999, null, null, -42])), // 34: int_array - list(validity, [[0n, 0n], [0n, 6n], null], 6, data(8, [true, true, true, true, false, true], [42.0, NaN, Infinity, -Infinity, null, -42.0])), // 35: double_array - list(validity, [[0n, 0n], [0n, 5n], null], 5, data(4, [true, true, true, false, true], [0, 2147483647, -2147483647, null, 19124])), // 36: date_array - list(validity, [[0n, 0n], [0n, 5n], null], 5, data(8, [true, true, true, false, true], [0n, 9223372036854775807n, -9223372036854775807n, null, 1652372625000000n])), // 37: timestamp_array - list(validity, [[0n, 0n], [0n, 5n], null], 5, data(8, [true, true, true, false, true], [0n, 9223372036854775807n, -9223372036854775807n, null, 1652397825000000n])), // 38: timestamptz_array - list(validity, [[0n, 0n], [0n, 4n], null], 4, data(16, [true, true, false, true], ['🦆🦆🦆🦆🦆🦆', 'goose', null, ''])), // 39: varchar_array - list(validity, [[0n, 0n], [0n, 5n], null], 5, - list([true, true, false, true, true], [[0n, 0n], [0n, 5n], null, [5n, 0n], [5n, 5n]], 10, - data(10, [true, true, false, false, true, true, true, false, false, true], [42, 999, null, null, -42, 42, 999, null, null, -42]))), // 40: nested_int_array - struct(3, validity, [data(4, [false, true, false], [null, 42, null]), data(16, [false, true, false], [null, '🦆🦆🦆🦆🦆🦆', null])]), // 41: struct + list( + validity, + [[0n, 0n], [0n, 5n], null], + 5, + data( + 4, + [true, true, false, false, true], + [42, 999, null, null, -42], + ), + ), // 34: int_array + list( + validity, + [[0n, 0n], [0n, 6n], null], + 6, + data( + 8, + [true, true, true, true, false, true], + [42.0, NaN, Infinity, -Infinity, null, -42.0], + ), + ), // 35: double_array + list( + validity, + [[0n, 0n], [0n, 5n], null], + 5, + data( + 4, + [true, true, true, false, true], + [0, 2147483647, -2147483647, null, 19124], + ), + ), // 36: date_array + list( + validity, + [[0n, 0n], [0n, 5n], null], + 5, + data( + 8, + [true, true, true, false, true], + [ + 0n, + 9223372036854775807n, + -9223372036854775807n, + null, + 1652372625000000n, + ], + ), + ), // 37: timestamp_array + list( + validity, + [[0n, 0n], [0n, 5n], null], + 5, + data( + 8, + [true, true, true, false, true], + [ + 0n, + 9223372036854775807n, + -9223372036854775807n, + null, + 1652397825000000n, + ], + ), + ), // 38: timestamptz_array + list( + validity, + [[0n, 0n], [0n, 4n], null], + 4, + data( + 16, + [true, true, false, true], + ['🦆🦆🦆🦆🦆🦆', 'goose', null, ''], + ), + ), // 39: varchar_array + list( + validity, + [[0n, 0n], [0n, 5n], null], + 5, + list( + [true, true, false, true, true], + [[0n, 0n], [0n, 5n], null, [5n, 0n], [5n, 5n]], + 10, + data( + 10, + [ + true, + true, + false, + false, + true, + true, + true, + false, + false, + true, + ], + [42, 999, null, null, -42, 42, 999, null, null, -42], + ), + ), + ), // 40: nested_int_array + struct(3, validity, [ + data(4, [false, true, false], [null, 42, null]), + data(16, [false, true, false], [null, '🦆🦆🦆🦆🦆🦆', null]), + ]), // 41: struct struct(3, validity, [ - list([false, true, false], [null, [0n, 5n], null], 5, data(4, [true, true, false, false, true], [42, 999, null, null, -42])), - list([false, true, false], [null, [0n, 4n], null], 4, data(16, [true, true, false, true], ['🦆🦆🦆🦆🦆🦆', 'goose', null, ''])), + list( + [false, true, false], + [null, [0n, 5n], null], + 5, + data( + 4, + [true, true, false, false, true], + [42, 999, null, null, -42], + ), + ), + list( + [false, 
true, false], + [null, [0n, 4n], null], + 4, + data( + 16, + [true, true, false, true], + ['🦆🦆🦆🦆🦆🦆', 'goose', null, ''], + ), + ), ]), // 42: struct_of_arrays - list(validity, [[0n, 0n], [0n, 3n], null], 3, - struct(3, [true, true, false], [ - data(4, [false, true], [null, 42]), - data(16, [false, true], [null, '🦆🦆🦆🦆🦆🦆']), - ]) + list( + validity, + [[0n, 0n], [0n, 3n], null], + 3, + struct( + 3, + [true, true, false], + [ + data(4, [false, true], [null, 42]), + data(16, [false, true], [null, '🦆🦆🦆🦆🦆🦆']), + ], + ), ), // 43: array_of_structs - map(validity, [[0n, 0n], [0n, 2n], null], 2, + map( + validity, + [[0n, 0n], [0n, 2n], null], + 2, data(16, [true, true], ['key1', 'key2']), data(16, [true, true], ['🦆🦆🦆🦆🦆🦆', 'goose']), ), // 44: map union([ data(1, validity, [0, 1, null]), // tags - data(16, [true, false, false], ['Frank', null, null]), + data(16, [true, false, false], ['Frank', null, null]), data(2, [false, true, false], [null, 5, null]), ]), // 45: union - array(3, validity, data(4, [false, true, true, true, true, true], [null, 2, 3, 4, 5, 6])), // 46: fixed_int_array - array(3, validity, data(16, [true, false, true, true, true, true], ['a', null, 'c', 'd', 'e', 'f'])), // 47: fixed_varchar_array - array(3, validity, - array(3, [true, false, true, true, true, true, false, false, false], - data(4, [false, true, true, false, false, false, false, true, true, true, true, true, false, true, true, true, true, true], - [null, 2, 3, null, null, null, null, 2, 3, 4, 5, 6, null, 2, 3, 4, 5, 6]) - ) + array( + 3, + validity, + data( + 4, + [false, true, true, true, true, true], + [null, 2, 3, 4, 5, 6], + ), + ), // 46: fixed_int_array + array( + 3, + validity, + data( + 16, + [true, false, true, true, true, true], + ['a', null, 'c', 'd', 'e', 'f'], + ), + ), // 47: fixed_varchar_array + array( + 3, + validity, + array( + 3, + [true, false, true, true, true, true, false, false, false], + data( + 4, + [ + false, + true, + true, + false, + false, + false, + false, + true, + true, + true, + true, + true, + false, + true, + true, + true, + true, + true, + ], + [ + null, + 2, + 3, + null, + null, + null, + null, + 2, + 3, + 4, + 5, + 6, + null, + 2, + 3, + 4, + 5, + 6, + ], + ), + ), ), // 48: fixed_nested_int_array - array(3, validity, - array(3, [true, false, true, true, true, true, false, false, false], - data(16, [true, false, true, false, false, false, true, false, true, true, true, true, true, false, true, true, true, true], - ['a', null, 'c', null, null, null, 'a', null, 'c', 'd', 'e', 'f', 'a', null, 'c', 'd', 'e', 'f']) - ) + array( + 3, + validity, + array( + 3, + [true, false, true, true, true, true, false, false, false], + data( + 16, + [ + true, + false, + true, + false, + false, + false, + true, + false, + true, + true, + true, + true, + true, + false, + true, + true, + true, + true, + ], + [ + 'a', + null, + 'c', + null, + null, + null, + 'a', + null, + 'c', + 'd', + 'e', + 'f', + 'a', + null, + 'c', + 'd', + 'e', + 'f', + ], + ), + ), ), // 49: fixed_nested_varchar_array - array(3, validity, - struct(9, [true, true, true, true, true, true, false, false, false], [ - data(4, [false, true, false, true, false, true, false, false, false], [null, 42, null, 42, null, 42, null, null, null]), - data(16, [false, true, false, true, false, true, false, false, false], [null, '🦆🦆🦆🦆🦆🦆', null, '🦆🦆🦆🦆🦆🦆', null, '🦆🦆🦆🦆🦆🦆', null, null, null]), - ]) + array( + 3, + validity, + struct( + 9, + [true, true, true, true, true, true, false, false, false], + [ + data( + 4, + [ + false, + true, + false, + true, 
+ false, + true, + false, + false, + false, + ], + [null, 42, null, 42, null, 42, null, null, null], + ), + data( + 16, + [ + false, + true, + false, + true, + false, + true, + false, + false, + false, + ], + [ + null, + '🦆🦆🦆🦆🦆🦆', + null, + '🦆🦆🦆🦆🦆🦆', + null, + '🦆🦆🦆🦆🦆🦆', + null, + null, + null, + ], + ), + ], + ), ), // 50: fixed_struct_array struct(3, validity, [ - array(2, [true, true], data(4, [false, true, true, true, true, true], [null, 2, 3, 4, 5, 6])), - array(2, [true, true], data(16, [true, false, true, true, true, true], ['a', null, 'c', 'd', 'e', 'f'])), + array( + 2, + [true, true], + data( + 4, + [false, true, true, true, true, true], + [null, 2, 3, 4, 5, 6], + ), + ), + array( + 2, + [true, true], + data( + 16, + [true, false, true, true, true, true], + ['a', null, 'c', 'd', 'e', 'f'], + ), + ), ]), // 51: struct_of_fixed_array - array(3, validity, - list([true, true, true, true, true, true, false, false, false], [[0n, 0n], [0n, 5n], [5n, 0n], [5n, 5n], [10n, 0n], [10n, 5n], null, null, null], 15, - data(4, [true, true, false, false, true, true, true, false, false, true, true, true, false, false, true], - [42, 999, null, null, -42, 42, 999, null, null, -42, 42, 999, null, null, -42]) - ) + array( + 3, + validity, + list( + [true, true, true, true, true, true, false, false, false], + [ + [0n, 0n], + [0n, 5n], + [5n, 0n], + [5n, 5n], + [10n, 0n], + [10n, 5n], + null, + null, + null, + ], + 15, + data( + 4, + [ + true, + true, + false, + false, + true, + true, + true, + false, + false, + true, + true, + true, + false, + false, + true, + ], + [ + 42, + 999, + null, + null, + -42, + 42, + 999, + null, + null, + -42, + 42, + 999, + null, + null, + -42, + ], + ), + ), ), // 52: fixed_array_of_int_list - list(validity, [[0n, 3n], [3n, 3n]], 6, - array(6, [true, true, true, true, true, true], - data(4, [false, true, true, true, true, true, false, true, true, true, true, true, false, true, true, true, true, true], - [null, 2, 3, 4, 5, 6, null, 2, 3, 4, 5, 6, null, 2, 3, 4, 5, 6]) - ) + list( + validity, + [ + [0n, 3n], + [3n, 3n], + ], + 6, + array( + 6, + [true, true, true, true, true, true], + data( + 4, + [ + false, + true, + true, + true, + true, + true, + false, + true, + true, + true, + true, + true, + false, + true, + true, + true, + true, + true, + ], + [ + null, + 2, + 3, + 4, + 5, + 6, + null, + 2, + 3, + 4, + 5, + 6, + null, + 2, + 3, + 4, + 5, + 6, + ], + ), + ), ), // 53: list_of_fixed_int_array ], }, @@ -264,30 +785,30 @@ suite('query', () => { }); test('create and insert', async () => { await withConnection(async (connection) => { - const createResult = await duckdb.query(connection, 'create table test_create_and_insert(i integer)'); + const createResult = await duckdb.query( + connection, + 'create table test_create_and_insert(i integer)', + ); await expectResult(createResult, { statementType: duckdb.StatementType.CREATE, resultType: duckdb.ResultType.NOTHING, chunkCount: 0, rowCount: 0, - columns: [ - { name: 'Count', logicalType: BIGINT }, - ], + columns: [{ name: 'Count', logicalType: BIGINT }], chunks: [], }); - const insertResult = await duckdb.query(connection, 'insert into test_create_and_insert from range(17)'); + const insertResult = await duckdb.query( + connection, + 'insert into test_create_and_insert from range(17)', + ); await expectResult(insertResult, { statementType: duckdb.StatementType.INSERT, resultType: duckdb.ResultType.CHANGED_ROWS, chunkCount: 1, rowCount: 1, rowsChanged: 17, - columns: [ - { name: 'Count', logicalType: BIGINT }, - ], - chunks: [ 
- { rowCount: 1, vectors: [data(8, [true], [17n])] }, - ], + columns: [{ name: 'Count', logicalType: BIGINT }], + chunks: [{ rowCount: 1, vectors: [data(8, [true], [17n])] }], }); }); }); diff --git a/bindings/test/scalar_functions.test.ts b/bindings/test/scalar_functions.test.ts index 3290f4c9..deca3e78 100644 --- a/bindings/test/scalar_functions.test.ts +++ b/bindings/test/scalar_functions.test.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; import { data } from './utils/expectedVectors'; import { expectResult } from './utils/expectResult'; @@ -31,7 +31,7 @@ suite('scalar functions', () => { for (let i = 0; i < rowCount; i++) { duckdb.vector_assign_string_element(output, i, `output_${i}`); } - } + }, ); duckdb.register_scalar_function(connection, scalar_function); duckdb.destroy_scalar_function_sync(scalar_function); @@ -62,12 +62,14 @@ suite('scalar functions', () => { duckdb.vector_assign_string_element( output, i, - `output_${i}_${JSON.stringify(extra_info)}` + `output_${i}_${JSON.stringify(extra_info)}`, ); } - } + }, ); - duckdb.scalar_function_set_extra_info(scalar_function, { 'my_extra_info_key': 'my_extra_info_value' }); + duckdb.scalar_function_set_extra_info(scalar_function, { + my_extra_info_key: 'my_extra_info_value', + }); duckdb.register_scalar_function(connection, scalar_function); duckdb.destroy_scalar_function_sync(scalar_function); @@ -85,7 +87,7 @@ suite('scalar functions', () => { data( 16, [true], - ['output_0_{"my_extra_info_key":"my_extra_info_value"}'] + ['output_0_{"my_extra_info_key":"my_extra_info_value"}'], ), ], }, @@ -103,13 +105,13 @@ suite('scalar functions', () => { scalar_function, (_info, _input, _output) => { throw new Error('my_error'); - } + }, ); duckdb.register_scalar_function(connection, scalar_function); duckdb.destroy_scalar_function_sync(scalar_function); await expect( - duckdb.query(connection, 'select my_func()') + duckdb.query(connection, 'select my_func()'), ).rejects.toThrow('Invalid Input Error: my_error'); }); }); @@ -139,18 +141,18 @@ suite('scalar functions', () => { i, `output_${i}_${dv0.getInt32(i * 4, true)}_${dv1.getUint32( i * 16, - true - )}` + true, + )}`, ); } - } + }, ); duckdb.register_scalar_function(connection, scalar_function); duckdb.destroy_scalar_function_sync(scalar_function); const result = await duckdb.query( connection, - "select my_func(42, 'duck') as my_func_result from range(3)" + "select my_func(42, 'duck') as my_func_result from range(3)", ); await expectResult(result, { chunkCount: 1, @@ -168,7 +170,7 @@ suite('scalar functions', () => { data( 16, [true, true, true], - ['output_0_42_4', 'output_1_42_4', 'output_2_42_4'] + ['output_0_42_4', 'output_1_42_4', 'output_2_42_4'], ), ], }, @@ -202,17 +204,17 @@ suite('scalar functions', () => { duckdb.vector_assign_string_element( output, r, - `output_${r}_${params.join('_')}` + `output_${r}_${params.join('_')}`, ); } - } + }, ); duckdb.register_scalar_function(connection, scalar_function); duckdb.destroy_scalar_function_sync(scalar_function); const result = await duckdb.query( connection, - 'select my_func(11, 13, 17) as my_func_result from range(3)' + 'select my_func(11, 13, 17) as my_func_result from range(3)', ); await expectResult(result, { chunkCount: 1, @@ -230,7 +232,7 @@ suite('scalar functions', () => { data( 16, [true, true, true], - ['output_0_11_13_17', 'output_1_11_13_17', 'output_2_11_13_17'] + ['output_0_11_13_17', 'output_1_11_13_17', 
'output_2_11_13_17'], ), ], }, @@ -255,10 +257,10 @@ suite('scalar functions', () => { duckdb.vector_assign_string_element( output, i, - `output_is_not_null` + `output_is_not_null`, ); } - } + }, ); duckdb.register_scalar_function(connection, scalar_function); duckdb.destroy_scalar_function_sync(scalar_function); diff --git a/bindings/test/utils/ExpectedLogicalType.ts b/bindings/test/utils/ExpectedLogicalType.ts index 87ab95ef..079e20f6 100644 --- a/bindings/test/utils/ExpectedLogicalType.ts +++ b/bindings/test/utils/ExpectedLogicalType.ts @@ -1,7 +1,8 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; export interface ExpectedSimpleLogicalType { - typeId: Exclude 0) { - for (let chunkIndex = 0; chunkIndex < expectedResult.chunkCount; chunkIndex++) { + for ( + let chunkIndex = 0; + chunkIndex < expectedResult.chunkCount; + chunkIndex++ + ) { const chunk = duckdb.result_get_chunk(result, chunkIndex); - expectChunk(chunk, expectedResult.chunks[chunkIndex], expectedResult.columns); + expectChunk( + chunk, + expectedResult.chunks[chunkIndex], + expectedResult.columns, + ); } } for (const expectedChunk of expectedResult.chunks) { diff --git a/bindings/test/utils/expectValidity.ts b/bindings/test/utils/expectValidity.ts index 41e1e459..20b1bada 100644 --- a/bindings/test/utils/expectValidity.ts +++ b/bindings/test/utils/expectValidity.ts @@ -1,8 +1,19 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect } from 'vitest'; import { isValid } from './isValid'; -export function expectValidity(validity_bytes: Uint8Array | null, validity: BigUint64Array | null, bit: number, expected: boolean, vectorName: string) { - expect(duckdb.validity_row_is_valid(validity_bytes, bit), `${vectorName} validity_bytes_bit[${bit}]`).toBe(expected); - expect(isValid(validity, bit), `${vectorName} validity_bit[${bit}]`).toBe(expected); +export function expectValidity( + validity_bytes: Uint8Array | null, + validity: BigUint64Array | null, + bit: number, + expected: boolean, + vectorName: string, +) { + expect( + duckdb.validity_row_is_valid(validity_bytes, bit), + `${vectorName} validity_bytes_bit[${bit}]`, + ).toBe(expected); + expect(isValid(validity, bit), `${vectorName} validity_bit[${bit}]`).toBe( + expected, + ); } diff --git a/bindings/test/utils/expectVector.ts b/bindings/test/utils/expectVector.ts index ef62b1e4..8ed883b2 100644 --- a/bindings/test/utils/expectVector.ts +++ b/bindings/test/utils/expectVector.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { expect } from 'vitest'; import { ExpectedLogicalType } from './ExpectedLogicalType'; import { UTINYINT } from './expectedLogicalTypes'; @@ -14,10 +14,20 @@ import { import { expectValidity } from './expectValidity'; import { getListEntry, getValue } from './getValue'; -export function expectVector(vector: duckdb.Vector, expectedVector: ExpectedVector, expectedLogicalType: ExpectedLogicalType, vectorName: string) { +export function expectVector( + vector: duckdb.Vector, + expectedVector: ExpectedVector, + expectedLogicalType: ExpectedLogicalType, + vectorName: string, +) { switch (expectedVector.kind) { case 'array': - expectArrayVector(vector, expectedVector, expectedLogicalType, vectorName); + expectArrayVector( + vector, + expectedVector, + expectedLogicalType, + vectorName, + ); break; case 'data': expectDataVector(vector, expectedVector, expectedLogicalType, vectorName); @@ 
-29,31 +39,57 @@ export function expectVector(vector: duckdb.Vector, expectedVector: ExpectedVect expectMapVector(vector, expectedVector, expectedLogicalType, vectorName); break; case 'struct': - expectStructVector(vector, expectedVector, expectedLogicalType, vectorName); + expectStructVector( + vector, + expectedVector, + expectedLogicalType, + vectorName, + ); break; case 'union': - expectUnionVector(vector, expectedVector, expectedLogicalType, vectorName); + expectUnionVector( + vector, + expectedVector, + expectedLogicalType, + vectorName, + ); break; } } -function getVectorValidity(vector: duckdb.Vector, itemCount: number): { validity: BigUint64Array | null, validityBytes: Uint8Array | null } { +function getVectorValidity( + vector: duckdb.Vector, + itemCount: number, +): { validity: BigUint64Array | null; validityBytes: Uint8Array | null } { const validityUInt64Count = Math.ceil(itemCount / 64); const validityByteCount = validityUInt64Count * 8; const validityBytes = duckdb.vector_get_validity(vector, validityByteCount); if (!validityBytes) { return { validity: null, validityBytes: null }; } - const validity = new BigUint64Array(validityBytes.buffer, 0, validityUInt64Count); + const validity = new BigUint64Array( + validityBytes.buffer, + 0, + validityUInt64Count, + ); return { validity, validityBytes }; } -function getVectorData(vector: duckdb.Vector, itemCount: number, itemBytes: number) { +function getVectorData( + vector: duckdb.Vector, + itemCount: number, + itemBytes: number, +) { const bytes = duckdb.vector_get_data(vector, itemCount * itemBytes); return new DataView(bytes.buffer); } -function expectArrayVector(vector: duckdb.Vector, expectedVector: ExpectedArrayVector, expectedLogicalType: ExpectedLogicalType, vectorName: string) { +function expectArrayVector( + vector: duckdb.Vector, + expectedVector: ExpectedArrayVector, + expectedLogicalType: ExpectedLogicalType, + vectorName: string, +) { expect(expectedLogicalType.typeId).toBe(duckdb.Type.ARRAY); if (expectedLogicalType.typeId !== duckdb.Type.ARRAY) { return; @@ -62,24 +98,54 @@ function expectArrayVector(vector: duckdb.Vector, expectedVector: ExpectedArrayV const itemCount = expectedVector.itemCount; const { validity, validityBytes } = getVectorValidity(vector, itemCount); for (let row = 0; row < itemCount; row++) { - expectValidity(validityBytes, validity, row, expectedVector.validity ? expectedVector.validity[row] : true, `${vectorName} row[${row}]`); + expectValidity( + validityBytes, + validity, + row, + expectedVector.validity ? expectedVector.validity[row] : true, + `${vectorName} row[${row}]`, + ); } const childVector = duckdb.array_vector_get_child(vector); - expectVector(childVector, expectedVector.child, expectedLogicalType.valueType, `${vectorName} array_child`); + expectVector( + childVector, + expectedVector.child, + expectedLogicalType.valueType, + `${vectorName} array_child`, + ); } -function expectDataVector(vector: duckdb.Vector, expectedVector: ExpectedDataVector, expectedLogicalType: ExpectedLogicalType, vectorName: string) { +function expectDataVector( + vector: duckdb.Vector, + expectedVector: ExpectedDataVector, + expectedLogicalType: ExpectedLogicalType, + vectorName: string, +) { const itemCount = expectedVector.values.length; const { validity, validityBytes } = getVectorValidity(vector, itemCount); const dv = getVectorData(vector, itemCount, expectedVector.itemBytes); for (let row = 0; row < itemCount; row++) { - expectValidity(validityBytes, validity, row, expectedVector.validity ? 
expectedVector.validity[row] : true, `${vectorName} row[${row}]`); - expect(getValue(expectedLogicalType, validity, dv, row), `${vectorName} row[${row}]`).toStrictEqual(expectedVector.values[row]); + expectValidity( + validityBytes, + validity, + row, + expectedVector.validity ? expectedVector.validity[row] : true, + `${vectorName} row[${row}]`, + ); + expect( + getValue(expectedLogicalType, validity, dv, row), + `${vectorName} row[${row}]`, + ).toStrictEqual(expectedVector.values[row]); } } -function expectListVector(vector: duckdb.Vector, expectedVector: ExpectedListVector, expectedLogicalType: ExpectedLogicalType, vectorName: string) { +function expectListVector( + vector: duckdb.Vector, + expectedVector: ExpectedListVector, + expectedLogicalType: ExpectedLogicalType, + vectorName: string, +) { expect(expectedLogicalType.typeId).toBe(duckdb.Type.LIST); if (expectedLogicalType.typeId !== duckdb.Type.LIST) { return; @@ -89,17 +155,35 @@ function expectListVector(vector: duckdb.Vector, expectedVector: ExpectedListVec const { validity, validityBytes } = getVectorValidity(vector, itemCount); const entriesDV = getVectorData(vector, itemCount, 16); for (let row = 0; row < itemCount; row++) { - expectValidity(validityBytes, validity, row, expectedVector.validity ? expectedVector.validity[row] : true, `${vectorName} row[${row}]`); - expect(getListEntry(validity, entriesDV, row)).toStrictEqual(expectedVector.entries[row]); + expectValidity( + validityBytes, + validity, + row, + expectedVector.validity ? expectedVector.validity[row] : true, + `${vectorName} row[${row}]`, + ); + expect(getListEntry(validity, entriesDV, row)).toStrictEqual( + expectedVector.entries[row], + ); } const childItemCount = duckdb.list_vector_get_size(vector); const childVector = duckdb.list_vector_get_child(vector); expect(childItemCount).toBe(expectedVector.childItemCount); - expectVector(childVector, expectedVector.child, expectedLogicalType.valueType, `${vectorName} list_child`); + expectVector( + childVector, + expectedVector.child, + expectedLogicalType.valueType, + `${vectorName} list_child`, + ); } -function expectMapVector(vector: duckdb.Vector, expectedVector: ExpectedMapVector, expectedLogicalType: ExpectedLogicalType, vectorName: string) { +function expectMapVector( + vector: duckdb.Vector, + expectedVector: ExpectedMapVector, + expectedLogicalType: ExpectedLogicalType, + vectorName: string, +) { expect(expectedLogicalType.typeId).toBe(duckdb.Type.MAP); if (expectedLogicalType.typeId !== duckdb.Type.MAP) { return; @@ -109,8 +193,16 @@ function expectMapVector(vector: duckdb.Vector, expectedVector: ExpectedMapVecto const { validity, validityBytes } = getVectorValidity(vector, itemCount); const entriesDV = getVectorData(vector, itemCount, 16); for (let row = 0; row < itemCount; row++) { - expectValidity(validityBytes, validity, row, expectedVector.validity ? expectedVector.validity[row] : true, `${vectorName} row[${row}]`); - expect(getListEntry(validity, entriesDV, row)).toStrictEqual(expectedVector.entries[row]); + expectValidity( + validityBytes, + validity, + row, + expectedVector.validity ? 
expectedVector.validity[row] : true, + `${vectorName} row[${row}]`, + ); + expect(getListEntry(validity, entriesDV, row)).toStrictEqual( + expectedVector.entries[row], + ); } const childItemCount = duckdb.list_vector_get_size(vector); @@ -118,11 +210,26 @@ function expectMapVector(vector: duckdb.Vector, expectedVector: ExpectedMapVecto expect(childItemCount).toBe(expectedVector.childItemCount); const keysVector = duckdb.struct_vector_get_child(childVector, 0); const valuesVector = duckdb.struct_vector_get_child(childVector, 1); - expectVector(keysVector, expectedVector.keys, expectedLogicalType.keyType, `${vectorName} map_keys`); - expectVector(valuesVector, expectedVector.values, expectedLogicalType.valueType, `${vectorName} map_values`); + expectVector( + keysVector, + expectedVector.keys, + expectedLogicalType.keyType, + `${vectorName} map_keys`, + ); + expectVector( + valuesVector, + expectedVector.values, + expectedLogicalType.valueType, + `${vectorName} map_values`, + ); } -function expectStructVector(vector: duckdb.Vector, expectedVector: ExpectedStructVector, expectedLogicalType: ExpectedLogicalType, vectorName: string) { +function expectStructVector( + vector: duckdb.Vector, + expectedVector: ExpectedStructVector, + expectedLogicalType: ExpectedLogicalType, + vectorName: string, +) { expect(expectedLogicalType.typeId).toBe(duckdb.Type.STRUCT); if (expectedLogicalType.typeId !== duckdb.Type.STRUCT) { return; @@ -131,25 +238,51 @@ function expectStructVector(vector: duckdb.Vector, expectedVector: ExpectedStruc const itemCount = expectedVector.itemCount; const { validity, validityBytes } = getVectorValidity(vector, itemCount); for (let row = 0; row < itemCount; row++) { - expectValidity(validityBytes, validity, row, expectedVector.validity ? expectedVector.validity[row] : true, `${vectorName} row[${row}]`); + expectValidity( + validityBytes, + validity, + row, + expectedVector.validity ? 
expectedVector.validity[row] : true, + `${vectorName} row[${row}]`, + ); } for (let i = 0; i < expectedVector.children.length; i++) { const childVector = duckdb.struct_vector_get_child(vector, i); - expectVector(childVector, expectedVector.children[i], expectedLogicalType.entries[i].type, `${vectorName} struct_child[${i}]`); + expectVector( + childVector, + expectedVector.children[i], + expectedLogicalType.entries[i].type, + `${vectorName} struct_child[${i}]`, + ); } } -function expectUnionVector(vector: duckdb.Vector, expectedVector: ExpectedUnionVector, expectedLogicalType: ExpectedLogicalType, vectorName: string) { +function expectUnionVector( + vector: duckdb.Vector, + expectedVector: ExpectedUnionVector, + expectedLogicalType: ExpectedLogicalType, + vectorName: string, +) { expect(expectedLogicalType.typeId).toBe(duckdb.Type.UNION); if (expectedLogicalType.typeId !== duckdb.Type.UNION) { return; } const tagsVector = duckdb.struct_vector_get_child(vector, 0); - expectVector(tagsVector, expectedVector.children[0], UTINYINT, `${vectorName} union_tags`); + expectVector( + tagsVector, + expectedVector.children[0], + UTINYINT, + `${vectorName} union_tags`, + ); for (let i = 1; i < expectedVector.children.length; i++) { const childVector = duckdb.struct_vector_get_child(vector, i); - expectVector(childVector, expectedVector.children[i], expectedLogicalType.alternatives[i-1].type, `${vectorName} union_child[${i}]`); + expectVector( + childVector, + expectedVector.children[i], + expectedLogicalType.alternatives[i - 1].type, + `${vectorName} union_child[${i}]`, + ); } } diff --git a/bindings/test/utils/expectedLogicalTypes.ts b/bindings/test/utils/expectedLogicalTypes.ts index 2f5e2b30..84a77e3f 100644 --- a/bindings/test/utils/expectedLogicalTypes.ts +++ b/bindings/test/utils/expectedLogicalTypes.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import { ExpectedArrayLogicalType, ExpectedDecimalLogicalType, @@ -114,7 +114,7 @@ export const SQLNULL: ExpectedSimpleLogicalType = { export function ARRAY( valueType: ExpectedLogicalType, - size: number + size: number, ): ExpectedArrayLogicalType { return { typeId: duckdb.Type.ARRAY, @@ -126,7 +126,7 @@ export function ARRAY( export function DECIMAL( width: number, scale: number, - internalType: duckdb.Type + internalType: duckdb.Type, ): ExpectedDecimalLogicalType { return { typeId: duckdb.Type.DECIMAL, @@ -138,7 +138,7 @@ export function DECIMAL( export function ENUM( values: string[], - internalType: duckdb.Type + internalType: duckdb.Type, ): ExpectedEnumLogicalType { return { typeId: duckdb.Type.ENUM, @@ -156,7 +156,7 @@ export function LIST(valueType: ExpectedLogicalType): ExpectedListLogicalType { export function MAP( keyType: ExpectedLogicalType, - valueType: ExpectedLogicalType + valueType: ExpectedLogicalType, ): ExpectedMapLogicalType { return { typeId: duckdb.Type.MAP, @@ -167,7 +167,7 @@ export function MAP( export function ENTRY( name: string, - type: ExpectedLogicalType + type: ExpectedLogicalType, ): ExpectedStructEntry { return { name, @@ -186,7 +186,7 @@ export function STRUCT( export function ALT( tag: string, - type: ExpectedLogicalType + type: ExpectedLogicalType, ): ExpectedUnionAlternative { return { tag, diff --git a/bindings/test/utils/expectedVectors.ts b/bindings/test/utils/expectedVectors.ts index 35edce01..00beec1d 100644 --- a/bindings/test/utils/expectedVectors.ts +++ b/bindings/test/utils/expectedVectors.ts @@ -12,7 +12,7 @@ import { export function array( 
itemCount: number, validity: boolean[] | null, - child: ExpectedVector + child: ExpectedVector, ): ExpectedArrayVector { return { kind: 'array', @@ -25,7 +25,7 @@ export function array( export function data( itemBytes: number, validity: boolean[] | null, - values: any[] + values: any[], ): ExpectedDataVector { return { kind: 'data', @@ -39,7 +39,7 @@ export function list( validity: boolean[] | null, entries: (ExpectedListEntry | null)[], childItemCount: number, - child: ExpectedVector + child: ExpectedVector, ): ExpectedListVector { return { kind: 'list', @@ -55,7 +55,7 @@ export function map( entries: (ExpectedListEntry | null)[], childItemCount: number, keys: ExpectedVector, - values: ExpectedVector + values: ExpectedVector, ): ExpectedMapVector { return { kind: 'map', @@ -70,7 +70,7 @@ export function map( export function struct( itemCount: number, validity: boolean[] | null, - children: ExpectedVector[] + children: ExpectedVector[], ): ExpectedStructVector { return { kind: 'struct', diff --git a/bindings/test/utils/getValue.ts b/bindings/test/utils/getValue.ts index a74c71ea..41586795 100644 --- a/bindings/test/utils/getValue.ts +++ b/bindings/test/utils/getValue.ts @@ -1,4 +1,4 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; import os from 'os'; import { ExpectedLogicalType } from './ExpectedLogicalType'; import { isValid } from './isValid'; @@ -54,7 +54,7 @@ function getInt128(dataView: DataView, offset: number): bigint { function getUInt128(dataView: DataView, offset: number): bigint { const lower = getUInt64(dataView, offset); const upper = getUInt64(dataView, offset + 8); - return BigInt.asUintN(64, upper) << BigInt(64) | BigInt.asUintN(64, lower); + return (BigInt.asUintN(64, upper) << BigInt(64)) | BigInt.asUintN(64, lower); } /** @@ -66,7 +66,11 @@ function getStringBytes(dv: DataView, offset: number): Uint8Array { if (lengthInBytes <= 12) { return new Uint8Array(dv.buffer, dv.byteOffset + offset + 4, lengthInBytes); } else { - return duckdb.get_data_from_pointer(dv.buffer as ArrayBuffer, dv.byteOffset + offset + 8, lengthInBytes); + return duckdb.get_data_from_pointer( + dv.buffer as ArrayBuffer, + dv.byteOffset + offset + 8, + lengthInBytes, + ); } } @@ -84,7 +88,12 @@ function getBuffer(dv: DataView, offset: number): Buffer { return Buffer.from(getStringBytes(dv, offset)); } -export function getValue(logicalType: ExpectedLogicalType, validity: BigUint64Array | null, dv: DataView, index: number): any { +export function getValue( + logicalType: ExpectedLogicalType, + validity: BigUint64Array | null, + dv: DataView, + index: number, +): any { if (!isValid(validity, index)) { return null; } @@ -100,7 +109,7 @@ export function getValue(logicalType: ExpectedLogicalType, validity: BigUint64Ar return getInt32(dv, index * 4); case duckdb.Type.BIGINT: return getInt64(dv, index * 8); - + case duckdb.Type.UTINYINT: return getUInt8(dv, index); case duckdb.Type.USMALLINT: @@ -109,12 +118,12 @@ export function getValue(logicalType: ExpectedLogicalType, validity: BigUint64Ar return getUInt32(dv, index * 4); case duckdb.Type.UBIGINT: return getUInt64(dv, index * 8); - + case duckdb.Type.FLOAT: return getFloat32(dv, index * 4); case duckdb.Type.DOUBLE: return getFloat64(dv, index * 8); - + case duckdb.Type.TIMESTAMP: return getInt64(dv, index * 8); case duckdb.Type.DATE: @@ -127,7 +136,7 @@ export function getValue(logicalType: ExpectedLogicalType, validity: BigUint64Ar days: getInt32(dv, index * 16 + 4), micros: getInt64(dv, index * 16 + 8), 
}; - + case duckdb.Type.HUGEINT: return getInt128(dv, index * 16); case duckdb.Type.UHUGEINT: @@ -149,7 +158,11 @@ export function getValue(logicalType: ExpectedLogicalType, validity: BigUint64Ar case duckdb.Type.HUGEINT: return getInt128(dv, index * 16); default: - throw new Error(`unsupported DECIMAL internal type: ${duckdb.Type[logicalType.typeId]}`); + throw new Error( + `unsupported DECIMAL internal type: ${ + duckdb.Type[logicalType.typeId] + }`, + ); } case duckdb.Type.TIMESTAMP_S: @@ -158,7 +171,7 @@ export function getValue(logicalType: ExpectedLogicalType, validity: BigUint64Ar return getInt64(dv, index * 8); case duckdb.Type.TIMESTAMP_NS: return getInt64(dv, index * 8); - + case duckdb.Type.ENUM: switch (logicalType.internalType) { case duckdb.Type.UTINYINT: @@ -168,9 +181,11 @@ export function getValue(logicalType: ExpectedLogicalType, validity: BigUint64Ar case duckdb.Type.UINTEGER: return getUInt32(dv, index * 4); default: - throw new Error(`unsupported ENUM internal type: ${duckdb.Type[logicalType.typeId]}`); + throw new Error( + `unsupported ENUM internal type: ${duckdb.Type[logicalType.typeId]}`, + ); } - + // LIST // STRUCT // MAP @@ -178,9 +193,9 @@ export function getValue(logicalType: ExpectedLogicalType, validity: BigUint64Ar case duckdb.Type.UUID: return getInt128(dv, index * 16); - + // UNION - + case duckdb.Type.BIT: return getBuffer(dv, index * 16); @@ -191,16 +206,22 @@ export function getValue(logicalType: ExpectedLogicalType, validity: BigUint64Ar case duckdb.Type.VARINT: return getBuffer(dv, index * 16); - + case duckdb.Type.SQLNULL: return null; - + default: - throw new Error(`getValue not implemented for type: ${duckdb.Type[logicalType.typeId]}`); + throw new Error( + `getValue not implemented for type: ${duckdb.Type[logicalType.typeId]}`, + ); } } -export function getListEntry(validity: BigUint64Array | null, dv: DataView, index: number): [bigint, bigint] | null { +export function getListEntry( + validity: BigUint64Array | null, + dv: DataView, + index: number, +): [bigint, bigint] | null { if (!isValid(validity, index)) { return null; } diff --git a/bindings/test/utils/withConnection.ts b/bindings/test/utils/withConnection.ts index 3e50b269..d144e7fb 100644 --- a/bindings/test/utils/withConnection.ts +++ b/bindings/test/utils/withConnection.ts @@ -1,6 +1,8 @@ -import duckdb from '@duckdb/node-bindings'; +import duckdb from '@databrainhq/node-bindings'; -export async function withConnection(fn: (connection: duckdb.Connection) => Promise): Promise { +export async function withConnection( + fn: (connection: duckdb.Connection) => Promise, +): Promise { const db = await duckdb.open(); const connection = await duckdb.connect(db); await fn(connection); diff --git a/bindings/test/values.test.ts b/bindings/test/values.test.ts index e62d3240..60ef6a29 100644 --- a/bindings/test/values.test.ts +++ b/bindings/test/values.test.ts @@ -8,7 +8,7 @@ import duckdb, { TimestampNanoseconds, TimestampSeconds, TimeTZ, -} from '@duckdb/node-bindings'; +} from '@databrainhq/node-bindings'; import { expect, suite, test } from 'vitest'; import { expectLogicalType } from './utils/expectLogicalType'; import { @@ -131,7 +131,7 @@ suite('values', () => { const decimal_value = duckdb.create_decimal(input); expectLogicalType( duckdb.get_value_type(decimal_value), - DECIMAL(4, 1, duckdb.Type.SMALLINT) + DECIMAL(4, 1, duckdb.Type.SMALLINT), ); expect(duckdb.get_decimal(decimal_value)).toStrictEqual(input); }); @@ -140,7 +140,7 @@ suite('values', () => { const decimal_value = 
duckdb.create_decimal(input); expectLogicalType( duckdb.get_value_type(decimal_value), - DECIMAL(9, 4, duckdb.Type.INTEGER) + DECIMAL(9, 4, duckdb.Type.INTEGER), ); expect(duckdb.get_decimal(decimal_value)).toStrictEqual(input); }); @@ -149,7 +149,7 @@ suite('values', () => { const decimal_value = duckdb.create_decimal(input); expectLogicalType( duckdb.get_value_type(decimal_value), - DECIMAL(18, 6, duckdb.Type.BIGINT) + DECIMAL(18, 6, duckdb.Type.BIGINT), ); expect(duckdb.get_decimal(decimal_value)).toStrictEqual(input); }); @@ -162,7 +162,7 @@ suite('values', () => { const decimal_value = duckdb.create_decimal(input); expectLogicalType( duckdb.get_value_type(decimal_value), - DECIMAL(38, 10, duckdb.Type.HUGEINT) + DECIMAL(38, 10, duckdb.Type.HUGEINT), ); expect(duckdb.get_decimal(decimal_value)).toStrictEqual(input); }); @@ -263,7 +263,7 @@ suite('values', () => { const varchar_type = duckdb.create_logical_type(duckdb.Type.VARCHAR); const struct_type = duckdb.create_struct_type( [int_type, varchar_type], - ['a', 'b'] + ['a', 'b'], ); const int32_value = duckdb.create_int32(42); const varchar_value = duckdb.create_varchar('duck'); @@ -273,7 +273,7 @@ suite('values', () => { ]); expectLogicalType( duckdb.get_value_type(struct_value), - STRUCT(ENTRY('a', INTEGER), ENTRY('b', VARCHAR)) + STRUCT(ENTRY('a', INTEGER), ENTRY('b', VARCHAR)), ); const struct_child_0 = duckdb.get_struct_child(struct_value, 0); expectLogicalType(duckdb.get_value_type(struct_child_0), INTEGER); @@ -292,7 +292,7 @@ suite('values', () => { const struct_type = duckdb.create_struct_type([any_type], ['a']); const int32_value = duckdb.create_int32(42); expect(() => - duckdb.create_struct_value(struct_type, [int32_value]) + duckdb.create_struct_value(struct_type, [int32_value]), ).toThrowError('Failed to create struct value'); }); test('list', () => { @@ -320,7 +320,7 @@ suite('values', () => { test('any list', () => { const any_type = duckdb.create_logical_type(duckdb.Type.ANY); expect(() => duckdb.create_list_value(any_type, [])).toThrowError( - 'Failed to create list value' + 'Failed to create list value', ); }); test('array', () => { @@ -337,7 +337,7 @@ suite('values', () => { test('any array', () => { const any_type = duckdb.create_logical_type(duckdb.Type.ANY); expect(() => duckdb.create_array_value(any_type, [])).toThrowError( - 'Failed to create array value' + 'Failed to create array value', ); }); test('map', () => { @@ -378,7 +378,7 @@ suite('values', () => { const varchar_type = duckdb.create_logical_type(duckdb.Type.VARCHAR); const map_type = duckdb.create_map_type(any_type, varchar_type); expect(() => duckdb.create_map_value(map_type, [], [])).toThrowError( - 'Failed to create map value' + 'Failed to create map value', ); }); test('map with value type any', () => { @@ -386,7 +386,7 @@ suite('values', () => { const any_type = duckdb.create_logical_type(duckdb.Type.ANY); const map_type = duckdb.create_map_type(int_type, any_type); expect(() => duckdb.create_map_value(map_type, [], [])).toThrowError( - 'Failed to create map value' + 'Failed to create map value', ); }); test('map with different numbers of keys and values', () => { @@ -404,7 +404,7 @@ suite('values', () => { values.push(duckdb.create_varchar(valueStrings[i])); } expect(() => duckdb.create_map_value(map_type, keys, values)).toThrowError( - 'Failed to create map value: must have same number of keys and values' + 'Failed to create map value: must have same number of keys and values', ); }); test('union', () => { @@ -412,18 +412,18 @@ suite('values', () 
=> { const smallint_type = duckdb.create_logical_type(duckdb.Type.SMALLINT); const union_type = duckdb.create_union_type( [varchar_type, smallint_type], - ['name', 'age'] + ['name', 'age'], ); const varchar_value = duckdb.create_varchar('duck'); const union_value_0 = duckdb.create_union_value( union_type, 0, - varchar_value + varchar_value, ); expectLogicalType( duckdb.get_value_type(union_value_0), - UNION(ALT('name', VARCHAR), ALT('age', SMALLINT)) + UNION(ALT('name', VARCHAR), ALT('age', SMALLINT)), ); // TODO: get underlying tag & value of union value // expectLogicalType(duckdb.get_value_type(union_value_0), VARCHAR); @@ -433,11 +433,11 @@ suite('values', () => { const union_value_1 = duckdb.create_union_value( union_type, 1, - smallint_value + smallint_value, ); expectLogicalType( duckdb.get_value_type(union_value_1), - UNION(ALT('name', VARCHAR), ALT('age', SMALLINT)) + UNION(ALT('name', VARCHAR), ALT('age', SMALLINT)), ); // TODO: get underlying tag & value of union value // expectLogicalType(duckdb.get_value_type(union_value_1), SMALLINT); @@ -448,7 +448,7 @@ suite('values', () => { const union_type = duckdb.create_union_type([], []); const varchar_value = duckdb.create_varchar('duck'); expect(() => - duckdb.create_union_value(union_type, 0, varchar_value) + duckdb.create_union_value(union_type, 0, varchar_value), ).toThrowError('Failed to create union value'); }); test('union tag index out of range', () => { @@ -456,12 +456,12 @@ suite('values', () => { const smallint_type = duckdb.create_logical_type(duckdb.Type.SMALLINT); const union_type = duckdb.create_union_type( [varchar_type, smallint_type], - ['name', 'age'] + ['name', 'age'], ); const varchar_value = duckdb.create_varchar('duck'); expect(() => // max valid index is 1 - duckdb.create_union_value(union_type, 2, varchar_value) + duckdb.create_union_value(union_type, 2, varchar_value), ).toThrowError('Failed to create union value'); }); test('union value type mismatch', () => { @@ -469,12 +469,12 @@ suite('values', () => { const smallint_type = duckdb.create_logical_type(duckdb.Type.SMALLINT); const union_type = duckdb.create_union_type( [varchar_type, smallint_type], - ['name', 'age'] + ['name', 'age'], ); const varchar_value = duckdb.create_varchar('duck'); expect(() => // index of varchar value should be 0 - duckdb.create_union_value(union_type, 1, varchar_value) + duckdb.create_union_value(union_type, 1, varchar_value), ).toThrowError('Failed to create union value'); }); test('null', () => { @@ -489,7 +489,7 @@ suite('values', () => { const enum_value = duckdb.create_enum_value(enum_type, 1); expectLogicalType( duckdb.get_value_type(enum_value), - ENUM(enum_members, duckdb.Type.UTINYINT) + ENUM(enum_members, duckdb.Type.UTINYINT), ); expect(duckdb.get_enum_value(enum_value)).toBe(1); }); diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..c149b11c --- /dev/null +++ b/package-lock.json @@ -0,0 +1,6 @@ +{ + "name": "duckdb-node-neo", + "lockfileVersion": 3, + "requires": true, + "packages": {} +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a943ad4b..4568fbb5 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -5,14 +5,13 @@ settings: excludeLinksFromLockfile: false importers: - .: {} api: dependencies: - '@duckdb/node-bindings': + '@databrainhq/node-bindings': specifier: workspace:* - version: link:../bindings/pkgs/@duckdb/node-bindings + version: link:../bindings/pkgs/@databrainhq/node-bindings devDependencies: '@types/node': specifier: ^20.17.10 @@ -27,17 +26,17 @@ importers: 
specifier: ^2.1.8 version: 2.1.8(@types/node@20.17.10) - api/pkgs/@duckdb/node-api: + api/pkgs/@databrainhq/node-api: dependencies: - '@duckdb/node-bindings': + '@databrainhq/node-bindings': specifier: workspace:* - version: link:../../../../bindings/pkgs/@duckdb/node-bindings + version: link:../../../../bindings/pkgs/@databrainhq/node-bindings bindings: dependencies: - '@duckdb/node-bindings': + '@databrainhq/node-bindings': specifier: workspace:* - version: link:pkgs/@duckdb/node-bindings + version: link:pkgs/@databrainhq/node-bindings cross-replace: specifier: ^0.2.0 version: 0.2.0 @@ -57,299 +56,451 @@ importers: specifier: ^2.1.8 version: 2.1.8(@types/node@20.17.10) - bindings/pkgs/@duckdb/node-bindings: + bindings/pkgs/@databrainhq/node-bindings: optionalDependencies: - '@duckdb/node-bindings-darwin-arm64': + '@databrainhq/node-bindings-darwin-arm64': specifier: workspace:* version: link:../node-bindings-darwin-arm64 - '@duckdb/node-bindings-darwin-x64': + '@databrainhq/node-bindings-darwin-x64': specifier: workspace:* version: link:../node-bindings-darwin-x64 - '@duckdb/node-bindings-linux-arm64': + '@databrainhq/node-bindings-linux-arm64': specifier: workspace:* version: link:../node-bindings-linux-arm64 - '@duckdb/node-bindings-linux-x64': + '@databrainhq/node-bindings-linux-x64': specifier: workspace:* version: link:../node-bindings-linux-x64 - '@duckdb/node-bindings-win32-x64': + '@databrainhq/node-bindings-win32-x64': specifier: workspace:* version: link:../node-bindings-win32-x64 - bindings/pkgs/@duckdb/node-bindings-darwin-arm64: {} + bindings/pkgs/@databrainhq/node-bindings-darwin-arm64: {} - bindings/pkgs/@duckdb/node-bindings-darwin-x64: {} + bindings/pkgs/@databrainhq/node-bindings-darwin-x64: {} - bindings/pkgs/@duckdb/node-bindings-linux-arm64: {} + bindings/pkgs/@databrainhq/node-bindings-linux-arm64: {} - bindings/pkgs/@duckdb/node-bindings-linux-x64: {} + bindings/pkgs/@databrainhq/node-bindings-linux-x64: {} - bindings/pkgs/@duckdb/node-bindings-win32-x64: {} + bindings/pkgs/@databrainhq/node-bindings-win32-x64: {} packages: - '@esbuild/aix-ppc64@0.21.5': - resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==, + } + engines: { node: '>=12' } cpu: [ppc64] os: [aix] '@esbuild/android-arm64@0.21.5': - resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==, + } + engines: { node: '>=12' } cpu: [arm64] os: [android] '@esbuild/android-arm@0.21.5': - resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==, + } + engines: { node: '>=12' } cpu: [arm] os: [android] '@esbuild/android-x64@0.21.5': - resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==, + } + engines: { node: '>=12' } cpu: [x64] os: [android] 
'@esbuild/darwin-arm64@0.21.5': - resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==, + } + engines: { node: '>=12' } cpu: [arm64] os: [darwin] '@esbuild/darwin-x64@0.21.5': - resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==, + } + engines: { node: '>=12' } cpu: [x64] os: [darwin] '@esbuild/freebsd-arm64@0.21.5': - resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==, + } + engines: { node: '>=12' } cpu: [arm64] os: [freebsd] '@esbuild/freebsd-x64@0.21.5': - resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==, + } + engines: { node: '>=12' } cpu: [x64] os: [freebsd] '@esbuild/linux-arm64@0.21.5': - resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==, + } + engines: { node: '>=12' } cpu: [arm64] os: [linux] '@esbuild/linux-arm@0.21.5': - resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==, + } + engines: { node: '>=12' } cpu: [arm] os: [linux] '@esbuild/linux-ia32@0.21.5': - resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==, + } + engines: { node: '>=12' } cpu: [ia32] os: [linux] '@esbuild/linux-loong64@0.21.5': - resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==, + } + engines: { node: '>=12' } cpu: [loong64] os: [linux] '@esbuild/linux-mips64el@0.21.5': - resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==, + } + engines: { node: '>=12' } cpu: [mips64el] os: [linux] '@esbuild/linux-ppc64@0.21.5': - resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==} - engines: {node: '>=12'} + resolution: + { + integrity: 
sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==, + } + engines: { node: '>=12' } cpu: [ppc64] os: [linux] '@esbuild/linux-riscv64@0.21.5': - resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==, + } + engines: { node: '>=12' } cpu: [riscv64] os: [linux] '@esbuild/linux-s390x@0.21.5': - resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==, + } + engines: { node: '>=12' } cpu: [s390x] os: [linux] '@esbuild/linux-x64@0.21.5': - resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==, + } + engines: { node: '>=12' } cpu: [x64] os: [linux] '@esbuild/netbsd-x64@0.21.5': - resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==, + } + engines: { node: '>=12' } cpu: [x64] os: [netbsd] '@esbuild/openbsd-x64@0.21.5': - resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==, + } + engines: { node: '>=12' } cpu: [x64] os: [openbsd] '@esbuild/sunos-x64@0.21.5': - resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==, + } + engines: { node: '>=12' } cpu: [x64] os: [sunos] '@esbuild/win32-arm64@0.21.5': - resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==, + } + engines: { node: '>=12' } cpu: [arm64] os: [win32] '@esbuild/win32-ia32@0.21.5': - resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==, + } + engines: { node: '>=12' } cpu: [ia32] os: [win32] '@esbuild/win32-x64@0.21.5': - resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==, + } + engines: { node: '>=12' } cpu: [x64] os: [win32] '@isaacs/cliui@8.0.2': - resolution: {integrity: 
sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==, + } + engines: { node: '>=12' } '@jridgewell/sourcemap-codec@1.5.0': - resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==} + resolution: + { + integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==, + } '@npmcli/agent@2.2.2': - resolution: {integrity: sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==} - engines: {node: ^16.14.0 || >=18.0.0} + resolution: + { + integrity: sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==, + } + engines: { node: ^16.14.0 || >=18.0.0 } '@npmcli/fs@3.1.1': - resolution: {integrity: sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + resolution: + { + integrity: sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==, + } + engines: { node: ^14.17.0 || ^16.13.0 || >=18.0.0 } '@pkgjs/parseargs@0.11.0': - resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} - engines: {node: '>=14'} + resolution: + { + integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==, + } + engines: { node: '>=14' } '@rollup/rollup-android-arm-eabi@4.28.1': - resolution: {integrity: sha512-2aZp8AES04KI2dy3Ss6/MDjXbwBzj+i0GqKtWXgw2/Ma6E4jJvujryO6gJAghIRVz7Vwr9Gtl/8na3nDUKpraQ==} + resolution: + { + integrity: sha512-2aZp8AES04KI2dy3Ss6/MDjXbwBzj+i0GqKtWXgw2/Ma6E4jJvujryO6gJAghIRVz7Vwr9Gtl/8na3nDUKpraQ==, + } cpu: [arm] os: [android] '@rollup/rollup-android-arm64@4.28.1': - resolution: {integrity: sha512-EbkK285O+1YMrg57xVA+Dp0tDBRB93/BZKph9XhMjezf6F4TpYjaUSuPt5J0fZXlSag0LmZAsTmdGGqPp4pQFA==} + resolution: + { + integrity: sha512-EbkK285O+1YMrg57xVA+Dp0tDBRB93/BZKph9XhMjezf6F4TpYjaUSuPt5J0fZXlSag0LmZAsTmdGGqPp4pQFA==, + } cpu: [arm64] os: [android] '@rollup/rollup-darwin-arm64@4.28.1': - resolution: {integrity: sha512-prduvrMKU6NzMq6nxzQw445zXgaDBbMQvmKSJaxpaZ5R1QDM8w+eGxo6Y/jhT/cLoCvnZI42oEqf9KQNYz1fqQ==} + resolution: + { + integrity: sha512-prduvrMKU6NzMq6nxzQw445zXgaDBbMQvmKSJaxpaZ5R1QDM8w+eGxo6Y/jhT/cLoCvnZI42oEqf9KQNYz1fqQ==, + } cpu: [arm64] os: [darwin] '@rollup/rollup-darwin-x64@4.28.1': - resolution: {integrity: sha512-WsvbOunsUk0wccO/TV4o7IKgloJ942hVFK1CLatwv6TJspcCZb9umQkPdvB7FihmdxgaKR5JyxDjWpCOp4uZlQ==} + resolution: + { + integrity: sha512-WsvbOunsUk0wccO/TV4o7IKgloJ942hVFK1CLatwv6TJspcCZb9umQkPdvB7FihmdxgaKR5JyxDjWpCOp4uZlQ==, + } cpu: [x64] os: [darwin] '@rollup/rollup-freebsd-arm64@4.28.1': - resolution: {integrity: sha512-HTDPdY1caUcU4qK23FeeGxCdJF64cKkqajU0iBnTVxS8F7H/7BewvYoG+va1KPSL63kQ1PGNyiwKOfReavzvNA==} + resolution: + { + integrity: sha512-HTDPdY1caUcU4qK23FeeGxCdJF64cKkqajU0iBnTVxS8F7H/7BewvYoG+va1KPSL63kQ1PGNyiwKOfReavzvNA==, + } cpu: [arm64] os: [freebsd] '@rollup/rollup-freebsd-x64@4.28.1': - resolution: {integrity: sha512-m/uYasxkUevcFTeRSM9TeLyPe2QDuqtjkeoTpP9SW0XxUWfcYrGDMkO/m2tTw+4NMAF9P2fU3Mw4ahNvo7QmsQ==} + resolution: + { + integrity: sha512-m/uYasxkUevcFTeRSM9TeLyPe2QDuqtjkeoTpP9SW0XxUWfcYrGDMkO/m2tTw+4NMAF9P2fU3Mw4ahNvo7QmsQ==, + } cpu: [x64] 
os: [freebsd] '@rollup/rollup-linux-arm-gnueabihf@4.28.1': - resolution: {integrity: sha512-QAg11ZIt6mcmzpNE6JZBpKfJaKkqTm1A9+y9O+frdZJEuhQxiugM05gnCWiANHj4RmbgeVJpTdmKRmH/a+0QbA==} + resolution: + { + integrity: sha512-QAg11ZIt6mcmzpNE6JZBpKfJaKkqTm1A9+y9O+frdZJEuhQxiugM05gnCWiANHj4RmbgeVJpTdmKRmH/a+0QbA==, + } cpu: [arm] os: [linux] '@rollup/rollup-linux-arm-musleabihf@4.28.1': - resolution: {integrity: sha512-dRP9PEBfolq1dmMcFqbEPSd9VlRuVWEGSmbxVEfiq2cs2jlZAl0YNxFzAQS2OrQmsLBLAATDMb3Z6MFv5vOcXg==} + resolution: + { + integrity: sha512-dRP9PEBfolq1dmMcFqbEPSd9VlRuVWEGSmbxVEfiq2cs2jlZAl0YNxFzAQS2OrQmsLBLAATDMb3Z6MFv5vOcXg==, + } cpu: [arm] os: [linux] '@rollup/rollup-linux-arm64-gnu@4.28.1': - resolution: {integrity: sha512-uGr8khxO+CKT4XU8ZUH1TTEUtlktK6Kgtv0+6bIFSeiSlnGJHG1tSFSjm41uQ9sAO/5ULx9mWOz70jYLyv1QkA==} + resolution: + { + integrity: sha512-uGr8khxO+CKT4XU8ZUH1TTEUtlktK6Kgtv0+6bIFSeiSlnGJHG1tSFSjm41uQ9sAO/5ULx9mWOz70jYLyv1QkA==, + } cpu: [arm64] os: [linux] '@rollup/rollup-linux-arm64-musl@4.28.1': - resolution: {integrity: sha512-QF54q8MYGAqMLrX2t7tNpi01nvq5RI59UBNx+3+37zoKX5KViPo/gk2QLhsuqok05sSCRluj0D00LzCwBikb0A==} + resolution: + { + integrity: sha512-QF54q8MYGAqMLrX2t7tNpi01nvq5RI59UBNx+3+37zoKX5KViPo/gk2QLhsuqok05sSCRluj0D00LzCwBikb0A==, + } cpu: [arm64] os: [linux] '@rollup/rollup-linux-loongarch64-gnu@4.28.1': - resolution: {integrity: sha512-vPul4uodvWvLhRco2w0GcyZcdyBfpfDRgNKU+p35AWEbJ/HPs1tOUrkSueVbBS0RQHAf/A+nNtDpvw95PeVKOA==} + resolution: + { + integrity: sha512-vPul4uodvWvLhRco2w0GcyZcdyBfpfDRgNKU+p35AWEbJ/HPs1tOUrkSueVbBS0RQHAf/A+nNtDpvw95PeVKOA==, + } cpu: [loong64] os: [linux] '@rollup/rollup-linux-powerpc64le-gnu@4.28.1': - resolution: {integrity: sha512-pTnTdBuC2+pt1Rmm2SV7JWRqzhYpEILML4PKODqLz+C7Ou2apEV52h19CR7es+u04KlqplggmN9sqZlekg3R1A==} + resolution: + { + integrity: sha512-pTnTdBuC2+pt1Rmm2SV7JWRqzhYpEILML4PKODqLz+C7Ou2apEV52h19CR7es+u04KlqplggmN9sqZlekg3R1A==, + } cpu: [ppc64] os: [linux] '@rollup/rollup-linux-riscv64-gnu@4.28.1': - resolution: {integrity: sha512-vWXy1Nfg7TPBSuAncfInmAI/WZDd5vOklyLJDdIRKABcZWojNDY0NJwruY2AcnCLnRJKSaBgf/GiJfauu8cQZA==} + resolution: + { + integrity: sha512-vWXy1Nfg7TPBSuAncfInmAI/WZDd5vOklyLJDdIRKABcZWojNDY0NJwruY2AcnCLnRJKSaBgf/GiJfauu8cQZA==, + } cpu: [riscv64] os: [linux] '@rollup/rollup-linux-s390x-gnu@4.28.1': - resolution: {integrity: sha512-/yqC2Y53oZjb0yz8PVuGOQQNOTwxcizudunl/tFs1aLvObTclTwZ0JhXF2XcPT/zuaymemCDSuuUPXJJyqeDOg==} + resolution: + { + integrity: sha512-/yqC2Y53oZjb0yz8PVuGOQQNOTwxcizudunl/tFs1aLvObTclTwZ0JhXF2XcPT/zuaymemCDSuuUPXJJyqeDOg==, + } cpu: [s390x] os: [linux] '@rollup/rollup-linux-x64-gnu@4.28.1': - resolution: {integrity: sha512-fzgeABz7rrAlKYB0y2kSEiURrI0691CSL0+KXwKwhxvj92VULEDQLpBYLHpF49MSiPG4sq5CK3qHMnb9tlCjBw==} + resolution: + { + integrity: sha512-fzgeABz7rrAlKYB0y2kSEiURrI0691CSL0+KXwKwhxvj92VULEDQLpBYLHpF49MSiPG4sq5CK3qHMnb9tlCjBw==, + } cpu: [x64] os: [linux] '@rollup/rollup-linux-x64-musl@4.28.1': - resolution: {integrity: sha512-xQTDVzSGiMlSshpJCtudbWyRfLaNiVPXt1WgdWTwWz9n0U12cI2ZVtWe/Jgwyv/6wjL7b66uu61Vg0POWVfz4g==} + resolution: + { + integrity: sha512-xQTDVzSGiMlSshpJCtudbWyRfLaNiVPXt1WgdWTwWz9n0U12cI2ZVtWe/Jgwyv/6wjL7b66uu61Vg0POWVfz4g==, + } cpu: [x64] os: [linux] '@rollup/rollup-win32-arm64-msvc@4.28.1': - resolution: {integrity: sha512-wSXmDRVupJstFP7elGMgv+2HqXelQhuNf+IS4V+nUpNVi/GUiBgDmfwD0UGN3pcAnWsgKG3I52wMOBnk1VHr/A==} + resolution: + { + integrity: sha512-wSXmDRVupJstFP7elGMgv+2HqXelQhuNf+IS4V+nUpNVi/GUiBgDmfwD0UGN3pcAnWsgKG3I52wMOBnk1VHr/A==, + } cpu: [arm64] os: 
[win32] '@rollup/rollup-win32-ia32-msvc@4.28.1': - resolution: {integrity: sha512-ZkyTJ/9vkgrE/Rk9vhMXhf8l9D+eAhbAVbsGsXKy2ohmJaWg0LPQLnIxRdRp/bKyr8tXuPlXhIoGlEB5XpJnGA==} + resolution: + { + integrity: sha512-ZkyTJ/9vkgrE/Rk9vhMXhf8l9D+eAhbAVbsGsXKy2ohmJaWg0LPQLnIxRdRp/bKyr8tXuPlXhIoGlEB5XpJnGA==, + } cpu: [ia32] os: [win32] '@rollup/rollup-win32-x64-msvc@4.28.1': - resolution: {integrity: sha512-ZvK2jBafvttJjoIdKm/Q/Bh7IJ1Ose9IBOwpOXcOvW3ikGTQGmKDgxTC6oCAzW6PynbkKP8+um1du81XJHZ0JA==} + resolution: + { + integrity: sha512-ZvK2jBafvttJjoIdKm/Q/Bh7IJ1Ose9IBOwpOXcOvW3ikGTQGmKDgxTC6oCAzW6PynbkKP8+um1du81XJHZ0JA==, + } cpu: [x64] os: [win32] '@types/estree@1.0.6': - resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==} + resolution: + { + integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==, + } '@types/node@20.17.10': - resolution: {integrity: sha512-/jrvh5h6NXhEauFFexRin69nA0uHJ5gwk4iDivp/DeoEua3uwCUto6PC86IpRITBOs4+6i2I56K5x5b6WYGXHA==} + resolution: + { + integrity: sha512-/jrvh5h6NXhEauFFexRin69nA0uHJ5gwk4iDivp/DeoEua3uwCUto6PC86IpRITBOs4+6i2I56K5x5b6WYGXHA==, + } '@vitest/expect@2.1.8': - resolution: {integrity: sha512-8ytZ/fFHq2g4PJVAtDX57mayemKgDR6X3Oa2Foro+EygiOJHUXhCqBAAKQYYajZpFoIfvBCF1j6R6IYRSIUFuw==} + resolution: + { + integrity: sha512-8ytZ/fFHq2g4PJVAtDX57mayemKgDR6X3Oa2Foro+EygiOJHUXhCqBAAKQYYajZpFoIfvBCF1j6R6IYRSIUFuw==, + } '@vitest/mocker@2.1.8': - resolution: {integrity: sha512-7guJ/47I6uqfttp33mgo6ga5Gr1VnL58rcqYKyShoRK9ebu8T5Rs6HN3s1NABiBeVTdWNrwUMcHH54uXZBN4zA==} + resolution: + { + integrity: sha512-7guJ/47I6uqfttp33mgo6ga5Gr1VnL58rcqYKyShoRK9ebu8T5Rs6HN3s1NABiBeVTdWNrwUMcHH54uXZBN4zA==, + } peerDependencies: msw: ^2.4.9 vite: ^5.0.0 @@ -360,100 +511,178 @@ packages: optional: true '@vitest/pretty-format@2.1.8': - resolution: {integrity: sha512-9HiSZ9zpqNLKlbIDRWOnAWqgcA7xu+8YxXSekhr0Ykab7PAYFkhkwoqVArPOtJhPmYeE2YHgKZlj3CP36z2AJQ==} + resolution: + { + integrity: sha512-9HiSZ9zpqNLKlbIDRWOnAWqgcA7xu+8YxXSekhr0Ykab7PAYFkhkwoqVArPOtJhPmYeE2YHgKZlj3CP36z2AJQ==, + } '@vitest/runner@2.1.8': - resolution: {integrity: sha512-17ub8vQstRnRlIU5k50bG+QOMLHRhYPAna5tw8tYbj+jzjcspnwnwtPtiOlkuKC4+ixDPTuLZiqiWWQ2PSXHVg==} + resolution: + { + integrity: sha512-17ub8vQstRnRlIU5k50bG+QOMLHRhYPAna5tw8tYbj+jzjcspnwnwtPtiOlkuKC4+ixDPTuLZiqiWWQ2PSXHVg==, + } '@vitest/snapshot@2.1.8': - resolution: {integrity: sha512-20T7xRFbmnkfcmgVEz+z3AU/3b0cEzZOt/zmnvZEctg64/QZbSDJEVm9fLnnlSi74KibmRsO9/Qabi+t0vCRPg==} + resolution: + { + integrity: sha512-20T7xRFbmnkfcmgVEz+z3AU/3b0cEzZOt/zmnvZEctg64/QZbSDJEVm9fLnnlSi74KibmRsO9/Qabi+t0vCRPg==, + } '@vitest/spy@2.1.8': - resolution: {integrity: sha512-5swjf2q95gXeYPevtW0BLk6H8+bPlMb4Vw/9Em4hFxDcaOxS+e0LOX4yqNxoHzMR2akEB2xfpnWUzkZokmgWDg==} + resolution: + { + integrity: sha512-5swjf2q95gXeYPevtW0BLk6H8+bPlMb4Vw/9Em4hFxDcaOxS+e0LOX4yqNxoHzMR2akEB2xfpnWUzkZokmgWDg==, + } '@vitest/utils@2.1.8': - resolution: {integrity: sha512-dwSoui6djdwbfFmIgbIjX2ZhIoG7Ex/+xpxyiEgIGzjliY8xGkcpITKTlp6B4MgtGkF2ilvm97cPM96XZaAgcA==} + resolution: + { + integrity: sha512-dwSoui6djdwbfFmIgbIjX2ZhIoG7Ex/+xpxyiEgIGzjliY8xGkcpITKTlp6B4MgtGkF2ilvm97cPM96XZaAgcA==, + } abbrev@2.0.0: - resolution: {integrity: sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + resolution: + { + integrity: 
sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==, + } + engines: { node: ^14.17.0 || ^16.13.0 || >=18.0.0 } agent-base@7.1.3: - resolution: {integrity: sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==} - engines: {node: '>= 14'} + resolution: + { + integrity: sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==, + } + engines: { node: '>= 14' } aggregate-error@3.1.0: - resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} - engines: {node: '>=8'} + resolution: + { + integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==, + } + engines: { node: '>=8' } ansi-regex@5.0.1: - resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} - engines: {node: '>=8'} + resolution: + { + integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==, + } + engines: { node: '>=8' } ansi-regex@6.1.0: - resolution: {integrity: sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==, + } + engines: { node: '>=12' } ansi-styles@4.3.0: - resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} - engines: {node: '>=8'} + resolution: + { + integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==, + } + engines: { node: '>=8' } ansi-styles@6.2.1: - resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==, + } + engines: { node: '>=12' } assertion-error@2.0.1: - resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==, + } + engines: { node: '>=12' } balanced-match@1.0.2: - resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + resolution: + { + integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==, + } brace-expansion@2.0.1: - resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + resolution: + { + integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==, + } cac@6.7.14: - resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} - engines: {node: '>=8'} + resolution: + { + integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==, + } + engines: { node: '>=8' } cacache@18.0.4: - resolution: {integrity: sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==} - engines: {node: ^16.14.0 || >=18.0.0} + resolution: + { + integrity: 
sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==, + } + engines: { node: ^16.14.0 || >=18.0.0 } chai@5.1.2: - resolution: {integrity: sha512-aGtmf24DW6MLHHG5gCx4zaI3uBq3KRtxeVs0DjFH6Z0rDNbsvTxFASFvdj79pxjxZ8/5u3PIiN3IwEIQkiiuPw==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-aGtmf24DW6MLHHG5gCx4zaI3uBq3KRtxeVs0DjFH6Z0rDNbsvTxFASFvdj79pxjxZ8/5u3PIiN3IwEIQkiiuPw==, + } + engines: { node: '>=12' } check-error@2.1.1: - resolution: {integrity: sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==} - engines: {node: '>= 16'} + resolution: + { + integrity: sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==, + } + engines: { node: '>= 16' } chownr@2.0.0: - resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==} - engines: {node: '>=10'} + resolution: + { + integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==, + } + engines: { node: '>=10' } clean-stack@2.2.0: - resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} - engines: {node: '>=6'} + resolution: + { + integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==, + } + engines: { node: '>=6' } color-convert@2.0.1: - resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} - engines: {node: '>=7.0.0'} + resolution: + { + integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==, + } + engines: { node: '>=7.0.0' } color-name@1.1.4: - resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + resolution: + { + integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==, + } cross-replace@0.2.0: - resolution: {integrity: sha512-LF8ZXUDGw5yy8YHPzw4RbOtJJG6NVxpxvEAJZwJgMyLV/wBJonG39vWBDS168nS4ImYP59mPkXj/5ezMOAuzwQ==} + resolution: + { + integrity: sha512-LF8ZXUDGw5yy8YHPzw4RbOtJJG6NVxpxvEAJZwJgMyLV/wBJonG39vWBDS168nS4ImYP59mPkXj/5ezMOAuzwQ==, + } hasBin: true cross-spawn@7.0.6: - resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} - engines: {node: '>= 8'} + resolution: + { + integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==, + } + engines: { node: '>= 8' } debug@4.4.0: - resolution: {integrity: sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==} - engines: {node: '>=6.0'} + resolution: + { + integrity: sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==, + } + engines: { node: '>=6.0' } peerDependencies: supports-color: '*' peerDependenciesMeta: @@ -461,391 +690,694 @@ packages: optional: true deep-eql@5.0.2: - resolution: {integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==} - engines: {node: '>=6'} + resolution: + { + integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==, + } + engines: { node: '>=6' } eastasianwidth@0.2.0: - resolution: {integrity: 
sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + resolution: + { + integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==, + } emoji-regex@8.0.0: - resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + resolution: + { + integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==, + } emoji-regex@9.2.2: - resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + resolution: + { + integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==, + } encoding@0.1.13: - resolution: {integrity: sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==} + resolution: + { + integrity: sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==, + } env-paths@2.2.1: - resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==} - engines: {node: '>=6'} + resolution: + { + integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==, + } + engines: { node: '>=6' } err-code@2.0.3: - resolution: {integrity: sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==} + resolution: + { + integrity: sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==, + } es-module-lexer@1.5.4: - resolution: {integrity: sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw==} + resolution: + { + integrity: sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw==, + } esbuild@0.21.5: - resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==} - engines: {node: '>=12'} + resolution: + { + integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==, + } + engines: { node: '>=12' } hasBin: true estree-walker@3.0.3: - resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} + resolution: + { + integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==, + } expect-type@1.1.0: - resolution: {integrity: sha512-bFi65yM+xZgk+u/KRIpekdSYkTB5W1pEf0Lt8Q8Msh7b+eQ7LXVtIB1Bkm4fvclDEL1b2CZkMhv2mOeF8tMdkA==} - engines: {node: '>=12.0.0'} + resolution: + { + integrity: sha512-bFi65yM+xZgk+u/KRIpekdSYkTB5W1pEf0Lt8Q8Msh7b+eQ7LXVtIB1Bkm4fvclDEL1b2CZkMhv2mOeF8tMdkA==, + } + engines: { node: '>=12.0.0' } exponential-backoff@3.1.1: - resolution: {integrity: sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==} + resolution: + { + integrity: sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==, + } foreground-child@3.3.0: - resolution: {integrity: sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==} - engines: {node: '>=14'} + resolution: + { + integrity: sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==, + } + engines: { node: '>=14' } fs-minipass@2.1.0: - resolution: {integrity: 
sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==}
-    engines: {node: '>= 8'}
+    resolution:
+      {
+        integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==,
+      }
+    engines: { node: '>= 8' }

   fs-minipass@3.0.3:
-    resolution: {integrity: sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==}
-    engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
+    resolution:
+      {
+        integrity: sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==,
+      }
+    engines: { node: ^14.17.0 || ^16.13.0 || >=18.0.0 }

   fsevents@2.3.3:
-    resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
-    engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
+    resolution:
+      {
+        integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==,
+      }
+    engines: { node: ^8.16.0 || ^10.6.0 || >=11.0.0 }
     os: [darwin]

   glob@10.4.5:
-    resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==}
+    resolution:
+      {
+        integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==,
+      }
     hasBin: true

   glob@11.0.0:
-    resolution: {integrity: sha512-9UiX/Bl6J2yaBbxKoEBRm4Cipxgok8kQYcOPEhScPwebu2I0HoQOuYdIO6S3hLuWoZgpDpwQZMzTFxgpkyT76g==}
-    engines: {node: 20 || >=22}
+    resolution:
+      {
+        integrity: sha512-9UiX/Bl6J2yaBbxKoEBRm4Cipxgok8kQYcOPEhScPwebu2I0HoQOuYdIO6S3hLuWoZgpDpwQZMzTFxgpkyT76g==,
+      }
+    engines: { node: 20 || >=22 }
     hasBin: true

   graceful-fs@4.2.11:
-    resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
+    resolution:
+      {
+        integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==,
+      }

   http-cache-semantics@4.1.1:
-    resolution: {integrity: sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==}
+    resolution:
+      {
+        integrity: sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==,
+      }

   http-proxy-agent@7.0.2:
-    resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==}
-    engines: {node: '>= 14'}
+    resolution:
+      {
+        integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==,
+      }
+    engines: { node: '>= 14' }

   https-proxy-agent@7.0.6:
-    resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==}
-    engines: {node: '>= 14'}
+    resolution:
+      {
+        integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==,
+      }
+    engines: { node: '>= 14' }

   iconv-lite@0.6.3:
-    resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==}
-    engines: {node: '>=0.10.0'}
+    resolution:
+      {
+        integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==,
+      }
+    engines: { node: '>=0.10.0' }

   imurmurhash@0.1.4:
-    resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==}
-    engines: {node: '>=0.8.19'}
+    resolution:
+      {
+        integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==,
+      }
+    engines: { node: '>=0.8.19' }

   indent-string@4.0.0:
-    resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==}
-    engines: {node: '>=8'}
+    resolution:
+      {
+        integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==,
+      }
+    engines: { node: '>=8' }

   ip-address@9.0.5:
-    resolution: {integrity: sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==}
-    engines: {node: '>= 12'}
+    resolution:
+      {
+        integrity: sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==,
+      }
+    engines: { node: '>= 12' }

   is-fullwidth-code-point@3.0.0:
-    resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==}
-    engines: {node: '>=8'}
+    resolution:
+      {
+        integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==,
+      }
+    engines: { node: '>=8' }

   is-lambda@1.0.1:
-    resolution: {integrity: sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==}
+    resolution:
+      {
+        integrity: sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==,
+      }

   isexe@2.0.0:
-    resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}
+    resolution:
+      {
+        integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==,
+      }

   isexe@3.1.1:
-    resolution: {integrity: sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==}
-    engines: {node: '>=16'}
+    resolution:
+      {
+        integrity: sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==,
+      }
+    engines: { node: '>=16' }

   jackspeak@3.4.3:
-    resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==}
+    resolution:
+      {
+        integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==,
+      }

   jackspeak@4.0.2:
-    resolution: {integrity: sha512-bZsjR/iRjl1Nk1UkjGpAzLNfQtzuijhn2g+pbZb98HQ1Gk8vM9hfbxeMBP+M2/UUdwj0RqGG3mlvk2MsAqwvEw==}
-    engines: {node: 20 || >=22}
+    resolution:
+      {
+        integrity: sha512-bZsjR/iRjl1Nk1UkjGpAzLNfQtzuijhn2g+pbZb98HQ1Gk8vM9hfbxeMBP+M2/UUdwj0RqGG3mlvk2MsAqwvEw==,
+      }
+    engines: { node: 20 || >=22 }

   jsbn@1.1.0:
-    resolution: {integrity: sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==}
+    resolution:
+      {
+        integrity: sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==,
+      }

   loupe@3.1.2:
-    resolution: {integrity: sha512-23I4pFZHmAemUnz8WZXbYRSKYj801VDaNv9ETuMh7IrMc7VuVVSo+Z9iLE3ni30+U48iDWfi30d3twAXBYmnCg==}
+    resolution:
+      {
+        integrity: sha512-23I4pFZHmAemUnz8WZXbYRSKYj801VDaNv9ETuMh7IrMc7VuVVSo+Z9iLE3ni30+U48iDWfi30d3twAXBYmnCg==,
+      }

   lru-cache@10.4.3:
-    resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==}
+    resolution:
+      {
+        integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==,
+      }

   lru-cache@11.0.2:
-    resolution: {integrity: sha512-123qHRfJBmo2jXDbo/a5YOQrJoHF/GNQTLzQ5+IdK5pWpceK17yRc6ozlWd25FxvGKQbIUs91fDFkXmDHTKcyA==}
-    engines: {node: 20 || >=22}
+    resolution:
+      {
+        integrity: sha512-123qHRfJBmo2jXDbo/a5YOQrJoHF/GNQTLzQ5+IdK5pWpceK17yRc6ozlWd25FxvGKQbIUs91fDFkXmDHTKcyA==,
+      }
+    engines: { node: 20 || >=22 }

   magic-string@0.30.15:
-    resolution: {integrity: sha512-zXeaYRgZ6ldS1RJJUrMrYgNJ4fdwnyI6tVqoiIhyCyv5IVTK9BU8Ic2l253GGETQHxI4HNUwhJ3fjDhKqEoaAw==}
+    resolution:
+      {
+        integrity: sha512-zXeaYRgZ6ldS1RJJUrMrYgNJ4fdwnyI6tVqoiIhyCyv5IVTK9BU8Ic2l253GGETQHxI4HNUwhJ3fjDhKqEoaAw==,
+      }

   make-fetch-happen@13.0.1:
-    resolution: {integrity: sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==}
-    engines: {node: ^16.14.0 || >=18.0.0}
+    resolution:
+      {
+        integrity: sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==,
+      }
+    engines: { node: ^16.14.0 || >=18.0.0 }

   minimatch@10.0.1:
-    resolution: {integrity: sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==}
-    engines: {node: 20 || >=22}
+    resolution:
+      {
+        integrity: sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==,
+      }
+    engines: { node: 20 || >=22 }

   minimatch@9.0.5:
-    resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==}
-    engines: {node: '>=16 || 14 >=14.17'}
+    resolution:
+      {
+        integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==,
+      }
+    engines: { node: '>=16 || 14 >=14.17' }

   minipass-collect@2.0.1:
-    resolution: {integrity: sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==}
-    engines: {node: '>=16 || 14 >=14.17'}
+    resolution:
+      {
+        integrity: sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==,
+      }
+    engines: { node: '>=16 || 14 >=14.17' }

   minipass-fetch@3.0.5:
-    resolution: {integrity: sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==}
-    engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
+    resolution:
+      {
+        integrity: sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==,
+      }
+    engines: { node: ^14.17.0 || ^16.13.0 || >=18.0.0 }

   minipass-flush@1.0.5:
-    resolution: {integrity: sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==}
-    engines: {node: '>= 8'}
+    resolution:
+      {
+        integrity: sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==,
+      }
+    engines: { node: '>= 8' }

   minipass-pipeline@1.2.4:
-    resolution: {integrity: sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==}
-    engines: {node: '>=8'}
+    resolution:
+      {
+        integrity: sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==,
+      }
+    engines: { node: '>=8' }

   minipass-sized@1.0.3:
-    resolution: {integrity: sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==}
-    engines: {node: '>=8'}
+    resolution:
+      {
+        integrity: sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==,
+      }
+    engines: { node: '>=8' }

   minipass@3.3.6:
-    resolution: {integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==}
-    engines: {node: '>=8'}
+    resolution:
+      {
+        integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==,
+      }
+    engines: { node: '>=8' }

   minipass@5.0.0:
-    resolution: {integrity: sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==}
-    engines: {node: '>=8'}
+    resolution:
+      {
+        integrity: sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==,
+      }
+    engines: { node: '>=8' }

   minipass@7.1.2:
-    resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==}
-    engines: {node: '>=16 || 14 >=14.17'}
+    resolution:
+      {
+        integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==,
+      }
+    engines: { node: '>=16 || 14 >=14.17' }

   minizlib@2.1.2:
-    resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==}
-    engines: {node: '>= 8'}
+    resolution:
+      {
+        integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==,
+      }
+    engines: { node: '>= 8' }

   mkdirp@1.0.4:
-    resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==}
-    engines: {node: '>=10'}
+    resolution:
+      {
+        integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==,
+      }
+    engines: { node: '>=10' }
     hasBin: true

   ms@2.1.3:
-    resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
+    resolution:
+      {
+        integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==,
+      }

   nanoid@3.3.8:
-    resolution: {integrity: sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==}
-    engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
+    resolution:
+      {
+        integrity: sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==,
+      }
+    engines: { node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1 }
     hasBin: true

   negotiator@0.6.4:
-    resolution: {integrity: sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==}
-    engines: {node: '>= 0.6'}
+    resolution:
+      {
+        integrity: sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==,
+      }
+    engines: { node: '>= 0.6' }

   node-addon-api@8.3.0:
-    resolution: {integrity: sha512-8VOpLHFrOQlAH+qA0ZzuGRlALRA6/LVh8QJldbrC4DY0hXoMP0l4Acq8TzFC018HztWiRqyCEj2aTWY2UvnJUg==}
-    engines: {node: ^18 || ^20 || >= 21}
+    resolution:
+      {
+        integrity: sha512-8VOpLHFrOQlAH+qA0ZzuGRlALRA6/LVh8QJldbrC4DY0hXoMP0l4Acq8TzFC018HztWiRqyCEj2aTWY2UvnJUg==,
+      }
+    engines: { node: ^18 || ^20 || >= 21 }

   node-gyp@10.3.1:
-    resolution: {integrity: sha512-Pp3nFHBThHzVtNY7U6JfPjvT/DTE8+o/4xKsLQtBoU+j2HLsGlhcfzflAoUreaJbNmYnX+LlLi0qjV8kpyO6xQ==}
-    engines: {node: ^16.14.0 || >=18.0.0}
+    resolution:
+      {
+        integrity: sha512-Pp3nFHBThHzVtNY7U6JfPjvT/DTE8+o/4xKsLQtBoU+j2HLsGlhcfzflAoUreaJbNmYnX+LlLi0qjV8kpyO6xQ==,
+      }
+    engines: { node: ^16.14.0 || >=18.0.0 }
     hasBin: true

   nopt@7.2.1:
-    resolution: {integrity: sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==}
-    engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
+    resolution:
+      {
+        integrity: sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==,
+      }
+    engines: { node: ^14.17.0 || ^16.13.0 || >=18.0.0 }
     hasBin: true

   p-map@4.0.0:
-    resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==}
-    engines: {node: '>=10'}
+    resolution:
+      {
+        integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==,
+      }
+    engines: { node: '>=10' }

   package-json-from-dist@1.0.1:
-    resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==}
+    resolution:
+      {
+        integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==,
+      }

   path-key@3.1.1:
-    resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
-    engines: {node: '>=8'}
+    resolution:
+      {
+        integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==,
+      }
+    engines: { node: '>=8' }

   path-scurry@1.11.1:
-    resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==}
-    engines: {node: '>=16 || 14 >=14.18'}
+    resolution:
+      {
+        integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==,
+      }
+    engines: { node: '>=16 || 14 >=14.18' }

   path-scurry@2.0.0:
-    resolution: {integrity: sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==}
-    engines: {node: 20 || >=22}
+    resolution:
+      {
+        integrity: sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==,
+      }
+    engines: { node: 20 || >=22 }

   pathe@1.1.2:
-    resolution: {integrity: sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==}
+    resolution:
+      {
+        integrity: sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==,
+      }

   pathval@2.0.0:
-    resolution: {integrity: sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==}
-    engines: {node: '>= 14.16'}
+    resolution:
+      {
+        integrity: sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==,
+      }
+    engines: { node: '>= 14.16' }

   picocolors@1.1.1:
-    resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==}
+    resolution:
+      {
+        integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==,
+      }

   postcss@8.4.49:
-    resolution: {integrity: sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==}
-    engines: {node: ^10 || ^12 || >=14}
+    resolution:
+      {
+        integrity: sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==,
+      }
+    engines: { node: ^10 || ^12 || >=14 }

   proc-log@4.2.0:
-    resolution: {integrity: sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==}
-    engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
+    resolution:
+      {
+        integrity: sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==,
+      }
+    engines: { node: ^14.17.0 || ^16.13.0 || >=18.0.0 }

   promise-retry@2.0.1:
-    resolution: {integrity: sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==}
-    engines: {node: '>=10'}
+    resolution:
+      {
+        integrity: sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==,
+      }
+    engines: { node: '>=10' }

   retry@0.12.0:
-    resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==}
-    engines: {node: '>= 4'}
+    resolution:
+      {
+        integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==,
+      }
+    engines: { node: '>= 4' }

   rimraf@5.0.10:
-    resolution: {integrity: sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==}
+    resolution:
+      {
+        integrity: sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==,
+      }
     hasBin: true

   rimraf@6.0.1:
-    resolution: {integrity: sha512-9dkvaxAsk/xNXSJzMgFqqMCuFgt2+KsOFek3TMLfo8NCPfWpBmqwyNn5Y+NX56QUYfCtsyhF3ayiboEoUmJk/A==}
-    engines: {node: 20 || >=22}
+    resolution:
+      {
+        integrity: sha512-9dkvaxAsk/xNXSJzMgFqqMCuFgt2+KsOFek3TMLfo8NCPfWpBmqwyNn5Y+NX56QUYfCtsyhF3ayiboEoUmJk/A==,
+      }
+    engines: { node: 20 || >=22 }
     hasBin: true

   rollup@4.28.1:
-    resolution: {integrity: sha512-61fXYl/qNVinKmGSTHAZ6Yy8I3YIJC/r2m9feHo6SwVAVcLT5MPwOUFe7EuURA/4m0NR8lXG4BBXuo/IZEsjMg==}
-    engines: {node: '>=18.0.0', npm: '>=8.0.0'}
+    resolution:
+      {
+        integrity: sha512-61fXYl/qNVinKmGSTHAZ6Yy8I3YIJC/r2m9feHo6SwVAVcLT5MPwOUFe7EuURA/4m0NR8lXG4BBXuo/IZEsjMg==,
+      }
+    engines: { node: '>=18.0.0', npm: '>=8.0.0' }
     hasBin: true

   safer-buffer@2.1.2:
-    resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
+    resolution:
+      {
+        integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==,
+      }

   semver@7.6.3:
-    resolution: {integrity: sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==}
-    engines: {node: '>=10'}
+    resolution:
+      {
+        integrity: sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==,
+      }
+    engines: { node: '>=10' }
     hasBin: true

   shebang-command@2.0.0:
-    resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
-    engines: {node: '>=8'}
+    resolution:
+      {
+        integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==,
+      }
+    engines: { node: '>=8' }

   shebang-regex@3.0.0:
-    resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
-    engines: {node: '>=8'}
+    resolution:
+      {
+        integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==,
+      }
+    engines: { node: '>=8' }

   siginfo@2.0.0:
-    resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==}
+    resolution:
+      {
+        integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==,
+      }

   signal-exit@4.1.0:
-    resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==}
-    engines: {node: '>=14'}
+    resolution:
+      {
+        integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==,
+      }
+    engines: { node: '>=14' }

   smart-buffer@4.2.0:
-    resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==}
-    engines: {node: '>= 6.0.0', npm: '>= 3.0.0'}
+    resolution:
+      {
+        integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==,
+      }
+    engines: { node: '>= 6.0.0', npm: '>= 3.0.0' }

   socks-proxy-agent@8.0.5:
-    resolution: {integrity: sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==}
-    engines: {node: '>= 14'}
+    resolution:
+      {
+        integrity: sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==,
+      }
+    engines: { node: '>= 14' }

   socks@2.8.3:
-    resolution: {integrity: sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==}
-    engines: {node: '>= 10.0.0', npm: '>= 3.0.0'}
+    resolution:
+      {
+        integrity: sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==,
+      }
+    engines: { node: '>= 10.0.0', npm: '>= 3.0.0' }

   source-map-js@1.2.1:
-    resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==}
-    engines: {node: '>=0.10.0'}
+    resolution:
+      {
+        integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==,
+      }
+    engines: { node: '>=0.10.0' }

   sprintf-js@1.1.3:
-    resolution: {integrity: sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==}
+    resolution:
+      {
+        integrity: sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==,
+      }

   ssri@10.0.6:
-    resolution: {integrity: sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==}
-    engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
+    resolution:
+      {
+        integrity: sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==,
+      }
+    engines: { node: ^14.17.0 || ^16.13.0 || >=18.0.0 }

   stackback@0.0.2:
-    resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==}
+    resolution:
+      {
+        integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==,
+      }

   std-env@3.8.0:
-    resolution: {integrity: sha512-Bc3YwwCB+OzldMxOXJIIvC6cPRWr/LxOp48CdQTOkPyk/t4JWWJbrilwBd7RJzKV8QW7tJkcgAmeuLLJugl5/w==}
+    resolution:
+      {
+        integrity: sha512-Bc3YwwCB+OzldMxOXJIIvC6cPRWr/LxOp48CdQTOkPyk/t4JWWJbrilwBd7RJzKV8QW7tJkcgAmeuLLJugl5/w==,
+      }

   string-width@4.2.3:
-    resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
-    engines: {node: '>=8'}
+    resolution:
+      {
+        integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==,
+      }
+    engines: { node: '>=8' }

   string-width@5.1.2:
-    resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==}
-    engines: {node: '>=12'}
+    resolution:
+      {
+        integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==,
+      }
+    engines: { node: '>=12' }

   strip-ansi@6.0.1:
-    resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
-    engines: {node: '>=8'}
+    resolution:
+      {
+        integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==,
+      }
+    engines: { node: '>=8' }

   strip-ansi@7.1.0:
-    resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==}
-    engines: {node: '>=12'}
+    resolution:
+      {
+        integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==,
+      }
+    engines: { node: '>=12' }

   tar@6.2.1:
-    resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==}
-    engines: {node: '>=10'}
+    resolution:
+      {
+        integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==,
+      }
+    engines: { node: '>=10' }

   tinybench@2.9.0:
-    resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==}
+    resolution:
+      {
+        integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==,
+      }

   tinyexec@0.3.1:
-    resolution: {integrity: sha512-WiCJLEECkO18gwqIp6+hJg0//p23HXp4S+gGtAKu3mI2F2/sXC4FvHvXvB0zJVVaTPhx1/tOwdbRsa1sOBIKqQ==}
+    resolution:
+      {
+        integrity: sha512-WiCJLEECkO18gwqIp6+hJg0//p23HXp4S+gGtAKu3mI2F2/sXC4FvHvXvB0zJVVaTPhx1/tOwdbRsa1sOBIKqQ==,
+      }

   tinypool@1.0.2:
-    resolution: {integrity: sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==}
-    engines: {node: ^18.0.0 || >=20.0.0}
+    resolution:
+      {
+        integrity: sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==,
+      }
+    engines: { node: ^18.0.0 || >=20.0.0 }

   tinyrainbow@1.2.0:
-    resolution: {integrity: sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==}
-    engines: {node: '>=14.0.0'}
+    resolution:
+      {
+        integrity: sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==,
+      }
+    engines: { node: '>=14.0.0' }

   tinyspy@3.0.2:
-    resolution: {integrity: sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==}
-    engines: {node: '>=14.0.0'}
+    resolution:
+      {
+        integrity: sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==,
+      }
+    engines: { node: '>=14.0.0' }

   typescript@5.7.2:
-    resolution: {integrity: sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==}
-    engines: {node: '>=14.17'}
+    resolution:
+      {
+        integrity: sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==,
+      }
+    engines: { node: '>=14.17' }
     hasBin: true

   undici-types@6.19.8:
-    resolution: {integrity: sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==}
+    resolution:
+      {
+        integrity: sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==,
+      }

   unique-filename@3.0.0:
-    resolution: {integrity: sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==}
-    engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
+    resolution:
+      {
+        integrity: sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==,
+      }
+    engines: { node: ^14.17.0 || ^16.13.0 || >=18.0.0 }

   unique-slug@4.0.0:
-    resolution: {integrity: sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==}
-    engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
+    resolution:
+      {
+        integrity: sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==,
+      }
+    engines: { node: ^14.17.0 || ^16.13.0 || >=18.0.0 }

   vite-node@2.1.8:
-    resolution: {integrity: sha512-uPAwSr57kYjAUux+8E2j0q0Fxpn8M9VoyfGiRI8Kfktz9NcYMCenwY5RnZxnF1WTu3TGiYipirIzacLL3VVGFg==}
-    engines: {node: ^18.0.0 || >=20.0.0}
+    resolution:
+      {
+        integrity: sha512-uPAwSr57kYjAUux+8E2j0q0Fxpn8M9VoyfGiRI8Kfktz9NcYMCenwY5RnZxnF1WTu3TGiYipirIzacLL3VVGFg==,
+      }
+    engines: { node: ^18.0.0 || >=20.0.0 }
     hasBin: true

   vite@5.4.11:
-    resolution: {integrity: sha512-c7jFQRklXua0mTzneGW9QVyxFjUgwcihC4bXEtujIo2ouWCe1Ajt/amn2PCxYnhYfd5k09JX3SB7OYWFKYqj8Q==}
-    engines: {node: ^18.0.0 || >=20.0.0}
+    resolution:
+      {
+        integrity: sha512-c7jFQRklXua0mTzneGW9QVyxFjUgwcihC4bXEtujIo2ouWCe1Ajt/amn2PCxYnhYfd5k09JX3SB7OYWFKYqj8Q==,
+      }
+    engines: { node: ^18.0.0 || >=20.0.0 }
     hasBin: true
     peerDependencies:
       '@types/node': ^18.0.0 || >=20.0.0
@@ -875,8 +1407,11 @@ packages:
         optional: true

   vitest@2.1.8:
-    resolution: {integrity: sha512-1vBKTZskHw/aosXqQUlVWWlGUxSJR8YtiyZDJAFeW2kPAeX6S3Sool0mjspO+kXLuxVWlEDDowBAeqeAQefqLQ==}
-    engines: {node: ^18.0.0 || >=20.0.0}
+    resolution:
+      {
+        integrity: sha512-1vBKTZskHw/aosXqQUlVWWlGUxSJR8YtiyZDJAFeW2kPAeX6S3Sool0mjspO+kXLuxVWlEDDowBAeqeAQefqLQ==,
+      }
+    engines: { node: ^18.0.0 || >=20.0.0 }
     hasBin: true
     peerDependencies:
       '@edge-runtime/vm': '*'
@@ -900,33 +1435,50 @@ packages:
         optional: true

   which@2.0.2:
-    resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
-    engines: {node: '>= 8'}
+    resolution:
+      {
+        integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==,
+      }
+    engines: { node: '>= 8' }
     hasBin: true

   which@4.0.0:
-    resolution: {integrity: sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==}
-    engines: {node: ^16.13.0 || >=18.0.0}
+    resolution:
+      {
+        integrity: sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==,
+      }
+    engines: { node: ^16.13.0 || >=18.0.0 }
     hasBin: true

   why-is-node-running@2.3.0:
-    resolution: {integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==}
-    engines: {node: '>=8'}
+    resolution:
+      {
+        integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==,
+      }
+    engines: { node: '>=8' }
     hasBin: true

   wrap-ansi@7.0.0:
-    resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}
-    engines: {node: '>=10'}
+    resolution:
+      {
+        integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==,
+      }
+    engines: { node: '>=10' }

   wrap-ansi@8.1.0:
-    resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==}
-    engines: {node: '>=12'}
+    resolution:
+      {
+        integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==,
+      }
+    engines: { node: '>=12' }

   yallist@4.0.0:
-    resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==}
+    resolution:
+      {
+        integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==,
+      }

 snapshots:

-  '@esbuild/aix-ppc64@0.21.5':
     optional: true
diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml
index 9a0a70cc..f7d8266d 100644
--- a/pnpm-workspace.yaml
+++ b/pnpm-workspace.yaml
@@ -1,4 +1,4 @@
 packages:
   - './*'
-  - 'api/pkgs/@duckdb/*'
-  - 'bindings/pkgs/@duckdb/*'
+  - 'api/pkgs/@databrainhq/*'
+  - 'bindings/pkgs/@databrainhq/*'
diff --git a/tsconfig.library.json b/tsconfig.library.json
index 96ee0cc0..a12b229e 100644
--- a/tsconfig.library.json
+++ b/tsconfig.library.json
@@ -6,6 +6,6 @@
     "esModuleInterop": true,
     "module": "NodeNext",
     "moduleResolution": "NodeNext",
-    "target": "ESNext",
+    "target": "ESNext"
"ESNext" } } diff --git a/tsconfig.test.json b/tsconfig.test.json index 94ee1d11..79bec356 100644 --- a/tsconfig.test.json +++ b/tsconfig.test.json @@ -10,6 +10,6 @@ "resolveJsonModule": true, "skipLibCheck": true, "target": "ESNext", - "useDefineForClassFields": true, + "useDefineForClassFields": true } }