diff --git a/.gitignore b/.gitignore
index a547bf3..97bee46 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,3 +22,5 @@ dist-ssr
 *.njsproj
 *.sln
 *.sw?
+
+coverage
diff --git a/eslint.config.js b/eslint.config.js
index d10f2d4..a17b4e6 100644
--- a/eslint.config.js
+++ b/eslint.config.js
@@ -1,33 +1,51 @@
-import { defineConfig } from "eslint/config";
-import js from "@eslint/js";
-import globals from "globals";
-import reactHooks from "eslint-plugin-react-hooks";
-import reactRefresh from "eslint-plugin-react-refresh";
-import tseslint from "typescript-eslint";
-import { globalIgnores } from "eslint/config";
-import reactX from "eslint-plugin-react-x";
-import reactDom from "eslint-plugin-react-dom";
+import { fileURLToPath } from 'node:url'
+
+import { includeIgnoreFile } from '@eslint/compat'
+import js from '@eslint/js'
+import stylistic from '@stylistic/eslint-plugin'
+import { defineConfig } from 'eslint/config'
+import jsdoc from 'eslint-plugin-jsdoc'
+import reactDom from 'eslint-plugin-react-dom'
+import reactHooks from 'eslint-plugin-react-hooks'
+import reactRefresh from 'eslint-plugin-react-refresh'
+import reactX from 'eslint-plugin-react-x'
+import simpleImportSort from 'eslint-plugin-simple-import-sort'
+import globals from 'globals'
+import tseslint from 'typescript-eslint'
+
+const gitignorePath = fileURLToPath(new URL('.gitignore', import.meta.url))
 
 export default defineConfig([
-  globalIgnores(["dist"]),
+  includeIgnoreFile(gitignorePath, 'Imported .gitignore patterns'),
+  {
+    files: ['**/*.{ts,tsx,js}'],
+    plugins: { js, '@stylistic': stylistic },
+    extends: ['js/recommended'],
+    languageOptions: { globals: { ...globals.browser, ...globals.node } },
+  },
+  tseslint.configs.recommended,
+  stylistic.configs.recommended,
+  jsdoc.configs['flat/recommended-error'],
+
+  reactHooks.configs.flat['recommended-latest'],
+  reactRefresh.configs.vite,
+  reactX.configs['recommended-typescript'],
+  reactDom.configs.recommended,
+  {
+    files: ['**/*.{ts,tsx,js}'],
+    plugins: { 'simple-import-sort': simpleImportSort, jsdoc },
+    rules: {
+      'simple-import-sort/imports': 'error',
+      'simple-import-sort/exports': 'error',
+      'jsdoc/require-yields-type': 'off',
+      'jsdoc/require-param-type': 'off',
+      'jsdoc/require-returns-type': 'off',
+    },
+  },
   {
-    files: ["**/*.{ts,tsx}"],
-    extends: [
-      js.configs.recommended,
-      ...tseslint.configs.strictTypeChecked,
-      ...tseslint.configs.stylisticTypeChecked,
-      reactHooks.configs.flat['recommended-latest'],
-      reactRefresh.configs.vite,
-      reactX.configs["recommended-typescript"],
-      reactDom.configs.recommended,
-    ],
-    languageOptions: {
-      parserOptions: {
-        project: ["./tsconfig.node.json", "./tsconfig.app.json"],
-        tsconfigRootDir: import.meta.dirname,
-      },
-      ecmaVersion: 2020,
-      globals: globals.browser,
+    files: ['tests/*.ts'],
+    rules: {
+      'jsdoc/require-jsdoc': 'off',
     },
   },
-]);
+])
diff --git a/package-lock.json b/package-lock.json
index 73632f0..1161ee2 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,34 +1,40 @@
 {
-  "name": "parquet-table",
+  "name": "csv-table",
   "version": "0.0.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
-      "name": "parquet-table",
+      "name": "csv-table",
       "version": "0.0.0",
       "dependencies": {
-        "@severo_tests/papaparse": "^5.5.4",
-        "hightable": "0.20.1",
-        "react": "^19.2.0",
-        "react-dom": "^19.2.0"
+        "csv-range": "0.0.8",
+        "hightable": "0.22.2",
+        "react": "19.2.0",
+        "react-dom": "19.2.0"
       },
       "devDependencies": {
-        "@eslint/js": "^9.37.0",
-        "@types/papaparse": "^5.3.16",
-        "@types/react": "^19.2.2",
- "@types/react-dom": "^19.2.2", - "@vitejs/plugin-react": "^5.0.4", - "eslint": "^9.37.0", - "eslint-plugin-react-dom": "^2.2.1", - "eslint-plugin-react-hooks": "^7.0.0", - "eslint-plugin-react-refresh": "^0.4.23", - "eslint-plugin-react-x": "^2.2.1", - "globals": "^16.3.0", - "prettier": "^3.6.2", - "typescript": "^5.9.3", - "typescript-eslint": "^8.46.1", - "vite": "^7.1.9" + "@eslint/compat": "2.0.0", + "@eslint/js": "9.39.1", + "@stylistic/eslint-plugin": "5.6.1", + "@types/node": "24.10.1", + "@types/react": "19.2.6", + "@types/react-dom": "19.2.3", + "@vitejs/plugin-react": "5.1.1", + "@vitest/coverage-v8": "4.0.10", + "@vitest/ui": "4.0.10", + "eslint": "9.39.1", + "eslint-plugin-jsdoc": "61.3.0", + "eslint-plugin-react-dom": "2.3.5", + "eslint-plugin-react-hooks": "7.0.1", + "eslint-plugin-react-refresh": "0.4.24", + "eslint-plugin-react-x": "2.3.5", + "eslint-plugin-simple-import-sort": "12.1.1", + "globals": "16.5.0", + "typescript": "5.9.3", + "typescript-eslint": "8.47.0", + "vite": "7.2.2", + "vitest": "4.0.10" } }, "node_modules/@babel/code-frame": { @@ -57,22 +63,22 @@ } }, "node_modules/@babel/core": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.4.tgz", - "integrity": "sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz", + "integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==", "dev": true, "license": "MIT", "peer": true, "dependencies": { "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", + "@babel/generator": "^7.28.5", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.28.3", "@babel/helpers": "^7.28.4", - "@babel/parser": "^7.28.4", + "@babel/parser": "^7.28.5", "@babel/template": "^7.27.2", - "@babel/traverse": "^7.28.4", - "@babel/types": "^7.28.4", + "@babel/traverse": "^7.28.5", + "@babel/types": "^7.28.5", "@jridgewell/remapping": "^2.3.5", "convert-source-map": "^2.0.0", "debug": "^4.1.0", @@ -89,14 +95,14 @@ } }, "node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", + "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", "dev": true, "license": "MIT", "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", + "@babel/parser": "^7.28.5", + "@babel/types": "^7.28.5", "@jridgewell/gen-mapping": "^0.3.12", "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" @@ -185,9 +191,9 @@ } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", - "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", "dev": true, "license": "MIT", "engines": { @@ -219,13 
+225,13 @@ } }, "node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", + "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", "dev": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.28.4" + "@babel/types": "^7.28.5" }, "bin": { "parser": "bin/babel-parser.js" @@ -282,18 +288,18 @@ } }, "node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz", + "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==", "dev": true, "license": "MIT", "dependencies": { "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", + "@babel/generator": "^7.28.5", "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", + "@babel/parser": "^7.28.5", "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", + "@babel/types": "^7.28.5", "debug": "^4.3.1" }, "engines": { @@ -301,19 +307,56 @@ } }, "node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", + "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", "dev": true, "license": "MIT", "dependencies": { "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" + "@babel/helper-validator-identifier": "^7.28.5" }, "engines": { "node": ">=6.9.0" } }, + "node_modules/@bcoe/v8-coverage": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-1.0.2.tgz", + "integrity": "sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/@es-joy/jsdoccomment": { + "version": "0.76.0", + "resolved": "https://registry.npmjs.org/@es-joy/jsdoccomment/-/jsdoccomment-0.76.0.tgz", + "integrity": "sha512-g+RihtzFgGTx2WYCuTHbdOXJeAlGnROws0TeALx9ow/ZmOROOZkVg5wp/B44n0WJgI4SQFP1eWM2iRPlU2Y14w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.8", + "@typescript-eslint/types": "^8.46.0", + "comment-parser": "1.4.1", + "esquery": "^1.6.0", + "jsdoc-type-pratt-parser": "~6.10.0" + }, + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@es-joy/resolve.exports": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@es-joy/resolve.exports/-/resolve.exports-1.2.0.tgz", + "integrity": "sha512-Q9hjxWI5xBM+qW2enxfe8wDKdFWMfd0Z29k5ZJnuBqD/CasY5Zryj09aCA6owbGATWz+39p5uIdaHXpopOcG8g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, "node_modules/@esbuild/aix-ppc64": { "version": "0.25.10", "resolved": 
"https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.10.tgz", @@ -799,16 +842,16 @@ } }, "node_modules/@eslint-react/ast": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@eslint-react/ast/-/ast-2.2.1.tgz", - "integrity": "sha512-bjzSAdtTT/gIU0/olh8Kki57Mnadl5BIjJxcA3wqxcAvNwYAt3yl0CM4LRqVqW4kJneslCNqB5UriRJJPSKhuA==", + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@eslint-react/ast/-/ast-2.3.5.tgz", + "integrity": "sha512-gTnLEdQ82Kcy2Yn8fLe6ks/yQx1kI3OYuWgYNb4D1XSAOYvL1Cj+UIx2/+ew9vMBLMO3NJr90EMPUr0yVOhC7w==", "dev": true, "license": "MIT", "dependencies": { - "@eslint-react/eff": "2.2.1", - "@typescript-eslint/types": "^8.46.0", - "@typescript-eslint/typescript-estree": "^8.46.0", - "@typescript-eslint/utils": "^8.46.0", + "@eslint-react/eff": "2.3.5", + "@typescript-eslint/types": "^8.46.4", + "@typescript-eslint/typescript-estree": "^8.46.4", + "@typescript-eslint/utils": "^8.46.4", "string-ts": "^2.2.1" }, "engines": { @@ -816,30 +859,30 @@ } }, "node_modules/@eslint-react/core": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@eslint-react/core/-/core-2.2.1.tgz", - "integrity": "sha512-slP1G7sReKgijlDx56SDhgkpS6OjoNCEqItuY6Ayo4viIIYMyQ5LkPJ2BJ5xbBxSklyszQ/yP+8UFDK6uzYChQ==", + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@eslint-react/core/-/core-2.3.5.tgz", + "integrity": "sha512-6+/3bMmkxIk4vlMwfxw4lU6y7/Z1cjGURPsooAULitbBS4+s0M0N1UjWaPpDwT4FR0SVVqjOp1yUcI66uQvQKg==", "dev": true, "license": "MIT", "dependencies": { - "@eslint-react/ast": "2.2.1", - "@eslint-react/eff": "2.2.1", - "@eslint-react/shared": "2.2.1", - "@eslint-react/var": "2.2.1", - "@typescript-eslint/scope-manager": "^8.46.0", - "@typescript-eslint/types": "^8.46.0", - "@typescript-eslint/utils": "^8.46.0", + "@eslint-react/ast": "2.3.5", + "@eslint-react/eff": "2.3.5", + "@eslint-react/shared": "2.3.5", + "@eslint-react/var": "2.3.5", + "@typescript-eslint/scope-manager": "^8.46.4", + "@typescript-eslint/types": "^8.46.4", + "@typescript-eslint/utils": "^8.46.4", "birecord": "^0.1.1", - "ts-pattern": "^5.8.0" + "ts-pattern": "^5.9.0" }, "engines": { "node": ">=20.19.0" } }, "node_modules/@eslint-react/eff": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@eslint-react/eff/-/eff-2.2.1.tgz", - "integrity": "sha512-u9IJB9O8Jwo4b40CLIoF1HePsOvFLdbRKdCVUBEv2TPihae/ltYRD45mCI0bHLroYUxevC1nvD/cQRfwJPH0zg==", + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@eslint-react/eff/-/eff-2.3.5.tgz", + "integrity": "sha512-F2bj6v7Q1hgLn+N28pkJyYvBiTaUFh0qOEz3IXUupkqqnu9zGxmh3P7c0l//8AlR2CvRTCmSVBBhem4BhoSczw==", "dev": true, "license": "MIT", "engines": { @@ -847,15 +890,15 @@ } }, "node_modules/@eslint-react/shared": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@eslint-react/shared/-/shared-2.2.1.tgz", - "integrity": "sha512-YxYo4Svw2OzV0XDj4HLJKcmB4vtAyQghSE1ZFKN5i90CIbfp/RYBNK6VVrIbxtXu6tAsJ0lSBZiepHfn3i/l8w==", + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@eslint-react/shared/-/shared-2.3.5.tgz", + "integrity": "sha512-k65W/X2MeiDX21HPwtcPaFHciYVRYrzE+EZ2ok2BVQWcl24GQUEckAfdMzKQ6cS19OgjQm9k0juHjpUcyHj29g==", "dev": true, "license": "MIT", "dependencies": { - "@eslint-react/eff": "2.2.1", - "@typescript-eslint/utils": "^8.46.0", - "ts-pattern": "^5.8.0", + "@eslint-react/eff": "2.3.5", + "@typescript-eslint/utils": "^8.46.4", + "ts-pattern": "^5.9.0", "zod": "^4.1.12" }, "engines": { @@ -863,31 +906,65 @@ } }, "node_modules/@eslint-react/var": { - "version": "2.2.1", - 
"resolved": "https://registry.npmjs.org/@eslint-react/var/-/var-2.2.1.tgz", - "integrity": "sha512-u5o1z01mNE0F+6DG1sDPnIGDbTaI3s0IOJnGCU4FfcsH7DOf96F4aB1szdJfznJBgVCrcBbyhO9oKKlYZoW0hQ==", + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@eslint-react/var/-/var-2.3.5.tgz", + "integrity": "sha512-BDq9o4kUu4h0Lvv29AY+N9LFh69tgICRNDmr5GnRmRFaYZ6/fq+UbO18K47ccb2tj2TI8V6VJFpkPx1fK7lYeQ==", "dev": true, "license": "MIT", "dependencies": { - "@eslint-react/ast": "2.2.1", - "@eslint-react/eff": "2.2.1", - "@typescript-eslint/scope-manager": "^8.46.0", - "@typescript-eslint/types": "^8.46.0", - "@typescript-eslint/utils": "^8.46.0", - "ts-pattern": "^5.8.0" + "@eslint-react/ast": "2.3.5", + "@eslint-react/eff": "2.3.5", + "@typescript-eslint/scope-manager": "^8.46.4", + "@typescript-eslint/types": "^8.46.4", + "@typescript-eslint/utils": "^8.46.4", + "ts-pattern": "^5.9.0" }, "engines": { "node": ">=20.19.0" } }, + "node_modules/@eslint/compat": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@eslint/compat/-/compat-2.0.0.tgz", + "integrity": "sha512-T9AfE1G1uv4wwq94ozgTGio5EUQBqAVe1X9qsQtSNVEYW6j3hvtZVm8Smr4qL1qDPFg+lOB2cL5RxTRMzq4CTA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^1.0.0" + }, + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + }, + "peerDependencies": { + "eslint": "^8.40 || 9" + }, + "peerDependenciesMeta": { + "eslint": { + "optional": true + } + } + }, + "node_modules/@eslint/compat/node_modules/@eslint/core": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-1.0.0.tgz", + "integrity": "sha512-PRfWP+8FOldvbApr6xL7mNCw4cJcSTq4GA7tYbgq15mRb0kWKO/wEB2jr+uwjFH3sZvEZneZyCUGTxsv4Sahyw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + } + }, "node_modules/@eslint/config-array": { - "version": "0.21.0", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz", - "integrity": "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==", + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz", + "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/object-schema": "^2.1.6", + "@eslint/object-schema": "^2.1.7", "debug": "^4.3.1", "minimatch": "^3.1.2" }, @@ -896,22 +973,22 @@ } }, "node_modules/@eslint/config-helpers": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.0.tgz", - "integrity": "sha512-WUFvV4WoIwW8Bv0KeKCIIEgdSiFOsulyN0xrMu+7z43q/hkOLXjvb5u7UC9jDxvRzcrbEmuZBX5yJZz1741jog==", + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz", + "integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/core": "^0.16.0" + "@eslint/core": "^0.17.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, "node_modules/@eslint/core": { - "version": "0.16.0", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.16.0.tgz", - "integrity": "sha512-nmC8/totwobIiFcGkDza3GIKfAw1+hLiYVrh3I1nIomQ8PEr5cxg34jnkmGawul/ep52wGRAcyeDCNtWKSOj4Q==", + "version": "0.17.0", + "resolved": 
"https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz", + "integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -959,9 +1036,9 @@ } }, "node_modules/@eslint/js": { - "version": "9.37.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.37.0.tgz", - "integrity": "sha512-jaS+NJ+hximswBG6pjNX0uEJZkrT0zwpVi3BA3vX22aFGjJjmgSTSmPpZCRKmoBL5VY/M6p0xsSJx7rk7sy5gg==", + "version": "9.39.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.1.tgz", + "integrity": "sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw==", "dev": true, "license": "MIT", "engines": { @@ -972,9 +1049,9 @@ } }, "node_modules/@eslint/object-schema": { - "version": "2.1.6", - "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", - "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz", + "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -982,13 +1059,13 @@ } }, "node_modules/@eslint/plugin-kit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.0.tgz", - "integrity": "sha512-sB5uyeq+dwCWyPi31B2gQlVlo+j5brPlWx4yZBrEaRo/nhdDE8Xke1gsGgtiBdaBTxuTkceLVuVt/pclrasb0A==", + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz", + "integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/core": "^0.16.0", + "@eslint/core": "^0.17.0", "levn": "^0.4.1" }, "engines": { @@ -1135,10 +1212,17 @@ "node": ">= 8" } }, + "node_modules/@polka/url": { + "version": "1.0.0-next.29", + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.29.tgz", + "integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==", + "dev": true, + "license": "MIT" + }, "node_modules/@rolldown/pluginutils": { - "version": "1.0.0-beta.38", - "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.38.tgz", - "integrity": "sha512-N/ICGKleNhA5nc9XXQG/kkKHJ7S55u0x0XUJbbkmdCnFuoRkM1Il12q9q0eX19+M7KKUEPw/daUPIRnxhcxAIw==", + "version": "1.0.0-beta.47", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.47.tgz", + "integrity": "sha512-8QagwMH3kNCuzD8EWL8R2YPW5e4OrHNSAHRFDdmFqEwEaD/KcNKjVoumo+gP2vW5eKB2UPbM6vTYiGZX0ixLnw==", "dev": true, "license": "MIT" }, @@ -1450,12 +1534,60 @@ "win32" ] }, - "node_modules/@severo_tests/papaparse": { - "version": "5.5.4", - "resolved": "https://registry.npmjs.org/@severo_tests/papaparse/-/papaparse-5.5.4.tgz", - "integrity": "sha512-v5qCUqDrI0C14Tp3DSShfBUpeZLtxMERyV++fTBMv5TFCcrXw6sbXEIcMgDF39v4ENsltRH6ZqtlamfPLk7wTQ==", + "node_modules/@sindresorhus/base62": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/base62/-/base62-1.0.0.tgz", + "integrity": "sha512-TeheYy0ILzBEI/CO55CP6zJCSdSWeRtGnHy8U8dWSUH4I68iqTsy7HkMktR4xakThc9jotkPQUXT4ITdbV7cHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@standard-schema/spec": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", + "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", + "dev": true, "license": "MIT" }, + "node_modules/@stylistic/eslint-plugin": { + "version": "5.6.1", + "resolved": "https://registry.npmjs.org/@stylistic/eslint-plugin/-/eslint-plugin-5.6.1.tgz", + "integrity": "sha512-JCs+MqoXfXrRPGbGmho/zGS/jMcn3ieKl/A8YImqib76C8kjgZwq5uUFzc30lJkMvcchuRn6/v8IApLxli3Jyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.9.0", + "@typescript-eslint/types": "^8.47.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "estraverse": "^5.3.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "peerDependencies": { + "eslint": ">=9.0.0" + } + }, + "node_modules/@stylistic/eslint-plugin/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/@types/babel__core": { "version": "7.20.5", "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", @@ -1501,6 +1633,24 @@ "@babel/types": "^7.28.2" } }, + "node_modules/@types/chai": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", + "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*", + "assertion-error": "^2.0.1" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/estree": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", @@ -1516,40 +1666,31 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "24.7.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.7.2.tgz", - "integrity": "sha512-/NbVmcGTP+lj5oa4yiYxxeBjRivKQ5Ns1eSZeB99ExsEQ6rX5XYU1Zy/gGxY/ilqtD4Etx9mKyrPxZRetiahhA==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~7.14.0" - } - }, - "node_modules/@types/papaparse": { - "version": "5.3.16", - "resolved": "https://registry.npmjs.org/@types/papaparse/-/papaparse-5.3.16.tgz", - "integrity": "sha512-T3VuKMC2H0lgsjI9buTB3uuKj3EMD2eap1MOuEQuBQ44EnDx/IkGhU6EwiTf9zG3za4SKlmwKAImdDKdNnCsXg==", + "version": "24.10.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz", + "integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { - "@types/node": "*" + "undici-types": "~7.16.0" } }, "node_modules/@types/react": { - "version": "19.2.2", - "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.2.tgz", - "integrity": 
"sha512-6mDvHUFSjyT2B2yeNx2nUgMxh9LtOWvkhIU3uePn2I2oyNymUAX1NIsdgviM4CH+JSrp2D2hsMvJOkxY+0wNRA==", + "version": "19.2.6", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.6.tgz", + "integrity": "sha512-p/jUvulfgU7oKtj6Xpk8cA2Y1xKTtICGpJYeJXz2YVO2UcvjQgeRMLDGfDeqeRW2Ta+0QNFwcc8X3GH8SxZz6w==", "dev": true, "license": "MIT", "peer": true, "dependencies": { - "csstype": "^3.0.2" + "csstype": "^3.2.2" } }, "node_modules/@types/react-dom": { - "version": "19.2.2", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.2.tgz", - "integrity": "sha512-9KQPoO6mZCi7jcIStSnlOWn2nEF3mNmyr3rIAsGnAbQKYbRLyqmeSc39EVgtxXVia+LMT8j3knZLAZAh+xLmrw==", + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.3.tgz", + "integrity": "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==", "dev": true, "license": "MIT", "peerDependencies": { @@ -1557,17 +1698,17 @@ } }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.46.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.46.1.tgz", - "integrity": "sha512-rUsLh8PXmBjdiPY+Emjz9NX2yHvhS11v0SR6xNJkm5GM1MO9ea/1GoDKlHHZGrOJclL/cZ2i/vRUYVtjRhrHVQ==", + "version": "8.47.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.47.0.tgz", + "integrity": "sha512-fe0rz9WJQ5t2iaLfdbDc9T80GJy0AeO453q8C3YCilnGozvOyCG5t+EZtg7j7D88+c3FipfP/x+wzGnh1xp8ZA==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.46.1", - "@typescript-eslint/type-utils": "8.46.1", - "@typescript-eslint/utils": "8.46.1", - "@typescript-eslint/visitor-keys": "8.46.1", + "@typescript-eslint/scope-manager": "8.47.0", + "@typescript-eslint/type-utils": "8.47.0", + "@typescript-eslint/utils": "8.47.0", + "@typescript-eslint/visitor-keys": "8.47.0", "graphemer": "^1.4.0", "ignore": "^7.0.0", "natural-compare": "^1.4.0", @@ -1581,7 +1722,7 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.46.1", + "@typescript-eslint/parser": "^8.47.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } @@ -1597,17 +1738,17 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.46.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.46.1.tgz", - "integrity": "sha512-6JSSaBZmsKvEkbRUkf7Zj7dru/8ZCrJxAqArcLaVMee5907JdtEbKGsZ7zNiIm/UAkpGUkaSMZEXShnN2D1HZA==", + "version": "8.47.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.47.0.tgz", + "integrity": "sha512-lJi3PfxVmo0AkEY93ecfN+r8SofEqZNGByvHAI3GBLrvt1Cw6H5k1IM02nSzu0RfUafr2EvFSw0wAsZgubNplQ==", "dev": true, "license": "MIT", "peer": true, "dependencies": { - "@typescript-eslint/scope-manager": "8.46.1", - "@typescript-eslint/types": "8.46.1", - "@typescript-eslint/typescript-estree": "8.46.1", - "@typescript-eslint/visitor-keys": "8.46.1", + "@typescript-eslint/scope-manager": "8.47.0", + "@typescript-eslint/types": "8.47.0", + "@typescript-eslint/typescript-estree": "8.47.0", + "@typescript-eslint/visitor-keys": "8.47.0", "debug": "^4.3.4" }, "engines": { @@ -1623,14 +1764,14 @@ } }, "node_modules/@typescript-eslint/project-service": { - "version": "8.46.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.46.1.tgz", - "integrity": 
"sha512-FOIaFVMHzRskXr5J4Jp8lFVV0gz5ngv3RHmn+E4HYxSJ3DgDzU7fVI1/M7Ijh1zf6S7HIoaIOtln1H5y8V+9Zg==", + "version": "8.47.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.47.0.tgz", + "integrity": "sha512-2X4BX8hUeB5JcA1TQJ7GjcgulXQ+5UkNb0DL8gHsHUHdFoiCTJoYLTpib3LtSDPZsRET5ygN4qqIWrHyYIKERA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.46.1", - "@typescript-eslint/types": "^8.46.1", + "@typescript-eslint/tsconfig-utils": "^8.47.0", + "@typescript-eslint/types": "^8.47.0", "debug": "^4.3.4" }, "engines": { @@ -1645,14 +1786,14 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.46.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.46.1.tgz", - "integrity": "sha512-weL9Gg3/5F0pVQKiF8eOXFZp8emqWzZsOJuWRUNtHT+UNV2xSJegmpCNQHy37aEQIbToTq7RHKhWvOsmbM680A==", + "version": "8.47.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.47.0.tgz", + "integrity": "sha512-a0TTJk4HXMkfpFkL9/WaGTNuv7JWfFTQFJd6zS9dVAjKsojmv9HT55xzbEpnZoY+VUb+YXLMp+ihMLz/UlZfDg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.46.1", - "@typescript-eslint/visitor-keys": "8.46.1" + "@typescript-eslint/types": "8.47.0", + "@typescript-eslint/visitor-keys": "8.47.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -1663,9 +1804,9 @@ } }, "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.46.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.46.1.tgz", - "integrity": "sha512-X88+J/CwFvlJB+mK09VFqx5FE4H5cXD+H/Bdza2aEWkSb8hnWIQorNcscRl4IEo1Cz9VI/+/r/jnGWkbWPx54g==", + "version": "8.47.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.47.0.tgz", + "integrity": "sha512-ybUAvjy4ZCL11uryalkKxuT3w3sXJAuWhOoGS3T/Wu+iUu1tGJmk5ytSY8gbdACNARmcYEB0COksD2j6hfGK2g==", "dev": true, "license": "MIT", "engines": { @@ -1680,15 +1821,15 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.46.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.46.1.tgz", - "integrity": "sha512-+BlmiHIiqufBxkVnOtFwjah/vrkF4MtKKvpXrKSPLCkCtAp8H01/VV43sfqA98Od7nJpDcFnkwgyfQbOG0AMvw==", + "version": "8.47.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.47.0.tgz", + "integrity": "sha512-QC9RiCmZ2HmIdCEvhd1aJELBlD93ErziOXXlHEZyuBo3tBiAZieya0HLIxp+DoDWlsQqDawyKuNEhORyku+P8A==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.46.1", - "@typescript-eslint/typescript-estree": "8.46.1", - "@typescript-eslint/utils": "8.46.1", + "@typescript-eslint/types": "8.47.0", + "@typescript-eslint/typescript-estree": "8.47.0", + "@typescript-eslint/utils": "8.47.0", "debug": "^4.3.4", "ts-api-utils": "^2.1.0" }, @@ -1705,9 +1846,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.46.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.46.1.tgz", - "integrity": "sha512-C+soprGBHwWBdkDpbaRC4paGBrkIXxVlNohadL5o0kfhsXqOC6GYH2S/Obmig+I0HTDl8wMaRySwrfrXVP8/pQ==", + "version": "8.47.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.47.0.tgz", + "integrity": "sha512-nHAE6bMKsizhA2uuYZbEbmp5z2UpffNrPEqiKIeN7VsV6UY/roxanWfoRrf6x/k9+Obf+GQdkm0nPU+vnMXo9A==", "dev": true, "license": "MIT", "engines": { @@ -1719,16 +1860,16 @@ 
} }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.46.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.46.1.tgz", - "integrity": "sha512-uIifjT4s8cQKFQ8ZBXXyoUODtRoAd7F7+G8MKmtzj17+1UbdzFl52AzRyZRyKqPHhgzvXunnSckVu36flGy8cg==", + "version": "8.47.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.47.0.tgz", + "integrity": "sha512-k6ti9UepJf5NpzCjH31hQNLHQWupTRPhZ+KFF8WtTuTpy7uHPfeg2NM7cP27aCGajoEplxJDFVCEm9TGPYyiVg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/project-service": "8.46.1", - "@typescript-eslint/tsconfig-utils": "8.46.1", - "@typescript-eslint/types": "8.46.1", - "@typescript-eslint/visitor-keys": "8.46.1", + "@typescript-eslint/project-service": "8.47.0", + "@typescript-eslint/tsconfig-utils": "8.47.0", + "@typescript-eslint/types": "8.47.0", + "@typescript-eslint/visitor-keys": "8.47.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", @@ -1787,16 +1928,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.46.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.46.1.tgz", - "integrity": "sha512-vkYUy6LdZS7q1v/Gxb2Zs7zziuXN0wxqsetJdeZdRe/f5dwJFglmuvZBfTUivCtjH725C1jWCDfpadadD95EDQ==", + "version": "8.47.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.47.0.tgz", + "integrity": "sha512-g7XrNf25iL4TJOiPqatNuaChyqt49a/onq5YsJ9+hXeugK+41LVg7AxikMfM02PC6jbNtZLCJj6AUcQXJS/jGQ==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.7.0", - "@typescript-eslint/scope-manager": "8.46.1", - "@typescript-eslint/types": "8.46.1", - "@typescript-eslint/typescript-estree": "8.46.1" + "@typescript-eslint/scope-manager": "8.47.0", + "@typescript-eslint/types": "8.47.0", + "@typescript-eslint/typescript-estree": "8.47.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -1811,13 +1952,13 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.46.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.46.1.tgz", - "integrity": "sha512-ptkmIf2iDkNUjdeu2bQqhFPV1m6qTnFFjg7PPDjxKWaMaP0Z6I9l30Jr3g5QqbZGdw8YdYvLp+XnqnWWZOg/NA==", + "version": "8.47.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.47.0.tgz", + "integrity": "sha512-SIV3/6eftCy1bNzCQoPmbWsRLujS8t5iDIZ4spZOBHqrM+yfX2ogg8Tt3PDTAVKw3sSCiUgg30uOAvK2r9zGjQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.46.1", + "@typescript-eslint/types": "8.47.0", "eslint-visitor-keys": "^4.2.1" }, "engines": { @@ -1829,18 +1970,18 @@ } }, "node_modules/@vitejs/plugin-react": { - "version": "5.0.4", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.0.4.tgz", - "integrity": "sha512-La0KD0vGkVkSk6K+piWDKRUyg8Rl5iAIKRMH0vMJI0Eg47bq1eOxmoObAaQG37WMW9MSyk7Cs8EIWwJC1PtzKA==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.1.1.tgz", + "integrity": "sha512-WQfkSw0QbQ5aJ2CHYw23ZGkqnRwqKHD/KYsMeTkZzPT4Jcf0DcBxBtwMJxnu6E7oxw5+JC6ZAiePgh28uJ1HBA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/core": "^7.28.4", + "@babel/core": "^7.28.5", "@babel/plugin-transform-react-jsx-self": "^7.27.1", "@babel/plugin-transform-react-jsx-source": "^7.27.1", - "@rolldown/pluginutils": "1.0.0-beta.38", + "@rolldown/pluginutils": "1.0.0-beta.47", 
"@types/babel__core": "^7.20.5", - "react-refresh": "^0.17.0" + "react-refresh": "^0.18.0" }, "engines": { "node": "^20.19.0 || >=22.12.0" @@ -1849,6 +1990,172 @@ "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" } }, + "node_modules/@vitest/coverage-v8": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-4.0.10.tgz", + "integrity": "sha512-g+brmtoKa/sAeIohNJnnWhnHtU6GuqqVOSQ4SxDIPcgZWZyhJs5RmF5LpqXs8Kq64lANP+vnbn5JLzhLj/G56g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@bcoe/v8-coverage": "^1.0.2", + "@vitest/utils": "4.0.10", + "ast-v8-to-istanbul": "^0.3.8", + "debug": "^4.4.3", + "istanbul-lib-coverage": "^3.2.2", + "istanbul-lib-report": "^3.0.1", + "istanbul-lib-source-maps": "^5.0.6", + "istanbul-reports": "^3.2.0", + "magicast": "^0.5.1", + "std-env": "^3.10.0", + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@vitest/browser": "4.0.10", + "vitest": "4.0.10" + }, + "peerDependenciesMeta": { + "@vitest/browser": { + "optional": true + } + } + }, + "node_modules/@vitest/expect": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.0.10.tgz", + "integrity": "sha512-3QkTX/lK39FBNwARCQRSQr0TP9+ywSdxSX+LgbJ2M1WmveXP72anTbnp2yl5fH+dU6SUmBzNMrDHs80G8G2DZg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "@types/chai": "^5.2.2", + "@vitest/spy": "4.0.10", + "@vitest/utils": "4.0.10", + "chai": "^6.2.1", + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/mocker": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.0.10.tgz", + "integrity": "sha512-e2OfdexYkjkg8Hh3L9NVEfbwGXq5IZbDovkf30qW2tOh7Rh9sVtmSr2ztEXOFbymNxS4qjzLXUQIvATvN4B+lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "4.0.10", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.21" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/@vitest/pretty-format": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.10.tgz", + "integrity": "sha512-99EQbpa/zuDnvVjthwz5bH9o8iPefoQZ63WV8+bsRJZNw3qQSvSltfut8yu1Jc9mqOYi7pEbsKxYTi/rjaq6PA==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.0.10.tgz", + "integrity": "sha512-EXU2iSkKvNwtlL8L8doCpkyclw0mc/t4t9SeOnfOFPyqLmQwuceMPA4zJBa6jw0MKsZYbw7kAn+gl7HxrlB8UQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "4.0.10", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/snapshot": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.0.10.tgz", + "integrity": "sha512-2N4X2ZZl7kZw0qeGdQ41H0KND96L3qX1RgwuCfy6oUsF2ISGD/HpSbmms+CkIOsQmg2kulwfhJ4CI0asnZlvkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.0.10", + "magic-string": "^0.30.21", + "pathe": "^2.0.3" + 
}, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.0.10.tgz", + "integrity": "sha512-AsY6sVS8OLb96GV5RoG8B6I35GAbNrC49AO+jNRF9YVGb/g9t+hzNm1H6kD0NDp8tt7VJLs6hb7YMkDXqu03iw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/ui": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@vitest/ui/-/ui-4.0.10.tgz", + "integrity": "sha512-oWtNM89Np+YsQO3ttT5i1Aer/0xbzQzp66NzuJn/U16bB7MnvSzdLKXgk1kkMLYyKSSzA2ajzqMkYheaE9opuQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@vitest/utils": "4.0.10", + "fflate": "^0.8.2", + "flatted": "^3.3.3", + "pathe": "^2.0.3", + "sirv": "^3.0.2", + "tinyglobby": "^0.2.15", + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "vitest": "4.0.10" + } + }, + "node_modules/@vitest/utils": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.0.10.tgz", + "integrity": "sha512-kOuqWnEwZNtQxMKg3WmPK1vmhZu9WcoX69iwWjVz+jvKTsF1emzsv3eoPcDr6ykA3qP2bsCQE7CwqfNtAVzsmg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.0.10", + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, "node_modules/acorn": { "version": "8.15.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", @@ -1906,6 +2213,16 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/are-docs-informative": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/are-docs-informative/-/are-docs-informative-0.0.2.tgz", + "integrity": "sha512-ixiS0nLNNG5jNQzgZJNoUpBKdo9yTYZMGJ+QgT2jmjR7G7+QHRCc4v6LQ3NgE7EBJq+o0ams3waJwkrlBom8Ig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + } + }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -1913,6 +2230,35 @@ "dev": true, "license": "Python-2.0" }, + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/ast-v8-to-istanbul": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.8.tgz", + "integrity": "sha512-szgSZqUxI5T8mLKvS7WTjF9is+MVbOeLADU73IseOcrqhxr/VAvy6wfoVE39KnKzA7JRhjF5eUagNlHwvZPlKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.31", + "estree-walker": "^3.0.3", + "js-tokens": "^9.0.1" + } + }, + "node_modules/ast-v8-to-istanbul/node_modules/js-tokens": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", + "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", + "dev": true, + "license": "MIT" + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -2027,6 +2373,16 @@ ], "license": "CC-BY-4.0" }, + "node_modules/chai": { + "version": "6.2.1", + "resolved": 
"https://registry.npmjs.org/chai/-/chai-6.2.1.tgz", + "integrity": "sha512-p4Z49OGG5W/WBCPSS/dH3jQ73kD6tiMmUM+bckNK6Jr5JHMG3k9bg/BvKR8lKmtVBKmOiuVaV2ws8s9oSbwysg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -2064,6 +2420,16 @@ "dev": true, "license": "MIT" }, + "node_modules/comment-parser": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/comment-parser/-/comment-parser-1.4.1.tgz", + "integrity": "sha512-buhp5kePrmda3vhc5B9t7pUQXAb2Tnd0qgpkIhPhkHXxJpiPJ11H0ZEU0oBpJ2QztSbzG/ZxMj/CHsYJqRHmyg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 12.0.0" + } + }, "node_modules/compare-versions": { "version": "6.1.1", "resolved": "https://registry.npmjs.org/compare-versions/-/compare-versions-6.1.1.tgz", @@ -2101,12 +2467,18 @@ } }, "node_modules/csstype": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", - "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", "dev": true, "license": "MIT" }, + "node_modules/csv-range": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/csv-range/-/csv-range-0.0.8.tgz", + "integrity": "sha512-dzpSVfbiFmqblg9LPf4LUGHWkYwdwKBLRNw7543trcqriVXlb73qqszc1wQJ4fvXevN3N+2xaKSptvcKOQXNjw==", + "license": "MIT" + }, "node_modules/debug": { "version": "4.4.3", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", @@ -2139,6 +2511,13 @@ "dev": true, "license": "ISC" }, + "node_modules/es-module-lexer": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "dev": true, + "license": "MIT" + }, "node_modules/esbuild": { "version": "0.25.10", "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.10.tgz", @@ -2205,26 +2584,25 @@ } }, "node_modules/eslint": { - "version": "9.37.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.37.0.tgz", - "integrity": "sha512-XyLmROnACWqSxiGYArdef1fItQd47weqB7iwtfr9JHwRrqIXZdcFMvvEcL9xHCmL0SNsOvF0c42lWyM1U5dgig==", + "version": "9.39.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.1.tgz", + "integrity": "sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g==", "dev": true, "license": "MIT", "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.21.0", - "@eslint/config-helpers": "^0.4.0", - "@eslint/core": "^0.16.0", + "@eslint/config-array": "^0.21.1", + "@eslint/config-helpers": "^0.4.2", + "@eslint/core": "^0.17.0", "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "9.37.0", - "@eslint/plugin-kit": "^0.4.0", + "@eslint/js": "9.39.1", + "@eslint/plugin-kit": "^0.4.1", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", - "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", @@ -2266,45 +2644,87 @@ } } }, - "node_modules/eslint-plugin-react-dom": { - "version": "2.2.1", - 
"resolved": "https://registry.npmjs.org/eslint-plugin-react-dom/-/eslint-plugin-react-dom-2.2.1.tgz", - "integrity": "sha512-g6B4yTLHWhgqu3mN0kUJvrQp285uFMQYXzWVAIBqziV6n93sgPH8Eb8ht3gTzRUfA9Rt3JQr8QaxBxpfSkp67w==", + "node_modules/eslint-plugin-jsdoc": { + "version": "61.3.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-61.3.0.tgz", + "integrity": "sha512-E4m/5J5lrasd63Z74q4CCZ4PFnywnnrcvA7zZ98802NPhrZKKTp5NH+XAT+afcjXp2ps2/OQF5gPSWCT2XFCJg==", "dev": true, - "license": "MIT", + "license": "BSD-3-Clause", "dependencies": { - "@eslint-react/ast": "2.2.1", - "@eslint-react/core": "2.2.1", - "@eslint-react/eff": "2.2.1", - "@eslint-react/shared": "2.2.1", - "@eslint-react/var": "2.2.1", - "@typescript-eslint/scope-manager": "^8.46.0", - "@typescript-eslint/types": "^8.46.0", - "@typescript-eslint/utils": "^8.46.0", - "compare-versions": "^6.1.1", - "string-ts": "^2.2.1", - "ts-pattern": "^5.8.0" + "@es-joy/jsdoccomment": "~0.76.0", + "@es-joy/resolve.exports": "1.2.0", + "are-docs-informative": "^0.0.2", + "comment-parser": "1.4.1", + "debug": "^4.4.3", + "escape-string-regexp": "^4.0.0", + "espree": "^10.4.0", + "esquery": "^1.6.0", + "html-entities": "^2.6.0", + "object-deep-merge": "^2.0.0", + "parse-imports-exports": "^0.2.4", + "semver": "^7.7.3", + "spdx-expression-parse": "^4.0.0", + "to-valid-identifier": "^1.0.0" }, "engines": { - "node": ">=20.19.0" + "node": ">=20.11.0" }, "peerDependencies": { - "eslint": "^9.37.0", - "typescript": "^5.9.3" + "eslint": "^7.0.0 || ^8.0.0 || ^9.0.0" } }, - "node_modules/eslint-plugin-react-hooks": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-7.0.0.tgz", - "integrity": "sha512-fNXaOwvKwq2+pXiRpXc825Vd63+KM4DLL40Rtlycb8m7fYpp6efrTp1sa6ZbP/Ap58K2bEKFXRmhURE+CJAQWw==", + "node_modules/eslint-plugin-jsdoc/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", "dev": true, - "license": "MIT", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint-plugin-react-dom": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-dom/-/eslint-plugin-react-dom-2.3.5.tgz", + "integrity": "sha512-SsIF5HbsXLJcbEoFbzgabqA7DOnfGd0BhD7QzZd5tqgz4gL2j2mUGCBbQjQIE0BMbKtOihbhuceQfQ/QxoJJIg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-react/ast": "2.3.5", + "@eslint-react/core": "2.3.5", + "@eslint-react/eff": "2.3.5", + "@eslint-react/shared": "2.3.5", + "@eslint-react/var": "2.3.5", + "@typescript-eslint/scope-manager": "^8.46.4", + "@typescript-eslint/types": "^8.46.4", + "@typescript-eslint/utils": "^8.46.4", + "compare-versions": "^6.1.1", + "string-ts": "^2.2.1", + "ts-pattern": "^5.9.0" + }, + "engines": { + "node": ">=20.19.0" + }, + "peerDependencies": { + "eslint": "^9.39.1", + "typescript": "^5.9.3" + } + }, + "node_modules/eslint-plugin-react-hooks": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-7.0.1.tgz", + "integrity": "sha512-O0d0m04evaNzEPoSW+59Mezf8Qt0InfgGIBJnpC0h3NH/WjUAR7BIKUfysC6todmtiZ/A0oUVS8Gce0WhBrHsA==", + "dev": true, + "license": "MIT", "dependencies": { "@babel/core": "^7.24.4", "@babel/parser": "^7.24.4", "hermes-parser": "^0.25.1", - "zod": "^3.22.4 || ^4.0.0", 
- "zod-validation-error": "^3.0.3 || ^4.0.0" + "zod": "^3.25.0 || ^4.0.0", + "zod-validation-error": "^3.5.0 || ^4.0.0" }, "engines": { "node": ">=18" @@ -2314,9 +2734,9 @@ } }, "node_modules/eslint-plugin-react-refresh": { - "version": "0.4.23", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.23.tgz", - "integrity": "sha512-G4j+rv0NmbIR45kni5xJOrYvCtyD3/7LjpVH8MPPcudXDcNu8gv+4ATTDXTtbRR8rTCM5HxECvCSsRmxKnWDsA==", + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.24.tgz", + "integrity": "sha512-nLHIW7TEq3aLrEYWpVaJ1dRgFR+wLDPN8e8FpYAql/bMV2oBEfC37K0gLEGgv9fy66juNShSMV8OkTqzltcG/w==", "dev": true, "license": "MIT", "peerDependencies": { @@ -2324,35 +2744,45 @@ } }, "node_modules/eslint-plugin-react-x": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-x/-/eslint-plugin-react-x-2.2.1.tgz", - "integrity": "sha512-Bz5MoLgimALqiJ5O7/KQ/JhZ7AC24qILvA7KHsjT5n0XEQKrktGKGZEm4AKiKsTmboAitpVbHDB9kGpwXIrFXw==", + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-x/-/eslint-plugin-react-x-2.3.5.tgz", + "integrity": "sha512-Yj+6e2ds6Gg3KRPgNdifincu3cuxDYPcboCXc5EGHC//6JZXRgtqQ3N5uP9RVHnCHmKF2EiZ76XyPDnp4hMgEg==", "dev": true, "license": "MIT", "dependencies": { - "@eslint-react/ast": "2.2.1", - "@eslint-react/core": "2.2.1", - "@eslint-react/eff": "2.2.1", - "@eslint-react/shared": "2.2.1", - "@eslint-react/var": "2.2.1", - "@typescript-eslint/scope-manager": "^8.46.0", - "@typescript-eslint/type-utils": "^8.46.0", - "@typescript-eslint/types": "^8.46.0", - "@typescript-eslint/utils": "^8.46.0", + "@eslint-react/ast": "2.3.5", + "@eslint-react/core": "2.3.5", + "@eslint-react/eff": "2.3.5", + "@eslint-react/shared": "2.3.5", + "@eslint-react/var": "2.3.5", + "@typescript-eslint/scope-manager": "^8.46.4", + "@typescript-eslint/type-utils": "^8.46.4", + "@typescript-eslint/types": "^8.46.4", + "@typescript-eslint/utils": "^8.46.4", "compare-versions": "^6.1.1", "is-immutable-type": "^5.0.1", "string-ts": "^2.2.1", "ts-api-utils": "^2.1.0", - "ts-pattern": "^5.8.0" + "ts-pattern": "^5.9.0" }, "engines": { "node": ">=20.19.0" }, "peerDependencies": { - "eslint": "^9.37.0", + "eslint": "^9.39.1", "typescript": "^5.9.3" } }, + "node_modules/eslint-plugin-simple-import-sort": { + "version": "12.1.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-simple-import-sort/-/eslint-plugin-simple-import-sort-12.1.1.tgz", + "integrity": "sha512-6nuzu4xwQtE3332Uz0to+TxDQYRLTKRESSc2hefVT48Zc8JthmN23Gx9lnYhu0FtkRSL1oxny3kJ2aveVhmOVA==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "eslint": ">=5.0.0" + } + }, "node_modules/eslint-scope": { "version": "8.4.0", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", @@ -2437,6 +2867,16 @@ "node": ">=4.0" } }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, "node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -2447,6 +2887,16 @@ "node": ">=0.10.0" } }, + "node_modules/expect-type": { + "version": "1.2.2", + "resolved": 
"https://registry.npmjs.org/expect-type/-/expect-type-1.2.2.tgz", + "integrity": "sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -2508,6 +2958,13 @@ "reusify": "^1.0.4" } }, + "node_modules/fflate": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", + "integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==", + "dev": true, + "license": "MIT" + }, "node_modules/file-entry-cache": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", @@ -2611,9 +3068,9 @@ } }, "node_modules/globals": { - "version": "16.4.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-16.4.0.tgz", - "integrity": "sha512-ob/2LcVVaVGCYN+r14cnwnoDPUufjiYgSqRhiFD0Q1iI4Odora5RE8Iv1D24hAz5oMophRGkGz+yuvQmmUMnMw==", + "version": "16.5.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-16.5.0.tgz", + "integrity": "sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==", "dev": true, "license": "MIT", "engines": { @@ -2658,15 +3115,39 @@ } }, "node_modules/hightable": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/hightable/-/hightable-0.20.1.tgz", - "integrity": "sha512-b3M5XD8c8yPs1AfbBLxQdvaZkGEP9w7YdGEsIuaqxgLNmUNAp117za6ZoTFSkhPyjSZBbiynLaX4cj3df6xtAQ==", + "version": "0.22.2", + "resolved": "https://registry.npmjs.org/hightable/-/hightable-0.22.2.tgz", + "integrity": "sha512-YoTOzTsBHRhZ40iSNpM2xDJPUod/vxIhi5SF5hDvisesFRAjqlyAY4GuGwSGSWZzi1knnRVs/7VJL9KWRwO70A==", "license": "MIT", "peerDependencies": { "react": "^18.3.1 || ^19", "react-dom": "^18.3.1 || ^19" } }, + "node_modules/html-entities": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.6.0.tgz", + "integrity": "sha512-kig+rMn/QOVRvr7c86gQ8lWXq+Hkv6CbAH1hLu+RG338StTpE8Z0b44SDVaqVu7HGKf27frdmUYEs9hTUX/cLQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/mdevils" + }, + { + "type": "patreon", + "url": "https://patreon.com/mdevils" + } + ], + "license": "MIT" + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true, + "license": "MIT" + }, "node_modules/ignore": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", @@ -2760,6 +3241,60 @@ "dev": true, "license": "ISC" }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "license": 
"BSD-3-Clause", + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.6.tgz", + "integrity": "sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.23", + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz", + "integrity": "sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -2780,6 +3315,16 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/jsdoc-type-pratt-parser": { + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-6.10.0.tgz", + "integrity": "sha512-+LexoTRyYui5iOhJGn13N9ZazL23nAHGkXsa1p/C8yeq79WRfLBag6ZZ0FQG2aRoc9yfo59JT9EYCQonOkHKkQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20.0.0" + } + }, "node_modules/jsesc": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", @@ -2884,6 +3429,57 @@ "yallist": "^3.0.2" } }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/magicast": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.5.1.tgz", + "integrity": "sha512-xrHS24IxaLrvuo613F719wvOIv9xPHFWQHuvGUBmPnCA/3MQxKI3b+r7n1jAoDHmsbC5bRhTZYR77invLAxVnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.5", + "@babel/types": "^7.28.5", + "source-map-js": "^1.2.1" + } + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/merge2": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", @@ -2921,6 +3517,16 @@ "node": "*" } }, + "node_modules/mrmime": { + 
"version": "2.0.1", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz", + "integrity": "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -2961,6 +3567,13 @@ "dev": true, "license": "MIT" }, + "node_modules/object-deep-merge": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/object-deep-merge/-/object-deep-merge-2.0.0.tgz", + "integrity": "sha512-3DC3UMpeffLTHiuXSy/UG4NOIYTLlY9u3V82+djSCLYClWobZiS4ivYzpIUWrRY/nfsJ8cWsKyG3QfyLePmhvg==", + "dev": true, + "license": "MIT" + }, "node_modules/optionator": { "version": "0.9.4", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", @@ -3024,6 +3637,23 @@ "node": ">=6" } }, + "node_modules/parse-imports-exports": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/parse-imports-exports/-/parse-imports-exports-0.2.4.tgz", + "integrity": "sha512-4s6vd6dx1AotCx/RCI2m7t7GCh5bDRUtGNvRfHSP2wbBQdMi67pPe7mtzmgwcaQ8VKK/6IB7Glfyu3qdZJPybQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parse-statements": "1.0.11" + } + }, + "node_modules/parse-statements": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/parse-statements/-/parse-statements-1.0.11.tgz", + "integrity": "sha512-HlsyYdMBnbPQ9Jr/VgJ1YF4scnldvJpJxCVx6KgqPL4dxppsWrJHCIIxQXMJrqGnsRkNPATbeMJ8Yxu7JMsYcA==", + "dev": true, + "license": "MIT" + }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -3044,6 +3674,13 @@ "node": ">=8" } }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, "node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", @@ -3103,22 +3740,6 @@ "node": ">= 0.8.0" } }, - "node_modules/prettier": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz", - "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==", - "dev": true, - "license": "MIT", - "bin": { - "prettier": "bin/prettier.cjs" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" - } - }, "node_modules/punycode": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", @@ -3174,15 +3795,28 @@ } }, "node_modules/react-refresh": { - "version": "0.17.0", - "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", - "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.18.0.tgz", + "integrity": "sha512-QgT5//D3jfjJb6Gsjxv0Slpj23ip+HtOpnNgnb2S5zU3CB26G/IDPGoy4RJB42wzFE46DRsstbW6tKHoKbhAxw==", "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" } }, + "node_modules/reserved-identifiers": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/reserved-identifiers/-/reserved-identifiers-1.2.0.tgz", + "integrity": 
"sha512-yE7KUfFvaBFzGPs5H3Ops1RevfUEsDc5Iz65rOwWg4lE8HJSYtle77uul3+573457oHvBKuHYDl/xqUkKpEEdw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/resolve-from": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", @@ -3309,6 +3943,28 @@ "node": ">=8" } }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true, + "license": "ISC" + }, + "node_modules/sirv": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.2.tgz", + "integrity": "sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@polka/url": "^1.0.0-next.24", + "mrmime": "^2.0.0", + "totalist": "^3.0.0" + }, + "engines": { + "node": ">=18" + } + }, "node_modules/source-map-js": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", @@ -3319,6 +3975,45 @@ "node": ">=0.10.0" } }, + "node_modules/spdx-exceptions": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", + "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", + "dev": true, + "license": "CC-BY-3.0" + }, + "node_modules/spdx-expression-parse": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-4.0.0.tgz", + "integrity": "sha512-Clya5JIij/7C6bRR22+tnGXbc4VKlibKSVj2iHvVeX5iMW7s1SIQlqu699JkODJJIhh/pUu8L0/VLh8xflD+LQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.22", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz", + "integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==", + "dev": true, + "license": "CC0-1.0" + }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true, + "license": "MIT" + }, + "node_modules/std-env": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz", + "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", + "dev": true, + "license": "MIT" + }, "node_modules/string-ts": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/string-ts/-/string-ts-2.2.1.tgz", @@ -3352,6 +4047,20 @@ "node": ">=8" } }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyexec": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", + "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", + "dev": 
true, + "license": "MIT" + }, "node_modules/tinyglobby": { "version": "0.2.15", "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", @@ -3401,6 +4110,16 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/tinyrainbow": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.0.3.tgz", + "integrity": "sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -3414,6 +4133,33 @@ "node": ">=8.0" } }, + "node_modules/to-valid-identifier": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/to-valid-identifier/-/to-valid-identifier-1.0.0.tgz", + "integrity": "sha512-41wJyvKep3yT2tyPqX/4blcfybknGB4D+oETKLs7Q76UiPqRpUJK3hr1nxelyYO0PHKVzJwlu0aCeEAsGI6rpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sindresorhus/base62": "^1.0.0", + "reserved-identifiers": "^1.0.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/totalist": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", + "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/ts-api-utils": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", @@ -3464,9 +4210,9 @@ } }, "node_modules/ts-pattern": { - "version": "5.8.0", - "resolved": "https://registry.npmjs.org/ts-pattern/-/ts-pattern-5.8.0.tgz", - "integrity": "sha512-kIjN2qmWiHnhgr5DAkAafF9fwb0T5OhMVSWrm8XEdTFnX6+wfXwYOFjeF86UZ54vduqiR7BfqScFmXSzSaH8oA==", + "version": "5.9.0", + "resolved": "https://registry.npmjs.org/ts-pattern/-/ts-pattern-5.9.0.tgz", + "integrity": "sha512-6s5V71mX8qBUmlgbrfL33xDUwO0fq48rxAu2LBE11WBeGdpCPOsXksQbZJHvHwhrd3QjUusd3mAOM5Gg0mFBLg==", "dev": true, "license": "MIT" }, @@ -3499,16 +4245,16 @@ } }, "node_modules/typescript-eslint": { - "version": "8.46.1", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.46.1.tgz", - "integrity": "sha512-VHgijW803JafdSsDO8I761r3SHrgk4T00IdyQ+/UsthtgPRsBWQLqoSxOolxTpxRKi1kGXK0bSz4CoAc9ObqJA==", + "version": "8.47.0", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.47.0.tgz", + "integrity": "sha512-Lwe8i2XQ3WoMjua/r1PHrCTpkubPYJCAfOurtn+mtTzqB6jNd+14n9UN1bJ4s3F49x9ixAm0FLflB/JzQ57M8Q==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/eslint-plugin": "8.46.1", - "@typescript-eslint/parser": "8.46.1", - "@typescript-eslint/typescript-estree": "8.46.1", - "@typescript-eslint/utils": "8.46.1" + "@typescript-eslint/eslint-plugin": "8.47.0", + "@typescript-eslint/parser": "8.47.0", + "@typescript-eslint/typescript-estree": "8.47.0", + "@typescript-eslint/utils": "8.47.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -3523,9 +4269,9 @@ } }, "node_modules/undici-types": { - "version": "7.14.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.14.0.tgz", - "integrity": "sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA==", + "version": "7.16.0", + "resolved": 
"https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", "dev": true, "license": "MIT" }, @@ -3571,9 +4317,9 @@ } }, "node_modules/vite": { - "version": "7.1.9", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.9.tgz", - "integrity": "sha512-4nVGliEpxmhCL8DslSAUdxlB6+SMrhB0a1v5ijlh1xB1nEPuy1mxaHxysVucLHuWryAxLWg6a5ei+U4TLn/rFg==", + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.2.2.tgz", + "integrity": "sha512-BxAKBWmIbrDgrokdGZH1IgkIk/5mMHDreLDmCJ0qpyJaAteP8NvMhkwr/ZCQNqNH97bw/dANTE9PDzqwJghfMQ==", "dev": true, "license": "MIT", "peer": true, @@ -3678,6 +4424,98 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/vitest": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.10.tgz", + "integrity": "sha512-2Fqty3MM9CDwOVet/jaQalYlbcjATZwPYGcqpiYQqgQ/dLC7GuHdISKgTYIVF/kaishKxLzleKWWfbSDklyIKg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@vitest/expect": "4.0.10", + "@vitest/mocker": "4.0.10", + "@vitest/pretty-format": "4.0.10", + "@vitest/runner": "4.0.10", + "@vitest/snapshot": "4.0.10", + "@vitest/spy": "4.0.10", + "@vitest/utils": "4.0.10", + "debug": "^4.4.3", + "es-module-lexer": "^1.7.0", + "expect-type": "^1.2.2", + "magic-string": "^0.30.21", + "pathe": "^2.0.3", + "picomatch": "^4.0.3", + "std-env": "^3.10.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.2", + "tinyglobby": "^0.2.15", + "tinyrainbow": "^3.0.3", + "vite": "^6.0.0 || ^7.0.0", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/debug": "^4.1.12", + "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", + "@vitest/browser-playwright": "4.0.10", + "@vitest/browser-preview": "4.0.10", + "@vitest/browser-webdriverio": "4.0.10", + "@vitest/ui": "4.0.10", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/debug": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser-playwright": { + "optional": true + }, + "@vitest/browser-preview": { + "optional": true + }, + "@vitest/browser-webdriverio": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/vitest/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -3694,6 +4532,23 @@ "node": ">= 8" } }, + "node_modules/why-is-node-running": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dev": true, + "license": "MIT", + "dependencies": { + "siginfo": "^2.0.0", + "stackback": 
"0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/word-wrap": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", diff --git a/package.json b/package.json index c91c752..872b0c2 100644 --- a/package.json +++ b/package.json @@ -1,37 +1,46 @@ { - "name": "parquet-table", + "name": "csv-table", "private": true, "version": "0.0.0", "type": "module", "scripts": { + "coverage": "vitest run --coverage", "dev": "vite", "build": "tsc -b && vite build", "format": "prettier --write .", "lint": "eslint .", "preview": "vite preview", + "test": "vitest", + "test:ui": "vitest --coverage --ui", "typecheck": "tsc -b --noEmit" }, "dependencies": { - "@severo_tests/papaparse": "^5.5.4", - "hightable": "0.20.1", - "react": "^19.2.0", - "react-dom": "^19.2.0" + "csv-range": "0.0.8", + "hightable": "0.22.2", + "react": "19.2.0", + "react-dom": "19.2.0" }, "devDependencies": { - "@eslint/js": "^9.37.0", - "@types/papaparse": "^5.3.16", - "@types/react": "^19.2.2", - "@types/react-dom": "^19.2.2", - "@vitejs/plugin-react": "^5.0.4", - "eslint": "^9.37.0", - "eslint-plugin-react-dom": "^2.2.1", - "eslint-plugin-react-hooks": "^7.0.0", - "eslint-plugin-react-refresh": "^0.4.23", - "eslint-plugin-react-x": "^2.2.1", - "globals": "^16.3.0", - "prettier": "^3.6.2", - "typescript": "^5.9.3", - "typescript-eslint": "^8.46.1", - "vite": "^7.1.9" + "@eslint/compat": "2.0.0", + "@eslint/js": "9.39.1", + "@stylistic/eslint-plugin": "5.6.1", + "@types/node": "24.10.1", + "@types/react": "19.2.6", + "@types/react-dom": "19.2.3", + "@vitejs/plugin-react": "5.1.1", + "@vitest/coverage-v8": "4.0.10", + "@vitest/ui": "4.0.10", + "eslint": "9.39.1", + "eslint-plugin-jsdoc": "61.3.0", + "eslint-plugin-react-dom": "2.3.5", + "eslint-plugin-react-hooks": "7.0.1", + "eslint-plugin-react-refresh": "0.4.24", + "eslint-plugin-react-x": "2.3.5", + "eslint-plugin-simple-import-sort": "12.1.1", + "globals": "16.5.0", + "typescript": "5.9.3", + "typescript-eslint": "8.47.0", + "vite": "7.2.2", + "vitest": "4.0.10" } } diff --git a/src/App.tsx b/src/App.tsx index 9df1d52..548d8b1 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -1,120 +1,123 @@ // import { sortableDataFrame } from "hightable"; -import type { ReactNode } from "react"; -import { useCallback, useReducer, useState } from "react"; +import type { DataFrame } from 'hightable' +import type { ReactNode } from 'react' +import { useCallback, useReducer, useState } from 'react' -import { type CSVDataFrame, csvDataFrame } from "./csv.js"; -import { byteLengthFromUrl } from "./helpers.js"; -import Dropzone from "./Dropzone.js"; -import Layout from "./Layout.js"; -import Page from "./Page.js"; -import Welcome from "./Welcome.js"; +import { csvDataFrame } from './dataframe.js' +import Dropzone from './Dropzone.js' +import { byteLengthFromUrl } from './helpers.js' +import Layout from './Layout.js' +import Page from './Page.js' +import Welcome from './Welcome.js' interface State { url?: string name?: string byteLength?: number - controller?: AbortController - df?: CSVDataFrame + df?: DataFrame } type Action = { - type: 'setUrl', - url: string, - name?: string, -} | { - type: "setByteLength", - byteLength: number, + type: 'setUrl' + url: string + name?: string } | { - type: "setController", - controller: AbortController, + type: 'setByteLength' + byteLength: number } | { - type: "setDataFrame", - df: CSVDataFrame, + type: 'setDataFrame' + df: DataFrame } +/** + * Reducer function for 
managing state + * @param state - The current state + * @param action - The action to perform + * @returns The new state + */ function reducer(state: State, action: Action): State { switch (action.type) { case 'setUrl': - state.controller?.abort(); if (state.url) { // revoke obsolete object URL, if any (silently ignores error if not an object URL) - URL.revokeObjectURL(state.url); + URL.revokeObjectURL(state.url) } return { url: action.url, name: action.name ?? action.url } case 'setByteLength': return { ...state, byteLength: action.byteLength } - case 'setController': - return { ...state, controller: action.controller } case 'setDataFrame': return { ...state, df: action.df } default: - throw new Error("Unknown action"); + throw new Error('Unknown action') } } +/** + * App component + * @returns App component + */ export default function App(): ReactNode { - const params = new URLSearchParams(location.search); - const url = params.get("url") ?? undefined; - const iframe = params.get("iframe") ? true : false; + const params = new URLSearchParams(location.search) + const url = params.get('url') ?? undefined + const iframe = params.get('iframe') ? true : false - const [state, dispatch] = useReducer(reducer, {}); - const [error, setError] = useState(); + const [state, dispatch] = useReducer(reducer, {}) + const [error, setError] = useState() const setUnknownError = useCallback((e: unknown) => { - setError(e instanceof Error ? e : new Error(String(e))); - }, [setError]); + setError(e instanceof Error ? e : new Error(String(e))) + }, [setError]) const prepareDataFrame = useCallback(async function ({ url, byteLength }: { url: string, byteLength: number }) { - const controller = new AbortController(); - dispatch({ type: 'setController', controller }); - const df = await csvDataFrame({ url, byteLength, signal: controller.signal }); - dispatch({ type: 'setDataFrame', df }); - // sortableDataFrame( ... // TODO(SL): enable sorting? (requires all the data - maybe on small data?) 
- }, []); + const df = await csvDataFrame({ url, byteLength }) + dispatch({ type: 'setDataFrame', df }) + }, []) const setUrl = useCallback((url: string) => { - dispatch({ type: 'setUrl', url }); - byteLengthFromUrl(url).then(byteLength => { - dispatch({ type: 'setByteLength', byteLength }); + dispatch({ type: 'setUrl', url }) + byteLengthFromUrl(url).then((byteLength) => { + dispatch({ type: 'setByteLength', byteLength }) return prepareDataFrame({ url, byteLength }) - }).catch(setUnknownError); - }, [setUnknownError, prepareDataFrame]); + }).catch(setUnknownError) + }, [setUnknownError, prepareDataFrame]) const onUrlDrop = useCallback((url: string) => { // Add url=url to query string - const params = new URLSearchParams(location.search); - params.set("url", url); - history.pushState({}, "", `${location.pathname}?${params}`); - setUrl(url); - }, [setUrl]); + const params = new URLSearchParams(location.search) + params.set('url', url) + history.pushState({}, '', `${location.pathname}?${params}`) + setUrl(url) + }, [setUrl]) const onFileDrop = useCallback((file: File) => { // Clear query string - history.pushState({}, "", location.pathname); - const url = URL.createObjectURL(file); - dispatch({ type: 'setUrl', url, name: file.name }); - dispatch({ type: 'setByteLength', byteLength: file.size }); + history.pushState({}, '', location.pathname) + const url = URL.createObjectURL(file) + dispatch({ type: 'setUrl', url, name: file.name }) + dispatch({ type: 'setByteLength', byteLength: file.size }) prepareDataFrame({ url, byteLength: file.size }).catch(setUnknownError) - }, [setUnknownError, prepareDataFrame]); + }, [setUnknownError, prepareDataFrame]) if (url !== undefined && url !== state.url) { // if we have a url in the query string, it's not the same as the current one, load it - setUrl(url); + setUrl(url) } return ( { - setError(e); + setError(e) }} onFileDrop={onFileDrop} onUrlDrop={onUrlDrop} > - {state.url ? ( - - ) : ( - - )} + {state.url + ? ( + + ) + : ( + + )} - ); + ) } diff --git a/src/Dropzone.tsx b/src/Dropzone.tsx index 0fb7146..e8500e0 100644 --- a/src/Dropzone.tsx +++ b/src/Dropzone.tsx @@ -1,10 +1,10 @@ -import React, { type ReactNode, useEffect, useRef, useState } from "react"; +import React, { type ReactNode, useEffect, useRef, useState } from 'react' interface DropzoneProps { - children: ReactNode; - onFileDrop: (file: File) => void; - onUrlDrop: (url: string) => void; - onError: (error: Error) => void; + children: ReactNode + onFileDrop: (file: File) => void + onUrlDrop: (url: string) => void + onError: (error: Error) => void } /** @@ -14,23 +14,21 @@ interface DropzoneProps { * * You can have an element inside the dropzone that triggers the file input * dialog when clicked by adding the class 'dropzone-select' to it. - * - * @param {Object} props - * @param {ReactNode} props.children - message to display in dropzone. - * @param {Function} props.onFileDrop - called when a file is dropped. - * @param {Function} props.onUrlDrop - called when a url is dropped. - * @param {Function} props.onError - called when an error occurs. - * @returns {ReactNode} + * @param props - props + * @param props.children - message to display in dropzone. + * @param props.onFileDrop - called when a file is dropped. + * @param props.onUrlDrop - called when a url is dropped. 
+ * @returns Dropzone component */ export default function Dropzone({ children, onFileDrop, onUrlDrop, }: DropzoneProps): ReactNode { - const dropzoneRef = useRef(null); - const fileInputRef = useRef(null); + const dropzoneRef = useRef(null) + const fileInputRef = useRef(null) // number of dragenter events minus dragleave events - const [enters, setEnters] = useState(0); + const [enters, setEnters] = useState(0) /** * Trigger file input dialog. @@ -38,89 +36,104 @@ export default function Dropzone({ */ function triggerFileSelect(e: React.MouseEvent) { // If click inside '.dropzone', activate file input dialog - if ((e.target as Element).classList.contains("dropzone")) { - fileInputRef.current?.click(); + if ((e.target as Element).classList.contains('dropzone')) { + fileInputRef.current?.click() } } /** * Handle file selection event. * Recursively upload files and directories, in parallel. - * @param {ChangeEvent} e - * @returns {void} + * @param e - file input change event */ function handleFileSelect(e: React.ChangeEvent): void { - const { files } = e.target; - const file = files?.[0]; + const { files } = e.target + const file = files?.[0] if (files?.length !== 1 || !file) { - return; + return } - onFileDrop(file); + onFileDrop(file) } useEffect(() => { - const dropzone = dropzoneRef.current; - if (!dropzone) return; + const dropzone = dropzoneRef.current + if (!dropzone) return // Attach drag-and-drop event listeners + /** + * Handle drag enter event. + * @param e - drag event + */ function onDragEnter(e: DragEvent) { // check if any of the items are files (not strings) - const items = e.dataTransfer?.items; - if (!items) return; - if (!Array.from(items).some((item) => item.kind === "file")) return; - setEnters((enters) => enters + 1); + const items = e.dataTransfer?.items + if (!items) return + if (!Array.from(items).some(item => item.kind === 'file')) return + setEnters(enters => enters + 1) } + /** + * Handle drag over event. + * @param e - drag event + */ function onDragOver(e: DragEvent) { - e.preventDefault(); + e.preventDefault() } + /** + * Handle drag leave event. + * @param e - drag event + */ function onDragLeave(e: DragEvent) { - const items = e.dataTransfer?.items; - if (!items) return; - if (!Array.from(items).some((item) => item.kind === "file")) return; - setEnters((enters) => enters - 1); + const items = e.dataTransfer?.items + if (!items) return + if (!Array.from(items).some(item => item.kind === 'file')) return + setEnters(enters => enters - 1) } + /** + * Handle file drop event. 
+ * @param e - drag event + */ function handleFileDrop(e: DragEvent) { - e.preventDefault(); - setEnters(0); + e.preventDefault() + setEnters(0) - if (!e.dataTransfer) throw new Error("Missing dataTransfer"); - const { files, items } = e.dataTransfer; + if (!e.dataTransfer) throw new Error('Missing dataTransfer') + const { files, items } = e.dataTransfer if (files.length > 0) { - const file = files[0]; + const file = files[0] if (!file) { - return; + return } - onFileDrop(file); + onFileDrop(file) } if (items.length > 0) { - const item = items[0]; - if (item?.kind === "string") { + const item = items[0] + if (item?.kind === 'string') { item.getAsString((url) => { - if (url.startsWith("http")) { - onUrlDrop(url); + if (url.startsWith('http')) { + onUrlDrop(url) } - }); + }) } } } - window.addEventListener("dragenter", onDragEnter); - window.addEventListener("dragover", onDragOver); - window.addEventListener("dragleave", onDragLeave); - dropzone.addEventListener("drop", handleFileDrop); + window.addEventListener('dragenter', onDragEnter) + window.addEventListener('dragover', onDragOver) + window.addEventListener('dragleave', onDragLeave) + dropzone.addEventListener('drop', handleFileDrop) // Cleanup event listeners when component is unmounted return () => { - window.removeEventListener("dragenter", onDragEnter); - window.removeEventListener("dragover", onDragOver); - window.removeEventListener("dragleave", onDragLeave); - dropzone.removeEventListener("drop", handleFileDrop); - }; - }); + window.removeEventListener('dragenter', onDragEnter) + window.removeEventListener('dragover', onDragOver) + window.removeEventListener('dragleave', onDragLeave) + dropzone.removeEventListener('drop', handleFileDrop) + } + }) return (
0 ? "dropzone hover" : "dropzone"} + className={enters > 0 ? 'dropzone hover' : 'dropzone'} onClick={triggerFileSelect} ref={dropzoneRef} > @@ -133,9 +146,9 @@ export default function Dropzone({
- ); + ) } diff --git a/src/Layout.tsx b/src/Layout.tsx index 8d60a2e..c0a707e 100644 --- a/src/Layout.tsx +++ b/src/Layout.tsx @@ -1,34 +1,32 @@ -import type { ReactNode } from "react"; +import type { ReactNode } from 'react' -import { cn } from "./helpers.js"; +import { cn } from './helpers.js' interface LayoutProps { - children: ReactNode; - className?: string; - error?: Error; + children: ReactNode + className?: string + error?: Error } /** * Layout for shared UI. * Content div style can be overridden by className prop. - * - * @param {Object} props - * @param {ReactNode} props.children - content to display inside the layout - * @param {string | undefined} props.className - additional class names to apply to the content container - * @param {Error} props.error - error message to display - * @returns {ReactNode} + * @param props - layout properties + * @param props.children - content to display inside the layout + * @param props.className - additional class names to apply to the content container + * @returns Layout component */ export default function Layout({ children, className, - // error, // TODO(SL): implement error bar + // error, }: LayoutProps): ReactNode { return ( <>
-
{children}
+
{children}
{/* */}
- ); + ) } diff --git a/src/Loading.tsx b/src/Loading.tsx index 59175e4..19eb351 100644 --- a/src/Loading.tsx +++ b/src/Loading.tsx @@ -1,19 +1,18 @@ -import type { ReactNode } from "react"; +import type { ReactNode } from 'react' -import { cn } from "./helpers.js"; +import { cn } from './helpers.js' interface LoadingProps { - className?: string; + className?: string } /** * Loading component. * div style can be overridden by className prop. - * - * @param {Object} props - * @param {string | undefined} props.className - additional class names to apply to the div - * @returns {ReactNode} + * @param props - loading properties + * @param props.className - additional class names to apply to the div + * @returns Loading component */ export default function Loading({ className }: LoadingProps): ReactNode { - return
; + return
} diff --git a/src/Page.tsx b/src/Page.tsx index 6399894..3db6ad7 100644 --- a/src/Page.tsx +++ b/src/Page.tsx @@ -1,20 +1,25 @@ -import HighTable from "hightable"; -import { type ReactNode } from "react"; -import Loading from "./Loading.js"; -import type { CSVDataFrame } from "./csv.js"; +import { type DataFrame, HighTable } from 'hightable' +import { type ReactNode } from 'react' + +import Loading from './Loading.js' export interface PageProps { - df?: CSVDataFrame; - name?: string; - byteLength?: number; - setError: (e: unknown) => void; - iframe: boolean; + df?: DataFrame + name?: string + byteLength?: number + setError: (e: unknown) => void + iframe: boolean } /** * CSV viewer page - * @param {Object} props - * @returns {ReactNode} + * @param props - page properties + * @param props.df - data frame to display + * @param props.name - name of the file + * @param props.byteLength - size of the file in bytes + * @param props.setError - function to call on error + * @param props.iframe - whether the page is displayed in an iframe + * @returns Page component */ export default function Page({ df, @@ -25,44 +30,55 @@ export default function Page({ }: PageProps): ReactNode { return ( <> - {iframe ? "" :
{name}
} + {iframe ? '' :
{name}
}
{byteLength !== undefined && ( - + {formatFileSize(byteLength)} )} - {df ? {df.numRows.toLocaleString()} row{df.numRows > 1 ? 's': ''}{df.metadata?.isPartial ? ' (estimated)': ''} : null} + {df + ? ( + + {df.numRows.toLocaleString()} + {' '} + row + {df.numRows > 1 ? 's' : ''} + {df.metadata?.isNumRowsEstimated ? ' (estimated)' : ''} + + ) + : null}
- {!df ? ( - - ) : ( - - )} + {!df + ? ( + + ) + : ( + + )} - ); + ) } /** * Returns the file size in human readable format. - * * @param {number} bytes file size in bytes * @returns {string} formatted file size string */ function formatFileSize(bytes: number): string { - const sizes = ["b", "kb", "mb", "gb", "tb"]; - if (bytes === 0) return "0 b"; - const i = Math.floor(Math.log2(bytes) / 10); - if (i === 0) return `${bytes.toString()} b`; - const base = bytes / Math.pow(1024, i); - const size = sizes[i]; + const sizes = ['b', 'kb', 'mb', 'gb', 'tb'] + if (bytes === 0) return '0 b' + const i = Math.floor(Math.log2(bytes) / 10) + if (i === 0) return `${bytes.toString()} b` + const base = bytes / Math.pow(1024, i) + const size = sizes[i] if (!size) { - throw new Error("File size too large"); + throw new Error('File size too large') } - return `${base < 10 ? base.toFixed(1) : Math.round(base).toString()} ${size}`; + return `${base < 10 ? base.toFixed(1) : Math.round(base).toString()} ${size}` } diff --git a/src/Welcome.tsx b/src/Welcome.tsx index e0733ec..6e0b4de 100644 --- a/src/Welcome.tsx +++ b/src/Welcome.tsx @@ -1,5 +1,9 @@ -import { type ReactNode } from "react"; +import { type ReactNode } from 'react' +/** + * Welcome page with quick links to example CSV files. + * @returns Welcome page React node + */ export default function Welcome(): ReactNode { return (
@@ -40,5 +44,5 @@ export default function Welcome(): ReactNode {
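A minimal vitest sketch (illustrative, not part of this patch) of the expected output of formatFileSize in src/Page.tsx above, which picks a 1024-based unit via Math.floor(Math.log2(bytes) / 10); it assumes the function were exported from src/Page.tsx (it is module-private in this diff) and uses an assumed import path:

import { describe, expect, it } from 'vitest'

// hypothetical import: formatFileSize is not exported from src/Page.tsx in this diff
import { formatFileSize } from '../src/Page.js'

describe('formatFileSize', () => {
  it('keeps small values in plain bytes', () => {
    expect(formatFileSize(0)).toBe('0 b')
    expect(formatFileSize(500)).toBe('500 b')
  })

  it('switches to 1024-based units', () => {
    // log2(1536) ≈ 10.58, so i = 1 and base = 1536 / 1024 = 1.5
    expect(formatFileSize(1536)).toBe('1.5 kb')
    // base is exactly 10 here, so it is rounded instead of getting one decimal
    expect(formatFileSize(10 * 1024 * 1024)).toBe('10 mb')
  })
})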
- ); + ) } diff --git a/src/cache.ts b/src/cache.ts new file mode 100644 index 0000000..4f72e7f --- /dev/null +++ b/src/cache.ts @@ -0,0 +1,562 @@ +import type { Newline, ParseResult } from 'csv-range' + +import { checkNonNegativeInteger } from './helpers.js' + +/** + * A byte range in a CSV file, with the parsed rows + */ +export class CSVRange { + #firstByte: number // byte position of the start of the range (excludes the ignored bytes if the range starts in the middle of a row) + #byteCount = 0 // number of bytes in the range + #rowByteCount = 0 // number of bytes in the range's rows (excludes ignored bytes) + #rows: string[][] = [] // sorted array of the range rows, filtering out the empty rows and the header if any + #firstRow: number // index of the first row in the range (0-based) + + constructor({ firstByte, firstRow }: { firstByte: number, firstRow: number }) { + this.#firstByte = checkNonNegativeInteger(firstByte) + this.#firstRow = checkNonNegativeInteger(firstRow) + } + + /* + * Get the number of rows in the range + * @returns The number of rows in the range + */ + get rowCount() { + return this.#rows.length + } + + /** + * Get the number of bytes covered by the range + * @returns The number of bytes in the range + */ + get byteCount() { + return this.#byteCount + } + + /** + * Get the first byte of the range + * @returns The first byte of the range + */ + get firstByte(): number { + return this.#firstByte + } + + /** + * Get the first row number of the range + * @returns The first row number of the range + */ + get firstRow(): number { + return this.#firstRow + } + + /** + * Set the first row number of the range + * @param value The first row number + */ + set firstRow(value: number) { + this.#firstRow = checkNonNegativeInteger(value) + } + + /** + * Get the row number and first byte of the next row in the range + * @returns The next row number and first byte + */ + get next(): { row: number, firstByte: number } { + return { + row: this.#firstRow + this.#rows.length, + firstByte: this.#firstByte + this.#byteCount, + } + } + + /** + * Get the rows in the range + * @returns The rows in the range + */ + get rows(): string[][] { + return this.#rows + } + + /** + * Get the number of bytes covered by the rows in the range + * @returns The number of bytes in the range's rows + */ + get rowByteCount(): number { + return this.#rowByteCount + } + + /** + * Append a new row into the range + * @param row The row to append. It must be contiguous to the last row. + * @param row.byteOffset The byte offset of the row in the file. + * @param row.byteCount The number of bytes of the row. + * @param row.cells The cells of the row. If not provided, the row is considered ignored and not cached (i.e., empty or header). + */ + append(row: { byteOffset: number, byteCount: number, cells?: string[] }) { + checkNonNegativeInteger(row.byteOffset) + checkNonNegativeInteger(row.byteCount) + if (row.byteOffset !== this.#firstByte + this.#byteCount) { + throw new Error('Cannot append the row: it is not contiguous with the last row') + } + this.#byteCount = row.byteOffset + row.byteCount - this.#firstByte + if (row.cells) { + this.#rows.push(row.cells.slice()) + this.#rowByteCount += row.byteCount + } + } + + /** + * Prepend a new row into the range + * @param row The row to prepend. It must be contiguous to the first row. + * @param row.byteOffset The byte offset of the row in the file. + * @param row.byteCount The number of bytes of the row. + * @param row.cells The cells of the row. 
If not provided, the row is considered ignored and not cached (i.e., empty or header). + */ + prepend(row: { byteOffset: number, byteCount: number, cells?: string[] }): void { + checkNonNegativeInteger(row.byteOffset) + checkNonNegativeInteger(row.byteCount) + if (row.byteOffset + row.byteCount !== this.#firstByte) { + throw new Error('Cannot prepend the row: it is not contiguous with the first row') + } + this.#firstByte = row.byteOffset + this.#byteCount += row.byteCount + if (row.cells) { + this.#firstRow -= 1 + this.#rows.unshift(row.cells.slice()) + this.#rowByteCount += row.byteCount + } + } + + /** + * Get the cells of a given row + * @param options Options + * @param options.row The row number (0-based) + * @returns The cells of the row, or undefined if the row is not in this range + */ + getCells({ row }: { row: number }): string[] | undefined { + checkNonNegativeInteger(row) + const rowIndex = row - this.#firstRow + if (rowIndex < 0 || rowIndex >= this.#rows.length) { + return undefined + } + return this.#rows[rowIndex] + } + + /** + * Merge another CSVRange into this one. The other range must be immediately after this one. + * @param followingRange The range to merge + */ + merge(followingRange: CSVRange): void { + if (this.next.firstByte !== followingRange.firstByte) { + throw new Error('Cannot merge ranges: not contiguous') + } + this.#byteCount += followingRange.byteCount + for (const row of followingRange.rows) { + this.#rows.push(row) + } + } +} + +/** + * Cache of a remote CSV file + */ +export class CSVCache { + /** + * The total byte length of the CSV file + */ + #byteLength: number + /** + * The column names + */ + #columnNames: string[] + /** + * The header byte count + */ + #headerByteCount: number + /** + * The serial range, starting at byte 0 + */ + #serial: CSVRange + /** + * The random access ranges, after the serial range + */ + #random: CSVRange[] + /** + * The CSV delimiter + */ + #delimiter: string + /** + * The CSV newline character(s) + */ + #newline: Newline + /** + * The average number of bytes per row, used for estimating row positions + */ + #averageRowByteCount: number | undefined = undefined + + constructor({ columnNames, headerByteCount, byteLength, delimiter, newline }: { columnNames: string[], headerByteCount?: number, byteLength: number, delimiter: string, newline: Newline }) { + headerByteCount ??= 0 + checkNonNegativeInteger(headerByteCount) + checkNonNegativeInteger(byteLength) + if (columnNames.length === 0) { + throw new Error('Cannot create CSVCache: no column names provided') + } + if (headerByteCount > byteLength) { + throw new Error('Initial byte count exceeds byte length') + } + this.#byteLength = byteLength + this.#columnNames = columnNames.slice() + this.#headerByteCount = headerByteCount + this.#delimiter = delimiter + this.#newline = newline + this.#serial = new CSVRange({ firstByte: 0, firstRow: 0 }) + // Account for the header row and previous ignored rows if any + this.#serial.append({ + byteOffset: 0, + byteCount: headerByteCount, + }) + this.#random = [] + + this.#updateAverageRowByteCount() + } + + /** + * Get the CSV column names + * @returns The column names + */ + get columnNames(): string[] { + return this.#columnNames.slice() + } + + /** + * Get the header byte count + * @returns The header byte count + */ + get headerByteCount(): number { + return this.#headerByteCount + } + + /** + * Get the number of rows in the cache + * @returns The number of rows in the cache + */ + get rowCount(): number { + return 
this.#serial.rowCount + this.#random.reduce((sum, range) => sum + range.rowCount, 0) + } + + /** + * Get the CSV delimiter + * @returns The CSV delimiter + */ + get delimiter(): string { + return this.#delimiter + } + + /** + * Get the number of columns + * @returns The number of columns + */ + get columnCount(): number { + return this.#columnNames.length + } + + /** + * Get the CSV newline character(s) + * @returns The CSV newline character(s) + */ + get newline(): Newline { + return this.#newline + } + + /** + * Update the average row byte count based on the cached rows + */ + #updateAverageRowByteCount(): void { + const rowByteCount = this.#serial.rowByteCount + this.#random.reduce((sum, range) => sum + range.rowByteCount, 0) + const rowCount = this.#serial.rowCount + this.#random.reduce((sum, range) => sum + range.rowCount, 0) + if (rowCount === 0) { + this.#averageRowByteCount = undefined + return + } + const averageRowByteCount = rowByteCount / rowCount + this.#averageRowByteCount = averageRowByteCount + } + + /** + * Re-assign row numbers in random ranges to reduce overlaps + */ + updateRowEstimates(): void { + const averageRowByteCount = this.averageRowByteCount + if (averageRowByteCount === undefined || averageRowByteCount === 0) { + return + } + + let previousRange = this.#serial + + // loop on the random ranges + for (const range of this.#random) { + // v8 ignore if -- @preserve + if (range.firstByte <= previousRange.next.firstByte) { + // should not happen + throw new Error('Cannot update row estimates: overlap with previous range') + } + + const firstRow = Math.max( + // ensure at least one row gap + previousRange.next.row + 1, + // estimate based on byte position + Math.round(previousRange.next.row + (range.firstByte - previousRange.next.firstByte) / averageRowByteCount), + ) + + range.firstRow = firstRow + + previousRange = range + } + } + + get averageRowByteCount(): number | undefined { + return this.#averageRowByteCount + } + + get allRowsCached(): boolean { + return this.#serial.next.firstByte >= this.#byteLength + } + + /** + * Get an estimate of the total number of rows in the CSV file + * @returns The estimated number of rows and if it's an estimate + */ + get numRowsEstimate(): { numRows: number, isEstimate: boolean } { + const averageRowByteCount = this.averageRowByteCount + const numRows = this.allRowsCached + ? this.rowCount + : averageRowByteCount === 0 || averageRowByteCount === undefined + ? 
0 + : Math.round((this.#byteLength - this.headerByteCount) / averageRowByteCount) + return { + numRows, + isEstimate: !this.allRowsCached, + } + } + + /** + * Get the cells of a given row + * @param options Options + * @param options.row The row number (0-based) + * @returns The cells of the row, or undefined if the row is not in this range + */ + #getCells({ row }: { row: number }): string[] | undefined { + const cells = this.#serial.getCells({ row }) + if (cells !== undefined) { + return cells + } + // find the range containing this row + for (const range of this.#random) { + const cells = range.getCells({ row }) + if (cells !== undefined) { + return cells + } + } + return undefined + } + + /** + * Get the cell value at the given row and column + * @param options Options + * @param options.row The row index (0-based) + * @param options.column The column index (0-based) + * @returns The cell value, or undefined if the row is not in the cache + */ + getCell({ row, column }: { row: number, column: number }): { value: string } | undefined { + checkNonNegativeInteger(column) + if (column >= this.columnCount) { + throw new Error(`Column index out of bounds: ${column}`) + } + const cells = this.#getCells({ row }) + if (cells === undefined) { + return undefined + } + return { + // return empty string for missing columns in existing row + value: cells[column] ?? '', + } + } + + /** + * Get the row number for the given row index. + * @param options Options + * @param options.row The row index (0-based) + * @returns The row number, or undefined if not found + */ + getRowNumber({ row }: { row: number }): { value: number } | undefined { + if (this.#getCells({ row }) === undefined) { + return undefined + } + return { value: row } + } + + /** + * Store a new row + * @param row The row to store. + * @param row.byteOffset The byte offset of the row in the file. + * @param row.byteCount The number of bytes of the row. + * @param row.cells The cells of the row. If not provided, the row is considered ignored and not cached (i.e., empty or header). 
+ */ + store(row: { byteOffset: number, byteCount: number, cells?: string[] }): void { + checkNonNegativeInteger(row.byteOffset) + checkNonNegativeInteger(row.byteCount) + if (row.byteOffset + row.byteCount > this.#byteLength) { + throw new Error('Cannot store the row: byte range is out of bounds') + } + + let previousRange = this.#serial + + // loop on the ranges to find where to put the row + for (const [i, nextRange] of [...this.#random, undefined].entries()) { + if (row.byteOffset < previousRange.next.firstByte) { + throw new Error('Cannot store the row: overlap with previous range') + } + + // the row is after the next range + if (nextRange && row.byteOffset >= nextRange.next.firstByte) { + previousRange = nextRange + continue + } + + if (nextRange && row.byteOffset + row.byteCount > nextRange.firstByte) { + throw new Error('Cannot store the row: overlap with next range') + } + + // append to the previous range + if (row.byteOffset === previousRange.next.firstByte) { + previousRange.append(row) + // merge with the next range if needed + if (nextRange && previousRange.next.firstByte === nextRange.firstByte) { + // merge nextRange into previousRange + this.#merge(previousRange, nextRange) + } + break + } + + // prepend to the next range + if (nextRange && row.byteOffset + row.byteCount === nextRange.firstByte) { + nextRange.prepend(row) + break + } + + // create a new random range between previousRange and nextRange + const averageRowByteCount = this.averageRowByteCount + ? this.averageRowByteCount + : row.byteCount // use the current row byte count if we don't have an average yet (0 or undefined) + const firstRow = Math.max( + Math.round(previousRange.next.row + (row.byteOffset - previousRange.next.firstByte) / averageRowByteCount), + previousRange.next.row + 1, // ensure at least one row gap + ) + // Note that we might have a situation where firstRow overlaps with nextRange.previous.row. It will be fixed the next time we update the average row byte count. + const newRange = new CSVRange({ firstByte: row.byteOffset, firstRow }) + newRange.append(row) + this.#random.splice(i, 0, newRange) + break + } + + // Update the average row byte count + this.#updateAverageRowByteCount() + } + + /** + * Merge two CSV ranges + * @param range The first range. It can be the serial range, or a random range. + * @param followingRange The second range, must be immediately after the first range. It is a random range. 
+ */ + #merge(range: CSVRange, followingRange: CSVRange): void { + const index = this.#random.indexOf(followingRange) + // v8 ignore if -- @preserve + if (index === -1) { + throw new Error('Cannot merge ranges: following range not found in cache') + } + range.merge(followingRange) + // remove followingRange from the random ranges + this.#random.splice(index, 1) + } + + /** + * Get the next missing row for the given row range + * @param options Options + * @param options.rowStart The start row index (0-based, inclusive) + * @param options.rowEnd The end row index (0-based, exclusive) + * @returns The first byte of the next missing row and if it's an estimate, or undefined if no missing row + */ + getNextMissingRow({ rowStart, rowEnd }: { rowStart: number, rowEnd: number }): { firstByte: number, isEstimate: boolean } | undefined { + checkNonNegativeInteger(rowStart) + checkNonNegativeInteger(rowEnd) + + // try every empty range between cached rows + let first = this.#serial.next + + if (first.firstByte >= this.#byteLength) { + // No missing row if all rows are cached + return undefined + } + + for (const { firstRow, next } of [...this.#random, { firstRow: Infinity, next: { row: Infinity, firstByte: this.#byteLength } }]) { + if (rowStart < first.row) { + // ignore cached rows + rowStart = first.row + } + if (rowEnd <= rowStart) { + // no missing row (rowEnd is exclusive) + return + } + if (rowStart < firstRow) { + // the first row is in this missing range + if (rowStart === first.row || this.averageRowByteCount === undefined) { + // if the start row is the same as the first row, we can use the first byte directly + // Same if we cannot estimate positions + return { firstByte: first.firstByte, isEstimate: false } + } + // estimate the byte position based on the average row byte count, trying to get the middle of the previous row + const delta = Math.floor((rowStart - first.row - 0.5) * this.averageRowByteCount) + const firstByte = first.firstByte + Math.max(0, delta) + + // avoid going beyond the end of the file + if (firstByte >= this.#byteLength) { + return undefined + } + + return { + firstByte, + isEstimate: true, + } + } + // try the next missing range + first = next + } + } + + /** + * Check if the given byte range is stored in the cache. + * @param options Options + * @param options.byteOffset The byte offset of the range. + * @returns True if the byte range is stored, false otherwise. 
+ */ + isStored({ byteOffset }: { byteOffset: number }): boolean { + checkNonNegativeInteger(byteOffset) + + for (const range of [this.#serial, ...this.#random]) { + if (range.firstByte <= byteOffset && byteOffset < range.next.firstByte) { + return true + } + } + + return false + } + + static fromHeader({ header, byteLength }: { header: ParseResult, byteLength: number }): CSVCache { + return new CSVCache({ + columnNames: header.row, + byteLength, + delimiter: header.meta.delimiter, + newline: header.meta.newline, + headerByteCount: header.meta.byteOffset + header.meta.byteCount, + }) + } +} diff --git a/src/csv.ts b/src/csv.ts deleted file mode 100644 index 4456042..0000000 --- a/src/csv.ts +++ /dev/null @@ -1,635 +0,0 @@ -import type { - CustomEventTarget, - DataFrame, - DataFrameEvents, - OrderBy, - ResolvedValue, -} from "hightable"; -import { - checkSignal, - createEventTarget, - validateFetchParams, - validateGetCellParams, - validateGetRowNumberParams, -} from "hightable"; -import Papa from "@severo_tests/papaparse"; -import { formatBytes } from "./helpers.js"; - -interface Metadata { - isPartial: boolean; - cachedBytes: number; -} -export type CSVDataFrame = DataFrame; - -const defaultChunkSize = 50 * 1024; // 50 KB, same as Papaparse default -const defaultMaxCachedBytes = 20 * 1024 * 1024; // 20 MB -const paddingRows = 20; // fetch a bit before and after the requested range, to avoid cutting rows - -interface Params { - url: string; - byteLength: number; // total byte length of the file - chunkSize?: number; // download chunk size - maxCachedBytes?: number; // max number of bytes to keep in cache before evicting old rows - signal?: AbortSignal; // to abort the DataFrame creation and any ongoing fetches -} - -// rows are indexed by their first byte position. Includes empty row and the header -interface ParsedRow { - start: number; // byte position of the start of the row - end: number; // byte position of the end of the row, including the delimiters and the following linebreak if any (exclusive) - data: string[]; // raw string values, as parsed by Papaparse (no eviction yet - we could handle them with "undefined" cells) -} - -// ranges are sorted. 
We use binary search to find the missing ranges, and then merge them if needed -interface ParsedRange { - start: number; // byte position of the start of the range (excludes the ignored bytes if the range starts in the middle of a row) - end: number; // byte position of the end of the range (exclusive) - validRows: ParsedRow[]; // sorted array of the range rows, filtering out the empty rows and the header if any -} - -interface Cache { - header: CSVHeader; - serial: ParsedRange; - random: ParsedRange[]; - cachedBytes: number; // total number of bytes cached (for statistics) - chunkSize: number; // chunk size used for fetching - url: string; - averageRowBytes: number; // average number of bytes per row -} - -interface CSVHeader extends ParsedRow { - delimiter: string; - newline: Exclude["newline"], undefined>; - bytes: number; // number of bytes used by the header row, including the delimiters and the following linebreak if any -} - -/** - * Helpers to load a CSV file as a dataframe - */ -export async function csvDataFrame({ - url, - byteLength, - chunkSize, - maxCachedBytes, - signal, -}: Params): Promise { - checkSignal(signal); - chunkSize ??= defaultChunkSize; - maxCachedBytes ??= defaultMaxCachedBytes; - - if (chunkSize > maxCachedBytes) { - throw new Error( - `chunkSize (${formatBytes(chunkSize)}) cannot be greater than maxCachedBytes (${formatBytes(maxCachedBytes)})` - ); - } - - const eventTarget = createEventTarget(); - - // const parsedRowIndex: ParsedRowIndex = new Map(); // first byte offset -> parsed row // TODO(SL): delete? I think it's not needed - - // type assertion is needed because Typescript cannot see if variable is updated in the Papa.parse step callback - let header = undefined as CSVHeader | undefined; - let cursor = 0; - let cachedBytes = 0; - - // Fetch the first chunk (stop at 80% of the chunk size, to avoid doing another fetch, as we have no way to limit to one chunk in Papaparse) - // TODO(SL): should we return the dataframe after parsing one row, and then keep parsing the chunk, but triggering updates?) - const firstParsedRange: ParsedRange = { - start: 0, - end: cursor, - validRows: [], - }; - const isPartial = await new Promise((resolve, reject) => { - Papa.parse(url, { - download: true, - chunkSize, - header: false, - worker: false, // don't use the worker! because it does not provide the cursor at a line level! - skipEmptyLines: false, // to be able to compute the byte ranges. Beware, it requires post processing (see result.rows.at(-1), for example, when fetching all the rows) - dynamicTyping: false, // keep strings, and let the user convert them if needed - step: ({ data, meta }, parser) => { - const parsedRow = { - start: cursor, - end: meta.cursor, // it's not exact! if the string contains "é", it counts as 2 bytes, but Papaparse counts it as 1 character!!! - data, - }; - cursor = parsedRow.end; - - if ( - cursor >= 0.8 * chunkSize || // stop at 80% of the chunk size, to avoid doing another fetch, as we have no way to limit to one chunk in Papaparse - firstParsedRange.validRows.length >= 100 - ) { - // abort the parsing, we have enough rows for now - parser.abort(); - return; - } - // update the range end, even if the row is empty - firstParsedRange.end = parsedRow.end; - - if (isEmpty(data)) { - // empty row, ignore - return; - } - if (header === undefined) { - // TODO(SL): should the header be included in the first range bytes? 
- // first non-empty row: header - header = { - ...parsedRow, - delimiter: meta.delimiter, - newline: getNewline(meta.linebreak), - bytes: parsedRow.end - parsedRow.start, - }; - } else { - if (meta.delimiter !== header.delimiter) { - reject( - new Error( - `Delimiter changed from ${header.delimiter} to ${meta.delimiter}` - ) - ); - } - if (meta.linebreak !== header.newline) { - reject( - new Error( - `Linebreak changed from ${header.newline} to ${meta.linebreak}` - ) - ); - } - // valid row: add it to the range - firstParsedRange.validRows.push(parsedRow); - // for the statistics: - cachedBytes += parsedRow.end - parsedRow.start; - } - // the errors field is ignored - }, - complete: ({ meta }) => { - const isPartial = meta.aborted; - resolve(isPartial); - }, - }); - }); - console.log(firstParsedRange); - if (header === undefined) { - throw new Error("No header row found in the CSV file"); - } - - const averageRowBytes = getAverageRowBytes({ - serial: firstParsedRange, - header, - random: [], - }); - - const cache: Cache = { - header, - serial: firstParsedRange, - random: [], - cachedBytes, - chunkSize, - url, - averageRowBytes, - }; - - const numRows = - isPartial && averageRowBytes - ? Math.floor(byteLength / averageRowBytes) // see https://github.com/hyparam/hightable/issues/298 - : firstParsedRange.validRows.length; - - const columnDescriptors: DataFrame["columnDescriptors"] = header.data.map( - (name) => ({ name }) - ); - const metadata: Metadata = { - isPartial, - cachedBytes, - }; - - function getCell({ - row, - column, - orderBy, - }: { - row: number; - column: string; - orderBy?: OrderBy; - }): ResolvedValue | undefined { - // TODO(SL): how to handle the last rows when the number of rows is uncertain? - validateGetCellParams({ - row, - column, - orderBy, - data: { - numRows: Infinity, // we don't (always) know the exact number of rows yet - columnDescriptors, - }, - }); - const parsedRow = findParsedRow({ cache, row }); - if (parsedRow) { - const columnIndex = columnDescriptors.findIndex( - (cd) => cd.name === column - ); - if (columnIndex === -1) { - // should not happen because of the validation above - throw new Error(`Column not found: ${column}`); - } - const value = parsedRow.data[columnIndex]; // TODO(SL): we could convert to a type, here or in the cache - // return value ? { value } : undefined; - return { value: value ?? "" }; // return empty cells as empty strings, because we assume that all the row has been parsed - } - return undefined; - } - - function getRowNumber({ - row, - orderBy, - }: { - row: number; - orderBy?: OrderBy; - }): ResolvedValue | undefined { - // TODO(SL): how to handle the last rows when the number of rows is uncertain? - validateGetRowNumberParams({ - row, - orderBy, - data: { - numRows: Infinity, // we don't (always) know the exact number of rows yet - columnDescriptors, - }, - }); - const parsedRow = findParsedRow({ cache, row }); - if (parsedRow?.type === "serial") { - return { value: row }; - } - if (parsedRow?.type === "random") { - // TODO(SL): how could we convey the fact that the row number is approximate? 
- return { value: row }; - } - } - - async function fetch({ - rowStart, - rowEnd, - columns, - orderBy, - signal, - }: { - rowStart: number; - rowEnd: number; - columns?: string[]; - orderBy?: OrderBy; - signal?: AbortSignal; - }): Promise { - checkSignal(signal); - - validateFetchParams({ - rowStart, - rowEnd, - columns, - orderBy, - data: { - numRows: Infinity, // we don't (always) know the exact number of rows yet - columnDescriptors, - }, - }); - - let previousAverageRowBytes = undefined as number | undefined; - let i = 0; - while (previousAverageRowBytes !== cache.averageRowBytes && i < 10) { - i++; // to avoid infinite loops in case of instability - - if (rowEnd < cache.serial.validRows.length) { - // all rows are in the serial range - return; - } - if (rowStart < cache.serial.validRows.length) { - // ignore the rows already cached - rowStart = cache.serial.validRows.length; - } - - const estimatedStart = Math.floor( - cache.serial.end + - (rowStart - cache.serial.validRows.length) * cache.averageRowBytes - ); - const estimatedEnd = Math.min( - byteLength, - Math.ceil( - cache.serial.end + - (rowEnd - cache.serial.validRows.length) * cache.averageRowBytes - ) - ); - // find the ranges of rows we don't have yet - // start with the full range, and then remove the parts we have - const missingRange = { - start: estimatedStart, - end: estimatedEnd, - }; - const missingRanges: { start: number; end: number }[] = []; - // Loop on the random ranges, which are sorted and non-overlapping - for (const range of cache.random) { - if (missingRange.end <= range.start) { - // no overlap, and no more overlap possible - missingRanges.push(missingRange); - break; - } - if (missingRange.start >= range.end) { - // no overlap, check the next range - continue; - } - // overlap - if (missingRange.start < range.start) { - // add the part before the overlap - missingRanges.push({ - start: missingRange.start, - end: range.start, - }); - } - // move the start to the end of the range - missingRange.start = range.end; - if (missingRange.start >= missingRange.end) { - // no more missing range - break; - } - } - if (missingRange.start < missingRange.end) { - // add the remaining part - missingRanges.push(missingRange); - } - - if (missingRanges.length === 0) { - // all rows are already cached - return; - } - - // fetch each missing range and fill the cache - await Promise.all( - missingRanges.map(({ start, end }) => - fetchRange({ start, end, signal, cache, eventTarget }) - ) - ).finally(() => { - // TODO(SL): Update the average size of a row? - // For now, we keep it constant, to provide stability - otherwise empty rows appear after the update - previousAverageRowBytes = cache.averageRowBytes; - cache.averageRowBytes = getAverageRowBytes(cache); - //eventTarget.dispatchEvent(new CustomEvent("resolve")); // to refresh the table (hmmm. Or better call fetch again until we reach stability?) - }); - } - - // TODO(SL): evict old rows (or only cell contents?) if needed - // TODO(SL): handle fetching (and most importantly storing) only part of the columns? 
- // Note that source.coop does not support negative ranges for now https://github.com/source-cooperative/data.source.coop/issues/57 (for https://github.com/hyparam/hightable/issues/298#issuecomment-3381567614) - } - - return { - metadata, - numRows, - columnDescriptors, - getCell, - getRowNumber, - fetch, - eventTarget, - }; -} - -function findParsedRow({ cache, row }: { cache: Cache; row: number }): - | (ParsedRow & { - type: "serial" | "random"; - }) - | undefined { - // TODO(SL): optimize (cache the row numbers?) - const serialParsedRow = cache.serial.validRows[row]; - if (serialParsedRow) { - return { - type: "serial", - ...serialParsedRow, - }; - } - const estimatedStart = - cache.serial.end + - (row - cache.serial.validRows.length) * cache.averageRowBytes; - // find the range containing this row - const range = cache.random.find( - (r) => estimatedStart >= r.start && estimatedStart < r.end - ); - if (!range) { - return; // not found - } - // estimate the row index of the first row in the range - const firstRowIndex = - cache.serial.validRows.length + - Math.round( - // is .round() better than .floor() or .ceil()? - (range.start - cache.serial.end) / cache.averageRowBytes - ); - // get the row in the range. This way, we ensure that calls to findParsedRow() with increasing row numbers - // will return rows in the same order, without holes or duplicates, even if the averageRowBytes is not accurate. - const parsedRow = range.validRows[row - firstRowIndex]; - if (!parsedRow) { - return; // not found - } - return { - type: "random", - ...parsedRow, - }; -} - -function getAverageRowBytes( - cache: Pick -): number { - let numRows = cache.serial.validRows.length; - let numBytes = cache.serial.end - cache.serial.start - cache.header.bytes; - - for (const range of cache.random) { - numRows += range.validRows.length; - numBytes += range.end - range.start; - } - if (numRows === 0 || numBytes === 0) { - throw new Error("No data row found in the CSV file"); - } - return numBytes / numRows; -} - -function getNewline( - linebreak: string -): Exclude["newline"], undefined> { - switch (linebreak) { - case "\r\n": - case "\n": - case "\r": - return linebreak; - default: - throw new Error(`Unsupported linebreak: ${linebreak}`); // should not happen - } -} - -function fetchRange({ - start, - end, - signal, - cache, - eventTarget, -}: { - start: number; - end: number; - signal?: AbortSignal; - cache: Cache; - eventTarget: CustomEventTarget; -}): Promise { - checkSignal(signal); - - const firstChunkOffset = Math.max( - cache.serial.end, // don't fetch known rows again - Math.floor(start - paddingRows * cache.averageRowBytes) // fetch a bit before, to ensure we get a complete first row - ); - let cursor = firstChunkOffset; - let isFirstStep = true; - const endCursor = Math.ceil(end + paddingRows * cache.averageRowBytes); // fetch a bit after, just in case the average is not accurate - - return new Promise((resolve, reject) => { - Papa.parse(cache.url, { - download: true, - header: false, - worker: false, // don't use the worker! because it does not provide the cursor at a line level! - skipEmptyLines: false, // to be able to compute the byte ranges. 
Beware, it requires post processing (see result.rows.at(-1), for example, when fetching all the rows) - dynamicTyping: false, // keep strings, and let the user convert them if needed - delimiter: cache.header.delimiter, - newline: cache.header.newline, - chunkSize: cache.chunkSize, - firstChunkOffset, // custom option, only available in the modified Papaparse @severo_tests/papaparse - step: ({ data, meta }, parser) => { - if (signal?.aborted) { - parser.abort(); - return; - } - - const parsedRow = { - start: cursor, - end: firstChunkOffset + meta.cursor, - data, - }; - cursor = parsedRow.end; - - if (isFirstStep) { - isFirstStep = false; - return; // ignore the first row, because we cannot know if it's partial or complete - } - - if (meta.delimiter !== cache.header.delimiter) { - reject( - new Error( - `Delimiter changed from ${cache.header.delimiter} to ${meta.delimiter}` - ) - ); - } - if (meta.linebreak !== cache.header.newline) { - reject( - new Error( - `Linebreak changed from ${cache.header.newline} to ${meta.linebreak}` - ) - ); - } - - // add the row to the cache - if (addParsedRowToCache({ cache, parsedRow })) { - // send an event for the new row - eventTarget.dispatchEvent(new CustomEvent("resolve")); - } - - if (cursor >= endCursor) { - // abort the parsing, we have enough rows for now - parser.abort(); - return; - } - - isFirstStep = false; - }, - complete: () => { - resolve(); - }, - }); - }); -} - -function isEmpty(data: string[]): boolean { - return data.length <= 1 && data[0]?.trim() === ""; -} - -/** - * Returns true if the row was added to the cache, false if it was already present or empty - */ -function addParsedRowToCache({ - cache, - parsedRow, -}: { - cache: Cache; - parsedRow: ParsedRow; -}): boolean { - // TODO(SL): optimize - const inserted = !isEmpty(parsedRow.data); - const allRanges = [cache.serial, ...cache.random]; - - if ( - allRanges.some((r) => parsedRow.start < r.end && parsedRow.end > r.start) - ) { - // an overlap means the row is already in the cache. 
ignore it - return false; - } - - for (const [i, range] of allRanges.entries()) { - if (parsedRow.end < range.start) { - // create a new random range before this one - const newRange: ParsedRange = { - start: parsedRow.start, - end: parsedRow.end, - validRows: [], - }; - if (inserted) { - newRange.validRows.push(parsedRow); - cache.cachedBytes += parsedRow.end - parsedRow.start; - } - // the range cannot be cache.serial because of the check above, let's assert it - if (i < 1) { - throw new Error( - "Unexpected state: cannot insert before the serial range" - ); - } - cache.random.splice(i - 1, 0, newRange); - return inserted; - } - if (parsedRow.end === range.start) { - // expand this range at the beginning - range.start = parsedRow.start; - if (inserted) { - range.validRows.unshift(parsedRow); - cache.cachedBytes += parsedRow.end - parsedRow.start; - } - return inserted; - } - if (parsedRow.start === range.end) { - // expand this range at the end - range.end = parsedRow.end; - if (inserted) { - range.validRows.push(parsedRow); - cache.cachedBytes += parsedRow.end - parsedRow.start; - } - // try to merge with the next range - const nextRange = cache.random[i]; // equivalent to allRanges[i + 1] - if (nextRange && range.end === nextRange.start) { - range.end = nextRange.end; - for (const r of nextRange.validRows) { - range.validRows.push(r); - } - // remove the next range - cache.random.splice(i, 1); - } - return inserted; - } - } - // add a new range at the end - const newRange: ParsedRange = { - start: parsedRow.start, - end: parsedRow.end, - validRows: [], - }; - if (inserted) { - newRange.validRows.push(parsedRow); - cache.cachedBytes += parsedRow.end - parsedRow.start; - } - cache.random.push(newRange); - return inserted; -} diff --git a/src/dataframe.ts b/src/dataframe.ts new file mode 100644 index 0000000..3bb6c66 --- /dev/null +++ b/src/dataframe.ts @@ -0,0 +1,337 @@ +import { isEmptyLine, parseURL } from 'csv-range' +import type { + DataFrame, + DataFrameEvents, + OrderBy, + ResolvedValue, +} from 'hightable' +import { + checkSignal, + createEventTarget, + validateFetchParams, + validateGetCellParams, + validateGetRowNumberParams, +} from 'hightable' + +import { CSVCache } from './cache' +import { checkNonNegativeInteger } from './helpers.js' + +const defaultChunkSize = 500 * 1024 // 500 KB +const defaultInitialRowCount = 500 +// const paddingRowCount = 20 // fetch a bit before and after the requested range, to avoid cutting rows + +interface Params { + url: string + byteLength: number // total byte length of the file + chunkSize?: number // download chunk size + initialRowCount?: number // number of rows to fetch at dataframe creation +} + +export type CSVDataFrame = DataFrame<{ isNumRowsEstimated: boolean }> + +/** + * Helpers to load a CSV file as a dataframe + * @param params - params for creating the dataframe + * @param params.url - URL of the CSV file + * @param params.byteLength - total byte length of the file + * @param params.chunkSize - download chunk size + * @param params.initialRowCount - number of rows to fetch at dataframe creation + * @returns DataFrame representing the CSV file + */ +export async function csvDataFrame(params: Params): Promise { + const chunkSize = params.chunkSize ?? defaultChunkSize + const initialRowCount = params.initialRowCount ?? 
defaultInitialRowCount + const { url, byteLength } = params + + const eventTarget = createEventTarget() + const cache = await initializeCSVCachefromURL({ url, byteLength, chunkSize, initialRowCount }) + const { numRows, isEstimate } = cache.numRowsEstimate + const metadata = { + isNumRowsEstimated: isEstimate, + } + const columnDescriptors: DataFrame['columnDescriptors'] = cache.columnNames.map(name => ({ name })) + + /** + * Get the cached cell value at the given row and column. + * @param options - options + * @param options.row - row index + * @param options.column - column name + * @param options.orderBy - optional sorting order + * @returns The cell value, or undefined if not cached + */ + function getCell({ + row, + column, + orderBy, + }: { + row: number + column: string + orderBy?: OrderBy + }): ResolvedValue | undefined { + // until the CSV is fully loaded, we don't know the exact number of rows + const numRows = cache.allRowsCached ? cache.rowCount : Infinity + validateGetCellParams({ + row, + column, + orderBy, + data: { + numRows, + columnDescriptors, + }, + }) + const columnIndex = columnDescriptors.findIndex( + cd => cd.name === column, + ) + // v8 ignore if -- @preserve + if (columnIndex === -1) { + // should not happen because of the validation above + throw new Error(`Column not found: ${column}`) + } + return cache.getCell({ row, column: columnIndex }) + } + + /** + * Get the cached row number for the given row index. + * @param options - options + * @param options.row - row index + * @param options.orderBy - optional sorting order + * @returns The row number, or undefined if not cached + */ + function getRowNumber({ + row, + orderBy, + }: { + row: number + orderBy?: OrderBy + }): ResolvedValue | undefined { + // until the CSV is fully loaded, we don't know the exact number of rows + const numRows = cache.allRowsCached ? cache.rowCount : Infinity + validateGetRowNumberParams({ + row, + orderBy, + data: { + numRows, + columnDescriptors, + }, + }) + return cache.getRowNumber({ row }) + } + + /** + * Fetch the given range of rows, filling the cache as needed. 
+   * @param options - options
+   * @param options.rowStart - starting row index
+   * @param options.rowEnd - ending row index (exclusive)
+   * @param options.columns - optional list of columns to fetch
+   * @param options.orderBy - optional sorting order
+   * @param options.signal - optional abort signal
+   */
+  async function fetch({
+    rowStart,
+    rowEnd,
+    columns,
+    orderBy,
+    signal,
+  }: {
+    rowStart: number
+    rowEnd: number
+    columns?: string[]
+    orderBy?: OrderBy
+    signal?: AbortSignal
+  }): Promise<void> {
+    checkSignal(signal)
+
+    validateFetchParams({
+      rowStart,
+      rowEnd,
+      columns,
+      orderBy,
+      data: {
+        numRows: Infinity, // we don't (always) know the exact number of rows yet
+        columnDescriptors,
+      },
+    })
+
+    if (cache.allRowsCached) {
+      // all rows are already cached
+      if (rowEnd > cache.rowCount) {
+        // requested rows are beyond the end of the file
+        throw new Error(`Requested rows are beyond the end of the file: ${rowEnd} > ${cache.rowCount}`)
+      }
+      // else nothing to do
+      return
+    }
+
+    const maxLoops = (rowEnd - rowStart) + 10 // safety to avoid infinite loops
+    let hasFetchedSomeRows = false
+
+    let i = 0
+    while (true) {
+      // fetch all missing ranges
+      i++
+      let next = cache.getNextMissingRow({ rowStart, rowEnd })
+      let j = 0
+      while (next) {
+        // fetch next missing range
+        j++
+        // v8 ignore if -- @preserve
+        if (j > maxLoops) {
+          // should not happen
+          throw new Error('Maximum fetch loops exceeded')
+        }
+        const firstByte = next.firstByte
+        const ignoreFirstRow = next.isEstimate // if it's an estimate, we may be cutting a row
+        let isFirstRow = true
+        let k = 0
+        for await (const result of parseURL(url, {
+          delimiter: cache.delimiter,
+          newline: cache.newline,
+          chunkSize,
+          firstByte,
+          lastByte: byteLength - 1,
+        })) {
+          checkSignal(signal)
+          k++
+          // v8 ignore if -- @preserve
+          if (k > maxLoops) {
+            // should not happen
+            throw new Error('Maximum parse loops exceeded')
+          }
+          if (isFirstRow && ignoreFirstRow) {
+            isFirstRow = false
+            continue
+          }
+          if (result.meta.byteCount === 0) {
+            // no progress, avoid infinite loop
+            // it's the last line in the file and it's empty
+            next = undefined
+            break
+          }
+
+          // Store the new row in the cache
+          if (!cache.isStored({ byteOffset: result.meta.byteOffset })) {
+            const isEmpty = isEmptyLine(result.row)
+            cache.store({
+              cells: isEmpty ? undefined : result.row,
+              byteOffset: result.meta.byteOffset,
+              byteCount: result.meta.byteCount,
+            })
+            hasFetchedSomeRows ||= !isEmpty
+          }
+
+          // next row
+          next = cache.getNextMissingRow({ rowStart, rowEnd })
+          if (!next) {
+            // no more missing ranges
+            break
+          }
+          const nextByte = result.meta.byteOffset + result.meta.byteCount
+          if (next.firstByte > nextByte + chunkSize) {
+            // the next missing range is beyond the current chunk, so we can stop the current loop and start a new fetch
+            break
+          }
+          // otherwise, continue fetching in the current loop.
+          // Some rows might already be cached; that's fine, since fetching is slower than parsing.
+        }
+
+        if (k === 0) {
+          // No progress (no row was fetched in this missing range)
+          // Break to avoid an infinite loop
+          break
+          // For example, this occurs when the estimated byte offset is beyond the end of the file.
+          // To fix that, we could fetch more rows at the start to improve the estimation, then retry.
+          // See https://github.com/source-cooperative/csv-table/issues/11
+        }
+      }
+
+      // update the cache stats (average row size, firstRow of each random access block, etc.)
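+      // Note: estimates are refreshed only here, between outer iterations (not after every parsed row),
+      // so that row numbering stays stable while a chunk is being parsed. The checks below then exit the
+      // outer loop once no requested row is missing, or once the next missing byte offset stops advancing.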
+      cache.updateRowEstimates()
+
+      // and then check again if all the requested rows have been fetched
+      const updatedNext = cache.getNextMissingRow({ rowStart, rowEnd })
+
+      // if all the requested rows are now cached, we can exit
+      if (!updatedNext) {
+        break
+      }
+
+      // if we made no progress, we can also exit to avoid infinite loops
+      if (next && updatedNext.firstByte === next.firstByte) {
+        break
+      }
+
+      // v8 ignore if -- @preserve
+      if (i >= maxLoops) {
+        // should not happen
+        throw new Error('Maximum estimation loops exceeded')
+      }
+
+      // else, continue the loop
+    }
+
+    // Dispatch resolve event if some rows were fetched
+    // We do it only at the end, because the row numbers might change while fetching, producing unstable behavior.
+    if (hasFetchedSomeRows) {
+      eventTarget.dispatchEvent(new CustomEvent('resolve'))
+    }
+  }
+
+  return {
+    metadata,
+    numRows,
+    columnDescriptors,
+    getCell,
+    getRowNumber,
+    fetch,
+    eventTarget,
+  }
+}
+
+/**
+ * Create a CSVCache from a remote CSV file URL
+ * @param options Options
+ * @param options.url The URL of the CSV file
+ * @param options.byteLength The byte length of the CSV file
+ * @param options.chunkSize The chunk size to use when fetching the CSV file
+ * @param options.initialRowCount The initial number of rows to fetch
+ * @returns A promise that resolves to the CSVCache
+ */
+async function initializeCSVCachefromURL({ url, byteLength, chunkSize, initialRowCount }: { url: string, byteLength: number, chunkSize: number, initialRowCount: number }): Promise<CSVCache> {
+  checkNonNegativeInteger(byteLength)
+  checkNonNegativeInteger(chunkSize)
+  checkNonNegativeInteger(initialRowCount)
+
+  // the cache can only be created once the header row has been found
+  let cache: CSVCache | undefined = undefined
+
+  // Fetch the first rows, including the header
+  for await (const result of parseURL(url, { chunkSize, lastByte: byteLength - 1 })) {
+    if (cache === undefined) {
+      if (isEmptyLine(result.row, { greedy: true })) {
+        continue // skip empty lines before the header
+      }
+      // first non-empty row is the header
+      cache = CSVCache.fromHeader({ header: result, byteLength })
+      continue
+    }
+    else if (cache.rowCount >= initialRowCount) {
+      // enough rows for now
+      break
+    }
+    else {
+      // data row
+      cache.store({
+        // ignore empty lines
+        cells: isEmptyLine(result.row) ? undefined : result.row,
+        byteOffset: result.meta.byteOffset,
+        byteCount: result.meta.byteCount,
+      })
+    }
+  }
+
+  if (cache === undefined) {
+    throw new Error('No row found in the CSV file')
+  }
+
+  return cache
+}
diff --git a/src/helpers.ts b/src/helpers.ts
index 8a77b99..859e3c9 100644
--- a/src/helpers.ts
+++ b/src/helpers.ts
@@ -1,9 +1,10 @@
 /**
  * Helper function to join class names
- *
+ * @param names - class names to join
+ * @returns Joined class names
  */
 export function cn(...names: (string | undefined | false)[]): string {
-  return names.filter((n) => n).join(' ')
+  return names.filter(n => n).join(' ')
 }
 
 /**
@@ -13,16 +14,15 @@ export function cn(...names: (string | undefined | false)[]): string {
 /**
  * Get the byte length of a URL using a HEAD request.
  * If requestInit is provided, it will be passed to fetch.
- * - * @param {string} url - * @param {RequestInit} [requestInit] fetch options - * @param {typeof globalThis.fetch} [customFetch] fetch function to use - * @returns {Promise} + * @param url - The URL to fetch + * @param requestInit - Fetch options + * @param customFetch - Fetch function to use + * @returns The byte length of the URL */ export async function byteLengthFromUrl(url: string, requestInit?: RequestInit, customFetch?: typeof globalThis.fetch): Promise { const fetch = customFetch ?? globalThis.fetch return await fetch(url, { ...requestInit, method: 'HEAD' }) - .then(res => { + .then((res) => { if (!res.ok) throw new Error(`fetch head failed ${res.status.toString()}`) const length = res.headers.get('Content-Length') if (!length) throw new Error('missing content length') @@ -30,13 +30,14 @@ export async function byteLengthFromUrl(url: string, requestInit?: RequestInit, }) } -const bytesFormat = new Intl.NumberFormat('en-US', { - style: 'unit', - unit: 'byte', - unitDisplay: 'narrow', - maximumFractionDigits: 0, - }) - -export function formatBytes(bytes: number): string { - return bytesFormat.format(bytes) +/** + * Throws if the provided value is not a non-negative integer. + * @param value The desired value. + * @returns The validated value: a non-negative integer. + */ +export function checkNonNegativeInteger(value: number): number { + if (!Number.isInteger(value) || value < 0) { + throw new Error('Value is not a non-negative integer') + } + return value } diff --git a/src/main.tsx b/src/main.tsx index cb18a1e..23d2bf8 100644 --- a/src/main.tsx +++ b/src/main.tsx @@ -1,14 +1,16 @@ -import "hightable/src/HighTable.css"; -import { StrictMode } from "react"; -import { createRoot } from "react-dom/client"; -import "./styles/index.css"; -import App from "./App.tsx"; +import 'hightable/src/HighTable.css' +import './styles/index.css' -const app = document.getElementById("app"); -if (!app) throw new Error("missing app element"); +import { StrictMode } from 'react' +import { createRoot } from 'react-dom/client' + +import App from './App.tsx' + +const app = document.getElementById('app') +if (!app) throw new Error('missing app element') createRoot(app).render( - -); + , +) diff --git a/src/styles/hightable.css b/src/styles/hightable.css index ab64645..7925aa6 100644 --- a/src/styles/hightable.css +++ b/src/styles/hightable.css @@ -21,8 +21,8 @@ tbody { [role="rowheader"] { text-align: right; - padding-right: 2px; - font-size: 0.625rem; /* column width computation is not optimal in hightable, reducing the font size to avoid cutting the numbers */ + padding-right: 0.5em; + font-size: 0.75em; font-family: var(--code-font-family); } [role="cell"] { diff --git a/test/cache.test.ts b/test/cache.test.ts new file mode 100644 index 0000000..31d94a7 --- /dev/null +++ b/test/cache.test.ts @@ -0,0 +1,772 @@ +import { describe, expect, it } from 'vitest' + +import { CSVCache, CSVRange } from '../src/cache.js' + +describe('CSVRange', () => { + it('should initialize correctly', () => { + const range = new CSVRange({ firstByte: 0, firstRow: 0 }) + expect(range.rowCount).toBe(0) + expect(range.byteCount).toBe(0) + expect(range.firstRow).toBe(0) + expect(range.next).toStrictEqual({ firstByte: 0, row: 0 }) + expect(range.rows).toEqual([]) + expect(range.getCells({ row: 0 })).toBeUndefined() + }) + + it('should initialize correctly at a random position', () => { + const range = new CSVRange({ firstByte: 100, firstRow: 10 }) + expect(range.rowCount).toBe(0) + expect(range.byteCount).toBe(0) + 
expect(range.firstRow).toBe(10) + expect(range.next).toStrictEqual({ firstByte: 100, row: 10 }) + expect(range.rows).toStrictEqual([]) + expect(range.getCells({ row: 10 })).toBeUndefined() + }) + + it('firstRow can be set', () => { + const range = new CSVRange({ firstByte: 0, firstRow: 0 }) + expect(range.firstRow).toBe(0) + range.firstRow = 5 + expect(range.firstRow).toBe(5) + }) + + it.each([-1, +Infinity, NaN, 1.5])('firstRow setter throws for incorrect value: %d', (value) => { + const range = new CSVRange({ firstByte: 0, firstRow: 0 }) + expect(range.firstRow).toBe(0) + expect(() => { + range.firstRow = value + }).toThrow() + }) + + it('should add rows correctly', () => { + const range = new CSVRange({ firstByte: 100, firstRow: 10 }) + range.append({ + byteOffset: 100, + byteCount: 10, + }) + range.append({ + cells: ['d', 'e', 'f'], + byteOffset: 110, + byteCount: 10, + }) + range.prepend({ + cells: ['1', '2', '3'], + byteOffset: 90, + byteCount: 10, + }) + range.prepend({ + byteOffset: 80, + byteCount: 10, + }) + expect(range.rowCount).toBe(2) + expect(range.byteCount).toBe(40) + expect(range.firstRow).toBe(9) + expect(range.next).toStrictEqual({ firstByte: 120, row: 11 }) + expect(range.rows).toEqual([['1', '2', '3'], ['d', 'e', 'f']]) + expect(range.getCells({ row: 7 })).toBeUndefined() + expect(range.getCells({ row: 8 })).toBeUndefined() + expect(range.getCells({ row: 9 })).toEqual(['1', '2', '3']) + expect(range.getCells({ row: 10 })).toEqual(['d', 'e', 'f']) + expect(range.getCells({ row: 11 })).toBeUndefined() + expect(range.getCells({ row: 12 })).toBeUndefined() + }) + + it('should throw when adding non-contiguous rows', () => { + const range = new CSVRange({ firstByte: 100, firstRow: 10 }) + expect(() => { + range.prepend({ + cells: ['x'], + byteOffset: 10, + byteCount: 10, + }) + }).toThrow('Cannot prepend the row: it is not contiguous with the first row') + expect(() => { + range.append({ + byteOffset: 120, + byteCount: 10, + }) + }).toThrow('Cannot append the row: it is not contiguous with the last row') + }) + + it('should merge with another range correctly', () => { + const range1 = new CSVRange({ firstByte: 0, firstRow: 0 }) + range1.append({ + byteOffset: 0, + byteCount: 10, + }) + range1.append({ + cells: ['b', 'c', 'd'], + byteOffset: 10, + byteCount: 10, + }) + + const range2 = new CSVRange({ firstByte: 20, firstRow: 2 }) + range2.append({ + cells: ['e', 'f', 'g'], + byteOffset: 20, + byteCount: 10, + }) + range2.append({ + byteOffset: 30, + byteCount: 10, + }) + + range1.merge(range2) + + expect(range1.rowCount).toBe(2) + expect(range1.byteCount).toBe(40) + expect(range1.firstRow).toBe(0) + expect(range1.next).toStrictEqual({ firstByte: 40, row: 2 }) + expect(range1.rows).toEqual([['b', 'c', 'd'], ['e', 'f', 'g']]) + expect(range1.getCells({ row: 0 })).toEqual(['b', 'c', 'd']) + expect(range1.getCells({ row: 1 })).toEqual(['e', 'f', 'g']) + }) + + it('should throw when merging non-contiguous ranges', () => { + const range1 = new CSVRange({ firstByte: 0, firstRow: 0 }) + const range2 = new CSVRange({ firstByte: 30, firstRow: 3 }) + expect(() => { + range1.merge(range2) + }).toThrow('Cannot merge ranges: not contiguous') + }) +}) + +describe('CSVCache', () => { + describe('constructor', () => { + it('should initialize correctly', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 15, + delimiter: ',', + newline: '\n' as const, + }) + expect(cache.columnNames).toEqual(['col1', 'col2', 'col3']) + 
expect(cache.rowCount).toBe(0) + expect(cache.delimiter).toBe(',') + expect(cache.newline).toBe('\n') + expect(cache.averageRowByteCount).toBe(undefined) + expect(cache.headerByteCount).toBe(15) + expect(cache.allRowsCached).toBe(false) + expect(cache.numRowsEstimate).toEqual({ numRows: 0, isEstimate: true }) + expect(cache.getCell({ row: 0, column: 0 })).toBeUndefined() + expect(cache.getRowNumber({ row: 0 })).toBeUndefined() + expect(cache.getNextMissingRow({ rowStart: 0, rowEnd: 10 })).toEqual({ firstByte: 15, isEstimate: false }) + // As no rows are cached, any range should return the same firstByte + expect(cache.getNextMissingRow({ rowStart: 100, rowEnd: 200 })).toEqual({ firstByte: 15, isEstimate: false }) + }) + + it('should initialize from header correctly', () => { + const header = { + row: ['col1', 'col2', 'col3'], + errors: [], + meta: { + byteOffset: 0, + byteCount: 15, + charCount: 14, + delimiter: ',', + newline: '\n' as const, + }, + } + const cache = CSVCache.fromHeader({ header, byteLength: 100 }) + expect(cache.columnNames).toEqual(['col1', 'col2', 'col3']) + expect(cache.rowCount).toBe(0) + expect(cache.delimiter).toBe(',') + expect(cache.newline).toBe('\n') + expect(cache.averageRowByteCount).toBe(undefined) + expect(cache.getCell({ row: 0, column: 0 })).toBeUndefined() + expect(cache.getRowNumber({ row: 0 })).toBeUndefined() + expect(cache.getNextMissingRow({ rowStart: 0, rowEnd: 10 })).toEqual({ firstByte: 15, isEstimate: false }) + // As no rows are cached, any range should return the same firstByte + expect(cache.getNextMissingRow({ rowStart: 100, rowEnd: 200 })).toEqual({ firstByte: 15, isEstimate: false }) + }) + + it.each([ + { columnNames: [] }, + { headerByteCount: 200 }, + ])('throws when initializing from invalid options: %o', (options) => { + expect(() => { + new CSVCache({ + columnNames: options.columnNames ?? ['a', 'b', 'c'], + byteLength: 100, + headerByteCount: options.headerByteCount ?? 
15, + delimiter: ',', + newline: '\n' as const, + }) + }).toThrow() + }) + }) + + describe('store and retrieve rows', () => { + it('should store and retrieve rows correctly', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 15, + delimiter: ',', + newline: '\n' as const, + }) + // should be row 0 + cache.store({ + cells: ['a', 'b', 'c'], + byteOffset: 15, + byteCount: 0, // not forbidden + }) + // The average row byte count should be 0 + expect(cache.averageRowByteCount).toBe(0) + // should be row 1 + cache.store({ + cells: ['d', 'e', 'f'], + byteOffset: 15, + byteCount: 20, + }) + // The average row byte count should be 10 now + expect(cache.averageRowByteCount).toBe(10) + expect(cache.numRowsEstimate).toEqual({ numRows: 9, isEstimate: true }) + // the first row must be retrieved correctly + expect(cache.getCell({ row: 0, column: 0 })).toStrictEqual({ value: 'a' }) + expect(cache.getCell({ row: 0, column: 1 })).toStrictEqual({ value: 'b' }) + expect(cache.getCell({ row: 0, column: 2 })).toStrictEqual({ value: 'c' }) + expect(cache.getRowNumber({ row: 0 })).toStrictEqual({ value: 0 }) + // the second row must be retrieved correctly + expect(cache.getCell({ row: 1, column: 0 })).toStrictEqual({ value: 'd' }) + expect(cache.getCell({ row: 1, column: 1 })).toStrictEqual({ value: 'e' }) + expect(cache.getCell({ row: 1, column: 2 })).toStrictEqual({ value: 'f' }) + expect(cache.getRowNumber({ row: 1 })).toStrictEqual({ value: 1 }) + + // This row should be in a new random range, and the estimated row number should be 3 + cache.store({ + cells: ['d', 'e', 'f'], + byteOffset: 44, + byteCount: 7, + }) + // the average row byte count should be 9 now + expect(cache.averageRowByteCount).toBe(9) + // it should be retrieved correctly with row: 2 + expect(cache.getCell({ row: 3, column: 0 })).toStrictEqual({ value: 'd' }) + expect(cache.getCell({ row: 3, column: 1 })).toStrictEqual({ value: 'e' }) + expect(cache.getCell({ row: 3, column: 2 })).toStrictEqual({ value: 'f' }) + expect(cache.getRowNumber({ row: 3 })).toStrictEqual({ value: 3 }) + + // adding an ignored row should not change the average row byte count + cache.store({ + byteOffset: 60, + byteCount: 5, + }) + expect(cache.averageRowByteCount).toBe(9) + }) + + it('should report if all the rows are cached', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + expect(cache.allRowsCached).toBe(false) + // Cache some rows + cache.store({ + cells: ['a', 'b', 'c'], + byteOffset: 10, + byteCount: 10, + }) + cache.store({ + cells: ['d', 'e', 'f'], + byteOffset: 20, + byteCount: 10, + }) + expect(cache.allRowsCached).toBe(false) + // Simulate caching all rows by adjusting byteLength and storing a row at the end + cache.store({ + cells: ['x', 'y', 'z'], + byteOffset: 30, + byteCount: 70, + }) + expect(cache.allRowsCached).toBe(true) + expect(cache.numRowsEstimate).toEqual({ numRows: 3, isEstimate: false }) + }) + + it.each([ + { byteOffset: -10, byteCount: 10 }, + { byteOffset: 10, byteCount: -10 }, + { byteOffset: 100, byteCount: 60 }, + { byteOffset: 95, byteCount: 10 }, + ])('throws if trying to store a row outside of the cache bounds: %o', ({ byteOffset, byteCount }) => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + expect(() => { + cache.store({ 
+ cells: ['a', 'b', 'c'], + byteOffset, + byteCount, + }) + }).toThrow() + }) + + it.each([ + { initialRow: { byteOffset: 10, byteCount: 10 }, row: { byteOffset: 5, byteCount: 10 }, expected: 'Cannot store the row: overlap with previous range' }, + { initialRow: { byteOffset: 20, byteCount: 10 }, row: { byteOffset: 5, byteCount: 10 }, expected: 'Cannot store the row: overlap with previous range' }, + { initialRow: { byteOffset: 10, byteCount: 10 }, row: { byteOffset: 15, byteCount: 10 }, expected: 'Cannot store the row: overlap with previous range' }, + { initialRow: { byteOffset: 20, byteCount: 10 }, row: { byteOffset: 15, byteCount: 10 }, expected: 'Cannot store the row: overlap with next range' }, + { initialRow: { byteOffset: 20, byteCount: 10 }, row: { byteOffset: 25, byteCount: 10 }, expected: 'Cannot store the row: overlap with next range' }, + ])('throws when storing rows that overlap existing cached rows: %o', ({ initialRow, row, expected }) => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 200, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + // Store an initial row + cache.store(initialRow) + expect(() => { + cache.store(row) + }).toThrow(expected) + }) + + it('should merge two adjacent ranges when storing rows', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + // Store first row + cache.store({ + cells: ['a', 'b', 'c'], + byteOffset: 10, + byteCount: 10, + }) + // Store third row, creating a new random range + cache.store({ + cells: ['e', 'f', 'g'], + byteOffset: 40, + byteCount: 60, + }) + // At this point, we should have two ranges, and not all the rows have been cached + expect(cache.allRowsCached).toBe(false) + expect(cache.rowCount).toBe(2) + // Now store the second row, which should merge the two ranges + cache.store({ + cells: ['d', 'e', 'f'], + byteOffset: 20, + byteCount: 20, + }) + // now, the cache should include all the rows + expect(cache.allRowsCached).toBe(true) + expect(cache.rowCount).toBe(3) + }) + + it('should prepend rows to a random range correctly', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + // Store a row to create a random range + cache.store({ + cells: ['d', 'e', 'f'], + byteOffset: 40, + byteCount: 10, + }) + // Prepend a row to the random range + cache.store({ + cells: ['a', 'b', 'c'], + byteOffset: 30, + byteCount: 10, + }) + // Check that both rows are stored correctly + expect(cache.getCell({ row: 2, column: 0 })).toStrictEqual({ value: 'a' }) + expect(cache.getCell({ row: 2, column: 1 })).toStrictEqual({ value: 'b' }) + expect(cache.getCell({ row: 2, column: 2 })).toStrictEqual({ value: 'c' }) + expect(cache.getCell({ row: 3, column: 0 })).toStrictEqual({ value: 'd' }) + expect(cache.getCell({ row: 3, column: 1 })).toStrictEqual({ value: 'e' }) + expect(cache.getCell({ row: 3, column: 2 })).toStrictEqual({ value: 'f' }) + }) + + it('should store rows after checking multiple random ranges', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 300, + headerByteCount: 0, + delimiter: ',', + newline: '\n' as const, + }) + // Store first row to create first random range + cache.store({ + byteOffset: 10, + byteCount: 10, + }) + // Store second row to create second random range + cache.store({ + 
byteOffset: 30, + byteCount: 10, + }) + // Store third row to create third random range + cache.store({ + byteOffset: 50, + byteCount: 10, + }) + }) + + it('should create a random range between existing ones if there is space', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 300, + headerByteCount: 0, + delimiter: ',', + newline: '\n' as const, + }) + // Store first row to create first random range + cache.store({ + byteOffset: 10, + byteCount: 10, + }) + // Store third row to create second random range + cache.store({ + byteOffset: 50, + byteCount: 10, + }) + // Now store the second row in between + cache.store({ + byteOffset: 30, + byteCount: 10, + }) + }) + }) + + describe('getCell', () => { + it('should throw for out-of-bounds rows or columns', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + expect(() => cache.getCell({ row: -1, column: 0 })).toThrow() + expect(() => cache.getCell({ row: 0, column: -1 })).toThrow() + expect(() => cache.getCell({ row: 0, column: 3 })).toThrow() + }) + + it('should return undefined for missing row', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + // Store a row + cache.store({ + cells: ['a', 'b', 'c'], + byteOffset: 10, + byteCount: 10, + }) + // Missing row + expect(cache.getCell({ row: 1, column: 0 })).toBeUndefined() + }) + + it('should return empty string for missing column', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + // Store a row + cache.store({ + cells: ['a', 'b'], // missing last column + byteOffset: 10, + byteCount: 10, + }) + // Missing column + expect(cache.getCell({ row: 0, column: 2 })).toStrictEqual({ value: '' }) + }) + + it('should return the correct cell value for existing rows and columns', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + // Store a row + cache.store({ + cells: ['a', 'b', 'c'], + byteOffset: 10, + byteCount: 10, + }) + // Existing row and columns + expect(cache.getCell({ row: 0, column: 0 })).toStrictEqual({ value: 'a' }) + expect(cache.getCell({ row: 0, column: 1 })).toStrictEqual({ value: 'b' }) + expect(cache.getCell({ row: 0, column: 2 })).toStrictEqual({ value: 'c' }) + }) + }) + + describe('getRowNumber', () => { + it('should throw for out-of-bounds rows', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + expect(() => cache.getRowNumber({ row: -1 })).toThrow() + }) + + it('should return undefined for missing row', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + // Store a row + cache.store({ + cells: ['a', 'b', 'c'], + byteOffset: 10, + byteCount: 10, + }) + // Missing row + expect(cache.getRowNumber({ row: 1 })).toBeUndefined() + }) + + it('should return the correct row number for existing rows', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 10, + delimiter: 
',', + newline: '\n' as const, + }) + // Store a row + cache.store({ + cells: ['a', 'b', 'c'], + byteOffset: 10, + byteCount: 10, + }) + // Existing row + expect(cache.getRowNumber({ row: 0 })).toStrictEqual({ value: 0 }) + }) + + it('should estimate row numbers for random ranges', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 200, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + // Store a row to create a random range + cache.store({ + cells: ['d', 'e', 'f'], + byteOffset: 40, + byteCount: 10, + }) + // Store a row to create another random range + cache.store({ + cells: ['d', 'e', 'f'], + byteOffset: 60, + byteCount: 10, + }) + // the estimated row number for the last row should be 4 + expect(cache.getRowNumber({ row: 5 })).toEqual({ value: 5 }) + }) + }) + + describe('getNextMissingRow', () => { + it.each([ + { range: { rowStart: 0, rowEnd: 10 } }, + { range: { rowStart: 5, rowEnd: 15 } }, + { range: { rowStart: 10, rowEnd: 20 } }, + ])('should propose the first byte if the cache is empty since it cannot estimate positions: %o', ({ range }) => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 200, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + expect(cache.getNextMissingRow(range)).toEqual({ firstByte: 10, isEstimate: false }) + }) + + it.each([ + { options: { rowStart: 0, rowEnd: 0 }, expected: undefined }, + { options: { rowStart: 0, rowEnd: 2 }, expected: { firstByte: 20, isEstimate: false } }, + { options: { rowStart: 1, rowEnd: 2 }, expected: { firstByte: 20, isEstimate: false } }, + { options: { rowStart: 2, rowEnd: 2 }, expected: undefined }, + { options: { rowStart: 3, rowEnd: 2 }, expected: undefined }, + ])('should propose the next missing row correctly after #serial range: %o', ({ options, expected }) => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 200, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + // Stored row is in the #serial range, next.firstByte is 20 + cache.store({ byteOffset: 10, byteCount: 10, cells: ['x', 'y', 'z'] }) + expect(cache.getNextMissingRow(options)).toEqual(expected) + }) + + it.each([ + { options: { rowStart: 0, rowEnd: 4 }, expected: { firstByte: 10, isEstimate: false } }, + { options: { rowStart: 1, rowEnd: 4 }, expected: { firstByte: 15, isEstimate: true } }, + { options: { rowStart: 2, rowEnd: 4 }, expected: { firstByte: 40, isEstimate: false } }, + { options: { rowStart: 3, rowEnd: 4 }, expected: { firstByte: 40, isEstimate: false } }, + { options: { rowStart: 4, rowEnd: 5 }, expected: { firstByte: 45, isEstimate: true } }, + ])('should propose the next missing row correctly around a random range: %o', ({ options, expected }) => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 200, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + // Stored row is in a random range, next.firstByte is 40 + cache.store({ byteOffset: 30, byteCount: 10, cells: ['x', 'y', 'z'] }) + // the average row byte count is now 10 + expect(cache.averageRowByteCount).toBe(10) + // the estimated row number is 2 + expect(cache.getRowNumber({ row: 2 })).toEqual({ value: 2 }) + + expect(cache.getNextMissingRow(options)).toEqual(expected) + }) + + it('should return undefined when all rows are cached', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 
10, + delimiter: ',', + newline: '\n' as const, + }) + // Simulate caching all rows by storing a row that covers the entire byteLength + cache.store({ + cells: ['x', 'y', 'z'], + byteOffset: 10, + byteCount: 90, + }) + expect(cache.allRowsCached).toBe(true) + expect(cache.getNextMissingRow({ rowStart: 0, rowEnd: 10 })).toBeUndefined() + }) + + it('should return undefined when all rows until rowEnd - 1 is already cached (exclusive)', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 200, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + // Store rows 0, 1, and 2 + cache.store({ byteOffset: 10, byteCount: 10, cells: ['a', 'b', 'c'] }) // row 0 + cache.store({ byteOffset: 20, byteCount: 10, cells: ['d', 'e', 'f'] }) // row 1 + cache.store({ byteOffset: 30, byteCount: 10, cells: ['g', 'h', 'i'] }) // row 2 + + expect(cache.getNextMissingRow({ rowStart: 2, rowEnd: 2 })).toBeUndefined() + expect(cache.getNextMissingRow({ rowStart: 2, rowEnd: 3 })).toBeUndefined() + expect(cache.getNextMissingRow({ rowStart: 2, rowEnd: 4 })).toStrictEqual({ firstByte: 40, isEstimate: false }) + }) + }) + + describe('isStored', () => { + it('should correctly identify stored and non-stored rows', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 100, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + // Store a row + cache.store({ + cells: ['a', 'b', 'c'], + byteOffset: 10, + byteCount: 10, + }) + cache.store({ + byteOffset: 60, + byteCount: 10, + }) + cache.store({ + cells: ['a', 'b', 'c'], + byteOffset: 80, + byteCount: 10, + }) + expect(cache.isStored({ byteOffset: 10 })).toBe(true) + expect(cache.isStored({ byteOffset: 20 })).toBe(false) + expect(cache.isStored({ byteOffset: 60 })).toBe(true) + expect(cache.isStored({ byteOffset: 80 })).toBe(true) + }) + }) + + describe('updateRowEstimates', () => { + it('should update row estimates correctly', () => { + const cache = new CSVCache({ + columnNames: ['col1', 'col2', 'col3'], + byteLength: 200, + headerByteCount: 10, + delimiter: ',', + newline: '\n' as const, + }) + // Initially, averageRowByteCount should be undefined + expect(cache.averageRowByteCount).toBeUndefined() + expect(cache.rowCount).toBe(0) + // Update changes nothing as there are no rows + cache.updateRowEstimates() + expect(cache.averageRowByteCount).toBeUndefined() + expect(cache.rowCount).toBe(0) + // Store an empty row + cache.store({ + byteOffset: 10, + byteCount: 10, + }) + expect(cache.averageRowByteCount).toBeUndefined() + expect(cache.rowCount).toBe(0) + // Update changes nothing as there are no stored rows with cells + cache.updateRowEstimates() + expect(cache.averageRowByteCount).toBeUndefined() + expect(cache.rowCount).toBe(0) + // Store one row + cache.store({ + cells: ['a', 'b', 'c'], + byteOffset: 20, + byteCount: 10, + }) + expect(cache.averageRowByteCount).toBe(10) + expect(cache.rowCount).toBe(1) + // Store another row at a random position + cache.store({ + cells: ['d', 'e', 'f'], + byteOffset: 50, + byteCount: 70, + }) + expect(cache.rowCount).toBe(2) + // its row index is estimated to be 3, because it has been computed with the current average (10) + expect(cache.getRowNumber({ row: 3 })).toEqual({ value: 3 }) + // but the averageRowByteCount should be updated correctly + expect(cache.averageRowByteCount).toBe(40) + // Now, update the row estimates + cache.updateRowEstimates() + // The averageRowByteCount should remain the same + 
expect(cache.averageRowByteCount).toBe(40) + // but the row index should be updated to 2 + expect(cache.getRowNumber({ row: 2 })).toEqual({ value: 2 }) + }) + }) +}) diff --git a/test/dataframe.test.ts b/test/dataframe.test.ts new file mode 100644 index 0000000..354c22e --- /dev/null +++ b/test/dataframe.test.ts @@ -0,0 +1,571 @@ +import { toURL } from 'csv-range' +import { describe, expect, it } from 'vitest' + +import { csvDataFrame } from '../src/dataframe' + +describe('csvDataFrame', () => { + describe('creation', () => { + it('should create a dataframe from a CSV file', async () => { + const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n' + // Includes the extra character ' ' to handle bug in Node.js (see toURL) + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + const df = await csvDataFrame({ + url, + byteLength: fileSize, + }) + expect(df.numRows).toBe(3) + expect(df.metadata).toEqual({ isNumRowsEstimated: false }) + expect(df.columnDescriptors).toStrictEqual(['a', 'b', 'c'].map(name => ({ name }))) + revoke() + }) + + it('should throw when creating a dataframe from an empty CSV file without a header (one column is required)', async () => { + const text = '\n' + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + await expect(csvDataFrame({ + url, + byteLength: fileSize, + })).rejects.toThrow() + revoke() + }) + + it('should create a dataframe from a CSV file with only a header', async () => { + const text = 'a' + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + const df = await csvDataFrame({ + url, + byteLength: fileSize, + }) + expect(df.numRows).toBe(0) + expect(df.metadata).toEqual({ isNumRowsEstimated: false }) + expect(df.columnDescriptors).toStrictEqual(['a'].map(name => ({ name }))) + expect(() => df.getRowNumber({ row: 0 })).toThrow() + revoke() + }) + + it('should fetch initial rows when specified', async () => { + const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n' + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + const df = await csvDataFrame({ + url, + byteLength: fileSize, + initialRowCount: 2, + }) + expect(df.getCell({ row: 1, column: 'b' })).toStrictEqual({ value: '5' }) + expect(df.getCell({ row: 2, column: 'b' })).toBeUndefined() + revoke() + }) + + it.each([ + { text: 'a,b,c\n1111,2222,3333\nn44,55,66\n77,88,99\n', expectedRows: 2 }, + { text: 'a,b,c\n11,22,33\n44,55,66\n77,88,99\n', expectedRows: 3 }, + { text: 'a,b,c\n1,2,3\nn44,55,66\n77,88,99\n', expectedRows: 4 }, + ])('when the CSV file is not fully loaded, the number of rows might be inaccurate: $expectedRows (correct: 3)', async ({ text, expectedRows }) => { + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + const df = await csvDataFrame({ + url, + byteLength: fileSize, + initialRowCount: 1, + }) + // with only one row loaded, the average row size is not accurate enough to estimate the number of rows + expect(df.numRows).toBe(expectedRows) // the estimate is not perfect + expect(df.metadata).toEqual({ isNumRowsEstimated: true }) + revoke() + }) + + it('should fetch initial rows when specified, even if it is 0', async () => { + const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n' + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + const df = await csvDataFrame({ + url, + byteLength: fileSize, + initialRowCount: 0, + }) + expect(df.getCell({ row: 1, column: 'b' })).toBeUndefined() + revoke() + }) + + it('should ignore empty rows when fetching initial rows', async 
() => { + const text = 'a,b,c\n1,2,3\n\n7,8,9\n' + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + const df = await csvDataFrame({ + url, + byteLength: fileSize, + initialRowCount: 4, + }) + expect(df.getCell({ row: 1, column: 'b' })).toStrictEqual({ value: '8' }) + expect(() => df.getCell({ row: 2, column: 'b' })).toThrow() + revoke() + }) + + it('should not ignore rows with empty cells when fetching initial rows', async () => { + const text = 'a,b,c\n1,2,3\n,\n7,8,9\n' + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + const df = await csvDataFrame({ + url, + byteLength: fileSize, + }) + expect(df.getCell({ row: 1, column: 'b' })).toStrictEqual({ value: '' }) + expect(df.getCell({ row: 2, column: 'b' })).toStrictEqual({ value: '8' }) + revoke() + }) + + it('should ignore empty rows before the header', async () => { + const text = '\n\n\na,b,c\n1,2,3\n4,5,6\n7,8,9\n' + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + const df = await csvDataFrame({ + url, + byteLength: fileSize, + }) + expect(df.numRows).toBe(3) + expect(df.columnDescriptors).toStrictEqual(['a', 'b', 'c'].map(name => ({ name }))) + expect(df.getCell({ row: 0, column: 'a' })).toStrictEqual({ value: '1' }) + revoke() + }) + + it('should ignore rows with only whitespace and delimiters before the header', async () => { + const text = '\n\t\n , , \n,,\na,b,c\n1,2,3\n4,5,6\n7,8,9\n' + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + const df = await csvDataFrame({ + url, + byteLength: fileSize, + }) + expect(df.numRows).toBe(3) + expect(df.columnDescriptors).toStrictEqual(['a', 'b', 'c'].map(name => ({ name }))) + expect(df.getCell({ row: 0, column: 'a' })).toStrictEqual({ value: '1' }) + revoke() + }) + }) + + describe('getCell', () => { + it('should return the correct cell values', async () => { + const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n' + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + const df = await csvDataFrame({ + url, + byteLength: fileSize, + }) + expect(df.getCell({ row: 0, column: 'a' })).toStrictEqual({ value: '1' }) + expect(df.getCell({ row: 1, column: 'b' })).toStrictEqual({ value: '5' }) + expect(df.getCell({ row: 2, column: 'c' })).toStrictEqual({ value: '9' }) + revoke() + }) + + it('should throw when called with invalid parameters', async () => { + const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n' + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + const df = await csvDataFrame({ + url, + byteLength: fileSize, + }) + expect(() => df.getCell({ row: -1, column: 'a' })).toThrow() + expect(() => df.getCell({ row: 0, column: 'd' })).toThrow() + revoke() + }) + + it('should return undefined for not yet cached cells', async () => { + const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n' + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + const df = await csvDataFrame({ + url, + byteLength: fileSize, + initialRowCount: 2, + }) + expect(df.getCell({ row: 2, column: 'a' })).toBeUndefined() + revoke() + }) + + it('should return undefined for out-of-bound cells, if the dataframe is not fully loaded', async () => { + const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n' + const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true }) + const df = await csvDataFrame({ + url, + byteLength: fileSize, + initialRowCount: 2, + }) + expect(df.getCell({ row: 5, column: 'a' })).toBeUndefined() + revoke() + }) + + it('should throw for 
out-of-bound cells, if the dataframe is fully loaded', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+      })
+      expect(() => df.getCell({ row: 5, column: 'a' })).toThrow()
+      revoke()
+    })
+
+    it('should throw when called with an orderBy parameter', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+      })
+      expect(() => df.getCell({ row: 0, column: 'a', orderBy: [{ column: 'a', direction: 'ascending' }] })).toThrow()
+      revoke()
+    })
+  })
+
+  describe('getRowNumber', () => {
+    it('should return the correct row numbers', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+      })
+      expect(df.getRowNumber({ row: 0 })).toStrictEqual({ value: 0 })
+      expect(df.getRowNumber({ row: 1 })).toStrictEqual({ value: 1 })
+      expect(df.getRowNumber({ row: 2 })).toStrictEqual({ value: 2 })
+      revoke()
+    })
+
+    it('should throw when called with invalid parameters', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+      })
+      expect(() => df.getRowNumber({ row: -1 })).toThrow()
+      revoke()
+    })
+
+    it('should return undefined for not yet cached rows', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 2,
+      })
+      expect(df.getRowNumber({ row: 2 })).toBeUndefined()
+      revoke()
+    })
+
+    it('should return undefined for out-of-bound rows, if the dataframe is not fully loaded', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 2,
+      })
+      expect(df.getRowNumber({ row: 5 })).toBeUndefined()
+      revoke()
+    })
+
+    it('should throw for out-of-bound rows, if the dataframe is fully loaded', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+      })
+      expect(() => df.getRowNumber({ row: 5 })).toThrow()
+      revoke()
+    })
+
+    it('should throw when called with an orderBy parameter', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+      })
+      expect(() => df.getRowNumber({ row: 0, orderBy: [{ column: 'a', direction: 'ascending' }] })).toThrow()
+      revoke()
+    })
+  })
+
+  describe('fetch', () => {
+    it('should fetch more rows', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n10,11,12\n13,14,15\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 2,
+      })
+      expect(df.getCell({ row: 2, column: 'a' })).toBeUndefined()
+      await df.fetch?.({ rowStart: 2, rowEnd: 5 })
+      expect(df.getCell({ row: 2, column: 'a' })).toStrictEqual({ value: '7' })
+      expect(df.getCell({ row: 3, column: 'b' })).toStrictEqual({ value: '11' })
+      expect(df.getCell({ row: 4, column: 'c' })).toStrictEqual({ value: '15' })
+      revoke()
+    })
+
+    it('should fetch rows even if some are empty', async () => {
+      const text = 'a,b,c\n1,2,3\n\n4,5,6\n\n7,8,9\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 1,
+      })
+      expect(df.getCell({ row: 1, column: 'a' })).toBeUndefined()
+      await df.fetch?.({ rowStart: 1, rowEnd: 10 })
+      expect(df.getCell({ row: 1, column: 'a' })).toStrictEqual({ value: '4' })
+      expect(df.getCell({ row: 2, column: 'b' })).toStrictEqual({ value: '8' })
+      expect(() => df.getCell({ row: 3, column: 'c' })).toThrow()
+      revoke()
+    })
+
+    it('should use the chunk size when fetching rows', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n10,11,12\n13,14,15\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 1,
+        chunkSize: 8, // small chunk size to force multiple fetches
+      })
+      expect(df.getCell({ row: 1, column: 'a' })).toBeUndefined()
+      await df.fetch?.({ rowStart: 1, rowEnd: 5 })
+      expect(df.getCell({ row: 1, column: 'a' })).toStrictEqual({ value: '4' })
+      expect(df.getCell({ row: 2, column: 'b' })).toStrictEqual({ value: '8' })
+      expect(df.getCell({ row: 3, column: 'c' })).toStrictEqual({ value: '12' })
+      revoke()
+    })
+
+    it('should do nothing when fetching already cached rows', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 3,
+      })
+      expect(df.numRows).toBe(3)
+      const before = df.getCell({ row: 0, column: 'b' })
+      await df.fetch?.({ rowStart: 0, rowEnd: 1 })
+      const after = df.getCell({ row: 0, column: 'b' })
+      expect(after).toStrictEqual(before)
+      revoke()
+    })
+
+    it('should throw when fetching out-of-bound rows, if the dataframe is fully loaded', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+      })
+      await expect(df.fetch?.({ rowStart: 5, rowEnd: 10 })).rejects.toThrow()
+      revoke()
+    })
+
+    it('should fetch out-of-bound rows, if the dataframe is not fully loaded', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 2,
+      })
+      expect(df.getCell({ row: 2, column: 'a' })).toBeUndefined()
+      await df.fetch?.({ rowStart: 2, rowEnd: 10 })
+      expect(df.getCell({ row: 2, column: 'a' })).toStrictEqual({ value: '7' })
+      revoke()
+    })
+
+    it('should fetch rows at a random position if requested', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n10,11,12\n13,14,15\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 2,
+      })
+      expect(df.getCell({ row: 3, column: 'a' })).toBeUndefined()
+      await df.fetch?.({ rowStart: 3, rowEnd: 4 })
+      // row 3 is now cached, while row 2 is still not cached
+      expect(df.getCell({ row: 3, column: 'a' })).toStrictEqual({ value: '10' })
+      expect(df.getCell({ row: 2, column: 'a' })).toBeUndefined()
+      revoke()
+    })
+
+    it('will fetch an incorrect row if the average row size had been overestimated', async () => {
+      const text = 'a,b,c\n111,222,333\n4,5,6\n7,8,9\n10,11,12\n13,14,15\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 1,
+      })
+      expect(df.getCell({ row: 2, column: 'a' })).toBeUndefined()
+      await df.fetch?.({ rowStart: 2, rowEnd: 5 })
+      expect(df.getCell({ row: 2, column: 'a' })).toStrictEqual({ value: '10' }) // should be 7
+      expect(df.getCell({ row: 3, column: 'b' })).toStrictEqual({ value: '14' }) // should be 11
+      expect(df.getCell({ row: 4, column: 'c' })).toBeUndefined() // should be 15
+      revoke()
+    })
+
+    it('fails to fetch the last rows if the average row size has been overestimated', async () => {
+      const text = 'a,b,c\n111111111,222222222,333333333\n,4,5,6\n7,8,9\n10,11,12\n13,14,15\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 1,
+      })
+      expect(df.getCell({ row: 1, column: 'a' })).toBeUndefined()
+      await df.fetch?.({ rowStart: 3, rowEnd: 5 })
+      expect(df.getCell({ row: 1, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 2, column: 'b' })).toBeUndefined()
+      expect(df.getCell({ row: 3, column: 'c' })).toBeUndefined()
+      revoke()
+    })
+
+    it('should break the current parsing and start a new one if the next row is beyond one chunk size', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n10,11,12\n13,14,15\n16,17,18\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 1,
+        chunkSize: 8, // small chunk size to force multiple fetches
+      })
+      expect(df.getCell({ row: 0, column: 'a' })).toStrictEqual({ value: '1' })
+      expect(df.getCell({ row: 1, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 2, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 3, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 4, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 5, column: 'a' })).toBeUndefined()
+      await df.fetch?.({ rowStart: 2, rowEnd: 5 })
+      expect(df.getCell({ row: 1, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 2, column: 'a' })).toStrictEqual({ value: '7' })
+      expect(df.getCell({ row: 3, column: 'a' })).toStrictEqual({ value: '10' })
+      expect(df.getCell({ row: 4, column: 'a' })).toStrictEqual({ value: '13' })
+      expect(df.getCell({ row: 5, column: 'a' })).toBeUndefined()
+      await df.fetch?.({ rowStart: 1, rowEnd: 6 })
+      expect(df.getCell({ row: 1, column: 'a' })).toStrictEqual({ value: '4' })
+      expect(df.getCell({ row: 5, column: 'a' })).toStrictEqual({ value: '16' })
+      revoke()
+    })
+
+    it('fetches incorrect rows if the row estimation is incorrect', async () => {
+      const text = 'a,b,c\n111111,222222,333333\n4,5,6\n7,8,9\n10,11,12\n13,14,15\n16,17,18\n19,20,21\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 1,
+      })
+      expect(df.getCell({ row: 0, column: 'a' })).toStrictEqual({ value: '111111' })
+      expect(df.getCell({ row: 1, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 2, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 3, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 4, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 5, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 6, column: 'a' })).toBeUndefined()
+      // average row size here is 21, because of the first row
+
+      await df.fetch?.({ rowStart: 2, rowEnd: 6 })
+
+      expect(df.getCell({ row: 1, column: 'a' })).toBeUndefined()
+      // erroneously got row 3 instead of row 2, due to the overestimation of the average row size, even after correcting it once
+      expect(df.getCell({ row: 2, column: 'a' })).toStrictEqual({ value: '10' })
+      expect(df.getCell({ row: 3, column: 'a' })).toStrictEqual({ value: '13' })
+      expect(df.getCell({ row: 4, column: 'a' })).toStrictEqual({ value: '16' })
+      expect(df.getCell({ row: 5, column: 'a' })).toStrictEqual({ value: '19' })
+      // Note: the four rows were fetched, but in multiple loops, since only three of
+      // them were fetched in the first pass, as the parsing reached the end of the file
+      expect(df.getCell({ row: 6, column: 'a' })).toBeUndefined()
+      revoke()
+    })
+
+    it('does not fetch all the rows, if the row estimation is incorrect', async () => {
+      const text = 'a,b,c\n111111,222222,333333\n4,5,6\n7,8,9\n10,11,12\n13,14,15\n16,17,18\n19,20,21\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 1,
+      })
+      expect(df.getCell({ row: 0, column: 'a' })).toStrictEqual({ value: '111111' })
+      expect(df.getCell({ row: 1, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 2, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 3, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 4, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 5, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 6, column: 'a' })).toBeUndefined()
+      // average row size here is 21, because of the first row
+
+      await df.fetch?.({ rowStart: 2, rowEnd: 7 })
+
+      expect(df.getCell({ row: 1, column: 'a' })).toBeUndefined()
+      // erroneously got row 3 instead of row 2, due to the overestimation of the average row size, even after correcting it once
+      expect(df.getCell({ row: 2, column: 'a' })).toStrictEqual({ value: '10' })
+      expect(df.getCell({ row: 3, column: 'a' })).toStrictEqual({ value: '13' })
+      expect(df.getCell({ row: 4, column: 'a' })).toStrictEqual({ value: '16' })
+      expect(df.getCell({ row: 5, column: 'a' })).toStrictEqual({ value: '19' })
+      // the four rows were fetched, even if only three of them were fetched in the first pass, because the parsing reached the end of the file
+
+      // the last row, row 6, was not fetched, even if it was requested
+      expect(df.getCell({ row: 6, column: 'a' })).toBeUndefined()
+      revoke()
+    })
+
+    it('dispatches one "resolve" event when fetch is complete and rows have been resolved', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n10,11,12\n13,14,15\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 2,
+      })
+
+      let resolveEventCount = 0
+      df.eventTarget?.addEventListener('resolve', () => {
+        resolveEventCount++
+      })
+
+      await df.fetch?.({ rowStart: 2, rowEnd: 4 })
+      expect(resolveEventCount).toBe(1)
+
+      // No event because rows are already resolved
+      await df.fetch?.({ rowStart: 2, rowEnd: 4 })
+      expect(resolveEventCount).toBe(1)
+
+      await df.fetch?.({ rowStart: 2, rowEnd: 6 })
+      expect(resolveEventCount).toBe(2)
+
+      revoke()
+    })
+
+    it('can parse again the same rows, if the chunk has been fetched but some rows were already cached', async () => {
+      const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n10,11,12\n13,14,15\n'
+      const { url, revoke, fileSize } = toURL(text, { withNodeWorkaround: true })
+      const df = await csvDataFrame({
+        url,
+        byteLength: fileSize,
+        initialRowCount: 1,
+      })
+      expect(df.getCell({ row: 0, column: 'a' })).toStrictEqual({ value: '1' })
+      expect(df.getCell({ row: 1, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 2, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 3, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 4, column: 'a' })).toBeUndefined()
+
+      await df.fetch?.({ rowStart: 3, rowEnd: 4 })
+      expect(df.getCell({ row: 1, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 2, column: 'a' })).toBeUndefined()
+      expect(df.getCell({ row: 3, column: 'a' })).toStrictEqual({ value: '10' })
+      expect(df.getCell({ row: 4, column: 'a' })).toBeUndefined()
+
+      // Fetch again the same chunk
+      await df.fetch?.({ rowStart: 1, rowEnd: 5 })
+      expect(df.getCell({ row: 1, column: 'a' })).toStrictEqual({ value: '4' })
+      expect(df.getCell({ row: 2, column: 'a' })).toStrictEqual({ value: '7' })
+      expect(df.getCell({ row: 3, column: 'a' })).toStrictEqual({ value: '10' })
+      expect(df.getCell({ row: 4, column: 'a' })).toStrictEqual({ value: '13' })
+
+      revoke()
+    })
+  })
+})
diff --git a/tsconfig.json b/tsconfig.json
index 1ffef60..4a3e749 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -2,6 +2,7 @@
   "files": [],
   "references": [
     { "path": "./tsconfig.app.json" },
+    { "path": "./tsconfig.test.json" },
     { "path": "./tsconfig.node.json" }
   ]
 }
diff --git a/tsconfig.test.json b/tsconfig.test.json
new file mode 100644
index 0000000..e7b8dd4
--- /dev/null
+++ b/tsconfig.test.json
@@ -0,0 +1,4 @@
+{
+  "extends": "./tsconfig.app.json",
+  "include": ["test"]
+}
diff --git a/vite.config.ts b/vite.config.ts
index 53bbae4..b308a8e 100644
--- a/vite.config.ts
+++ b/vite.config.ts
@@ -1,8 +1,8 @@
-import { defineConfig } from "vite";
-import react from "@vitejs/plugin-react";
+import react from '@vitejs/plugin-react'
+import { defineConfig } from 'vite'
 
 // https://vite.dev/config/
 export default defineConfig({
   plugins: [react()],
   base: './',
-});
+})
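
The tests call a toURL helper that is defined elsewhere in the test suite, not in this diff. A minimal sketch of such a helper, assuming it only wraps the CSV text in a Blob, reports the encoded byte length, and returns an object URL with a matching revoke callback; the withNodeWorkaround flag and the exact option shape are assumptions:

// Hypothetical toURL test helper (not part of this diff): exposes the CSV text
// behind a Blob object URL and reports its byte length for the data frame.
function toURL (text: string, _options?: { withNodeWorkaround?: boolean }) {
  const blob = new Blob([text], { type: 'text/csv' })
  const url = URL.createObjectURL(blob)
  return {
    url,
    fileSize: blob.size, // byte length of the encoded text
    revoke: () => { URL.revokeObjectURL(url) }, // free the object URL after the test
  }
}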
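
For reference, a short sketch of the lazy-loading flow these tests exercise, reusing the toURL sketch above (the import path and sample data are illustrative assumptions): cells come back as { value } objects holding strings, uncached cells read as undefined, fetch pulls a chunk positioned from an estimated average row size (hence the approximation cases above), and a 'resolve' event fires once newly parsed rows are cached.

import { csvDataFrame } from '../src/csvDataFrame' // import path is an assumption

async function example () {
  const text = 'a,b,c\n1,2,3\n4,5,6\n7,8,9\n10,11,12\n'
  const { url, revoke, fileSize } = toURL(text)
  // Parse only the first two rows eagerly; later rows are fetched on demand.
  const df = await csvDataFrame({ url, byteLength: fileSize, initialRowCount: 2 })

  df.eventTarget?.addEventListener('resolve', () => {
    console.log('new rows have been cached')
  })

  console.log(df.getCell({ row: 3, column: 'a' })) // undefined: not cached yet
  await df.fetch?.({ rowStart: 2, rowEnd: 4 })
  console.log(df.getCell({ row: 3, column: 'a' })) // { value: '10' } when the row-size estimate is accurate
  console.log(df.getRowNumber({ row: 3 }))         // { value: 3 }

  revoke()
}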