diff --git a/.github/workflows/js-test-and-release.yml b/.github/workflows/js-test-and-release.yml
index ef28bb0b..546f6d4a 100644
--- a/.github/workflows/js-test-and-release.yml
+++ b/.github/workflows/js-test-and-release.yml
@@ -96,6 +96,44 @@ jobs:
       with:
         flags: firefox-webworker
 
+  test-webkit:
+    needs: check
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-latest, macos-latest]
+        node: [lts/*]
+      fail-fast: true
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3
+        with:
+          node-version: lts/*
+      - uses: ipfs/aegir/actions/cache-node-modules@master
+      - run: npm run --if-present test:webkit
+      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # v3.1.1
+        with:
+          flags: webkit
+
+  test-webkit-webworker:
+    needs: check
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-latest, macos-latest]
+        node: [lts/*]
+      fail-fast: true
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3
+        with:
+          node-version: lts/*
+      - uses: ipfs/aegir/actions/cache-node-modules@master
+      - run: npm run --if-present test:webkit-webworker
+      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # v3.1.1
+        with:
+          flags: webkit-webworker
+
   test-electron-main:
     needs: check
     runs-on: ubuntu-latest
@@ -125,7 +163,7 @@ jobs:
         flags: electron-renderer
 
   release:
-    needs: [test-node, test-chrome, test-chrome-webworker, test-firefox, test-firefox-webworker, test-electron-main, test-electron-renderer]
+    needs: [test-node, test-chrome, test-chrome-webworker, test-firefox, test-firefox-webworker, test-webkit, test-webkit-webworker, test-electron-main, test-electron-renderer]
     runs-on: ubuntu-latest
     if: github.event_name == 'push' && github.ref == 'refs/heads/master'
     steps:
@@ -142,5 +180,5 @@ jobs:
           docker-username: ${{ secrets.DOCKER_USERNAME }}
       - run: npm run --if-present release
         env:
-          GITHUB_TOKEN: ${{ github.token }}
+          GITHUB_TOKEN: ${{ secrets.UCI_GITHUB_TOKEN || github.token }}
           NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
diff --git a/benchmarks/import/README.md b/benchmarks/import/README.md
new file mode 100644
index 00000000..51ffd7e6
--- /dev/null
+++ b/benchmarks/import/README.md
@@ -0,0 +1,27 @@
+# Import Benchmark
+
+How long does the importer take to import files?
+
+The time spent on each percentage point of the import should stay relatively flat, even for files larger than physical memory.
+
+## Usage
+
+```console
+$ npm i
+$ npm start
+
+> ipfs-unixfs-import-benchmark@0.0.0 start
+> npm run build && node --expose-gc ./dist/test/index.spec.js
+
+> ipfs-unixfs-import-benchmark@0.0.0 build
+> aegir build --bundle false
+
+[14:51:28] tsc [started]
+[14:51:33] tsc [completed]
+Percent	ms
+//... results here
+```
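The benchmark gets its timings from the importer's progress events rather than from instrumenting importer internals. A condensed sketch of the technique used by `benchmarks/import/test/index.spec.ts` below, assuming only that `ipfs-unixfs-importer` emits `unixfs:importer:progress:file:read` events (which this change set relies on):

```ts
import { importer } from 'ipfs-unixfs-importer'
import { MemoryBlockstore } from 'blockstore-core'
import drain from 'it-drain'

// inside an async context: bucket elapsed time by percent of the file imported
const size = 1024 * 1024
const times: number[] = []
let read = 0
let lastDate = Date.now()

await drain(importer([{
  content: (async function * () {
    yield new Uint8Array(size)
  })()
}], new MemoryBlockstore(), {
  onProgress: (evt) => {
    if (evt.type === 'unixfs:importer:progress:file:read') {
      read += Number(evt.detail.bytesRead)
      const percent = Math.round((read / size) * 100)

      // accumulate how long this percentage point took
      times[percent] = (times[percent] ?? 0) + (Date.now() - lastDate)
      lastDate = Date.now()
    }
  }
}))
```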
diff --git a/benchmarks/import/package.json b/benchmarks/import/package.json
new file mode 100644
index 00000000..56f5618e
--- /dev/null
+++ b/benchmarks/import/package.json
@@ -0,0 +1,42 @@
+{
+  "name": "ipfs-unixfs-import-benchmark",
+  "version": "0.0.0",
+  "description": "Import benchmarks for ipfs-unixfs-importer",
+  "license": "Apache-2.0 OR MIT",
+  "private": true,
+  "type": "module",
+  "types": "./dist/src/index.d.ts",
+  "files": [
+    "src",
+    "dist",
+    "!dist/test",
+    "!**/*.tsbuildinfo"
+  ],
+  "exports": {
+    ".": {
+      "types": "./dist/src/index.d.ts",
+      "import": "./dist/src/index.js"
+    }
+  },
+  "eslintConfig": {
+    "extends": "ipfs",
+    "parserOptions": {
+      "sourceType": "module"
+    }
+  },
+  "scripts": {
+    "build": "aegir build --bundle false",
+    "clean": "aegir clean",
+    "lint": "aegir lint",
+    "dep-check": "aegir dep-check",
+    "start": "npm run build && node --expose-gc ./dist/test/index.spec.js"
+  },
+  "devDependencies": {
+    "aegir": "^38.1.2",
+    "blockstore-core": "^4.0.1",
+    "blockstore-fs": "^1.0.0",
+    "ipfs-unixfs-importer": "../../packages/ipfs-unixfs-importer",
+    "it-buffer-stream": "^3.0.1",
+    "it-drain": "^2.0.1"
+  }
+}
diff --git a/benchmarks/import/src/index.ts b/benchmarks/import/src/index.ts
new file mode 100644
index 00000000..336ce12b
--- /dev/null
+++ b/benchmarks/import/src/index.ts
@@ -0,0 +1 @@
+export {}
diff --git a/benchmarks/import/test/index.spec.ts b/benchmarks/import/test/index.spec.ts
new file mode 100644
index 00000000..dbd65ced
--- /dev/null
+++ b/benchmarks/import/test/index.spec.ts
@@ -0,0 +1,61 @@
+/* eslint-env mocha */
+
+import { importer, ImporterOptions } from 'ipfs-unixfs-importer'
+import bufferStream from 'it-buffer-stream'
+import { MemoryBlockstore } from 'blockstore-core'
+import drain from 'it-drain'
+
+const REPEATS = 10
+const FILE_SIZE = Math.pow(2, 20) * 500 // 500MB
+const CHUNK_SIZE = 65536
+
+async function main (): Promise<void> {
+  const block = new MemoryBlockstore()
+  const times: number[] = []
+
+  for (let i = 0; i < REPEATS; i++) {
+    const size = FILE_SIZE
+    let read = 0
+    let lastDate = Date.now()
+    let lastPercent = 0
+
+    const options: Partial<ImporterOptions> = {
+      onProgress: (evt) => {
+        if (evt.type === 'unixfs:importer:progress:file:read') {
+          read += Number(evt.detail.bytesRead)
+
+          const percent = Math.round((read / size) * 100)
+
+          if (percent > lastPercent) {
+            times[percent] = (times[percent] ?? 0) + (Date.now() - lastDate)
+
+            lastDate = Date.now()
+            lastPercent = percent
+          }
+        }
+      }
+    }
+
+    const buf = new Uint8Array(CHUNK_SIZE).fill(0)
+
+    await drain(importer([{
+      path: '200Bytes.txt',
+      content: bufferStream(size, {
+        chunkSize: CHUNK_SIZE,
+        generator: () => {
+          return buf
+        }
+      })
+    }], block, options))
+  }
+
+  console.info('Percent\tms') // eslint-disable-line no-console
+  times.forEach((time, index) => {
+    console.info(`${index}\t${Math.round(time / REPEATS)}`) // eslint-disable-line no-console
+  })
+}
+
+main().catch(err => {
+  console.error(err) // eslint-disable-line no-console
+  process.exit(1)
+})
diff --git a/benchmarks/import/tsconfig.json b/benchmarks/import/tsconfig.json
new file mode 100644
index 00000000..13a35996
--- /dev/null
+++ b/benchmarks/import/tsconfig.json
@@ -0,0 +1,10 @@
+{
+  "extends": "aegir/src/config/tsconfig.aegir.json",
+  "compilerOptions": {
+    "outDir": "dist"
+  },
+  "include": [
+    "src",
+    "test"
+  ]
+}
diff --git a/benchmarks/memory/README.md b/benchmarks/memory/README.md
new file mode 100644
index 00000000..55db8073
--- /dev/null
+++ b/benchmarks/memory/README.md
@@ -0,0 +1,27 @@
+# Memory Benchmark
+
+How much memory does the importer use while importing files?
+
+It should be relatively flat to enable importing files larger than physical memory.
+
+## Usage
+
+```console
+$ npm i
+$ npm start
+
+> ipfs-unixfs-memory-benchmark@0.0.0 start
+> npm run build && node --expose-gc ./dist/test/index.spec.js
+
+> ipfs-unixfs-memory-benchmark@0.0.0 build
+> aegir build --bundle false
+
+[14:51:28] tsc [started]
+[14:51:33] tsc [completed]
+bytes imported (mb), heap total (mb), heap used (mb), rss (mb)
+//... results here
+```
diff --git a/benchmarks/memory/package.json b/benchmarks/memory/package.json
new file mode 100644
index 00000000..eee3cd0f
--- /dev/null
+++ b/benchmarks/memory/package.json
@@ -0,0 +1,40 @@
+{
+  "name": "ipfs-unixfs-memory-benchmark",
+  "version": "0.0.0",
+  "description": "Memory benchmarks for ipfs-unixfs-importer",
+  "license": "Apache-2.0 OR MIT",
+  "private": true,
+  "type": "module",
+  "types": "./dist/src/index.d.ts",
+  "files": [
+    "src",
+    "dist",
+    "!dist/test",
+    "!**/*.tsbuildinfo"
+  ],
+  "exports": {
+    ".": {
+      "types": "./dist/src/index.d.ts",
+      "import": "./dist/src/index.js"
+    }
+  },
+  "eslintConfig": {
+    "extends": "ipfs",
+    "parserOptions": {
+      "sourceType": "module"
+    }
+  },
+  "scripts": {
+    "build": "aegir build --bundle false",
+    "clean": "aegir clean",
+    "lint": "aegir lint",
+    "dep-check": "aegir dep-check",
+    "start": "npm run build && node --expose-gc ./dist/test/index.spec.js"
+  },
+  "devDependencies": {
+    "aegir": "^38.1.2",
+    "blockstore-fs": "^1.0.0",
+    "ipfs-unixfs-importer": "../../packages/ipfs-unixfs-importer",
+    "it-drain": "^2.0.1"
+  }
+}
diff --git a/benchmarks/memory/src/index.ts b/benchmarks/memory/src/index.ts
new file mode 100644
index 00000000..336ce12b
--- /dev/null
+++ b/benchmarks/memory/src/index.ts
@@ -0,0 +1 @@
+export {}
diff --git a/benchmarks/memory/test/index.spec.ts b/benchmarks/memory/test/index.spec.ts
new file mode 100644
index 00000000..6a02873a
--- /dev/null
+++ b/benchmarks/memory/test/index.spec.ts
@@ -0,0 +1,48 @@
+/* eslint-disable no-console */
+
+import { importer } from 'ipfs-unixfs-importer'
+import path from 'node:path'
+import os from 'node:os'
+import fs from 'node:fs'
+import drain from 'it-drain'
+import { FsBlockstore } from 'blockstore-fs'
+
+const ONE_MEG = 1024 * 1024
+
+const FILE_SIZE = ONE_MEG * 1000
+const CHUNK_SIZE = ONE_MEG
+
+async function main (): Promise<void> {
+  const dir = path.join(os.tmpdir(), `test-${Date.now()}`)
+  const blocks = new FsBlockstore(dir)
+  await blocks.open()
+
+  console.info('bytes imported (mb), heap total (mb), heap used (mb), rss (mb)')
+
+  try {
+    await drain(importer([{
+      content: (async function * (): AsyncIterable<Uint8Array> {
+        for (let i = 0; i < FILE_SIZE; i += CHUNK_SIZE) {
+          yield new Uint8Array(CHUNK_SIZE)
+
+          // @ts-expect-error only present when node is run with --expose-gc
+          global.gc()
+
+          console.info(`${i / ONE_MEG}, ${process.memoryUsage().heapTotal / ONE_MEG}, ${process.memoryUsage().heapUsed / ONE_MEG}, ${process.memoryUsage().rss / ONE_MEG}`)
+        }
+      })()
+    }], blocks))
+  } catch (err) {
+    console.error(err)
+  } finally {
+    await blocks.close()
+    fs.rmSync(dir, {
+      recursive: true
+    })
+  }
+}
+
+main().catch(err => {
+  console.error(err) // eslint-disable-line no-console
+  process.exit(1)
+})
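One note on methodology: `heapUsed` is only meaningful immediately after a collection, which is why the spec above forces a GC before every sample and the `start` script runs node with `--expose-gc`. A hedged sketch of the sampling step in isolation (the helper name is illustrative):

```ts
// global.gc only exists when node is started with --expose-gc
const g = globalThis as { gc?: () => void }

function sampleMemory (bytesImported: number): void {
  // collect garbage first so heapUsed reflects live objects, not pending garbage
  g.gc?.()

  const MB = 1024 * 1024
  const { heapTotal, heapUsed, rss } = process.memoryUsage()

  console.info(`${bytesImported / MB}, ${heapTotal / MB}, ${heapUsed / MB}, ${rss / MB}`)
}
```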
diff --git a/benchmarks/memory/tsconfig.json b/benchmarks/memory/tsconfig.json
new file mode 100644
index 00000000..13a35996
--- /dev/null
+++ b/benchmarks/memory/tsconfig.json
@@ -0,0 +1,10 @@
+{
+  "extends": "aegir/src/config/tsconfig.aegir.json",
+  "compilerOptions": {
+    "outDir": "dist"
+  },
+  "include": [
+    "src",
+    "test"
+  ]
+}
diff --git a/package.json b/package.json
index 831f8c7a..7823f27a 100644
--- a/package.json
+++ b/package.json
@@ -36,7 +36,7 @@
     "release": "npm run docs:no-publish && aegir run release && npm run docs"
   },
   "devDependencies": {
-    "aegir": "^38.1.2"
+    "aegir": "^39.0.6"
  },
   "workspaces": [
     "packages/*"
diff --git a/packages/ipfs-unixfs-exporter/CHANGELOG.md b/packages/ipfs-unixfs-exporter/CHANGELOG.md
index ca05ff10..41b9f520 100644
--- a/packages/ipfs-unixfs-exporter/CHANGELOG.md
+++ b/packages/ipfs-unixfs-exporter/CHANGELOG.md
@@ -1,3 +1,127 @@
+## [ipfs-unixfs-exporter-v13.1.4](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v13.1.3...ipfs-unixfs-exporter-v13.1.4) (2023-05-11)
+
+
+### Bug Fixes
+
+* usage with readable-stream ([#333](https://github.com/ipfs/js-ipfs-unixfs/issues/333)) ([9b6203f](https://github.com/ipfs/js-ipfs-unixfs/commit/9b6203f16f77c0dbe80e41a844026aa52ab71652))
+
+## [ipfs-unixfs-exporter-v13.1.3](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v13.1.2...ipfs-unixfs-exporter-v13.1.3) (2023-05-11)
+
+
+### Dependencies
+
+* bump it-all from 2.0.1 to 3.0.2 ([#324](https://github.com/ipfs/js-ipfs-unixfs/issues/324)) ([0738c35](https://github.com/ipfs/js-ipfs-unixfs/commit/0738c35cf437020c94dc8cc17644b01c9289b9a2))
+* bump it-first from 2.0.1 to 3.0.2 ([#325](https://github.com/ipfs/js-ipfs-unixfs/issues/325)) ([3db2948](https://github.com/ipfs/js-ipfs-unixfs/commit/3db2948ecb3fa2780582c8cddd32c1030e226356))
+
+## [ipfs-unixfs-exporter-v13.1.2](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v13.1.1...ipfs-unixfs-exporter-v13.1.2) (2023-05-11)
+
+
+### Dependencies
+
+* bump it-filter from 2.0.2 to 3.0.2 ([#328](https://github.com/ipfs/js-ipfs-unixfs/issues/328)) ([c4b624b](https://github.com/ipfs/js-ipfs-unixfs/commit/c4b624b99f0bfe05059de815e66443c80dbe7693))
+* bump it-map from 2.0.1 to 3.0.3 ([#329](https://github.com/ipfs/js-ipfs-unixfs/issues/329)) 
([6974025](https://github.com/ipfs/js-ipfs-unixfs/commit/6974025542860b84d78b447e41b975a4b8a03f31)) + +## [ipfs-unixfs-exporter-v13.1.1](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v13.1.0...ipfs-unixfs-exporter-v13.1.1) (2023-05-11) + + +### Dependencies + +* bump it-last from 2.0.1 to 3.0.2 ([#330](https://github.com/ipfs/js-ipfs-unixfs/issues/330)) ([7f8df4d](https://github.com/ipfs/js-ipfs-unixfs/commit/7f8df4d437befacd36c36c85f178b14fc7930fd6)) +* bump it-pipe from 2.0.5 to 3.0.1 ([#319](https://github.com/ipfs/js-ipfs-unixfs/issues/319)) ([ab5dd0a](https://github.com/ipfs/js-ipfs-unixfs/commit/ab5dd0ab85fc2235b1fca912114b75f243634bf3)) + +## [ipfs-unixfs-exporter-v13.1.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v13.0.6...ipfs-unixfs-exporter-v13.1.0) (2023-03-17) + + +### Features + +* adds progress events to the importer and exporter ([#302](https://github.com/ipfs/js-ipfs-unixfs/issues/302)) ([d0df723](https://github.com/ipfs/js-ipfs-unixfs/commit/d0df7237f155b73b8c722d6750742f9976232c0e)) + +## [ipfs-unixfs-exporter-v13.0.6](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v13.0.5...ipfs-unixfs-exporter-v13.0.6) (2023-03-16) + + +### Bug Fixes + +* align blockstore pick interface name ([#301](https://github.com/ipfs/js-ipfs-unixfs/issues/301)) ([ca10d79](https://github.com/ipfs/js-ipfs-unixfs/commit/ca10d792083b80fd45754b5260eb486a621cc489)) + +## [ipfs-unixfs-exporter-v13.0.5](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v13.0.4...ipfs-unixfs-exporter-v13.0.5) (2023-03-16) + + +### Bug Fixes + +* test for file under/over reads ([#300](https://github.com/ipfs/js-ipfs-unixfs/issues/300)) ([8413c70](https://github.com/ipfs/js-ipfs-unixfs/commit/8413c7065b30c3347c98f18e03a3dcf086de69cb)) + +## [ipfs-unixfs-exporter-v13.0.4](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v13.0.3...ipfs-unixfs-exporter-v13.0.4) (2023-03-16) + + +### Bug Fixes + +* add tests for unbalanced dags and very deep dags ([#299](https://github.com/ipfs/js-ipfs-unixfs/issues/299)) ([44e7792](https://github.com/ipfs/js-ipfs-unixfs/commit/44e7792416d400defeb4b8d34b2b90895d275dec)) + +## [ipfs-unixfs-exporter-v13.0.3](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v13.0.2...ipfs-unixfs-exporter-v13.0.3) (2023-03-15) + + +### Bug Fixes + +* reduce required number of blockstore methods ([#298](https://github.com/ipfs/js-ipfs-unixfs/issues/298)) ([238fe4e](https://github.com/ipfs/js-ipfs-unixfs/commit/238fe4e4f2bac5075eac4dced9a52f3c9c8e307a)) + +## [ipfs-unixfs-exporter-v13.0.2](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v13.0.1...ipfs-unixfs-exporter-v13.0.2) (2023-03-15) + + +### Bug Fixes + +* remove unused timeout option from exporter ([#296](https://github.com/ipfs/js-ipfs-unixfs/issues/296)) ([bcb38a7](https://github.com/ipfs/js-ipfs-unixfs/commit/bcb38a7320dd2d6f411e36851df3395f53ff7059)) + +## [ipfs-unixfs-exporter-v13.0.1](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v13.0.0...ipfs-unixfs-exporter-v13.0.1) (2023-03-15) + + +### Bug Fixes + +* pass onProgress option to blockstore 
([#294](https://github.com/ipfs/js-ipfs-unixfs/issues/294)) ([3bfb34d](https://github.com/ipfs/js-ipfs-unixfs/commit/3bfb34d8e660404c89e39925ad053c940bf176ce)) + +## [ipfs-unixfs-exporter-v13.0.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v12.0.2...ipfs-unixfs-exporter-v13.0.0) (2023-03-15) + + +### ⚠ BREAKING CHANGES + +* please use the latest interface-blockstore versions of everything, aside from this +impact should be minimal + +### Dependencies + +* update blockstore ([#290](https://github.com/ipfs/js-ipfs-unixfs/issues/290)) ([6efaab5](https://github.com/ipfs/js-ipfs-unixfs/commit/6efaab5dc509beb5bd5049e104399a5d3b46301d)) +* update sibling dependencies ([a1c8f49](https://github.com/ipfs/js-ipfs-unixfs/commit/a1c8f4932a2d351bc512e4f872a3566a2aaa3045)) + +## [ipfs-unixfs-exporter-v12.0.2](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v12.0.1...ipfs-unixfs-exporter-v12.0.2) (2023-03-13) + + +### Bug Fixes + +* use simpler blockstore interface ([#287](https://github.com/ipfs/js-ipfs-unixfs/issues/287)) ([b332b16](https://github.com/ipfs/js-ipfs-unixfs/commit/b332b167ecbb1083030a57144088d318bf59701e)) + +## [ipfs-unixfs-exporter-v12.0.1](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v12.0.0...ipfs-unixfs-exporter-v12.0.1) (2023-03-10) + + +### Bug Fixes + +* parallelise loading of dag-pb links in directories when exporting ([#286](https://github.com/ipfs/js-ipfs-unixfs/issues/286)) ([9e01878](https://github.com/ipfs/js-ipfs-unixfs/commit/9e01878a6ec8fc32bb830f0ff67ec613c260e24f)) + +## [ipfs-unixfs-exporter-v12.0.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v11.0.0...ipfs-unixfs-exporter-v12.0.0) (2023-02-16) + + +### ⚠ BREAKING CHANGES + +* The options object now accepts preconfigured instances of chunkers and file layouts - these can be imported from this module - see https://github.com/ipfs/js-ipfs-unixfs/pull/283 for more + +### Features + +* accept pre-configured import components as options instead of options for components ([#283](https://github.com/ipfs/js-ipfs-unixfs/issues/283)) ([5a38d01](https://github.com/ipfs/js-ipfs-unixfs/commit/5a38d0126457926d1c17aeee75700565b400e4cf)) + + +### Dependencies + +* update sibling dependencies ([c59954c](https://github.com/ipfs/js-ipfs-unixfs/commit/c59954c64933ee330cd40746d0fa720de83b6ea3)) +* update sibling dependencies ([b4f6fc8](https://github.com/ipfs/js-ipfs-unixfs/commit/b4f6fc83245bc99223704ce918fd4db691221412)) + ## [ipfs-unixfs-exporter-v11.0.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-v10.0.1...ipfs-unixfs-exporter-v11.0.0) (2023-02-09) diff --git a/packages/ipfs-unixfs-exporter/package.json b/packages/ipfs-unixfs-exporter/package.json index 8ecc7cb1..4cebd96a 100644 --- a/packages/ipfs-unixfs-exporter/package.json +++ b/packages/ipfs-unixfs-exporter/package.json @@ -1,6 +1,6 @@ { "name": "ipfs-unixfs-exporter", - "version": "11.0.0", + "version": "13.1.4", "description": "JavaScript implementation of the UnixFs exporter used by IPFS", "license": "Apache-2.0 OR MIT", "homepage": "https://github.com/ipfs/js-ipfs-unixfs/tree/master/packages/ipfs-unixfs-exporter#readme", @@ -131,7 +131,7 @@ "build": "aegir build", "clean": "aegir clean", 
"lint": "aegir lint", - "dep-check": "aegir dep-check -i interface-blockstore", + "dep-check": "aegir dep-check", "release": "aegir release" }, "dependencies": { @@ -140,31 +140,39 @@ "@multiformats/murmur3": "^2.0.0", "err-code": "^3.0.1", "hamt-sharding": "^3.0.0", - "interface-blockstore": "^4.0.0", - "ipfs-unixfs": "^10.0.0", - "it-last": "^2.0.0", - "it-map": "^2.0.0", + "interface-blockstore": "^5.0.0", + "ipfs-unixfs": "^11.0.0", + "it-filter": "^3.0.2", + "it-last": "^3.0.2", + "it-map": "^3.0.3", "it-parallel": "^3.0.0", - "it-pipe": "^2.0.4", + "it-pipe": "^3.0.1", "it-pushable": "^3.1.0", "multiformats": "^11.0.0", "p-queue": "^7.3.0", + "progress-events": "^1.0.0", "uint8arrays": "^4.0.2" }, "devDependencies": { + "@types/readable-stream": "^2.3.15", "@types/sinon": "^10.0.0", - "aegir": "^38.1.2", - "blockstore-core": "^3.0.0", + "aegir": "^39.0.6", + "blockstore-core": "^4.0.1", "delay": "^5.0.0", - "ipfs-unixfs-importer": "^13.0.0", - "it-all": "^2.0.0", + "ipfs-unixfs-importer": "^15.0.0", + "iso-random-stream": "^2.0.2", + "it-all": "^3.0.2", "it-buffer-stream": "^3.0.0", - "it-first": "^2.0.0", + "it-first": "^3.0.2", + "it-to-buffer": "^4.0.2", "merge-options": "^3.0.4", - "sinon": "^15.0.0" + "readable-stream": "^4.4.0", + "sinon": "^15.0.0", + "wherearewe": "^2.0.1" }, "browser": { - "fs": false + "fs": false, + "readable-stream": false }, "typedoc": { "entryPoint": "./src/index.ts" diff --git a/packages/ipfs-unixfs-exporter/src/index.ts b/packages/ipfs-unixfs-exporter/src/index.ts index 8c3b984b..e0293c53 100644 --- a/packages/ipfs-unixfs-exporter/src/index.ts +++ b/packages/ipfs-unixfs-exporter/src/index.ts @@ -1,17 +1,55 @@ import errCode from 'err-code' +import last from 'it-last' import { CID } from 'multiformats/cid' import resolve from './resolvers/index.js' -import last from 'it-last' -import type { UnixFS } from 'ipfs-unixfs' import type { PBNode } from '@ipld/dag-pb' -import type { Blockstore } from 'interface-blockstore' import type { Bucket } from 'hamt-sharding' +import type { Blockstore } from 'interface-blockstore' +import type { UnixFS } from 'ipfs-unixfs' +import type { ProgressOptions, ProgressEvent } from 'progress-events' + +export interface ExportProgress { + /** + * How many bytes of the file have been read + */ + bytesRead: bigint + + /** + * How many bytes of the file will be read - n.b. this may be + * smaller than `fileSize` if `offset`/`length` have been + * specified + */ + totalBytes: bigint + + /** + * The size of the file being read - n.b. 
this may be
+   * larger than `total` if `offset`/`length` has been
+   * specified
+   */
+  fileSize: bigint
+}
 
-export interface ExporterOptions {
+export interface ExportWalk {
+  cid: CID
+}
+
+/**
+ * Progress events emitted by the exporter
+ */
+export type ExporterProgressEvents =
+  ProgressEvent<'unixfs:exporter:progress:unixfs:file', ExportProgress> |
+  ProgressEvent<'unixfs:exporter:progress:unixfs:raw', ExportProgress> |
+  ProgressEvent<'unixfs:exporter:progress:raw', ExportProgress> |
+  ProgressEvent<'unixfs:exporter:progress:identity', ExportProgress> |
+  ProgressEvent<'unixfs:exporter:walk:file', ExportWalk> |
+  ProgressEvent<'unixfs:exporter:walk:directory', ExportWalk> |
+  ProgressEvent<'unixfs:exporter:walk:hamt-sharded-directory', ExportWalk> |
+  ProgressEvent<'unixfs:exporter:walk:raw', ExportWalk>
+
+export interface ExporterOptions extends ProgressOptions<ExporterProgressEvents> {
   offset?: number
   length?: number
   signal?: AbortSignal
-  timeout?: number
 }
 
 export interface Exportable<T> {
@@ -21,7 +59,7 @@
   cid: CID
   depth: number
   size: bigint
-  content: (options?: ExporterOptions) => AsyncIterable<T>
+  content: (options?: ExporterOptions) => AsyncGenerator<T, void, unknown>
 }
 
 export interface UnixFSFile extends Exportable<Uint8Array> {
@@ -65,13 +103,13 @@ export interface ResolveResult {
   next?: NextResult
 }
 
-export interface Resolve { (cid: CID, name: string, path: string, toResolve: string[], depth: number, blockstore: Blockstore, options: ExporterOptions): Promise<ResolveResult> }
-export interface Resolver { (cid: CID, name: string, path: string, toResolve: string[], resolve: Resolve, depth: number, blockstore: Blockstore, options: ExporterOptions): Promise<ResolveResult> }
+export interface Resolve { (cid: CID, name: string, path: string, toResolve: string[], depth: number, blockstore: ReadableStorage, options: ExporterOptions): Promise<ResolveResult> }
+export interface Resolver { (cid: CID, name: string, path: string, toResolve: string[], resolve: Resolve, depth: number, blockstore: ReadableStorage, options: ExporterOptions): Promise<ResolveResult> }
 
 export type UnixfsV1FileContent = AsyncIterable<Uint8Array> | Iterable<Uint8Array>
 export type UnixfsV1DirectoryContent = AsyncIterable<UnixFSEntry> | Iterable<UnixFSEntry>
 export type UnixfsV1Content = UnixfsV1FileContent | UnixfsV1DirectoryContent
 
-export interface UnixfsV1Resolver { (cid: CID, node: PBNode, unixfs: UnixFS, path: string, resolve: Resolve, depth: number, blockstore: Blockstore): (options: ExporterOptions) => UnixfsV1Content }
+export interface UnixfsV1Resolver { (cid: CID, node: PBNode, unixfs: UnixFS, path: string, resolve: Resolve, depth: number, blockstore: ReadableStorage): (options: ExporterOptions) => UnixfsV1Content }
 
 export interface ShardTraversalContext {
   hamtDepth: number
   rootBucket: Bucket<boolean>
   lastBucket: Bucket<boolean>
 }
 
+export type ReadableStorage = Pick<Blockstore, 'get'>
+
 const toPathComponents = (path: string = ''): string[] => {
   // split on / unless escaped with \
   return (path
@@ -119,7 +159,7 @@ const cidAndRest = (path: string | Uint8Array | CID): { cid: CID, toResolve: str
   throw errCode(new Error(`Unknown path type ${path}`), 'ERR_BAD_PATH')
 }
 
-export async function * walkPath (path: string | CID, blockstore: Blockstore, options: ExporterOptions = {}): AsyncGenerator<UnixFSEntry, void, any> {
+export async function * walkPath (path: string | CID, blockstore: ReadableStorage, options: ExporterOptions = {}): AsyncGenerator<UnixFSEntry, void, any> {
   let {
     cid,
     toResolve
@@ -151,7 +191,7 @@ export async function * walkPath (path: string | CID, blockstore: Blockstore, op
   }
 }
 
-export async function exporter (path: string | CID, blockstore: Blockstore, options: ExporterOptions = {}): Promise<UnixFSEntry> {
+export async function exporter (path: string | CID, blockstore: ReadableStorage, options: ExporterOptions = {}): Promise<UnixFSEntry> {
   const result = await last(walkPath(path, blockstore, options))
 
   if (result == null) {
@@ -161,7 +201,7 @@ export async function exporter (path: string | CID, blockstore: Blockstore, opti
   return result
 }
 
-export async function * recursive (path: string | CID, blockstore: Blockstore, options: ExporterOptions = {}): AsyncGenerator<UnixFSEntry, void, any> {
+export async function * recursive (path: string | CID, blockstore: ReadableStorage, options: ExporterOptions = {}): AsyncGenerator<UnixFSEntry, void, any> {
   const node = await exporter(path, blockstore, options)
 
   if (node == null) {
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.ts b/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.ts
index 13e5c2e4..a653a1ad 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.ts
@@ -1,10 +1,10 @@
-import { CID } from 'multiformats/cid'
-import errCode from 'err-code'
 import * as dagCbor from '@ipld/dag-cbor'
+import errCode from 'err-code'
+import { CID } from 'multiformats/cid'
 import type { Resolver } from '../index.js'
 
 const resolve: Resolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {
-  const block = await blockstore.get(cid)
+  const block = await blockstore.get(cid, options)
   const object = dagCbor.decode(block)
   let subObject = object
   let subPath = path
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/identity.ts b/packages/ipfs-unixfs-exporter/src/resolvers/identity.ts
index f226dbf3..861e6dc7 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/identity.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/identity.ts
@@ -1,17 +1,26 @@
 import errCode from 'err-code'
+import * as mh from 'multiformats/hashes/digest'
+import { CustomProgressEvent } from 'progress-events'
 import extractDataFromBlock from '../utils/extract-data-from-block.js'
 import validateOffsetAndLength from '../utils/validate-offset-and-length.js'
-import * as mh from 'multiformats/hashes/digest'
-import type { ExporterOptions, Resolver } from '../index.js'
+import type { ExporterOptions, Resolver, ExportProgress } from '../index.js'
 
 const rawContent = (node: Uint8Array): ((options?: ExporterOptions) => AsyncGenerator<Uint8Array, void, undefined>) => {
   async function * contentGenerator (options: ExporterOptions = {}): AsyncGenerator<Uint8Array, void, undefined> {
     const {
-      offset,
-      length
+      start,
+      end
     } = validateOffsetAndLength(node.length, options.offset, options.length)
 
-    yield extractDataFromBlock(node, 0n, offset, offset + length)
+    const buf = extractDataFromBlock(node, 0n, start, end)
+
+    options.onProgress?.(new CustomProgressEvent<ExportProgress>('unixfs:exporter:progress:identity', {
+      bytesRead: BigInt(buf.byteLength),
+      totalBytes: end - start,
+      fileSize: BigInt(node.byteLength)
+    }))
+
+    yield buf
   }
 
   return contentGenerator
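Taken together, `ExporterOptions` now extending `ProgressOptions<ExporterProgressEvents>` means a consumer can observe both the DAG walk and the byte-level reads. A minimal consumer sketch, assuming only the event names and the `ExportProgress`/`ExportWalk` shapes defined above:

```ts
import { exporter } from 'ipfs-unixfs-exporter'

// inside an async context, with `cid` and `blockstore` in scope
const entry = await exporter(cid, blockstore)

if (entry.type === 'file') {
  for await (const chunk of entry.content({
    onProgress: (evt) => {
      if (evt.type === 'unixfs:exporter:progress:unixfs:file') {
        // evt.detail is an ExportProgress
        console.info(`read ${evt.detail.bytesRead} of ${evt.detail.totalBytes} bytes`)
      } else if (evt.type === 'unixfs:exporter:walk:file') {
        // evt.detail is an ExportWalk - the CID of the DAG node being visited
        console.info(`walking DAG node ${evt.detail.cid}`)
      }
    }
  })) {
    // consume chunk as usual - progress events fire while this loop pulls data
    void chunk
  }
}
```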
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/index.ts b/packages/ipfs-unixfs-exporter/src/resolvers/index.ts
index fbd5dbbb..4f3279cd 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/index.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/index.ts
@@ -1,14 +1,13 @@
-import errCode from 'err-code'
-import * as dagPb from '@ipld/dag-pb'
 import * as dagCbor from '@ipld/dag-cbor'
+import * as dagPb from '@ipld/dag-pb'
+import errCode from 'err-code'
 import * as raw from 'multiformats/codecs/raw'
 import { identity } from 'multiformats/hashes/identity'
-
-import dagPbResolver from './unixfs-v1/index.js'
-import rawResolver from './raw.js'
 import dagCborResolver from './dag-cbor.js'
 import identifyResolver from './identity.js'
+import rawResolver from './raw.js'
+import dagPbResolver from './unixfs-v1/index.js'
 import type { Resolve, Resolver } from '../index.js'
 
 const resolvers: Record<number, Resolver> = {
@@ -25,7 +24,7 @@
     throw errCode(new Error(`No resolver for code ${cid.code}`), 'ERR_NO_RESOLVER')
   }
 
-  return await resolver(cid, name, path, toResolve, resolve, depth, blockstore, options)
+  return resolver(cid, name, path, toResolve, resolve, depth, blockstore, options)
 }
 
 export default resolve
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts b/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts
index f567cdcb..7b5d2429 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts
@@ -1,16 +1,25 @@
 import errCode from 'err-code'
-import type { ExporterOptions, Resolver } from '../index.js'
+import { CustomProgressEvent } from 'progress-events'
 import extractDataFromBlock from '../utils/extract-data-from-block.js'
 import validateOffsetAndLength from '../utils/validate-offset-and-length.js'
+import type { ExporterOptions, Resolver, ExportProgress } from '../index.js'
 
 const rawContent = (node: Uint8Array): ((options?: ExporterOptions) => AsyncGenerator<Uint8Array, void, undefined>) => {
   async function * contentGenerator (options: ExporterOptions = {}): AsyncGenerator<Uint8Array, void, undefined> {
     const {
-      offset,
-      length
+      start,
+      end
     } = validateOffsetAndLength(node.length, options.offset, options.length)
 
-    yield extractDataFromBlock(node, 0n, offset, offset + length)
+    const buf = extractDataFromBlock(node, 0n, start, end)
+
+    options.onProgress?.(new CustomProgressEvent<ExportProgress>('unixfs:exporter:progress:raw', {
+      bytesRead: BigInt(buf.byteLength),
+      totalBytes: end - start,
+      fileSize: BigInt(node.byteLength)
+    }))
+
+    yield buf
  }
 
   return contentGenerator
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.ts b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.ts
index dedcc8a2..bfa1d61d 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.ts
@@ -1,4 +1,9 @@
-import type { ExporterOptions, UnixfsV1DirectoryContent, UnixfsV1Resolver } from '../../../index.js'
+import filter from 'it-filter'
+import map from 'it-map'
+import parallel from 'it-parallel'
+import { pipe } from 'it-pipe'
+import { CustomProgressEvent } from 'progress-events'
+import type { ExporterOptions, ExportWalk, UnixfsV1DirectoryContent, UnixfsV1Resolver } from '../../../index.js'
 
 const directoryContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, blockstore) => {
   async function * yieldDirectoryContent (options: ExporterOptions = {}): UnixfsV1DirectoryContent {
@@ -6,13 +11,23 @@
     const length = options.length ?? node.Links.length
     const links = node.Links.slice(offset, length)
 
-    for (const link of links) {
-      const result = await resolve(link.Hash, link.Name ?? '', `${path}/${link.Name ?? ''}`, [], depth + 1, blockstore, options)
+    options.onProgress?.(new CustomProgressEvent<ExportWalk>('unixfs:exporter:walk:directory', {
+      cid
+    }))
 
-      if (result.entry != null) {
-        yield result.entry
-      }
-    }
+    yield * pipe(
+      links,
+      source => map(source, link => {
+        return async () => {
+          const linkName = link.Name ?? ''
+          const linkPath = `${path}/${linkName}`
+          const result = await resolve(link.Hash, linkName, linkPath, [], depth + 1, blockstore, options)
+          return result.entry
+        }
+      }),
+      source => parallel(source, { ordered: true }),
+      source => filter(source, entry => entry != null)
+    )
   }
 
   return yieldDirectoryContent
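The rewritten directory resolver above trades a sequential `for` loop for the `it-pipe`/`it-map`/`it-parallel` idiom: each link becomes an async thunk, the thunks run concurrently, and `{ ordered: true }` keeps results in link order. The same shape recurs in `file.ts` and `hamt-sharded-directory.ts` below. The pattern in isolation:

```ts
import { pipe } from 'it-pipe'
import map from 'it-map'
import parallel from 'it-parallel'

// inside an async context: run async work concurrently, consume results in order
const results = pipe(
  [1, 2, 3, 4],
  source => map(source, item => {
    // map each item to a thunk - it-parallel invokes these concurrently
    return async () => item * 2
  }),
  source => parallel(source, { ordered: true })
)

for await (const value of results) {
  console.info(value) // 2, 4, 6, 8 in order, even if later thunks settle first
}
```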
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts
index b3798226..1da18056 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts
@@ -1,21 +1,23 @@
-import extractDataFromBlock from '../../../utils/extract-data-from-block.js'
-import validateOffsetAndLength from '../../../utils/validate-offset-and-length.js'
-import { UnixFS } from 'ipfs-unixfs'
-import errCode from 'err-code'
 import * as dagPb from '@ipld/dag-pb'
-import * as raw from 'multiformats/codecs/raw'
-import { Pushable, pushable } from 'it-pushable'
+import errCode from 'err-code'
+import { UnixFS } from 'ipfs-unixfs'
+import map from 'it-map'
 import parallel from 'it-parallel'
 import { pipe } from 'it-pipe'
-import map from 'it-map'
+import { type Pushable, pushable } from 'it-pushable'
+import * as raw from 'multiformats/codecs/raw'
 import PQueue from 'p-queue'
-import type { Blockstore } from 'interface-blockstore'
-import type { ExporterOptions, UnixfsV1FileContent, UnixfsV1Resolver } from '../../../index.js'
+import { CustomProgressEvent } from 'progress-events'
+import extractDataFromBlock from '../../../utils/extract-data-from-block.js'
+import validateOffsetAndLength from '../../../utils/validate-offset-and-length.js'
+import type { ExporterOptions, UnixfsV1FileContent, UnixfsV1Resolver, ReadableStorage, ExportProgress, ExportWalk } from '../../../index.js'
 
-async function walkDAG (blockstore: Blockstore, node: dagPb.PBNode | Uint8Array, queue: Pushable<Uint8Array>, streamPosition: bigint, start: bigint, end: bigint, walkQueue: PQueue, options: ExporterOptions): Promise<void> {
+async function walkDAG (blockstore: ReadableStorage, node: dagPb.PBNode | Uint8Array, queue: Pushable<Uint8Array>, streamPosition: bigint, start: bigint, end: bigint, options: ExporterOptions): Promise<void> {
   // a `raw` node
   if (node instanceof Uint8Array) {
-    queue.push(extractDataFromBlock(node, streamPosition, start, end))
+    const buf = extractDataFromBlock(node, streamPosition, start, end)
+
+    queue.push(buf)
 
     return
   }
@@ -73,9 +75,7 @@
       childOps,
       (source) => map(source, (op) => {
         return async () => {
-          const block = await blockstore.get(op.link.Hash, {
-            signal: options.signal
-          })
+          const block = await blockstore.get(op.link.Hash, options)
 
           return {
             ...op,
@@ -101,12 +101,34 @@
         return
       }
 
-      void walkQueue.add(async () => {
-        await walkDAG(blockstore, child, queue, blockStart, start, end, walkQueue, options)
+      // create a queue for this child - we use a queue instead of recursion
+      // to avoid overflowing the stack
+      const childQueue = new PQueue({
+        concurrency: 1
+      })
+      // if any of the child jobs error, end the read queue with the error
+      childQueue.on('error', error => {
+        queue.end(error)
+      })
+
+      // if the job rejects the 'error' event will be emitted on the child queue
+      void childQueue.add(async () => {
+        options.onProgress?.(new CustomProgressEvent<ExportWalk>('unixfs:exporter:walk:file', {
+          cid: link.Hash
+        }))
+
+        await walkDAG(blockstore, child, queue, blockStart, start, end, options)
       })
+
+      // wait for this child to complete before moving on to the next
+      await childQueue.onIdle()
     }
   }
   )
+
+  if (streamPosition >= end) {
+    queue.end()
+  }
 }
 
 const fileContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, blockstore) => {
@@ -118,30 +140,26 @@
     }
 
     const {
-      offset,
-      length
+      start,
+      end
     } = validateOffsetAndLength(fileSize, options.offset, options.length)
 
-    if (length === 0n) {
+    if (end === 0n) {
       return
     }
 
-    // use a queue to walk the DAG instead of recursion to ensure very deep DAGs
-    // don't overflow the stack
-    const walkQueue = new PQueue({
-      concurrency: 1
-    })
+    let read = 0n
+    const wanted = end - start
     const queue = pushable<Uint8Array>()
 
-    void walkQueue.add(async () => {
-      await walkDAG(blockstore, node, queue, 0n, offset, offset + length, walkQueue, options)
-    })
-
-    walkQueue.on('error', error => {
-      queue.end(error)
-    })
+    options.onProgress?.(new CustomProgressEvent<ExportWalk>('unixfs:exporter:walk:file', {
+      cid
+    }))
 
-    let read = 0n
+    void walkDAG(blockstore, node, queue, 0n, start, end, options)
+      .catch(err => {
+        queue.end(err)
+      })
 
     for await (const buf of queue) {
       if (buf == null) {
@@ -150,12 +168,27 @@
 
       read += BigInt(buf.byteLength)
 
-      if (read === length) {
+      if (read > wanted) {
+        queue.end()
+        throw errCode(new Error('Read too many bytes - the file size reported by the UnixFS data in the root node may be incorrect'), 'ERR_OVER_READ')
+      }
+
+      if (read === wanted) {
         queue.end()
       }
 
+      options.onProgress?.(new CustomProgressEvent<ExportProgress>('unixfs:exporter:progress:unixfs:file', {
+        bytesRead: read,
+        totalBytes: wanted,
+        fileSize
+      }))
+
       yield buf
     }
+
+    if (read < wanted) {
+      throw errCode(new Error('Traversed entire DAG but did not read enough bytes'), 'ERR_UNDER_READ')
+    }
   }
 
   return yieldFileContent
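The child `PQueue` in `walkDAG` above does more than error propagation: because each recursive step runs as a queue job, the descent resumes from the event loop on a fresh call stack, so the recursion depth lives on the heap rather than the native stack. The over/under-read checks then turn a DAG whose `blockSizes` disagree with its leaves into `ERR_OVER_READ`/`ERR_UNDER_READ` instead of silently corrupt output. A distilled sketch of the stack-safety trick:

```ts
import PQueue from 'p-queue'

async function descend (depth: number): Promise<void> {
  if (depth === 0) {
    return
  }

  // run the next level as a queue job so it starts on a fresh stack
  const childQueue = new PQueue({ concurrency: 1 })

  void childQueue.add(async () => {
    await descend(depth - 1)
  })

  // suspend this frame (on the heap) until the child level finishes
  await childQueue.onIdle()
}

// inside an async context: a directly-recursive version would overflow the
// stack at this depth, while the queued version completes
await descend(100_000)
```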
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts
index 54b67382..2f2126ce 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts
@@ -1,43 +1,53 @@
-import { decode, PBNode } from '@ipld/dag-pb'
-import type { Blockstore } from 'interface-blockstore'
-import type { ExporterOptions, Resolve, UnixfsV1DirectoryContent, UnixfsV1Resolver } from '../../../index.js'
-
-/**
- * @typedef {import('interface-blockstore').Blockstore} Blockstore
- * @typedef {import('../../../types').ExporterOptions} ExporterOptions
- * @typedef {import('../../../types').Resolve} Resolve
- * @typedef {import('../../../types').UnixfsV1DirectoryContent} UnixfsV1DirectoryContent
- * @typedef {import('../../../types').UnixfsV1Resolver} UnixfsV1Resolver
- * @typedef {import('@ipld/dag-pb').PBNode} PBNode
- */
+import { decode, type PBNode } from '@ipld/dag-pb'
+import map from 'it-map'
+import parallel from 'it-parallel'
+import { pipe } from 'it-pipe'
+import { CustomProgressEvent } from 'progress-events'
+import type { ExporterOptions, Resolve, UnixfsV1DirectoryContent, UnixfsV1Resolver, ReadableStorage, ExportWalk } from '../../../index.js'
 
 const hamtShardedDirectoryContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, blockstore) => {
   function yieldHamtDirectoryContent (options: ExporterOptions = {}): UnixfsV1DirectoryContent {
+    options.onProgress?.(new CustomProgressEvent<ExportWalk>('unixfs:exporter:walk:hamt-sharded-directory', {
+      cid
+    }))
+
     return listDirectory(node, path, resolve, depth, blockstore, options)
   }
 
   return yieldHamtDirectoryContent
 }
 
-async function * listDirectory (node: PBNode, path: string, resolve: Resolve, depth: number, blockstore: Blockstore, options: ExporterOptions): UnixfsV1DirectoryContent {
+async function * listDirectory (node: PBNode, path: string, resolve: Resolve, depth: number, blockstore: ReadableStorage, options: ExporterOptions): UnixfsV1DirectoryContent {
   const links = node.Links
 
-  for (const link of links) {
-    const name = link.Name != null ? link.Name.substring(2) : null
+  const results = pipe(
+    links,
+    source => map(source, link => {
+      return async () => {
+        const name = link.Name != null ? link.Name.substring(2) : null
+
+        if (name != null && name !== '') {
+          const result = await resolve(link.Hash, name, `${path}/${name}`, [], depth + 1, blockstore, options)
 
-    if (name != null && name !== '') {
-      const result = await resolve(link.Hash, name, `${path}/${name}`, [], depth + 1, blockstore, options)
+          return { entries: result.entry == null ? [] : [result.entry] }
+        } else {
+          // descend into subshard
+          const block = await blockstore.get(link.Hash, options)
+          node = decode(block)
 
-      yield result.entry
-    } else {
-      // descend into subshard
-      const block = await blockstore.get(link.Hash)
-      node = decode(block)
+          options.onProgress?.(new CustomProgressEvent<ExportWalk>('unixfs:exporter:walk:hamt-sharded-directory', {
+            cid: link.Hash
+          }))
 
-      for await (const file of listDirectory(node, path, resolve, depth, blockstore, options)) {
-        yield file
+          return { entries: listDirectory(node, path, resolve, depth, blockstore, options) }
+        }
       }
-    }
+    }),
+    source => parallel(source, { ordered: true })
+  )
+
+  for await (const { entries } of results) {
+    yield * entries
  }
 }
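Context for the `link.Name.substring(2)` above: in a UnixFS HAMT shard, each link name begins with a two-character uppercase hex index of the bucket the entry hashes into; a bare prefix with nothing after it points at a nested subshard, while a prefix followed by a name is a real directory entry. A small illustration (the names here are hypothetical):

```ts
const linkNames = ['3A', 'F4file.txt']

for (const linkName of linkNames) {
  const bucket = linkName.substring(0, 2)
  const name = linkName.substring(2)

  if (name !== '') {
    console.info(`entry "${name}" under bucket 0x${bucket}`)
  } else {
    console.info(`subshard under bucket 0x${bucket}`)
  }
}
```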
cid
+    }))
+
+    const buf = extractDataFromBlock(unixfs.data, 0n, start, end)
+
+    options.onProgress?.(new CustomProgressEvent<ExportProgress>('unixfs:exporter:progress:unixfs:raw', {
+      bytesRead: BigInt(buf.byteLength),
+      totalBytes: end - start,
+      fileSize: BigInt(unixfs.data.byteLength)
+    }))
+
+    yield buf
   }
 
   return yieldRawContent
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts
index 127694be..148ac81f 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts
@@ -1,12 +1,12 @@
+import { decode, type PBNode } from '@ipld/dag-pb'
 import errCode from 'err-code'
 import { UnixFS } from 'ipfs-unixfs'
 import findShardCid from '../../utils/find-cid-in-shard.js'
-import { decode, PBNode } from '@ipld/dag-pb'
-import contentFile from './content/file.js'
 import contentDirectory from './content/directory.js'
+import contentFile from './content/file.js'
 import contentHamtShardedDirectory from './content/hamt-sharded-directory.js'
-import type { CID } from 'multiformats/cid'
 import type { Resolver, UnixfsV1Resolver } from '../../index.js'
+import type { CID } from 'multiformats/cid'
 
 const findLinkCid = (node: PBNode, name: string): CID | undefined => {
   const link = node.Links.find(link => link.Name === name)
diff --git a/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts b/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts
index a0e36ee1..3b257320 100644
--- a/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts
+++ b/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts
@@ -1,9 +1,8 @@
-import { Bucket, BucketPosition, createHAMT } from 'hamt-sharding'
-import { decode, PBLink, PBNode } from '@ipld/dag-pb'
+import { decode, type PBLink, type PBNode } from '@ipld/dag-pb'
 import { murmur3128 } from '@multiformats/murmur3'
-import type { Blockstore } from 'interface-blockstore'
-import type { ExporterOptions, ShardTraversalContext } from '../index.js'
+import { Bucket, type BucketPosition, createHAMT } from 'hamt-sharding'
+import type { ExporterOptions, ShardTraversalContext, ReadableStorage } from '../index.js'
 import type { CID } from 'multiformats/cid'
 
 // FIXME: this is copy/pasted from ipfs-unixfs-importer/src/options.js
@@ -62,7 +61,7 @@ const toBucketPath = (position: BucketPosition): Array<Bucket<boolean>> => {
   return path.reverse()
 }
 
-const findShardCid = async (node: PBNode, name: string, blockstore: Blockstore, context?: ShardTraversalContext, options?: ExporterOptions): Promise<CID | undefined> => {
+const findShardCid = async (node: PBNode, name: string, blockstore: ReadableStorage, context?: ShardTraversalContext, options?: ExporterOptions): Promise<CID | undefined> => {
   if (context == null) {
     const rootBucket = createHAMT({
       hashFn
@@ -121,7 +120,7 @@ const findShardCid = async (node: PBNode, name: string, blockstore: Blockstore,
   const block = await blockstore.get(link.Hash, options)
   node = decode(block)
 
-  return await findShardCid(node, name, blockstore, context, options)
+  return findShardCid(node, name, blockstore, context, options)
 }
 
 export default findShardCid
diff --git a/packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.ts b/packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.ts
index 0984aea9..da6d9427 100644
--- a/packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.ts
+++ b/packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.ts
@@ -1,36 +1,37 @@
 import errCode from 'err-code'
 
-const validateOffsetAndLength = (size: number | bigint, offset: number | bigint = 0, length: number | bigint = size): { offset: bigint, length: bigint } => {
-  offset = BigInt(offset ?? 0)
-  length = BigInt(length ?? size)
+const validateOffsetAndLength = (size: number | bigint, offset: number | bigint = 0, length: number | bigint = size): { start: bigint, end: bigint } => {
+  const fileSize = BigInt(size)
+  const start = BigInt(offset ?? 0)
+  let end = BigInt(length)
 
-  if (offset == null) {
-    offset = 0n
+  if (end !== fileSize) {
+    end = start + end
   }
 
-  if (offset < 0n) {
-    throw errCode(new Error('Offset must be greater than or equal to 0'), 'ERR_INVALID_PARAMS')
+  if (end > fileSize) {
+    end = fileSize
   }
 
-  if (offset > size) {
-    throw errCode(new Error('Offset must be less than the file size'), 'ERR_INVALID_PARAMS')
+  if (start < 0n) {
+    throw errCode(new Error('Offset must be greater than or equal to 0'), 'ERR_INVALID_PARAMS')
   }
 
-  if (length == null) {
-    length = BigInt(size) - offset
+  if (start > fileSize) {
+    throw errCode(new Error('Offset must be less than the file size'), 'ERR_INVALID_PARAMS')
   }
 
-  if (length < 0n) {
+  if (end < 0n) {
     throw errCode(new Error('Length must be greater than or equal to 0'), 'ERR_INVALID_PARAMS')
   }
 
-  if (offset + length > size) {
-    length = BigInt(size) - offset
+  if (end > fileSize) {
+    throw errCode(new Error('Length must be less than the file size'), 'ERR_INVALID_PARAMS')
   }
 
   return {
-    offset,
-    length
+    start,
+    end
   }
 }
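The reworked helper converts a relative `(offset, length)` pair into an absolute, clamped `[start, end)` byte range, which is what the resolvers above now consume. A worked example using the helper (it is internal to the package, so the calls here are illustrative):

```ts
// a 100-byte file, offset 10, length 20 -> the absolute range [10n, 30n)
const { start, end } = validateOffsetAndLength(100, 10, 20)
// start === 10n, end === 30n, so end - start === 20n bytes will be read

// length defaults to the file size, so omitting it reads from the offset
// to the end of the file
const tail = validateOffsetAndLength(100, 10)
// tail.start === 10n, tail.end === 100n

// over-long lengths are clamped to the file size instead of throwing
const clamped = validateOffsetAndLength(100, 90, 20)
// clamped.start === 90n, clamped.end === 100n
```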
diff --git a/packages/ipfs-unixfs-exporter/test/exporter-esoteric.spec.ts b/packages/ipfs-unixfs-exporter/test/exporter-esoteric.spec.ts
new file mode 100644
index 00000000..2653c5cf
--- /dev/null
+++ b/packages/ipfs-unixfs-exporter/test/exporter-esoteric.spec.ts
@@ -0,0 +1,364 @@
+/* eslint-env mocha */
+
+import * as dagPb from '@ipld/dag-pb'
+import { expect } from 'aegir/chai'
+import { MemoryBlockstore } from 'blockstore-core'
+import { UnixFS } from 'ipfs-unixfs'
+import randomBytes from 'iso-random-stream/src/random.js'
+import all from 'it-all'
+import { CID } from 'multiformats/cid'
+import * as raw from 'multiformats/codecs/raw'
+import { sha256 } from 'multiformats/hashes/sha2'
+import { concat, concat as uint8ArrayConcat } from 'uint8arrays/concat'
+import { isNode } from 'wherearewe'
+import { exporter } from './../src/index.js'
+import type { Blockstore } from 'interface-blockstore'
+
+describe('exporter esoteric DAGs', () => {
+  let block: Blockstore
+
+  beforeEach(() => {
+    block = new MemoryBlockstore()
+  })
+
+  async function storeBlock (buf: Uint8Array, codec: number): Promise<CID> {
+    const mh = await sha256.digest(buf)
+    const cid = CID.createV1(codec, mh)
+
+    await block.put(cid, buf)
+
+    return cid
+  }
+
+  it('exports an unbalanced DAG', async () => {
+    const leaves = await Promise.all([
+      randomBytes(5),
+      randomBytes(3),
+      randomBytes(6),
+      randomBytes(10),
+      randomBytes(4),
+      randomBytes(7),
+      randomBytes(8)
+    ].map(async buf => {
+      return {
+        cid: await storeBlock(buf, raw.code),
+        buf
+      }
+    }))
+
+    // create an unbalanced DAG:
+    //
+    //      root
+    //    / |  | \
+    //   0  *  5  6
+    //    / | \
+    //   1  *  4
+    //     / \
+    //    2   3
+
+    const intermediateNode1 = {
+      Data: new UnixFS({
+        type: 'file',
+        blockSizes: [
+          BigInt(leaves[2].buf.byteLength),
+          BigInt(leaves[3].buf.byteLength)
+        ]
+      }).marshal(),
+      Links: [{
+        Name: '',
+        Hash: leaves[2].cid,
+        Tsize: leaves[2].buf.byteLength
+      }, {
+        Name: '',
+        Hash: leaves[3].cid,
+        Tsize: leaves[3].buf.byteLength
+      }]
+    }
+    const intermediateNode1Buf = dagPb.encode(intermediateNode1)
const intermediateNode1Cid = await storeBlock(intermediateNode1Buf, dagPb.code) + + const intermediateNode2 = { + Data: new UnixFS({ + type: 'file', + blockSizes: [ + BigInt(leaves[1].buf.byteLength), + BigInt(leaves[2].buf.byteLength + leaves[3].buf.byteLength), + BigInt(leaves[4].buf.byteLength) + ] + }).marshal(), + Links: [{ + Name: '', + Hash: leaves[1].cid, + Tsize: leaves[1].buf.byteLength + }, { + Name: '', + Hash: intermediateNode1Cid, + Tsize: intermediateNode1Buf.length + }, { + Name: '', + Hash: leaves[4].cid, + Tsize: leaves[4].buf.byteLength + }] + } + + const intermediateNode2Buf = dagPb.encode(intermediateNode2) + const intermediateNode2Cid = await storeBlock(intermediateNode2Buf, dagPb.code) + + const unixfs = new UnixFS({ + type: 'file', + blockSizes: [ + BigInt(leaves[0].buf.byteLength), + BigInt(leaves[1].buf.byteLength + leaves[2].buf.byteLength + leaves[3].buf.byteLength + leaves[4].buf.byteLength), + BigInt(leaves[5].buf.byteLength), + BigInt(leaves[6].buf.byteLength) + ] + }) + + const rootNode = { + Data: unixfs.marshal(), + Links: [{ + Name: '', + Hash: leaves[0].cid, + Tsize: leaves[0].buf.byteLength + }, { + Name: '', + Hash: intermediateNode2Cid, + Tsize: intermediateNode2Buf.byteLength + }, { + Name: '', + Hash: leaves[5].cid, + Tsize: leaves[5].buf.byteLength + }, { + Name: '', + Hash: leaves[6].cid, + Tsize: leaves[6].buf.byteLength + }] + } + + const rootBuf = dagPb.encode(rootNode) + const rootCid = await storeBlock(rootBuf, dagPb.code) + const exported = await exporter(rootCid, block) + + if (exported.type !== 'file') { + throw new Error('Unexpected type') + } + + const data = uint8ArrayConcat(await all(exported.content())) + expect(data).to.deep.equal(concat( + leaves.map(l => l.buf) + )) + }) + + it('exports a very deep DAG', async () => { + if (!isNode) { + // browsers are quite slow so only run on node + return + } + + const buf: Uint8Array = randomBytes(5) + let child = { + cid: await storeBlock(buf, raw.code), + buf + } + + // create a very deep DAG: + // + // root + // \ + // * + // \ + // * + // \ + // ... 
many nodes here + // \ + // 0 + let rootCid: CID | undefined + + for (let i = 0; i < 100000; i++) { + const parent = { + Data: new UnixFS({ + type: 'file', + blockSizes: [ + BigInt(buf.byteLength) + ] + }).marshal(), + Links: [{ + Name: '', + Hash: child.cid, + Tsize: child.buf.byteLength + }] + } + + const parentBuf = dagPb.encode(parent) + rootCid = await storeBlock(parentBuf, dagPb.code) + + child = { + cid: rootCid, + buf: parentBuf + } + } + + if (rootCid == null) { + throw new Error('Root CID not set') + } + + const exported = await exporter(rootCid, block) + + if (exported.type !== 'file') { + throw new Error('Unexpected type') + } + + const data = uint8ArrayConcat(await all(exported.content())) + expect(data).to.deep.equal(buf) + }) + + it('errors on DAG with blocksizes that are too large', async () => { + const leaves = await Promise.all([ + randomBytes(5), + randomBytes(3), + randomBytes(6) + ].map(async buf => { + return { + cid: await storeBlock(buf, raw.code), + buf + } + })) + + const unixfs = new UnixFS({ + type: 'file', + blockSizes: [ + BigInt(leaves[0].buf.byteLength), + BigInt(leaves[1].buf.byteLength + 5), // this is wrong + BigInt(leaves[2].buf.byteLength) + ] + }) + + const rootNode = { + Data: unixfs.marshal(), + Links: [{ + Name: '', + Hash: leaves[0].cid, + Tsize: leaves[0].buf.byteLength + }, { + Name: '', + Hash: leaves[1].cid, + Tsize: leaves[1].buf.byteLength + }, { + Name: '', + Hash: leaves[2].cid, + Tsize: leaves[2].buf.byteLength + }] + } + + const rootBuf = dagPb.encode(rootNode) + const rootCid = await storeBlock(rootBuf, dagPb.code) + const exported = await exporter(rootCid, block) + + if (exported.type !== 'file') { + throw new Error('Unexpected type') + } + + await expect(all(exported.content())).to.eventually.be.rejected + .with.property('code', 'ERR_UNDER_READ') + }) + + it('errors on DAG with blocksizes that are too small', async () => { + const leaves = await Promise.all([ + randomBytes(5), + randomBytes(3), + randomBytes(6) + ].map(async buf => { + return { + cid: await storeBlock(buf, raw.code), + buf + } + })) + + const unixfs = new UnixFS({ + type: 'file', + blockSizes: [ + BigInt(leaves[0].buf.byteLength), + BigInt(leaves[1].buf.byteLength - 2), // this is wrong + BigInt(leaves[2].buf.byteLength) + ] + }) + + const rootNode = { + Data: unixfs.marshal(), + Links: [{ + Name: '', + Hash: leaves[0].cid, + Tsize: leaves[0].buf.byteLength + }, { + Name: '', + Hash: leaves[1].cid, + Tsize: leaves[1].buf.byteLength + }, { + Name: '', + Hash: leaves[2].cid, + Tsize: leaves[2].buf.byteLength + }] + } + + const rootBuf = dagPb.encode(rootNode) + const rootCid = await storeBlock(rootBuf, dagPb.code) + const exported = await exporter(rootCid, block) + + if (exported.type !== 'file') { + throw new Error('Unexpected type') + } + + await expect(all(exported.content())).to.eventually.be.rejected + .with.property('code', 'ERR_OVER_READ') + }) + + it('errors on DAG with incorrect number of blocksizes', async () => { + const leaves = await Promise.all([ + randomBytes(5), + randomBytes(3), + randomBytes(6) + ].map(async buf => { + return { + cid: await storeBlock(buf, raw.code), + buf + } + })) + + const unixfs = new UnixFS({ + type: 'file', + blockSizes: [ + BigInt(leaves[0].buf.byteLength), + // BigInt(leaves[1].buf.byteLength), // this is wrong + BigInt(leaves[2].buf.byteLength) + ] + }) + + const rootNode = { + Data: unixfs.marshal(), + Links: [{ + Name: '', + Hash: leaves[0].cid, + Tsize: leaves[0].buf.byteLength + }, { + Name: '', + Hash: leaves[1].cid, + 
Tsize: leaves[1].buf.byteLength
+    }, {
+      Name: '',
+      Hash: leaves[2].cid,
+      Tsize: leaves[2].buf.byteLength
+    }]
+  }
+
+  const rootBuf = dagPb.encode(rootNode)
+  const rootCid = await storeBlock(rootBuf, dagPb.code)
+  const exported = await exporter(rootCid, block)
+
+  if (exported.type !== 'file') {
+    throw new Error('Unexpected type')
+  }
+
+  await expect(all(exported.content())).to.eventually.be.rejected
+    .with.property('code', 'ERR_NOT_UNIXFS')
+  })
+})
diff --git a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts
index f473849f..7d1b1be6 100644
--- a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts
+++ b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts
@@ -1,18 +1,18 @@
 /* eslint-env mocha */
 
+import * as dagPb from '@ipld/dag-pb'
 import { expect } from 'aegir/chai'
+import { MemoryBlockstore } from 'blockstore-core'
 import { UnixFS } from 'ipfs-unixfs'
+import { importer } from 'ipfs-unixfs-importer'
 import all from 'it-all'
-import last from 'it-last'
 import randomBytes from 'it-buffer-stream'
-import { exporter, walkPath } from '../src/index.js'
-import { importer } from 'ipfs-unixfs-importer'
-import * as dagPb from '@ipld/dag-pb'
-import { concat as uint8ArrayConcat } from 'uint8arrays/concat'
-import asAsyncIterable from './helpers/as-async-iterable.js'
+import last from 'it-last'
 import { CID } from 'multiformats/cid'
 import { sha256 } from 'multiformats/hashes/sha2'
-import { MemoryBlockstore } from 'blockstore-core'
+import { concat as uint8ArrayConcat } from 'uint8arrays/concat'
+import { exporter, walkPath } from '../src/index.js'
+import asAsyncIterable from './helpers/as-async-iterable.js'
 
 const SHARD_SPLIT_THRESHOLD = 10
 
@@ -22,7 +22,7 @@ describe('exporter sharded', function () {
   const block = new MemoryBlockstore()
 
   const createShard = async (numFiles: number): Promise<CID> => {
-    return await createShardWithFileNames(numFiles, (index) => `file-${index}`)
+    return createShardWithFileNames(numFiles, (index) => `file-${index}`)
   }
 
   const createShardWithFileNames = async (numFiles: number, fileName: (index: number) => string): Promise<CID> => {
@@ -31,7 +31,7 @@ describe('exporter sharded', function () {
       content: asAsyncIterable(Uint8Array.from([0, 1, 2, 3, 4, index]))
     }))
 
-    return await createShardWithFiles(files)
+    return createShardWithFiles(files)
   }
 
   const createShardWithFiles = async (files: Array<{ path: string, content: AsyncIterable<Uint8Array> }>): Promise<CID> => {
diff --git a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.ts b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.ts
index 5958d2e6..41a456fa 100644
--- a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.ts
+++ b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.ts
@@ -1,14 +1,14 @@
 /* eslint-env mocha */
 
 import { expect } from 'aegir/chai'
+import { MemoryBlockstore } from 'blockstore-core'
 import { importer } from 'ipfs-unixfs-importer'
 import all from 'it-all'
-import last from 'it-last'
-import { MemoryBlockstore } from 'blockstore-core'
 import randomBytes from 'it-buffer-stream'
+import last from 'it-last'
 import { concat as uint8ArrayConcat } from 'uint8arrays/concat'
-import asAsyncIterable from './helpers/as-async-iterable.js'
 import { exporter, walkPath } from './../src/index.js'
+import asAsyncIterable from './helpers/as-async-iterable.js'
 
 const ONE_MEG = Math.pow(1024, 2)
 
diff --git a/packages/ipfs-unixfs-exporter/test/exporter.spec.ts b/packages/ipfs-unixfs-exporter/test/exporter.spec.ts
index 12cb9373..af28be2e 100644
--- a/packages/ipfs-unixfs-exporter/test/exporter.spec.ts
+++ b/packages/ipfs-unixfs-exporter/test/exporter.spec.ts
@@ -1,30 +1,33 @@
 /* eslint-env mocha */
 
+import * as dagCbor from '@ipld/dag-cbor'
+import * as dagPb from '@ipld/dag-pb'
 import { expect } from 'aegir/chai'
+import { MemoryBlockstore } from 'blockstore-core'
+import delay from 'delay'
 import { UnixFS } from 'ipfs-unixfs'
-import { CID } from 'multiformats/cid'
-import * as dagPb from '@ipld/dag-pb'
-import * as dagCbor from '@ipld/dag-cbor'
-import { sha256 } from 'multiformats/hashes/sha2'
-import { identity } from 'multiformats/hashes/identity'
-import * as raw from 'multiformats/codecs/raw'
-import { exporter, recursive } from '../src/index.js'
 import { importer } from 'ipfs-unixfs-importer'
+import { fixedSize } from 'ipfs-unixfs-importer/chunker'
+import { balanced, type FileLayout, flat, trickle } from 'ipfs-unixfs-importer/layout'
 import all from 'it-all'
-import last from 'it-last'
-import first from 'it-first'
 import randomBytes from 'it-buffer-stream'
-import { MemoryBlockstore } from 'blockstore-core'
+import first from 'it-first'
+import last from 'it-last'
+import toBuffer from 'it-to-buffer'
+import { CID } from 'multiformats/cid'
+import * as raw from 'multiformats/codecs/raw'
+import { identity } from 'multiformats/hashes/identity'
+import { sha256 } from 'multiformats/hashes/sha2'
+import { Readable } from 'readable-stream'
 import { concat as uint8ArrayConcat } from 'uint8arrays/concat'
 import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
 import { toString as uint8ArrayToString } from 'uint8arrays/to-string'
+import { isNode } from 'wherearewe'
+import { exporter, recursive } from '../src/index.js'
 import asAsyncIterable from './helpers/as-async-iterable.js'
-import delay from 'delay'
 import type { PBNode } from '@ipld/dag-pb'
 import type { Blockstore } from 'interface-blockstore'
-import { balanced, FileLayout, flat, trickle } from 'ipfs-unixfs-importer/layout'
 import type { Chunker } from 'ipfs-unixfs-importer/chunker'
-import { fixedSize } from 'ipfs-unixfs-importer/chunker'
 
 const ONE_MEG = Math.pow(1024, 2)
 
@@ -333,7 +336,7 @@ describe('exporter', () => {
     async get (cid: CID) {
       await delay(Math.random() * 10)
 
-      return await block.get(cid)
+      return block.get(cid)
     }
   }
 
@@ -1002,7 +1005,7 @@ describe('exporter', () => {
       throw new Error('Unexpected type')
     }
 
-    return await expect(first(exported.content())).to.eventually.deep.equal(node)
+    return expect(first(exported.content())).to.eventually.deep.equal(node)
   })
 
   it('errors when exporting a node with no resolver', async () => {
@@ -1216,7 +1219,7 @@ describe('exporter', () => {
     const customBlock = {
       get: async (cid: CID, options: { signal: AbortSignal }) => {
         // promise will never resolve, so reject it when the abort signal is sent
-        return await new Promise((resolve, reject) => {
+        return new Promise((resolve, reject) => {
          options.signal.addEventListener('abort', () => {
            reject(new Error(message))
          })
@@ -1229,4 +1232,81 @@ describe('exporter', () => {
       signal: abortController.signal
     })).to.eventually.be.rejectedWith(message)
   })
+
+  it('should support being used with readable-stream', async () => {
+    if (!isNode) {
+      // node-only test
+      return
+    }
+
+    let dataSizeInBytes = 10
+
+    // iterate through order of magnitude in size until hitting 10MB
+    while (dataSizeInBytes <= 10_000_000) {
+      const bytes = await toBuffer(randomBytes(dataSizeInBytes))
+
+      // chunk up the bytes to simulate a more real-world like behavior
+      const chunkLength = 100_000
+      let currentIndex = 0
+
+      const readableStream = new Readable({
+        read (): void {
+          // if this is the last chunk
+          if (currentIndex + chunkLength > bytes.length) {
+            this.push(bytes.subarray(currentIndex))
+            this.push(null)
+          } else {
+            this.push(bytes.subarray(currentIndex, currentIndex + chunkLength))
+
+            currentIndex = currentIndex + chunkLength
+          }
+        }
+      })
+
+      const result = await last(importer([{
+        content: readableStream
+      }], block))
+
+      if (result == null) {
+        throw new Error('Import failed')
+      }
+
+      const file = await exporter(result.cid, block)
+      const contentIterator = file.content()
+
+      const readableStreamToBytes = async (readableStream: Readable): Promise<Uint8Array> => {
+        return new Promise((resolve, reject) => {
+          const chunks: any[] = []
+          readableStream.on('data', chunk => {
+            chunks.push(chunk)
+          })
+
+          readableStream.on('end', () => {
+            const uint8Array = uint8ArrayConcat(chunks)
+            resolve(uint8Array)
+          })
+
+          readableStream.on('error', reject)
+        })
+      }
+
+      const dataStream = new Readable({
+        async read (): Promise<void> {
+          const result = await contentIterator.next()
+          if (result.done === true) {
+            this.push(null) // end the stream
+          } else {
+            this.push(result.value)
+          }
+        }
+      })
+
+      const data = await readableStreamToBytes(dataStream)
+
+      expect(data.byteLength).to.equal(dataSizeInBytes)
+      expect(data).to.equalBytes(bytes)
+
+      dataSizeInBytes *= 10
+    }
+  })
 })
diff --git a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts
index 8ea7ec40..2f4d5451 100644
--- a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts
+++ b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts
@@ -1,14 +1,14 @@
 /* eslint-env mocha */
 
-import { importer } from 'ipfs-unixfs-importer'
-import { exporter, UnixFSDirectory, UnixFSEntry } from '../src/index.js'
 import { expect } from 'aegir/chai'
+import { MemoryBlockstore } from 'blockstore-core'
+import { importer } from 'ipfs-unixfs-importer'
 import all from 'it-all'
 import last from 'it-last'
-import { MemoryBlockstore } from 'blockstore-core'
 import { concat as uint8ArrayConcat } from 'uint8arrays/concat'
 import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
 import { toString as uint8ArrayToString } from 'uint8arrays/to-string'
+import { exporter, type UnixFSDirectory, type UnixFSEntry } from '../src/index.js'
 import asAsyncIterable from './helpers/as-async-iterable.js'
 import type { CID } from 'multiformats/cid'
 
diff --git a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts
index 919fd04d..e1ef084a 100644
--- a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts
+++ b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts
@@ -1,13 +1,13 @@
 /* eslint-env mocha */
 
 import { expect } from 'aegir/chai'
-import all from 'it-all'
-import { importer } from 'ipfs-unixfs-importer'
-import { exporter, UnixFSEntry } from '../src/index.js'
 import { MemoryBlockstore } from 'blockstore-core'
+import { importer } from 'ipfs-unixfs-importer'
+import all from 'it-all'
 import { concat as uint8ArrayConcat } from 'uint8arrays/concat'
 import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
 import { toString as uint8ArrayToString } from 'uint8arrays/to-string'
+import { exporter, type UnixFSEntry } from '../src/index.js'
 import asAsyncIterable from './helpers/as-async-iterable.js'
'./helpers/as-async-iterable.js' import type { CID } from 'multiformats/cid' diff --git a/packages/ipfs-unixfs-exporter/test/import-export.spec.ts b/packages/ipfs-unixfs-exporter/test/import-export.spec.ts index 8e400375..de7e6479 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/import-export.spec.ts @@ -4,32 +4,32 @@ import { expect } from 'aegir/chai' import loadFixture from 'aegir/fixtures' import { MemoryBlockstore } from 'blockstore-core' -import asAsyncIterable from './helpers/as-async-iterable.js' -import { importer } from 'ipfs-unixfs-importer' +import { importer, type ImporterOptions } from 'ipfs-unixfs-importer' +import { flat, balanced, trickle, type FileLayout } from 'ipfs-unixfs-importer/layout' import { exporter } from '../src/index.js' +import asAsyncIterable from './helpers/as-async-iterable.js' const bigFile = loadFixture(('test') + '/fixtures/1.2MiB.txt') -const strategies = [ - 'flat', - 'balanced', - 'trickle' -] +const layouts: Record = { + flat: flat(), + balanced: balanced(), + trickle: trickle() +} describe('import and export', function () { this.timeout(30 * 1000) - strategies.forEach((strategy) => { - const importerOptions = { strategy } + Object.entries(layouts).forEach(([name, layout]) => { + const importerOptions: ImporterOptions = { layout } - describe('using builder: ' + strategy, () => { + describe('using builder: ' + name, () => { const block = new MemoryBlockstore() it('imports and exports', async () => { - const path = `${strategy}-big.dat` + const path = `${name}-big.dat` const values = [{ path, content: asAsyncIterable(bigFile) }] - // @ts-expect-error for await (const file of importer(values, block, importerOptions)) { expect(file.path).to.equal(path) diff --git a/packages/ipfs-unixfs-exporter/test/importer.spec.ts b/packages/ipfs-unixfs-exporter/test/importer.spec.ts index 7d5cea99..8a354159 100644 --- a/packages/ipfs-unixfs-exporter/test/importer.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/importer.spec.ts @@ -1,26 +1,26 @@ /* eslint-env mocha */ -import { importer, ImporterOptions } from 'ipfs-unixfs-importer' -import { exporter, recursive } from '../src/index.js' -import extend from 'merge-options' +import { decode } from '@ipld/dag-pb' import { expect } from 'aegir/chai' -import sinon from 'sinon' -import { Mtime, UnixFS } from 'ipfs-unixfs' -import collectLeafCids from './helpers/collect-leaf-cids.js' import loadFixture from 'aegir/fixtures' +import { MemoryBlockstore } from 'blockstore-core' +import { type Mtime, UnixFS } from 'ipfs-unixfs' +import { importer, type ImporterOptions } from 'ipfs-unixfs-importer' +import { fixedSize } from 'ipfs-unixfs-importer/chunker' +import { balanced, type FileLayout, flat, trickle } from 'ipfs-unixfs-importer/layout' import all from 'it-all' import first from 'it-first' -import { MemoryBlockstore } from 'blockstore-core' +import last from 'it-last' +import extend from 'merge-options' +import { base58btc } from 'multiformats/bases/base58' +import { CID } from 'multiformats/cid' +import sinon from 'sinon' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { exporter, recursive } from '../src/index.js' import asAsyncIterable from './helpers/as-async-iterable.js' -import last from 'it-last' -import { CID } from 'multiformats/cid' -import { base58btc } from 'multiformats/bases/base58' -import { decode } from '@ipld/dag-pb' +import collectLeafCids from 
'./helpers/collect-leaf-cids.js' import type { Blockstore } from 'interface-blockstore' -import { balanced, FileLayout, flat, trickle } from 'ipfs-unixfs-importer/layout' -import { fixedSize } from 'ipfs-unixfs-importer/chunker' const bigFile = loadFixture('test/fixtures/1.2MiB.txt') const smallFile = loadFixture('test/fixtures/200Bytes.txt') @@ -44,7 +44,14 @@ function dateToTimespec (date: Date): Mtime { } } -const baseFiles = { +interface File { + cid: string + size: bigint + type: string + path: string +} + +const baseFiles: Record = { '200Bytes.txt': { cid: 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8', size: 200n, @@ -65,7 +72,7 @@ const baseFiles = { } } -const strategyBaseFiles = { +const strategyBaseFiles: Record> = { flat: baseFiles, balanced: extend({}, baseFiles, { '1.2MiB.txt': { @@ -93,7 +100,7 @@ const strategies: Array<'flat' | 'balanced' | 'trickle'> = [ 'trickle' ] -const strategyOverrides = { +const strategyOverrides: Record> = { balanced: { 'foo-big': { cid: 'QmaFgyFJUP4fxFySJCddg2Pj6rpwSywopWk87VEVv52RSj', @@ -203,7 +210,7 @@ const checkLeafNodeTypes = async (blockstore: Blockstore, options: Partial await blockstore.get(link.Hash)) + node.Links.map(async link => blockstore.get(link.Hash)) ) linkedBlocks.forEach(bytes => { @@ -305,7 +312,6 @@ strategies.forEach((strategy) => { cid: 'QmVfHowk2oKuWFyVwSRt8H1dQ3v272jyWSwhfQnTtWNmfw', size: 200n }) - // @ts-expect-error }, strategyOverrides[strategy]) const expected = extend({}, defaultResults) @@ -634,13 +640,13 @@ strategies.forEach((strategy) => { } }) - it('will call an optional progress function', async () => { + it('will call an optional onProgress function', async () => { const chunkSize = 2048 const path = '1.2MiB.txt' - const progress = sinon.stub() + const onProgress = sinon.stub() const options: Partial = { - progress, + onProgress, chunker: fixedSize({ chunkSize }) @@ -651,8 +657,9 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile) }], block, options)) - expect(progress.called).to.equal(true) - expect(progress.args[0]).to.deep.equal([chunkSize, path]) + expect(onProgress.called).to.equal(true) + expect(onProgress.getCall(0).args[0]).to.have.property('type', 'unixfs:importer:progress:file:read') + expect(onProgress.getCall(0).args[0]).to.have.deep.property('detail', { bytesRead: BigInt(chunkSize), chunkSize: BigInt(chunkSize), path }) }) it('will import files with CID version 1', async () => { @@ -1069,7 +1076,7 @@ describe('configuration', () => { /** @type {import('ipfs-unixfs-importer').DAGBuilder} */ dagBuilder: async function * (source, block) { // eslint-disable-line require-await yield async function () { - return await Promise.resolve({ + return Promise.resolve({ cid, path: 'path', unixfs, diff --git a/packages/ipfs-unixfs-importer/CHANGELOG.md b/packages/ipfs-unixfs-importer/CHANGELOG.md index a50b2ecb..10b30840 100644 --- a/packages/ipfs-unixfs-importer/CHANGELOG.md +++ b/packages/ipfs-unixfs-importer/CHANGELOG.md @@ -1,3 +1,109 @@ +## [ipfs-unixfs-importer-v15.1.4](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-v15.1.3...ipfs-unixfs-importer-v15.1.4) (2023-05-11) + + +### Dependencies + +* bump it-all from 2.0.1 to 3.0.2 ([#324](https://github.com/ipfs/js-ipfs-unixfs/issues/324)) ([0738c35](https://github.com/ipfs/js-ipfs-unixfs/commit/0738c35cf437020c94dc8cc17644b01c9289b9a2)) +* bump it-first from 2.0.1 to 3.0.2 ([#325](https://github.com/ipfs/js-ipfs-unixfs/issues/325)) 
([3db2948](https://github.com/ipfs/js-ipfs-unixfs/commit/3db2948ecb3fa2780582c8cddd32c1030e226356)) + +## [ipfs-unixfs-importer-v15.1.3](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-v15.1.2...ipfs-unixfs-importer-v15.1.3) (2023-05-11) + + +### Dependencies + +* bump it-parallel-batch from 2.0.1 to 3.0.1 ([#327](https://github.com/ipfs/js-ipfs-unixfs/issues/327)) ([021dd0d](https://github.com/ipfs/js-ipfs-unixfs/commit/021dd0dbaa4ee3e9751dd422eb22c1ae8f1695be)) +* **dev:** bump it-drain from 2.0.1 to 3.0.2 ([#331](https://github.com/ipfs/js-ipfs-unixfs/issues/331)) ([4b1462c](https://github.com/ipfs/js-ipfs-unixfs/commit/4b1462c9a5f7730f41f76d06543fc63b58404e20)) + +## [ipfs-unixfs-importer-v15.1.2](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-v15.1.1...ipfs-unixfs-importer-v15.1.2) (2023-05-11) + + +### Dependencies + +* bump it-batch from 2.0.1 to 3.0.2 ([#332](https://github.com/ipfs/js-ipfs-unixfs/issues/332)) ([4c1c01d](https://github.com/ipfs/js-ipfs-unixfs/commit/4c1c01d834096176f5c814ae54d4e77f4aef05e3)) +* bump it-last from 2.0.1 to 3.0.2 ([#330](https://github.com/ipfs/js-ipfs-unixfs/issues/330)) ([7f8df4d](https://github.com/ipfs/js-ipfs-unixfs/commit/7f8df4d437befacd36c36c85f178b14fc7930fd6)) + +## [ipfs-unixfs-importer-v15.1.1](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-v15.1.0...ipfs-unixfs-importer-v15.1.1) (2023-03-23) + + +### Dependencies + +* update interface-store to 5.x.x ([#304](https://github.com/ipfs/js-ipfs-unixfs/issues/304)) ([46f4de5](https://github.com/ipfs/js-ipfs-unixfs/commit/46f4de564c83aaf120172b93309d1519a52f2c6d)) + + +### Documentation + +* example in README of ipfs-unixfs-importer ([#303](https://github.com/ipfs/js-ipfs-unixfs/issues/303)) ([abbfe0c](https://github.com/ipfs/js-ipfs-unixfs/commit/abbfe0ce63eb72771d017cee5026151c45d0e3bd)) + +## [ipfs-unixfs-importer-v15.1.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-v15.0.3...ipfs-unixfs-importer-v15.1.0) (2023-03-17) + + +### Features + +* adds progress events to the importer and exporter ([#302](https://github.com/ipfs/js-ipfs-unixfs/issues/302)) ([d0df723](https://github.com/ipfs/js-ipfs-unixfs/commit/d0df7237f155b73b8c722d6750742f9976232c0e)) + +## [ipfs-unixfs-importer-v15.0.3](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-v15.0.2...ipfs-unixfs-importer-v15.0.3) (2023-03-16) + + +### Bug Fixes + +* align blockstore pick interface name ([#301](https://github.com/ipfs/js-ipfs-unixfs/issues/301)) ([ca10d79](https://github.com/ipfs/js-ipfs-unixfs/commit/ca10d792083b80fd45754b5260eb486a621cc489)) + +## [ipfs-unixfs-importer-v15.0.2](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-v15.0.1...ipfs-unixfs-importer-v15.0.2) (2023-03-15) + + +### Bug Fixes + +* reduce required number of blockstore methods ([#298](https://github.com/ipfs/js-ipfs-unixfs/issues/298)) ([238fe4e](https://github.com/ipfs/js-ipfs-unixfs/commit/238fe4e4f2bac5075eac4dced9a52f3c9c8e307a)) + +## [ipfs-unixfs-importer-v15.0.1](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-v15.0.0...ipfs-unixfs-importer-v15.0.1) (2023-03-15) 
+ + +### Bug Fixes + +* pass onProgress option to blockstore ([#294](https://github.com/ipfs/js-ipfs-unixfs/issues/294)) ([3bfb34d](https://github.com/ipfs/js-ipfs-unixfs/commit/3bfb34d8e660404c89e39925ad053c940bf176ce)) + +## [ipfs-unixfs-importer-v15.0.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-v14.0.2...ipfs-unixfs-importer-v15.0.0) (2023-03-15) + + +### ⚠ BREAKING CHANGES + +* please use the latest interface-blockstore versions of everything, aside from this +impact should be minimal + +### Dependencies + +* update blockstore ([#290](https://github.com/ipfs/js-ipfs-unixfs/issues/290)) ([6efaab5](https://github.com/ipfs/js-ipfs-unixfs/commit/6efaab5dc509beb5bd5049e104399a5d3b46301d)) + +## [ipfs-unixfs-importer-v14.0.2](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-v14.0.1...ipfs-unixfs-importer-v14.0.2) (2023-03-13) + + +### Bug Fixes + +* use simpler blockstore interface ([#287](https://github.com/ipfs/js-ipfs-unixfs/issues/287)) ([b332b16](https://github.com/ipfs/js-ipfs-unixfs/commit/b332b167ecbb1083030a57144088d318bf59701e)) + +## [ipfs-unixfs-importer-v14.0.1](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-v14.0.0...ipfs-unixfs-importer-v14.0.1) (2023-02-17) + + +### Bug Fixes + +* export importFile and importDirectory function ([#284](https://github.com/ipfs/js-ipfs-unixfs/issues/284)) ([4b83a19](https://github.com/ipfs/js-ipfs-unixfs/commit/4b83a19a19157dd996c62f51cca063a11a6196b1)) + +## [ipfs-unixfs-importer-v14.0.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-v13.0.0...ipfs-unixfs-importer-v14.0.0) (2023-02-16) + + +### ⚠ BREAKING CHANGES + +* The options object now accepts preconfigured instances of chunkers and file layouts - these can be imported from this module - see https://github.com/ipfs/js-ipfs-unixfs/pull/283 for more + +### Features + +* accept pre-configured import components as options instead of options for components ([#283](https://github.com/ipfs/js-ipfs-unixfs/issues/283)) ([5a38d01](https://github.com/ipfs/js-ipfs-unixfs/commit/5a38d0126457926d1c17aeee75700565b400e4cf)) + + +### Dependencies + +* update sibling dependencies ([b4f6fc8](https://github.com/ipfs/js-ipfs-unixfs/commit/b4f6fc83245bc99223704ce918fd4db691221412)) + ## [ipfs-unixfs-importer-v13.0.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-v12.0.1...ipfs-unixfs-importer-v13.0.0) (2023-02-09) diff --git a/packages/ipfs-unixfs-importer/README.md b/packages/ipfs-unixfs-importer/README.md index 00abe7c0..9eb326d9 100644 --- a/packages/ipfs-unixfs-importer/README.md +++ b/packages/ipfs-unixfs-importer/README.md @@ -14,7 +14,8 @@ - [Example](#example) - [API](#api) - [const stream = importer(source, blockstore \[, options\])](#const-stream--importersource-blockstore--options) - - [const result = await importContent(content, blockstore \[, options\])](#const-result--await-importcontentcontent-blockstore--options) + - [const result = await importFile(content, blockstore \[, options\])](#const-result--await-importfilecontent-blockstore--options) + - [const result = await importDirectory(content, blockstore \[, options\])](#const-result--await-importdirectorycontent-blockstore--options) - [const result = await importBytes(buf, blockstore \[, 
options\])](#const-result--await-importbytesbuf-blockstore--options) - [const result = await importByteStream(source, blockstore \[, options\])](#const-result--await-importbytestreamsource-blockstore--options) - [API Docs](#api-docs) @@ -51,20 +52,21 @@ And write the importing logic: ```js import { importer } from 'ipfs-unixfs-importer' import { MemoryBlockstore } from 'blockstore-core/memory' +import * as fs from 'node:fs' // Where the blocks will be stored const blockstore = new MemoryBlockstore() -// Import path /tmp/foo/bar +// Import path /tmp/foo/ const source = [{ path: '/tmp/foo/bar', - content: fs.createReadStream(file) + content: fs.createReadStream('/tmp/foo/bar') }, { path: '/tmp/foo/quux', - content: fs.createReadStream(file2) + content: fs.createReadStream('/tmp/foo/quux') }] -for await (const entry of importer(source, blockstore, options)) { +for await (const entry of importer(source, blockstore)) { console.info(entry) } ``` @@ -97,7 +99,7 @@ When run, metadata about DAGNodes in the created tree is printed until the root: ## API ```js -import { importer, importContent, importBytes } from 'ipfs-unixfs-importer' +import { importer, importFile, importDirectory, importBytes, importByteStream } from 'ipfs-unixfs-importer' ``` ### const stream = importer(source, blockstore \[, options]) @@ -119,10 +121,14 @@ The `importer` function returns an async iterator that takes a source async iterator The input's file paths and directory structure will be preserved in the [`dag-pb`](https://github.com/ipld/js-dag-pb) created nodes. -### const result = await importContent(content, blockstore \[, options]) +### const result = await importFile(content, blockstore \[, options]) A convenience function for importing a single file or directory. +### const result = await importDirectory(content, blockstore \[, options]) + +A convenience function for importing a directory. Note that this is non-recursive; to import recursively, use the [importer](#const-stream--importersource-blockstore--options) function. + ### const result = await importBytes(buf, blockstore \[, options]) A convenience function for importing a single Uint8Array.
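The convenience functions accept the same options as `importer`. A minimal sketch combining them with the pre-configured chunker/layout instances and the `onProgress` events introduced in this changeset, assuming only names that appear elsewhere in this patch (`importBytes`, `fixedSize`, `balanced`, the `unixfs:importer:progress:file:*` event types and their `detail` fields, `MemoryBlockstore`):

```js
import { importBytes } from 'ipfs-unixfs-importer'
import { fixedSize } from 'ipfs-unixfs-importer/chunker'
import { balanced } from 'ipfs-unixfs-importer/layout'
import { MemoryBlockstore } from 'blockstore-core'

const blockstore = new MemoryBlockstore()

// pre-configured chunker and layout instances are passed as options,
// and onProgress receives the progress events emitted while importing
const entry = await importBytes(Uint8Array.from([0, 1, 2, 3, 4]), blockstore, {
  chunker: fixedSize({ chunkSize: 65536 }),
  layout: balanced(),
  onProgress: (evt) => {
    if (evt.type === 'unixfs:importer:progress:file:read') {
      // evt.detail: { bytesRead, chunkSize, path? }
      console.info('read', evt.detail.bytesRead)
    } else if (evt.type === 'unixfs:importer:progress:file:write') {
      // evt.detail: { bytesWritten, cid, path? }
      console.info('wrote block', evt.detail.cid.toString())
    }
  }
})

console.info(entry.cid, entry.size)
```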
diff --git a/packages/ipfs-unixfs-importer/package.json b/packages/ipfs-unixfs-importer/package.json index 568ab307..9bd2a3ce 100644 --- a/packages/ipfs-unixfs-importer/package.json +++ b/packages/ipfs-unixfs-importer/package.json @@ -1,6 +1,6 @@ { "name": "ipfs-unixfs-importer", - "version": "13.0.0", + "version": "15.1.4", "description": "JavaScript implementation of the UnixFs importer used by IPFS", "license": "Apache-2.0 OR MIT", "homepage": "https://github.com/ipfs/js-ipfs-unixfs/tree/master/packages/ipfs-unixfs-importer#readme", @@ -155,7 +155,7 @@ "build": "aegir build", "clean": "aegir clean", "lint": "aegir lint", - "dep-check": "aegir dep-check -i interface-blockstore", + "dep-check": "aegir dep-check", "release": "aegir release" }, "dependencies": { @@ -163,23 +163,23 @@ "@multiformats/murmur3": "^2.0.0", "err-code": "^3.0.1", "hamt-sharding": "^3.0.0", - "interface-blockstore": "^4.0.0", - "ipfs-unixfs": "^10.0.0", - "it-all": "^2.0.0", - "it-batch": "^2.0.0", - "it-first": "^2.0.0", - "it-parallel-batch": "^2.0.0", + "interface-blockstore": "^5.0.0", + "interface-store": "^5.0.1", + "ipfs-unixfs": "^11.0.0", + "it-all": "^3.0.2", + "it-batch": "^3.0.2", + "it-first": "^3.0.2", + "it-parallel-batch": "^3.0.1", "multiformats": "^11.0.0", + "progress-events": "^1.0.0", "rabin-wasm": "^0.1.4", "uint8arraylist": "^2.4.3", "uint8arrays": "^4.0.2" }, "devDependencies": { - "aegir": "^38.1.2", - "blockstore-core": "^3.0.0", - "it-buffer-stream": "^3.0.0", - "it-drain": "^2.0.0", - "it-last": "^2.0.0", + "aegir": "^39.0.6", + "blockstore-core": "^4.0.1", + "it-last": "^3.0.2", "wherearewe": "^2.0.1" }, "browser": { diff --git a/packages/ipfs-unixfs-importer/src/chunker/rabin.ts b/packages/ipfs-unixfs-importer/src/chunker/rabin.ts index 110185e7..2f0567c6 100644 --- a/packages/ipfs-unixfs-importer/src/chunker/rabin.ts +++ b/packages/ipfs-unixfs-importer/src/chunker/rabin.ts @@ -1,7 +1,7 @@ -import { Uint8ArrayList } from 'uint8arraylist' -// @ts-expect-error -import { create } from 'rabin-wasm' import errcode from 'err-code' +// @ts-expect-error no types +import { create } from 'rabin-wasm' +import { Uint8ArrayList } from 'uint8arraylist' import type { Chunker } from './index.js' const DEFAULT_MIN_CHUNK_SIZE = 262144 diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/buffer-importer.ts b/packages/ipfs-unixfs-importer/src/dag-builder/buffer-importer.ts index f9b77eab..c52b7285 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/buffer-importer.ts +++ b/packages/ipfs-unixfs-importer/src/dag-builder/buffer-importer.ts @@ -1,27 +1,54 @@ -import { UnixFS } from 'ipfs-unixfs' -import { persist, PersistOptions } from '../utils/persist.js' import * as dagPb from '@ipld/dag-pb' +import { UnixFS } from 'ipfs-unixfs' import * as raw from 'multiformats/codecs/raw' -import type { BufferImporter, ProgressHandler } from '../index.js' -import type { Version } from 'multiformats/cid' +import { CustomProgressEvent } from 'progress-events' +import { persist, type PersistOptions } from '../utils/persist.js' +import type { BufferImporter } from '../index.js' +import type { CID, Version } from 'multiformats/cid' +import type { ProgressOptions, ProgressEvent } from 'progress-events' + +/** + * Passed to the onProgress callback while importing files + */ +export interface ImportWriteProgress { + /** + * How many bytes we have written for this source so far - this may be + * bigger than the file size due to the DAG-PB wrappers of each block + */ + bytesWritten: bigint + + /** + * 
The CID of the block that has been written + */ + cid: CID + + /** + * The path of the file being imported, if one was specified + */ + path?: string +} -export interface BufferImporterOptions { +export type BufferImportProgressEvents = + ProgressEvent<'unixfs:importer:progress:file:write', ImportWriteProgress> + +export interface BufferImporterOptions extends ProgressOptions { cidVersion: Version rawLeaves: boolean leafType: 'file' | 'raw' - progress?: ProgressHandler } export function defaultBufferImporter (options: BufferImporterOptions): BufferImporter { - return async function * bufferImporter (file, block) { - for await (let buffer of file.content) { - yield async () => { - options.progress?.(buffer.length, file.path) + return async function * bufferImporter (file, blockstore) { + let bytesWritten = 0n + + for await (let block of file.content) { + yield async () => { // eslint-disable-line no-loop-func let unixfs const opts: PersistOptions = { codec: dagPb, - cidVersion: options.cidVersion + cidVersion: options.cidVersion, + onProgress: options.onProgress } if (options.rawLeaves) { @@ -30,19 +57,30 @@ export function defaultBufferImporter (options: BufferImporterOptions): BufferIm } else { unixfs = new UnixFS({ type: options.leafType, - data: buffer + data: block }) - buffer = dagPb.encode({ + block = dagPb.encode({ Data: unixfs.marshal(), Links: [] }) } + const cid = await persist(block, blockstore, opts) + + bytesWritten += BigInt(block.byteLength) + + options.onProgress?.(new CustomProgressEvent('unixfs:importer:progress:file:write', { + bytesWritten, + cid, + path: file.path + })) + return { - cid: await persist(buffer, block, opts), + cid, unixfs, - size: BigInt(buffer.length) + size: BigInt(block.length), + block } } } diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts b/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts index 21c4ccae..a29675f6 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts +++ b/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts @@ -1,8 +1,7 @@ +import { encode, prepare } from '@ipld/dag-pb' import { UnixFS } from 'ipfs-unixfs' import { persist } from '../utils/persist.js' -import { encode, prepare } from '@ipld/dag-pb' -import type { Directory, InProgressImportResult } from '../index.js' -import type { Blockstore } from 'interface-blockstore' +import type { Directory, InProgressImportResult, WritableStorage } from '../index.js' import type { Version } from 'multiformats/cid' export interface DirBuilderOptions { @@ -10,22 +9,23 @@ export interface DirBuilderOptions { signal?: AbortSignal } -export const dirBuilder = async (dir: Directory, blockstore: Blockstore, options: DirBuilderOptions): Promise => { +export const dirBuilder = async (dir: Directory, blockstore: WritableStorage, options: DirBuilderOptions): Promise => { const unixfs = new UnixFS({ type: 'directory', mtime: dir.mtime, mode: dir.mode }) - const buffer = encode(prepare({ Data: unixfs.marshal() })) - const cid = await persist(buffer, blockstore, options) + const block = encode(prepare({ Data: unixfs.marshal() })) + const cid = await persist(block, blockstore, options) const path = dir.path return { cid, path, unixfs, - size: BigInt(buffer.length), - originalPath: dir.originalPath + size: BigInt(block.length), + originalPath: dir.originalPath, + block } } diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file.ts b/packages/ipfs-unixfs-importer/src/dag-builder/file.ts index 268c1667..41d94ca0 100644 --- 
a/packages/ipfs-unixfs-importer/src/dag-builder/file.ts +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file.ts @@ -1,93 +1,113 @@ +import { encode, type PBLink, type PBNode, prepare } from '@ipld/dag-pb' import { UnixFS } from 'ipfs-unixfs' -import { persist } from '../utils/persist.js' -import { encode, PBLink, prepare } from '@ipld/dag-pb' import parallelBatch from 'it-parallel-batch' import * as rawCodec from 'multiformats/codecs/raw' -import type { BufferImporter, File, InProgressImportResult } from '../index.js' -import type { Blockstore } from 'interface-blockstore' +import { CustomProgressEvent } from 'progress-events' +import { persist } from '../utils/persist.js' +import type { BufferImporter, File, InProgressImportResult, WritableStorage, SingleBlockImportResult, ImporterProgressEvents } from '../index.js' import type { FileLayout, Reducer } from '../layout/index.js' -import type { Version } from 'multiformats/cid' +import type { CID, Version } from 'multiformats/cid' +import type { ProgressOptions, ProgressEvent } from 'progress-events' interface BuildFileBatchOptions { bufferImporter: BufferImporter blockWriteConcurrency: number } -async function * buildFileBatch (file: File, blockstore: Blockstore, options: BuildFileBatchOptions): AsyncGenerator { +async function * buildFileBatch (file: File, blockstore: WritableStorage, options: BuildFileBatchOptions): AsyncGenerator { let count = -1 - let previous: InProgressImportResult | undefined + let previous: SingleBlockImportResult | undefined for await (const entry of parallelBatch(options.bufferImporter(file, blockstore), options.blockWriteConcurrency)) { count++ if (count === 0) { - previous = entry + // cache the first entry in case there aren't any more + previous = { + ...entry, + single: true + } + continue } else if (count === 1 && (previous != null)) { - yield previous + // we have the second block of a multiple block import so yield the first + yield { + ...previous, + block: undefined, + single: undefined + } previous = undefined } - yield entry + // yield the second or later block of a multiple block import + yield { + ...entry, + block: undefined + } } if (previous != null) { - previous.single = true yield previous } } -interface ReduceOptions { +export interface LayoutLeafProgress { + /** + * The CID of the leaf being written + */ + cid: CID + + /** + * The path of the file being imported, if one was specified + */ + path?: string +} + +export type ReducerProgressEvents = + ProgressEvent<'unixfs:importer:progress:file:layout', LayoutLeafProgress> + +interface ReduceOptions extends ProgressOptions { reduceSingleLeafToSelf: boolean cidVersion: Version signal?: AbortSignal } -const reduce = (file: File, blockstore: Blockstore, options: ReduceOptions): Reducer => { +function isSingleBlockImport (result: any): result is SingleBlockImportResult { + return result.single === true +} + +const reduce = (file: File, blockstore: WritableStorage, options: ReduceOptions): Reducer => { const reducer: Reducer = async function (leaves) { - if (leaves.length === 1 && leaves[0]?.single === true && options.reduceSingleLeafToSelf) { + if (leaves.length === 1 && isSingleBlockImport(leaves[0]) && options.reduceSingleLeafToSelf) { const leaf = leaves[0] + let node: Uint8Array | PBNode = leaf.block - if (file.mtime !== undefined || file.mode !== undefined) { - // only one leaf node which is a buffer - we have metadata so convert it into a + if (isSingleBlockImport(leaf) && (file.mtime !== undefined || file.mode !== undefined)) { + // only
one leaf node which is a raw leaf - we have metadata so convert it into a // UnixFS entry otherwise we'll have nowhere to store the metadata - let buffer = await blockstore.get(leaf.cid) - leaf.unixfs = new UnixFS({ type: 'file', mtime: file.mtime, mode: file.mode, - data: buffer + data: leaf.block }) - buffer = encode(prepare({ Data: leaf.unixfs.marshal() })) - - // // TODO vmx 2021-03-26: This is what the original code does, it checks - // // the multihash of the original leaf node and uses then the same - // // hasher. i wonder if that's really needed or if we could just use - // // the hasher from `options.hasher` instead. - // const multihash = mh.decode(leaf.cid.multihash.bytes) - // let hasher - // switch multihash { - // case sha256.code { - // hasher = sha256 - // break; - // } - // //case identity.code { - // // hasher = identity - // // break; - // //} - // default: { - // throw new Error(`Unsupported hasher "${multihash}"`) - // } - // } - leaf.cid = await persist(buffer, blockstore, { + node = { Data: leaf.unixfs.marshal(), Links: [] } + + leaf.block = encode(prepare(node)) + + leaf.cid = await persist(leaf.block, blockstore, { ...options, cidVersion: options.cidVersion }) - leaf.size = BigInt(buffer.length) + leaf.size = BigInt(leaf.block.length) } + options.onProgress?.(new CustomProgressEvent('unixfs:importer:progress:file:layout', { + cid: leaf.cid, + path: leaf.originalPath + })) + return { cid: leaf.cid, path: file.path, @@ -147,15 +167,21 @@ const reduce = (file: File, blockstore: Blockstore, options: ReduceOptions): Red Data: f.marshal(), Links: links } - const buffer = encode(prepare(node)) - const cid = await persist(buffer, blockstore, options) + const block = encode(prepare(node)) + const cid = await persist(block, blockstore, options) + + options.onProgress?.(new CustomProgressEvent('unixfs:importer:progress:file:layout', { + cid, + path: file.originalPath + })) return { cid, path: file.path, unixfs: f, - size: BigInt(buffer.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0)), - originalPath: file.originalPath + size: BigInt(block.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 
0), 0)), + originalPath: file.originalPath, + block } } @@ -166,6 +192,6 @@ export interface FileBuilderOptions extends BuildFileBatchOptions, ReduceOptions layout: FileLayout } -export const fileBuilder = async (file: File, block: Blockstore, options: FileBuilderOptions): Promise => { - return await options.layout(buildFileBatch(file, block, options), reduce(file, block, options)) +export const fileBuilder = async (file: File, block: WritableStorage, options: FileBuilderOptions): Promise => { + return options.layout(buildFileBatch(file, block, options), reduce(file, block, options)) } diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/index.ts b/packages/ipfs-unixfs-importer/src/dag-builder/index.ts index d5cba6e6..1559b86a 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/index.ts +++ b/packages/ipfs-unixfs-importer/src/dag-builder/index.ts @@ -1,10 +1,34 @@ -import { dirBuilder, DirBuilderOptions } from './dir.js' -import { fileBuilder, FileBuilderOptions } from './file.js' import errCode from 'err-code' -import type { Directory, File, ImportCandidate, InProgressImportResult } from '../index.js' -import type { Blockstore } from 'interface-blockstore' +import { CustomProgressEvent } from 'progress-events' +import { dirBuilder, type DirBuilderOptions } from './dir.js' +import { fileBuilder, type FileBuilderOptions } from './file.js' import type { ChunkValidator } from './validate-chunks.js' import type { Chunker } from '../chunker/index.js' +import type { Directory, File, FileCandidate, ImportCandidate, ImporterProgressEvents, InProgressImportResult, WritableStorage } from '../index.js' +import type { ProgressEvent, ProgressOptions } from 'progress-events' + +/** + * Passed to the onProgress callback while importing files + */ +export interface ImportReadProgress { + /** + * How many bytes we have read from this source so far + */ + bytesRead: bigint + + /** + * The size of the current chunk + */ + chunkSize: bigint + + /** + * The path of the file being imported, if one was specified + */ + path?: string +} + +export type DagBuilderProgressEvents = + ProgressEvent<'unixfs:importer:progress:file:read', ImportReadProgress> function isIterable (thing: any): thing is Iterable { return Symbol.iterator in thing @@ -34,7 +58,7 @@ function contentAsAsyncIterable (content: Uint8Array | AsyncIterable throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') } -export interface DagBuilderOptions extends FileBuilderOptions, DirBuilderOptions { +export interface DagBuilderOptions extends FileBuilderOptions, DirBuilderOptions, ProgressOptions { chunker: Chunker chunkValidator: ChunkValidator wrapWithDirectory: boolean @@ -43,7 +67,7 @@ export interface DagBuilderOptions extends FileBuilderOptions, DirBuilderOptions export type ImporterSourceStream = AsyncIterable | Iterable export interface DAGBuilder { - (source: ImporterSourceStream, blockstore: Blockstore): AsyncIterable<() => Promise> + (source: ImporterSourceStream, blockstore: WritableStorage): AsyncIterable<() => Promise> } export function defaultDagBuilder (options: DagBuilderOptions): DAGBuilder { @@ -59,16 +83,31 @@ export function defaultDagBuilder (options: DagBuilderOptions): DAGBuilder { .join('/') } - if (entry.content != null) { + if (isFileCandidate(entry)) { const file: File = { path: entry.path, mtime: entry.mtime, mode: entry.mode, - content: options.chunker(options.chunkValidator(contentAsAsyncIterable(entry.content))), + content: (async function * () { + let bytesRead = 0n + + for await (const chunk 
of options.chunker(options.chunkValidator(contentAsAsyncIterable(entry.content)))) { + const currentChunkSize = BigInt(chunk.byteLength) + bytesRead += currentChunkSize + + options.onProgress?.(new CustomProgressEvent('unixfs:importer:progress:file:read', { + bytesRead, + chunkSize: currentChunkSize, + path: entry.path + })) + + yield chunk + } + })(), originalPath } - yield async () => await fileBuilder(file, blockstore, options) + yield async () => fileBuilder(file, blockstore, options) } else if (entry.path != null) { const dir: Directory = { path: entry.path, @@ -77,10 +116,14 @@ export function defaultDagBuilder (options: DagBuilderOptions): DAGBuilder { originalPath } - yield async () => await dirBuilder(dir, blockstore, options) + yield async () => dirBuilder(dir, blockstore, options) } else { throw new Error('Import candidate must have content or path or both') } } } } + +function isFileCandidate (entry: any): entry is FileCandidate { + return entry.content != null +} diff --git a/packages/ipfs-unixfs-importer/src/dir-flat.ts b/packages/ipfs-unixfs-importer/src/dir-flat.ts index 2597a5f7..8783b8c2 100644 --- a/packages/ipfs-unixfs-importer/src/dir-flat.ts +++ b/packages/ipfs-unixfs-importer/src/dir-flat.ts @@ -1,9 +1,10 @@ -import { encode, PBNode, prepare } from '@ipld/dag-pb' -import type { Blockstore } from 'interface-blockstore' +import { encode, type PBNode, prepare } from '@ipld/dag-pb' import { UnixFS } from 'ipfs-unixfs' -import { Dir, CID_V0, CID_V1, DirProps } from './dir.js' +import { Dir, CID_V0, CID_V1, type DirProps } from './dir.js' +import { persist, type PersistOptions } from './utils/persist.js' import type { ImportResult, InProgressImportResult } from './index.js' -import { persist, PersistOptions } from './utils/persist.js' +import type { Blockstore } from 'interface-blockstore' +import type { CID } from 'multiformats/cid' export class DirFlat extends Dir { private readonly _children: Map @@ -23,7 +24,7 @@ export class DirFlat extends Dir { } async get (name: string): Promise { - return await Promise.resolve(this._children.get(name)) + return Promise.resolve(this._children.get(name)) } childCount (): number { @@ -68,20 +69,22 @@ export class DirFlat extends Dir { async * flush (block: Blockstore): AsyncGenerator { const links = [] - for (let [name, child] of this._children.entries()) { + for (const [name, child] of this._children.entries()) { + let result: { size?: bigint | number, cid?: CID } = child + if (child instanceof Dir) { for await (const entry of child.flush(block)) { - child = entry + result = entry - yield child + yield entry } } - if (child.size != null && (child.cid != null)) { + if (result.size != null && (result.cid != null)) { links.push({ Name: name, - Tsize: Number(child.size), - Hash: child.cid + Tsize: Number(result.size), + Hash: result.cid }) } } diff --git a/packages/ipfs-unixfs-importer/src/dir-sharded.ts b/packages/ipfs-unixfs-importer/src/dir-sharded.ts index 24468629..c30fbfbb 100644 --- a/packages/ipfs-unixfs-importer/src/dir-sharded.ts +++ b/packages/ipfs-unixfs-importer/src/dir-sharded.ts @@ -1,9 +1,9 @@ -import { encode, PBLink, prepare } from '@ipld/dag-pb' -import { UnixFS } from 'ipfs-unixfs' -import { Dir, CID_V0, CID_V1, DirProps } from './dir.js' -import { persist, PersistOptions } from './utils/persist.js' -import { createHAMT, Bucket, BucketChild } from 'hamt-sharding' +import { encode, type PBLink, prepare } from '@ipld/dag-pb' import { murmur3128 } from '@multiformats/murmur3' +import { createHAMT, Bucket, type 
BucketChild } from 'hamt-sharding' +import { UnixFS } from 'ipfs-unixfs' +import { Dir, CID_V0, CID_V1, type DirProps } from './dir.js' +import { persist, type PersistOptions } from './utils/persist.js' import type { ImportResult, InProgressImportResult } from './index.js' import type { Blockstore } from 'interface-blockstore' @@ -40,7 +40,7 @@ class DirSharded extends Dir { } async get (name: string): Promise { - return await this._bucket.get(name) + return this._bucket.get(name) } childCount (): number { diff --git a/packages/ipfs-unixfs-importer/src/dir.ts b/packages/ipfs-unixfs-importer/src/dir.ts index 1e0303d7..3e1b2a4c 100644 --- a/packages/ipfs-unixfs-importer/src/dir.ts +++ b/packages/ipfs-unixfs-importer/src/dir.ts @@ -1,8 +1,7 @@ -import type { Blockstore } from 'interface-blockstore' -import type { Mtime, UnixFS } from 'ipfs-unixfs' import { CID } from 'multiformats/cid' -import type { ImportResult, InProgressImportResult } from './index.js' +import type { WritableStorage, ImportResult, InProgressImportResult } from './index.js' import type { PersistOptions } from './utils/persist.js' +import type { Mtime, UnixFS } from 'ipfs-unixfs' export interface DirProps { root: boolean @@ -51,7 +50,7 @@ export abstract class Dir { abstract put (name: string, value: InProgressImportResult | Dir): Promise abstract get (name: string): Promise abstract eachChildSeries (): AsyncIterable<{ key: string, child: InProgressImportResult | Dir }> - abstract flush (blockstore: Blockstore): AsyncGenerator + abstract flush (blockstore: WritableStorage): AsyncGenerator abstract estimateNodeSize (): number abstract childCount (): number } diff --git a/packages/ipfs-unixfs-importer/src/flat-to-shard.ts b/packages/ipfs-unixfs-importer/src/flat-to-shard.ts index 5059d014..a5b4d419 100644 --- a/packages/ipfs-unixfs-importer/src/flat-to-shard.ts +++ b/packages/ipfs-unixfs-importer/src/flat-to-shard.ts @@ -1,10 +1,10 @@ -import DirSharded from './dir-sharded.js' import { DirFlat } from './dir-flat.js' +import DirSharded from './dir-sharded.js' import type { Dir } from './dir.js' import type { PersistOptions } from './utils/persist.js' export async function flatToShard (child: Dir | null, dir: Dir, threshold: number, options: PersistOptions): Promise { - let newDir = dir + let newDir = dir as DirSharded if (dir instanceof DirFlat && dir.estimateNodeSize() > threshold) { newDir = await convertToShard(dir, options) @@ -25,10 +25,9 @@ export async function flatToShard (child: Dir | null, dir: Dir, threshold: numbe await parent.put(newDir.parentKey, newDir) } - return await flatToShard(newDir, parent, threshold, options) + return flatToShard(newDir, parent, threshold, options) } - // @ts-expect-error return newDir } diff --git a/packages/ipfs-unixfs-importer/src/index.ts b/packages/ipfs-unixfs-importer/src/index.ts index e14950ba..fbd69b1b 100644 --- a/packages/ipfs-unixfs-importer/src/index.ts +++ b/packages/ipfs-unixfs-importer/src/index.ts @@ -1,28 +1,40 @@ +import errcode from 'err-code' +import first from 'it-first' import parallelBatch from 'it-parallel-batch' -import { DAGBuilder, defaultDagBuilder } from './dag-builder/index.js' +import { fixedSize } from './chunker/fixed-size.js' +import { type BufferImportProgressEvents, defaultBufferImporter } from './dag-builder/buffer-importer.js' +import { type DAGBuilder, type DagBuilderProgressEvents, defaultDagBuilder } from './dag-builder/index.js' +import { type ChunkValidator, defaultChunkValidator } from './dag-builder/validate-chunks.js' +import { balanced, 
type FileLayout } from './layout/index.js' import { defaultTreeBuilder } from './tree-builder.js' +import type { Chunker } from './chunker/index.js' +import type { ReducerProgressEvents } from './dag-builder/file.js' +import type { Blockstore } from 'interface-blockstore' +import type { AwaitIterable } from 'interface-store' import type { UnixFS, Mtime } from 'ipfs-unixfs' import type { CID, Version as CIDVersion } from 'multiformats/cid' -import type { Blockstore } from 'interface-blockstore' -import { ChunkValidator, defaultChunkValidator } from './dag-builder/validate-chunks.js' -import { fixedSize } from './chunker/fixed-size.js' -import type { Chunker } from './chunker/index.js' -import { balanced, FileLayout } from './layout/index.js' -import { defaultBufferImporter } from './dag-builder/buffer-importer.js' -import first from 'it-first' -import errcode from 'err-code' -import type { AwaitIterable } from 'blockstore-core/base' +import type { ProgressOptions } from 'progress-events' export type ByteStream = AwaitIterable export type ImportContent = ByteStream | Uint8Array -export interface ImportCandidate { +export type WritableStorage = Pick + +export interface FileCandidate { path?: string - content?: ImportContent + content: ImportContent mtime?: Mtime mode?: number } +export interface DirectoryCandidate { + path: string + mtime?: Mtime + mode?: number +} + +export type ImportCandidate = FileCandidate | DirectoryCandidate + export interface File { content: AsyncIterable path?: string @@ -45,20 +57,35 @@ export interface ImportResult { unixfs?: UnixFS } -export interface InProgressImportResult extends ImportResult { - single?: boolean +export interface MultipleBlockImportResult extends ImportResult { + originalPath?: string +} + +export interface SingleBlockImportResult extends ImportResult { + single: true originalPath?: string + block: Uint8Array +} + +export type InProgressImportResult = SingleBlockImportResult | MultipleBlockImportResult + +export interface BufferImporterResult extends ImportResult { + block: Uint8Array } -export interface ProgressHandler { (chunkSize: number, path?: string): void } export interface HamtHashFn { (value: Uint8Array): Promise } -export interface TreeBuilder { (source: AsyncIterable, blockstore: Blockstore): AsyncIterable } -export interface BufferImporter { (file: File, blockstore: Blockstore): AsyncIterable<() => Promise> } +export interface TreeBuilder { (source: AsyncIterable, blockstore: WritableStorage): AsyncIterable } +export interface BufferImporter { (file: File, blockstore: WritableStorage): AsyncIterable<() => Promise> } + +export type ImporterProgressEvents = + BufferImportProgressEvents | + DagBuilderProgressEvents | + ReducerProgressEvents /** * Options to control the importer's behaviour */ -export interface ImporterOptions { +export interface ImporterOptions extends ProgressOptions { /** * When a file would span multiple DAGNodes, if this is true the leaf nodes * will not be wrapped in `UnixFS` protobufs and will instead contain the @@ -90,12 +117,6 @@ export interface ImporterOptions { */ cidVersion?: CIDVersion - /** - * A function that will be called with the byte length of chunks as a file - * is added to ipfs. - */ - progress?: ProgressHandler - /** * If the serialized node is larger than this it might be converted to a HAMT * sharded directory. 
Default: 256KiB @@ -180,7 +201,7 @@ export interface ImporterOptions { chunkValidator?: ChunkValidator } -export type ImportCandidateStream = AsyncIterable | Iterable +export type ImportCandidateStream = AsyncIterable | Iterable /** * The importer creates UnixFS DAGs and stores the blocks that make @@ -209,8 +230,8 @@ export type ImportCandidateStream = AsyncIterable | Iterable { - let candidates: AsyncIterable | Iterable +export async function * importer (source: ImportCandidateStream, blockstore: WritableStorage, options: ImporterOptions = {}): AsyncGenerator { + let candidates: AsyncIterable | Iterable if (Symbol.asyncIterator in source || Symbol.iterator in source) { candidates = source @@ -238,16 +259,18 @@ export async function * importer (source: ImportCandidateStream, blockstore: Blo cidVersion, rawLeaves, leafType, - progress: options.progress + onProgress: options.onProgress }), blockWriteConcurrency, reduceSingleLeafToSelf, - cidVersion + cidVersion, + onProgress: options.onProgress }) const buildTree: TreeBuilder = options.treeBuilder ?? defaultTreeBuilder({ wrapWithDirectory, shardSplitThresholdBytes, - cidVersion + cidVersion, + onProgress: options.onProgress }) for await (const entry of buildTree(parallelBatch(buildDag(candidates, blockstore), fileImportConcurrency), blockstore)) { @@ -261,28 +284,59 @@ export async function * importer (source: ImportCandidateStream, blockstore: Blo } /** - * `importContent` is similar to `importer` except it accepts a single - * `ImportCandidate` and returns a promise of a single `ImportResult` + * `importFile` is similar to `importer` except it accepts a single + * `FileCandidate` and returns a promise of a single `ImportResult` * instead of a stream of results. * * @example * * ```typescript - * import { importOne } from 'ipfs-unixfs-importer' + * import { importFile } from 'ipfs-unixfs-importer' * import { MemoryBlockstore } from 'blockstore-core' * * // store blocks in memory, other blockstores are available * const blockstore = new MemoryBlockstore() * - * const input = { + * const input: FileCandidate = { * path: './foo.txt', * content: Uint8Array.from([0, 1, 2, 3, 4]) * } * - * const entry = await importContent(input, blockstore) + * const entry = await importFile(input, blockstore) + * ``` + */ +export async function importFile (content: FileCandidate, blockstore: WritableStorage, options: ImporterOptions = {}): Promise { + const result = await first(importer([content], blockstore, options)) + + if (result == null) { + throw errcode(new Error('Nothing imported'), 'ERR_INVALID_PARAMS') + } + + return result +} + +/** + * `importDirectory` is similar to `importer` except it accepts a single + * `DirectoryCandidate` and returns a promise of a single `ImportResult` + * instead of a stream of results.
+ * + * @example + * + * ```typescript + * import { importDirectory } from 'ipfs-unixfs-importer' + * import { MemoryBlockstore } from 'blockstore-core' + * + * // store blocks in memory, other blockstores are available + * const blockstore = new MemoryBlockstore() + * + * const input: DirectoryCandidate = { + * path: './foo' + * } + * + * const entry = await importDirectory(input, blockstore) * ``` */ -export async function importContent (content: ImportCandidate, blockstore: Blockstore, options: ImporterOptions = {}): Promise { +export async function importDirectory (content: DirectoryCandidate, blockstore: WritableStorage, options: ImporterOptions = {}): Promise { const result = await first(importer([content], blockstore, options)) if (result == null) { @@ -299,7 +353,7 @@ export async function importContent (content: ImportCandidate, blockstore: Block * @example * * ```typescript - * import { importOne } from 'ipfs-unixfs-importer' + * import { importBytes } from 'ipfs-unixfs-importer' * import { MemoryBlockstore } from 'blockstore-core' * * // store blocks in memory, other blockstores are available @@ -310,8 +364,8 @@ export async function importContent (content: ImportCandidate, blockstore: Block * const entry = await importBytes(input, blockstore) * ``` */ -export async function importBytes (buf: ImportContent, blockstore: Blockstore, options: ImporterOptions = {}): Promise { - return await importContent({ +export async function importBytes (buf: ImportContent, blockstore: WritableStorage, options: ImporterOptions = {}): Promise { + return importFile({ content: buf }, blockstore, options) } @@ -323,7 +377,7 @@ export async function importBytes (buf: ImportContent, blockstore: Blockstore, o * @example * * ```typescript - * import { importOne } from 'ipfs-unixfs-importer' + * import { importByteStream } from 'ipfs-unixfs-importer' * import { MemoryBlockstore } from 'blockstore-core' * * // store blocks in memory, other blockstores are available @@ -337,8 +391,8 @@ export async function importBytes (buf: ImportContent, blockstore: Blockstore, o * const entry = await importByteStream(input, blockstore) * ``` */ -export async function importByteStream (bufs: ByteStream, blockstore: Blockstore, options: ImporterOptions = {}): Promise { - return await importContent({ +export async function importByteStream (bufs: ByteStream, blockstore: WritableStorage, options: ImporterOptions = {}): Promise { + return importFile({ content: bufs }, blockstore, options) } diff --git a/packages/ipfs-unixfs-importer/src/layout/balanced.ts b/packages/ipfs-unixfs-importer/src/layout/balanced.ts index 30194f7c..944538a5 100644 --- a/packages/ipfs-unixfs-importer/src/layout/balanced.ts +++ b/packages/ipfs-unixfs-importer/src/layout/balanced.ts @@ -1,6 +1,6 @@ import batch from 'it-batch' -import type { InProgressImportResult } from '../index.js' import type { FileLayout } from './index.js' +import type { InProgressImportResult } from '../index.js' const DEFAULT_MAX_CHILDREN_PER_NODE = 174 @@ -19,7 +19,7 @@ export function balanced (options?: BalancedOptions): FileLayout { } if (roots.length > 1) { - return await balancedLayout(roots, reduce) + return balancedLayout(roots, reduce) } return roots[0] diff --git a/packages/ipfs-unixfs-importer/src/layout/flat.ts b/packages/ipfs-unixfs-importer/src/layout/flat.ts index f75b44f7..9d60a565 100644 --- a/packages/ipfs-unixfs-importer/src/layout/flat.ts +++ b/packages/ipfs-unixfs-importer/src/layout/flat.ts @@ -4,6 +4,6 @@ import type { InProgressImportResult } from
'../index.js' export function flat (): FileLayout { return async function flatLayout (source, reduce): Promise { - return await reduce(await all(source)) + return reduce(await all(source)) } } diff --git a/packages/ipfs-unixfs-importer/src/layout/trickle.ts b/packages/ipfs-unixfs-importer/src/layout/trickle.ts index c0cc194a..490d35da 100644 --- a/packages/ipfs-unixfs-importer/src/layout/trickle.ts +++ b/packages/ipfs-unixfs-importer/src/layout/trickle.ts @@ -1,8 +1,8 @@ -import type { UnixFS } from 'ipfs-unixfs' import batch from 'it-batch' -import type { CID } from 'multiformats/cid' import type { InProgressImportResult } from '../index.js' import type { FileLayout, Reducer } from '../layout/index.js' +import type { UnixFS } from 'ipfs-unixfs' +import type { CID } from 'multiformats/cid' const DEFAULT_LAYER_REPEAT = 4 const DEFAULT_MAX_CHILDREN_PER_NODE = 174 @@ -59,7 +59,7 @@ export function trickle (options?: TrickleOptions): FileLayout { root.addChild(await subTree.reduce(reduce)) } - return await root.reduce(reduce) + return root.reduce(reduce) } } @@ -122,7 +122,7 @@ class SubTree { maxChildren: Math.floor(parent.children.length / this.layerRepeat) * this.layerRepeat } - // @ts-expect-error + // @ts-expect-error nextNode is different type parent.children.push(nextNode) this.currentDepth = nextNode.depth @@ -134,7 +134,7 @@ class SubTree { } async reduce (reduce: Reducer): Promise { - return await this._reduce(this.root, reduce) + return this._reduce(this.root, reduce) } async _reduce (node: TrickleDagNode, reduce: Reducer): Promise { @@ -143,14 +143,14 @@ class SubTree { if (node.children.length > 0) { children = await Promise.all( node.children - // @ts-expect-error + // @ts-expect-error data is not present on type .filter(child => child.data) - // @ts-expect-error - .map(async child => await this._reduce(child, reduce)) + // @ts-expect-error child is wrong type + .map(async child => this._reduce(child, reduce)) ) } - return await reduce((node.data ?? []).concat(children)) + return reduce((node.data ?? []).concat(children)) } _findParent (node: TrickleDagNode, depth: number): TrickleDagNode | undefined { @@ -182,6 +182,6 @@ class Root extends SubTree { } async reduce (reduce: Reducer): Promise { - return await reduce((this.root.data ?? []).concat(this.root.children)) + return reduce((this.root.data ?? 
diff --git a/packages/ipfs-unixfs-importer/src/tree-builder.ts b/packages/ipfs-unixfs-importer/src/tree-builder.ts
index b40e0f29..1c2e457d 100644
--- a/packages/ipfs-unixfs-importer/src/tree-builder.ts
+++ b/packages/ipfs-unixfs-importer/src/tree-builder.ts
@@ -1,9 +1,8 @@
 import { DirFlat } from './dir-flat.js'
-import { flatToShard } from './flat-to-shard.js'
 import { Dir } from './dir.js'
+import { flatToShard } from './flat-to-shard.js'
 import { toPathComponents } from './utils/to-path-components.js'
-import type { ImportResult, InProgressImportResult, TreeBuilder } from './index.js'
-import type { Blockstore } from 'interface-blockstore'
+import type { ImportResult, InProgressImportResult, TreeBuilder, WritableStorage } from './index.js'
 import type { PersistOptions } from './utils/persist.js'
 
 export interface AddToTreeOptions extends PersistOptions {
@@ -55,7 +54,7 @@ async function addToTree (elem: InProgressImportResult, tree: Dir, options: AddT
   return tree
 }
 
-async function * flushAndYield (tree: Dir | InProgressImportResult, blockstore: Blockstore): AsyncGenerator<ImportResult> {
+async function * flushAndYield (tree: Dir | InProgressImportResult, blockstore: WritableStorage): AsyncGenerator<ImportResult> {
   if (!(tree instanceof Dir)) {
     if (tree.unixfs?.isDirectory() === true) {
       yield tree
diff --git a/packages/ipfs-unixfs-importer/src/utils/persist.ts b/packages/ipfs-unixfs-importer/src/utils/persist.ts
index 15f12752..2911b043 100644
--- a/packages/ipfs-unixfs-importer/src/utils/persist.ts
+++ b/packages/ipfs-unixfs-importer/src/utils/persist.ts
@@ -1,17 +1,18 @@
-import { CID } from 'multiformats/cid'
 import * as dagPb from '@ipld/dag-pb'
+import { CID } from 'multiformats/cid'
 import { sha256 } from 'multiformats/hashes/sha2'
-import type { Blockstore } from 'interface-blockstore'
-import type { BlockCodec } from 'multiformats/codecs/interface'
+import type { WritableStorage } from '../index.js'
 import type { Version as CIDVersion } from 'multiformats/cid'
+import type { BlockCodec } from 'multiformats/codecs/interface'
+import type { ProgressOptions } from 'progress-events'
 
-export interface PersistOptions {
+export interface PersistOptions extends ProgressOptions {
   codec?: BlockCodec<any, any>
   cidVersion: CIDVersion
   signal?: AbortSignal
 }
 
-export const persist = async (buffer: Uint8Array, blockstore: Blockstore, options: PersistOptions): Promise<CID> => {
+export const persist = async (buffer: Uint8Array, blockstore: WritableStorage, options: PersistOptions): Promise<CID> => {
   if (options.codec == null) {
     options.codec = dagPb
   }
@@ -19,9 +20,7 @@ export const persist = async (buffer: Uint8Array, blockstore: Blockstore, option
   const multihash = await sha256.digest(buffer)
   const cid = CID.create(options.cidVersion, options.codec.code, multihash)
 
-  await blockstore.put(cid, buffer, {
-    signal: options.signal
-  })
+  await blockstore.put(cid, buffer, options)
 
   return cid
 }
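`persist` now forwards the whole options bag to `blockstore.put`, and `WritableStorage` narrows what the importer needs from a blockstore to just `put`. A sketch of a custom write-only sink; the `put` contract (returning the CID) is assumed from `interface-blockstore`, not shown in this diff:

```typescript
import { importBytes } from 'ipfs-unixfs-importer'
import type { WritableStorage } from 'ipfs-unixfs-importer'

// a write-only sink: WritableStorage only requires put()
const sink: WritableStorage = {
  async put (cid, block) {
    console.info('persisted', cid.toString(), `(${block.byteLength} bytes)`)
    return cid
  }
}

const entry = await importBytes(Uint8Array.from([0, 1, 2, 3]), sink)
console.info('root:', entry.cid.toString())
```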
diff --git a/packages/ipfs-unixfs-importer/test/benchmark.spec.ts b/packages/ipfs-unixfs-importer/test/benchmark.spec.ts
deleted file mode 100644
index cde008b8..00000000
--- a/packages/ipfs-unixfs-importer/test/benchmark.spec.ts
+++ /dev/null
@@ -1,63 +0,0 @@
-/* eslint-env mocha */
-
-import { importer } from '../src/index.js'
-import bufferStream from 'it-buffer-stream'
-import { MemoryBlockstore } from 'blockstore-core'
-import drain from 'it-drain'
-
-const REPEATS = 10
-const FILE_SIZE = Math.pow(2, 20) * 500 // 500MB
-const CHUNK_SIZE = 65536
-
-describe.skip('benchmark', function () {
-  this.timeout(30 * 1000)
-
-  const block = new MemoryBlockstore()
-
-  const times: number[] = []
-
-  after(() => {
-    console.info('Percent\tms') // eslint-disable-line no-console
-
-    times.forEach((time, index) => {
-      console.info(`${index}\t${Math.round(time / REPEATS)}`) // eslint-disable-line no-console
-    })
-  })
-
-  for (let i = 0; i < REPEATS; i++) {
-    it(`run ${i}`, async () => { // eslint-disable-line no-loop-func
-      this.timeout(0)
-
-      const size = FILE_SIZE
-      let read = 0
-      let lastDate = Date.now()
-      let lastPercent = 0
-
-      const options = {
-        progress: (prog: number) => {
-          read += prog
-
-          const percent = Math.round((read / size) * 100)
-
-          if (percent > lastPercent) {
-            times[percent] = (times[percent] ?? 0) + (Date.now() - lastDate)
-
-            lastDate = Date.now()
-            lastPercent = percent
-          }
-        }
-      }
-
-      const buf = new Uint8Array(CHUNK_SIZE).fill(0)
-
-      await drain(importer([{
-        path: '200Bytes.txt',
-        content: bufferStream(size, {
-          chunkSize: CHUNK_SIZE,
-          generator: () => {
-            return buf
-          }
-        })
-      }], block, options))
-    })
-  }
-})
diff --git a/packages/ipfs-unixfs-importer/test/builder-balanced.spec.ts b/packages/ipfs-unixfs-importer/test/builder-balanced.spec.ts
index f40f4272..63fd4a13 100644
--- a/packages/ipfs-unixfs-importer/test/builder-balanced.spec.ts
+++ b/packages/ipfs-unixfs-importer/test/builder-balanced.spec.ts
@@ -1,14 +1,14 @@
 /* eslint-env mocha */
 
 import { expect } from 'aegir/chai'
-import { balanced } from '../src/layout/balanced.js'
 import { CID } from 'multiformats/cid'
+import { balanced } from '../src/layout/balanced.js'
 import type { InProgressImportResult } from '../src/index.js'
 
 async function reduce (leaves: InProgressImportResult[]): Promise<InProgressImportResult> {
   if (leaves.length > 1) {
     return {
-      // @ts-expect-error
+      // @ts-expect-error children is not part of InProgressImportResult
       children: leaves
     }
   } else {
@@ -24,7 +24,8 @@ describe('builder: balanced', () => {
   it('reduces one value into itself', async () => {
     const source = [{
       cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'),
-      size: 0n
+      size: 0n,
+      block: Uint8Array.from([])
     }]
 
     const result = await balanced(options)((async function * () {
@@ -37,13 +38,16 @@ describe('builder: balanced', () => {
   it('reduces 3 values into parent', async () => {
     const source = [{
       cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'),
-      size: 0n
+      size: 0n,
+      block: Uint8Array.from([])
     }, {
       cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'),
-      size: 0n
+      size: 0n,
+      block: Uint8Array.from([])
     }, {
       cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'),
-      size: 0n
+      size: 0n,
+      block: Uint8Array.from([])
     }]
 
     const result = await balanced(options)((async function * () {
@@ -58,7 +62,7 @@ describe('builder: balanced', () => {
   it('obeys max children per node', async () => {
     const source = [1, 2, 3, 4]
 
-    // @ts-expect-error
+    // @ts-expect-error number is incorrect type
     const result = await balanced(options)((async function * () {
       yield * source
     }()), reduce)
@@ -75,7 +79,7 @@ describe('builder: balanced', () => {
   it('refolds 2 parent nodes', async () => {
     const source = [1, 2, 3, 4, 5, 6, 7]
 
-    // @ts-expect-error
+    // @ts-expect-error number is incorrect type
     const result = await balanced(options)((async function * () {
       yield * source
     }()), reduce)
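The deleted benchmark drove the old numeric `progress` callback. With `PersistOptions` now extending `ProgressOptions` (above), the equivalent hook is `onProgress`, which receives typed progress events. A sketch; the specific event names emitted are not part of this diff, so only the generic type/detail shape is shown:

```typescript
import { importer } from 'ipfs-unixfs-importer'
import { MemoryBlockstore } from 'blockstore-core'
import drain from 'it-drain'

await drain(importer([{
  path: 'file.txt',
  content: new Uint8Array(1024 * 1024)
}], new MemoryBlockstore(), {
  // each event carries a type string and a detail payload
  onProgress: (evt) => {
    console.info(evt.type, evt.detail)
  }
}))
```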
b/packages/ipfs-unixfs-importer/test/builder-flat.spec.ts
@@ -14,7 +14,7 @@ function reduce (leaves: any[]): any {
 describe('builder: flat', () => {
   it('reduces one value into itself', async () => {
     const source = [1]
-    // @ts-expect-error
+    // @ts-expect-error number is incorrect type
     const result = await flat()(source, reduce)
 
     expect(result).to.be.equal(1)
@@ -22,7 +22,7 @@ describe('builder: flat', () => {
 
   it('reduces 2 values into parent', async () => {
     const source = [1, 2]
-    // @ts-expect-error
+    // @ts-expect-error number is incorrect type
     const result = await flat()(source, reduce)
 
     expect(result).to.be.eql({ children: [1, 2] })
diff --git a/packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.ts b/packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.ts
index dc5f0843..6dd046d4 100644
--- a/packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.ts
+++ b/packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.ts
@@ -29,14 +29,14 @@ const options = {
 
 describe('builder: trickle', () => {
   it('reduces one value into itself', async () => {
-    // @ts-expect-error
+    // @ts-expect-error number is incorrect type
     const result = await trickle(options)(asAsyncIterable([1]), reduce)
 
     expect(result).to.deep.equal(1)
   })
 
   it('reduces 3 values into parent', async () => {
-    // @ts-expect-error
+    // @ts-expect-error number is incorrect type
     const result = await trickle(options)(createValues(3), reduce)
 
     expect(result).to.deep.equal({
@@ -49,7 +49,7 @@ describe('builder: trickle', () => {
   })
 
   it('reduces 6 values correctly', async () => {
-    // @ts-expect-error
+    // @ts-expect-error number is incorrect type
     const result = await trickle(options)(createValues(6), reduce)
 
     expect(result).to.deep.equal({
@@ -69,7 +69,7 @@ describe('builder: trickle', () => {
   })
 
   it('reduces 9 values correctly', async () => {
-    // @ts-expect-error
+    // @ts-expect-error number is incorrect type
     const result = await trickle(options)(createValues(9), reduce)
 
     expect(result).to.deep.equal({
@@ -96,7 +96,7 @@ describe('builder: trickle', () => {
   })
 
   it('reduces 12 values correctly', async () => {
-    // @ts-expect-error
+    // @ts-expect-error number is incorrect type
     const result = await trickle(options)(createValues(12), reduce)
 
     expect(result).to.deep.equal({
@@ -130,7 +130,7 @@ describe('builder: trickle', () => {
   })
 
   it('reduces 21 values correctly', async () => {
-    // @ts-expect-error
+    // @ts-expect-error number is incorrect type
     const result = await trickle(options)(createValues(21), reduce)
 
     expect(result).to.deep.equal({
@@ -185,7 +185,7 @@ describe('builder: trickle', () => {
   })
 
   it('reduces 68 values correctly', async () => {
-    // @ts-expect-error
+    // @ts-expect-error number is incorrect type
     const result = await trickle(options)(createValues(68), reduce)
 
     expect(result).to.deep.equal(
@@ -353,7 +353,7 @@ describe('builder: trickle', () => {
   })
 
   it('reduces 93 values correctly', async () => {
-    // @ts-expect-error
+    // @ts-expect-error number is incorrect type
     const result = await trickle(options)(createValues(93), reduce)
 
     expect(result).to.deep.equal(
diff --git a/packages/ipfs-unixfs-importer/test/chunker-custom.spec.ts b/packages/ipfs-unixfs-importer/test/chunker-custom.spec.ts
index 9d5cae8f..3237ce76 100644
--- a/packages/ipfs-unixfs-importer/test/chunker-custom.spec.ts
+++ b/packages/ipfs-unixfs-importer/test/chunker-custom.spec.ts
@@ -1,13 +1,13 @@
 /* eslint-env mocha */
 
-import { importer } from '../src/index.js'
 import { expect } from 'aegir/chai'
+import { MemoryBlockstore } from 'blockstore-core'
+import { UnixFS } from 'ipfs-unixfs'
+import * as Block from 'multiformats/block'
 import * as rawCodec from 'multiformats/codecs/raw'
 import { sha256 } from 'multiformats/hashes/sha2'
-import * as Block from 'multiformats/block'
-import { MemoryBlockstore } from 'blockstore-core'
 import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
-import { UnixFS } from 'ipfs-unixfs'
+import { importer } from '../src/index.js'
 import type { CID } from 'multiformats'
 
 const iter = async function * (): AsyncGenerator<Uint8Array> {
@@ -19,7 +19,7 @@ describe('custom chunker', function () {
   const block = new MemoryBlockstore()
 
   const fromPartsTest = (content: AsyncIterable<Uint8Array>, size: bigint) => async () => {
-    const put = async (buf: Uint8Array): Promise<{ cid: CID, size: bigint, unixfs: UnixFS }> => {
+    const put = async (buf: Uint8Array): Promise<{ cid: CID, size: bigint, unixfs: UnixFS, block: Uint8Array }> => {
       const encodedBlock = await Block.encode({
         value: buf,
         codec: rawCodec,
@@ -29,7 +29,8 @@ describe('custom chunker', function () {
       return {
         cid: encodedBlock.cid,
         size: BigInt(buf.length),
-        unixfs: new UnixFS()
+        unixfs: new UnixFS(),
+        block: buf
       }
     }
 
@@ -39,7 +40,7 @@ describe('custom chunker', function () {
       chunker: source => source,
       bufferImporter: async function * (file, block) {
         for await (const item of file.content) {
-          yield async () => await put(item)
+          yield async () => put(item)
         }
       }
     })) {
diff --git a/packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.ts b/packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.ts
index 5597a83d..1ed56294 100644
--- a/packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.ts
+++ b/packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.ts
@@ -1,10 +1,10 @@
 /* eslint-env mocha */
 
-import { fixedSize } from '../src/chunker/fixed-size.js'
 import { expect } from 'aegir/chai'
 import all from 'it-all'
-import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
 import { concat as uint8ArrayConcat } from 'uint8arrays/concat'
+import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
+import { fixedSize } from '../src/chunker/fixed-size.js'
 import asAsyncIterable from './helpers/as-async-iterable.js'
 
 const rawFile = new Uint8Array(Math.pow(2, 20))
diff --git a/packages/ipfs-unixfs-importer/test/chunker-rabin.spec.ts b/packages/ipfs-unixfs-importer/test/chunker-rabin.spec.ts
index f6788e2c..f081687e 100644
--- a/packages/ipfs-unixfs-importer/test/chunker-rabin.spec.ts
+++ b/packages/ipfs-unixfs-importer/test/chunker-rabin.spec.ts
@@ -1,12 +1,12 @@
 /* eslint-env mocha */
 
-import { rabin } from '../src/chunker/rabin.js'
 import { expect } from 'aegir/chai'
 import all from 'it-all'
-import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
 import { concat as uint8ArrayConcat } from 'uint8arrays/concat'
-import asAsyncIterable from './helpers/as-async-iterable.js'
+import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
 import { isElectronRenderer } from 'wherearewe'
+import { rabin } from '../src/chunker/rabin.js'
+import asAsyncIterable from './helpers/as-async-iterable.js'
 
 const rawFile = new Uint8Array(Math.pow(2, 20)).fill(1)
 
@@ -102,8 +102,7 @@ describe('chunker: rabin', function () {
   }
 
   try {
-    // @ts-expect-error invalid opts
-    await all(rabin(asAsyncIterable([]), opts))
+    await all(rabin(opts)(asAsyncIterable([])))
     throw new Error('Should have thrown')
   } catch (err: any) {
     expect(err.code).to.equal('ERR_INVALID_AVG_CHUNK_SIZE')
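As the custom chunker test above shows, a chunker is just a function over the incoming byte stream (and `rabin` is now a factory returning such a function, hence `rabin(opts)(source)`). A pass-through chunker that emits each incoming buffer as one chunk, mirroring the `chunker: source => source` used in that test:

```typescript
import { importer } from 'ipfs-unixfs-importer'
import { MemoryBlockstore } from 'blockstore-core'

const blockstore = new MemoryBlockstore()

for await (const entry of importer([{
  content: Uint8Array.from([0, 1, 2, 3])
}], blockstore, {
  // pass-through: every buffer yielded by the source becomes one chunk
  chunker: source => source
})) {
  console.info(entry.cid.toString(), entry.size)
}
```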
diff --git a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts
index 1be6b2c5..04637910 100644
--- a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts
+++ b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts
@@ -1,13 +1,13 @@
 /* eslint-env mocha */
 
-import { importer, ImporterOptions } from '../src/index.js'
 import { expect } from 'aegir/chai'
-import randomByteStream from './helpers/finite-pseudorandom-byte-stream.js'
+import { MemoryBlockstore } from 'blockstore-core'
 import first from 'it-first'
 import last from 'it-last'
-import { MemoryBlockstore } from 'blockstore-core'
 import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
-import { balanced, FileLayout, flat, trickle } from '../src/layout/index.js'
+import { importer, type ImporterOptions } from '../src/index.js'
+import { balanced, type FileLayout, flat, trickle } from '../src/layout/index.js'
+import randomByteStream from './helpers/finite-pseudorandom-byte-stream.js'
 
 const strategies: Record<'flat' | 'trickle' | 'balanced', FileLayout> = {
   flat: flat(),
diff --git a/packages/ipfs-unixfs/CHANGELOG.md b/packages/ipfs-unixfs/CHANGELOG.md
index f7ce240c..4913ff18 100644
--- a/packages/ipfs-unixfs/CHANGELOG.md
+++ b/packages/ipfs-unixfs/CHANGELOG.md
@@ -1,3 +1,10 @@
+## [ipfs-unixfs-v11.0.1](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-v11.0.0...ipfs-unixfs-v11.0.1) (2023-05-11)
+
+
+### Dependencies
+
+* **dev:** bump aegir from 38.1.8 to 39.0.6 ([#326](https://github.com/ipfs/js-ipfs-unixfs/issues/326)) ([a32453b](https://github.com/ipfs/js-ipfs-unixfs/commit/a32453bc43a98366258fa4ba1330b1b362775f46))
+
 ## [ipfs-unixfs-v11.0.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-v10.0.0...ipfs-unixfs-v11.0.0) (2023-02-16)
diff --git a/packages/ipfs-unixfs/package.json b/packages/ipfs-unixfs/package.json
index a5f1d8ef..e38eec14 100644
--- a/packages/ipfs-unixfs/package.json
+++ b/packages/ipfs-unixfs/package.json
@@ -1,6 +1,6 @@
 {
   "name": "ipfs-unixfs",
-  "version": "11.0.0",
+  "version": "11.0.1",
   "description": "JavaScript implementation of IPFS' unixfs (a Unix FileSystem representation on top of a MerkleDAG)",
   "license": "Apache-2.0 OR MIT",
   "homepage": "https://github.com/ipfs/js-ipfs-unixfs/tree/master/packages/ipfs-unixfs#readme",
@@ -144,7 +144,7 @@
     "uint8arraylist": "^2.4.3"
   },
   "devDependencies": {
-    "aegir": "^38.1.2",
+    "aegir": "^39.0.6",
     "protons": "^7.0.2",
     "uint8arrays": "^4.0.2"
   },
diff --git a/packages/ipfs-unixfs/test/unixfs-format.spec.ts b/packages/ipfs-unixfs/test/unixfs-format.spec.ts
index 7f483bcf..a15d0f2e 100644
--- a/packages/ipfs-unixfs/test/unixfs-format.spec.ts
+++ b/packages/ipfs-unixfs/test/unixfs-format.spec.ts
@@ -3,8 +3,7 @@
 import { expect } from 'aegir/chai'
 import loadFixture from 'aegir/fixtures'
 import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
-
-import { Mtime, UnixFS } from '../src/index.js'
+import { type Mtime, UnixFS } from '../src/index.js'
 import * as Pb from '../src/unixfs.js'
 
 const PBData = Pb.Data