diff --git a/.commitlintrc.js b/.commitlintrc.js deleted file mode 100644 index 5b0b1a52..00000000 --- a/.commitlintrc.js +++ /dev/null @@ -1,10 +0,0 @@ -/* This file is automatically added by @npmcli/template-oss. Do not edit. */ - -module.exports = { - extends: ['@commitlint/config-conventional'], - rules: { - 'type-enum': [2, 'always', ['feat', 'fix', 'docs', 'deps', 'chore']], - 'header-max-length': [2, 'always', 80], - 'subject-case': [0, 'always', ['lower-case', 'sentence-case', 'start-case']], - }, -} diff --git a/.eslintrc.js b/.eslintrc.js deleted file mode 100644 index 5db9f815..00000000 --- a/.eslintrc.js +++ /dev/null @@ -1,17 +0,0 @@ -/* This file is automatically added by @npmcli/template-oss. Do not edit. */ - -'use strict' - -const { readdirSync: readdir } = require('fs') - -const localConfigs = readdir(__dirname) - .filter((file) => file.startsWith('.eslintrc.local.')) - .map((file) => `./${file}`) - -module.exports = { - root: true, - extends: [ - '@npmcli', - ...localConfigs, - ], -} diff --git a/.eslintrc.local.js b/.eslintrc.local.js deleted file mode 100644 index b820d970..00000000 --- a/.eslintrc.local.js +++ /dev/null @@ -1,16 +0,0 @@ -module.exports = { - rules: { - 'max-len': 0, - 'no-shadow': 0, - 'no-unused-expressions': 0, - 'no-sequences': 0, - 'no-empty': 0, - }, - overrides: [{ - files: ['test/**'], - rules: { - 'promise/catch-or-return': 0, - 'promise/always-return': 0, - }, - }], -} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d026a49f..7a400a47 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,16 +6,12 @@ jobs: build: strategy: matrix: - node-version: [16.x, 18.x, 20.x] + node-version: [18.x, 20.x, 21.x] platform: - os: ubuntu-latest shell: bash - os: macos-latest shell: bash - - os: windows-latest - shell: bash - - os: windows-latest - shell: powershell fail-fast: false runs-on: ${{ matrix.platform.os }} @@ -25,10 +21,10 @@ jobs: steps: - name: Checkout Repository - uses: actions/checkout@v1.1.0 + uses: actions/checkout@v4 - name: Use Nodejs ${{ matrix.node-version }} - uses: actions/setup-node@v1 + uses: actions/setup-node@v4 with: node-version: ${{ matrix.node-version }} @@ -36,4 +32,4 @@ jobs: run: npm install - name: Run Tests - run: npm test -- -c -t0 + run: npm test -- -c diff --git a/.github/workflows/typedoc.yml b/.github/workflows/typedoc.yml new file mode 100644 index 00000000..e5bc0ef8 --- /dev/null +++ b/.github/workflows/typedoc.yml @@ -0,0 +1,50 @@ +# Simple workflow for deploying static content to GitHub Pages +name: Deploy static content to Pages + +on: + # Runs on pushes targeting the default branch + push: + branches: ["main"] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages +permissions: + contents: read + pages: write + id-token: write + +# Allow one concurrent deployment +concurrency: + group: "pages" + cancel-in-progress: true + +jobs: + # Single deploy job since we're just deploying + deploy: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Use Nodejs ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: 18.x + - name: Install dependencies + run: npm install + - name: Generate typedocs + run: npm run typedoc + + - name: Setup Pages + uses: actions/configure-pages@v3 + - name: Upload artifact + uses: 
actions/upload-pages-artifact@v1 + with: + path: './docs' + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v1 diff --git a/.gitignore b/.gitignore index effd9b9a..70c32c65 100644 --- a/.gitignore +++ b/.gitignore @@ -5,7 +5,10 @@ # keep these !**/.gitignore +!/src +!/.tshy !/.commitlintrc.js +!/tsconfig.json !/.eslintrc.js !/.eslintrc.local.* !/.github/ @@ -15,12 +18,12 @@ !/bin/ !/CHANGELOG* !/CODE_OF_CONDUCT.md -!/docs/ !/index.js !/lib/ !/LICENSE* !/map.js !/package.json +!/package-lock.json !/README* !/release-please-config.json !/scripts/ diff --git a/.npmrc b/.npmrc deleted file mode 100644 index 529f93e7..00000000 --- a/.npmrc +++ /dev/null @@ -1,3 +0,0 @@ -; This file is automatically added by @npmcli/template-oss. Do not edit. - -package-lock=false diff --git a/.tshy/build.json b/.tshy/build.json new file mode 100644 index 00000000..aea1a9e9 --- /dev/null +++ b/.tshy/build.json @@ -0,0 +1,8 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "../src", + "module": "nodenext", + "moduleResolution": "nodenext" + } +} diff --git a/.tshy/commonjs.json b/.tshy/commonjs.json new file mode 100644 index 00000000..5ace94d0 --- /dev/null +++ b/.tshy/commonjs.json @@ -0,0 +1,14 @@ +{ + "extends": "./build.json", + "include": [ + "../src/**/*.ts", + "../src/**/*.cts", + "../src/**/*.tsx" + ], + "exclude": [ + "../src/**/*.mts" + ], + "compilerOptions": { + "outDir": "../.tshy-build/commonjs" + } +} diff --git a/.tshy/esm.json b/.tshy/esm.json new file mode 100644 index 00000000..ff5264e6 --- /dev/null +++ b/.tshy/esm.json @@ -0,0 +1,12 @@ +{ + "extends": "./build.json", + "include": [ + "../src/**/*.ts", + "../src/**/*.mts", + "../src/**/*.tsx" + ], + "exclude": [], + "compilerOptions": { + "outDir": "../.tshy-build/esm" + } +} diff --git a/CHANGELOG.md b/CHANGELOG.md index f4b27a7a..0ee7b913 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,43 +1,73 @@ # Changelog -## 6.2 +## 7.4 + +- Deprecate `onentry` in favor of `onReadEntry` for clarity. + +## 7.3 -* Add support for brotli compression +- Add `onWriteEntry` option -## [6.1.13](https://github.com/npm/node-tar/compare/v6.1.12...v6.1.13) (2022-12-07) +## 7.2 -### Dependencies +- DRY the command definitions into a single `makeCommand` method, + and update the type signatures to more appropriately infer the + return type from the options and arguments provided. -* [`cc4e0dd`](https://github.com/npm/node-tar/commit/cc4e0ddfe523a0bce383846a67442c637a65d486) [#343](https://github.com/npm/node-tar/pull/343) bump minipass from 3.3.6 to 4.0.0 +## 7.1 -## [6.1.12](https://github.com/npm/node-tar/compare/v6.1.11...v6.1.12) (2022-10-31) +- Update minipass to v7.1.0 +- Update the type definitions of `write()` and `end()` methods on + `Unpack` and `Parser` classes to be compatible with the + NodeJS.WritableStream type in the latest versions of + `@types/node`. -### Bug Fixes +## 7.0 + +- Rewrite in TypeScript, provide ESM and CommonJS hybrid + interface +- Add tree-shake friendly exports, like `import('tar/create')` + and `import('tar/read-entry')` to get individual functions or + classes. +- Add `chmod` option that defaults to false, and deprecate + `noChmod`. That is, reverse the default option regarding + explicitly setting file system modes to match tar entry + settings. +- Add `processUmask` option to avoid having to call + `process.umask()` when `chmod: true` (or `noChmod: false`) is + set. 
+ +## 6.2 -* [`57493ee`](https://github.com/npm/node-tar/commit/57493ee66ece50d62114e02914282fc37be3a91a) [#332](https://github.com/npm/node-tar/pull/332) ensuring close event is emited after stream has ended (@webark) -* [`b003c64`](https://github.com/npm/node-tar/commit/b003c64f624332e24e19b30dc011069bb6708680) [#314](https://github.com/npm/node-tar/pull/314) replace deprecated String.prototype.substr() (#314) (@CommanderRoot, @lukekarrys) +- Add support for brotli compression +- Add `maxDepth` option to prevent extraction into excessively + deep folders. -### Documentation +## 6.1 -* [`f129929`](https://github.com/npm/node-tar/commit/f12992932f171ea248b27fad95e7d489a56d31ed) [#313](https://github.com/npm/node-tar/pull/313) remove dead link to benchmarks (#313) (@yetzt) -* [`c1faa9f`](https://github.com/npm/node-tar/commit/c1faa9f44001dfb0bc7638b2850eb6058bd56a4a) add examples/explanation of using tar.t (@isaacs) +- remove dead link to benchmarks (#313) (@yetzt) +- add examples/explanation of using tar.t (@isaacs) +- ensure close event is emited after stream has ended (@webark) +- replace deprecated String.prototype.substr() (@CommanderRoot, + @lukekarrys) ## 6.0 - Drop support for node 6 and 8 -- fix symlinks and hardlinks on windows being packed with `\`-style path - targets +- fix symlinks and hardlinks on windows being packed with + `\`-style path targets ## 5.0 - Address unpack race conditions using path reservations - Change large-numbers errors from TypeError to Error - Add `TAR_*` error codes -- Raise `TAR_BAD_ARCHIVE` warning/error when there are no valid entries - found in an archive +- Raise `TAR_BAD_ARCHIVE` warning/error when there are no valid + entries found in an archive - do not treat ignored entries as an invalid archive - drop support for node v4 -- unpack: conditionally use a file mapping to write files on Windows +- unpack: conditionally use a file mapping to write files on + Windows - Set more portable 'mode' value in portable mode - Set `portable` gzip option in portable mode @@ -69,8 +99,8 @@ ## 3.1 -- Support `@file.tar` as an entry argument to copy entries from one tar - file to another. +- Support `@file.tar` as an entry argument to copy entries from + one tar file to another. - Add `noPax` option - `noResume` option for tar.t - win32: convert `>|, file: 'my-tarball.tgz' @@ -173,9 +176,12 @@ tar.c( To replicate `tar cz files and folders > my-tarball.tgz`, you'd do: ```js -tar.c( // or tar.create +// if you're familiar with the tar(1) cli flags, this can be nice +import * as tar from 'tar' +tar.c( { - gzip: + // 'z' is alias for 'gzip' option + z: }, ['some', 'files', 'and', 'folders'] ).pipe(fs.createWriteStream('my-tarball.tgz')) @@ -184,9 +190,10 @@ tar.c( // or tar.create To replicate `tar xf my-tarball.tgz` you'd do: ```js -tar.x( // or tar.extract( +tar.x( // or `tar.extract` { - file: 'my-tarball.tgz' + // or `file:` + f: 'my-tarball.tgz' } ).then(_=> { .. tarball has been dumped in cwd .. }) ``` @@ -197,8 +204,8 @@ To replicate `cat my-tarball.tgz | tar x -C some-dir --strip=1`: fs.createReadStream('my-tarball.tgz').pipe( tar.x({ strip: 1, - C: 'some-dir' // alias for cwd:'some-dir', also ok - }) + C: 'some-dir', // alias for cwd:'some-dir', also ok + }), ) ``` @@ -207,7 +214,7 @@ To replicate `tar tf my-tarball.tgz`, do this: ```js tar.t({ file: 'my-tarball.tgz', - onentry: entry => { .. do whatever with it .. } + onReadEntry: entry => { .. 
do whatever with it .. } }) ``` @@ -218,7 +225,7 @@ const getEntryFilenames = async tarballFilename => { const filenames = [] await tar.t({ file: tarballFilename, - onentry: entry => filenames.push(entry.path), + onReadEntry: entry => filenames.push(entry.path), }) return filenames } @@ -232,10 +239,10 @@ fs.createReadStream('my-tarball.tgz') .on('entry', entry => { .. do whatever with it .. }) ``` -To do anything synchronous, add `sync: true` to the options. Note +To do anything synchronous, add `sync: true` to the options. Note that sync functions don't take a callback and don't return a promise. -When the function returns, it's already done. Sync methods without a -file argument return a sync stream, which flushes immediately. But, +When the function returns, it's already done. Sync methods without a +file argument return a sync stream, which flushes immediately. But, of course, it still won't be done until you `.end()` it. ```js @@ -243,7 +250,7 @@ const getEntryFilenamesSync = tarballFilename => { const filenames = [] tar.t({ file: tarballFilename, - onentry: entry => filenames.push(entry.path), + onReadEntry: entry => filenames.push(entry.path), sync: true, }) return filenames @@ -253,7 +260,7 @@ const getEntryFilenamesSync = tarballFilename => { To filter entries, add `filter: ` to the options. Tar-creating methods call the filter with `filter(path, stat)`. Tar-reading methods (including extraction) call the filter with -`filter(path, entry)`. The filter is called in the `this`-context of +`filter(path, entry)`. The filter is called in the `this`-context of the `Pack` or `Unpack` stream object. The arguments list to `tar t` and `tar x` specify a list of filenames @@ -277,49 +284,49 @@ the low-level API that they are built on. Create a tarball archive. -The `fileList` is an array of paths to add to the tarball. Adding a +The `fileList` is an array of paths to add to the tarball. Adding a directory also adds its children recursively. An entry in `fileList` that starts with an `@` symbol is a tar archive -whose entries will be added. To add a file that starts with `@`, +whose entries will be added. To add a file that starts with `@`, prepend it with `./`. The following options are supported: -- `file` Write the tarball archive to the specified filename. If this +- `file` Write the tarball archive to the specified filename. If this is specified, then the callback will be fired when the file has been written, and a promise will be returned that resolves when the file - is written. If a filename is not specified, then a Readable Stream + is written. If a filename is not specified, then a Readable Stream will be returned which will emit the file data. [Alias: `f`] -- `sync` Act synchronously. If this is set, then any provided file - will be fully written after the call to `tar.c`. If this is set, +- `sync` Act synchronously. If this is set, then any provided file + will be fully written after the call to `tar.c`. If this is set, and a file is not provided, then the resulting stream will already have the data ready to `read` or `emit('data')` as soon as you request it. - `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `strict` Treat warnings as crash-worthy errors. Default false. + any warnings encountered. (See "Warnings and Errors") +- `strict` Treat warnings as crash-worthy errors. Default false. - `cwd` The current working directory for creating the archive. - Defaults to `process.cwd()`. 
[Alias: `C`] + Defaults to `process.cwd()`. [Alias: `C`] - `prefix` A path portion to prefix onto the entries in the archive. - `gzip` Set to any truthy value to create a gzipped archive, or an object with settings for `zlib.Gzip()` [Alias: `z`] - `filter` A function that gets called with `(path, stat)` for each - entry being added. Return `true` to add the entry to the archive, + entry being added. Return `true` to add the entry to the archive, or `false` to omit it. - `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable + time-based operations. Additionally, `mode` is set to a "reasonable default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped +- `preservePaths` Allow absolute paths. By default, `/` is stripped from absolute paths. [Alias: `P`] - `mode` The mode to set on the created file archive - `noDirRecurse` Do not recursively archive the contents of directories. [Alias: `n`] -- `follow` Set to true to pack the targets of symbolic links. Without +- `follow` Set to true to pack the targets of symbolic links. Without this option, symbolic links are archived as such. [Alias: `L`, `h`] -- `noPax` Suppress pax extended headers. Note that this means that +- `noPax` Suppress pax extended headers. Note that this means that long paths and linkpaths will be truncated, and large or negative numeric values may be interpreted incorrectly. - `noMtime` Set to true to omit writing `mtime` values for entries. @@ -327,7 +334,10 @@ The following options are supported: `tar.update` or the `keepNewer` option with the resulting tar archive. [Alias: `m`, `no-mtime`] - `mtime` Set to a `Date` object to force a specific `mtime` for - everything added to the archive. Overridden by `noMtime`. + everything added to the archive. Overridden by `noMtime`. +- `onWriteEntry` Called with each `WriteEntry` or + `WriteEntrySync` that is created in the course of writing the + archive. The following options are mostly internal, but can be modified in some advanced use cases, such as re-using caches between runs. @@ -345,7 +355,7 @@ advanced use cases, such as re-using caches between runs. Extract a tarball archive. -The `fileList` is an array of paths to extract from the tarball. If +The `fileList` is an array of paths to extract from the tarball. If no paths are provided, then all the entries are extracted. If the archive is gzipped, then tar will detect this and unzip it. @@ -355,80 +365,84 @@ writable, readable, and listable by their owner, to avoid cases where a directory prevents extraction of child entries by virtue of its mode. -Most extraction errors will cause a `warn` event to be emitted. If +Most extraction errors will cause a `warn` event to be emitted. If the `cwd` is missing, or not a directory, then the extraction will fail completely. The following options are supported: -- `cwd` Extract files relative to the specified directory. Defaults - to `process.cwd()`. If provided, this must exist and must be a +- `cwd` Extract files relative to the specified directory. Defaults + to `process.cwd()`. If provided, this must exist and must be a directory. [Alias: `C`] -- `file` The archive file to extract. 
If not specified, then a +- `file` The archive file to extract. If not specified, then a Writable stream is returned where the archive data should be written. [Alias: `f`] - `sync` Create files and directories synchronously. -- `strict` Treat warnings as crash-worthy errors. Default false. +- `strict` Treat warnings as crash-worthy errors. Default false. - `filter` A function that gets called with `(path, entry)` for each - entry being unpacked. Return `true` to unpack the entry from the + entry being unpacked. Return `true` to unpack the entry from the archive, or `false` to skip it. - `newer` Set to true to keep the existing file on disk if it's newer than the file in the archive. [Alias: `keep-newer`, `keep-newer-files`] -- `keep` Do not overwrite existing files. In particular, if a file +- `keep` Do not overwrite existing files. In particular, if a file appears more than once in an archive, later copies will not overwrite earlier copies. [Alias: `k`, `keep-existing`] - `preservePaths` Allow absolute paths, paths containing `..`, and - extracting through symbolic links. By default, `/` is stripped from + extracting through symbolic links. By default, `/` is stripped from absolute paths, `..` paths are not extracted, and any file whose location would be modified by a symbolic link is not extracted. [Alias: `P`] -- `unlink` Unlink files before creating them. Without this option, +- `unlink` Unlink files before creating them. Without this option, tar overwrites existing files, which preserves existing hardlinks. With this option, existing hardlinks will be broken, as will any symlink that would affect the location of an extracted file. [Alias: `U`] - `strip` Remove the specified number of leading path elements. - Pathnames with fewer elements will be silently skipped. Note that + Pathnames with fewer elements will be silently skipped. Note that the pathname is edited after applying the filter, but before security checks. [Alias: `strip-components`, `stripComponents`] - `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") + any warnings encountered. (See "Warnings and Errors") - `preserveOwner` If true, tar will set the `uid` and `gid` of extracted entries to the `uid` and `gid` fields in the archive. - This defaults to true when run as root, and false otherwise. If + This defaults to true when run as root, and false otherwise. If false, then files and directories will be set with the owner and - group of the user running the process. This is similar to `-p` in + group of the user running the process. This is similar to `-p` in `tar(1)`, but ACLs and other system-specific data is never unpacked in this implementation, and modes are set by default already. [Alias: `p`] - `uid` Set to a number to force ownership of all extracted files and folders, and all implicitly created directories, to be owned by the specified user id, regardless of the `uid` field in the archive. - Cannot be used along with `preserveOwner`. Requires also setting a + Cannot be used along with `preserveOwner`. Requires also setting a `gid` option. - `gid` Set to a number to force ownership of all extracted files and folders, and all implicitly created directories, to be owned by the specified group id, regardless of the `gid` field in the archive. - Cannot be used along with `preserveOwner`. Requires also setting a + Cannot be used along with `preserveOwner`. Requires also setting a `uid` option. 
- `noMtime` Set to true to omit writing `mtime` value for extracted entries. [Alias: `m`, `no-mtime`] - `transform` Provide a function that takes an `entry` object, and - returns a stream, or any falsey value. If a stream is provided, + returns a stream, or any falsey value. If a stream is provided, then that stream's data will be written instead of the contents of - the archive entry. If a falsey value is provided, then the entry is - written to disk as normal. (To exclude items from extraction, use + the archive entry. If a falsey value is provided, then the entry is + written to disk as normal. (To exclude items from extraction, use the `filter` option described above.) -- `onentry` A function that gets called with `(entry)` for each entry +- `onReadEntry` A function that gets called with `(entry)` for each entry that passes the filter. - `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `noChmod` Set to true to omit calling `fs.chmod()` to ensure that the - extracted file matches the entry mode. This also suppresses the call to - `process.umask()` to determine the default umask value, since tar will - extract with whatever mode is provided, and let the process `umask` apply - normally. + any warnings encountered. (See "Warnings and Errors") +- `chmod` Set to true to call `fs.chmod()` to ensure that the + extracted file matches the entry mode. This may necessitate a + call to the deprecated and thread-unsafe `process.umask()` + method to determine the default umask value, unless a + `processUmask` options is also provided. Otherwise tar will + extract with whatever mode is provided, and let the process + `umask` apply normally. +- `processUmask` Set to an explicit numeric value to avoid + calling `process.umask()` when `chmod: true` is set. - `maxDepth` The maximum depth of subfolders to extract into. This defaults to 1024. Anything deeper than the limit will raise a warning and skip the entry. Set to `Infinity` to remove the @@ -444,7 +458,7 @@ advanced use cases, such as re-using caches between runs. - `fmode` Default mode for files - `dirCache` A Map object of which directories exist. - `maxMetaEntrySize` The maximum size of meta entries that is - supported. Defaults to 1 MB. + supported. Defaults to 1 MB. Note that using an asynchronous stream type with the `transform` option will cause undefined behavior in sync extractions. @@ -455,88 +469,88 @@ use case. List the contents of a tarball archive. -The `fileList` is an array of paths to list from the tarball. If +The `fileList` is an array of paths to list from the tarball. If no paths are provided, then all the entries are listed. If the archive is gzipped, then tar will detect this and unzip it. If the `file` option is _not_ provided, then returns an event emitter that -emits `entry` events with `tar.ReadEntry` objects. However, they don't -emit `'data'` or `'end'` events. (If you want to get actual readable +emits `entry` events with `tar.ReadEntry` objects. However, they don't +emit `'data'` or `'end'` events. (If you want to get actual readable entries, use the `tar.Parse` class instead.) If a `file` option _is_ provided, then the return value will be a promise that resolves when the file has been fully traversed in async mode, or -`undefined` if `sync: true` is set. Thus, you _must_ specify an `onentry` +`undefined` if `sync: true` is set. Thus, you _must_ specify an `onReadEntry` method in order to do anything useful with the data it parses. 
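For example, a minimal sketch of listing an archive this way (the
`archive.tgz` filename is hypothetical, and entry fields come from the
`ReadEntry`/`Header` documentation below):

```js
import * as tar from 'tar'

// collect the path and size of every entry in a tarball
const listEntries = async file => {
  const entries = []
  await tar.t({
    file,
    // called for each entry that passes the (absent) filter
    onReadEntry: entry => entries.push({ path: entry.path, size: entry.size }),
  })
  return entries
}

listEntries('archive.tgz').then(entries => console.log(entries))
```
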
The following options are supported: -- `file` The archive file to list. If not specified, then a +- `file` The archive file to list. If not specified, then a Writable stream is returned where the archive data should be written. [Alias: `f`] -- `sync` Read the specified file synchronously. (This has no effect +- `sync` Read the specified file synchronously. (This has no effect when a file option isn't specified, because entries are emitted as fast as they are parsed from the stream anyway.) -- `strict` Treat warnings as crash-worthy errors. Default false. +- `strict` Treat warnings as crash-worthy errors. Default false. - `filter` A function that gets called with `(path, entry)` for each - entry being listed. Return `true` to emit the entry from the + entry being listed. Return `true` to emit the entry from the archive, or `false` to skip it. -- `onentry` A function that gets called with `(entry)` for each entry - that passes the filter. This is important for when `file` is set, +- `onReadEntry` A function that gets called with `(entry)` for each entry + that passes the filter. This is important for when `file` is set, because there is no other way to do anything useful with this method. - `maxReadSize` The maximum buffer size for `fs.read()` operations. Defaults to 16 MB. - `noResume` By default, `entry` streams are resumed immediately after - the call to `onentry`. Set `noResume: true` to suppress this - behavior. Note that by opting into this, the stream will never + the call to `onReadEntry`. Set `noResume: true` to suppress this + behavior. Note that by opting into this, the stream will never complete until the entry data is consumed. - `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") + any warnings encountered. (See "Warnings and Errors") ### tar.u(options, fileList, callback) [alias: tar.update] Add files to an archive if they are newer than the entry already in the tarball archive. -The `fileList` is an array of paths to add to the tarball. Adding a +The `fileList` is an array of paths to add to the tarball. Adding a directory also adds its children recursively. An entry in `fileList` that starts with an `@` symbol is a tar archive -whose entries will be added. To add a file that starts with `@`, +whose entries will be added. To add a file that starts with `@`, prepend it with `./`. The following options are supported: - `file` Required. Write the tarball archive to the specified filename. [Alias: `f`] -- `sync` Act synchronously. If this is set, then any provided file +- `sync` Act synchronously. If this is set, then any provided file will be fully written after the call to `tar.c`. - `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `strict` Treat warnings as crash-worthy errors. Default false. + any warnings encountered. (See "Warnings and Errors") +- `strict` Treat warnings as crash-worthy errors. Default false. - `cwd` The current working directory for adding entries to the - archive. Defaults to `process.cwd()`. [Alias: `C`] + archive. Defaults to `process.cwd()`. [Alias: `C`] - `prefix` A path portion to prefix onto the entries in the archive. - `gzip` Set to any truthy value to create a gzipped archive, or an object with settings for `zlib.Gzip()` [Alias: `z`] - `filter` A function that gets called with `(path, stat)` for each - entry being added. 
Return `true` to add the entry to the archive, + entry being added. Return `true` to add the entry to the archive, or `false` to omit it. - `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable + time-based operations. Additionally, `mode` is set to a "reasonable default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped +- `preservePaths` Allow absolute paths. By default, `/` is stripped from absolute paths. [Alias: `P`] - `maxReadSize` The maximum buffer size for `fs.read()` operations. Defaults to 16 MB. - `noDirRecurse` Do not recursively archive the contents of directories. [Alias: `n`] -- `follow` Set to true to pack the targets of symbolic links. Without +- `follow` Set to true to pack the targets of symbolic links. Without this option, symbolic links are archived as such. [Alias: `L`, `h`] -- `noPax` Suppress pax extended headers. Note that this means that +- `noPax` Suppress pax extended headers. Note that this means that long paths and linkpaths will be truncated, and large or negative numeric values may be interpreted incorrectly. - `noMtime` Set to true to omit writing `mtime` values for entries. @@ -544,51 +558,54 @@ The following options are supported: `tar.update` or the `keepNewer` option with the resulting tar archive. [Alias: `m`, `no-mtime`] - `mtime` Set to a `Date` object to force a specific `mtime` for - everything added to the archive. Overridden by `noMtime`. + everything added to the archive. Overridden by `noMtime`. +- `onWriteEntry` Called with each `WriteEntry` or + `WriteEntrySync` that is created in the course of writing the + archive. ### tar.r(options, fileList, callback) [alias: tar.replace] -Add files to an existing archive. Because later entries override +Add files to an existing archive. Because later entries override earlier entries, this effectively replaces any existing entries. -The `fileList` is an array of paths to add to the tarball. Adding a +The `fileList` is an array of paths to add to the tarball. Adding a directory also adds its children recursively. An entry in `fileList` that starts with an `@` symbol is a tar archive -whose entries will be added. To add a file that starts with `@`, +whose entries will be added. To add a file that starts with `@`, prepend it with `./`. The following options are supported: - `file` Required. Write the tarball archive to the specified filename. [Alias: `f`] -- `sync` Act synchronously. If this is set, then any provided file +- `sync` Act synchronously. If this is set, then any provided file will be fully written after the call to `tar.c`. - `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `strict` Treat warnings as crash-worthy errors. Default false. + any warnings encountered. (See "Warnings and Errors") +- `strict` Treat warnings as crash-worthy errors. Default false. - `cwd` The current working directory for adding entries to the - archive. Defaults to `process.cwd()`. [Alias: `C`] + archive. Defaults to `process.cwd()`. [Alias: `C`] - `prefix` A path portion to prefix onto the entries in the archive. 
- `gzip` Set to any truthy value to create a gzipped archive, or an object with settings for `zlib.Gzip()` [Alias: `z`] - `filter` A function that gets called with `(path, stat)` for each - entry being added. Return `true` to add the entry to the archive, + entry being added. Return `true` to add the entry to the archive, or `false` to omit it. - `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable + time-based operations. Additionally, `mode` is set to a "reasonable default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped +- `preservePaths` Allow absolute paths. By default, `/` is stripped from absolute paths. [Alias: `P`] - `maxReadSize` The maximum buffer size for `fs.read()` operations. Defaults to 16 MB. - `noDirRecurse` Do not recursively archive the contents of directories. [Alias: `n`] -- `follow` Set to true to pack the targets of symbolic links. Without +- `follow` Set to true to pack the targets of symbolic links. Without this option, symbolic links are archived as such. [Alias: `L`, `h`] -- `noPax` Suppress pax extended headers. Note that this means that +- `noPax` Suppress pax extended headers. Note that this means that long paths and linkpaths will be truncated, and large or negative numeric values may be interpreted incorrectly. - `noMtime` Set to true to omit writing `mtime` values for entries. @@ -596,16 +613,18 @@ The following options are supported: `tar.update` or the `keepNewer` option with the resulting tar archive. [Alias: `m`, `no-mtime`] - `mtime` Set to a `Date` object to force a specific `mtime` for - everything added to the archive. Overridden by `noMtime`. - + everything added to the archive. Overridden by `noMtime`. +- `onWriteEntry` Called with each `WriteEntry` or + `WriteEntrySync` that is created in the course of writing the + archive. ## Low-Level API -### class tar.Pack +### class Pack A readable tar stream. -Has all the standard readable stream interface stuff. `'data'` and +Has all the standard readable stream interface stuff. `'data'` and `'end'` events, `read()` method, `pause()` and `resume()`, etc. #### constructor(options) @@ -613,22 +632,22 @@ Has all the standard readable stream interface stuff. `'data'` and The following options are supported: - `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `strict` Treat warnings as crash-worthy errors. Default false. + any warnings encountered. (See "Warnings and Errors") +- `strict` Treat warnings as crash-worthy errors. Default false. - `cwd` The current working directory for creating the archive. Defaults to `process.cwd()`. - `prefix` A path portion to prefix onto the entries in the archive. - `gzip` Set to any truthy value to create a gzipped archive, or an object with settings for `zlib.Gzip()` - `filter` A function that gets called with `(path, stat)` for each - entry being added. Return `true` to add the entry to the archive, + entry being added. Return `true` to add the entry to the archive, or `false` to omit it. - `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. 
Note + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable + time-based operations. Additionally, `mode` is set to a "reasonable default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped +- `preservePaths` Allow absolute paths. By default, `/` is stripped from absolute paths. - `linkCache` A Map object containing the device and inode value for any file whose nlink is > 1, to identify hard links. @@ -640,38 +659,41 @@ The following options are supported: Defaults to 16 MB. - `noDirRecurse` Do not recursively archive the contents of directories. -- `follow` Set to true to pack the targets of symbolic links. Without +- `follow` Set to true to pack the targets of symbolic links. Without this option, symbolic links are archived as such. -- `noPax` Suppress pax extended headers. Note that this means that +- `noPax` Suppress pax extended headers. Note that this means that long paths and linkpaths will be truncated, and large or negative numeric values may be interpreted incorrectly. - `noMtime` Set to true to omit writing `mtime` values for entries. Note that this prevents using other mtime-based features like `tar.update` or the `keepNewer` option with the resulting tar archive. - `mtime` Set to a `Date` object to force a specific `mtime` for - everything added to the archive. Overridden by `noMtime`. + everything added to the archive. Overridden by `noMtime`. +- `onWriteEntry` Called with each `WriteEntry` or + `WriteEntrySync` that is created in the course of writing the + archive. #### add(path) -Adds an entry to the archive. Returns the Pack stream. +Adds an entry to the archive. Returns the Pack stream. #### write(path) -Adds an entry to the archive. Returns true if flushed. +Adds an entry to the archive. Returns true if flushed. #### end() Finishes the archive. -### class tar.Pack.Sync +### class PackSync -Synchronous version of `tar.Pack`. +Synchronous version of `Pack`. -### class tar.Unpack +### class Unpack A writable stream that unpacks a tar archive onto the file system. -All the normal writable stream stuff is supported. `write()` and +All the normal writable stream stuff is supported. `write()` and `end()` methods, `'drain'` events, etc. Note that all directories that are created will be forced to be @@ -681,88 +703,92 @@ mode. `'close'` is emitted when it's done writing stuff to the file system. -Most unpack errors will cause a `warn` event to be emitted. If the +Most unpack errors will cause a `warn` event to be emitted. If the `cwd` is missing, or not a directory, then an error will be emitted. #### constructor(options) -- `cwd` Extract files relative to the specified directory. Defaults - to `process.cwd()`. If provided, this must exist and must be a +- `cwd` Extract files relative to the specified directory. Defaults + to `process.cwd()`. If provided, this must exist and must be a directory. - `filter` A function that gets called with `(path, entry)` for each - entry being unpacked. Return `true` to unpack the entry from the + entry being unpacked. Return `true` to unpack the entry from the archive, or `false` to skip it. - `newer` Set to true to keep the existing file on disk if it's newer than the file in the archive. -- `keep` Do not overwrite existing files. In particular, if a file +- `keep` Do not overwrite existing files. 
In particular, if a file appears more than once in an archive, later copies will not overwrite earlier copies. - `preservePaths` Allow absolute paths, paths containing `..`, and - extracting through symbolic links. By default, `/` is stripped from + extracting through symbolic links. By default, `/` is stripped from absolute paths, `..` paths are not extracted, and any file whose location would be modified by a symbolic link is not extracted. -- `unlink` Unlink files before creating them. Without this option, +- `unlink` Unlink files before creating them. Without this option, tar overwrites existing files, which preserves existing hardlinks. With this option, existing hardlinks will be broken, as will any symlink that would affect the location of an extracted file. - `strip` Remove the specified number of leading path elements. - Pathnames with fewer elements will be silently skipped. Note that + Pathnames with fewer elements will be silently skipped. Note that the pathname is edited after applying the filter, but before security checks. - `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") + any warnings encountered. (See "Warnings and Errors") - `umask` Filter the modes of entries like `process.umask()`. - `dmode` Default mode for directories - `fmode` Default mode for files - `dirCache` A Map object of which directories exist. - `maxMetaEntrySize` The maximum size of meta entries that is - supported. Defaults to 1 MB. + supported. Defaults to 1 MB. - `preserveOwner` If true, tar will set the `uid` and `gid` of extracted entries to the `uid` and `gid` fields in the archive. - This defaults to true when run as root, and false otherwise. If + This defaults to true when run as root, and false otherwise. If false, then files and directories will be set with the owner and - group of the user running the process. This is similar to `-p` in + group of the user running the process. This is similar to `-p` in `tar(1)`, but ACLs and other system-specific data is never unpacked in this implementation, and modes are set by default already. -- `win32` True if on a windows platform. Causes behavior where +- `win32` True if on a windows platform. Causes behavior where filenames containing `<|>?` chars are converted to windows-compatible values while being unpacked. - `uid` Set to a number to force ownership of all extracted files and folders, and all implicitly created directories, to be owned by the specified user id, regardless of the `uid` field in the archive. - Cannot be used along with `preserveOwner`. Requires also setting a + Cannot be used along with `preserveOwner`. Requires also setting a `gid` option. - `gid` Set to a number to force ownership of all extracted files and folders, and all implicitly created directories, to be owned by the specified group id, regardless of the `gid` field in the archive. - Cannot be used along with `preserveOwner`. Requires also setting a + Cannot be used along with `preserveOwner`. Requires also setting a `uid` option. - `noMtime` Set to true to omit writing `mtime` value for extracted entries. - `transform` Provide a function that takes an `entry` object, and - returns a stream, or any falsey value. If a stream is provided, + returns a stream, or any falsey value. If a stream is provided, then that stream's data will be written instead of the contents of - the archive entry. If a falsey value is provided, then the entry is - written to disk as normal. 
(To exclude items from extraction, use + the archive entry. If a falsey value is provided, then the entry is + written to disk as normal. (To exclude items from extraction, use the `filter` option described above.) -- `strict` Treat warnings as crash-worthy errors. Default false. -- `onentry` A function that gets called with `(entry)` for each entry +- `strict` Treat warnings as crash-worthy errors. Default false. +- `onReadEntry` A function that gets called with `(entry)` for each entry that passes the filter. - `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `noChmod` Set to true to omit calling `fs.chmod()` to ensure that the - extracted file matches the entry mode. This also suppresses the call to - `process.umask()` to determine the default umask value, since tar will - extract with whatever mode is provided, and let the process `umask` apply - normally. + any warnings encountered. (See "Warnings and Errors") +- `chmod` Set to true to call `fs.chmod()` to ensure that the + extracted file matches the entry mode. This may necessitate a + call to the deprecated and thread-unsafe `process.umask()` + method to determine the default umask value, unless a + `processUmask` options is also provided. Otherwise tar will + extract with whatever mode is provided, and let the process + `umask` apply normally. +- `processUmask` Set to an explicit numeric value to avoid + calling `process.umask()` when `chmod: true` is set. - `maxDepth` The maximum depth of subfolders to extract into. This defaults to 1024. Anything deeper than the limit will raise a warning and skip the entry. Set to `Infinity` to remove the limitation. -### class tar.Unpack.Sync +### class UnpackSync -Synchronous version of `tar.Unpack`. +Synchronous version of `Unpack`. Note that using an asynchronous stream type with the `transform` option will cause undefined behavior in sync unpack streams. @@ -771,7 +797,7 @@ use case. ### class tar.Parse -A writable stream that parses a tar archive stream. All the standard +A writable stream that parses a tar archive stream. All the standard writable stream stuff is supported. If the archive is gzipped, then tar will detect this and unzip it. @@ -791,19 +817,19 @@ Returns an event emitter that emits `entry` events with The following options are supported: -- `strict` Treat warnings as crash-worthy errors. Default false. +- `strict` Treat warnings as crash-worthy errors. Default false. - `filter` A function that gets called with `(path, entry)` for each - entry being listed. Return `true` to emit the entry from the + entry being listed. Return `true` to emit the entry from the archive, or `false` to skip it. -- `onentry` A function that gets called with `(entry)` for each entry +- `onReadEntry` A function that gets called with `(entry)` for each entry that passes the filter. - `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") + any warnings encountered. (See "Warnings and Errors") #### abort(error) -Stop all parsing activities. This is called when there are zlib -errors. It also emits an unrecoverable warning with the error provided. +Stop all parsing activities. This is called when there are zlib +errors. It also emits an unrecoverable warning with the error provided. 
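Putting the above together, a minimal usage sketch for the parser
(assuming a local `archive.tgz`; each `'entry'` is a `tar.ReadEntry`
stream, and consuming its data lets parsing continue):

```js
import fs from 'fs'
import * as tar from 'tar'

fs.createReadStream('archive.tgz')
  .pipe(new tar.Parse())
  .on('entry', entry => {
    const chunks = []
    entry.on('data', c => chunks.push(c))
    entry.on('end', () =>
      console.log(entry.path, Buffer.concat(chunks).length, 'bytes'))
  })
```
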
### class tar.ReadEntry extends [MiniPass](http://npm.im/minipass) @@ -822,7 +848,7 @@ It has the following fields: - `meta` True if this represents metadata about the next entry, false if it represents a filesystem object. - All the fields from the header, extended header, and global extended - header are added to the ReadEntry object. So it has `path`, `type`, + header are added to the ReadEntry object. So it has `path`, `type`, `size`, `mode`, and so on. #### constructor(header, extended, globalExtended) @@ -843,43 +869,42 @@ WriteEntry objects for all of the directory contents. It has the following fields: -- `path` The path field that will be written to the archive. By +- `path` The path field that will be written to the archive. By default, this is also the path from the cwd to the file system object. - `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable + time-based operations. Additionally, `mode` is set to a "reasonable default" for most unix systems, based on a `umask` value of `0o22`. - `myuid` If supported, the uid of the user running the current process. -- `myuser` The `env.USER` string if set, or `''`. Set as the entry +- `myuser` The `env.USER` string if set, or `''`. Set as the entry `uname` field if the file's `uid` matches `this.myuid`. - `maxReadSize` The maximum buffer size for `fs.read()` operations. Defaults to 1 MB. - `linkCache` A Map object containing the device and inode value for any file whose nlink is > 1, to identify hard links. - `statCache` A Map object that caches calls `lstat`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped +- `preservePaths` Allow absolute paths. By default, `/` is stripped from absolute paths. - `cwd` The current working directory for creating the archive. Defaults to `process.cwd()`. -- `absolute` The absolute path to the entry on the filesystem. By +- `absolute` The absolute path to the entry on the filesystem. By default, this is `path.resolve(this.cwd, this.path)`, but it can be overridden explicitly. -- `strict` Treat warnings as crash-worthy errors. Default false. -- `win32` True if on a windows platform. Causes behavior where paths +- `strict` Treat warnings as crash-worthy errors. Default false. +- `win32` True if on a windows platform. Causes behavior where paths replace `\` with `/` and filenames containing the windows-compatible forms of `<|>?:` characters are converted to actual `<|>?:` characters in the archive. -- `noPax` Suppress pax extended headers. Note that this means that +- `noPax` Suppress pax extended headers. Note that this means that long paths and linkpaths will be truncated, and large or negative numeric values may be interpreted incorrectly. - `noMtime` Set to true to omit writing `mtime` values for entries. Note that this prevents using other mtime-based features like `tar.update` or the `keepNewer` option with the resulting tar archive. - #### constructor(path, options) `path` is the path of the entry as it is written in the archive. @@ -887,32 +912,32 @@ It has the following fields: The following options are supported: - `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. 
Note + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable + time-based operations. Additionally, `mode` is set to a "reasonable default" for most unix systems, based on a `umask` value of `0o22`. - `maxReadSize` The maximum buffer size for `fs.read()` operations. Defaults to 1 MB. - `linkCache` A Map object containing the device and inode value for any file whose nlink is > 1, to identify hard links. - `statCache` A Map object that caches calls `lstat`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped +- `preservePaths` Allow absolute paths. By default, `/` is stripped from absolute paths. - `cwd` The current working directory for creating the archive. Defaults to `process.cwd()`. -- `absolute` The absolute path to the entry on the filesystem. By +- `absolute` The absolute path to the entry on the filesystem. By default, this is `path.resolve(this.cwd, this.path)`, but it can be overridden explicitly. -- `strict` Treat warnings as crash-worthy errors. Default false. -- `win32` True if on a windows platform. Causes behavior where paths +- `strict` Treat warnings as crash-worthy errors. Default false. +- `win32` True if on a windows platform. Causes behavior where paths replace `\` with `/`. - `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") + any warnings encountered. (See "Warnings and Errors") - `noMtime` Set to true to omit writing `mtime` values for entries. Note that this prevents using other mtime-based features like `tar.update` or the `keepNewer` option with the resulting tar archive. - `umask` Set to restrict the modes on the entries in the archive, - somewhat like how umask works on file creation. Defaults to + somewhat like how umask works on file creation. Defaults to `process.umask()` on unix systems, or `0o22` on Windows. #### warn(message, data) @@ -937,15 +962,15 @@ instead of from the filesystem. The following options are supported: - `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable + time-based operations. Additionally, `mode` is set to a "reasonable default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped +- `preservePaths` Allow absolute paths. By default, `/` is stripped from absolute paths. -- `strict` Treat warnings as crash-worthy errors. Default false. +- `strict` Treat warnings as crash-worthy errors. Default false. - `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") + any warnings encountered. (See "Warnings and Errors") - `noMtime` Set to true to omit writing `mtime` values for entries. Note that this prevents using other mtime-based features like `tar.update` or the `keepNewer` option with the resulting tar archive. @@ -957,21 +982,21 @@ A class for reading and writing header blocks. It has the following fields: - `nullBlock` True if decoding a block which is entirely composed of - `0x00` null bytes. (Useful because tar files are terminated by + `0x00` null bytes. 
(Useful because tar files are terminated by at least 2 null blocks.) - `cksumValid` True if the checksum in the header is valid, false otherwise. - `needPax` True if the values, as encoded, will require a Pax extended header. - `path` The path of the entry. -- `mode` The 4 lowest-order octal digits of the file mode. That is, +- `mode` The 4 lowest-order octal digits of the file mode. That is, read/write/execute permissions for world, group, and owner, and the setuid, setgid, and sticky bits. - `uid` Numeric user id of the file owner - `gid` Numeric group id of the file owner - `size` Size of the file in bytes - `mtime` Modified time of the file -- `cksum` The checksum of the header. This is generated by adding all +- `cksum` The checksum of the header. This is generated by adding all the bytes of the header block, treating the checksum field itself as all ascii space characters (that is, `0x20`). - `type` The human-readable name of the type of entry this represents, @@ -981,16 +1006,16 @@ It has the following fields: - `linkpath` The target of Link and SymbolicLink entries. - `uname` Human-readable user name of the file owner - `gname` Human-readable group name of the file owner -- `devmaj` The major portion of the device number. Always `0` for +- `devmaj` The major portion of the device number. Always `0` for files, directories, and links. -- `devmin` The minor portion of the device number. Always `0` for +- `devmin` The minor portion of the device number. Always `0` for files, directories, and links. - `atime` File access time. - `ctime` File change time. #### constructor(data, [offset=0]) -`data` is optional. It is either a Buffer that should be interpreted +`data` is optional. It is either a Buffer that should be interpreted as a tar Header starting at the specified offset and continuing for 512 bytes, or a data object of keys and values to set on the header object, and eventually encode as a tar Header. @@ -1017,7 +1042,7 @@ required to properly encode the specified data. An object representing a set of key-value pairs in an Pax extended header entry. -It has the following fields. Where the same name is used, they have +It has the following fields. Where the same name is used, they have the same semantics as the tar.Header field of the same name. - `global` True if this represents a global extended header, or false @@ -1040,7 +1065,7 @@ the same semantics as the tar.Header field of the same name. #### constructor(object, global) -Set the fields set in the object. `global` is a boolean that defaults +Set the fields set in the object. `global` is a boolean that defaults to false. #### encode() @@ -1064,7 +1089,7 @@ Return a new Pax object created by parsing the contents of the string provided. If the `extended` object is set, then also add the fields from that -object. (This is necessary because multiple metadata entries can +object. (This is necessary because multiple metadata entries can occur in sequence.) 
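As a quick illustration of the header machinery described above, a
minimal sketch of building and encoding a `tar.Header` (the field
values here are made up; per the docs above, `type` accepts the
human-readable type name, and `encode()` with no arguments fills the
header's own 512-byte block):

```js
import * as tar from 'tar'

const h = new tar.Header({
  path: 'hello.txt',
  mode: 0o644,
  size: 11,
  mtime: new Date(),
  type: 'File',
})
h.encode() // allocates and fills h.block with a 512-byte header
console.log(h.needPax, h.block.length) // needPax true only if pax headers are required
```
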
### tar.types diff --git a/lib/create.js b/lib/create.js deleted file mode 100644 index 9c860d4e..00000000 --- a/lib/create.js +++ /dev/null @@ -1,111 +0,0 @@ -'use strict' - -// tar -c -const hlo = require('./high-level-opt.js') - -const Pack = require('./pack.js') -const fsm = require('fs-minipass') -const t = require('./list.js') -const path = require('path') - -module.exports = (opt_, files, cb) => { - if (typeof files === 'function') { - cb = files - } - - if (Array.isArray(opt_)) { - files = opt_, opt_ = {} - } - - if (!files || !Array.isArray(files) || !files.length) { - throw new TypeError('no files or directories specified') - } - - files = Array.from(files) - - const opt = hlo(opt_) - - if (opt.sync && typeof cb === 'function') { - throw new TypeError('callback not supported for sync tar functions') - } - - if (!opt.file && typeof cb === 'function') { - throw new TypeError('callback only supported with file option') - } - - return opt.file && opt.sync ? createFileSync(opt, files) - : opt.file ? createFile(opt, files, cb) - : opt.sync ? createSync(opt, files) - : create(opt, files) -} - -const createFileSync = (opt, files) => { - const p = new Pack.Sync(opt) - const stream = new fsm.WriteStreamSync(opt.file, { - mode: opt.mode || 0o666, - }) - p.pipe(stream) - addFilesSync(p, files) -} - -const createFile = (opt, files, cb) => { - const p = new Pack(opt) - const stream = new fsm.WriteStream(opt.file, { - mode: opt.mode || 0o666, - }) - p.pipe(stream) - - const promise = new Promise((res, rej) => { - stream.on('error', rej) - stream.on('close', res) - p.on('error', rej) - }) - - addFilesAsync(p, files) - - return cb ? promise.then(cb, cb) : promise -} - -const addFilesSync = (p, files) => { - files.forEach(file => { - if (file.charAt(0) === '@') { - t({ - file: path.resolve(p.cwd, file.slice(1)), - sync: true, - noResume: true, - onentry: entry => p.add(entry), - }) - } else { - p.add(file) - } - }) - p.end() -} - -const addFilesAsync = (p, files) => { - while (files.length) { - const file = files.shift() - if (file.charAt(0) === '@') { - return t({ - file: path.resolve(p.cwd, file.slice(1)), - noResume: true, - onentry: entry => p.add(entry), - }).then(_ => addFilesAsync(p, files)) - } else { - p.add(file) - } - } - p.end() -} - -const createSync = (opt, files) => { - const p = new Pack.Sync(opt) - addFilesSync(p, files) - return p -} - -const create = (opt, files) => { - const p = new Pack(opt) - addFilesAsync(p, files) - return p -} diff --git a/lib/extract.js b/lib/extract.js deleted file mode 100644 index 54767982..00000000 --- a/lib/extract.js +++ /dev/null @@ -1,113 +0,0 @@ -'use strict' - -// tar -x -const hlo = require('./high-level-opt.js') -const Unpack = require('./unpack.js') -const fs = require('fs') -const fsm = require('fs-minipass') -const path = require('path') -const stripSlash = require('./strip-trailing-slashes.js') - -module.exports = (opt_, files, cb) => { - if (typeof opt_ === 'function') { - cb = opt_, files = null, opt_ = {} - } else if (Array.isArray(opt_)) { - files = opt_, opt_ = {} - } - - if (typeof files === 'function') { - cb = files, files = null - } - - if (!files) { - files = [] - } else { - files = Array.from(files) - } - - const opt = hlo(opt_) - - if (opt.sync && typeof cb === 'function') { - throw new TypeError('callback not supported for sync tar functions') - } - - if (!opt.file && typeof cb === 'function') { - throw new TypeError('callback only supported with file option') - } - - if (files.length) { - filesFilter(opt, files) - } - - 
return opt.file && opt.sync ? extractFileSync(opt) - : opt.file ? extractFile(opt, cb) - : opt.sync ? extractSync(opt) - : extract(opt) -} - -// construct a filter that limits the file entries listed -// include child entries if a dir is included -const filesFilter = (opt, files) => { - const map = new Map(files.map(f => [stripSlash(f), true])) - const filter = opt.filter - - const mapHas = (file, r) => { - const root = r || path.parse(file).root || '.' - const ret = file === root ? false - : map.has(file) ? map.get(file) - : mapHas(path.dirname(file), root) - - map.set(file, ret) - return ret - } - - opt.filter = filter - ? (file, entry) => filter(file, entry) && mapHas(stripSlash(file)) - : file => mapHas(stripSlash(file)) -} - -const extractFileSync = opt => { - const u = new Unpack.Sync(opt) - - const file = opt.file - const stat = fs.statSync(file) - // This trades a zero-byte read() syscall for a stat - // However, it will usually result in less memory allocation - const readSize = opt.maxReadSize || 16 * 1024 * 1024 - const stream = new fsm.ReadStreamSync(file, { - readSize: readSize, - size: stat.size, - }) - stream.pipe(u) -} - -const extractFile = (opt, cb) => { - const u = new Unpack(opt) - const readSize = opt.maxReadSize || 16 * 1024 * 1024 - - const file = opt.file - const p = new Promise((resolve, reject) => { - u.on('error', reject) - u.on('close', resolve) - - // This trades a zero-byte read() syscall for a stat - // However, it will usually result in less memory allocation - fs.stat(file, (er, stat) => { - if (er) { - reject(er) - } else { - const stream = new fsm.ReadStream(file, { - readSize: readSize, - size: stat.size, - }) - stream.on('error', reject) - stream.pipe(u) - } - }) - }) - return cb ? p.then(cb, cb) : p -} - -const extractSync = opt => new Unpack.Sync(opt) - -const extract = opt => new Unpack(opt) diff --git a/lib/header.js b/lib/header.js deleted file mode 100644 index 411d5e45..00000000 --- a/lib/header.js +++ /dev/null @@ -1,304 +0,0 @@ -'use strict' -// parse a 512-byte header block to a data object, or vice-versa -// encode returns `true` if a pax extended header is needed, because -// the data could not be faithfully encoded in a simple header. -// (Also, check header.needPax to see if it needs a pax header.) 
- -const types = require('./types.js') -const pathModule = require('path').posix -const large = require('./large-numbers.js') - -const SLURP = Symbol('slurp') -const TYPE = Symbol('type') - -class Header { - constructor (data, off, ex, gex) { - this.cksumValid = false - this.needPax = false - this.nullBlock = false - - this.block = null - this.path = null - this.mode = null - this.uid = null - this.gid = null - this.size = null - this.mtime = null - this.cksum = null - this[TYPE] = '0' - this.linkpath = null - this.uname = null - this.gname = null - this.devmaj = 0 - this.devmin = 0 - this.atime = null - this.ctime = null - - if (Buffer.isBuffer(data)) { - this.decode(data, off || 0, ex, gex) - } else if (data) { - this.set(data) - } - } - - decode (buf, off, ex, gex) { - if (!off) { - off = 0 - } - - if (!buf || !(buf.length >= off + 512)) { - throw new Error('need 512 bytes for header') - } - - this.path = decString(buf, off, 100) - this.mode = decNumber(buf, off + 100, 8) - this.uid = decNumber(buf, off + 108, 8) - this.gid = decNumber(buf, off + 116, 8) - this.size = decNumber(buf, off + 124, 12) - this.mtime = decDate(buf, off + 136, 12) - this.cksum = decNumber(buf, off + 148, 12) - - // if we have extended or global extended headers, apply them now - // See https://github.com/npm/node-tar/pull/187 - this[SLURP](ex) - this[SLURP](gex, true) - - // old tar versions marked dirs as a file with a trailing / - this[TYPE] = decString(buf, off + 156, 1) - if (this[TYPE] === '') { - this[TYPE] = '0' - } - if (this[TYPE] === '0' && this.path.slice(-1) === '/') { - this[TYPE] = '5' - } - - // tar implementations sometimes incorrectly put the stat(dir).size - // as the size in the tarball, even though Directory entries are - // not able to have any body at all. In the very rare chance that - // it actually DOES have a body, we weren't going to do anything with - // it anyway, and it'll just be a warning about an invalid header. - if (this[TYPE] === '5') { - this.size = 0 - } - - this.linkpath = decString(buf, off + 157, 100) - if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') { - this.uname = decString(buf, off + 265, 32) - this.gname = decString(buf, off + 297, 32) - this.devmaj = decNumber(buf, off + 329, 8) - this.devmin = decNumber(buf, off + 337, 8) - if (buf[off + 475] !== 0) { - // definitely a prefix, definitely >130 chars. - const prefix = decString(buf, off + 345, 155) - this.path = prefix + '/' + this.path - } else { - const prefix = decString(buf, off + 345, 130) - if (prefix) { - this.path = prefix + '/' + this.path - } - this.atime = decDate(buf, off + 476, 12) - this.ctime = decDate(buf, off + 488, 12) - } - } - - let sum = 8 * 0x20 - for (let i = off; i < off + 148; i++) { - sum += buf[i] - } - - for (let i = off + 156; i < off + 512; i++) { - sum += buf[i] - } - - this.cksumValid = sum === this.cksum - if (this.cksum === null && sum === 8 * 0x20) { - this.nullBlock = true - } - } - - [SLURP] (ex, global) { - for (const k in ex) { - // we slurp in everything except for the path attribute in - // a global extended header, because that's weird. - if (ex[k] !== null && ex[k] !== undefined && - !(global && k === 'path')) { - this[k] = ex[k] - } - } - } - - encode (buf, off) { - if (!buf) { - buf = this.block = Buffer.alloc(512) - off = 0 - } - - if (!off) { - off = 0 - } - - if (!(buf.length >= off + 512)) { - throw new Error('need 512 bytes for header') - } - - const prefixSize = this.ctime || this.atime ? 
130 : 155 - const split = splitPrefix(this.path || '', prefixSize) - const path = split[0] - const prefix = split[1] - this.needPax = split[2] - - this.needPax = encString(buf, off, 100, path) || this.needPax - this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax - this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax - this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax - this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax - this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax - buf[off + 156] = this[TYPE].charCodeAt(0) - this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax - buf.write('ustar\u000000', off + 257, 8) - this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax - this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax - this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax - this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax - this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax - if (buf[off + 475] !== 0) { - this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax - } else { - this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax - this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax - this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax - } - - let sum = 8 * 0x20 - for (let i = off; i < off + 148; i++) { - sum += buf[i] - } - - for (let i = off + 156; i < off + 512; i++) { - sum += buf[i] - } - - this.cksum = sum - encNumber(buf, off + 148, 8, this.cksum) - this.cksumValid = true - - return this.needPax - } - - set (data) { - for (const i in data) { - if (data[i] !== null && data[i] !== undefined) { - this[i] = data[i] - } - } - } - - get type () { - return types.name.get(this[TYPE]) || this[TYPE] - } - - get typeKey () { - return this[TYPE] - } - - set type (type) { - if (types.code.has(type)) { - this[TYPE] = types.code.get(type) - } else { - this[TYPE] = type - } - } -} - -const splitPrefix = (p, prefixSize) => { - const pathSize = 100 - let pp = p - let prefix = '' - let ret - const root = pathModule.parse(p).root || '.' - - if (Buffer.byteLength(pp) < pathSize) { - ret = [pp, prefix, false] - } else { - // first set prefix to the dir, and path to the base - prefix = pathModule.dirname(pp) - pp = pathModule.basename(pp) - - do { - if (Buffer.byteLength(pp) <= pathSize && - Buffer.byteLength(prefix) <= prefixSize) { - // both fit! - ret = [pp, prefix, false] - } else if (Buffer.byteLength(pp) > pathSize && - Buffer.byteLength(prefix) <= prefixSize) { - // prefix fits in prefix, but path doesn't fit in path - ret = [pp.slice(0, pathSize - 1), prefix, true] - } else { - // make path take a bit from prefix - pp = pathModule.join(pathModule.basename(prefix), pp) - prefix = pathModule.dirname(prefix) - } - } while (prefix !== root && !ret) - - // at this point, found no resolution, just truncate - if (!ret) { - ret = [p.slice(0, pathSize - 1), '', true] - } - } - return ret -} - -const decString = (buf, off, size) => - buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '') - -const decDate = (buf, off, size) => - numToDate(decNumber(buf, off, size)) - -const numToDate = num => num === null ? null : new Date(num * 1000) - -const decNumber = (buf, off, size) => - buf[off] & 0x80 ? large.parse(buf.slice(off, off + size)) - : decSmallNumber(buf, off, size) - -const nanNull = value => isNaN(value) ? 
null : value - -const decSmallNumber = (buf, off, size) => - nanNull(parseInt( - buf.slice(off, off + size) - .toString('utf8').replace(/\0.*$/, '').trim(), 8)) - -// the maximum encodable as a null-terminated octal, by field size -const MAXNUM = { - 12: 0o77777777777, - 8: 0o7777777, -} - -const encNumber = (buf, off, size, number) => - number === null ? false : - number > MAXNUM[size] || number < 0 - ? (large.encode(number, buf.slice(off, off + size)), true) - : (encSmallNumber(buf, off, size, number), false) - -const encSmallNumber = (buf, off, size, number) => - buf.write(octalString(number, size), off, size, 'ascii') - -const octalString = (number, size) => - padOctal(Math.floor(number).toString(8), size) - -const padOctal = (string, size) => - (string.length === size - 1 ? string - : new Array(size - string.length - 1).join('0') + string + ' ') + '\0' - -const encDate = (buf, off, size, date) => - date === null ? false : - encNumber(buf, off, size, date.getTime() / 1000) - -// enough to fill the longest string we've got -const NULLS = new Array(156).join('\0') -// pad with nulls, return true if it's longer or non-ascii -const encString = (buf, off, size, string) => - string === null ? false : - (buf.write(string + NULLS, off, size, 'utf8'), - string.length !== Buffer.byteLength(string) || string.length > size) - -module.exports = Header diff --git a/lib/high-level-opt.js b/lib/high-level-opt.js deleted file mode 100644 index 40e44180..00000000 --- a/lib/high-level-opt.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -// turn tar(1) style args like `C` into the more verbose things like `cwd` - -const argmap = new Map([ - ['C', 'cwd'], - ['f', 'file'], - ['z', 'gzip'], - ['P', 'preservePaths'], - ['U', 'unlink'], - ['strip-components', 'strip'], - ['stripComponents', 'strip'], - ['keep-newer', 'newer'], - ['keepNewer', 'newer'], - ['keep-newer-files', 'newer'], - ['keepNewerFiles', 'newer'], - ['k', 'keep'], - ['keep-existing', 'keep'], - ['keepExisting', 'keep'], - ['m', 'noMtime'], - ['no-mtime', 'noMtime'], - ['p', 'preserveOwner'], - ['L', 'follow'], - ['h', 'follow'], -]) - -module.exports = opt => opt ? Object.keys(opt).map(k => [ - argmap.has(k) ? argmap.get(k) : k, opt[k], -]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {} diff --git a/lib/list.js b/lib/list.js deleted file mode 100644 index f2358c25..00000000 --- a/lib/list.js +++ /dev/null @@ -1,139 +0,0 @@ -'use strict' - -// XXX: This shares a lot in common with extract.js -// maybe some DRY opportunity here? - -// tar -t -const hlo = require('./high-level-opt.js') -const Parser = require('./parse.js') -const fs = require('fs') -const fsm = require('fs-minipass') -const path = require('path') -const stripSlash = require('./strip-trailing-slashes.js') - -module.exports = (opt_, files, cb) => { - if (typeof opt_ === 'function') { - cb = opt_, files = null, opt_ = {} - } else if (Array.isArray(opt_)) { - files = opt_, opt_ = {} - } - - if (typeof files === 'function') { - cb = files, files = null - } - - if (!files) { - files = [] - } else { - files = Array.from(files) - } - - const opt = hlo(opt_) - - if (opt.sync && typeof cb === 'function') { - throw new TypeError('callback not supported for sync tar functions') - } - - if (!opt.file && typeof cb === 'function') { - throw new TypeError('callback only supported with file option') - } - - if (files.length) { - filesFilter(opt, files) - } - - if (!opt.noResume) { - onentryFunction(opt) - } - - return opt.file && opt.sync ? 
listFileSync(opt) - : opt.file ? listFile(opt, cb) - : list(opt) -} - -const onentryFunction = opt => { - const onentry = opt.onentry - opt.onentry = onentry ? e => { - onentry(e) - e.resume() - } : e => e.resume() -} - -// construct a filter that limits the file entries listed -// include child entries if a dir is included -const filesFilter = (opt, files) => { - const map = new Map(files.map(f => [stripSlash(f), true])) - const filter = opt.filter - - const mapHas = (file, r) => { - const root = r || path.parse(file).root || '.' - const ret = file === root ? false - : map.has(file) ? map.get(file) - : mapHas(path.dirname(file), root) - - map.set(file, ret) - return ret - } - - opt.filter = filter - ? (file, entry) => filter(file, entry) && mapHas(stripSlash(file)) - : file => mapHas(stripSlash(file)) -} - -const listFileSync = opt => { - const p = list(opt) - const file = opt.file - let threw = true - let fd - try { - const stat = fs.statSync(file) - const readSize = opt.maxReadSize || 16 * 1024 * 1024 - if (stat.size < readSize) { - p.end(fs.readFileSync(file)) - } else { - let pos = 0 - const buf = Buffer.allocUnsafe(readSize) - fd = fs.openSync(file, 'r') - while (pos < stat.size) { - const bytesRead = fs.readSync(fd, buf, 0, readSize, pos) - pos += bytesRead - p.write(buf.slice(0, bytesRead)) - } - p.end() - } - threw = false - } finally { - if (threw && fd) { - try { - fs.closeSync(fd) - } catch (er) {} - } - } -} - -const listFile = (opt, cb) => { - const parse = new Parser(opt) - const readSize = opt.maxReadSize || 16 * 1024 * 1024 - - const file = opt.file - const p = new Promise((resolve, reject) => { - parse.on('error', reject) - parse.on('end', resolve) - - fs.stat(file, (er, stat) => { - if (er) { - reject(er) - } else { - const stream = new fsm.ReadStream(file, { - readSize: readSize, - size: stat.size, - }) - stream.on('error', reject) - stream.pipe(parse) - } - }) - }) - return cb ? p.then(cb, cb) : p -} - -const list = opt => new Parser(opt) diff --git a/lib/mkdir.js b/lib/mkdir.js deleted file mode 100644 index 8ee8de78..00000000 --- a/lib/mkdir.js +++ /dev/null @@ -1,229 +0,0 @@ -'use strict' -// wrapper around mkdirp for tar's needs. - -// TODO: This should probably be a class, not functionally -// passing around state in a gazillion args. 
- -const mkdirp = require('mkdirp') -const fs = require('fs') -const path = require('path') -const chownr = require('chownr') -const normPath = require('./normalize-windows-path.js') - -class SymlinkError extends Error { - constructor (symlink, path) { - super('Cannot extract through symbolic link') - this.path = path - this.symlink = symlink - } - - get name () { - return 'SylinkError' - } -} - -class CwdError extends Error { - constructor (path, code) { - super(code + ': Cannot cd into \'' + path + '\'') - this.path = path - this.code = code - } - - get name () { - return 'CwdError' - } -} - -const cGet = (cache, key) => cache.get(normPath(key)) -const cSet = (cache, key, val) => cache.set(normPath(key), val) - -const checkCwd = (dir, cb) => { - fs.stat(dir, (er, st) => { - if (er || !st.isDirectory()) { - er = new CwdError(dir, er && er.code || 'ENOTDIR') - } - cb(er) - }) -} - -module.exports = (dir, opt, cb) => { - dir = normPath(dir) - - // if there's any overlap between mask and mode, - // then we'll need an explicit chmod - const umask = opt.umask - const mode = opt.mode | 0o0700 - const needChmod = (mode & umask) !== 0 - - const uid = opt.uid - const gid = opt.gid - const doChown = typeof uid === 'number' && - typeof gid === 'number' && - (uid !== opt.processUid || gid !== opt.processGid) - - const preserve = opt.preserve - const unlink = opt.unlink - const cache = opt.cache - const cwd = normPath(opt.cwd) - - const done = (er, created) => { - if (er) { - cb(er) - } else { - cSet(cache, dir, true) - if (created && doChown) { - chownr(created, uid, gid, er => done(er)) - } else if (needChmod) { - fs.chmod(dir, mode, cb) - } else { - cb() - } - } - } - - if (cache && cGet(cache, dir) === true) { - return done() - } - - if (dir === cwd) { - return checkCwd(dir, done) - } - - if (preserve) { - return mkdirp(dir, { mode }).then(made => done(null, made), done) - } - - const sub = normPath(path.relative(cwd, dir)) - const parts = sub.split('/') - mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done) -} - -const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { - if (!parts.length) { - return cb(null, created) - } - const p = parts.shift() - const part = normPath(path.resolve(base + '/' + p)) - if (cGet(cache, part)) { - return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) - } - fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) -} - -const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => { - if (er) { - fs.lstat(part, (statEr, st) => { - if (statEr) { - statEr.path = statEr.path && normPath(statEr.path) - cb(statEr) - } else if (st.isDirectory()) { - mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) - } else if (unlink) { - fs.unlink(part, er => { - if (er) { - return cb(er) - } - fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) - }) - } else if (st.isSymbolicLink()) { - return cb(new SymlinkError(part, part + '/' + parts.join('/'))) - } else { - cb(er) - } - }) - } else { - created = created || part - mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) - } -} - -const checkCwdSync = dir => { - let ok = false - let code = 'ENOTDIR' - try { - ok = fs.statSync(dir).isDirectory() - } catch (er) { - code = er.code - } finally { - if (!ok) { - throw new CwdError(dir, code) - } - } -} - -module.exports.sync = (dir, opt) => { - dir = normPath(dir) - // if there's any overlap between mask and mode, - // then we'll need an explicit chmod - const umask = opt.umask 
- const mode = opt.mode | 0o0700 - const needChmod = (mode & umask) !== 0 - - const uid = opt.uid - const gid = opt.gid - const doChown = typeof uid === 'number' && - typeof gid === 'number' && - (uid !== opt.processUid || gid !== opt.processGid) - - const preserve = opt.preserve - const unlink = opt.unlink - const cache = opt.cache - const cwd = normPath(opt.cwd) - - const done = (created) => { - cSet(cache, dir, true) - if (created && doChown) { - chownr.sync(created, uid, gid) - } - if (needChmod) { - fs.chmodSync(dir, mode) - } - } - - if (cache && cGet(cache, dir) === true) { - return done() - } - - if (dir === cwd) { - checkCwdSync(cwd) - return done() - } - - if (preserve) { - return done(mkdirp.sync(dir, mode)) - } - - const sub = normPath(path.relative(cwd, dir)) - const parts = sub.split('/') - let created = null - for (let p = parts.shift(), part = cwd; - p && (part += '/' + p); - p = parts.shift()) { - part = normPath(path.resolve(part)) - if (cGet(cache, part)) { - continue - } - - try { - fs.mkdirSync(part, mode) - created = created || part - cSet(cache, part, true) - } catch (er) { - const st = fs.lstatSync(part) - if (st.isDirectory()) { - cSet(cache, part, true) - continue - } else if (unlink) { - fs.unlinkSync(part) - fs.mkdirSync(part, mode) - created = created || part - cSet(cache, part, true) - continue - } else if (st.isSymbolicLink()) { - return new SymlinkError(part, part + '/' + parts.join('/')) - } - } - } - - return done(created) -} diff --git a/lib/path-reservations.js b/lib/path-reservations.js deleted file mode 100644 index 8d349d58..00000000 --- a/lib/path-reservations.js +++ /dev/null @@ -1,156 +0,0 @@ -// A path exclusive reservation system -// reserve([list, of, paths], fn) -// When the fn is first in line for all its paths, it -// is called with a cb that clears the reservation. -// -// Used by async unpack to avoid clobbering paths in use, -// while still allowing maximal safe parallelization. 
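// Illustrative sketch (editor's note, not part of the original module):
// the factory exported below returns { check, reserve }; the async
// unpacker uses it roughly like this:
//
//   const pathReservations = require('./path-reservations.js')
//   const reservations = pathReservations()
//   reservations.reserve(['a/b/c', 'a/b/d'], done => {
//     // first in line for both paths: safe to write these entries now
//     done() // clear the reservation so queued jobs can run
//   })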
- -const assert = require('assert') -const normalize = require('./normalize-unicode.js') -const stripSlashes = require('./strip-trailing-slashes.js') -const { join } = require('path') - -const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform -const isWindows = platform === 'win32' - -module.exports = () => { - // path => [function or Set] - // A Set object means a directory reservation - // A fn is a direct reservation on that path - const queues = new Map() - - // fn => {paths:[path,...], dirs:[path, ...]} - const reservations = new Map() - - // return a set of parent dirs for a given path - // '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d'] - const getDirs = path => { - const dirs = path.split('/').slice(0, -1).reduce((set, path) => { - if (set.length) { - path = join(set[set.length - 1], path) - } - set.push(path || '/') - return set - }, []) - return dirs - } - - // functions currently running - const running = new Set() - - // return the queues for each path the function cares about - // fn => {paths, dirs} - const getQueues = fn => { - const res = reservations.get(fn) - /* istanbul ignore if - unpossible */ - if (!res) { - throw new Error('function does not have any path reservations') - } - return { - paths: res.paths.map(path => queues.get(path)), - dirs: [...res.dirs].map(path => queues.get(path)), - } - } - - // check if fn is first in line for all its paths, and is - // included in the first set for all its dir queues - const check = fn => { - const { paths, dirs } = getQueues(fn) - return paths.every(q => q[0] === fn) && - dirs.every(q => q[0] instanceof Set && q[0].has(fn)) - } - - // run the function if it's first in line and not already running - const run = fn => { - if (running.has(fn) || !check(fn)) { - return false - } - running.add(fn) - fn(() => clear(fn)) - return true - } - - const clear = fn => { - if (!running.has(fn)) { - return false - } - - const { paths, dirs } = reservations.get(fn) - const next = new Set() - - paths.forEach(path => { - const q = queues.get(path) - assert.equal(q[0], fn) - if (q.length === 1) { - queues.delete(path) - } else { - q.shift() - if (typeof q[0] === 'function') { - next.add(q[0]) - } else { - q[0].forEach(fn => next.add(fn)) - } - } - }) - - dirs.forEach(dir => { - const q = queues.get(dir) - assert(q[0] instanceof Set) - if (q[0].size === 1 && q.length === 1) { - queues.delete(dir) - } else if (q[0].size === 1) { - q.shift() - - // must be a function or else the Set would've been reused - next.add(q[0]) - } else { - q[0].delete(fn) - } - }) - running.delete(fn) - - next.forEach(fn => run(fn)) - return true - } - - const reserve = (paths, fn) => { - // collide on matches across case and unicode normalization - // On windows, thanks to the magic of 8.3 shortnames, it is fundamentally - // impossible to determine whether two paths refer to the same thing on - // disk, without asking the kernel for a shortname. - // So, we just pretend that every path matches every other path here, - // effectively removing all parallelization on windows. - paths = isWindows ? 
['win32 parallelization disabled'] : paths.map(p => { - // don't need normPath, because we skip this entirely for windows - return stripSlashes(join(normalize(p))).toLowerCase() - }) - - const dirs = new Set( - paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)) - ) - reservations.set(fn, { dirs, paths }) - paths.forEach(path => { - const q = queues.get(path) - if (!q) { - queues.set(path, [fn]) - } else { - q.push(fn) - } - }) - dirs.forEach(dir => { - const q = queues.get(dir) - if (!q) { - queues.set(dir, [new Set([fn])]) - } else if (q[q.length - 1] instanceof Set) { - q[q.length - 1].add(fn) - } else { - q.push(new Set([fn])) - } - }) - - return run(fn) - } - - return { check, reserve } -} diff --git a/lib/read-entry.js b/lib/read-entry.js deleted file mode 100644 index 6186266e..00000000 --- a/lib/read-entry.js +++ /dev/null @@ -1,107 +0,0 @@ -'use strict' -const { Minipass } = require('minipass') -const normPath = require('./normalize-windows-path.js') - -const SLURP = Symbol('slurp') -module.exports = class ReadEntry extends Minipass { - constructor (header, ex, gex) { - super() - // read entries always start life paused. this is to avoid the - // situation where Minipass's auto-ending empty streams results - // in an entry ending before we're ready for it. - this.pause() - this.extended = ex - this.globalExtended = gex - this.header = header - this.startBlockSize = 512 * Math.ceil(header.size / 512) - this.blockRemain = this.startBlockSize - this.remain = header.size - this.type = header.type - this.meta = false - this.ignore = false - switch (this.type) { - case 'File': - case 'OldFile': - case 'Link': - case 'SymbolicLink': - case 'CharacterDevice': - case 'BlockDevice': - case 'Directory': - case 'FIFO': - case 'ContiguousFile': - case 'GNUDumpDir': - break - - case 'NextFileHasLongLinkpath': - case 'NextFileHasLongPath': - case 'OldGnuLongPath': - case 'GlobalExtendedHeader': - case 'ExtendedHeader': - case 'OldExtendedHeader': - this.meta = true - break - - // NOTE: gnutar and bsdtar treat unrecognized types as 'File' - // it may be worth doing the same, but with a warning. - default: - this.ignore = true - } - - this.path = normPath(header.path) - this.mode = header.mode - if (this.mode) { - this.mode = this.mode & 0o7777 - } - this.uid = header.uid - this.gid = header.gid - this.uname = header.uname - this.gname = header.gname - this.size = header.size - this.mtime = header.mtime - this.atime = header.atime - this.ctime = header.ctime - this.linkpath = normPath(header.linkpath) - this.uname = header.uname - this.gname = header.gname - - if (ex) { - this[SLURP](ex) - } - if (gex) { - this[SLURP](gex, true) - } - } - - write (data) { - const writeLen = data.length - if (writeLen > this.blockRemain) { - throw new Error('writing more to entry than is appropriate') - } - - const r = this.remain - const br = this.blockRemain - this.remain = Math.max(0, r - writeLen) - this.blockRemain = Math.max(0, br - writeLen) - if (this.ignore) { - return true - } - - if (r >= writeLen) { - return super.write(data) - } - - // r < writeLen - return super.write(data.slice(0, r)) - } - - [SLURP] (ex, global) { - for (const k in ex) { - // we slurp in everything except for the path attribute in - // a global extended header, because that's weird. - if (ex[k] !== null && ex[k] !== undefined && - !(global && k === 'path')) { - this[k] = k === 'path' || k === 'linkpath' ? 
normPath(ex[k]) : ex[k] - } - } - } -} diff --git a/lib/types.js b/lib/types.js deleted file mode 100644 index 7bfc2546..00000000 --- a/lib/types.js +++ /dev/null @@ -1,44 +0,0 @@ -'use strict' -// map types from key to human-friendly name -exports.name = new Map([ - ['0', 'File'], - // same as File - ['', 'OldFile'], - ['1', 'Link'], - ['2', 'SymbolicLink'], - // Devices and FIFOs aren't fully supported - // they are parsed, but skipped when unpacking - ['3', 'CharacterDevice'], - ['4', 'BlockDevice'], - ['5', 'Directory'], - ['6', 'FIFO'], - // same as File - ['7', 'ContiguousFile'], - // pax headers - ['g', 'GlobalExtendedHeader'], - ['x', 'ExtendedHeader'], - // vendor-specific stuff - // skip - ['A', 'SolarisACL'], - // like 5, but with data, which should be skipped - ['D', 'GNUDumpDir'], - // metadata only, skip - ['I', 'Inode'], - // data = link path of next file - ['K', 'NextFileHasLongLinkpath'], - // data = path of next file - ['L', 'NextFileHasLongPath'], - // skip - ['M', 'ContinuationFile'], - // like L - ['N', 'OldGnuLongPath'], - // skip - ['S', 'SparseFile'], - // skip - ['V', 'TapeVolumeHeader'], - // like x - ['X', 'OldExtendedHeader'], -]) - -// map the other direction -exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]])) diff --git a/lib/update.js b/lib/update.js deleted file mode 100644 index 4d328543..00000000 --- a/lib/update.js +++ /dev/null @@ -1,40 +0,0 @@ -'use strict' - -// tar -u - -const hlo = require('./high-level-opt.js') -const r = require('./replace.js') -// just call tar.r with the filter and mtimeCache - -module.exports = (opt_, files, cb) => { - const opt = hlo(opt_) - - if (!opt.file) { - throw new TypeError('file is required') - } - - if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) { - throw new TypeError('cannot append to compressed archives') - } - - if (!files || !Array.isArray(files) || !files.length) { - throw new TypeError('no files or directories specified') - } - - files = Array.from(files) - - mtimeFilter(opt) - return r(opt, files, cb) -} - -const mtimeFilter = opt => { - const filter = opt.filter - - if (!opt.mtimeCache) { - opt.mtimeCache = new Map() - } - - opt.filter = filter ? (path, stat) => - filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime) - : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime) -} diff --git a/lib/warn-mixin.js b/lib/warn-mixin.js deleted file mode 100644 index a9406396..00000000 --- a/lib/warn-mixin.js +++ /dev/null @@ -1,24 +0,0 @@ -'use strict' -module.exports = Base => class extends Base { - warn (code, message, data = {}) { - if (this.file) { - data.file = this.file - } - if (this.cwd) { - data.cwd = this.cwd - } - data.code = message instanceof Error && message.code || code - data.tarCode = code - if (!this.strict && data.recoverable !== false) { - if (message instanceof Error) { - data = Object.assign(message, data) - message = message.message - } - this.emit('warn', data.tarCode, message, data) - } else if (message instanceof Error) { - this.emit('error', Object.assign(message, data)) - } else { - this.emit('error', Object.assign(new Error(`${code}: ${message}`), data)) - } - } -} diff --git a/lib/winchars.js b/lib/winchars.js deleted file mode 100644 index ebcab4ae..00000000 --- a/lib/winchars.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict' - -// When writing files on Windows, translate the characters to their -// 0xf000 higher-encoded versions. 
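// Illustrative sketch (editor's note, not part of the original module):
//
//   const winchars = require('./winchars.js')
//   winchars.encode('a<b>:c')
//   // reserved characters like '<' (0x3c) become their 0xf000-offset
//   // forms, e.g. '\uf03c'
//   winchars.decode(winchars.encode('a<b>:c'))  // => 'a<b>:c'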
- -const raw = [ - '|', - '<', - '>', - '?', - ':', -] - -const win = raw.map(char => - String.fromCharCode(0xf000 + char.charCodeAt(0))) - -const toWin = new Map(raw.map((char, i) => [char, win[i]])) -const toRaw = new Map(win.map((char, i) => [char, raw[i]])) - -module.exports = { - encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s), - decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s), -} diff --git a/lib/write-entry.js b/lib/write-entry.js deleted file mode 100644 index 7d2f3eb1..00000000 --- a/lib/write-entry.js +++ /dev/null @@ -1,546 +0,0 @@ -'use strict' -const { Minipass } = require('minipass') -const Pax = require('./pax.js') -const Header = require('./header.js') -const fs = require('fs') -const path = require('path') -const normPath = require('./normalize-windows-path.js') -const stripSlash = require('./strip-trailing-slashes.js') - -const prefixPath = (path, prefix) => { - if (!prefix) { - return normPath(path) - } - path = normPath(path).replace(/^\.(\/|$)/, '') - return stripSlash(prefix) + '/' + path -} - -const maxReadSize = 16 * 1024 * 1024 -const PROCESS = Symbol('process') -const FILE = Symbol('file') -const DIRECTORY = Symbol('directory') -const SYMLINK = Symbol('symlink') -const HARDLINK = Symbol('hardlink') -const HEADER = Symbol('header') -const READ = Symbol('read') -const LSTAT = Symbol('lstat') -const ONLSTAT = Symbol('onlstat') -const ONREAD = Symbol('onread') -const ONREADLINK = Symbol('onreadlink') -const OPENFILE = Symbol('openfile') -const ONOPENFILE = Symbol('onopenfile') -const CLOSE = Symbol('close') -const MODE = Symbol('mode') -const AWAITDRAIN = Symbol('awaitDrain') -const ONDRAIN = Symbol('ondrain') -const PREFIX = Symbol('prefix') -const HAD_ERROR = Symbol('hadError') -const warner = require('./warn-mixin.js') -const winchars = require('./winchars.js') -const stripAbsolutePath = require('./strip-absolute-path.js') - -const modeFix = require('./mode-fix.js') - -const WriteEntry = warner(class WriteEntry extends Minipass { - constructor (p, opt) { - opt = opt || {} - super(opt) - if (typeof p !== 'string') { - throw new TypeError('path is required') - } - this.path = normPath(p) - // suppress atime, ctime, uid, gid, uname, gname - this.portable = !!opt.portable - // until node has builtin pwnam functions, this'll have to do - this.myuid = process.getuid && process.getuid() || 0 - this.myuser = process.env.USER || '' - this.maxReadSize = opt.maxReadSize || maxReadSize - this.linkCache = opt.linkCache || new Map() - this.statCache = opt.statCache || new Map() - this.preservePaths = !!opt.preservePaths - this.cwd = normPath(opt.cwd || process.cwd()) - this.strict = !!opt.strict - this.noPax = !!opt.noPax - this.noMtime = !!opt.noMtime - this.mtime = opt.mtime || null - this.prefix = opt.prefix ? normPath(opt.prefix) : null - - this.fd = null - this.blockLen = null - this.blockRemain = null - this.buf = null - this.offset = null - this.length = null - this.pos = null - this.remain = null - - if (typeof opt.onwarn === 'function') { - this.on('warn', opt.onwarn) - } - - let pathWarn = false - if (!this.preservePaths) { - const [root, stripped] = stripAbsolutePath(this.path) - if (root) { - this.path = stripped - pathWarn = root - } - } - - this.win32 = !!opt.win32 || process.platform === 'win32' - if (this.win32) { - // force the \ to / normalization, since we might not *actually* - // be on windows, but want \ to be considered a path separator. 
- this.path = winchars.decode(this.path.replace(/\\/g, '/')) - p = p.replace(/\\/g, '/') - } - - this.absolute = normPath(opt.absolute || path.resolve(this.cwd, p)) - - if (this.path === '') { - this.path = './' - } - - if (pathWarn) { - this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { - entry: this, - path: pathWarn + this.path, - }) - } - - if (this.statCache.has(this.absolute)) { - this[ONLSTAT](this.statCache.get(this.absolute)) - } else { - this[LSTAT]() - } - } - - emit (ev, ...data) { - if (ev === 'error') { - this[HAD_ERROR] = true - } - return super.emit(ev, ...data) - } - - [LSTAT] () { - fs.lstat(this.absolute, (er, stat) => { - if (er) { - return this.emit('error', er) - } - this[ONLSTAT](stat) - }) - } - - [ONLSTAT] (stat) { - this.statCache.set(this.absolute, stat) - this.stat = stat - if (!stat.isFile()) { - stat.size = 0 - } - this.type = getType(stat) - this.emit('stat', stat) - this[PROCESS]() - } - - [PROCESS] () { - switch (this.type) { - case 'File': return this[FILE]() - case 'Directory': return this[DIRECTORY]() - case 'SymbolicLink': return this[SYMLINK]() - // unsupported types are ignored. - default: return this.end() - } - } - - [MODE] (mode) { - return modeFix(mode, this.type === 'Directory', this.portable) - } - - [PREFIX] (path) { - return prefixPath(path, this.prefix) - } - - [HEADER] () { - if (this.type === 'Directory' && this.portable) { - this.noMtime = true - } - - this.header = new Header({ - path: this[PREFIX](this.path), - // only apply the prefix to hard links. - linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) - : this.linkpath, - // only the permissions and setuid/setgid/sticky bitflags - // not the higher-order bits that specify file type - mode: this[MODE](this.stat.mode), - uid: this.portable ? null : this.stat.uid, - gid: this.portable ? null : this.stat.gid, - size: this.stat.size, - mtime: this.noMtime ? null : this.mtime || this.stat.mtime, - type: this.type, - uname: this.portable ? null : - this.stat.uid === this.myuid ? this.myuser : '', - atime: this.portable ? null : this.stat.atime, - ctime: this.portable ? null : this.stat.ctime, - }) - - if (this.header.encode() && !this.noPax) { - super.write(new Pax({ - atime: this.portable ? null : this.header.atime, - ctime: this.portable ? null : this.header.ctime, - gid: this.portable ? null : this.header.gid, - mtime: this.noMtime ? null : this.mtime || this.header.mtime, - path: this[PREFIX](this.path), - linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) - : this.linkpath, - size: this.header.size, - uid: this.portable ? null : this.header.uid, - uname: this.portable ? null : this.header.uname, - dev: this.portable ? null : this.stat.dev, - ino: this.portable ? null : this.stat.ino, - nlink: this.portable ? 
null : this.stat.nlink, - }).encode()) - } - super.write(this.header.block) - } - - [DIRECTORY] () { - if (this.path.slice(-1) !== '/') { - this.path += '/' - } - this.stat.size = 0 - this[HEADER]() - this.end() - } - - [SYMLINK] () { - fs.readlink(this.absolute, (er, linkpath) => { - if (er) { - return this.emit('error', er) - } - this[ONREADLINK](linkpath) - }) - } - - [ONREADLINK] (linkpath) { - this.linkpath = normPath(linkpath) - this[HEADER]() - this.end() - } - - [HARDLINK] (linkpath) { - this.type = 'Link' - this.linkpath = normPath(path.relative(this.cwd, linkpath)) - this.stat.size = 0 - this[HEADER]() - this.end() - } - - [FILE] () { - if (this.stat.nlink > 1) { - const linkKey = this.stat.dev + ':' + this.stat.ino - if (this.linkCache.has(linkKey)) { - const linkpath = this.linkCache.get(linkKey) - if (linkpath.indexOf(this.cwd) === 0) { - return this[HARDLINK](linkpath) - } - } - this.linkCache.set(linkKey, this.absolute) - } - - this[HEADER]() - if (this.stat.size === 0) { - return this.end() - } - - this[OPENFILE]() - } - - [OPENFILE] () { - fs.open(this.absolute, 'r', (er, fd) => { - if (er) { - return this.emit('error', er) - } - this[ONOPENFILE](fd) - }) - } - - [ONOPENFILE] (fd) { - this.fd = fd - if (this[HAD_ERROR]) { - return this[CLOSE]() - } - - this.blockLen = 512 * Math.ceil(this.stat.size / 512) - this.blockRemain = this.blockLen - const bufLen = Math.min(this.blockLen, this.maxReadSize) - this.buf = Buffer.allocUnsafe(bufLen) - this.offset = 0 - this.pos = 0 - this.remain = this.stat.size - this.length = this.buf.length - this[READ]() - } - - [READ] () { - const { fd, buf, offset, length, pos } = this - fs.read(fd, buf, offset, length, pos, (er, bytesRead) => { - if (er) { - // ignoring the error from close(2) is a bad practice, but at - // this point we already have an error, don't need another one - return this[CLOSE](() => this.emit('error', er)) - } - this[ONREAD](bytesRead) - }) - } - - [CLOSE] (cb) { - fs.close(this.fd, cb) - } - - [ONREAD] (bytesRead) { - if (bytesRead <= 0 && this.remain > 0) { - const er = new Error('encountered unexpected EOF') - er.path = this.absolute - er.syscall = 'read' - er.code = 'EOF' - return this[CLOSE](() => this.emit('error', er)) - } - - if (bytesRead > this.remain) { - const er = new Error('did not encounter expected EOF') - er.path = this.absolute - er.syscall = 'read' - er.code = 'EOF' - return this[CLOSE](() => this.emit('error', er)) - } - - // null out the rest of the buffer, if we could fit the block padding - // at the end of this loop, we've incremented bytesRead and this.remain - // to be incremented up to the blockRemain level, as if we had expected - // to get a null-padded file, and read it until the end. then we will - // decrement both remain and blockRemain by bytesRead, and know that we - // reached the expected EOF, without any null buffer to append. - if (bytesRead === this.remain) { - for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) { - this.buf[i + this.offset] = 0 - bytesRead++ - this.remain++ - } - } - - const writeBuf = this.offset === 0 && bytesRead === this.buf.length ? 
- this.buf : this.buf.slice(this.offset, this.offset + bytesRead) - - const flushed = this.write(writeBuf) - if (!flushed) { - this[AWAITDRAIN](() => this[ONDRAIN]()) - } else { - this[ONDRAIN]() - } - } - - [AWAITDRAIN] (cb) { - this.once('drain', cb) - } - - write (writeBuf) { - if (this.blockRemain < writeBuf.length) { - const er = new Error('writing more data than expected') - er.path = this.absolute - return this.emit('error', er) - } - this.remain -= writeBuf.length - this.blockRemain -= writeBuf.length - this.pos += writeBuf.length - this.offset += writeBuf.length - return super.write(writeBuf) - } - - [ONDRAIN] () { - if (!this.remain) { - if (this.blockRemain) { - super.write(Buffer.alloc(this.blockRemain)) - } - return this[CLOSE](er => er ? this.emit('error', er) : this.end()) - } - - if (this.offset >= this.length) { - // if we only have a smaller bit left to read, alloc a smaller buffer - // otherwise, keep it the same length it was before. - this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length)) - this.offset = 0 - } - this.length = this.buf.length - this.offset - this[READ]() - } -}) - -class WriteEntrySync extends WriteEntry { - [LSTAT] () { - this[ONLSTAT](fs.lstatSync(this.absolute)) - } - - [SYMLINK] () { - this[ONREADLINK](fs.readlinkSync(this.absolute)) - } - - [OPENFILE] () { - this[ONOPENFILE](fs.openSync(this.absolute, 'r')) - } - - [READ] () { - let threw = true - try { - const { fd, buf, offset, length, pos } = this - const bytesRead = fs.readSync(fd, buf, offset, length, pos) - this[ONREAD](bytesRead) - threw = false - } finally { - // ignoring the error from close(2) is a bad practice, but at - // this point we already have an error, don't need another one - if (threw) { - try { - this[CLOSE](() => {}) - } catch (er) {} - } - } - } - - [AWAITDRAIN] (cb) { - cb() - } - - [CLOSE] (cb) { - fs.closeSync(this.fd) - cb() - } -} - -const WriteEntryTar = warner(class WriteEntryTar extends Minipass { - constructor (readEntry, opt) { - opt = opt || {} - super(opt) - this.preservePaths = !!opt.preservePaths - this.portable = !!opt.portable - this.strict = !!opt.strict - this.noPax = !!opt.noPax - this.noMtime = !!opt.noMtime - - this.readEntry = readEntry - this.type = readEntry.type - if (this.type === 'Directory' && this.portable) { - this.noMtime = true - } - - this.prefix = opt.prefix || null - - this.path = normPath(readEntry.path) - this.mode = this[MODE](readEntry.mode) - this.uid = this.portable ? null : readEntry.uid - this.gid = this.portable ? null : readEntry.gid - this.uname = this.portable ? null : readEntry.uname - this.gname = this.portable ? null : readEntry.gname - this.size = readEntry.size - this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime - this.atime = this.portable ? null : readEntry.atime - this.ctime = this.portable ? null : readEntry.ctime - this.linkpath = normPath(readEntry.linkpath) - - if (typeof opt.onwarn === 'function') { - this.on('warn', opt.onwarn) - } - - let pathWarn = false - if (!this.preservePaths) { - const [root, stripped] = stripAbsolutePath(this.path) - if (root) { - this.path = stripped - pathWarn = root - } - } - - this.remain = readEntry.size - this.blockRemain = readEntry.startBlockSize - - this.header = new Header({ - path: this[PREFIX](this.path), - linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) - : this.linkpath, - // only the permissions and setuid/setgid/sticky bitflags - // not the higher-order bits that specify file type - mode: this.mode, - uid: this.portable ? 
null : this.uid, - gid: this.portable ? null : this.gid, - size: this.size, - mtime: this.noMtime ? null : this.mtime, - type: this.type, - uname: this.portable ? null : this.uname, - atime: this.portable ? null : this.atime, - ctime: this.portable ? null : this.ctime, - }) - - if (pathWarn) { - this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { - entry: this, - path: pathWarn + this.path, - }) - } - - if (this.header.encode() && !this.noPax) { - super.write(new Pax({ - atime: this.portable ? null : this.atime, - ctime: this.portable ? null : this.ctime, - gid: this.portable ? null : this.gid, - mtime: this.noMtime ? null : this.mtime, - path: this[PREFIX](this.path), - linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) - : this.linkpath, - size: this.size, - uid: this.portable ? null : this.uid, - uname: this.portable ? null : this.uname, - dev: this.portable ? null : this.readEntry.dev, - ino: this.portable ? null : this.readEntry.ino, - nlink: this.portable ? null : this.readEntry.nlink, - }).encode()) - } - - super.write(this.header.block) - readEntry.pipe(this) - } - - [PREFIX] (path) { - return prefixPath(path, this.prefix) - } - - [MODE] (mode) { - return modeFix(mode, this.type === 'Directory', this.portable) - } - - write (data) { - const writeLen = data.length - if (writeLen > this.blockRemain) { - throw new Error('writing more to entry than is appropriate') - } - this.blockRemain -= writeLen - return super.write(data) - } - - end () { - if (this.blockRemain) { - super.write(Buffer.alloc(this.blockRemain)) - } - return super.end() - } -}) - -WriteEntry.Sync = WriteEntrySync -WriteEntry.Tar = WriteEntryTar - -const getType = stat => - stat.isFile() ? 'File' - : stat.isDirectory() ? 'Directory' - : stat.isSymbolicLink() ? 'SymbolicLink' - : 'Unsupported' - -module.exports = WriteEntry diff --git a/map.js b/map.js index 1d7e33ae..b5f52c9b 100644 --- a/map.js +++ b/map.js @@ -1,9 +1,9 @@ -const { basename } = require('path') +import { basename } from 'path' const map = test => - test === 'index.js' || test === 'map.js' ? test - : test === 'unpack.js' ? ['lib/unpack.js', 'lib/mkdir.js'] + test === 'map.js' ? test + : test === 'unpack.js' ? ['src/unpack.ts', 'src/mkdir.ts'] : test === 'load-all.js' ? 
[] - : `lib/${test}` + : `src/${test.replace(/js$/, 'ts')}` -module.exports = test => map(basename(test)) +export default test => map(basename(test)) diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..fc866c54 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,4471 @@ +{ + "name": "tar", + "version": "7.4.3", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "tar", + "version": "7.4.3", + "license": "ISC", + "dependencies": { + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.0.1", + "mkdirp": "^3.0.1", + "yallist": "^5.0.0" + }, + "devDependencies": { + "chmodr": "^1.2.0", + "end-of-stream": "^1.4.3", + "events-to-array": "^2.0.3", + "mutate-fs": "^2.1.1", + "nock": "^13.5.4", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tap": "^18.7.2", + "tshy": "^1.13.1", + "typedoc": "^0.25.13" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@alcalzone/ansi-tokenize": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@alcalzone/ansi-tokenize/-/ansi-tokenize-0.1.3.tgz", + "integrity": "sha512-3yWxPTq3UQ/FY9p1ErPxIyfT64elWaMvM9lIHnaqpyft63tkxodF5aUElYHrdisWve5cETkh1+KBw1yJuW0aRw==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.2.1", + "is-fullwidth-code-point": "^4.0.0" + }, + "engines": { + "node": ">=14.13.1" + } + }, + "node_modules/@alcalzone/ansi-tokenize/node_modules/is-fullwidth-code-point": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz", + "integrity": "sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@base2/pretty-print-object": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@base2/pretty-print-object/-/pretty-print-object-1.0.1.tgz", + "integrity": "sha512-4iri8i1AqYHJE2DstZYkyEprg6Pq6sKx3xn5FpySk9sNhH7qN2LLlHJCfDTZRILNwQNPD7mATWM0TBui7uC1pA==", + "dev": true + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/fs-minipass": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", + "integrity": 
"sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", + "dependencies": { + "minipass": "^7.0.4" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@isaacs/ts-node-temp-fork-for-pr-2009": { + "version": "10.9.7", + "resolved": "https://registry.npmjs.org/@isaacs/ts-node-temp-fork-for-pr-2009/-/ts-node-temp-fork-for-pr-2009-10.9.7.tgz", + "integrity": "sha512-9f0bhUr9TnwwpgUhEpr3FjxSaH/OHaARkE2F9fM0lS4nIs2GNerrvGwQz493dk0JKlTaGYVrKbq36vA/whZ34g==", + "dev": true, + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node14": "*", + "@tsconfig/node16": "*", + "@tsconfig/node18": "*", + "@tsconfig/node20": "*", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=4.2" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/@isaacs/ts-node-temp-fork-for-pr-2009/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.15", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", + "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@npmcli/agent": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz", + "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==", + "dev": true, + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/fs": { + "version": "3.1.1", + 
"resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz", + "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==", + "dev": true, + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/git": { + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.7.tgz", + "integrity": "sha512-WaOVvto604d5IpdCRV2KjQu8PzkfE96d50CQGKgywXh2GxXmDeUO5EWcBC4V57uFyrNqx83+MewuJh3WTR3xPA==", + "dev": true, + "dependencies": { + "@npmcli/promise-spawn": "^7.0.0", + "lru-cache": "^10.0.1", + "npm-pick-manifest": "^9.0.0", + "proc-log": "^4.0.0", + "promise-inflight": "^1.0.1", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/git/node_modules/isexe": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", + "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", + "dev": true, + "engines": { + "node": ">=16" + } + }, + "node_modules/@npmcli/git/node_modules/which": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz", + "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", + "dev": true, + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/installed-package-contents": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.1.0.tgz", + "integrity": "sha512-c8UuGLeZpm69BryRykLuKRyKFZYJsZSCT4aVY5ds4omyZqJ172ApzgfKJ5eV/r3HgLdUYgFVe54KSFVjKoe27w==", + "dev": true, + "dependencies": { + "npm-bundled": "^3.0.0", + "npm-normalize-package-bin": "^3.0.0" + }, + "bin": { + "installed-package-contents": "bin/index.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/node-gyp": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-3.0.0.tgz", + "integrity": "sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/package-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.0.tgz", + "integrity": "sha512-qe/kiqqkW0AGtvBjL8TJKZk/eBBSpnJkUWvHdQ9jM2lKHXRYYJuyNpJPlJw3c8QjC2ow6NZYiLExhUaeJelbxQ==", + "dev": true, + "dependencies": { + "@npmcli/git": "^5.0.0", + "glob": "^10.2.2", + "hosted-git-info": "^7.0.0", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^6.0.0", + "proc-log": "^4.0.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/promise-spawn": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz", + "integrity": "sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==", + "dev": true, + "dependencies": { + "which": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/promise-spawn/node_modules/isexe": { + "version": "3.1.1", + "resolved": 
"https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", + "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", + "dev": true, + "engines": { + "node": ">=16" + } + }, + "node_modules/@npmcli/promise-spawn/node_modules/which": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz", + "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", + "dev": true, + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/redact": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-1.1.0.tgz", + "integrity": "sha512-PfnWuOkQgu7gCbnSsAisaX7hKOdZ4wSAhAzH3/ph5dSGau52kCRrMMGbiSQLwyTZpgldkZ49b0brkOr1AzGBHQ==", + "dev": true, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/run-script": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-7.0.4.tgz", + "integrity": "sha512-9ApYM/3+rBt9V80aYg6tZfzj3UWdiYyCt7gJUD1VJKvWF5nwKDSICXbYIQbspFTq6TOpbsEtIC0LArB8d9PFmg==", + "dev": true, + "dependencies": { + "@npmcli/node-gyp": "^3.0.0", + "@npmcli/package-json": "^5.0.0", + "@npmcli/promise-spawn": "^7.0.0", + "node-gyp": "^10.0.0", + "which": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/run-script/node_modules/isexe": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", + "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", + "dev": true, + "engines": { + "node": ">=16" + } + }, + "node_modules/@npmcli/run-script/node_modules/which": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz", + "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", + "dev": true, + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@sigstore/bundle": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.3.2.tgz", + "integrity": "sha512-wueKWDk70QixNLB363yHc2D2ItTgYiMTdPwK8D9dKQMR3ZQ0c35IxP5xnwQ8cNLoCgCRcHf14kE+CLIvNX1zmA==", + "dev": true, + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/core": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-1.1.0.tgz", + "integrity": "sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==", + "dev": true, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/protobuf-specs": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.3.2.tgz", + "integrity": "sha512-c6B0ehIWxMI8wiS/bj6rHMPqeFvngFV7cDU/MY+B16P9Z3Mp9k8L93eYZ7BYzSickzuqAQqAq0V956b3Ju6mLw==", + "dev": true, + 
"engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/sign": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.3.2.tgz", + "integrity": "sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==", + "dev": true, + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "make-fetch-happen": "^13.0.1", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/tuf": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-2.3.4.tgz", + "integrity": "sha512-44vtsveTPUpqhm9NCrbU8CWLe3Vck2HO1PNLw7RIajbB7xhtn5RBPm1VNSCMwqGYHhDsBJG8gDF0q4lgydsJvw==", + "dev": true, + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2", + "tuf-js": "^2.2.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/verify": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-1.2.1.tgz", + "integrity": "sha512-8iKx79/F73DKbGfRf7+t4dqrc0bRr0thdPrxAtCKWRm/F0tG71i6O1rvlnScncJLLBZHn3h8M3c1BSUAb9yu8g==", + "dev": true, + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.1.0", + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@tapjs/after": { + "version": "1.1.22", + "resolved": "https://registry.npmjs.org/@tapjs/after/-/after-1.1.22.tgz", + "integrity": "sha512-8Ui8dfTFgDS3ENfzKpsWGJw+v4LHXvifaSB79chQbucuggW+nM2zzWu7grw7mDUBBR3Mknk+qL4Nb1KrnZvfWQ==", + "dev": true, + "dependencies": { + "is-actual-promise": "^1.0.1" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/after-each": { + "version": "1.1.22", + "resolved": "https://registry.npmjs.org/@tapjs/after-each/-/after-each-1.1.22.tgz", + "integrity": "sha512-KKbCnMlOFspW6YoaFfzbU3kwwolF9DfP7ikGGMZItex/EB+OcLxoFV++DCWIDIl12mzQfYZMJ0wJXtHFc0ux0Q==", + "dev": true, + "dependencies": { + "function-loop": "^4.0.0" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/asserts": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@tapjs/asserts/-/asserts-1.2.0.tgz", + "integrity": "sha512-QTs1kALeJKrlX9Yns3f8/hfsWgf4mdFYPN3lQKxZ/3C/DkGnjlrpVd4I2fnTC7cgJ116kwEgwhxVJUpw9QPp9A==", + "dev": true, + "dependencies": { + "@tapjs/stack": "1.2.8", + "is-actual-promise": "^1.0.1", + "tcompare": "6.4.6", + "trivial-deferred": "^2.0.0" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/before": { + "version": "1.1.22", + "resolved": "https://registry.npmjs.org/@tapjs/before/-/before-1.1.22.tgz", + "integrity": "sha512-Uv2odGCtOgY/EevyDZv2rHbIbe9WGrouC6HI+lJv4whGUKgiIYTOjrssl4YxvqvnNWx289/6Tp4Kpu7EeXT7yA==", + "dev": true, + "dependencies": { + "is-actual-promise": "^1.0.1" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/before-each": { + "version": "1.1.22", + "resolved": "https://registry.npmjs.org/@tapjs/before-each/-/before-each-1.1.22.tgz", + 
"integrity": "sha512-uKKllHDvQgTXjAm+F+29Iqcb9Bzh5U6LH45m6v/zfKPm8UNnNpJ/XxFbbsFqi0EQX2czYH0ivHfyQwiO40R8lw==", + "dev": true, + "dependencies": { + "function-loop": "^4.0.0" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/config": { + "version": "2.4.19", + "resolved": "https://registry.npmjs.org/@tapjs/config/-/config-2.4.19.tgz", + "integrity": "sha512-8fkUnf2d3g9wbnfSirXI92bx4ZO5X37nqYVb5fua9VDC2MsTLAmd4JyDSNG1ngn8/nO5o8aFNEeUaePswGId4A==", + "dev": true, + "dependencies": { + "@tapjs/core": "1.5.4", + "@tapjs/test": "1.4.4", + "chalk": "^5.2.0", + "jackspeak": "^2.3.6", + "polite-json": "^4.0.1", + "tap-yaml": "2.2.2", + "walk-up-path": "^3.0.1" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4", + "@tapjs/test": "1.4.4" + } + }, + "node_modules/@tapjs/config/node_modules/jackspeak": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", + "integrity": "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==", + "dev": true, + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/@tapjs/core": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@tapjs/core/-/core-1.5.4.tgz", + "integrity": "sha512-kDgRxTkSRxfLbX5orDmizxuyFBLLC3Mu4mQ2dMzw/UMYkrN8jZbkKZqIR0BdXgxE+GqvVFqkYvFJImXJBygBKQ==", + "dev": true, + "dependencies": { + "@tapjs/processinfo": "^3.1.7", + "@tapjs/stack": "1.2.8", + "@tapjs/test": "1.4.4", + "async-hook-domain": "^4.0.1", + "diff": "^5.2.0", + "is-actual-promise": "^1.0.1", + "minipass": "^7.0.4", + "signal-exit": "4.1", + "tap-parser": "15.3.2", + "tap-yaml": "2.2.2", + "tcompare": "6.4.6", + "trivial-deferred": "^2.0.0" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + } + }, + "node_modules/@tapjs/error-serdes": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@tapjs/error-serdes/-/error-serdes-1.2.2.tgz", + "integrity": "sha512-RW2aU50JR7SSAlvoTyuwouXETLM9lP+7oZ5Z+dyKhNp8mkbbz4mXKcgd9SDHY5qTh6zvVN7OFK7ev7dYWXbrWw==", + "dev": true, + "dependencies": { + "minipass": "^7.0.4" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@tapjs/filter": { + "version": "1.2.22", + "resolved": "https://registry.npmjs.org/@tapjs/filter/-/filter-1.2.22.tgz", + "integrity": "sha512-qVWbsFem2R1htQVh0+4xWMPsDPpQ2NhA/6mnlg4ApzAFvaTr5T/zK72VpR+AqPaMcMgrp4a/m5DQ03dLFqckZQ==", + "dev": true, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/fixture": { + "version": "1.2.22", + "resolved": "https://registry.npmjs.org/@tapjs/fixture/-/fixture-1.2.22.tgz", + "integrity": "sha512-ZYjkRzLSwW+cOg2CbL3GrgjatKVXcEGLQa7vjfmYVxDrPHkK7tiu3lf1KU6pFxTyqTlMMRUfMehHQrH+JjDC7Q==", + "dev": true, + "dependencies": { + "mkdirp": "^3.0.0", + "rimraf": "^5.0.5" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || 
>=20" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/intercept": { + "version": "1.2.22", + "resolved": "https://registry.npmjs.org/@tapjs/intercept/-/intercept-1.2.22.tgz", + "integrity": "sha512-OiayUlV+0fxwGM3B7JyRSwryq2kRpuWiF+4wQCiufSbbF20H4uEIlkRq1YrfUlla4zWVvHeQOQlUoqb6fSEcSQ==", + "dev": true, + "dependencies": { + "@tapjs/after": "1.1.22", + "@tapjs/stack": "1.2.8" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/mock": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/@tapjs/mock/-/mock-1.3.4.tgz", + "integrity": "sha512-tEz5hIdJdAGzl+KxjZol4DD7cWAdYMmvLU/QCZ5BThAOJ+FUAOxtBFA31nd7IWkMseIqcbeeqLmeMtan6QlPKA==", + "dev": true, + "dependencies": { + "@tapjs/after": "1.1.22", + "@tapjs/stack": "1.2.8", + "resolve-import": "^1.4.5", + "walk-up-path": "^3.0.1" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/node-serialize": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/@tapjs/node-serialize/-/node-serialize-1.3.4.tgz", + "integrity": "sha512-OwnSWdNnukgIGBsgnPy1ZpBDxp274GwLx2Ag+CulhsQ+IF9rOCq5P0EQ2kbxhxRet1386kbNzgXgaEeXmDXlLQ==", + "dev": true, + "dependencies": { + "@tapjs/error-serdes": "1.2.2", + "@tapjs/stack": "1.2.8", + "tap-parser": "15.3.2" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/processinfo": { + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/@tapjs/processinfo/-/processinfo-3.1.8.tgz", + "integrity": "sha512-FIriEB+qqArPhmVYc1PZwRHD99myRdl7C9Oe/uts04Q2LOxQ5MEmqP9XOP8vVYzpDOYwmL8OmL6eOYt9eZlQKQ==", + "dev": true, + "dependencies": { + "pirates": "^4.0.5", + "process-on-spawn": "^1.0.0", + "signal-exit": "^4.0.2", + "uuid": "^8.3.2" + }, + "engines": { + "node": ">=16.17" + } + }, + "node_modules/@tapjs/reporter": { + "version": "1.3.20", + "resolved": "https://registry.npmjs.org/@tapjs/reporter/-/reporter-1.3.20.tgz", + "integrity": "sha512-OTZeTC1/dr69mtZlRulynFH7+b7/C45MwLdLqaeTTeW2saAtojDMt7K2J8c74JlOO5+EKl71rBxrdKS6VBFqLw==", + "dev": true, + "dependencies": { + "@tapjs/config": "2.4.19", + "@tapjs/stack": "1.2.8", + "chalk": "^5.2.0", + "ink": "^4.4.1", + "minipass": "^7.0.4", + "ms": "^2.1.3", + "patch-console": "^2.0.0", + "prismjs-terminal": "^1.2.3", + "react": "^18.2.0", + "string-length": "^6.0.0", + "tap-parser": "15.3.2", + "tap-yaml": "2.2.2", + "tcompare": "6.4.6" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/reporter/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/@tapjs/run": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@tapjs/run/-/run-1.5.4.tgz", + "integrity": 
"sha512-mwzU/KalqYOGZTTf7lPyfBdRDCoIgec69NXrq/+Le7PXYWKrRoYvIUoBGwgZYyjfiYshhnzb+ayZdtd76Lj0Kw==", + "dev": true, + "dependencies": { + "@tapjs/after": "1.1.22", + "@tapjs/before": "1.1.22", + "@tapjs/config": "2.4.19", + "@tapjs/processinfo": "^3.1.7", + "@tapjs/reporter": "1.3.20", + "@tapjs/spawn": "1.1.22", + "@tapjs/stdin": "1.1.22", + "@tapjs/test": "1.4.4", + "c8": "^8.0.1", + "chalk": "^5.3.0", + "chokidar": "^3.6.0", + "foreground-child": "^3.1.1", + "glob": "^10.3.10", + "minipass": "^7.0.4", + "mkdirp": "^3.0.1", + "opener": "^1.5.2", + "pacote": "^17.0.6", + "resolve-import": "^1.4.5", + "rimraf": "^5.0.5", + "semver": "^7.6.0", + "signal-exit": "^4.1.0", + "tap-parser": "15.3.2", + "tap-yaml": "2.2.2", + "tcompare": "6.4.6", + "trivial-deferred": "^2.0.0", + "which": "^4.0.0" + }, + "bin": { + "tap-run": "dist/esm/index.js" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/run/node_modules/isexe": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", + "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", + "dev": true, + "engines": { + "node": ">=16" + } + }, + "node_modules/@tapjs/run/node_modules/which": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz", + "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", + "dev": true, + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/@tapjs/snapshot": { + "version": "1.2.22", + "resolved": "https://registry.npmjs.org/@tapjs/snapshot/-/snapshot-1.2.22.tgz", + "integrity": "sha512-6nhNY6uFPnQEVQ8vuxV3rKiC7NXDY5k/Bv1bPatfo//6z1T41INfQbnfwQXoufaHveLPpGBTLwpOWjtFsUHgdg==", + "dev": true, + "dependencies": { + "is-actual-promise": "^1.0.1", + "tcompare": "6.4.6", + "trivial-deferred": "^2.0.0" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/spawn": { + "version": "1.1.22", + "resolved": "https://registry.npmjs.org/@tapjs/spawn/-/spawn-1.1.22.tgz", + "integrity": "sha512-/MbFSmSpvLA0N2rKd8rI0vMLYM+0E3OB+doj+YUZe5m3G0YCHTBzZrnFGLw7Am1VsaREy4fSgchNEdn1NyikcQ==", + "dev": true, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/stack": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@tapjs/stack/-/stack-1.2.8.tgz", + "integrity": "sha512-VC8h6U62ScerTKN+MYpRPiwH2bCL65S6v1wcj1hukE2hojLcRvVdET7S3ZtRfSj/eNWW/5OVfzTpHiGjEYD6Xg==", + "dev": true, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@tapjs/stdin": { + "version": "1.1.22", + "resolved": "https://registry.npmjs.org/@tapjs/stdin/-/stdin-1.1.22.tgz", + "integrity": "sha512-JUyzZHG01iM6uDfplVGRiK+OdNalwl5Okv+eljHBdZOA8kO3hHI6N9bkZa472/st4NBj0lcMMGb2IKGgIBBUQg==", + "dev": true, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "peerDependencies": { + "@tapjs/core": 
"1.5.4" + } + }, + "node_modules/@tapjs/test": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/@tapjs/test/-/test-1.4.4.tgz", + "integrity": "sha512-I0mzxs8+RUULd9g0R6+LXsLzkeqhu5jJPpA7w5BzTxA++jQ0ACjyHs1BBy1IhhP9DeZ5N2LPg+WxLs7Dijs9Uw==", + "dev": true, + "dependencies": { + "@isaacs/ts-node-temp-fork-for-pr-2009": "^10.9.5", + "@tapjs/after": "1.1.22", + "@tapjs/after-each": "1.1.22", + "@tapjs/asserts": "1.2.0", + "@tapjs/before": "1.1.22", + "@tapjs/before-each": "1.1.22", + "@tapjs/filter": "1.2.22", + "@tapjs/fixture": "1.2.22", + "@tapjs/intercept": "1.2.22", + "@tapjs/mock": "1.3.4", + "@tapjs/node-serialize": "1.3.4", + "@tapjs/snapshot": "1.2.22", + "@tapjs/spawn": "1.1.22", + "@tapjs/stdin": "1.1.22", + "@tapjs/typescript": "1.4.4", + "@tapjs/worker": "1.1.22", + "glob": "^10.3.10", + "jackspeak": "^2.3.6", + "mkdirp": "^3.0.0", + "resolve-import": "^1.4.5", + "rimraf": "^5.0.5", + "sync-content": "^1.0.1", + "tap-parser": "15.3.2", + "tshy": "^1.12.0", + "typescript": "5.2" + }, + "bin": { + "generate-tap-test-class": "scripts/build.mjs" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/test/node_modules/jackspeak": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", + "integrity": "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==", + "dev": true, + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/@tapjs/test/node_modules/typescript": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", + "integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/@tapjs/typescript": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/@tapjs/typescript/-/typescript-1.4.4.tgz", + "integrity": "sha512-Mf2vIK1yk5ipQRmuIznFtC8Iboti0p0D90ENDZdEx678h60vAVPh9vebVX+oQ0LccAHGyu/CiOSFL4Za8b5/Rg==", + "dev": true, + "dependencies": { + "@isaacs/ts-node-temp-fork-for-pr-2009": "^10.9.5" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tapjs/worker": { + "version": "1.1.22", + "resolved": "https://registry.npmjs.org/@tapjs/worker/-/worker-1.1.22.tgz", + "integrity": "sha512-1PO9Qstfevr4Wdh318eC3O1mytSyXT3q/K6EeivBhnuPeyHsy3QCAd1bfVD7gqzWNbJ/UzeGN3knfIi5qXifmA==", + "dev": true, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "peerDependencies": { + "@tapjs/core": "1.5.4" + } + }, + "node_modules/@tsconfig/node14": { + "version": "14.1.2", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-14.1.2.tgz", + "integrity": "sha512-1vncsbfCZ3TBLPxesRYz02Rn7SNJfbLoDVkcZ7F/ixOV6nwxwgdhD1mdPcc5YQ413qBJ8CvMxXMFfJ7oawjo7Q==", + "dev": true + }, + "node_modules/@tsconfig/node16": { + "version": "16.1.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-16.1.3.tgz", + "integrity": "sha512-9nTOUBn+EMKO6rtSZJk+DcqsfgtlERGT9XPJ5PRj/HNENPCBY1yu/JEj5wT6GLtbCLBO2k46SeXDaY0pjMqypw==", + "dev": true + }, + 
"node_modules/@tsconfig/node18": { + "version": "18.2.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node18/-/node18-18.2.4.tgz", + "integrity": "sha512-5xxU8vVs9/FNcvm3gE07fPbn9tl6tqGGWA9tSlwsUEkBxtRnTsNmwrV8gasZ9F/EobaSv9+nu8AxUKccw77JpQ==", + "dev": true + }, + "node_modules/@tsconfig/node20": { + "version": "20.1.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node20/-/node20-20.1.4.tgz", + "integrity": "sha512-sqgsT69YFeLWf5NtJ4Xq/xAF8p4ZQHlmGW74Nu2tD4+g5fAsposc4ZfaaPixVu4y01BEiDCWLRDCvDM5JOsRxg==", + "dev": true + }, + "node_modules/@tufjs/canonical-json": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-2.0.0.tgz", + "integrity": "sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA==", + "dev": true, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@tufjs/models": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-2.0.1.tgz", + "integrity": "sha512-92F7/SFyufn4DXsha9+QfKnN03JGqtMFMXgSHbZOo8JG59WkTni7UzAouNQDf7AuP9OAMxVOPQcqG3sB7w+kkg==", + "dev": true, + "dependencies": { + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.4" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", + "dev": true + }, + "node_modules/@types/node": { + "version": "20.14.5", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.5.tgz", + "integrity": "sha512-aoRR+fJkZT2l0aGOJhuA8frnCSoNX6W7U2mpNq63+BxBIj5BQFt8rHy627kijCmm63ijdSdwvGgpUsU6MBsZZA==", + "dev": true, + "peer": true, + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/abbrev": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-2.0.0.tgz", + "integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/acorn": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.0.tgz", + "integrity": "sha512-RTvkC4w+KNXrM39/lWCUaG0IbRkWdCv7W/IOW9oU6SawyxulvkQy5HQPVTKxEjczcUvapcrw3cFx/60VN/NRNw==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.3", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.3.tgz", + "integrity": "sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==", + "dev": true, + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/agent-base": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", + "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "dev": true, + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": 
true, + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/aggregate-error/node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-escapes": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-6.2.1.tgz", + "integrity": "sha512-4nJ3yixlEthEJ9Rk4vPcdBRkZvQZlYyu8j4/Mqz5sgIkddmEnH2Yj2ZrnP9S3tQOvSNRUIgVNF/1yPpRAGNRig==", + "dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/ansi-sequence-parser": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ansi-sequence-parser/-/ansi-sequence-parser-1.1.1.tgz", + "integrity": "sha512-vJXt3yiaUL4UU546s3rPXlsry/RnM730G1+HkpKE012AN0sx1eOrxSu95oKDIonskeLTijMgqWZ3uDEe3NFvyg==", + "dev": true + }, + "node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true + }, + "node_modules/async-hook-domain": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/async-hook-domain/-/async-hook-domain-4.0.1.tgz", + "integrity": "sha512-bSktexGodAjfHWIrSrrqxqWzf1hWBZBpmPNZv+TYUMyWa2eoefFc6q6H1+KtdHYSz35lrhWdmXt/XK9wNEZvww==", + "dev": true, + "engines": { + "node": ">=16" + } + }, + "node_modules/auto-bind": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/auto-bind/-/auto-bind-5.0.1.tgz", + "integrity": "sha512-ooviqdwwgfIfNmDwo94wlshcdzfO64XV0Cg6oDsDYBJfITDz1EngD2z7DkbvCWn+XIMsIqW27sEVF6qcpJrRcg==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + 
"node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/c8": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/c8/-/c8-8.0.1.tgz", + "integrity": "sha512-EINpopxZNH1mETuI0DzRA4MZpAUH+IFiRhnmFD3vFr3vdrgxqi3VfE3KL0AIL+zDq8rC9bZqwM/VDmmoe04y7w==", + "dev": true, + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@istanbuljs/schema": "^0.1.3", + "find-up": "^5.0.0", + "foreground-child": "^2.0.0", + "istanbul-lib-coverage": "^3.2.0", + "istanbul-lib-report": "^3.0.1", + "istanbul-reports": "^3.1.6", + "rimraf": "^3.0.2", + "test-exclude": "^6.0.0", + "v8-to-istanbul": "^9.0.0", + "yargs": "^17.7.2", + "yargs-parser": "^21.1.1" + }, + "bin": { + "c8": "bin/c8.js" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/c8/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/c8/node_modules/foreground-child": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", + "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/c8/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/c8/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + 
"node_modules/c8/node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/c8/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/cacache": { + "version": "18.0.3", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.3.tgz", + "integrity": "sha512-qXCd4rh6I07cnDqh8V48/94Tc/WSfj+o3Gn6NZ0aZovS255bUx8O13uKxRFd2eWG0xgsco7+YItQNPaa5E85hg==", + "dev": true, + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/chalk": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "dev": true, + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chmodr": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/chmodr/-/chmodr-1.2.0.tgz", + "integrity": "sha512-Y5uI7Iq/Az6HgJEL6pdw7THVd7jbVOTPwsmcPOBjQL8e3N+pz872kzK5QxYGEy21iRys+iHWV0UZQXDFJo1hyA==", + "dev": true + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chownr": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", + "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", + "engines": { + "node": ">=18" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "engines": { + "node": ">=8" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": 
"sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/cli-boxes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz", + "integrity": "sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-cursor": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-4.0.0.tgz", + "integrity": "sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==", + "dev": true, + "dependencies": { + "restore-cursor": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-3.1.0.tgz", + "integrity": "sha512-wfOBkjXteqSnI59oPcJkcPl/ZmwvMMOj340qUIY1SKZCv0B9Cf4D4fAucRkIKQmsIuYK3x1rrgU7MeGRruiuiA==", + "dev": true, + "dependencies": { + "slice-ansi": "^5.0.0", + "string-width": "^5.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate/node_modules/is-fullwidth-code-point": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz", + "integrity": "sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate/node_modules/slice-ansi": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-5.0.0.tgz", + "integrity": "sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.0.0", + "is-fullwidth-code-point": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/cliui/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + 
"funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/cliui/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/cliui/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/code-excerpt": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/code-excerpt/-/code-excerpt-4.0.0.tgz", + "integrity": "sha512-xxodCmBen3iy2i0WtAK8FlFNrRzjUqjRsMfho58xT/wvZU1YTM3fCnRjcy1gJPMepaRlgm/0e6w8SpWHpn3/cA==", + "dev": true, + "dependencies": { + "convert-to-spaces": "^2.0.1" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/convert-to-spaces": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/convert-to-spaces/-/convert-to-spaces-2.0.1.tgz", + "integrity": "sha512-rcQ1bsQO9799wq24uE5AM2tAILy4gXGIK/njFWcVQkGNZ96edlpY+A7bjwvzjYvLDyzmG1MmMLZhpcsb+klNMQ==", + "dev": true, + "engines": { + "node": 
"^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", + "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/diff": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + }, + "node_modules/encoding": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", + "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "dev": true, + "optional": true, + "dependencies": { + "iconv-lite": "^0.6.2" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dev": true, + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/err-code": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", + "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", + "dev": true + }, + "node_modules/escalade": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", + "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/events-to-array": { + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/events-to-array/-/events-to-array-2.0.3.tgz", + "integrity": "sha512-f/qE2gImHRa4Cp2y1stEOSgw8wTFyUdVJX7G//bMwbaV9JqISFxg99NbmVQeP7YLnDUZ2un851jlaDrlpmGehQ==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/exponential-backoff": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.1.tgz", + "integrity": "sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==", + "dev": true + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/foreground-child": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.2.1.tgz", + "integrity": "sha512-PXUUyLqrR2XCWICfv6ukppP96sdFwWbNEnfEMt7jNsISjMsvaLNinAHNDYyvkyU+SZG2BTSbT5NjG+vZslfGTA==", + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/fromentries": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz", + "integrity": "sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/fs-minipass": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.3.tgz", + "integrity": "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==", + "dev": true, + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function-loop": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/function-loop/-/function-loop-4.0.0.tgz", + "integrity": 
"sha512-f34iQBedYF3XcI93uewZZOnyscDragxgTK/eTvVB74k3fCD0ZorOi5BV9GS4M8rz/JoNi0Kl3qX5Y9MH3S/CLQ==", + "dev": true + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/glob": { + "version": "10.4.2", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.2.tgz", + "integrity": "sha512-GwMlUF6PkPo3Gk21UxkCohOv0PLcIXVtKyLlpEI28R/cO/4eNOdmLk3CMW1wROV/WR/EsZOWAfBbBOqYvs88/w==", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hosted-git-info": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz", + "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==", + "dev": true, + "dependencies": { + "lru-cache": "^10.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true + }, + "node_modules/http-cache-semantics": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", + "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", + "dev": true + }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "dev": true, + 
"dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", + "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "dev": true, + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "optional": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ignore-walk": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-6.0.5.tgz", + "integrity": "sha512-VuuG0wCnjhnylG1ABXT3dAuIpTNDs/G8jlpmwXY03fXoXy/8ZK8/T+hMzt8L4WnrLCJgdybqgPagnF/f97cg3A==", + "dev": true, + "dependencies": { + "minimatch": "^9.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz", + "integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/ink": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/ink/-/ink-4.4.1.tgz", + "integrity": "sha512-rXckvqPBB0Krifk5rn/5LvQGmyXwCUpBfmTwbkQNBY9JY8RSl3b8OftBNEYxg4+SWUhEKcPifgope28uL9inlA==", + "dev": true, + "dependencies": { + "@alcalzone/ansi-tokenize": "^0.1.3", + "ansi-escapes": "^6.0.0", + "auto-bind": "^5.0.1", + "chalk": "^5.2.0", + "cli-boxes": "^3.0.0", + "cli-cursor": "^4.0.0", + "cli-truncate": "^3.1.0", + "code-excerpt": "^4.0.0", + "indent-string": "^5.0.0", + "is-ci": "^3.0.1", + "is-lower-case": "^2.0.2", + "is-upper-case": "^2.0.2", + "lodash": "^4.17.21", + "patch-console": "^2.0.0", + "react-reconciler": "^0.29.0", + "scheduler": "^0.23.0", + "signal-exit": "^3.0.7", + "slice-ansi": "^6.0.0", + "stack-utils": "^2.0.6", + "string-width": "^5.1.2", + "type-fest": "^0.12.0", + "widest-line": "^4.0.1", + "wrap-ansi": "^8.1.0", + "ws": "^8.12.0", + "yoga-wasm-web": "~0.3.3" + }, + "engines": { + "node": ">=14.16" + }, + "peerDependencies": { + "@types/react": ">=18.0.0", + "react": ">=18.0.0", + "react-devtools-core": "^4.19.1" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "react-devtools-core": { + "optional": true + } + } + }, + "node_modules/ink/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/ip-address": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", + "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", + "dev": true, + "dependencies": { + "jsbn": "1.1.0", + "sprintf-js": "^1.1.3" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/is-actual-promise": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-actual-promise/-/is-actual-promise-1.0.2.tgz", + "integrity": "sha512-xsFiO1of0CLsQnPZ1iXHNTyR9YszOeWKYv+q6n8oSFW3ipooFJ1j1lbRMgiMCr+pp2gLruESI4zb5Ak6eK5OnQ==", + "dev": true + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-ci": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", + "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", + "dev": true, + "dependencies": { + "ci-info": "^3.2.0" + }, + "bin": { + "is-ci": "bin.js" + } + }, + "node_modules/is-core-module": { + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", + "integrity": 
"sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "dev": true, + "dependencies": { + "hasown": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-lambda": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", + "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==", + "dev": true + }, + "node_modules/is-lower-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-lower-case/-/is-lower-case-2.0.2.tgz", + "integrity": "sha512-bVcMJy4X5Og6VZfdOZstSexlEy20Sr0k/p/b2IlQJlfdKAQuMpiv5w2Ccxb8sKdRUNAG1PnHVHjFSdRDVS6NlQ==", + "dev": true, + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-object": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", + "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-upper-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-upper-case/-/is-upper-case-2.0.2.tgz", + "integrity": "sha512-44pxmxAvnnAOwBg4tHPnkfvgjPwbc5QIsSstNU+YcJ1ovxVzCWpSGosPJOZh/a1tdl81fbgnLc9LLv+x2ywbPQ==", + "dev": true, + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": 
"sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", + "dev": true, + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jackspeak": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.0.tgz", + "integrity": "sha512-JVYhQnN59LVPFCEcVa2C3CrEKYacvjRfqIQl+h8oi91aLYQVWRYbxjPcv1bUiUy/kLmQaANrYfNMCO3kuEDHfw==", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/jsbn": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz", + "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==", + "dev": true + }, + "node_modules/json-parse-even-better-errors": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz", + "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "dev": true + }, + "node_modules/jsonc-parser": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.1.tgz", + "integrity": "sha512-AilxAyFOAcK5wA1+LeaySVBrHsGQvUFCDWXKpZjzaL0PqW+xfBOttn8GNtWKFWqneyMZj41MWF9Kl6iPWLwgOA==", + "dev": true + }, + "node_modules/jsonparse": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", + "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==", + "dev": true, + "engines": [ + "node >= 0.2.0" + ] + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": 
"sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "dev": true, + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lru-cache": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", + "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", + "engines": { + "node": "14 || >=16.14" + } + }, + "node_modules/lunr": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz", + "integrity": "sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==", + "dev": true + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, + "node_modules/make-fetch-happen": { + "version": "13.0.1", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz", + "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==", + "dev": true, + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/marked": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-4.3.0.tgz", + "integrity": "sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A==", + "dev": true, + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": 
"https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/minipass-collect": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-2.0.1.tgz", + "integrity": "sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==", + "dev": true, + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/minipass-fetch": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.5.tgz", + "integrity": "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==", + "dev": true, + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, + "node_modules/minipass-fetch/node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minipass-fetch/node_modules/minizlib/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-fetch/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/minipass-flush": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", + "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minipass-flush/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-flush/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/minipass-json-stream": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minipass-json-stream/-/minipass-json-stream-1.0.1.tgz", + "integrity": "sha512-ODqY18UZt/I8k+b7rl2AENgbWE8IDYam+undIJONvigAz8KR5GWblsFTEfQs0WODsjbSXWlm+JHEv8Gr6Tfdbg==", + "dev": true, + "dependencies": { + "jsonparse": "^1.3.1", + "minipass": "^3.0.0" + } + 
}, + "node_modules/minipass-json-stream/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-json-stream/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/minipass-pipeline": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", + "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-pipeline/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-pipeline/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/minipass-sized": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", + "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-sized/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-sized/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/minizlib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.1.tgz", + "integrity": "sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg==", + "dependencies": { + "minipass": "^7.0.4", + "rimraf": "^5.0.5" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/mkdirp": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", + "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", + "bin": { + "mkdirp": "dist/cjs/src/bin.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/mutate-fs": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/mutate-fs/-/mutate-fs-2.1.1.tgz", + "integrity": "sha512-WI5pPPUNiWqaK2XdK94AVpxIc8GmZEXYlLfFbWuc4gUtBGHTK92jdPqFdx/lilxgb5Ep7tQ15NqCcJEOeq6wdA==", + "dev": true + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/nock": { + "version": "13.5.4", + "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.4.tgz", + "integrity": "sha512-yAyTfdeNJGGBFxWdzSKCBYxs5FxLbCg5X5Q4ets974hcQzG1+qCxvIyOo4j2Ry6MUlhWVMX4OoYDefAIIwupjw==", + "dev": true, + "dependencies": { + "debug": "^4.1.0", + "json-stringify-safe": "^5.0.1", + "propagate": "^2.0.0" + }, + "engines": { + "node": ">= 10.13" + } + }, + "node_modules/node-gyp": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-10.1.0.tgz", + "integrity": "sha512-B4J5M1cABxPc5PwfjhbV5hoy2DP9p8lFXASnEN6hugXOa61416tnTZ29x9sSwAd0o99XNIcpvDDy1swAExsVKA==", + "dev": true, + "dependencies": { + "env-paths": "^2.2.0", + "exponential-backoff": "^3.1.1", + "glob": "^10.3.10", + "graceful-fs": "^4.2.6", + "make-fetch-happen": "^13.0.0", + "nopt": "^7.0.0", + "proc-log": "^3.0.0", + "semver": "^7.3.5", + "tar": "^6.1.2", + "which": "^4.0.0" + }, + "bin": { + "node-gyp": "bin/node-gyp.js" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/node-gyp/node_modules/isexe": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", + "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", + "dev": true, + "engines": { + "node": ">=16" + } + }, + "node_modules/node-gyp/node_modules/proc-log": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-3.0.0.tgz", + "integrity": "sha512-++Vn7NS4Xf9NacaU9Xq3URUuqZETPsf8L4j5/ckhaRYsfPeRyzGw+iDjFhV/Jr3uNmTvvddEJFWh5R1gRgUH8A==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/node-gyp/node_modules/which": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz", + "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", + "dev": true, + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/nopt": { + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.1.tgz", + "integrity": "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==", + "dev": true, + "dependencies": { + "abbrev": "^2.0.0" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/normalize-package-data": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.1.tgz", + "integrity": "sha512-6rvCfeRW+OEZagAB4lMLSNuTNYZWLVtKccK79VSTf//yTY5VOCgcpH80O+bZK8Neps7pUnd5G+QlMg1yV/2iZQ==", + "dev": true, + "dependencies": { 
+ "hosted-git-info": "^7.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-bundled": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-3.0.1.tgz", + "integrity": "sha512-+AvaheE/ww1JEwRHOrn4WHNzOxGtVp+adrg2AeZS/7KuxGUYFuBta98wYpfHBbJp6Tg6j1NKSEVHNcfZzJHQwQ==", + "dev": true, + "dependencies": { + "npm-normalize-package-bin": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-install-checks": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-6.3.0.tgz", + "integrity": "sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==", + "dev": true, + "dependencies": { + "semver": "^7.1.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-normalize-package-bin": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz", + "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-package-arg": { + "version": "11.0.2", + "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.2.tgz", + "integrity": "sha512-IGN0IAwmhDJwy13Wc8k+4PEbTPhpJnMtfR53ZbOyjkvmEcLS4nCwp6mvMWjS5sUjeiW3mpx6cHmuhKEu9XmcQw==", + "dev": true, + "dependencies": { + "hosted-git-info": "^7.0.0", + "proc-log": "^4.0.0", + "semver": "^7.3.5", + "validate-npm-package-name": "^5.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm-packlist": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-8.0.2.tgz", + "integrity": "sha512-shYrPFIS/JLP4oQmAwDyk5HcyysKW8/JLTEA32S0Z5TzvpaeeX2yMFfoK1fjEBnCBvVyIB/Jj/GBFdm0wsgzbA==", + "dev": true, + "dependencies": { + "ignore-walk": "^6.0.4" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-pick-manifest": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-9.0.1.tgz", + "integrity": "sha512-Udm1f0l2nXb3wxDpKjfohwgdFUSV50UVwzEIpDXVsbDMXVIEF81a/i0UhuQbhrPMMmdiq3+YMFLFIRVLs3hxQw==", + "dev": true, + "dependencies": { + "npm-install-checks": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0", + "npm-package-arg": "^11.0.0", + "semver": "^7.3.5" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm-registry-fetch": { + "version": "16.2.1", + "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-16.2.1.tgz", + "integrity": "sha512-8l+7jxhim55S85fjiDGJ1rZXBWGtRLi1OSb4Z3BPLObPuIaeKRlPRiYMSHU4/81ck3t71Z+UwDDl47gcpmfQQA==", + "dev": true, + "dependencies": { + "@npmcli/redact": "^1.1.0", + "make-fetch-happen": "^13.0.0", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-json-stream": "^1.0.1", + "minizlib": "^2.1.2", + "npm-package-arg": "^11.0.0", + 
"proc-log": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm-registry-fetch/node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm-registry-fetch/node_modules/minizlib/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm-registry-fetch/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/opener": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz", + "integrity": "sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==", + "dev": true, + "bin": { + "opener": "bin/opener-bin.js" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-map": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dev": true, + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/package-json-from-dist": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz", + "integrity": "sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==" + }, + "node_modules/pacote": { + "version": "17.0.7", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-17.0.7.tgz", + "integrity": "sha512-sgvnoUMlkv9xHwDUKjKQFXVyUi8dtJGKp3vg6sYy+TxbDic5RjZCHF3ygv0EJgNRZ2GfRONjlKPUfokJ9lDpwQ==", + "dev": true, + "dependencies": { + "@npmcli/git": "^5.0.0", + "@npmcli/installed-package-contents": "^2.0.1", + "@npmcli/promise-spawn": "^7.0.0", + "@npmcli/run-script": "^7.0.0", + "cacache": "^18.0.0", + "fs-minipass": "^3.0.0", + "minipass": "^7.0.2", + "npm-package-arg": "^11.0.0", + "npm-packlist": "^8.0.0", + "npm-pick-manifest": "^9.0.0", + "npm-registry-fetch": "^16.0.0", + "proc-log": "^4.0.0", + "promise-retry": "^2.0.1", + "read-package-json": "^7.0.0", + "read-package-json-fast": "^3.0.0", + "sigstore": "^2.2.0", + "ssri": "^10.0.0", + "tar": "^6.1.11" + }, + "bin": { + "pacote": "lib/bin.js" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/patch-console": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/patch-console/-/patch-console-2.0.0.tgz", + "integrity": "sha512-0YNdUceMdaQwoKce1gatDScmMo5pu/tfABfnzEqeG0gtTmd7mh/WcwgUjtAeOU7N8nFFlbQBnFK2gXW5fGvmMA==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pirates": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", + "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/polite-json": { + 
"version": "4.0.1", + "resolved": "https://registry.npmjs.org/polite-json/-/polite-json-4.0.1.tgz", + "integrity": "sha512-8LI5ZeCPBEb4uBbcYKNVwk4jgqNx1yHReWoW4H4uUihWlSqZsUDfSITrRhjliuPgxsNPFhNSudGO2Zu4cbWinQ==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/prettier": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.2.tgz", + "integrity": "sha512-rAVeHYMcv8ATV5d508CFdn+8/pHPpXeIid1DdrPwXnaAdH7cqjVbpJaT5eq4yRAFU/lsbwYwSF/n5iNrdJHPQA==", + "dev": true, + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prismjs": { + "version": "1.29.0", + "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.29.0.tgz", + "integrity": "sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/prismjs-terminal": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/prismjs-terminal/-/prismjs-terminal-1.2.3.tgz", + "integrity": "sha512-xc0zuJ5FMqvW+DpiRkvxURlz98DdfDsZcFHdO699+oL+ykbFfgI7O4VDEgUyc07BSL2NHl3zdb8m/tZ/aaqUrw==", + "dev": true, + "dependencies": { + "chalk": "^5.2.0", + "prismjs": "^1.29.0", + "string-length": "^6.0.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/proc-log": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz", + "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/process-on-spawn": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.0.0.tgz", + "integrity": "sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==", + "dev": true, + "dependencies": { + "fromentries": "^1.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/promise-inflight": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", + "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==", + "dev": true + }, + "node_modules/promise-retry": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", + "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", + "dev": true, + "dependencies": { + "err-code": "^2.0.2", + "retry": "^0.12.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/propagate": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz", + "integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/react": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "dev": true, + "dependencies": { + "loose-envify": "^1.1.0" + }, 
+ "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "dev": true, + "peer": true, + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/react-element-to-jsx-string": { + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/react-element-to-jsx-string/-/react-element-to-jsx-string-15.0.0.tgz", + "integrity": "sha512-UDg4lXB6BzlobN60P8fHWVPX3Kyw8ORrTeBtClmIlGdkOOE+GYQSFvmEU5iLLpwp/6v42DINwNcwOhOLfQ//FQ==", + "dev": true, + "dependencies": { + "@base2/pretty-print-object": "1.0.1", + "is-plain-object": "5.0.0", + "react-is": "18.1.0" + }, + "peerDependencies": { + "react": "^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || ^18.0.0", + "react-dom": "^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || ^18.0.0" + } + }, + "node_modules/react-is": { + "version": "18.1.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.1.0.tgz", + "integrity": "sha512-Fl7FuabXsJnV5Q1qIOQwx/sagGF18kogb4gpfcG4gjLBWO0WDiiz1ko/ExayuxE7InyQkBLkxRFG5oxY6Uu3Kg==", + "dev": true + }, + "node_modules/react-reconciler": { + "version": "0.29.2", + "resolved": "https://registry.npmjs.org/react-reconciler/-/react-reconciler-0.29.2.tgz", + "integrity": "sha512-zZQqIiYgDCTP/f1N/mAR10nJGrPD2ZR+jDSEsKWJHYC7Cm2wodlwbR3upZRdC3cjIjSlTLNVyO7Iu0Yy7t2AYg==", + "dev": true, + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "engines": { + "node": ">=0.10.0" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/read-package-json": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-7.0.1.tgz", + "integrity": "sha512-8PcDiZ8DXUjLf687Ol4BR8Bpm2umR7vhoZOzNRt+uxD9GpBh/K+CAAALVIiYFknmvlmyg7hM7BSNUXPaCCqd0Q==", + "deprecated": "This package is no longer supported. 
Please use @npmcli/package-json instead.", + "dev": true, + "dependencies": { + "glob": "^10.2.2", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/read-package-json-fast": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-3.0.2.tgz", + "integrity": "sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==", + "dev": true, + "dependencies": { + "json-parse-even-better-errors": "^3.0.0", + "npm-normalize-package-bin": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve-import": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/resolve-import/-/resolve-import-1.4.5.tgz", + "integrity": "sha512-HXb4YqODuuXT7Icq1Z++0g2JmhgbUHSs3VT2xR83gqvAPUikYT2Xk+562KHQgiaNkbBOlPddYrDLsC44qQggzw==", + "dev": true, + "dependencies": { + "glob": "^10.3.3", + "walk-up-path": "^3.0.1" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/restore-cursor": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-4.0.0.tgz", + "integrity": "sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==", + "dev": true, + "dependencies": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/restore-cursor/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/retry": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", + "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/rimraf": { + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.7.tgz", + "integrity": "sha512-nV6YcJo5wbLW77m+8KjH8aB/7/rxQy9SZ0HY5shnwULfS+9nmTtVXAJET5NdZmCzA4fPI/Hm1wo/Po/4mopOdg==", + "dependencies": { + "glob": "^10.3.7" + }, + "bin": { + "rimraf": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, + "optional": true + }, + "node_modules/scheduler": { + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", + "dev": true, + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/semver": { + "version": "7.6.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", + "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "engines": { + "node": ">=8" + } + }, + "node_modules/shiki": { + "version": "0.14.7", + "resolved": "https://registry.npmjs.org/shiki/-/shiki-0.14.7.tgz", + "integrity": "sha512-dNPAPrxSc87ua2sKJ3H5dQ/6ZaY8RNnaAqK+t0eG7p0Soi2ydiqbGOTaZCqaYvA/uZYfS1LJnemt3Q+mSfcPCg==", + "dev": true, + "dependencies": { + "ansi-sequence-parser": "^1.1.0", + "jsonc-parser": "^3.2.0", + "vscode-oniguruma": "^1.7.0", + "vscode-textmate": "^8.0.0" + } + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/sigstore": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.3.1.tgz", + "integrity": "sha512-8G+/XDU8wNsJOQS5ysDVO0Etg9/2uA5gR9l4ZwijjlwxBcrU6RPfwi2+jJmbP+Ap1Hlp/nVAaEO4Fj22/SL2gQ==", + "dev": true, + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "@sigstore/sign": "^2.3.2", + "@sigstore/tuf": "^2.3.4", + "@sigstore/verify": "^1.2.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/slice-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-6.0.0.tgz", + "integrity": "sha512-6bn4hRfkTvDfUoEQYkERg0BVF1D0vrX9HEkMl08uDiNWvVvjylLHvZFZWkDo6wjT8tUctbYl1nCOuE66ZTaUtA==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.2.1", + "is-fullwidth-code-point": "^4.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/slice-ansi/node_modules/is-fullwidth-code-point": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz", + "integrity": 
"sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/smart-buffer": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", + "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "dev": true, + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks": { + "version": "2.8.3", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.3.tgz", + "integrity": "sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==", + "dev": true, + "dependencies": { + "ip-address": "^9.0.5", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks-proxy-agent": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.3.tgz", + "integrity": "sha512-VNegTZKhuGq5vSD6XNKlbqWhyt/40CgoEw8XxD6dhnm8Jq9IEa3nIa4HwnM8XOqU0CdB0BwWVXusqiFXfHB3+A==", + "dev": true, + "dependencies": { + "agent-base": "^7.1.1", + "debug": "^4.3.4", + "socks": "^2.7.1" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/spdx-correct": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", + "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", + "dev": true, + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", + "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", + "dev": true + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.18", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.18.tgz", + "integrity": "sha512-xxRs31BqRYHwiMzudOrpSiHtZ8i/GeionCBDSilhYRj+9gIcI8wCZTlXZKu9vZIVqViP3dcp9qE5G6AlIaD+TQ==", + "dev": true + }, + "node_modules/sprintf-js": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", + "dev": true + }, + "node_modules/ssri": { + "version": "10.0.6", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.6.tgz", + "integrity": "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==", + "dev": true, + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": 
"sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-length": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-6.0.0.tgz", + "integrity": "sha512-1U361pxZHEQ+FeSjzqRpV+cu2vTzYeWeafXFLykiFlv4Vc0n3njgU8HrMbyik5uwm77naWMuVG8fhEF+Ovb1Kg==", + "dev": true, + "dependencies": { + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/string-width-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": 
"sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/sync-content": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/sync-content/-/sync-content-1.0.2.tgz", + "integrity": "sha512-znd3rYiiSxU3WteWyS9a6FXkTA/Wjk8WQsOyzHbineeL837dLn3DA4MRhsIX3qGcxDMH6+uuFV4axztssk7wEQ==", + "dev": true, + "dependencies": { + "glob": "^10.2.6", + "mkdirp": "^3.0.1", + "path-scurry": "^1.9.2", + "rimraf": "^5.0.1" + }, + "bin": { + "sync-content": "dist/mjs/bin.mjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/tap": { + "version": "18.8.0", + "resolved": "https://registry.npmjs.org/tap/-/tap-18.8.0.tgz", + "integrity": "sha512-tX02yXmzBcemYfNGKtTJFf3cn7e8VgBvxKswaew8YnrE+1cUZtxyN0GhMzPQ5cWznVz47DfgcuYR1QtCr+4LOw==", + "dev": true, + "dependencies": { + "@tapjs/after": "1.1.22", + "@tapjs/after-each": "1.1.22", + "@tapjs/asserts": "1.2.0", + "@tapjs/before": "1.1.22", + "@tapjs/before-each": "1.1.22", + "@tapjs/core": "1.5.4", + "@tapjs/filter": "1.2.22", + "@tapjs/fixture": "1.2.22", + "@tapjs/intercept": "1.2.22", + "@tapjs/mock": "1.3.4", + "@tapjs/node-serialize": "1.3.4", + "@tapjs/run": "1.5.4", + "@tapjs/snapshot": "1.2.22", + "@tapjs/spawn": "1.1.22", + "@tapjs/stdin": "1.1.22", + "@tapjs/test": "1.4.4", + "@tapjs/typescript": "1.4.4", + "@tapjs/worker": "1.1.22", + "resolve-import": "^1.4.5" + }, + "bin": { + "tap": "dist/esm/run.mjs" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/tap-parser": { + "version": "15.3.2", + "resolved": "https://registry.npmjs.org/tap-parser/-/tap-parser-15.3.2.tgz", + "integrity": "sha512-uvauHuQqAMwfeFVxNpFXhvnWLVL0sthnHk4TxRM3cUy6+dejO9fatoKR7YejbMu4+2/1nR6UQE9+eUcX3PUmsA==", + "dev": true, + "dependencies": { + "events-to-array": "^2.0.3", + "tap-yaml": "2.2.2" + }, + "bin": { + "tap-parser": "bin/cmd.cjs" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + } + }, + "node_modules/tap-yaml": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/tap-yaml/-/tap-yaml-2.2.2.tgz", + "integrity": "sha512-MWG4OpAKtNoNVjCz/BqlDJiwTM99tiHRhHPS4iGOe1ZS0CgM4jSFH92lthSFvvy4EdDjQZDV7uYqUFlU9JuNhw==", + "dev": true, + "dependencies": { + "yaml": "^2.4.1", + "yaml-types": "^0.3.0" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + } + }, + "node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "dev": true, + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tar/node_modules/chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": 
"sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/tar/node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/tar/node_modules/fs-minipass/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tar/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/tar/node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/tar/node_modules/minizlib/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tar/node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tar/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/tcompare": { + "version": "6.4.6", + "resolved": "https://registry.npmjs.org/tcompare/-/tcompare-6.4.6.tgz", + "integrity": "sha512-sxvgCgO2GAIWHibnK4zLvvi9GHd/ZlR9DOUJ4ufwvNtkdKE2I9MNwJUwzYvOmGrJXMcfhhw0CDBb+6j0ia+I7A==", + "dev": true, + "dependencies": { + "diff": "^5.2.0", + "react-element-to-jsx-string": "^15.0.0" + }, + "engines": { + "node": "16 >=16.17.0 || 18 >= 18.6.0 || >=20" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/test-exclude/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/test-exclude/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/test-exclude/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/trivial-deferred": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/trivial-deferred/-/trivial-deferred-2.0.0.tgz", + "integrity": "sha512-iGbM7X2slv9ORDVj2y2FFUq3cP/ypbtu2nQ8S38ufjL0glBABvmR9pTdsib1XtS2LUhhLMbelaBUaf/s5J3dSw==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/tshy": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/tshy/-/tshy-1.15.1.tgz", + "integrity": "sha512-7p30vmXaNX7OL1yLy/MYUtO0SJOm9fQSnzk3DXaM+LmQosooCB4elVeHAGIIZdABhL2E8dx5t/5msR5lh0xnaQ==", + "dev": true, + "dependencies": { + "chalk": "^5.3.0", + "chokidar": "^3.6.0", + "foreground-child": "^3.1.1", + "minimatch": "^9.0.4", + "mkdirp": "^3.0.1", + "polite-json": "^4.0.1", + "resolve-import": "^1.4.5", + "rimraf": "^5.0.1", + "sync-content": "^1.0.2", + "typescript": "^5.4.5", + "walk-up-path": "^3.0.1" + }, + "bin": { + "tshy": "dist/esm/index.js" + }, + "engines": { + "node": "16 >=16.17 || 18 >=18.15.0 || >=20.6.1" + } + }, + "node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", + "dev": true + }, + "node_modules/tuf-js": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-2.2.1.tgz", + "integrity": "sha512-GwIJau9XaA8nLVbUXsN3IlFi7WmQ48gBUrl3FTkkL/XLu/POhBzfmX9hd33FNMX1qAsfl6ozO1iMmW9NC8YniA==", + "dev": true, + "dependencies": { + "@tufjs/models": "2.0.1", + "debug": "^4.3.4", + "make-fetch-happen": "^13.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/type-fest": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.12.0.tgz", + "integrity": "sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg==", + 
"dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typedoc": { + "version": "0.25.13", + "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.25.13.tgz", + "integrity": "sha512-pQqiwiJ+Z4pigfOnnysObszLiU3mVLWAExSPf+Mu06G/qsc3wzbuM56SZQvONhHLncLUhYzOVkjFFpFfL5AzhQ==", + "dev": true, + "dependencies": { + "lunr": "^2.3.9", + "marked": "^4.3.0", + "minimatch": "^9.0.3", + "shiki": "^0.14.7" + }, + "bin": { + "typedoc": "bin/typedoc" + }, + "engines": { + "node": ">= 16" + }, + "peerDependencies": { + "typescript": "4.6.x || 4.7.x || 4.8.x || 4.9.x || 5.0.x || 5.1.x || 5.2.x || 5.3.x || 5.4.x" + } + }, + "node_modules/typescript": { + "version": "5.4.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", + "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true, + "peer": true + }, + "node_modules/unique-filename": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", + "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==", + "dev": true, + "dependencies": { + "unique-slug": "^4.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/unique-slug": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz", + "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true + }, + "node_modules/v8-to-istanbul": { + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.2.0.tgz", + "integrity": "sha512-/EH/sDgxU2eGxajKdwLCDmQ4FWq+kpi3uCmBGpw1xJtnAxEjlD8j8PEiGWpCIMIs3ciNAgH0d3TTJiUkYzyZjA==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/v8-to-istanbul/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "dependencies": { + 
"@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/validate-npm-package-name": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz", + "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/vscode-oniguruma": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/vscode-oniguruma/-/vscode-oniguruma-1.7.0.tgz", + "integrity": "sha512-L9WMGRfrjOhgHSdOYgCt/yRMsXzLDJSL7BPrOZt73gU0iWO4mpqzqQzOz5srxqTvMBaR0XZTSrVWo4j55Rc6cA==", + "dev": true + }, + "node_modules/vscode-textmate": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/vscode-textmate/-/vscode-textmate-8.0.0.tgz", + "integrity": "sha512-AFbieoL7a5LMqcnOF04ji+rpXadgOXnZsxQr//r83kLPr7biP7am3g9zbaZIaBGwBRWeSvoMD4mgPdX3e4NWBg==", + "dev": true + }, + "node_modules/walk-up-path": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/walk-up-path/-/walk-up-path-3.0.1.tgz", + "integrity": "sha512-9YlCL/ynK3CTlrSRrDxZvUauLzAswPCrsaCgilqFevUYpeEW0/3ScEjaa3kbW/T0ghhkEr7mv+fpjqn1Y1YuTA==", + "dev": true + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/widest-line": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz", + "integrity": "sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==", + "dev": true, + "dependencies": { + "string-width": "^5.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { + 
"version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/ws": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", + "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", + "dev": true, + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", + "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", + "engines": { + "node": ">=18" + } + }, + "node_modules/yaml": { + "version": "2.4.5", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.5.tgz", + "integrity": "sha512-aBx2bnqDzVOyNKfsysjA2ms5ZlnjSAW2eG3/L5G/CSujfjLJTJsEw1bGw8kCf04KodQWk1pxlGnZ56CRxiawmg==", + "dev": true, + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/yaml-types": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/yaml-types/-/yaml-types-0.3.0.tgz", + "integrity": 
"sha512-i9RxAO/LZBiE0NJUy9pbN5jFz5EasYDImzRkj8Y81kkInTi1laia3P3K/wlMKzOxFQutZip8TejvQP/DwgbU7A==", + "dev": true, + "engines": { + "node": ">= 16", + "npm": ">= 7" + }, + "peerDependencies": { + "yaml": "^2.3.0" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/yargs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yoga-wasm-web": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/yoga-wasm-web/-/yoga-wasm-web-0.3.3.tgz", + "integrity": "sha512-N+d4UJSJbt/R3wqY7Coqs5pcV0aUj2j9IaQ3rNj9bVCLld8tTGKRa2USARjnvZJWVx1NDmQev8EknoczaOQDOA==", + "dev": true + } + } +} diff --git a/package.json b/package.json index f84a41cc..0283103e 100644 --- a/package.json +++ b/package.json @@ -1,8 +1,8 @@ { - "author": "GitHub Inc.", + "author": "Isaac Z. 
Schlueter", "name": "tar", "description": "tar for node", - "version": "6.2.1", + "version": "7.4.3", "repository": { "type": "git", "url": "https://github.com/isaacs/node-tar.git" @@ -10,61 +10,316 @@ "scripts": { "genparse": "node scripts/generate-parse-fixtures.js", "snap": "tap", - "test": "tap" + "test": "tap", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "tshy", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . --log-level warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" }, "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.0.1", + "mkdirp": "^3.0.1", + "yallist": "^5.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.0", "chmodr": "^1.2.0", "end-of-stream": "^1.4.3", "events-to-array": "^2.0.3", "mutate-fs": "^2.1.1", - "nock": "^13.2.9", - "rimraf": "^3.0.2", - "tap": "^16.0.1" + "nock": "^13.5.4", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tap": "^18.7.2", + "tshy": "^1.13.1", + "typedoc": "^0.25.13" }, "license": "ISC", "engines": { - "node": ">=10" + "node": ">=18" }, "files": [ - "bin/", - "lib/", - "index.js" + "dist" ], "tap": { "coverage-map": "map.js", "timeout": 0, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] + "typecheck": true }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.0", - "content": "scripts/template-oss", - "engines": ">=10", - "distPaths": [ - "index.js" - ], - "allowPaths": [ - "/index.js" - ], - "ciVersions": [ - "10.x", - "12.x", - "14.x", - "16.x", - "18.x" - ] - } + "prettier": { + "experimentalTernaries": true, + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts", + "./c": "./src/create.ts", + "./create": "./src/create.ts", + "./replace": "./src/create.ts", + "./r": "./src/create.ts", + "./list": "./src/list.ts", + "./t": "./src/list.ts", + "./update": "./src/update.ts", + "./u": "./src/update.ts", + "./extract": "./src/extract.ts", + "./x": "./src/extract.ts", + "./pack": "./src/pack.ts", + "./unpack": "./src/unpack.ts", + "./parse": "./src/parse.ts", + "./read-entry": "./src/read-entry.ts", + "./write-entry": "./src/write-entry.ts", + "./header": "./src/header.ts", + "./pax": "./src/pax.ts", + "./types": "./src/types.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "source": "./src/index.ts", + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "source": "./src/index.ts", + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + }, + "./c": { + "import": { + "source": "./src/create.ts", + "types": "./dist/esm/create.d.ts", + "default": "./dist/esm/create.js" + }, + "require": { + "source": "./src/create.ts", + "types": "./dist/commonjs/create.d.ts", + "default": "./dist/commonjs/create.js" + } + }, + "./create": { + "import": { + "source": "./src/create.ts", + "types": 
"./dist/esm/create.d.ts", + "default": "./dist/esm/create.js" + }, + "require": { + "source": "./src/create.ts", + "types": "./dist/commonjs/create.d.ts", + "default": "./dist/commonjs/create.js" + } + }, + "./replace": { + "import": { + "source": "./src/create.ts", + "types": "./dist/esm/create.d.ts", + "default": "./dist/esm/create.js" + }, + "require": { + "source": "./src/create.ts", + "types": "./dist/commonjs/create.d.ts", + "default": "./dist/commonjs/create.js" + } + }, + "./r": { + "import": { + "source": "./src/create.ts", + "types": "./dist/esm/create.d.ts", + "default": "./dist/esm/create.js" + }, + "require": { + "source": "./src/create.ts", + "types": "./dist/commonjs/create.d.ts", + "default": "./dist/commonjs/create.js" + } + }, + "./list": { + "import": { + "source": "./src/list.ts", + "types": "./dist/esm/list.d.ts", + "default": "./dist/esm/list.js" + }, + "require": { + "source": "./src/list.ts", + "types": "./dist/commonjs/list.d.ts", + "default": "./dist/commonjs/list.js" + } + }, + "./t": { + "import": { + "source": "./src/list.ts", + "types": "./dist/esm/list.d.ts", + "default": "./dist/esm/list.js" + }, + "require": { + "source": "./src/list.ts", + "types": "./dist/commonjs/list.d.ts", + "default": "./dist/commonjs/list.js" + } + }, + "./update": { + "import": { + "source": "./src/update.ts", + "types": "./dist/esm/update.d.ts", + "default": "./dist/esm/update.js" + }, + "require": { + "source": "./src/update.ts", + "types": "./dist/commonjs/update.d.ts", + "default": "./dist/commonjs/update.js" + } + }, + "./u": { + "import": { + "source": "./src/update.ts", + "types": "./dist/esm/update.d.ts", + "default": "./dist/esm/update.js" + }, + "require": { + "source": "./src/update.ts", + "types": "./dist/commonjs/update.d.ts", + "default": "./dist/commonjs/update.js" + } + }, + "./extract": { + "import": { + "source": "./src/extract.ts", + "types": "./dist/esm/extract.d.ts", + "default": "./dist/esm/extract.js" + }, + "require": { + "source": "./src/extract.ts", + "types": "./dist/commonjs/extract.d.ts", + "default": "./dist/commonjs/extract.js" + } + }, + "./x": { + "import": { + "source": "./src/extract.ts", + "types": "./dist/esm/extract.d.ts", + "default": "./dist/esm/extract.js" + }, + "require": { + "source": "./src/extract.ts", + "types": "./dist/commonjs/extract.d.ts", + "default": "./dist/commonjs/extract.js" + } + }, + "./pack": { + "import": { + "source": "./src/pack.ts", + "types": "./dist/esm/pack.d.ts", + "default": "./dist/esm/pack.js" + }, + "require": { + "source": "./src/pack.ts", + "types": "./dist/commonjs/pack.d.ts", + "default": "./dist/commonjs/pack.js" + } + }, + "./unpack": { + "import": { + "source": "./src/unpack.ts", + "types": "./dist/esm/unpack.d.ts", + "default": "./dist/esm/unpack.js" + }, + "require": { + "source": "./src/unpack.ts", + "types": "./dist/commonjs/unpack.d.ts", + "default": "./dist/commonjs/unpack.js" + } + }, + "./parse": { + "import": { + "source": "./src/parse.ts", + "types": "./dist/esm/parse.d.ts", + "default": "./dist/esm/parse.js" + }, + "require": { + "source": "./src/parse.ts", + "types": "./dist/commonjs/parse.d.ts", + "default": "./dist/commonjs/parse.js" + } + }, + "./read-entry": { + "import": { + "source": "./src/read-entry.ts", + "types": "./dist/esm/read-entry.d.ts", + "default": "./dist/esm/read-entry.js" + }, + "require": { + "source": "./src/read-entry.ts", + "types": "./dist/commonjs/read-entry.d.ts", + "default": "./dist/commonjs/read-entry.js" + } + }, + "./write-entry": { + "import": { + "source": 
"./src/write-entry.ts", + "types": "./dist/esm/write-entry.d.ts", + "default": "./dist/esm/write-entry.js" + }, + "require": { + "source": "./src/write-entry.ts", + "types": "./dist/commonjs/write-entry.d.ts", + "default": "./dist/commonjs/write-entry.js" + } + }, + "./header": { + "import": { + "source": "./src/header.ts", + "types": "./dist/esm/header.d.ts", + "default": "./dist/esm/header.js" + }, + "require": { + "source": "./src/header.ts", + "types": "./dist/commonjs/header.d.ts", + "default": "./dist/commonjs/header.js" + } + }, + "./pax": { + "import": { + "source": "./src/pax.ts", + "types": "./dist/esm/pax.d.ts", + "default": "./dist/esm/pax.js" + }, + "require": { + "source": "./src/pax.ts", + "types": "./dist/commonjs/pax.d.ts", + "default": "./dist/commonjs/pax.js" + } + }, + "./types": { + "import": { + "source": "./src/types.ts", + "types": "./dist/esm/types.d.ts", + "default": "./dist/esm/types.js" + }, + "require": { + "source": "./src/types.ts", + "types": "./dist/commonjs/types.d.ts", + "default": "./dist/commonjs/types.js" + } + } + }, + "type": "module", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts" } diff --git a/scripts/generate-parse-fixtures.js b/scripts/generate-parse-fixtures.js index b46e8bd7..b81f5265 100644 --- a/scripts/generate-parse-fixtures.js +++ b/scripts/generate-parse-fixtures.js @@ -1,9 +1,12 @@ -'use strict' -const Parse = require('../lib/parse.js') -const fs = require('fs') -const path = require('path') -const tardir = path.resolve(__dirname, '../test/fixtures/tars') -const parsedir = path.resolve(__dirname, '../test/fixtures/parse') +import { Parser } from '../dist/esm/parse.js' +import fs from 'fs' +import path, { dirname, resolve } from 'path' +import { fileURLToPath } from 'url' +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) + +const tardir = resolve(__dirname, '../test/fixtures/tars') +const parsedir = resolve(__dirname, '../test/fixtures/parse') const maxMetaOpt = [250, null] const filterOpt = [true, false] const strictOpt = [true, false] @@ -14,67 +17,78 @@ const makeTest = (tarfile, tardata, maxMeta, filter, strict) => { (filter ? '-filter' : '') + (strict ? '-strict' : '') const tail = (o ? '-' + o : '') + '.json' - const eventsfile = parsedir + '/' + path.basename(tarfile, '.tar') + tail + const eventsfile = + parsedir + '/' + path.basename(tarfile, '.tar') + tail - const p = new Parse({ + const p = new Parser({ maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, + filter: filter ? 
(_path, entry) => entry.size % 2 !== 0 : null, strict: strict, }) const events = [] const pushEntry = type => entry => { - events.push([type, { - extended: entry.extended, - globalExtended: entry.globalExtended, - type: entry.type, - meta: entry.meta, - ignore: entry.ignore, - path: entry.path, - mode: entry.mode, - uid: entry.uid, - gid: entry.gid, - uname: entry.uname, - gname: entry.gname, - size: entry.size, - mtime: entry.mtime, - atime: entry.atime, - ctime: entry.ctime, - linkpath: entry.linkpath, - header: { - cksumValid: entry.header.cksumValid, - needPax: entry.header.needPax, - path: entry.header.path, - mode: entry.header.mode, - uid: entry.header.uid, - gid: entry.header.gid, - size: entry.header.size, - mtime: entry.header.mtime, - cksum: entry.header.cksum, - linkpath: entry.header.linkpath, - ustar: entry.header.ustar, - ustarver: entry.header.ustarver, - uname: entry.header.uname, - gname: entry.header.gname, - devmaj: entry.header.devmaj, - devmin: entry.header.devmin, - ustarPrefix: entry.header.ustarPrefix, - xstarPrefix: entry.header.xstarPrefix, - prefixTerminator: entry.header.prefixTerminator, - atime: entry.header.atime, - ctime: entry.header.atime, + events.push([ + type, + { + extended: entry.extended, + globalExtended: entry.globalExtended, + type: entry.type, + meta: entry.meta, + ignore: entry.ignore, + path: entry.path, + mode: entry.mode, + uid: entry.uid, + gid: entry.gid, + uname: entry.uname, + gname: entry.gname, + size: entry.size, + mtime: entry.mtime, + atime: entry.atime, + ctime: entry.ctime, + linkpath: entry.linkpath, + header: { + cksumValid: entry.header.cksumValid, + needPax: entry.header.needPax, + path: entry.header.path, + mode: entry.header.mode, + uid: entry.header.uid, + gid: entry.header.gid, + size: entry.header.size, + mtime: entry.header.mtime, + cksum: entry.header.cksum, + linkpath: entry.header.linkpath, + ustar: entry.header.ustar, + ustarver: entry.header.ustarver, + uname: entry.header.uname, + gname: entry.header.gname, + devmaj: entry.header.devmaj, + devmin: entry.header.devmin, + ustarPrefix: entry.header.ustarPrefix, + xstarPrefix: entry.header.xstarPrefix, + prefixTerminator: entry.header.prefixTerminator, + atime: entry.header.atime, + ctime: entry.header.atime, + }, }, - }]) + ]) entry.resume() } p.on('entry', pushEntry('entry')) p.on('ignoredEntry', pushEntry('ignoredEntry')) - p.on('warn', (code, message, data) => events.push(['warn', code, message])) - p.on('error', er => events.push(['error', { - message: er.message, - code: er.code, - }])) + p.on('warn', (code, message, _data) => + events.push(['warn', code, message]), + ) + p.on('error', er => + events.push([ + 'error', + { + message: er.message, + code: er.code, + }, + ]), + ) p.on('end', _ => events.push(['end'])) p.on('nullBlock', _ => events.push(['nullBlock'])) p.on('eof', _ => events.push(['eof'])) @@ -85,12 +99,14 @@ const makeTest = (tarfile, tardata, maxMeta, filter, strict) => { fs.writeFileSync(eventsfile, JSON.stringify(events, null, 2) + '\n') } -fs.readdirSync(tardir) - .forEach(tar => { - const tarfile = tardir + '/' + tar - const tardata = fs.readFileSync(tarfile) - maxMetaOpt.forEach(maxMeta => - filterOpt.forEach(filter => - strictOpt.forEach(strict => - makeTest(tarfile, tardata, maxMeta, filter, strict)))) - }) +fs.readdirSync(tardir).forEach(tar => { + const tarfile = tardir + '/' + tar + const tardata = fs.readFileSync(tarfile) + maxMetaOpt.forEach(maxMeta => + filterOpt.forEach(filter => + strictOpt.forEach(strict => + makeTest(tarfile, 
tardata, maxMeta, filter, strict), + ), + ), + ) +}) diff --git a/scripts/template-oss/_step-git.yml b/scripts/template-oss/_step-git.yml deleted file mode 100644 index 329bf5bb..00000000 --- a/scripts/template-oss/_step-git.yml +++ /dev/null @@ -1,4 +0,0 @@ -- name: Support Long Paths - if: matrix.platform.os == 'windows-latest' - run: git config --system core.longpaths true -{{> defaultStepGit }} diff --git a/scripts/template-oss/_step-node.yml b/scripts/template-oss/_step-node.yml deleted file mode 100644 index f15e8698..00000000 --- a/scripts/template-oss/_step-node.yml +++ /dev/null @@ -1,31 +0,0 @@ -- name: Setup Node - uses: actions/setup-node@v3 - with: - node-version: {{#if jobIsMatrix}}$\{{ matrix.node-version }}{{else}}{{ last ciVersions }}{{/if}} - {{#if lockfile}} - cache: npm - {{/if}} -{{#if updateNpm}} -{{#if jobIsMatrix}} -- name: Update Windows npm - # node 12 and 14 ship with npm@6, which is known to fail when updating itself in windows - if: matrix.platform.os == 'windows-latest' && (startsWith(matrix.node-version, '12.') || startsWith(matrix.node-version, '14.')) - run: | - curl -sO https://registry.npmjs.org/npm/-/npm-7.5.4.tgz - tar xf npm-7.5.4.tgz - cd package - node lib/npm.js install --no-fund --no-audit -g ..\npm-7.5.4.tgz - cd .. - rmdir /s /q package -- name: Install npm@7 - if: startsWith(matrix.node-version, '10.') || startsWith(matrix.node-version, '12.') - run: npm i --prefer-online --no-fund --no-audit -g npm@7 -- name: Install npm@latest - if: $\{{ !startsWith(matrix.node-version, '10.') && !startsWith(matrix.node-version, '12.') }} -{{else}} -- name: Install npm@latest -{{/if}} - run: npm i --prefer-online --no-fund --no-audit -g npm@latest -- name: npm Version - run: npm -v -{{/if}} diff --git a/scripts/template-oss/_step-test.yml b/scripts/template-oss/_step-test.yml deleted file mode 100644 index 84b6d549..00000000 --- a/scripts/template-oss/_step-test.yml +++ /dev/null @@ -1,3 +0,0 @@ -- name: Set Tap RC - run: node ./test/fixtures/taprc.js -{{> defaultStepTest }} diff --git a/src/create.ts b/src/create.ts new file mode 100644 index 00000000..13d79852 --- /dev/null +++ b/src/create.ts @@ -0,0 +1,100 @@ +import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass' +import { Minipass } from 'minipass' +import path from 'node:path' +import { list } from './list.js' +import { makeCommand } from './make-command.js' +import { + TarOptions, + TarOptionsFile, + TarOptionsSync, + TarOptionsSyncFile, +} from './options.js' +import { Pack, PackSync } from './pack.js' + +const createFileSync = (opt: TarOptionsSyncFile, files: string[]) => { + const p = new PackSync(opt) + const stream = new WriteStreamSync(opt.file, { + mode: opt.mode || 0o666, + }) + p.pipe(stream as unknown as Minipass.Writable) + addFilesSync(p, files) +} + +const createFile = (opt: TarOptionsFile, files: string[]) => { + const p = new Pack(opt) + const stream = new WriteStream(opt.file, { + mode: opt.mode || 0o666, + }) + p.pipe(stream as unknown as Minipass.Writable) + + const promise = new Promise((res, rej) => { + stream.on('error', rej) + stream.on('close', res) + p.on('error', rej) + }) + + addFilesAsync(p, files) + + return promise +} + +const addFilesSync = (p: PackSync, files: string[]) => { + files.forEach(file => { + if (file.charAt(0) === '@') { + list({ + file: path.resolve(p.cwd, file.slice(1)), + sync: true, + noResume: true, + onReadEntry: entry => p.add(entry), + }) + } else { + p.add(file) + } + }) + p.end() +} + +const addFilesAsync = async ( + p: Pack, + files: 
string[], +): Promise => { + for (let i = 0; i < files.length; i++) { + const file = String(files[i]) + if (file.charAt(0) === '@') { + await list({ + file: path.resolve(String(p.cwd), file.slice(1)), + noResume: true, + onReadEntry: entry => { + p.add(entry) + }, + }) + } else { + p.add(file) + } + } + p.end() +} + +const createSync = (opt: TarOptionsSync, files: string[]) => { + const p = new PackSync(opt) + addFilesSync(p, files) + return p +} + +const createAsync = (opt: TarOptions, files: string[]) => { + const p = new Pack(opt) + addFilesAsync(p, files) + return p +} + +export const create = makeCommand( + createFileSync, + createFile, + createSync, + createAsync, + (_opt, files) => { + if (!files?.length) { + throw new TypeError('no paths specified to add to archive') + } + }, +) diff --git a/src/cwd-error.ts b/src/cwd-error.ts new file mode 100644 index 00000000..bdc48046 --- /dev/null +++ b/src/cwd-error.ts @@ -0,0 +1,15 @@ +export class CwdError extends Error { + path: string + code: string + syscall: 'chdir' = 'chdir' + + constructor(path: string, code: string) { + super(`${code}: Cannot cd into '${path}'`) + this.path = path + this.code = code + } + + get name() { + return 'CwdError' + } +} diff --git a/src/extract.ts b/src/extract.ts new file mode 100644 index 00000000..85a5bcdf --- /dev/null +++ b/src/extract.ts @@ -0,0 +1,58 @@ +// tar -x +import * as fsm from '@isaacs/fs-minipass' +import fs from 'node:fs' +import { filesFilter } from './list.js' +import { makeCommand } from './make-command.js' +import { TarOptionsFile, TarOptionsSyncFile } from './options.js' +import { Unpack, UnpackSync } from './unpack.js' + +const extractFileSync = (opt: TarOptionsSyncFile) => { + const u = new UnpackSync(opt) + const file = opt.file + const stat = fs.statSync(file) + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + const readSize = opt.maxReadSize || 16 * 1024 * 1024 + const stream = new fsm.ReadStreamSync(file, { + readSize: readSize, + size: stat.size, + }) + stream.pipe(u) +} + +const extractFile = (opt: TarOptionsFile, _?: string[]) => { + const u = new Unpack(opt) + const readSize = opt.maxReadSize || 16 * 1024 * 1024 + + const file = opt.file + const p = new Promise((resolve, reject) => { + u.on('error', reject) + u.on('close', resolve) + + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + fs.stat(file, (er, stat) => { + if (er) { + reject(er) + } else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size, + }) + stream.on('error', reject) + stream.pipe(u) + } + }) + }) + return p +} + +export const extract = makeCommand( + extractFileSync, + extractFile, + opt => new UnpackSync(opt), + opt => new Unpack(opt), + (opt, files) => { + if (files?.length) filesFilter(opt, files) + }, +) diff --git a/lib/get-write-flag.js b/src/get-write-flag.ts similarity index 64% rename from lib/get-write-flag.js rename to src/get-write-flag.ts index e8695999..aa3fd883 100644 --- a/lib/get-write-flag.js +++ b/src/get-write-flag.ts @@ -5,16 +5,24 @@ // library is used for is extracting tarballs of many // relatively small files in npm packages and the like, // it can be a big boost on Windows platforms. -// Only supported in Node v12.9.0 and above. 
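// A minimal usage sketch of the `create` and `extract` commands defined
// above, assuming the package is consumed under its published name `tar`.
// It follows makeCommand's dispatch: `file` without `sync` yields a
// Promise, while `sync: true` without `file` returns the stream class.
import { createWriteStream } from 'node:fs'
import { create, extract } from 'tar'

// async + file: resolves once the output file stream closes
await create({ file: 'out.tar', cwd: 'src', gzip: true }, ['.'])

// sync + no file: returns a PackSync stream that can be piped anywhere
const pack = create({ sync: true, cwd: 'src' }, ['.'])
pack.pipe(createWriteStream('src.tar'))

// async + file: unpack the archive into ./dest
await extract({ file: 'out.tar', cwd: 'dest' })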
+ +import fs from 'fs' + const platform = process.env.__FAKE_PLATFORM__ || process.platform const isWindows = platform === 'win32' -const fs = global.__FAKE_TESTING_FS__ || require('fs') -/* istanbul ignore next */ -const { O_CREAT, O_TRUNC, O_WRONLY, UV_FS_O_FILEMAP = 0 } = fs.constants +/* c8 ignore start */ +const { O_CREAT, O_TRUNC, O_WRONLY } = fs.constants +const UV_FS_O_FILEMAP = + Number(process.env.__FAKE_FS_O_FILENAME__) || + fs.constants.UV_FS_O_FILEMAP || + 0 +/* c8 ignore stop */ const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP const fMapLimit = 512 * 1024 const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY -module.exports = !fMapEnabled ? () => 'w' - : size => size < fMapLimit ? fMapFlag : 'w' +export const getWriteFlag = + !fMapEnabled ? + () => 'w' + : (size: number) => (size < fMapLimit ? fMapFlag : 'w') diff --git a/src/header.ts b/src/header.ts new file mode 100644 index 00000000..1b639c05 --- /dev/null +++ b/src/header.ts @@ -0,0 +1,406 @@ +// parse a 512-byte header block to a data object, or vice-versa +// encode returns `true` if a pax extended header is needed, because +// the data could not be faithfully encoded in a simple header. +// (Also, check header.needPax to see if it needs a pax header.) + +import { posix as pathModule } from 'node:path' +import * as large from './large-numbers.js' +import type { EntryTypeCode, EntryTypeName } from './types.js' +import * as types from './types.js' + +export type HeaderData = { + path?: string + mode?: number + uid?: number + gid?: number + size?: number + cksum?: number + type?: EntryTypeName | 'Unsupported' + linkpath?: string + uname?: string + gname?: string + devmaj?: number + devmin?: number + atime?: Date + ctime?: Date + mtime?: Date + + // fields that are common in extended PAX headers, but not in the + // "standard" tar header block + charset?: string + comment?: string + dev?: number + ino?: number + nlink?: number +} + +export class Header implements HeaderData { + cksumValid: boolean = false + needPax: boolean = false + nullBlock: boolean = false + + block?: Buffer + path?: string + mode?: number + uid?: number + gid?: number + size?: number + cksum?: number + #type: EntryTypeCode | 'Unsupported' = 'Unsupported' + linkpath?: string + uname?: string + gname?: string + devmaj: number = 0 + devmin: number = 0 + atime?: Date + ctime?: Date + mtime?: Date + + charset?: string + comment?: string + + constructor( + data?: Buffer | HeaderData, + off: number = 0, + ex?: HeaderData, + gex?: HeaderData, + ) { + if (Buffer.isBuffer(data)) { + this.decode(data, off || 0, ex, gex) + } else if (data) { + this.#slurp(data) + } + } + + decode( + buf: Buffer, + off: number, + ex?: HeaderData, + gex?: HeaderData, + ) { + if (!off) { + off = 0 + } + + if (!buf || !(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header') + } + + this.path = decString(buf, off, 100) + this.mode = decNumber(buf, off + 100, 8) + this.uid = decNumber(buf, off + 108, 8) + this.gid = decNumber(buf, off + 116, 8) + this.size = decNumber(buf, off + 124, 12) + this.mtime = decDate(buf, off + 136, 12) + this.cksum = decNumber(buf, off + 148, 12) + + // if we have extended or global extended headers, apply them now + // See https://github.com/npm/node-tar/pull/187 + // Apply global before local, so it overrides + if (gex) this.#slurp(gex, true) + if (ex) this.#slurp(ex) + + // old tar versions marked dirs as a file with a trailing / + const t = decString(buf, off + 156, 1) + if (types.isCode(t)) { + this.#type = 
t || '0' + } + if (this.#type === '0' && this.path.slice(-1) === '/') { + this.#type = '5' + } + + // tar implementations sometimes incorrectly put the stat(dir).size + // as the size in the tarball, even though Directory entries are + // not able to have any body at all. In the very rare chance that + // it actually DOES have a body, we weren't going to do anything with + // it anyway, and it'll just be a warning about an invalid header. + if (this.#type === '5') { + this.size = 0 + } + + this.linkpath = decString(buf, off + 157, 100) + if ( + buf.subarray(off + 257, off + 265).toString() === + 'ustar\u000000' + ) { + this.uname = decString(buf, off + 265, 32) + this.gname = decString(buf, off + 297, 32) + /* c8 ignore start */ + this.devmaj = decNumber(buf, off + 329, 8) ?? 0 + this.devmin = decNumber(buf, off + 337, 8) ?? 0 + /* c8 ignore stop */ + if (buf[off + 475] !== 0) { + // definitely a prefix, definitely >130 chars. + const prefix = decString(buf, off + 345, 155) + this.path = prefix + '/' + this.path + } else { + const prefix = decString(buf, off + 345, 130) + if (prefix) { + this.path = prefix + '/' + this.path + } + this.atime = decDate(buf, off + 476, 12) + this.ctime = decDate(buf, off + 488, 12) + } + } + + let sum = 8 * 0x20 + for (let i = off; i < off + 148; i++) { + sum += buf[i] as number + } + + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i] as number + } + + this.cksumValid = sum === this.cksum + if (this.cksum === undefined && sum === 8 * 0x20) { + this.nullBlock = true + } + } + + #slurp(ex: HeaderData, gex: boolean = false) { + Object.assign( + this, + Object.fromEntries( + Object.entries(ex).filter(([k, v]) => { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. Also, any + // null/undefined values are ignored. + return !( + v === null || + v === undefined || + (k === 'path' && gex) || + (k === 'linkpath' && gex) || + k === 'global' + ) + }), + ), + ) + } + + encode(buf?: Buffer, off: number = 0) { + if (!buf) { + buf = this.block = Buffer.alloc(512) + } + + if (this.#type === 'Unsupported') { + this.#type = '0' + } + + if (!(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header') + } + + const prefixSize = this.ctime || this.atime ? 
130 : 155 + const split = splitPrefix(this.path || '', prefixSize) + const path = split[0] + const prefix = split[1] + this.needPax = !!split[2] + + this.needPax = encString(buf, off, 100, path) || this.needPax + this.needPax = + encNumber(buf, off + 100, 8, this.mode) || this.needPax + this.needPax = + encNumber(buf, off + 108, 8, this.uid) || this.needPax + this.needPax = + encNumber(buf, off + 116, 8, this.gid) || this.needPax + this.needPax = + encNumber(buf, off + 124, 12, this.size) || this.needPax + this.needPax = + encDate(buf, off + 136, 12, this.mtime) || this.needPax + buf[off + 156] = this.#type.charCodeAt(0) + this.needPax = + encString(buf, off + 157, 100, this.linkpath) || this.needPax + buf.write('ustar\u000000', off + 257, 8) + this.needPax = + encString(buf, off + 265, 32, this.uname) || this.needPax + this.needPax = + encString(buf, off + 297, 32, this.gname) || this.needPax + this.needPax = + encNumber(buf, off + 329, 8, this.devmaj) || this.needPax + this.needPax = + encNumber(buf, off + 337, 8, this.devmin) || this.needPax + this.needPax = + encString(buf, off + 345, prefixSize, prefix) || this.needPax + if (buf[off + 475] !== 0) { + this.needPax = + encString(buf, off + 345, 155, prefix) || this.needPax + } else { + this.needPax = + encString(buf, off + 345, 130, prefix) || this.needPax + this.needPax = + encDate(buf, off + 476, 12, this.atime) || this.needPax + this.needPax = + encDate(buf, off + 488, 12, this.ctime) || this.needPax + } + + let sum = 8 * 0x20 + for (let i = off; i < off + 148; i++) { + sum += buf[i] as number + } + + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i] as number + } + + this.cksum = sum + encNumber(buf, off + 148, 8, this.cksum) + this.cksumValid = true + + return this.needPax + } + + get type(): EntryTypeName { + return ( + this.#type === 'Unsupported' ? + this.#type + : types.name.get(this.#type)) as EntryTypeName + } + + get typeKey(): EntryTypeCode | 'Unsupported' { + return this.#type + } + + set type(type: EntryTypeCode | EntryTypeName | 'Unsupported') { + const c = String(types.code.get(type as EntryTypeName)) + if (types.isCode(c) || c === 'Unsupported') { + this.#type = c + } else if (types.isCode(type)) { + this.#type = type + } else { + throw new TypeError('invalid entry type: ' + type) + } + } +} + +const splitPrefix = ( + p: string, + prefixSize: number, +): [string, string, boolean] => { + const pathSize = 100 + let pp = p + let prefix = '' + let ret: undefined | [string, string, boolean] = undefined + const root = pathModule.parse(p).root || '.' + + if (Buffer.byteLength(pp) < pathSize) { + ret = [pp, prefix, false] + } else { + // first set prefix to the dir, and path to the base + prefix = pathModule.dirname(pp) + pp = pathModule.basename(pp) + + do { + if ( + Buffer.byteLength(pp) <= pathSize && + Buffer.byteLength(prefix) <= prefixSize + ) { + // both fit! 
+ ret = [pp, prefix, false] + } else if ( + Buffer.byteLength(pp) > pathSize && + Buffer.byteLength(prefix) <= prefixSize + ) { + // prefix fits in prefix, but path doesn't fit in path + ret = [pp.slice(0, pathSize - 1), prefix, true] + } else { + // make path take a bit from prefix + pp = pathModule.join(pathModule.basename(prefix), pp) + prefix = pathModule.dirname(prefix) + } + } while (prefix !== root && ret === undefined) + + // at this point, found no resolution, just truncate + if (!ret) { + ret = [p.slice(0, pathSize - 1), '', true] + } + } + return ret +} + +const decString = (buf: Buffer, off: number, size: number) => + buf + .subarray(off, off + size) + .toString('utf8') + .replace(/\0.*/, '') + +const decDate = (buf: Buffer, off: number, size: number) => + numToDate(decNumber(buf, off, size)) + +const numToDate = (num?: number) => + num === undefined ? undefined : new Date(num * 1000) + +const decNumber = (buf: Buffer, off: number, size: number) => + Number(buf[off]) & 0x80 ? + large.parse(buf.subarray(off, off + size)) + : decSmallNumber(buf, off, size) + +const nanUndef = (value: number) => (isNaN(value) ? undefined : value) + +const decSmallNumber = (buf: Buffer, off: number, size: number) => + nanUndef( + parseInt( + buf + .subarray(off, off + size) + .toString('utf8') + .replace(/\0.*$/, '') + .trim(), + 8, + ), + ) + +// the maximum encodable as a null-terminated octal, by field size +const MAXNUM = { + 12: 0o77777777777, + 8: 0o7777777, +} + +const encNumber = ( + buf: Buffer, + off: number, + size: 12 | 8, + num?: number, +) => + num === undefined ? false + : num > MAXNUM[size] || num < 0 ? + (large.encode(num, buf.subarray(off, off + size)), true) + : (encSmallNumber(buf, off, size, num), false) + +const encSmallNumber = ( + buf: Buffer, + off: number, + size: number, + num: number, +) => buf.write(octalString(num, size), off, size, 'ascii') + +const octalString = (num: number, size: number) => + padOctal(Math.floor(num).toString(8), size) + +const padOctal = (str: string, size: number) => + (str.length === size - 1 ? + str + : new Array(size - str.length - 1).join('0') + str + ' ') + '\0' + +const encDate = ( + buf: Buffer, + off: number, + size: 8 | 12, + date?: Date, +) => + date === undefined ? false : ( + encNumber(buf, off, size, date.getTime() / 1000) + ) + +// enough to fill the longest string we've got +const NULLS = new Array(156).join('\0') +// pad with nulls, return true if it's longer or non-ascii +const encString = ( + buf: Buffer, + off: number, + size: number, + str?: string, +) => + str === undefined ? 
false : ( + (buf.write(str + NULLS, off, size, 'utf8'), + str.length !== Buffer.byteLength(str) || str.length > size) + ) diff --git a/src/index.ts b/src/index.ts new file mode 100644 index 00000000..f19166e8 --- /dev/null +++ b/src/index.ts @@ -0,0 +1,30 @@ +export { + type TarOptionsWithAliasesAsync, + type TarOptionsWithAliasesAsyncFile, + type TarOptionsWithAliasesAsyncNoFile, + type TarOptionsWithAliasesSyncNoFile, + type TarOptionsWithAliases, + type TarOptionsWithAliasesFile, + type TarOptionsWithAliasesSync, + type TarOptionsWithAliasesSyncFile, +} from './options.js' + +export * from './create.js' +export { create as c } from './create.js' +export * from './extract.js' +export { extract as x } from './extract.js' +export * from './header.js' +export * from './list.js' +export { list as t } from './list.js' +// classes +export * from './pack.js' +export * from './parse.js' +export * from './pax.js' +export * from './read-entry.js' +export * from './replace.js' +export { replace as r } from './replace.js' +export * as types from './types.js' +export * from './unpack.js' +export * from './update.js' +export { update as u } from './update.js' +export * from './write-entry.js' diff --git a/lib/large-numbers.js b/src/large-numbers.ts similarity index 62% rename from lib/large-numbers.js rename to src/large-numbers.ts index b11e72d9..cc015315 100644 --- a/lib/large-numbers.js +++ b/src/large-numbers.ts @@ -1,12 +1,13 @@ -'use strict' // Tar can encode large and negative numbers using a leading byte of // 0xff for negative, and 0x80 for positive. -const encode = (num, buf) => { +export const encode = (num: number, buf: Buffer) => { if (!Number.isSafeInteger(num)) { - // The number is so large that javascript cannot represent it with integer - // precision. - throw Error('cannot encode number outside of javascript safe integer range') + // The number is so large that javascript cannot represent it with integer + // precision. + throw Error( + 'cannot encode number outside of javascript safe integer range', + ) } else if (num < 0) { encodeNegative(num, buf) } else { @@ -15,7 +16,7 @@ const encode = (num, buf) => { return buf } -const encodePositive = (num, buf) => { +const encodePositive = (num: number, buf: Buffer) => { buf[0] = 0x80 for (var i = buf.length; i > 1; i--) { @@ -24,7 +25,7 @@ const encodePositive = (num, buf) => { } } -const encodeNegative = (num, buf) => { +const encodeNegative = (num: number, buf: Buffer) => { buf[0] = 0xff var flipped = false num = num * -1 @@ -42,9 +43,10 @@ const encodeNegative = (num, buf) => { } } -const parse = (buf) => { +export const parse = (buf: Buffer) => { const pre = buf[0] - const value = pre === 0x80 ? pos(buf.slice(1, buf.length)) + const value = + pre === 0x80 ? pos(buf.subarray(1, buf.length)) : pre === 0xff ? twos(buf) : null if (value === null) { @@ -52,20 +54,22 @@ const parse = (buf) => { } if (!Number.isSafeInteger(value)) { - // The number is so large that javascript cannot represent it with integer - // precision. - throw Error('parsed number outside of javascript safe integer range') + // The number is so large that javascript cannot represent it with integer + // precision. 
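    // (This branch is only reachable for base-256 encoded values outside
    // JavaScript's safe-integer range, i.e. beyond +/-(2 ** 53 - 1),
    // for example a size field claiming 8 PiB or more.)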
+ throw Error( + 'parsed number outside of javascript safe integer range', + ) } return value } -const twos = (buf) => { +const twos = (buf: Buffer) => { var len = buf.length var sum = 0 var flipped = false for (var i = len - 1; i > -1; i--) { - var byte = buf[i] + var byte = Number(buf[i]) var f if (flipped) { f = onesComp(byte) @@ -82,11 +86,11 @@ const twos = (buf) => { return sum } -const pos = (buf) => { +const pos = (buf: Buffer) => { var len = buf.length var sum = 0 for (var i = len - 1; i > -1; i--) { - var byte = buf[i] + var byte = Number(buf[i]) if (byte !== 0) { sum += byte * Math.pow(256, len - i - 1) } @@ -94,11 +98,6 @@ const pos = (buf) => { return sum } -const onesComp = byte => (0xff ^ byte) & 0xff +const onesComp = (byte: number) => (0xff ^ byte) & 0xff -const twosComp = byte => ((0xff ^ byte) + 1) & 0xff - -module.exports = { - encode, - parse, -} +const twosComp = (byte: number) => ((0xff ^ byte) + 1) & 0xff diff --git a/src/list.ts b/src/list.ts new file mode 100644 index 00000000..eab4b78c --- /dev/null +++ b/src/list.ts @@ -0,0 +1,124 @@ +// tar -t +import * as fsm from '@isaacs/fs-minipass' +import fs from 'node:fs' +import { dirname, parse } from 'path' +import { makeCommand } from './make-command.js' +import { + TarOptions, + TarOptionsFile, + TarOptionsSyncFile, +} from './options.js' +import { Parser } from './parse.js' +import { stripTrailingSlashes } from './strip-trailing-slashes.js' + +const onReadEntryFunction = (opt: TarOptions) => { + const onReadEntry = opt.onReadEntry + opt.onReadEntry = + onReadEntry ? + e => { + onReadEntry(e) + e.resume() + } + : e => e.resume() +} + +// construct a filter that limits the file entries listed +// include child entries if a dir is included +export const filesFilter = (opt: TarOptions, files: string[]) => { + const map = new Map( + files.map(f => [stripTrailingSlashes(f), true]), + ) + const filter = opt.filter + + const mapHas = (file: string, r: string = ''): boolean => { + const root = r || parse(file).root || '.' + let ret: boolean + if (file === root) ret = false + else { + const m = map.get(file) + if (m !== undefined) { + ret = m + } else { + ret = mapHas(dirname(file), root) + } + } + + map.set(file, ret) + return ret + } + + opt.filter = + filter ? 
+ (file, entry) => + filter(file, entry) && mapHas(stripTrailingSlashes(file)) + : file => mapHas(stripTrailingSlashes(file)) +} + +const listFileSync = (opt: TarOptionsSyncFile) => { + const p = new Parser(opt) + const file = opt.file + let fd + try { + const stat = fs.statSync(file) + const readSize = opt.maxReadSize || 16 * 1024 * 1024 + if (stat.size < readSize) { + p.end(fs.readFileSync(file)) + } else { + let pos = 0 + const buf = Buffer.allocUnsafe(readSize) + fd = fs.openSync(file, 'r') + while (pos < stat.size) { + const bytesRead = fs.readSync(fd, buf, 0, readSize, pos) + pos += bytesRead + p.write(buf.subarray(0, bytesRead)) + } + p.end() + } + } finally { + if (typeof fd === 'number') { + try { + fs.closeSync(fd) + /* c8 ignore next */ + } catch (er) {} + } + } +} + +const listFile = ( + opt: TarOptionsFile, + _files: string[], +): Promise => { + const parse = new Parser(opt) + const readSize = opt.maxReadSize || 16 * 1024 * 1024 + + const file = opt.file + const p = new Promise((resolve, reject) => { + parse.on('error', reject) + parse.on('end', resolve) + + fs.stat(file, (er, stat) => { + if (er) { + reject(er) + } else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size, + }) + stream.on('error', reject) + stream.pipe(parse) + } + }) + }) + return p +} + +export const list = makeCommand( + listFileSync, + listFile, + opt => new Parser(opt) as Parser & { sync: true }, + opt => new Parser(opt), + (opt, files) => { + if (files?.length) filesFilter(opt, files) + if (!opt.noResume) onReadEntryFunction(opt) + }, +) diff --git a/src/make-command.ts b/src/make-command.ts new file mode 100644 index 00000000..7a0e8b2e --- /dev/null +++ b/src/make-command.ts @@ -0,0 +1,246 @@ +import { + dealias, + isAsyncFile, + isAsyncNoFile, + isSyncFile, + isSyncNoFile, + TarOptions, + TarOptionsAsyncFile, + TarOptionsAsyncNoFile, + TarOptionsSyncFile, + TarOptionsSyncNoFile, + TarOptionsWithAliases, + TarOptionsWithAliasesAsync, + TarOptionsWithAliasesAsyncFile, + TarOptionsWithAliasesAsyncNoFile, + TarOptionsWithAliasesFile, + TarOptionsWithAliasesNoFile, + TarOptionsWithAliasesSync, + TarOptionsWithAliasesSyncFile, + TarOptionsWithAliasesSyncNoFile, +} from './options.js' + +export type CB = (er?: Error) => any + +export type TarCommand< + AsyncClass, + SyncClass extends { sync: true }, +> = { + // async and no file specified + (): AsyncClass + (opt: TarOptionsWithAliasesAsyncNoFile): AsyncClass + (entries: string[]): AsyncClass + ( + opt: TarOptionsWithAliasesAsyncNoFile, + entries: string[], + ): AsyncClass +} & { + // sync and no file + (opt: TarOptionsWithAliasesSyncNoFile): SyncClass + (opt: TarOptionsWithAliasesSyncNoFile, entries: string[]): SyncClass +} & { + // async and file + (opt: TarOptionsWithAliasesAsyncFile): Promise + ( + opt: TarOptionsWithAliasesAsyncFile, + entries: string[], + ): Promise + (opt: TarOptionsWithAliasesAsyncFile, cb: CB): Promise + ( + opt: TarOptionsWithAliasesAsyncFile, + entries: string[], + cb: CB, + ): Promise +} & { + // sync and file + (opt: TarOptionsWithAliasesSyncFile): void + (opt: TarOptionsWithAliasesSyncFile, entries: string[]): void +} & { + // sync, maybe file + (opt: TarOptionsWithAliasesSync): typeof opt extends ( + TarOptionsWithAliasesFile + ) ? + void + : typeof opt extends TarOptionsWithAliasesNoFile ? SyncClass + : void | SyncClass + ( + opt: TarOptionsWithAliasesSync, + entries: string[], + ): typeof opt extends TarOptionsWithAliasesFile ? void + : typeof opt extends TarOptionsWithAliasesNoFile ? 
SyncClass + : void | SyncClass +} & { + // async, maybe file + (opt: TarOptionsWithAliasesAsync): typeof opt extends ( + TarOptionsWithAliasesFile + ) ? + Promise + : typeof opt extends TarOptionsWithAliasesNoFile ? AsyncClass + : Promise | AsyncClass + ( + opt: TarOptionsWithAliasesAsync, + entries: string[], + ): typeof opt extends TarOptionsWithAliasesFile ? Promise + : typeof opt extends TarOptionsWithAliasesNoFile ? AsyncClass + : Promise | AsyncClass + (opt: TarOptionsWithAliasesAsync, cb: CB): Promise + ( + opt: TarOptionsWithAliasesAsync, + entries: string[], + cb: CB, + ): typeof opt extends TarOptionsWithAliasesFile ? Promise + : typeof opt extends TarOptionsWithAliasesNoFile ? never + : Promise +} & { + // maybe sync, file + (opt: TarOptionsWithAliasesFile): Promise | void + ( + opt: TarOptionsWithAliasesFile, + entries: string[], + ): typeof opt extends TarOptionsWithAliasesSync ? void + : typeof opt extends TarOptionsWithAliasesAsync ? Promise + : Promise | void + (opt: TarOptionsWithAliasesFile, cb: CB): Promise + ( + opt: TarOptionsWithAliasesFile, + entries: string[], + cb: CB, + ): typeof opt extends TarOptionsWithAliasesSync ? never + : typeof opt extends TarOptionsWithAliasesAsync ? Promise + : Promise +} & { + // maybe sync, no file + (opt: TarOptionsWithAliasesNoFile): typeof opt extends ( + TarOptionsWithAliasesSync + ) ? + SyncClass + : typeof opt extends TarOptionsWithAliasesAsync ? AsyncClass + : SyncClass | AsyncClass + ( + opt: TarOptionsWithAliasesNoFile, + entries: string[], + ): typeof opt extends TarOptionsWithAliasesSync ? SyncClass + : typeof opt extends TarOptionsWithAliasesAsync ? AsyncClass + : SyncClass | AsyncClass +} & { + // maybe sync, maybe file + (opt: TarOptionsWithAliases): typeof opt extends ( + TarOptionsWithAliasesFile + ) ? + typeof opt extends TarOptionsWithAliasesSync ? void + : typeof opt extends TarOptionsWithAliasesAsync ? Promise + : void | Promise + : typeof opt extends TarOptionsWithAliasesNoFile ? + typeof opt extends TarOptionsWithAliasesSync ? SyncClass + : typeof opt extends TarOptionsWithAliasesAsync ? AsyncClass + : SyncClass | AsyncClass + : typeof opt extends TarOptionsWithAliasesSync ? SyncClass | void + : typeof opt extends TarOptionsWithAliasesAsync ? 
+ AsyncClass | Promise + : SyncClass | void | AsyncClass | Promise +} & { + // extras + syncFile: (opt: TarOptionsSyncFile, entries: string[]) => void + asyncFile: ( + opt: TarOptionsAsyncFile, + entries: string[], + cb?: CB, + ) => Promise + syncNoFile: ( + opt: TarOptionsSyncNoFile, + entries: string[], + ) => SyncClass + asyncNoFile: ( + opt: TarOptionsAsyncNoFile, + entries: string[], + ) => AsyncClass + validate?: (opt: TarOptions, entries?: string[]) => void +} + +export const makeCommand = < + AsyncClass, + SyncClass extends { sync: true }, +>( + syncFile: (opt: TarOptionsSyncFile, entries: string[]) => void, + asyncFile: ( + opt: TarOptionsAsyncFile, + entries: string[], + cb?: CB, + ) => Promise, + syncNoFile: ( + opt: TarOptionsSyncNoFile, + entries: string[], + ) => SyncClass, + asyncNoFile: ( + opt: TarOptionsAsyncNoFile, + entries: string[], + ) => AsyncClass, + validate?: (opt: TarOptions, entries?: string[]) => void, +): TarCommand => { + return Object.assign( + ( + opt_: TarOptionsWithAliases | string[] = [], + entries?: string[] | CB, + cb?: CB, + ) => { + if (Array.isArray(opt_)) { + entries = opt_ + opt_ = {} + } + + if (typeof entries === 'function') { + cb = entries + entries = undefined + } + + if (!entries) { + entries = [] + } else { + entries = Array.from(entries) + } + + const opt = dealias(opt_) + + validate?.(opt, entries) + + if (isSyncFile(opt)) { + if (typeof cb === 'function') { + throw new TypeError( + 'callback not supported for sync tar functions', + ) + } + return syncFile(opt, entries) + } else if (isAsyncFile(opt)) { + const p = asyncFile(opt, entries) + // weirdness to make TS happy + const c = cb ? cb : undefined + return c ? p.then(() => c(), c) : p + } else if (isSyncNoFile(opt)) { + if (typeof cb === 'function') { + throw new TypeError( + 'callback not supported for sync tar functions', + ) + } + return syncNoFile(opt, entries) + } else if (isAsyncNoFile(opt)) { + if (typeof cb === 'function') { + throw new TypeError( + 'callback only supported with file option', + ) + } + return asyncNoFile(opt, entries) + /* c8 ignore start */ + } else { + throw new Error('impossible options??') + } + /* c8 ignore stop */ + }, + { + syncFile, + asyncFile, + syncNoFile, + asyncNoFile, + validate, + }, + ) as TarCommand +} diff --git a/src/mkdir.ts b/src/mkdir.ts new file mode 100644 index 00000000..f65c98ef --- /dev/null +++ b/src/mkdir.ts @@ -0,0 +1,293 @@ +import { chownr, chownrSync } from 'chownr' +import fs from 'fs' +import { mkdirp, mkdirpSync } from 'mkdirp' +import path from 'node:path' +import { CwdError } from './cwd-error.js' +import { normalizeWindowsPath } from './normalize-windows-path.js' +import { SymlinkError } from './symlink-error.js' + +export type MkdirOptions = { + uid?: number + gid?: number + processUid?: number + processGid?: number + umask?: number + preserve: boolean + unlink: boolean + cache: Map + cwd: string + mode: number +} + +export type MkdirError = + | NodeJS.ErrnoException + | CwdError + | SymlinkError + +const cGet = (cache: Map, key: string) => + cache.get(normalizeWindowsPath(key)) +const cSet = ( + cache: Map, + key: string, + val: boolean, +) => cache.set(normalizeWindowsPath(key), val) + +const checkCwd = ( + dir: string, + cb: (er?: null | MkdirError) => any, +) => { + fs.stat(dir, (er, st) => { + if (er || !st.isDirectory()) { + er = new CwdError( + dir, + (er as NodeJS.ErrnoException)?.code || 'ENOTDIR', + ) + } + cb(er) + }) +} + +/** + * Wrapper around mkdirp for tar's needs. 
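 * (In this codebase it is typically called from the unpack path when
 * creating parent directories for extracted entries.)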
+ * + * The main purpose is to avoid creating directories if we know that + * they already exist (and track which ones exist for this purpose), + * and prevent entries from being extracted into symlinked folders, + * if `preservePaths` is not set. + */ +export const mkdir = ( + dir: string, + opt: MkdirOptions, + cb: (er?: null | MkdirError, made?: string) => void, +) => { + dir = normalizeWindowsPath(dir) + + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + /* c8 ignore next */ + const umask = opt.umask ?? 0o22 + const mode = opt.mode | 0o0700 + const needChmod = (mode & umask) !== 0 + + const uid = opt.uid + const gid = opt.gid + const doChown = + typeof uid === 'number' && + typeof gid === 'number' && + (uid !== opt.processUid || gid !== opt.processGid) + + const preserve = opt.preserve + const unlink = opt.unlink + const cache = opt.cache + const cwd = normalizeWindowsPath(opt.cwd) + + const done = (er?: null | MkdirError, created?: string) => { + if (er) { + cb(er) + } else { + cSet(cache, dir, true) + if (created && doChown) { + chownr(created, uid, gid, er => + done(er as NodeJS.ErrnoException), + ) + } else if (needChmod) { + fs.chmod(dir, mode, cb) + } else { + cb() + } + } + } + + if (cache && cGet(cache, dir) === true) { + return done() + } + + if (dir === cwd) { + return checkCwd(dir, done) + } + + if (preserve) { + return mkdirp(dir, { mode }).then( + made => done(null, made ?? undefined), // oh, ts + done, + ) + } + + const sub = normalizeWindowsPath(path.relative(cwd, dir)) + const parts = sub.split('/') + mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done) +} + +const mkdir_ = ( + base: string, + parts: string[], + mode: number, + cache: Map, + unlink: boolean, + cwd: string, + created: string | undefined, + cb: (er?: null | MkdirError, made?: string) => void, +): void => { + if (!parts.length) { + return cb(null, created) + } + const p = parts.shift() + const part = normalizeWindowsPath(path.resolve(base + '/' + p)) + if (cGet(cache, part)) { + return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) + } + fs.mkdir( + part, + mode, + onmkdir(part, parts, mode, cache, unlink, cwd, created, cb), + ) +} + +const onmkdir = + ( + part: string, + parts: string[], + mode: number, + cache: Map, + unlink: boolean, + cwd: string, + created: string | undefined, + cb: (er?: null | MkdirError, made?: string) => void, + ) => + (er?: null | NodeJS.ErrnoException) => { + if (er) { + fs.lstat(part, (statEr, st) => { + if (statEr) { + statEr.path = + statEr.path && normalizeWindowsPath(statEr.path) + cb(statEr) + } else if (st.isDirectory()) { + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) + } else if (unlink) { + fs.unlink(part, er => { + if (er) { + return cb(er) + } + fs.mkdir( + part, + mode, + onmkdir( + part, + parts, + mode, + cache, + unlink, + cwd, + created, + cb, + ), + ) + }) + } else if (st.isSymbolicLink()) { + return cb( + new SymlinkError(part, part + '/' + parts.join('/')), + ) + } else { + cb(er) + } + }) + } else { + created = created || part + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) + } + } + +const checkCwdSync = (dir: string) => { + let ok = false + let code: string | undefined = undefined + try { + ok = fs.statSync(dir).isDirectory() + } catch (er) { + code = (er as NodeJS.ErrnoException)?.code + } finally { + if (!ok) { + throw new CwdError(dir, code ?? 
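// Worked illustration of the mode/umask check above (values chosen for the
// example): with a group-writable directory mode and the default 0o22 umask,
// the umask would strip the group write bit, so an explicit chmod is needed.
const exampleUmask = 0o22
const exampleMode = 0o775 | 0o700                             // owner bits are always forced on
const exampleNeedChmod = (exampleMode & exampleUmask) !== 0   // 0o775 & 0o22 === 0o020 -> true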
'ENOTDIR') + } + } +} + +export const mkdirSync = (dir: string, opt: MkdirOptions) => { + dir = normalizeWindowsPath(dir) + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + /* c8 ignore next */ + const umask = opt.umask ?? 0o22 + const mode = opt.mode | 0o700 + const needChmod = (mode & umask) !== 0 + + const uid = opt.uid + const gid = opt.gid + const doChown = + typeof uid === 'number' && + typeof gid === 'number' && + (uid !== opt.processUid || gid !== opt.processGid) + + const preserve = opt.preserve + const unlink = opt.unlink + const cache = opt.cache + const cwd = normalizeWindowsPath(opt.cwd) + + const done = (created?: string | undefined) => { + cSet(cache, dir, true) + if (created && doChown) { + chownrSync(created, uid, gid) + } + if (needChmod) { + fs.chmodSync(dir, mode) + } + } + + if (cache && cGet(cache, dir) === true) { + return done() + } + + if (dir === cwd) { + checkCwdSync(cwd) + return done() + } + + if (preserve) { + return done(mkdirpSync(dir, mode) ?? undefined) + } + + const sub = normalizeWindowsPath(path.relative(cwd, dir)) + const parts = sub.split('/') + let created: string | undefined = undefined + for ( + let p = parts.shift(), part = cwd; + p && (part += '/' + p); + p = parts.shift() + ) { + part = normalizeWindowsPath(path.resolve(part)) + if (cGet(cache, part)) { + continue + } + + try { + fs.mkdirSync(part, mode) + created = created || part + cSet(cache, part, true) + } catch (er) { + const st = fs.lstatSync(part) + if (st.isDirectory()) { + cSet(cache, part, true) + continue + } else if (unlink) { + fs.unlinkSync(part) + fs.mkdirSync(part, mode) + created = created || part + cSet(cache, part, true) + continue + } else if (st.isSymbolicLink()) { + return new SymlinkError(part, part + '/' + parts.join('/')) + } + } + } + + return done(created) +} diff --git a/lib/mode-fix.js b/src/mode-fix.ts similarity index 87% rename from lib/mode-fix.js rename to src/mode-fix.ts index 42f1d6e6..0a7bbe54 100644 --- a/lib/mode-fix.js +++ b/src/mode-fix.ts @@ -1,5 +1,8 @@ -'use strict' -module.exports = (mode, isDir, portable) => { +export const modeFix = ( + mode: number, + isDir: boolean, + portable: boolean, +) => { mode &= 0o7777 // in portable mode, use the minimum reasonable umask diff --git a/lib/normalize-unicode.js b/src/normalize-unicode.ts similarity index 88% rename from lib/normalize-unicode.js rename to src/normalize-unicode.ts index 79e285ab..61dacf06 100644 --- a/lib/normalize-unicode.js +++ b/src/normalize-unicode.ts @@ -4,7 +4,7 @@ // Do not edit without careful benchmarking. const normalizeCache = Object.create(null) const { hasOwnProperty } = Object.prototype -module.exports = s => { +export const normalizeUnicode = (s: string) => { if (!hasOwnProperty.call(normalizeCache, s)) { normalizeCache[s] = s.normalize('NFD') } diff --git a/lib/normalize-windows-path.js b/src/normalize-windows-path.ts similarity index 55% rename from lib/normalize-windows-path.js rename to src/normalize-windows-path.ts index eb13ba01..7655119a 100644 --- a/lib/normalize-windows-path.js +++ b/src/normalize-windows-path.ts @@ -3,6 +3,10 @@ // so, on windows, and only on windows, we replace all \ chars with /, // so that we can use / as our one and only directory separator char. -const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform -module.exports = platform !== 'win32' ? 
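// Minimal usage sketch for mkdirSync() above, using its in-package import
// path; the option values are illustrative, not what the unpacker passes.
import { mkdirSync } from './mkdir.js'
const dirCache = new Map<string, boolean>()
mkdirSync('/tmp/unpack/a/b/c', {
  cwd: '/tmp/unpack',
  mode: 0o755,
  umask: 0o22,
  preserve: false,   // walk and create each missing level explicitly
  unlink: false,     // do not clobber non-directory entries in the way
  cache: dirCache,   // already-created directories are skipped on later calls
})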
p => p - : p => p && p.replace(/\\/g, '/') +const platform = + process.env.TESTING_TAR_FAKE_PLATFORM || process.platform + +export const normalizeWindowsPath = + platform !== 'win32' ? + (p: string) => p + : (p: string) => p && p.replace(/\\/g, '/') diff --git a/src/options.ts b/src/options.ts new file mode 100644 index 00000000..c0d8aac1 --- /dev/null +++ b/src/options.ts @@ -0,0 +1,712 @@ +// turn tar(1) style args like `C` into the more verbose things like `cwd` + +import { type GzipOptions, type ZlibOptions } from 'minizlib' +import { type Stats } from 'node:fs' +import { type ReadEntry } from './read-entry.js' +import { type WarnData } from './warn-method.js' +import { WriteEntry } from './write-entry.js' + +const argmap = new Map( + [ + ['C', 'cwd'], + ['f', 'file'], + ['z', 'gzip'], + ['P', 'preservePaths'], + ['U', 'unlink'], + ['strip-components', 'strip'], + ['stripComponents', 'strip'], + ['keep-newer', 'newer'], + ['keepNewer', 'newer'], + ['keep-newer-files', 'newer'], + ['keepNewerFiles', 'newer'], + ['k', 'keep'], + ['keep-existing', 'keep'], + ['keepExisting', 'keep'], + ['m', 'noMtime'], + ['no-mtime', 'noMtime'], + ['p', 'preserveOwner'], + ['L', 'follow'], + ['h', 'follow'], + ['onentry', 'onReadEntry'], + ], +) + +/** + * The options that can be provided to tar commands. + * + * Note that some of these are only relevant for certain commands, since + * they are specific to reading or writing. + * + * Aliases are provided in the {@link TarOptionsWithAliases} type. + */ +export interface TarOptions { + ////////////////////////// + // shared options + + /** + * Perform all I/O operations synchronously. If the stream is ended + * immediately, then it will be processed entirely synchronously. + */ + sync?: boolean + + /** + * The tar file to be read and/or written. When this is set, a stream + * is not returned. Asynchronous commands will return a promise indicating + * when the operation is completed, and synchronous commands will return + * immediately. + */ + file?: string + + /** + * Treat warnings as crash-worthy errors. Defaults false. + */ + strict?: boolean + + /** + * The effective current working directory for this tar command + */ + cwd?: string + + /** + * When creating a tar archive, this can be used to compress it as well. + * Set to `true` to use the default gzip options, or customize them as + * needed. + * + * When reading, if this is unset, then the compression status will be + * inferred from the archive data. This is generally best, unless you are + * sure of the compression settings in use to create the archive, and want to + * fail if the archive doesn't match expectations. + */ + gzip?: boolean | GzipOptions + + /** + * When creating archives, preserve absolute and `..` paths in the archive, + * rather than sanitizing them under the cwd. + * + * When extracting, allow absolute paths, paths containing `..`, and + * extracting through symbolic links. By default, the root `/` is stripped + * from absolute paths (eg, turning `/x/y/z` into `x/y/z`), paths containing + * `..` are not extracted, and any file whose location would be modified by a + * symbolic link is not extracted. + * + * **WARNING** This is almost always unsafe, and must NEVER be used on + * archives from untrusted sources, such as user input, and every entry must + * be validated to ensure it is safe to write. Even if the input is not + * malicious, mistakes can cause a lot of damage! 
+ */ + preservePaths?: boolean + + /** + * When extracting, do not set the `mtime` value for extracted entries to + * match the `mtime` in the archive. + * + * When creating archives, do not store the `mtime` value in the entry. Note + * that this prevents properly using other mtime-based features (such as + * `tar.update` or the `newer` option) with the resulting archive. + */ + noMtime?: boolean + + /** + * Set to `true` or an object with settings for `zlib.BrotliCompress()` to + * create a brotli-compressed archive + * + * When extracting, this will cause the archive to be treated as a + * brotli-compressed file if set to `true` or a ZlibOptions object. + * + * If set `false`, then brotli options will not be used. + * + * If both this and the `gzip` option are left `undefined`, then tar will + * attempt to infer the brotli compression status, but can only do so based + * on the filename. If the filename ends in `.tbr` or `.tar.br`, and the + * first 512 bytes are not a valid tar header, then brotli decompression + * will be attempted. + */ + brotli?: boolean | ZlibOptions + + /** + * A function that is called with `(path, stat)` when creating an archive, or + * `(path, entry)` when extracting. Return true to process the file/entry, or + * false to exclude it. + */ + filter?: (path: string, entry: Stats | ReadEntry) => boolean + + /** + * A function that gets called for any warning encountered. + * + * Note: if `strict` is set, then the warning will throw, and this method + * will not be called. + */ + onwarn?: (code: string, message: string, data: WarnData) => any + + ////////////////////////// + // extraction options + + /** + * When extracting, unlink files before creating them. Without this option, + * tar overwrites existing files, which preserves existing hardlinks. With + * this option, existing hardlinks will be broken, as will any symlink that + * would affect the location of an extracted file. + */ + unlink?: boolean + + /** + * When extracting, strip the specified number of path portions from the + * entry path. For example, with `{strip: 2}`, the entry `a/b/c/d` would be + * extracted to `{cwd}/c/d`. + * + * Any entry whose entire path is stripped will be excluded. + */ + strip?: number + + /** + * When extracting, keep the existing file on disk if it's newer than the + * file in the archive. + */ + newer?: boolean + + /** + * When extracting, do not overwrite existing files at all. + */ + keep?: boolean + + /** + * When extracting, set the `uid` and `gid` of extracted entries to the `uid` + * and `gid` fields in the archive. Defaults to true when run as root, and + * false otherwise. + * + * If false, then files and directories will be set with the owner and group + * of the user running the process. This is similar to `-p` in `tar(1)`, but + * ACLs and other system-specific data is never unpacked in this + * implementation, and modes are set by default already. + */ + preserveOwner?: boolean + + /** + * The maximum depth of subfolders to extract into. This defaults to 1024. + * Anything deeper than the limit will raise a warning and skip the entry. + * Set to `Infinity` to remove the limitation. + */ + maxDepth?: number + + /** + * When extracting, force all created files and directories, and all + * implicitly created directories, to be owned by the specified user id, + * regardless of the `uid` field in the archive. + * + * Cannot be used along with `preserveOwner`. Requires also setting the `gid` + * option. 
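// Usage sketch for the extraction options documented above, assuming the
// package's top-level `extract` command accepts this options shape.
import { extract } from 'tar'
await extract({
  file: 'pkg.tgz',
  cwd: 'out',
  strip: 1,                              // a/b/c is written to out/b/c
  keep: true,                            // never overwrite existing files
  filter: path => path.endsWith('.js'),  // only extract .js entries
})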
+ */ + uid?: number + + /** + * When extracting, force all created files and directories, and all + * implicitly created directories, to be owned by the specified group id, + * regardless of the `gid` field in the archive. + * + * Cannot be used along with `preserveOwner`. Requires also setting the `uid` + * option. + */ + gid?: number + + /** + * When extracting, provide a function that takes an `entry` object, and + * returns a stream, or any falsey value. If a stream is provided, then that + * stream's data will be written instead of the contents of the archive + * entry. If a falsey value is provided, then the entry is written to disk as + * normal. + * + * To exclude items from extraction, use the `filter` option. + * + * Note that using an asynchronous stream type with the `transform` option + * will cause undefined behavior in synchronous extractions. + * [MiniPass](http://npm.im/minipass)-based streams are designed for this use + * case. + */ + transform?: (entry: ReadEntry) => any + + /** + * Call `chmod()` to ensure that extracted files match the entry's mode + * field. Without this field set, all mode fields in archive entries are a + * best effort attempt only. + * + * Setting this necessitates a call to the deprecated `process.umask()` + * method to determine the default umask value, unless a `processUmask` + * config is provided as well. + * + * If not set, tar will attempt to create file system entries with whatever + * mode is provided, and let the implicit process `umask` apply normally, but + * if a file already exists to be written to, then its existing mode will not + * be modified. + * + * When setting `chmod: true`, it is highly recommend to set the + * {@link TarOptions#processUmask} option as well, to avoid the call to the + * deprecated (and thread-unsafe) `process.umask()` method. + */ + chmod?: boolean + + /** + * When setting the {@link TarOptions#chmod} option to `true`, you may + * provide a value here to avoid having to call the deprecated and + * thread-unsafe `process.umask()` method. + * + * This has no effect with `chmod` is not set to true, as mode values are not + * set explicitly anyway. If `chmod` is set to `true`, and a value is not + * provided here, then `process.umask()` must be called, which will result in + * deprecation warnings. + * + * The most common values for this are `0o22` (resulting in directories + * created with mode `0o755` and files with `0o644` by default) and `0o2` + * (resulting in directores created with mode `0o775` and files `0o664`, so + * they are group-writable). + */ + processUmask?: number + + ////////////////////////// + // archive creation options + + /** + * When parsing/listing archives, `entry` streams are by default resumed + * (set into "flowing" mode) immediately after the call to `onReadEntry()`. + * Set `noResume: true` to suppress this behavior. + * + * Note that when this is set, the stream will never complete until the + * data is consumed somehow. + * + * Set automatically in extract operations, since the entry is piped to + * a file system entry right away. Only relevant when parsing. + */ + noResume?: boolean + + /** + * When creating, updating, or replacing within archives, this method will + * be called with each WriteEntry that is created. + */ + onWriteEntry?: (entry: WriteEntry) => any + + /** + * When extracting or listing archives, this method will be called with + * each entry that is not excluded by a `filter`. 
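// Sketch of the chmod/processUmask pairing described above (again assuming the
// top-level `extract` command): honor the archive's mode fields without
// triggering the deprecated process.umask() call.
import { extract } from 'tar'
await extract({
  file: 'pkg.tgz',
  cwd: 'out',
  chmod: true,
  processUmask: 0o22,
})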
+ * + * Important when listing archives synchronously from a file, because there + * is otherwise no way to interact with the data! + */ + onReadEntry?: (entry: ReadEntry) => any + + /** + * Pack the targets of symbolic links rather than the link itself. + */ + follow?: boolean + + /** + * When creating archives, omit any metadata that is system-specific: + * `ctime`, `atime`, `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and + * `nlink`. Note that `mtime` is still included, because this is necessary + * for other time-based operations such as `tar.update`. Additionally, `mode` + * is set to a "reasonable default" for mose unix systems, based on an + * effective `umask` of `0o22`. + * + * This also defaults the `portable` option in the gzip configs when creating + * a compressed archive, in order to produce deterministic archives that are + * not operating-system specific. + */ + portable?: boolean + + /** + * When creating archives, do not recursively archive the contents of + * directories. By default, archiving a directory archives all of its + * contents as well. + */ + noDirRecurse?: boolean + + /** + * Suppress Pax extended headers when creating archives. Note that this means + * long paths and linkpaths will be truncated, and large or negative numeric + * values may be interpreted incorrectly. + */ + noPax?: boolean + + /** + * Set to a `Date` object to force a specific `mtime` value for everything + * written to an archive. + * + * This is useful when creating archives that are intended to be + * deterministic based on their contents, irrespective of the file's last + * modification time. + * + * Overridden by `noMtime`. + */ + mtime?: Date + + /** + * A path portion to prefix onto the entries added to an archive. + */ + prefix?: string + + /** + * The mode to set on any created file archive, defaults to 0o666 + * masked by the process umask, often resulting in 0o644. + * + * This does *not* affect the mode fields of individual entries, or the + * mode status of extracted entries on the filesystem. + */ + mode?: number + + ////////////////////////// + // internal options + + /** + * A cache of mtime values, to avoid having to stat the same file repeatedly. + * + * @internal + */ + mtimeCache?: Map + + /** + * maximum buffer size for `fs.read()` operations. + * + * @internal + */ + maxReadSize?: number + + /** + * Filter modes of entries being unpacked, like `process.umask()` + * + * @internal + */ + umask?: number + + /** + * Default mode for directories. Used for all implicitly created directories, + * and any directories in the archive that do not have a mode field. + * + * @internal + */ + dmode?: number + + /** + * default mode for files + * + * @internal + */ + fmode?: number + + /** + * Map that tracks which directories already exist, for extraction + * + * @internal + */ + dirCache?: Map + /** + * maximum supported size of meta entries. Defaults to 1MB + * + * @internal + */ + maxMetaEntrySize?: number + + /** + * A Map object containing the device and inode value for any file whose + * `nlink` value is greater than 1, to identify hard links when creating + * archives. + * + * @internal + */ + linkCache?: Map + + /** + * A map object containing the results of `fs.readdir()` calls. + * + * @internal + */ + readdirCache?: Map + + /** + * A cache of all `lstat` results, for use in creating archives. + * + * @internal + */ + statCache?: Map + + /** + * Number of concurrent jobs to run when creating archives. + * + * Defaults to 4. 
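// Sketch of a reproducible archive using the creation options above, assuming
// the top-level `create` command: identical inputs yield identical bytes.
import { create } from 'tar'
await create(
  {
    file: 'pkg.tgz',
    gzip: true,                     // `portable` also sets deterministic gzip settings
    portable: true,                 // omit uid/gid/uname/gname/ctime/atime and normalize modes
    mtime: new Date('2024-01-01'),  // pin every entry's mtime
    prefix: 'package',              // entries are stored as package/<path>
  },
  ['src', 'package.json'],
)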
+ * + * @internal + */ + jobs?: number + + /** + * Automatically set to true on Windows systems. + * + * When extracting, causes behavior where filenames containing `<|>?:` + * characters are converted to windows-compatible escape sequences in the + * created filesystem entries. + * + * When packing, causes behavior where paths replace `\` with `/`, and + * filenames containing the windows-compatible escaped forms of `<|>?:` are + * converted to actual `<|>?:` characters in the archive. + * + * @internal + */ + win32?: boolean + + /** + * For `WriteEntry` objects, the absolute path to the entry on the + * filesystem. By default, this is `resolve(cwd, entry.path)`, but it can be + * overridden explicitly. + * + * @internal + */ + absolute?: string + + /** + * Used with Parser stream interface, to attach and take over when the + * stream is completely parsed. If this is set, then the prefinish, + * finish, and end events will not fire, and are the responsibility of + * the ondone method to emit properly. + * + * @internal + */ + ondone?: () => void + + /** + * Mostly for testing, but potentially useful in some cases. + * Forcibly trigger a chown on every entry, no matter what. + */ + forceChown?: boolean + + /** + * ambiguous deprecated name for {@link onReadEntry} + * + * @deprecated + */ + onentry?: (entry: ReadEntry) => any +} + +export type TarOptionsSync = TarOptions & { sync: true } +export type TarOptionsAsync = TarOptions & { sync?: false } +export type TarOptionsFile = TarOptions & { file: string } +export type TarOptionsNoFile = TarOptions & { file?: undefined } +export type TarOptionsSyncFile = TarOptionsSync & TarOptionsFile +export type TarOptionsAsyncFile = TarOptionsAsync & TarOptionsFile +export type TarOptionsSyncNoFile = TarOptionsSync & TarOptionsNoFile +export type TarOptionsAsyncNoFile = TarOptionsAsync & TarOptionsNoFile + +export type LinkCacheKey = `${number}:${number}` + +export interface TarOptionsWithAliases extends TarOptions { + /** + * The effective current working directory for this tar command + */ + C?: TarOptions['cwd'] + /** + * The tar file to be read and/or written. When this is set, a stream + * is not returned. Asynchronous commands will return a promise indicating + * when the operation is completed, and synchronous commands will return + * immediately. + */ + f?: TarOptions['file'] + /** + * When creating a tar archive, this can be used to compress it as well. + * Set to `true` to use the default gzip options, or customize them as + * needed. + * + * When reading, if this is unset, then the compression status will be + * inferred from the archive data. This is generally best, unless you are + * sure of the compression settings in use to create the archive, and want to + * fail if the archive doesn't match expectations. + */ + z?: TarOptions['gzip'] + /** + * When creating archives, preserve absolute and `..` paths in the archive, + * rather than sanitizing them under the cwd. + * + * When extracting, allow absolute paths, paths containing `..`, and + * extracting through symbolic links. By default, the root `/` is stripped + * from absolute paths (eg, turning `/x/y/z` into `x/y/z`), paths containing + * `..` are not extracted, and any file whose location would be modified by a + * symbolic link is not extracted. + * + * **WARNING** This is almost always unsafe, and must NEVER be used on + * archives from untrusted sources, such as user input, and every entry must + * be validated to ensure it is safe to write. 
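// Type-level sketch of how the narrowed option aliases exported above compose;
// the values are illustrative.
import type { TarOptionsSyncFile, TarOptionsAsyncNoFile } from './options.js'
const toFileSync: TarOptionsSyncFile = { sync: true, file: 'out.tar', cwd: '.' }
const toStream: TarOptionsAsyncNoFile = { cwd: '.' }  // neither sync nor file: the stream-returning form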
Even if the input is not + * malicious, mistakes can cause a lot of damage! + */ + P?: TarOptions['preservePaths'] + /** + * When extracting, unlink files before creating them. Without this option, + * tar overwrites existing files, which preserves existing hardlinks. With + * this option, existing hardlinks will be broken, as will any symlink that + * would affect the location of an extracted file. + */ + U?: TarOptions['unlink'] + /** + * When extracting, strip the specified number of path portions from the + * entry path. For example, with `{strip: 2}`, the entry `a/b/c/d` would be + * extracted to `{cwd}/c/d`. + */ + 'strip-components'?: TarOptions['strip'] + /** + * When extracting, strip the specified number of path portions from the + * entry path. For example, with `{strip: 2}`, the entry `a/b/c/d` would be + * extracted to `{cwd}/c/d`. + */ + stripComponents?: TarOptions['strip'] + /** + * When extracting, keep the existing file on disk if it's newer than the + * file in the archive. + */ + 'keep-newer'?: TarOptions['newer'] + /** + * When extracting, keep the existing file on disk if it's newer than the + * file in the archive. + */ + keepNewer?: TarOptions['newer'] + /** + * When extracting, keep the existing file on disk if it's newer than the + * file in the archive. + */ + 'keep-newer-files'?: TarOptions['newer'] + /** + * When extracting, keep the existing file on disk if it's newer than the + * file in the archive. + */ + keepNewerFiles?: TarOptions['newer'] + /** + * When extracting, do not overwrite existing files at all. + */ + k?: TarOptions['keep'] + /** + * When extracting, do not overwrite existing files at all. + */ + 'keep-existing'?: TarOptions['keep'] + /** + * When extracting, do not overwrite existing files at all. + */ + keepExisting?: TarOptions['keep'] + /** + * When extracting, do not set the `mtime` value for extracted entries to + * match the `mtime` in the archive. + * + * When creating archives, do not store the `mtime` value in the entry. Note + * that this prevents properly using other mtime-based features (such as + * `tar.update` or the `newer` option) with the resulting archive. + */ + m?: TarOptions['noMtime'] + /** + * When extracting, do not set the `mtime` value for extracted entries to + * match the `mtime` in the archive. + * + * When creating archives, do not store the `mtime` value in the entry. Note + * that this prevents properly using other mtime-based features (such as + * `tar.update` or the `newer` option) with the resulting archive. + */ + 'no-mtime'?: TarOptions['noMtime'] + /** + * When extracting, set the `uid` and `gid` of extracted entries to the `uid` + * and `gid` fields in the archive. Defaults to true when run as root, and + * false otherwise. + * + * If false, then files and directories will be set with the owner and group + * of the user running the process. This is similar to `-p` in `tar(1)`, but + * ACLs and other system-specific data is never unpacked in this + * implementation, and modes are set by default already. + */ + p?: TarOptions['preserveOwner'] + /** + * Pack the targets of symbolic links rather than the link itself. + */ + L?: TarOptions['follow'] + /** + * Pack the targets of symbolic links rather than the link itself. + */ + h?: TarOptions['follow'] + + /** + * Deprecated option. Set explicitly false to set `chmod: true`. Ignored + * if {@link TarOptions#chmod} is set to any boolean value. 
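// Sketch of the tar(1)-style short aliases documented above, assuming the
// top-level `extract` command accepts the aliased shape; equivalent to
// { file, cwd, keep, noMtime } after dealiasing.
import { extract } from 'tar'
await extract({ f: 'pkg.tgz', C: 'out', k: true, m: true })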
+ * + * @deprecated + */ + noChmod?: boolean +} + +export type TarOptionsWithAliasesSync = TarOptionsWithAliases & { + sync: true +} +export type TarOptionsWithAliasesAsync = TarOptionsWithAliases & { + sync?: false +} +export type TarOptionsWithAliasesFile = + | (TarOptionsWithAliases & { + file: string + }) + | (TarOptionsWithAliases & { f: string }) +export type TarOptionsWithAliasesSyncFile = + TarOptionsWithAliasesSync & TarOptionsWithAliasesFile +export type TarOptionsWithAliasesAsyncFile = + TarOptionsWithAliasesAsync & TarOptionsWithAliasesFile + +export type TarOptionsWithAliasesNoFile = TarOptionsWithAliases & { + f?: undefined + file?: undefined +} + +export type TarOptionsWithAliasesSyncNoFile = + TarOptionsWithAliasesSync & TarOptionsWithAliasesNoFile +export type TarOptionsWithAliasesAsyncNoFile = + TarOptionsWithAliasesAsync & TarOptionsWithAliasesNoFile + +export const isSyncFile = ( + o: O, +): o is O & TarOptionsSyncFile => !!o.sync && !!o.file +export const isAsyncFile = ( + o: O, +): o is O & TarOptionsAsyncFile => !o.sync && !!o.file +export const isSyncNoFile = ( + o: O, +): o is O & TarOptionsSyncNoFile => !!o.sync && !o.file +export const isAsyncNoFile = ( + o: O, +): o is O & TarOptionsAsyncNoFile => !o.sync && !o.file +export const isSync = ( + o: O, +): o is O & TarOptionsSync => !!o.sync +export const isAsync = ( + o: O, +): o is O & TarOptionsAsync => !o.sync +export const isFile = ( + o: O, +): o is O & TarOptionsFile => !!o.file +export const isNoFile = ( + o: O, +): o is O & TarOptionsNoFile => !o.file + +const dealiasKey = ( + k: keyof TarOptionsWithAliases, +): keyof TarOptions => { + const d = argmap.get(k) + if (d) return d + return k as keyof TarOptions +} + +export const dealias = ( + opt: TarOptionsWithAliases = {}, +): TarOptions => { + if (!opt) return {} + const result: Record = {} + for (const [key, v] of Object.entries(opt) as [ + keyof TarOptionsWithAliases, + any, + ][]) { + // TS doesn't know that aliases are going to always be the same type + const k = dealiasKey(key) + result[k] = v + } + // affordance for deprecated noChmod -> chmod + if (result.chmod === undefined && result.noChmod === false) { + result.chmod = true + } + delete result.noChmod + return result as TarOptions +} diff --git a/lib/pack.js b/src/pack.ts similarity index 60% rename from lib/pack.js rename to src/pack.ts index d533a068..cb721476 100644 --- a/lib/pack.js +++ b/src/pack.ts @@ -1,5 +1,3 @@ -'use strict' - // A readable tar stream creator // Technically, this is a transform stream that you write paths into, // and tar format comes out of. 
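// Sketch of dealias() and the option guards defined above, including the
// deprecated noChmod -> chmod affordance.
import { dealias, isSyncFile, isAsyncNoFile } from './options.js'
const opt = dealias({ C: '/tmp/out', f: 'pkg.tar', sync: true, noChmod: false })
// -> { cwd: '/tmp/out', file: 'pkg.tar', sync: true, chmod: true }
isSyncFile(opt)     // true: both sync and file are set
isAsyncNoFile(opt)  // false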
@@ -9,26 +7,39 @@ // You could also do something like: // streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar')) -class PackJob { - constructor (path, absolute) { +import fs, { type Stats } from 'fs' +import { + WriteEntry, + WriteEntrySync, + WriteEntryTar, +} from './write-entry.js' + +export class PackJob { + path: string + absolute: string + entry?: WriteEntry | WriteEntryTar + stat?: Stats + readdir?: string[] + pending: boolean = false + ignore: boolean = false + piped: boolean = false + constructor(path: string, absolute: string) { this.path = path || './' this.absolute = absolute - this.entry = null - this.stat = null - this.readdir = null - this.pending = false - this.ignore = false - this.piped = false } } -const { Minipass } = require('minipass') -const zlib = require('minizlib') -const ReadEntry = require('./read-entry.js') -const WriteEntry = require('./write-entry.js') -const WriteEntrySync = WriteEntry.Sync -const WriteEntryTar = WriteEntry.Tar -const Yallist = require('yallist') +import { Minipass } from 'minipass' +import * as zlib from 'minizlib' +import { Yallist } from 'yallist' +import { ReadEntry } from './read-entry.js' +import { + WarnEvent, + warnMethod, + type WarnData, + type Warner, +} from './warn-method.js' + const EOF = Buffer.alloc(1024) const ONSTAT = Symbol('onStat') const ENDED = Symbol('ended') @@ -51,15 +62,44 @@ const WRITEENTRYCLASS = Symbol('writeEntryClass') const WRITE = Symbol('write') const ONDRAIN = Symbol('ondrain') -const fs = require('fs') -const path = require('path') -const warner = require('./warn-mixin.js') -const normPath = require('./normalize-windows-path.js') - -const Pack = warner(class Pack extends Minipass { - constructor (opt) { - super(opt) - opt = opt || Object.create(null) +import path from 'path' +import { normalizeWindowsPath } from './normalize-windows-path.js' +import { TarOptions } from './options.js' + +export class Pack + extends Minipass> + implements Warner +{ + opt: TarOptions + cwd: string + maxReadSize?: number + preservePaths: boolean + strict: boolean + noPax: boolean + prefix: string + linkCache: Exclude + statCache: Exclude + file: string + portable: boolean + zip?: zlib.BrotliCompress | zlib.Gzip + readdirCache: Exclude + noDirRecurse: boolean + follow: boolean + noMtime: boolean + mtime?: Date + filter: Exclude + jobs: number; + + [WRITEENTRYCLASS]: typeof WriteEntry | typeof WriteEntrySync + onWriteEntry?: (entry: WriteEntry) => void; + [QUEUE]: Yallist; + [JOBS]: number = 0; + [PROCESSING]: boolean = false; + [ENDED]: boolean = false + + constructor(opt: TarOptions = {}) { + //@ts-ignore + super() this.opt = opt this.file = opt.file || '' this.cwd = opt.cwd || process.cwd() @@ -67,10 +107,11 @@ const Pack = warner(class Pack extends Minipass { this.preservePaths = !!opt.preservePaths this.strict = !!opt.strict this.noPax = !!opt.noPax - this.prefix = normPath(opt.prefix || '') + this.prefix = normalizeWindowsPath(opt.prefix || '') this.linkCache = opt.linkCache || new Map() this.statCache = opt.statCache || new Map() this.readdirCache = opt.readdirCache || new Map() + this.onWriteEntry = opt.onWriteEntry this[WRITEENTRYCLASS] = WriteEntry if (typeof opt.onwarn === 'function') { @@ -78,7 +119,6 @@ const Pack = warner(class Pack extends Minipass { } this.portable = !!opt.portable - this.zip = null if (opt.gzip || opt.brotli) { if (opt.gzip && opt.brotli) { @@ -99,10 +139,13 @@ const Pack = warner(class Pack extends Minipass { } this.zip = new zlib.BrotliCompress(opt.brotli) } - 
this.zip.on('data', chunk => super.write(chunk)) - this.zip.on('end', _ => super.end()) - this.zip.on('drain', _ => this[ONDRAIN]()) - this.on('resume', _ => this.zip.resume()) + /* c8 ignore next */ + if (!this.zip) throw new Error('impossible') + const zip = this.zip + zip.on('data', chunk => super.write(chunk as unknown as string)) + zip.on('end', () => super.end()) + zip.on('drain', () => this[ONDRAIN]()) + this.on('resume', () => zip.resume()) } else { this.on('drain', this[ONDRAIN]) } @@ -110,36 +153,60 @@ const Pack = warner(class Pack extends Minipass { this.noDirRecurse = !!opt.noDirRecurse this.follow = !!opt.follow this.noMtime = !!opt.noMtime - this.mtime = opt.mtime || null + if (opt.mtime) this.mtime = opt.mtime - this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true + this.filter = + typeof opt.filter === 'function' ? opt.filter : () => true - this[QUEUE] = new Yallist() + this[QUEUE] = new Yallist() this[JOBS] = 0 - this.jobs = +opt.jobs || 4 + this.jobs = Number(opt.jobs) || 4 this[PROCESSING] = false this[ENDED] = false } - [WRITE] (chunk) { - return super.write(chunk) + [WRITE](chunk: Buffer) { + return super.write(chunk as unknown as string) } - add (path) { + add(path: string | ReadEntry) { this.write(path) return this } - end (path) { + end(cb?: () => void): this + end(path: string | ReadEntry, cb?: () => void): this + end( + path: string | ReadEntry, + encoding?: Minipass.Encoding, + cb?: () => void, + ): this + end( + path?: string | ReadEntry | (() => void), + encoding?: Minipass.Encoding | (() => void), + cb?: () => void, + ) { + /* c8 ignore start */ + if (typeof path === 'function') { + cb = path + path = undefined + } + if (typeof encoding === 'function') { + cb = encoding + encoding = undefined + } + /* c8 ignore stop */ if (path) { - this.write(path) + this.add(path) } this[ENDED] = true this[PROCESS]() + /* c8 ignore next */ + if (cb) cb() return this } - write (path) { + write(path: string | ReadEntry) { if (this[ENDED]) { throw new Error('write after end') } @@ -152,15 +219,17 @@ const Pack = warner(class Pack extends Minipass { return this.flowing } - [ADDTARENTRY] (p) { - const absolute = normPath(path.resolve(this.cwd, p.path)) + [ADDTARENTRY](p: ReadEntry) { + const absolute = normalizeWindowsPath( + path.resolve(this.cwd, p.path), + ) // in this case, we don't have to wait for the stat if (!this.filter(p.path, p)) { p.resume() } else { - const job = new PackJob(p.path, absolute, false) + const job = new PackJob(p.path, absolute) job.entry = new WriteEntryTar(p, this[ENTRYOPT](job)) - job.entry.on('end', _ => this[JOBDONE](job)) + job.entry.on('end', () => this[JOBDONE](job)) this[JOBS] += 1 this[QUEUE].push(job) } @@ -168,13 +237,13 @@ const Pack = warner(class Pack extends Minipass { this[PROCESS]() } - [ADDFSENTRY] (p) { - const absolute = normPath(path.resolve(this.cwd, p)) + [ADDFSENTRY](p: string) { + const absolute = normalizeWindowsPath(path.resolve(this.cwd, p)) this[QUEUE].push(new PackJob(p, absolute)) this[PROCESS]() } - [STAT] (job) { + [STAT](job: PackJob) { job.pending = true this[JOBS] += 1 const stat = this.follow ? 
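// Minimal usage sketch for the Pack stream above, assuming it is re-exported
// from the package entry point (it can also be imported from its own module):
// paths are written in, tar bytes come out.
import fs from 'node:fs'
import { Pack } from 'tar'
const pack = new Pack({ cwd: 'src', portable: true })
pack.pipe(fs.createWriteStream('out.tar'))
pack.add('index.ts')
pack.end('util.ts')   // end() accepts one final path before finishing the archive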
'stat' : 'lstat' @@ -189,7 +258,7 @@ const Pack = warner(class Pack extends Minipass { }) } - [ONSTAT] (job, stat) { + [ONSTAT](job: PackJob, stat: Stats) { this.statCache.set(job.absolute, stat) job.stat = stat @@ -201,7 +270,7 @@ const Pack = warner(class Pack extends Minipass { this[PROCESS]() } - [READDIR] (job) { + [READDIR](job: PackJob) { job.pending = true this[JOBS] += 1 fs.readdir(job.absolute, (er, entries) => { @@ -214,21 +283,23 @@ const Pack = warner(class Pack extends Minipass { }) } - [ONREADDIR] (job, entries) { + [ONREADDIR](job: PackJob, entries: string[]) { this.readdirCache.set(job.absolute, entries) job.readdir = entries this[PROCESS]() } - [PROCESS] () { + [PROCESS]() { if (this[PROCESSING]) { return } this[PROCESSING] = true - for (let w = this[QUEUE].head; - w !== null && this[JOBS] < this.jobs; - w = w.next) { + for ( + let w = this[QUEUE].head; + !!w && this[JOBS] < this.jobs; + w = w.next + ) { this[PROCESSJOB](w.value) if (w.value.ignore) { const p = w.next @@ -243,23 +314,23 @@ const Pack = warner(class Pack extends Minipass { if (this.zip) { this.zip.end(EOF) } else { - super.write(EOF) + super.write(EOF as unknown as string) super.end() } } } - get [CURRENT] () { + get [CURRENT]() { return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value } - [JOBDONE] (job) { + [JOBDONE](_job: PackJob) { this[QUEUE].shift() this[JOBS] -= 1 this[PROCESS]() } - [PROCESSJOB] (job) { + [PROCESSJOB](job: PackJob) { if (job.pending) { return } @@ -272,8 +343,9 @@ const Pack = warner(class Pack extends Minipass { } if (!job.stat) { - if (this.statCache.has(job.absolute)) { - this[ONSTAT](job, this.statCache.get(job.absolute)) + const sc = this.statCache.get(job.absolute) + if (sc) { + this[ONSTAT](job, sc) } else { this[STAT](job) } @@ -287,9 +359,14 @@ const Pack = warner(class Pack extends Minipass { return } - if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) { - if (this.readdirCache.has(job.absolute)) { - this[ONREADDIR](job, this.readdirCache.get(job.absolute)) + if ( + !this.noDirRecurse && + job.stat.isDirectory() && + !job.readdir + ) { + const rc = this.readdirCache.get(job.absolute) + if (rc) { + this[ONREADDIR](job, rc) } else { this[READDIR](job) } @@ -310,7 +387,7 @@ const Pack = warner(class Pack extends Minipass { } } - [ENTRYOPT] (job) { + [ENTRYOPT](job: PackJob): TarOptions { return { onwarn: (code, msg, data) => this.warn(code, msg, data), noPax: this.noPax, @@ -325,13 +402,18 @@ const Pack = warner(class Pack extends Minipass { noMtime: this.noMtime, mtime: this.mtime, prefix: this.prefix, + onWriteEntry: this.onWriteEntry, } } - [ENTRY] (job) { + [ENTRY](job: PackJob) { this[JOBS] += 1 try { - return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)) + const e = new this[WRITEENTRYCLASS]( + job.path, + this[ENTRYOPT](job), + ) + return e .on('end', () => this[JOBDONE](job)) .on('error', er => this.emit('error', er)) } catch (er) { @@ -339,14 +421,14 @@ const Pack = warner(class Pack extends Minipass { } } - [ONDRAIN] () { + [ONDRAIN]() { if (this[CURRENT] && this[CURRENT].entry) { this[CURRENT].entry.resume() } } // like .pipe() but using super, because our write() is special - [PIPE] (job) { + [PIPE](job: PackJob) { job.piped = true if (job.readdir) { @@ -359,6 +441,9 @@ const Pack = warner(class Pack extends Minipass { const source = job.entry const zip = this.zip + /* c8 ignore start */ + if (!source) throw new Error('cannot pipe without source') + /* c8 ignore stop */ if (zip) { source.on('data', chunk => { @@ -368,42 +453,50 
@@ const Pack = warner(class Pack extends Minipass { }) } else { source.on('data', chunk => { - if (!super.write(chunk)) { + if (!super.write(chunk as unknown as string)) { source.pause() } }) } } - pause () { + pause() { if (this.zip) { this.zip.pause() } return super.pause() } -}) + warn( + code: string, + message: string | Error, + data: WarnData = {}, + ): void { + warnMethod(this, code, message, data) + } +} -class PackSync extends Pack { - constructor (opt) { +export class PackSync extends Pack { + sync: true = true + constructor(opt: TarOptions) { super(opt) this[WRITEENTRYCLASS] = WriteEntrySync } // pause/resume are no-ops in sync streams. - pause () {} - resume () {} + pause() {} + resume() {} - [STAT] (job) { + [STAT](job: PackJob) { const stat = this.follow ? 'statSync' : 'lstatSync' this[ONSTAT](job, fs[stat](job.absolute)) } - [READDIR] (job, stat) { + [READDIR](job: PackJob) { this[ONREADDIR](job, fs.readdirSync(job.absolute)) } // gotta get it all in this tick - [PIPE] (job) { + [PIPE](job: PackJob) { const source = job.entry const zip = this.zip @@ -415,6 +508,10 @@ class PackSync extends Pack { }) } + /* c8 ignore start */ + if (!source) throw new Error('Cannot pipe without source') + /* c8 ignore stop */ + if (zip) { source.on('data', chunk => { zip.write(chunk) @@ -426,7 +523,3 @@ class PackSync extends Pack { } } } - -Pack.Sync = PackSync - -module.exports = Pack diff --git a/lib/parse.js b/src/parse.ts similarity index 60% rename from lib/parse.js rename to src/parse.ts index 94e53042..b4db277d 100644 --- a/lib/parse.js +++ b/src/parse.ts @@ -1,5 +1,3 @@ -'use strict' - // this[BUFFER] is the remainder of a chunk if we're waiting for // the full 512 bytes of a header to come in. We will Buffer.concat() // it to the next write(), which is a mem copy, but a small one. 
@@ -20,17 +18,22 @@ // // ignored entries get .resume() called on them straight away -const warner = require('./warn-mixin.js') -const Header = require('./header.js') -const EE = require('events') -const Yallist = require('yallist') -const maxMetaEntrySize = 1024 * 1024 -const Entry = require('./read-entry.js') -const Pax = require('./pax.js') -const zlib = require('minizlib') -const { nextTick } = require('process') +import { EventEmitter as EE } from 'events' +import { BrotliDecompress, Unzip } from 'minizlib' +import { Yallist } from 'yallist' +import { Header } from './header.js' +import { TarOptions } from './options.js' +import { Pax } from './pax.js' +import { ReadEntry } from './read-entry.js' +import { + warnMethod, + type WarnData, + type Warner, +} from './warn-method.js' +const maxMetaEntrySize = 1024 * 1024 const gzipHeader = Buffer.from([0x1f, 0x8b]) + const STATE = Symbol('state') const WRITEENTRY = Symbol('writeEntry') const READENTRY = Symbol('readEntry') @@ -62,22 +65,50 @@ const SAW_NULL_BLOCK = Symbol('sawNullBlock') const SAW_EOF = Symbol('sawEOF') const CLOSESTREAM = Symbol('closeStream') -const noop = _ => true - -module.exports = warner(class Parser extends EE { - constructor (opt) { - opt = opt || {} - super(opt) +const noop = () => true + +export type State = 'begin' | 'header' | 'ignore' | 'meta' | 'body' + +export class Parser extends EE implements Warner { + file: string + strict: boolean + maxMetaEntrySize: number + filter: Exclude + brotli?: TarOptions['brotli'] + + writable: true = true + readable: false = false; + + [QUEUE]: Yallist = + new Yallist(); + [BUFFER]?: Buffer; + [READENTRY]?: ReadEntry; + [WRITEENTRY]?: ReadEntry; + [STATE]: State = 'begin'; + [META]: string = ''; + [EX]?: Pax; + [GEX]?: Pax; + [ENDED]: boolean = false; + [UNZIP]?: false | Unzip | BrotliDecompress; + [ABORTED]: boolean = false; + [SAW_VALID_ENTRY]?: boolean; + [SAW_NULL_BLOCK]: boolean = false; + [SAW_EOF]: boolean = false; + [WRITING]: boolean = false; + [CONSUMING]: boolean = false; + [EMITTEDEND]: boolean = false + + constructor(opt: TarOptions = {}) { + super() this.file = opt.file || '' - // set to boolean false when an entry starts. 1024 bytes of \0 - // is technically a valid tarball, albeit a boring one. - this[SAW_VALID_ENTRY] = null - // these BADARCHIVE errors can't be detected early. listen on DONE. - this.on(DONE, _ => { - if (this[STATE] === 'begin' || this[SAW_VALID_ENTRY] === false) { + this.on(DONE, () => { + if ( + this[STATE] === 'begin' || + this[SAW_VALID_ENTRY] === false + ) { // either less than 1 block of data, or all entries were invalid. // Either way, probably not even a tarball. this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format') @@ -87,7 +118,7 @@ module.exports = warner(class Parser extends EE { if (opt.ondone) { this.on(DONE, opt.ondone) } else { - this.on(DONE, _ => { + this.on(DONE, () => { this.emit('prefinish') this.emit('finish') this.emit('end') @@ -100,51 +131,44 @@ module.exports = warner(class Parser extends EE { // Unlike gzip, brotli doesn't have any magic bytes to identify it // Users need to explicitly tell us they're extracting a brotli file // Or we infer from the file extension - const isTBR = (opt.file && ( - opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'))) + const isTBR = + opt.file && + (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr')) // if it's a tbr file it MIGHT be brotli, but we don't know until // we look at it and verify it's not a valid tar file. 
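// Minimal usage sketch for the Parser above, assuming it is re-exported from
// the package entry point. gzip is sniffed from the stream; brotli can only be
// inferred from a .tbr / .tar.br filename or set explicitly.
import fs from 'node:fs'
import { Parser } from 'tar'
const parser = new Parser({
  onReadEntry: entry => {
    console.log(entry.path, entry.size)
    entry.resume()  // consume the body so the next entry can be parsed
  },
})
fs.createReadStream('pkg.tgz').pipe(parser)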
- this.brotli = !opt.gzip && opt.brotli !== undefined ? opt.brotli + this.brotli = + !opt.gzip && opt.brotli !== undefined ? opt.brotli : isTBR ? undefined : false // have to set this so that streams are ok piping into it - this.writable = true - this.readable = false - - this[QUEUE] = new Yallist() - this[BUFFER] = null - this[READENTRY] = null - this[WRITEENTRY] = null - this[STATE] = 'begin' - this[META] = '' - this[EX] = null - this[GEX] = null - this[ENDED] = false - this[UNZIP] = null - this[ABORTED] = false - this[SAW_NULL_BLOCK] = false - this[SAW_EOF] = false - this.on('end', () => this[CLOSESTREAM]()) if (typeof opt.onwarn === 'function') { this.on('warn', opt.onwarn) } - if (typeof opt.onentry === 'function') { - this.on('entry', opt.onentry) + if (typeof opt.onReadEntry === 'function') { + this.on('entry', opt.onReadEntry) } } - [CONSUMEHEADER] (chunk, position) { - if (this[SAW_VALID_ENTRY] === null) { + warn( + code: string, + message: string | Error, + data: WarnData = {}, + ): void { + warnMethod(this, code, message, data) + } + + [CONSUMEHEADER](chunk: Buffer, position: number) { + if (this[SAW_VALID_ENTRY] === undefined) { this[SAW_VALID_ENTRY] = false } let header try { header = new Header(chunk, position, this[EX], this[GEX]) } catch (er) { - return this.warn('TAR_ENTRY_INVALID', er) + return this.warn('TAR_ENTRY_INVALID', er as Error) } if (header.nullBlock) { @@ -168,11 +192,23 @@ module.exports = warner(class Parser extends EE { } else { const type = header.type if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) { - this.warn('TAR_ENTRY_INVALID', 'linkpath required', { header }) - } else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath) { - this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { header }) + this.warn('TAR_ENTRY_INVALID', 'linkpath required', { + header, + }) + } else if ( + !/^(Symbolic)?Link$/.test(type) && + !/^(Global)?ExtendedHeader$/.test(type) && + header.linkpath + ) { + this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { + header, + }) } else { - const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX]) + const entry = (this[WRITEENTRY] = new ReadEntry( + header, + this[EX], + this[GEX], + )) // we do this for meta & ignored entries as well, because they // are still valid tar, or else we wouldn't know to ignore them @@ -198,12 +234,13 @@ module.exports = warner(class Parser extends EE { entry.resume() } else if (entry.size > 0) { this[META] = '' - entry.on('data', c => this[META] += c) + entry.on('data', c => (this[META] += c)) this[STATE] = 'meta' } } else { - this[EX] = null - entry.ignore = entry.ignore || !this.filter(entry.path, entry) + this[EX] = undefined + entry.ignore = + entry.ignore || !this.filter(entry.path, entry) if (entry.ignore) { // probably valid, just not something we care about @@ -231,23 +268,24 @@ module.exports = warner(class Parser extends EE { } } - [CLOSESTREAM] () { - nextTick(() => this.emit('close')) + [CLOSESTREAM]() { + queueMicrotask(() => this.emit('close')) } - [PROCESSENTRY] (entry) { + [PROCESSENTRY](entry?: ReadEntry | [string | symbol, any, any]) { let go = true if (!entry) { - this[READENTRY] = null + this[READENTRY] = undefined go = false } else if (Array.isArray(entry)) { - this.emit.apply(this, entry) + const [ev, ...args]: [string | symbol, any, any] = entry + this.emit(ev, ...args) } else { this[READENTRY] = entry this.emit('entry', entry) if (!entry.emittedEnd) { - entry.on('end', _ => this[NEXTENTRY]()) + entry.on('end', () => this[NEXTENTRY]()) go = false } } @@ 
-255,7 +293,7 @@ module.exports = warner(class Parser extends EE { return go } - [NEXTENTRY] () { + [NEXTENTRY]() { do {} while (this[PROCESSENTRY](this[QUEUE].shift())) if (!this[QUEUE].length) { @@ -273,42 +311,49 @@ module.exports = warner(class Parser extends EE { this.emit('drain') } } else { - re.once('drain', _ => this.emit('drain')) + re.once('drain', () => this.emit('drain')) } } } - [CONSUMEBODY] (chunk, position) { + [CONSUMEBODY](chunk: Buffer, position: number) { // write up to but no more than writeEntry.blockRemain const entry = this[WRITEENTRY] - const br = entry.blockRemain - const c = (br >= chunk.length && position === 0) ? chunk - : chunk.slice(position, position + br) + /* c8 ignore start */ + if (!entry) { + throw new Error('attempt to consume body without entry??') + } + const br = entry.blockRemain ?? 0 + /* c8 ignore stop */ + const c = + br >= chunk.length && position === 0 ? + chunk + : chunk.subarray(position, position + br) entry.write(c) if (!entry.blockRemain) { this[STATE] = 'header' - this[WRITEENTRY] = null + this[WRITEENTRY] = undefined entry.end() } return c.length } - [CONSUMEMETA] (chunk, position) { + [CONSUMEMETA](chunk: Buffer, position: number) { const entry = this[WRITEENTRY] const ret = this[CONSUMEBODY](chunk, position) // if we finished, then the entry is reset - if (!this[WRITEENTRY]) { + if (!this[WRITEENTRY] && entry) { this[EMITMETA](entry) } return ret } - [EMIT] (ev, data, extra) { + [EMIT](ev: string | symbol, data?: any, extra?: any) { if (!this[QUEUE].length && !this[READENTRY]) { this.emit(ev, data, extra) } else { @@ -316,7 +361,7 @@ module.exports = warner(class Parser extends EE { } } - [EMITMETA] (entry) { + [EMITMETA](entry: ReadEntry) { this[EMIT]('meta', this[META]) switch (entry.type) { case 'ExtendedHeader': @@ -329,48 +374,87 @@ module.exports = warner(class Parser extends EE { break case 'NextFileHasLongPath': - case 'OldGnuLongPath': - this[EX] = this[EX] || Object.create(null) - this[EX].path = this[META].replace(/\0.*/, '') + case 'OldGnuLongPath': { + const ex = this[EX] ?? Object.create(null) + this[EX] = ex + ex.path = this[META].replace(/\0.*/, '') break + } - case 'NextFileHasLongLinkpath': - this[EX] = this[EX] || Object.create(null) - this[EX].linkpath = this[META].replace(/\0.*/, '') + case 'NextFileHasLongLinkpath': { + const ex = this[EX] || Object.create(null) + this[EX] = ex + ex.linkpath = this[META].replace(/\0.*/, '') break + } - /* istanbul ignore next */ - default: throw new Error('unknown meta: ' + entry.type) + /* c8 ignore start */ + default: + throw new Error('unknown meta: ' + entry.type) + /* c8 ignore stop */ } } - abort (error) { + abort(error: Error) { this[ABORTED] = true this.emit('abort', error) // always throws, even in non-strict mode this.warn('TAR_ABORT', error, { recoverable: false }) } - write (chunk) { + write( + buffer: Uint8Array | string, + cb?: (err?: Error | null) => void, + ): boolean + write( + str: string, + encoding?: BufferEncoding, + cb?: (err?: Error | null) => void, + ): boolean + write( + chunk: Buffer | string, + encoding?: BufferEncoding | (() => any), + cb?: () => any, + ): boolean { + if (typeof encoding === 'function') { + cb = encoding + encoding = undefined + } + if (typeof chunk === 'string') { + chunk = Buffer.from( + chunk, + /* c8 ignore next */ + typeof encoding === 'string' ? 
encoding : 'utf8', + ) + } if (this[ABORTED]) { - return + /* c8 ignore next */ + cb?.() + return false } // first write, might be gzipped - const needSniff = this[UNZIP] === null || - this.brotli === undefined && this[UNZIP] === false + const needSniff = + this[UNZIP] === undefined || + (this.brotli === undefined && this[UNZIP] === false) if (needSniff && chunk) { if (this[BUFFER]) { chunk = Buffer.concat([this[BUFFER], chunk]) - this[BUFFER] = null + this[BUFFER] = undefined } if (chunk.length < gzipHeader.length) { this[BUFFER] = chunk + /* c8 ignore next */ + cb?.() return true } // look for gzip header - for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) { + for ( + let i = 0; + this[UNZIP] === undefined && i < gzipHeader.length; + i++ + ) { if (chunk[i] !== gzipHeader[i]) { this[UNZIP] = false } @@ -387,13 +471,15 @@ module.exports = warner(class Parser extends EE { this.brotli = true } else { this[BUFFER] = chunk + /* c8 ignore next */ + cb?.() return true } } else { // if it's tar, it's pretty reliably not brotli, chances of // that happening are astronomical. try { - new Header(chunk.slice(0, 512)) + new Header(chunk.subarray(0, 512)) this.brotli = false } catch (_) { this.brotli = true @@ -401,21 +487,26 @@ module.exports = warner(class Parser extends EE { } } - if (this[UNZIP] === null || (this[UNZIP] === false && this.brotli)) { + if ( + this[UNZIP] === undefined || + (this[UNZIP] === false && this.brotli) + ) { const ended = this[ENDED] this[ENDED] = false - this[UNZIP] = this[UNZIP] === null - ? new zlib.Unzip() - : new zlib.BrotliDecompress() + this[UNZIP] = + this[UNZIP] === undefined ? + new Unzip({}) + : new BrotliDecompress({}) this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk)) - this[UNZIP].on('error', er => this.abort(er)) - this[UNZIP].on('end', _ => { + this[UNZIP].on('error', er => this.abort(er as Error)) + this[UNZIP].on('end', () => { this[ENDED] = true this[CONSUMECHUNK]() }) this[WRITING] = true - const ret = this[UNZIP][ended ? 'end' : 'write'](chunk) + const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk) this[WRITING] = false + cb?.() return ret } } @@ -430,36 +521,44 @@ module.exports = warner(class Parser extends EE { // return false if there's a queue, or if the current entry isn't flowing const ret = - this[QUEUE].length ? false : - this[READENTRY] ? this[READENTRY].flowing : - true + this[QUEUE].length ? false + : this[READENTRY] ? this[READENTRY].flowing + : true // if we have no queue, then that means a clogged READENTRY if (!ret && !this[QUEUE].length) { - this[READENTRY].once('drain', _ => this.emit('drain')) + this[READENTRY]?.once('drain', () => this.emit('drain')) } + /* c8 ignore next */ + cb?.() return ret } - [BUFFERCONCAT] (c) { + [BUFFERCONCAT](c: Buffer) { if (c && !this[ABORTED]) { - this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c + this[BUFFER] = + this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c } } - [MAYBEEND] () { - if (this[ENDED] && - !this[EMITTEDEND] && - !this[ABORTED] && - !this[CONSUMING]) { + [MAYBEEND]() { + if ( + this[ENDED] && + !this[EMITTEDEND] && + !this[ABORTED] && + !this[CONSUMING] + ) { this[EMITTEDEND] = true const entry = this[WRITEENTRY] if (entry && entry.blockRemain) { // truncated, likely a damaged file const have = this[BUFFER] ? 
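// Standalone illustration of the sniffing rule used by write() above: gzip is
// recognized by its two magic bytes, while brotli has no magic bytes and is
// only attempted when the filename suggests it and the first 512 bytes do not
// parse as a tar header.
const GZIP_MAGIC = Buffer.from([0x1f, 0x8b])
const looksGzipped = (chunk: Buffer): boolean =>
  chunk.length >= GZIP_MAGIC.length &&
  chunk[0] === GZIP_MAGIC[0] &&
  chunk[1] === GZIP_MAGIC[1]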
this[BUFFER].length : 0 - this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${ - entry.blockRemain} more bytes, only ${have} available)`, { entry }) + this.warn( + 'TAR_BAD_ARCHIVE', + `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, + { entry }, + ) if (this[BUFFER]) { entry.write(this[BUFFER]) } @@ -469,28 +568,30 @@ module.exports = warner(class Parser extends EE { } } - [CONSUMECHUNK] (chunk) { - if (this[CONSUMING]) { + [CONSUMECHUNK](chunk?: Buffer) { + if (this[CONSUMING] && chunk) { this[BUFFERCONCAT](chunk) } else if (!chunk && !this[BUFFER]) { this[MAYBEEND]() - } else { + } else if (chunk) { this[CONSUMING] = true if (this[BUFFER]) { this[BUFFERCONCAT](chunk) const c = this[BUFFER] - this[BUFFER] = null + this[BUFFER] = undefined this[CONSUMECHUNKSUB](c) } else { this[CONSUMECHUNKSUB](chunk) } - while (this[BUFFER] && - this[BUFFER].length >= 512 && - !this[ABORTED] && - !this[SAW_EOF]) { + while ( + this[BUFFER] && + (this[BUFFER] as Buffer)?.length >= 512 && + !this[ABORTED] && + !this[SAW_EOF] + ) { const c = this[BUFFER] - this[BUFFER] = null + this[BUFFER] = undefined this[CONSUMECHUNKSUB](c) } this[CONSUMING] = false @@ -501,12 +602,16 @@ module.exports = warner(class Parser extends EE { } } - [CONSUMECHUNKSUB] (chunk) { + [CONSUMECHUNKSUB](chunk: Buffer) { // we know that we are in CONSUMING mode, so anything written goes into // the buffer. Advance the position and put any remainder in the buffer. let position = 0 const length = chunk.length - while (position + 512 <= length && !this[ABORTED] && !this[SAW_EOF]) { + while ( + position + 512 <= length && + !this[ABORTED] && + !this[SAW_EOF] + ) { switch (this[STATE]) { case 'begin': case 'header': @@ -523,30 +628,60 @@ module.exports = warner(class Parser extends EE { position += this[CONSUMEMETA](chunk, position) break - /* istanbul ignore next */ + /* c8 ignore start */ default: throw new Error('invalid state: ' + this[STATE]) + /* c8 ignore stop */ } } if (position < length) { if (this[BUFFER]) { - this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]]) + this[BUFFER] = Buffer.concat([ + chunk.subarray(position), + this[BUFFER], + ]) } else { - this[BUFFER] = chunk.slice(position) + this[BUFFER] = chunk.subarray(position) } } } - end (chunk) { + end(cb?: () => void): this + end(data: string | Buffer, cb?: () => void): this + end(str: string, encoding?: BufferEncoding, cb?: () => void): this + end( + chunk?: string | Buffer | (() => void), + encoding?: BufferEncoding | (() => void), + cb?: () => void, + ) { + if (typeof chunk === 'function') { + cb = chunk + encoding = undefined + chunk = undefined + } + if (typeof encoding === 'function') { + cb = encoding + encoding = undefined + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding) + } + if (cb) this.once('finish', cb) if (!this[ABORTED]) { if (this[UNZIP]) { - this[UNZIP].end(chunk) + /* c8 ignore start */ + if (chunk) this[UNZIP].write(chunk) + /* c8 ignore stop */ + this[UNZIP].end() } else { this[ENDED] = true - if (this.brotli === undefined) chunk = chunk || Buffer.alloc(0) - this.write(chunk) + if (this.brotli === undefined) + chunk = chunk || Buffer.alloc(0) + if (chunk) this.write(chunk) + this[MAYBEEND]() } } + return this } -}) +} diff --git a/src/path-reservations.ts b/src/path-reservations.ts new file mode 100644 index 00000000..9edc7fb6 --- /dev/null +++ b/src/path-reservations.ts @@ -0,0 +1,195 @@ +// A path exclusive reservation system +// reserve([list, of, paths], fn) +// When 
the fn is first in line for all its paths, it
+// is called with a cb that clears the reservation.
+//
+// Used by async unpack to avoid clobbering paths in use,
+// while still allowing maximal safe parallelization.
+
+import { join } from 'node:path'
+import { normalizeUnicode } from './normalize-unicode.js'
+import { stripTrailingSlashes } from './strip-trailing-slashes.js'
+
+const platform =
+  process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
+const isWindows = platform === 'win32'
+
+export type Reservation = {
+  paths: string[]
+  dirs: Set<string>
+}
+
+export type Handler = (clear: () => void) => void
+
+// return a set of parent dirs for a given path
+// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
+const getDirs = (path: string) => {
+  const dirs = path
+    .split('/')
+    .slice(0, -1)
+    .reduce((set: string[], path) => {
+      const s = set[set.length - 1]
+      if (s !== undefined) {
+        path = join(s, path)
+      }
+      set.push(path || '/')
+      return set
+    }, [])
+  return dirs
+}
+
+export class PathReservations {
+  // path => [function or Set]
+  // A Set object means a directory reservation
+  // A fn is a direct reservation on that path
+  #queues = new Map<string, (Handler | Set<Handler>)[]>()
+
+  // fn => {paths:[path,...], dirs:[path, ...]}
+  #reservations = new Map<Handler, Reservation>()
+
+  // functions currently running
+  #running = new Set<Handler>()
+
+  reserve(paths: string[], fn: Handler) {
+    paths =
+      isWindows ?
+        ['win32 parallelization disabled']
+      : paths.map(p => {
+          // don't need normPath, because we skip this entirely for windows
+          return stripTrailingSlashes(
+            join(normalizeUnicode(p)),
+          ).toLowerCase()
+        })
+
+    const dirs = new Set(
+      paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)),
+    )
+    this.#reservations.set(fn, { dirs, paths })
+    for (const p of paths) {
+      const q = this.#queues.get(p)
+      if (!q) {
+        this.#queues.set(p, [fn])
+      } else {
+        q.push(fn)
+      }
+    }
+    for (const dir of dirs) {
+      const q = this.#queues.get(dir)
+      if (!q) {
+        this.#queues.set(dir, [new Set([fn])])
+      } else {
+        const l = q[q.length - 1]
+        if (l instanceof Set) {
+          l.add(fn)
+        } else {
+          q.push(new Set([fn]))
+        }
+      }
+    }
+    return this.#run(fn)
+  }
+
+  // return the queues for each path the function cares about
+  // fn => {paths, dirs}
+  #getQueues(fn: Handler): {
+    paths: Handler[][]
+    dirs: (Handler | Set<Handler>)[][]
+  } {
+    const res = this.#reservations.get(fn)
+    /* c8 ignore start */
+    if (!res) {
+      throw new Error('function does not have any path reservations')
+    }
+    /* c8 ignore stop */
+    return {
+      paths: res.paths.map((path: string) =>
+        this.#queues.get(path),
+      ) as Handler[][],
+      dirs: [...res.dirs].map(path => this.#queues.get(path)) as (
+        | Handler
+        | Set<Handler>
+      )[][],
+    }
+  }
+
+  // check if fn is first in line for all its paths, and is
+  // included in the first set for all its dir queues
+  check(fn: Handler) {
+    const { paths, dirs } = this.#getQueues(fn)
+    return (
+      paths.every(q => q && q[0] === fn) &&
+      dirs.every(q => q && q[0] instanceof Set && q[0].has(fn))
+    )
+  }
+
+  // run the function if it's first in line and not already running
+  #run(fn: Handler) {
+    if (this.#running.has(fn) || !this.check(fn)) {
+      return false
+    }
+    this.#running.add(fn)
+    fn(() => this.#clear(fn))
+    return true
+  }
+
+  #clear(fn: Handler) {
+    if (!this.#running.has(fn)) {
+      return false
+    }
+    const res = this.#reservations.get(fn)
+    /* c8 ignore start */
+    if (!res) {
+      throw new Error('invalid reservation')
+    }
+    /* c8 ignore stop */
+    const { paths, dirs } = res
+
+    const next = new Set<Handler>()
+    for (const path of paths) {
+      const q =
this.#queues.get(path) + /* c8 ignore start */ + if (!q || q?.[0] !== fn) { + continue + } + /* c8 ignore stop */ + const q0 = q[1] + if (!q0) { + this.#queues.delete(path) + continue + } + q.shift() + if (typeof q0 === 'function') { + next.add(q0) + } else { + for (const f of q0) { + next.add(f) + } + } + } + + for (const dir of dirs) { + const q = this.#queues.get(dir) + const q0 = q?.[0] + /* c8 ignore next - type safety only */ + if (!q || !(q0 instanceof Set)) continue + if (q0.size === 1 && q.length === 1) { + this.#queues.delete(dir) + continue + } else if (q0.size === 1) { + q.shift() + // next one must be a function, + // or else the Set would've been reused + const n = q[0] + if (typeof n === 'function') { + next.add(n) + } + } else { + q0.delete(fn) + } + } + + this.#running.delete(fn) + next.forEach(fn => this.#run(fn)) + return true + } +} diff --git a/lib/pax.js b/src/pax.ts similarity index 54% rename from lib/pax.js rename to src/pax.ts index 4a7ca853..f72e85c8 100644 --- a/lib/pax.js +++ b/src/pax.ts @@ -1,31 +1,52 @@ -'use strict' -const Header = require('./header.js') -const path = require('path') - -class Pax { - constructor (obj, global) { - this.atime = obj.atime || null - this.charset = obj.charset || null - this.comment = obj.comment || null - this.ctime = obj.ctime || null - this.gid = obj.gid || null - this.gname = obj.gname || null - this.linkpath = obj.linkpath || null - this.mtime = obj.mtime || null - this.path = obj.path || null - this.size = obj.size || null - this.uid = obj.uid || null - this.uname = obj.uname || null - this.dev = obj.dev || null - this.ino = obj.ino || null - this.nlink = obj.nlink || null - this.global = global || false +import { basename } from 'node:path' +import { Header, HeaderData } from './header.js' + +export class Pax implements HeaderData { + atime?: Date + mtime?: Date + ctime?: Date + + charset?: string + comment?: string + + gid?: number + uid?: number + + gname?: string + uname?: string + linkpath?: string + dev?: number + ino?: number + nlink?: number + path?: string + size?: number + mode?: number + + global: boolean + + constructor(obj: HeaderData, global: boolean = false) { + this.atime = obj.atime + this.charset = obj.charset + this.comment = obj.comment + this.ctime = obj.ctime + this.dev = obj.dev + this.gid = obj.gid + this.global = global + this.gname = obj.gname + this.ino = obj.ino + this.linkpath = obj.linkpath + this.mtime = obj.mtime + this.nlink = obj.nlink + this.path = obj.path + this.size = obj.size + this.uid = obj.uid + this.uname = obj.uname } - encode () { + encode() { const body = this.encodeBody() if (body === '') { - return null + return Buffer.allocUnsafe(0) } const bodyLen = Buffer.byteLength(body) @@ -43,20 +64,22 @@ class Pax { // XXX split the path // then the path should be PaxHeader + basename, but less than 99, // prepend with the dirname - path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99), + /* c8 ignore start */ + path: ('PaxHeader/' + basename(this.path ?? '')).slice(0, 99), + /* c8 ignore stop */ mode: this.mode || 0o644, - uid: this.uid || null, - gid: this.gid || null, + uid: this.uid, + gid: this.gid, size: bodyLen, - mtime: this.mtime || null, + mtime: this.mtime, type: this.global ? 
'GlobalExtendedHeader' : 'ExtendedHeader', linkpath: '', uname: this.uname || '', gname: this.gname || '', devmaj: 0, devmin: 0, - atime: this.atime || null, - ctime: this.ctime || null, + atime: this.atime, + ctime: this.ctime, }).encode(buf) buf.write(body, 512, bodyLen, 'utf8') @@ -69,7 +92,7 @@ class Pax { return buf } - encodeBody () { + encodeBody() { return ( this.encodeField('path') + this.encodeField('ctime') + @@ -89,16 +112,21 @@ class Pax { ) } - encodeField (field) { - if (this[field] === null || this[field] === undefined) { + encodeField(field: keyof Pax): string { + if (this[field] === undefined) { return '' } - const v = this[field] instanceof Date ? this[field].getTime() / 1000 - : this[field] - const s = ' ' + - (field === 'dev' || field === 'ino' || field === 'nlink' - ? 'SCHILY.' : '') + - field + '=' + v + '\n' + const r = this[field] + const v = r instanceof Date ? r.getTime() / 1000 : r + const s = + ' ' + + (field === 'dev' || field === 'ino' || field === 'nlink' ? + 'SCHILY.' + : '') + + field + + '=' + + v + + '\n' const byteLen = Buffer.byteLength(s) // the digits includes the length of the digits in ascii base-10 // so if it's 9 characters, then adding 1 for the 9 makes it 10 @@ -110,20 +138,22 @@ class Pax { const len = digits + byteLen return len + s } -} -Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g) + static parse(str: string, ex?: HeaderData, g: boolean = false) { + return new Pax(merge(parseKV(str), ex), g) + } +} -const merge = (a, b) => - b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a +const merge = (a: HeaderData, b?: HeaderData) => + b ? Object.assign({}, b, a) : a -const parseKV = string => - string +const parseKV = (str: string) => + str .replace(/\n$/, '') .split('\n') .reduce(parseKVLine, Object.create(null)) -const parseKVLine = (set, line) => { +const parseKVLine = (set: Record, line: string) => { const n = parseInt(line, 10) // XXX Values with \n in them will fail this. @@ -134,17 +164,19 @@ const parseKVLine = (set, line) => { line = line.slice((n + ' ').length) const kv = line.split('=') - const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1') - if (!k) { + const r = kv.shift() + + if (!r) { return set } + const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1') + const v = kv.join('=') - set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) - ? new Date(v * 1000) + set[k] = + /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ? + new Date(Number(v) * 1000) : /^[0-9]+$/.test(v) ? 
+v : v return set } - -module.exports = Pax diff --git a/src/read-entry.ts b/src/read-entry.ts new file mode 100644 index 00000000..0649bc99 --- /dev/null +++ b/src/read-entry.ts @@ -0,0 +1,156 @@ +import { Minipass } from 'minipass' +import { Header } from './header.js' +import { normalizeWindowsPath } from './normalize-windows-path.js' +import { Pax } from './pax.js' +import { EntryTypeName } from './types.js' + +export class ReadEntry extends Minipass { + extended?: Pax + globalExtended?: Pax + header: Header + startBlockSize: number + blockRemain: number + remain: number + type: EntryTypeName + meta: boolean = false + ignore: boolean = false + path: string + mode?: number + uid?: number + gid?: number + uname?: string + gname?: string + size: number = 0 + mtime?: Date + atime?: Date + ctime?: Date + linkpath?: string + + dev?: number + ino?: number + nlink?: number + invalid: boolean = false + absolute?: string + unsupported: boolean = false + + constructor(header: Header, ex?: Pax, gex?: Pax) { + super({}) + // read entries always start life paused. this is to avoid the + // situation where Minipass's auto-ending empty streams results + // in an entry ending before we're ready for it. + this.pause() + this.extended = ex + this.globalExtended = gex + this.header = header + /* c8 ignore start */ + this.remain = header.size ?? 0 + /* c8 ignore stop */ + this.startBlockSize = 512 * Math.ceil(this.remain / 512) + this.blockRemain = this.startBlockSize + this.type = header.type + switch (this.type) { + case 'File': + case 'OldFile': + case 'Link': + case 'SymbolicLink': + case 'CharacterDevice': + case 'BlockDevice': + case 'Directory': + case 'FIFO': + case 'ContiguousFile': + case 'GNUDumpDir': + break + + case 'NextFileHasLongLinkpath': + case 'NextFileHasLongPath': + case 'OldGnuLongPath': + case 'GlobalExtendedHeader': + case 'ExtendedHeader': + case 'OldExtendedHeader': + this.meta = true + break + + // NOTE: gnutar and bsdtar treat unrecognized types as 'File' + // it may be worth doing the same, but with a warning. + default: + this.ignore = true + } + + /* c8 ignore start */ + if (!header.path) { + throw new Error('no path provided for tar.ReadEntry') + } + /* c8 ignore stop */ + + this.path = normalizeWindowsPath(header.path) as string + this.mode = header.mode + if (this.mode) { + this.mode = this.mode & 0o7777 + } + this.uid = header.uid + this.gid = header.gid + this.uname = header.uname + this.gname = header.gname + this.size = this.remain + this.mtime = header.mtime + this.atime = header.atime + this.ctime = header.ctime + /* c8 ignore start */ + this.linkpath = + header.linkpath ? 
+ normalizeWindowsPath(header.linkpath) + : undefined + /* c8 ignore stop */ + this.uname = header.uname + this.gname = header.gname + + if (ex) { + this.#slurp(ex) + } + if (gex) { + this.#slurp(gex, true) + } + } + + write(data: Buffer) { + const writeLen = data.length + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate') + } + + const r = this.remain + const br = this.blockRemain + this.remain = Math.max(0, r - writeLen) + this.blockRemain = Math.max(0, br - writeLen) + if (this.ignore) { + return true + } + + if (r >= writeLen) { + return super.write(data) + } + + // r < writeLen + return super.write(data.subarray(0, r)) + } + + #slurp(ex: Pax, gex: boolean = false) { + if (ex.path) ex.path = normalizeWindowsPath(ex.path) + if (ex.linkpath) ex.linkpath = normalizeWindowsPath(ex.linkpath) + Object.assign( + this, + Object.fromEntries( + Object.entries(ex).filter(([k, v]) => { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. Also, any + // null/undefined values are ignored. + return !( + v === null || + v === undefined || + (k === 'path' && gex) + ) + }), + ), + ) + } +} diff --git a/lib/replace.js b/src/replace.ts similarity index 50% rename from lib/replace.js rename to src/replace.ts index 8db6800b..9fe2ed79 100644 --- a/lib/replace.js +++ b/src/replace.ts @@ -1,12 +1,17 @@ -'use strict' - // tar -r -const hlo = require('./high-level-opt.js') -const Pack = require('./pack.js') -const fs = require('fs') -const fsm = require('fs-minipass') -const t = require('./list.js') -const path = require('path') +import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass' +import { Minipass } from 'minipass' +import fs from 'node:fs' +import path from 'node:path' +import { Header } from './header.js' +import { list } from './list.js' +import { makeCommand } from './make-command.js' +import { + isFile, + TarOptionsFile, + TarOptionsSyncFile, +} from './options.js' +import { Pack, PackSync } from './pack.js' // starting at the head of the file, read a Header // If the checksum is invalid, that's our position to start writing @@ -14,31 +19,8 @@ const path = require('path') // and try again. // Write the new Pack stream starting there. -const Header = require('./header.js') - -module.exports = (opt_, files, cb) => { - const opt = hlo(opt_) - - if (!opt.file) { - throw new TypeError('file is required') - } - - if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) { - throw new TypeError('cannot append to compressed archives') - } - - if (!files || !Array.isArray(files) || !files.length) { - throw new TypeError('no files or directories specified') - } - - files = Array.from(files) - - return opt.sync ? 
replaceSync(opt, files) - : replace(opt, files, cb) -} - -const replaceSync = (opt, files) => { - const p = new Pack.Sync(opt) +const replaceSync = (opt: TarOptionsSyncFile, files: string[]) => { + const p = new PackSync(opt) let threw = true let fd @@ -48,7 +30,7 @@ const replaceSync = (opt, files) => { try { fd = fs.openSync(opt.file, 'r+') } catch (er) { - if (er.code === 'ENOENT') { + if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') { fd = fs.openSync(opt.file, 'w+') } else { throw er @@ -58,13 +40,25 @@ const replaceSync = (opt, files) => { const st = fs.fstatSync(fd) const headBuf = Buffer.alloc(512) - POSITION: for (position = 0; position < st.size; position += 512) { + POSITION: for ( + position = 0; + position < st.size; + position += 512 + ) { for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) { bytes = fs.readSync( - fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos + fd, + headBuf, + bufPos, + headBuf.length - bufPos, + position + bufPos, ) - if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) { + if ( + position === 0 && + headBuf[0] === 0x1f && + headBuf[1] === 0x8b + ) { throw new Error('cannot append to compressed archives') } @@ -77,15 +71,15 @@ const replaceSync = (opt, files) => { if (!h.cksumValid) { break } - const entryBlockSize = 512 * Math.ceil(h.size / 512) + const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512) if (position + entryBlockSize + 512 > st.size) { break } // the 512 for the header we just parsed will be added as well // also jump ahead all the blocks for the body position += entryBlockSize - if (opt.mtimeCache) { - opt.mtimeCache.set(h.path, h.mtime) + if (opt.mtimeCache && h.mtime) { + opt.mtimeCache.set(String(h.path), h.mtime) } } threw = false @@ -94,27 +88,40 @@ const replaceSync = (opt, files) => { } finally { if (threw) { try { - fs.closeSync(fd) + fs.closeSync(fd as number) } catch (er) {} } } } -const streamSync = (opt, p, position, fd, files) => { - const stream = new fsm.WriteStreamSync(opt.file, { +const streamSync = ( + opt: TarOptionsSyncFile, + p: Pack, + position: number, + fd: number, + files: string[], +) => { + const stream = new WriteStreamSync(opt.file, { fd: fd, start: position, }) - p.pipe(stream) + p.pipe(stream as unknown as Minipass.Writable) addFilesSync(p, files) } -const replace = (opt, files, cb) => { +const replaceAsync = ( + opt: TarOptionsFile, + files: string[], +): Promise => { files = Array.from(files) const p = new Pack(opt) - const getPos = (fd, size, cb_) => { - const cb = (er, pos) => { + const getPos = ( + fd: number, + size: number, + cb_: (er?: null | Error, pos?: number) => void, + ) => { + const cb = (er?: Error | null, pos?: number) => { if (er) { fs.close(fd, _ => cb_(er)) } else { @@ -129,19 +136,27 @@ const replace = (opt, files, cb) => { let bufPos = 0 const headBuf = Buffer.alloc(512) - const onread = (er, bytes) => { - if (er) { + const onread = (er?: null | Error, bytes?: number): void => { + if (er || typeof bytes === 'undefined') { return cb(er) } bufPos += bytes if (bufPos < 512 && bytes) { return fs.read( - fd, headBuf, bufPos, headBuf.length - bufPos, - position + bufPos, onread + fd, + headBuf, + bufPos, + headBuf.length - bufPos, + position + bufPos, + onread, ) } - if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) { + if ( + position === 0 && + headBuf[0] === 0x1f && + headBuf[1] === 0x8b + ) { return cb(new Error('cannot append to compressed archives')) } @@ -155,7 +170,8 @@ const replace = (opt, files, cb) => { return 
cb(null, position) } - const entryBlockSize = 512 * Math.ceil(h.size / 512) + /* c8 ignore next */ + const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512) if (position + entryBlockSize + 512 > size) { return cb(null, position) } @@ -165,8 +181,8 @@ const replace = (opt, files, cb) => { return cb(null, position) } - if (opt.mtimeCache) { - opt.mtimeCache.set(h.path, h.mtime) + if (opt.mtimeCache && h.mtime) { + opt.mtimeCache.set(String(h.path), h.mtime) } bufPos = 0 fs.read(fd, headBuf, 0, 512, position, onread) @@ -174,16 +190,19 @@ const replace = (opt, files, cb) => { fs.read(fd, headBuf, 0, 512, position, onread) } - const promise = new Promise((resolve, reject) => { + const promise = new Promise((resolve, reject) => { p.on('error', reject) let flag = 'r+' - const onopen = (er, fd) => { + const onopen = ( + er?: NodeJS.ErrnoException | null, + fd?: number, + ) => { if (er && er.code === 'ENOENT' && flag === 'r+') { flag = 'w+' return fs.open(opt.file, flag, onopen) } - if (er) { + if (er || !fd) { return reject(er) } @@ -196,11 +215,11 @@ const replace = (opt, files, cb) => { if (er) { return reject(er) } - const stream = new fsm.WriteStream(opt.file, { + const stream = new WriteStream(opt.file, { fd: fd, start: position, }) - p.pipe(stream) + p.pipe(stream as unknown as Minipass.Writable) stream.on('error', reject) stream.on('close', resolve) addFilesAsync(p, files) @@ -210,17 +229,17 @@ const replace = (opt, files, cb) => { fs.open(opt.file, flag, onopen) }) - return cb ? promise.then(cb, cb) : promise + return promise } -const addFilesSync = (p, files) => { +const addFilesSync = (p: Pack, files: string[]) => { files.forEach(file => { if (file.charAt(0) === '@') { - t({ + list({ file: path.resolve(p.cwd, file.slice(1)), sync: true, noResume: true, - onentry: entry => p.add(entry), + onReadEntry: entry => p.add(entry), }) } else { p.add(file) @@ -229,18 +248,52 @@ const addFilesSync = (p, files) => { p.end() } -const addFilesAsync = (p, files) => { - while (files.length) { - const file = files.shift() +const addFilesAsync = async ( + p: Pack, + files: string[], +): Promise => { + for (let i = 0; i < files.length; i++) { + const file = String(files[i]) if (file.charAt(0) === '@') { - return t({ - file: path.resolve(p.cwd, file.slice(1)), + await list({ + file: path.resolve(String(p.cwd), file.slice(1)), noResume: true, - onentry: entry => p.add(entry), - }).then(_ => addFilesAsync(p, files)) + onReadEntry: entry => p.add(entry), + }) } else { p.add(file) } } p.end() } + +export const replace = makeCommand( + replaceSync, + replaceAsync, + /* c8 ignore start */ + (): never => { + throw new TypeError('file is required') + }, + (): never => { + throw new TypeError('file is required') + }, + /* c8 ignore stop */ + (opt, entries) => { + if (!isFile(opt)) { + throw new TypeError('file is required') + } + + if ( + opt.gzip || + opt.brotli || + opt.file.endsWith('.br') || + opt.file.endsWith('.tbr') + ) { + throw new TypeError('cannot append to compressed archives') + } + + if (!entries?.length) { + throw new TypeError('no paths specified to add/replace') + } + }, +) diff --git a/lib/strip-absolute-path.js b/src/strip-absolute-path.ts similarity index 78% rename from lib/strip-absolute-path.js rename to src/strip-absolute-path.ts index 185e2dea..49a446c2 100644 --- a/lib/strip-absolute-path.js +++ b/src/strip-absolute-path.ts @@ -1,5 +1,6 @@ // unix absolute paths are also absolute on win32, so we use this for both -const { isAbsolute, parse } = require('path').win32 +import { win32 } 
from 'node:path' +const { isAbsolute, parse } = win32 // returns [root, stripped] // Note that windows will think that //x/y/z/a has a "root" of //x/y, and in @@ -7,14 +8,16 @@ const { isAbsolute, parse } = require('path').win32 // explicitly if it's the first character. // drive-specific relative paths on Windows get their root stripped off even // though they are not absolute, so `c:../foo` becomes ['c:', '../foo'] -module.exports = path => { +export const stripAbsolutePath = (path: string) => { let r = '' let parsed = parse(path) while (isAbsolute(path) || parsed.root) { // windows will think that //x/y/z has a "root" of //x/y/ // but strip the //?/C:/ off of //?/C:/path - const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? '/' + const root = + path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? + '/' : parsed.root path = path.slice(root.length) r += root diff --git a/lib/strip-trailing-slashes.js b/src/strip-trailing-slashes.ts similarity index 86% rename from lib/strip-trailing-slashes.js rename to src/strip-trailing-slashes.ts index 3e3ecec5..b2a111ac 100644 --- a/lib/strip-trailing-slashes.js +++ b/src/strip-trailing-slashes.ts @@ -2,7 +2,7 @@ // This has been meticulously optimized for use // within npm install on large package trees. // Do not edit without careful benchmarking. -module.exports = str => { +export const stripTrailingSlashes = (str: string) => { let i = str.length - 1 let slashesStart = -1 while (i > -1 && str.charAt(i) === '/') { diff --git a/src/symlink-error.ts b/src/symlink-error.ts new file mode 100644 index 00000000..13c4531d --- /dev/null +++ b/src/symlink-error.ts @@ -0,0 +1,14 @@ +export class SymlinkError extends Error { + path: string + symlink: string + syscall: 'symlink' = 'symlink' + code: 'TAR_SYMLINK_ERROR' = 'TAR_SYMLINK_ERROR' + constructor(symlink: string, path: string) { + super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link') + this.symlink = symlink + this.path = path + } + get name() { + return 'SymlinkError' + } +} diff --git a/src/types.ts b/src/types.ts new file mode 100644 index 00000000..96b8f74b --- /dev/null +++ b/src/types.ts @@ -0,0 +1,98 @@ +export const isCode = (c: string): c is EntryTypeCode => + name.has(c as EntryTypeCode) + +export const isName = (c: string): c is EntryTypeName => + code.has(c as EntryTypeName) + +export type EntryTypeCode = + | '0' + | '' + | '1' + | '2' + | '3' + | '4' + | '5' + | '6' + | '7' + | 'g' + | 'x' + | 'A' + | 'D' + | 'I' + | 'K' + | 'L' + | 'M' + | 'N' + | 'S' + | 'V' + | 'X' + +export type EntryTypeName = + | 'File' + | 'OldFile' + | 'Link' + | 'SymbolicLink' + | 'CharacterDevice' + | 'BlockDevice' + | 'Directory' + | 'FIFO' + | 'ContiguousFile' + | 'GlobalExtendedHeader' + | 'ExtendedHeader' + | 'SolarisACL' + | 'GNUDumpDir' + | 'Inode' + | 'NextFileHasLongLinkpath' + | 'NextFileHasLongPath' + | 'ContinuationFile' + | 'OldGnuLongPath' + | 'SparseFile' + | 'TapeVolumeHeader' + | 'OldExtendedHeader' + | 'Unsupported' + +// map types from key to human-friendly name +export const name = new Map([ + ['0', 'File'], + // same as File + ['', 'OldFile'], + ['1', 'Link'], + ['2', 'SymbolicLink'], + // Devices and FIFOs aren't fully supported + // they are parsed, but skipped when unpacking + ['3', 'CharacterDevice'], + ['4', 'BlockDevice'], + ['5', 'Directory'], + ['6', 'FIFO'], + // same as File + ['7', 'ContiguousFile'], + // pax headers + ['g', 'GlobalExtendedHeader'], + ['x', 'ExtendedHeader'], + // vendor-specific stuff + // skip + ['A', 'SolarisACL'], + // like 5, but 
with data, which should be skipped + ['D', 'GNUDumpDir'], + // metadata only, skip + ['I', 'Inode'], + // data = link path of next file + ['K', 'NextFileHasLongLinkpath'], + // data = path of next file + ['L', 'NextFileHasLongPath'], + // skip + ['M', 'ContinuationFile'], + // like L + ['N', 'OldGnuLongPath'], + // skip + ['S', 'SparseFile'], + // skip + ['V', 'TapeVolumeHeader'], + // like x + ['X', 'OldExtendedHeader'], +]) + +// map the other direction +export const code = new Map( + Array.from(name).map(kv => [kv[1], kv[0]]), +) diff --git a/lib/unpack.js b/src/unpack.ts similarity index 57% rename from lib/unpack.js rename to src/unpack.ts index 03172e2c..154b9492 100644 --- a/lib/unpack.js +++ b/src/unpack.ts @@ -1,23 +1,27 @@ -'use strict' - // the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet. // but the path reservations are required to avoid race conditions where // parallelized unpack ops may mess with one another, due to dependencies // (like a Link depending on its target) or destructive operations (like // clobbering an fs object to create one of a different type.) -const assert = require('assert') -const Parser = require('./parse.js') -const fs = require('fs') -const fsm = require('fs-minipass') -const path = require('path') -const mkdir = require('./mkdir.js') -const wc = require('./winchars.js') -const pathReservations = require('./path-reservations.js') -const stripAbsolutePath = require('./strip-absolute-path.js') -const normPath = require('./normalize-windows-path.js') -const stripSlash = require('./strip-trailing-slashes.js') -const normalize = require('./normalize-unicode.js') +import * as fsm from '@isaacs/fs-minipass' +import assert from 'node:assert' +import { randomBytes } from 'node:crypto' +import fs, { type Stats } from 'node:fs' +import path from 'node:path' +import { getWriteFlag } from './get-write-flag.js' +import { mkdir, MkdirError, mkdirSync } from './mkdir.js' +import { normalizeUnicode } from './normalize-unicode.js' +import { normalizeWindowsPath } from './normalize-windows-path.js' +import { Parser } from './parse.js' +import { stripAbsolutePath } from './strip-absolute-path.js' +import { stripTrailingSlashes } from './strip-trailing-slashes.js' +import * as wc from './winchars.js' + +import { TarOptions } from './options.js' +import { PathReservations } from './path-reservations.js' +import { ReadEntry } from './read-entry.js' +import { WarnData } from './warn-method.js' const ONENTRY = Symbol('onEntry') const CHECKFS = Symbol('checkFs') @@ -44,9 +48,8 @@ const DOCHOWN = Symbol('doChown') const UID = Symbol('uid') const GID = Symbol('gid') const CHECKED_CWD = Symbol('checkedCwd') -const crypto = require('crypto') -const getFlag = require('./get-write-flag.js') -const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform +const platform = + process.env.TESTING_TAR_FAKE_PLATFORM || process.platform const isWindows = platform === 'win32' const DEFAULT_MAX_DEPTH = 1024 @@ -65,13 +68,16 @@ const DEFAULT_MAX_DEPTH = 1024 // semantics. // // See: https://github.com/npm/node-tar/issues/183 -/* istanbul ignore next */ -const unlinkFile = (path, cb) => { +/* c8 ignore start */ +const unlinkFile = ( + path: string, + cb: (er?: Error | null) => void, +) => { if (!isWindows) { return fs.unlink(path, cb) } - const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') + const name = path + '.DELETE.' 
+ randomBytes(16).toString('hex') fs.rename(path, name, er => { if (er) { return cb(er) @@ -79,22 +85,28 @@ const unlinkFile = (path, cb) => { fs.unlink(name, cb) }) } +/* c8 ignore stop */ -/* istanbul ignore next */ -const unlinkFileSync = path => { +/* c8 ignore start */ +const unlinkFileSync = (path: string) => { if (!isWindows) { return fs.unlinkSync(path) } - const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') + const name = path + '.DELETE.' + randomBytes(16).toString('hex') fs.renameSync(path, name) fs.unlinkSync(name) } +/* c8 ignore stop */ // this.gid, entry.gid, this.processUid -const uint32 = (a, b, c) => - a === a >>> 0 ? a - : b === b >>> 0 ? b +const uint32 = ( + a: number | undefined, + b: number | undefined, + c: number | undefined, +) => + a !== undefined && a === a >>> 0 ? a + : b !== undefined && b === b >>> 0 ? b : c // clear the cache if it's a case-insensitive unicode-squashing match. @@ -106,10 +118,13 @@ const uint32 = (a, b, c) => // Note that on windows, we always drop the entire cache whenever a // symbolic link is encountered, because 8.3 filenames are impossible // to reason about, and collisions are hazards rather than just failures. -const cacheKeyNormalize = path => stripSlash(normPath(normalize(path))) - .toLowerCase() +const cacheKeyNormalize = (path: string) => + stripTrailingSlashes( + normalizeWindowsPath(normalizeUnicode(path)), + ).toLowerCase() -const pruneCache = (cache, abs) => { +// remove all cache entries matching ${abs}/** +const pruneCache = (cache: Map, abs: string) => { abs = cacheKeyNormalize(abs) for (const path of cache.keys()) { const pnorm = cacheKeyNormalize(path) @@ -119,73 +134,107 @@ const pruneCache = (cache, abs) => { } } -const dropCache = cache => { +const dropCache = (cache: Map) => { for (const key of cache.keys()) { cache.delete(key) } } -class Unpack extends Parser { - constructor (opt) { - if (!opt) { - opt = {} - } - - opt.ondone = _ => { +export class Unpack extends Parser { + [ENDED]: boolean = false; + [CHECKED_CWD]: boolean = false; + [PENDING]: number = 0 + + reservations: PathReservations = new PathReservations() + transform?: TarOptions['transform'] + writable: true = true + readable: false = false + dirCache: Exclude + uid?: number + gid?: number + setOwner: boolean + preserveOwner: boolean + processGid?: number + processUid?: number + maxDepth: number + forceChown: boolean + win32: boolean + newer: boolean + keep: boolean + noMtime: boolean + preservePaths: boolean + unlink: boolean + cwd: string + strip: number + processUmask: number + umask: number + dmode: number + fmode: number + chmod: boolean + + constructor(opt: TarOptions = {}) { + opt.ondone = () => { this[ENDED] = true this[MAYBECLOSE]() } super(opt) - this[CHECKED_CWD] = false - - this.reservations = pathReservations() - - this.transform = typeof opt.transform === 'function' ? 
opt.transform : null - - this.writable = true - this.readable = false - - this[PENDING] = 0 - this[ENDED] = false + this.transform = opt.transform this.dirCache = opt.dirCache || new Map() + this.chmod = !!opt.chmod if (typeof opt.uid === 'number' || typeof opt.gid === 'number') { // need both or neither - if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number') { - throw new TypeError('cannot set owner without number uid and gid') + if ( + typeof opt.uid !== 'number' || + typeof opt.gid !== 'number' + ) { + throw new TypeError( + 'cannot set owner without number uid and gid', + ) } if (opt.preserveOwner) { throw new TypeError( - 'cannot preserve owner in archive and also set owner explicitly') + 'cannot preserve owner in archive and also set owner explicitly', + ) } this.uid = opt.uid this.gid = opt.gid this.setOwner = true } else { - this.uid = null - this.gid = null + this.uid = undefined + this.gid = undefined this.setOwner = false } // default true for root - if (opt.preserveOwner === undefined && typeof opt.uid !== 'number') { - this.preserveOwner = process.getuid && process.getuid() === 0 + if ( + opt.preserveOwner === undefined && + typeof opt.uid !== 'number' + ) { + this.preserveOwner = !!( + process.getuid && process.getuid() === 0 + ) } else { this.preserveOwner = !!opt.preserveOwner } - this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ? - process.getuid() : null - this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ? - process.getgid() : null + this.processUid = + (this.preserveOwner || this.setOwner) && process.getuid ? + process.getuid() + : undefined + this.processGid = + (this.preserveOwner || this.setOwner) && process.getgid ? + process.getgid() + : undefined // prevent excessively deep nesting of subfolders // set to `Infinity` to remove this restriction - this.maxDepth = typeof opt.maxDepth === 'number' - ? opt.maxDepth + this.maxDepth = + typeof opt.maxDepth === 'number' ? + opt.maxDepth : DEFAULT_MAX_DEPTH // mostly just for testing, but useful in some cases. @@ -213,15 +262,21 @@ class Unpack extends Parser { // links, and removes symlink directories rather than erroring this.unlink = !!opt.unlink - this.cwd = normPath(path.resolve(opt.cwd || process.cwd())) - this.strip = +opt.strip || 0 + this.cwd = normalizeWindowsPath( + path.resolve(opt.cwd || process.cwd()), + ) + this.strip = Number(opt.strip) || 0 // if we're not chmodding, then we don't need the process umask - this.processUmask = opt.noChmod ? 0 : process.umask() - this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask + this.processUmask = + !this.chmod ? 0 + : typeof opt.processUmask === 'number' ? opt.processUmask + : process.umask() + this.umask = + typeof opt.umask === 'number' ? opt.umask : this.processUmask // default mode for dirs created as parents - this.dmode = opt.dmode || (0o0777 & (~this.umask)) - this.fmode = opt.fmode || (0o0666 & (~this.umask)) + this.dmode = opt.dmode || 0o0777 & ~this.umask + this.fmode = opt.fmode || 0o0666 & ~this.umask this.on('entry', entry => this[ONENTRY](entry)) } @@ -229,14 +284,14 @@ class Unpack extends Parser { // a bad or damaged archive is a warning for Parser, but an error // when extracting. Mark those errors as unrecoverable, because // the Unpack contract cannot be met. 
- warn (code, msg, data = {}) { + warn(code: string, msg: string | Error, data: WarnData = {}) { if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') { data.recoverable = false } return super.warn(code, msg, data) } - [MAYBECLOSE] () { + [MAYBECLOSE]() { if (this[ENDED] && this[PENDING] === 0) { this.emit('prefinish') this.emit('finish') @@ -244,8 +299,8 @@ class Unpack extends Parser { } } - [CHECKPATH] (entry) { - const p = normPath(entry.path) + [CHECKPATH](entry: ReadEntry) { + const p = normalizeWindowsPath(entry.path) const parts = p.split('/') if (this.strip) { @@ -253,7 +308,9 @@ class Unpack extends Parser { return false } if (entry.type === 'Link') { - const linkparts = normPath(entry.linkpath).split('/') + const linkparts = normalizeWindowsPath( + String(entry.linkpath), + ).split('/') if (linkparts.length >= this.strip) { entry.linkpath = linkparts.slice(this.strip).join('/') } else { @@ -275,7 +332,11 @@ class Unpack extends Parser { } if (!this.preservePaths) { - if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) { + if ( + parts.includes('..') || + /* c8 ignore next */ + (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? '')) + ) { this.warn('TAR_ENTRY_ERROR', `path contains '..'`, { entry, path: p, @@ -286,48 +347,61 @@ class Unpack extends Parser { // strip off the root const [root, stripped] = stripAbsolutePath(p) if (root) { - entry.path = stripped - this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, { - entry, - path: p, - }) + entry.path = String(stripped) + this.warn( + 'TAR_ENTRY_INFO', + `stripping ${root} from absolute path`, + { + entry, + path: p, + }, + ) } } if (path.isAbsolute(entry.path)) { - entry.absolute = normPath(path.resolve(entry.path)) + entry.absolute = normalizeWindowsPath(path.resolve(entry.path)) } else { - entry.absolute = normPath(path.resolve(this.cwd, entry.path)) + entry.absolute = normalizeWindowsPath( + path.resolve(this.cwd, entry.path), + ) } // if we somehow ended up with a path that escapes the cwd, and we are // not in preservePaths mode, then something is fishy! This should have // been prevented above, so ignore this for coverage. - /* istanbul ignore if - defense in depth */ - if (!this.preservePaths && - entry.absolute.indexOf(this.cwd + '/') !== 0 && - entry.absolute !== this.cwd) { + /* c8 ignore start - defense in depth */ + if ( + !this.preservePaths && + typeof entry.absolute === 'string' && + entry.absolute.indexOf(this.cwd + '/') !== 0 && + entry.absolute !== this.cwd + ) { this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', { entry, - path: normPath(entry.path), + path: normalizeWindowsPath(entry.path), resolvedPath: entry.absolute, cwd: this.cwd, }) return false } + /* c8 ignore stop */ // an archive can set properties on the extraction directory, but it // may not replace the cwd with a different kind of thing entirely. 
- if (entry.absolute === this.cwd && - entry.type !== 'Directory' && - entry.type !== 'GNUDumpDir') { + if ( + entry.absolute === this.cwd && + entry.type !== 'Directory' && + entry.type !== 'GNUDumpDir' + ) { return false } // only encode : chars that aren't drive letter indicators if (this.win32) { - const { root: aRoot } = path.win32.parse(entry.absolute) - entry.absolute = aRoot + wc.encode(entry.absolute.slice(aRoot.length)) + const { root: aRoot } = path.win32.parse(String(entry.absolute)) + entry.absolute = + aRoot + wc.encode(String(entry.absolute).slice(aRoot.length)) const { root: pRoot } = path.win32.parse(entry.path) entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length)) } @@ -335,7 +409,7 @@ class Unpack extends Parser { return true } - [ONENTRY] (entry) { + [ONENTRY](entry: ReadEntry) { if (!this[CHECKPATH](entry)) { return entry.resume() } @@ -365,7 +439,7 @@ class Unpack extends Parser { } } - [ONERROR] (er, entry) { + [ONERROR](er: Error, entry: ReadEntry) { // Cwd has to exist, or else nothing works. That's serious. // Other errors are warnings, which raise the error in strict // mode, but otherwise continue on. @@ -378,50 +452,65 @@ class Unpack extends Parser { } } - [MKDIR] (dir, mode, cb) { - mkdir(normPath(dir), { - uid: this.uid, - gid: this.gid, - processUid: this.processUid, - processGid: this.processGid, - umask: this.processUmask, - preserve: this.preservePaths, - unlink: this.unlink, - cache: this.dirCache, - cwd: this.cwd, - mode: mode, - noChmod: this.noChmod, - }, cb) + [MKDIR]( + dir: string, + mode: number, + cb: (er?: null | MkdirError, made?: string) => void, + ) { + mkdir( + normalizeWindowsPath(dir), + { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode, + }, + cb, + ) } - [DOCHOWN] (entry) { + [DOCHOWN](entry: ReadEntry) { // in preserve owner mode, chown if the entry doesn't match process // in set owner mode, chown if setting doesn't match process - return this.forceChown || - this.preserveOwner && - (typeof entry.uid === 'number' && entry.uid !== this.processUid || - typeof entry.gid === 'number' && entry.gid !== this.processGid) - || - (typeof this.uid === 'number' && this.uid !== this.processUid || - typeof this.gid === 'number' && this.gid !== this.processGid) + return ( + this.forceChown || + (this.preserveOwner && + ((typeof entry.uid === 'number' && + entry.uid !== this.processUid) || + (typeof entry.gid === 'number' && + entry.gid !== this.processGid))) || + (typeof this.uid === 'number' && + this.uid !== this.processUid) || + (typeof this.gid === 'number' && this.gid !== this.processGid) + ) } - [UID] (entry) { + [UID](entry: ReadEntry) { return uint32(this.uid, entry.uid, this.processUid) } - [GID] (entry) { + [GID](entry: ReadEntry) { return uint32(this.gid, entry.gid, this.processGid) } - [FILE] (entry, fullyDone) { - const mode = entry.mode & 0o7777 || this.fmode - const stream = new fsm.WriteStream(entry.absolute, { - flags: getFlag(entry.size), + [FILE](entry: ReadEntry, fullyDone: () => void) { + const mode = + typeof entry.mode === 'number' ? 
+ entry.mode & 0o7777 + : this.fmode + const stream = new fsm.WriteStream(String(entry.absolute), { + // slight lie, but it can be numeric flags + flags: getWriteFlag(entry.size) as string, mode: mode, autoClose: false, }) - stream.on('error', er => { + stream.on('error', (er: Error) => { if (stream.fd) { fs.close(stream.fd, () => {}) } @@ -435,12 +524,13 @@ class Unpack extends Parser { }) let actions = 1 - const done = er => { + const done = (er?: null | Error) => { if (er) { - /* istanbul ignore else - we should always have a fd by now */ + /* c8 ignore start - we should always have a fd by now */ if (stream.fd) { fs.close(stream.fd, () => {}) } + /* c8 ignore stop */ this[ONERROR](er, entry) fullyDone() @@ -448,40 +538,48 @@ class Unpack extends Parser { } if (--actions === 0) { - fs.close(stream.fd, er => { - if (er) { - this[ONERROR](er, entry) - } else { - this[UNPEND]() - } - fullyDone() - }) + if (stream.fd !== undefined) { + fs.close(stream.fd, er => { + if (er) { + this[ONERROR](er, entry) + } else { + this[UNPEND]() + } + fullyDone() + }) + } } } - stream.on('finish', _ => { + stream.on('finish', () => { // if futimes fails, try utimes // if utimes fails, fail with the original error // same for fchown/chown - const abs = entry.absolute + const abs = String(entry.absolute) const fd = stream.fd - if (entry.mtime && !this.noMtime) { + if (typeof fd === 'number' && entry.mtime && !this.noMtime) { actions++ const atime = entry.atime || new Date() const mtime = entry.mtime fs.futimes(fd, atime, mtime, er => - er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er)) - : done()) + er ? + fs.utimes(abs, atime, mtime, er2 => done(er2 && er)) + : done(), + ) } - if (this[DOCHOWN](entry)) { + if (typeof fd === 'number' && this[DOCHOWN](entry)) { actions++ const uid = this[UID](entry) const gid = this[GID](entry) - fs.fchown(fd, uid, gid, er => - er ? fs.chown(abs, uid, gid, er2 => done(er2 && er)) - : done()) + if (typeof uid === 'number' && typeof gid === 'number') { + fs.fchown(fd, uid, gid, er => + er ? + fs.chown(abs, uid, gid, er2 => done(er2 && er)) + : done(), + ) + } } done() @@ -489,7 +587,7 @@ class Unpack extends Parser { const tx = this.transform ? this.transform(entry) || entry : entry if (tx !== entry) { - tx.on('error', er => { + tx.on('error', (er: Error) => { this[ONERROR](er, entry) fullyDone() }) @@ -498,9 +596,12 @@ class Unpack extends Parser { tx.pipe(stream) } - [DIRECTORY] (entry, fullyDone) { - const mode = entry.mode & 0o7777 || this.dmode - this[MKDIR](entry.absolute, mode, er => { + [DIRECTORY](entry: ReadEntry, fullyDone: () => void) { + const mode = + typeof entry.mode === 'number' ? 
+ entry.mode & 0o7777 + : this.dmode + this[MKDIR](String(entry.absolute), mode, er => { if (er) { this[ONERROR](er, entry) fullyDone() @@ -508,7 +609,7 @@ class Unpack extends Parser { } let actions = 1 - const done = _ => { + const done = () => { if (--actions === 0) { fullyDone() this[UNPEND]() @@ -518,44 +619,59 @@ class Unpack extends Parser { if (entry.mtime && !this.noMtime) { actions++ - fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done) + fs.utimes( + String(entry.absolute), + entry.atime || new Date(), + entry.mtime, + done, + ) } if (this[DOCHOWN](entry)) { actions++ - fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done) + fs.chown( + String(entry.absolute), + Number(this[UID](entry)), + Number(this[GID](entry)), + done, + ) } done() }) } - [UNSUPPORTED] (entry) { + [UNSUPPORTED](entry: ReadEntry) { entry.unsupported = true - this.warn('TAR_ENTRY_UNSUPPORTED', - `unsupported entry type: ${entry.type}`, { entry }) + this.warn( + 'TAR_ENTRY_UNSUPPORTED', + `unsupported entry type: ${entry.type}`, + { entry }, + ) entry.resume() } - [SYMLINK] (entry, done) { - this[LINK](entry, entry.linkpath, 'symlink', done) + [SYMLINK](entry: ReadEntry, done: () => void) { + this[LINK](entry, String(entry.linkpath), 'symlink', done) } - [HARDLINK] (entry, done) { - const linkpath = normPath(path.resolve(this.cwd, entry.linkpath)) + [HARDLINK](entry: ReadEntry, done: () => void) { + const linkpath = normalizeWindowsPath( + path.resolve(this.cwd, String(entry.linkpath)), + ) this[LINK](entry, linkpath, 'link', done) } - [PEND] () { + [PEND]() { this[PENDING]++ } - [UNPEND] () { + [UNPEND]() { this[PENDING]-- this[MAYBECLOSE]() } - [SKIP] (entry) { + [SKIP](entry: ReadEntry) { this[UNPEND]() entry.resume() } @@ -563,25 +679,29 @@ class Unpack extends Parser { // Check if we can reuse an existing filesystem entry safely and // overwrite it, rather than unlinking and recreating // Windows doesn't report a useful nlink, so we just never reuse entries - [ISREUSABLE] (entry, st) { - return entry.type === 'File' && + [ISREUSABLE](entry: ReadEntry, st: Stats) { + return ( + entry.type === 'File' && !this.unlink && st.isFile() && st.nlink <= 1 && !isWindows + ) } // check if a thing is there, and if so, try to clobber it - [CHECKFS] (entry) { + [CHECKFS](entry: ReadEntry) { this[PEND]() const paths = [entry.path] if (entry.linkpath) { paths.push(entry.linkpath) } - this.reservations.reserve(paths, done => this[CHECKFS2](entry, done)) + this.reservations.reserve(paths, done => + this[CHECKFS2](entry, done), + ) } - [PRUNECACHE] (entry) { + [PRUNECACHE](entry: ReadEntry) { // if we are not creating a directory, and the path is in the dirCache, // then that means we are about to delete the directory we created // previously, and it is no longer going to be a directory, and neither @@ -595,14 +715,14 @@ class Unpack extends Parser { if (entry.type === 'SymbolicLink') { dropCache(this.dirCache) } else if (entry.type !== 'Directory') { - pruneCache(this.dirCache, entry.absolute) + pruneCache(this.dirCache, String(entry.absolute)) } } - [CHECKFS2] (entry, fullyDone) { + [CHECKFS2](entry: ReadEntry, fullyDone: (er?: Error) => void) { this[PRUNECACHE](entry) - const done = er => { + const done = (er?: Error) => { this[PRUNECACHE](entry) fullyDone(er) } @@ -621,7 +741,9 @@ class Unpack extends Parser { const start = () => { if (entry.absolute !== this.cwd) { - const parent = normPath(path.dirname(entry.absolute)) + const parent = normalizeWindowsPath( + 
path.dirname(String(entry.absolute)), + ) if (parent !== this.cwd) { return this[MKDIR](parent, this.dmode, er => { if (er) { @@ -637,8 +759,13 @@ class Unpack extends Parser { } const afterMakeParent = () => { - fs.lstat(entry.absolute, (lstatEr, st) => { - if (st && (this.keep || this.newer && st.mtime > entry.mtime)) { + fs.lstat(String(entry.absolute), (lstatEr, st) => { + if ( + st && + (this.keep || + /* c8 ignore next */ + (this.newer && st.mtime > (entry.mtime ?? st.mtime))) + ) { this[SKIP](entry) done() return @@ -649,14 +776,20 @@ class Unpack extends Parser { if (st.isDirectory()) { if (entry.type === 'Directory') { - const needChmod = !this.noChmod && + const needChmod = + this.chmod && entry.mode && (st.mode & 0o7777) !== entry.mode - const afterChmod = er => this[MAKEFS](er, entry, done) + const afterChmod = (er?: Error | null | undefined) => + this[MAKEFS](er ?? null, entry, done) if (!needChmod) { return afterChmod() } - return fs.chmod(entry.absolute, entry.mode, afterChmod) + return fs.chmod( + String(entry.absolute), + Number(entry.mode), + afterChmod, + ) } // Not a dir entry, have to remove it. // NB: the only way to end up with an entry that is the cwd @@ -666,8 +799,11 @@ class Unpack extends Parser { // In that case, the user has opted out of path protections // explicitly, so if they blow away the cwd, c'est la vie. if (entry.absolute !== this.cwd) { - return fs.rmdir(entry.absolute, er => - this[MAKEFS](er, entry, done)) + return fs.rmdir( + String(entry.absolute), + (er?: null | Error) => + this[MAKEFS](er ?? null, entry, done), + ) } } @@ -677,8 +813,9 @@ class Unpack extends Parser { return this[MAKEFS](null, entry, done) } - unlinkFile(entry.absolute, er => - this[MAKEFS](er, entry, done)) + unlinkFile(String(entry.absolute), er => + this[MAKEFS](er ?? null, entry, done), + ) }) } @@ -689,7 +826,11 @@ class Unpack extends Parser { } } - [MAKEFS] (er, entry, done) { + [MAKEFS]( + er: null | undefined | Error, + entry: ReadEntry, + done: () => void, + ) { if (er) { this[ONERROR](er, entry) done() @@ -714,9 +855,14 @@ class Unpack extends Parser { } } - [LINK] (entry, linkpath, link, done) { + [LINK]( + entry: ReadEntry, + linkpath: string, + link: 'link' | 'symlink', + done: () => void, + ) { // XXX: get the type ('symlink' or 'junction') for windows - fs[link](linkpath, entry.absolute, er => { + fs[link](linkpath, String(entry.absolute), er => { if (er) { this[ONERROR](er, entry) } else { @@ -728,25 +874,28 @@ class Unpack extends Parser { } } -const callSync = fn => { +const callSync = (fn: () => any) => { try { return [null, fn()] } catch (er) { return [er, null] } } -class UnpackSync extends Unpack { - [MAKEFS] (er, entry) { + +export class UnpackSync extends Unpack { + sync: true = true; + + [MAKEFS](er: null | Error | undefined, entry: ReadEntry) { return super[MAKEFS](er, entry, () => {}) } - [CHECKFS] (entry) { + [CHECKFS](entry: ReadEntry) { this[PRUNECACHE](entry) if (!this[CHECKED_CWD]) { const er = this[MKDIR](this.cwd, this.dmode) if (er) { - return this[ONERROR](er, entry) + return this[ONERROR](er as Error, entry) } this[CHECKED_CWD] = true } @@ -754,17 +903,26 @@ class UnpackSync extends Unpack { // don't bother to make the parent if the current entry is the cwd, // we've already checked it. 
if (entry.absolute !== this.cwd) { - const parent = normPath(path.dirname(entry.absolute)) + const parent = normalizeWindowsPath( + path.dirname(String(entry.absolute)), + ) if (parent !== this.cwd) { const mkParent = this[MKDIR](parent, this.dmode) if (mkParent) { - return this[ONERROR](mkParent, entry) + return this[ONERROR](mkParent as Error, entry) } } } - const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute)) - if (st && (this.keep || this.newer && st.mtime > entry.mtime)) { + const [lstatEr, st] = callSync(() => + fs.lstatSync(String(entry.absolute)), + ) + if ( + st && + (this.keep || + /* c8 ignore next */ + (this.newer && st.mtime > (entry.mtime ?? st.mtime))) + ) { return this[SKIP](entry) } @@ -774,30 +932,41 @@ class UnpackSync extends Unpack { if (st.isDirectory()) { if (entry.type === 'Directory') { - const needChmod = !this.noChmod && + const needChmod = + this.chmod && entry.mode && (st.mode & 0o7777) !== entry.mode - const [er] = needChmod ? callSync(() => { - fs.chmodSync(entry.absolute, entry.mode) - }) : [] + const [er] = + needChmod ? + callSync(() => { + fs.chmodSync(String(entry.absolute), Number(entry.mode)) + }) + : [] return this[MAKEFS](er, entry) } // not a dir entry, have to remove it - const [er] = callSync(() => fs.rmdirSync(entry.absolute)) + const [er] = callSync(() => + fs.rmdirSync(String(entry.absolute)), + ) this[MAKEFS](er, entry) } // not a dir, and not reusable. // don't remove if it's the cwd, since we want that error. - const [er] = entry.absolute === this.cwd ? [] - : callSync(() => unlinkFileSync(entry.absolute)) + const [er] = + entry.absolute === this.cwd ? + [] + : callSync(() => unlinkFileSync(String(entry.absolute))) this[MAKEFS](er, entry) } - [FILE] (entry, done) { - const mode = entry.mode & 0o7777 || this.fmode + [FILE](entry: ReadEntry, done: () => void) { + const mode = + typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.fmode - const oner = er => { + const oner = (er?: null | Error | undefined) => { let closeError try { fs.closeSync(fd) @@ -805,32 +974,36 @@ class UnpackSync extends Unpack { closeError = e } if (er || closeError) { - this[ONERROR](er || closeError, entry) + this[ONERROR]((er as Error) || closeError, entry) } done() } - let fd + let fd: number try { - fd = fs.openSync(entry.absolute, getFlag(entry.size), mode) + fd = fs.openSync( + String(entry.absolute), + getWriteFlag(entry.size), + mode, + ) } catch (er) { - return oner(er) + return oner(er as Error) } const tx = this.transform ? 
this.transform(entry) || entry : entry if (tx !== entry) { - tx.on('error', er => this[ONERROR](er, entry)) + tx.on('error', (er: Error) => this[ONERROR](er, entry)) entry.pipe(tx) } - tx.on('data', chunk => { + tx.on('data', (chunk: Buffer) => { try { fs.writeSync(fd, chunk, 0, chunk.length) } catch (er) { - oner(er) + oner(er as Error) } }) - tx.on('end', _ => { + tx.on('end', () => { let er = null // try both, falling futimes back to utimes // if either fails, handle the first error @@ -841,7 +1014,7 @@ class UnpackSync extends Unpack { fs.futimesSync(fd, atime, mtime) } catch (futimeser) { try { - fs.utimesSync(entry.absolute, atime, mtime) + fs.utimesSync(String(entry.absolute), atime, mtime) } catch (utimeser) { er = futimeser } @@ -853,45 +1026,61 @@ class UnpackSync extends Unpack { const gid = this[GID](entry) try { - fs.fchownSync(fd, uid, gid) + fs.fchownSync(fd, Number(uid), Number(gid)) } catch (fchowner) { try { - fs.chownSync(entry.absolute, uid, gid) + fs.chownSync( + String(entry.absolute), + Number(uid), + Number(gid), + ) } catch (chowner) { er = er || fchowner } } } - oner(er) + oner(er as Error) }) } - [DIRECTORY] (entry, done) { - const mode = entry.mode & 0o7777 || this.dmode - const er = this[MKDIR](entry.absolute, mode) + [DIRECTORY](entry: ReadEntry, done: () => void) { + const mode = + typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.dmode + const er = this[MKDIR](String(entry.absolute), mode) if (er) { - this[ONERROR](er, entry) + this[ONERROR](er as Error, entry) done() return } if (entry.mtime && !this.noMtime) { try { - fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime) + fs.utimesSync( + String(entry.absolute), + entry.atime || new Date(), + entry.mtime, + ) + /* c8 ignore next */ } catch (er) {} } if (this[DOCHOWN](entry)) { try { - fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry)) + fs.chownSync( + String(entry.absolute), + Number(this[UID](entry)), + Number(this[GID](entry)), + ) } catch (er) {} } done() entry.resume() } - [MKDIR] (dir, mode) { + [MKDIR](dir: string, mode: number) { try { - return mkdir.sync(normPath(dir), { + return mkdirSync(normalizeWindowsPath(dir), { uid: this.uid, gid: this.gid, processUid: this.processUid, @@ -908,16 +1097,19 @@ class UnpackSync extends Unpack { } } - [LINK] (entry, linkpath, link, done) { + [LINK]( + entry: ReadEntry, + linkpath: string, + link: 'link' | 'symlink', + done: () => void, + ) { + const ls: `${typeof link}Sync` = `${link}Sync` try { - fs[link + 'Sync'](linkpath, entry.absolute) + fs[ls](linkpath, String(entry.absolute)) done() entry.resume() } catch (er) { - return this[ONERROR](er, entry) + return this[ONERROR](er as Error, entry) } } } - -Unpack.Sync = UnpackSync -module.exports = Unpack diff --git a/src/update.ts b/src/update.ts new file mode 100644 index 00000000..06dcc46e --- /dev/null +++ b/src/update.ts @@ -0,0 +1,48 @@ +// tar -u + +import { makeCommand } from './make-command.js' +import { type TarOptionsWithAliases } from './options.js' + +import { replace as r } from './replace.js' + +// just call tar.r with the filter and mtimeCache +export const update = makeCommand( + r.syncFile, + r.asyncFile, + r.syncNoFile, + r.asyncNoFile, + (opt, entries = []) => { + r.validate?.(opt, entries) + mtimeFilter(opt) + }, +) + +const mtimeFilter = (opt: TarOptionsWithAliases) => { + const filter = opt.filter + + if (!opt.mtimeCache) { + opt.mtimeCache = new Map() + } + + opt.filter = + filter ? 
+ (path, stat) => + filter(path, stat) && + !( + /* c8 ignore start */ + ( + (opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > + (stat.mtime ?? 0) + ) + /* c8 ignore stop */ + ) + : (path, stat) => + !( + /* c8 ignore start */ + ( + (opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > + (stat.mtime ?? 0) + ) + /* c8 ignore stop */ + ) +} diff --git a/src/warn-method.ts b/src/warn-method.ts new file mode 100644 index 00000000..65ca99f0 --- /dev/null +++ b/src/warn-method.ts @@ -0,0 +1,65 @@ +import { type Minipass } from 'minipass' + +/** has a warn method */ +export type Warner = { + warn(code: string, message: string | Error, data: any): void + file?: string + cwd?: string + strict?: boolean + + emit( + event: 'warn', + code: string, + message: string, + data?: WarnData, + ): void + emit(event: 'error', error: TarError): void +} + +export type WarnEvent = Minipass.Events & { + warn: [code: string, message: string, data: WarnData] +} + +export type WarnData = { + file?: string + cwd?: string + code?: string + tarCode?: string + recoverable?: boolean + [k: string]: any +} + +export type TarError = Error & WarnData + +export const warnMethod = ( + self: Warner, + code: string, + message: string | Error, + data: WarnData = {}, +) => { + if (self.file) { + data.file = self.file + } + if (self.cwd) { + data.cwd = self.cwd + } + data.code = + (message instanceof Error && + (message as NodeJS.ErrnoException).code) || + code + data.tarCode = code + if (!self.strict && data.recoverable !== false) { + if (message instanceof Error) { + data = Object.assign(message, data) + message = message.message + } + self.emit('warn', code, message, data) + } else if (message instanceof Error) { + self.emit('error', Object.assign(message, data)) + } else { + self.emit( + 'error', + Object.assign(new Error(`${code}: ${message}`), data), + ) + } +} diff --git a/src/winchars.ts b/src/winchars.ts new file mode 100644 index 00000000..b8edeb36 --- /dev/null +++ b/src/winchars.ts @@ -0,0 +1,16 @@ +// When writing files on Windows, translate the characters to their +// 0xf000 higher-encoded versions. 
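+// For example, '<' (0x3c) is written as '\uf03c' and ':' (0x3a) as '\uf03a';
+// decode() reverses the substitution, so the original name round-trips
+// when the archive is read back.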
+ +const raw = ['|', '<', '>', '?', ':'] + +const win = raw.map(char => + String.fromCharCode(0xf000 + char.charCodeAt(0)), +) + +const toWin = new Map(raw.map((char, i) => [char, win[i]])) +const toRaw = new Map(win.map((char, i) => [char, raw[i]])) + +export const encode = (s: string) => + raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s) +export const decode = (s: string) => + win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s) diff --git a/src/write-entry.ts b/src/write-entry.ts new file mode 100644 index 00000000..fabead4d --- /dev/null +++ b/src/write-entry.ts @@ -0,0 +1,830 @@ +import fs, { type Stats } from 'fs' +import { Minipass } from 'minipass' +import path from 'path' +import { Header } from './header.js' +import { modeFix } from './mode-fix.js' +import { normalizeWindowsPath } from './normalize-windows-path.js' +import { + dealias, + LinkCacheKey, + TarOptions, + TarOptionsWithAliases, +} from './options.js' +import { Pax } from './pax.js' +import { ReadEntry } from './read-entry.js' +import { stripAbsolutePath } from './strip-absolute-path.js' +import { stripTrailingSlashes } from './strip-trailing-slashes.js' +import { EntryTypeName } from './types.js' +import { + WarnData, + Warner, + WarnEvent, + warnMethod, +} from './warn-method.js' +import * as winchars from './winchars.js' + +const prefixPath = (path: string, prefix?: string) => { + if (!prefix) { + return normalizeWindowsPath(path) + } + path = normalizeWindowsPath(path).replace(/^\.(\/|$)/, '') + return stripTrailingSlashes(prefix) + '/' + path +} + +const maxReadSize = 16 * 1024 * 1024 + +const PROCESS = Symbol('process') +const FILE = Symbol('file') +const DIRECTORY = Symbol('directory') +const SYMLINK = Symbol('symlink') +const HARDLINK = Symbol('hardlink') +const HEADER = Symbol('header') +const READ = Symbol('read') +const LSTAT = Symbol('lstat') +const ONLSTAT = Symbol('onlstat') +const ONREAD = Symbol('onread') +const ONREADLINK = Symbol('onreadlink') +const OPENFILE = Symbol('openfile') +const ONOPENFILE = Symbol('onopenfile') +const CLOSE = Symbol('close') +const MODE = Symbol('mode') +const AWAITDRAIN = Symbol('awaitDrain') +const ONDRAIN = Symbol('ondrain') +const PREFIX = Symbol('prefix') + +export class WriteEntry + extends Minipass + implements Warner +{ + path: string + portable: boolean + myuid: number = (process.getuid && process.getuid()) || 0 + // until node has builtin pwnam functions, this'll have to do + myuser: string = process.env.USER || '' + maxReadSize: number + linkCache: Exclude + statCache: Exclude + preservePaths: boolean + cwd: string + strict: boolean + mtime?: Date + noPax: boolean + noMtime: boolean + prefix?: string + fd?: number + + blockLen: number = 0 + blockRemain: number = 0 + buf?: Buffer + pos: number = 0 + remain: number = 0 + length: number = 0 + offset: number = 0 + + win32: boolean + absolute: string + + header?: Header + type?: EntryTypeName | 'Unsupported' + linkpath?: string + stat?: Stats + onWriteEntry?: (entry: WriteEntry) => any + + #hadError: boolean = false + + constructor(p: string, opt_: TarOptionsWithAliases = {}) { + const opt = dealias(opt_) + super() + this.path = normalizeWindowsPath(p) + // suppress atime, ctime, uid, gid, uname, gname + this.portable = !!opt.portable + this.maxReadSize = opt.maxReadSize || maxReadSize + this.linkCache = opt.linkCache || new Map() + this.statCache = opt.statCache || new Map() + this.preservePaths = !!opt.preservePaths + this.cwd = normalizeWindowsPath(opt.cwd || process.cwd()) + this.strict = !!opt.strict + 
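+    // (noPax suppresses pax extended headers, which otherwise carry long
+    // paths and large numeric values; noMtime omits mtime values from the
+    // entries that get written)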
this.noPax = !!opt.noPax + this.noMtime = !!opt.noMtime + this.mtime = opt.mtime + this.prefix = + opt.prefix ? normalizeWindowsPath(opt.prefix) : undefined + this.onWriteEntry = opt.onWriteEntry + + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn) + } + + let pathWarn: string | boolean = false + if (!this.preservePaths) { + const [root, stripped] = stripAbsolutePath(this.path) + if (root && typeof stripped === 'string') { + this.path = stripped + pathWarn = root + } + } + + this.win32 = !!opt.win32 || process.platform === 'win32' + if (this.win32) { + // force the \ to / normalization, since we might not *actually* + // be on windows, but want \ to be considered a path separator. + this.path = winchars.decode(this.path.replace(/\\/g, '/')) + p = p.replace(/\\/g, '/') + } + + this.absolute = normalizeWindowsPath( + opt.absolute || path.resolve(this.cwd, p), + ) + + if (this.path === '') { + this.path = './' + } + + if (pathWarn) { + this.warn( + 'TAR_ENTRY_INFO', + `stripping ${pathWarn} from absolute path`, + { + entry: this, + path: pathWarn + this.path, + }, + ) + } + + const cs = this.statCache.get(this.absolute) + if (cs) { + this[ONLSTAT](cs) + } else { + this[LSTAT]() + } + } + + warn(code: string, message: string | Error, data: WarnData = {}) { + return warnMethod(this, code, message, data) + } + + emit(ev: keyof WarnEvent, ...data: any[]) { + if (ev === 'error') { + this.#hadError = true + } + return super.emit(ev, ...data) + } + + [LSTAT]() { + fs.lstat(this.absolute, (er, stat) => { + if (er) { + return this.emit('error', er) + } + this[ONLSTAT](stat) + }) + } + + [ONLSTAT](stat: Stats) { + this.statCache.set(this.absolute, stat) + this.stat = stat + if (!stat.isFile()) { + stat.size = 0 + } + this.type = getType(stat) + this.emit('stat', stat) + this[PROCESS]() + } + + [PROCESS]() { + switch (this.type) { + case 'File': + return this[FILE]() + case 'Directory': + return this[DIRECTORY]() + case 'SymbolicLink': + return this[SYMLINK]() + // unsupported types are ignored. + default: + return this.end() + } + } + + [MODE](mode: number) { + return modeFix(mode, this.type === 'Directory', this.portable) + } + + [PREFIX](path: string) { + return prefixPath(path, this.prefix) + } + + [HEADER]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot write header before stat') + } + /* c8 ignore stop */ + + if (this.type === 'Directory' && this.portable) { + this.noMtime = true + } + + this.onWriteEntry?.(this) + this.header = new Header({ + path: this[PREFIX](this.path), + // only apply the prefix to hard links. + linkpath: + this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this[MODE](this.stat.mode), + uid: this.portable ? undefined : this.stat.uid, + gid: this.portable ? undefined : this.stat.gid, + size: this.stat.size, + mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime, + /* c8 ignore next */ + type: this.type === 'Unsupported' ? undefined : this.type, + uname: + this.portable ? undefined + : this.stat.uid === this.myuid ? this.myuser + : '', + atime: this.portable ? undefined : this.stat.atime, + ctime: this.portable ? undefined : this.stat.ctime, + }) + + if (this.header.encode() && !this.noPax) { + super.write( + new Pax({ + atime: this.portable ? undefined : this.header.atime, + ctime: this.portable ? 
undefined : this.header.ctime,
+          gid: this.portable ? undefined : this.header.gid,
+          mtime:
+            this.noMtime ? undefined : (
+              this.mtime || this.header.mtime
+            ),
+          path: this[PREFIX](this.path),
+          linkpath:
+            this.type === 'Link' && this.linkpath !== undefined ?
+              this[PREFIX](this.linkpath)
+            : this.linkpath,
+          size: this.header.size,
+          uid: this.portable ? undefined : this.header.uid,
+          uname: this.portable ? undefined : this.header.uname,
+          dev: this.portable ? undefined : this.stat.dev,
+          ino: this.portable ? undefined : this.stat.ino,
+          nlink: this.portable ? undefined : this.stat.nlink,
+        }).encode(),
+      )
+    }
+    const block = this.header?.block
+    /* c8 ignore start */
+    if (!block) {
+      throw new Error('failed to encode header')
+    }
+    /* c8 ignore stop */
+    super.write(block)
+  }
+
+  [DIRECTORY]() {
+    /* c8 ignore start */
+    if (!this.stat) {
+      throw new Error('cannot create directory entry without stat')
+    }
+    /* c8 ignore stop */
+    if (this.path.slice(-1) !== '/') {
+      this.path += '/'
+    }
+    this.stat.size = 0
+    this[HEADER]()
+    this.end()
+  }
+
+  [SYMLINK]() {
+    fs.readlink(this.absolute, (er, linkpath) => {
+      if (er) {
+        return this.emit('error', er)
+      }
+      this[ONREADLINK](linkpath)
+    })
+  }
+
+  [ONREADLINK](linkpath: string) {
+    this.linkpath = normalizeWindowsPath(linkpath)
+    this[HEADER]()
+    this.end()
+  }
+
+  [HARDLINK](linkpath: string) {
+    /* c8 ignore start */
+    if (!this.stat) {
+      throw new Error('cannot create link entry without stat')
+    }
+    /* c8 ignore stop */
+    this.type = 'Link'
+    this.linkpath = normalizeWindowsPath(
+      path.relative(this.cwd, linkpath),
+    )
+    this.stat.size = 0
+    this[HEADER]()
+    this.end()
+  }
+
+  [FILE]() {
+    /* c8 ignore start */
+    if (!this.stat) {
+      throw new Error('cannot create file entry without stat')
+    }
+    /* c8 ignore stop */
+    if (this.stat.nlink > 1) {
+      const linkKey =
+        `${this.stat.dev}:${this.stat.ino}` as LinkCacheKey
+      const linkpath = this.linkCache.get(linkKey)
+      if (linkpath?.indexOf(this.cwd) === 0) {
+        return this[HARDLINK](linkpath)
+      }
+      this.linkCache.set(linkKey, this.absolute)
+    }
+
+    this[HEADER]()
+    if (this.stat.size === 0) {
+      return this.end()
+    }
+
+    this[OPENFILE]()
+  }
+
+  [OPENFILE]() {
+    fs.open(this.absolute, 'r', (er, fd) => {
+      if (er) {
+        return this.emit('error', er)
+      }
+      this[ONOPENFILE](fd)
+    })
+  }
+
+  [ONOPENFILE](fd: number) {
+    this.fd = fd
+    if (this.#hadError) {
+      return this[CLOSE]()
+    }
+    /* c8 ignore start */
+    if (!this.stat) {
+      throw new Error('should stat before calling onopenfile')
+    }
+    /* c8 ignore stop */
+
+    this.blockLen = 512 * Math.ceil(this.stat.size / 512)
+    this.blockRemain = this.blockLen
+    const bufLen = Math.min(this.blockLen, this.maxReadSize)
+    this.buf = Buffer.allocUnsafe(bufLen)
+    this.offset = 0
+    this.pos = 0
+    this.remain = this.stat.size
+    this.length = this.buf.length
+    this[READ]()
+  }
+
+  [READ]() {
+    const { fd, buf, offset, length, pos } = this
+    if (fd === undefined || buf === undefined) {
+      throw new Error('cannot read file without first opening')
+    }
+    fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
+      if (er) {
+        // ignoring the error from close(2) is a bad practice, but at
+        // this point we already have an error, don't need another one
+        return this[CLOSE](() => this.emit('error', er))
+      }
+      this[ONREAD](bytesRead)
+    })
+  }
+
+  /* c8 ignore start */
+  [CLOSE](
+    cb: (er?: null | Error | NodeJS.ErrnoException) => any = () => {},
+  ) {
+    /* c8 ignore stop */
+    if (this.fd !== undefined) fs.close(this.fd, cb)
+  }
+
+  [ONREAD](bytesRead: number) {
+    if
(bytesRead <= 0 && this.remain > 0) { + const er = Object.assign( + new Error('encountered unexpected EOF'), + { + path: this.absolute, + syscall: 'read', + code: 'EOF', + }, + ) + return this[CLOSE](() => this.emit('error', er)) + } + + if (bytesRead > this.remain) { + const er = Object.assign( + new Error('did not encounter expected EOF'), + { + path: this.absolute, + syscall: 'read', + code: 'EOF', + }, + ) + return this[CLOSE](() => this.emit('error', er)) + } + + /* c8 ignore start */ + if (!this.buf) { + throw new Error('should have created buffer prior to reading') + } + /* c8 ignore stop */ + + // null out the rest of the buffer, if we could fit the block padding + // at the end of this loop, we've incremented bytesRead and this.remain + // to be incremented up to the blockRemain level, as if we had expected + // to get a null-padded file, and read it until the end. then we will + // decrement both remain and blockRemain by bytesRead, and know that we + // reached the expected EOF, without any null buffer to append. + if (bytesRead === this.remain) { + for ( + let i = bytesRead; + i < this.length && bytesRead < this.blockRemain; + i++ + ) { + this.buf[i + this.offset] = 0 + bytesRead++ + this.remain++ + } + } + + const chunk = + this.offset === 0 && bytesRead === this.buf.length ? + this.buf + : this.buf.subarray(this.offset, this.offset + bytesRead) + + const flushed = this.write(chunk) + if (!flushed) { + this[AWAITDRAIN](() => this[ONDRAIN]()) + } else { + this[ONDRAIN]() + } + } + + [AWAITDRAIN](cb: () => any) { + this.once('drain', cb) + } + + write(buffer: Buffer | string, cb?: () => void): boolean + write( + str: Buffer | string, + encoding?: BufferEncoding | null, + cb?: () => void, + ): boolean + write( + chunk: Buffer | string, + encoding?: BufferEncoding | (() => any) | null, + cb?: () => any, + ): boolean { + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof encoding === 'function') { + cb = encoding + encoding = undefined + } + if (typeof chunk === 'string') { + chunk = Buffer.from( + chunk, + typeof encoding === 'string' ? encoding : 'utf8', + ) + } + /* c8 ignore stop */ + + if (this.blockRemain < chunk.length) { + const er = Object.assign( + new Error('writing more data than expected'), + { + path: this.absolute, + }, + ) + return this.emit('error', er) + } + this.remain -= chunk.length + this.blockRemain -= chunk.length + this.pos += chunk.length + this.offset += chunk.length + return super.write(chunk, null, cb) + } + + [ONDRAIN]() { + if (!this.remain) { + if (this.blockRemain) { + super.write(Buffer.alloc(this.blockRemain)) + } + return this[CLOSE](er => + er ? this.emit('error', er) : this.end(), + ) + } + + /* c8 ignore start */ + if (!this.buf) { + throw new Error('buffer lost somehow in ONDRAIN') + } + /* c8 ignore stop */ + + if (this.offset >= this.length) { + // if we only have a smaller bit left to read, alloc a smaller buffer + // otherwise, keep it the same length it was before. 
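+      // For instance (illustrative numbers, not from the source): with the
+      // default 16 MiB maxReadSize and a 40 MiB file, the first two reads
+      // reuse the full 16 MiB buffer, and this branch then shrinks it to the
+      // remaining 8 MiB so the final [READ]() fills it exactly, NUL-padding
+      // up to the 512-byte block boundary if the file size requires it.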
+ this.buf = Buffer.allocUnsafe( + Math.min(this.blockRemain, this.buf.length), + ) + this.offset = 0 + } + this.length = this.buf.length - this.offset + this[READ]() + } +} + +export class WriteEntrySync extends WriteEntry implements Warner { + sync: true = true; + + [LSTAT]() { + this[ONLSTAT](fs.lstatSync(this.absolute)) + } + + [SYMLINK]() { + this[ONREADLINK](fs.readlinkSync(this.absolute)) + } + + [OPENFILE]() { + this[ONOPENFILE](fs.openSync(this.absolute, 'r')) + } + + [READ]() { + let threw = true + try { + const { fd, buf, offset, length, pos } = this + /* c8 ignore start */ + if (fd === undefined || buf === undefined) { + throw new Error('fd and buf must be set in READ method') + } + /* c8 ignore stop */ + const bytesRead = fs.readSync(fd, buf, offset, length, pos) + this[ONREAD](bytesRead) + threw = false + } finally { + // ignoring the error from close(2) is a bad practice, but at + // this point we already have an error, don't need another one + if (threw) { + try { + this[CLOSE](() => {}) + } catch (er) {} + } + } + } + + [AWAITDRAIN](cb: () => any) { + cb() + } + + /* c8 ignore start */ + [CLOSE]( + cb: (er?: null | Error | NodeJS.ErrnoException) => any = () => {}, + ) { + /* c8 ignore stop */ + if (this.fd !== undefined) fs.closeSync(this.fd) + cb() + } +} + +export class WriteEntryTar + extends Minipass + implements Warner +{ + blockLen: number = 0 + blockRemain: number = 0 + buf: number = 0 + pos: number = 0 + remain: number = 0 + length: number = 0 + preservePaths: boolean + portable: boolean + strict: boolean + noPax: boolean + noMtime: boolean + readEntry: ReadEntry + type: EntryTypeName + prefix?: string + path: string + mode?: number + uid?: number + gid?: number + uname?: string + gname?: string + header?: Header + mtime?: Date + atime?: Date + ctime?: Date + linkpath?: string + size: number + onWriteEntry?: (entry: WriteEntry) => any + + warn(code: string, message: string | Error, data: WarnData = {}) { + return warnMethod(this, code, message, data) + } + + constructor( + readEntry: ReadEntry, + opt_: TarOptionsWithAliases = {}, + ) { + const opt = dealias(opt_) + super() + this.preservePaths = !!opt.preservePaths + this.portable = !!opt.portable + this.strict = !!opt.strict + this.noPax = !!opt.noPax + this.noMtime = !!opt.noMtime + this.onWriteEntry = opt.onWriteEntry + + this.readEntry = readEntry + const { type } = readEntry + /* c8 ignore start */ + if (type === 'Unsupported') { + throw new Error('writing entry that should be ignored') + } + /* c8 ignore stop */ + this.type = type + if (this.type === 'Directory' && this.portable) { + this.noMtime = true + } + + this.prefix = opt.prefix + + this.path = normalizeWindowsPath(readEntry.path) + this.mode = + readEntry.mode !== undefined ? + this[MODE](readEntry.mode) + : undefined + this.uid = this.portable ? undefined : readEntry.uid + this.gid = this.portable ? undefined : readEntry.gid + this.uname = this.portable ? undefined : readEntry.uname + this.gname = this.portable ? undefined : readEntry.gname + this.size = readEntry.size + this.mtime = + this.noMtime ? undefined : opt.mtime || readEntry.mtime + this.atime = this.portable ? undefined : readEntry.atime + this.ctime = this.portable ? undefined : readEntry.ctime + this.linkpath = + readEntry.linkpath !== undefined ? 
+ normalizeWindowsPath(readEntry.linkpath) + : undefined + + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn) + } + + let pathWarn: false | string = false + if (!this.preservePaths) { + const [root, stripped] = stripAbsolutePath(this.path) + if (root && typeof stripped === 'string') { + this.path = stripped + pathWarn = root + } + } + + this.remain = readEntry.size + this.blockRemain = readEntry.startBlockSize + + this.onWriteEntry?.(this as unknown as WriteEntry) + this.header = new Header({ + path: this[PREFIX](this.path), + linkpath: + this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this.mode, + uid: this.portable ? undefined : this.uid, + gid: this.portable ? undefined : this.gid, + size: this.size, + mtime: this.noMtime ? undefined : this.mtime, + type: this.type, + uname: this.portable ? undefined : this.uname, + atime: this.portable ? undefined : this.atime, + ctime: this.portable ? undefined : this.ctime, + }) + + if (pathWarn) { + this.warn( + 'TAR_ENTRY_INFO', + `stripping ${pathWarn} from absolute path`, + { + entry: this, + path: pathWarn + this.path, + }, + ) + } + + if (this.header.encode() && !this.noPax) { + super.write( + new Pax({ + atime: this.portable ? undefined : this.atime, + ctime: this.portable ? undefined : this.ctime, + gid: this.portable ? undefined : this.gid, + mtime: this.noMtime ? undefined : this.mtime, + path: this[PREFIX](this.path), + linkpath: + this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + size: this.size, + uid: this.portable ? undefined : this.uid, + uname: this.portable ? undefined : this.uname, + dev: this.portable ? undefined : this.readEntry.dev, + ino: this.portable ? undefined : this.readEntry.ino, + nlink: this.portable ? undefined : this.readEntry.nlink, + }).encode(), + ) + } + + const b = this.header?.block + /* c8 ignore start */ + if (!b) throw new Error('failed to encode header') + /* c8 ignore stop */ + super.write(b) + readEntry.pipe(this) + } + + [PREFIX](path: string) { + return prefixPath(path, this.prefix) + } + + [MODE](mode: number) { + return modeFix(mode, this.type === 'Directory', this.portable) + } + + write(buffer: Buffer | string, cb?: () => void): boolean + write( + str: Buffer | string, + encoding?: BufferEncoding | null, + cb?: () => void, + ): boolean + write( + chunk: Buffer | string, + encoding?: BufferEncoding | (() => any) | null, + cb?: () => any, + ): boolean { + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof encoding === 'function') { + cb = encoding + encoding = undefined + } + if (typeof chunk === 'string') { + chunk = Buffer.from( + chunk, + typeof encoding === 'string' ? 
encoding : 'utf8', + ) + } + /* c8 ignore stop */ + const writeLen = chunk.length + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate') + } + this.blockRemain -= writeLen + return super.write(chunk, cb) + } + + end(cb?: () => void): this + end(chunk: Buffer | string, cb?: () => void): this + end( + chunk: Buffer | string, + encoding?: BufferEncoding, + cb?: () => void, + ): this + end( + chunk?: Buffer | string | (() => void), + encoding?: BufferEncoding | (() => void), + cb?: () => void, + ): this { + if (this.blockRemain) { + super.write(Buffer.alloc(this.blockRemain)) + } + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof chunk === 'function') { + cb = chunk + encoding = undefined + chunk = undefined + } + if (typeof encoding === 'function') { + cb = encoding + encoding = undefined + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding ?? 'utf8') + } + if (cb) this.once('finish', cb) + chunk ? super.end(chunk, cb) : super.end(cb) + /* c8 ignore stop */ + return this + } +} + +const getType = (stat: Stats): EntryTypeName | 'Unsupported' => + stat.isFile() ? 'File' + : stat.isDirectory() ? 'Directory' + : stat.isSymbolicLink() ? 'SymbolicLink' + : 'Unsupported' diff --git a/tap-snapshots/test/normalize-unicode.js-win32.test.cjs b/tap-snapshots/test/normalize-unicode.js-win32.test.cjs new file mode 100644 index 00000000..85e353d8 --- /dev/null +++ b/tap-snapshots/test/normalize-unicode.js-win32.test.cjs @@ -0,0 +1,30 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/normalize-unicode.js win32 > TAP > normalize with strip slashes > "\\\\\eee\\\\\\" > normalized 1`] = ` +\\\\\eee\\\\\\ +` + +exports[`test/normalize-unicode.js win32 > TAP > normalize with strip slashes > "\\\\a\\\\b\\\\c\\\\d\\\\" > normalized 1`] = ` +/a/b/c/d +` + +exports[`test/normalize-unicode.js win32 > TAP > normalize with strip slashes > "﹨aaaa﹨dddd﹨" > normalized 1`] = ` +﹨aaaa﹨dddd﹨ +` + +exports[`test/normalize-unicode.js win32 > TAP > normalize with strip slashes > "\bbb\eee\" > normalized 1`] = ` +\bbb\eee\ +` + +exports[`test/normalize-unicode.js win32 > TAP > normalize with strip slashes > "1/4foo.txt" > normalized 1`] = ` +1/4foo.txt +` + +exports[`test/normalize-unicode.js win32 > TAP > normalize with strip slashes > "¼foo.txt" > normalized 1`] = ` +¼foo.txt +` diff --git a/tap-snapshots/test/normalize-unicode.js.test.cjs b/tap-snapshots/test/normalize-unicode.js.test.cjs deleted file mode 100644 index 3163313d..00000000 --- a/tap-snapshots/test/normalize-unicode.js.test.cjs +++ /dev/null @@ -1,30 +0,0 @@ -/* IMPORTANT - * This snapshot file is auto-generated, but designed for humans. - * It should be checked into source control and tracked carefully. - * Re-generate by setting TAP_SNAPSHOT=1 and running tests. - * Make sure to inspect the output below. Do not ignore changes! 
- */ -'use strict' -exports[`test/normalize-unicode.js TAP normalize with strip slashes "1/4foo.txt" > normalized 1`] = ` -1/4foo.txt -` - -exports[`test/normalize-unicode.js TAP normalize with strip slashes "\\\\a\\\\b\\\\c\\\\d\\\\" > normalized 1`] = ` -/a/b/c/d -` - -exports[`test/normalize-unicode.js TAP normalize with strip slashes "¼foo.txt" > normalized 1`] = ` -¼foo.txt -` - -exports[`test/normalize-unicode.js TAP normalize with strip slashes "﹨aaaa﹨dddd﹨" > normalized 1`] = ` -﹨aaaa﹨dddd﹨ -` - -exports[`test/normalize-unicode.js TAP normalize with strip slashes "\bbb\eee\" > normalized 1`] = ` -\bbb\eee\ -` - -exports[`test/normalize-unicode.js TAP normalize with strip slashes "\\\\\eee\\\\\\" > normalized 1`] = ` -\\\\\eee\\\\\\ -` diff --git a/tap-snapshots/test/unpack.js.test.cjs b/tap-snapshots/test/unpack.js.test.cjs new file mode 100644 index 00000000..2446eb2e --- /dev/null +++ b/tap-snapshots/test/unpack.js.test.cjs @@ -0,0 +1,18 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/unpack.js > TAP > ignore self-referential hardlinks > async > must match snapshot 1`] = ` +Array [ + "ENOENT: no such file or directory, link '{CWD}/.tap/fixtures/test-unpack.js-ignore-self-referential-hardlinks-async/autolink' -> '{CWD}/.tap/fixtures/test-unpack.js-ignore-self-referential-hardlinks-async/autolink'", +] +` + +exports[`test/unpack.js > TAP > ignore self-referential hardlinks > sync > must match snapshot 1`] = ` +Array [ + "ENOENT: no such file or directory, link '{CWD}/.tap/fixtures/test-unpack.js-ignore-self-referential-hardlinks-sync/autolink' -> '{CWD}/.tap/fixtures/test-unpack.js-ignore-self-referential-hardlinks-sync/autolink'", +] +` diff --git a/test/create.js b/test/create.js deleted file mode 100644 index fdfd78b8..00000000 --- a/test/create.js +++ /dev/null @@ -1,230 +0,0 @@ -'use strict' - -const isWindows = process.platform === 'win32' -const t = require('tap') -const c = require('../lib/create.js') -const list = require('../lib/list.js') -const fs = require('fs') -const path = require('path') -const dir = path.resolve(__dirname, 'fixtures/create') -const tars = path.resolve(__dirname, 'fixtures/tars') -const rimraf = require('rimraf') -const mkdirp = require('mkdirp') -const spawn = require('child_process').spawn -const Pack = require('../lib/pack.js') -const mutateFS = require('mutate-fs') -const { promisify } = require('util') - -const readtar = (file, cb) => { - const child = spawn('tar', ['tf', file]) - const out = [] - child.stdout.on('data', c => out.push(c)) - child.on('close', (code, signal) => - cb(code, signal, Buffer.concat(out).toString())) -} - -t.teardown(() => new Promise(resolve => rimraf(dir, resolve))) - -t.before(async () => { - await promisify(rimraf)(dir) - await mkdirp(dir) -}) - -t.test('no cb if sync or without file', t => { - t.throws(_ => c({ sync: true }, ['asdf'], _ => _)) - t.throws(_ => c(_ => _)) - t.throws(_ => c({}, _ => _)) - t.throws(_ => c({}, ['asdf'], _ => _)) - t.end() -}) - -t.test('create file', t => { - const files = [path.basename(__filename)] - - t.test('sync', t => { - const file = path.resolve(dir, 'sync.tar') - c({ - file: file, - cwd: __dirname, - sync: true, - }, files) - readtar(file, (code, signal, list) => { - t.equal(code, 0) - t.equal(signal, null) - 
t.equal(list.trim(), 'create.js') - t.end() - }) - }) - - t.test('async', t => { - const file = path.resolve(dir, 'async.tar') - c({ - file: file, - cwd: __dirname, - }, files, er => { - if (er) { - throw er - } - readtar(file, (code, signal, list) => { - t.equal(code, 0) - t.equal(signal, null) - t.equal(list.trim(), 'create.js') - t.end() - }) - }) - }) - - t.test('async promise only', t => { - const file = path.resolve(dir, 'promise.tar') - c({ - file: file, - cwd: __dirname, - }, files).then(_ => { - readtar(file, (code, signal, list) => { - t.equal(code, 0) - t.equal(signal, null) - t.equal(list.trim(), 'create.js') - t.end() - }) - }) - }) - - t.test('with specific mode', t => { - const mode = isWindows ? 0o666 : 0o740 - t.test('sync', t => { - const file = path.resolve(dir, 'sync-mode.tar') - c({ - mode: mode, - file: file, - cwd: __dirname, - sync: true, - }, files) - readtar(file, (code, signal, list) => { - t.equal(code, 0) - t.equal(signal, null) - t.equal(list.trim(), 'create.js') - t.equal(fs.lstatSync(file).mode & 0o7777, mode) - t.end() - }) - }) - - t.test('async', t => { - const file = path.resolve(dir, 'async-mode.tar') - c({ - mode: mode, - file: file, - cwd: __dirname, - }, files, er => { - if (er) { - throw er - } - readtar(file, (code, signal, list) => { - t.equal(code, 0) - t.equal(signal, null) - t.equal(list.trim(), 'create.js') - t.equal(fs.lstatSync(file).mode & 0o7777, mode) - t.end() - }) - }) - }) - - t.end() - }) - t.end() -}) - -t.test('create', t => { - t.type(c({ sync: true }, ['README.md']), Pack.Sync) - t.type(c(['README.md']), Pack) - t.end() -}) - -t.test('open fails', t => { - const poop = new Error('poop') - const file = path.resolve(dir, 'throw-open.tar') - t.teardown(mutateFS.statFail(poop)) - t.throws(_ => c({ - file: file, - sync: true, - cwd: __dirname, - }, [path.basename(__filename)])) - t.throws(_ => fs.lstatSync(file)) - t.end() -}) - -t.test('gzipped tarball that makes some drain/resume stuff', t => { - const cwd = path.dirname(__dirname) - const out = path.resolve(dir, 'package.tgz') - - // don't include node_modules/.cache, since that gets written to - // by nyc during tests, and can result in spurious errors. 
- const entries = fs.readdirSync(`${cwd}/node_modules`) - .filter(e => !/^\./.test(e)) - .map(e => `node_modules/${e}`) - - c({ z: true, C: cwd }, entries) - .pipe(fs.createWriteStream(out)) - .on('finish', _ => { - const child = spawn('tar', ['tf', out], { - stdio: ['ignore', 'ignore', 'pipe'], - }) - child.stderr.on('data', c => { - t.fail(c + '') - }) - child.on('close', (code, signal) => { - t.equal(code, 0) - t.equal(signal, null) - t.end() - }) - }) -}) - -t.test('create tarball out of another tarball', t => { - const out = path.resolve(dir, 'out.tar') - - const check = t => { - const expect = [ - 'dir/', - 'Ω.txt', - '🌟.txt', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', - 'hardlink-1', - 'hardlink-2', - 'symlink', - ] - list({ f: out, - sync: true, - onentry: entry => { - if (entry.path === 'hardlink-2') { - t.equal(entry.type, 'Link') - } else if (entry.path === 'symlink') { - t.equal(entry.type, 'SymbolicLink') - } else if (entry.path === 'dir/') { - t.equal(entry.type, 'Directory') - } else { - t.equal(entry.type, 'File') - } - t.equal(entry.path, expect.shift()) - } }) - t.same(expect, []) - t.end() - } - - t.test('sync', t => { - c({ - f: out, - cwd: tars, - sync: true, - }, ['@dir.tar', '@utf8.tar', '@links.tar']) - check(t) - }) - - t.test('async', t => { - c({ - f: out, - cwd: tars, - }, ['@dir.tar', '@utf8.tar', '@links.tar'], _ => check(t)) - }) - - t.end() -}) diff --git a/test/create.ts b/test/create.ts new file mode 100644 index 00000000..e0c4a0e8 --- /dev/null +++ b/test/create.ts @@ -0,0 +1,310 @@ +import fs from 'fs' +import { mkdirp } from 'mkdirp' +import path from 'path' +import { rimraf } from 'rimraf' +import t, { Test } from 'tap' +import { c, list, Pack, PackSync } from '../dist/esm/index.js' +import { spawn } from 'child_process' +//@ts-ignore +import mutateFS from 'mutate-fs' +import { fileURLToPath } from 'url' + +const isWindows = process.platform === 'win32' +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) +const dir = path.resolve(__dirname, 'fixtures/create') +const tars = path.resolve(__dirname, 'fixtures/tars') + +const readtar = ( + file: string, + cb: ( + code: number | null, + signal: null | NodeJS.Signals, + output: string, + ) => any, +) => { + const child = spawn('tar', ['tf', file]) + const out: Buffer[] = [] + child.stdout.on('data', c => out.push(c)) + child.on('close', (code, signal) => + cb(code, signal, Buffer.concat(out).toString()), + ) +} + +t.teardown(() => rimraf(dir)) + +t.before(async () => { + await rimraf(dir) + await mkdirp(dir) +}) + +t.test('no cb if sync or without file', t => { + //@ts-expect-error + t.throws(() => c({ sync: true }, ['asdf'], () => {})) + //@ts-expect-error + t.throws(() => c(() => {})) + t.throws(() => c({}, () => {})) + t.throws(() => c({}, ['asdf'], () => {})) + t.end() +}) + +t.test('create file', t => { + const files = [path.basename(__filename)] + + t.test('sync', t => { + const file = path.resolve(dir, 'sync.tar') + c( + { + file: file, + cwd: __dirname, + sync: true, + }, + files, + ) + readtar(file, (code, signal, list) => { + t.equal(code, 0) + t.equal(signal, null) + t.equal(list.trim(), 'create.ts') + t.end() + }) + }) + + t.test('async', t => { + const file = path.resolve(dir, 'async.tar') + c( + { + file: file, + cwd: __dirname, + }, + files, + er => { + if (er) { + throw er + } + readtar(file, (code, signal, list) => { + t.equal(code, 0) + t.equal(signal, null) + t.equal(list.trim(), 'create.ts') + t.end() + }) + }, + ) + }) + 
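+  // when a `file` option is given and the callback is omitted, c() returns
+  // a promise that resolves once the archive has been fully written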
+ t.test('async promise only', t => { + const file = path.resolve(dir, 'promise.tar') + c( + { + file: file, + cwd: __dirname, + }, + files, + ).then(() => { + readtar(file, (code, signal, list) => { + t.equal(code, 0) + t.equal(signal, null) + t.equal(list.trim(), 'create.ts') + t.end() + }) + }) + }) + + t.test('with specific mode', t => { + const mode = isWindows ? 0o666 : 0o740 + t.test('sync', t => { + const file = path.resolve(dir, 'sync-mode.tar') + c( + { + mode: mode, + file: file, + cwd: __dirname, + sync: true, + }, + files, + ) + readtar(file, (code, signal, list) => { + t.equal(code, 0) + t.equal(signal, null) + t.equal(list.trim(), 'create.ts') + t.equal(fs.lstatSync(file).mode & 0o7777, mode) + t.end() + }) + }) + + t.test('async', t => { + const file = path.resolve(dir, 'async-mode.tar') + c( + { + mode: mode, + file: file, + cwd: __dirname, + }, + files, + er => { + if (er) { + throw er + } + readtar(file, (code, signal, list) => { + t.equal(code, 0) + t.equal(signal, null) + t.equal(list.trim(), 'create.ts') + t.equal(fs.lstatSync(file).mode & 0o7777, mode) + t.end() + }) + }, + ) + }) + + t.end() + }) + t.end() +}) + +t.test('create', t => { + const ps = c({ sync: true }, ['README.md']) + t.equal(ps.sync, true) + t.type(ps, PackSync) + const p = c(['README.md']) + //@ts-expect-error + p.then + //@ts-expect-error + p.sync + t.type(c(['README.md']), Pack) + + t.end() +}) + +t.test('open fails', t => { + const poop = new Error('poop') + const file = path.resolve(dir, 'throw-open.tar') + t.teardown(mutateFS.statFail(poop)) + t.throws(() => + c( + { + file: file, + sync: true, + cwd: __dirname, + }, + [path.basename(__filename)], + ), + ) + t.throws(() => fs.lstatSync(file)) + t.end() +}) + +t.test('gzipped tarball that makes some drain/resume stuff', t => { + const cwd = path.dirname(__dirname) + const out = path.resolve(dir, 'package.tgz') + + // don't include node_modules/.cache, since that gets written to + // by nyc during tests, and can result in spurious errors. 
+ const entries = fs + .readdirSync(`${cwd}/node_modules`) + .filter(e => !/^[@.]/.test(e)) + .map(e => `node_modules/${e}`) + + const stream = c({ z: true, C: cwd }, entries) + + const outStream = fs.createWriteStream(out) + outStream.on('drain', () => { + stream.resume() + }) + + stream.pipe(outStream).on('finish', () => { + const child = spawn('tar', ['tf', out], { + stdio: ['ignore', 'ignore', 'pipe'], + }) + child.stderr.on('data', c => { + t.fail(c + '') + }) + child.on('close', (code, signal) => { + t.equal(code, 0) + t.equal(signal, null) + t.end() + }) + }) +}) + +t.test('create tarball out of another tarball', t => { + const out = path.resolve(dir, 'out.tar') + + const check = (t: Test) => { + const expect = [ + 'dir/', + 'Ω.txt', + '🌟.txt', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + 'hardlink-1', + 'hardlink-2', + 'symlink', + ] + list({ + f: out, + sync: true, + onReadEntry: entry => { + if (entry.path === 'hardlink-2') { + t.equal(entry.type, 'Link') + } else if (entry.path === 'symlink') { + t.equal(entry.type, 'SymbolicLink') + } else if (entry.path === 'dir/') { + t.equal(entry.type, 'Directory') + } else { + t.equal(entry.type, 'File') + } + t.equal(entry.path, expect.shift()) + }, + }) + t.same(expect, []) + t.end() + } + + t.test('sync', t => { + c( + { + f: out, + cwd: tars, + sync: true, + }, + ['@dir.tar', '@utf8.tar', '@links.tar'], + ) + check(t) + }) + + t.test('async', async t => { + await c( + { + f: out, + cwd: tars, + }, + ['@dir.tar', '@utf8.tar', '@links.tar'], + ) + check(t) + }) + + t.end() +}) + +t.test('must specify some files', t => { + t.throws(() => c({}), 'no paths specified to add to archive') + t.end() +}) + +t.test('transform a filename', async t => { + const cwd = t.testdir({ + 'README.md': 'hello, world', + }) + const data = await c( + { + cwd, + onWriteEntry: entry => { + entry.path = 'bloorg.md' + }, + sync: true, + }, + ['README.md'], + ).concat() + t.equal( + data.subarray(0, 'bloorg.md'.length).toString(), + 'bloorg.md', + ) +}) diff --git a/test/cwd-error.js b/test/cwd-error.js new file mode 100644 index 00000000..1961b404 --- /dev/null +++ b/test/cwd-error.js @@ -0,0 +1,10 @@ +import t from 'tap' +import { CwdError } from '../dist/esm/cwd-error.js' + +t.match(new CwdError('path', 'code'), { + name: 'CwdError', + path: 'path', + code: 'code', + syscall: 'chdir', + message: `code: Cannot cd into 'path'`, +}) diff --git a/test/extract.js b/test/extract.js deleted file mode 100644 index c11d0afc..00000000 --- a/test/extract.js +++ /dev/null @@ -1,351 +0,0 @@ -'use strict' - -const t = require('tap') -const nock = require('nock') -const x = require('../lib/extract.js') -const path = require('path') -const fs = require('fs') -const extractdir = path.resolve(__dirname, 'fixtures/extract') -const tars = path.resolve(__dirname, 'fixtures/tars') -const mkdirp = require('mkdirp') -const { promisify } = require('util') -const rimraf = promisify(require('rimraf')) -const mutateFS = require('mutate-fs') -const pipeline = promisify(require('stream').pipeline) -const http = require('http') - -const tnock = (t, host, opts) => { - nock.disableNetConnect() - const server = nock(host, opts) - t.teardown(function () { - nock.enableNetConnect() - server.done() - }) - return server -} - -t.teardown(_ => rimraf(extractdir)) - -t.test('basic extracting', t => { - const file = path.resolve(tars, 'utf8.tar') - const dir = path.resolve(extractdir, 'basic') - - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - - const 
check = async t => { - fs.lstatSync(dir + '/Ω.txt') - fs.lstatSync(dir + '/🌟.txt') - t.throws(_ => fs.lstatSync(dir + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' + - '/f/o/l/d/e/r/-/p/a/t/h/Ω.txt')) - - await rimraf(dir) - t.end() - } - - const files = ['🌟.txt', 'Ω.txt'] - t.test('sync', t => { - x({ file: file, sync: true, C: dir }, files) - return check(t) - }) - - t.test('async promisey', t => { - return x({ file: file, cwd: dir }, files).then(_ => check(t)) - }) - - t.test('async cb', t => { - return x({ file: file, cwd: dir }, files, er => { - if (er) { - throw er - } - return check(t) - }) - }) - - t.end() -}) - -t.test('ensure an open stream is not prematuraly closed', t => { - t.plan(1) - - const file = path.resolve(tars, 'long-paths.tar') - const dir = path.resolve(extractdir, 'basic-with-stream') - - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - - const check = async t => { - t.ok(fs.lstatSync(dir + '/long-path')) - await rimraf(dir) - t.end() - } - - t.test('async promisey', t => { - const stream = fs.createReadStream(file, { - highWaterMark: 1, - }) - pipeline( - stream, - x({ cwd: dir }) - ).then(_ => check(t)) - }) - - t.end() -}) - -t.test('ensure an open stream is not prematuraly closed http', t => { - t.plan(1) - - const file = path.resolve(tars, 'long-paths.tar') - const dir = path.resolve(extractdir, 'basic-with-stream-http') - - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - - const check = async t => { - t.ok(fs.lstatSync(dir + '/long-path')) - await rimraf(dir) - t.end() - } - - t.test('async promisey', t => { - tnock(t, 'http://codeload.github.com/') - .get('/npm/node-tar/tar.gz/main') - .delay(250) - .reply(200, () => fs.createReadStream(file)) - - http.get('http://codeload.github.com/npm/node-tar/tar.gz/main', (stream) => { - return pipeline( - stream, - x({ cwd: dir }) - ).then(_ => check(t)) - }) - }) - - t.end() -}) - -t.test('file list and filter', t => { - const file = path.resolve(tars, 'utf8.tar') - const dir = path.resolve(extractdir, 'filter') - - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - - const check = async t => { - fs.lstatSync(dir + '/Ω.txt') - t.throws(_ => fs.lstatSync(dir + '/🌟.txt')) - t.throws(_ => fs.lstatSync(dir + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' + - '/f/o/l/d/e/r/-/p/a/t/h/Ω.txt')) - - await rimraf(dir) - t.end() - } - - const filter = path => path === 'Ω.txt' - - t.test('sync', t => { - x({ filter: filter, file: file, sync: true, C: dir }, ['🌟.txt', 'Ω.txt']) - return check(t) - }) - - t.test('async promisey', t => { - return x({ filter: filter, file: file, cwd: dir }, ['🌟.txt', 'Ω.txt']).then(_ => { - return check(t) - }) - }) - - t.test('async cb', t => { - return x({ filter: filter, file: file, cwd: dir }, ['🌟.txt', 'Ω.txt'], er => { - if (er) { - throw er - } - return check(t) - }) - }) - - t.end() -}) - -t.test('no file list', t => { - const file = path.resolve(tars, 'body-byte-counts.tar') - const dir = path.resolve(extractdir, 'no-list') - - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - - const check = async t => { - t.equal(fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, 1024) - t.equal(fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, 512) - t.equal(fs.lstatSync(path.resolve(dir, 'one-byte.txt')).size, 1) - t.equal(fs.lstatSync(path.resolve(dir, 'zero-byte.txt')).size, 0) - await rimraf(dir) - t.end() - } - - t.test('sync', t => { - x({ file: file, sync: true, C: dir }) - return 
check(t) - }) - - t.test('async promisey', t => { - return x({ file: file, cwd: dir }).then(_ => { - return check(t) - }) - }) - - t.test('async cb', t => { - return x({ file: file, cwd: dir }, er => { - if (er) { - throw er - } - return check(t) - }) - }) - - t.end() -}) - -t.test('read in itty bits', t => { - const maxReadSize = 1000 - const file = path.resolve(tars, 'body-byte-counts.tar') - const dir = path.resolve(extractdir, 'no-list') - - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - - const check = async t => { - t.equal(fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, 1024) - t.equal(fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, 512) - t.equal(fs.lstatSync(path.resolve(dir, 'one-byte.txt')).size, 1) - t.equal(fs.lstatSync(path.resolve(dir, 'zero-byte.txt')).size, 0) - await rimraf(dir) - t.end() - } - - t.test('sync', t => { - x({ file: file, sync: true, C: dir, maxReadSize: maxReadSize }) - return check(t) - }) - - t.test('async promisey', t => { - return x({ file: file, cwd: dir, maxReadSize: maxReadSize }).then(_ => { - return check(t) - }) - }) - - t.test('async cb', t => { - return x({ file: file, cwd: dir, maxReadSize: maxReadSize }, er => { - if (er) { - throw er - } - return check(t) - }) - }) - - t.end() -}) - -t.test('bad calls', t => { - t.throws(_ => x(_ => _)) - t.throws(_ => x({ sync: true }, _ => _)) - t.throws(_ => x({ sync: true }, [], _ => _)) - t.end() -}) - -t.test('no file', t => { - const Unpack = require('../lib/unpack.js') - t.type(x(), Unpack) - t.type(x(['asdf']), Unpack) - t.type(x({ sync: true }), Unpack.Sync) - t.end() -}) - -t.test('nonexistent', t => { - t.throws(_ => x({ sync: true, file: 'does not exist' })) - x({ file: 'does not exist' }).catch(_ => t.end()) -}) - -t.test('read fail', t => { - const poop = new Error('poop') - t.teardown(mutateFS.fail('read', poop)) - - t.throws(_ => x({ maxReadSize: 10, sync: true, file: __filename }), poop) - t.end() -}) - -t.test('sync gzip error edge case test', async t => { - const file = path.resolve(__dirname, 'fixtures/sync-gzip-fail.tgz') - const dir = path.resolve(__dirname, 'sync-gzip-fail') - const cwd = process.cwd() - await mkdirp(dir + '/x') - process.chdir(dir) - t.teardown(async () => { - process.chdir(cwd) - await rimraf(dir) - }) - - x({ - sync: true, - file: file, - onwarn: (c, m, er) => { - throw er - }, - }) - - t.same(fs.readdirSync(dir + '/x').sort(), - ['1', '10', '2', '3', '4', '5', '6', '7', '8', '9']) - - t.end() -}) - -t.test('brotli', async t => { - const file = path.resolve(__dirname, 'fixtures/example.tbr') - const dir = path.resolve(__dirname, 'brotli') - - t.beforeEach(async () => { - await mkdirp(dir) - }) - - t.afterEach(async () => { - await rimraf(dir) - }) - - t.test('fails if unknown file extension', async t => { - const filename = path.resolve(__dirname, 'brotli/example.unknown') - const f = fs.openSync(filename, 'a') - fs.closeSync(f) - - const expect = new Error('TAR_BAD_ARCHIVE: Unrecognized archive format') - - t.throws(_ => x({ sync: true, file: filename }), expect) - }) - - t.test('succeeds based on file extension', t => { - x({ sync: true, file: file, C: dir }) - - t.same(fs.readdirSync(dir + '/x').sort(), - ['1', '10', '2', '3', '4', '5', '6', '7', '8', '9']) - t.end() - }) - - t.test('succeeds when passed explicit option', t => { - x({ sync: true, file: file, C: dir, brotli: true }) - - t.same(fs.readdirSync(dir + '/x').sort(), - ['1', '10', '2', '3', '4', '5', '6', '7', '8', '9']) - t.end() - }) -}) diff --git 
a/test/extract.ts b/test/extract.ts new file mode 100644 index 00000000..37d2939e --- /dev/null +++ b/test/extract.ts @@ -0,0 +1,493 @@ +import fs from 'fs' +import http from 'http' +import { mkdirp } from 'mkdirp' +import nock from 'nock' +import path from 'path' +import { rimraf } from 'rimraf' +import { pipeline as PL } from 'stream' +import t, { Test } from 'tap' +import { fileURLToPath } from 'url' +import { promisify } from 'util' +import { extract as x } from '../dist/esm/extract.js' +import { Unpack, UnpackSync } from '../dist/esm/unpack.js' +const pipeline = promisify(PL) + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) +const extractdir = path.resolve(__dirname, 'fixtures/extract') +const tars = path.resolve(__dirname, 'fixtures/tars') +//@ts-ignore +import mutateFS from 'mutate-fs' + +const tnock = (t: Test, host: string, opts?: nock.Options) => { + nock.disableNetConnect() + const server = nock(host, opts) + t.teardown(function () { + nock.enableNetConnect() + server.done() + }) + return server +} + +t.teardown(() => rimraf(extractdir)) + +t.test('basic extracting', t => { + const file = path.resolve(tars, 'utf8.tar') + const dir = path.resolve(extractdir, 'basic') + + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) + }) + + const check = async (t: Test) => { + fs.lstatSync(dir + '/Ω.txt') + fs.lstatSync(dir + '/🌟.txt') + t.throws(() => + fs.lstatSync( + dir + + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' + + '/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + ), + ) + + await rimraf(dir) + } + + const files = ['🌟.txt', 'Ω.txt'] + t.test('sync', t => { + x({ file, sync: true, C: dir }, files) + return check(t) + }) + + t.test('async promisey', async t => { + const p = x({ file, cwd: dir }, files) + //@ts-expect-error + p.sync + //@ts-expect-error + p.write + await p + return check(t) + }) + + t.test('async cb', async t => { + const p = x({ file, cwd: dir }, files, er => { + if (er) { + throw er + } + return check(t) + }) + //@ts-expect-error + p.sync + //@ts-expect-error + p.write + await p + }) + + t.test('stream sync', t => { + const ups = x({ cwd: dir, sync: true }, files) + ups.end(fs.readFileSync(file)) + //@ts-expect-error + ups.then + t.equal(ups.sync, true) + return check(t) + }) + + t.test('stream async', async t => { + const up = x({ cwd: dir }, files) + //@ts-expect-error + up.then + //@ts-expect-error + up.sync + await new Promise(r => + up.end(fs.readFileSync(file)).on('end', r), + ) + return check(t) + }) + + t.end() +}) + +t.test('ensure an open stream is not prematurely closed', t => { + t.plan(1) + + const file = path.resolve(tars, 'long-paths.tar') + const dir = t.testdir({}) + + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) + }) + + const check = async (t: Test) => { + t.ok(fs.lstatSync(dir + '/long-path')) + await rimraf(dir) + t.end() + } + + t.test('async promisey', async t => { + const stream = fs.createReadStream(file, { + highWaterMark: 1, + }) + await pipeline(stream, x({ cwd: dir })) + return check(t) + }) + + t.end() +}) + +t.test('ensure an open stream is not prematuraly closed http', t => { + t.plan(1) + + const file = path.resolve(tars, 'long-paths.tar') + const dir = path.resolve(extractdir, 'basic-with-stream-http') + + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) + }) + + const check = async (t: Test) => { + t.ok(fs.lstatSync(dir + '/long-path')) + await rimraf(dir) + t.end() + } + + t.test('async promisey', t => { + tnock(t, 
'http://codeload.github.com/') + .get('/npm/node-tar/tar.gz/main') + .delay(250) + .reply(200, () => fs.createReadStream(file)) + + http.get( + 'http://codeload.github.com/npm/node-tar/tar.gz/main', + async stream => { + await pipeline(stream, x({ cwd: dir })) + return check(t) + }, + ) + }) + + t.end() +}) + +t.test('file list and filter', t => { + const file = path.resolve(tars, 'utf8.tar') + const dir = path.resolve(extractdir, 'filter') + + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) + }) + + const check = async (t: Test) => { + fs.lstatSync(dir + '/Ω.txt') + t.throws(() => fs.lstatSync(dir + '/🌟.txt')) + t.throws(() => + fs.lstatSync( + dir + + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' + + '/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + ), + ) + + await rimraf(dir) + } + + const filter = (path: string) => path === 'Ω.txt' + + t.test('sync file', t => { + x({ filter, file, sync: true, C: dir }, ['🌟.txt', 'Ω.txt']) + return check(t) + }) + + t.test('async file', async t => { + await x({ filter, file, cwd: dir }, ['🌟.txt', 'Ω.txt']) + check(t) + }) + + t.test('async cb', t => { + return x({ filter, file, cwd: dir }, ['🌟.txt', 'Ω.txt'], er => { + if (er) { + throw er + } + return check(t) + }) + }) + + t.end() +}) + +t.test('no file list', t => { + const file = path.resolve(tars, 'body-byte-counts.tar') + const dir = path.resolve(extractdir, 'no-list') + + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) + }) + + const check = async (t: Test) => { + t.equal( + fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, + 1024, + ) + t.equal( + fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, + 512, + ) + t.equal(fs.lstatSync(path.resolve(dir, 'one-byte.txt')).size, 1) + t.equal(fs.lstatSync(path.resolve(dir, 'zero-byte.txt')).size, 0) + await rimraf(dir) + } + + t.test('sync file', t => { + x({ file, sync: true, C: dir }) + return check(t) + }) + + t.test('async promisey file', async t => { + await x({ file, cwd: dir }) + return check(t) + }) + + t.test('async cb', t => { + return x({ file: file, cwd: dir }, er => { + if (er) { + throw er + } + return check(t) + }) + }) + + t.test('sync stream', t => { + const up = x({ sync: true, C: dir }) + t.equal(up.sync, true) + t.type(up, UnpackSync) + //@ts-expect-error + up.then + up.end(fs.readFileSync(file)) + return check(t) + }) + + t.test('async stream', t => { + const up = x({ C: dir }) + t.type(up, Unpack) + //@ts-expect-error + up.sync + //@ts-expect-error + up.then + up.end(fs.readFileSync(file)) + return new Promise(r => up.on('close', () => r(check(t)))) + }) + + t.end() +}) + +t.test('read in itty bits', t => { + const maxReadSize = 1000 + const file = path.resolve(tars, 'body-byte-counts.tar') + const dir = path.resolve(extractdir, 'no-list') + + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) + }) + + const check = async (t: Test) => { + t.equal( + fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, + 1024, + ) + t.equal( + fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, + 512, + ) + t.equal(fs.lstatSync(path.resolve(dir, 'one-byte.txt')).size, 1) + t.equal(fs.lstatSync(path.resolve(dir, 'zero-byte.txt')).size, 0) + await rimraf(dir) + } + + t.test('sync', t => { + x({ file: file, sync: true, C: dir, maxReadSize: maxReadSize }) + return check(t) + }) + + t.test('async promisey', async t => { + await x({ file: file, cwd: dir, maxReadSize: maxReadSize }) + return check(t) + }) + + t.test('async cb', t => { + return x( + { file: file, cwd: dir, 
maxReadSize: maxReadSize }, + er => { + if (er) { + throw er + } + return check(t) + }, + ) + }) + + t.end() +}) + +t.test('bad calls', t => { + t.throws(() => x({}, () => {})) + t.throws(() => x({}, [], () => {})) + //@ts-expect-error + t.throws(() => x({ sync: true }, () => {})) + //@ts-expect-error + t.throws(() => x({ sync: true }, [], () => {})) + t.end() +}) + +t.test('no file', t => { + const up = x() + t.type(up, Unpack) + //@ts-expect-error + up.then + //@ts-expect-error + up.sync + const upf = x(['asdf']) + //@ts-expect-error + upf.then + //@ts-expect-error + upf.sync + t.type(upf, Unpack) + const ups = x({ sync: true }) + //@ts-expect-error + ups.then + t.equal(ups.sync, true) + t.type(ups, UnpackSync) + t.end() +}) + +t.test('nonexistent', async t => { + t.throws(() => x({ sync: true, file: 'does not exist' })) + await t.rejects(x({ file: 'does not exist' })) +}) + +t.test('read fail', t => { + const poop = new Error('poop') + t.teardown(mutateFS.fail('read', poop)) + + t.throws( + () => x({ maxReadSize: 10, sync: true, file: __filename }), + poop, + ) + t.end() +}) + +t.test('sync gzip error edge case test', async t => { + const file = path.resolve(__dirname, 'fixtures/sync-gzip-fail.tgz') + const dir = path.resolve(__dirname, 'sync-gzip-fail') + const cwd = process.cwd() + await mkdirp(dir + '/x') + process.chdir(dir) + t.teardown(async () => { + process.chdir(cwd) + await rimraf(dir) + }) + + x({ + sync: true, + file: file, + onwarn: (_c: any, _m: any, er) => { + throw er + }, + }) + + t.same(fs.readdirSync(dir + '/x').sort(), [ + '1', + '10', + '2', + '3', + '4', + '5', + '6', + '7', + '8', + '9', + ]) + + t.end() +}) + +t.test('brotli', async t => { + const file = path.resolve(__dirname, 'fixtures/example.tbr') + const dir = path.resolve(__dirname, 'brotli') + + t.beforeEach(async () => { + await mkdirp(dir) + }) + + t.afterEach(async () => { + await rimraf(dir) + }) + + t.test('fails if unknown file extension', async t => { + const filename = path.resolve(__dirname, 'brotli/example.unknown') + const f = fs.openSync(filename, 'a') + fs.closeSync(f) + + const expect = new Error( + 'TAR_BAD_ARCHIVE: Unrecognized archive format', + ) + + t.throws(() => x({ sync: true, file: filename }), expect) + }) + + t.test('succeeds based on file extension', t => { + x({ sync: true, file: file, C: dir }) + + t.same(fs.readdirSync(dir + '/x').sort(), [ + '1', + '10', + '2', + '3', + '4', + '5', + '6', + '7', + '8', + '9', + ]) + t.end() + }) + + t.test('succeeds when passed explicit option', t => { + x({ sync: true, file: file, C: dir, brotli: true }) + + t.same(fs.readdirSync(dir + '/x').sort(), [ + '1', + '10', + '2', + '3', + '4', + '5', + '6', + '7', + '8', + '9', + ]) + t.end() + }) +}) + +t.test('verify long linkname is not a problem', async t => { + // See: https://github.com/isaacs/node-tar/issues/312 + const file = path.resolve(__dirname, 'fixtures/long-linkname.tar') + t.test('sync', t => { + const cwd = t.testdir({}) + const result = x({ sync: true, strict: true, file, cwd }) + t.equal(result, undefined) + t.ok(fs.lstatSync(cwd + '/test').isSymbolicLink()) + t.end() + }) + t.test('async', async t => { + await x({ file, C: t.testdir({}), strict: true }) + t.ok(fs.lstatSync(t.testdirName + '/test').isSymbolicLink()) + }) +}) diff --git a/test/fixtures/long-linkname.tar b/test/fixtures/long-linkname.tar new file mode 100644 index 00000000..34c0ea57 Binary files /dev/null and b/test/fixtures/long-linkname.tar differ diff --git a/test/fixtures/make-tar.js 
b/test/fixtures/make-tar.js new file mode 100644 index 00000000..77ee2b26 --- /dev/null +++ b/test/fixtures/make-tar.js @@ -0,0 +1,26 @@ +import { Header } from '../../dist/esm/header.js' +export const makeTar = chunks => { + let dataLen = 0 + return Buffer.concat( + chunks.map(chunk => { + if (Buffer.isBuffer(chunk)) { + dataLen += chunk.length + return chunk + } + const size = Math.max( + typeof chunk === 'string' + ? 512 * Math.ceil(chunk.length / 512) + : 512, + ) + dataLen += size + const buf = Buffer.alloc(size) + if (typeof chunk === 'string') { + buf.write(chunk) + } else { + new Header(chunk).encode(buf, 0) + } + return buf + }), + dataLen, + ) +} diff --git a/test/fixtures/parse/bad-cksum--filter-strict.json b/test/fixtures/parse/bad-cksum--filter-strict.json index edfce539..48e0680b 100644 --- a/test/fixtures/parse/bad-cksum--filter-strict.json +++ b/test/fixtures/parse/bad-cksum--filter-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/bad-cksum--filter.json b/test/fixtures/parse/bad-cksum--filter.json index 55274a4e..d2664877 100644 --- a/test/fixtures/parse/bad-cksum--filter.json +++ b/test/fixtures/parse/bad-cksum--filter.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/bad-cksum--meta-250-filter-strict.json b/test/fixtures/parse/bad-cksum--meta-250-filter-strict.json index edfce539..48e0680b 100644 --- a/test/fixtures/parse/bad-cksum--meta-250-filter-strict.json +++ b/test/fixtures/parse/bad-cksum--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/bad-cksum--meta-250-filter.json b/test/fixtures/parse/bad-cksum--meta-250-filter.json index 55274a4e..d2664877 100644 --- a/test/fixtures/parse/bad-cksum--meta-250-filter.json +++ b/test/fixtures/parse/bad-cksum--meta-250-filter.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git 
a/test/fixtures/parse/bad-cksum--meta-250-strict.json b/test/fixtures/parse/bad-cksum--meta-250-strict.json index edfce539..48e0680b 100644 --- a/test/fixtures/parse/bad-cksum--meta-250-strict.json +++ b/test/fixtures/parse/bad-cksum--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/bad-cksum--meta-250.json b/test/fixtures/parse/bad-cksum--meta-250.json index 55274a4e..d2664877 100644 --- a/test/fixtures/parse/bad-cksum--meta-250.json +++ b/test/fixtures/parse/bad-cksum--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/bad-cksum--strict.json b/test/fixtures/parse/bad-cksum--strict.json index edfce539..48e0680b 100644 --- a/test/fixtures/parse/bad-cksum--strict.json +++ b/test/fixtures/parse/bad-cksum--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/bad-cksum.json b/test/fixtures/parse/bad-cksum.json index 55274a4e..d2664877 100644 --- a/test/fixtures/parse/bad-cksum.json +++ b/test/fixtures/parse/bad-cksum.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--filter-strict.json b/test/fixtures/parse/body-byte-counts--filter-strict.json index 7290291b..9462cd61 100644 --- a/test/fixtures/parse/body-byte-counts--filter-strict.json +++ b/test/fixtures/parse/body-byte-counts--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, 
"ignore": true, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--filter.json b/test/fixtures/parse/body-byte-counts--filter.json index 7290291b..9462cd61 100644 --- a/test/fixtures/parse/body-byte-counts--filter.json +++ b/test/fixtures/parse/body-byte-counts--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--meta-250-filter-strict.json b/test/fixtures/parse/body-byte-counts--meta-250-filter-strict.json index 7290291b..9462cd61 100644 --- a/test/fixtures/parse/body-byte-counts--meta-250-filter-strict.json +++ 
b/test/fixtures/parse/body-byte-counts--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--meta-250-filter.json b/test/fixtures/parse/body-byte-counts--meta-250-filter.json index 7290291b..9462cd61 100644 --- a/test/fixtures/parse/body-byte-counts--meta-250-filter.json +++ b/test/fixtures/parse/body-byte-counts--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": 
false, "ignore": true, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--meta-250-strict.json b/test/fixtures/parse/body-byte-counts--meta-250-strict.json index 8069e9cd..8ae48534 100644 --- a/test/fixtures/parse/body-byte-counts--meta-250-strict.json +++ b/test/fixtures/parse/body-byte-counts--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--meta-250.json b/test/fixtures/parse/body-byte-counts--meta-250.json index 8069e9cd..8ae48534 100644 --- a/test/fixtures/parse/body-byte-counts--meta-250.json +++ b/test/fixtures/parse/body-byte-counts--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ 
"entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--strict.json b/test/fixtures/parse/body-byte-counts--strict.json index 8069e9cd..8ae48534 100644 --- a/test/fixtures/parse/body-byte-counts--strict.json +++ b/test/fixtures/parse/body-byte-counts--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts.json b/test/fixtures/parse/body-byte-counts.json index 8069e9cd..8ae48534 100644 --- a/test/fixtures/parse/body-byte-counts.json +++ b/test/fixtures/parse/body-byte-counts.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], 
[ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--filter-strict.json b/test/fixtures/parse/dir--filter-strict.json index 69926438..52757dae 100644 --- a/test/fixtures/parse/dir--filter-strict.json +++ b/test/fixtures/parse/dir--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--filter.json b/test/fixtures/parse/dir--filter.json index 69926438..52757dae 100644 --- a/test/fixtures/parse/dir--filter.json +++ b/test/fixtures/parse/dir--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--meta-250-filter-strict.json b/test/fixtures/parse/dir--meta-250-filter-strict.json index 69926438..52757dae 100644 --- a/test/fixtures/parse/dir--meta-250-filter-strict.json +++ b/test/fixtures/parse/dir--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--meta-250-filter.json b/test/fixtures/parse/dir--meta-250-filter.json index 69926438..52757dae 100644 --- 
a/test/fixtures/parse/dir--meta-250-filter.json +++ b/test/fixtures/parse/dir--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--meta-250-strict.json b/test/fixtures/parse/dir--meta-250-strict.json index 0a933bf5..0688cc0b 100644 --- a/test/fixtures/parse/dir--meta-250-strict.json +++ b/test/fixtures/parse/dir--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--meta-250.json b/test/fixtures/parse/dir--meta-250.json index 0a933bf5..0688cc0b 100644 --- a/test/fixtures/parse/dir--meta-250.json +++ b/test/fixtures/parse/dir--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--strict.json b/test/fixtures/parse/dir--strict.json index 0a933bf5..0688cc0b 100644 --- a/test/fixtures/parse/dir--strict.json +++ b/test/fixtures/parse/dir--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir.json b/test/fixtures/parse/dir.json index 0a933bf5..0688cc0b 100644 --- a/test/fixtures/parse/dir.json +++ b/test/fixtures/parse/dir.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax--filter-strict.json b/test/fixtures/parse/emptypax--filter-strict.json index 2810367d..eb30c2be 100644 --- a/test/fixtures/parse/emptypax--filter-strict.json +++ b/test/fixtures/parse/emptypax--filter-strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - 
"comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax--filter.json b/test/fixtures/parse/emptypax--filter.json index 2810367d..eb30c2be 100644 --- a/test/fixtures/parse/emptypax--filter.json +++ b/test/fixtures/parse/emptypax--filter.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax--meta-250-filter-strict.json b/test/fixtures/parse/emptypax--meta-250-filter-strict.json index 2810367d..eb30c2be 100644 --- a/test/fixtures/parse/emptypax--meta-250-filter-strict.json +++ b/test/fixtures/parse/emptypax--meta-250-filter-strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": 
"isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax--meta-250-filter.json b/test/fixtures/parse/emptypax--meta-250-filter.json index 2810367d..eb30c2be 100644 --- a/test/fixtures/parse/emptypax--meta-250-filter.json +++ b/test/fixtures/parse/emptypax--meta-250-filter.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax--meta-250-strict.json b/test/fixtures/parse/emptypax--meta-250-strict.json index 686a428a..55125fe7 100644 --- a/test/fixtures/parse/emptypax--meta-250-strict.json +++ b/test/fixtures/parse/emptypax--meta-250-strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git 
a/test/fixtures/parse/emptypax--meta-250.json b/test/fixtures/parse/emptypax--meta-250.json index 686a428a..55125fe7 100644 --- a/test/fixtures/parse/emptypax--meta-250.json +++ b/test/fixtures/parse/emptypax--meta-250.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax--strict.json b/test/fixtures/parse/emptypax--strict.json index 686a428a..55125fe7 100644 --- a/test/fixtures/parse/emptypax--strict.json +++ b/test/fixtures/parse/emptypax--strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax.json b/test/fixtures/parse/emptypax.json index 686a428a..55125fe7 100644 --- a/test/fixtures/parse/emptypax.json +++ b/test/fixtures/parse/emptypax.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ 
"size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file--filter-strict.json b/test/fixtures/parse/file--filter-strict.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--filter-strict.json +++ b/test/fixtures/parse/file--filter-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file--filter.json b/test/fixtures/parse/file--filter.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--filter.json +++ b/test/fixtures/parse/file--filter.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file--meta-250-filter-strict.json b/test/fixtures/parse/file--meta-250-filter-strict.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--meta-250-filter-strict.json +++ b/test/fixtures/parse/file--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file--meta-250-filter.json b/test/fixtures/parse/file--meta-250-filter.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--meta-250-filter.json +++ b/test/fixtures/parse/file--meta-250-filter.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git 
a/test/fixtures/parse/file--meta-250-strict.json b/test/fixtures/parse/file--meta-250-strict.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--meta-250-strict.json +++ b/test/fixtures/parse/file--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file--meta-250.json b/test/fixtures/parse/file--meta-250.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--meta-250.json +++ b/test/fixtures/parse/file--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file--strict.json b/test/fixtures/parse/file--strict.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--strict.json +++ b/test/fixtures/parse/file--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file.json b/test/fixtures/parse/file.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file.json +++ b/test/fixtures/parse/file.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header--filter-strict.json b/test/fixtures/parse/global-header--filter-strict.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--filter-strict.json +++ b/test/fixtures/parse/global-header--filter-strict.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": 
null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header--filter.json b/test/fixtures/parse/global-header--filter.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--filter.json +++ b/test/fixtures/parse/global-header--filter.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header--meta-250-filter-strict.json b/test/fixtures/parse/global-header--meta-250-filter-strict.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--meta-250-filter-strict.json +++ b/test/fixtures/parse/global-header--meta-250-filter-strict.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header--meta-250-filter.json b/test/fixtures/parse/global-header--meta-250-filter.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--meta-250-filter.json +++ b/test/fixtures/parse/global-header--meta-250-filter.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header--meta-250-strict.json b/test/fixtures/parse/global-header--meta-250-strict.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--meta-250-strict.json +++ b/test/fixtures/parse/global-header--meta-250-strict.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ 
"gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header--meta-250.json b/test/fixtures/parse/global-header--meta-250.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--meta-250.json +++ b/test/fixtures/parse/global-header--meta-250.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header--strict.json b/test/fixtures/parse/global-header--strict.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--strict.json +++ b/test/fixtures/parse/global-header--strict.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header.json b/test/fixtures/parse/global-header.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header.json +++ b/test/fixtures/parse/global-header.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links--filter-strict.json b/test/fixtures/parse/links--filter-strict.json index ed902aa1..acf569af 100644 --- a/test/fixtures/parse/links--filter-strict.json +++ b/test/fixtures/parse/links--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - 
"linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links--filter.json b/test/fixtures/parse/links--filter.json index ed902aa1..acf569af 100644 --- a/test/fixtures/parse/links--filter.json +++ b/test/fixtures/parse/links--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links--meta-250-filter-strict.json b/test/fixtures/parse/links--meta-250-filter-strict.json index ed902aa1..acf569af 100644 --- a/test/fixtures/parse/links--meta-250-filter-strict.json +++ b/test/fixtures/parse/links--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - 
"ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links--meta-250-filter.json b/test/fixtures/parse/links--meta-250-filter.json index ed902aa1..acf569af 100644 --- a/test/fixtures/parse/links--meta-250-filter.json +++ b/test/fixtures/parse/links--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links--meta-250-strict.json b/test/fixtures/parse/links--meta-250-strict.json index fdf07254..216a3ba7 100644 --- a/test/fixtures/parse/links--meta-250-strict.json +++ b/test/fixtures/parse/links--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, 
- "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links--meta-250.json b/test/fixtures/parse/links--meta-250.json index fdf07254..216a3ba7 100644 --- a/test/fixtures/parse/links--meta-250.json +++ b/test/fixtures/parse/links--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links--strict.json b/test/fixtures/parse/links--strict.json index fdf07254..216a3ba7 100644 --- a/test/fixtures/parse/links--strict.json +++ b/test/fixtures/parse/links--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--filter-strict.json b/test/fixtures/parse/links-invalid--filter-strict.json index fd2d5dc8..6fdfb2d5 100644 --- a/test/fixtures/parse/links-invalid--filter-strict.json +++ b/test/fixtures/parse/links-invalid--filter-strict.json @@ -23,8 +23,6 @@ [ 
"ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -36,8 +34,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -53,9 +49,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--filter.json b/test/fixtures/parse/links-invalid--filter.json index 42a58d14..1e14b8a0 100644 --- a/test/fixtures/parse/links-invalid--filter.json +++ b/test/fixtures/parse/links-invalid--filter.json @@ -17,8 +17,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -30,8 +28,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -47,9 +43,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--meta-250-filter-strict.json b/test/fixtures/parse/links-invalid--meta-250-filter-strict.json index fd2d5dc8..6fdfb2d5 100644 --- a/test/fixtures/parse/links-invalid--meta-250-filter-strict.json +++ b/test/fixtures/parse/links-invalid--meta-250-filter-strict.json @@ -23,8 +23,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -36,8 +34,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -53,9 +49,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--meta-250-filter.json b/test/fixtures/parse/links-invalid--meta-250-filter.json index 42a58d14..1e14b8a0 100644 --- a/test/fixtures/parse/links-invalid--meta-250-filter.json +++ b/test/fixtures/parse/links-invalid--meta-250-filter.json @@ -17,8 +17,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -30,8 +28,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -47,9 +43,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--meta-250-strict.json b/test/fixtures/parse/links-invalid--meta-250-strict.json index 0c6c72d0..6e9c1531 100644 --- a/test/fixtures/parse/links-invalid--meta-250-strict.json +++ b/test/fixtures/parse/links-invalid--meta-250-strict.json @@ -23,8 +23,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -36,8 +34,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -53,9 +49,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--meta-250.json b/test/fixtures/parse/links-invalid--meta-250.json index c62395e3..569dcb62 100644 --- 
a/test/fixtures/parse/links-invalid--meta-250.json +++ b/test/fixtures/parse/links-invalid--meta-250.json @@ -17,8 +17,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -30,8 +28,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -47,9 +43,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--strict.json b/test/fixtures/parse/links-invalid--strict.json index 0c6c72d0..6e9c1531 100644 --- a/test/fixtures/parse/links-invalid--strict.json +++ b/test/fixtures/parse/links-invalid--strict.json @@ -23,8 +23,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -36,8 +34,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -53,9 +49,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid.json b/test/fixtures/parse/links-invalid.json index c62395e3..569dcb62 100644 --- a/test/fixtures/parse/links-invalid.json +++ b/test/fixtures/parse/links-invalid.json @@ -17,8 +17,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -30,8 +28,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -47,9 +43,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--filter-strict.json b/test/fixtures/parse/links-strip--filter-strict.json index 9a640e0b..47069c6b 100644 --- a/test/fixtures/parse/links-strip--filter-strict.json +++ b/test/fixtures/parse/links-strip--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": 
false, "ignore": true, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--filter.json b/test/fixtures/parse/links-strip--filter.json index 9a640e0b..47069c6b 100644 --- a/test/fixtures/parse/links-strip--filter.json +++ b/test/fixtures/parse/links-strip--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--meta-250-filter-strict.json b/test/fixtures/parse/links-strip--meta-250-filter-strict.json index 9a640e0b..47069c6b 100644 --- a/test/fixtures/parse/links-strip--meta-250-filter-strict.json +++ b/test/fixtures/parse/links-strip--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, 
"ignore": true, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--meta-250-filter.json b/test/fixtures/parse/links-strip--meta-250-filter.json index 9a640e0b..47069c6b 100644 --- a/test/fixtures/parse/links-strip--meta-250-filter.json +++ b/test/fixtures/parse/links-strip--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - 
"ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--meta-250-strict.json b/test/fixtures/parse/links-strip--meta-250-strict.json index 17e5397d..f908c9e3 100644 --- a/test/fixtures/parse/links-strip--meta-250-strict.json +++ b/test/fixtures/parse/links-strip--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--meta-250.json b/test/fixtures/parse/links-strip--meta-250.json index 17e5397d..f908c9e3 100644 --- a/test/fixtures/parse/links-strip--meta-250.json +++ b/test/fixtures/parse/links-strip--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": 
null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--strict.json b/test/fixtures/parse/links-strip--strict.json index 17e5397d..f908c9e3 100644 --- a/test/fixtures/parse/links-strip--strict.json +++ b/test/fixtures/parse/links-strip--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + 
"devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip.json b/test/fixtures/parse/links-strip.json index 17e5397d..f908c9e3 100644 --- a/test/fixtures/parse/links-strip.json +++ b/test/fixtures/parse/links-strip.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links.json b/test/fixtures/parse/links.json index fdf07254..216a3ba7 100644 --- a/test/fixtures/parse/links.json +++ b/test/fixtures/parse/links.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": 
"2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--filter-strict.json b/test/fixtures/parse/long-paths--filter-strict.json index 8dfe9de3..53e376da 100644 --- a/test/fixtures/parse/long-paths--filter-strict.json +++ b/test/fixtures/parse/long-paths--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -107,23 +85,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:56:18.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836326, "nlink": 1, + "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false 
}, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -136,8 +103,6 @@ "size": 100, "mtime": "2017-04-10T16:56:18.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:56:18.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -152,17 +117,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -174,9 +135,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -191,17 +149,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -213,9 +167,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -230,17 +181,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -252,9 +199,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -269,17 +213,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -291,9 +231,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -308,17 +245,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -330,9 +263,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -347,17 +277,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -369,9 +295,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -386,17 +309,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -408,9 +327,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -425,17 +341,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } 
} ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -447,9 +359,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -464,17 +373,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -486,9 +391,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -503,17 +405,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -525,9 +423,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -542,17 +437,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -564,9 +455,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -581,17 +469,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -603,9 +487,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -620,17 +501,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -642,9 +519,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -659,17 +533,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -681,9 +551,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -698,17 +565,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -720,9 +583,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -737,17 +597,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": 
null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -759,9 +615,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -776,17 +629,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -798,9 +647,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -815,17 +661,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -837,9 +679,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -854,17 +693,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -876,9 +711,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -893,17 +725,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -915,9 +743,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -932,17 +757,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -954,9 +775,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -971,17 +789,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -993,9 +807,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1010,17 +821,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1032,9 +839,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1049,17 +853,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - 
"atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1071,9 +871,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1088,17 +885,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1110,9 +903,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1127,17 +917,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1149,9 +935,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1166,9 +949,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1181,23 +962,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836253, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1210,8 +980,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1226,9 +994,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1241,23 +1007,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836254, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1270,8 +1025,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": 
"2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1286,9 +1039,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1301,23 +1052,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1330,8 +1070,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1346,9 +1084,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--filter.json b/test/fixtures/parse/long-paths--filter.json index 8dfe9de3..53e376da 100644 --- a/test/fixtures/parse/long-paths--filter.json +++ b/test/fixtures/parse/long-paths--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -107,23 +85,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:56:18.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836326, "nlink": 1, + "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": 
false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -136,8 +103,6 @@ "size": 100, "mtime": "2017-04-10T16:56:18.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:56:18.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -152,17 +117,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -174,9 +135,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -191,17 +149,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -213,9 +167,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -230,17 +181,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -252,9 +199,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -269,17 +213,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -291,9 +231,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -308,17 +245,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -330,9 +263,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -347,17 +277,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -369,9 +295,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -386,17 +309,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -408,9 +327,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -425,17 +341,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + 
"devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -447,9 +359,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -464,17 +373,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -486,9 +391,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -503,17 +405,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -525,9 +423,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -542,17 +437,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -564,9 +455,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -581,17 +469,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -603,9 +487,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -620,17 +501,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -642,9 +519,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -659,17 +533,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -681,9 +551,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -698,17 +565,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -720,9 +583,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -737,17 +597,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": 
null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -759,9 +615,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -776,17 +629,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -798,9 +647,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -815,17 +661,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -837,9 +679,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -854,17 +693,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -876,9 +711,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -893,17 +725,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -915,9 +743,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -932,17 +757,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -954,9 +775,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -971,17 +789,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -993,9 +807,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1010,17 +821,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1032,9 +839,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1049,17 +853,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - 
"devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1071,9 +871,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1088,17 +885,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1110,9 +903,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1127,17 +917,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1149,9 +935,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1166,9 +949,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1181,23 +962,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836253, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1210,8 +980,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1226,9 +994,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1241,23 +1007,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836254, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1270,8 +1025,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": 
"2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1286,9 +1039,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1301,23 +1052,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1330,8 +1070,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1346,9 +1084,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--meta-250-filter-strict.json b/test/fixtures/parse/long-paths--meta-250-filter-strict.json index 2a2e4960..0ceedf38 100644 --- a/test/fixtures/parse/long-paths--meta-250-filter-strict.json +++ b/test/fixtures/parse/long-paths--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,17 +72,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -114,9 +90,6 @@ "gname": "staff", "size": 283, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -131,17 +104,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -153,9 +122,6 @@ "gname": "staff", "size": 100, "mtime": 
"2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -170,17 +136,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -192,9 +154,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -209,17 +168,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -231,9 +186,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -248,17 +200,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -270,9 +218,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -287,17 +232,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -309,9 +250,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -326,17 +264,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -348,9 +282,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -365,17 +296,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -387,9 +314,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -404,17 +328,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -426,9 +346,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -443,17 +360,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -465,9 +378,6 @@ "gname": 
"staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -482,17 +392,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -504,9 +410,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -521,17 +424,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -543,9 +442,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -560,17 +456,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -582,9 +474,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -599,17 +488,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -621,9 +506,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -638,17 +520,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -660,9 +538,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -677,17 +552,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -699,9 +570,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -716,17 +584,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -738,9 +602,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -755,17 +616,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ 
-777,9 +634,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -794,17 +648,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -816,9 +666,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -833,17 +680,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -855,9 +698,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -872,17 +712,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -894,9 +730,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -911,17 +744,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -933,9 +762,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -950,17 +776,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -972,9 +794,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -989,17 +808,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1011,9 +826,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1028,17 +840,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1050,9 +858,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1067,17 +872,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": 
false, "ignore": true, @@ -1089,9 +890,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1106,17 +904,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1128,9 +922,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1145,17 +936,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1167,9 +954,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1184,17 +968,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1206,9 +986,6 @@ "gname": "staff", "size": 289, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1223,17 +1000,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1245,9 +1018,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1262,17 +1032,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1284,9 +1050,6 @@ "gname": "staff", "size": 339, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1301,17 +1064,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1323,9 +1082,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1340,9 +1096,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1355,23 +1109,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - 
"globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1384,8 +1127,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1400,9 +1141,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--meta-250-filter.json b/test/fixtures/parse/long-paths--meta-250-filter.json index 2a2e4960..0ceedf38 100644 --- a/test/fixtures/parse/long-paths--meta-250-filter.json +++ b/test/fixtures/parse/long-paths--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,17 +72,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -114,9 +90,6 @@ "gname": "staff", "size": 283, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -131,17 +104,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -153,9 +122,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -170,17 +136,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -192,9 +154,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -209,17 +168,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": 
null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -231,9 +186,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -248,17 +200,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -270,9 +218,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -287,17 +232,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -309,9 +250,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -326,17 +264,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -348,9 +282,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -365,17 +296,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -387,9 +314,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -404,17 +328,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -426,9 +346,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -443,17 +360,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -465,9 +378,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -482,17 +392,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -504,9 +410,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -521,17 +424,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ 
"ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -543,9 +442,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -560,17 +456,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -582,9 +474,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -599,17 +488,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -621,9 +506,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -638,17 +520,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -660,9 +538,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -677,17 +552,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -699,9 +570,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -716,17 +584,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -738,9 +602,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -755,17 +616,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -777,9 +634,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -794,17 +648,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -816,9 +666,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -833,17 +680,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + 
"devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -855,9 +698,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -872,17 +712,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -894,9 +730,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -911,17 +744,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -933,9 +762,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -950,17 +776,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -972,9 +794,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -989,17 +808,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1011,9 +826,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1028,17 +840,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1050,9 +858,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1067,17 +872,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1089,9 +890,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1106,17 +904,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1128,9 +922,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1145,17 +936,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": 
null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1167,9 +954,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1184,17 +968,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1206,9 +986,6 @@ "gname": "staff", "size": 289, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1223,17 +1000,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1245,9 +1018,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1262,17 +1032,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1284,9 +1050,6 @@ "gname": "staff", "size": 339, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1301,17 +1064,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1323,9 +1082,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1340,9 +1096,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1355,23 +1109,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1384,8 +1127,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1400,9 +1141,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--meta-250-strict.json b/test/fixtures/parse/long-paths--meta-250-strict.json index 532f2365..7fdf8ac4 100644 --- a/test/fixtures/parse/long-paths--meta-250-strict.json +++ b/test/fixtures/parse/long-paths--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, 
"type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,17 +72,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -114,9 +90,6 @@ "gname": "staff", "size": 283, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -131,17 +104,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -153,9 +122,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -170,17 +136,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -192,9 +154,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -209,17 +168,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -231,9 +186,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -248,17 +200,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -270,9 +218,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -287,17 +232,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 
0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -309,9 +250,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -326,17 +264,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -348,9 +282,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -365,17 +296,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -387,9 +314,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -404,17 +328,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -426,9 +346,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -443,17 +360,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -465,9 +378,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -482,17 +392,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -504,9 +410,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -521,17 +424,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -543,9 +442,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -560,17 +456,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -582,9 +474,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -599,17 +488,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - 
"ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -621,9 +506,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -638,17 +520,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -660,9 +538,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -677,17 +552,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -699,9 +570,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -716,17 +584,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -738,9 +602,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -755,17 +616,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -777,9 +634,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -794,17 +648,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -816,9 +666,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -833,17 +680,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -855,9 +698,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -872,17 +712,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -894,9 +730,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -911,17 +744,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 
0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -933,9 +762,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -950,17 +776,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -972,9 +794,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -989,17 +808,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1011,9 +826,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1028,17 +840,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1050,9 +858,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1067,17 +872,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1089,9 +890,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1106,17 +904,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1128,9 +922,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1145,17 +936,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1167,9 +954,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1184,17 +968,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1206,9 +986,6 @@ "gname": "staff", "size": 289, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1223,17 +1000,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], 
[ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1245,9 +1018,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1262,17 +1032,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1284,9 +1050,6 @@ "gname": "staff", "size": 339, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1301,17 +1064,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1323,9 +1082,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1340,9 +1096,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1355,23 +1109,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1384,8 +1127,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1400,9 +1141,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--meta-250.json b/test/fixtures/parse/long-paths--meta-250.json index 532f2365..7fdf8ac4 100644 --- a/test/fixtures/parse/long-paths--meta-250.json +++ b/test/fixtures/parse/long-paths--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - 
"globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,17 +72,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -114,9 +90,6 @@ "gname": "staff", "size": 283, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -131,17 +104,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -153,9 +122,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -170,17 +136,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -192,9 +154,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -209,17 +168,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -231,9 +186,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -248,17 +200,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -270,9 +218,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -287,17 +232,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -309,9 +250,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -326,17 +264,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -348,9 +282,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -365,17 +296,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - 
"globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -387,9 +314,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -404,17 +328,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -426,9 +346,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -443,17 +360,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -465,9 +378,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -482,17 +392,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -504,9 +410,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -521,17 +424,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -543,9 +442,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -560,17 +456,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -582,9 +474,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -599,17 +488,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -621,9 +506,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -638,17 +520,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -660,9 +538,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -677,17 +552,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, 
"type": "Directory", "meta": false, "ignore": false, @@ -699,9 +570,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -716,17 +584,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -738,9 +602,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -755,17 +616,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -777,9 +634,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -794,17 +648,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -816,9 +666,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -833,17 +680,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -855,9 +698,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -872,17 +712,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -894,9 +730,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -911,17 +744,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -933,9 +762,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -950,17 +776,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -972,9 +794,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -989,17 +808,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", 
"meta": false, "ignore": false, @@ -1011,9 +826,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1028,17 +840,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1050,9 +858,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1067,17 +872,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1089,9 +890,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1106,17 +904,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1128,9 +922,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1145,17 +936,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1167,9 +954,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1184,17 +968,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1206,9 +986,6 @@ "gname": "staff", "size": 289, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1223,17 +1000,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1245,9 +1018,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1262,17 +1032,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1284,9 +1050,6 @@ "gname": "staff", "size": 339, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1301,17 +1064,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": 
"File", "meta": false, "ignore": false, @@ -1323,9 +1082,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1340,9 +1096,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1355,23 +1109,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1384,8 +1127,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1400,9 +1141,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--strict.json b/test/fixtures/parse/long-paths--strict.json index 835fd96c..fb49f544 100644 --- a/test/fixtures/parse/long-paths--strict.json +++ b/test/fixtures/parse/long-paths--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -107,23 +85,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:56:18.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836326, "nlink": 1, + "path": 
"170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -136,8 +103,6 @@ "size": 100, "mtime": "2017-04-10T16:56:18.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:56:18.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -152,17 +117,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -174,9 +135,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -191,17 +149,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -213,9 +167,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -230,17 +181,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -252,9 +199,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -269,17 +213,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -291,9 +231,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -308,17 +245,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -330,9 +263,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -347,17 +277,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -369,9 +295,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -386,17 +309,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -408,9 +327,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, 
"needPax": false, @@ -425,17 +341,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -447,9 +359,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -464,17 +373,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -486,9 +391,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -503,17 +405,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -525,9 +423,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -542,17 +437,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -564,9 +455,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -581,17 +469,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -603,9 +487,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -620,17 +501,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -642,9 +519,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -659,17 +533,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -681,9 +551,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -698,17 +565,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -720,9 +583,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ 
-737,17 +597,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -759,9 +615,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -776,17 +629,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -798,9 +647,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -815,17 +661,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -837,9 +679,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -854,17 +693,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -876,9 +711,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -893,17 +725,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -915,9 +743,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -932,17 +757,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -954,9 +775,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -971,17 +789,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -993,9 +807,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1010,17 +821,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1032,9 +839,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1049,17 +853,13 @@ 
"uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1071,9 +871,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1088,17 +885,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1110,9 +903,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1127,17 +917,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1149,9 +935,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1166,9 +949,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1181,23 +962,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836253, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1210,8 +980,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1226,9 +994,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1241,23 +1007,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836254, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1270,8 +1025,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": 
"2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1286,9 +1039,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1301,23 +1052,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1330,8 +1070,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1346,9 +1084,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths.json b/test/fixtures/parse/long-paths.json index 835fd96c..fb49f544 100644 --- a/test/fixtures/parse/long-paths.json +++ b/test/fixtures/parse/long-paths.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -107,23 +85,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:56:18.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836326, "nlink": 1, + "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": 
false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -136,8 +103,6 @@ "size": 100, "mtime": "2017-04-10T16:56:18.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:56:18.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -152,17 +117,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -174,9 +135,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -191,17 +149,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -213,9 +167,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -230,17 +181,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -252,9 +199,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -269,17 +213,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -291,9 +231,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -308,17 +245,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -330,9 +263,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -347,17 +277,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -369,9 +295,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -386,17 +309,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -408,9 +327,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -425,17 +341,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": 
null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -447,9 +359,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -464,17 +373,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -486,9 +391,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -503,17 +405,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -525,9 +423,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -542,17 +437,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -564,9 +455,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -581,17 +469,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -603,9 +487,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -620,17 +501,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -642,9 +519,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -659,17 +533,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -681,9 +551,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -698,17 +565,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -720,9 +583,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -737,17 +597,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": 
null, "type": "Directory", "meta": false, "ignore": false, @@ -759,9 +615,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -776,17 +629,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -798,9 +647,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -815,17 +661,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -837,9 +679,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -854,17 +693,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -876,9 +711,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -893,17 +725,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -915,9 +743,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -932,17 +757,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -954,9 +775,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -971,17 +789,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -993,9 +807,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1010,17 +821,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1032,9 +839,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1049,17 +853,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": 
"Directory", "meta": false, "ignore": false, @@ -1071,9 +871,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1088,17 +885,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1110,9 +903,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1127,17 +917,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1149,9 +935,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1166,9 +949,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1181,23 +962,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836253, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1210,8 +980,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1226,9 +994,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1241,23 +1007,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836254, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1270,8 +1025,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1286,9 +1039,7 @@ "uname": "isaacs", "gname": 
"staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1301,23 +1052,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1330,8 +1070,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1346,9 +1084,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--filter-strict.json b/test/fixtures/parse/long-pax--filter-strict.json index 7e9df8bb..57f80513 100644 --- a/test/fixtures/parse/long-pax--filter-strict.json +++ b/test/fixtures/parse/long-pax--filter-strict.json @@ -7,24 +7,16 @@ "ignoredEntry", { "extended": { - "atime": null, - "charset": null, + "mtime": "2017-04-10T16:54:12.000Z", "comment": "all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy", - "ctime": null, "gid": 20, + "uid": 501, "gname": "staff", - "linkpath": null, - "mtime": "2017-04-10T16:54:12.000Z", + "uname": "isaacs", "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "size": 100, - "uid": 501, - "uname": "isaacs", - "dev": null, - "ino": null, - "nlink": null, "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -36,9 +28,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +42,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--filter.json b/test/fixtures/parse/long-pax--filter.json index 7e9df8bb..57f80513 100644 --- a/test/fixtures/parse/long-pax--filter.json +++ b/test/fixtures/parse/long-pax--filter.json @@ -7,24 +7,16 @@ "ignoredEntry", { "extended": { - "atime": null, - "charset": null, + 
"mtime": "2017-04-10T16:54:12.000Z", "comment": "all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy", - "ctime": null, "gid": 20, + "uid": 501, "gname": "staff", - "linkpath": null, - "mtime": "2017-04-10T16:54:12.000Z", + "uname": "isaacs", "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "size": 100, - "uid": 501, - "uname": "isaacs", - "dev": null, - "ino": null, - "nlink": null, "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -36,9 +28,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +42,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--meta-250-filter-strict.json b/test/fixtures/parse/long-pax--meta-250-filter-strict.json index 584863e0..4129b6fc 100644 --- a/test/fixtures/parse/long-pax--meta-250-filter-strict.json +++ b/test/fixtures/parse/long-pax--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1282, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,9 +59,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--meta-250-filter.json b/test/fixtures/parse/long-pax--meta-250-filter.json index 584863e0..4129b6fc 100644 --- a/test/fixtures/parse/long-pax--meta-250-filter.json +++ b/test/fixtures/parse/long-pax--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1282, "mtime": 
"2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,9 +59,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--meta-250-strict.json b/test/fixtures/parse/long-pax--meta-250-strict.json index 5a19f0ec..f989196b 100644 --- a/test/fixtures/parse/long-pax--meta-250-strict.json +++ b/test/fixtures/parse/long-pax--meta-250-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1282, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,9 +59,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--meta-250.json b/test/fixtures/parse/long-pax--meta-250.json index 5a19f0ec..f989196b 100644 --- a/test/fixtures/parse/long-pax--meta-250.json +++ b/test/fixtures/parse/long-pax--meta-250.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1282, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,9 +59,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--strict.json b/test/fixtures/parse/long-pax--strict.json index c72def09..2385c664 100644 --- a/test/fixtures/parse/long-pax--strict.json +++ b/test/fixtures/parse/long-pax--strict.json @@ -7,24 +7,16 @@ "entry", { "extended": { - "atime": null, - "charset": null, + "mtime": "2017-04-10T16:54:12.000Z", "comment": "all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play 
makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy", - "ctime": null, "gid": 20, + "uid": 501, "gname": "staff", - "linkpath": null, - "mtime": "2017-04-10T16:54:12.000Z", + "uname": "isaacs", "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "size": 100, - "uid": 501, - "uname": "isaacs", - "dev": null, - "ino": null, - "nlink": null, "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -36,9 +28,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +42,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax.json b/test/fixtures/parse/long-pax.json index c72def09..2385c664 100644 --- a/test/fixtures/parse/long-pax.json +++ b/test/fixtures/parse/long-pax.json @@ -7,24 +7,16 @@ "entry", { "extended": { - "atime": null, - "charset": null, + "mtime": "2017-04-10T16:54:12.000Z", "comment": "all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy", - "ctime": null, "gid": 20, + "uid": 501, "gname": "staff", - "linkpath": null, - "mtime": "2017-04-10T16:54:12.000Z", + "uname": "isaacs", "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "size": 100, - "uid": 501, - "uname": "isaacs", - "dev": null, - "ino": null, - "nlink": null, "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -36,9 +28,6 @@ "gname": "staff", "size": 100, "mtime": 
"2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +42,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--filter-strict.json b/test/fixtures/parse/next-file-has-long--filter-strict.json index a010d9e3..efe114cd 100644 --- a/test/fixtures/parse/next-file-has-long--filter-strict.json +++ b/test/fixtures/parse/next-file-has-long--filter-strict.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--filter.json b/test/fixtures/parse/next-file-has-long--filter.json index a010d9e3..efe114cd 100644 --- a/test/fixtures/parse/next-file-has-long--filter.json +++ b/test/fixtures/parse/next-file-has-long--filter.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - 
"atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--meta-250-filter-strict.json b/test/fixtures/parse/next-file-has-long--meta-250-filter-strict.json index a010d9e3..efe114cd 100644 --- a/test/fixtures/parse/next-file-has-long--meta-250-filter-strict.json +++ b/test/fixtures/parse/next-file-has-long--meta-250-filter-strict.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--meta-250-filter.json b/test/fixtures/parse/next-file-has-long--meta-250-filter.json index a010d9e3..efe114cd 100644 --- a/test/fixtures/parse/next-file-has-long--meta-250-filter.json +++ b/test/fixtures/parse/next-file-has-long--meta-250-filter.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--meta-250-strict.json 
b/test/fixtures/parse/next-file-has-long--meta-250-strict.json index 9d6eef02..852c825b 100644 --- a/test/fixtures/parse/next-file-has-long--meta-250-strict.json +++ b/test/fixtures/parse/next-file-has-long--meta-250-strict.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--meta-250.json b/test/fixtures/parse/next-file-has-long--meta-250.json index 9d6eef02..852c825b 100644 --- a/test/fixtures/parse/next-file-has-long--meta-250.json +++ b/test/fixtures/parse/next-file-has-long--meta-250.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--strict.json b/test/fixtures/parse/next-file-has-long--strict.json index 9d6eef02..852c825b 100644 --- a/test/fixtures/parse/next-file-has-long--strict.json +++ b/test/fixtures/parse/next-file-has-long--strict.json @@ -9,7 
+9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long.json b/test/fixtures/parse/next-file-has-long.json index 9d6eef02..852c825b 100644 --- a/test/fixtures/parse/next-file-has-long.json +++ b/test/fixtures/parse/next-file-has-long.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte--filter-strict.json b/test/fixtures/parse/null-byte--filter-strict.json index 3c27f2bb..e96a4f8a 100644 --- a/test/fixtures/parse/null-byte--filter-strict.json +++ b/test/fixtures/parse/null-byte--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - 
"ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte--filter.json b/test/fixtures/parse/null-byte--filter.json index 3c27f2bb..e96a4f8a 100644 --- a/test/fixtures/parse/null-byte--filter.json +++ b/test/fixtures/parse/null-byte--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte--meta-250-filter-strict.json b/test/fixtures/parse/null-byte--meta-250-filter-strict.json index 3c27f2bb..e96a4f8a 100644 --- a/test/fixtures/parse/null-byte--meta-250-filter-strict.json +++ b/test/fixtures/parse/null-byte--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": 
"File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte--meta-250-filter.json b/test/fixtures/parse/null-byte--meta-250-filter.json index 3c27f2bb..e96a4f8a 100644 --- a/test/fixtures/parse/null-byte--meta-250-filter.json +++ b/test/fixtures/parse/null-byte--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte--meta-250-strict.json b/test/fixtures/parse/null-byte--meta-250-strict.json index a9bad194..bb1f092c 100644 --- a/test/fixtures/parse/null-byte--meta-250-strict.json +++ b/test/fixtures/parse/null-byte--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff 
--git a/test/fixtures/parse/null-byte--meta-250.json b/test/fixtures/parse/null-byte--meta-250.json index a9bad194..bb1f092c 100644 --- a/test/fixtures/parse/null-byte--meta-250.json +++ b/test/fixtures/parse/null-byte--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte--strict.json b/test/fixtures/parse/null-byte--strict.json index a9bad194..bb1f092c 100644 --- a/test/fixtures/parse/null-byte--strict.json +++ b/test/fixtures/parse/null-byte--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte.json b/test/fixtures/parse/null-byte.json index a9bad194..bb1f092c 100644 --- a/test/fixtures/parse/null-byte.json +++ b/test/fixtures/parse/null-byte.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": 
false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--filter-strict.json b/test/fixtures/parse/trailing-slash-corner-case--filter-strict.json index 3739da97..151be717 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--filter-strict.json +++ b/test/fixtures/parse/trailing-slash-corner-case--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--filter.json b/test/fixtures/parse/trailing-slash-corner-case--filter.json index 3739da97..151be717 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--filter.json +++ b/test/fixtures/parse/trailing-slash-corner-case--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": 
null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter-strict.json b/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter-strict.json index 3739da97..151be717 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter-strict.json +++ b/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - 
"ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter.json b/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter.json index 3739da97..151be717 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter.json +++ b/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--meta-250-strict.json b/test/fixtures/parse/trailing-slash-corner-case--meta-250-strict.json index 14200298..c5110f0c 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--meta-250-strict.json +++ b/test/fixtures/parse/trailing-slash-corner-case--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - 
"uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--meta-250.json b/test/fixtures/parse/trailing-slash-corner-case--meta-250.json index 14200298..c5110f0c 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--meta-250.json +++ b/test/fixtures/parse/trailing-slash-corner-case--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - 
"globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--strict.json b/test/fixtures/parse/trailing-slash-corner-case--strict.json index 14200298..c5110f0c 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--strict.json +++ b/test/fixtures/parse/trailing-slash-corner-case--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case.json b/test/fixtures/parse/trailing-slash-corner-case.json index 14200298..c5110f0c 100644 --- a/test/fixtures/parse/trailing-slash-corner-case.json +++ b/test/fixtures/parse/trailing-slash-corner-case.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, 
"devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--filter-strict.json b/test/fixtures/parse/utf8--filter-strict.json index b8c5bac5..7dc9969f 100644 --- a/test/fixtures/parse/utf8--filter-strict.json +++ b/test/fixtures/parse/utf8--filter-strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": 
"long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--filter.json b/test/fixtures/parse/utf8--filter.json index b8c5bac5..7dc9969f 100644 --- a/test/fixtures/parse/utf8--filter.json +++ b/test/fixtures/parse/utf8--filter.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--meta-250-filter-strict.json b/test/fixtures/parse/utf8--meta-250-filter-strict.json index b8c5bac5..7dc9969f 100644 --- 
a/test/fixtures/parse/utf8--meta-250-filter-strict.json +++ b/test/fixtures/parse/utf8--meta-250-filter-strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--meta-250-filter.json b/test/fixtures/parse/utf8--meta-250-filter.json index b8c5bac5..7dc9969f 100644 --- a/test/fixtures/parse/utf8--meta-250-filter.json +++ b/test/fixtures/parse/utf8--meta-250-filter.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", 
"devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--meta-250-strict.json b/test/fixtures/parse/utf8--meta-250-strict.json index 611f052a..4ee5a61a 100644 --- a/test/fixtures/parse/utf8--meta-250-strict.json +++ b/test/fixtures/parse/utf8--meta-250-strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, 
- "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--meta-250.json b/test/fixtures/parse/utf8--meta-250.json index 611f052a..4ee5a61a 100644 --- a/test/fixtures/parse/utf8--meta-250.json +++ b/test/fixtures/parse/utf8--meta-250.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ 
"uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--strict.json b/test/fixtures/parse/utf8--strict.json index 611f052a..4ee5a61a 100644 --- a/test/fixtures/parse/utf8--strict.json +++ b/test/fixtures/parse/utf8--strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8.json b/test/fixtures/parse/utf8.json index 611f052a..4ee5a61a 100644 --- a/test/fixtures/parse/utf8.json +++ b/test/fixtures/parse/utf8.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": 
"2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/get-write-flag.js b/test/get-write-flag.js index 81c2f547..1e81ba20 100644 --- a/test/get-write-flag.js +++ b/test/get-write-flag.js @@ -1,67 +1,73 @@ -const t = require('tap') +import fs from 'fs' +import t from 'tap' +import { fileURLToPath } from 'url' +import { getWriteFlag } from '../dist/esm/get-write-flag.js' + +const __filename = fileURLToPath(import.meta.url) // run three scenarios // unix (no fmap) // win32 (without fmap support) // win32 (with fmap support) -const fs = require('fs') const hasFmap = !!fs.constants.UV_FS_O_FILEMAP -const platform = process.platform +const { platform } = process const UV_FS_O_FILEMAP = 0x20000000 switch (process.argv[2]) { case 'win32-fmap': { - if (!hasFmap) { - global.__FAKE_TESTING_FS__ = { - constants: { - ...fs.constants, - ...{ UV_FS_O_FILEMAP }, - }, - } - } const { O_CREAT, O_TRUNC, O_WRONLY } = fs.constants - if (platform !== 'win32') { - process.env.__FAKE_PLATFORM__ = 'win32' - } - const getFlag = require('../lib/get-write-flag.js') - t.equal(getFlag(1), UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY) - t.equal(getFlag(512 * 1024 + 1), 'w') + t.equal( + getWriteFlag(1), + UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY, + ) + t.equal(getWriteFlag(512 * 1024 + 1), 'w') break } case 'win32-nofmap': { - if (hasFmap) { - global.__FAKE_TESTING_FS__ = { - constants: { - ...fs.constants, - ...{ UV_FS_O_FILEMAP: 0 }, - }, - } - } - if (platform !== 'win32') { - process.env.__FAKE_PLATFORM__ = 'win32' - } - const getFlag = require('../lib/get-write-flag.js') - t.equal(getFlag(1), 'w') - t.equal(getFlag(512 * 1024 + 1), 'w') + 
t.equal(getWriteFlag(1), 'w') + t.equal(getWriteFlag(512 * 1024 + 1), 'w') break } case 'unix': { - if (platform === 'win32') { - process.env.__FAKE_PLATFORM__ = 'darwin' - } - const getFlag = require('../lib/get-write-flag.js') - t.equal(getFlag(1), 'w') - t.equal(getFlag(512 * 1024 + 1), 'w') + t.equal(getWriteFlag(1), 'w') + t.equal(getWriteFlag(512 * 1024 + 1), 'w') break } default: { const node = process.execPath - t.spawn(node, [__filename, 'win32-fmap']) - t.spawn(node, [__filename, 'win32-nofmap']) - t.spawn(node, [__filename, 'unix']) + t.spawn(node, [__filename, 'win32-fmap'], { + env: { + ...process.env, + ...(platform === 'win32' ? + {} + : { + __FAKE_FS_O_FILENAME__: String(UV_FS_O_FILEMAP), + __FAKE_PLATFORM__: 'win32', + }), + }, + }) + t.spawn(node, [__filename, 'win32-nofmap'], { + env: { + ...process.env, + ...(platform === 'win32' ? + {} + : { + __FAKE_FS_O_FILENAME__: '0', + __FAKE_PLATFORM__: 'win32', + }), + }, + }) + t.spawn(node, [__filename, 'unix'], { + env: { + ...process.env, + ...(platform === 'win32' ? + { __FAKE_PLATFORM__: 'linux' } + : {}), + }, + }) } } diff --git a/test/header.js b/test/header.js index 1a17eb83..40332a45 100644 --- a/test/header.js +++ b/test/header.js @@ -1,32 +1,32 @@ -'use strict' -const t = require('tap') -const Header = require('../lib/header.js') +import t from 'tap' +import { Header } from '../dist/esm/header.js' t.test('ustar format', t => { const buf = Buffer.from( '666f6f2e74787400000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303037353520003035373736312000303030303234200037373737' + - '3737373737373700313236373735363735343000303133303531200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030306973616163730000000000000000000000000000000000' + - '0000000000000000007374616666000000000000000000000000000000000000' + - '0000000000000000003030303030302000303030303030200000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000', - 'hex') + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303037353520003035373736312000303030303234200037373737' + + '3737373737373700313236373735363735343000303133303531200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030306973616163730000000000000000000000000000000000' + + '0000000000000000007374616666000000000000000000000000000000000000' + + '0000000000000000003030303030302000303030303030200000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + 
'0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ) const h = new Header({ path: 'foo.txt', }) const slab = Buffer.alloc(1024) - h.set({ + Object.assign(h, { mode: 0o755, uid: 24561, gid: 20, @@ -38,9 +38,11 @@ t.test('ustar format', t => { }) h.encode(slab) - t.equal(slab.slice(0, 512).toString('hex'), buf.toString('hex')) - t.equal(slab.toString('hex'), buf.toString('hex') + - (new Array(1025).join('0'))) + t.equal(slab.subarray(0, 512).toString('hex'), buf.toString('hex')) + t.equal( + slab.toString('hex'), + buf.toString('hex') + new Array(1025).join('0'), + ) const h2 = new Header(buf) @@ -64,30 +66,31 @@ t.test('ustar format', t => { t.test('xstar format', t => { const buf = Buffer.from( '666f6f2e74787400000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303037353520003035373736312000303030303234200030303030' + - '3030303134342000313236373735363735343000303135313331200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030306973616163730000000000000000000000000000000000' + - '0000000000000000007374616666000000000000000000000000000000000000' + - '0000000000000000003030303030302000303030303030200000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000031323637' + - '3735363735343000313236373735363735343000000000000000000000000000' + - // just some junk - '420420420420420420420420420420420420420420420420420420420420', - 'hex') + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303037353520003035373736312000303030303234200030303030' + + '3030303134342000313236373735363735343000303135313331200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030306973616163730000000000000000000000000000000000' + + '0000000000000000007374616666000000000000000000000000000000000000' + + '0000000000000000003030303030302000303030303030200000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000031323637' + + '3735363735343000313236373735363735343000000000000000000000000000' + + // just some junk + '420420420420420420420420420420420420420420420420420420420420', + 'hex', + ) const h = new Header({ path: 'foo.txt', }) - h.set({ + Object.assign(h, { mode: 0o755, uid: 24561, gid: 20, @@ -102,7 +105,7 @@ t.test('xstar format', t => { h.encode() const slab = h.block - t.equal(slab.toString('hex'), buf.slice(0, 512).toString('hex')) + t.equal(slab.toString('hex'), buf.subarray(0, 512).toString('hex')) const h2 = new Header(buf) @@ 
-131,25 +134,27 @@ t.test('prefix handling', t => { t.test('no times', t => { const buf = Buffer.from( '666f6f2e74787400000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303037353520003035373736312000303030303234200030303030' + - '3030303134342000313236373735363735343000303337323734200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030306973616163730000000000000000000000000000000000' + - '0000000000000000007374616666000000000000000000000000000000000000' + - '00000000000000000030303030303020003030303030302000722f652f612f6c' + - '2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f79' + - '2f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f72' + - '2f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f61' + - '2f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d' + - '2f702f612f742f68000000000000000000000000000000000000000000000000', - 'hex') + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303037353520003035373736312000303030303234200030303030' + + '3030303134342000313236373735363735343000303337323734200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030306973616163730000000000000000000000000000000000' + + '0000000000000000007374616666000000000000000000000000000000000000' + + '00000000000000000030303030303020003030303030302000722f652f612f6c' + + '2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f79' + + '2f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f72' + + '2f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f61' + + '2f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d' + + '2f702f612f742f68000000000000000000000000000000000000000000000000', + 'hex', + ) const h = new Header({ - path: 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + + path: + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/foo.txt', mode: 0o755, @@ -166,14 +171,17 @@ t.test('prefix handling', t => { const b2 = Buffer.alloc(512) h.encode(b2, 0) - t.equal(b2.toString().replace(/\0+/g, ' '), - buf.toString().replace(/\0+/g, ' ')) + t.equal( + b2.toString().replace(/\0+/g, ' '), + buf.toString().replace(/\0+/g, ' '), + ) t.equal(b2.toString('hex'), buf.toString('hex')) const h2 = new Header(buf) t.match(h2, { - path: 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + + path: + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/foo.txt', mode: 0o755, @@ -192,10 +200,13 @@ t.test('prefix handling', t => { }) t.equal(b2.toString().replace(/\0.*$/, ''), 'foo.txt') - t.equal(b2.slice(345).toString().replace(/\0.*$/, ''), 'r/e/a/l/l/y/-' + - '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + - '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + - '/d/e/e/p/-/p/a/t/h') + t.equal( + 
b2.subarray(345).toString().replace(/\0.*$/, ''), + 'r/e/a/l/l/y/-' + + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + + '/d/e/e/p/-/p/a/t/h', + ) t.end() }) @@ -203,27 +214,29 @@ t.test('prefix handling', t => { t.test('a/c times, use shorter prefix field', t => { const buf = Buffer.from( '652f702f2d2f702f612f742f682f666f6f2e7478740000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303037353520003035373736312000303030303234200030303030' + - '3030303134342000313236373735363735343000303431353030200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030306973616163730000000000000000000000000000000000' + - '0000000000000000007374616666000000000000000000000000000000000000' + - '00000000000000000030303030303020003030303030302000722f652f612f6c' + - '2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f79' + - '2f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f72' + - '2f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f61' + - '2f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f642f65000031323637' + - '3735363735343000313236373735363735343000000000000000000000000000', - 'hex') + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303037353520003035373736312000303030303234200030303030' + + '3030303134342000313236373735363735343000303431353030200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030306973616163730000000000000000000000000000000000' + + '0000000000000000007374616666000000000000000000000000000000000000' + + '00000000000000000030303030303020003030303030302000722f652f612f6c' + + '2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f79' + + '2f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f72' + + '2f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f61' + + '2f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f642f65000031323637' + + '3735363735343000313236373735363735343000000000000000000000000000', + 'hex', + ) const h = new Header() - h.path = 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + - 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + - '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/foo.txt' + h.path = + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/foo.txt' h.mode = 0o755 h.uid = 24561 h.gid = 20 @@ -241,33 +254,44 @@ t.test('prefix handling', t => { const b3 = Buffer.alloc(1024) h.encode(b3, 100) - t.equal(b2.toString('hex'), b3.slice(100, 612).toString('hex')) + t.equal(b2.toString('hex'), b3.subarray(100, 612).toString('hex')) const h2 = new Header(b3, 100) - t.match(h2, { - path: 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + - 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + - '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/foo.txt', - mode: 0o755, - uid: 24561, - gid: 20, - size: 100, - mtime: new 
Date('2016-04-01T22:00Z'), - ctime: new Date('2016-04-01T22:00Z'), - atime: new Date('2016-04-01T22:00Z'), - type: 'File', - uname: 'isaacs', - gname: 'staff', - cksumValid: true, - cksum: 17216, - needPax: false, - }, 'header from buffer') - - t.equal(b2.toString().replace(/\0.*$/, ''), 'e/p/-/p/a/t/h/foo.txt') - t.equal(b2.slice(345).toString().replace(/\0.*$/, ''), 'r/e/a/l/l/y/-' + - '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + - '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e') + t.match( + h2, + { + path: + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/foo.txt', + mode: 0o755, + uid: 24561, + gid: 20, + size: 100, + mtime: new Date('2016-04-01T22:00Z'), + ctime: new Date('2016-04-01T22:00Z'), + atime: new Date('2016-04-01T22:00Z'), + type: 'File', + uname: 'isaacs', + gname: 'staff', + cksumValid: true, + cksum: 17216, + needPax: false, + }, + 'header from buffer', + ) + + t.equal( + b2.toString().replace(/\0.*$/, ''), + 'e/p/-/p/a/t/h/foo.txt', + ) + t.equal( + b2.subarray(345).toString().replace(/\0.*$/, ''), + 'r/e/a/l/l/y/-' + + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e', + ) t.end() }) @@ -275,27 +299,30 @@ t.test('prefix handling', t => { t.test('hella long basename', t => { const buf = Buffer.from( '6c6f6e672d66696c652d6c6f6e672d66696c652d6c6f6e672d66696c652d6c6f' + - '6e672d66696c652d6c6f6e672d66696c652d6c6f6e672d66696c652d6c6f6e67' + - '2d66696c652d6c6f6e672d66696c652d6c6f6e672d66696c652d6c6f6e672d66' + - '696c650030303037353520003035373736312000303030303234200030303030' + - '3030303134342000313236373735363735343000303630313431200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030306973616163730000000000000000000000000000000000' + - '0000000000000000007374616666000000000000000000000000000000000000' + - '00000000000000000030303030303020003030303030302000722f652f612f6c' + - '2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f79' + - '2f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f72' + - '2f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f61' + - '2f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d' + - '2f702f612f742f68000000000000000000000000000000000000000000000000', - 'hex') + '6e672d66696c652d6c6f6e672d66696c652d6c6f6e672d66696c652d6c6f6e67' + + '2d66696c652d6c6f6e672d66696c652d6c6f6e672d66696c652d6c6f6e672d66' + + '696c650030303037353520003035373736312000303030303234200030303030' + + '3030303134342000313236373735363735343000303630313431200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030306973616163730000000000000000000000000000000000' + + '0000000000000000007374616666000000000000000000000000000000000000' + + '00000000000000000030303030303020003030303030302000722f652f612f6c' + + '2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f79' + + '2f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f72' + + '2f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f61' + + 
'2f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d' + + '2f702f612f742f68000000000000000000000000000000000000000000000000', + 'hex', + ) const h = new Header({ - path: 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + + path: + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/' + - (new Array(20).join('long-file-')) + 'long-file.txt', + new Array(20).join('long-file-') + + 'long-file.txt', mode: 0o755, uid: 24561, gid: 20, @@ -318,7 +345,8 @@ t.test('prefix handling', t => { t.match(h2, { cksumValid: true, cksum: 24673, - path: 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + + path: + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + 'r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/long-file-long-file-long-' + 'file-long-file-long-file-long-file-long-file-long-file-long-' + @@ -332,26 +360,30 @@ t.test('prefix handling', t => { t.test('long basename, long dirname', t => { const buf = Buffer.from( '6c6f6e672d6469726e616d652d6c6f6e672d6469726e616d652d6c6f6e672d64' + - '69726e616d652d6c6f6e672d6469726e616d652d6c6f6e672d6469726e616d65' + - '2d6c6f6e672d6469726e616d652d6c6f6e672d6469726e616d652d6c6f6e672d' + - '6469720030303037353520003035373736312000303030303234200030303030' + - '3030303134342000313236373735363735343000303334323035200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030306973616163730000000000000000000000000000000000' + - '0000000000000000007374616666000000000000000000000000000000000000' + - '0000000000000000003030303030302000303030303030200000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000', - 'hex') + '69726e616d652d6c6f6e672d6469726e616d652d6c6f6e672d6469726e616d65' + + '2d6c6f6e672d6469726e616d652d6c6f6e672d6469726e616d652d6c6f6e672d' + + '6469720030303037353520003035373736312000303030303234200030303030' + + '3030303134342000313236373735363735343000303334323035200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030306973616163730000000000000000000000000000000000' + + '0000000000000000007374616666000000000000000000000000000000000000' + + '0000000000000000003030303030302000303030303030200000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ) const h = new Header({ - path: (new Array(30).join('long-dirname-')) + 'long-dirname/' + - (new Array(20).join('long-file-')) + 'long-file.txt', + path: + new Array(30).join('long-dirname-') + + 'long-dirname/' + + new 
Array(20).join('long-file-') + + 'long-file.txt', mode: 0o755, uid: 24561, gid: 20, @@ -376,7 +408,8 @@ t.test('prefix handling', t => { const h2 = new Header(b2) t.match(h2, { - path: 'long-dirname-long-dirname-long-dirname-long-dirname-' + + path: + 'long-dirname-long-dirname-long-dirname-long-dirname-' + 'long-dirname-long-dirname-long-dirname-long-dir', cksum: 14469, cksumValid: true, @@ -388,11 +421,15 @@ t.test('prefix handling', t => { }) t.test('throwers', t => { - t.throws(_ => new Header(Buffer.alloc(100)), - new Error('need 512 bytes for header')) + t.throws( + _ => new Header(Buffer.alloc(100)), + new Error('need 512 bytes for header'), + ) - t.throws(_ => new Header({}).encode(Buffer.alloc(100)), - new Error('need 512 bytes for header')) + t.throws( + _ => new Header({}).encode(Buffer.alloc(100)), + new Error('need 512 bytes for header'), + ) t.end() }) @@ -404,68 +441,72 @@ t.test('null block', t => { needPax: false, path: '', type: 'File', - mode: null, - uid: null, - gid: null, - size: null, - mtime: null, - cksum: null, + mode: undefined, + uid: undefined, + gid: undefined, + size: undefined, + mtime: undefined, + cksum: undefined, linkpath: '', - uname: null, - gname: null, + uname: undefined, + gname: undefined, devmaj: 0, devmin: 0, - atime: null, - ctime: null, + atime: undefined, + ctime: undefined, nullBlock: true, }) t.end() }) t.test('unknown type', t => { - const h = new Header(Buffer.from( - '666f6f2e74787400000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303037353520003035373736312000303030303234200030303030' + - '303030313434200031323637373536373534300030303630373620005a000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000', - 'hex')) - - t.equal(h.type, 'Z') - t.equal(h.typeKey, 'Z') + const h = new Header( + Buffer.from( + '666f6f2e74787400000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303037353520003035373736312000303030303234200030303030' + + '303030313434200031323637373536373534300030303630373620005a000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + 
'0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ), + ) + + t.equal(h.type, 'Unsupported') + t.equal(h.typeKey, 'Unsupported') t.end() }) t.test('dir as file with trailing /', t => { const b = Buffer.from( '782f792f00000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000030303030' + - '3030303030302000000000000000000000000000303034363136200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030300000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000003030303030302000303030303030200000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000', - 'hex') + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000030303030' + + '3030303030302000000000000000000000000000303034363136200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030300000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000003030303030302000303030303030200000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ) const h = new Header(b) t.equal(h.type, 'Directory') b[156] = '0'.charCodeAt(0) @@ -478,24 +519,28 @@ t.test('null numeric values do not get written', t => { const b = Buffer.alloc(512) const h = new Header() h.encode(b, 0) - t.equal( - b.toString('hex'), - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000303033303737200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - 
'0075737461720030300000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000003030303030302000303030303030200000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000') + t.same( + b, + Buffer.from( + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000303033303737200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030300000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000003030303030302000303030303030200000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ), + ) const h2 = new Header(b) t.match(h2, { type: 'File', @@ -503,19 +548,19 @@ t.test('null numeric values do not get written', t => { needPax: false, nullBlock: false, path: '', - mode: null, - uid: null, - gid: null, - size: null, - mtime: null, + mode: undefined, + uid: undefined, + gid: undefined, + size: undefined, + mtime: undefined, cksum: 1599, linkpath: '', uname: '', gname: '', devmaj: 0, devmin: 0, - atime: null, - ctime: null, + atime: undefined, + ctime: undefined, }) t.end() }) @@ -535,22 +580,23 @@ t.test('big numbers', t => { t.test('dir with long body', t => { const b = Buffer.from( '7061636b6167652f76656e646f72000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303037353520003030303030302000303030303030200030303030' + - '3030313030303020313330363133303232343120303132303236200035000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030300000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000003030303030302000303030303030200000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000', - 'hex') + 
'0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303037353520003030303030302000303030303030200030303030' + + '3030313030303020313330363133303232343120303132303236200035000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030300000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000003030303030302000303030303030200000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ) const h = new Header(b) t.equal(h.type, 'Directory') t.equal(h.size, 0) @@ -558,13 +604,20 @@ t.test('dir with long body', t => { }) t.test('null block, global extended header', t => { - const h = new Header(Buffer.alloc(512), 0, { - undef: undefined, - blerg: 'bloo', - }, { - path: '/global.path', - foo: 'global foo', - }) + const h = new Header( + Buffer.alloc(512), + 0, + { + undef: undefined, + blerg: 'bloo', + }, + { + path: '/global.path', + foo: 'global foo', + global: true, + linkpath: 'asdf', + }, + ) t.match(h, { cksumValid: false, needPax: false, @@ -593,22 +646,26 @@ t.test('null block, global extended header', t => { t.test('gnutar-generated 10gb file size', t => { const b = Buffer.from( '313067622e696d67000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303030363634003030303137353000303030313735300080000000' + - '0000000280000000313334373434303132303500303131313437002030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461722020006973616163730000000000000000000000000000000000' + - '0000000000000000006973616163730000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000', 'hex') + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303030363634003030303137353000303030313735300080000000' + + '0000000280000000313334373434303132303500303131313437002030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461722020006973616163730000000000000000000000000000000000' + + 
'0000000000000000006973616163730000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ) const h = new Header(b) t.equal(h.size, 1024 * 1024 * 1024 * 10, 'should be 10gb file') + // cannot set type to something invalid + t.throws(() => (h.type = 'Z')) t.end() }) diff --git a/test/high-level-opt.js b/test/high-level-opt.js deleted file mode 100644 index 7a82ef3f..00000000 --- a/test/high-level-opt.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -const t = require('tap') -const hlo = require('../lib/high-level-opt.js') - -t.same(hlo(), {}) - -t.same(hlo({ - C: 'dir', - f: 'file', - z: 'zip', - P: 'preserve', - U: 'unlink', - 'strip-components': 99, - foo: 'bar', -}), { - cwd: 'dir', - file: 'file', - gzip: 'zip', - preservePaths: 'preserve', - unlink: 'unlink', - strip: 99, - foo: 'bar', -}) - -t.same(hlo({ - C: 'dir', - f: 'file', - z: 'zip', - P: 'preserve', - U: 'unlink', - stripComponents: 99, - foo: 'bar', -}), { - cwd: 'dir', - file: 'file', - gzip: 'zip', - preservePaths: 'preserve', - unlink: 'unlink', - strip: 99, - foo: 'bar', -}) diff --git a/test/index.js b/test/index.js index 548a1dee..147f821f 100644 --- a/test/index.js +++ b/test/index.js @@ -1,5 +1,6 @@ -const t = require('tap') -const tar = require('../') +import t from 'tap' +import * as tar from '../dist/esm/index.js' + t.match(tar, { create: Function, c: Function, @@ -13,7 +14,7 @@ t.match(tar, { x: Function, Pack: Function, Unpack: Function, - Parse: Function, + Parser: Function, ReadEntry: Function, WriteEntry: Function, Header: Function, @@ -67,7 +68,7 @@ t.match(tar, { ]), }, }) -t.match(tar.Pack.Sync, Function) -t.match(tar.WriteEntry.Sync, Function) -t.match(tar.WriteEntry.Tar, Function) +t.match(tar.PackSync, Function) +t.match(tar.WriteEntrySync, Function) +t.match(tar.WriteEntryTar, Function) t.match(tar.Pax.parse, Function) diff --git a/test/large-numbers.js b/test/large-numbers.js index 055493e9..c84956d2 100644 --- a/test/large-numbers.js +++ b/test/large-numbers.js @@ -1,8 +1,5 @@ -'use strict' -const large = require('../lib/large-numbers.js') -const encode = large.encode -const parse = large.parse -const t = require('tap') +import t from 'tap' +import { encode, parse } from '../dist/esm/large-numbers.js' t.test('parse', t => { const cases = new Map([ @@ -19,7 +16,8 @@ t.test('parse', t => { ]) t.plan(cases.size) cases.forEach((value, hex) => - t.equal(parse(Buffer.from(hex, 'hex')), value)) + t.equal(parse(Buffer.from(hex, 'hex')), value), + ) }) t.test('parse out of range', t => { @@ -30,9 +28,12 @@ t.test('parse out of range', t => { 'fffffffffdd0000000000000', ] t.plan(cases.length) - cases.forEach((hex) => - t.throws(_ => parse(Buffer.from(hex, 'hex')), - Error('parsed number outside of javascript safe integer range'))) + cases.forEach(hex => + t.throws( + _ => parse(Buffer.from(hex, 'hex')), + Error('parsed number outside of javascript safe integer range'), + ), + ) }) t.test('parse invalid base256 encoding', t => { @@ -41,9 +42,12 @@ t.test('parse invalid base256 encoding', t => { '700000030000000000000000', // does not start with 0x80 or 0xff ] t.plan(cases.length) - cases.forEach((hex) => 
- t.throws(_ => parse(Buffer.from(hex, 'hex')), - Error('invalid base256 encoding'))) + cases.forEach(hex => + t.throws( + _ => parse(Buffer.from(hex, 'hex')), + Error('invalid base256 encoding'), + ), + ) }) t.test('encode', t => { @@ -61,12 +65,17 @@ t.test('encode', t => { t.test('alloc', t => { t.plan(cases.size) cases.forEach((value, hex) => - t.equal(encode(value, Buffer.alloc(12)).toString('hex'), hex)) + t.equal(encode(value, Buffer.alloc(12)).toString('hex'), hex), + ) }) t.test('allocUnsafe', t => { t.plan(cases.size) cases.forEach((value, hex) => - t.equal(encode(value, Buffer.allocUnsafe(12)).toString('hex'), hex)) + t.equal( + encode(value, Buffer.allocUnsafe(12)).toString('hex'), + hex, + ), + ) }) }) @@ -79,7 +88,12 @@ t.test('encode unsafe numbers', t => { ] t.plan(cases.length) - cases.forEach((value) => - t.throws(_ => encode(value), - Error('cannot encode number outside of javascript safe integer range'))) + cases.forEach(value => + t.throws( + _ => encode(value), + Error( + 'cannot encode number outside of javascript safe integer range', + ), + ), + ) }) diff --git a/test/list.js b/test/list.js deleted file mode 100644 index 26f59d9f..00000000 --- a/test/list.js +++ /dev/null @@ -1,229 +0,0 @@ -'use strict' -const t = require('tap') -const list = require('../lib/list.js') -const path = require('path') -const fs = require('fs') -const mutateFS = require('mutate-fs') - -t.test('basic', t => { - const file = path.resolve(__dirname, 'fixtures/tars/long-paths.tar') - const expect = require('./fixtures/parse/long-paths.json').filter( - e => Array.isArray(e) && e[0] === 'entry' - ).map(e => e[1].path) - - const check = (actual, t) => { - t.same(actual, expect) - return Promise.resolve(null) - } - - ;[1000, null].forEach(maxReadSize => { - t.test('file maxReadSize=' + maxReadSize, t => { - t.test('sync', t => { - const actual = [] - const onentry = entry => actual.push(entry.path) - list({ - file: file, - sync: true, - onentry: onentry, - maxReadSize: maxReadSize, - }) - return check(actual, t) - }) - - t.test('async promise', t => { - const actual = [] - const onentry = entry => actual.push(entry.path) - return list({ - file: file, - onentry: onentry, - maxReadSize: maxReadSize, - }).then(_ => check(actual, t)) - }) - - t.test('async cb', t => { - const actual = [] - const onentry = entry => actual.push(entry.path) - list({ - file: file, - onentry: onentry, - maxReadSize: maxReadSize, - }, er => { - if (er) { - throw er - } - check(actual, t) - t.end() - }) - }) - t.end() - }) - }) - - t.test('stream', t => { - t.test('sync', t => { - const actual = [] - const onentry = entry => actual.push(entry.path) - const l = list({ sync: true, onentry: onentry }) - l.end(fs.readFileSync(file)) - return check(actual, t) - }) - - t.test('async', t => { - const actual = [] - const onentry = entry => actual.push(entry.path) - const l = list() - l.on('entry', onentry) - l.on('end', _ => check(actual, t).then(_ => t.end())) - fs.createReadStream(file).pipe(l) - }) - t.end() - }) - - t.test('no onentry function', t => list({ file: file })) - - t.test('limit to specific files', t => { - const fileList = [ - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t', - '170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc///', - ] - - const expect = [ - 
'170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', - ] - - t.test('no filter function', t => { - const check = _ => t.same(actual, expect) - const actual = [] - return list({ - file: file, - onentry: entry => actual.push(entry.path), - }, fileList).then(check) - }) - - t.test('no filter function, stream', t => { - const check = _ => t.same(actual, expect) - const actual = [] - const onentry = entry => actual.push(entry.path) - fs.createReadStream(file).pipe(list(fileList) - .on('entry', onentry) - .on('end', _ => { - check() - t.end() - })) - }) - - t.test('filter function', t => { - const check = _ => t.same(actual, expect.slice(0, 1)) - const actual = [] - return list({ - file: file, - filter: path => path === expect[0], - onentry: entry => actual.push(entry.path), - }, fileList).then(check) - }) - - return t.test('list is unmunged', t => { - t.same(fileList, [ - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t', - '170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc///', - ]) - t.end() - }) - }) - - t.end() -}) - -t.test('bad args', t => { - t.throws(_ => list({ file: __filename, sync: true }, _ => _), - new TypeError('callback not supported for sync tar functions')) - t.throws(_ => list(_ => _), - new TypeError('callback only supported with file option')) - t.end() -}) - -t.test('stat fails', t => { - const poop = new Error('poop') - t.teardown(mutateFS.statFail(poop)) - t.test('sync', t => { - t.plan(1) - t.throws(_ => list({ file: __filename, sync: true }), poop) - }) - t.test('cb', t => { - t.plan(1) - list({ file: __filename }, er => t.equal(er, poop)) - }) - t.test('promise', t => { - t.plan(1) - list({ file: __filename }).catch(er => t.equal(er, poop)) - }) - t.end() -}) - -t.test('read fail', t => { - t.test('sync', t => { - const poop = new Error('poop') - t.teardown(mutateFS.fail('read', poop)) - t.plan(1) - t.throws(_ => list({ - file: __filename, - sync: true, - maxReadSize: 10, - }), poop) - }) - t.test('cb', t => { - const poop = new Error('poop') - t.teardown(mutateFS.fail('read', poop)) - t.plan(1) - list({ file: __filename }, er => t.equal(er, poop)) - }) - t.test('promise', t => { - const poop = new Error('poop') - t.teardown(mutateFS.fail('read', poop)) - t.plan(1) - list({ file: __filename }).catch(er => t.equal(er, poop)) - }) - t.end() -}) - -t.test('noResume option', t => { - const file = path.resolve(__dirname, 'fixtures/tars/file.tar') - t.test('sync', t => { - let e - list({ - file: file, - onentry: 
entry => { - e = entry - process.nextTick(_ => { - t.notOk(entry.flowing) - entry.resume() - }) - }, - sync: true, - noResume: true, - }) - t.ok(e) - t.notOk(e.flowing) - e.on('end', _ => t.end()) - }) - - t.test('async', t => list({ - file: file, - onentry: entry => { - process.nextTick(_ => { - t.notOk(entry.flowing) - entry.resume() - }) - }, - noResume: true, - })) - - t.end() -}) diff --git a/test/list.ts b/test/list.ts new file mode 100644 index 00000000..9a28d0f7 --- /dev/null +++ b/test/list.ts @@ -0,0 +1,278 @@ +import fs, { readFileSync } from 'fs' +//@ts-ignore +import mutateFS from 'mutate-fs' +import { dirname, resolve } from 'path' +import t, { Test } from 'tap' +import { fileURLToPath } from 'url' +import { list } from '../dist/esm/list.js' +import { Parser } from '../dist/esm/parse.js' +import { ReadEntry } from '../dist/esm/read-entry.js' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) + +const lp = JSON.parse( + readFileSync(__dirname + '/fixtures/parse/long-paths.json', 'utf8'), +) as ( + | ['meta', string] + | ['entry', Record] + | ['nullBlock' | 'eof' | 'end'] +)[] + +t.test('basic', t => { + const file = resolve(__dirname, 'fixtures/tars/long-paths.tar') + const expect = (lp as any[]) + .filter(e => Array.isArray(e) && e[0] === 'entry') + .map((e: ['entry', Record]) => e[1].path as string) + + const check = (actual: string[], t: Test) => { + t.same(actual, expect) + return Promise.resolve(null) + } + + ;[1000, undefined].forEach(maxReadSize => { + t.test('file maxReadSize=' + maxReadSize, t => { + t.test('sync', t => { + const actual: string[] = [] + const onReadEntry = (entry: ReadEntry) => + actual.push(entry.path) + list({ + file: file, + sync: true, + onReadEntry, + maxReadSize, + }) + return check(actual, t) + }) + + t.test('async promise', async t => { + const actual: string[] = [] + const onReadEntry = (entry: ReadEntry) => + actual.push(entry.path) + return await list({ + file, + onReadEntry, + maxReadSize, + }).then(() => check(actual, t)) + }) + + t.test('async cb', t => { + const actual: string[] = [] + const onReadEntry = (entry: ReadEntry) => + actual.push(entry.path) + list( + { + file: file, + onReadEntry: onReadEntry, + maxReadSize: maxReadSize, + }, + (er?: Error) => { + if (er) { + throw er + } + check(actual, t) + t.end() + }, + ) + }) + t.end() + }) + }) + + t.test('stream', t => { + t.test('sync', t => { + const actual: string[] = [] + const onReadEntry = (entry: ReadEntry) => + actual.push(entry.path) + const l = list({ sync: true, onReadEntry }) + l.end(fs.readFileSync(file)) + return check(actual, t) + }) + + t.test('async', t => { + const actual: string[] = [] + const onReadEntry = (entry: ReadEntry) => + actual.push(entry.path) + const l = list() + l.on('entry', onReadEntry) + l.on('end', _ => check(actual, t).then(_ => t.end())) + fs.createReadStream(file).pipe(l) + }) + t.end() + }) + + t.test('no onReadEntry function', () => list({ file: file })) + + t.test('limit to specific files', t => { + const fileList = [ + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t', + '170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc///', + ] + + const expect = [ + '170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 
'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + ] + + t.test('no filter function', async t => { + const check = () => t.same(actual, expect) + const actual: string[] = [] + return list( + { + file: file, + onReadEntry: entry => actual.push(entry.path), + }, + fileList, + ).then(check) + }) + + t.test('no filter function, stream', t => { + const check = () => t.same(actual, expect) + const actual: string[] = [] + const onReadEntry = (entry: ReadEntry) => + actual.push(entry.path) + fs.createReadStream(file).pipe( + list(fileList) + .on('entry', onReadEntry) + .on('end', _ => { + check() + t.end() + }), + ) + }) + + t.test('filter function', async t => { + const check = () => t.same(actual, expect.slice(0, 1)) + const actual: string[] = [] + return list( + { + file: file, + filter: path => path === expect[0], + onReadEntry: entry => actual.push(entry.path), + }, + fileList, + ).then(check) + }) + + return t.test('list is unmunged', t => { + t.same(fileList, [ + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t', + '170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc///', + ]) + t.end() + }) + }) + + t.end() +}) + +t.test('bad args', t => { + t.throws( + () => list({ file: __filename, sync: true }, () => {}), + new TypeError('callback not supported for sync tar functions'), + ) + t.throws( + () => list({}, () => {}), + new TypeError('callback only supported with file option'), + ) + t.end() +}) + +t.test('stat fails', t => { + const poop = new Error('poop') + t.teardown(mutateFS.statFail(poop)) + t.test('sync', t => { + t.plan(1) + t.throws(() => list({ file: __filename, sync: true }), poop) + }) + t.test('cb', t => { + t.plan(1) + list({ file: __filename }, er => t.equal(er, poop)) + }) + t.test('promise', t => { + t.plan(1) + list({ file: __filename }).catch(er => t.equal(er, poop)) + }) + t.end() +}) + +t.test('read fail', t => { + t.test('sync', t => { + const poop = new Error('poop') + t.teardown(mutateFS.fail('read', poop)) + t.plan(1) + t.throws( + () => + list({ + file: __filename, + sync: true, + maxReadSize: 10, + }), + poop, + ) + }) + t.test('cb', t => { + const poop = new Error('poop') + t.teardown(mutateFS.fail('read', poop)) + t.plan(1) + list({ file: __filename }, er => t.equal(er, poop)) + }) + t.test('promise', t => { + const poop = new Error('poop') + t.teardown(mutateFS.fail('read', poop)) + t.plan(1) + list({ file: __filename }).catch(er => t.equal(er, poop)) + }) + t.end() +}) + +t.test('noResume option', t => { + const file = resolve(__dirname, 'fixtures/tars/file.tar') + t.test('sync', t => { + let e!: ReadEntry + list({ + file: file, + onReadEntry: entry => { + e = entry + 
process.nextTick(() => { + t.notOk(entry.flowing) + entry.resume() + }) + }, + sync: true, + noResume: true, + }) + t.ok(e) + t.notOk(e.flowing) + e.on('end', () => t.end()) + }) + + t.test('async', t => + list({ + file: file, + onReadEntry: entry => { + process.nextTick(() => { + t.notOk(entry.flowing) + entry.resume() + }) + }, + noResume: true, + }), + ) + + t.end() +}) + +t.test('typechecks', t => { + const p = list() + //@ts-expect-error + p.then + t.type(p, Parser) + t.end() +}) diff --git a/test/load-all.js b/test/load-all.js index 524a3f4e..111a5fb6 100644 --- a/test/load-all.js +++ b/test/load-all.js @@ -1,10 +1,16 @@ -'use strict' // just load all the files so we can't cheat coverage by avoiding something -require('../') -const fs = require('fs') -const path = require('path') -const lib = path.resolve(__dirname, '../lib') -fs.readdirSync(lib) - .filter(f => /\.js$/.test(f)) - .forEach(f => require('../lib/' + f)) -require('tap').pass('all lib files loaded') +import fs from 'fs' +import t from 'tap' +import path, { dirname } from 'path' +import { fileURLToPath } from 'url' +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) +const lib = path.resolve(__dirname, '../dist/esm') +await Promise.all( + fs + .readdirSync(lib) + .filter(f => /\.js$/.test(f)) + .map(f => import('../dist/esm/' + f)), +) + +t.pass('all lib files loaded') diff --git a/test/make-command.ts b/test/make-command.ts new file mode 100644 index 00000000..822abb7d --- /dev/null +++ b/test/make-command.ts @@ -0,0 +1,74 @@ +import t from 'tap' +import { makeCommand } from '../src/make-command.js' +import { + isAsyncFile, + isAsyncNoFile, + isSyncFile, + isSyncNoFile, +} from '../src/options.js' + +class Sync { + sync: true = true +} +class Async {} + +const cmd = makeCommand( + (opt, entries) => { + t.equal(isSyncFile(opt), true) + t.type(entries, Array) + }, + async (opt, entries) => { + t.equal(isAsyncFile(opt), true) + t.type(entries, Array) + }, + (opt, entries) => { + t.equal(isSyncNoFile(opt), true) + t.type(entries, Array) + return new Sync() + }, + (opt, entries) => { + t.equal(isAsyncNoFile(opt), true) + t.type(entries, Array) + return new Async() + }, + (opt, entries) => { + if (entries?.length === 2) throw new Error('should not be len 2') + if (!opt) throw new Error('should get opt') + }, +) + +t.test('validation function is called', t => { + t.throws(() => cmd({}, ['a', 'b'])) + t.throws(() => cmd({ sync: true }, ['a', 'b'])) + t.throws(() => cmd({ sync: true, file: 'x' }, ['a', 'b'])) + t.throws(() => cmd({ file: 'x' }, ['a', 'b'])) + // cases where cb is not allowed + t.throws(() => cmd({}, [], () => {})) + t.throws(() => cmd({}, () => {})) + //@ts-expect-error + t.throws(() => cmd({ sync: true }, [], () => {})) + //@ts-expect-error + t.throws(() => cmd({ sync: true }, () => {})) + t.throws(() => cmd({ sync: true, file: 'x' }, [], () => {})) + t.throws(() => cmd({ sync: true, file: 'x' }, () => {})) + t.end() +}) + +t.test('basic calls', async t => { + t.match(cmd(), Async) + t.match(cmd({}), Async) + t.match(cmd({}, []), Async) + t.match(cmd({ sync: true }), Sync) + t.match(cmd({ sync: true }, []), Sync) + t.equal(cmd({ sync: true, file: 'x' }), undefined) + t.equal(await cmd({ file: 'x' }), undefined) + t.equal(await cmd({ file: 'x' }, []), undefined) + let cbCalled = false + t.equal( + await cmd({ file: 'x' }, [], () => { + cbCalled = true + }), + undefined, + ) + t.equal(cbCalled, true, 'called callback') +}) diff --git a/test/make-tar.js b/test/make-tar.js 
deleted file mode 100644 index 668d2164..00000000 --- a/test/make-tar.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict' -// a little utility to create virtual tar data -if (module === require.main) { - return require('tap').pass('this is fine') -} - -const Header = require('../lib/header.js') -module.exports = chunks => { - let dataLen = 0 - return Buffer.concat(chunks.map(chunk => { - if (Buffer.isBuffer(chunk)) { - dataLen += chunk.length - return chunk - } - const size = Math.max(typeof chunk === 'string' - ? 512 * Math.ceil(chunk.length / 512) - : 512) - dataLen += size - const buf = Buffer.alloc(size) - if (typeof chunk === 'string') { - buf.write(chunk) - } else { - new Header(chunk).encode(buf, 0) - } - return buf - }), dataLen) -} diff --git a/test/map.js b/test/map.js index fcd4e47f..33d3251b 100644 --- a/test/map.js +++ b/test/map.js @@ -1,7 +1,9 @@ -const t = require('tap') -const map = require('../map.js') -t.equal(map('test/index.js'), 'index.js') -t.same(map('test/unpack.js'), ['lib/unpack.js', 'lib/mkdir.js']) +import t from 'tap' +import map from '../map.js' +import { fileURLToPath } from 'url' +const __filename = fileURLToPath(import.meta.url) +t.equal(map('test/index.js'), 'src/index.ts') +t.same(map('test/unpack.js'), ['src/unpack.ts', 'src/mkdir.ts']) t.same(map('test/load-all.js'), []) t.equal(map(__filename), 'map.js') -t.equal(map('test/asdf'), 'lib/asdf') +t.equal(map('test/asdf'), 'src/asdf') diff --git a/test/mode-fix.js b/test/mode-fix.js index 779124b1..6c5b54a6 100644 --- a/test/mode-fix.js +++ b/test/mode-fix.js @@ -1,16 +1,15 @@ -'use strict' -const t = require('tap') -const mf = require('../lib/mode-fix.js') +import t from 'tap' +import { modeFix } from '../dist/esm/mode-fix.js' -t.equal(mf(0o10644, false), 0o644) -t.equal(mf(0o10644, true), 0o755) -t.equal(mf(0o10604, true), 0o705) -t.equal(mf(0o10600, true), 0o700) -t.equal(mf(0o10066, true), 0o077) +t.equal(modeFix(0o10644, false), 0o644) +t.equal(modeFix(0o10644, true), 0o755) +t.equal(modeFix(0o10604, true), 0o705) +t.equal(modeFix(0o10600, true), 0o700) +t.equal(modeFix(0o10066, true), 0o077) -t.equal(mf(0o10664, false, true), 0o644) -t.equal(mf(0o10066, false, true), 0o644) -t.equal(mf(0o10666, true, true), 0o755) -t.equal(mf(0o10604, true, true), 0o705) -t.equal(mf(0o10600, true, true), 0o700) -t.equal(mf(0o10066, true, true), 0o755) +t.equal(modeFix(0o10664, false, true), 0o644) +t.equal(modeFix(0o10066, false, true), 0o644) +t.equal(modeFix(0o10666, true, true), 0o755) +t.equal(modeFix(0o10604, true, true), 0o705) +t.equal(modeFix(0o10600, true, true), 0o700) +t.equal(modeFix(0o10066, true, true), 0o755) diff --git a/test/normalize-unicode.js b/test/normalize-unicode.js index 0d34f38c..ffbb07bf 100644 --- a/test/normalize-unicode.js +++ b/test/normalize-unicode.js @@ -1,39 +1,63 @@ -process.env.TESTING_TAR_FAKE_PLATFORM = 'win32' -const t = require('tap') -const normalize = require('../lib/normalize-unicode.js') -const stripSlash = require('../lib/strip-trailing-slashes.js') -const normPath = require('../lib/normalize-windows-path.js') +import t from 'tap' +import { fileURLToPath } from 'url' +import { normalizeUnicode } from '../dist/esm/normalize-unicode.js' +import { stripTrailingSlashes } from '../dist/esm/strip-trailing-slashes.js' +import { normalizeWindowsPath } from '../dist/esm/normalize-windows-path.js' + +const __filename = fileURLToPath(import.meta.url) +const fakePlatform = process.env.TESTING_TAR_FAKE_PLATFORM // café const cafe1 = Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString() 
// cafe with a ` -const cafe2 = Buffer.from([0x63, 0x61, 0x66, 0x65, 0xcc, 0x81]).toString() - -t.equal(normalize(cafe1), normalize(cafe2), 'matching unicodes') -t.equal(normalize(cafe1), normalize(cafe2), 'cached') -t.equal(normalize('foo'), 'foo', 'non-unicode string') - -t.test('normalize with strip slashes', t => { - const paths = [ - '\\a\\b\\c\\d\\', - '﹨aaaa﹨dddd﹨', - '\bbb\eee\', - '\\\\\eee\\\\\\', - '¼foo.txt', - '1/4foo.txt', - ] - - t.plan(paths.length) - - for (const path of paths) { - t.test(JSON.stringify(path), t => { - const a = normalize(stripSlash(normPath(path))) - const b = stripSlash(normPath(normalize(path))) - t.matchSnapshot(a, 'normalized') - t.equal(a, b, 'order should not matter') - t.end() - }) - } - t.end() -}) +const cafe2 = Buffer.from([ + 0x63, 0x61, 0x66, 0x65, 0xcc, 0x81, +]).toString() + +t.equal( + normalizeUnicode(cafe1), + normalizeUnicode(cafe2), + 'matching unicodes', +) +t.equal(normalizeUnicode(cafe1), normalizeUnicode(cafe2), 'cached') +t.equal(normalizeUnicode('foo'), 'foo', 'non-unicode string') + +if (fakePlatform === 'win32') { + t.test('normalize with strip slashes', t => { + const paths = [ + '\\a\\b\\c\\d\\', + '﹨aaaa﹨dddd﹨', + '\bbb\eee\', + '\\\\\eee\\\\\\', + '¼foo.txt', + '1/4foo.txt', + ] + + t.plan(paths.length) + + for (const path of paths) { + t.test(JSON.stringify(path), t => { + const a = normalizeUnicode( + stripTrailingSlashes(normalizeWindowsPath(path)), + ) + const b = stripTrailingSlashes( + normalizeWindowsPath(normalizeUnicode(path)), + ) + t.matchSnapshot(a, 'normalized') + t.equal(a, b, 'order should not matter') + t.end() + }) + } + t.end() + }) +} + +if (fakePlatform !== 'win32') { + t.spawn(process.execPath, [__filename, 'win32'], { + env: { + ...process.env, + TESTING_TAR_FAKE_PLATFORM: 'win32', + }, + }) +} diff --git a/test/normalize-windows-path.js b/test/normalize-windows-path.js index e9c705ab..8fbaa647 100644 --- a/test/normalize-windows-path.js +++ b/test/normalize-windows-path.js @@ -1,28 +1,38 @@ -const t = require('tap') +import t from 'tap' const realPlatform = process.platform const fakePlatform = realPlatform === 'win32' ? 
'posix' : 'win32' -t.test('posix', t => { +t.test('posix', async t => { if (realPlatform === 'win32') { process.env.TESTING_TAR_FAKE_PLATFORM = fakePlatform } else { delete process.env.TESTING_TAR_FAKE_PLATFORM } - const normPath = t.mock('../lib/normalize-windows-path.js') - t.equal(normPath('/some/path/back\\slashes'), '/some/path/back\\slashes') - t.equal(normPath('c:\\foo\\bar'), 'c:\\foo\\bar') + const { normalizeWindowsPath } = await t.mockImport( + '../dist/esm/normalize-windows-path.js', + ) + t.equal( + normalizeWindowsPath('/some/path/back\\slashes'), + '/some/path/back\\slashes', + ) + t.equal(normalizeWindowsPath('c:\\foo\\bar'), 'c:\\foo\\bar') t.end() }) -t.test('win32', t => { +t.test('win32', async t => { if (realPlatform !== 'win32') { process.env.TESTING_TAR_FAKE_PLATFORM = fakePlatform } else { delete process.env.TESTING_TAR_FAKE_PLATFORM } - const normPath = t.mock('../lib/normalize-windows-path.js') - t.equal(normPath('/some/path/back\\slashes'), '/some/path/back/slashes') - t.equal(normPath('c:\\foo\\bar'), 'c:/foo/bar') + const { normalizeWindowsPath } = await t.mockImport( + '../dist/esm/normalize-windows-path.js', + ) + t.equal( + normalizeWindowsPath('/some/path/back\\slashes'), + '/some/path/back/slashes', + ) + t.equal(normalizeWindowsPath('c:\\foo\\bar'), 'c:/foo/bar') t.end() }) diff --git a/test/options.js b/test/options.js new file mode 100644 index 00000000..b5cac1b1 --- /dev/null +++ b/test/options.js @@ -0,0 +1,79 @@ +import t from 'tap' +import { + dealias, + isSync, + isSyncFile, + isFile, + isAsyncFile, + isAsyncNoFile, + isSyncNoFile, + isAsync, + isNoFile, +} from '../dist/esm/options.js' + +t.same(dealias(), {}) +t.same(dealias(false), {}) + +t.same( + dealias({ + C: 'dir', + f: 'file', + z: 'zip', + P: 'preserve', + U: 'unlink', + 'strip-components': 99, + foo: 'bar', + }), + { + cwd: 'dir', + file: 'file', + gzip: 'zip', + preservePaths: 'preserve', + unlink: 'unlink', + strip: 99, + foo: 'bar', + }, +) + +t.same( + dealias({ + C: 'dir', + f: 'file', + z: 'zip', + P: 'preserve', + U: 'unlink', + stripComponents: 99, + foo: 'bar', + }), + { + cwd: 'dir', + file: 'file', + gzip: 'zip', + preservePaths: 'preserve', + unlink: 'unlink', + strip: 99, + foo: 'bar', + }, +) + +t.same(dealias({ noChmod: false }), { chmod: true }) +t.same(dealias({ noChmod: true }), {}) + +t.equal(isSyncFile(dealias({ sync: true, f: 'x' })), true) +t.equal(isSyncFile(dealias({ file: 'x' })), false) +t.equal(isSyncFile(dealias({ sync: true })), false) +t.equal(isSyncFile(dealias({})), false) +t.equal(isSync(dealias({ sync: true, f: 'x' })), true) +t.equal(isSync(dealias({ file: 'x' })), false) +t.equal(isSync(dealias({ sync: true })), true) +t.equal(isSync(dealias({})), false) +t.equal(isAsync(dealias({})), true) +t.equal(isFile(dealias({ sync: true, f: 'x' })), true) +t.equal(isNoFile(dealias({ sync: true, f: 'x' })), false) +t.equal(isFile(dealias({ file: 'x' })), true) +t.equal(isFile(dealias({ sync: true })), false) +t.equal(isFile(dealias({})), false) +t.equal(isSyncFile(dealias({})), false) +t.equal(isSyncNoFile(dealias({ sync: true })), true) +t.equal(isAsyncFile(dealias({})), false) +t.equal(isAsyncNoFile(dealias({})), true) diff --git a/test/pack.js b/test/pack.js index a4f8bfbe..8026dcc5 100644 --- a/test/pack.js +++ b/test/pack.js @@ -1,37 +1,45 @@ -'use strict' -const t = require('tap') -const Pack = require('../lib/pack.js') -const PackSync = Pack.Sync -const fs = require('fs') -const path = require('path') +import t from 'tap' +import { Pack, PackSync } from 
'../dist/esm/pack.js' +import fs from 'fs' +import path from 'path' +import { fileURLToPath } from 'url' + +import { Header } from '../dist/esm/header.js' +import zlib from 'zlib' +import * as miniz from 'minizlib' +import mutateFS from 'mutate-fs' +import { Minipass } from 'minipass' +import EE from 'events' +import { rimraf } from 'rimraf' +import { mkdirp } from 'mkdirp' +import { ReadEntry } from '../dist/esm/read-entry.js' +import { normalizeWindowsPath as normPath } from '../dist/esm/normalize-windows-path.js' + +const { default: chmodr } = await import('chmodr') + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) + const fixtures = path.resolve(__dirname, 'fixtures') const files = path.resolve(fixtures, 'files') const tars = path.resolve(fixtures, 'tars') -const chmodr = require('chmodr') -const Header = require('../lib/header.js') -const zlib = require('zlib') -const miniz = require('minizlib') -const mutateFS = require('mutate-fs') -const { Minipass } = require('minipass') + process.env.USER = 'isaacs' -const EE = require('events').EventEmitter -const rimraf = require('rimraf') -const mkdirp = require('mkdirp') -const ReadEntry = require('../lib/read-entry.js') const isWindows = process.platform === 'win32' -const normPath = require('../lib/normalize-windows-path.js') const ctime = new Date('2017-05-10T01:03:12.000Z') const atime = new Date('2017-04-17T00:00:00.000Z') const mtime = new Date('2016-04-01T19:00:00.000Z') -t.teardown(mutateFS.statMutate((er, st) => { - if (st) { - st.ctime = ctime - st.atime = atime - st.mtime = mtime - } -})) +t.teardown( + mutateFS.statMutate((_er, st) => { + if (st) { + st.ctime = ctime + st.atime = atime + st.mtime = mtime + } + }), +) t.test('set up', t => { const one = fs.statSync(files + '/hardlink-1') @@ -48,13 +56,14 @@ t.test('set up', t => { t.test('pack a file', t => { const out = [] - new Pack({ cwd: files }) + const seen = [] + new Pack({ cwd: files, onWriteEntry: e => seen.push(e) }) .end('one-byte.txt') .on('data', c => out.push(c)) .on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 2048) - t.match(data.slice(512).toString(), /^a\0{511}\0{1024}$/) + t.match(data.subarray(512).toString(), /^a\0{511}\0{1024}$/) const h = new Header(data) const expect = { cksumValid: true, @@ -84,22 +93,29 @@ t.test('pack a file', t => { throw new Error('no data!') } - t.equal(sync.slice(512).toString(), data.slice(512).toString()) + t.equal( + sync.subarray(512).toString(), + data.subarray(512).toString(), + ) const hs = new Header(sync) t.match(hs, expect) + t.strictSame( + seen.map(e => e.path), + ['one-byte.txt'], + ) t.end() }) }) t.test('pack a file with a prefix', t => { const out = [] - new Pack({ cwd: files, prefix: 'package/' }) + new Pack({ mtime, cwd: files, prefix: 'package/' }) .end('.dotfile') .on('data', c => out.push(c)) .on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 2048) - t.match(data.slice(512).toString(), /^.\n\0{510}\0{1024}$/) + t.match(data.subarray(512).toString(), /^.\n\0{510}\0{1024}$/) const h = new Header(data) const expect = { cksumValid: true, @@ -121,8 +137,13 @@ t.test('pack a file with a prefix', t => { } t.match(h, expect) const sync = new PackSync({ cwd: files, prefix: 'package' }) - .add('.dotfile').end().read() - t.equal(sync.slice(512).toString(), data.slice(512).toString()) + .add('.dotfile') + .end() + .read() + t.equal( + sync.subarray(512).toString(), + data.subarray(512).toString(), + ) const hs = new Header(sync) 
t.match(hs, expect) t.end() @@ -163,15 +184,24 @@ t.test('portable pack a dir', t => { } t.match(h, expect) t.equal(data.length, 2048) - t.match(data.slice(1024).toString(), /^\0{1024}$/) + t.match(data.subarray(1024).toString(), /^\0{1024}$/) - const syncgz = new PackSync({ cwd: files, portable: true, gzip: true }) - .add('dir').end().read() + const syncgz = new PackSync({ + cwd: files, + portable: true, + gzip: true, + }) + .add('dir') + .end() + .read() t.equal(syncgz[9], 255, 'gzip OS flag set to "unknown"') const sync = new miniz.Gunzip().end(zipped).read() - t.equal(sync.slice(512).toString(), data.slice(512).toString()) + t.equal( + sync.subarray(512).toString(), + data.subarray(512).toString(), + ) const hs = new Header(sync) t.match(hs, expect) @@ -193,8 +223,8 @@ t.test('portable pack a dir', t => { ctime: null, nullBlock: false, } - t.match(new Header(data.slice(512)), expect2) - t.match(new Header(sync.slice(512)), expect2) + t.match(new Header(data.subarray(512)), expect2) + t.match(new Header(sync.subarray(512)), expect2) t.end() }) }) @@ -235,11 +265,16 @@ t.test('use process cwd if cwd not specified', t => { } t.match(h, expect) t.equal(data.length, 2048) - t.match(data.slice(1024).toString(), /^\0{1024}$/) + t.match(data.subarray(1024).toString(), /^\0{1024}$/) const sync = new PackSync({ cwd: files }) - .add('dir').end().read() - t.equal(sync.slice(512).toString(), data.slice(512).toString()) + .add('dir') + .end() + .read() + t.equal( + sync.subarray(512).toString(), + data.subarray(512).toString(), + ) const hs = new Header(sync) t.match(hs, expect) @@ -261,15 +296,15 @@ t.test('use process cwd if cwd not specified', t => { ctime: ctime, nullBlock: false, } - t.match(new Header(data.slice(512)), expect2) - t.match(new Header(sync.slice(512)), expect2) + t.match(new Header(data.subarray(512)), expect2) + t.match(new Header(sync.subarray(512)), expect2) t.end() }) }) t.test('filter', t => { const out = [] - const filter = (path, stat) => stat.isDirectory() + const filter = (_path, stat) => stat.isDirectory() // only include directories, so dir/x should not appear new Pack({ cwd: files, filter: filter }) @@ -301,11 +336,16 @@ t.test('filter', t => { } t.match(h, expect) t.equal(data.length, 1536) - t.match(data.slice(512).toString(), /^\0{1024}$/) + t.match(data.subarray(512).toString(), /^\0{1024}$/) const sync = new PackSync({ cwd: files, filter: filter }) - .add('dir').end().read() - t.equal(sync.slice(512).toString(), data.slice(512).toString()) + .add('dir') + .end() + .read() + t.equal( + sync.subarray(512).toString(), + data.subarray(512).toString(), + ) const hs = new Header(sync) t.match(hs, expect) t.end() @@ -314,7 +354,7 @@ t.test('filter', t => { t.test('add the same dir twice (exercise cache code)', t => { const out = [] - const filter = (path, stat) => stat.isDirectory() + const filter = (_path, stat) => stat.isDirectory() // only include directories, so dir/x should not appear const pack = new Pack({ cwd: files, filter: filter }) @@ -346,10 +386,10 @@ t.test('add the same dir twice (exercise cache code)', t => { nullBlock: false, } t.match(h, expect) - const h2 = new Header(data.slice(512)) + const h2 = new Header(data.subarray(512)) t.match(h2, expect) t.equal(data.length, 2048) - t.match(data.slice(1024).toString(), /^\0{1024}$/) + t.match(data.subarray(1024).toString(), /^\0{1024}$/) const sync = new PackSync({ cwd: files, @@ -358,11 +398,17 @@ t.test('add the same dir twice (exercise cache code)', t => { readdirCache: pack.readdirCache, statCache: 
pack.statCache, }) - .add('dir').add('dir').end().read() - t.equal(sync.slice(1024).toString(), data.slice(1024).toString()) + .add('dir') + .add('dir') + .end() + .read() + t.equal( + sync.subarray(1024).toString(), + data.subarray(1024).toString(), + ) const hs = new Header(sync) t.match(hs, expect) - const hs2 = new Header(sync.slice(512)) + const hs2 = new Header(sync.subarray(512)) t.match(hs2, expect) t.end() }) @@ -384,7 +430,10 @@ t.test('if brotli is truthy, make it an object', t => { t.test('throws if both gzip and brotli are truthy', t => { const opt = { gzip: true, brotli: true } - t.throws(_ => new Pack(opt), new TypeError('gzip and brotli are mutually exclusive')) + t.throws( + _ => new Pack(opt), + new TypeError('gzip and brotli are mutually exclusive'), + ) t.end() }) @@ -404,14 +453,16 @@ t.test('gzip, also a very deep path', t => { const data = zlib.unzipSync(zipped) const entries = [] for (var i = 0; i < data.length; i += 512) { - const slice = data.slice(i, i + 512) + const slice = data.subarray(i, i + 512) const h = new Header(slice) if (h.nullBlock) { entries.push('null block') } else if (h.cksumValid) { entries.push([h.type, h.path]) } else if (entries[entries.length - 1][0] === 'File') { - entries[entries.length - 1].push(slice.toString().replace(/\0.*$/, '')) + entries[entries.length - 1].push( + slice.toString().replace(/\0.*$/, ''), + ) } } @@ -436,27 +487,72 @@ t.test('gzip, also a very deep path', t => { ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', 'short\n'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'], - ['ExtendedHeader', 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222'], - ['ExtendedHeader', 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', + ], + 
[ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', + 'short\n', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111', + ], + [ + 'ExtendedHeader', + 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222', + ], + [ + 'ExtendedHeader', + 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], ['ExtendedHeader', 'PaxHeader/Ω.txt'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', 'Ω'], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + 'Ω', + ], 'null block', 'null block', ] let ok = true entries.forEach((entry, i) => { - ok = ok && + ok = + ok && t.equal(entry[0], expect[i][0]) && t.equal(entry[1], expect[i][1]) && (!entry[2] || t.equal(entry[2], expect[i][2])) @@ -483,14 +579,16 @@ t.test('brotli, also a very deep path', t => { const data = zlib.brotliDecompressSync(zipped) const entries = [] for (var i = 0; i < data.length; i += 512) { - const slice = data.slice(i, i + 512) + const slice = data.subarray(i, i + 512) const h = new Header(slice) if (h.nullBlock) { entries.push('null block') } else if (h.cksumValid) { entries.push([h.type, h.path]) } else if (entries[entries.length - 1][0] === 'File') { - entries[entries.length - 1].push(slice.toString().replace(/\0.*$/, '')) + entries[entries.length - 1].push( + slice.toString().replace(/\0.*$/, ''), + ) } } @@ -515,30 +613,75 @@ t.test('brotli, also a very deep path', t => { ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', 'short\n'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'], - ['ExtendedHeader', 
'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222'], - ['ExtendedHeader', 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', + 'short\n', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111', + ], + [ + 'ExtendedHeader', + 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222', + ], + [ + 'ExtendedHeader', + 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], ['ExtendedHeader', 'PaxHeader/Ω.txt'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', 'Ω'], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + 'Ω', + ], 'null block', 'null block', ] let ok = true entries.forEach((entry, i) => { - ok = ok && - t.equal(entry[0], expect[i][0]) && - t.equal(entry[1], expect[i][1]) && - (!entry[2] || t.equal(entry[2], expect[i][2])) + ok = + ok && + t.equal(entry[0], expect[i][0]) && + t.equal(entry[1], expect[i][1]) && + (!entry[2] || t.equal(entry[2], expect[i][2])) }) t.end() @@ -549,7 +692,8 @@ t.test('very deep gzip path, sync', t => { const pack = new PackSync({ cwd: files, gzip: true, - }).add('dir') + }) + .add('dir') .add('long-path') .end() @@ -562,14 +706,16 @@ t.test('very deep gzip path, sync', t => { const data = zlib.unzipSync(zipped) const entries = [] for (var i = 0; i < data.length; i += 512) { - const slice = data.slice(i, i + 512) + const slice = data.subarray(i, i + 512) const h = new Header(slice) if (h.nullBlock) { entries.push('null 
block') } else if (h.cksumValid) { entries.push([h.type, h.path]) } else if (entries[entries.length - 1][0] === 'File') { - entries[entries.length - 1].push(slice.toString().replace(/\0.*$/, '')) + entries[entries.length - 1].push( + slice.toString().replace(/\0.*$/, ''), + ) } } @@ -596,25 +742,64 @@ t.test('very deep gzip path, sync', t => { ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', 'short\n'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'], - ['ExtendedHeader', 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222'], - ['ExtendedHeader', 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', + 'short\n', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111', + ], + [ + 'ExtendedHeader', + 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222', + ], + [ + 'ExtendedHeader', + 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', 
+ ], ['ExtendedHeader', 'PaxHeader/Ω.txt'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', 'Ω'], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + 'Ω', + ], 'null block', 'null block', ] let ok = true entries.forEach((entry, i) => { - ok = ok && + ok = + ok && t.equal(entry[0], expect[i][0]) && t.equal(entry[1], expect[i][1]) && (!entry[2] || t.equal(entry[2], expect[i][2])) @@ -628,7 +813,8 @@ t.test('very deep brotli path, sync', t => { const pack = new PackSync({ cwd: files, brotli: true, - }).add('dir') + }) + .add('dir') .add('long-path') .end() @@ -641,14 +827,16 @@ t.test('very deep brotli path, sync', t => { const data = zlib.brotliDecompressSync(zipped) const entries = [] for (var i = 0; i < data.length; i += 512) { - const slice = data.slice(i, i + 512) + const slice = data.subarray(i, i + 512) const h = new Header(slice) if (h.nullBlock) { entries.push('null block') } else if (h.cksumValid) { entries.push([h.type, h.path]) } else if (entries[entries.length - 1][0] === 'File') { - entries[entries.length - 1].push(slice.toString().replace(/\0.*$/, '')) + entries[entries.length - 1].push( + slice.toString().replace(/\0.*$/, ''), + ) } } @@ -675,28 +863,67 @@ t.test('very deep brotli path, sync', t => { ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', 'short\n'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'], - ['ExtendedHeader', 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222'], - ['ExtendedHeader', 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', + 'short\n', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', 
+ '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111', + ], + [ + 'ExtendedHeader', + 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222', + ], + [ + 'ExtendedHeader', + 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], ['ExtendedHeader', 'PaxHeader/Ω.txt'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', 'Ω'], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + 'Ω', + ], 'null block', 'null block', ] let ok = true entries.forEach((entry, i) => { - ok = ok && + ok = + ok && t.equal(entry[0], expect[i][0]) && - t.equal(entry[1], expect[i][1]) && - (!entry[2] || t.equal(entry[2], expect[i][2])) + t.equal(entry[1], expect[i][1]) && + (!entry[2] || t.equal(entry[2], expect[i][2])) }) t.end() @@ -711,8 +938,10 @@ t.test('write after end', t => { t.test('emit error when stat fail', t => { t.teardown(mutateFS.statFail(new Error('xyz'))) - t.throws(_ => new PackSync({ cwd: files }).add('one-byte.txt'), - new Error('xyz')) + t.throws( + _ => new PackSync({ cwd: files }).add('one-byte.txt'), + new Error('xyz'), + ) new Pack({ cwd: files }).add('one-byte.txt').on('error', e => { t.match(e, { message: 'xyz' }) @@ -722,7 +951,10 @@ t.test('emit error when stat fail', t => { t.test('readdir fail', t => { t.teardown(mutateFS.fail('readdir', new Error('xyz'))) - t.throws(_ => new PackSync({ cwd: files }).add('dir'), new Error('xyz')) + t.throws( + _ => new PackSync({ cwd: files }).add('dir'), + new Error('xyz'), + ) new Pack({ cwd: files }).add('dir').on('error', e => { t.match(e, { message: 'xyz' }) @@ -764,7 +996,10 @@ t.test('pipe into a slow reader', t => { } t.match(h, expect) t.equal(data.length, 21504) - t.match(data.slice(data.length - 1024).toString(), /^\0{1024}$/) + t.match( + data.subarray(data.length - 1024).toString(), + /^\0{1024}$/, + ) t.end() }) }) @@ -772,11 +1007,13 @@ t.test('pipe into a slow reader', t => { t.test('pipe into a slow gzip reader', t => { const out = [] const mp2 = new miniz.Unzip() - const p = new Pack({ cwd: files, gzip: true }).add('long-path').end() + const p = new Pack({ cwd: files, gzip: true }) + .add('long-path') + .end() p.pause() class SlowStream extends EE { - write (chunk) { + write(chunk) { mp2.write(chunk) setTimeout(_ => { this.emit('drain') @@ -785,7 +1022,7 @@ t.test('pipe into a slow gzip reader', t => { return false } - end (chunk) { + end(chunk) { return mp2.end(chunk) } } @@ -823,7 +1060,10 @@ t.test('pipe into a slow gzip reader', t => { } t.match(h, expect) t.equal(data.length, 21504) - t.match(data.slice(data.length - 1024).toString(), /^\0{1024}$/) + t.match( + data.subarray(data.length - 1024).toString(), + /^\0{1024}$/, + ) t.end() }) }) @@ -835,12 +1075,12 @@ t.test('ignores mid-queue', t => { let didFirst = false const p = new Pack({ cwd: tars, - filter: (p, st) => { 
+ filter: (p, _st) => { if (p === './') { return true } if (!didFirst) { - return didFirst = true + return (didFirst = true) } return false }, @@ -852,8 +1092,14 @@ t.test('ignores mid-queue', t => { p.on('data', c => out.push(c)) p.on('end', _ => { const data = Buffer.concat(out) - t.equal(data.slice(0, 100).toString().replace(/\0.*$/, ''), './') - const file = data.slice(512, 612).toString().replace(/\0.*$/, '') + t.equal( + data.subarray(0, 100).toString().replace(/\0.*$/, ''), + './', + ) + const file = data + .subarray(512, 612) + .toString() + .replace(/\0.*$/, '') t.not(files.indexOf(file), -1) t.end() }) @@ -869,17 +1115,21 @@ t.test('warnings', t => { const p = new Pack({ cwd: files, onwarn: (c, m, p) => warnings.push([c, m, p]), - }).end(f).on('data', c => out.push(c)) + }) + .end(f) + .on('data', c => out.push(c)) const out = [] p.on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 2048) - t.match(warnings, [[ - 'TAR_ENTRY_INFO', - /stripping .* from absolute path/, - { path: normPath(f) }, - ]]) + t.match(warnings, [ + [ + 'TAR_ENTRY_INFO', + /stripping .* from absolute path/, + { path: normPath(f) }, + ], + ]) t.match(new Header(data), { path: normPath(f).replace(/^(\/|[a-z]:\/)/i, ''), @@ -900,7 +1150,9 @@ t.test('warnings', t => { strict: strict, preservePaths: true, onwarn: (c, m, p) => warnings.push([c, m, p]), - }).end(f).on('data', c => out.push(c)) + }) + .end(f) + .on('data', c => out.push(c)) p.on('end', _ => { const data = Buffer.concat(out) t.equal(warnings.length, 0) @@ -918,13 +1170,15 @@ t.test('warnings', t => { new Pack({ strict: true, cwd: files, - }).end(f).on('error', e => { - t.match(e, { - message: /stripping .* from absolute path/, - path: normPath(f), - }) - t.end() }) + .end(f) + .on('error', e => { + t.match(e, { + message: /stripping .* from absolute path/, + path: normPath(f), + }) + t.end() + }) }) t.end() @@ -963,7 +1217,7 @@ t.test('no dir recurse', t => { }) t.test('sync', t => { - const p = new Pack.Sync({ + const p = new PackSync({ cwd: dir, noDirRecurse: true, }) @@ -975,48 +1229,57 @@ t.test('no dir recurse', t => { t.end() }) -t.test('follow', { skip: isWindows && 'file symlinks not available' }, t => { - const check = (out, t) => { - const data = Buffer.concat(out) - t.equal(data.length, 2048) - t.match(new Header(data, 0), { - type: 'File', - cksumValid: true, - needPax: false, - path: 'symlink', - mode: isWindows ? 0o666 : 0o644, - size: 26, - }) - t.match(data.slice(512).toString(), /this link is like diamond\n\0+$/) - t.end() - } +t.test( + 'follow', + { skip: isWindows && 'file symlinks not available' }, + t => { + const check = (out, t) => { + const data = Buffer.concat(out) + t.equal(data.length, 2048) + t.match(new Header(data, 0), { + type: 'File', + cksumValid: true, + needPax: false, + path: 'symlink', + mode: isWindows ? 
0o666 : 0o644, + size: 26, + }) + t.match( + data.subarray(512).toString(), + /this link is like diamond\n\0+$/, + ) + t.end() + } - t.test('async', t => { - const out = [] - const p = new Pack({ cwd: files, follow: true }) - p.on('data', c => out.push(c)) - p.on('end', _ => check(out, t)) - p.end('symlink') - }) + t.test('async', t => { + const out = [] + const p = new Pack({ cwd: files, follow: true }) + p.on('data', c => out.push(c)) + p.on('end', _ => check(out, t)) + p.end('symlink') + }) - t.test('sync', t => { - const out = [] - const p = new Pack.Sync({ cwd: files, follow: true }) - p.on('data', c => out.push(c)) - p.end('symlink') - check(out, t) - }) + t.test('sync', t => { + const out = [] + const p = new PackSync({ cwd: files, follow: true }) + p.on('data', c => out.push(c)) + p.end('symlink') + check(out, t) + }) - t.end() -}) + t.end() + }, +) t.test('pack ReadEntries', t => { t.test('basic', t => { - const readEntry = new ReadEntry(new Header({ - path: 'x', - type: 'File', - size: 1, - })) + const readEntry = new ReadEntry( + new Header({ + path: 'x', + type: 'File', + size: 1, + }), + ) const p = new Pack() p.end(readEntry) const out = [] @@ -1024,9 +1287,12 @@ t.test('pack ReadEntries', t => { p.on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 2048) - t.match(data.slice(1024).toString(), /^\0+$/) - t.equal(data.slice(0, 100).toString().replace(/\0.*$/, ''), 'x') - t.equal(data.slice(512, 514).toString(), 'x\0') + t.match(data.subarray(1024).toString(), /^\0+$/) + t.equal( + data.subarray(0, 100).toString().replace(/\0.*$/, ''), + 'x', + ) + t.equal(data.subarray(512, 514).toString(), 'x\0') t.end() }) const buf = Buffer.alloc(512) @@ -1035,11 +1301,13 @@ t.test('pack ReadEntries', t => { }) t.test('prefix', t => { - const readEntry = new ReadEntry(new Header({ - path: 'x', - type: 'File', - size: 1, - })) + const readEntry = new ReadEntry( + new Header({ + path: 'x', + type: 'File', + size: 1, + }), + ) const p = new Pack({ prefix: 'y' }) p.end(readEntry) const out = [] @@ -1047,9 +1315,12 @@ t.test('pack ReadEntries', t => { p.on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 2048) - t.match(data.slice(1024).toString(), /^\0+$/) - t.equal(data.slice(0, 100).toString().replace(/\0.*$/, ''), 'y/x') - t.equal(data.slice(512, 514).toString(), 'x\0') + t.match(data.subarray(1024).toString(), /^\0+$/) + t.equal( + data.subarray(0, 100).toString().replace(/\0.*$/, ''), + 'y/x', + ) + t.equal(data.subarray(512, 514).toString(), 'x\0') t.end() }) const buf = Buffer.alloc(512) @@ -1058,21 +1329,27 @@ t.test('pack ReadEntries', t => { }) t.test('filter out', t => { - const re1 = new ReadEntry(new Header({ - path: 'a', - type: 'File', - size: 1, - })) - const re2 = new ReadEntry(new Header({ - path: 'x', - type: 'File', - size: 1, - })) - const re3 = new ReadEntry(new Header({ - path: 'y', - type: 'File', - size: 1, - })) + const re1 = new ReadEntry( + new Header({ + path: 'a', + type: 'File', + size: 1, + }), + ) + const re2 = new ReadEntry( + new Header({ + path: 'x', + type: 'File', + size: 1, + }), + ) + const re3 = new ReadEntry( + new Header({ + path: 'y', + type: 'File', + size: 1, + }), + ) const p = new Pack({ filter: p => p === 'x' }) p.add(re1) p.add(re2) @@ -1082,9 +1359,12 @@ t.test('pack ReadEntries', t => { p.on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 2048) - t.match(data.slice(1024).toString(), /^\0+$/) - t.equal(data.slice(0, 100).toString().replace(/\0.*$/, ''), 'x') - t.equal(data.slice(512, 
514).toString(), 'x\0') + t.match(data.subarray(1024).toString(), /^\0+$/) + t.equal( + data.subarray(0, 100).toString().replace(/\0.*$/, ''), + 'x', + ) + t.equal(data.subarray(512, 514).toString(), 'x\0') t.end() }) { @@ -1119,7 +1399,7 @@ t.test('filter out everything', t => { t.test('sync', t => { const out = [] - const p = new Pack.Sync({ cwd: files, filter: filter }) + const p = new PackSync({ cwd: files, filter: filter }) p.on('data', c => out.push(c)) p.end('./') check(out, t) @@ -1127,7 +1407,7 @@ t.test('filter out everything', t => { t.test('async', t => { const out = [] - const p = new Pack.Sync({ cwd: files, filter: filter }) + const p = new PackSync({ cwd: files, filter: filter }) p.on('data', c => out.push(c)) p.on('end', _ => check(out, t)) p.end('./') @@ -1149,41 +1429,53 @@ t.test('fs.open fails', t => { t.test('sync', t => { t.plan(1) - t.throws(_ => - new Pack.Sync({ cwd: files }).end('one-byte.txt'), poop) + t.throws( + _ => new PackSync({ cwd: files }).end('one-byte.txt'), + poop, + ) }) t.end() }) -const write = opts => new Promise((resolve, reject) => { - const p = new Pack() - let totalSize = 0 - p.on('data', d => totalSize += d.length) - p.once('error', reject) - p.once('end', () => resolve(totalSize)) - - const file1 = new ReadEntry(new Header({ - path: 'file1.txt', - size: 5, - })) - if (opts.before) { - file1.end('file1') - p.add(file1) - } else { - p.add(file1) - file1.end('file1') - } +const write = opts => + new Promise((resolve, reject) => { + const p = new Pack() + let totalSize = 0 + p.on('data', d => (totalSize += d.length)) + p.once('error', reject) + p.once('end', () => resolve(totalSize)) + + const file1 = new ReadEntry( + new Header({ + path: 'file1.txt', + size: 5, + type: 'File', + }), + ) + if (opts.before) { + file1.end('file1') + p.add(file1) + } else { + p.add(file1) + file1.end('file1') + } - p.end() -}) + p.end() + }) t.test('padding works regardless of arite/add order', t => Promise.all([ write({ before: true }), write({ before: false }), ]).then(res => - t.equal(res[0], res[1], 'length is the same regardless of write/add order'))) + t.equal( + res[0], + res[1], + 'length is the same regardless of write/add order', + ), + ), +) t.test('prefix and subdirs', t => { const dir = path.resolve(fixtures, 'pack-prefix-subdirs') @@ -1214,7 +1506,11 @@ t.test('prefix and subdirs', t => { const check = (out, t) => { const data = Buffer.concat(out) expect.forEach((e, i) => - t.equal(e, data.slice(i * 512, i * 512 + e.length).toString())) + t.equal( + e, + data.subarray(i * 512, i * 512 + e.length).toString(), + ), + ) t.end() } @@ -1235,8 +1531,8 @@ t.test('prefix and subdirs', t => { }) return t.test('sync', t => { - t.test('.', t => runTest(t, '.', Pack.Sync)) - return t.test('./', t => runTest(t, './', Pack.Sync)) + t.test('.', t => runTest(t, '.', PackSync)) + return t.test('./', t => runTest(t, './', PackSync)) }) }) @@ -1291,9 +1587,12 @@ t.test('prefix and hard links', t => { const data = Buffer.concat(out) expect.forEach((e, i) => { if (typeof e === 'string') { - t.equal(data.slice(i * 512, i * 512 + e.length).toString(), e) + t.equal( + data.subarray(i * 512, i * 512 + e.length).toString(), + e, + ) } else { - t.match(new Header(data.slice(i * 512, (i + 1) * 512)), e) + t.match(new Header(data.subarray(i * 512, (i + 1) * 512)), e) } }) t.end() @@ -1330,8 +1629,8 @@ t.test('prefix and hard links', t => { }) t.test('sync', t => { - t.test('.', t => runTest(t, '.', Pack.Sync)) - return t.test('./', t => runTest(t, './', Pack.Sync)) + t.test('.', t 
=> runTest(t, '.', PackSync)) + return t.test('./', t => runTest(t, './', PackSync)) }) t.end() diff --git a/test/parse.js b/test/parse.js index 2cc68782..7fd21153 100644 --- a/test/parse.js +++ b/test/parse.js @@ -1,24 +1,32 @@ -'use strict' -const t = require('tap') -const Parse = require('../lib/parse.js') - -const makeTar = require('./make-tar.js') -const fs = require('fs') -const path = require('path') +import t from 'tap' +import { Parser } from '../dist/esm/parse.js' +import { makeTar } from './fixtures/make-tar.js' +import fs, { readFileSync } from 'fs' +import path, { dirname } from 'path' +import zlib from 'zlib' +import { Minipass } from 'minipass' +import { Header } from '../dist/esm/header.js' +import EE from 'events' +import { fileURLToPath } from 'url' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) const tardir = path.resolve(__dirname, 'fixtures/tars') -const zlib = require('zlib') -const { Minipass } = require('minipass') -const Header = require('../lib/header.js') -const EE = require('events').EventEmitter t.test('fixture tests', t => { class ByteStream extends Minipass { - write (chunk) { + write(chunk) { for (let i = 0; i < chunk.length - 1; i++) { - super.write(chunk.slice(i, i + 1)) + super.write(chunk.subarray(i, i + 1)) } - return super.write(chunk.slice(chunk.length - 1, chunk.length)) + const ret = super.write( + chunk.subarray(chunk.length - 1, chunk.length), + ) + if (ret === false) { + throw new Error('BS write return false') + } + return ret } } @@ -26,21 +34,28 @@ t.test('fixture tests', t => { let ok = true let cursor = 0 p.on('entry', entry => { - ok = ok && t.match(['entry', entry], expect[cursor++], entry.path) + ok = + ok && t.match(['entry', entry], expect[cursor++], entry.path) if (slow) { - setTimeout(_ => entry.resume()) + setTimeout(() => entry.resume()) } else { entry.resume() } }) p.on('ignoredEntry', entry => { - ok = ok && t.match(['ignoredEntry', entry], expect[cursor++], - 'ignored: ' + entry.path) + ok = + ok && + t.match( + ['ignoredEntry', entry], + expect[cursor++], + 'ignored: ' + entry.path, + ) }) - p.on('warn', (c, message, data) => { - ok = ok && t.match(['warn', c, message], expect[cursor++], 'warn') + p.on('warn', (c, message, _data) => { + ok = + ok && t.match(['warn', c, message], expect[cursor++], 'warn') }) - p.on('nullBlock', _ => { + p.on('nullBlock', () => { ok = ok && t.match(['nullBlock'], expect[cursor++], 'null') }) p.on('error', er => { @@ -49,204 +64,280 @@ t.test('fixture tests', t => { p.on('meta', meta => { ok = ok && t.match(['meta', meta], expect[cursor++], 'meta') }) - p.on('eof', _ => { + p.on('eof', () => { ok = ok && t.match(['eof'], expect[cursor++], 'eof') }) - p.on('end', _ => { + p.on('end', () => { ok = ok && t.match(['end'], expect[cursor++], 'end') t.end() }) } t.jobs = 4 - const path = require('path') const parsedir = path.resolve(__dirname, 'fixtures/parse') const files = fs.readdirSync(tardir) - const maxMetaOpt = [250, null] + const maxMetaOpt = [250, undefined] const filterOpt = [true, false] const strictOpt = [true, false] const runTest = (file, maxMeta, filter, strict) => { const tardata = fs.readFileSync(file) const base = path.basename(file, '.tar') - t.test('file=' + base + '.tar' + - ' maxmeta=' + maxMeta + - ' filter=' + filter + - ' strict=' + strict, t => { - const o = - (maxMeta ? '-meta-' + maxMeta : '') + - (filter ? '-filter' : '') + - (strict ? '-strict' : '') - const tail = (o ? 
'-' + o : '') + '.json' - const eventsFile = parsedir + '/' + base + tail - const expect = require(eventsFile) - - t.test('uncompressed one byte at a time', t => { - const bs = new ByteStream() - const opt = (maxMeta || filter || strict) ? { - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - } : null - const bp = new Parse(opt) - trackEvents(t, expect, bp) - bs.pipe(bp) - bs.end(tardata) - }) - - t.test('uncompressed all at once', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, + t.test( + 'file=' + + base + + '.tar' + + ' maxmeta=' + + maxMeta + + ' filter=' + + filter + + ' strict=' + + strict, + t => { + const o = + (maxMeta ? '-meta-' + maxMeta : '') + + (filter ? '-filter' : '') + + (strict ? '-strict' : '') + const tail = (o ? '-' + o : '') + '.json' + const eventsFile = parsedir + '/' + base + tail + const expect = JSON.parse(readFileSync(eventsFile, 'utf8')) + + t.test('uncompressed one byte at a time', t => { + const bs = new ByteStream() + bs.on('data', c => { + if (!Buffer.isBuffer(c)) throw new Error('wat1') + if (c.length !== 1) throw new Error('wat2') + }) + const opt = + maxMeta || filter || strict ? + { + maxMetaEntrySize: maxMeta, + filter: + filter ? + (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + } + : undefined + const p = new Parser(opt) + trackEvents(t, expect, p) + bs.pipe(p) + bs.write(tardata) + bs.end() }) - trackEvents(t, expect, p) - p.end(tardata) - }) - - t.test('uncompressed one byte at a time, filename .tbr', t => { - const bs = new ByteStream() - const opt = (maxMeta || filter || strict) ? { - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - file: 'example.tbr', - } : null - const bp = new Parse(opt) - trackEvents(t, expect, bp) - bs.pipe(bp) - bs.end(tardata) - }) - - t.test('uncompressed all at once, filename .tar.br', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - file: 'example.tar.br', + + t.test('uncompressed all at once', t => { + // this one writes it as a string + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: + filter ? + (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + }) + trackEvents(t, expect, p) + p.end(tardata.toString('hex'), 'hex', () => {}) }) - trackEvents(t, expect, p) - p.end(tardata) - }) - - t.test('gzipped all at once', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, + + t.test( + 'uncompressed one byte at a time, filename .tbr', + t => { + const bs = new ByteStream() + const opt = + maxMeta || filter || strict ? + { + maxMetaEntrySize: maxMeta, + filter: + filter ? + (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + file: 'example.tbr', + } + : undefined + const bp = new Parser(opt) + trackEvents(t, expect, bp) + bs.pipe(bp) + bs.end(tardata) + }, + ) + + t.test('uncompressed all at once, filename .tar.br', t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: + filter ? 
+ (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + file: 'example.tar.br', + }) + trackEvents(t, expect, p) + p.end(tardata) }) - trackEvents(t, expect, p) - p.end(zlib.gzipSync(tardata)) - }) - - t.test('gzipped all at once, filename .tbr', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - file: 'example.tbr', + + t.test('gzipped all at once', t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: + filter ? + (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + }) + trackEvents(t, expect, p) + p.end(zlib.gzipSync(tardata), () => {}) }) - trackEvents(t, expect, p) - p.end(zlib.gzipSync(tardata)) - }) - - t.test('gzipped byte at a time', t => { - const bs = new ByteStream() - const bp = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, + + t.test('gzipped all at once, filename .tbr', t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: + filter ? + (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + file: 'example.tbr', + }) + trackEvents(t, expect, p) + p.write(zlib.gzipSync(tardata), () => {}) + p.end(() => {}) }) - trackEvents(t, expect, bp) - bs.pipe(bp) - bs.end(zlib.gzipSync(tardata)) - }) - - t.test('compress with brotli based on filename .tar.br', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - file: 'example.tar.br', + + t.test('gzipped byte at a time', t => { + const bs = new ByteStream() + const bp = new Parser({ + maxMetaEntrySize: maxMeta, + filter: + filter ? + (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + }) + trackEvents(t, expect, bp) + bs.pipe(bp) + bs.end(zlib.gzipSync(tardata)) }) - trackEvents(t, expect, p) - p.end(zlib.brotliCompressSync(tardata)) - }) - - t.test('compress with brotli based on filename .tbr', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - file: 'example.tbr', + + t.test( + 'compress with brotli based on filename .tar.br', + t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: + filter ? + (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + file: 'example.tar.br', + }) + trackEvents(t, expect, p) + p.end(zlib.brotliCompressSync(tardata)) + }, + ) + + t.test('compress with brotli based on filename .tbr', t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: + filter ? + (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + file: 'example.tbr', + }) + trackEvents(t, expect, p) + p.end(zlib.brotliCompressSync(tardata)) }) - trackEvents(t, expect, p) - p.end(zlib.brotliCompressSync(tardata)) - }) - - t.test('compress with brotli all at once', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - brotli: {}, + + t.test('compress with brotli all at once', t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: + filter ? 
+ (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + brotli: {}, + }) + trackEvents(t, expect, p) + p.end(zlib.brotliCompressSync(tardata)) }) - trackEvents(t, expect, p) - p.end(zlib.brotliCompressSync(tardata)) - }) - - t.test('compress with brotli byte at a time', t => { - const bs = new ByteStream() - const bp = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - brotli: {}, + + t.test('compress with brotli byte at a time', t => { + const bs = new ByteStream() + const bp = new Parser({ + maxMetaEntrySize: maxMeta, + filter: + filter ? + (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + brotli: {}, + }) + trackEvents(t, expect, bp) + bs.pipe(bp) + bs.end(zlib.brotliCompressSync(tardata)) }) - trackEvents(t, expect, bp) - bs.pipe(bp) - bs.end(zlib.brotliCompressSync(tardata)) - }) - - t.test('compress with brotli .tbr byte at a time', t => { - const bs = new ByteStream() - const bp = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - file: 'example.tbr', + + t.test('compress with brotli .tbr byte at a time', t => { + const bs = new ByteStream() + const bp = new Parser({ + maxMetaEntrySize: maxMeta, + filter: + filter ? + (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + file: 'example.tbr', + }) + trackEvents(t, expect, bp) + bs.pipe(bp) + bs.end(zlib.brotliCompressSync(tardata)) }) - trackEvents(t, expect, bp) - bs.pipe(bp) - bs.end(zlib.brotliCompressSync(tardata)) - }) - - t.test('async chunks', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, + + t.test('async chunks', t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: + filter ? 
+ (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + }) + trackEvents(t, expect, p, true) + const first = tardata.subarray( + 0, + Math.floor(tardata.length / 2), + ) + p.write(first.toString('hex'), 'hex') + process.nextTick(() => + p.end(tardata.subarray(Math.floor(tardata.length / 2))), + ) }) - trackEvents(t, expect, p, true) - p.write(tardata.slice(0, Math.floor(tardata.length / 2))) - process.nextTick(_ => p.end(tardata.slice(Math.floor(tardata.length / 2)))) - }) - t.end() - }) + t.end() + }, + ) } files - .map(f => path.resolve(tardir, f)).forEach(file => + .map(f => path.resolve(tardir, f)) + .forEach(file => maxMetaOpt.forEach(maxMeta => strictOpt.forEach(strict => filterOpt.forEach(filter => - runTest(file, maxMeta, filter, strict))))) + runTest(file, maxMeta, filter, strict), + ), + ), + ), + ) t.end() }) t.test('strict warn with an error emits that error', t => { t.plan(1) - const p = new Parse({ + const p = new Parser({ strict: true, }) p.on('error', emitted => t.equal(emitted, er)) @@ -256,18 +347,18 @@ t.test('strict warn with an error emits that error', t => { t.test('onwarn gets added to the warn event', t => { t.plan(1) - const p = new Parse({ - onwarn (code, message) { + const p = new Parser({ + onwarn(_code, message) { t.equal(message, 'this is fine') }, }) p.warn('TAR_TEST', 'this is fine') }) -t.test('onentry gets added to entry event', t => { +t.test('onReadEntry gets added to entry event', t => { t.plan(1) - const p = new Parse({ - onentry: entry => t.equal(entry, 'yes hello this is dog'), + const p = new Parser({ + onReadEntry: entry => t.equal(entry, 'yes hello this is dog'), }) p.emit('entry', 'yes hello this is dog') }) @@ -400,36 +491,56 @@ t.test('drain event timings', t => { ].map(chunks => makeTar(chunks)) const expect = [ - 'one', 'two', 'three', - 'four', 'five', 'six', 'seven', 'eight', - 'four', 'five', 'six', 'seven', 'eight', + 'one', + 'two', + 'three', + 'four', + 'five', + 'six', + 'seven', + 'eight', + 'four', + 'five', + 'six', + 'seven', + 'eight', 'nine', - 'one', 'two', 'three', - 'four', 'five', 'six', 'seven', 'eight', - 'four', 'five', 'six', 'seven', 'eight', + 'one', + 'two', + 'three', + 'four', + 'five', + 'six', + 'seven', + 'eight', + 'four', + 'five', + 'six', + 'seven', + 'eight', 'nine', ] class SlowStream extends EE { - write () { - setTimeout(_ => this.emit('drain')) + write() { + setTimeout(() => this.emit('drain')) return false } - end () { + end() { return this.write() } } let currentEntry const autoPipe = true - const p = new Parse({ + const p = new Parser({ ondone, - onentry: entry => { + onReadEntry: entry => { t.equal(entry.path, expect.shift()) currentEntry = entry if (autoPipe) { - setTimeout(_ => entry.pipe(new SlowStream())) + setTimeout(() => entry.pipe(new SlowStream())) } }, }) @@ -441,7 +552,7 @@ t.test('drain event timings', t => { }) let interval - const go = _ => { + const go = () => { const d = data.shift() if (d === undefined) { return p.end() @@ -454,19 +565,21 @@ t.test('drain event timings', t => { } const hunklen = Math.floor(d.length / 2) - const hunks = [ - d.slice(0, hunklen), - d.slice(hunklen), - ] + const hunks = [d.subarray(0, hunklen), d.subarray(hunklen)] p.write(hunks[0]) if (currentEntry && !paused) { - console.error('has current entry') currentEntry.pause() paused = true } - if (!t.equal(p.write(hunks[1]), false, 'write should return false: ' + d)) { + if ( + !t.equal( + p.write(hunks[1]), + false, + 'write should return false: ' + d, + ) + ) { return t.end() } @@ -478,7 
+591,7 @@ t.test('drain event timings', t => { } p.once('drain', go) - p.on('end', _ => { + p.on('end', () => { clearInterval(interval) t.ok(sawOndone) t.end() @@ -542,18 +655,18 @@ t.test('consume while consuming', t => { ]) const runTest = (t, size) => { - const p = new Parse() - const first = data.slice(0, size) - const rest = data.slice(size) - p.once('entry', entry => { + const p = new Parser() + const first = data.subarray(0, size) + const rest = data.subarray(size) + p.once('entry', _entry => { for (let pos = 0; pos < rest.length; pos += size) { - p.write(rest.slice(pos, pos + size)) + p.write(rest.subarray(pos, pos + size)) } p.end() }) .on('entry', entry => entry.resume()) - .on('end', _ => t.end()) + .on('end', () => t.end()) .write(first) } @@ -579,7 +692,9 @@ t.test('truncated input', t => { t.test('truncated at block boundary', t => { const warnings = [] - const p = new Parse({ onwarn: (c, message) => warnings.push(message) }) + const p = new Parser({ + onwarn: (_c, message) => warnings.push(message), + }) p.end(data) t.same(warnings, [ 'Truncated input (needed 512 more bytes, only 0 available)', @@ -589,7 +704,9 @@ t.test('truncated input', t => { t.test('truncated mid-block', t => { const warnings = [] - const p = new Parse({ onwarn: (c, message) => warnings.push(message) }) + const p = new Parser({ + onwarn: (_c, message) => warnings.push(message), + }) p.write(data) p.end(Buffer.from('not a full block')) t.same(warnings, [ @@ -617,33 +734,37 @@ t.test('truncated gzip input', t => { '', ]) const tgz = zlib.gzipSync(raw) - const split = Math.floor(tgz.length * 2 / 3) - const trunc = tgz.slice(0, split) + const split = Math.floor((tgz.length * 2) / 3) + const trunc = tgz.subarray(0, split) const skipEarlyEnd = process.version.match(/^v4\./) - t.test('early end', { - skip: skipEarlyEnd ? 'not a zlib error on v4' : false, - }, t => { - const warnings = [] - const p = new Parse() - p.on('error', er => warnings.push(er.message)) - let aborted = false - p.on('abort', _ => aborted = true) - p.end(trunc) - t.equal(aborted, true, 'aborted writing') - t.same(warnings, ['zlib: unexpected end of file']) - t.end() - }) + t.test( + 'early end', + { + skip: skipEarlyEnd ? 
'not a zlib error on v4' : false, + }, + t => { + const warnings = [] + const p = new Parser() + p.on('error', er => warnings.push(er.message)) + let aborted = false + p.on('abort', () => (aborted = true)) + p.end(trunc) + t.equal(aborted, true, 'aborted writing') + t.same(warnings, ['zlib: unexpected end of file']) + t.end() + }, + ) t.test('just wrong', t => { const warnings = [] - const p = new Parse() + const p = new Parser() p.on('error', er => warnings.push(er.message)) let aborted = false - p.on('abort', _ => aborted = true) + p.on('abort', () => (aborted = true)) p.write(trunc) p.write(trunc) - p.write(tgz.slice(split)) + p.write(tgz.subarray(split)) p.end() t.equal(aborted, true, 'aborted writing') t.match(warnings, [/^zlib: /]) @@ -655,40 +776,42 @@ t.test('truncated gzip input', t => { t.test('end while consuming', t => { // https://github.com/npm/node-tar/issues/157 - const data = zlib.gzipSync(makeTar([ - { - path: 'package/package.json', - type: 'File', - size: 130, - }, - new Array(131).join('x'), - { - path: 'package/node_modules/@c/d/node_modules/e/package.json', - type: 'File', - size: 30, - }, - new Array(31).join('e'), - { - path: 'package/node_modules/@c/d/package.json', - type: 'File', - size: 33, - }, - new Array(34).join('d'), - { - path: 'package/node_modules/a/package.json', - type: 'File', - size: 59, - }, - new Array(60).join('a'), - { - path: 'package/node_modules/b/package.json', - type: 'File', - size: 30, - }, - new Array(31).join('b'), - '', - '', - ])) + const data = zlib.gzipSync( + makeTar([ + { + path: 'package/package.json', + type: 'File', + size: 130, + }, + new Array(131).join('x'), + { + path: 'package/node_modules/@c/d/node_modules/e/package.json', + type: 'File', + size: 30, + }, + new Array(31).join('e'), + { + path: 'package/node_modules/@c/d/package.json', + type: 'File', + size: 33, + }, + new Array(34).join('d'), + { + path: 'package/node_modules/a/package.json', + type: 'File', + size: 59, + }, + new Array(60).join('a'), + { + path: 'package/node_modules/b/package.json', + type: 'File', + size: 30, + }, + new Array(31).join('b'), + '', + '', + ]), + ) const actual = [] const expect = [ @@ -700,8 +823,8 @@ t.test('end while consuming', t => { ] const mp = new Minipass() - const p = new Parse({ - onentry: entry => { + const p = new Parser({ + onReadEntry: entry => { actual.push(entry.path) entry.resume() }, @@ -716,7 +839,7 @@ t.test('end while consuming', t => { }) t.test('bad archives', t => { - const p = new Parse() + const p = new Parser() const warnings = [] p.on('warn', (code, msg, data) => { warnings.push([code, msg, data]) @@ -735,8 +858,8 @@ t.test('bad archives', t => { }) t.test('header that throws', t => { - const p = new Parse() - p.on('warn', (c, m, d) => { + const p = new Parser() + p.on('warn', (_c, m, d) => { t.equal(m, 'invalid base256 encoding') t.match(d, { code: 'TAR_ENTRY_INVALID', @@ -753,14 +876,19 @@ t.test('header that throws', t => { }) h.encode() const buf = h.block - const bad = Buffer.from([0x81, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]) + const bad = Buffer.from([ + 0x81, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + ]) bad.copy(buf, 100) - t.throws(() => new Header(buf), 'the header with that buffer throws') + t.throws( + () => new Header(buf), + 'the header with that buffer throws', + ) p.write(buf) }) t.test('warnings that are not so bad', t => { - const p = new Parse() + const p = new Parser() const warnings = [] p.on('warn', (code, m, d) => { warnings.push([code, m, d]) @@ -768,7 +896,7 @@ 
t.test('warnings that are not so bad', t => { }) // the parser doesn't actually decide what's "ok" or "supported", // it just parses. So we have to set it ourselves like unpack does - p.once('entry', entry => entry.invalid = true) + p.once('entry', entry => (entry.invalid = true)) p.on('entry', entry => entry.resume()) const data = makeTar([ { diff --git a/test/path-reservations.js b/test/path-reservations.js index 9a1d7a77..67f9ab17 100644 --- a/test/path-reservations.js +++ b/test/path-reservations.js @@ -1,19 +1,30 @@ -const t = require('tap') +import t from 'tap' + +import { posix, win32 } from 'node:path' // load up the posix and windows versions of the reserver if (process.platform === 'win32') { process.env.TESTING_TAR_FAKE_PLATFORM = 'posix' } -const { reserve } = t.mock('../lib/path-reservations.js', { - path: require('path').posix, -})() + +const { PathReservations } = await t.mockImport( + '../dist/esm/path-reservations.js', + { + path: posix, + }, +) + delete process.env.TESTING_TAR_FAKE_PLATFORM if (process.platform !== 'win32') { process.env.TESTING_TAR_FAKE_PLATFORM = 'win32' } -const { reserve: winReserve } = t.mock('../lib/path-reservations.js', { - path: require('path').win32, -})() + +const { PathReservations: WinPathReservations } = await t.mockImport( + '../dist/esm/path-reservations.js', + { + path: win32, + }, +) t.test('basic race', t => { // simulate the race conditions we care about @@ -62,11 +73,13 @@ t.test('basic race', t => { t.end() } - t.ok(reserve(['a/b/c/d'], file), 'file starts right away') - t.notOk(reserve(['a/B/c////D', 'a/b/e'], link), 'link waits') - t.notOk(reserve(['a/b/e/f'], dir), 'dir waits') - t.notOk(reserve(['a/b'], dir2), 'dir2 waits') - t.notOk(reserve(['a/b/x'], dir3), 'dir3 waits') + const r = new PathReservations() + + t.ok(r.reserve(['a/b/c/d'], file), 'file starts right away') + t.notOk(r.reserve(['a/B/c////D', 'a/b/e'], link), 'link waits') + t.notOk(r.reserve(['a/b/e/f'], dir), 'dir waits') + t.notOk(r.reserve(['a/b'], dir2), 'dir2 waits') + t.notOk(r.reserve(['a/b/x'], dir3), 'dir3 waits') }) t.test('unicode shenanigans', t => { @@ -89,8 +102,9 @@ t.test('unicode shenanigans', t => { } const cafePath1 = `c/a/f/${e1}` const cafePath2 = `c/a/f/${e2}` - t.ok(reserve([cafePath1], cafe1)) - t.notOk(reserve([cafePath2], cafe2)) + const r = new PathReservations() + t.ok(r.reserve([cafePath1], cafe1)) + t.notOk(r.reserve([cafePath2], cafe2)) }) t.test('absolute paths and trailing slash', t => { @@ -128,14 +142,15 @@ t.test('absolute paths and trailing slash', t => { t.end() } } - t.ok(reserve(['/p/a/t/h'], a1)) - t.notOk(reserve(['/p/a/t/h/'], a2)) - t.ok(reserve(['p/a/t/h'], r1)) - t.notOk(reserve(['p/a/t/h/'], r2)) + const r = new PathReservations() + t.ok(r.reserve(['/p/a/t/h'], a1)) + t.notOk(r.reserve(['/p/a/t/h/'], a2)) + t.ok(r.reserve(['p/a/t/h'], r1)) + t.notOk(r.reserve(['p/a/t/h/'], r2)) }) t.test('on windows, everything collides with everything', t => { - const reserve = winReserve + const r = new WinPathReservations() let called1 = false let called2 = false const f1 = done => { @@ -151,6 +166,6 @@ t.test('on windows, everything collides with everything', t => { done() t.end() } - t.equal(reserve(['some/path'], f1), true) - t.equal(reserve(['other/path'], f2), false) + t.equal(r.reserve(['some/path'], f1), true) + t.equal(r.reserve(['other/path'], f2), false) }) diff --git a/test/pax.js b/test/pax.js index cef9fc51..ea325b37 100644 --- a/test/pax.js +++ b/test/pax.js @@ -1,6 +1,5 @@ -'use strict' -const t = require('tap') -const 
Pax = require('../lib/pax.js') +import t from 'tap' +import { Pax } from '../dist/esm/pax.js' t.test('create a pax', t => { const p = new Pax({ @@ -18,54 +17,52 @@ t.test('create a pax', t => { nlink: 1, }) - // console.log(p.encode().toString('hex').split('').reduce((s,c)=>{if(s[s.length-1].length<64)s[s.length-1]+=c;else s.push(c);return s},[''])) - const buf = Buffer.from( // pax entry header '5061784865616465722f666f6f2e747874000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303036343420003035373736312000303030303234200030303030' + - '3030303330342000323136373231373631302000303136373332200078000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030306973616163730000000000000000000000000000000000' + - '0000000000000000007374616666000000000000000000000000000000000000' + - '0000000000000000003030303030302000303030303030200000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000032313637' + - '3231373631302000323136373231373631302000000000000000000000000000' + - - // entry body - '313620706174683d666f6f2e7478740a3139206374696d653d32393937303432' + - '30300a3139206174696d653d3239393730343230300a323120534348494c592e' + - '6465763d3132333435360a313920534348494c592e696e6f3d373839300a3138' + - '20534348494c592e6e6c696e6b3d310a39206769643d32300a313520676e616d' + - '653d73746166660a3139206d74696d653d3239393730343230300a3132207369' + - '7a653d3130300a3133207569643d32343536310a313620756e616d653d697361' + - '6163730a00000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000', - 'hex') + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303036343420003035373736312000303030303234200030303030' + + '3030303330342000323136373231373631302000303136373332200078000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030306973616163730000000000000000000000000000000000' + + '0000000000000000007374616666000000000000000000000000000000000000' + + '0000000000000000003030303030302000303030303030200000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + 
'0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000032313637' + + '3231373631302000323136373231373631302000000000000000000000000000' + + // entry body + '313620706174683d666f6f2e7478740a3139206374696d653d32393937303432' + + '30300a3139206174696d653d3239393730343230300a323120534348494c592e' + + '6465763d3132333435360a313920534348494c592e696e6f3d373839300a3138' + + '20534348494c592e6e6c696e6b3d310a39206769643d32300a313520676e616d' + + '653d73746166660a3139206d74696d653d3239393730343230300a3132207369' + + '7a653d3130300a3133207569643d32343536310a313620756e616d653d697361' + + '6163730a00000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ) const actual = p.encode() - t.equal(actual.toString('hex'), buf.toString('hex')) + t.match(actual, buf) t.end() }) t.test('null pax', t => { const p = new Pax({}) - t.equal(p.encode(), null) + t.same(p.encode(), Buffer.allocUnsafe(0)) t.end() }) @@ -74,150 +71,162 @@ t.test('tiny pax', t => { // an error? const p = new Pax({ path: 'ab' }, true) const actual = p.encode() - // console.log(actual.toString('hex').split('').reduce((s,c)=>{if(s[s.length-1].length<64)s[s.length-1]+=c;else s.push(c);return s},[''])) - // return Promise.resolve() const buf = Buffer.from( // header '5061784865616465722f61620000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303036343420000000000000000000000000000000000030303030' + - '3030303031332000000000000000000000000000303037303534200067000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030300000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000003030303030302000303030303030200000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - - // body - // note that a 2-char path is 11, but a 1 char path is 9, because - // adding the second char bumps the n to 10, which adds 1, which - // means it has to be 11. - // a 1-char path COULD be encoded as EITHER "10 path=x\n", or as - // "9 path=x\n", and it'd be true either way. 
- '313120706174683d61620a000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000', - 'hex') - - t.equal(actual.toString('hex'), buf.toString('hex')) + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303036343420000000000000000000000000000000000030303030' + + '3030303031332000000000000000000000000000303037303534200067000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030300000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000003030303030302000303030303030200000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + // body + // note that a 2-char path is 11, but a 1 char path is 9, because + // adding the second char bumps the n to 10, which adds 1, which + // means it has to be 11. + // a 1-char path COULD be encoded as EITHER "10 path=x\n", or as + // "9 path=x\n", and it'd be true either way. 
+ '313120706174683d61620a000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ) + + t.same(actual, buf) t.end() }) t.test('parse', t => { - t.same(Pax.parse('11 path=ab\n', { uid: 24561 }, true), { - atime: null, - charset: null, - comment: null, - ctime: null, - gid: null, - gname: null, - linkpath: null, - mtime: null, - path: 'ab', - size: null, - uid: 24561, - uname: null, - dev: null, - ino: null, - nlink: null, - global: true, - }) + const p = Pax.parse('11 path=ab\n', { uid: 24561 }, true) + t.same( + p, + Object.assign(Object.create(Pax.prototype), { + atime: undefined, + mode: undefined, + charset: undefined, + comment: undefined, + ctime: undefined, + gid: undefined, + gname: undefined, + linkpath: undefined, + mtime: undefined, + path: 'ab', + size: undefined, + uid: 24561, + uname: undefined, + dev: undefined, + ino: undefined, + nlink: undefined, + global: true, + }), + ) - t.same(Pax.parse('11 path=ab\n', null, false), { - atime: null, - charset: null, - comment: null, - ctime: null, - gid: null, - gname: null, - linkpath: null, - mtime: null, - path: 'ab', - size: null, - uid: null, - uname: null, - dev: null, - ino: null, - nlink: null, - global: false, - }) + t.same( + Pax.parse('11 path=ab\n'), + Object.assign(Object.create(Pax.prototype), { + atime: undefined, + mtime: undefined, + ctime: undefined, + charset: undefined, + comment: undefined, + gid: undefined, + gname: undefined, + uname: undefined, + linkpath: undefined, + path: 'ab', + size: undefined, + mode: undefined, + uid: undefined, + uname: undefined, + dev: undefined, + ino: undefined, + nlink: undefined, + global: false, + }), + ) - t.same(Pax.parse('9 gid=20\n9 path=x\n', null, false), { - atime: null, - charset: null, - comment: null, - ctime: null, + t.same(Pax.parse('9 gid=20\n9 path=x\n'), { + atime: undefined, + mtime: undefined, + ctime: undefined, + charset: undefined, + comment: undefined, gid: 20, - gname: null, - linkpath: null, - mtime: null, + gname: undefined, + linkpath: undefined, + mtime: undefined, path: 'x', - size: null, - uid: null, - uname: null, - dev: null, - ino: null, - nlink: null, + size: undefined, + uid: undefined, + uname: undefined, + dev: undefined, + ino: undefined, + nlink: undefined, + mode: undefined, global: false, }) - t.same(Pax.parse('9 gid=20\n9 path=x\n', null, false), { - atime: null, - charset: null, - comment: null, - ctime: null, + t.same(Pax.parse('9 gid=20\n9 path=x\n'), { + atime: undefined, + charset: undefined, 
+ comment: undefined, + ctime: undefined, gid: 20, - gname: null, - linkpath: null, - mtime: null, + gname: undefined, + linkpath: undefined, + mtime: undefined, path: 'x', - size: null, - uid: null, - uname: null, - dev: null, - ino: null, - nlink: null, + size: undefined, + uid: undefined, + uname: undefined, + dev: undefined, + ino: undefined, + mode: undefined, + nlink: undefined, global: false, }) - t.same(Pax.parse('20 mtime=1491436800\n', null, false), { - atime: null, - charset: null, - comment: null, - ctime: null, - gid: null, - gname: null, - linkpath: null, + t.same(Pax.parse('20 mtime=1491436800\n'), { + atime: undefined, + charset: undefined, + comment: undefined, + ctime: undefined, + gid: undefined, + gname: undefined, + linkpath: undefined, mtime: new Date('2017-04-06'), - path: null, - size: null, - uid: null, - uname: null, - dev: null, - ino: null, - nlink: null, + path: undefined, + size: undefined, + uid: undefined, + uname: undefined, + dev: undefined, + ino: undefined, + nlink: undefined, + mode: undefined, global: false, }) @@ -230,22 +239,23 @@ t.test('parse', t => { const noKey = '10 =pathx\n' - t.same(Pax.parse(breaky + '9 gid=20\n10 path=x\n' + noKey, null, false), { - atime: null, - charset: null, - comment: null, - ctime: null, + t.same(Pax.parse(breaky + '9 gid=20\n10 path=x\n' + noKey), { + atime: undefined, + charset: undefined, + comment: undefined, + ctime: undefined, gid: 20, - gname: null, - linkpath: null, - mtime: null, + gname: undefined, + linkpath: undefined, + mtime: undefined, path: 'x', - size: null, - uid: null, - uname: null, - dev: null, - ino: null, - nlink: null, + size: undefined, + uid: undefined, + uname: undefined, + dev: undefined, + ino: undefined, + nlink: undefined, + mode: undefined, global: false, }) diff --git a/test/read-entry.js b/test/read-entry.js index 4e12e87a..6d690390 100644 --- a/test/read-entry.js +++ b/test/read-entry.js @@ -1,7 +1,6 @@ -'use strict' -const t = require('tap') -const ReadEntry = require('../lib/read-entry.js') -const Header = require('../lib/header.js') +import t from 'tap' +import { ReadEntry } from '../dist/esm/read-entry.js' +import { Header } from '../dist/esm/header.js' t.test('create read entry', t => { const h = new Header({ @@ -19,7 +18,11 @@ t.test('create read entry', t => { }) h.encode() - const entry = new ReadEntry(h, { x: 'y', path: 'foo.txt' }, { z: 0, a: null, b: undefined }) + const entry = new ReadEntry( + h, + { x: 'y', path: 'foo.txt' }, + { z: 0, a: null, b: undefined }, + ) t.ok(entry.header.cksumValid, 'header checksum should be valid') @@ -67,8 +70,8 @@ t.test('create read entry', t => { let data = '' let ended = false - entry.on('data', c => data += c) - entry.on('end', _ => ended = true) + entry.on('data', c => (data += c)) + entry.on('end', _ => (ended = true)) const body = Buffer.alloc(512) body.write(new Array(101).join('z'), 0) @@ -81,6 +84,85 @@ t.test('create read entry', t => { t.end() }) +t.test('entry with extended linkpath', t => { + const h = new Header({ + path: 'oof.txt', + mode: 0o755, + uid: 24561, + gid: 20, + size: 0, + mtime: new Date('2016-04-01T22:00Z'), + ctime: new Date('2016-04-01T22:00Z'), + atime: new Date('2016-04-01T22:00Z'), + type: 'SymbolicLink', + uname: 'isaacs', + gname: 'staff', + }) + h.encode() + + const entry = new ReadEntry( + h, + { x: 'y', linkpath: 'bar.txt', path: 'foo.txt' }, + { z: 0, a: null, b: undefined }, + ) + + t.ok(entry.header.cksumValid, 'header checksum should be valid') + + t.match(entry, { + extended: { x: 'y', path: 
'foo.txt', linkpath: 'bar.txt' }, + globalExtended: { z: 0, a: null, b: undefined }, + header: { + cksumValid: true, + needPax: false, + path: 'oof.txt', + mode: 0o755, + uid: 24561, + gid: 20, + size: 0, + mtime: new Date('2016-04-01T22:00:00.000Z'), + typeKey: '2', + type: 'SymbolicLink', + linkpath: null, + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + atime: new Date('2016-04-01T22:00:00.000Z'), + ctime: new Date('2016-04-01T22:00:00.000Z'), + }, + blockRemain: 0, + remain: 0, + type: 'SymbolicLink', + meta: false, + ignore: false, + path: 'foo.txt', + mode: 0o755, + uid: 24561, + gid: 20, + uname: 'isaacs', + gname: 'staff', + size: 0, + mtime: new Date('2016-04-01T22:00:00.000Z'), + atime: new Date('2016-04-01T22:00:00.000Z'), + ctime: new Date('2016-04-01T22:00:00.000Z'), + linkpath: 'bar.txt', + x: 'y', + z: 0, + }) + + let data = '' + entry.on('data', c => (data += c)) + + const body = Buffer.alloc(512) + body.write(new Array(101).join('z'), 0) + t.throws(() => entry.write(body)) + entry.end() + + t.equal(data, '') + + t.end() +}) + t.test('meta entry', t => { const h = new Header({ path: 'PaxHeader/foo.txt', @@ -102,11 +184,11 @@ t.test('meta entry', t => { let actual = '' const entry = new ReadEntry(h) - entry.on('data', c => actual += c) + entry.on('data', c => (actual += c)) - entry.write(body.slice(0, 1)) - entry.write(body.slice(1, 25)) - entry.write(body.slice(25)) + entry.write(body.subarray(0, 1)) + entry.write(body.subarray(1, 25)) + entry.write(body.subarray(25)) t.throws(_ => entry.write(Buffer.alloc(1024))) t.equal(actual, expect) @@ -128,6 +210,8 @@ t.test('unknown entry type', t => { gname: 'staff', }) h.encode() + // this triggers its type to be Unsupported, which means that any + // data written to it will be thrown away. 
h.block.write('9', 156, 1, 'ascii') const body = Buffer.alloc(512) @@ -138,12 +222,12 @@ t.test('unknown entry type', t => { const entry = new ReadEntry(new Header(h.block)) - entry.on('data', c => actual += c) + entry.on('data', c => (actual += c)) - entry.write(body.slice(0, 1)) - entry.write(body.slice(1, 25)) - entry.write(body.slice(25)) - t.throws(_ => entry.write(Buffer.alloc(1024))) + entry.write(body.subarray(0, 1)) + entry.write(body.subarray(1, 25)) + entry.write(body.subarray(25)) + t.throws(() => entry.write(Buffer.alloc(1024))) t.equal(actual, expect) t.match(entry, { ignore: true }) @@ -209,8 +293,8 @@ t.test('entry without mode', t => { let data = '' let ended = false - entry.on('data', c => data += c) - entry.on('end', _ => ended = true) + entry.on('data', c => (data += c)) + entry.on('end', _ => (ended = true)) const body = Buffer.alloc(512) body.write(new Array(101).join('z'), 0) diff --git a/test/replace.js b/test/replace.ts similarity index 55% rename from test/replace.js rename to test/replace.ts index 75c97027..1b475aff 100644 --- a/test/replace.js +++ b/test/replace.ts @@ -1,25 +1,32 @@ -'use strict' -const t = require('tap') -const r = require('../lib/replace.js') -const path = require('path') -const fs = require('fs') -const mutateFS = require('mutate-fs') -const list = require('../lib/list.js') -const { resolve } = require('path') - +import t, { Test } from 'tap' +import { replace as r } from '../dist/esm/replace.js' +import path, { dirname, resolve } from 'path' +import fs from 'fs' +//@ts-ignore +import mutateFS from 'mutate-fs' +import { list } from '../dist/esm/list.js' +import { fileURLToPath } from 'url' +import zlib from 'zlib' +import { spawn } from 'child_process' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) const fixtures = path.resolve(__dirname, 'fixtures') const tars = path.resolve(fixtures, 'tars') -const zlib = require('zlib') - -const spawn = require('child_process').spawn const data = fs.readFileSync(tars + '/body-byte-counts.tar') -const dataNoNulls = data.slice(0, data.length - 1024) +const dataNoNulls = data.subarray(0, data.length - 1024) const fixtureDef = { 'body-byte-counts.tar': data, 'no-null-eof.tar': dataNoNulls, - 'truncated-head.tar': Buffer.concat([dataNoNulls, data.slice(0, 500)]), - 'truncated-body.tar': Buffer.concat([dataNoNulls, data.slice(0, 700)]), + 'truncated-head.tar': Buffer.concat([ + dataNoNulls, + data.subarray(0, 500), + ]), + 'truncated-body.tar': Buffer.concat([ + dataNoNulls, + data.subarray(0, 700), + ]), 'zero.tar': Buffer.from(''), 'empty.tar': Buffer.alloc(512), 'compressed.tgz': zlib.gzipSync(data), @@ -27,14 +34,17 @@ const fixtureDef = { } t.test('basic file add to archive (good or truncated)', t => { - const check = (file, t) => { + const check = (file: string, t: Test) => { const c = spawn('tar', ['tf', file], { stdio: [0, 'pipe', 2] }) - const out = [] - c.stdout.on('data', chunk => out.push(chunk)) + const out: Buffer[] = [] + c.stdout?.on('data', (chunk: Buffer) => out.push(chunk)) c.on('close', (code, signal) => { t.equal(code, 0) t.equal(signal, null) - const actual = Buffer.concat(out).toString().trim().split(/\r?\n/) + const actual = Buffer.concat(out) + .toString() + .trim() + .split(/\r?\n/) t.same(actual, [ '1024-bytes.txt', '512-bytes.txt', @@ -46,27 +56,27 @@ t.test('basic file add to archive (good or truncated)', t => { }) } - const files = [ + const files: (keyof typeof fixtureDef)[] = [ 'body-byte-counts.tar', 'no-null-eof.tar', 
'truncated-head.tar', 'truncated-body.tar', ] - const td = files.map(f => [f, fixtureDef[f]]).reduce((s, [k, v]) => { - s[k] = v - return s - }, {}) + const td = Object.fromEntries(files.map(f => [f, fixtureDef[f]])) const fileList = [path.basename(__filename)] t.test('sync', t => { t.plan(files.length) const dir = t.testdir(td) for (const file of files) { t.test(file, t => { - r({ - sync: true, - file: resolve(dir, file), - cwd: __dirname, - }, fileList) + r( + { + sync: true, + file: resolve(dir, file), + cwd: __dirname, + }, + fileList, + ) check(resolve(dir, file), t) }) } @@ -77,15 +87,19 @@ t.test('basic file add to archive (good or truncated)', t => { const dir = t.testdir(td) for (const file of files) { t.test(file, t => { - r({ - file: resolve(dir, file), - cwd: __dirname, - }, fileList, er => { - if (er) { - throw er - } - check(resolve(dir, file), t) - }) + r( + { + file: resolve(dir, file), + cwd: __dirname, + }, + fileList, + er => { + if (er) { + throw er + } + check(resolve(dir, file), t) + }, + ) }) } }) @@ -95,10 +109,13 @@ t.test('basic file add to archive (good or truncated)', t => { const dir = t.testdir(td) for (const file of files) { t.test(file, t => { - r({ - file: resolve(dir, file), - cwd: __dirname, - }, fileList).then(() => { + r( + { + file: resolve(dir, file), + cwd: __dirname, + }, + fileList, + ).then(() => { check(resolve(dir, file), t) }) }) @@ -109,29 +126,22 @@ t.test('basic file add to archive (good or truncated)', t => { }) t.test('add to empty archive', t => { - const check = (file, t) => { + const check = (file: string, t: Test) => { const c = spawn('tar', ['tf', file]) - const out = [] - c.stdout.on('data', chunk => out.push(chunk)) + const out: Buffer[] = [] + c.stdout.on('data', (chunk: Buffer) => out.push(chunk)) c.on('close', (code, signal) => { t.equal(code, 0) t.equal(signal, null) const actual = Buffer.concat(out).toString().trim().split('\n') - t.same(actual, [ - path.basename(__filename), - ]) + t.same(actual, [path.basename(__filename)]) t.end() }) } - const files = [ - 'empty.tar', - 'zero.tar', - ] - const td = files.map(f => [f, fixtureDef[f]]).reduce((s, [k, v]) => { - s[k] = v - return s - }, {}) + const files: (keyof typeof fixtureDef)[] = ['empty.tar', 'zero.tar'] + const td = Object.fromEntries(files.map(f => [f, fixtureDef[f]])) + //@ts-ignore files.push('not-existing.tar') t.test('sync', t => { @@ -139,11 +149,14 @@ t.test('add to empty archive', t => { t.plan(files.length) for (const file of files) { t.test(file, t => { - r({ - sync: true, - file: resolve(dir, file), - cwd: __dirname, - }, [path.basename(__filename)]) + r( + { + sync: true, + file: resolve(dir, file), + cwd: __dirname, + }, + [path.basename(__filename)], + ) check(resolve(dir, file), t) }) } @@ -154,15 +167,19 @@ t.test('add to empty archive', t => { t.plan(files.length) for (const file of files) { t.test(file, t => { - r({ - file: resolve(dir, file), - cwd: __dirname, - }, [path.basename(__filename)], er => { - if (er) { - throw er - } - check(resolve(dir, file), t) - }) + r( + { + file: resolve(dir, file), + cwd: __dirname, + }, + [path.basename(__filename)], + er => { + if (er) { + throw er + } + check(resolve(dir, file), t) + }, + ) }) } }) @@ -172,10 +189,13 @@ t.test('add to empty archive', t => { t.plan(files.length) for (const file of files) { t.test(file, t => { - r({ - file: resolve(dir, file), - cwd: __dirname, - }, [path.basename(__filename)]).then(() => { + r( + { + file: resolve(dir, file), + cwd: __dirname, + }, + [path.basename(__filename)], + 
).then(() => { check(resolve(dir, file), t) }) }) @@ -192,24 +212,44 @@ t.test('cannot append to gzipped archives', async t => { const file = resolve(dir, 'compressed.tgz') const expect = new Error('cannot append to compressed archives') - const expectT = new TypeError('cannot append to compressed archives') - - t.throws(_ => r({ - file, - cwd: __dirname, - gzip: true, - }, [path.basename(__filename)]), expectT) - - t.throws(_ => r({ - file, - cwd: __dirname, - sync: true, - }, [path.basename(__filename)]), expect) - - return r({ - file, - cwd: __dirname, - }, [path.basename(__filename)], er => t.match(er, expect)) + const expectT = new TypeError( + 'cannot append to compressed archives', + ) + + t.throws( + () => + r( + { + file, + cwd: __dirname, + gzip: true, + }, + [path.basename(__filename)], + ), + expectT, + ) + + t.throws( + () => + r( + { + file, + cwd: __dirname, + sync: true, + }, + [path.basename(__filename)], + ), + expect, + ) + + return r( + { + file, + cwd: __dirname, + }, + [path.basename(__filename)], + er => t.match(er, expect), + ) }) t.test('cannot append to brotli compressed archives', async t => { @@ -219,27 +259,45 @@ t.test('cannot append to brotli compressed archives', async t => { const file = resolve(dir, 'compressed.tbr') const expect = new Error('cannot append to compressed archives') - const expectT = new TypeError('cannot append to compressed archives') - - t.throws(_ => r({ - file, - cwd: __dirname, - brotli: true, - }, [path.basename(__filename)]), expectT) - - t.throws(_ => r({ - file, - cwd: __dirname, - sync: true, - }, [path.basename(__filename)]), expect) + const expectT = new TypeError( + 'cannot append to compressed archives', + ) + + t.throws( + () => + r( + { + file, + cwd: __dirname, + brotli: true, + }, + [path.basename(__filename)], + ), + expectT, + ) + + t.throws( + () => + r( + { + file, + cwd: __dirname, + sync: true, + }, + [path.basename(__filename)], + ), + expect, + ) t.end() }) t.test('other throws', t => { - t.throws(_ => r({}, ['asdf']), new TypeError('file is required')) - t.throws(_ => r({ file: 'asdf' }, []), - new TypeError('no files or directories specified')) + t.throws(() => r({}, ['asdf']), new TypeError('file is required')) + t.throws( + () => r({ file: 'asdf' }, []), + new TypeError('no paths specified to add/replace'), + ) t.end() }) @@ -250,7 +308,7 @@ t.test('broken open', t => { const file = resolve(dir, 'body-byte-counts.tar') const poop = new Error('poop') t.teardown(mutateFS.fail('open', poop)) - t.throws(_ => r({ sync: true, file }, ['README.md']), poop) + t.throws(() => r({ sync: true, file }, ['README.md']), poop) r({ file }, ['README.md'], er => { t.match(er, poop) t.end() @@ -266,7 +324,7 @@ t.test('broken fstat', t => { const dir = t.testdir(td) const file = resolve(dir, 'body-byte-counts.tar') t.teardown(mutateFS.fail('fstat', poop)) - t.throws(_ => r({ sync: true, file }, ['README.md']), poop) + t.throws(() => r({ sync: true, file }, ['README.md']), poop) t.end() }) t.test('async', t => { @@ -288,7 +346,7 @@ t.test('broken read', t => { const file = resolve(dir, 'body-byte-counts.tar') const poop = new Error('poop') t.teardown(mutateFS.fail('read', poop)) - t.throws(_ => r({ sync: true, file }, ['README.md']), poop) + t.throws(() => r({ sync: true, file }, ['README.md']), poop) r({ file }, ['README.md'], er => { t.match(er, poop) t.end() @@ -300,16 +358,19 @@ t.test('mtime cache', async t => { 'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'], } - let mtimeCache + let mtimeCache: Map - const check = 
(file, t) => { + const check = (file: string, t: Test) => { const c = spawn('tar', ['tf', file]) - const out = [] + const out: Buffer[] = [] c.stdout.on('data', chunk => out.push(chunk)) c.on('close', (code, signal) => { t.equal(code, 0) t.equal(signal, null) - const actual = Buffer.concat(out).toString().trim().split(/\r?\n/) + const actual = Buffer.concat(out) + .toString() + .trim() + .split(/\r?\n/) t.same(actual, [ '1024-bytes.txt', '512-bytes.txt', @@ -317,8 +378,10 @@ t.test('mtime cache', async t => { 'zero-byte.txt', path.basename(__filename), ]) - const mtc = {} - mtimeCache.forEach((v, k) => mtc[k] = mtimeCache.get(k).toISOString()) + const mtc: Record = {} + mtimeCache.forEach( + (_v, k) => (mtc[k] = mtimeCache.get(k)!.toISOString()), + ) t.same(mtc, { '1024-bytes.txt': '2017-04-10T16:57:47.000Z', '512-bytes.txt': '2017-04-10T17:08:55.000Z', @@ -332,38 +395,48 @@ t.test('mtime cache', async t => { t.test('sync', t => { const dir = t.testdir(td) const file = resolve(dir, 'body-byte-counts.tar') - r({ - sync: true, - file, - cwd: __dirname, - mtimeCache: mtimeCache = new Map(), - }, [path.basename(__filename)]) + r( + { + sync: true, + file, + cwd: __dirname, + mtimeCache: (mtimeCache = new Map()), + }, + [path.basename(__filename)], + ) check(file, t) }) t.test('async cb', t => { const dir = t.testdir(td) const file = resolve(dir, 'body-byte-counts.tar') - r({ - file, - cwd: __dirname, - mtimeCache: mtimeCache = new Map(), - }, [path.basename(__filename)], er => { - if (er) { - throw er - } - check(file, t) - }) + r( + { + file, + cwd: __dirname, + mtimeCache: (mtimeCache = new Map()), + }, + [path.basename(__filename)], + er => { + if (er) { + throw er + } + check(file, t) + }, + ) }) t.test('async promise', t => { const dir = t.testdir(td) const file = resolve(dir, 'body-byte-counts.tar') - r({ - file, - cwd: __dirname, - mtimeCache: mtimeCache = new Map(), - }, [path.basename(__filename)]).then(_ => check(file, t)) + r( + { + file, + cwd: __dirname, + mtimeCache: (mtimeCache = new Map()), + }, + [path.basename(__filename)], + ).then(_ => check(file, t)) }) t.end() @@ -374,7 +447,7 @@ t.test('create tarball out of another tarball', t => { 'out.tar': fs.readFileSync(path.resolve(tars, 'dir.tar')), } - const check = (out, t) => { + const check = (out: string, t: Test) => { const expect = [ 'dir/', 'Ω.txt', @@ -384,9 +457,10 @@ t.test('create tarball out of another tarball', t => { list({ f: out, sync: true, - onentry: entry => { + onReadEntry: entry => { t.equal(entry.path, expect.shift()) - } }) + }, + }) t.same(expect, []) t.end() } @@ -394,35 +468,45 @@ t.test('create tarball out of another tarball', t => { t.test('sync', t => { const dir = t.testdir(td) const out = resolve(dir, 'out.tar') - r({ - f: out, - cwd: tars, - sync: true, - }, ['@utf8.tar']) + r( + { + f: out, + cwd: tars, + sync: true, + }, + ['@utf8.tar'], + ) check(out, t) }) t.test('async cb', t => { const dir = t.testdir(td) const out = resolve(dir, 'out.tar') - r({ - f: out, - cwd: tars, - }, ['@utf8.tar'], er => { - if (er) { - throw er - } - check(out, t) - }) + r( + { + f: out, + cwd: tars, + }, + ['@utf8.tar'], + er => { + if (er) { + throw er + } + check(out, t) + }, + ) }) t.test('async', t => { const dir = t.testdir(td) const out = resolve(dir, 'out.tar') - r({ - f: out, - cwd: tars, - }, ['@utf8.tar']).then(() => check(out, t)) + r( + { + f: out, + cwd: tars, + }, + ['@utf8.tar'], + ).then(() => check(out, t)) }) t.end() diff --git a/test/strip-absolute-path.js b/test/strip-absolute-path.js index 
3e871a9f..59529d6f 100644 --- a/test/strip-absolute-path.js +++ b/test/strip-absolute-path.js @@ -1,5 +1,7 @@ -const t = require('tap') -const stripAbsolutePath = require('../lib/strip-absolute-path.js') +import t from 'tap' +import { stripAbsolutePath } from '../dist/esm/strip-absolute-path.js' +import realPath from 'node:path' + const cwd = process.cwd() t.test('basic', t => { @@ -9,34 +11,46 @@ t.test('basic', t => { 'c:///a/b/c': ['c:///', 'a/b/c'], '\\\\foo\\bar\\baz': ['\\\\foo\\bar\\', 'baz'], '//foo//bar//baz': ['//', 'foo//bar//baz'], - 'c:\\c:\\c:\\c:\\\\d:\\e/f/g': ['c:\\c:\\c:\\c:\\\\d:\\', 'e/f/g'], + 'c:\\c:\\c:\\c:\\\\d:\\e/f/g': [ + 'c:\\c:\\c:\\c:\\\\d:\\', + 'e/f/g', + ], } for (const [input, [root, stripped]] of Object.entries(cases)) { - t.strictSame(stripAbsolutePath(input, cwd), [root, stripped], input) + t.strictSame( + stripAbsolutePath(input, cwd), + [root, stripped], + input, + ) } t.end() }) -t.test('drive-local paths', t => { +t.test('drive-local paths', async t => { const env = process.env - t.teardown(() => process.env = env) + t.teardown(() => (process.env = env)) const cwd = 'D:\\safety\\land' - const realPath = require('path') // be windowsy const path = { ...realPath.win32, win32: realPath.win32, posix: realPath.posix, } - const stripAbsolutePath = t.mock('../lib/strip-absolute-path.js', { path }) + const { stripAbsolutePath } = await t.mockImport( + '../dist/esm/strip-absolute-path.js', + { path }, + ) const cases = { '/': ['/', ''], '////': ['////', ''], 'c:///a/b/c': ['c:///', 'a/b/c'], '\\\\foo\\bar\\baz': ['\\\\foo\\bar\\', 'baz'], '//foo//bar//baz': ['//', 'foo//bar//baz'], - 'c:\\c:\\c:\\c:\\\\d:\\e/f/g': ['c:\\c:\\c:\\c:\\\\d:\\', 'e/f/g'], + 'c:\\c:\\c:\\c:\\\\d:\\e/f/g': [ + 'c:\\c:\\c:\\c:\\\\d:\\', + 'e/f/g', + ], 'c:..\\system\\explorer.exe': ['c:', '..\\system\\explorer.exe'], 'd:..\\..\\unsafe\\land': ['d:', '..\\..\\unsafe\\land'], 'c:foo': ['c:', 'foo'], @@ -45,7 +59,13 @@ t.test('drive-local paths', t => { '\\\\?\\X:\\y\\z': ['\\\\?\\X:\\', 'y\\z'], } for (const [input, [root, stripped]] of Object.entries(cases)) { - if (!t.strictSame(stripAbsolutePath(input, cwd), [root, stripped], input)) { + if ( + !t.strictSame( + stripAbsolutePath(input, cwd), + [root, stripped], + input, + ) + ) { break } } diff --git a/test/strip-trailing-slashes.js b/test/strip-trailing-slashes.js index ce0695f8..97f8a16f 100644 --- a/test/strip-trailing-slashes.js +++ b/test/strip-trailing-slashes.js @@ -1,8 +1,8 @@ -const t = require('tap') -const stripSlash = require('../lib/strip-trailing-slashes.js') +import t from 'tap' +import { stripTrailingSlashes } from '../dist/esm/strip-trailing-slashes.js' const short = '///a///b///c///' const long = short.repeat(10) + '/'.repeat(1000000) -t.equal(stripSlash('no slash'), 'no slash') -t.equal(stripSlash(short), '///a///b///c') -t.equal(stripSlash(long), short.repeat(9) + '///a///b///c') +t.equal(stripTrailingSlashes('no slash'), 'no slash') +t.equal(stripTrailingSlashes(short), '///a///b///c') +t.equal(stripTrailingSlashes(long), short.repeat(9) + '///a///b///c') diff --git a/test/symlink-error.js b/test/symlink-error.js new file mode 100644 index 00000000..92a71bd3 --- /dev/null +++ b/test/symlink-error.js @@ -0,0 +1,11 @@ +import t from 'tap' +import { SymlinkError } from '../dist/esm/symlink-error.js' + +t.match(new SymlinkError('symlink', 'path'), { + name: 'SymlinkError', + path: 'path', + symlink: 'symlink', + syscall: 'symlink', + code: 'TAR_SYMLINK_ERROR', + message: 'TAR_SYMLINK_ERROR: Cannot extract through 
symbolic link', +}) diff --git a/test/types.js b/test/types.js index c2ca5f67..93425d39 100644 --- a/test/types.js +++ b/test/types.js @@ -1,6 +1,8 @@ -'use strict' -// not much to test here, just 2 maps. -const t = require('tap') -const types = require('../lib/types.js') +import t from 'tap' +import * as types from '../dist/esm/types.js' t.equal(types.name.get('0'), 'File') t.equal(types.code.get('File'), '0') +t.equal(types.isCode('0'), true) +t.equal(types.isCode('Z'), false) +t.equal(types.isName('TapeVolumeHeader'), true) +t.equal(types.isName('Unsupported'), false) diff --git a/test/unpack.js b/test/unpack.js index 2f1d3026..ed575deb 100644 --- a/test/unpack.js +++ b/test/unpack.js @@ -1,47 +1,39 @@ -'use strict' - -process.umask(0o022) - -const Unpack = require('../lib/unpack.js') -const UnpackSync = Unpack.Sync -const t = require('tap') -const { Minipass } = require('minipass') - -const makeTar = require('./make-tar.js') -const Header = require('../lib/header.js') -const z = require('minizlib') -const fs = require('fs') -const path = require('path') +import { Unpack, UnpackSync } from '../dist/esm/unpack.js' + +import fs, { readdirSync } from 'fs' +import { Minipass } from 'minipass' +import * as z from 'minizlib' +import path from 'path' +import { rimraf } from 'rimraf' +import t from 'tap' +import { fileURLToPath } from 'url' +import { Header } from '../dist/esm/header.js' +import { makeTar } from './fixtures/make-tar.js' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) const fixtures = path.resolve(__dirname, 'fixtures') const tars = path.resolve(fixtures, 'tars') const parses = path.resolve(fixtures, 'parse') -const unpackdir = path.resolve(fixtures, 'unpack') -const { promisify } = require('util') -const rimraf = promisify(require('rimraf')) -const mkdirp = require('mkdirp') -const mutateFS = require('mutate-fs') -const eos = require('end-of-stream') -const normPath = require('../lib/normalize-windows-path.js') -const ReadEntry = require('../lib/read-entry.js') + +import eos from 'end-of-stream' +import { mkdirp } from 'mkdirp' +import mutateFS from 'mutate-fs' +import { normalizeWindowsPath as normPath } from '../dist/esm/normalize-windows-path.js' + +import { ReadEntry } from '../dist/esm/read-entry.js' // On Windows in particular, the "really deep folder path" file // often tends to cause problems, which don't indicate a failure // of this library, it's just what happens on Windows with super // long file paths. 
const isWindows = process.platform === 'win32' -const isLongFile = f => f.match(/r.e.a.l.l.y.-.d.e.e.p.-.f.o.l.d.e.r.-.p.a.t.h/) - -t.teardown(_ => rimraf(unpackdir)) +const isLongFile = f => + f.match(/r.e.a.l.l.y.-.d.e.e.p.-.f.o.l.d.e.r.-.p.a.t.h/) -t.before(async () => { - await rimraf(unpackdir) - await mkdirp(unpackdir) -}) +t.capture(process, 'umask', () => 0o22) t.test('basic file unpack tests', t => { - const basedir = path.resolve(unpackdir, 'basic') - t.teardown(_ => rimraf(basedir)) - const cases = { 'emptypax.tar': { '🌟.txt': '🌟✧✩⭐︎✪✫✬✭✮⚝✯✰✵✶✷✸✹❂⭑⭒★☆✡☪✴︎✦✡️🔯✴️🌠\n', @@ -56,7 +48,8 @@ t.test('basic file unpack tests', t => { 'utf8.tar': { '🌟.txt': '🌟✧✩⭐︎✪✫✬✭✮⚝✯✰✵✶✷✸✹❂⭑⭒★☆✡☪✴︎✦✡️🔯✴️🌠\n', 'Ω.txt': 'Ω', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': 'Ω', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': + 'Ω', }, 'file.tar': { 'one-byte.txt': 'a', @@ -65,17 +58,26 @@ t.test('basic file unpack tests', t => { 'one-byte.txt': 'a', }, 'long-pax.tar': { - '120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', }, 'long-paths.tar': { - '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - '120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - '170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt': 'short\n', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': 'Ω', + '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 
'120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt': + 'short\n', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': + 'Ω', }, } @@ -86,13 +88,11 @@ t.test('basic file unpack tests', t => { tarfiles.forEach(tarfile => { t.test(tarfile, t => { const tf = path.resolve(tars, tarfile) - const dir = path.resolve(basedir, tarfile) - const linkdir = path.resolve(basedir, tarfile + '.link') + const dir = t.testdir({}) + const linkdir = dir + '.link' t.beforeEach(async () => { - await rimraf(dir) await rimraf(linkdir) - await mkdirp(dir) - fs.symlinkSync(dir, linkdir, 'junction') + fs.symlinkSync(dir, linkdir) }) const check = t => { @@ -114,12 +114,12 @@ t.test('basic file unpack tests', t => { t.test('strict', t => { const unpack = new Unpack({ cwd: linkdir, strict: true }) fs.createReadStream(tf).pipe(unpack) - eos(unpack, _ => check(t)) + eos(unpack, () => check(t)) }) t.test('loose', t => { const unpack = new Unpack({ cwd: linkdir }) fs.createReadStream(tf).pipe(unpack) - eos(unpack, _ => check(t)) + eos(unpack, () => check(t)) }) }) @@ -150,7 +150,7 @@ t.test('cwd default to process cwd', t => { }) t.test('links!', t => { - const dir = path.resolve(unpackdir, 'links') + const dir = t.testdir({}) const data = fs.readFileSync(tars + '/links.tar') const stripData = fs.readFileSync(tars + '/links-strip.tar') @@ -199,9 +199,11 @@ t.test('links!', t => { t.test('async', t => { const unpack = new Unpack({ cwd: dir }) let finished = false - unpack.on('finish', _ => finished = true) - unpack.on('close', _ => t.ok(finished, 'emitted finish before close')) - unpack.on('close', _ => check(t)) + unpack.on('finish', () => (finished = true)) + unpack.on('close', () => + t.ok(finished, 'emitted finish before close'), + ) + unpack.on('close', () => check(t)) unpack.end(data) }) @@ -220,9 +222,11 @@ t.test('links!', t => { t.test('async strip', t => { const unpack = new Unpack({ cwd: dir, strip: 1 }) let finished = false - unpack.on('finish', _ => finished = true) - unpack.on('close', _ => t.ok(finished, 'emitted finish before close')) - unpack.on('close', _ => checkForStrip(t)) + unpack.on('finish', 
() => (finished = true)) + unpack.on('close', () => + t.ok(finished, 'emitted finish before close'), + ) + unpack.on('close', () => checkForStrip(t)) unpack.end(stripData) }) @@ -235,20 +239,20 @@ t.test('links!', t => { t.test('async strip 3', t => { const unpack = new Unpack({ cwd: dir, strip: 3 }) let finished = false - unpack.on('finish', _ => finished = true) - unpack.on('close', _ => t.ok(finished, 'emitted finish before close')) - unpack.on('close', _ => checkForStrip3(t)) + unpack.on('finish', () => (finished = true)) + unpack.on('close', () => + t.ok(finished, 'emitted finish before close'), + ) + unpack.on('close', () => checkForStrip3(t)) unpack.end(stripData) }) }) t.test('links without cleanup (exercise clobbering code)', t => { - const dir = path.resolve(unpackdir, 'links') + const dir = t.testdir({}) const data = fs.readFileSync(tars + '/links.tar') t.plan(6) - mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) t.beforeEach(() => { // clobber this junk @@ -277,10 +281,11 @@ t.test('links without cleanup (exercise clobbering code)', t => { t.test('async', t => { const unpack = new Unpack({ cwd: dir }) let prefinished = false - unpack.on('prefinish', _ => prefinished = true) - unpack.on('finish', _ => - t.ok(prefinished, 'emitted prefinish before finish')) - unpack.on('close', _ => check(t)) + unpack.on('prefinish', () => (prefinished = true)) + unpack.on('finish', () => + t.ok(prefinished, 'emitted prefinish before finish'), + ) + unpack.on('close', () => check(t)) unpack.end(data) }) @@ -292,7 +297,7 @@ t.test('links without cleanup (exercise clobbering code)', t => { t.test('async again', t => { const unpack = new Unpack({ cwd: dir }) - eos(unpack, _ => check(t)) + eos(unpack, () => check(t)) unpack.end(data) }) @@ -304,7 +309,7 @@ t.test('links without cleanup (exercise clobbering code)', t => { t.test('async unlink', t => { const unpack = new Unpack({ cwd: dir, unlink: true }) - unpack.on('close', _ => check(t)) + unpack.on('close', () => check(t)) unpack.end(data) }) @@ -316,14 +321,16 @@ t.test('links without cleanup (exercise clobbering code)', t => { }) t.test('nested dir dupe', t => { - const dir = path.resolve(unpackdir, 'nested-dir') + const dir = t.testdir({}) mkdirp.sync(dir + '/d/e/e/p') - t.teardown(_ => rimraf(dir)) const expect = { 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt': 'short\n', - 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 
'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': 'Ω', } @@ -343,278 +350,365 @@ t.test('nested dir dupe', t => { // while we're at it, why not use gzip too? const zip = new z.Gzip() zip.pipe(unpack) - unpack.on('close', _ => check(t)) + unpack.on('close', () => check(t)) zip.end(data) }) -t.test('symlink in dir path', { - skip: isWindows && 'symlinks not fully supported', -}, t => { - const dir = path.resolve(unpackdir, 'symlink-junk') - - t.teardown(_ => rimraf(dir)) - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - - const data = makeTar([ - { - path: 'd/i', - type: 'Directory', - }, - { - path: 'd/i/r/dir', - type: 'Directory', - mode: 0o751, - mtime: new Date('2011-03-27T22:16:31.000Z'), - }, - { - path: 'd/i/r/file', - type: 'File', - size: 1, - atime: new Date('1979-07-01T19:10:00.000Z'), - ctime: new Date('2011-03-27T22:16:31.000Z'), - }, - 'a', - { - path: 'd/i/r/link', - type: 'Link', - linkpath: 'd/i/r/file', - atime: new Date('1979-07-01T19:10:00.000Z'), - ctime: new Date('2011-03-27T22:16:31.000Z'), - mtime: new Date('2011-03-27T22:16:31.000Z'), - }, - { - path: 'd/i/r/symlink', - type: 'SymbolicLink', - linkpath: './dir', - atime: new Date('1979-07-01T19:10:00.000Z'), - ctime: new Date('2011-03-27T22:16:31.000Z'), - mtime: new Date('2011-03-27T22:16:31.000Z'), - }, - { - path: 'd/i/r/symlink/x', - type: 'File', - size: 0, - atime: new Date('1979-07-01T19:10:00.000Z'), - ctime: new Date('2011-03-27T22:16:31.000Z'), - mtime: new Date('2011-03-27T22:16:31.000Z'), - }, - '', - '', - ]) +t.test( + 'symlink in dir path', + { + skip: isWindows && 'symlinks not fully supported', + }, + t => { + const data = makeTar([ + { + path: 'd/i', + type: 'Directory', + }, + { + path: 'd/i/r/dir', + type: 'Directory', + mode: 0o751, + mtime: new Date('2011-03-27T22:16:31.000Z'), + }, + { + path: 'd/i/r/file', + type: 'File', + size: 1, + atime: new Date('1979-07-01T19:10:00.000Z'), + ctime: new Date('2011-03-27T22:16:31.000Z'), + }, + 'a', + { + path: 'd/i/r/link', + type: 'Link', + linkpath: 'd/i/r/file', + atime: new Date('1979-07-01T19:10:00.000Z'), + ctime: new Date('2011-03-27T22:16:31.000Z'), + mtime: new Date('2011-03-27T22:16:31.000Z'), + }, + { + path: 'd/i/r/symlink', + type: 'SymbolicLink', + linkpath: './dir', + atime: new Date('1979-07-01T19:10:00.000Z'), + ctime: new Date('2011-03-27T22:16:31.000Z'), + mtime: new Date('2011-03-27T22:16:31.000Z'), + }, + { + path: 'd/i/r/symlink/x', + type: 'File', + size: 0, + atime: new Date('1979-07-01T19:10:00.000Z'), + ctime: new Date('2011-03-27T22:16:31.000Z'), + mtime: new Date('2011-03-27T22:16:31.000Z'), + }, + '', + '', + ]) - t.test('no clobbering', t => { - const warnings = [] - const u = new Unpack({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), + t.test('no clobbering', t => { + const warnings = [] + const cwd = t.testdir({}) + const u = new Unpack({ + cwd, + onwarn: (c, w, d) => warnings.push([c, w, d]), + }) + u.on('close', () => { + 
t.equal( + fs.lstatSync(cwd + '/d/i').mode & 0o7777, + isWindows ? 0o666 : 0o755, + ) + t.equal( + fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, + isWindows ? 0o666 : 0o751, + ) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') + if (!isWindows) { + t.ok( + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), + 'got symlink', + ) + t.throws(() => fs.statSync(cwd + '/d/i/r/symlink/x')) + } + t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') + if (!isWindows) { + t.equal( + warnings[0][1], + 'TAR_SYMLINK_ERROR: Cannot extract through symbolic link', + ) + t.match(warnings[0][2], { + name: 'SymlinkError', + code: 'TAR_SYMLINK_ERROR', + tarCode: 'TAR_ENTRY_ERROR', + path: cwd + '/d/i/r/symlink/', + symlink: cwd + '/d/i/r/symlink', + }) + } + t.equal(warnings.length, 1) + t.end() + }) + u.end(data) }) - u.on('close', _ => { - t.equal(fs.lstatSync(dir + '/d/i').mode & 0o7777, isWindows ? 0o666 : 0o755) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, isWindows ? 0o666 : 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - if (!isWindows) { - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) - } - t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') + + t.test('no clobbering, sync', t => { + const warnings = [] + const cwd = t.testdir({}) + const u = new UnpackSync({ + cwd, + onwarn: (c, w, d) => warnings.push([c, w, d]), + }) + u.end(data) + t.equal( + fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, + isWindows ? 0o666 : 0o751, + ) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') if (!isWindows) { - t.equal(warnings[0][1], 'Cannot extract through symbolic link') - t.match(warnings[0][2], { - name: 'SylinkError', - path: dir + '/d/i/r/symlink/', - symlink: dir + '/d/i/r/symlink', - }) + t.ok( + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), + 'got symlink', + ) + t.throws(() => fs.statSync(cwd + '/d/i/r/symlink/x')) } t.equal(warnings.length, 1) + t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') + t.equal( + warnings[0][1], + 'TAR_SYMLINK_ERROR: Cannot extract through symbolic link', + ) + t.match(warnings[0][2], { + name: 'SymlinkError', + path: cwd + '/d/i/r/symlink/', + symlink: cwd + '/d/i/r/symlink', + }) t.end() }) - u.end(data) - }) - t.test('no clobbering, sync', t => { - const warnings = [] - const u = new UnpackSync({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), - }) - u.end(data) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, isWindows ? 
0o666 : 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - if (!isWindows) { - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) - } - t.equal(warnings.length, 1) - t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') - t.equal(warnings[0][1], 'Cannot extract through symbolic link') - t.match(warnings[0][2], { - name: 'SylinkError', - path: dir + '/d/i/r/symlink/', - symlink: dir + '/d/i/r/symlink', + t.test('extract through symlink', t => { + const warnings = [] + const cwd = t.testdir({}) + const u = new Unpack({ + cwd, + onwarn: (c, w, d) => warnings.push([c, w, d]), + preservePaths: true, + }) + u.on('close', () => { + t.same(warnings, []) + t.equal(fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') + t.ok( + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), + 'got symlink', + ) + t.ok( + fs.lstatSync(cwd + '/d/i/r/dir/x').isFile(), + 'x thru link', + ) + t.ok( + fs.lstatSync(cwd + '/d/i/r/symlink/x').isFile(), + 'x thru link', + ) + t.end() + }) + u.end(data) }) - t.end() - }) - t.test('extract through symlink', t => { - const warnings = [] - const u = new Unpack({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), - preservePaths: true, - }) - u.on('close', _ => { + t.test('extract through symlink sync', t => { + const warnings = [] + const cwd = t.testdir({}) + const u = new UnpackSync({ + cwd, + onwarn: (c, w, d) => warnings.push([c, w, d]), + preservePaths: true, + }) + u.end(data) t.same(warnings, []) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') - t.ok(fs.lstatSync(dir + '/d/i/r/dir/x').isFile(), 'x thru link') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), 'x thru link') + t.equal(fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') + t.ok( + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), + 'got symlink', + ) + t.ok(fs.lstatSync(cwd + '/d/i/r/dir/x').isFile(), 'x thru link') + t.ok( + fs.lstatSync(cwd + '/d/i/r/symlink/x').isFile(), + 'x thru link', + ) t.end() }) - u.end(data) - }) - t.test('extract through symlink sync', t => { - const warnings = [] - const u = new UnpackSync({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), - preservePaths: true, + t.test('clobber through symlink', t => { + const warnings = [] + const cwd = t.testdir({}) + const u = new Unpack({ + cwd, + onwarn: (c, w, d) => warnings.push([c, w, d]), + unlink: true, + }) + u.on('close', () => { + t.same(warnings, []) + t.equal(fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') + t.notOk( + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), + 'no link', + ) + t.ok( + fs.lstatSync(cwd + '/d/i/r/symlink').isDirectory(), + 'sym is dir', + ) + t.ok( + fs.lstatSync(cwd + '/d/i/r/symlink/x').isFile(), + 'x thru link', + ) + t.end() + }) + u.end(data) }) - u.end(data) - t.same(warnings, []) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') - t.ok(fs.lstatSync(dir + '/d/i/r/dir/x').isFile(), 'x thru link') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), 'x thru link') 
- t.end() - }) - t.test('clobber through symlink', t => { - const warnings = [] - const u = new Unpack({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), - unlink: true, - }) - u.on('close', _ => { - t.same(warnings, []) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.notOk(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'no link') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isDirectory(), 'sym is dir') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), 'x thru link') - t.end() + t.test('clobber through symlink with busted unlink', t => { + const poop = new Error('poop') + // for some reason, resetting fs.unlink in the teardown was breaking + const reset = mutateFS.fail('unlink', poop) + const cwd = t.testdir({}) + const warnings = [] + const u = new Unpack({ + cwd, + onwarn: (c, w, d) => warnings.push([c, w, d]), + unlink: true, + }) + u.on('close', () => { + t.same(warnings, [['TAR_ENTRY_ERROR', 'poop', poop]]) + reset() + t.end() + }) + u.end(data) }) - u.end(data) - }) - t.test('clobber through symlink with busted unlink', t => { - const poop = new Error('poop') - // for some reason, resetting fs.unlink in the teardown was breaking - const reset = mutateFS.fail('unlink', poop) - const warnings = [] - const u = new Unpack({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), - unlink: true, - }) - u.on('close', _ => { - t.same(warnings, [['TAR_ENTRY_ERROR', 'poop', poop]]) - reset() + t.test('clobber through symlink sync', t => { + const warnings = [] + const cwd = t.testdir({}) + const u = new UnpackSync({ + cwd, + onwarn: (c, w, d) => warnings.push([c, w, d]), + unlink: true, + }) + u.end(data) + t.equal(fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') + t.notOk( + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), + 'no link', + ) + t.ok( + fs.lstatSync(cwd + '/d/i/r/symlink').isDirectory(), + 'sym is dir', + ) + t.ok( + fs.lstatSync(cwd + '/d/i/r/symlink/x').isFile(), + 'x thru link', + ) t.end() }) - u.end(data) - }) - t.test('clobber through symlink sync', t => { - const warnings = [] - const u = new UnpackSync({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), - unlink: true, + t.test('clobber dirs', t => { + const cwd = t.testdir({ + d: { + i: { + r: { + dir: {}, + file: {}, + link: {}, + symlink: {}, + }, + }, + }, + }) + const warnings = [] + const u = new Unpack({ + cwd, + onwarn: (c, w, d) => { + warnings.push([c, w, d]) + }, + chmod: true, + }) + u.on('close', () => { + t.equal(fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') + t.ok( + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), + 'got symlink', + ) + t.throws(() => fs.statSync(cwd + '/d/i/r/symlink/x')) + t.equal(warnings.length, 1) + t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') + t.equal( + warnings[0][1], + 'TAR_SYMLINK_ERROR: Cannot extract through symbolic link', + ) + t.match(warnings[0][2], { + name: 'SymlinkError', + path: cwd + '/d/i/r/symlink/', + symlink: cwd + '/d/i/r/symlink', + }) + t.end() + }) + u.end(data) }) - u.end(data) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.notOk(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'no link') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isDirectory(), 'sym is dir') - t.ok(fs.lstatSync(dir + 
'/d/i/r/symlink/x').isFile(), 'x thru link') - t.end() - }) - t.test('clobber dirs', t => { - mkdirp.sync(dir + '/d/i/r/dir') - mkdirp.sync(dir + '/d/i/r/file') - mkdirp.sync(dir + '/d/i/r/link') - mkdirp.sync(dir + '/d/i/r/symlink') - const warnings = [] - const u = new Unpack({ - cwd: dir, - onwarn: (c, w, d) => { - warnings.push([c, w, d]) - }, - }) - u.on('close', _ => { - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + t.test('clobber dirs sync', t => { + const cwd = t.testdir({ + d: { + i: { + r: { + dir: {}, + file: {}, + link: {}, + symlink: {}, + }, + }, + }, + }) + const warnings = [] + const u = new UnpackSync({ + cwd, + onwarn: (c, w, d) => { + warnings.push([c, w, d]) + }, + chmod: true, + processUmask: 0o22, + }) + u.end(data) + t.equal(fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') + t.ok( + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), + 'got symlink', + ) + t.throws(() => fs.statSync(cwd + '/d/i/r/symlink/x')) t.equal(warnings.length, 1) t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') - t.equal(warnings[0][1], 'Cannot extract through symbolic link') + t.equal( + warnings[0][1], + 'TAR_SYMLINK_ERROR: Cannot extract through symbolic link', + ) t.match(warnings[0][2], { - name: 'SylinkError', - path: dir + '/d/i/r/symlink/', - symlink: dir + '/d/i/r/symlink', + name: 'SymlinkError', + path: cwd + '/d/i/r/symlink/', + symlink: cwd + '/d/i/r/symlink', }) t.end() }) - u.end(data) - }) - t.test('clobber dirs sync', t => { - mkdirp.sync(dir + '/d/i/r/dir') - mkdirp.sync(dir + '/d/i/r/file') - mkdirp.sync(dir + '/d/i/r/link') - mkdirp.sync(dir + '/d/i/r/symlink') - const warnings = [] - const u = new UnpackSync({ - cwd: dir, - onwarn: (c, w, d) => { - warnings.push([c, w, d]) - }, - }) - u.end(data) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) - t.equal(warnings.length, 1) - t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') - t.equal(warnings[0][1], 'Cannot extract through symbolic link') - t.match(warnings[0][2], { - name: 'SylinkError', - path: dir + '/d/i/r/symlink/', - symlink: dir + '/d/i/r/symlink', - }) t.end() - }) - - t.end() -}) + }, +) t.test('unsupported entries', t => { - const dir = path.resolve(unpackdir, 'unsupported-entries') - mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) - const unknown = new Header({ path: 'qux', type: 'File', size: 4 }) - unknown.type = 'Z' + const unknown = new Header({ path: 'qux', size: 4 }) unknown.encode() + unknown.block?.write('Z', 156) const data = makeTar([ { path: 'dev/random', @@ -638,19 +732,38 @@ t.test('unsupported entries', t => { ]) t.test('basic, warns', t => { + const cwd = t.testdir({}) const warnings = [] - const u = new Unpack({ cwd: dir, onwarn: (c, w, d) => warnings.push([c, w, d]) }) + const u = new Unpack({ + cwd, + onwarn: (c, w, d) => warnings.push([c, w, d]), + }) const c = 'TAR_ENTRY_UNSUPPORTED' const expect = [ - [c, 'unsupported entry type: CharacterDevice', { - entry: { path: 'dev/random' } }], - [c, 'unsupported entry type: BlockDevice', { - entry: { path: 'dev/hd0' } }], - [c, 'unsupported entry type: FIFO', { - entry: { 
path: 'dev/fifo0' } }], + [ + c, + 'unsupported entry type: CharacterDevice', + { + entry: { path: 'dev/random' }, + }, + ], + [ + c, + 'unsupported entry type: BlockDevice', + { + entry: { path: 'dev/hd0' }, + }, + ], + [ + c, + 'unsupported entry type: FIFO', + { + entry: { path: 'dev/fifo0' }, + }, + ], ] - u.on('close', _ => { - t.equal(fs.readdirSync(dir).length, 0) + u.on('close', () => { + t.equal(fs.readdirSync(cwd).length, 0) t.match(warnings, expect) t.end() }) @@ -658,16 +771,17 @@ t.test('unsupported entries', t => { }) t.test('strict, throws', t => { + const cwd = t.testdir({}) const warnings = [] const errors = [] const u = new Unpack({ - cwd: dir, + cwd, strict: true, onwarn: (c, w, d) => warnings.push([c, w, d]), }) u.on('error', e => errors.push(e)) - u.on('close', _ => { - t.equal(fs.readdirSync(dir).length, 0) + u.on('close', () => { + t.equal(fs.readdirSync(cwd).length, 0) t.same(warnings, []) t.match(errors, [ { @@ -692,14 +806,6 @@ t.test('unsupported entries', t => { }) t.test('file in dir path', t => { - const dir = path.resolve(unpackdir, 'file-junk') - - t.teardown(_ => rimraf(dir)) - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - const data = makeTar([ { path: 'd/i/r/file', @@ -725,38 +831,46 @@ t.test('file in dir path', t => { t.test('fail because of file', t => { const check = t => { - t.equal(fs.readFileSync(dir + '/d/i/r/file', 'utf8'), 'a') - t.throws(_ => fs.statSync(dir + '/d/i/r/file/a/b/c')) + const cwd = t.testdirName + t.equal(fs.readFileSync(cwd + '/d/i/r/file', 'utf8'), 'a') + t.throws(() => fs.statSync(cwd + '/d/i/r/file/a/b/c')) t.end() } t.plan(2) t.test('async', t => { - new Unpack({ cwd: dir }).on('close', _ => check(t)).end(data) + const cwd = t.testdir({}) + new Unpack({ cwd }).on('close', () => check(t)).end(data) }) t.test('sync', t => { - new UnpackSync({ cwd: dir }).end(data) + const cwd = t.testdir({}) + new UnpackSync({ cwd }).end(data) check(t) }) }) t.test('clobber on through', t => { const check = t => { - t.ok(fs.statSync(dir + '/d/i/r/file').isDirectory()) - t.equal(fs.readFileSync(dir + '/d/i/r/file/a/b/c', 'utf8'), 'b') + const cwd = t.testdirName + t.ok(fs.statSync(cwd + '/d/i/r/file').isDirectory()) + t.equal(fs.readFileSync(cwd + '/d/i/r/file/a/b/c', 'utf8'), 'b') t.end() } t.plan(2) t.test('async', t => { - new Unpack({ cwd: dir, unlink: true }).on('close', _ => check(t)).end(data) + const cwd = t.testdir({}) + new Unpack({ cwd, unlink: true }) + .on('close', () => check(t)) + .end(data) }) t.test('sync', t => { - new UnpackSync({ cwd: dir, unlink: true }).end(data) + const cwd = t.testdir({}) + new UnpackSync({ cwd, unlink: true }).end(data) check(t) }) }) @@ -765,9 +879,7 @@ t.test('file in dir path', t => { }) t.test('set umask option', t => { - const dir = path.resolve(unpackdir, 'umask') - mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + const cwd = t.testdir({}) const data = makeTar([ { @@ -781,17 +893,25 @@ t.test('set umask option', t => { new Unpack({ umask: 0o027, - cwd: dir, - }).on('close', _ => { - t.equal(fs.statSync(dir + '/d/i/r').mode & 0o7777, isWindows ? 0o666 : 0o750) - t.equal(fs.statSync(dir + '/d/i/r/dir').mode & 0o7777, isWindows ? 0o666 : 0o751) - t.end() - }).end(data) + cwd, + }) + .on('close', () => { + t.equal( + fs.statSync(cwd + '/d/i/r').mode & 0o7777, + isWindows ? 0o666 : 0o750, + ) + t.equal( + fs.statSync(cwd + '/d/i/r/dir').mode & 0o7777, + isWindows ? 
0o666 : 0o751, + ) + t.end() + }) + .end(data) }) t.test('absolute paths', t => { - const dir = path.join(unpackdir, 'absolute-paths') - t.teardown(_ => rimraf(dir)) + const dir = t.testdir({}) + t.teardown(() => rimraf(dir)) t.beforeEach(async () => { await rimraf(dir) await mkdirp(dir) @@ -823,11 +943,16 @@ t.test('absolute paths', t => { t.test('warn and correct', t => { const check = t => { const r = normPath(root) - t.match(warnings, [[ - `stripping ${r}${r}${r}${r} from absolute path`, - { path: normPath(absolute), code: 'TAR_ENTRY_INFO' }, - ]]) - t.ok(fs.lstatSync(path.resolve(dir, relative)).isFile(), 'is file') + t.match(warnings, [ + [ + `stripping ${r}${r}${r}${r} from absolute path`, + { path: normPath(absolute), code: 'TAR_ENTRY_INFO' }, + ], + ]) + t.ok( + fs.lstatSync(path.resolve(dir, relative)).isFile(), + 'is file', + ) t.end() } @@ -837,15 +962,17 @@ t.test('absolute paths', t => { warnings.length = 0 new Unpack({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', () => check(t)) + .end(data) }) t.test('sync', t => { warnings.length = 0 new UnpackSync({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t) }) @@ -883,8 +1010,10 @@ t.test('absolute paths', t => { new Unpack({ preservePaths: true, cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', () => check(t)) + .end(data) }) t.test('sync', t => { @@ -892,7 +1021,7 @@ t.test('absolute paths', t => { new UnpackSync({ preservePaths: true, cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t) }) @@ -904,8 +1033,7 @@ t.test('absolute paths', t => { }) t.test('.. paths', t => { - const dir = path.join(unpackdir, 'dotted-paths') - t.teardown(_ => rimraf(dir)) + const dir = t.testdir({}) t.beforeEach(async () => { await rimraf(dir) await mkdirp(dir) @@ -931,11 +1059,13 @@ t.test('.. paths', t => { t.test('warn and skip', t => { const check = t => { - t.match(warnings, [[ - 'path contains \'..\'', - { path: dotted, code: 'TAR_ENTRY_ERROR' }, - ]]) - t.throws(_ => fs.lstatSync(resolved)) + t.match(warnings, [ + [ + "path contains '..'", + { path: dotted, code: 'TAR_ENTRY_ERROR' }, + ], + ]) + t.throws(() => fs.lstatSync(resolved)) t.end() } @@ -946,8 +1076,10 @@ t.test('.. paths', t => { new Unpack({ fmode: fmode, cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', () => check(t)) + .end(data) }) t.test('sync', t => { @@ -955,7 +1087,7 @@ t.test('.. paths', t => { new UnpackSync({ fmode: fmode, cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t) }) @@ -967,7 +1099,10 @@ t.test('.. paths', t => { const check = t => { t.same(warnings, []) t.ok(fs.lstatSync(resolved).isFile(), 'is file') - t.equal(fs.lstatSync(resolved).mode & 0o777, isWindows ? 0o666 : fmode) + t.equal( + fs.lstatSync(resolved).mode & 0o777, + isWindows ? 0o666 : fmode, + ) t.end() } @@ -979,8 +1114,10 @@ t.test('.. 
paths', t => { fmode: fmode, preservePaths: true, cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', () => check(t)) + .end(data) }) t.test('sync', t => { @@ -989,7 +1126,7 @@ t.test('.. paths', t => { fmode: fmode, preservePaths: true, cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t) }) @@ -1003,37 +1140,37 @@ t.test('.. paths', t => { t.test('fail all stats', t => { const poop = new Error('poop') poop.code = 'EPOOP' - const dir = normPath(path.join(unpackdir, 'stat-fail')) - const { - stat, - fstat, - lstat, - statSync, - fstatSync, - lstatSync, - } = fs - const unmutate = () => Object.assign(fs, { - stat, - fstat, - lstat, - statSync, - fstatSync, - lstatSync, - }) + const dir = normPath(t.testdir({})) + const { stat, fstat, lstat, statSync, fstatSync, lstatSync } = fs + const unmutate = () => + Object.assign(fs, { + stat, + fstat, + lstat, + statSync, + fstatSync, + lstatSync, + }) const mutate = () => { - fs.stat = fs.lstat = fs.fstat = (...args) => { - // don't fail statting the cwd, or we get different errors - if (normPath(args[0]) === dir) { - return lstat(dir, args.pop()) - } - process.nextTick(() => args.pop()(poop)) - } - fs.statSync = fs.lstatSync = fs.fstatSync = (...args) => { - if (normPath(args[0]) === dir) { - return lstatSync(dir) - } - throw poop - } + fs.stat = + fs.lstat = + fs.fstat = + (...args) => { + // don't fail statting the cwd, or we get different errors + if (normPath(args[0]) === dir) { + return lstat(dir, args.pop()) + } + process.nextTick(() => args.pop()(poop)) + } + fs.statSync = + fs.lstatSync = + fs.fstatSync = + (...args) => { + if (normPath(args[0]) === dir) { + return lstatSync(dir) + } + throw poop + } } const warnings = [] @@ -1105,8 +1242,10 @@ t.test('fail all stats', t => { ] new Unpack({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t, expect)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', () => check(t, expect)) + .end(data) }) t.test('sync', t => { @@ -1130,7 +1269,7 @@ t.test('fail all stats', t => { ] new UnpackSync({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t, expect) }) @@ -1141,12 +1280,8 @@ t.test('fail all stats', t => { t.test('fail symlink', t => { const poop = new Error('poop') poop.code = 'EPOOP' - const unmutate = mutateFS.fail('symlink', poop) - const dir = path.join(unpackdir, 'symlink-fail') - t.teardown(async _ => { - unmutate() - await rimraf(dir) - }) + const dir = t.testdir({}) + t.teardown(mutateFS.fail('symlink', poop)) const warnings = [] t.beforeEach(async () => { @@ -1186,15 +1321,17 @@ t.test('fail symlink', t => { const expect = [['poop', poop]] new Unpack({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t, expect)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', () => check(t, expect)) + .end(data) }) t.test('sync', t => { const expect = [['poop', poop]] new UnpackSync({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t, expect) }) @@ -1205,12 +1342,8 @@ t.test('fail symlink', t => { t.test('fail chmod', t => { const poop = new Error('poop') poop.code = 'EPOOP' - const unmutate = mutateFS.fail('chmod', poop) - const dir = path.join(unpackdir, 
'chmod-fail') - t.teardown(async _ => { - unmutate() - await rimraf(dir) - }) + const dir = t.testdir() + t.teardown(mutateFS.fail('chmod', poop)) const warnings = [] t.beforeEach(async () => { @@ -1249,15 +1382,21 @@ t.test('fail chmod', t => { const expect = [['poop', poop]] new Unpack({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t, expect)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + chmod: true, + processUmask: 0o22, + }) + .on('close', () => check(t, expect)) + .end(data) }) t.test('sync', t => { const expect = [['poop', poop]] new UnpackSync({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), + chmod: true, + processUmask: 0o22, }).end(data) check(t, expect) }) @@ -1269,8 +1408,7 @@ t.test('fail mkdir', t => { const poop = new Error('poop') poop.code = 'EPOOP' let unmutate - const dir = path.join(unpackdir, 'mkdir-fail') - t.teardown(_ => rimraf(dir)) + const dir = t.testdir({}) const warnings = [] t.beforeEach(async () => { @@ -1294,14 +1432,16 @@ t.test('fail mkdir', t => { '', ]) - const expect = [[ - 'ENOENT: no such file or directory', - { - code: 'ENOENT', - syscall: 'lstat', - path: normPath(path.resolve(dir, 'dir')), - }, - ]] + const expect = [ + [ + 'ENOENT: no such file or directory', + { + code: 'ENOENT', + syscall: 'lstat', + path: normPath(path.resolve(dir, 'dir')), + }, + ], + ] const check = t => { t.match(warnings, expect) @@ -1312,7 +1452,7 @@ t.test('fail mkdir', t => { t.test('sync', t => { new UnpackSync({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t) }) @@ -1320,8 +1460,10 @@ t.test('fail mkdir', t => { t.test('async', t => { new Unpack({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', () => check(t)) + .end(data) }) t.end() @@ -1330,12 +1472,8 @@ t.test('fail mkdir', t => { t.test('fail write', t => { const poop = new Error('poop') poop.code = 'EPOOP' - const unmutate = mutateFS.fail('write', poop) - const dir = path.join(unpackdir, 'write-fail') - t.teardown(async _ => { - unmutate() - await rimraf(dir) - }) + const dir = t.testdir({}) + t.teardown(mutateFS.fail('write', poop)) const warnings = [] t.beforeEach(async () => { @@ -1368,14 +1506,16 @@ t.test('fail write', t => { t.test('async', t => { new Unpack({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', () => check(t)) + .end(data) }) t.test('sync', t => { new UnpackSync({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t) }) @@ -1384,14 +1524,11 @@ t.test('fail write', t => { }) t.test('skip existing', t => { - const dir = path.join(unpackdir, 'skip-newer') - t.teardown(_ => rimraf(dir)) - const date = new Date('2011-03-27T22:16:31.000Z') - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - fs.writeFileSync(dir + '/x', 'y') + t.beforeEach(async t => { + const dir = t.testdir({ + x: 'y', + }) fs.utimesSync(dir + '/x', date, date) }) @@ -1409,6 +1546,7 @@ t.test('skip existing', t => { ]) const check = t => { + const dir = t.testdirName const st = fs.lstatSync(dir + '/x') t.equal(st.atime.toISOString(), date.toISOString()) t.equal(st.mtime.toISOString(), date.toISOString()) @@ -1418,13 +1556,17 @@ 
t.test('skip existing', t => { } t.test('async', t => { + const dir = t.testdirName new Unpack({ cwd: dir, keep: true, - }).on('close', _ => check(t)).end(data) + }) + .on('close', () => check(t)) + .end(data) }) t.test('sync', t => { + const dir = t.testdirName new UnpackSync({ cwd: dir, keep: true, @@ -1436,14 +1578,9 @@ t.test('skip existing', t => { }) t.test('skip newer', t => { - const dir = path.join(unpackdir, 'skip-newer') - t.teardown(_ => rimraf(dir)) - const date = new Date('2013-12-19T17:00:00.000Z') - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - fs.writeFileSync(dir + '/x', 'y') + t.beforeEach(async t => { + const dir = t.testdir({ x: 'y' }) fs.utimesSync(dir + '/x', date, date) }) @@ -1461,6 +1598,7 @@ t.test('skip newer', t => { ]) const check = t => { + const dir = t.testdirName const st = fs.lstatSync(dir + '/x') t.equal(st.atime.toISOString(), date.toISOString()) t.equal(st.mtime.toISOString(), date.toISOString()) @@ -1471,14 +1609,16 @@ t.test('skip newer', t => { t.test('async', t => { new Unpack({ - cwd: dir, + cwd: t.testdirName, newer: true, - }).on('close', _ => check(t)).end(data) + }) + .on('close', () => check(t)) + .end(data) }) t.test('sync', t => { new UnpackSync({ - cwd: dir, + cwd: t.testdirName, newer: true, }).end(data) check(t) @@ -1488,14 +1628,6 @@ t.test('skip newer', t => { }) t.test('no mtime', t => { - const dir = path.join(unpackdir, 'skip-newer') - t.teardown(_ => rimraf(dir)) - - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - const date = new Date('2011-03-27T22:16:31.000Z') const data = makeTar([ { @@ -1521,6 +1653,7 @@ t.test('no mtime', t => { ]) const check = t => { + const dir = t.testdirName // this may fail if it's run on March 27, 2011 const stx = fs.lstatSync(dir + '/x') t.not(stx.atime.toISOString(), date.toISOString()) @@ -1534,13 +1667,17 @@ t.test('no mtime', t => { } t.test('async', t => { + const dir = t.testdir({}) new Unpack({ cwd: dir, noMtime: true, - }).on('close', _ => check(t)).end(data) + }) + .on('close', () => check(t)) + .end(data) }) t.test('sync', t => { + const dir = t.testdir({}) new UnpackSync({ cwd: dir, noMtime: true, @@ -1552,9 +1689,7 @@ t.test('no mtime', t => { }) t.test('unpack big enough to pause/drain', t => { - const dir = path.resolve(unpackdir, 'drain-clog') - mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + const dir = t.testdir({}) const stream = fs.createReadStream(fixtures + '/parses.tar') const u = new Unpack({ cwd: dir, @@ -1563,9 +1698,10 @@ t.test('unpack big enough to pause/drain', t => { }) u.on('ignoredEntry', entry => - t.fail('should not get ignored entry: ' + entry.path)) + t.fail('should not get ignored entry: ' + entry.path), + ) - u.on('close', _ => { + u.on('close', () => { t.pass('extraction finished') const actual = fs.readdirSync(dir) const expected = fs.readdirSync(parses) @@ -1580,29 +1716,22 @@ t.test('set owner', t => { // fake it on platforms that don't have getuid const myUid = 501 const myGid = 1024 - const getuid = process.getuid - const getgid = process.getgid - process.getuid = _ => myUid - process.getgid = _ => myGid - t.teardown(_ => (process.getuid = getuid, process.getgid = getgid)) + t.capture(process, 'getuid', () => myUid) + t.capture(process, 'getgid', () => myGid) // can't actually do this because it requires root, but we can // verify that chown gets called. 
t.test('as root, defaults to true', t => { - const getuid = process.getuid - process.getuid = _ => 0 + t.capture(process, 'getuid', () => 0) const u = new Unpack() t.equal(u.preserveOwner, true, 'preserveOwner enabled') - process.getuid = getuid t.end() }) t.test('as non-root, defaults to false', t => { - const getuid = process.getuid - process.getuid = _ => 501 + t.capture(process, 'getuid', () => 501) const u = new Unpack() t.equal(u.preserveOwner, false, 'preserveOwner disabled') - process.getuid = getuid t.end() }) @@ -1671,7 +1800,6 @@ t.test('set owner', t => { ]) t.test('chown failure results in unpack failure', t => { - const dir = path.resolve(unpackdir, 'chown') const poop = new Error('expected chown failure') const un = mutateFS.fail('chown', poop) const unl = mutateFS.fail('lchown', poop) @@ -1681,42 +1809,43 @@ t.test('set owner', t => { un() unf() unl() - await rimraf(dir) }) t.test('sync', t => { - mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + const cwd = t.testdir({}) let warned = false - const u = new Unpack.Sync({ - cwd: dir, + const u = new UnpackSync({ + cwd, preserveOwner: true, - onwarn: (c, m, er) => { + onwarn: (_c, _m, er) => { if (!warned) { warned = true t.equal(er, poop) - t.end() } }, }) u.end(data) + t.equal(warned, true) + t.end() }) t.test('async', t => { - mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + const cwd = t.testdir({}) let warned = false const u = new Unpack({ - cwd: dir, + cwd, preserveOwner: true, - onwarn: (c, m, er) => { + onwarn: (_c, _m, er) => { if (!warned) { warned = true t.equal(er, poop) - t.end() } }, }) + u.on('finish', () => { + t.equal(warned, true) + t.end() + }) u.end(data) }) @@ -1724,7 +1853,6 @@ t.test('set owner', t => { }) t.test('chown when true', t => { - const dir = path.resolve(unpackdir, 'chown') const chown = fs.chown const lchown = fs.lchown const fchown = fs.fchown @@ -1732,13 +1860,16 @@ t.test('set owner', t => { const fchownSync = fs.fchownSync const lchownSync = fs.lchownSync let called = 0 - fs.fchown = fs.chown = fs.lchown = (path, owner, group, cb) => { - called++ - cb() - } - fs.chownSync = fs.lchownSync = fs.fchownSync = _ => called++ + fs.fchown = + fs.chown = + fs.lchown = + (_path, _owner, _group, cb) => { + called++ + cb() + } + fs.chownSync = fs.lchownSync = fs.fchownSync = () => called++ - t.teardown(_ => { + t.teardown(() => { fs.chown = chown fs.fchown = fchown fs.lchown = lchown @@ -1748,22 +1879,20 @@ t.test('set owner', t => { }) t.test('sync', t => { - mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + const cwd = t.testdir({}) called = 0 - const u = new Unpack.Sync({ cwd: dir, preserveOwner: true }) + const u = new UnpackSync({ cwd, preserveOwner: true }) u.end(data) t.ok(called >= 5, 'called chowns') t.end() }) t.test('async', t => { - mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + const cwd = t.testdir({}) called = 0 - const u = new Unpack({ cwd: dir, preserveOwner: true }) + const u = new Unpack({ cwd, preserveOwner: true }) u.end(data) - u.on('close', _ => { + u.on('close', () => { t.ok(called >= 5, 'called chowns') t.end() }) @@ -1773,22 +1902,18 @@ t.test('set owner', t => { }) t.test('no chown when false', t => { - const dir = path.resolve(unpackdir, 'nochown') const poop = new Error('poop') const un = mutateFS.fail('chown', poop) const unf = mutateFS.fail('fchown', poop) const unl = mutateFS.fail('lchown', poop) - t.teardown(async _ => { + t.teardown(async () => { un() unf() unl() - await rimraf(dir) }) - t.beforeEach(() => mkdirp(dir)) - t.afterEach(() => rimraf(dir)) - const 
check = t => { + const dir = t.testdirName const dirStat = fs.statSync(dir + '/foo') t.not(dirStat.uid, 2456124561) t.not(dirStat.gid, 813708013) @@ -1797,21 +1922,25 @@ t.test('set owner', t => { t.not(fileStat.gid, 813708013) const dirStat2 = fs.statSync(dir + '/foo/different-uid-nogid') t.not(dirStat2.uid, 2456124561) - const fileStat2 = fs.statSync(dir + '/foo/different-uid-nogid/bar') + const fileStat2 = fs.statSync( + dir + '/foo/different-uid-nogid/bar', + ) t.not(fileStat2.uid, 2456124561) t.end() } t.test('sync', t => { - const u = new Unpack.Sync({ cwd: dir, preserveOwner: false }) + const dir = t.testdir({}) + const u = new UnpackSync({ cwd: dir, preserveOwner: false }) u.end(data) check(t) }) t.test('async', t => { + const dir = t.testdir({}) const u = new Unpack({ cwd: dir, preserveOwner: false }) u.end(data) - u.on('close', _ => check(t)) + u.on('close', () => check(t)) }) t.end() @@ -1837,26 +1966,28 @@ t.test('unpack when dir is not writable', t => { '', ]) - const dir = path.resolve(unpackdir, 'nowrite-dir') - t.beforeEach(() => mkdirp(dir)) - t.afterEach(() => rimraf(dir)) - const check = t => { - t.equal(fs.statSync(dir + '/a').mode & 0o7777, isWindows ? 0o666 : 0o744) + const dir = t.testdirName + t.equal( + fs.statSync(dir + '/a').mode & 0o7777, + isWindows ? 0o666 : 0o744, + ) t.equal(fs.readFileSync(dir + '/a/b', 'utf8'), 'a') t.end() } t.test('sync', t => { - const u = new Unpack.Sync({ cwd: dir, strict: true }) + const dir = t.testdir({}) + const u = new UnpackSync({ cwd: dir, strict: true }) u.end(data) check(t) }) t.test('async', t => { + const dir = t.testdir({}) const u = new Unpack({ cwd: dir, strict: true }) u.end(data) - u.on('close', _ => check(t)) + u.on('close', () => check(t)) }) t.end() @@ -1874,31 +2005,30 @@ t.test('transmute chars on windows', t => { '', ]) - const dir = path.resolve(unpackdir, 'winchars') - t.beforeEach(() => mkdirp(dir)) - t.afterEach(() => rimraf(dir)) - const hex = 'ef80bcef81bcef80beef80bfef80ba2e747874' const uglyName = Buffer.from(hex, 'hex').toString() - const ugly = path.resolve(dir, uglyName) const check = t => { + const dir = t.testdirName + const ugly = path.resolve(dir, uglyName) t.same(fs.readdirSync(dir), [uglyName]) t.equal(fs.readFileSync(ugly, 'utf8'), '<|>?:') t.end() } t.test('async', t => { + const dir = t.testdir({}) const u = new Unpack({ cwd: dir, win32: true, }) u.end(data) - u.on('close', _ => check(t)) + u.on('close', () => check(t)) }) t.test('sync', t => { - const u = new Unpack.Sync({ + const dir = t.testdir({}) + const u = new UnpackSync({ cwd: dir, win32: true, }) @@ -1942,10 +2072,6 @@ t.test('safely transmute chars on windows with absolutes', t => { }) t.test('use explicit chmod when required by umask', t => { - process.umask(0o022) - - const basedir = path.resolve(unpackdir, 'umask-chmod') - const data = makeTar([ { path: 'x/y/z', @@ -1957,36 +2083,39 @@ t.test('use explicit chmod when required by umask', t => { ]) const check = async t => { - const st = fs.statSync(basedir + '/x/y/z') + const cwd = t.testdirName + const st = fs.statSync(cwd + '/x/y/z') t.equal(st.mode & 0o777, isWindows ? 
0o666 : 0o775) - await rimraf(basedir) t.end() } t.test('async', t => { - mkdirp.sync(basedir) - const unpack = new Unpack({ cwd: basedir }) - unpack.on('close', _ => check(t)) + const cwd = t.testdir({}) + const unpack = new Unpack({ + cwd, + chmod: true, + processUmask: 0o22, + }) + unpack.on('close', () => check(t)) unpack.end(data) }) return t.test('sync', t => { - mkdirp.sync(basedir) - const unpack = new Unpack.Sync({ cwd: basedir }) + const cwd = t.testdir({}) + const unpack = new UnpackSync({ + cwd, + chmod: true, + processUmask: 0o22, + }) unpack.end(data) check(t) }) }) -t.test('dont use explicit chmod if noChmod flag set', t => { - process.umask(0o022) - const { umask } = process - t.teardown(() => process.umask = umask) - process.umask = () => { +t.test('dont use explicit chmod if chmod flag not set', t => { + t.capture(process, 'umask', () => { throw new Error('should not call process.umask()') - } - - const basedir = path.resolve(unpackdir, 'umask-no-chmod') + }) const data = makeTar([ { @@ -1999,29 +2128,29 @@ t.test('dont use explicit chmod if noChmod flag set', t => { ]) const check = async t => { - const st = fs.statSync(basedir + '/x/y/z') + const cwd = t.testdirName + const st = fs.statSync(cwd + '/x/y/z') t.equal(st.mode & 0o777, isWindows ? 0o666 : 0o755) - await rimraf(basedir) t.end() } t.test('async', t => { - mkdirp.sync(basedir) - const unpack = new Unpack({ cwd: basedir, noChmod: true }) - unpack.on('close', _ => check(t)) + const cwd = t.testdir({}) + const unpack = new Unpack({ cwd }) + unpack.on('close', () => check(t)) unpack.end(data) }) return t.test('sync', t => { - mkdirp.sync(basedir) - const unpack = new Unpack.Sync({ cwd: basedir, noChmod: true }) + const cwd = t.testdir({}) + const unpack = new UnpackSync({ cwd }) unpack.end(data) check(t) }) }) t.test('chown implicit dirs and also the entries', t => { - const basedir = path.resolve(unpackdir, 'chownr') + const basedir = t.testdir({}) // club these so that the test can run as non-root const chown = fs.chown @@ -2033,7 +2162,7 @@ t.test('chown implicit dirs and also the entries', t => { const getuid = process.getuid const getgid = process.getgid - t.teardown(_ => { + t.teardown(() => { fs.chown = chown fs.chownSync = chownSync fs.lchown = lchown @@ -2046,18 +2175,24 @@ t.test('chown implicit dirs and also the entries', t => { let chowns = 0 let currentTest = null - fs.lchown = fs.fchown = fs.chown = (path, uid, gid, cb) => { - currentTest.equal(uid, 420, 'chown(' + path + ') uid') - currentTest.equal(gid, 666, 'chown(' + path + ') gid') - chowns++ - cb() - } + fs.lchown = + fs.fchown = + fs.chown = + (path, uid, gid, cb) => { + currentTest.equal(uid, 420, 'chown(' + path + ') uid') + currentTest.equal(gid, 666, 'chown(' + path + ') gid') + chowns++ + cb() + } - fs.lchownSync = fs.chownSync = fs.fchownSync = (path, uid, gid) => { - currentTest.equal(uid, 420, 'chownSync(' + path + ') uid') - currentTest.equal(gid, 666, 'chownSync(' + path + ') gid') - chowns++ - } + fs.lchownSync = + fs.chownSync = + fs.fchownSync = + (path, uid, gid) => { + currentTest.equal(uid, 420, 'chownSync(' + path + ') uid') + currentTest.equal(gid, 666, 'chownSync(' + path + ') gid') + chowns++ + } const data = makeTar([ { @@ -2091,29 +2226,49 @@ t.test('chown implicit dirs and also the entries', t => { } t.test('throws when setting uid/gid improperly', t => { - t.throws(_ => new Unpack({ uid: 420 }), - TypeError('cannot set owner without number uid and gid')) - t.throws(_ => new Unpack({ gid: 666 }), - TypeError('cannot set 
owner without number uid and gid')) - t.throws(_ => new Unpack({ uid: 1, gid: 2, preserveOwner: true }), - TypeError('cannot preserve owner in archive and also set owner explicitly')) + t.throws( + () => new Unpack({ uid: 420 }), + TypeError('cannot set owner without number uid and gid'), + ) + t.throws( + () => new Unpack({ gid: 666 }), + TypeError('cannot set owner without number uid and gid'), + ) + t.throws( + () => new Unpack({ uid: 1, gid: 2, preserveOwner: true }), + TypeError( + 'cannot preserve owner in archive and also set owner explicitly', + ), + ) t.end() }) const tests = () => - t.test('async', t => { - currentTest = t - mkdirp.sync(basedir) - const unpack = new Unpack({ cwd: basedir, uid: 420, gid: 666 }) - unpack.on('close', _ => check(t)) - unpack.end(data) - }).then(t.test('sync', t => { - currentTest = t - mkdirp.sync(basedir) - const unpack = new Unpack.Sync({ cwd: basedir, uid: 420, gid: 666 }) - unpack.end(data) - check(t) - })) + t + .test('async', t => { + currentTest = t + mkdirp.sync(basedir) + const unpack = new Unpack({ + cwd: basedir, + uid: 420, + gid: 666, + }) + unpack.on('close', () => check(t)) + unpack.end(data) + }) + .then( + t.test('sync', t => { + currentTest = t + mkdirp.sync(basedir) + const unpack = new UnpackSync({ + cwd: basedir, + uid: 420, + gid: 666, + }) + unpack.end(data) + check(t) + }), + ) tests() @@ -2134,9 +2289,7 @@ t.test('chown implicit dirs and also the entries', t => { }) t.test('bad cwd setting', t => { - const basedir = path.resolve(unpackdir, 'bad-cwd') - mkdirp.sync(basedir) - t.teardown(_ => rimraf(basedir)) + const basedir = t.testdir({}) const cases = [ // the cwd itself @@ -2158,72 +2311,77 @@ t.test('bad cwd setting', t => { fs.writeFileSync(basedir + '/file', 'xyz') - cases.forEach(c => t.test(c.type + ' ' + c.path, t => { - const data = makeTar([ - { - path: c.path, - mode: 0o775, - type: c.type, - size: 0, - uid: null, - gid: null, - }, - '', - '', - ]) - - t.test('cwd is a file', t => { - const cwd = basedir + '/file' - const opt = { cwd: cwd } + cases.forEach(c => + t.test(c.type + ' ' + c.path, t => { + const data = makeTar([ + { + path: c.path, + mode: 0o775, + type: c.type, + size: 0, + uid: null, + gid: null, + }, + '', + '', + ]) - t.throws(_ => new Unpack.Sync(opt).end(data), { - name: 'CwdError', - message: 'ENOTDIR: Cannot cd into \'' + normPath(cwd) + '\'', - path: normPath(cwd), - code: 'ENOTDIR', - }) + t.test('cwd is a file', t => { + const cwd = basedir + '/file' + const opt = { cwd: cwd } - new Unpack(opt).on('error', er => { - t.match(er, { + t.throws(() => new UnpackSync(opt).end(data), { name: 'CwdError', - message: 'ENOTDIR: Cannot cd into \'' + normPath(cwd) + '\'', + message: "ENOTDIR: Cannot cd into '" + normPath(cwd) + "'", path: normPath(cwd), code: 'ENOTDIR', }) - t.end() - }).end(data) - }) - - return t.test('cwd is missing', t => { - const cwd = basedir + '/asdf/asdf/asdf' - const opt = { cwd: cwd } - t.throws(_ => new Unpack.Sync(opt).end(data), { - name: 'CwdError', - message: 'ENOENT: Cannot cd into \'' + normPath(cwd) + '\'', - path: normPath(cwd), - code: 'ENOENT', + new Unpack(opt) + .on('error', er => { + t.match(er, { + name: 'CwdError', + message: + "ENOTDIR: Cannot cd into '" + normPath(cwd) + "'", + path: normPath(cwd), + code: 'ENOTDIR', + }) + t.end() + }) + .end(data) }) - new Unpack(opt).on('error', er => { - t.match(er, { + return t.test('cwd is missing', t => { + const cwd = basedir + '/asdf/asdf/asdf' + const opt = { cwd: cwd } + + t.throws(() => new 
UnpackSync(opt).end(data), { name: 'CwdError', - message: 'ENOENT: Cannot cd into \'' + normPath(cwd) + '\'', + message: "ENOENT: Cannot cd into '" + normPath(cwd) + "'", path: normPath(cwd), code: 'ENOENT', }) - t.end() - }).end(data) - }) - })) + + new Unpack(opt) + .on('error', er => { + t.match(er, { + name: 'CwdError', + message: + "ENOENT: Cannot cd into '" + normPath(cwd) + "'", + path: normPath(cwd), + code: 'ENOENT', + }) + t.end() + }) + .end(data) + }) + }), + ) t.end() }) t.test('transform', t => { - const basedir = path.resolve(unpackdir, 'transform') - t.teardown(_ => rimraf(basedir)) - const cases = { 'emptypax.tar': { '🌟.txt': '🌟✧✩⭐︎✪✫✬✭✮⚝✯✰✵✶✷✸✹❂⭑⭒★☆✡☪✴︎✦✡️🔯✴️🌠\n', @@ -2238,7 +2396,8 @@ t.test('transform', t => { 'utf8.tar': { '🌟.txt': '🌟✧✩⭐︎✪✫✬✭✮⚝✯✰✵✶✷✸✹❂⭑⭒★☆✡☪✴︎✦✡️🔯✴️🌠\n', 'Ω.txt': '[Ω]', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': '[Ω]', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': + '[Ω]', }, } @@ -2256,8 +2415,12 @@ t.test('transform', t => { } class Bracer extends Minipass { - write (data) { - const d = data.toString().split('').map(c => '[' + c + ']').join('') + write(data) { + const d = data + .toString() + .split('') + .map(c => '[' + c + ']') + .join('') return super.write(d) } } @@ -2269,13 +2432,9 @@ t.test('transform', t => { tarfiles.forEach(tarfile => { t.test(tarfile, t => { const tf = path.resolve(tars, tarfile) - const dir = path.resolve(basedir, tarfile) - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) const check = t => { + const dir = t.testdirName const expect = cases[tarfile] Object.keys(expect).forEach(file => { const f = path.resolve(dir, file) @@ -2289,25 +2448,37 @@ t.test('transform', t => { t.test('async unpack', t => { t.plan(2) t.test('strict', t => { - const unpack = new Unpack({ cwd: dir, strict: true, transform: txFn }) + const dir = t.testdir({}) + const unpack = new Unpack({ + cwd: dir, + strict: true, + transform: txFn, + }) fs.createReadStream(tf).pipe(unpack) - eos(unpack, _ => check(t)) + eos(unpack, () => check(t)) }) t.test('loose', t => { + const dir = t.testdir({}) const unpack = new Unpack({ cwd: dir, transform: txFn }) fs.createReadStream(tf).pipe(unpack) - eos(unpack, _ => check(t)) + eos(unpack, () => check(t)) }) }) t.test('sync unpack', t => { t.plan(2) t.test('strict', t => { - const unpack = new UnpackSync({ cwd: dir, strict: true, transform: txFn }) + const dir = t.testdir({}) + const unpack = new UnpackSync({ + cwd: dir, + strict: true, + transform: txFn, + }) unpack.end(fs.readFileSync(tf)) check(t) }) t.test('loose', t => { + const dir = t.testdir({}) const unpack = new UnpackSync({ cwd: dir, transform: txFn }) unpack.end(fs.readFileSync(tf)) check(t) @@ -2318,10 +2489,6 @@ t.test('transform', t => { }) t.test('transform error', t => { - const dir = path.resolve(unpackdir, 'transform-error') - mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) - const tarfile = path.resolve(tars, 'body-byte-counts.tar') const tardata = fs.readFileSync(tarfile) const poop = new Error('poop') @@ -2335,7 +2502,12 @@ t.test('transform error', t => { t.test('sync unpack', t => { t.test('strict', t => { - const unpack = new UnpackSync({ cwd: dir, strict: true, transform: txFn }) + const dir = t.testdir({}) + const unpack = new UnpackSync({ + cwd: dir, + strict: true, + transform: txFn, + }) const expect = 3 let actual = 0 unpack.on('error', er => { @@ -2347,10 +2519,11 @@ t.test('transform error', t => { t.end() }) t.test('loose', t => { + const dir = t.testdir({}) const 
unpack = new UnpackSync({ cwd: dir, transform: txFn }) const expect = 3 let actual = 0 - unpack.on('warn', (code, msg, er) => { + unpack.on('warn', (_code, _msg, er) => { t.equal(er, poop) actual++ }) @@ -2361,9 +2534,14 @@ t.test('transform error', t => { t.end() }) t.test('async unpack', t => { + const dir = t.testdir({}) // the last error is about the folder being deleted, just ignore that one t.test('strict', t => { - const unpack = new Unpack({ cwd: dir, strict: true, transform: txFn }) + const unpack = new Unpack({ + cwd: dir, + strict: true, + transform: txFn, + }) t.plan(3) t.teardown(() => { unpack.removeAllListeners('error') @@ -2373,10 +2551,11 @@ t.test('transform error', t => { unpack.end(tardata) }) t.test('loose', t => { + const dir = t.testdir({}) const unpack = new Unpack({ cwd: dir, transform: txFn }) t.plan(3) t.teardown(() => unpack.removeAllListeners('warn')) - unpack.on('warn', (code, msg, er) => t.equal(er, poop)) + unpack.on('warn', (_code, _msg, er) => t.equal(er, poop)) unpack.end(tardata) }) t.end() @@ -2387,23 +2566,16 @@ t.test('transform error', t => { t.test('futimes/fchown failures', t => { const archive = path.resolve(tars, 'utf8.tar') - const dir = path.resolve(unpackdir, 'futimes-fchown-fails') const tardata = fs.readFileSync(archive) const poop = new Error('poop') const second = new Error('second error') - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - - t.teardown(() => rimraf(dir)) - const methods = ['utimes', 'chown'] methods.forEach(method => { const fc = method === 'chown' t.test(method + ' fallback', t => { + const dir = t.testdir({}) t.teardown(mutateFS.fail('f' + method, poop)) // forceChown will fail on systems where the user is not root // and/or the uid/gid in the archive aren't valid. 
We're just @@ -2413,13 +2585,17 @@ t.test('futimes/fchown failures', t => { t.test('async unpack', t => { t.plan(2) t.test('strict', t => { - const unpack = new Unpack({ cwd: dir, strict: true, forceChown: fc }) - unpack.on('finish', t.end) + const unpack = new Unpack({ + cwd: dir, + strict: true, + forceChown: fc, + }) + unpack.on('finish', () => t.end()) unpack.end(tardata) }) t.test('loose', t => { const unpack = new Unpack({ cwd: dir, forceChown: fc }) - unpack.on('finish', t.end) + unpack.on('finish', () => t.end()) unpack.on('warn', t.fail) unpack.end(tardata) }) @@ -2427,12 +2603,16 @@ t.test('futimes/fchown failures', t => { t.test('sync unpack', t => { t.plan(2) t.test('strict', t => { - const unpack = new Unpack.Sync({ cwd: dir, strict: true, forceChown: fc }) + const unpack = new UnpackSync({ + cwd: dir, + strict: true, + forceChown: fc, + }) unpack.end(tardata) t.end() }) t.test('loose', t => { - const unpack = new Unpack.Sync({ cwd: dir, forceChown: fc }) + const unpack = new UnpackSync({ cwd: dir, forceChown: fc }) unpack.on('warn', t.fail) unpack.end(tardata) t.end() @@ -2451,30 +2631,42 @@ t.test('futimes/fchown failures', t => { t.test('async unpack', t => { t.plan(2) t.test('strict', t => { - const unpack = new Unpack({ cwd: dir, strict: true, forceChown: fc }) + const dir = t.testdir({}) + const unpack = new Unpack({ + cwd: dir, + strict: true, + forceChown: fc, + }) t.plan(3) unpack.on('error', er => t.equal(er, poop)) unpack.end(tardata) }) t.test('loose', t => { + const dir = t.testdir({}) const unpack = new Unpack({ cwd: dir, forceChown: fc }) t.plan(3) - unpack.on('warn', (code, m, er) => t.equal(er, poop)) + unpack.on('warn', (_code, _m, er) => t.equal(er, poop)) unpack.end(tardata) }) }) t.test('sync unpack', t => { t.plan(2) t.test('strict', t => { - const unpack = new Unpack.Sync({ cwd: dir, strict: true, forceChown: fc }) + const dir = t.testdir({}) + const unpack = new UnpackSync({ + cwd: dir, + strict: true, + forceChown: fc, + }) t.plan(3) unpack.on('error', er => t.equal(er, poop)) unpack.end(tardata) }) t.test('loose', t => { - const unpack = new Unpack.Sync({ cwd: dir, forceChown: fc }) + const dir = t.testdir({}) + const unpack = new UnpackSync({ cwd: dir, forceChown: fc }) t.plan(3) - unpack.on('warn', (c, m, er) => t.equal(er, poop)) + unpack.on('warn', (_c, _m, er) => t.equal(er, poop)) unpack.end(tardata) }) }) @@ -2484,13 +2676,9 @@ t.test('futimes/fchown failures', t => { t.end() }) -t.test('onentry option is preserved', t => { - const basedir = path.resolve(unpackdir, 'onentry-method') - mkdirp.sync(basedir) - t.teardown(() => rimraf(basedir)) - +t.test('onReadEntry option is preserved', t => { let oecalls = 0 - const onentry = entry => oecalls++ + const onReadEntry = _entry => oecalls++ const data = makeTar([ { path: 'd/i', @@ -2521,17 +2709,16 @@ t.test('onentry option is preserved', t => { } t.test('sync', t => { - const dir = path.join(basedir, 'sync') - mkdirp.sync(dir) - const unpack = new UnpackSync({ cwd: dir, onentry }) + const dir = t.testdir({}) + const unpack = new UnpackSync({ cwd: dir, onReadEntry }) unpack.end(data) check(t) }) t.test('async', t => { - const dir = path.join(basedir, 'async') + const dir = t.testdir({}) mkdirp.sync(dir) - const unpack = new Unpack({ cwd: dir, onentry }) + const unpack = new Unpack({ cwd: dir, onReadEntry }) unpack.on('finish', () => check(t)) unpack.end(data) }) @@ -2540,10 +2727,6 @@ t.test('onentry option is preserved', t => { }) t.test('do not reuse hardlinks, only nlink=1 files', t => { - const 
basedir = path.resolve(unpackdir, 'hardlink-reuse') - mkdirp.sync(basedir) - t.teardown(() => rimraf(basedir)) - const now = new Date('2018-04-30T18:30:39.025Z') const data = makeTar([ @@ -2580,21 +2763,24 @@ t.test('do not reuse hardlinks, only nlink=1 files', t => { } const check = t => { + const dir = t.testdirName for (const f in checks) { - t.equal(fs.readFileSync(basedir + '/' + f, 'utf8'), checks[f], f) - t.equal(fs.statSync(basedir + '/' + f).nlink, 1, f) + t.equal(fs.readFileSync(dir + '/' + f, 'utf8'), checks[f], f) + t.equal(fs.statSync(dir + '/' + f).nlink, 1, f) } t.end() } t.test('async', t => { - const u = new Unpack({ cwd: basedir }) + const dir = t.testdir({}) + const u = new Unpack({ cwd: dir }) u.on('close', () => check(t)) u.end(data) }) t.test('sync', t => { - const u = new UnpackSync({ cwd: basedir }) + const dir = t.testdir({}) + const u = new UnpackSync({ cwd: dir }) u.end(data) check(t) }) @@ -2605,28 +2791,34 @@ t.test('do not reuse hardlinks, only nlink=1 files', t => { t.test('trying to unpack a non-zlib gzip file should fail', t => { const data = Buffer.from('hello this is not gzip data') const dataGzip = Buffer.concat([Buffer.from([0x1f, 0x8b]), data]) - const basedir = path.resolve(unpackdir, 'bad-archive') + t.test('abort if gzip has an error', t => { - t.plan(2) const expect = { message: /^zlib/, errno: Number, code: /^Z/, recoverable: false, - cwd: normPath(basedir), + cwd: normPath(t.testdirName), tarCode: 'TAR_ABORT', } const opts = { - cwd: basedir, + cwd: t.testdir({}), gzip: true, } new Unpack(opts) .once('error', er => t.match(er, expect, 'async emits')) .end(dataGzip) - const skip = !/^v([0-9]|1[0-3])\./.test(process.version) ? false + const skip = + !/^v([0-9]|1[0-3])\./.test(process.version) ? + false : 'node prior to v14 did not raise sync zlib errors properly' - t.throws(() => new UnpackSync(opts).end(dataGzip), - expect, 'sync throws', { skip }) + t.throws( + () => new UnpackSync(opts).end(dataGzip), + expect, + 'sync throws', + { skip }, + ) + t.end() }) t.test('bad archive if no gzip', t => { @@ -2635,11 +2827,15 @@ t.test('trying to unpack a non-zlib gzip file should fail', t => { tarCode: 'TAR_BAD_ARCHIVE', recoverable: false, } - const opts = { cwd: basedir } + const opts = { cwd: t.testdir({}) } new Unpack(opts) - .on('error', er => t.match(er, expect, 'async emits')) + .once('error', er => t.match(er, expect, 'async emits')) .end(data) - t.throws(() => new UnpackSync(opts).end(data), expect, 'sync throws') + t.throws( + () => new UnpackSync(opts).end(data), + expect, + 'sync throws', + ) }) t.end() @@ -2647,17 +2843,9 @@ t.test('trying to unpack a non-zlib gzip file should fail', t => { t.test('handle errors on fs.close', t => { const poop = new Error('poop') - const { close, closeSync } = fs // have to actually close them, or else windows gets mad - fs.close = (fd, cb) => close(fd, () => cb(poop)) - fs.closeSync = (fd) => { - closeSync(fd) - throw poop - } - t.teardown(() => Object.assign(fs, { close, closeSync })) - const dir = path.resolve(unpackdir, 'close-fail') - mkdirp.sync(dir + '/sync') - mkdirp.sync(dir + '/async') + t.teardown(mutateFS.fail('close', poop)) + const data = makeTar([ { path: 'file', @@ -2673,73 +2861,85 @@ t.test('handle errors on fs.close', t => { ]) t.plan(2) - new Unpack({ cwd: dir + '/async', strict: true }) - .on('error', er => t.equal(er, poop, 'async')) - .end(data) - t.throws(() => new UnpackSync({ - cwd: normPath(dir + '/sync'), strict: true, - }).end(data), poop, 'sync') -}) - -t.test('drop entry from 
dirCache if no longer a directory', { - skip: isWindows && 'symlinks not fully supported', -}, t => { - const dir = path.resolve(unpackdir, 'dir-cache-error') - mkdirp.sync(dir + '/sync/y') - mkdirp.sync(dir + '/async/y') - const data = makeTar([ - { - path: 'x', - type: 'Directory', - }, - { - path: 'x', - type: 'SymbolicLink', - linkpath: './y', - }, - { - path: 'x/ginkoid', - type: 'File', - size: 'ginkoid'.length, - }, - 'ginkoid', - '', - '', - ]) - t.plan(2) - const WARNINGS = {} - const check = (t, path) => { - t.equal(fs.statSync(path + '/x').isDirectory(), true) - t.equal(fs.lstatSync(path + '/x').isSymbolicLink(), true) - t.equal(fs.statSync(path + '/y').isDirectory(), true) - t.strictSame(fs.readdirSync(path + '/y'), []) - t.throws(() => fs.readFileSync(path + '/x/ginkoid'), { code: 'ENOENT' }) - t.strictSame(WARNINGS[path], [ - 'TAR_ENTRY_ERROR', - 'Cannot extract through symbolic link', - ]) - t.end() - } t.test('async', t => { - const path = dir + '/async' - new Unpack({ cwd: path }) - .on('warn', (code, msg) => WARNINGS[path] = [code, msg]) - .on('end', () => check(t, path)) + new Unpack({ cwd: t.testdir({}), strict: true }) + .on('error', er => t.equal(er, poop, 'async')) + .on('end', () => t.end()) .end(data) }) t.test('sync', t => { - const path = dir + '/sync' - new UnpackSync({ cwd: path }) - .on('warn', (code, msg) => WARNINGS[path] = [code, msg]) - .end(data) - check(t, path) + t.throws( + () => + new UnpackSync({ + cwd: normPath(t.testdir({})), + strict: true, + }).end(data), + poop, + 'sync', + ) + t.end() }) }) +t.test( + 'drop entry from dirCache if no longer a directory', + { + skip: isWindows && 'symlinks not fully supported', + }, + t => { + const data = makeTar([ + { + path: 'x', + type: 'Directory', + }, + { + path: 'x', + type: 'SymbolicLink', + linkpath: './y', + }, + { + path: 'x/ginkoid', + type: 'File', + size: 'ginkoid'.length, + }, + 'ginkoid', + '', + '', + ]) + t.plan(2) + const WARNINGS = {} + const check = (t, path) => { + t.equal(fs.statSync(path + '/x').isDirectory(), true) + t.equal(fs.lstatSync(path + '/x').isSymbolicLink(), true) + t.equal(fs.statSync(path + '/y').isDirectory(), true) + t.strictSame(fs.readdirSync(path + '/y'), []) + t.throws(() => fs.readFileSync(path + '/x/ginkoid'), { + code: 'ENOENT', + }) + t.strictSame(WARNINGS[path], [ + 'TAR_ENTRY_ERROR', + 'TAR_SYMLINK_ERROR: Cannot extract through symbolic link', + ]) + t.end() + } + t.test('async', t => { + const path = t.testdir({ y: {} }) + new Unpack({ cwd: path }) + .on('warn', (code, msg) => (WARNINGS[path] = [code, msg])) + .on('end', () => check(t, path)) + .end(data) + }) + t.test('sync', t => { + const path = t.testdir({ y: {} }) + new UnpackSync({ cwd: path }) + .on('warn', (code, msg) => (WARNINGS[path] = [code, msg])) + .end(data) + check(t, path) + }) + }, +) + t.test('using strip option when top level file exists', t => { - const dir = path.resolve(unpackdir, 'strip-with-top-file') - mkdirp.sync(dir + '/sync/y') - mkdirp.sync(dir + '/async/y') const data = makeTar([ { path: 'top', @@ -2778,13 +2978,13 @@ t.test('using strip option when top level file exists', t => { t.end() } t.test('async', t => { - const path = dir + '/async' + const path = t.testdir({ y: {} }) new Unpack({ cwd: path, strip: 1 }) .on('end', () => check(t, path)) .end(data) }) t.test('sync', t => { - const path = dir + '/sync' + const path = t.testdir({ y: {} }) new UnpackSync({ cwd: path, strip: 1 }).end(data) check(t, path) }) @@ -2792,7 +2992,8 @@ t.test('using strip option when top level file 
exists', t => { t.test('handle EPERMs when creating symlinks', t => { // https://github.com/npm/node-tar/issues/265 - const msg = 'You do not have sufficient privilege to perform this operation.' + const msg = + 'You do not have sufficient privilege to perform this operation.' const er = Object.assign(new Error(msg), { code: 'EPERM', }) @@ -2833,16 +3034,16 @@ t.test('handle EPERMs when creating symlinks', t => { '', ]) - const dir = path.resolve(unpackdir, 'eperm-symlinks') - mkdirp.sync(`${dir}/sync`) - mkdirp.sync(`${dir}/async`) - - const check = path => { - t.match(WARNINGS, [ - ['TAR_ENTRY_ERROR', msg], - ['TAR_ENTRY_ERROR', msg], - ['TAR_ENTRY_ERROR', msg], - ], 'got expected warnings') + const check = (t, path) => { + t.match( + WARNINGS, + [ + ['TAR_ENTRY_ERROR', msg], + ['TAR_ENTRY_ERROR', msg], + ['TAR_ENTRY_ERROR', msg], + ], + 'got expected warnings', + ) t.equal(WARNINGS.length, 3) WARNINGS.length = 0 t.equal(fs.readFileSync(`${path}/x/y`, 'utf8'), 'hello, world') @@ -2853,21 +3054,29 @@ t.test('handle EPERMs when creating symlinks', t => { } const WARNINGS = [] - const u = new Unpack({ - cwd: `${dir}/async`, - onwarn: (code, msg, er) => WARNINGS.push([code, msg]), + t.test('async', t => { + const dir = t.testdir({}) + const u = new Unpack({ + cwd: dir, + onwarn: (code, msg, _er) => WARNINGS.push([code, msg]), + }) + u.on('end', () => { + check(t, dir) + t.end() + }) + u.end(data) }) - u.on('end', () => { - check(`${dir}/async`) + t.test('sync', t => { + const dir = t.testdir({}) const u = new UnpackSync({ - cwd: `${dir}/sync`, - onwarn: (code, msg, er) => WARNINGS.push([code, msg]), + cwd: dir, + onwarn: (code, msg, _er) => WARNINGS.push([code, msg]), }) u.end(data) - check(`${dir}/sync`) + check(t, dir) t.end() }) - u.end(data) + t.end() }) t.test('close fd when error writing', t => { @@ -2888,8 +3097,8 @@ t.test('close fd when error writing', t => { t.teardown(mutateFS.fail('write', new Error('nope'))) const CLOSES = [] const OPENS = {} - const { open } = require('fs') - t.teardown(() => fs.open = open) + const { open } = fs + t.teardown(() => (fs.open = open)) fs.open = (...args) => { const cb = args.pop() args.push((er, fd) => { @@ -2898,13 +3107,15 @@ t.test('close fd when error writing', t => { }) return open.call(fs, ...args) } - t.teardown(mutateFS.mutateArgs('close', ([fd]) => { - CLOSES.push(fd) - return [fd] - })) + t.teardown( + mutateFS.mutateArgs('close', ([fd]) => { + CLOSES.push(fd) + return [fd] + }), + ) const WARNINGS = [] - const dir = path.resolve(unpackdir, 'close-on-write-error') - mkdirp.sync(dir) + + const dir = t.testdir({}) const unpack = new Unpack({ cwd: dir, onwarn: (code, msg) => WARNINGS.push([code, msg]), @@ -2941,23 +3152,23 @@ t.test('close fd when error setting mtime', t => { t.teardown(mutateFS.fail('utimes', new Error('nooooope'))) const CLOSES = [] const OPENS = {} - const { open } = require('fs') - t.teardown(() => fs.open = open) - fs.open = (...args) => { + const { open } = fs + t.capture(fs, 'open', (...args) => { const cb = args.pop() args.push((er, fd) => { OPENS[args[0]] = fd cb(er, fd) }) return open.call(fs, ...args) - } - t.teardown(mutateFS.mutateArgs('close', ([fd]) => { - CLOSES.push(fd) - return [fd] - })) + }) + t.teardown( + mutateFS.mutateArgs('close', ([fd]) => { + CLOSES.push(fd) + return [fd] + }), + ) const WARNINGS = [] - const dir = path.resolve(unpackdir, 'close-on-futimes-error') - mkdirp.sync(dir) + const dir = t.testdir({}) const unpack = new Unpack({ cwd: dir, onwarn: (code, msg) => 
WARNINGS.push([code, msg]), @@ -2987,8 +3198,8 @@ t.test('do not hang on large files that fail to open()', t => { '', ]) t.teardown(mutateFS.fail('open', new Error('nope'))) - const dir = path.resolve(unpackdir, 'no-hang-for-large-file-failures') - mkdirp.sync(dir) + const dir = t.testdir({}) + const WARNINGS = [] const unpack = new Unpack({ cwd: dir, @@ -2998,11 +3209,11 @@ t.test('do not hang on large files that fail to open()', t => { t.strictSame(WARNINGS, [['TAR_ENTRY_ERROR', 'nope']]) t.end() }) - unpack.write(data.slice(0, 2048)) + unpack.write(data.subarray(0, 2048)) setTimeout(() => { - unpack.write(data.slice(2048, 4096)) + unpack.write(data.subarray(2048, 4096)) setTimeout(() => { - unpack.write(data.slice(4096)) + unpack.write(data.subarray(4096)) setTimeout(() => { unpack.end() }) @@ -3010,165 +3221,178 @@ t.test('do not hang on large files that fail to open()', t => { }) }) -t.test('dirCache pruning unicode normalized collisions', { - skip: isWindows && 'symlinks not fully supported', -}, t => { - const data = makeTar([ - { - type: 'Directory', - path: 'foo', - }, - { - type: 'File', - path: 'foo/bar', - size: 1, - }, - 'x', - { - type: 'Directory', - // café - path: Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString(), - }, - { - type: 'SymbolicLink', - // cafe with a ` - path: Buffer.from([0x63, 0x61, 0x66, 0x65, 0xcc, 0x81]).toString(), - linkpath: 'foo', - }, - { - type: 'Directory', - path: 'foo', - }, - { - type: 'File', - path: Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString() + '/bar', - size: 1, - }, - 'y', - '', - '', - ]) - - const check = (path, dirCache, t) => { - path = path.replace(/\\/g, '/') - t.strictSame([...dirCache.entries()][0], [`${path}/foo`, true]) - t.equal(fs.readFileSync(path + '/foo/bar', 'utf8'), 'x') - t.end() - } - - t.test('sync', t => { - const path = t.testdir() - const dirCache = new Map() - new UnpackSync({ cwd: path, dirCache }).end(data) - check(path, dirCache, t) - }) - t.test('async', t => { - const path = t.testdir() - const dirCache = new Map() - new Unpack({ cwd: path, dirCache }) - .on('close', () => check(path, dirCache, t)) - .end(data) - }) +t.test( + 'dirCache pruning unicode normalized collisions', + { + skip: isWindows && 'symlinks not fully supported', + }, + t => { + const data = makeTar([ + { + type: 'Directory', + path: 'foo', + }, + { + type: 'File', + path: 'foo/bar', + size: 1, + }, + 'x', + { + type: 'Directory', + // café + path: Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString(), + }, + { + type: 'SymbolicLink', + // cafe with a ` + path: Buffer.from([ + 0x63, 0x61, 0x66, 0x65, 0xcc, 0x81, + ]).toString(), + linkpath: 'foo', + }, + { + type: 'Directory', + path: 'foo', + }, + { + type: 'File', + path: + Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString() + + '/bar', + size: 1, + }, + 'y', + '', + '', + ]) - t.end() -}) + const check = (path, dirCache, t) => { + path = path.replace(/\\/g, '/') + t.strictSame([...dirCache.entries()][0], [`${path}/foo`, true]) + t.equal(fs.readFileSync(path + '/foo/bar', 'utf8'), 'x') + t.end() + } -t.test('dircache prune all on windows when symlink encountered', t => { - if (process.platform !== 'win32') { - process.env.TESTING_TAR_FAKE_PLATFORM = 'win32' - t.teardown(() => { - delete process.env.TESTING_TAR_FAKE_PLATFORM + t.test('sync', t => { + const path = t.testdir() + const dirCache = new Map() + new UnpackSync({ cwd: path, dirCache }).end(data) + check(path, dirCache, t) + }) + t.test('async', t => { + const path = t.testdir() + const dirCache = new Map() + new 
Unpack({ cwd: path, dirCache }) + .on('close', () => check(path, dirCache, t)) + .end(data) }) - } - const symlinks = [] - const Unpack = t.mock('../lib/unpack.js', { - fs: { - ...fs, - symlink: (target, dest, cb) => { - symlinks.push(['async', target, dest]) - process.nextTick(cb) - }, - symlinkSync: (target, dest) => symlinks.push(['sync', target, dest]), - }, - }) - const UnpackSync = Unpack.Sync - const data = makeTar([ - { - type: 'Directory', - path: 'foo', - }, - { - type: 'File', - path: 'foo/bar', - size: 1, - }, - 'x', - { - type: 'Directory', - // café - path: Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString(), - }, - { - type: 'SymbolicLink', - // cafe with a ` - path: Buffer.from([0x63, 0x61, 0x66, 0x65, 0xcc, 0x81]).toString(), - linkpath: 'safe/actually/but/cannot/be/too/careful', - }, - { - type: 'File', - path: 'bar/baz', - size: 1, - }, - 'z', - '', - '', - ]) + t.end() + }, +) + +t.test( + 'dircache prune all on windows when symlink encountered', + async t => { + if (process.platform !== 'win32') { + process.env.TESTING_TAR_FAKE_PLATFORM = 'win32' + t.teardown(() => { + delete process.env.TESTING_TAR_FAKE_PLATFORM + }) + } + const symlinks = [] + const { Unpack } = await t.mockImport('../dist/esm/unpack.js', { + fs: { + ...fs, + symlink: (target, dest, cb) => { + symlinks.push(['async', target, dest]) + process.nextTick(cb) + }, + symlinkSync: (target, dest) => + symlinks.push(['sync', target, dest]), + }, + }) - const check = (path, dirCache, t) => { - // symlink blew away all dirCache entries before it - path = path.replace(/\\/g, '/') - t.strictSame([...dirCache.entries()], [ - [`${path}/bar`, true], + const data = makeTar([ + { + type: 'Directory', + path: 'foo', + }, + { + type: 'File', + path: 'foo/bar', + size: 1, + }, + 'x', + { + type: 'Directory', + // café + path: Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString(), + }, + { + type: 'SymbolicLink', + // cafe with a ` + path: Buffer.from([ + 0x63, 0x61, 0x66, 0x65, 0xcc, 0x81, + ]).toString(), + linkpath: 'safe/actually/but/cannot/be/too/careful', + }, + { + type: 'File', + path: 'bar/baz', + size: 1, + }, + 'z', + '', + '', ]) - t.equal(fs.readFileSync(`${path}/foo/bar`, 'utf8'), 'x') - t.equal(fs.readFileSync(`${path}/bar/baz`, 'utf8'), 'z') - t.end() - } - t.test('sync', t => { - const path = t.testdir() - const dirCache = new Map() - new UnpackSync({ cwd: path, dirCache }).end(data) - check(path, dirCache, t) - }) + const check = (path, dirCache, t) => { + // symlink blew away all dirCache entries before it + path = path.replace(/\\/g, '/') + t.strictSame([...dirCache.entries()], [[`${path}/bar`, true]]) + t.equal(fs.readFileSync(`${path}/foo/bar`, 'utf8'), 'x') + t.equal(fs.readFileSync(`${path}/bar/baz`, 'utf8'), 'z') + t.end() + } - t.test('async', t => { - const path = t.testdir() - const dirCache = new Map() - new Unpack({ cwd: path, dirCache }) - .on('close', () => check(path, dirCache, t)) - .end(data) - }) + t.test('sync', t => { + const path = t.testdir() + const dirCache = new Map() + new UnpackSync({ cwd: path, dirCache }).end(data) + check(path, dirCache, t) + }) - t.end() -}) + t.test('async', t => { + const path = t.testdir() + const dirCache = new Map() + new Unpack({ cwd: path, dirCache }) + .on('close', () => check(path, dirCache, t)) + .end(data) + }) -t.test('recognize C:.. as a dot path part', t => { + t.end() + }, +) + +t.test('recognize C:.. 
as a dot path part', async t => { if (process.platform !== 'win32') { process.env.TESTING_TAR_FAKE_PLATFORM = 'win32' t.teardown(() => { delete process.env.TESTING_TAR_FAKE_PLATFORM }) } - const Unpack = t.mock('../lib/unpack.js', { - path: { - ...path.win32, - win32: path.win32, - posix: path.posix, + const { Unpack, UnpackSync } = await t.mockImport( + '../dist/esm/unpack.js', + { + path: { + ...path.win32, + win32: path.win32, + posix: path.posix, + }, }, - }) - const UnpackSync = Unpack.Sync + ) const data = makeTar([ { @@ -3202,7 +3426,12 @@ t.test('recognize C:.. as a dot path part', t => { 'C:../x/y/z', 'C:../x/y/z', ], - ['TAR_ENTRY_ERROR', "path contains '..'", 'x:../y/z', 'x:../y/z'], + [ + 'TAR_ENTRY_ERROR', + "path contains '..'", + 'x:../y/z', + 'x:../y/z', + ], [ 'TAR_ENTRY_INFO', 'stripping Y: from absolute path', @@ -3218,7 +3447,8 @@ t.test('recognize C:.. as a dot path part', t => { const path = t.testdir() new Unpack({ cwd: path, - onwarn: (c, w, { entry, path }) => warnings.push([c, w, path, entry.path]), + onwarn: (c, w, { entry, path }) => + warnings.push([c, w, path, entry.path]), }) .on('close', () => check(path, warnings, t)) .end(data) @@ -3229,7 +3459,8 @@ t.test('recognize C:.. as a dot path part', t => { const path = t.testdir() new UnpackSync({ cwd: path, - onwarn: (c, w, { entry, path }) => warnings.push([c, w, path, entry.path]), + onwarn: (c, w, { entry, path }) => + warnings.push([c, w, path, entry.path]), }).end(data) check(path, warnings, t) }) @@ -3246,15 +3477,16 @@ t.test('excessively deep subfolder nesting', async t => { const check = (t, maxDepth = 1024) => { t.match(warnings, [ - ['TAR_ENTRY_ERROR', + [ + 'TAR_ENTRY_ERROR', 'path excessively deep', { entry: ReadEntry, path: /^\.(\/a){1024,}\/foo.txt$/, depth: 222372, maxDepth, - } - ] + }, + ], ]) warnings.length = 0 t.end() @@ -3264,15 +3496,17 @@ t.test('excessively deep subfolder nesting', async t => { const cwd = t.testdir() new Unpack({ cwd, - onwarn - }).on('end', () => check(t)).end(data) + onwarn, + }) + .on('end', () => check(t)) + .end(data) }) t.test('sync', t => { const cwd = t.testdir() new UnpackSync({ cwd, - onwarn + onwarn, }).end(data) check(t) }) @@ -3283,7 +3517,9 @@ t.test('excessively deep subfolder nesting', async t => { cwd, onwarn, maxDepth: 64, - }).on('end', () => check(t, 64)).end(data) + }) + .on('end', () => check(t, 64)) + .end(data) }) t.test('sync set md', t => { @@ -3296,3 +3532,35 @@ t.test('excessively deep subfolder nesting', async t => { check(t, 64) }) }) + +t.test('ignore self-referential hardlinks', async t => { + const data = makeTar([ + { + path: 'autolink', + linkpath: './autolink', + type: 'Link', + }, + ]) + const check = (t, warnings) => { + t.matchSnapshot(warnings) + t.strictSame(readdirSync(t.testdirName), [], 'nothing extracted') + t.end() + } + t.test('async', t => { + const cwd = t.testdir({}) + const warnings = [] + const u = new Unpack({ cwd, onwarn: (_, m) => warnings.push(m) }) + u.on('end', () => check(t, warnings)) + u.end(data) + }) + t.test('sync', t => { + const cwd = t.testdir({}) + const warnings = [] + const u = new UnpackSync({ + cwd, + onwarn: (_, m) => warnings.push(m), + }) + u.end(data) + check(t, warnings) + }) +}) diff --git a/test/update.js b/test/update.js index 7034a165..f171539e 100644 --- a/test/update.js +++ b/test/update.js @@ -1,24 +1,32 @@ -'use strict' -const t = require('tap') -const u = require('../lib/update.js') -const path = require('path') -const fs = require('fs') -const mutateFS = require('mutate-fs') - -const 
{ resolve } = require('path') +import t from 'tap' +import { update as u } from '../dist/esm/update.js' + +import path, { dirname } from 'path' +import fs from 'fs' +import mutateFS from 'mutate-fs' + +import { resolve } from 'path' +import { fileURLToPath } from 'url' +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) const fixtures = path.resolve(__dirname, 'fixtures') const tars = path.resolve(fixtures, 'tars') -const zlib = require('zlib') - -const spawn = require('child_process').spawn +import zlib from 'zlib' +import { spawn } from 'child_process' const data = fs.readFileSync(tars + '/body-byte-counts.tar') -const dataNoNulls = data.slice(0, data.length - 1024) +const dataNoNulls = data.subarray(0, data.length - 1024) const fixtureDef = { 'body-byte-counts.tar': data, 'no-null-eof.tar': dataNoNulls, - 'truncated-head.tar': Buffer.concat([dataNoNulls, data.slice(0, 500)]), - 'truncated-body.tar': Buffer.concat([dataNoNulls, data.slice(0, 700)]), + 'truncated-head.tar': Buffer.concat([ + dataNoNulls, + data.subarray(0, 500), + ]), + 'truncated-body.tar': Buffer.concat([ + dataNoNulls, + data.subarray(0, 700), + ]), 'zero.tar': Buffer.from(''), 'empty.tar': Buffer.alloc(512), 'compressed.tgz': zlib.gzipSync(data), @@ -33,7 +41,10 @@ t.test('basic file add to archive (good or truncated)', t => { c.on('close', (code, signal) => { t.equal(code, 0) t.equal(signal, null) - const actual = Buffer.concat(out).toString().trim().split(/\r?\n/) + const actual = Buffer.concat(out) + .toString() + .trim() + .split(/\r?\n/) t.same(actual, [ '1024-bytes.txt', '512-bytes.txt', @@ -51,21 +62,26 @@ t.test('basic file add to archive (good or truncated)', t => { 'truncated-head.tar', 'truncated-body.tar', ] - const td = files.map(f => [f, fixtureDef[f]]).reduce((s, [k, v]) => { - s[k] = v - return s - }, {}) + const td = files + .map(f => [f, fixtureDef[f]]) + .reduce((s, [k, v]) => { + s[k] = v + return s + }, {}) const fileList = [path.basename(__filename)] t.test('sync', t => { t.plan(files.length) const dir = t.testdir(td) for (const file of files) { t.test(file, t => { - u({ - sync: true, - file: resolve(dir, file), - cwd: __dirname, - }, fileList) + u( + { + sync: true, + file: resolve(dir, file), + cwd: __dirname, + }, + fileList, + ) check(resolve(dir, file), t) }) } @@ -76,15 +92,19 @@ t.test('basic file add to archive (good or truncated)', t => { const dir = t.testdir(td) for (const file of files) { t.test(file, t => { - u({ - file: resolve(dir, file), - cwd: __dirname, - }, fileList, er => { - if (er) { - throw er - } - check(resolve(dir, file), t) - }) + u( + { + file: resolve(dir, file), + cwd: __dirname, + }, + fileList, + er => { + if (er) { + throw er + } + check(resolve(dir, file), t) + }, + ) }) } }) @@ -94,10 +114,13 @@ t.test('basic file add to archive (good or truncated)', t => { const dir = t.testdir(td) for (const file of files) { t.test(file, t => { - u({ - file: resolve(dir, file), - cwd: __dirname, - }, fileList).then(() => { + u( + { + file: resolve(dir, file), + cwd: __dirname, + }, + fileList, + ).then(() => { check(resolve(dir, file), t) }) }) @@ -115,22 +138,22 @@ t.test('add to empty archive', t => { c.on('close', (code, signal) => { t.equal(code, 0) t.equal(signal, null) - const actual = Buffer.concat(out).toString().trim().split(/\r?\n/) - t.same(actual, [ - path.basename(__filename), - ]) + const actual = Buffer.concat(out) + .toString() + .trim() + .split(/\r?\n/) + t.same(actual, [path.basename(__filename)]) t.end() }) } - const 
files = [ - 'empty.tar', - 'zero.tar', - ] - const td = files.map(f => [f, fixtureDef[f]]).reduce((s, [k, v]) => { - s[k] = v - return s - }, {}) + const files = ['empty.tar', 'zero.tar'] + const td = files + .map(f => [f, fixtureDef[f]]) + .reduce((s, [k, v]) => { + s[k] = v + return s + }, {}) files.push('not-existing.tar') t.test('sync', t => { @@ -138,11 +161,14 @@ t.test('add to empty archive', t => { t.plan(files.length) for (const file of files) { t.test(file, t => { - u({ - sync: true, - file: resolve(dir, file), - cwd: __dirname, - }, [path.basename(__filename)]) + u( + { + sync: true, + file: resolve(dir, file), + cwd: __dirname, + }, + [path.basename(__filename)], + ) check(resolve(dir, file), t) }) } @@ -153,15 +179,19 @@ t.test('add to empty archive', t => { t.plan(files.length) for (const file of files) { t.test(file, t => { - u({ - file: resolve(dir, file), - cwd: __dirname, - }, [path.basename(__filename)], er => { - if (er) { - throw er - } - check(resolve(dir, file), t) - }) + u( + { + file: resolve(dir, file), + cwd: __dirname, + }, + [path.basename(__filename)], + er => { + if (er) { + throw er + } + check(resolve(dir, file), t) + }, + ) }) } }) @@ -171,10 +201,13 @@ t.test('add to empty archive', t => { t.plan(files.length) for (const file of files) { t.test(file, t => { - u({ - file: resolve(dir, file), - cwd: __dirname, - }, [path.basename(__filename)]).then(() => { + u( + { + file: resolve(dir, file), + cwd: __dirname, + }, + [path.basename(__filename)], + ).then(() => { check(resolve(dir, file), t) }) }) @@ -191,27 +224,47 @@ t.test('cannot append to gzipped archives', t => { const file = resolve(dir, 'compressed.tgz') const expect = new Error('cannot append to compressed archives') - const expectT = new TypeError('cannot append to compressed archives') - - t.throws(_ => u({ - file, - cwd: __dirname, - gzip: true, - }, [path.basename(__filename)]), expectT) - - t.throws(_ => u({ - file, - cwd: __dirname, - sync: true, - }, [path.basename(__filename)]), expect) - - u({ - file, - cwd: __dirname, - }, [path.basename(__filename)], er => { - t.match(er, expect) - t.end() - }) + const expectT = new TypeError( + 'cannot append to compressed archives', + ) + + t.throws( + _ => + u( + { + file, + cwd: __dirname, + gzip: true, + }, + [path.basename(__filename)], + ), + expectT, + ) + + t.throws( + _ => + u( + { + file, + cwd: __dirname, + sync: true, + }, + [path.basename(__filename)], + ), + expect, + ) + + u( + { + file, + cwd: __dirname, + }, + [path.basename(__filename)], + er => { + t.match(er, expect) + t.end() + }, + ) }) t.test('cannot append to brotli archives', t => { @@ -221,27 +274,45 @@ t.test('cannot append to brotli archives', t => { const file = resolve(dir, 'compressed.tbr') const expect = new Error('cannot append to compressed archives') - const expectT = new TypeError('cannot append to compressed archives') - - t.throws(_ => u({ - file, - cwd: __dirname, - brotli: true, - }, [path.basename(__filename)]), expectT) - - t.throws(_ => u({ - file, - cwd: __dirname, - sync: true, - }, [path.basename(__filename)]), expect) + const expectT = new TypeError( + 'cannot append to compressed archives', + ) + + t.throws( + _ => + u( + { + file, + cwd: __dirname, + brotli: true, + }, + [path.basename(__filename)], + ), + expectT, + ) + + t.throws( + _ => + u( + { + file, + cwd: __dirname, + sync: true, + }, + [path.basename(__filename)], + ), + expect, + ) t.end() }) t.test('other throws', t => { t.throws(_ => u({}, ['asdf']), new TypeError('file is required')) - 
t.throws(_ => u({ file: 'asdf' }, []), - new TypeError('no files or directories specified')) + t.throws( + _ => u({ file: 'asdf' }, []), + new TypeError('no paths specified to add/replace'), + ) t.end() }) @@ -317,13 +388,16 @@ t.test('do not add older file', t => { } t.test('sync', t => { - u({ - mtimeCache: new Map(), - file, - cwd: dir, - sync: true, - filter: path => path === '1024-bytes.txt', - }, ['1024-bytes.txt', 'foo']) + u( + { + mtimeCache: new Map(), + file, + cwd: dir, + sync: true, + filter: path => path === '1024-bytes.txt', + }, + ['1024-bytes.txt', 'foo'], + ) check(t) }) @@ -358,7 +432,8 @@ t.test('do add newer file', t => { } // a chunk for the header, then 2 for the body - const expect = fixtureDef['body-byte-counts.tar'].length + 512 + 1024 + const expect = + fixtureDef['body-byte-counts.tar'].length + 512 + 1024 const check = (file, t) => { t.equal(fs.statSync(file).size, expect) t.end() @@ -367,20 +442,25 @@ t.test('do add newer file', t => { t.test('sync', t => { const dir = setup(t) const file = resolve(dir, 'body-byte-counts.tar') - u({ - mtimeCache: new Map(), - file, - cwd: dir, - sync: true, - filter: path => path === '1024-bytes.txt', - }, ['1024-bytes.txt', 'foo']) + u( + { + mtimeCache: new Map(), + file, + cwd: dir, + sync: true, + filter: path => path === '1024-bytes.txt', + }, + ['1024-bytes.txt', 'foo'], + ) check(file, t) }) t.test('async', t => { const dir = setup(t) const file = resolve(dir, 'body-byte-counts.tar') - u({ file, cwd: dir }, ['1024-bytes.txt']).then(_ => check(file, t)) + u({ file, cwd: dir }, ['1024-bytes.txt']).then(_ => + check(file, t), + ) }) t.test('async cb', t => { diff --git a/test/warn-mixin.js b/test/warn-method.js similarity index 61% rename from test/warn-mixin.js rename to test/warn-method.js index 36350e47..7b04ff86 100644 --- a/test/warn-mixin.js +++ b/test/warn-method.js @@ -1,8 +1,12 @@ -const t = require('tap') -const EE = require('events').EventEmitter -const warner = require('../lib/warn-mixin.js') +import t from 'tap' +import EE from 'events' +import { warnMethod } from '../dist/esm/warn-method.js' -const Warner = warner(EE) +class Warner extends EE { + warn(code, message, data = {}) { + return warnMethod(this, code, message, data) + } +} const w = new Warner() @@ -16,14 +20,20 @@ t.same(warning, ['code', 'hello', { tarCode: 'code', code: 'code' }]) warning.length = 0 w.once('warn', (code, msg, data) => warning.push(code, msg, data)) w.warn('ok', new Error('this is fine'), { foo: 'bar' }) -t.match(warning, ['ok', 'this is fine', { - message: 'this is fine', - foo: 'bar', -}]) +t.match(warning, [ + 'ok', + 'this is fine', + { + message: 'this is fine', + foo: 'bar', + }, +]) w.strict = true -t.throws(_ => w.warn('code', 'hello', { data: 123 }), - { message: 'hello', data: 123 }) +t.throws(_ => w.warn('code', 'hello', { data: 123 }), { + message: 'hello', + data: 123, +}) const poop = new Error('poop') t.throws(_ => w.warn('ok', poop), poop) @@ -33,5 +43,7 @@ w.cwd = 'some/dir' t.throws(_ => w.warn('ok', 'this is fine'), { cwd: 'some/dir' }) w.strict = false -t.throws(_ => w.warn('ok', 'this is fine', { recoverable: false }), - { cwd: 'some/dir', recoverable: false }) +t.throws(_ => w.warn('ok', 'this is fine', { recoverable: false }), { + cwd: 'some/dir', + recoverable: false, +}) diff --git a/test/winchars.js b/test/winchars.js index 120c581d..b1ffe12c 100644 --- a/test/winchars.js +++ b/test/winchars.js @@ -1,6 +1,5 @@ -'use strict' -const t = require('tap') -const wc = require('../lib/winchars.js') +import t from 
'tap' +import * as wc from '../dist/esm/winchars.js' t.equal(wc.encode('<>'), '\uf03c\uf03e', 'encode') t.equal(wc.decode(wc.encode('<>')), '<>', 'decode') diff --git a/test/writable-assignment-check.ts b/test/writable-assignment-check.ts new file mode 100644 index 00000000..a251bc80 --- /dev/null +++ b/test/writable-assignment-check.ts @@ -0,0 +1,14 @@ +import { Unpack } from '../src/unpack.js' +import { WriteEntry } from '../src/write-entry.js' +import { Parser } from '../src/parse.js' +import { fileURLToPath } from 'url' + +let tester: NodeJS.WritableStream +tester = new Parser() +tester = new Unpack() +tester = new WriteEntry(fileURLToPath(import.meta.url)) + +tester + +import { pass } from 'tap' +pass(`just making sure TS doesn't complain`) diff --git a/test/write-entry.js b/test/write-entry.js index b72e53a1..5e23b629 100644 --- a/test/write-entry.js +++ b/test/write-entry.js @@ -1,9 +1,27 @@ -'use strict' -const t = require('tap') -const mkdirp = require('mkdirp') +import t from 'tap' +import { mkdirp } from 'mkdirp' +import fs from 'fs' +import { ReadEntry } from '../dist/esm/read-entry.js' +import { makeTar } from './fixtures/make-tar.js' +import { + WriteEntry, + WriteEntrySync, + WriteEntryTar, +} from '../dist/esm/write-entry.js' +import path, { dirname } from 'path' +import { Header } from '../dist/esm/header.js' +import mutateFS from 'mutate-fs' +import { Parser } from '../dist/esm/parse.js' +import { rimraf } from 'rimraf' +import { normalizeWindowsPath as normPath } from '../dist/esm/normalize-windows-path.js' +import { fileURLToPath } from 'url' + +const { default: chmodr } = await import('chmodr') + +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) // make our tests verify that windows link targets get turned into / paths -const fs = require('fs') const { readlink, readlinkSync } = fs fs.readlink = (path, cb) => { readlink(path, (er, path) => { @@ -16,20 +34,10 @@ fs.readlink = (path, cb) => { } fs.readlinkSync = path => readlinkSync(path).replace(/\//g, '\\') -const ReadEntry = require('../lib/read-entry.js') -const makeTar = require('./make-tar.js') -const WriteEntry = require('../lib/write-entry.js') -const path = require('path') const fixtures = path.resolve(__dirname, 'fixtures') const files = path.resolve(fixtures, 'files') -const Header = require('../lib/header.js') -const mutateFS = require('mutate-fs') process.env.USER = 'isaacs' -const chmodr = require('chmodr') -const Parser = require('../lib/parse.js') -const rimraf = require('rimraf') const isWindows = process.platform === 'win32' -const normPath = require('../lib/normalize-windows-path.js') t.test('set up', t => { const one = fs.statSync(files + '/hardlink-1') @@ -49,17 +57,25 @@ t.test('100 byte filename', t => { t.plan(2) const runTest = t => { - const f = '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' + const f = + '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' + let entryInOWE = undefined const ws = new WriteEntry(f, { cwd: files, linkCache: linkCache, statCache: statCache, + onWriteEntry: self => { + entryInOWE = self + t.equal(self.path, f) + t.equal(self.header, undefined) + }, }) let out = [] ws.on('data', c => out.push(c)) ws.on('end', _ => { out = Buffer.concat(out) + t.equal(entryInOWE, ws) t.match(ws, { header: { cksumValid: true, @@ -75,7 +91,7 @@ t.test('100 byte filename', t => { }, }) - const wss = new WriteEntry.Sync(f, { + const wss = new 
WriteEntrySync(f, { cwd: files, linkCache: linkCache, statCache: statCache, @@ -83,8 +99,10 @@ t.test('100 byte filename', t => { linkCache = ws.linkCache statCache = ws.statCache - t.equal(out.slice(512).toString('hex'), - wss.read().slice(512).toString('hex')) + t.equal( + out.slice(512).toString('hex'), + wss.read().subarray(512).toString('hex'), + ) t.equal(out.length, 1024) t.equal(out.slice(0, 100).toString(), f) @@ -102,23 +120,25 @@ t.test('100 byte filename', t => { devmin: 0, }) - t.equal(out.slice(512).toString('hex'), - '6363636363636363636363636363636363636363636363636363636363636363' + + t.equal( + out.slice(512).toString('hex'), '6363636363636363636363636363636363636363636363636363636363636363' + - '6363636363636363636363636363636363636363636363636363636363636363' + - '6363636300000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000') + '6363636363636363636363636363636363636363636363636363636363636363' + + '6363636363636363636363636363636363636363636363636363636363636363' + + '6363636300000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + ) t.end() }) @@ -151,7 +171,7 @@ t.test('directory', t => { }) t.equal(out.length, 512) - const wss = new WriteEntry.Sync('dir', { cwd: files }) + const wss = new WriteEntrySync('dir', { cwd: files }) t.equal(wss.read().length, 512) t.match(wss.header, { cksumValid: true, @@ -193,34 +213,38 @@ t.test('empty path for cwd', t => { }) }) -t.test('symlink', { - skip: isWindows && 'symlinks not fully supported', -}, t => { - const ws = new WriteEntry('symlink', { cwd: files }) - let out = [] - ws.on('data', c => out.push(c)) - const header = { - cksumValid: true, - needPax: false, - path: 'symlink', - size: 0, - linkpath: 'hardlink-2', - uname: 'isaacs', - gname: null, - devmaj: 0, - devmin: 0, - } +t.test( + 'symlink', + { + skip: isWindows && 'symlinks not fully 
supported', + }, + t => { + const ws = new WriteEntry('symlink', { cwd: files }) + let out = [] + ws.on('data', c => out.push(c)) + const header = { + cksumValid: true, + needPax: false, + path: 'symlink', + size: 0, + linkpath: 'hardlink-2', + uname: 'isaacs', + gname: null, + devmaj: 0, + devmin: 0, + } - const wss = new WriteEntry.Sync('symlink', { cwd: files }) - t.match(wss.header, header) + const wss = new WriteEntrySync('symlink', { cwd: files }) + t.match(wss.header, header) - ws.on('end', _ => { - out = Buffer.concat(out) - t.equal(out.length, 512) - t.match(ws.header, header) - t.end() - }) -}) + ws.on('end', _ => { + out = Buffer.concat(out) + t.equal(out.length, 512) + t.match(ws.header, header) + t.end() + }) + }, +) t.test('zero-byte file', t => { const ws = new WriteEntry('files/zero-byte.txt', { cwd: fixtures }) @@ -253,7 +277,8 @@ t.test('zero-byte file, but close fails', t => { const ws = new WriteEntry('files/1024-bytes.txt', { cwd: fixtures }) ws.on('end', _ => - t.fail('should not get an end, because the close fails')) + t.fail('should not get an end, because the close fails'), + ) ws.on('error', er => { t.match(er, { message: 'poop' }) @@ -263,7 +288,7 @@ t.test('zero-byte file, but close fails', t => { }) t.test('hardlinks', t => { - const wss = new WriteEntry.Sync('hardlink-1', { + const wss = new WriteEntrySync('hardlink-1', { cwd: files, }) @@ -298,7 +323,9 @@ t.test('hardlinks far away', t => { const h1 = 'hardlink-1' const f = path.resolve(files, h1) const stat = fs.statSync(f) - const linkCache = new Map([[stat.dev + ':' + stat.ino, '/a/b/c/d/e']]) + const linkCache = new Map([ + [stat.dev + ':' + stat.ino, '/a/b/c/d/e'], + ]) const ws = new WriteEntry('files/hardlink-2', { cwd: fixtures, @@ -327,7 +354,8 @@ t.test('hardlinks far away', t => { }) t.test('really deep path', t => { - const f = 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' + const f = + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' const ws = new WriteEntry(f, { cwd: files }) let out = [] ws.on('data', c => out.push(c)) @@ -352,7 +380,8 @@ t.test('really deep path', t => { }) t.test('no pax', t => { - const f = 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' + const f = + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' const ws = new WriteEntry(f, { cwd: files, noPax: true }) let out = [] ws.on('data', c => out.push(c)) @@ -381,7 +410,7 @@ t.test('nonexistent file', t => { const ws = new WriteEntry('does not exist', { cwd: files }) ws.on('error', er => { t.match(er, { - message: 'ENOENT: no such file or directory, lstat \'' + f + '\'', + message: "ENOENT: no such file or directory, lstat '" + f + "'", code: 'ENOENT', path: f, syscall: 'lstat', @@ -395,7 +424,7 @@ t.test('absolute path', t => { const { root } = path.parse(absolute) const f = root + root + root + absolute const warn = 
normPath(isWindows ? root : root + root + root + root) - t.test('preservePaths=false strict=false', t => { + t.test('preservePaths=false strict=false warn=' + warn, t => { const warnings = [] // on windows, c:\c:\c:\... is a valid path, so just use the // single-root absolute version of it. @@ -408,11 +437,13 @@ t.test('absolute path', t => { ws.on('end', _ => { out = Buffer.concat(out) t.equal(out.length, 1024) - t.match(warnings, [[ - 'TAR_ENTRY_INFO', - `stripping ${warn} from absolute path`, - { path: normPath(isWindows ? absolute : f) }, - ]]) + t.match(warnings, [ + [ + 'TAR_ENTRY_INFO', + `stripping ${warn} from absolute path`, + { path: normPath(isWindows ? absolute : f) }, + ], + ]) t.match(ws.header, { cksumValid: true, @@ -467,22 +498,25 @@ t.test('absolute path', t => { }) t.test('preservePaths=false strict=true', t => { - t.throws(_ => { - new WriteEntry(isWindows ? absolute : f, { - strict: true, - cwd: files, - }) - }, { - message: /stripping .* from absolute path/, - path: normPath(isWindows ? absolute : f), - }) + t.throws( + _ => { + new WriteEntry(isWindows ? absolute : f, { + strict: true, + cwd: files, + }) + }, + { + message: /stripping .* from absolute path/, + path: normPath(isWindows ? absolute : f), + }, + ) t.end() }) t.end() }) -t.throws(_ => new WriteEntry(null), new TypeError('path is required')) +t.throws(() => new WriteEntry(null), TypeError) t.test('no user environ, sets uname to empty string', t => { delete process.env.USER @@ -508,31 +542,35 @@ t.test('no user environ, sets uname to empty string', t => { }) }) -t.test('an unsuppored type', { - skip: isWindows && '/dev/random on windows', -}, t => { - const ws = new WriteEntry('/dev/random', { preservePaths: true }) - ws.on('data', c => { - throw new Error('should not get data from random') - }) - ws.on('stat', stat => { - t.match(stat, { - dev: Number, - mode: 0o020666, - nlink: 1, - rdev: Number, - blksize: Number, - ino: Number, - size: 0, - blocks: 0, +t.test( + 'an unsuppored type', + { + skip: isWindows && '/dev/random on windows', + }, + t => { + const ws = new WriteEntry('/dev/random', { preservePaths: true }) + ws.on('data', _chunk => { + throw new Error('should not get data from random') }) - t.ok(stat.isCharacterDevice(), 'random is a character device') - }) - ws.on('end', _ => { - t.match(ws, { type: 'Unsupported', path: '/dev/random' }) - t.end() - }) -}) + ws.on('stat', stat => { + t.match(stat, { + dev: Number, + mode: 0o020666, + nlink: 1, + rdev: Number, + blksize: Number, + ino: Number, + size: 0, + blocks: 0, + }) + t.ok(stat.isCharacterDevice(), 'random is a character device') + }) + ws.on('end', _ => { + t.match(ws, { type: 'Unsupported', path: '/dev/random' }) + t.end() + }) + }, +) t.test('readlink fail', t => { const expect = { @@ -542,23 +580,31 @@ t.test('readlink fail', t => { // pretend everything is a symbolic link, then read something that isn't t.teardown(mutateFS.statType('SymbolicLink')) t.throws(_ => { - return new WriteEntry.Sync('write-entry.js', { cwd: __dirname }) + return new WriteEntrySync('write-entry.js', { cwd: __dirname }) }, expect) - new WriteEntry('write-entry.js', { cwd: __dirname }).on('error', er => { - t.match(er, expect) - t.equal(normPath(er.path), normPath(__filename)) - t.end() - }) + new WriteEntry('write-entry.js', { cwd: __dirname }).on( + 'error', + er => { + t.match(er, expect) + t.equal(normPath(er.path), normPath(__filename)) + t.end() + }, + ) }) t.test('open fail', t => { t.teardown(mutateFS.fail('open', new Error('pwn'))) - t.throws(_ => new 
WriteEntry.Sync('write-entry.js', { cwd: __dirname }), - { message: 'pwn' }) - new WriteEntry('write-entry.js', { cwd: __dirname }).on('error', er => { - t.match(er, { message: 'pwn' }) - t.end() - }) + t.throws( + _ => new WriteEntrySync('write-entry.js', { cwd: __dirname }), + { message: 'pwn' }, + ) + new WriteEntry('write-entry.js', { cwd: __dirname }).on( + 'error', + er => { + t.match(er, { message: 'pwn' }) + t.end() + }, + ) }) t.test('read fail', t => { @@ -568,16 +614,22 @@ t.test('read fail', t => { syscall: 'read', } // pretend everything is a file, then read something that isn't - t.teardown(mutateFS.statMutate((er, st) => { - if (er) { - return [er, st] - } - st.isFile = () => true - st.size = 123 - })) - t.throws(_ => new WriteEntry.Sync('fixtures', { - cwd: __dirname, - }), expect) + t.teardown( + mutateFS.statMutate((er, st) => { + if (er) { + return [er, st] + } + st.isFile = () => true + st.size = 123 + }), + ) + t.throws( + _ => + new WriteEntrySync('fixtures', { + cwd: __dirname, + }), + expect, + ) new WriteEntry('fixtures', { cwd: __dirname }).on('error', er => { t.match(er, expect) t.end() @@ -585,27 +637,34 @@ t.test('read fail', t => { }) t.test('read invalid EOF', t => { - t.teardown(mutateFS.mutate('read', (er, br) => [er, 0])) + t.teardown(mutateFS.mutate('read', (er, _bytesRead) => [er, 0])) const expect = { message: 'encountered unexpected EOF', path: normPath(__filename), syscall: 'read', code: 'EOF', } - t.throws(_ => new WriteEntry.Sync('write-entry.js', { cwd: __dirname }), - expect) - new WriteEntry('write-entry.js', { cwd: __dirname }).on('error', er => { - t.match(er, expect) - t.end() - }) + t.throws( + _ => new WriteEntrySync('write-entry.js', { cwd: __dirname }), + expect, + ) + new WriteEntry('write-entry.js', { cwd: __dirname }).on( + 'error', + er => { + t.match(er, expect) + t.end() + }, + ) }) t.test('read overflow expectation', t => { - t.teardown(mutateFS.statMutate((er, st) => { - if (st) { - st.size = 3 - } - })) + t.teardown( + mutateFS.statMutate((_er, st) => { + if (st) { + st.size = 3 + } + }), + ) const f = '512-bytes.txt' const expect = { message: 'did not encounter expected EOF', @@ -614,17 +673,23 @@ t.test('read overflow expectation', t => { code: 'EOF', } t.plan(2) - t.throws(_ => new WriteEntry.Sync(f, { cwd: files, maxReadSize: 2 }), expect) - new WriteEntry(f, { cwd: files, maxReadSize: 2 }).on('error', er => { - t.match(er, expect) - }).resume() + t.throws( + _ => new WriteEntrySync(f, { cwd: files, maxReadSize: 2 }), + expect, + ) + new WriteEntry(f, { cwd: files, maxReadSize: 2 }) + .on('error', er => { + t.match(er, expect) + }) + .resume() }) t.test('short reads', t => { t.teardown(mutateFS.zenoRead()) const cases = { '1024-bytes.txt': new Array(1024).join('x') + '\n', - '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': new Array(101).join('c'), + '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + new Array(101).join('c'), } const maxReadSize = [null, 1024, 100, 111] @@ -642,12 +707,21 @@ t.test('short reads', t => { ws.on('data', c => out.push(c)) ws.on('end', _ => { out = Buffer.concat(out) - t.equal(out.length, 512 * Math.ceil(1 + contents.length / 512)) - t.equal(out.slice(512).toString().replace(/\0.*$/, ''), contents) - const wss = new WriteEntry.Sync(filename, { cwd: files }) + t.equal( + out.length, + 512 * Math.ceil(1 + contents.length / 512), + ) + t.equal( + out.slice(512).toString().replace(/\0.*$/, ''), 
+ contents, + ) + const wss = new WriteEntrySync(filename, { cwd: files }) const syncOut = wss.read() t.equal(syncOut.length, out.length) - t.equal(syncOut.slice(512).toString(), out.slice(512).toString()) + t.equal( + syncOut.subarray(512).toString(), + out.slice(512).toString(), + ) t.end() }) }) @@ -658,53 +732,67 @@ t.test('short reads', t => { t.end() }) -t.test('win32 path conversion', { - skip: isWindows && 'no need to test on windows', -}, t => { - const ws = new WriteEntry('long-path\\r', { - cwd: files, - win32: true, - }) - t.equal(ws.path, 'long-path/r') - t.end() -}) - -t.test('win32 <|>? in paths', { - skip: isWindows && 'do not create annoying junk on windows systems', -}, t => { - const file = path.resolve(fixtures, '<|>?.txt') - const uglyName = Buffer.from('ef80bcef81bcef80beef80bf2e747874', 'hex').toString() - const ugly = path.resolve(fixtures, uglyName) - t.teardown(_ => { - rimraf.sync(file) - rimraf.sync(ugly) - }) +t.test( + 'win32 path conversion', + { + skip: isWindows && 'no need to test on windows', + }, + t => { + const ws = new WriteEntry('long-path\\r', { + cwd: files, + win32: true, + }) + t.equal(ws.path, 'long-path/r') + t.end() + }, +) + +t.test( + 'win32 <|>? in paths', + { + skip: + isWindows && 'do not create annoying junk on windows systems', + }, + t => { + const file = path.resolve(fixtures, '<|>?.txt') + const uglyName = Buffer.from( + 'ef80bcef81bcef80beef80bf2e747874', + 'hex', + ).toString() + const ugly = path.resolve(fixtures, uglyName) + t.teardown(_ => { + rimraf.sync(file) + rimraf.sync(ugly) + }) - fs.writeFileSync(ugly, '<|>?') + fs.writeFileSync(ugly, '<|>?') - const wc = new WriteEntry(uglyName, { - cwd: fixtures, - win32: true, - }) + const wc = new WriteEntry(uglyName, { + cwd: fixtures, + win32: true, + }) - const out = [] - wc.on('data', c => out.push(c)) - wc.on('end', _ => { - const data = Buffer.concat(out).toString() - t.equal(data.slice(0, 4), '<|>?') - t.end() - }) + const out = [] + wc.on('data', c => out.push(c)) + wc.on('end', _ => { + const data = Buffer.concat(out).toString() + t.equal(data.slice(0, 4), '<|>?') + t.end() + }) - t.equal(wc.path, '<|>?.txt') - t.equal(wc.absolute, ugly) -}) + t.equal(wc.path, '<|>?.txt') + t.equal(wc.absolute, ugly) + }, +) t.test('uid doesnt match, dont set uname', t => { - t.teardown(mutateFS.statMutate((er, st) => { - if (st) { - st.uid -= 1 - } - })) + t.teardown( + mutateFS.statMutate((_er, st) => { + if (st) { + st.uid -= 1 + } + }), + ) const ws = new WriteEntry('long-path/r', { cwd: files, }) @@ -721,17 +809,17 @@ t.test('override absolute to some other file', t => { ws.on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 1024) - t.match(data.slice(512).toString(), /^a\0{511}$/) + t.match(data.subarray(512).toString(), /^a\0{511}$/) t.match(ws, { path: 'blerg', header: { size: 1 }, }) - const wss = new WriteEntry.Sync('blerg', { + const wss = new WriteEntrySync('blerg', { absolute: files + '/one-byte.txt', }) const sdata = wss.read() t.equal(sdata.length, 1024) - t.match(sdata.slice(512).toString(), /^a\0{511}$/) + t.match(sdata.subarray(512).toString(), /^a\0{511}$/) t.match(wss, { path: 'blerg', header: { size: 1 }, @@ -741,7 +829,8 @@ t.test('override absolute to some other file', t => { }) t.test('portable entries, nothing platform-specific', t => { - const om = 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt' + const om = + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt' const ws = new WriteEntry(om, { cwd: files, portable: true, 
@@ -775,7 +864,7 @@ t.test('portable entries, nothing platform-specific', t => {
   }
 
   const ps = new Parser()
-  const wss = new WriteEntry.Sync(om, {
+  const wss = new WriteEntrySync(om, {
     cwd: files,
     portable: true,
   })
@@ -795,7 +884,8 @@ t.test('portable entries, nothing platform-specific', t => {
 })
 
 t.test('no mtime', t => {
-  const om = 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt'
+  const om =
+    'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt'
   const ws = new WriteEntry(om, {
     cwd: files,
     noMtime: true,
@@ -832,7 +922,7 @@ t.test('no mtime', t => {
   }
 
   const ps = new Parser()
-  const wss = new WriteEntry.Sync(om, {
+  const wss = new WriteEntrySync(om, {
     cwd: files,
     portable: true,
     noMtime: true,
@@ -853,7 +943,8 @@ t.test('no mtime', t => {
 })
 
 t.test('force mtime', t => {
-  const om = 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt'
+  const om =
+    'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt'
   const date = new Date('1979-07-01T19:10:00.000Z')
   const ws = new WriteEntry(om, {
     cwd: files,
@@ -891,7 +982,7 @@ t.test('force mtime', t => {
   }
 
   const ps = new Parser()
-  const wss = new WriteEntry.Sync(om, {
+  const wss = new WriteEntrySync(om, {
     cwd: files,
     portable: true,
     mtime: new Date('1979-07-01T19:10:00.000Z'),
@@ -930,7 +1021,7 @@ t.test('portable dir entries, no mtime', t => {
   }
 
   const ps = new Parser()
-  const wss = new WriteEntry.Sync(dir, {
+  const wss = new WriteEntrySync(dir, {
     cwd: files,
     portable: true,
   })
@@ -985,19 +1076,19 @@ t.test('write entry from read entry', t => {
 
   t.test('basic file', t => {
     const fileEntry = new ReadEntry(new Header(data))
-    const wetFile = new WriteEntry.Tar(fileEntry)
+    const wetFile = new WriteEntryTar(fileEntry)
     const out = []
     let wetFileEnded = false
     wetFile.on('data', c => out.push(c))
-    wetFile.on('end', _ => wetFileEnded = true)
-    fileEntry.write(data.slice(512, 550))
-    fileEntry.write(data.slice(550, 1000))
-    fileEntry.end(data.slice(1000, 1024))
+    wetFile.on('end', _ => (wetFileEnded = true))
+    fileEntry.write(data.subarray(512, 550))
+    fileEntry.write(data.subarray(550, 1000))
+    fileEntry.end(data.subarray(1000, 1024))
     t.equal(wetFileEnded, true)
     const result = Buffer.concat(out)
     t.equal(result.length, 1024)
     t.equal(result.toString().replace(/\0.*$/, ''), '$')
-    const body = result.slice(512).toString().replace(/\0*$/, '')
+    const body = result.subarray(512).toString().replace(/\0*$/, '')
     t.equal(body, '$$$$$$$$$$')
     t.end()
   })
@@ -1019,11 +1110,22 @@ t.test('write entry from read entry', t => {
       '',
     ])
     const fileEntry = new ReadEntry(new Header(data))
-    const wetFile = new WriteEntry.Tar(fileEntry, { portable: true })
+    let entryInOWE = undefined
+    const wetFile = new WriteEntryTar(fileEntry, {
+      portable: true,
+      onWriteEntry: self => {
+        entryInOWE = self
+        t.equal(self.path, '$')
+        t.equal(self.header, undefined)
+      },
+    })
     const out = []
     let wetFileEnded = false
     wetFile.on('data', c => out.push(c))
-    wetFile.on('end', _ => wetFileEnded = true)
+    wetFile.on('end', () => {
+      wetFileEnded = true
+      t.equal(entryInOWE, wetFile)
+    })
     fileEntry.end()
     t.equal(wetFileEnded, true)
     const result = Buffer.concat(out)
@@ -1034,19 +1136,19 @@ t.test('write entry from read entry', t => {
   t.test('with pax header', t => {
     const fileEntryPax = new ReadEntry(new Header(data))
     fileEntryPax.path = new Array(200).join('$')
-    const wetPax = new WriteEntry.Tar(fileEntryPax)
+    const wetPax = new WriteEntryTar(fileEntryPax)
     let wetPaxEnded = false
     const out = []
     wetPax.on('data', c => out.push(c))
-    wetPax.on('end', _ => wetPaxEnded = true)
-    fileEntryPax.write(data.slice(512, 550))
-    fileEntryPax.write(data.slice(550, 1000))
-    fileEntryPax.end(data.slice(1000, 1024))
+    wetPax.on('end', _ => (wetPaxEnded = true))
+    fileEntryPax.write(data.subarray(512, 550))
+    fileEntryPax.write(data.subarray(550, 1000))
+    fileEntryPax.end(data.subarray(1000, 1024))
     t.equal(wetPaxEnded, true)
     const result = Buffer.concat(out)
     t.equal(result.length, 2048)
-    t.match(result.slice(1024, 1124).toString(), /^\$+\0?$/)
-    const body = result.slice(1536).toString().replace(/\0*$/, '')
+    t.match(result.subarray(1024, 1124).toString(), /^\$+\0?$/)
+    const body = result.subarray(1536).toString().replace(/\0*$/, '')
     t.match(new Header(result), { type: 'ExtendedHeader' })
     t.equal(body, '$$$$$$$$$$')
     t.end()
   })
@@ -1055,26 +1157,28 @@ t.test('pax and portable', t => {
     const fileEntryPax = new ReadEntry(new Header(data))
     fileEntryPax.path = new Array(200).join('$')
-    const wetPax = new WriteEntry.Tar(fileEntryPax, { portable: true })
+    const wetPax = new WriteEntryTar(fileEntryPax, {
+      portable: true,
+    })
     let wetPaxEnded = false
     const out = []
     wetPax.on('data', c => out.push(c))
-    wetPax.on('end', _ => wetPaxEnded = true)
-    fileEntryPax.write(data.slice(512, 550))
-    fileEntryPax.write(data.slice(550, 1000))
-    fileEntryPax.end(data.slice(1000, 1024))
+    wetPax.on('end', _ => (wetPaxEnded = true))
+    fileEntryPax.write(data.subarray(512, 550))
+    fileEntryPax.write(data.subarray(550, 1000))
+    fileEntryPax.end(data.subarray(1000, 1024))
     t.equal(wetPaxEnded, true)
     const result = Buffer.concat(out)
     t.equal(result.length, 2048)
-    t.match(result.slice(1024, 1124).toString(), /^\$+\0?$/)
+    t.match(result.subarray(1024, 1124).toString(), /^\$+\0?$/)
     t.match(new Header(result), { type: 'ExtendedHeader' })
-    t.match(new Header(result.slice(1024)), {
+    t.match(new Header(result.subarray(1024)), {
       ctime: null,
       atime: null,
       uname: '',
       gname: '',
     })
-    const body = result.slice(1536).toString().replace(/\0*$/, '')
+    const body = result.subarray(1536).toString().replace(/\0*$/, '')
     t.equal(body, '$$$$$$$$$$')
     t.end()
   })
@@ -1082,30 +1186,30 @@ t.test('write entry from read entry', t => {
   t.test('pax, portable, and noMtime', t => {
     const fileEntryPax = new ReadEntry(new Header(data))
     fileEntryPax.path = new Array(200).join('$')
-    const wetPax = new WriteEntry.Tar(fileEntryPax, {
+    const wetPax = new WriteEntryTar(fileEntryPax, {
       noMtime: true,
       portable: true,
     })
     let wetPaxEnded = false
     const out = []
     wetPax.on('data', c => out.push(c))
-    wetPax.on('end', _ => wetPaxEnded = true)
-    fileEntryPax.write(data.slice(512, 550))
-    fileEntryPax.write(data.slice(550, 1000))
-    fileEntryPax.end(data.slice(1000, 1024))
+    wetPax.on('end', _ => (wetPaxEnded = true))
+    fileEntryPax.write(data.subarray(512, 550))
+    fileEntryPax.write(data.subarray(550, 1000))
+    fileEntryPax.end(data.subarray(1000, 1024))
     t.equal(wetPaxEnded, true)
     const result = Buffer.concat(out)
     t.equal(result.length, 2048)
-    t.match(result.slice(1024, 1124).toString(), /^\$+\0?$/)
+    t.match(result.subarray(1024, 1124).toString(), /^\$+\0?$/)
     t.match(new Header(result), { type: 'ExtendedHeader' })
-    t.match(new Header(result.slice(1024)), {
+    t.match(new Header(result.subarray(1024)), {
       mtime: null,
       ctime: null,
       atime: null,
       uname: '',
       gname: '',
     })
-    const body = result.slice(1536).toString().replace(/\0*$/, '')
+    const body = result.subarray(1536).toString().replace(/\0*$/, '')
     t.equal(body, '$$$$$$$$$$')
     t.end()
   })
@@ -1116,18 +1220,22 @@ t.test('write entry from read entry', t => {
 
     t.test('warn', t => {
       const warnings = []
-      new WriteEntry.Tar(fileEntry, {
+      new WriteEntryTar(fileEntry, {
         onwarn: (code, msg, data) => warnings.push(code, msg, data),
       })
-      t.match(warnings, ['TAR_ENTRY_INFO', 'stripping / from absolute path', {
-        path: '/a/b/c',
-      }])
+      t.match(warnings, [
+        'TAR_ENTRY_INFO',
+        'stripping / from absolute path',
+        {
+          path: '/a/b/c',
+        },
+      ])
       t.end()
     })
 
     t.test('preserve', t => {
       const warnings = []
-      new WriteEntry.Tar(fileEntry, {
+      new WriteEntryTar(fileEntry, {
         onwarn: (code, msg, data) => warnings.push(code, msg, data),
         preservePaths: true,
       })
@@ -1136,41 +1244,48 @@ t.test('write entry from read entry', t => {
     })
 
     t.test('throw', t => {
-      t.throws(_ => new WriteEntry.Tar(fileEntry, {
-        strict: true,
-      }))
+      t.throws(
+        _ =>
+          new WriteEntryTar(fileEntry, {
+            strict: true,
+          }),
+      )
       t.end()
     })
 
     t.end()
   })
 
   t.test('no block remain', t => {
-    const readEntry = new ReadEntry(new Header({
-      size: 512,
-      type: 'File',
-      path: 'x',
-    }))
-    const wet = new WriteEntry.Tar(readEntry)
+    const readEntry = new ReadEntry(
+      new Header({
+        size: 512,
+        type: 'File',
+        path: 'x',
+      }),
+    )
+    const wet = new WriteEntryTar(readEntry)
     const out = []
     wet.on('data', c => out.push(c))
     let wetEnded = false
-    wet.on('end', _ => wetEnded = true)
+    wet.on('end', _ => (wetEnded = true))
     t.equal(wetEnded, false)
     readEntry.end(Buffer.from(new Array(513).join('@')))
     t.equal(wetEnded, true)
     const res = Buffer.concat(out)
     t.equal(res.length, 1024)
-    t.match(res.slice(512).toString(), /^@+$/)
+    t.match(res.subarray(512).toString(), /^@+$/)
     t.end()
   })
 
   t.test('write more than appropriate', t => {
-    const readEntry = new ReadEntry(new Header({
-      path: 'x',
-      type: 'File',
-      size: '1',
-    }))
-    const wet = new WriteEntry.Tar(readEntry)
+    const readEntry = new ReadEntry(
+      new Header({
+        path: 'x',
+        type: 'File',
+        size: '1',
+      }),
+    )
+    const wet = new WriteEntryTar(readEntry)
     t.throws(_ => wet.write(Buffer.from(new Array(1024).join('x'))))
     t.end()
   })
@@ -1212,7 +1327,9 @@ t.test('prefix and hard links', t => {
       path: 'PaxHeader/yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy',
       type: 'ExtendedHeader',
     },
-    new RegExp('^266 path=out.x.' + long + '[\\w\\W]*linkpath=out.x.target'),
+    new RegExp(
+      '^266 path=out.x.' + long + '[\\w\\W]*linkpath=out.x.target',
+    ),
     {
       path: 'out/x/yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy',
       type: 'Link',
@@ -1251,11 +1368,14 @@ t.test('prefix and hard links', t => {
     const data = Buffer.concat(out)
     expect.forEach((e, i) => {
       if (typeof e === 'string') {
-        t.equal(data.slice(i * 512, i * 512 + e.length).toString(), e)
+        t.equal(
+          data.subarray(i * 512, i * 512 + e.length).toString(),
+          e,
+        )
       } else if (e instanceof RegExp) {
-        t.match(data.slice(i * 512, (i + 1) * 512).toString(), e)
+        t.match(data.subarray(i * 512, (i + 1) * 512).toString(), e)
       } else {
-        t.match(new Header(data.slice(i * 512, (i + 1) * 512)), e)
+        t.match(new Header(data.subarray(i * 512, (i + 1) * 512)), e)
       }
     })
   }
@@ -1271,11 +1391,12 @@ t.test('prefix and hard links', t => {
       statCache,
     }
     const out = []
-    const entry = (path) => new Promise(resolve => {
-      const p = new Class(path, opt)
-      p.on('end', resolve)
-      p.on('data', d => out.push(d))
-    })
+    const entry = path =>
+      new Promise(resolve => {
+        const p = new Class(path, opt)
+        p.on('end', resolve)
+        p.on('data', d => out.push(d))
+      })
 
     await entry(path)
     if (path === '.') {
@@ -1299,8 +1420,8 @@ t.test('prefix and hard links', t => {
   })
 
   t.test('sync', t => {
-    t.test('.', t => runTest(t, '.', WriteEntry.Sync))
-    return t.test('./', t => runTest(t, './', WriteEntry.Sync))
+    t.test('.', t => runTest(t, '.', WriteEntrySync))
+    return t.test('./', t => runTest(t, './', WriteEntrySync))
   })
 
   t.end()
@@ -1328,7 +1449,9 @@ t.test('prefix and hard links from tar entries', t => {
       path: 'PaxHeader/yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy',
       type: 'ExtendedHeader',
     },
-    new RegExp('^266 path=out.x.' + long + '[\\w\\W]*linkpath=out.x.target'),
+    new RegExp(
+      '^266 path=out.x.' + long + '[\\w\\W]*linkpath=out.x.target',
+    ),
     {
       path: 'out/x/yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy',
       type: 'Link',
@@ -1425,16 +1548,19 @@ t.test('prefix and hard links from tar entries', t => {
     const data = Buffer.concat(out)
     expect.forEach((e, i) => {
       if (typeof e === 'string') {
-        t.equal(data.slice(i * 512, i * 512 + e.length).toString(), e)
+        t.equal(
+          data.subarray(i * 512, i * 512 + e.length).toString(),
+          e,
+        )
       } else if (e instanceof RegExp) {
-        t.match(data.slice(i * 512, (i + 1) * 512).toString(), e)
+        t.match(data.subarray(i * 512, (i + 1) * 512).toString(), e)
       } else {
-        t.match(new Header(data.slice(i * 512, (i + 1) * 512)), e)
+        t.match(new Header(data.subarray(i * 512, (i + 1) * 512)), e)
       }
     })
   }
 
-  const runTest = async (t, path) => {
+  const runTest = async (t, _path) => {
     const linkCache = new Map()
     const statCache = new Map()
     const opt = {
@@ -1446,8 +1572,8 @@ t.test('prefix and hard links from tar entries', t => {
     const out = []
     const parser = new Parser({
       strict: true,
-      onentry: readEntry => {
-        const p = new WriteEntry.Tar(readEntry, opt)
+      onReadEntry: readEntry => {
+        const p = new WriteEntryTar(readEntry, opt)
         p.on('data', d => out.push(d))
       },
     })
@@ -1520,9 +1646,12 @@ t.test('hard links and no prefix', t => {
     const data = Buffer.concat(out)
     expect.forEach((e, i) => {
       if (typeof e === 'string') {
-        t.equal(data.slice(i * 512, i * 512 + e.length).toString(), e)
+        t.equal(
+          data.subarray(i * 512, i * 512 + e.length).toString(),
+          e,
+        )
       } else {
-        t.match(new Header(data.slice(i * 512, (i + 1) * 512)), e)
+        t.match(new Header(data.subarray(i * 512, (i + 1) * 512)), e)
       }
     })
   }
@@ -1536,11 +1665,12 @@ t.test('hard links and no prefix', t => {
       statCache,
     }
     const out = []
-    const entry = (path) => new Promise(resolve => {
-      const p = new Class(path, opt)
-      p.on('end', resolve)
-      p.on('data', d => out.push(d))
-    })
+    const entry = path =>
+      new Promise(resolve => {
+        const p = new Class(path, opt)
+        p.on('end', resolve)
+        p.on('data', d => out.push(d))
+      })
 
     await entry(path)
     if (path === '.') {
@@ -1563,8 +1693,8 @@ t.test('hard links and no prefix', t => {
   })
 
   t.test('sync', t => {
-    t.test('.', t => runTest(t, '.', WriteEntry.Sync))
-    return t.test('./', t => runTest(t, './', WriteEntry.Sync))
+    t.test('.', t => runTest(t, '.', WriteEntrySync))
+    return t.test('./', t => runTest(t, './', WriteEntrySync))
   })
 
   t.end()
@@ -1665,16 +1795,19 @@ t.test('hard links from tar entries and no prefix', t => {
     const data = Buffer.concat(out)
     expect.forEach((e, i) => {
       if (typeof e === 'string') {
-        t.equal(data.slice(i * 512, i * 512 + e.length).toString(), e)
+        t.equal(
+          data.subarray(i * 512, i * 512 + e.length).toString(),
+          e,
+        )
      } else if (e instanceof RegExp) {
-        t.match(data.slice(i * 512, (i + 1) * 512).toString(), e)
+        t.match(data.subarray(i * 512, (i + 1) * 512).toString(), e)
      } else {
-        t.match(new Header(data.slice(i * 512, (i + 1) * 512)), e)
+        t.match(new Header(data.subarray(i * 512, (i + 1) * 512)), e)
      }
    })
  }
 
-  const runTest = async (t, path) => {
+  const runTest = async (t, _path) => {
     const linkCache = new Map()
     const statCache = new Map()
     const opt = {
@@ -1684,8 +1817,8 @@ t.test('hard links from tar entries and no prefix', t => {
     }
     const out = []
     const parser = new Parser({
-      onentry: readEntry => {
-        const p = new WriteEntry.Tar(readEntry, opt)
+      onReadEntry: readEntry => {
+        const p = new WriteEntryTar(readEntry, opt)
         p.on('data', d => out.push(d))
       },
     })
diff --git a/tsconfig.json b/tsconfig.json
new file mode 100644
index 00000000..7f39495d
--- /dev/null
+++ b/tsconfig.json
@@ -0,0 +1,18 @@
+{
+  "compilerOptions": {
+    "declaration": true,
+    "declarationMap": true,
+    "esModuleInterop": true,
+    "forceConsistentCasingInFileNames": true,
+    "inlineSources": true,
+    "jsx": "react",
+    "module": "nodenext",
+    "moduleResolution": "nodenext",
+    "noUncheckedIndexedAccess": true,
+    "resolveJsonModule": true,
+    "skipLibCheck": true,
+    "sourceMap": true,
+    "strict": true,
+    "target": "es2022"
+  }
+}