pax_global_header 0000666 0000000 0000000 00000000064 14466474510 0014525 g ustar 00root root 0000000 0000000 52 comment=8348d32797eadf1bad05fae1d8ba2af3da53cd44
fs-minipass-3.0.3/ 0000775 0000000 0000000 00000000000 14466474510 0013761 5 ustar 00root root 0000000 0000000 fs-minipass-3.0.3/.commitlintrc.js 0000664 0000000 0000000 00000000553 14466474510 0017104 0 ustar 00root root 0000000 0000000 /* This file is automatically added by @npmcli/template-oss. Do not edit. */
module.exports = {
extends: ['@commitlint/config-conventional'],
rules: {
'type-enum': [2, 'always', ['feat', 'fix', 'docs', 'deps', 'chore']],
'header-max-length': [2, 'always', 80],
'subject-case': [0, 'always', ['lower-case', 'sentence-case', 'start-case']],
},
}
fs-minipass-3.0.3/.eslintrc.js 0000664 0000000 0000000 00000000545 14466474510 0016224 0 ustar 00root root 0000000 0000000 /* This file is automatically added by @npmcli/template-oss. Do not edit. */
'use strict'
const { readdirSync: readdir } = require('fs')
const localConfigs = readdir(__dirname)
.filter((file) => file.startsWith('.eslintrc.local.'))
.map((file) => `./${file}`)
module.exports = {
root: true,
extends: [
'@npmcli',
...localConfigs,
],
}
fs-minipass-3.0.3/.github/ 0000775 0000000 0000000 00000000000 14466474510 0015321 5 ustar 00root root 0000000 0000000 fs-minipass-3.0.3/.github/CODEOWNERS 0000664 0000000 0000000 00000000132 14466474510 0016710 0 ustar 00root root 0000000 0000000 # This file is automatically added by @npmcli/template-oss. Do not edit.
* @npm/cli-team
fs-minipass-3.0.3/.github/ISSUE_TEMPLATE/ 0000775 0000000 0000000 00000000000 14466474510 0017504 5 ustar 00root root 0000000 0000000 fs-minipass-3.0.3/.github/ISSUE_TEMPLATE/bug.yml 0000664 0000000 0000000 00000002655 14466474510 0021014 0 ustar 00root root 0000000 0000000 # This file is automatically added by @npmcli/template-oss. Do not edit.
name: Bug
description: File a bug/issue
title: "[BUG]
"
labels: [ Bug, Needs Triage ]
body:
- type: checkboxes
attributes:
label: Is there an existing issue for this?
description: Please [search here](./issues) to see if an issue already exists for your problem.
options:
- label: I have searched the existing issues
required: true
- type: textarea
attributes:
label: Current Behavior
description: A clear & concise description of what you're experiencing.
validations:
required: false
- type: textarea
attributes:
label: Expected Behavior
description: A clear & concise description of what you expected to happen.
validations:
required: false
- type: textarea
attributes:
label: Steps To Reproduce
description: Steps to reproduce the behavior.
value: |
1. In this environment...
2. With this config...
3. Run '...'
4. See error...
validations:
required: false
- type: textarea
attributes:
label: Environment
description: |
examples:
- **npm**: 7.6.3
- **Node**: 13.14.0
- **OS**: Ubuntu 20.04
- **platform**: Macbook Pro
value: |
- npm:
- Node:
- OS:
- platform:
validations:
required: false
fs-minipass-3.0.3/.github/ISSUE_TEMPLATE/config.yml 0000664 0000000 0000000 00000000145 14466474510 0021474 0 ustar 00root root 0000000 0000000 # This file is automatically added by @npmcli/template-oss. Do not edit.
blank_issues_enabled: true
fs-minipass-3.0.3/.github/dependabot.yml 0000664 0000000 0000000 00000000615 14466474510 0020153 0 ustar 00root root 0000000 0000000 # This file is automatically added by @npmcli/template-oss. Do not edit.
version: 2
updates:
- package-ecosystem: npm
directory: /
schedule:
interval: daily
target-branch: "main"
allow:
- dependency-type: direct
versioning-strategy: increase-if-necessary
commit-message:
prefix: deps
prefix-development: chore
labels:
- "Dependencies"
fs-minipass-3.0.3/.github/matchers/ 0000775 0000000 0000000 00000000000 14466474510 0017127 5 ustar 00root root 0000000 0000000 fs-minipass-3.0.3/.github/matchers/tap.json 0000664 0000000 0000000 00000001204 14466474510 0020603 0 ustar 00root root 0000000 0000000 {
"//@npmcli/template-oss": "This file is automatically added by @npmcli/template-oss. Do not edit.",
"problemMatcher": [
{
"owner": "tap",
"pattern": [
{
"regexp": "^\\s*not ok \\d+ - (.*)",
"message": 1
},
{
"regexp": "^\\s*---"
},
{
"regexp": "^\\s*at:"
},
{
"regexp": "^\\s*line:\\s*(\\d+)",
"line": 1
},
{
"regexp": "^\\s*column:\\s*(\\d+)",
"column": 1
},
{
"regexp": "^\\s*file:\\s*(.*)",
"file": 1
}
]
}
]
}
fs-minipass-3.0.3/.github/settings.yml 0000664 0000000 0000000 00000001377 14466474510 0017714 0 ustar 00root root 0000000 0000000 # This file is automatically added by @npmcli/template-oss. Do not edit.
repository:
allow_merge_commit: false
allow_rebase_merge: true
allow_squash_merge: true
squash_merge_commit_title: PR_TITLE
squash_merge_commit_message: PR_BODY
delete_branch_on_merge: true
enable_automated_security_fixes: true
enable_vulnerability_alerts: true
branches:
- name: main
protection:
required_status_checks: null
enforce_admins: true
block_creations: true
required_pull_request_reviews:
required_approving_review_count: 1
require_code_owner_reviews: true
require_last_push_approval: true
dismiss_stale_reviews: true
restrictions:
apps: []
users: []
teams: [ "cli-team" ]
fs-minipass-3.0.3/.github/workflows/ 0000775 0000000 0000000 00000000000 14466474510 0017356 5 ustar 00root root 0000000 0000000 fs-minipass-3.0.3/.github/workflows/audit.yml 0000664 0000000 0000000 00000002132 14466474510 0021205 0 ustar 00root root 0000000 0000000 # This file is automatically added by @npmcli/template-oss. Do not edit.
name: Audit
on:
workflow_dispatch:
schedule:
# "At 08:00 UTC (01:00 PT) on Monday" https://crontab.guru/#0_8_*_*_1
- cron: "0 8 * * 1"
jobs:
audit:
name: Audit Dependencies
if: github.repository_owner == 'npm'
runs-on: ubuntu-latest
defaults:
run:
shell: bash
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: 18.x
- name: Install npm@latest
run: npm i --prefer-online --no-fund --no-audit -g npm@latest
- name: npm Version
run: npm -v
- name: Install Dependencies
run: npm i --ignore-scripts --no-audit --no-fund --package-lock
- name: Run Production Audit
run: npm audit --omit=dev
- name: Run Full Audit
run: npm audit --audit-level=none
fs-minipass-3.0.3/.github/workflows/ci-release.yml 0000664 0000000 0000000 00000015516 14466474510 0022122 0 ustar 00root root 0000000 0000000 # This file is automatically added by @npmcli/template-oss. Do not edit.
name: CI - Release
on:
workflow_dispatch:
inputs:
ref:
required: true
type: string
default: main
workflow_call:
inputs:
ref:
required: true
type: string
check-sha:
required: true
type: string
jobs:
lint-all:
name: Lint All
if: github.repository_owner == 'npm'
runs-on: ubuntu-latest
defaults:
run:
shell: bash
steps:
- name: Get Workflow Job
uses: actions/github-script@v6
if: inputs.check-sha
id: check-output
env:
JOB_NAME: "Lint All"
MATRIX_NAME: ""
with:
script: |
const { owner, repo } = context.repo
const { data } = await github.rest.actions.listJobsForWorkflowRun({
owner,
repo,
run_id: context.runId,
per_page: 100
})
const jobName = process.env.JOB_NAME + process.env.MATRIX_NAME
const job = data.jobs.find(j => j.name.endsWith(jobName))
const jobUrl = job?.html_url
const shaUrl = `${context.serverUrl}/${owner}/${repo}/commit/${{ inputs.check-sha }}`
let summary = `This check is assosciated with ${shaUrl}\n\n`
if (jobUrl) {
summary += `For run logs, click here: ${jobUrl}`
} else {
summary += `Run logs could not be found for a job with name: "${jobName}"`
}
return { summary }
- name: Create Check
uses: LouisBrunner/checks-action@v1.6.0
id: check
if: inputs.check-sha
with:
token: ${{ secrets.GITHUB_TOKEN }}
status: in_progress
name: Lint All
sha: ${{ inputs.check-sha }}
output: ${{ steps.check-output.outputs.result }}
- name: Checkout
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: 18.x
- name: Install npm@latest
run: npm i --prefer-online --no-fund --no-audit -g npm@latest
- name: npm Version
run: npm -v
- name: Install Dependencies
run: npm i --ignore-scripts --no-audit --no-fund
- name: Lint
run: npm run lint --ignore-scripts
- name: Post Lint
run: npm run postlint --ignore-scripts
- name: Conclude Check
uses: LouisBrunner/checks-action@v1.6.0
if: steps.check.outputs.check_id && always()
with:
token: ${{ secrets.GITHUB_TOKEN }}
conclusion: ${{ job.status }}
check_id: ${{ steps.check.outputs.check_id }}
test-all:
name: Test All - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
if: github.repository_owner == 'npm'
strategy:
fail-fast: false
matrix:
platform:
- name: Linux
os: ubuntu-latest
shell: bash
- name: macOS
os: macos-latest
shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- 14.17.0
- 14.x
- 16.13.0
- 16.x
- 18.0.0
- 18.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Get Workflow Job
uses: actions/github-script@v6
if: inputs.check-sha
id: check-output
env:
JOB_NAME: "Test All"
MATRIX_NAME: " - ${{ matrix.platform.name }} - ${{ matrix.node-version }}"
with:
script: |
const { owner, repo } = context.repo
const { data } = await github.rest.actions.listJobsForWorkflowRun({
owner,
repo,
run_id: context.runId,
per_page: 100
})
const jobName = process.env.JOB_NAME + process.env.MATRIX_NAME
const job = data.jobs.find(j => j.name.endsWith(jobName))
const jobUrl = job?.html_url
const shaUrl = `${context.serverUrl}/${owner}/${repo}/commit/${{ inputs.check-sha }}`
let summary = `This check is assosciated with ${shaUrl}\n\n`
if (jobUrl) {
summary += `For run logs, click here: ${jobUrl}`
} else {
summary += `Run logs could not be found for a job with name: "${jobName}"`
}
return { summary }
- name: Create Check
uses: LouisBrunner/checks-action@v1.6.0
id: check
if: inputs.check-sha
with:
token: ${{ secrets.GITHUB_TOKEN }}
status: in_progress
name: Test All - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
sha: ${{ inputs.check-sha }}
output: ${{ steps.check-output.outputs.result }}
- name: Checkout
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
- name: Update Windows npm
# node 12 and 14 ship with npm@6, which is known to fail when updating itself in windows
if: matrix.platform.os == 'windows-latest' && (startsWith(matrix.node-version, '12.') || startsWith(matrix.node-version, '14.'))
run: |
curl -sO https://registry.npmjs.org/npm/-/npm-7.5.4.tgz
tar xf npm-7.5.4.tgz
cd package
node lib/npm.js install --no-fund --no-audit -g ..\npm-7.5.4.tgz
cd ..
rmdir /s /q package
- name: Install npm@7
if: startsWith(matrix.node-version, '10.')
run: npm i --prefer-online --no-fund --no-audit -g npm@7
- name: Install npm@latest
if: ${{ !startsWith(matrix.node-version, '10.') }}
run: npm i --prefer-online --no-fund --no-audit -g npm@latest
- name: npm Version
run: npm -v
- name: Install Dependencies
run: npm i --ignore-scripts --no-audit --no-fund
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
run: npm test --ignore-scripts
- name: Conclude Check
uses: LouisBrunner/checks-action@v1.6.0
if: steps.check.outputs.check_id && always()
with:
token: ${{ secrets.GITHUB_TOKEN }}
conclusion: ${{ job.status }}
check_id: ${{ steps.check.outputs.check_id }}
fs-minipass-3.0.3/.github/workflows/ci.yml 0000664 0000000 0000000 00000006307 14466474510 0020502 0 ustar 00root root 0000000 0000000 # This file is automatically added by @npmcli/template-oss. Do not edit.
name: CI
on:
workflow_dispatch:
pull_request:
push:
branches:
- main
schedule:
# "At 09:00 UTC (02:00 PT) on Monday" https://crontab.guru/#0_9_*_*_1
- cron: "0 9 * * 1"
jobs:
lint:
name: Lint
if: github.repository_owner == 'npm'
runs-on: ubuntu-latest
defaults:
run:
shell: bash
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: 18.x
- name: Install npm@latest
run: npm i --prefer-online --no-fund --no-audit -g npm@latest
- name: npm Version
run: npm -v
- name: Install Dependencies
run: npm i --ignore-scripts --no-audit --no-fund
- name: Lint
run: npm run lint --ignore-scripts
- name: Post Lint
run: npm run postlint --ignore-scripts
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
if: github.repository_owner == 'npm'
strategy:
fail-fast: false
matrix:
platform:
- name: Linux
os: ubuntu-latest
shell: bash
- name: macOS
os: macos-latest
shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- 14.17.0
- 14.x
- 16.13.0
- 16.x
- 18.0.0
- 18.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
- name: Update Windows npm
# node 12 and 14 ship with npm@6, which is known to fail when updating itself in windows
if: matrix.platform.os == 'windows-latest' && (startsWith(matrix.node-version, '12.') || startsWith(matrix.node-version, '14.'))
run: |
curl -sO https://registry.npmjs.org/npm/-/npm-7.5.4.tgz
tar xf npm-7.5.4.tgz
cd package
node lib/npm.js install --no-fund --no-audit -g ..\npm-7.5.4.tgz
cd ..
rmdir /s /q package
- name: Install npm@7
if: startsWith(matrix.node-version, '10.')
run: npm i --prefer-online --no-fund --no-audit -g npm@7
- name: Install npm@latest
if: ${{ !startsWith(matrix.node-version, '10.') }}
run: npm i --prefer-online --no-fund --no-audit -g npm@latest
- name: npm Version
run: npm -v
- name: Install Dependencies
run: npm i --ignore-scripts --no-audit --no-fund
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
run: npm test --ignore-scripts
fs-minipass-3.0.3/.github/workflows/codeql-analysis.yml 0000664 0000000 0000000 00000001547 14466474510 0023200 0 ustar 00root root 0000000 0000000 # This file is automatically added by @npmcli/template-oss. Do not edit.
name: CodeQL
on:
push:
branches:
- main
pull_request:
branches:
- main
schedule:
# "At 10:00 UTC (03:00 PT) on Monday" https://crontab.guru/#0_10_*_*_1
- cron: "0 10 * * 1"
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: javascript
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
fs-minipass-3.0.3/.github/workflows/post-dependabot.yml 0000664 0000000 0000000 00000011627 14466474510 0023200 0 ustar 00root root 0000000 0000000 # This file is automatically added by @npmcli/template-oss. Do not edit.
name: Post Dependabot
on: pull_request
permissions:
contents: write
jobs:
template-oss:
name: template-oss
if: github.repository_owner == 'npm' && github.actor == 'dependabot[bot]'
runs-on: ubuntu-latest
defaults:
run:
shell: bash
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.ref }}
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: 18.x
- name: Install npm@latest
run: npm i --prefer-online --no-fund --no-audit -g npm@latest
- name: npm Version
run: npm -v
- name: Install Dependencies
run: npm i --ignore-scripts --no-audit --no-fund
- name: Fetch Dependabot Metadata
id: metadata
uses: dependabot/fetch-metadata@v1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
# Dependabot can update multiple directories so we output which directory
# it is acting on so we can run the command for the correct root or workspace
- name: Get Dependabot Directory
if: contains(steps.metadata.outputs.dependency-names, '@npmcli/template-oss')
id: flags
run: |
dependabot_dir="${{ steps.metadata.outputs.directory }}"
if [[ "$dependabot_dir" == "/" ]]; then
echo "workspace=-iwr" >> $GITHUB_OUTPUT
else
# strip leading slash from directory so it works as a
# a path to the workspace flag
echo "workspace=-w ${dependabot_dir#/}" >> $GITHUB_OUTPUT
fi
- name: Apply Changes
if: steps.flags.outputs.workspace
id: apply
run: |
npm run template-oss-apply ${{ steps.flags.outputs.workspace }}
if [[ `git status --porcelain` ]]; then
echo "changes=true" >> $GITHUB_OUTPUT
fi
# This only sets the conventional commit prefix. This workflow can't reliably determine
# what the breaking change is though. If a BREAKING CHANGE message is required then
# this PR check will fail and the commit will be amended with stafftools
if [[ "${{ steps.metadata.outputs.update-type }}" == "version-update:semver-major" ]]; then
prefix='feat!'
else
prefix='chore'
fi
echo "message=$prefix: postinstall for dependabot template-oss PR" >> $GITHUB_OUTPUT
# This step will fail if template-oss has made any workflow updates. It is impossible
# for a workflow to update other workflows. In the case it does fail, we continue
# and then try to apply only a portion of the changes in the next step
- name: Push All Changes
if: steps.apply.outputs.changes
id: push
continue-on-error: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
git commit -am "${{ steps.apply.outputs.message }}"
git push
# If the previous step failed, then reset the commit and remove any workflow changes
# and attempt to commit and push again. This is helpful because we will have a commit
# with the correct prefix that we can then --amend with @npmcli/stafftools later.
- name: Push All Changes Except Workflows
if: steps.apply.outputs.changes && steps.push.outcome == 'failure'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
git reset HEAD~
git checkout HEAD -- .github/workflows/
git clean -fd .github/workflows/
git commit -am "${{ steps.apply.outputs.message }}"
git push
# Check if all the necessary template-oss changes were applied. Since we continued
# on errors in one of the previous steps, this check will fail if our follow up
# only applied a portion of the changes and we need to followup manually.
#
# Note that this used to run `lint` and `postlint` but that will fail this action
# if we've also shipped any linting changes separate from template-oss. We do
# linting in another action, so we want to fail this one only if there are
# template-oss changes that could not be applied.
- name: Check Changes
if: steps.apply.outputs.changes
run: |
npm exec --offline ${{ steps.flags.outputs.workspace }} -- template-oss-check
- name: Fail on Breaking Change
if: steps.apply.outputs.changes && startsWith(steps.apply.outputs.message, 'feat!')
run: |
echo "This PR has a breaking change. Run 'npx -p @npmcli/stafftools gh template-oss-fix'"
echo "for more information on how to fix this with a BREAKING CHANGE footer."
exit 1
fs-minipass-3.0.3/.github/workflows/pull-request.yml 0000664 0000000 0000000 00000002624 14466474510 0022547 0 ustar 00root root 0000000 0000000 # This file is automatically added by @npmcli/template-oss. Do not edit.
name: Pull Request
on:
pull_request:
types:
- opened
- reopened
- edited
- synchronize
jobs:
commitlint:
name: Lint Commits
if: github.repository_owner == 'npm'
runs-on: ubuntu-latest
defaults:
run:
shell: bash
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: 18.x
- name: Install npm@latest
run: npm i --prefer-online --no-fund --no-audit -g npm@latest
- name: npm Version
run: npm -v
- name: Install Dependencies
run: npm i --ignore-scripts --no-audit --no-fund
- name: Run Commitlint on Commits
id: commit
continue-on-error: true
run: |
npx --offline commitlint -V --from 'origin/${{ github.base_ref }}' --to ${{ github.event.pull_request.head.sha }}
- name: Run Commitlint on PR Title
if: steps.commit.outcome == 'failure'
env:
PR_TITLE: ${{ github.event.pull_request.title }}
run: |
echo "$PR_TITLE" | npx --offline commitlint -V
fs-minipass-3.0.3/.github/workflows/release.yml 0000664 0000000 0000000 00000034030 14466474510 0021521 0 ustar 00root root 0000000 0000000 # This file is automatically added by @npmcli/template-oss. Do not edit.
name: Release
on:
workflow_dispatch:
inputs:
release-pr:
description: a release PR number to rerun release jobs on
type: string
push:
branches:
- main
permissions:
contents: write
pull-requests: write
checks: write
jobs:
release:
outputs:
pr: ${{ steps.release.outputs.pr }}
release: ${{ steps.release.outputs.release }}
releases: ${{ steps.release.outputs.releases }}
branch: ${{ steps.release.outputs.pr-branch }}
pr-number: ${{ steps.release.outputs.pr-number }}
comment-id: ${{ steps.pr-comment.outputs.result }}
check-id: ${{ steps.check.outputs.check_id }}
name: Release
if: github.repository_owner == 'npm'
runs-on: ubuntu-latest
defaults:
run:
shell: bash
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: 18.x
- name: Install npm@latest
run: npm i --prefer-online --no-fund --no-audit -g npm@latest
- name: npm Version
run: npm -v
- name: Install Dependencies
run: npm i --ignore-scripts --no-audit --no-fund
- name: Release Please
id: release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
npx --offline template-oss-release-please "${{ github.ref_name }}" "${{ inputs.release-pr }}"
- name: Post Pull Request Comment
if: steps.release.outputs.pr-number
uses: actions/github-script@v6
id: pr-comment
env:
PR_NUMBER: ${{ steps.release.outputs.pr-number }}
REF_NAME: ${{ github.ref_name }}
with:
script: |
const { REF_NAME, PR_NUMBER: issue_number } = process.env
const { runId, repo: { owner, repo } } = context
const { data: workflow } = await github.rest.actions.getWorkflowRun({ owner, repo, run_id: runId })
let body = '## Release Manager\n\n'
const comments = await github.paginate(github.rest.issues.listComments, { owner, repo, issue_number })
let commentId = comments.find(c => c.user.login === 'github-actions[bot]' && c.body.startsWith(body))?.id
body += `Release workflow run: ${workflow.html_url}\n\n#### Force CI to Update This Release\n\n`
body += `This PR will be updated and CI will run for every non-\`chore:\` commit that is pushed to \`${REF_NAME}\`. `
body += `To force CI to update this PR, run this command:\n\n`
body += `\`\`\`\ngh workflow run release.yml -r ${REF_NAME} -R ${owner}/${repo} -f release-pr=${issue_number}\n\`\`\``
if (commentId) {
await github.rest.issues.updateComment({ owner, repo, comment_id: commentId, body })
} else {
const { data: comment } = await github.rest.issues.createComment({ owner, repo, issue_number, body })
commentId = comment?.id
}
return commentId
- name: Get Workflow Job
uses: actions/github-script@v6
if: steps.release.outputs.pr-sha
id: check-output
env:
JOB_NAME: "Release"
MATRIX_NAME: ""
with:
script: |
const { owner, repo } = context.repo
const { data } = await github.rest.actions.listJobsForWorkflowRun({
owner,
repo,
run_id: context.runId,
per_page: 100
})
const jobName = process.env.JOB_NAME + process.env.MATRIX_NAME
const job = data.jobs.find(j => j.name.endsWith(jobName))
const jobUrl = job?.html_url
const shaUrl = `${context.serverUrl}/${owner}/${repo}/commit/${{ steps.release.outputs.pr-sha }}`
let summary = `This check is assosciated with ${shaUrl}\n\n`
if (jobUrl) {
summary += `For run logs, click here: ${jobUrl}`
} else {
summary += `Run logs could not be found for a job with name: "${jobName}"`
}
return { summary }
- name: Create Check
uses: LouisBrunner/checks-action@v1.6.0
id: check
if: steps.release.outputs.pr-sha
with:
token: ${{ secrets.GITHUB_TOKEN }}
status: in_progress
name: Release
sha: ${{ steps.release.outputs.pr-sha }}
output: ${{ steps.check-output.outputs.result }}
update:
needs: release
outputs:
sha: ${{ steps.commit.outputs.sha }}
check-id: ${{ steps.check.outputs.check_id }}
name: Update - Release
if: github.repository_owner == 'npm' && needs.release.outputs.pr
runs-on: ubuntu-latest
defaults:
run:
shell: bash
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
ref: ${{ needs.release.outputs.branch }}
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: 18.x
- name: Install npm@latest
run: npm i --prefer-online --no-fund --no-audit -g npm@latest
- name: npm Version
run: npm -v
- name: Install Dependencies
run: npm i --ignore-scripts --no-audit --no-fund
- name: Run Post Pull Request Actions
env:
RELEASE_PR_NUMBER: ${{ needs.release.outputs.pr-number }}
RELEASE_COMMENT_ID: ${{ needs.release.outputs.comment-id }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
npm exec --offline -- template-oss-release-manager --lockfile=false --publish=true
npm run rp-pull-request --ignore-scripts --if-present
- name: Commit
id: commit
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
git commit --all --amend --no-edit || true
git push --force-with-lease
echo "sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
- name: Get Workflow Job
uses: actions/github-script@v6
if: steps.commit.outputs.sha
id: check-output
env:
JOB_NAME: "Update - Release"
MATRIX_NAME: ""
with:
script: |
const { owner, repo } = context.repo
const { data } = await github.rest.actions.listJobsForWorkflowRun({
owner,
repo,
run_id: context.runId,
per_page: 100
})
const jobName = process.env.JOB_NAME + process.env.MATRIX_NAME
const job = data.jobs.find(j => j.name.endsWith(jobName))
const jobUrl = job?.html_url
const shaUrl = `${context.serverUrl}/${owner}/${repo}/commit/${{ steps.commit.outputs.sha }}`
let summary = `This check is assosciated with ${shaUrl}\n\n`
if (jobUrl) {
summary += `For run logs, click here: ${jobUrl}`
} else {
summary += `Run logs could not be found for a job with name: "${jobName}"`
}
return { summary }
- name: Create Check
uses: LouisBrunner/checks-action@v1.6.0
id: check
if: steps.commit.outputs.sha
with:
token: ${{ secrets.GITHUB_TOKEN }}
status: in_progress
name: Release
sha: ${{ steps.commit.outputs.sha }}
output: ${{ steps.check-output.outputs.result }}
- name: Conclude Check
uses: LouisBrunner/checks-action@v1.6.0
if: needs.release.outputs.check-id && always()
with:
token: ${{ secrets.GITHUB_TOKEN }}
conclusion: ${{ job.status }}
check_id: ${{ needs.release.outputs.check-id }}
ci:
name: CI - Release
needs: [ release, update ]
if: needs.release.outputs.pr
uses: ./.github/workflows/ci-release.yml
with:
ref: ${{ needs.release.outputs.branch }}
check-sha: ${{ needs.update.outputs.sha }}
post-ci:
needs: [ release, update, ci ]
name: Post CI - Release
if: github.repository_owner == 'npm' && needs.release.outputs.pr && always()
runs-on: ubuntu-latest
defaults:
run:
shell: bash
steps:
- name: Get Needs Result
id: needs-result
run: |
result=""
if [[ "${{ contains(needs.*.result, 'failure') }}" == "true" ]]; then
result="failure"
elif [[ "${{ contains(needs.*.result, 'cancelled') }}" == "true" ]]; then
result="cancelled"
else
result="success"
fi
echo "result=$result" >> $GITHUB_OUTPUT
- name: Conclude Check
uses: LouisBrunner/checks-action@v1.6.0
if: needs.update.outputs.check-id && always()
with:
token: ${{ secrets.GITHUB_TOKEN }}
conclusion: ${{ steps.needs-result.outputs.result }}
check_id: ${{ needs.update.outputs.check-id }}
post-release:
needs: release
name: Post Release - Release
if: github.repository_owner == 'npm' && needs.release.outputs.releases
runs-on: ubuntu-latest
defaults:
run:
shell: bash
steps:
- name: Create Release PR Comment
uses: actions/github-script@v6
env:
RELEASES: ${{ needs.release.outputs.releases }}
with:
script: |
const releases = JSON.parse(process.env.RELEASES)
const { runId, repo: { owner, repo } } = context
const issue_number = releases[0].prNumber
let body = '## Release Workflow\n\n'
for (const { pkgName, version, url } of releases) {
body += `- \`${pkgName}@${version}\` ${url}\n`
}
const comments = await github.paginate(github.rest.issues.listComments, { owner, repo, issue_number })
.then(cs => cs.map(c => ({ id: c.id, login: c.user.login, body: c.body })))
console.log(`Found comments: ${JSON.stringify(comments, null, 2)}`)
const releaseComments = comments.filter(c => c.login === 'github-actions[bot]' && c.body.includes('Release is at'))
for (const comment of releaseComments) {
console.log(`Release comment: ${JSON.stringify(comment, null, 2)}`)
await github.rest.issues.deleteComment({ owner, repo, comment_id: comment.id })
}
const runUrl = `https://github.com/${owner}/${repo}/actions/runs/${runId}`
await github.rest.issues.createComment({
owner,
repo,
issue_number,
body: `${body}- Workflow run: :arrows_counterclockwise: ${runUrl}`,
})
release-integration:
needs: release
name: Release Integration
if: needs.release.outputs.release
runs-on: ubuntu-latest
defaults:
run:
shell: bash
permissions:
deployments: write
id-token: write
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: ${{ fromJSON(needs.release.outputs.release).tagName }}
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: 18.x
- name: Install npm@latest
run: |
npm i --prefer-online --no-fund --no-audit -g npm@latest
npm config set '//registry.npmjs.org/:_authToken'=\${PUBLISH_TOKEN}
- name: Publish
env:
PUBLISH_TOKEN: ${{ secrets.PUBLISH_TOKEN }}
run: npm publish --provenance
post-release-integration:
needs: [ release, release-integration ]
name: Post Release Integration - Release
if: github.repository_owner == 'npm' && needs.release.outputs.release && always()
runs-on: ubuntu-latest
defaults:
run:
shell: bash
steps:
- name: Get Needs Result
id: needs-result
run: |
if [[ "${{ contains(needs.*.result, 'failure') }}" == "true" ]]; then
result="x"
elif [[ "${{ contains(needs.*.result, 'cancelled') }}" == "true" ]]; then
result="heavy_multiplication_x"
else
result="white_check_mark"
fi
echo "result=$result" >> $GITHUB_OUTPUT
- name: Update Release PR Comment
uses: actions/github-script@v6
env:
PR_NUMBER: ${{ fromJSON(needs.release.outputs.release).prNumber }}
RESULT: ${{ steps.needs-result.outputs.result }}
with:
script: |
const { PR_NUMBER: issue_number, RESULT } = process.env
const { runId, repo: { owner, repo } } = context
const comments = await github.paginate(github.rest.issues.listComments, { owner, repo, issue_number })
const updateComment = comments.find(c =>
c.user.login === 'github-actions[bot]' &&
c.body.startsWith('## Release Workflow\n\n') &&
c.body.includes(runId)
)
if (updateComment) {
console.log('Found comment to update:', JSON.stringify(updateComment, null, 2))
let body = updateComment.body.replace(/Workflow run: :[a-z_]+:/, `Workflow run: :${RESULT}:`)
const tagCodeowner = RESULT !== 'white_check_mark'
if (tagCodeowner) {
body += `\n\n:rotating_light:`
body += ` @npm/cli-team: The post-release workflow failed for this release.`
body += ` Manual steps may need to be taken after examining the workflow output`
body += ` from the above workflow run. :rotating_light:`
}
await github.rest.issues.updateComment({
owner,
repo,
body,
comment_id: updateComment.id,
})
} else {
console.log('No matching comments found:', JSON.stringify(comments, null, 2))
}
fs-minipass-3.0.3/.gitignore 0000664 0000000 0000000 00000000707 14466474510 0015755 0 ustar 00root root 0000000 0000000 # This file is automatically added by @npmcli/template-oss. Do not edit.
# ignore everything in the root
/*
# keep these
!**/.gitignore
!/.commitlintrc.js
!/.eslintrc.js
!/.eslintrc.local.*
!/.github/
!/.gitignore
!/.npmrc
!/.release-please-manifest.json
!/bin/
!/CHANGELOG*
!/CODE_OF_CONDUCT.md
!/CONTRIBUTING.md
!/docs/
!/lib/
!/LICENSE*
!/map.js
!/package.json
!/README*
!/release-please-config.json
!/scripts/
!/SECURITY.md
!/tap-snapshots/
!/test/
fs-minipass-3.0.3/.npmrc 0000664 0000000 0000000 00000000135 14466474510 0015100 0 ustar 00root root 0000000 0000000 ; This file is automatically added by @npmcli/template-oss. Do not edit.
package-lock=false
fs-minipass-3.0.3/.release-please-manifest.json 0000664 0000000 0000000 00000000023 14466474510 0021420 0 ustar 00root root 0000000 0000000 {
".": "3.0.3"
}
fs-minipass-3.0.3/CHANGELOG.md 0000664 0000000 0000000 00000003051 14466474510 0015571 0 ustar 00root root 0000000 0000000 # Changelog
## [3.0.3](https://github.com/npm/fs-minipass/compare/v3.0.2...v3.0.3) (2023-08-14)
### Dependencies
* [`f112e83`](https://github.com/npm/fs-minipass/commit/f112e83a49192644b44fb3a7067b1558ce3eacba) [#43](https://github.com/npm/fs-minipass/pull/43) bump minipass from 5.0.0 to 7.0.3
## [3.0.2](https://github.com/npm/fs-minipass/compare/v3.0.1...v3.0.2) (2023-04-26)
### Dependencies
* [`b06a2c0`](https://github.com/npm/fs-minipass/commit/b06a2c08ef7498a77cfbe1d90532822b250f6ec8) [#30](https://github.com/npm/fs-minipass/pull/30) bump minipass from 4.2.7 to 5.0.0 (#30)
## [3.0.1](https://github.com/npm/fs-minipass/compare/v3.0.0...v3.0.1) (2023-01-30)
### Bug Fixes
* [`97116ba`](https://github.com/npm/fs-minipass/commit/97116ba3e5644ee0b295a49d4f92358693b0a823) [#25](https://github.com/npm/fs-minipass/pull/25) only flush the queue after open if not already writing (#25) (@nlf)
## [3.0.0](https://github.com/npm/fs-minipass/compare/v2.1.0...v3.0.0) (2022-12-12)
### ⚠️ BREAKING CHANGES
* `fs-minipass` is now compatible with the following semver range for node: `^14.17.0 || ^16.13.0 || >=18.0.0`
### Features
* [`bc88a4e`](https://github.com/npm/fs-minipass/commit/bc88a4e53a1b20d9e3856376c4bf5c17e56dd994) add template-oss (@lukekarrys)
### Dependencies
* [`36cb4bc`](https://github.com/npm/fs-minipass/commit/36cb4bc20613b7228dc3d1b6cd52fd1d37b6d449) [#19](https://github.com/npm/fs-minipass/pull/19) `minipass@4.0.0`
* [`1f1a5eb`](https://github.com/npm/fs-minipass/commit/1f1a5ebeccfa277cd65e3d2456d529146f98a6f5) `mutate-fs@2.1.1`
fs-minipass-3.0.3/CODE_OF_CONDUCT.md 0000664 0000000 0000000 00000000507 14466474510 0016562 0 ustar 00root root 0000000 0000000
All interactions in this repo are covered by the [npm Code of
Conduct](https://docs.npmjs.com/policies/conduct)
The npm cli team may, at its own discretion, moderate, remove, or edit
any interactions such as pull requests, issues, and comments.
fs-minipass-3.0.3/CONTRIBUTING.md 0000664 0000000 0000000 00000005133 14466474510 0016214 0 ustar 00root root 0000000 0000000
# Contributing
## Code of Conduct
All interactions in the **npm** organization on GitHub are considered to be covered by our standard [Code of Conduct](https://docs.npmjs.com/policies/conduct).
## Reporting Bugs
Before submitting a new bug report please search for an existing or similar report.
Use one of our existing issue templates if you believe you've come across a unique problem.
Duplicate issues, or issues that don't use one of our templates may get closed without a response.
## Pull Request Conventions
### Commits
We use [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/).
When opening a pull request please be sure that either the pull request title, or each commit in the pull request, has one of the following prefixes:
- `feat`: For when introducing a new feature. The result will be a new semver minor version of the package when it is next published.
- `fix`: For bug fixes. The result will be a new semver patch version of the package when it is next published.
- `docs`: For documentation updates. The result will be a new semver patch version of the package when it is next published.
- `chore`: For changes that do not affect the published module. Often these are changes to tests. The result will be *no* change to the version of the package when it is next published (as the commit does not affect the published version).
### Test Coverage
Pull requests made against this repo will run `npm test` automatically. Please make sure tests pass locally before submitting a PR.
Every new feature or bug fix should come with a corresponding test or tests that validate the solutions. Testing also reports on code coverage and will fail if code coverage drops.
### Linting
Linting is also done automatically once tests pass. `npm run lintfix` will fix most linting errors automatically.
Please make sure linting passes before submitting a PR.
## What _not_ to contribute?
### Dependencies
It should be noted that our team does not accept third-party dependency updates/PRs. If you submit a PR trying to update our dependencies we will close it with or without a reference to these contribution guidelines.
### Tools/Automation
Our core team is responsible for the maintenance of the tooling/automation in this project and we ask contributors to not make changes to these when contributing (e.g. `.github/*`, `.eslintrc.json`, `.licensee.json`). Most of those files also have a header at the top to remind folks they are automatically generated. Pull requests that alter these will not be accepted.
fs-minipass-3.0.3/LICENSE 0000664 0000000 0000000 00000001375 14466474510 0014774 0 ustar 00root root 0000000 0000000 The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
fs-minipass-3.0.3/README.md 0000664 0000000 0000000 00000004645 14466474510 0015251 0 ustar 00root root 0000000 0000000 # fs-minipass
Filesystem streams based on [minipass](http://npm.im/minipass).
4 classes are exported:
- ReadStream
- ReadStreamSync
- WriteStream
- WriteStreamSync
When using `ReadStreamSync`, all of the data is made available
immediately upon consuming the stream. Nothing is buffered in memory
when the stream is constructed. If the stream is piped to a writer,
then it will synchronously `read()` and emit data into the writer as
fast as the writer can consume it. (That is, it will respect
backpressure.) If you call `stream.read()` then it will read the
entire file and return the contents.
When using `WriteStreamSync`, every write is flushed to the file
synchronously. If your writes all come in a single tick, then it'll
write it all out in a single tick. It's as synchronous as you are.
The async versions work much like their node builtin counterparts,
with the exception of introducing significantly less Stream machinery
overhead.
## USAGE
It's just streams, you pipe them or read() them or write() to them.
```js
const fsm = require('fs-minipass')
const readStream = new fsm.ReadStream('file.txt')
const writeStream = new fsm.WriteStream('output.txt')
writeStream.write('some file header or whatever\n')
readStream.pipe(writeStream)
```
## ReadStream(path, options)
Path string is required, but somewhat irrelevant if an open file
descriptor is passed in as an option.
Options:
- `fd` Pass in a numeric file descriptor, if the file is already open.
- `readSize` The size of reads to do, defaults to 16MB
- `size` The size of the file, if known. Prevents zero-byte read()
call at the end.
- `autoClose` Set to `false` to prevent the file descriptor from being
closed when the file is done being read.
## WriteStream(path, options)
Path string is required, but somewhat irrelevant if an open file
descriptor is passed in as an option.
Options:
- `fd` Pass in a numeric file descriptor, if the file is already open.
- `mode` The mode to create the file with. Defaults to `0o666`.
- `start` The position in the file to start writing. If not
specified, then the file will start writing at position zero, and be
truncated by default.
- `autoClose` Set to `false` to prevent the file descriptor from being
closed when the stream is ended.
- `flags` Flags to use when opening the file. Irrelevant if `fd` is
passed in, since file won't be opened in that case. Defaults to
`'a'` if a `pos` is specified, or `'w'` otherwise.
fs-minipass-3.0.3/SECURITY.md 0000664 0000000 0000000 00000002321 14466474510 0015550 0 ustar 00root root 0000000 0000000
GitHub takes the security of our software products and services seriously, including the open source code repositories managed through our GitHub organizations, such as [GitHub](https://github.com/GitHub).
If you believe you have found a security vulnerability in this GitHub-owned open source repository, you can report it to us in one of two ways.
If the vulnerability you have found is *not* [in scope for the GitHub Bug Bounty Program](https://bounty.github.com/#scope) or if you do not wish to be considered for a bounty reward, please report the issue to us directly through [opensource-security@github.com](mailto:opensource-security@github.com).
If the vulnerability you have found is [in scope for the GitHub Bug Bounty Program](https://bounty.github.com/#scope) and you would like for your finding to be considered for a bounty reward, please submit the vulnerability to us through [HackerOne](https://hackerone.com/github) in order to be eligible to receive a bounty award.
**Please do not report security vulnerabilities through public GitHub issues, discussions, or pull requests.**
Thanks for helping make GitHub safe for everyone.
fs-minipass-3.0.3/lib/ 0000775 0000000 0000000 00000000000 14466474510 0014527 5 ustar 00root root 0000000 0000000 fs-minipass-3.0.3/lib/index.js 0000664 0000000 0000000 00000023234 14466474510 0016200 0 ustar 00root root 0000000 0000000 'use strict'
const { Minipass } = require('minipass')
const EE = require('events').EventEmitter
const fs = require('fs')
const writev = fs.writev
const _autoClose = Symbol('_autoClose')
const _close = Symbol('_close')
const _ended = Symbol('_ended')
const _fd = Symbol('_fd')
const _finished = Symbol('_finished')
const _flags = Symbol('_flags')
const _flush = Symbol('_flush')
const _handleChunk = Symbol('_handleChunk')
const _makeBuf = Symbol('_makeBuf')
const _mode = Symbol('_mode')
const _needDrain = Symbol('_needDrain')
const _onerror = Symbol('_onerror')
const _onopen = Symbol('_onopen')
const _onread = Symbol('_onread')
const _onwrite = Symbol('_onwrite')
const _open = Symbol('_open')
const _path = Symbol('_path')
const _pos = Symbol('_pos')
const _queue = Symbol('_queue')
const _read = Symbol('_read')
const _readSize = Symbol('_readSize')
const _reading = Symbol('_reading')
const _remain = Symbol('_remain')
const _size = Symbol('_size')
const _write = Symbol('_write')
const _writing = Symbol('_writing')
const _defaultFlag = Symbol('_defaultFlag')
const _errored = Symbol('_errored')
class ReadStream extends Minipass {
constructor (path, opt) {
opt = opt || {}
super(opt)
this.readable = true
this.writable = false
if (typeof path !== 'string') {
throw new TypeError('path must be a string')
}
this[_errored] = false
this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
this[_path] = path
this[_readSize] = opt.readSize || 16 * 1024 * 1024
this[_reading] = false
this[_size] = typeof opt.size === 'number' ? opt.size : Infinity
this[_remain] = this[_size]
this[_autoClose] = typeof opt.autoClose === 'boolean' ?
opt.autoClose : true
if (typeof this[_fd] === 'number') {
this[_read]()
} else {
this[_open]()
}
}
get fd () {
return this[_fd]
}
get path () {
return this[_path]
}
write () {
throw new TypeError('this is a readable stream')
}
end () {
throw new TypeError('this is a readable stream')
}
[_open] () {
fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd))
}
[_onopen] (er, fd) {
if (er) {
this[_onerror](er)
} else {
this[_fd] = fd
this.emit('open', fd)
this[_read]()
}
}
[_makeBuf] () {
return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]))
}
[_read] () {
if (!this[_reading]) {
this[_reading] = true
const buf = this[_makeBuf]()
/* istanbul ignore if */
if (buf.length === 0) {
return process.nextTick(() => this[_onread](null, 0, buf))
}
fs.read(this[_fd], buf, 0, buf.length, null, (er, br, b) =>
this[_onread](er, br, b))
}
}
[_onread] (er, br, buf) {
this[_reading] = false
if (er) {
this[_onerror](er)
} else if (this[_handleChunk](br, buf)) {
this[_read]()
}
}
[_close] () {
if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd]
this[_fd] = null
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
}
}
[_onerror] (er) {
this[_reading] = true
this[_close]()
this.emit('error', er)
}
[_handleChunk] (br, buf) {
let ret = false
// no effect if infinite
this[_remain] -= br
if (br > 0) {
ret = super.write(br < buf.length ? buf.slice(0, br) : buf)
}
if (br === 0 || this[_remain] <= 0) {
ret = false
this[_close]()
super.end()
}
return ret
}
emit (ev, data) {
switch (ev) {
case 'prefinish':
case 'finish':
break
case 'drain':
if (typeof this[_fd] === 'number') {
this[_read]()
}
break
case 'error':
if (this[_errored]) {
return
}
this[_errored] = true
return super.emit(ev, data)
default:
return super.emit(ev, data)
}
}
}
class ReadStreamSync extends ReadStream {
[_open] () {
let threw = true
try {
this[_onopen](null, fs.openSync(this[_path], 'r'))
threw = false
} finally {
if (threw) {
this[_close]()
}
}
}
[_read] () {
let threw = true
try {
if (!this[_reading]) {
this[_reading] = true
do {
const buf = this[_makeBuf]()
/* istanbul ignore next */
const br = buf.length === 0 ? 0
: fs.readSync(this[_fd], buf, 0, buf.length, null)
if (!this[_handleChunk](br, buf)) {
break
}
} while (true)
this[_reading] = false
}
threw = false
} finally {
if (threw) {
this[_close]()
}
}
}
[_close] () {
if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd]
this[_fd] = null
fs.closeSync(fd)
this.emit('close')
}
}
}
class WriteStream extends EE {
constructor (path, opt) {
opt = opt || {}
super(opt)
this.readable = false
this.writable = true
this[_errored] = false
this[_writing] = false
this[_ended] = false
this[_needDrain] = false
this[_queue] = []
this[_path] = path
this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
this[_mode] = opt.mode === undefined ? 0o666 : opt.mode
this[_pos] = typeof opt.start === 'number' ? opt.start : null
this[_autoClose] = typeof opt.autoClose === 'boolean' ?
opt.autoClose : true
// truncating makes no sense when writing into the middle
const defaultFlag = this[_pos] !== null ? 'r+' : 'w'
this[_defaultFlag] = opt.flags === undefined
this[_flags] = this[_defaultFlag] ? defaultFlag : opt.flags
if (this[_fd] === null) {
this[_open]()
}
}
emit (ev, data) {
if (ev === 'error') {
if (this[_errored]) {
return
}
this[_errored] = true
}
return super.emit(ev, data)
}
get fd () {
return this[_fd]
}
get path () {
return this[_path]
}
[_onerror] (er) {
this[_close]()
this[_writing] = true
this.emit('error', er)
}
[_open] () {
fs.open(this[_path], this[_flags], this[_mode],
(er, fd) => this[_onopen](er, fd))
}
[_onopen] (er, fd) {
if (this[_defaultFlag] &&
this[_flags] === 'r+' &&
er && er.code === 'ENOENT') {
this[_flags] = 'w'
this[_open]()
} else if (er) {
this[_onerror](er)
} else {
this[_fd] = fd
this.emit('open', fd)
if (!this[_writing]) {
this[_flush]()
}
}
}
end (buf, enc) {
if (buf) {
this.write(buf, enc)
}
this[_ended] = true
// synthetic after-write logic, where drain/finish live
if (!this[_writing] && !this[_queue].length &&
typeof this[_fd] === 'number') {
this[_onwrite](null, 0)
}
return this
}
write (buf, enc) {
if (typeof buf === 'string') {
buf = Buffer.from(buf, enc)
}
if (this[_ended]) {
this.emit('error', new Error('write() after end()'))
return false
}
if (this[_fd] === null || this[_writing] || this[_queue].length) {
this[_queue].push(buf)
this[_needDrain] = true
return false
}
this[_writing] = true
this[_write](buf)
return true
}
[_write] (buf) {
fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) =>
this[_onwrite](er, bw))
}
[_onwrite] (er, bw) {
if (er) {
this[_onerror](er)
} else {
if (this[_pos] !== null) {
this[_pos] += bw
}
if (this[_queue].length) {
this[_flush]()
} else {
this[_writing] = false
if (this[_ended] && !this[_finished]) {
this[_finished] = true
this[_close]()
this.emit('finish')
} else if (this[_needDrain]) {
this[_needDrain] = false
this.emit('drain')
}
}
}
}
[_flush] () {
if (this[_queue].length === 0) {
if (this[_ended]) {
this[_onwrite](null, 0)
}
} else if (this[_queue].length === 1) {
this[_write](this[_queue].pop())
} else {
const iovec = this[_queue]
this[_queue] = []
writev(this[_fd], iovec, this[_pos],
(er, bw) => this[_onwrite](er, bw))
}
}
[_close] () {
if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd]
this[_fd] = null
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
}
}
}
class WriteStreamSync extends WriteStream {
[_open] () {
let fd
// only wrap in a try{} block if we know we'll retry, to avoid
// the rethrow obscuring the error's source frame in most cases.
if (this[_defaultFlag] && this[_flags] === 'r+') {
try {
fd = fs.openSync(this[_path], this[_flags], this[_mode])
} catch (er) {
if (er.code === 'ENOENT') {
this[_flags] = 'w'
return this[_open]()
} else {
throw er
}
}
} else {
fd = fs.openSync(this[_path], this[_flags], this[_mode])
}
this[_onopen](null, fd)
}
[_close] () {
if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd]
this[_fd] = null
fs.closeSync(fd)
this.emit('close')
}
}
[_write] (buf) {
// throw the original, but try to close if it fails
let threw = true
try {
this[_onwrite](null,
fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos]))
threw = false
} finally {
if (threw) {
try {
this[_close]()
} catch {
// ok error
}
}
}
}
}
exports.ReadStream = ReadStream
exports.ReadStreamSync = ReadStreamSync
exports.WriteStream = WriteStream
exports.WriteStreamSync = WriteStreamSync
fs-minipass-3.0.3/package.json 0000664 0000000 0000000 00000002417 14466474510 0016253 0 ustar 00root root 0000000 0000000 {
"name": "fs-minipass",
"version": "3.0.3",
"main": "lib/index.js",
"scripts": {
"test": "tap",
"lint": "eslint \"**/*.js\"",
"postlint": "template-oss-check",
"template-oss-apply": "template-oss-apply --force",
"lintfix": "npm run lint -- --fix",
"snap": "tap",
"posttest": "npm run lint"
},
"keywords": [],
"author": "GitHub Inc.",
"license": "ISC",
"repository": {
"type": "git",
"url": "https://github.com/npm/fs-minipass.git"
},
"bugs": {
"url": "https://github.com/npm/fs-minipass/issues"
},
"homepage": "https://github.com/npm/fs-minipass#readme",
"description": "fs read and write streams based on minipass",
"dependencies": {
"minipass": "^7.0.3"
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.1",
"@npmcli/template-oss": "4.18.0",
"mutate-fs": "^2.1.1",
"tap": "^16.3.2"
},
"files": [
"bin/",
"lib/"
],
"tap": {
"check-coverage": true,
"nyc-arg": [
"--exclude",
"tap-snapshots/**"
]
},
"engines": {
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.18.0",
"publish": "true"
}
}
fs-minipass-3.0.3/release-please-config.json 0000664 0000000 0000000 00000001254 14466474510 0021010 0 ustar 00root root 0000000 0000000 {
"exclude-packages-from-root": true,
"group-pull-request-title-pattern": "chore: release ${version}",
"pull-request-title-pattern": "chore: release${component} ${version}",
"changelog-sections": [
{
"type": "feat",
"section": "Features",
"hidden": false
},
{
"type": "fix",
"section": "Bug Fixes",
"hidden": false
},
{
"type": "docs",
"section": "Documentation",
"hidden": false
},
{
"type": "deps",
"section": "Dependencies",
"hidden": false
},
{
"type": "chore",
"hidden": true
}
],
"packages": {
".": {
"package-name": ""
}
}
}
fs-minipass-3.0.3/test/ 0000775 0000000 0000000 00000000000 14466474510 0014740 5 ustar 00root root 0000000 0000000 fs-minipass-3.0.3/test/read.js 0000664 0000000 0000000 00000021213 14466474510 0016210 0 ustar 00root root 0000000 0000000 'use strict'
const t = require('tap')
const fsm = require('../')
const fs = require('fs')
const { resolve } = require('path')
const mutateFS = require('mutate-fs')
t.test('read the readme', t => {
const p = resolve(__dirname, '..', 'README.md')
const rm = fs.readFileSync(p, 'utf8')
const check = (t, res) => {
t.equal(rm, res)
t.end()
}
t.test('sync', t => {
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8' })
t.type(str.fd, 'number')
const out = []
str.on('data', chunk => out.push(chunk))
check(t, out.join(''))
})
t.test('sync using read()', t => {
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8' })
t.type(str.fd, 'number')
const out = []
let chunk
while (chunk = str.read()) {
out.push(chunk)
}
check(t, out.join(''))
})
return t.test('async', t => {
const str = new fsm.ReadStream(p, { encoding: 'utf8' })
t.equal(str.fd, null)
let sawFD
str.on('open', fd => sawFD = fd)
const out = []
t.equal(str.read(), null)
str.on('data', chunk => out.push(chunk))
str.on('close', _ => {
t.type(sawFD, 'number')
check(t, out.join(''))
})
})
})
t.test('read the readme sized', t => {
const p = resolve(__dirname, '..', 'README.md')
const size = fs.statSync(p).size
const rm = fs.readFileSync(p, 'utf8')
const check = (t, res) => {
t.equal(rm, res)
t.end()
}
t.test('sync', t => {
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8', size: size })
t.equal(str.fd, null)
const out = []
str.on('data', chunk => out.push(chunk))
check(t, out.join(''))
})
t.test('sync using read()', t => {
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8', size: size })
t.equal(str.fd, null)
const out = []
let chunk
while (chunk = str.read()) {
out.push(chunk)
}
check(t, out.join(''))
})
return t.test('async', t => {
const str = new fsm.ReadStream(p, { encoding: 'utf8', size: size })
t.equal(str.fd, null)
let sawFD
str.on('open', fd => sawFD = fd)
const out = []
t.equal(str.read(), null)
str.on('data', chunk => out.push(chunk))
str.on('end', _ => {
t.type(sawFD, 'number')
check(t, out.join(''))
})
})
})
t.test('slow sink', t => {
const chunks = []
const EE = require('events').EventEmitter
class SlowStream extends EE {
write (chunk) {
chunks.push(chunk)
setTimeout(_ => this.emit('drain'))
return false
}
end () {
return this.write()
}
}
const p = resolve(__dirname, '..', 'README.md')
const rm = fs.readFileSync(p, 'utf8')
const check = t => {
t.equal(chunks.join(''), rm)
chunks.length = 0
t.end()
}
t.test('sync', t => {
const ss = new SlowStream()
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8', readSize: 5 })
str.pipe(ss)
// trigger a read-while-reading
str.on('readable', _ => str.emit('drain'))
str.on('end', _ => check(t))
})
return t.test('async', t => {
const ss = new SlowStream()
const str = new fsm.ReadStream(p, { encoding: 'utf8', readSize: 256 })
str.pipe(ss)
str.on('end', _ => check(t))
})
})
t.test('zeno reading style', t => {
t.teardown(mutateFS.zenoRead())
const chunks = []
const EE = require('events').EventEmitter
class Collector extends EE {
write (chunk) {
chunks.push(chunk)
return true
}
end () {}
}
const p = resolve(__dirname, '..', 'README.md')
const rm = fs.readFileSync(p, 'utf8')
const check = t => {
t.equal(chunks.join(''), rm)
chunks.length = 0
t.end()
}
t.test('sync', t => {
const ss = new Collector()
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8', readSize: 256 })
str.pipe(ss)
check(t)
})
return t.test('async', t => {
const ss = new Collector()
const str = new fsm.ReadStream(p, { encoding: 'utf8', readSize: 256 })
str.pipe(ss)
str.on('end', _ => check(t))
})
})
t.test('fail open', t => {
const poop = new Error('poop')
t.teardown(mutateFS.fail('open', poop))
t.throws(_ => new fsm.ReadStreamSync(__filename), poop)
const str = new fsm.ReadStream(__filename)
str.on('error', er => {
t.equal(er, poop)
t.end()
})
})
t.test('fail close', t => {
const poop = new Error('poop')
t.teardown(mutateFS.fail('close', poop))
t.throws(_ => new fsm.ReadStreamSync(__filename).resume(), poop)
const str = new fsm.ReadStream(__filename)
str.resume()
str.on('error', er => {
t.equal(er, poop)
t.end()
})
})
t.test('type errors', t => {
const er = new TypeError('this is a readable stream')
t.throws(_ => new fsm.ReadStream(__filename).write('hello'), er)
t.throws(_ => new fsm.ReadStream(__filename).end(), er)
const pathstr = new TypeError('path must be a string')
t.throws(_ => new fsm.ReadStream(1234), pathstr)
t.end()
})
t.test('fail read', t => {
// also fail close, just to exercise the double-error logic
const closeError = new Error('close error')
t.teardown(mutateFS.fail('close', closeError))
const poop = new Error('poop')
const badFDs = new Set()
const read = fs.read
const readSync = fs.readSync
const open = fs.open
const openSync = fs.openSync
t.teardown(_ => {
fs.open = open
fs.openSync = openSync
fs.read = read
fs.readSync = readSync
})
fs.open = (path, flags, cb) => {
if (path === __filename) {
open(path, flags, (er, fd) => {
if (!er) {
badFDs.add(fd)
}
return cb(er, fd)
})
} else {
open(path, flags, cb)
}
}
fs.openSync = (path, flags) => {
const fd = openSync(path, flags)
if (path === __filename) {
badFDs.add(fd)
}
return fd
}
fs.read = function (fd, buf, offset, length, pos, cb) {
if (badFDs.has(fd)) {
process.nextTick(_ => cb(new Error('poop')))
} else {
read(fd, buf, offset, length, pos, cb)
}
}
fs.readSync = function (fd, buf, offset, length, pos) {
if (badFDs.has(fd)) {
throw new Error('poop sync')
}
}
t.throws(_ => new fsm.ReadStreamSync(__filename))
t.test('async', t => {
const str = new fsm.ReadStream(__filename)
str.once('error', er => {
t.match(er, poop)
t.end()
})
})
t.end()
})
t.test('fd test', t => {
const p = resolve(__dirname, '..', 'README.md')
const rm = fs.readFileSync(p, 'utf8')
const check = (t, res) => {
t.equal(rm, res)
t.end()
}
t.test('sync', t => {
const fd = fs.openSync(p, 'r')
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8', fd: fd })
t.type(str.fd, 'number')
t.equal(str.path, p)
const out = []
str.on('data', chunk => out.push(chunk))
check(t, out.join(''))
})
t.test('sync using read()', t => {
const fd = fs.openSync(p, 'r')
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8', fd: fd })
t.type(str.fd, 'number')
t.equal(str.path, p)
const out = []
let chunk
while (chunk = str.read()) {
out.push(chunk)
}
check(t, out.join(''))
})
t.test('async', t => {
const fd = fs.openSync(p, 'r')
const str = new fsm.ReadStream(p, { encoding: 'utf8', fd: fd })
t.type(str.fd, 'number')
t.equal(str.path, p)
const out = []
t.equal(str.read(), null)
str.on('data', chunk => out.push(chunk))
str.on('end', _ => check(t, out.join('')))
})
t.end()
})
t.test('fd test, no autoClose', t => {
const p = resolve(__dirname, '..', 'README.md')
const rm = fs.readFileSync(p, 'utf8')
const check = (t, res, fd) => {
// will throw EBADF if already closed
fs.closeSync(fd)
t.equal(rm, res)
t.end()
}
t.test('sync', t => {
const fd = fs.openSync(p, 'r')
const str = new fsm.ReadStreamSync(p, {
encoding: 'utf8',
fd: fd,
autoClose: false,
})
t.type(str.fd, 'number')
t.equal(str.path, p)
const out = []
str.on('data', chunk => out.push(chunk))
check(t, out.join(''), fd)
})
t.test('sync using read()', t => {
const fd = fs.openSync(p, 'r')
const str = new fsm.ReadStreamSync(p, {
encoding: 'utf8',
fd: fd,
autoClose: false,
})
t.type(str.fd, 'number')
t.equal(str.path, p)
const out = []
let chunk
while (chunk = str.read()) {
out.push(chunk)
}
check(t, out.join(''), fd)
})
t.test('async', t => {
const fd = fs.openSync(p, 'r')
const str = new fsm.ReadStream(p, {
encoding: 'utf8',
fd: fd,
autoClose: false,
})
t.type(str.fd, 'number')
t.equal(str.path, p)
const out = []
t.equal(str.read(), null)
str.on('data', chunk => out.push(chunk))
str.on('end', _ => check(t, out.join(''), fd))
})
t.end()
})
fs-minipass-3.0.3/test/write.js 0000664 0000000 0000000 00000030073 14466474510 0016433 0 ustar 00root root 0000000 0000000 'use strict'
const t = require('tap')
const fsm = require('../')
const fs = require('fs')
const { join } = require('path')
const mutateFS = require('mutate-fs')
t.test('basic write', t => {
const p = join(__dirname, 'basic-write')
const check = t => {
t.equal(fs.readFileSync(p, 'utf8'), 'ok')
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
new fsm.WriteStreamSync(p).end('ok')
check(t)
})
t.test('async', t => {
const s = new fsm.WriteStream(p)
s.end('ok')
s.on('close', _ => check(t))
})
t.end()
})
t.test('write then end', t => {
const p = join(__dirname, '/write-then-end')
const check = t => {
t.equal(fs.readFileSync(p, 'utf8'), 'okend')
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
const s = new fsm.WriteStreamSync(p)
s.write('ok')
s.end('end')
check(t)
})
t.test('async', t => {
const s = new fsm.WriteStream(p)
s.write('ok')
s.end('end')
t.equal(s.fd, null)
t.equal(s.path, p)
s.on('open', fd => {
t.equal(fd, s.fd)
t.type(fd, 'number')
})
s.on('finish', _ => check(t))
})
t.end()
})
t.test('multiple writes', t => {
const p = join(__dirname, '/multiple-writes')
const check = t => {
t.equal(fs.readFileSync(p, 'utf8'), 'abcdefghijklmnop')
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
const s = new fsm.WriteStreamSync(p)
s.write('a')
s.write('b')
s.write('c')
s.write('d')
s.write('e')
s.write('f')
s.write(Buffer.from('676869', 'hex'))
s.write('jklm')
s.write(Buffer.from('nop'))
s.end()
check(t)
})
t.test('async', t => {
const s = new fsm.WriteStream(p)
s.write('a')
s.write('b')
s.write('c')
s.write('d')
s.write('e')
s.write('f')
s.write(Buffer.from('676869', 'hex'))
s.write('jklm')
s.write(Buffer.from('nop'))
s.end()
s.on('finish', _ => check(t))
})
t.test('async after open', t => {
const s = new fsm.WriteStream(p)
s.on('open', fd => {
t.type(fd, 'number')
t.ok(s.write('a'))
t.notOk(s.write('b'))
t.notOk(s.write('c'))
t.notOk(s.write('d'))
t.notOk(s.write('e'))
t.notOk(s.write('f'))
t.notOk(s.write(Buffer.from('676869', 'hex')))
t.notOk(s.write('jklm'))
t.notOk(s.write(Buffer.from('nop')))
s.end()
s.on('finish', _ => check(t))
})
})
t.test('async after open, drains', t => {
const s = new fsm.WriteStream(p)
s.on('open', fd => {
t.type(fd, 'number')
t.ok(s.write('a'))
t.notOk(s.write('b'))
s.once('drain', _ => {
t.ok(s.write('c'))
t.notOk(s.write('d'))
t.notOk(s.write('e'))
s.once('drain', () => {
t.ok(s.write('f'))
t.notOk(s.write(Buffer.from('676869', 'hex')))
t.notOk(s.write('jklm'))
t.notOk(s.write(Buffer.from('nop')))
s.once('drain', () => s.end())
})
})
s.on('finish', () => check(t))
})
})
t.test('async after open, writev delayed', t => {
const _fsm = t.mock('../', {
fs: {
...fs,
writev: (...args) => {
setTimeout(fs.writev, 1000, ...args) // make writev very slow
},
},
})
const s = new _fsm.WriteStream(p)
s.on('open', fd => {
t.type(fd, 'number')
t.ok(s.write('a'))
t.notOk(s.write('b'))
t.notOk(s.write('c'))
t.notOk(s.write('d'))
t.notOk(s.write('e'))
t.notOk(s.write('f'))
t.notOk(s.write(Buffer.from('676869', 'hex')))
t.notOk(s.write('jklm'))
t.notOk(s.write(Buffer.from('nop')))
s.end()
s.on('finish', _ => check(t))
})
})
t.end()
})
// The flags option ('w+' here) must be passed through to fs.open.
t.test('flags', t => {
  const file = join(__dirname, '/flags')
  const verify = t => {
    t.equal(fs.readFileSync(file, 'utf8'), 'ok')
    fs.unlinkSync(file)
    t.end()
  }
  t.test('sync', t => {
    new fsm.WriteStreamSync(file, { flags: 'w+' }).end('ok')
    verify(t)
  })
  t.test('async', t => {
    const stream = new fsm.WriteStream(file, { flags: 'w+' })
    stream.end('ok')
    stream.on('finish', () => verify(t))
  })
  t.end()
})
// The mode option must set the created file's permission bits.
// (Windows only honors 0o666, per the assertion below.)
t.test('mode', t => {
  const file = join(__dirname, '/mode')
  const expectMode = process.platform === 'win32' ? 0o666 : 0o700
  const verify = t => {
    t.equal(fs.readFileSync(file, 'utf8'), 'ok')
    t.equal(fs.statSync(file).mode & 0o777, expectMode)
    fs.unlinkSync(file)
    t.end()
  }
  t.test('sync', t => {
    new fsm.WriteStreamSync(file, { mode: 0o700 }).end('ok')
    verify(t)
  })
  t.test('async', t => {
    const stream = new fsm.WriteStream(file, { mode: 0o700 })
    stream.end('ok')
    stream.on('finish', () => verify(t))
  })
  t.end()
})
// write() after end() must fail: the sync stream throws, the async
// stream emits an 'error' event. The earlier end('ok') payload must
// still land on disk either way.
t.test('write after end', t => {
  const p = join(__dirname, '/write-after-end')
  const check = t => {
    t.equal(fs.readFileSync(p, 'utf8'), 'ok')
    fs.unlinkSync(p)
    t.end()
  }
  t.test('sync', t => {
    const s = new fsm.WriteStreamSync(p, { mode: 0o700 })
    s.end('ok')
    t.throws(_ => s.write('626164', 'hex'),
      new Error('write() after end()'))
    check(t)
  })
  t.test('async', t => {
    const s = new fsm.WriteStream(p, { mode: 0o700 })
    s.end('ok')
    s.on('error', e => {
      t.match(e, new Error('write() after end()'))
      // only start waiting for 'finish' once the expected error fired
      s.on('finish', _ => check(t))
    })
    s.write('626164', 'hex')
  })
  t.end()
})
// When an already-open fd is provided, the stream writes to it.
t.test('fd', t => {
  const file = join(__dirname, '/fd')
  const verify = t => {
    t.equal(fs.readFileSync(file, 'utf8'), 'ok')
    fs.unlinkSync(file)
    t.end()
  }
  t.test('sync', t => {
    const fd = fs.openSync(file, 'w')
    new fsm.WriteStreamSync(file, { fd }).end('ok')
    verify(t)
  })
  t.test('async', t => {
    const fd = fs.openSync(file, 'w')
    const stream = new fsm.WriteStream(file, { fd })
    stream.end('ok')
    stream.on('finish', () => verify(t))
  })
  t.end()
})
// Ending a stream with no data — either end('') or a bare end() —
// must still create an empty file and emit 'finish'.
t.test('empty write', t => {
  const p = join(__dirname, '/empty-write')
  const check = t => {
    t.equal(fs.readFileSync(p, 'utf8'), '')
    fs.unlinkSync(p)
    t.end()
  }
  t.test('sync', t => {
    t.test('empty string', t => {
      new fsm.WriteStreamSync(p).end('')
      check(t)
    })
    t.test('no chunk to end', t => {
      // end() with no chunk at all (previously this duplicated the
      // 'empty string' case above by passing '')
      new fsm.WriteStreamSync(p).end()
      check(t)
    })
    t.end()
  })
  return t.test('async', t => {
    t.test('immediate', t => {
      t.test('no chunk to end', t => {
        const s = new fsm.WriteStream(p)
        s.end()
        s.on('finish', _ => check(t))
      })
      return t.test('empty string', t => {
        const s = new fsm.WriteStream(p)
        s.end('')
        s.on('finish', _ => check(t))
      })
    })
    // same pair again, but only calling end() once the fd is open
    return t.test('end on open', t => {
      t.test('no chunk to end', t => {
        const s = new fsm.WriteStream(p)
        s.on('open', _ => s.end())
        s.on('finish', _ => check(t))
      })
      return t.test('empty string', t => {
        const s = new fsm.WriteStream(p)
        s.on('open', _ => s.end(''))
        s.on('finish', _ => check(t))
      })
    })
  })
})
// A failing fs.open must throw from the sync constructor and emit
// 'error' on the async stream.
t.test('fail open', t => {
  const file = join(__dirname, '/fail-open')
  const poop = new Error('poop')
  t.teardown(mutateFS.fail('open', poop))
  t.throws(() => new fsm.WriteStreamSync(file), poop)
  const stream = new fsm.WriteStream(file)
  stream.on('error', er => {
    t.equal(er, poop)
    t.end()
  })
})
// Same as 'fail open', but with a start offset supplied.
t.test('fail open, positioned write', t => {
  const file = join(__dirname, '/fail-open-positioned')
  const poop = new Error('poop')
  t.teardown(mutateFS.fail('open', poop))
  t.throws(() => new fsm.WriteStreamSync(file, { start: 2 }), poop)
  const stream = new fsm.WriteStream(file, { start: 2 })
  stream.on('error', er => {
    t.equal(er, poop)
    t.end()
  })
})
// A failing fs.close must surface as a thrown error (sync) or an
// 'error' event (async), even though the data was written.
t.test('fail close', t => {
  const file = join(__dirname, '/fail-close')
  const poop = new Error('poop')
  t.teardown(mutateFS.fail('close', poop))
  t.teardown(() => fs.unlinkSync(file))
  t.throws(() => new fsm.WriteStreamSync(file).end('asdf'), poop)
  const stream = new fsm.WriteStream(file).end('asdf')
  stream.on('error', er => {
    t.equal(er, poop)
    t.end()
  })
})
// A failing fs.write must surface the write error (poop), not the
// close error — close is also failed here to exercise the
// double-error logic.
t.test('fail write', t => {
  const closeError = new Error('close error')
  t.teardown(mutateFS.fail('close', closeError))
  const file = join(__dirname, '/fail-write')
  const poop = new Error('poop')
  t.teardown(mutateFS.fail('write', poop))
  t.throws(() => new fsm.WriteStreamSync(file).write('foo'), poop)
  const stream = new fsm.WriteStream(file)
  stream.write('foo')
  stream.on('error', er => {
    t.equal(er, poop)
    fs.unlinkSync(file)
    t.end()
  })
})
// A write with { start: N } must land at offset N, leaving the rest
// of the pre-existing file contents untouched.
t.test('positioned write', t => {
  const p = join(__dirname, '/positioned-write')
  const payload = Buffer.from('this is the data that is written')
  // seed file contents: bytes 0x00 through 0xff in order
  const data = Buffer.from(Array.from({ length: 256 }, (_, i) => i))
  // expected result: the seed with the payload overlaid at offset 100
  const expect = Buffer.from(data)
  payload.copy(expect, 100)
  const check = t => {
    t.same(fs.readFileSync(p), expect)
    fs.unlinkSync(p)
    t.end()
  }
  t.test('sync', t => {
    fs.writeFileSync(p, data)
    new fsm.WriteStreamSync(p, { start: 100 }).end(payload)
    check(t)
  })
  t.test('async', t => {
    fs.writeFileSync(p, data)
    const stream = new fsm.WriteStream(p, { start: 100 })
    stream.end(payload)
    stream.on('finish', () => check(t))
  })
  t.end()
})
// After a positioned write, subsequent writes must continue from
// where the previous write left off (the position advances), so the
// two chunks land contiguously starting at offset 100.
t.test('positioned then unpositioned', t => {
  const p = join(__dirname, '/positioned-then-unpositioned')
  const write = Buffer.from('this is the data that is written')
  // seed file contents: bytes 0x00 through 0xff in order
  const data = Buffer.allocUnsafe(256)
  for (let i = 0; i < 256; i++) {
    data[i] = i
  }
  // expected: a copy of the seed with `write` overlaid at offset 100
  const expect = Buffer.from(data.toString('hex'), 'hex')
  for (let i = 0; i < write.length; i++) {
    expect[i + 100] = write[i]
  }
  const check = t => {
    t.same(fs.readFileSync(p), expect)
    fs.unlinkSync(p)
    t.end()
  }
  t.test('sync', t => {
    fs.writeFileSync(p, data)
    const s = new fsm.WriteStreamSync(p, { start: 100 })
    s.write(write.slice(0, 20))
    s.end(write.slice(20))
    check(t)
  })
  t.test('async', t => {
    fs.writeFileSync(p, data)
    const s = new fsm.WriteStream(p, { start: 100 })
    s.write(write.slice(0, 20))
    s.end(write.slice(20))
    s.on('close', _ => check(t))
  })
  t.end()
})
// Same as 'positioned then unpositioned', but with start: 0, so the
// overlay lands at the beginning of the file.
t.test('positioned then unpositioned at zero', t => {
  // use a distinct fixture path — previously this reused the
  // '/positioned-then-unpositioned' path of the test above, which
  // could collide if subtests ever run in parallel
  const p = join(__dirname, '/positioned-then-unpositioned-at-zero')
  const write = Buffer.from('this is the data that is written')
  // seed file contents: bytes 0x00 through 0xff in order
  const data = Buffer.allocUnsafe(256)
  for (let i = 0; i < 256; i++) {
    data[i] = i
  }
  // expected: a copy of the seed with `write` overlaid at offset 0
  const expect = Buffer.from(data.toString('hex'), 'hex')
  for (let i = 0; i < write.length; i++) {
    expect[i] = write[i]
  }
  const check = t => {
    t.same(fs.readFileSync(p), expect)
    fs.unlinkSync(p)
    t.end()
  }
  t.test('sync', t => {
    fs.writeFileSync(p, data)
    const s = new fsm.WriteStreamSync(p, { start: 0 })
    s.write(write.slice(0, 20))
    s.end(write.slice(20))
    check(t)
  })
  t.test('async', t => {
    fs.writeFileSync(p, data)
    const s = new fsm.WriteStream(p, { start: 0 })
    s.write(write.slice(0, 20))
    s.end(write.slice(20))
    s.on('close', _ => check(t))
  })
  t.end()
})
// With autoClose: false, the stream must leave the provided fd open
// after finishing; the caller closes it (closeSync succeeding in
// verify proves it was still open).
t.test('fd, no autoClose', t => {
  const file = join(__dirname, '/fd-no-autoclose')
  const verify = (t, fd) => {
    fs.closeSync(fd)
    t.equal(fs.readFileSync(file, 'utf8'), 'ok')
    fs.unlinkSync(file)
    t.end()
  }
  t.test('sync', t => {
    const fd = fs.openSync(file, 'w')
    new fsm.WriteStreamSync(file, { fd, autoClose: false }).end('ok')
    verify(t, fd)
  })
  t.test('async', t => {
    const fd = fs.openSync(file, 'w')
    const stream = new fsm.WriteStream(file, { fd, autoClose: false })
    stream.end('ok')
    stream.on('finish', () => verify(t, fd))
  })
  t.end()
})
// Positioned writes to a file that does not exist yet: the gap below
// the start offset ends up zero-filled, and a second positioned
// writer must not clobber the first writer's data.
t.test('positioned, nonexistent file', t => {
  const p = join(__dirname, '/pos-noent')
  // first 'asdf' written at offset 10, second at offset 2:
  // bytes 0-1 NUL, 'asdf' at 2-5, NUL at 6-9, 'asdf' at 10-13
  const check = t => {
    t.equal(fs.readFileSync(p, 'utf8'), '\0\0asdf\0\0\0\0asdf')
    fs.unlinkSync(p)
    t.end()
  }
  t.test('sync', t => {
    const w = new fsm.WriteStreamSync(p, { start: 10 })
    w.end('asdf')
    const w2 = new fsm.WriteStreamSync(p, { start: 2 })
    w2.end('asdf')
    check(t)
  })
  t.test('async', t => {
    const w = new fsm.WriteStream(p, { start: 10 })
    w.end('asdf')
    // wait for the first stream to fully close before opening the
    // second, so the writes happen in a deterministic order
    w.on('close', _ => {
      const w2 = new fsm.WriteStream(p, { start: 2 })
      w2.end('asdf')
      w2.on('close', () => check(t))
    })
  })
  t.end()
})