pax_global_header00006660000000000000000000000064150061431410014505gustar00rootroot0000000000000052 comment=cbac4273836f5837cec641ab21f365c79b102a4b helix-cbac4273836f5837cec641ab21f365c79b102a4b/000077500000000000000000000000001500614314100176055ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/.cargo/000077500000000000000000000000001500614314100207565ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/.cargo/config.toml000066400000000000000000000016011500614314100231160ustar00rootroot00000000000000# we use tokio_unstable to enable runtime::Handle::id so we can separate # globals from multiple parallel tests. If that function ever does get removed # its possible to replace (with some additional overhead and effort) # Annoyingly build.rustflags doesn't work here because it gets overwritten # if people have their own global target.<..> config (for example to enable mold) # specifying flags this way is more robust as they get merged # This still gets overwritten by RUST_FLAGS though, luckily it shouldn't be necessary # to set those most of the time. If downstream does overwrite this its not a huge # deal since it will only break tests anyway [target."cfg(all())"] rustflags = ["--cfg", "tokio_unstable", "-C", "target-feature=-crt-static"] [alias] xtask = "run --package xtask --" integration-test = "test --features integration --profile integration --workspace --test integration" helix-cbac4273836f5837cec641ab21f365c79b102a4b/.envrc000066400000000000000000000003111500614314100207160ustar00rootroot00000000000000watch_file shell.nix watch_file default.nix watch_file flake.lock watch_file rust-toolchain.toml # try to use flakes, if it fails use normal nix (ie. 
shell.nix) use flake || use nix eval "$shellHook" helix-cbac4273836f5837cec641ab21f365c79b102a4b/.gitattributes000066400000000000000000000004601500614314100225000ustar00rootroot00000000000000# Auto detect text files and perform normalization * text=auto *.rs text diff=rust *.toml text diff=toml *.scm text diff=scheme *.md text diff=markdown book/theme/highlight.js linguist-vendored runtime/queries/**/*.scm linguist-language=Tree-sitter-Query Cargo.lock text helix-cbac4273836f5837cec641ab21f365c79b102a4b/.github/000077500000000000000000000000001500614314100211455ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/.github/FUNDING.yml000066400000000000000000000000361500614314100227610ustar00rootroot00000000000000open_collective: helix-editor helix-cbac4273836f5837cec641ab21f365c79b102a4b/.github/ISSUE_TEMPLATE/000077500000000000000000000000001500614314100233305ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/.github/ISSUE_TEMPLATE/blank_issue.md000066400000000000000000000000671500614314100261540ustar00rootroot00000000000000--- name: Blank Issue about: Create a blank issue. --- helix-cbac4273836f5837cec641ab21f365c79b102a4b/.github/ISSUE_TEMPLATE/bug_report.yaml000066400000000000000000000044311500614314100263660ustar00rootroot00000000000000name: Bug Report description: Create a report to help us improve labels: C-bug body: - type: markdown attributes: value: Thank you for filing a bug report! 🐛 - type: textarea id: problem attributes: label: Summary description: > Please provide a short summary of the bug, along with any information you feel relevant to replicate the bug. validations: required: true - type: textarea id: reproduction-steps attributes: label: Reproduction Steps value: | I tried this: 1. `hx` I expected this to happen: Instead, this happened: - type: textarea id: helix-log attributes: label: Helix log description: See `hx -h` for log file path. 
If you can reproduce the issue run `RUST_BACKTRACE=1 hx -vv` to generate a more detailed log file. value: |
~/.cache/helix/helix.log ``` please provide a copy of `~/.cache/helix/helix.log` here if possible, you may need to redact some of the lines ```
- type: input id: platform attributes: label: Platform placeholder: Linux / macOS / Windows validations: required: true - type: input id: terminal-emulator attributes: label: Terminal Emulator placeholder: wezterm 20220101-133340-7edc5b5a validations: required: true - type: input id: installation-method attributes: label: Installation Method description: > How you installed Helix - from a package manager like Homebrew or the AUR, built from source, downloaded a binary from the releases page, etc. placeholder: "source / brew / nixpkgs / flake / releases page" validations: required: true - type: input id: helix-version attributes: label: Helix Version description: > Helix version (`hx -V` if using a release, `git describe` if building from master). **Make sure that you are using the [latest helix release](https://github.com/helix-editor/helix/releases) or a newer master build** placeholder: "helix 22.12 (5eaa6d97)" validations: required: true helix-cbac4273836f5837cec641ab21f365c79b102a4b/.github/ISSUE_TEMPLATE/enhancement.md000066400000000000000000000004461500614314100261430ustar00rootroot00000000000000--- name: Enhancement about: Suggest an improvement title: '' labels: C-enhancement assignees: '' --- helix-cbac4273836f5837cec641ab21f365c79b102a4b/.github/dependabot.yml000066400000000000000000000007331500614314100240000ustar00rootroot00000000000000# Please see the documentation for all configuration options: # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates version: 2 updates: - package-ecosystem: "cargo" directory: "/" schedule: interval: "weekly" groups: rust-dependencies: update-types: - "minor" - "patch" - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/.github/workflows/000077500000000000000000000000001500614314100232025ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/.github/workflows/build.yml000066400000000000000000000111171500614314100250250ustar00rootroot00000000000000name: Build on: pull_request: push: branches: - master merge_group: schedule: - cron: "00 01 * * *" env: MSRV: "1.82" # This key can be changed to bust the cache of tree-sitter grammars. GRAMMAR_CACHE_VERSION: "" jobs: check: name: Check (msrv) runs-on: ubuntu-latest if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule' steps: - name: Checkout sources uses: actions/checkout@v4 - name: Install MSRV toolchain uses: dtolnay/rust-toolchain@master with: toolchain: ${{ env.MSRV }} - uses: Swatinem/rust-cache@v2 with: shared-key: "build" - name: Cache tree-sitter grammars uses: actions/cache@v4 with: path: runtime/grammars key: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }} restore-keys: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars- - name: Run cargo check run: cargo check test: name: Test Suite runs-on: ${{ matrix.os }} if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule' env: RUST_BACKTRACE: 1 HELIX_LOG_LEVEL: info steps: - name: Checkout sources uses: actions/checkout@v4 - name: Install MSRV toolchain uses: dtolnay/rust-toolchain@master with: toolchain: ${{ env.MSRV }} - uses: Swatinem/rust-cache@v2 with: shared-key: "build" - name: Cache tree-sitter grammars uses: actions/cache@v4 with: path: runtime/grammars key: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }} restore-keys: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars- - name: Run cargo test run: 
cargo test --workspace - name: Run cargo integration-test run: cargo integration-test strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest, ubuntu-24.04-arm] lints: name: Lints runs-on: ubuntu-latest if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule' steps: - name: Checkout sources uses: actions/checkout@v4 - name: Install MSRV toolchain uses: dtolnay/rust-toolchain@master with: toolchain: ${{ env.MSRV }} components: rustfmt, clippy - uses: Swatinem/rust-cache@v2 with: shared-key: "build" - name: Cache tree-sitter grammars uses: actions/cache@v4 with: path: runtime/grammars key: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }} restore-keys: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars- - name: Run cargo fmt run: cargo fmt --all --check - name: Run cargo clippy run: cargo clippy --workspace --all-targets -- -D warnings - name: Run cargo doc run: cargo doc --no-deps --workspace --document-private-items env: RUSTDOCFLAGS: -D warnings docs: name: Docs runs-on: ubuntu-latest if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule' steps: - name: Checkout sources uses: actions/checkout@v4 - name: Install MSRV toolchain uses: dtolnay/rust-toolchain@master with: toolchain: ${{ env.MSRV }} - uses: Swatinem/rust-cache@v2 with: shared-key: "build" - name: Cache tree-sitter grammars uses: actions/cache@v4 with: path: runtime/grammars key: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }} restore-keys: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars- - name: Validate queries run: cargo xtask query-check - name: Validate themes run: cargo xtask theme-check - name: Generate docs run: cargo xtask docgen - name: Check uncommitted documentation 
changes run: | git diff git diff-files --quiet \ || (echo "Run 'cargo xtask docgen', commit the changes and push again" \ && exit 1) helix-cbac4273836f5837cec641ab21f365c79b102a4b/.github/workflows/cachix.yml000066400000000000000000000010041500614314100251570ustar00rootroot00000000000000# Publish the Nix flake outputs to Cachix name: Cachix on: push: branches: - master jobs: publish: name: Publish Flake runs-on: ubuntu-latest steps: - name: Checkout sources uses: actions/checkout@v4 - name: Install nix uses: cachix/install-nix-action@v31 - name: Authenticate with Cachix uses: cachix/cachix-action@v16 with: name: helix authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} - name: Build nix flake run: nix build -L helix-cbac4273836f5837cec641ab21f365c79b102a4b/.github/workflows/gh-pages.yml000066400000000000000000000016641500614314100254270ustar00rootroot00000000000000name: GitHub Pages on: push: branches: - master tags: - '*' jobs: deploy: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Setup mdBook uses: peaceiris/actions-mdbook@v2 with: # mdbook-version: 'latest' mdbook-version: '0.4.43' - run: mdbook build book - name: Set output directory run: | OUTDIR=$(basename ${{ github.ref }}) echo "OUTDIR=$OUTDIR" >> $GITHUB_ENV - name: Deploy stable uses: peaceiris/actions-gh-pages@v4 if: startswith(github.ref, 'refs/tags/') with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./book/book - name: Deploy uses: peaceiris/actions-gh-pages@v4 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./book/book destination_dir: ./${{ env.OUTDIR }} helix-cbac4273836f5837cec641ab21f365c79b102a4b/.github/workflows/languages.toml000066400000000000000000000011751500614314100260510ustar00rootroot00000000000000# This languages.toml is used for testing in CI. 
[[language]] name = "rust" scope = "source.rust" injection-regex = "rust" file-types = ["rs"] comment-token = "//" roots = ["Cargo.toml", "Cargo.lock"] indent = { tab-width = 4, unit = " " } [[grammar]] name = "rust" source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "0431a2c60828731f27491ee9fdefe25e250ce9c9" } [[language]] name = "nix" scope = "source.nix" injection-regex = "nix" file-types = ["nix"] shebangs = [] roots = [] comment-token = "#" # A grammar entry is not necessary for this language - it is only used for # testing TOML merging behavior. helix-cbac4273836f5837cec641ab21f365c79b102a4b/.github/workflows/release.yml000066400000000000000000000227311500614314100253520ustar00rootroot00000000000000name: Release on: push: tags: - '[0-9]+.[0-9]+' - '[0-9]+.[0-9]+.[0-9]+' branches: - 'patch/ci-release-*' pull_request: paths: - '.github/workflows/release.yml' env: # Preview mode: Publishes the build output as a CI artifact instead of creating # a release, allowing for manual inspection of the output. This mode is # activated if the CI run was triggered by events other than pushed tags, or # if the repository is a fork. preview: ${{ !startsWith(github.ref, 'refs/tags/') || github.repository != 'helix-editor/helix' }} jobs: fetch-grammars: name: Fetch Grammars runs-on: ubuntu-latest steps: - name: Checkout sources uses: actions/checkout@v4 - name: Install stable toolchain uses: dtolnay/rust-toolchain@stable - uses: Swatinem/rust-cache@v2 - name: Fetch tree-sitter grammars run: cargo run --package=helix-loader --bin=hx-loader - name: Bundle grammars run: tar cJf grammars.tar.xz -C runtime/grammars/sources . - uses: actions/upload-artifact@v4 with: name: grammars path: grammars.tar.xz dist: name: Dist needs: [fetch-grammars] env: # For some builds, we use cross to test on 32-bit and big-endian # systems. CARGO: cargo # When CARGO is set to CROSS, this is set to `--target matrix.target`. 
TARGET_FLAGS: # When CARGO is set to CROSS, TARGET_DIR includes matrix.target. TARGET_DIR: ./target # Emit backtraces on panics. RUST_BACKTRACE: 1 runs-on: ${{ matrix.os }} strategy: fail-fast: false # don't fail other jobs if one fails matrix: build: [x86_64-linux, aarch64-linux, x86_64-macos, x86_64-windows] #, x86_64-win-gnu, win32-msvc include: - build: x86_64-linux os: ubuntu-24.04 rust: stable target: x86_64-unknown-linux-gnu cross: false - build: aarch64-linux os: ubuntu-24.04-arm rust: stable target: aarch64-unknown-linux-gnu cross: false # - build: riscv64-linux # os: ubuntu-22.04 # rust: stable # target: riscv64gc-unknown-linux-gnu # cross: true - build: x86_64-macos os: macos-latest rust: stable target: x86_64-apple-darwin cross: false - build: x86_64-windows os: windows-latest rust: stable target: x86_64-pc-windows-msvc cross: false # 23.03: build issues - build: aarch64-macos os: macos-latest rust: stable target: aarch64-apple-darwin cross: false skip_tests: true # x86_64 host can't run aarch64 code # - build: x86_64-win-gnu # os: windows-2019 # rust: stable-x86_64-gnu # target: x86_64-pc-windows-gnu # - build: win32-msvc # os: windows-2019 # rust: stable # target: i686-pc-windows-msvc steps: - name: Checkout sources uses: actions/checkout@v4 - name: Download grammars uses: actions/download-artifact@v4 - name: Move grammars under runtime if: "!startsWith(matrix.os, 'windows')" run: | mkdir -p runtime/grammars/sources tar xJf grammars/grammars.tar.xz -C runtime/grammars/sources # The rust-toolchain action ignores rust-toolchain.toml files. # Removing this before building with cargo ensures that the rust-toolchain # is considered the same between installation and usage. 
- name: Remove the rust-toolchain.toml file run: rm rust-toolchain.toml - name: Install ${{ matrix.rust }} toolchain uses: dtolnay/rust-toolchain@master with: toolchain: ${{ matrix.rust }} target: ${{ matrix.target }} # Install a pre-release version of Cross # TODO: We need to pre-install Cross because we need cross-rs/cross#591 to # get a newer C++ compiler toolchain. Remove this step when Cross # 0.3.0, which includes cross-rs/cross#591, is released. - name: Install Cross if: "matrix.cross" run: | cargo install cross --git https://github.com/cross-rs/cross.git --rev 47df5c76e7cba682823a0b6aa6d95c17b31ba63a echo "CARGO=cross" >> $GITHUB_ENV # echo "TARGET_FLAGS=--target ${{ matrix.target }}" >> $GITHUB_ENV # echo "TARGET_DIR=./target/${{ matrix.target }}" >> $GITHUB_ENV - name: Show command used for Cargo run: | echo "cargo command is: ${{ env.CARGO }}" echo "target flag is: ${{ env.TARGET_FLAGS }}" - name: Run cargo test if: "!matrix.skip_tests" run: ${{ env.CARGO }} test --release --locked --target ${{ matrix.target }} --workspace - name: Build release binary run: ${{ env.CARGO }} build --profile opt --locked --target ${{ matrix.target }} - name: Build AppImage shell: bash if: matrix.build == 'x86_64-linux' run: | # Required as of 22.x https://github.com/AppImage/AppImageKit/wiki/FUSE sudo add-apt-repository universe sudo apt install libfuse2 mkdir dist name=dev if [[ $GITHUB_REF == refs/tags/* ]]; then name=${GITHUB_REF:10} fi build="${{ matrix.build }}" export VERSION="$name" export ARCH=${build%-linux} export APP=helix export OUTPUT="helix-$VERSION-$ARCH.AppImage" export UPDATE_INFORMATION="gh-releases-zsync|$GITHUB_REPOSITORY_OWNER|helix|latest|$APP-*-$ARCH.AppImage.zsync" mkdir -p "$APP.AppDir"/usr/{bin,lib/helix} cp "target/${{ matrix.target }}/opt/hx" "$APP.AppDir/usr/bin/hx" rm -rf runtime/grammars/sources cp -r runtime "$APP.AppDir/usr/lib/helix/runtime" cat << 'EOF' > "$APP.AppDir/AppRun" #!/bin/sh APPDIR="$(dirname "$(readlink -f "${0}")")" 
HELIX_RUNTIME="$APPDIR/usr/lib/helix/runtime" exec "$APPDIR/usr/bin/hx" "$@" EOF chmod 755 "$APP.AppDir/AppRun" curl -Lo linuxdeploy-x86_64.AppImage \ https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage chmod +x linuxdeploy-x86_64.AppImage ./linuxdeploy-x86_64.AppImage \ --appdir "$APP.AppDir" -d contrib/Helix.desktop \ -i contrib/helix.png --output appimage mv "$APP-$VERSION-$ARCH.AppImage" \ "$APP-$VERSION-$ARCH.AppImage.zsync" dist - name: Build Deb shell: bash if: matrix.build == 'x86_64-linux' run: | cargo install cargo-deb mkdir -p target/release cp target/${{ matrix.target }}/opt/hx target/release/ cargo deb --no-build mkdir -p dist mv target/debian/*.deb dist/ - name: Build archive shell: bash run: | mkdir -p dist if [ "${{ matrix.os }}" = "windows-2019" ]; then cp "target/${{ matrix.target }}/opt/hx.exe" "dist/" else cp "target/${{ matrix.target }}/opt/hx" "dist/" fi if [ -d runtime/grammars/sources ]; then rm -rf runtime/grammars/sources fi cp -r runtime dist - uses: actions/upload-artifact@v4 with: name: bins-${{ matrix.build }} path: dist publish: name: Publish needs: [dist] runs-on: ubuntu-latest steps: - name: Checkout sources uses: actions/checkout@v4 - uses: actions/download-artifact@v4 - name: Build archive shell: bash run: | set -ex source="$(pwd)" tag=${GITHUB_REF_NAME//\//} mkdir -p runtime/grammars/sources tar xJf grammars/grammars.tar.xz -C runtime/grammars/sources rm -rf grammars cd "$(mktemp -d)" mv $source/bins-* . 
mkdir dist for dir in bins-* ; do platform=${dir#"bins-"} if [[ $platform =~ "windows" ]]; then exe=".exe" fi pkgname=helix-$tag-$platform mkdir -p $pkgname cp $source/LICENSE $source/README.md $pkgname mkdir $pkgname/contrib cp -r $source/contrib/completion $pkgname/contrib mv bins-$platform/runtime $pkgname/ mv bins-$platform/hx$exe $pkgname chmod +x $pkgname/hx$exe if [[ "$platform" = "x86_64-linux" ]]; then mv bins-$platform/helix-*.AppImage* dist/ mv bins-$platform/*.deb dist/ fi if [ "$exe" = "" ]; then tar cJf dist/$pkgname.tar.xz $pkgname else 7z a -r dist/$pkgname.zip $pkgname fi done tar cJf dist/helix-$tag-source.tar.xz -C $source . mv dist $source/ - name: Upload binaries to release uses: svenstaro/upload-release-action@v2 if: env.preview == 'false' with: repo_token: ${{ secrets.GITHUB_TOKEN }} file: dist/* file_glob: true tag: ${{ github.ref_name }} overwrite: true - name: Upload binaries as artifact uses: actions/upload-artifact@v4 if: env.preview == 'true' with: name: release path: dist/* helix-cbac4273836f5837cec641ab21f365c79b102a4b/.gitignore000066400000000000000000000001111500614314100215660ustar00rootroot00000000000000target .direnv helix-term/rustfmt.toml result runtime/grammars .DS_Store helix-cbac4273836f5837cec641ab21f365c79b102a4b/CHANGELOG.md000066400000000000000000007427241500614314100214360ustar00rootroot00000000000000 # 25.01.1 (2025-01-19) 25.01.1 is a patch release focusing on fixing bugs and panics from changes in 25.01. 
Usability improvements: * Run external formatters from the document's directory ([#12315](https://github.com/helix-editor/helix/pull/12315)) Fixes: * Fix blank buffer picker preview on doc with no views ([917174e](https://github.com/helix-editor/helix/commit/917174e)) * Fix `join_selections` behavior on tabs ([#12452](https://github.com/helix-editor/helix/pull/12452)) * Fix recognition for color LSP completion hex codes for some language servers ([#12501](https://github.com/helix-editor/helix/pull/12501)) * Fix offsets to selections updated by `open_below`/`open_above` (`o`/`O`) in multi-cursor scenarios ([#12465](https://github.com/helix-editor/helix/pull/12465)) * Fix offsets to selections updated by `insert_newline` when trimming whitespace in multi-cursor scenarios ([4bd17e5](https://github.com/helix-editor/helix/commit/4bd17e5)) * Fix panic in path completion from resolving variables like `${HOME:-$HOME}` ([#12556](https://github.com/helix-editor/helix/pull/12556)) * Prevent line comment continuation when using `change_selection` (`c`) on a line above a comment ([#12575](https://github.com/helix-editor/helix/pull/12575)) Themes: * Update `onelight` ([#12399](https://github.com/helix-editor/helix/pull/12399)) * Add cursorline color to iceberg themes ([#12404](https://github.com/helix-editor/helix/pull/12404)) * Update `special`, `ui.text.directory` and `ui.virtual.wrap` in `dark_plus` ([#12530](https://github.com/helix-editor/helix/pull/12530)) New languages: * CodeQL ([#12470](https://github.com/helix-editor/helix/pull/12470)) * Gren ([#12525](https://github.com/helix-editor/helix/pull/12525)) Updated languages and queries: * Fix Teal LSP name ([#12395](https://github.com/helix-editor/helix/pull/12395)) * Highlight `:` in Rust as a delimiter ([#12408](https://github.com/helix-editor/helix/pull/12408)) * Update Swift highlights ([#12409](https://github.com/helix-editor/helix/pull/12409)) * Highlight JSX attributes as `@attribute` 
([#12416](https://github.com/helix-editor/helix/pull/12416)) * Improve markdown heading highlights ([#12417](https://github.com/helix-editor/helix/pull/12417)) * Add comment tokens configuration for JSONC ([b26903c](https://github.com/helix-editor/helix/commit/b26903c)) * Highlight the never type `!` as a type in Rust ([#12485](https://github.com/helix-editor/helix/pull/12485)) * Expand builtin function highlights for ECMA languages, Rust and Haskell ([#12488](https://github.com/helix-editor/helix/pull/12488)) * Recognize `.clang-tidy` as YAML ([#12498](https://github.com/helix-editor/helix/pull/12498)) * Update MATLAB grammar and indent queries ([#12518](https://github.com/helix-editor/helix/pull/12518)) * Recognize `rockspec` as Lua ([#12516](https://github.com/helix-editor/helix/pull/12516)) * Add `///` to Dart comment tokens configuration ([99d33c7](https://github.com/helix-editor/helix/commit/99d33c7)) * Update Solidity grammar and queries ([#12457](https://github.com/helix-editor/helix/pull/12457)) * Update Spade grammar and queries ([#12583](https://github.com/helix-editor/helix/pull/12583)) * Re-enable Hare fetching and building by default ([#11507](https://github.com/helix-editor/helix/pull/11507)) Packaging: * `--version` now prints a leading zero for single-digit months, for example `25.01` (03f35af) * Pin the Ubuntu GitHub Actions runners used for releases to `ubuntu-22.04` ([#12464](https://github.com/helix-editor/helix/pull/12464)) * Produce a Debian package (`.deb` file) in the release GitHub Actions workflow ([#12453](https://github.com/helix-editor/helix/pull/12453)) # 25.01 (2025-01-03) As always, a big thank you to all of the contributors! This release saw changes from 171 contributors. Breaking changes: * The `editor.lsp.display-messages` key now controls messages sent with the LSP `window/showMessage` notification rather than progress messages. 
If you want to enable progress messages you should now enable the `editor.lsp.display-progress-messages` key instead. ([#5535](https://github.com/helix-editor/helix/pull/5535)) Features: * Big refactor for `Picker`s ([#9647](https://github.com/helix-editor/helix/pull/9647), [#11209](https://github.com/helix-editor/helix/pull/11209), [#11216](https://github.com/helix-editor/helix/pull/11216), [#11211](https://github.com/helix-editor/helix/pull/11211), [#11343](https://github.com/helix-editor/helix/pull/11343), [#11406](https://github.com/helix-editor/helix/pull/11406)) * Use a table layout and allow filtering by column * Reimplement `global_search` to allow changing the query dynamically * Add an alternative "inline" display for LSP diagnostics ([#6417](https://github.com/helix-editor/helix/pull/6417), [#11815](https://github.com/helix-editor/helix/pull/11815)) * Support defining keybindings as macros ([#4709](https://github.com/helix-editor/helix/pull/4709)) * Continue line comments in `o`/`O` and on `` in insert mode ([#10996](https://github.com/helix-editor/helix/pull/10996), [#12213](https://github.com/helix-editor/helix/pull/12213), [#12215](https://github.com/helix-editor/helix/pull/12215)) * Allow configuring and switching clipboard providers at runtime ([#10839](https://github.com/helix-editor/helix/pull/10839), [b855cd0](https://github.com/helix-editor/helix/commit/b855cd0), [467fad5](https://github.com/helix-editor/helix/commit/467fad5), [191b0f0](https://github.com/helix-editor/helix/commit/191b0f0)) * Add support for path completion ([#2608](https://github.com/helix-editor/helix/pull/2608)) * Support bindings with the Super (Cmd/Win/Meta) modifier ([#6592](https://github.com/helix-editor/helix/pull/6592)) * Support rendering and jumping between tabstops in snippet completions ([#9801](https://github.com/helix-editor/helix/pull/9801)) * Allow theming directory completions ([#12205](https://github.com/helix-editor/helix/pull/12205), 
[#12295](https://github.com/helix-editor/helix/pull/12295)) Commands: * Add commands to move within snake_case or camelCase words ([#8147](https://github.com/helix-editor/helix/pull/8147)) * Add `search_selection_detect_word_boundaries` ([#12126](https://github.com/helix-editor/helix/pull/12126)) * This command takes the `*` key in normal and select mode, replacing `search_selection` which was moved to `A-*`. Usability improvements: * Add `:edit` and `:e` aliases for `:open` ([#11186](https://github.com/helix-editor/helix/pull/11186), [#11196](https://github.com/helix-editor/helix/pull/11196)) * Trim trailing newline from pipe command outputs when the input doesn't have a trailing newline ([#11183](https://github.com/helix-editor/helix/pull/11183), [4f63a46](https://github.com/helix-editor/helix/commit/4f63a46)) * Add `:mv` alias for `:move` ([#11256](https://github.com/helix-editor/helix/pull/11256)) * Return document display name instead of absolute path from the `%` special register ([#11275](https://github.com/helix-editor/helix/pull/11275)) * Track view position on a per-view instead of per-document basis ([#10559](https://github.com/helix-editor/helix/pull/10559)) * Improve scrolloff calculation to leave a gap in the middle ([#11323](https://github.com/helix-editor/helix/pull/11323)) * Show a popup for stderr printed by failed `:sh` commands ([#11239](https://github.com/helix-editor/helix/pull/11239)) * Add statusline errors when nothing is selected with `s`, `K`, `A-K` ([#11370](https://github.com/helix-editor/helix/pull/11370)) * Add `.svn` as a workspace root marker ([#11429](https://github.com/helix-editor/helix/pull/11429)) * Trim the end of `:sh` outputs ([#11161](https://github.com/helix-editor/helix/pull/11161)) * Show LSP `window/showMessage` messages in the statusline ([#5535](https://github.com/helix-editor/helix/pull/5535)) * Support finding workspace directories via `.jj` directories ([#11685](https://github.com/helix-editor/helix/pull/11685)) * 
Join single-line comments with `join_selections` (`J`) ([#11742](https://github.com/helix-editor/helix/pull/11742)) * Show anonymous syntax tree nodes in `:tree-sitter-subtree` ([#11663](https://github.com/helix-editor/helix/pull/11663), [38e8382](https://github.com/helix-editor/helix/commit/38e8382)) * Save an undo checkpoint before paste in insert mode ([#8121](https://github.com/helix-editor/helix/pull/8121)) * Only break on ASCII spaces in `:reflow` ([#12048](https://github.com/helix-editor/helix/pull/12048)) * Add a `default-yank-register` config option ([#11430](https://github.com/helix-editor/helix/pull/11430)) * Show a statusline error for `:format` when a formatter is not available ([#12183](https://github.com/helix-editor/helix/pull/12183)) * Change to the home directory with `:cd` with no arguments ([#12042](https://github.com/helix-editor/helix/pull/12042)) * Change default comment token to `#` for unrecognized files ([#12080](https://github.com/helix-editor/helix/pull/12080), [#12266](https://github.com/helix-editor/helix/pull/12266), [bae6a58](https://github.com/helix-editor/helix/commit/bae6a58)) * Trim all trailing whitespace on `insert_newline` ([#12177](https://github.com/helix-editor/helix/pull/12177)) * Change to the prior directory with `:cd -` ([#12194](https://github.com/helix-editor/helix/pull/12194)) * Allow parsing `-` (with no modifiers) as a keybinding ([#12191](https://github.com/helix-editor/helix/pull/12191)) * Improve opening statusline and error messages when opening duplicate files or directories ([#12199](https://github.com/helix-editor/helix/pull/12199)) * Trim trailing colons in paths passed on the argv ([#9963](https://github.com/helix-editor/helix/pull/9963)) * Show tree-sitter parser availability in `hx --health ` ([#12228](https://github.com/helix-editor/helix/pull/12228)) * Show a preview block for colors in the LSP completion menu ([#12299](https://github.com/helix-editor/helix/pull/12299)) * Add infobox help for 
`surround_add`, `surround_replace` and `surround_delete` ([#12262](https://github.com/helix-editor/helix/pull/12262)) Fixes: * Respect document indentation settings in `format_selections` (`=`) ([#11169](https://github.com/helix-editor/helix/pull/11169)) * Avoid switching the current document to normal mode during an LSP `workspace/applyEdit` operation ([#11176](https://github.com/helix-editor/helix/pull/11176)) * Fix off-by-one in LSP `find_completion_range` ([#11266](https://github.com/helix-editor/helix/pull/11266)) * Prefer file-system mtime to local system time for detecting external modifications ([#11142](https://github.com/helix-editor/helix/pull/11142), [#11352](https://github.com/helix-editor/helix/pull/11352), [#11358](https://github.com/helix-editor/helix/pull/11358), [#11361](https://github.com/helix-editor/helix/pull/11361)) * Fix writing of hardlinks ([#11340](https://github.com/helix-editor/helix/pull/11340)) * Prevent language servers from being automatically restarted when stopped with `:lsp-stop` ([#11321](https://github.com/helix-editor/helix/pull/11321)) * Stable-sort LSP text edits ([#11357](https://github.com/helix-editor/helix/pull/11357)) * Fix determination of current language layer in documents with nested language injections ([#11365](https://github.com/helix-editor/helix/pull/11365)) * Fix a panic from `:move`ing a file to a new extension which starts a language server ([#11387](https://github.com/helix-editor/helix/pull/11387)) * Fix a panic from duplicating the diff gutter ([#11092](https://github.com/helix-editor/helix/pull/11092)) * Keep cursor position when exactly replacing text ([#5930](https://github.com/helix-editor/helix/pull/5930)) * Fix a panic from `jump_backward` on a newly opened split ([#11508](https://github.com/helix-editor/helix/pull/11508)) * Fix a panic from language servers sending an unknown diagnostic severity ([#11569](https://github.com/helix-editor/helix/pull/11569)) * Fix a panic when drawing at the edge of 
the screen ([#11737](https://github.com/helix-editor/helix/pull/11737)) * Fix git repo detection on symlinks ([#11732](https://github.com/helix-editor/helix/pull/11732)) * Fix a panic from a language server sending an out-of-range active signature index in `textDocument/signatureHelp` ([#11825](https://github.com/helix-editor/helix/pull/11825)) * Fix a panic from using `C-k` in a prompt ending in a multi-byte character ([#12237](https://github.com/helix-editor/helix/pull/12237)) * Expand tildes in paths passed to `:read` ([#12271](https://github.com/helix-editor/helix/pull/12271)) * Respect per-language `workspace-lsp-roots` configuration when opening new documents ([#12223](https://github.com/helix-editor/helix/pull/12223)) * Consistently replace line-endings in paste/replace commands ([c262fe4](https://github.com/helix-editor/helix/commit/c262fe4)) * Fix formatting in error statusline messages when inspecting variables in DAP ([#12354](https://github.com/helix-editor/helix/pull/12354)) * Fix invisible printing of headers in `--health` output on light terminals ([#12355](https://github.com/helix-editor/helix/pull/12355)) * Accept integers serialized as floats in the JSONRPC `id` field ([#12376](https://github.com/helix-editor/helix/pull/12376)) Themes: * Bring `kanagawa` colors better in line with neovim version ([#11187](https://github.com/helix-editor/helix/pull/11187), [#11270](https://github.com/helix-editor/helix/pull/11270)) * Add `ao` ([#11063](https://github.com/helix-editor/helix/pull/11063)) * Update `dark_plus` ([#11415](https://github.com/helix-editor/helix/pull/11415)) * Add `iceberg-light` and `iceberg-dark` ([#10674](https://github.com/helix-editor/helix/pull/10674)) * Update everforest themes ([#11459](https://github.com/helix-editor/helix/pull/11459)) * Update gruvbox themes ([#11477](https://github.com/helix-editor/helix/pull/11477)) * Change primary selection cursor color for `naysayer` 
([#11617](https://github.com/helix-editor/helix/pull/11617)) * Style picker column names in `horizon-dark` ([#11649](https://github.com/helix-editor/helix/pull/11649)) * Style picker column names in Darcula themes ([#11649](https://github.com/helix-editor/helix/pull/11649)) * Update diagnostics colors in `snazzy` ([#11731](https://github.com/helix-editor/helix/pull/11731)) * Update bogster themes ([#11353](https://github.com/helix-editor/helix/pull/11353)) * Highlight `keyword.storage` in `onedark` ([#11802](https://github.com/helix-editor/helix/pull/11802)) * Add `ui.virtual.jump-label` to `serika-dark` ([#11911](https://github.com/helix-editor/helix/pull/11911)) * Add `adwaita-light` ([#10869](https://github.com/helix-editor/helix/pull/10869)) * Add seoul256 themes ([#11466](https://github.com/helix-editor/helix/pull/11466)) * Add yo themes ([#11703](https://github.com/helix-editor/helix/pull/11703)) * Add `eiffel` ([#11679](https://github.com/helix-editor/helix/pull/11679)) * Add `carbonfox` ([#11558](https://github.com/helix-editor/helix/pull/11558)) * Set tags color in monokai themes ([#11917](https://github.com/helix-editor/helix/pull/11917)) * Improve readability of spacebones picker selection ([#12064](https://github.com/helix-editor/helix/pull/12064)) * Update modus themes ([#11949](https://github.com/helix-editor/helix/pull/11949)) * Use bold for statusline mode indicator in `onedarker` ([#11958](https://github.com/helix-editor/helix/pull/11958)) * Update hex themes, add a new hex theme ([#10849](https://github.com/helix-editor/helix/pull/10849)) * Add `sunset` ([#12093](https://github.com/helix-editor/helix/pull/12093)) * Add bufferline highlighting for flexoki themes ([#12146](https://github.com/helix-editor/helix/pull/12146)) * Add colors for (un)checked list items to catppuccin themes ([#12167](https://github.com/helix-editor/helix/pull/12167)) * Update `voxed` ([#9328](https://github.com/helix-editor/helix/pull/9328)) * Add `vintage` 
([#9361](https://github.com/helix-editor/helix/pull/9361)) * Add directory style to everforest themes ([#12287](https://github.com/helix-editor/helix/pull/12287)) * Add inactive text and update jump label highlights in `dark_plus` ([#12289](https://github.com/helix-editor/helix/pull/12289)) * Sync changes with catppuccin themes ([#12304](https://github.com/helix-editor/helix/pull/12304)) * Add `ui.text.directory` to `nightfox` ([#12328](https://github.com/helix-editor/helix/pull/12328)) * Add `ui.text.directory` to `sunset` ([#12328](https://github.com/helix-editor/helix/pull/12328)) * Add `diagnostic.unnecessary` to Catppuccin themes ([#12391](https://github.com/helix-editor/helix/pull/12391)) New languages: * `jjdescription` ([#11271](https://github.com/helix-editor/helix/pull/11271), [#11857](https://github.com/helix-editor/helix/pull/11857), [#12305](https://github.com/helix-editor/helix/pull/12305)) * i3wm and Sway configs ([#11424](https://github.com/helix-editor/helix/pull/11424)) * TypeSpec ([#11412](https://github.com/helix-editor/helix/pull/11412)) * jq ([#11393](https://github.com/helix-editor/helix/pull/11393)) * Thrift ([#11367](https://github.com/helix-editor/helix/pull/11367)) * Gherkin ([#11083](https://github.com/helix-editor/helix/pull/11083)) * Circom ([#11676](https://github.com/helix-editor/helix/pull/11676)) * Dune ([#11829](https://github.com/helix-editor/helix/pull/11829)) * Snakemake ([#11858](https://github.com/helix-editor/helix/pull/11858), [#11936](https://github.com/helix-editor/helix/pull/11936)) * Cylc ([#11830](https://github.com/helix-editor/helix/pull/11830)) * textproto ([#11874](https://github.com/helix-editor/helix/pull/11874)) * Spade ([#11448](https://github.com/helix-editor/helix/pull/11448), [#12276](https://github.com/helix-editor/helix/pull/12276)) * NestedText ([#11987](https://github.com/helix-editor/helix/pull/11987)) * Quint ([#11898](https://github.com/helix-editor/helix/pull/11898)) * Amber-lang 
([#12021](https://github.com/helix-editor/helix/pull/12021)) * Vento ([#12147](https://github.com/helix-editor/helix/pull/12147)) * Teal ([#12081](https://github.com/helix-editor/helix/pull/12081)) * Koto ([#12307](https://github.com/helix-editor/helix/pull/12307)) * NGINX ([#12309](https://github.com/helix-editor/helix/pull/12309)) Updated languages and queries: * Add comment injections for Hare ([#11173](https://github.com/helix-editor/helix/pull/11173)) * Improve highlights for `blade.php` files ([#11138](https://github.com/helix-editor/helix/pull/11138)) * Update tree-sitter-slint ([#11224](https://github.com/helix-editor/helix/pull/11224), [#11757](https://github.com/helix-editor/helix/pull/11757), [#12297](https://github.com/helix-editor/helix/pull/12297)) * Recognize `just` files as Just ([#11286](https://github.com/helix-editor/helix/pull/11286)) * Recognize `mdx` as Markdown ([#11122](https://github.com/helix-editor/helix/pull/11122)) * Update Just grammar and queries ([#11306](https://github.com/helix-editor/helix/pull/11306)) * Recognize `tclsh` as TCL ([#11236](https://github.com/helix-editor/helix/pull/11236)) * Update Godot grammar and queries ([#11235](https://github.com/helix-editor/helix/pull/11235)) * Update Gleam grammar and queries ([#11427](https://github.com/helix-editor/helix/pull/11427)) * Add `mesonlsp` for Meson ([#11416](https://github.com/helix-editor/helix/pull/11416)) * Update HTML highlights ([#11400](https://github.com/helix-editor/helix/pull/11400)) * Add comment textobjects for Verilog ([#11388](https://github.com/helix-editor/helix/pull/11388)) * Switch tree-sitter-just grammar ([#11380](https://github.com/helix-editor/helix/pull/11380), [#11606](https://github.com/helix-editor/helix/pull/11606), [#12141](https://github.com/helix-editor/helix/pull/12141)) * Update tree-sitter-fsharp ([#11061](https://github.com/helix-editor/helix/pull/11061)) * Add `nixd` for Nix ([#10767](https://github.com/helix-editor/helix/pull/10767)) * 
Highlight types and enum members from the Rust prelude ([#8535](https://github.com/helix-editor/helix/pull/8535)) * Improve textobjects for HCL, Nix ([#11513](https://github.com/helix-editor/helix/pull/11513)) * Add textobjects queries for docker-compose, dockerfile, env, git-config, hcl, hocon, prisma, SQL and YAML ([#11513](https://github.com/helix-editor/helix/pull/11513)) * Recognize cshtml files as HTML ([#11540](https://github.com/helix-editor/helix/pull/11540)) * Set a memory limit for the Lean language server ([#11683](https://github.com/helix-editor/helix/pull/11683)) * Add configurations for jedi and ruff language servers ([#11630](https://github.com/helix-editor/helix/pull/11630)) * Update Vue highlights ([#11706](https://github.com/helix-editor/helix/pull/11706)) * Switch tree-sitter-hcl grammar ([#11749](https://github.com/helix-editor/helix/pull/11749)) * Fix `odinfmt` formatter configuration ([#11759](https://github.com/helix-editor/helix/pull/11759)) * Recognize `rbs` files as Ruby ([#11786](https://github.com/helix-editor/helix/pull/11786)) * Update tree-sitter-nickel ([#11771](https://github.com/helix-editor/helix/pull/11771)) * Recognize `ldtk` and `ldtkl` files as JSON ([#11793](https://github.com/helix-editor/helix/pull/11793)) * Fix highlights for builtin functions in Fish ([#11792](https://github.com/helix-editor/helix/pull/11792)) * Add `superhtml` for HTML ([#11609](https://github.com/helix-editor/helix/pull/11609)) * Add a configuration for the Vale language server ([#11636](https://github.com/helix-editor/helix/pull/11636)) * Add Erlang Language Platform (`elp`) for Erlang ([#11499](https://github.com/helix-editor/helix/pull/11499)) * Update Odin highlights ([#11804](https://github.com/helix-editor/helix/pull/11804)) * Remove auto-pairs for single quotes in SML ([#11838](https://github.com/helix-editor/helix/pull/11838)) * Add `glsl_analyzer` for GLSL ([#11891](https://github.com/helix-editor/helix/pull/11891)) * Recognize `.prettierrc` 
as YAML ([#11997](https://github.com/helix-editor/helix/pull/11997)) * Fix `swift-format` formatter configuration ([#12052](https://github.com/helix-editor/helix/pull/12052)) * Add `package.json` and `tsconfig.json` as JS/TS workspace roots ([#10652](https://github.com/helix-editor/helix/pull/10652)) * Add "INVARIANT" to comment error highlights ([#12094](https://github.com/helix-editor/helix/pull/12094)) * Update Rescript grammar and queries ([#11165](https://github.com/helix-editor/helix/pull/11165)) * Update tree-sitter-nasm ([#11795](https://github.com/helix-editor/helix/pull/11795)) * Update LLVM grammars ([#11851](https://github.com/helix-editor/helix/pull/11851)) * Update Perl and Pod grammars ([#11848](https://github.com/helix-editor/helix/pull/11848)) * Add Nim injections in Nix ([#11837](https://github.com/helix-editor/helix/pull/11837)) * Recognize `livemd` as Markdown ([#12034](https://github.com/helix-editor/helix/pull/12034)) * Update Unison grammar and queries ([#12039](https://github.com/helix-editor/helix/pull/12039)) * Turn off Swift auto-format by default ([#12071](https://github.com/helix-editor/helix/pull/12071)) * Recognize `.swift-format` as JSON ([#12071](https://github.com/helix-editor/helix/pull/12071)) * Recognize `.clangd` and `.clang-format` as YAML ([#12032](https://github.com/helix-editor/helix/pull/12032)) * Recognize `ssh_config.d/*.conf` as sshclientconfig ([#11947](https://github.com/helix-editor/helix/pull/11947)) * Update comment token configs for Zig ([#12049](https://github.com/helix-editor/helix/pull/12049)) * Update tree-sitter-bicep ([#11525](https://github.com/helix-editor/helix/pull/11525)) * Add `hyperls` for Hyperlang ([#11056](https://github.com/helix-editor/helix/pull/11056)) * Add highlight queries for Solidity ([#12102](https://github.com/helix-editor/helix/pull/12102)) * Recognize `WORKSPACE.bzlmod` as Starlark ([#12103](https://github.com/helix-editor/helix/pull/12103)) * Update Ada grammar and queries 
([#12131](https://github.com/helix-editor/helix/pull/12131)) * Restrict Hocon file-types glob patterns ([#12156](https://github.com/helix-editor/helix/pull/12156)) * Update Mojo language server to Magic ([#12195](https://github.com/helix-editor/helix/pull/12195)) * Switch tree-sitter-v grammar ([#12236](https://github.com/helix-editor/helix/pull/12236)) * Add "COMPLIANCE" to comment error highlights ([#12094](https://github.com/helix-editor/helix/pull/12094)) * Add a language server configuration for `ltex-ls-plus` ([#12251](https://github.com/helix-editor/helix/pull/12251)) * Update tree-sitter-dockerfile ([#12230](https://github.com/helix-editor/helix/pull/12230)) * Add `]` to PHP outdents ([#12286](https://github.com/helix-editor/helix/pull/12286)) * Add textobjects for Odin ([#12302](https://github.com/helix-editor/helix/pull/12302)) * Update tree-sitter-heex and queries ([#12334](https://github.com/helix-editor/helix/pull/12334)) * Update protobuf highlights ([#12339](https://github.com/helix-editor/helix/pull/12339)) * Switch tree-sitter-query (TSQ) grammar ([#12148](https://github.com/helix-editor/helix/pull/12148), [e0bccd2](https://github.com/helix-editor/helix/commit/e0bccd2)) * Add block comment configurations for jinja and nunjucks ([#12348](https://github.com/helix-editor/helix/pull/12348)) * Add `uv` shebang for python ([#12360](https://github.com/helix-editor/helix/pull/12360)) * Update tree-sitter-vento ([#12368](https://github.com/helix-editor/helix/pull/12368)) * Switch Protobuf tree-sitter grammar ([#12225](https://github.com/helix-editor/helix/pull/12225)) * Recognize `hypr/*.conf` as Hyprland ([#12384](https://github.com/helix-editor/helix/pull/12384)) Packaging: * Add completions for Nushell ([#11262](https://github.com/helix-editor/helix/pull/11262), [#11346](https://github.com/helix-editor/helix/pull/11346)) * Fix completion of flags in Bash completions ([#11246](https://github.com/helix-editor/helix/pull/11246)) * Include shell completions 
in Nix outputs ([#11518](https://github.com/helix-editor/helix/pull/11518)) # 24.07 (2024-07-14) Thanks to all of the contributors! This release has changes from 160 contributors. Breaking changes: Features: - Add a textobject for entries/elements of list-like things ([#8150](https://github.com/helix-editor/helix/pull/8150)) - Add a picker showing files changed in VCS ([#5645](https://github.com/helix-editor/helix/pull/5645)) - Use a temporary file for writes ([#9236](https://github.com/helix-editor/helix/pull/9236), [#10339](https://github.com/helix-editor/helix/pull/10339), [#10790](https://github.com/helix-editor/helix/pull/10790)) - Allow cycling through LSP signature-help signatures with `A-n`/`A-p` ([#9974](https://github.com/helix-editor/helix/pull/9974), [#10654](https://github.com/helix-editor/helix/pull/10654), [#10655](https://github.com/helix-editor/helix/pull/10655)) - Use tree-sitter when finding matching brackets and closest pairs ([#8294](https://github.com/helix-editor/helix/pull/8294), [#10613](https://github.com/helix-editor/helix/pull/10613), [#10777](https://github.com/helix-editor/helix/pull/10777)) - Auto-save all buffers after a delay ([#10899](https://github.com/helix-editor/helix/pull/10899), [#11047](https://github.com/helix-editor/helix/pull/11047)) Commands: - `select_all_siblings` (`A-a`) - select all siblings of each selection ([87c4161](https://github.com/helix-editor/helix/commit/87c4161)) - `select_all_children` (`A-I`) - select all children of each selection ([fa67c5c](https://github.com/helix-editor/helix/commit/fa67c5c)) - `:read` - insert the contents of the given file at each selection ([#10447](https://github.com/helix-editor/helix/pull/10447)) Usability improvements: - Support scrolling popup contents using the mouse ([#10053](https://github.com/helix-editor/helix/pull/10053)) - Sort the jumplist picker so that most recent items come first ([#10095](https://github.com/helix-editor/helix/pull/10095)) - Improve `goto_file`'s 
(`gf`) automatic path detection strategy ([#9065](https://github.com/helix-editor/helix/pull/9065)) - Respect language server definition order in code action menu ([#9590](https://github.com/helix-editor/helix/pull/9590)) - Allow using a count with `goto_next_buffer` (`gn`) and `goto_previous_buffer` (`gp`) ([#10463](https://github.com/helix-editor/helix/pull/10463)) - Improve the positioning of popups ([#10257](https://github.com/helix-editor/helix/pull/10257), [#10573](https://github.com/helix-editor/helix/pull/10573)) - Reset all changes overlapped by selections in `:reset-diff-change` ([#10728](https://github.com/helix-editor/helix/pull/10728)) - Await pending writes in the `suspend` command (`C-z`) ([#10797](https://github.com/helix-editor/helix/pull/10797)) - Remove special handling of line ending characters in `replace` (`r`) ([#10786](https://github.com/helix-editor/helix/pull/10786)) - Use the selected register as a history register for `rename_symbol` (`r`) ([#10932](https://github.com/helix-editor/helix/pull/10932)) - Use the configured insert-mode cursor for prompt entry ([#10945](https://github.com/helix-editor/helix/pull/10945)) - Add tilted quotes to the matching brackets list ([#10971](https://github.com/helix-editor/helix/pull/10971)) - Prevent improper files like `/dev/urandom` from being used as file arguments ([#10733](https://github.com/helix-editor/helix/pull/10733)) - Allow multiple language servers to provide `:lsp-workspace-command`s ([#10176](https://github.com/helix-editor/helix/pull/10176), [#11105](https://github.com/helix-editor/helix/pull/11105)) - Trim output of commands executed through `:pipe` ([#10952](https://github.com/helix-editor/helix/pull/10952)) Fixes: - Use `lldb-dap` instead of `lldb-vscode` in default DAP configuration ([#10091](https://github.com/helix-editor/helix/pull/10091)) - Fix creation of uneven splits when closing windows ([#10004](https://github.com/helix-editor/helix/pull/10004)) - Avoid setting a register in 
`delete_selection_noyank`, fixing the command's use in command sequences ([#10050](https://github.com/helix-editor/helix/pull/10050), [#10148](https://github.com/helix-editor/helix/pull/10148)) - Fix jump alphabet config resetting when using `:config-reload` ([#10156](https://github.com/helix-editor/helix/pull/10156)) - Overlay LSP unnecessary/deprecated diagnostic tag highlights onto regular diagnostic highlights ([#10084](https://github.com/helix-editor/helix/pull/10084)) - Fix crash on LSP text edits with invalid ranges ([#9649](https://github.com/helix-editor/helix/pull/9649)) - Handle partial failure when sending multiple LSP `textDocument/didSave` notifications ([#10168](https://github.com/helix-editor/helix/pull/10168)) - Fix off-by-one error for completion-replace option ([#10279](https://github.com/helix-editor/helix/pull/10279)) - Fix mouse right-click selection behavior ([#10067](https://github.com/helix-editor/helix/pull/10067)) - Fix scrolling to the end within a popup ([#10181](https://github.com/helix-editor/helix/pull/10181)) - Fix jump label highlight locations when jumping in non-ascii text ([#10317](https://github.com/helix-editor/helix/pull/10317)) - Fix crashes from tree-sitter query captures that return non-grapheme aligned ranges ([#10310](https://github.com/helix-editor/helix/pull/10310)) - Include VCS change in `mi`/`ma` textobject infobox ([#10496](https://github.com/helix-editor/helix/pull/10496)) - Override crossterm's support for `NO_COLOR` ([#10514](https://github.com/helix-editor/helix/pull/10514)) - Respect mode when starting a search ([#10505](https://github.com/helix-editor/helix/pull/10505)) - Simplify first-in-line computation for indent queries ([#10527](https://github.com/helix-editor/helix/pull/10527)) - Ignore .svn version controlled files in file pickers ([#10536](https://github.com/helix-editor/helix/pull/10536)) - Fix overloading language servers with `completionItem/resolve` requests 
([38ee845](https://github.com/helix-editor/helix/commit/38ee845), [#10873](https://github.com/helix-editor/helix/pull/10873)) - Specify direction for `select_next_sibling` / `select_prev_sibling` ([#10542](https://github.com/helix-editor/helix/pull/10542)) - Fix restarting language servers ([#10614](https://github.com/helix-editor/helix/pull/10614)) - Don't stop at the first URL in `goto_file` ([#10622](https://github.com/helix-editor/helix/pull/10622)) - Fix overflows in window size calculations for small terminals ([#10620](https://github.com/helix-editor/helix/pull/10620)) - Allow missing or empty completion lists in DAP ([#10332](https://github.com/helix-editor/helix/pull/10332)) - Revert statusline refactor that could cause the statusline to blank out on files with long paths ([#10642](https://github.com/helix-editor/helix/pull/10642)) - Synchronize files after writing ([#10735](https://github.com/helix-editor/helix/pull/10735)) - Avoid `cnorm` for cursor-type detection in certain terminals ([#10769](https://github.com/helix-editor/helix/pull/10769)) - Reset inlay hints when stopping or restarting a language server ([#10741](https://github.com/helix-editor/helix/pull/10741)) - Fix logic for updating `--version` when development VCS HEAD changes ([#10896](https://github.com/helix-editor/helix/pull/10896)) - Set a max value for the count ([#10930](https://github.com/helix-editor/helix/pull/10930)) - Deserialize number IDs in DAP module types ([#10943](https://github.com/helix-editor/helix/pull/10943)) - Fix the behavior of `jump_backward` when the jumplist is at capacity ([#10968](https://github.com/helix-editor/helix/pull/10968)) - Fix injection layer heritage tracking for reused tree-sitter injection layers ([#1098](https://github.com/helix-editor/helix/pull/1098)) - Fix pluralization of "buffers" in the statusline for `:q`, `:q!`, `:wq` ([#11018](https://github.com/helix-editor/helix/pull/11018)) - Declare LSP formatting client capabilities
([#11064](https://github.com/helix-editor/helix/pull/11064)) - Commit uncommitted changes before attempting undo/earlier ([#11090](https://github.com/helix-editor/helix/pull/11090)) - Expand tilde for selected paths in `goto_file` ([#10964](https://github.com/helix-editor/helix/pull/10964)) - Commit undo checkpoints before `:write[-all]`, fixing the modification indicator ([#11062](https://github.com/helix-editor/helix/pull/11062)) Themes: - Add jump label styles to `nightfox` ([#10052](https://github.com/helix-editor/helix/pull/10052)) - Add jump label styles to Solarized themes ([#10056](https://github.com/helix-editor/helix/pull/10056)) - Add jump label styles to `cyan_light` ([#10058](https://github.com/helix-editor/helix/pull/10058)) - Add jump label styles to `onelight` ([#10061](https://github.com/helix-editor/helix/pull/10061)) - Add `flexoki-dark` and `flexoki-light` ([#10002](https://github.com/helix-editor/helix/pull/10002)) - Add default theme keys for LSP diagnostics tags to existing themes ([#10064](https://github.com/helix-editor/helix/pull/10064)) - Add jump label styles to base16 themes ([#10076](https://github.com/helix-editor/helix/pull/10076)) - Dim primary selection in `kanagawa` ([#10094](https://github.com/helix-editor/helix/pull/10094), [#10500](https://github.com/helix-editor/helix/pull/10500)) - Add jump label styles to tokyonight themes ([#10106](https://github.com/helix-editor/helix/pull/10106)) - Add jump label styles to papercolor themes ([#10104](https://github.com/helix-editor/helix/pull/10104)) - Add jump label styles to Darcula themes ([#10116](https://github.com/helix-editor/helix/pull/10116)) - Add jump label styles to `autumn` ([#10134](https://github.com/helix-editor/helix/pull/10134)) - Add jump label styles to Ayu themes ([#10133](https://github.com/helix-editor/helix/pull/10133)) - Add jump label styles to `dark_high_contrast` ([#10133](https://github.com/helix-editor/helix/pull/10133)) - Update material themes 
([#10290](https://github.com/helix-editor/helix/pull/10290)) - Add jump label styles to `varua` ([#10299](https://github.com/helix-editor/helix/pull/10299)) - Add ruler style to `adwaita-dark` ([#10260](https://github.com/helix-editor/helix/pull/10260)) - Remove `ui.highlight` effects from `solarized_dark` ([#10261](https://github.com/helix-editor/helix/pull/10261)) - Fix statusline color in material themes ([#10308](https://github.com/helix-editor/helix/pull/10308)) - Brighten `nord` selection highlight ([#10307](https://github.com/helix-editor/helix/pull/10307)) - Add inlay-hint styles to monokai themes ([#10334](https://github.com/helix-editor/helix/pull/10334)) - Add bufferline and cursorline colors to `vim_dark_high_contrast` ([#10444](https://github.com/helix-editor/helix/pull/10444)) - Switch themes with foreground rulers to background ([#10309](https://github.com/helix-editor/helix/pull/10309)) - Fix statusline colors for `everblush` ([#10394](https://github.com/helix-editor/helix/pull/10394)) - Use `yellow1` for `gruvbox` warning diagnostics ([#10506](https://github.com/helix-editor/helix/pull/10506)) - Add jump label styles to Modus themes ([#10538](https://github.com/helix-editor/helix/pull/10538)) - Refactor `dark_plus` and switch maintainers ([#10543](https://github.com/helix-editor/helix/pull/10543), [#10574](https://github.com/helix-editor/helix/pull/10574)) - Add debug highlights to `dark_plus` ([#10593](https://github.com/helix-editor/helix/pull/10593)) - Fix per-mode cursor colors in the default theme ([#10608](https://github.com/helix-editor/helix/pull/10608)) - Add `tag` and `attribute` highlights to `dark_high_contrast` ([#10705](https://github.com/helix-editor/helix/pull/10705)) - Improve readability of virtual text with `noctis` theme ([#10910](https://github.com/helix-editor/helix/pull/10910)) - Sync `catppuccin` themes with upstream ([#10954](https://github.com/helix-editor/helix/pull/10954)) - Improve jump colors for `github_dark` themes 
([#10946](https://github.com/helix-editor/helix/pull/10946)) - Add modeline and default virtual highlights to `base16_default` ([#10858](https://github.com/helix-editor/helix/pull/10858)) - Add `iroaseta` ([#10381](https://github.com/helix-editor/helix/pull/10381)) - Refactor `gruvbox` ([#10773](https://github.com/helix-editor/helix/pull/10773), [#11071](https://github.com/helix-editor/helix/pull/11071)) - Add cursorcolumn and cursorline to `base16_transparent` ([#11099](https://github.com/helix-editor/helix/pull/11099)) - Update cursorline color for `fleet_dark` ([#11046](https://github.com/helix-editor/helix/pull/11046)) - Add `kanagawa-dragon` ([#10172](https://github.com/helix-editor/helix/pull/10172)) New languages: - BitBake ([#10010](https://github.com/helix-editor/helix/pull/10010)) - Earthfile ([#10111](https://github.com/helix-editor/helix/pull/10111), [#10489](https://github.com/helix-editor/helix/pull/10489), [#10779](https://github.com/helix-editor/helix/pull/10779)) - TCL ([#9837](https://github.com/helix-editor/helix/pull/9837)) - ADL ([#10029](https://github.com/helix-editor/helix/pull/10029)) - LDIF ([#10330](https://github.com/helix-editor/helix/pull/10330)) - XTC ([#10448](https://github.com/helix-editor/helix/pull/10448)) - Move ([f06a166](https://github.com/helix-editor/helix/commit/f06a166)) - Pest ([#10616](https://github.com/helix-editor/helix/pull/10616)) - GJS/GTS ([#9940](https://github.com/helix-editor/helix/pull/9940)) - Inko ([#10656](https://github.com/helix-editor/helix/pull/10656)) - Mojo ([#10743](https://github.com/helix-editor/helix/pull/10743)) - Elisp ([#10644](https://github.com/helix-editor/helix/pull/10644)) Updated languages and queries: - Recognize `mkdn` files as markdown ([#10065](https://github.com/helix-editor/helix/pull/10065)) - Add comment injections for Gleam ([#10062](https://github.com/helix-editor/helix/pull/10062)) - Recognize BuildKite commands in YAML injections 
([#10090](https://github.com/helix-editor/helix/pull/10090)) - Add F# block comment token configuration ([#10108](https://github.com/helix-editor/helix/pull/10108)) - Update tree-sitter-templ and queries ([#10114](https://github.com/helix-editor/helix/pull/10114)) - Recognize `Tiltfile` as Starlark ([#10072](https://github.com/helix-editor/helix/pull/10072)) - Remove `todo.txt` from files recognized as todotxt ([5fece00](https://github.com/helix-editor/helix/commit/5fece00)) - Highlight `type` keyword in Python from PEP695 ([#10165](https://github.com/helix-editor/helix/pull/10165)) - Update tree-sitter-koka, add language server config ([#10119](https://github.com/helix-editor/helix/pull/10119)) - Recognize node and Python history files ([#10120](https://github.com/helix-editor/helix/pull/10120)) - Recognize more shell files as bash ([#10120](https://github.com/helix-editor/helix/pull/10120)) - Recognize the bun shebang as typescript ([#10120](https://github.com/helix-editor/helix/pull/10120)) - Add a configuration for the angular language server ([#10166](https://github.com/helix-editor/helix/pull/10166)) - Add textobject queries for Solidity ([#10318](https://github.com/helix-editor/helix/pull/10318)) - Recognize `meson.options` as Meson ([#10323](https://github.com/helix-editor/helix/pull/10323)) - Improve Solidity highlighting ([4fc0a4d](https://github.com/helix-editor/helix/commit/4fc0a4d)) - Recognize `_.tpl` files as Helm ([#10344](https://github.com/helix-editor/helix/pull/10344)) - Update tree-sitter-ld and highlights ([#10379](https://github.com/helix-editor/helix/pull/10379)) - Add `lldb-dap` configuration for Odin ([#10175](https://github.com/helix-editor/helix/pull/10175)) - Update tree-sitter-rust ([#10365](https://github.com/helix-editor/helix/pull/10365)) - Update tree-sitter-typst ([#10321](https://github.com/helix-editor/helix/pull/10321)) - Recognize `hyprpaper.conf`, `hypridle.conf` and `hyprlock.conf` as Hyprlang 
([#10383](https://github.com/helix-editor/helix/pull/10383)) - Improve HTML highlighting ([#10503](https://github.com/helix-editor/helix/pull/10503)) - Add `rust-script` and `cargo` as shebangs for Rust ([#10484](https://github.com/helix-editor/helix/pull/10484)) - Fix precedence of tag highlights in Svelte ([#10487](https://github.com/helix-editor/helix/pull/10487)) - Update tree-sitter-bash ([#10526](https://github.com/helix-editor/helix/pull/10526)) - Recognize `*.ignore` files as ignore ([#10579](https://github.com/helix-editor/helix/pull/10579)) - Add configuration to enable inlay hints in metals ([#10597](https://github.com/helix-editor/helix/pull/10597)) - Enable highlighting private members in ECMA languages ([#10554](https://github.com/helix-editor/helix/pull/10554)) - Add comment injection to typst queries ([#10628](https://github.com/helix-editor/helix/pull/10628)) - Add textobject queries for Hurl ([#10594](https://github.com/helix-editor/helix/pull/10594)) - Add `try` keyword to Rust ([#10641](https://github.com/helix-editor/helix/pull/10641)) - Add `is not` and `not in` to Python highlights ([#10647](https://github.com/helix-editor/helix/pull/10647)) - Remove ' and ⟨⟩ from Lean autopair configuration ([#10688](https://github.com/helix-editor/helix/pull/10688)) - Match TOML/YAML highlights for JSON keys ([#10676](https://github.com/helix-editor/helix/pull/10676)) - Recognize WORKSPACE files as Starlark ([#10713](https://github.com/helix-editor/helix/pull/10713)) - Switch Odin tree-sitter grammar and highlights ([#10698](https://github.com/helix-editor/helix/pull/10698)) - Update `tree-sitter-slint` ([#10749](https://github.com/helix-editor/helix/pull/10749)) - Add missing operators for Solidity highlights ([#10735](https://github.com/helix-editor/helix/pull/10735)) - Update `tree-sitter-inko` ([#10805](https://github.com/helix-editor/helix/pull/10805)) - Add `py`, `hs`, `rs` and `typ` injection regexes 
([#10785](https://github.com/helix-editor/helix/pull/10785)) - Update Swift grammar and queries ([#10802](https://github.com/helix-editor/helix/pull/10802)) - Update Cairo grammar and queries ([#10919](https://github.com/helix-editor/helix/pull/10919), [#11067](https://github.com/helix-editor/helix/pull/11067)) - Update Rust grammar ([#10973](https://github.com/helix-editor/helix/pull/10973)) - Add block comment tokens for typst ([#10955](https://github.com/helix-editor/helix/pull/10955)) - Recognize `jsonl` as JSON ([#11004](https://github.com/helix-editor/helix/pull/11004)) - Add rulers and text-width at 100 columns for Lean language ([#10969](https://github.com/helix-editor/helix/pull/10969)) - Improve VHDL highlights ([#10845](https://github.com/helix-editor/helix/pull/10845)) - Recognize `hsc` as Haskell ([#11074](https://github.com/helix-editor/helix/pull/11074)) - Fix heredoc and `$''` highlights in Bash ([#11118](https://github.com/helix-editor/helix/pull/11118)) - Add LSP configuration for `basedpyright` ([#11121](https://github.com/helix-editor/helix/pull/11121)) - Recognize `npmrc` and `.npmrc` files as INI ([#11131](https://github.com/helix-editor/helix/pull/11131)) - Recognize `~/.config/git/ignore` as git-ignore ([#11131](https://github.com/helix-editor/helix/pull/11131)) - Recognize `pdm.lock` and `uv.lock` as TOML ([#11131](https://github.com/helix-editor/helix/pull/11131)) - Recognize `.yml` as well as `.yaml` for Helm chart templates ([#11135](https://github.com/helix-editor/helix/pull/11135)) - Add regex injections for Bash ([#11112](https://github.com/helix-editor/helix/pull/11112)) - Update tree-sitter-todo ([#11097](https://github.com/helix-editor/helix/pull/11097)) Packaging: - Make `Helix.appdata.xml` spec-compliant ([#10051](https://github.com/helix-editor/helix/pull/10051)) - Expose all flake outputs through flake-compat ([#10673](https://github.com/helix-editor/helix/pull/10673)) - Bump the MSRV to 1.74.0
([#10714](https://github.com/helix-editor/helix/pull/10714)) - Improve Fish completions ([#10853](https://github.com/helix-editor/helix/pull/10853)) - Improve ZSH completions ([#10853](https://github.com/helix-editor/helix/pull/10853)) # 24.03 (2024-03-30) As always, a big thank you to all of the contributors! This release saw changes from 125 contributors. Breaking changes: - `suffix` file-types in the `file-types` key in language configuration have been removed ([#8006](https://github.com/helix-editor/helix/pull/8006)) - The `file-types` key in language configuration no longer matches full filenames without a glob pattern ([#8006](https://github.com/helix-editor/helix/pull/8006)) Features: - Open URLs with the `goto_file` command ([#5820](https://github.com/helix-editor/helix/pull/5820)) - Support drawing a border around popups and menus ([#4313](https://github.com/helix-editor/helix/pull/4313), [#9508](https://github.com/helix-editor/helix/pull/9508)) - Track long lived diagnostic sources like Clippy or `rustc` ([#6447](https://github.com/helix-editor/helix/pull/6447), [#9280](https://github.com/helix-editor/helix/pull/9280)) - This improves the handling of diagnostics from sources that only update the diagnostic positions on save. - Add support for LSP `window/showDocument` requests ([#8865](https://github.com/helix-editor/helix/pull/8865)) - Refactor ad-hoc hooks to use a new generic event system ([#8021](https://github.com/helix-editor/helix/pull/8021), [#9668](https://github.com/helix-editor/helix/pull/9668), [#9660](https://github.com/helix-editor/helix/pull/9660)) - This improves the behavior of autocompletions. For example navigating in insert mode no longer automatically triggers completions.
- Allow using globs in the language configuration `file-types` key ([#8006](https://github.com/helix-editor/helix/pull/8006)) - Allow specifying required roots for situational LSP activation ([#8696](https://github.com/helix-editor/helix/pull/8696)) - Extend selections using mouse clicks in select mode ([#5436](https://github.com/helix-editor/helix/pull/5436)) - Toggle block comments ([#4718](https://github.com/helix-editor/helix/pull/4718), [#9894](https://github.com/helix-editor/helix/pull/9894)) - Support LSP diagnostic tags ([#9780](https://github.com/helix-editor/helix/pull/9780)) - Add a `file-absolute-path` statusline element ([#4535](https://github.com/helix-editor/helix/pull/4535)) - Cross injection layers in tree-sitter motions (`A-p`/`A-o`/`A-i`/`A-n`) ([#5176](https://github.com/helix-editor/helix/pull/5176)) - Add a Amp-editor-like jumping command ([#8875](https://github.com/helix-editor/helix/pull/8875)) Commands: - `:move` - move buffers with LSP support ([#8584](https://github.com/helix-editor/helix/pull/8584)) - Also see [#8949](https://github.com/helix-editor/helix/pull/8949) which made path changes conform to the LSP spec and fixed the behavior of this command. 
- `page_cursor_up`, `page_cursor_down`, `page_cursor_half_up`, `page_cursor_half_down` - commands for scrolling the cursor and page together ([#8015](https://github.com/helix-editor/helix/pull/8015)) - `:yank-diagnostic` - yank the diagnostic(s) under the primary cursor ([#9640](https://github.com/helix-editor/helix/pull/9640)) - `select_line_above` / `select_line_below` - extend or shrink a selection based on the direction and anchor ([#9080](https://github.com/helix-editor/helix/pull/9080)) Usability improvements: - Make `roots` key of `[[language]]` entries in `languages.toml` configuration optional ([#8803](https://github.com/helix-editor/helix/pull/8803)) - Exit select mode in commands that modify the buffer ([#8689](https://github.com/helix-editor/helix/pull/8689)) - Use crossterm cursor when out of focus ([#6858](https://github.com/helix-editor/helix/pull/6858), [#8934](https://github.com/helix-editor/helix/pull/8934)) - Join empty lines with only one space in `join_selections` ([#8989](https://github.com/helix-editor/helix/pull/8989)) - Introduce a hybrid tree-sitter and contextual indentation heuristic ([#8307](https://github.com/helix-editor/helix/pull/8307)) - Allow configuring the indentation heuristic ([#8307](https://github.com/helix-editor/helix/pull/8307)) - Check for LSP rename support before showing rename prompt ([#9277](https://github.com/helix-editor/helix/pull/9277)) - Normalize `S-` keymaps to uppercase ascii ([#9213](https://github.com/helix-editor/helix/pull/9213)) - Add formatter status to `--health` output ([#7986](https://github.com/helix-editor/helix/pull/7986)) - Change path normalization strategy to not resolve symlinks ([#9330](https://github.com/helix-editor/helix/pull/9330)) - Select subtree within injections in `:tree-sitter-subtree` ([#9309](https://github.com/helix-editor/helix/pull/9309)) - Use tilde expansion and normalization for `$HELIX_RUNTIME` paths ([1bc7aac](https://github.com/helix-editor/helix/commit/1bc7aac)) - 
Improve failure message for LSP goto references ([#9382](https://github.com/helix-editor/helix/pull/9382)) - Use injection syntax trees for bracket matching ([5e0b3cc](https://github.com/helix-editor/helix/commit/5e0b3cc)) - Respect injections in `:tree-sitter-highlight-name` ([8b6565c](https://github.com/helix-editor/helix/commit/8b6565c)) - Respect injections in `move_parent_node_end` ([035b8ea](https://github.com/helix-editor/helix/commit/035b8ea)) - Use `gix` pipeline filter instead of manual CRLF implementation ([#9503](https://github.com/helix-editor/helix/pull/9503)) - Follow Neovim's truecolor detection ([#9577](https://github.com/helix-editor/helix/pull/9577)) - Reload language configuration with `:reload`, SIGHUP ([#9415](https://github.com/helix-editor/helix/pull/9415)) - Allow numbers as bindings ([#8471](https://github.com/helix-editor/helix/pull/8471), [#9887](https://github.com/helix-editor/helix/pull/9887)) - Respect undercurl config when terminfo is not available ([#9897](https://github.com/helix-editor/helix/pull/9897)) - Ignore `.pijul`, `.hg`, `.jj` in addition to `.git` in file pickers configured to show hidden files ([#9935](https://github.com/helix-editor/helix/pull/9935)) - Add completion for registers to `:clear-register` and `:yank-diagnostic` ([#9936](https://github.com/helix-editor/helix/pull/9936)) - Repeat last motion for goto next/prev diagnostic ([#9966](https://github.com/helix-editor/helix/pull/9966)) - Allow configuring a character to use when rendering narrow no-breaking space ([#9604](https://github.com/helix-editor/helix/pull/9604)) - Switch to a streaming regex engine (regex-cursor crate) to significantly speed up regex-based commands and features ([#9422](https://github.com/helix-editor/helix/pull/9422), [#9756](https://github.com/helix-editor/helix/pull/9756), [#9891](https://github.com/helix-editor/helix/pull/9891)) Fixes: - Swap `*` and `+` registers ([#8703](https://github.com/helix-editor/helix/pull/8703), 
[#8708](https://github.com/helix-editor/helix/pull/8708)) - Use terminfo to reset terminal cursor style ([#8591](https://github.com/helix-editor/helix/pull/8591)) - Fix precedence of `@align` captures in indentat computation ([#8659](https://github.com/helix-editor/helix/pull/8659)) - Only render the preview if a Picker has a preview function ([#8667](https://github.com/helix-editor/helix/pull/8667)) - Fix the precedence of `ui.virtual.whitespace` ([#8750](https://github.com/helix-editor/helix/pull/8750), [#8879](https://github.com/helix-editor/helix/pull/8879)) - Fix crash in `:indent-style` ([#9087](https://github.com/helix-editor/helix/pull/9087)) - Fix `didSave` text inclusion when sync capability is a kind variant ([#9101](https://github.com/helix-editor/helix/pull/9101)) - Update the history of newly focused views ([#9271](https://github.com/helix-editor/helix/pull/9271)) - Initialize diagnostics when opening a document ([#8873](https://github.com/helix-editor/helix/pull/8873)) - Sync views when applying edits to unfocused views ([#9173](https://github.com/helix-editor/helix/pull/9173)) - This fixes crashes that could occur from LSP workspace edits or `:write-all`. - Treat non-numeric `+arg`s passed in the CLI args as filenames ([#9333](https://github.com/helix-editor/helix/pull/9333)) - Fix crash when using `mm` on an empty plaintext file ([2fb7e50](https://github.com/helix-editor/helix/commit/2fb7e50)) - Ignore empty tree-sitter nodes in match bracket ([445f7a2](https://github.com/helix-editor/helix/commit/445f7a2)) - Exit a language server if it sends a message with invalid JSON ([#9332](https://github.com/helix-editor/helix/pull/9332)) - Handle failures to enable bracketed paste ([#9353](https://github.com/helix-editor/helix/pull/9353)) - Gate all captures in a pattern behind `#is-not? 
local` predicates ([#9390](https://github.com/helix-editor/helix/pull/9390)) - Make path changes LSP spec conformant ([#8949](https://github.com/helix-editor/helix/pull/8949)) - Use range positions to determine `insert_newline` motion ([#9448](https://github.com/helix-editor/helix/pull/9448)) - Fix division by zero when prompt completion area is too small ([#9524](https://github.com/helix-editor/helix/pull/9524)) - Add changes to history in clipboard replacement typable commands ([#9625](https://github.com/helix-editor/helix/pull/9625)) - Fix a crash in DAP with an unspecified `line` in breakpoints ([#9632](https://github.com/helix-editor/helix/pull/9632)) - Fix space handling for filenames in bash completion ([#9702](https://github.com/helix-editor/helix/pull/9702), [#9708](https://github.com/helix-editor/helix/pull/9708)) - Key diagnostics off of paths instead of LSP URIs ([#7367](https://github.com/helix-editor/helix/pull/7367)) - Fix panic when using `join_selections_space` ([#9783](https://github.com/helix-editor/helix/pull/9783)) - Fix panic when using `surround_replace`, `surround_delete` ([#9796](https://github.com/helix-editor/helix/pull/9796)) - Fix panic in `surround_replace`, `surround_delete` with nested surrounds and multiple cursors ([#9815](https://github.com/helix-editor/helix/pull/9815)) - Fix panic in `select_textobject_around` ([#9832](https://github.com/helix-editor/helix/pull/9832)) - Don't stop reloading documents when reloading fails in `:reload-all` ([#9870](https://github.com/helix-editor/helix/pull/9870)) - Prevent `shell_keep_pipe` from stopping on nonzero exit status codes ([#9817](https://github.com/helix-editor/helix/pull/9817)) Themes: - Add `gruber-dark` ([#8598](https://github.com/helix-editor/helix/pull/8598)) - Update `everblush` ([#8705](https://github.com/helix-editor/helix/pull/8705)) - Update `papercolor` ([#8718](https://github.com/helix-editor/helix/pull/8718), [#8827](https://github.com/helix-editor/helix/pull/8827)) - Add 
`polmandres` ([#8759](https://github.com/helix-editor/helix/pull/8759)) - Add `starlight` ([#8787](https://github.com/helix-editor/helix/pull/8787)) - Update `naysayer` ([#8838](https://github.com/helix-editor/helix/pull/8838)) - Add modus operandi themes ([#8728](https://github.com/helix-editor/helix/pull/8728), [#9912](https://github.com/helix-editor/helix/pull/9912)) - Update `rose_pine` ([#8946](https://github.com/helix-editor/helix/pull/8946)) - Update `darcula` ([#8738](https://github.com/helix-editor/helix/pull/8738), [#9002](https://github.com/helix-editor/helix/pull/9002), [#9449](https://github.com/helix-editor/helix/pull/9449), [#9588](https://github.com/helix-editor/helix/pull/9588)) - Add modus vivendi themes ([#8894](https://github.com/helix-editor/helix/pull/8894), [#9912](https://github.com/helix-editor/helix/pull/9912)) - Add `horizon-dark` ([#9008](https://github.com/helix-editor/helix/pull/9008), [#9493](https://github.com/helix-editor/helix/pull/9493)) - Update `noctis` ([#9123](https://github.com/helix-editor/helix/pull/9123)) - Update `nord` ([#9135](https://github.com/helix-editor/helix/pull/9135)) - Update monokai pro themes ([#9148](https://github.com/helix-editor/helix/pull/9148)) - Update tokyonight themes ([#9099](https://github.com/helix-editor/helix/pull/9099), [#9724](https://github.com/helix-editor/helix/pull/9724), [#9789](https://github.com/helix-editor/helix/pull/9789)) - Add `ttox` ([#8524](https://github.com/helix-editor/helix/pull/8524)) - Add `voxed` ([#9164](https://github.com/helix-editor/helix/pull/9164)) - Update `sonokai` ([#9370](https://github.com/helix-editor/helix/pull/9370), [#9376](https://github.com/helix-editor/helix/pull/9376), [#5379](https://github.com/helix-editor/helix/pull/5379)) - Update `onedark`, `onedarker` ([#9397](https://github.com/helix-editor/helix/pull/9397)) - Update `cyan_light` ([#9375](https://github.com/helix-editor/helix/pull/9375), [#9688](https://github.com/helix-editor/helix/pull/9688)) - 
Add `gruvbox_light_soft`, `gruvbox_light_hard` ([#9266](https://github.com/helix-editor/helix/pull/9266)) - Update GitHub themes ([#9487](https://github.com/helix-editor/helix/pull/9487)) - Add `term16_dark`, `term16_light` ([#9477](https://github.com/helix-editor/helix/pull/9477)) - Update Zed themes ([#9544](https://github.com/helix-editor/helix/pull/9544), [#9549](https://github.com/helix-editor/helix/pull/9549)) - Add `curzon` ([#9553](https://github.com/helix-editor/helix/pull/9553)) - Add `monokai_soda` ([#9651](https://github.com/helix-editor/helix/pull/9651)) - Update catppuccin themes ([#9859](https://github.com/helix-editor/helix/pull/9859)) - Update `rasmus` ([#9939](https://github.com/helix-editor/helix/pull/9939)) - Update `dark_plus` ([#9949](https://github.com/helix-editor/helix/pull/9949), [628dcd5](https://github.com/helix-editor/helix/commit/628dcd5)) - Update gruvbox themes ([#9960](https://github.com/helix-editor/helix/pull/9960)) - Add jump label theming to `dracula` ([#9973](https://github.com/helix-editor/helix/pull/9973)) - Add jump label theming to `horizon-dark` ([#9984](https://github.com/helix-editor/helix/pull/9984)) - Add jump label theming to catppuccin themes ([2178adf](https://github.com/helix-editor/helix/commit/2178adf), [#9983](https://github.com/helix-editor/helix/pull/9983)) - Add jump label theming to `onedark` themes ([da2dec1](https://github.com/helix-editor/helix/commit/da2dec1)) - Add jump label theming to rose-pine themes ([#9981](https://github.com/helix-editor/helix/pull/9981)) - Add jump label theming to Nord themes ([#10008](https://github.com/helix-editor/helix/pull/10008)) - Add jump label theming to Monokai themes ([#10009](https://github.com/helix-editor/helix/pull/10009)) - Add jump label theming to gruvbox themes ([#10012](https://github.com/helix-editor/helix/pull/10012)) - Add jump label theming to `kanagawa` ([#10030](https://github.com/helix-editor/helix/pull/10030)) - Update material themes 
([#10043](https://github.com/helix-editor/helix/pull/10043)) - Add `jetbrains_dark` ([#9967](https://github.com/helix-editor/helix/pull/9967)) New languages: - Typst ([#7474](https://github.com/helix-editor/helix/pull/7474)) - LPF ([#8536](https://github.com/helix-editor/helix/pull/8536)) - GN ([#6969](https://github.com/helix-editor/helix/pull/6969)) - DBML ([#8860](https://github.com/helix-editor/helix/pull/8860)) - log ([#8916](https://github.com/helix-editor/helix/pull/8916)) - Janet ([#9081](https://github.com/helix-editor/helix/pull/9081), [#9247](https://github.com/helix-editor/helix/pull/9247)) - Agda ([#8285](https://github.com/helix-editor/helix/pull/8285)) - Avro ([#9113](https://github.com/helix-editor/helix/pull/9113)) - Smali ([#9089](https://github.com/helix-editor/helix/pull/9089)) - HOCON ([#9203](https://github.com/helix-editor/helix/pull/9203)) - Tact ([#9512](https://github.com/helix-editor/helix/pull/9512)) - PKL ([#9515](https://github.com/helix-editor/helix/pull/9515)) - CEL ([#9296](https://github.com/helix-editor/helix/pull/9296)) - SpiceDB ([#9296](https://github.com/helix-editor/helix/pull/9296)) - Hoon ([#9190](https://github.com/helix-editor/helix/pull/9190)) - DockerCompose ([#9661](https://github.com/helix-editor/helix/pull/9661), [#9916](https://github.com/helix-editor/helix/pull/9916)) - Groovy ([#9350](https://github.com/helix-editor/helix/pull/9350), [#9681](https://github.com/helix-editor/helix/pull/9681), [#9677](https://github.com/helix-editor/helix/pull/9677)) - FIDL ([#9713](https://github.com/helix-editor/helix/pull/9713)) - Powershell ([#9827](https://github.com/helix-editor/helix/pull/9827)) - ld ([#9835](https://github.com/helix-editor/helix/pull/9835)) - Hyperland config ([#9899](https://github.com/helix-editor/helix/pull/9899)) - JSONC ([#9906](https://github.com/helix-editor/helix/pull/9906)) - PHP Blade ([#9513](https://github.com/helix-editor/helix/pull/9513)) - SuperCollider 
([#9329](https://github.com/helix-editor/helix/pull/9329)) - Koka ([#8727](https://github.com/helix-editor/helix/pull/8727)) - PKGBUILD ([#9909](https://github.com/helix-editor/helix/pull/9909), [#9943](https://github.com/helix-editor/helix/pull/9943)) - Ada ([#9908](https://github.com/helix-editor/helix/pull/9908)) - Helm charts ([#9900](https://github.com/helix-editor/helix/pull/9900)) - Ember.js templates ([#9902](https://github.com/helix-editor/helix/pull/9902)) - Ohm ([#9991](https://github.com/helix-editor/helix/pull/9991)) Updated languages and queries: - Add HTML injection queries for Rust ([#8603](https://github.com/helix-editor/helix/pull/8603)) - Switch to tree-sitter-ron for RON ([#8624](https://github.com/helix-editor/helix/pull/8624)) - Update and improve comment highlighting ([#8564](https://github.com/helix-editor/helix/pull/8564), [#9253](https://github.com/helix-editor/helix/pull/9253), [#9800](https://github.com/helix-editor/helix/pull/9800), [#10014](https://github.com/helix-editor/helix/pull/10014)) - Highlight type parameters in Rust ([#8660](https://github.com/helix-editor/helix/pull/8660)) - Change KDL tree-sitter parsers ([#8652](https://github.com/helix-editor/helix/pull/8652)) - Update tree-sitter-markdown ([#8721](https://github.com/helix-editor/helix/pull/8721), [#10039](https://github.com/helix-editor/helix/pull/10039)) - Update tree-sitter-purescript ([#8712](https://github.com/helix-editor/helix/pull/8712)) - Add type parameter highlighting to TypeScript, Go, Haskell, OCaml and Kotlin ([#8718](https://github.com/helix-editor/helix/pull/8718)) - Add indentation queries for Scheme and lisps using tree-sitter-scheme ([#8720](https://github.com/helix-editor/helix/pull/8720)) - Recognize `meson_options.txt` as Meson ([#8794](https://github.com/helix-editor/helix/pull/8794)) - Add language server configuration for Nushell ([#8878](https://github.com/helix-editor/helix/pull/8878)) - Recognize `musicxml` as XML 
([#8935](https://github.com/helix-editor/helix/pull/8935)) - Update tree-sitter-rescript ([#8962](https://github.com/helix-editor/helix/pull/8962)) - Update tree-sitter-python ([#8976](https://github.com/helix-editor/helix/pull/8976)) - Recognize `.envrc.local` and `.envrc.private` as env ([#8988](https://github.com/helix-editor/helix/pull/8988)) - Update tree-sitter-gleam ([#9003](https://github.com/helix-editor/helix/pull/9003), [9ceeea5](https://github.com/helix-editor/helix/commit/9ceeea5)) - Update tree-sitter-d ([#9021](https://github.com/helix-editor/helix/pull/9021)) - Fix R-markdown language name for LSP detection ([#9012](https://github.com/helix-editor/helix/pull/9012)) - Add haskell-language-server LSP configuration ([#9111](https://github.com/helix-editor/helix/pull/9111)) - Recognize `glif` as XML ([#9130](https://github.com/helix-editor/helix/pull/9130)) - Recognize `.prettierrc` as JSON ([#9214](https://github.com/helix-editor/helix/pull/9214)) - Add auto-pairs configuration for scheme ([#9232](https://github.com/helix-editor/helix/pull/9232)) - Add textobject queries for Scala ([#9191](https://github.com/helix-editor/helix/pull/9191)) - Add textobject queries for Protobuf ([#9184](https://github.com/helix-editor/helix/pull/9184)) - Update tree-sitter-wren ([#8544](https://github.com/helix-editor/helix/pull/8544)) - Add `spago.yaml` as an LSP root for PureScript ([#9362](https://github.com/helix-editor/helix/pull/9362)) - Improve highlight and indent queries for Bash, Make and CSS ([#9393](https://github.com/helix-editor/helix/pull/9393)) - Update tree-sitter-scala ([#9348](https://github.com/helix-editor/helix/pull/9348), [#9340](https://github.com/helix-editor/helix/pull/9340), [#9475](https://github.com/helix-editor/helix/pull/9475)) - Recognize `.bash_history` as Bash ([#9401](https://github.com/helix-editor/helix/pull/9401)) - Recognize Helix ignore files as ignore ([#9447](https://github.com/helix-editor/helix/pull/9447)) - Inject SQL into 
Scala SQL strings ([#9428](https://github.com/helix-editor/helix/pull/9428)) - Update gdscript textobjects ([#9288](https://github.com/helix-editor/helix/pull/9288)) - Update Go queries ([#9510](https://github.com/helix-editor/helix/pull/9510), [#9525](https://github.com/helix-editor/helix/pull/9525)) - Update tree-sitter-nushell ([#9502](https://github.com/helix-editor/helix/pull/9502)) - Update tree-sitter-unison, add indent queries ([#9505](https://github.com/helix-editor/helix/pull/9505)) - Update tree-sitter-slint ([#9551](https://github.com/helix-editor/helix/pull/9551), [#9698](https://github.com/helix-editor/helix/pull/9698)) - Update tree-sitter-swift ([#9586](https://github.com/helix-editor/helix/pull/9586)) - Add `fish_indent` as formatter for fish ([78ed3ad](https://github.com/helix-editor/helix/commit/78ed3ad)) - Recognize `zon` as Zig ([#9582](https://github.com/helix-editor/helix/pull/9582)) - Add a formatter for Odin ([#9537](https://github.com/helix-editor/helix/pull/9537)) - Update tree-sitter-erlang ([#9627](https://github.com/helix-editor/helix/pull/9627), [fdcd461](https://github.com/helix-editor/helix/commit/fdcd461)) - Capture Rust fields as argument textobjects ([#9637](https://github.com/helix-editor/helix/pull/9637)) - Improve Dart textobjects ([#9644](https://github.com/helix-editor/helix/pull/9644)) - Recognize `tmux.conf` as a bash file-type ([#9653](https://github.com/helix-editor/helix/pull/9653)) - Add textobjects queries for Nix ([#9659](https://github.com/helix-editor/helix/pull/9659)) - Add textobjects queries for HCL ([#9658](https://github.com/helix-editor/helix/pull/9658)) - Recognize osm and osc extensions as XML ([#9697](https://github.com/helix-editor/helix/pull/9697)) - Update tree-sitter-sql ([#9634](https://github.com/helix-editor/helix/pull/9634)) - Recognize pde Processing files as Java ([#9741](https://github.com/helix-editor/helix/pull/9741)) - Update tree-sitter-lua 
([#9727](https://github.com/helix-editor/helix/pull/9727)) - Switch tree-sitter-nim parsers ([#9722](https://github.com/helix-editor/helix/pull/9722)) - Recognize GTK builder ui files as XML ([#9754](https://github.com/helix-editor/helix/pull/9754)) - Add configuration for markdown-oxide language server ([#9758](https://github.com/helix-editor/helix/pull/9758)) - Add a shebang for elvish ([#9779](https://github.com/helix-editor/helix/pull/9779)) - Fix precedence of Svelte TypeScript injection ([#9777](https://github.com/helix-editor/helix/pull/9777)) - Recognize common Dockerfile file types ([#9772](https://github.com/helix-editor/helix/pull/9772)) - Recognize NUON files as Nu ([#9839](https://github.com/helix-editor/helix/pull/9839)) - Add textobjects for Java native functions and constructors ([#9806](https://github.com/helix-editor/helix/pull/9806)) - Fix "braket" typo in JSX highlights ([#9910](https://github.com/helix-editor/helix/pull/9910)) - Update tree-sitter-hurl ([#9775](https://github.com/helix-editor/helix/pull/9775)) - Add textobjects queries for Vala ([#8541](https://github.com/helix-editor/helix/pull/8541)) - Update tree-sitter-git-config ([9610254](https://github.com/helix-editor/helix/commit/9610254)) - Recognize 'mmd' as Mermaid ([459eb9a](https://github.com/helix-editor/helix/commit/459eb9a)) - Highlight Rust extern crate aliases ([c099dde](https://github.com/helix-editor/helix/commit/c099dde)) - Improve parameter highlighting in C++ ([f5d95de](https://github.com/helix-editor/helix/commit/f5d95de)) - Recognize 'rclone.conf' as INI ([#9959](https://github.com/helix-editor/helix/pull/9959)) - Add injections for GraphQL and ERB in Ruby heredocs ([#10036](https://github.com/helix-editor/helix/pull/10036)) - Add `main.odin` to Odin LSP roots ([#9968](https://github.com/helix-editor/helix/pull/9968)) Packaging: - Allow user overlays in Nix grammars build ([#8749](https://github.com/helix-editor/helix/pull/8749)) - Set Cargo feature resolver to v2 
([#8917](https://github.com/helix-editor/helix/pull/8917)) - Use workspace inheritance for common Cargo metadata ([#8925](https://github.com/helix-editor/helix/pull/8925)) - Remove sourcehut-based tree-sitter grammars from default build ([#9316](https://github.com/helix-editor/helix/pull/9316), [#9326](https://github.com/helix-editor/helix/pull/9326)) - Add icon to Windows executable ([#9104](https://github.com/helix-editor/helix/pull/9104)) # 23.10 (2023-10-24) A big shout out to all the contributors! We had 118 contributors in this release. Breaking changes: - Support multiple language servers per language ([#2507](https://github.com/helix-editor/helix/pull/2507)) - This is a breaking change to language configuration Features: - Support multiple language servers per language ([#2507](https://github.com/helix-editor/helix/pull/2507), [#7082](https://github.com/helix-editor/helix/pull/7082), [#7286](https://github.com/helix-editor/helix/pull/7286), [#8374](https://github.com/helix-editor/helix/pull/8374)) - Add a statusline element for the selected register ([#7222](https://github.com/helix-editor/helix/pull/7222)) - Add `%`, `#`, `.`, `*` and `+` special registers ([#6985](https://github.com/helix-editor/helix/pull/6985)) - Add initial support for LSP DidChangeWatchedFiles notifications ([#7665](https://github.com/helix-editor/helix/pull/7665)) - Search buffer contents in `global_search` ([#5652](https://github.com/helix-editor/helix/pull/5652)) - Add a "smart tab" command that intelligently jumps the cursor on tab ([#4443](https://github.com/helix-editor/helix/pull/4443)) - Add a statusline element for whether a file is read-only ([#7222](https://github.com/helix-editor/helix/pull/7222), [#7875](https://github.com/helix-editor/helix/pull/7875)) - Syntax highlight regex prompts ([#7738](https://github.com/helix-editor/helix/pull/7738)) - Allow defining alignment in indent queries ([#5355](https://github.com/helix-editor/helix/pull/5355)) - Show visual feedback in 
`surround_replace` ([#7588](https://github.com/helix-editor/helix/pull/7588)) - Switch to Nucleo for fuzzy matching ([#7814](https://github.com/helix-editor/helix/pull/7814), [#8148](https://github.com/helix-editor/helix/pull/8148), [#8192](https://github.com/helix-editor/helix/pull/8192), [#8194](https://github.com/helix-editor/helix/pull/8194)) - Insert a trailing newline on write ([#8157](https://github.com/helix-editor/helix/pull/8157)) - Add a `-w`/`--working-dir` CLI flag for specifying a working directory on startup ([#8223](https://github.com/helix-editor/helix/pull/8223), [#8498](https://github.com/helix-editor/helix/pull/8498), [#8520](https://github.com/helix-editor/helix/pull/8520)) - Accept a `+N` CLI argument to set the first file's line number ([#8521](https://github.com/helix-editor/helix/pull/8521)) - Accept Helix-specific ignore files in `.helix/ignore` and `~/.config/helix/ignore` ([#8099](https://github.com/helix-editor/helix/pull/8099)) Commands: - `merge_selections` (`A-minus`) - merge all selections into one selection that covers all ranges ([#7053](https://github.com/helix-editor/helix/pull/7053)) - `move_prev_long_word_end` and `extend_prev_long_word_end` - move/extend to the end of the previous WORD ([#6905](https://github.com/helix-editor/helix/pull/6905)) - `reverse_selection_contents` - swaps the values of each selection so they are reversed ([#7329](https://github.com/helix-editor/helix/pull/7329)) - Add `:rl` and `:rla` aliases for `:reload` and `:reload-all` ([#7158](https://github.com/helix-editor/helix/pull/7158)) - `yank_joined` - join the selections and yank to the selected register ([#7195](https://github.com/helix-editor/helix/pull/7195)) - `:write-all!` (`:wa!`) - forcibly write all buffers to disk and create any necessary subdirectories ([#7577](https://github.com/helix-editor/helix/pull/7577)) - `:redraw` - clear re-render the UI ([#6949](https://github.com/helix-editor/helix/pull/6949)) - `:tree-sitter-highlight-name` - 
show the theme scope name of the highlight under the cursor ([#8170](https://github.com/helix-editor/helix/pull/8170)) Usability improvements: - Allow cycling option values at runtime ([#4411](https://github.com/helix-editor/helix/pull/4411), [#7240](https://github.com/helix-editor/helix/pull/7240), [#7877](https://github.com/helix-editor/helix/pull/7877)) - Exit gracefully on termination signals ([#7236](https://github.com/helix-editor/helix/pull/7236)) - Add plaintext matching fallback to tree-sitter pair matching ([#4288](https://github.com/helix-editor/helix/pull/4288)) - Persist register selection in pending keymaps ([0e08349](https://github.com/helix-editor/helix/commit/0e08349)) - Propagate the count and register to command palette commands ([b394997](https://github.com/helix-editor/helix/commit/b394997)) - Auto indent on `insert_at_line_start` ([#5837](https://github.com/helix-editor/helix/pull/5837)) - Add a config option to control whether LSP completions are automatically inserted on preview ([#7189](https://github.com/helix-editor/helix/pull/7189)) - Add a config option for default line endings ([#5621](https://github.com/helix-editor/helix/pull/5621), [#7357](https://github.com/helix-editor/helix/pull/7357)) - Allow ANSI colors in themes ([#5119](https://github.com/helix-editor/helix/pull/5119)) - Match pairs that don't form a standalone tree-sitter node ([#7242](https://github.com/helix-editor/helix/pull/7242)) - Allow indent sizes of up to 16 columns ([#7429](https://github.com/helix-editor/helix/pull/7429)) - Improve performance of mapping positions through changes ([#7408](https://github.com/helix-editor/helix/pull/7408), [8d39a81](https://github.com/helix-editor/helix/commit/8d39a81), [#7471](https://github.com/helix-editor/helix/pull/7471)) - Mark buffers created from stdin as modified ([#7431](https://github.com/helix-editor/helix/pull/7431)) - Forcibly shut down uninitialized language servers 
([#7449](https://github.com/helix-editor/helix/pull/7449)) - Add filename completer for shell prompts ([#7569](https://github.com/helix-editor/helix/pull/7569)) - Allow binding F13-F24 ([#7672](https://github.com/helix-editor/helix/pull/7672)) - Resolve LSP code actions ([#7677](https://github.com/helix-editor/helix/pull/7677), [#8421](https://github.com/helix-editor/helix/pull/8421)) - Save an undo checkpoint before accepting completions ([#7747](https://github.com/helix-editor/helix/pull/7747)) - Include gitignored files in debugger completions ([#7936](https://github.com/helix-editor/helix/pull/7936)) - Make editor remember the last search register ([#5244](https://github.com/helix-editor/helix/pull/5244)) - Open directories with `goto_file` ([#7909](https://github.com/helix-editor/helix/pull/7909)) - Use relative path to open buffer in `goto_file` (`gf`) ([#7965](https://github.com/helix-editor/helix/pull/7965)) - Support `default` color in themes ([#8083](https://github.com/helix-editor/helix/pull/8083), [#8114](https://github.com/helix-editor/helix/pull/8114)) - Toggle between relative and absolute line numbers when the terminal loses focus ([#7955](https://github.com/helix-editor/helix/pull/7955)) - Lower default idle-timeout to 250ms ([060e73a](https://github.com/helix-editor/helix/commit/060e73a)) - Allow theming diff gutters separately from other diff colors ([#8343](https://github.com/helix-editor/helix/pull/8343)) - Style bold/italic/strikethrough in markdown doc popups ([#8385](https://github.com/helix-editor/helix/pull/8385)) - Maintain the cursor position and view when splitting with `:hsplit`/`:vsplit` ([#8109](https://github.com/helix-editor/helix/pull/8109)) - Accept `-` in macros outside of `<`/`>` ([#8475](https://github.com/helix-editor/helix/pull/8475)) - Show all language servers for each language in `--health` ([#7315](https://github.com/helix-editor/helix/pull/7315)) - Don't break on hyphens in `:reflow` 
([#8569](https://github.com/helix-editor/helix/pull/8569)) Fixes: - Update diagnostics correctly on language server exit ([#7111](https://github.com/helix-editor/helix/pull/7111)) - Fix off-by-one in `select_references_to_symbol_under_cursor` ([#7132](https://github.com/helix-editor/helix/pull/7132)) - Extend selection with repeat-last-motion only if the original motion extended the selection ([#7159](https://github.com/helix-editor/helix/pull/7159)) - Fix undefined behavior in the diff gutter ([#7227](https://github.com/helix-editor/helix/pull/7227)) - Check that tab width is non-zero ([#7178](https://github.com/helix-editor/helix/pull/7178)) - Fix styles being overwritten in table rows with multiple cells ([#7281](https://github.com/helix-editor/helix/pull/7281)) - Add file for `--log` CLI arg in help text ([#7307](https://github.com/helix-editor/helix/pull/7307)) - Fix underflow when repeating a completion that has a negative shift position ([#7322](https://github.com/helix-editor/helix/pull/7322)) - Prefer longer matches in `select_next_sibling` and `select_prev_sibling` ([#7332](https://github.com/helix-editor/helix/pull/7332)) - Preview scratch buffers in the jumplist picker ([#7331](https://github.com/helix-editor/helix/pull/7331)) - Fix chunking by bytes in tree-sitter parsing ([#7417](https://github.com/helix-editor/helix/pull/7417)) - Discard LSP publishDiagnostic from uninitialized servers ([#7467](https://github.com/helix-editor/helix/pull/7467)) - Use negotiated position encoding for LSP workspace edits ([#7469](https://github.com/helix-editor/helix/pull/7469)) - Fix error message for unknown gutter types in config ([#7534](https://github.com/helix-editor/helix/pull/7534)) - Fix `:log-open` when `--log` CLI arg is specified ([#7573](https://github.com/helix-editor/helix/pull/7573), [#7585](https://github.com/helix-editor/helix/pull/7585)) - Fix debouncing of LSP messages to fix the last message sticking around 
([#7538](https://github.com/helix-editor/helix/pull/7538), [#8023](https://github.com/helix-editor/helix/pull/8023)) - Fix crash when the current working directory is deleted ([#7185](https://github.com/helix-editor/helix/pull/7185)) - Fix piping to Helix on macOS ([#5468](https://github.com/helix-editor/helix/pull/5468)) - Fix crash when parsing overlapping injections ([#7621](https://github.com/helix-editor/helix/pull/7621)) - Clear the statusline when the prompt is visible ([#7646](https://github.com/helix-editor/helix/pull/7646)) - Fix range formatting error message typo ([#7823](https://github.com/helix-editor/helix/pull/7823)) - Skip rendering gutters when gutter width exceeds view width ([#7821](https://github.com/helix-editor/helix/pull/7821)) - Center the picker preview using visual lines ([#7837](https://github.com/helix-editor/helix/pull/7837)) - Align view correctly for background buffers opened with `A-ret` ([#7691](https://github.com/helix-editor/helix/pull/7691)) - Fix cursor resetting to block when quitting via a keybind ([#7931](https://github.com/helix-editor/helix/pull/7931)) - Remove path completions for the `:new` command ([#8010](https://github.com/helix-editor/helix/pull/8010)) - Use binary path resolved by `which` for formatter commands ([#8064](https://github.com/helix-editor/helix/pull/8064)) - Handle crossterm's `hidden` modifier ([#8120](https://github.com/helix-editor/helix/pull/8120)) - Clear completion when switching between windows with the mouse ([#8118](https://github.com/helix-editor/helix/pull/8118)) - Eagerly remove the last picker (`'`) when the picker has many items ([#8127](https://github.com/helix-editor/helix/pull/8127)) - Fix find commands for buffers with non-LF line-endings ([#8111](https://github.com/helix-editor/helix/pull/8111)) - Detect the tmux clipboard provider on macOS ([#8182](https://github.com/helix-editor/helix/pull/8182)) - Fix syntax highlighting in dynamic picker preview pane 
([#8206](https://github.com/helix-editor/helix/pull/8206)) - Recognize HTML code tags with attributes as code in markdown previews ([#8397](https://github.com/helix-editor/helix/pull/8397)) - Fix multicursor snippet placeholder directions ([#8423](https://github.com/helix-editor/helix/pull/8423)) - Only show diagnostic highlights when diagnostics are enabled for a language server ([#8551](https://github.com/helix-editor/helix/pull/8551)) Themes: - Improve the selection color in `ferra` ([#7138](https://github.com/helix-editor/helix/pull/7138)) - Add `variable.other.member` theming to `spacebones_light` ([#7125](https://github.com/helix-editor/helix/pull/7125)) - Update `autumn` and theme the soft-wrap indicator ([#7229](https://github.com/helix-editor/helix/pull/7229)) - Add `gruvbox_dark_soft` ([#7139](https://github.com/helix-editor/helix/pull/7139)) - Add `merionette` ([#7186](https://github.com/helix-editor/helix/pull/7186)) - Add `zed_onedark` and `zed_onelight` ([#7250](https://github.com/helix-editor/helix/pull/7250)) - Use light-gray for `onedarker` inlay hint theming ([#7433](https://github.com/helix-editor/helix/pull/7433)) - Update the Nord theme to follow the style guidelines ([#7490](https://github.com/helix-editor/helix/pull/7490)) - Tune `dark_plus` inlay hint colors ([#7611](https://github.com/helix-editor/helix/pull/7611)) - Add `naysayer` ([#7570](https://github.com/helix-editor/helix/pull/7570)) - Add `kaolin-dark`, `kaolin-light` and `kaolin-valley-dark` ([#7151](https://github.com/helix-editor/helix/pull/7151)) - Fix selection highlighting in gruvbox variants ([#7717](https://github.com/helix-editor/helix/pull/7717)) - Add soft-wrap indicator to `gruvbox` ([#7736](https://github.com/helix-editor/helix/pull/7736)) - Add missing palette definitions in `everforest_dark` ([#7739](https://github.com/helix-editor/helix/pull/7739)) - Increase diagnostics clarity in `pop-dark` ([#7702](https://github.com/helix-editor/helix/pull/7702)) - Add 
`vim_dark_high_contrast` ([#7785](https://github.com/helix-editor/helix/pull/7785)) - Add `new_moon` ([#7834](https://github.com/helix-editor/helix/pull/7834)) - Add `yellowed` ([#7849](https://github.com/helix-editor/helix/pull/7849)) - Improve comment readability for `autumn` ([#7939](https://github.com/helix-editor/helix/pull/7939)) - Distinguish active bufferline buffer in `monokai` ([#7983](https://github.com/helix-editor/helix/pull/7983)) - Update ruler colors in `nord` ([#7995](https://github.com/helix-editor/helix/pull/7995)) - Update Catppuccin themes ([#8102](https://github.com/helix-editor/helix/pull/8102)) - Add text focus scope and diagnostics undercurls for `nord` ([#8165](https://github.com/helix-editor/helix/pull/8165)) - Add material theme collection ([#8211](https://github.com/helix-editor/helix/pull/8211)) - Improve indent line color in `dracula` ([#8266](https://github.com/helix-editor/helix/pull/8266)) - Clean up and refactor `papercolor` to use inheritance ([#8276](https://github.com/helix-editor/helix/pull/8276)) - Fix `zenburn` inlay hint color ([#8278](https://github.com/helix-editor/helix/pull/8278)) - Fix picker crash when previewing an invalid range ([e9d0bd7](https://github.com/helix-editor/helix/commit/e9d0bd7)) - Correctly center items in the picker preview ([13d4463](https://github.com/helix-editor/helix/commit/13d4463)) - Add `cyan_light` ([#8293](https://github.com/helix-editor/helix/pull/8293), [#8587](https://github.com/helix-editor/helix/pull/8587)) - Theme HTML tags in `onedark` ([#8409](https://github.com/helix-editor/helix/pull/8409)) - Refine `darcula` and `darcula-solid` themes ([#8412](https://github.com/helix-editor/helix/pull/8412)) - Improve `nord` highlights ([#8414](https://github.com/helix-editor/helix/pull/8414)) - Add `nord-night` ([#8549](https://github.com/helix-editor/helix/pull/8549)) New languages: - Blueprint ([#7213](https://github.com/helix-editor/helix/pull/7213), 
[#8161](https://github.com/helix-editor/helix/pull/8161)) - Forth ([#7256](https://github.com/helix-editor/helix/pull/7256), [#7334](https://github.com/helix-editor/helix/pull/7334)) - t32 ([#7140](https://github.com/helix-editor/helix/pull/7140), [#7811](https://github.com/helix-editor/helix/pull/7811)) - WebC ([#7290](https://github.com/helix-editor/helix/pull/7290)) - Persistent DSL for Haskell ([#7261](https://github.com/helix-editor/helix/pull/7261)) - F# ([#7619](https://github.com/helix-editor/helix/pull/7619), [#8024](https://github.com/helix-editor/helix/pull/8024)) - Wren ([#7765](https://github.com/helix-editor/helix/pull/7765), [#7819](https://github.com/helix-editor/helix/pull/7819)) - Unison ([#7724](https://github.com/helix-editor/helix/pull/7724)) - Todo.txt ([#7835](https://github.com/helix-editor/helix/pull/7835)) - Jinja and Handlebars ([#7233](https://github.com/helix-editor/helix/pull/7233)) - Pod ([#7907](https://github.com/helix-editor/helix/pull/7907)) - Strace ([#7928](https://github.com/helix-editor/helix/pull/7928)) - Gemini ([#8070](https://github.com/helix-editor/helix/pull/8070)) - GNU Assembler (GAS) ([#8291](https://github.com/helix-editor/helix/pull/8291)) - JSON5 ([#8473](https://github.com/helix-editor/helix/pull/8473)) - TEMPL ([#8540](https://github.com/helix-editor/helix/pull/8540)) Updated languages and queries: - Add one to the ruler numbers for git-commit ([#7072](https://github.com/helix-editor/helix/pull/7072)) - Recognize XAML files as XML ([#7083](https://github.com/helix-editor/helix/pull/7083)) - Recognize `Cargo.lock` as TOML ([#7095](https://github.com/helix-editor/helix/pull/7095)) - Use Rust grammar for Cairo ([c6d1430](https://github.com/helix-editor/helix/commit/c6d1430)) - Update tree-sitter-nickel ([#7059](https://github.com/helix-editor/helix/pull/7059), [#7551](https://github.com/helix-editor/helix/pull/7551)) - Tune auto-pair characters for Nickel ([#7059](https://github.com/helix-editor/helix/pull/7059)) - 
Recognize `Vagrantfile` as Ruby ([#7112](https://github.com/helix-editor/helix/pull/7112)) - Recognize hidden justfiles as Just ([#7088](https://github.com/helix-editor/helix/pull/7088)) - Update Java and TypeScript highlight queries ([#7145](https://github.com/helix-editor/helix/pull/7145)) - Recognize `.zimrc` as Bash ([#7146](https://github.com/helix-editor/helix/pull/7146)) - Recognize `.gir` as XML ([#7152](https://github.com/helix-editor/helix/pull/7152)) - Update tree-sitter-scala ([#7147](https://github.com/helix-editor/helix/pull/7147)) - Recognize make file-type as Makefile ([#7212](https://github.com/helix-editor/helix/pull/7212)) - Update tree-sitter-verilog ([#7262](https://github.com/helix-editor/helix/pull/7262)) - Update tree-sitter-cpp ([#7285](https://github.com/helix-editor/helix/pull/7285)) - Support core mode for delve debugger ([#7300](https://github.com/helix-editor/helix/pull/7300)) - Add Fortran comment injections ([#7305](https://github.com/helix-editor/helix/pull/7305)) - Switch Vue language server to `vue-language-server` ([#7312](https://github.com/helix-editor/helix/pull/7312)) - Update tree-sitter-sql ([#7387](https://github.com/helix-editor/helix/pull/7387), [#8464](https://github.com/helix-editor/helix/pull/8464)) - Replace the MATLAB tree-sitter grammar ([#7388](https://github.com/helix-editor/helix/pull/7388), [#7442](https://github.com/helix-editor/helix/pull/7442), [#7491](https://github.com/helix-editor/helix/pull/7491), [#7493](https://github.com/helix-editor/helix/pull/7493), [#7511](https://github.com/helix-editor/helix/pull/7511), [#7532](https://github.com/helix-editor/helix/pull/7532), [#8040](https://github.com/helix-editor/helix/pull/8040)) - Highlight TOML table headers ([#7441](https://github.com/helix-editor/helix/pull/7441)) - Recognize `cppm` file-type as C++ ([#7492](https://github.com/helix-editor/helix/pull/7492)) - Refactor ecma language queries into private and public queries 
([#7207](https://github.com/helix-editor/helix/pull/7207)) - Update tree-sitter-dart ([#7576](https://github.com/helix-editor/helix/pull/7576)) - Add shebang for nushell files ([#7606](https://github.com/helix-editor/helix/pull/7606)) - Recognize systemd files as INI ([#7592](https://github.com/helix-editor/helix/pull/7592)) - Update TypeScript, TSX and Svelte grammars ([#6874](https://github.com/helix-editor/helix/pull/6874)) - Enable inlay hints in the Svelte language server ([#7622](https://github.com/helix-editor/helix/pull/7622)) - Recognize `Brewfile`s as Ruby ([#7629](https://github.com/helix-editor/helix/pull/7629)) - Add more file-types for R ([#7633](https://github.com/helix-editor/helix/pull/7633)) - Switch tree-sitter-perl to official upstream parser ([#7644](https://github.com/helix-editor/helix/pull/7644), [#7947](https://github.com/helix-editor/helix/pull/7947)) - Fix predicate typo in comment highlights ([#7732](https://github.com/helix-editor/helix/pull/7732)) - Update tree-sitter-prql ([#7771](https://github.com/helix-editor/helix/pull/7771)) - Recognize `.gitf` as JSON ([#7781](https://github.com/helix-editor/helix/pull/7781)) - Switch V language server to `v-analyzer` ([#7760](https://github.com/helix-editor/helix/pull/7760)) - Add protobuf language servers ([#7796](https://github.com/helix-editor/helix/pull/7796)) - Update tree-sitter-zig ([#7803](https://github.com/helix-editor/helix/pull/7803)) - Update tree-sitter-hare ([#7784](https://github.com/helix-editor/helix/pull/7784)) - Add Java indent queries ([#7844](https://github.com/helix-editor/helix/pull/7844)) - Update tree-sitter-scheme ([979933b](https://github.com/helix-editor/helix/commit/979933b)) - Recognize `scm` as Scheme instead of TSQ ([5707151](https://github.com/helix-editor/helix/commit/5707151)) - Update tree-sitter-git-commit ([#7831](https://github.com/helix-editor/helix/pull/7831)) - Update JavaScript, TypeScript and TSX grammars 
([#7852](https://github.com/helix-editor/helix/pull/7852)) - Update tree-sitter-nu ([#7873](https://github.com/helix-editor/helix/pull/7873)) - Fix YAML indentation ([#6768](https://github.com/helix-editor/helix/pull/6768)) - Add `csharp-ls`, Pyright, Pylyzer and add roots for Python ([#7897](https://github.com/helix-editor/helix/pull/7897), [#8032](https://github.com/helix-editor/helix/pull/8032)) - Update tree-sitter-slint ([#7893](https://github.com/helix-editor/helix/pull/7893)) - Recognize more ZSH file-types as Bash ([#7930](https://github.com/helix-editor/helix/pull/7930)) - Recognize `star` extension as Starlark ([#7922](https://github.com/helix-editor/helix/pull/7922)) - Fix inline HTML tag highlighting in markdown ([#7960](https://github.com/helix-editor/helix/pull/7960)) - Update tree-sitter-robot ([#7970](https://github.com/helix-editor/helix/pull/7970)) - Highlight Dart 3 `sealed` and `base` keywords ([#7974](https://github.com/helix-editor/helix/pull/7974)) - Add configuration for `ltex-ls` to the default `languages.toml` ([#7838](https://github.com/helix-editor/helix/pull/7838)) - Update tree-sitter-strace ([#8087](https://github.com/helix-editor/helix/pull/8087)) - Update tree-sitter-gleam, enable auto-format ([#8085](https://github.com/helix-editor/helix/pull/8085)) - Update tree-sitter-esdl ([#8222](https://github.com/helix-editor/helix/pull/8222)) - Expand ignore file-types ([#8220](https://github.com/helix-editor/helix/pull/8220)) - Recognize feed related formats as XML ([#8232](https://github.com/helix-editor/helix/pull/8232)) - Improve YAML injections ([#8217](https://github.com/helix-editor/helix/pull/8217)) - Add shebangs for TypeScript, Julia, Java and OCaml ([95e994a](https://github.com/helix-editor/helix/commit/95e994a)) - Highlight abbreviations in Scheme ([ef23847](https://github.com/helix-editor/helix/commit/ef23847)) - Remove backtick auto-pair in OCaml ([#8260](https://github.com/helix-editor/helix/pull/8260)) - Recognize `flake.lock` 
as JSON ([#8304](https://github.com/helix-editor/helix/pull/8304)) - Add Python test script injection for Nix ([b4494e1](https://github.com/helix-editor/helix/commit/b4494e1)) - Fix Nix comment injection precedence ([37e48f4](https://github.com/helix-editor/helix/commit/37e48f4)) - Recognize editorconfig files as INI ([#8308](https://github.com/helix-editor/helix/pull/8308)) - Recognize `.babelrc` as JSON ([#8309](https://github.com/helix-editor/helix/pull/8309)) - Switch Purescript to its own tree-sitter parser ([#8306](https://github.com/helix-editor/helix/pull/8306), [#8338](https://github.com/helix-editor/helix/pull/8338), [#8527](https://github.com/helix-editor/helix/pull/8527)) - Update Unison highlights ([#8315](https://github.com/helix-editor/helix/pull/8315)) - Recognize `.webmanifest` as JSON ([#8342](https://github.com/helix-editor/helix/pull/8342)) - Recognize polkit policy files as XML ([#8369](https://github.com/helix-editor/helix/pull/8369)) - Recognize polkit rules files as JavaScript ([#8370](https://github.com/helix-editor/helix/pull/8370)) - Update Go highlight queries ([#8399](https://github.com/helix-editor/helix/pull/8399)) - Add shebangs for Makefiles ([#8410](https://github.com/helix-editor/helix/pull/8410)) - Add file-type associations from VSCode ([#8388](https://github.com/helix-editor/helix/pull/8388)) - Add validation to JSON/CSS language server configs ([#8433](https://github.com/helix-editor/helix/pull/8433)) - Add a configuration for the tailwind language server ([#8442](https://github.com/helix-editor/helix/pull/8442)) - Add a configuration for the ansible language server ([#7973](https://github.com/helix-editor/helix/pull/7973)) - Add a configuration for the GraphQL language server ([#8492](https://github.com/helix-editor/helix/pull/8492)) - Indent while statements in Bash ([#8528](https://github.com/helix-editor/helix/pull/8528)) - Update tree-sitter-haskell and queries ([#8558](https://github.com/helix-editor/helix/pull/8558)) 
Packaging: - Add an overlay to the Nix flake ([#7078](https://github.com/helix-editor/helix/pull/7078)) - Check for `git` before fetching or building grammars ([#7320](https://github.com/helix-editor/helix/pull/7320)) - Refactor Nix flake to use Crane ([#7763](https://github.com/helix-editor/helix/pull/7763)) - Remove the aarch64 appimage from the release CI ([#7832](https://github.com/helix-editor/helix/pull/7832)) - Add desktop and icon files to Nix flake output ([#7979](https://github.com/helix-editor/helix/pull/7979)) - Build flake packages with the latest stable Rust ([#8133](https://github.com/helix-editor/helix/pull/8133)) # 23.05 (2023-05-18) 23.05 is a smaller release focusing on fixes. There were 88 contributors in this release. Thank you all! Features: - Add a config option to exclude declaration from LSP references request ([#6886](https://github.com/helix-editor/helix/pull/6886)) - Enable injecting languages based on their file extension and shebang ([#3970](https://github.com/helix-editor/helix/pull/3970)) - Sort the buffer picker by most recent access ([#2980](https://github.com/helix-editor/helix/pull/2980)) - Perform syntax highlighting in the picker asynchronously ([#7028](https://github.com/helix-editor/helix/pull/7028)) Commands: - `:update` is now aliased as `:u` ([#6835](https://github.com/helix-editor/helix/pull/6835)) - Add `extend_to_first_nonwhitespace` which acts the same as `goto_first_nonwhitespace` but always extends ([#6837](https://github.com/helix-editor/helix/pull/6837)) - Add `:clear-register` for clearing the given register or all registers ([#5695](https://github.com/helix-editor/helix/pull/5695)) - Add `:write-buffer-close` and `:write-buffer-close!` ([#6947](https://github.com/helix-editor/helix/pull/6947)) Fixes: - Normalize LSP workspace paths ([#6517](https://github.com/helix-editor/helix/pull/6517)) - Robustly handle invalid LSP ranges ([#6512](https://github.com/helix-editor/helix/pull/6512)) - Fix line number display for 
LSP goto pickers ([#6559](https://github.com/helix-editor/helix/pull/6559)) - Fix toggling of `soft-wrap.enable` option ([#6656](https://github.com/helix-editor/helix/pull/6656), [58e457a](https://github.com/helix-editor/helix/commit/58e457a), [#6742](https://github.com/helix-editor/helix/pull/6742)) - Handle `workspace/configuration` requests from stopped language servers ([#6693](https://github.com/helix-editor/helix/pull/6693)) - Fix possible crash from opening the jumplist picker ([#6672](https://github.com/helix-editor/helix/pull/6672)) - Fix theme preview returning to current theme on line and word deletions ([#6694](https://github.com/helix-editor/helix/pull/6694)) - Re-run crate build scripts on changes to revision and grammar repositories ([#6743](https://github.com/helix-editor/helix/pull/6743)) - Fix crash on opening from suspended state ([#6764](https://github.com/helix-editor/helix/pull/6764)) - Fix unwrap bug in DAP ([#6786](https://github.com/helix-editor/helix/pull/6786)) - Always build tree-sitter parsers with C++14 and C11 ([#6792](https://github.com/helix-editor/helix/pull/6792), [#6834](https://github.com/helix-editor/helix/pull/6834), [#6845](https://github.com/helix-editor/helix/pull/6845)) - Exit with a non-zero statuscode when tree-sitter parser builds fail ([#6795](https://github.com/helix-editor/helix/pull/6795)) - Flip symbol range in LSP goto commands ([#6794](https://github.com/helix-editor/helix/pull/6794)) - Fix runtime toggling of the `mouse` option ([#6675](https://github.com/helix-editor/helix/pull/6675)) - Fix panic in inlay hint computation when view anchor is out of bounds ([#6883](https://github.com/helix-editor/helix/pull/6883)) - Significantly improve performance of git discovery on slow file systems ([#6890](https://github.com/helix-editor/helix/pull/6890)) - Downgrade gix log level to info ([#6915](https://github.com/helix-editor/helix/pull/6915)) - Conserve BOM and properly support saving UTF16 files 
([#6497](https://github.com/helix-editor/helix/pull/6497)) - Correctly handle completion re-request ([#6594](https://github.com/helix-editor/helix/pull/6594)) - Fix offset encoding in LSP `didChange` notifications ([#6921](https://github.com/helix-editor/helix/pull/6921)) - Change `gix` logging level to info ([#6915](https://github.com/helix-editor/helix/pull/6915)) - Improve error message when writes fail because parent directories do not exist ([#7014](https://github.com/helix-editor/helix/pull/7014)) - Replace DAP variables popup instead of pushing more popups ([#7034](https://github.com/helix-editor/helix/pull/7034)) - Disable tree-sitter for files after parsing for 500ms ([#7028](https://github.com/helix-editor/helix/pull/7028)) - Fix crash when deleting with multiple cursors ([#6024](https://github.com/helix-editor/helix/pull/6024)) - Fix selection sliding when deleting forwards in append mode ([#6024](https://github.com/helix-editor/helix/pull/6024)) - Fix completion on paths containing spaces ([#6779](https://github.com/helix-editor/helix/pull/6779)) Themes: - Style inlay hints in `dracula` theme ([#6515](https://github.com/helix-editor/helix/pull/6515)) - Style inlay hints in `onedark` theme ([#6503](https://github.com/helix-editor/helix/pull/6503)) - Style inlay hints and the soft-wrap indicator in `varua` ([#6568](https://github.com/helix-editor/helix/pull/6568), [#6589](https://github.com/helix-editor/helix/pull/6589)) - Style inlay hints in `emacs` theme ([#6569](https://github.com/helix-editor/helix/pull/6569)) - Update `base16_transparent` and `dark_high_contrast` themes ([#6577](https://github.com/helix-editor/helix/pull/6577)) - Style inlay hints for `mellow` and `rasmus` themes ([#6583](https://github.com/helix-editor/helix/pull/6583)) - Dim pane divider for `base16_transparent` theme ([#6534](https://github.com/helix-editor/helix/pull/6534)) - Style inlay hints in `zenburn` theme ([#6593](https://github.com/helix-editor/helix/pull/6593)) - Style 
inlay hints in `boo_berry` theme ([#6625](https://github.com/helix-editor/helix/pull/6625)) - Add `ferra` theme ([#6619](https://github.com/helix-editor/helix/pull/6619), [#6776](https://github.com/helix-editor/helix/pull/6776)) - Style inlay hints in `nightfox` theme ([#6655](https://github.com/helix-editor/helix/pull/6655)) - Fix `ayu` theme family markup code block background ([#6538](https://github.com/helix-editor/helix/pull/6538)) - Improve whitespace and search match colors in `rose_pine` theme ([#6679](https://github.com/helix-editor/helix/pull/6679)) - Highlight selected items in `base16_transparent` theme ([#6716](https://github.com/helix-editor/helix/pull/6716)) - Adjust everforest to resemble original more closely ([#5866](https://github.com/helix-editor/helix/pull/5866)) - Refactor `dracula` theme ([#6552](https://github.com/helix-editor/helix/pull/6552), [#6767](https://github.com/helix-editor/helix/pull/6767), [#6855](https://github.com/helix-editor/helix/pull/6855), [#6987](https://github.com/helix-editor/helix/pull/6987)) - Style inlay hints in `darcula` theme ([#6732](https://github.com/helix-editor/helix/pull/6732)) - Style inlay hints in `kanagawa` theme ([#6773](https://github.com/helix-editor/helix/pull/6773)) - Improve `ayu_dark` theme ([#6622](https://github.com/helix-editor/helix/pull/6622)) - Refactor `noctis` theme multiple cursor highlighting ([96720e7](https://github.com/helix-editor/helix/commit/96720e7)) - Refactor `noctis` theme whitespace rendering and indent guides ([f2ccc03](https://github.com/helix-editor/helix/commit/f2ccc03)) - Add `amberwood` theme ([#6924](https://github.com/helix-editor/helix/pull/6924)) - Update `nightfox` theme ([#7061](https://github.com/helix-editor/helix/pull/7061)) Language support: - R language server: use the `--no-echo` flag to silence output ([#6570](https://github.com/helix-editor/helix/pull/6570)) - Recognize CUDA files as C++ ([#6521](https://github.com/helix-editor/helix/pull/6521)) - Add 
support for Hurl ([#6450](https://github.com/helix-editor/helix/pull/6450)) - Add textobject queries for Julia ([#6588](https://github.com/helix-editor/helix/pull/6588)) - Update Ruby highlight queries ([#6587](https://github.com/helix-editor/helix/pull/6587)) - Add xsd to XML file-types ([#6631](https://github.com/helix-editor/helix/pull/6631)) - Support Robot Framework ([#6611](https://github.com/helix-editor/helix/pull/6611)) - Update Gleam tree-sitter parser ([#6641](https://github.com/helix-editor/helix/pull/6641)) - Update git-commit tree-sitter parser ([#6692](https://github.com/helix-editor/helix/pull/6692)) - Update Haskell tree-sitter parser ([#6317](https://github.com/helix-editor/helix/pull/6317)) - Add injection queries for Haskell quasiquotes ([#6474](https://github.com/helix-editor/helix/pull/6474)) - Highlight C/C++ escape sequences ([#6724](https://github.com/helix-editor/helix/pull/6724)) - Support Markdoc ([#6432](https://github.com/helix-editor/helix/pull/6432)) - Support OpenCL ([#6473](https://github.com/helix-editor/helix/pull/6473)) - Support DTD ([#6644](https://github.com/helix-editor/helix/pull/6644)) - Fix constant highlighting in Python queries ([#6751](https://github.com/helix-editor/helix/pull/6751)) - Support Just ([#6453](https://github.com/helix-editor/helix/pull/6453)) - Fix Go locals query for `var_spec` identifiers ([#6763](https://github.com/helix-editor/helix/pull/6763)) - Update Markdown tree-sitter parser ([#6785](https://github.com/helix-editor/helix/pull/6785)) - Fix Haskell workspace root for cabal projects ([#6828](https://github.com/helix-editor/helix/pull/6828)) - Avoid extra indentation in Go switches ([#6817](https://github.com/helix-editor/helix/pull/6817)) - Fix Go workspace roots ([#6884](https://github.com/helix-editor/helix/pull/6884)) - Set PerlNavigator as the default Perl language server ([#6860](https://github.com/helix-editor/helix/pull/6860)) - Highlight more sqlx macros in Rust 
([#6793](https://github.com/helix-editor/helix/pull/6793)) - Switch Odin tree-sitter grammar ([#6766](https://github.com/helix-editor/helix/pull/6766)) - Recognize `poetry.lock` as TOML ([#6928](https://github.com/helix-editor/helix/pull/6928)) - Recognize Jupyter notebooks as JSON ([#6927](https://github.com/helix-editor/helix/pull/6927)) - Add language server configuration for Crystal ([#6948](https://github.com/helix-editor/helix/pull/6948)) - Add `build.gradle.kts` to Java and Scala roots ([#6970](https://github.com/helix-editor/helix/pull/6970)) - Recognize `sty` and `cls` files as latex ([#6986](https://github.com/helix-editor/helix/pull/6986)) - Update Dockerfile tree-sitter grammar ([#6895](https://github.com/helix-editor/helix/pull/6895)) - Add comment injections for Odin ([#7027](https://github.com/helix-editor/helix/pull/7027)) - Recognize `gml` as XML ([#7055](https://github.com/helix-editor/helix/pull/7055)) - Recognize `geojson` as JSON ([#7054](https://github.com/helix-editor/helix/pull/7054)) Packaging: - Update the Nix flake dependencies, remove a deprecated option ([#6546](https://github.com/helix-editor/helix/pull/6546)) - Fix and re-enable aarch64-macos release binary builds ([#6504](https://github.com/helix-editor/helix/pull/6504)) - The git dependency on `tree-sitter` has been replaced with a regular crates.io dependency ([#6608](https://github.com/helix-editor/helix/pull/6608)) # 23.03 (2023-03-31) 23.03 brings some long-awaited and exciting features. Thank you to everyone involved! This release saw changes from 102 contributors. For the full log, check out the [git log](https://github.com/helix-editor/helix/compare/22.12..23.03). Also check out the [release notes](https://helix-editor.com/news/release-23-03-highlights/) for more commentary on larger features. 
Breaking changes: - Select diagnostic range in `goto_*_diag` commands ([#4713](https://github.com/helix-editor/helix/pull/4713), [#5164](https://github.com/helix-editor/helix/pull/5164), [#6193](https://github.com/helix-editor/helix/pull/6193)) - Remove jump behavior from `increment`/`decrement` ([#4123](https://github.com/helix-editor/helix/pull/4123), [#5929](https://github.com/helix-editor/helix/pull/5929)) - Select change range in `goto_*_change` commands ([#5206](https://github.com/helix-editor/helix/pull/5206)) - Split file modification indicator from filename statusline elements ([#4731](https://github.com/helix-editor/helix/pull/4731), [#6036](https://github.com/helix-editor/helix/pull/6036)) - Jump to symbol ranges in LSP goto commands ([#5986](https://github.com/helix-editor/helix/pull/5986)) - Workspace detection now stops at the first `.helix/` directory (merging multiple `.helix/languages.toml` configurations is no longer supported) ([#5748](https://github.com/helix-editor/helix/pull/5748)) Features: - Dynamic workspace symbol picker ([#5055](https://github.com/helix-editor/helix/pull/5055)) - Soft-wrap ([#5420](https://github.com/helix-editor/helix/pull/5420), [#5786](https://github.com/helix-editor/helix/pull/5786), [#5893](https://github.com/helix-editor/helix/pull/5893), [#6142](https://github.com/helix-editor/helix/pull/6142), [#6440](https://github.com/helix-editor/helix/pull/6440)) - Initial support for LSP snippet completions ([#5864](https://github.com/helix-editor/helix/pull/5864), [b1f7528](https://github.com/helix-editor/helix/commit/b1f7528), [#6263](https://github.com/helix-editor/helix/pull/6263), [bbf4800](https://github.com/helix-editor/helix/commit/bbf4800), [90348b8](https://github.com/helix-editor/helix/commit/90348b8), [f87299f](https://github.com/helix-editor/helix/commit/f87299f), [#6371](https://github.com/helix-editor/helix/pull/6371), [9fe3adc](https://github.com/helix-editor/helix/commit/9fe3adc)) - Add a statusline element 
for showing the current version control HEAD ([#5682](https://github.com/helix-editor/helix/pull/5682)) - Display LSP type hints ([#5420](https://github.com/helix-editor/helix/pull/5420), [#5934](https://github.com/helix-editor/helix/pull/5934), [#6312](https://github.com/helix-editor/helix/pull/6312)) - Enable the Kitty keyboard protocol on terminals with support ([#4939](https://github.com/helix-editor/helix/pull/4939), [#6170](https://github.com/helix-editor/helix/pull/6170), [#6194](https://github.com/helix-editor/helix/pull/6194), [#6438](https://github.com/helix-editor/helix/pull/6438)) - Add a statusline element for the basename of the current file ([#5318](https://github.com/helix-editor/helix/pull/5318)) - Add substring matching syntax for the picker ([#5658](https://github.com/helix-editor/helix/pull/5658)) - Support LSP `textDocument/prepareRename` ([#6103](https://github.com/helix-editor/helix/pull/6103)) - Allow multiple runtime directories with priorities ([#5411](https://github.com/helix-editor/helix/pull/5411)) - Allow configuring whether to insert or replace completions ([#5728](https://github.com/helix-editor/helix/pull/5728)) - Allow per-workspace config file `.helix/config.toml` ([#5748](https://github.com/helix-editor/helix/pull/5748)) - Add `workspace-lsp-roots` config option to support multiple LSP roots for use with monorepos ([#5748](https://github.com/helix-editor/helix/pull/5748)) Commands: - `:pipe-to` which pipes selections into a shell command and ignores output ([#4931](https://github.com/helix-editor/helix/pull/4931)) - `merge_consecutive_selections` (`A-_`) combines all consecutive selections ([#5047](https://github.com/helix-editor/helix/pull/5047)) - `rotate_view_reverse` which focuses the previous view ([#5356](https://github.com/helix-editor/helix/pull/5356)) - `goto_declaration` (`gD`, requires LSP) which jumps to a symbol's declaration ([#5646](https://github.com/helix-editor/helix/pull/5646)) - 
`file_picker_in_current_buffer_directory` ([#4666](https://github.com/helix-editor/helix/pull/4666)) - `:character-info` which shows information about the character under the cursor ([#4000](https://github.com/helix-editor/helix/pull/4000)) - `:toggle-option` for toggling config options at runtime ([#4085](https://github.com/helix-editor/helix/pull/4085)) - `dap_restart` for restarting a debug session in DAP ([#5651](https://github.com/helix-editor/helix/pull/5651)) - `:lsp-stop` to stop the language server of the current buffer ([#5964](https://github.com/helix-editor/helix/pull/5964)) - `:reset-diff-change` for resetting a diff hunk to its original text ([#4974](https://github.com/helix-editor/helix/pull/4974)) - `:config-open-workspace` for opening the config file local to the current workspace ([#5748](https://github.com/helix-editor/helix/pull/5748)) Usability improvements: - Remove empty detail section in completion menu when LSP doesn't send details ([#4902](https://github.com/helix-editor/helix/pull/4902)) - Pass client information on LSP initialization ([#4904](https://github.com/helix-editor/helix/pull/4904)) - Allow specifying environment variables for language servers in language config ([#4004](https://github.com/helix-editor/helix/pull/4004)) - Allow detached git worktrees to be recognized as root paths ([#5097](https://github.com/helix-editor/helix/pull/5097)) - Improve error message handling for theme loading failures ([#5073](https://github.com/helix-editor/helix/pull/5073)) - Print the names of binaries required for LSP/DAP in health-check ([#5195](https://github.com/helix-editor/helix/pull/5195)) - Improve sorting in the picker in cases of ties ([#5169](https://github.com/helix-editor/helix/pull/5169)) - Add theming for prompt suggestions ([#5104](https://github.com/helix-editor/helix/pull/5104)) - Open a file picker when using `:open` on directories ([#2707](https://github.com/helix-editor/helix/pull/2707), 
[#5278](https://github.com/helix-editor/helix/pull/5278)) - Reload language config with `:config-reload` ([#5239](https://github.com/helix-editor/helix/pull/5239), [#5381](https://github.com/helix-editor/helix/pull/5381), [#5431](https://github.com/helix-editor/helix/pull/5431)) - Improve indent queries for python when the tree is errored ([#5332](https://github.com/helix-editor/helix/pull/5332)) - Picker: Open files without closing the picker with `A-ret` ([#4435](https://github.com/helix-editor/helix/pull/4435)) - Allow theming cursors by primary/secondary and by mode ([#5130](https://github.com/helix-editor/helix/pull/5130)) - Allow configuration of the minimum width for the line-numbers gutter ([#4724](https://github.com/helix-editor/helix/pull/4724), [#5696](https://github.com/helix-editor/helix/pull/5696)) - Use filename completer for `:run-shell-command` command ([#5729](https://github.com/helix-editor/helix/pull/5729)) - Surround with line-endings with `ms` ([#4571](https://github.com/helix-editor/helix/pull/4571)) - Hide duplicate symlinks in file pickers ([#5658](https://github.com/helix-editor/helix/pull/5658)) - Tabulate buffer picker contents ([#5777](https://github.com/helix-editor/helix/pull/5777)) - Add an option to disable LSP ([#4425](https://github.com/helix-editor/helix/pull/4425)) - Short-circuit tree-sitter and word object motions ([#5851](https://github.com/helix-editor/helix/pull/5851)) - Add exit code to failed command message ([#5898](https://github.com/helix-editor/helix/pull/5898)) - Make `m` textobject look for pairs enclosing selections ([#3344](https://github.com/helix-editor/helix/pull/3344)) - Negotiate LSP position encoding ([#5894](https://github.com/helix-editor/helix/pull/5894), [a48d1a4](https://github.com/helix-editor/helix/commit/a48d1a4)) - Display deprecated LSP completions with strikethrough ([#5932](https://github.com/helix-editor/helix/pull/5932)) - Add JSONRPC request ID to failed LSP/DAP request log messages 
([#6010](https://github.com/helix-editor/helix/pull/6010), [#6018](https://github.com/helix-editor/helix/pull/6018)) - Ignore case when filtering LSP completions ([#6008](https://github.com/helix-editor/helix/pull/6008)) - Show current language when no arguments are passed to `:set-language` ([#5895](https://github.com/helix-editor/helix/pull/5895)) - Refactor and rewrite all book documentation ([#5534](https://github.com/helix-editor/helix/pull/5534)) - Separate diagnostic picker message and code ([#6095](https://github.com/helix-editor/helix/pull/6095)) - Add a config option to bypass undercurl detection ([#6253](https://github.com/helix-editor/helix/pull/6253)) - Only complete appropriate arguments for typed commands ([#5966](https://github.com/helix-editor/helix/pull/5966)) - Discard outdated LSP diagnostics ([3c9d5d0](https://github.com/helix-editor/helix/commit/3c9d5d0)) - Discard outdated LSP workspace edits ([b6a4927](https://github.com/helix-editor/helix/commit/b6a4927)) - Run shell commands asynchronously ([#6373](https://github.com/helix-editor/helix/pull/6373)) - Show diagnostic codes in LSP diagnostic messages ([#6378](https://github.com/helix-editor/helix/pull/6378)) - Highlight the current line in a DAP debug session ([#5957](https://github.com/helix-editor/helix/pull/5957)) - Hide signature help if it overlaps with the completion menu ([#5523](https://github.com/helix-editor/helix/pull/5523), [7a69c40](https://github.com/helix-editor/helix/commit/7a69c40)) Fixes: - Fix behavior of `auto-completion` flag for completion-on-trigger ([#5042](https://github.com/helix-editor/helix/pull/5042)) - Reset editor mode when changing buffers ([#5072](https://github.com/helix-editor/helix/pull/5072)) - Respect scrolloff settings in mouse movements ([#5255](https://github.com/helix-editor/helix/pull/5255)) - Avoid trailing `s` when only one file is opened ([#5189](https://github.com/helix-editor/helix/pull/5189)) - Fix erroneous indent between closers of auto-pairs 
([#5330](https://github.com/helix-editor/helix/pull/5330)) - Expand `~` when parsing file paths in `:open` ([#5329](https://github.com/helix-editor/helix/pull/5329)) - Fix theme inheritance for default themes ([#5218](https://github.com/helix-editor/helix/pull/5218)) - Fix `extend_line` with a count when the current line(s) are selected ([#5288](https://github.com/helix-editor/helix/pull/5288)) - Prompt: Fix autocompletion for paths containing periods ([#5175](https://github.com/helix-editor/helix/pull/5175)) - Skip serializing JSONRPC params if params is null ([#5471](https://github.com/helix-editor/helix/pull/5471)) - Fix interaction with the `xclip` clipboard provider ([#5426](https://github.com/helix-editor/helix/pull/5426)) - Fix undo/redo execution from the command palette ([#5294](https://github.com/helix-editor/helix/pull/5294)) - Fix highlighting of non-block cursors ([#5575](https://github.com/helix-editor/helix/pull/5575)) - Fix panic when nooping in `join_selections` and `join_selections_space` ([#5423](https://github.com/helix-editor/helix/pull/5423)) - Fix selecting a changed file in global search ([#5639](https://github.com/helix-editor/helix/pull/5639)) - Fix initial syntax highlight layer sort order ([#5196](https://github.com/helix-editor/helix/pull/5196)) - Fix UTF-8 length handling for shellwords ([#5738](https://github.com/helix-editor/helix/pull/5738)) - Remove C-j and C-k bindings from the completion menu ([#5070](https://github.com/helix-editor/helix/pull/5070)) - Always commit to history when pasting ([#5790](https://github.com/helix-editor/helix/pull/5790)) - Properly handle LSP position encoding ([#5711](https://github.com/helix-editor/helix/pull/5711)) - Fix infinite loop in `copy_selection_on_prev_line` ([#5888](https://github.com/helix-editor/helix/pull/5888)) - Fix completion popup positioning ([#5842](https://github.com/helix-editor/helix/pull/5842)) - Fix a panic when uncommenting a line with only a comment token 
([#5933](https://github.com/helix-editor/helix/pull/5933)) - Fix panic in `goto_window_center` at EOF ([#5987](https://github.com/helix-editor/helix/pull/5987)) - Ignore invalid file URIs sent by a language server ([#6000](https://github.com/helix-editor/helix/pull/6000)) - Decode LSP URIs for the workspace diagnostics picker ([#6016](https://github.com/helix-editor/helix/pull/6016)) - Fix incorrect usages of `tab_width` with `indent_width` ([#5918](https://github.com/helix-editor/helix/pull/5918)) - DAP: Send Disconnect if the Terminated event is received ([#5532](https://github.com/helix-editor/helix/pull/5532)) - DAP: Validate key and index exist when requesting variables ([#5628](https://github.com/helix-editor/helix/pull/5628)) - Check LSP renaming support before prompting for rename text ([#6257](https://github.com/helix-editor/helix/pull/6257)) - Fix indent guide rendering ([#6136](https://github.com/helix-editor/helix/pull/6136)) - Fix division by zero panic ([#6155](https://github.com/helix-editor/helix/pull/6155)) - Fix lacking space panic ([#6109](https://github.com/helix-editor/helix/pull/6109)) - Send error replies for malformed and unhandled LSP requests ([#6058](https://github.com/helix-editor/helix/pull/6058)) - Fix table column calculations for dynamic pickers ([#5920](https://github.com/helix-editor/helix/pull/5920)) - Skip adding jumplist entries for `:` line number previews ([#5751](https://github.com/helix-editor/helix/pull/5751)) - Fix completion race conditions ([#6173](https://github.com/helix-editor/helix/pull/6173)) - Fix `shrink_selection` with multiple cursors ([#6093](https://github.com/helix-editor/helix/pull/6093)) - Fix indentation calculation for lines with mixed tabs/spaces ([#6278](https://github.com/helix-editor/helix/pull/6278)) - No-op `client/registerCapability` LSP requests ([#6258](https://github.com/helix-editor/helix/pull/6258)) - Send the STOP signal to all processes in the process group 
([#3546](https://github.com/helix-editor/helix/pull/3546)) - Fix workspace edit client capabilities declaration ([7bf168d](https://github.com/helix-editor/helix/commit/7bf168d)) - Fix highlighting in picker results with multiple columns ([#6333](https://github.com/helix-editor/helix/pull/6333)) - Canonicalize paths before stripping the current dir as a prefix ([#6290](https://github.com/helix-editor/helix/pull/6290)) - Fix truncation behavior for long path names in the file picker ([#6410](https://github.com/helix-editor/helix/pull/6410), [67783dd](https://github.com/helix-editor/helix/commit/67783dd)) - Fix theme reloading behavior in `:config-reload` ([ab819d8](https://github.com/helix-editor/helix/commit/ab819d8)) Themes: - Update `serika` ([#5038](https://github.com/helix-editor/helix/pull/5038), [#6344](https://github.com/helix-editor/helix/pull/6344)) - Update `flatwhite` ([#5036](https://github.com/helix-editor/helix/pull/5036), [#6323](https://github.com/helix-editor/helix/pull/6323)) - Update `autumn` ([#5051](https://github.com/helix-editor/helix/pull/5051), [#5397](https://github.com/helix-editor/helix/pull/5397), [#6280](https://github.com/helix-editor/helix/pull/6280), [#6316](https://github.com/helix-editor/helix/pull/6316)) - Update `acme` ([#5019](https://github.com/helix-editor/helix/pull/5019), [#5486](https://github.com/helix-editor/helix/pull/5486), [#5488](https://github.com/helix-editor/helix/pull/5488)) - Update `gruvbox` themes ([#5066](https://github.com/helix-editor/helix/pull/5066), [#5333](https://github.com/helix-editor/helix/pull/5333), [#5540](https://github.com/helix-editor/helix/pull/5540), [#6285](https://github.com/helix-editor/helix/pull/6285), [#6295](https://github.com/helix-editor/helix/pull/6295)) - Update `base16_transparent` ([#5105](https://github.com/helix-editor/helix/pull/5105)) - Update `dark_high_contrast` ([#5105](https://github.com/helix-editor/helix/pull/5105)) - Update `dracula` 
([#5236](https://github.com/helix-editor/helix/pull/5236), [#5627](https://github.com/helix-editor/helix/pull/5627), [#6414](https://github.com/helix-editor/helix/pull/6414)) - Update `monokai_pro_spectrum` ([#5250](https://github.com/helix-editor/helix/pull/5250), [#5602](https://github.com/helix-editor/helix/pull/5602)) - Update `rose_pine` ([#5267](https://github.com/helix-editor/helix/pull/5267), [#5489](https://github.com/helix-editor/helix/pull/5489), [#6384](https://github.com/helix-editor/helix/pull/6384)) - Update `kanagawa` ([#5273](https://github.com/helix-editor/helix/pull/5273), [#5571](https://github.com/helix-editor/helix/pull/5571), [#6085](https://github.com/helix-editor/helix/pull/6085)) - Update `emacs` ([#5334](https://github.com/helix-editor/helix/pull/5334)) - Add `github` themes ([#5353](https://github.com/helix-editor/helix/pull/5353), [efeec12](https://github.com/helix-editor/helix/commit/efeec12)) - Dark themes: `github_dark`, `github_dark_colorblind`, `github_dark_dimmed`, `github_dark_high_contrast`, `github_dark_tritanopia` - Light themes: `github_light`, `github_light_colorblind`, `github_light_dimmed`, `github_light_high_contrast`, `github_light_tritanopia` - Update `solarized` variants ([#5445](https://github.com/helix-editor/helix/pull/5445), [#6327](https://github.com/helix-editor/helix/pull/6327)) - Update `catppuccin` variants ([#5404](https://github.com/helix-editor/helix/pull/5404), [#6107](https://github.com/helix-editor/helix/pull/6107), [#6269](https://github.com/helix-editor/helix/pull/6269), [#6464](https://github.com/helix-editor/helix/pull/6464)) - Use curly underlines in built-in themes ([#5419](https://github.com/helix-editor/helix/pull/5419)) - Update `zenburn` ([#5573](https://github.com/helix-editor/helix/pull/5573)) - Rewrite `snazzy` ([#3971](https://github.com/helix-editor/helix/pull/3971)) - Add `monokai_aqua` ([#5578](https://github.com/helix-editor/helix/pull/5578)) - Add `markup.strikethrough` to existing 
themes ([#5619](https://github.com/helix-editor/helix/pull/5619)) - Update `sonokai` ([#5440](https://github.com/helix-editor/helix/pull/5440)) - Update `onedark` ([#5755](https://github.com/helix-editor/helix/pull/5755)) - Add `ayu_evolve` ([#5638](https://github.com/helix-editor/helix/pull/5638), [#6028](https://github.com/helix-editor/helix/pull/6028), [#6225](https://github.com/helix-editor/helix/pull/6225)) - Add `jellybeans` ([#5719](https://github.com/helix-editor/helix/pull/5719)) - Update `fleet_dark` ([#5605](https://github.com/helix-editor/helix/pull/5605), [#6266](https://github.com/helix-editor/helix/pull/6266), [#6324](https://github.com/helix-editor/helix/pull/6324), [#6375](https://github.com/helix-editor/helix/pull/6375)) - Add `darcula-solid` ([#5778](https://github.com/helix-editor/helix/pull/5778)) - Remove text background from monokai themes ([#6009](https://github.com/helix-editor/helix/pull/6009)) - Update `pop_dark` ([#5992](https://github.com/helix-editor/helix/pull/5992), [#6208](https://github.com/helix-editor/helix/pull/6208), [#6227](https://github.com/helix-editor/helix/pull/6227), [#6292](https://github.com/helix-editor/helix/pull/6292)) - Add `everblush` ([#6086](https://github.com/helix-editor/helix/pull/6086)) - Add `adwaita-dark` ([#6042](https://github.com/helix-editor/helix/pull/6042), [#6342](https://github.com/helix-editor/helix/pull/6342)) - Update `papercolor` ([#6162](https://github.com/helix-editor/helix/pull/6162)) - Update `onelight` ([#6192](https://github.com/helix-editor/helix/pull/6192), [#6276](https://github.com/helix-editor/helix/pull/6276)) - Add `molokai` ([#6260](https://github.com/helix-editor/helix/pull/6260)) - Update `ayu` variants ([#6329](https://github.com/helix-editor/helix/pull/6329)) - Update `tokyonight` variants ([#6349](https://github.com/helix-editor/helix/pull/6349)) - Update `nord` variants ([#6376](https://github.com/helix-editor/helix/pull/6376)) New languages: - BibTeX 
([#5064](https://github.com/helix-editor/helix/pull/5064)) - Mermaid.js ([#5147](https://github.com/helix-editor/helix/pull/5147)) - Crystal ([#4993](https://github.com/helix-editor/helix/pull/4993), [#5205](https://github.com/helix-editor/helix/pull/5205)) - MATLAB/Octave ([#5192](https://github.com/helix-editor/helix/pull/5192)) - `tfvars` (uses HCL) ([#5396](https://github.com/helix-editor/helix/pull/5396)) - Ponylang ([#5416](https://github.com/helix-editor/helix/pull/5416)) - DHall ([1f6809c](https://github.com/helix-editor/helix/commit/1f6809c)) - Sagemath ([#5649](https://github.com/helix-editor/helix/pull/5649)) - MSBuild ([#5793](https://github.com/helix-editor/helix/pull/5793)) - pem ([#5797](https://github.com/helix-editor/helix/pull/5797)) - passwd ([#4959](https://github.com/helix-editor/helix/pull/4959)) - hosts ([#4950](https://github.com/helix-editor/helix/pull/4950), [#5914](https://github.com/helix-editor/helix/pull/5914)) - uxntal ([#6047](https://github.com/helix-editor/helix/pull/6047)) - Yuck ([#6064](https://github.com/helix-editor/helix/pull/6064), [#6242](https://github.com/helix-editor/helix/pull/6242)) - GNU gettext PO ([#5996](https://github.com/helix-editor/helix/pull/5996)) - Sway ([#6023](https://github.com/helix-editor/helix/pull/6023)) - NASM ([#6068](https://github.com/helix-editor/helix/pull/6068)) - PRQL ([#6126](https://github.com/helix-editor/helix/pull/6126)) - reStructuredText ([#6180](https://github.com/helix-editor/helix/pull/6180)) - Smithy ([#6370](https://github.com/helix-editor/helix/pull/6370)) - VHDL ([#5826](https://github.com/helix-editor/helix/pull/5826)) - Rego (OpenPolicy Agent) ([#6415](https://github.com/helix-editor/helix/pull/6415)) - Nim ([#6123](https://github.com/helix-editor/helix/pull/6123)) Updated languages and queries: - Use diff syntax for patch files ([#5085](https://github.com/helix-editor/helix/pull/5085)) - Add Haskell textobjects ([#5061](https://github.com/helix-editor/helix/pull/5061)) - Fix 
commonlisp configuration ([#5091](https://github.com/helix-editor/helix/pull/5091)) - Update Scheme ([bae890d](https://github.com/helix-editor/helix/commit/bae890d)) - Add indent queries for Bash ([#5149](https://github.com/helix-editor/helix/pull/5149)) - Recognize `c++` as a C++ extension ([#5183](https://github.com/helix-editor/helix/pull/5183)) - Enable HTTP server in `metals` (Scala) config ([#5551](https://github.com/helix-editor/helix/pull/5551)) - Change V-lang language server to `v ls` from `vls` ([#5677](https://github.com/helix-editor/helix/pull/5677)) - Inject comment grammar into Nix ([#5208](https://github.com/helix-editor/helix/pull/5208)) - Update Rust highlights ([#5238](https://github.com/helix-editor/helix/pull/5238), [#5349](https://github.com/helix-editor/helix/pull/5349)) - Fix HTML injection within Markdown ([#5265](https://github.com/helix-editor/helix/pull/5265)) - Fix comment token for godot ([#5276](https://github.com/helix-editor/helix/pull/5276)) - Expand injections for Vue ([#5268](https://github.com/helix-editor/helix/pull/5268)) - Add `.bash_aliases` as a Bash file-type ([#5347](https://github.com/helix-editor/helix/pull/5347)) - Fix comment token for sshclientconfig ([#5351](https://github.com/helix-editor/helix/pull/5351)) - Update Prisma ([#5417](https://github.com/helix-editor/helix/pull/5417)) - Update C++ ([#5457](https://github.com/helix-editor/helix/pull/5457)) - Add more file-types for Python ([#5593](https://github.com/helix-editor/helix/pull/5593)) - Update tree-sitter-scala ([#5576](https://github.com/helix-editor/helix/pull/5576)) - Add an injection regex for Lua ([#5606](https://github.com/helix-editor/helix/pull/5606)) - Add `build.gradle` to java roots configuration ([#5641](https://github.com/helix-editor/helix/pull/5641)) - Add Hub PR files to markdown file-types ([#5634](https://github.com/helix-editor/helix/pull/5634)) - Add an external formatter configuration for Cue 
([#5679](https://github.com/helix-editor/helix/pull/5679)) - Add injections for builders and writers to Nix ([#5629](https://github.com/helix-editor/helix/pull/5629)) - Update tree-sitter-xml to fix whitespace parsing ([#5685](https://github.com/helix-editor/helix/pull/5685)) - Add `Justfile` to the make file-types configuration ([#5687](https://github.com/helix-editor/helix/pull/5687)) - Update tree-sitter-sql and highlight queries ([#5683](https://github.com/helix-editor/helix/pull/5683), [#5772](https://github.com/helix-editor/helix/pull/5772)) - Use the bash grammar and queries for env language ([#5720](https://github.com/helix-editor/helix/pull/5720)) - Add podspec files to ruby file-types ([#5811](https://github.com/helix-editor/helix/pull/5811)) - Recognize `.C` and `.H` file-types as C++ ([#5808](https://github.com/helix-editor/helix/pull/5808)) - Recognize plist and mobileconfig files as XML ([#5863](https://github.com/helix-editor/helix/pull/5863)) - Fix `select` indentation in Go ([#5713](https://github.com/helix-editor/helix/pull/5713)) - Check for external file modifications when writing ([#5805](https://github.com/helix-editor/helix/pull/5805)) - Recognize containerfiles as dockerfile syntax ([#5873](https://github.com/helix-editor/helix/pull/5873)) - Update godot grammar and queries ([#5944](https://github.com/helix-editor/helix/pull/5944), [#6186](https://github.com/helix-editor/helix/pull/6186)) - Improve DHall highlights ([#5959](https://github.com/helix-editor/helix/pull/5959)) - Recognize `.env.dist` and `source.env` as env language ([#6003](https://github.com/helix-editor/helix/pull/6003)) - Update tree-sitter-git-rebase ([#6030](https://github.com/helix-editor/helix/pull/6030), [#6094](https://github.com/helix-editor/helix/pull/6094)) - Improve SQL highlights ([#6041](https://github.com/helix-editor/helix/pull/6041)) - Improve markdown highlights and inject LaTeX ([#6100](https://github.com/helix-editor/helix/pull/6100)) - Add textobject 
queries for Elm ([#6084](https://github.com/helix-editor/helix/pull/6084)) - Recognize graphql schema file type ([#6159](https://github.com/helix-editor/helix/pull/6159)) - Improve highlighting in comments ([#6143](https://github.com/helix-editor/helix/pull/6143)) - Improve highlighting for JavaScript/TypeScript/ECMAScript languages ([#6205](https://github.com/helix-editor/helix/pull/6205)) - Improve PHP highlights ([#6203](https://github.com/helix-editor/helix/pull/6203), [#6250](https://github.com/helix-editor/helix/pull/6250), [#6299](https://github.com/helix-editor/helix/pull/6299)) - Improve Go highlights ([#6204](https://github.com/helix-editor/helix/pull/6204)) - Highlight unchecked sqlx functions as SQL in Rust ([#6256](https://github.com/helix-editor/helix/pull/6256)) - Improve Erlang highlights ([cdd6c8d](https://github.com/helix-editor/helix/commit/cdd6c8d)) - Improve Nix highlights ([fb4d703](https://github.com/helix-editor/helix/commit/fb4d703)) - Improve gdscript highlights ([#6311](https://github.com/helix-editor/helix/pull/6311)) - Improve Vlang highlights ([#6279](https://github.com/helix-editor/helix/pull/6279)) - Improve Makefile highlights ([#6339](https://github.com/helix-editor/helix/pull/6339)) - Remove auto-pair for `'` in OCaml ([#6381](https://github.com/helix-editor/helix/pull/6381)) - Fix indents in switch statements in ECMA languages ([#6369](https://github.com/helix-editor/helix/pull/6369)) - Recognize xlb and storyboard file-types as XML ([#6407](https://github.com/helix-editor/helix/pull/6407)) - Recognize cts and mts file-types as TypeScript ([#6424](https://github.com/helix-editor/helix/pull/6424)) - Recognize SVG file-type as XML ([#6431](https://github.com/helix-editor/helix/pull/6431)) - Add theme scopes for (un)checked list item markup scopes ([#6434](https://github.com/helix-editor/helix/pull/6434)) - Update git commit grammar and add the comment textobject ([#6439](https://github.com/helix-editor/helix/pull/6439), 
[#6493](https://github.com/helix-editor/helix/pull/6493)) - Recognize ARB file-type as JSON ([#6452](https://github.com/helix-editor/helix/pull/6452)) - Inject markdown into markdown strings in Julia ([#6489](https://github.com/helix-editor/helix/pull/6489)) Packaging: - Fix Nix flake devShell for darwin hosts ([#5368](https://github.com/helix-editor/helix/pull/5368)) - Add Appstream metadata file to `contrib/` ([#5643](https://github.com/helix-editor/helix/pull/5643)) - Increase the MSRV to 1.65 ([#5570](https://github.com/helix-editor/helix/pull/5570), [#6185](https://github.com/helix-editor/helix/pull/6185)) - Expose the Nix flake's `wrapper` ([#5994](https://github.com/helix-editor/helix/pull/5994)) # 22.12 (2022-12-06) This is a great big release filled with changes from 99 contributors. A big _thank you_ to you all! As usual, the following is a summary of each of the changes since the last release. For the full log, check out the [git log](https://github.com/helix-editor/helix/compare/22.08.1..22.12). 
Breaking changes: - Remove readline-like navigation bindings from the default insert mode keymap ([e12690e](https://github.com/helix-editor/helix/commit/e12690e), [#3811](https://github.com/helix-editor/helix/pull/3811), [#3827](https://github.com/helix-editor/helix/pull/3827), [#3915](https://github.com/helix-editor/helix/pull/3915), [#4088](https://github.com/helix-editor/helix/pull/4088)) - Rename `append_to_line` as `insert_at_line_end` and `prepend_to_line` as `insert_at_line_start` ([#3753](https://github.com/helix-editor/helix/pull/3753)) - Swap diagnostic picker and debug mode bindings in the space keymap ([#4229](https://github.com/helix-editor/helix/pull/4229)) - Select newly inserted text on paste or from shell commands ([#4458](https://github.com/helix-editor/helix/pull/4458), [#4608](https://github.com/helix-editor/helix/pull/4608), [#4619](https://github.com/helix-editor/helix/pull/4619), [#4824](https://github.com/helix-editor/helix/pull/4824)) - Select newly inserted surrounding characters on `ms` ([#4752](https://github.com/helix-editor/helix/pull/4752)) - Exit select-mode after executing `replace_*` commands ([#4554](https://github.com/helix-editor/helix/pull/4554)) - Exit select-mode after executing surround commands ([#4858](https://github.com/helix-editor/helix/pull/4858)) - Change tree-sitter text-object keys ([#3782](https://github.com/helix-editor/helix/pull/3782)) - Rename `fleetish` theme to `fleet_dark` ([#4997](https://github.com/helix-editor/helix/pull/4997)) Features: - Bufferline ([#2759](https://github.com/helix-editor/helix/pull/2759)) - Support underline styles and colors ([#4061](https://github.com/helix-editor/helix/pull/4061), [98c121c](https://github.com/helix-editor/helix/commit/98c121c)) - Inheritance for themes ([#3067](https://github.com/helix-editor/helix/pull/3067), [#4096](https://github.com/helix-editor/helix/pull/4096)) - Cursorcolumn ([#4084](https://github.com/helix-editor/helix/pull/4084)) - Overhauled system for 
writing files and quitting ([#2267](https://github.com/helix-editor/helix/pull/2267), [#4397](https://github.com/helix-editor/helix/pull/4397)) - Autosave when terminal loses focus ([#3178](https://github.com/helix-editor/helix/pull/3178)) - Use OSC52 as a fallback for the system clipboard ([#3220](https://github.com/helix-editor/helix/pull/3220)) - Show git diffs in the gutter ([#3890](https://github.com/helix-editor/helix/pull/3890), [#5012](https://github.com/helix-editor/helix/pull/5012), [#4995](https://github.com/helix-editor/helix/pull/4995)) - Add a logo ([dc1ec56](https://github.com/helix-editor/helix/commit/dc1ec56)) - Multi-cursor completion ([#4496](https://github.com/helix-editor/helix/pull/4496)) Commands: - `file_picker_in_current_directory` (`F`) ([#3701](https://github.com/helix-editor/helix/pull/3701)) - `:lsp-restart` to restart the current document's language server ([#3435](https://github.com/helix-editor/helix/pull/3435), [#3972](https://github.com/helix-editor/helix/pull/3972)) - `join_selections_space` (`A-j`) which joins selections and selects the joining whitespace ([#3549](https://github.com/helix-editor/helix/pull/3549)) - `:update` to write the current file if it is modified ([#4426](https://github.com/helix-editor/helix/pull/4426)) - `:lsp-workspace-command` for picking LSP commands to execute ([#3140](https://github.com/helix-editor/helix/pull/3140)) - `extend_prev_word_end` - the extend variant for `move_prev_word_end` ([7468fa2](https://github.com/helix-editor/helix/commit/7468fa2)) - `make_search_word_bounded` which adds regex word boundaries to the current search register value ([#4322](https://github.com/helix-editor/helix/pull/4322)) - `:reload-all` - `:reload` for all open buffers ([#4663](https://github.com/helix-editor/helix/pull/4663), [#4901](https://github.com/helix-editor/helix/pull/4901)) - `goto_next_change` (`]g`), `goto_prev_change` (`[g`), `goto_first_change` (`[G`), `goto_last_change` (`]G`) textobjects for jumping 
between VCS changes ([#4650](https://github.com/helix-editor/helix/pull/4650)) Usability improvements and fixes: - Don't log 'LSP not defined' errors in the logfile ([1caba2d](https://github.com/helix-editor/helix/commit/1caba2d)) - Look for the external formatter program before invoking it ([#3670](https://github.com/helix-editor/helix/pull/3670)) - Don't send LSP didOpen events for documents without URLs ([44b4479](https://github.com/helix-editor/helix/commit/44b4479)) - Fix off-by-one in `extend_line_above` command ([#3689](https://github.com/helix-editor/helix/pull/3689)) - Use the original scroll offset when opening a split ([1acdfaa](https://github.com/helix-editor/helix/commit/1acdfaa)) - Handle auto-formatting failures and save the file anyway ([#3684](https://github.com/helix-editor/helix/pull/3684)) - Ensure the cursor is in view after `:reflow` ([#3733](https://github.com/helix-editor/helix/pull/3733)) - Add default rulers and reflow config for git commit messages ([#3738](https://github.com/helix-editor/helix/pull/3738)) - Improve grammar fetching and building output ([#3773](https://github.com/helix-editor/helix/pull/3773)) - Add a `text` language to language completion ([cc47d3f](https://github.com/helix-editor/helix/commit/cc47d3f)) - Improve error handling for `:set-language` ([e8add6f](https://github.com/helix-editor/helix/commit/e8add6f)) - Improve error handling for `:config-reload` ([#3668](https://github.com/helix-editor/helix/pull/3668)) - Improve error handling when passing improper ranges to syntax highlighting ([#3826](https://github.com/helix-editor/helix/pull/3826)) - Render `<code>` tags as raw markup in markdown ([#3425](https://github.com/helix-editor/helix/pull/3425)) - Remove border around the LSP code-actions popup ([#3444](https://github.com/helix-editor/helix/pull/3444)) - Canonicalize the path to the runtime directory ([#3794](https://github.com/helix-editor/helix/pull/3794)) - Add a `themelint` xtask for linting themes 
([#3234](https://github.com/helix-editor/helix/pull/3234)) - Re-sort LSP diagnostics after applying transactions ([#3895](https://github.com/helix-editor/helix/pull/3895), [#4319](https://github.com/helix-editor/helix/pull/4319)) - Add a command-line flag to specify the log file ([#3807](https://github.com/helix-editor/helix/pull/3807)) - Track source and tag information in LSP diagnostics ([#3898](https://github.com/helix-editor/helix/pull/3898), [1df32c9](https://github.com/helix-editor/helix/commit/1df32c9)) - Fix theme returning to normal when exiting the `:theme` completion ([#3644](https://github.com/helix-editor/helix/pull/3644)) - Improve error messages for invalid commands in the keymap ([#3931](https://github.com/helix-editor/helix/pull/3931)) - Deduplicate regexs in `search_selection` command ([#3941](https://github.com/helix-editor/helix/pull/3941)) - Split the finding of LSP root and config roots ([#3929](https://github.com/helix-editor/helix/pull/3929)) - Ensure that the cursor is within view after auto-formatting ([#4047](https://github.com/helix-editor/helix/pull/4047)) - Add pseudo-pending to commands with on-next-key callbacks ([#4062](https://github.com/helix-editor/helix/pull/4062), [#4077](https://github.com/helix-editor/helix/pull/4077)) - Add live preview to `:goto` ([#2982](https://github.com/helix-editor/helix/pull/2982)) - Show regex compilation failure in a popup ([#3049](https://github.com/helix-editor/helix/pull/3049)) - Add 'cycled to end' and 'no more matches' for search ([#3176](https://github.com/helix-editor/helix/pull/3176), [#4101](https://github.com/helix-editor/helix/pull/4101)) - Add extending behavior to tree-sitter textobjects ([#3266](https://github.com/helix-editor/helix/pull/3266)) - Add `ui.gutter.selected` option for themes ([#3303](https://github.com/helix-editor/helix/pull/3303)) - Make statusline mode names configurable ([#3311](https://github.com/helix-editor/helix/pull/3311)) - Add a statusline element for total 
line count ([#3960](https://github.com/helix-editor/helix/pull/3960)) - Add extending behavior to `goto_window_*` commands ([#3985](https://github.com/helix-editor/helix/pull/3985)) - Fix a panic in signature help when the preview is too large ([#4030](https://github.com/helix-editor/helix/pull/4030)) - Add command names to the command palette ([#4071](https://github.com/helix-editor/helix/pull/4071), [#4223](https://github.com/helix-editor/helix/pull/4223), [#4495](https://github.com/helix-editor/helix/pull/4495)) - Find the LSP workspace root from the current document's path ([#3553](https://github.com/helix-editor/helix/pull/3553)) - Add an option to skip indent-guide levels ([#3819](https://github.com/helix-editor/helix/pull/3819), [2c36e33](https://github.com/helix-editor/helix/commit/2c36e33)) - Change focus to modified docs on quit ([#3872](https://github.com/helix-editor/helix/pull/3872)) - Respond to `USR1` signal by reloading config ([#3952](https://github.com/helix-editor/helix/pull/3952)) - Exit gracefully when the close operation fails ([#4081](https://github.com/helix-editor/helix/pull/4081)) - Fix goto/view center mismatch ([#4135](https://github.com/helix-editor/helix/pull/4135)) - Highlight the current file picker document on idle-timeout ([#3172](https://github.com/helix-editor/helix/pull/3172), [a85e386](https://github.com/helix-editor/helix/commit/a85e386)) - Apply transactions to jumplist selections ([#4186](https://github.com/helix-editor/helix/pull/4186), [#4227](https://github.com/helix-editor/helix/pull/4227), [#4733](https://github.com/helix-editor/helix/pull/4733), [#4865](https://github.com/helix-editor/helix/pull/4865), [#4912](https://github.com/helix-editor/helix/pull/4912), [#4965](https://github.com/helix-editor/helix/pull/4965), [#4981](https://github.com/helix-editor/helix/pull/4981)) - Use space as a separator for fuzzy matcher ([#3969](https://github.com/helix-editor/helix/pull/3969)) - Overlay all diagnostics with highest 
severity on top ([#4113](https://github.com/helix-editor/helix/pull/4113)) - Avoid re-parsing unmodified tree-sitter injections ([#4146](https://github.com/helix-editor/helix/pull/4146)) - Add extending captures for indentation, re-enable python indentation ([#3382](https://github.com/helix-editor/helix/pull/3382), [3e84434](https://github.com/helix-editor/helix/commit/3e84434)) - Only allow either `--vsplit` or `--hsplit` CLI flags at once ([#4202](https://github.com/helix-editor/helix/pull/4202)) - Fix append cursor location when selection anchor is at the end of the document ([#4147](https://github.com/helix-editor/helix/pull/4147)) - Improve selection yanking message ([#4275](https://github.com/helix-editor/helix/pull/4275)) - Log failures to load tree-sitter grammars as errors ([#4315](https://github.com/helix-editor/helix/pull/4315)) - Fix rendering of lines longer than 65,536 columns ([#4172](https://github.com/helix-editor/helix/pull/4172)) - Skip searching `.git` in `global_search` ([#4334](https://github.com/helix-editor/helix/pull/4334)) - Display tree-sitter scopes in a popup ([#4337](https://github.com/helix-editor/helix/pull/4337)) - Fix deleting a word from the end of the buffer ([#4328](https://github.com/helix-editor/helix/pull/4328)) - Pretty print the syntax tree in `:tree-sitter-subtree` ([#4295](https://github.com/helix-editor/helix/pull/4295), [#4606](https://github.com/helix-editor/helix/pull/4606)) - Allow specifying suffixes for file-type detection ([#2455](https://github.com/helix-editor/helix/pull/2455), [#4414](https://github.com/helix-editor/helix/pull/4414)) - Fix multi-byte auto-pairs ([#4024](https://github.com/helix-editor/helix/pull/4024)) - Improve sort scoring for LSP code-actions and completions ([#4134](https://github.com/helix-editor/helix/pull/4134)) - Fix the handling of quotes within shellwords ([#4098](https://github.com/helix-editor/helix/pull/4098)) - Fix `delete_word_backward` and `delete_word_forward` on newlines 
([#4392](https://github.com/helix-editor/helix/pull/4392)) - Fix 'no entry found for key' crash on `:write-all` ([#4384](https://github.com/helix-editor/helix/pull/4384)) - Remove lowercase requirement for tree-sitter grammars ([#4346](https://github.com/helix-editor/helix/pull/4346)) - Resolve LSP completion items on idle-timeout ([#4406](https://github.com/helix-editor/helix/pull/4406), [#4797](https://github.com/helix-editor/helix/pull/4797)) - Render diagnostics in the file picker preview ([#4324](https://github.com/helix-editor/helix/pull/4324)) - Fix terminal freezing on `shell_insert_output` ([#4156](https://github.com/helix-editor/helix/pull/4156)) - Allow use of the count in the repeat operator (`.`) ([#4450](https://github.com/helix-editor/helix/pull/4450)) - Show the current theme name on `:theme` with no arguments ([#3740](https://github.com/helix-editor/helix/pull/3740)) - Fix rendering in very large terminals ([#4318](https://github.com/helix-editor/helix/pull/4318)) - Sort LSP preselected items to the top of the completion menu ([#4480](https://github.com/helix-editor/helix/pull/4480)) - Trim braces and quotes from paths in goto-file ([#4370](https://github.com/helix-editor/helix/pull/4370)) - Prevent automatic signature help outside of insert mode ([#4456](https://github.com/helix-editor/helix/pull/4456)) - Fix freezes with external programs that process stdin and stdout concurrently ([#4180](https://github.com/helix-editor/helix/pull/4180)) - Make `scroll` aware of tabs and wide characters ([#4519](https://github.com/helix-editor/helix/pull/4519)) - Correctly handle escaping in `command_mode` completion ([#4316](https://github.com/helix-editor/helix/pull/4316), [#4587](https://github.com/helix-editor/helix/pull/4587), [#4632](https://github.com/helix-editor/helix/pull/4632)) - Fix `delete_char_backward` for paired characters ([#4558](https://github.com/helix-editor/helix/pull/4558)) - Fix crash from two windows editing the same document 
([#4570](https://github.com/helix-editor/helix/pull/4570)) - Fix pasting from the blackhole register ([#4497](https://github.com/helix-editor/helix/pull/4497)) - Support LSP insertReplace completion items ([1312682](https://github.com/helix-editor/helix/commit/1312682)) - Dynamically resize the line number gutter width ([#3469](https://github.com/helix-editor/helix/pull/3469)) - Fix crash for unknown completion item kinds ([#4658](https://github.com/helix-editor/helix/pull/4658)) - Re-enable `format_selections` for single selection ranges ([d4f5cab](https://github.com/helix-editor/helix/commit/d4f5cab)) - Limit the number of in-progress tree-sitter query matches ([#4707](https://github.com/helix-editor/helix/pull/4707), [#4830](https://github.com/helix-editor/helix/pull/4830)) - Use the special `#` register with `increment`/`decrement` to change by range number ([#4418](https://github.com/helix-editor/helix/pull/4418)) - Add a statusline element to show number of selected chars ([#4682](https://github.com/helix-editor/helix/pull/4682)) - Add a statusline element showing global LSP diagnostic warning and error counts ([#4569](https://github.com/helix-editor/helix/pull/4569)) - Add a scrollbar to popups ([#4449](https://github.com/helix-editor/helix/pull/4449)) - Prefer shorter matches in fuzzy matcher scoring ([#4698](https://github.com/helix-editor/helix/pull/4698)) - Use key-sequence format for command palette keybinds ([#4712](https://github.com/helix-editor/helix/pull/4712)) - Remove prefix filtering from autocompletion menu ([#4578](https://github.com/helix-editor/helix/pull/4578)) - Focus on the parent buffer when closing a split ([#4766](https://github.com/helix-editor/helix/pull/4766)) - Handle language server termination ([#4797](https://github.com/helix-editor/helix/pull/4797), [#4852](https://github.com/helix-editor/helix/pull/4852)) - Allow `r`/`t`/`f` to work on tab characters ([#4817](https://github.com/helix-editor/helix/pull/4817)) - Show a preview 
for scratch buffers in the buffer picker ([#3454](https://github.com/helix-editor/helix/pull/3454)) - Set a limit of entries in the jumplist ([#4750](https://github.com/helix-editor/helix/pull/4750)) - Re-use shell outputs when inserting or appending shell output ([#3465](https://github.com/helix-editor/helix/pull/3465)) - Check LSP server provider capabilities ([#3554](https://github.com/helix-editor/helix/pull/3554)) - Improve tree-sitter parsing performance on files with many language layers ([#4716](https://github.com/helix-editor/helix/pull/4716)) - Move indentation to the next line when using `<ret>` on a line with only whitespace ([#4854](https://github.com/helix-editor/helix/pull/4854)) - Remove selections for closed views from all documents ([#4888](https://github.com/helix-editor/helix/pull/4888)) - Improve performance of the `:reload` command ([#4457](https://github.com/helix-editor/helix/pull/4457)) - Properly handle media keys ([#4887](https://github.com/helix-editor/helix/pull/4887)) - Support LSP diagnostic data field ([#4935](https://github.com/helix-editor/helix/pull/4935)) - Handle C-i keycode as tab ([#4961](https://github.com/helix-editor/helix/pull/4961)) - Fix view alignment for jumplist picker jumps ([#3743](https://github.com/helix-editor/helix/pull/3743)) - Use OSC52 for tmux clipboard provider ([#5027](https://github.com/helix-editor/helix/pull/5027)) Themes: - Add `varua` ([#3610](https://github.com/helix-editor/helix/pull/3610), [#4964](https://github.com/helix-editor/helix/pull/4964)) - Update `boo_berry` ([#3653](https://github.com/helix-editor/helix/pull/3653)) - Add `rasmus` ([#3728](https://github.com/helix-editor/helix/pull/3728)) - Add `papercolor_dark` ([#3742](https://github.com/helix-editor/helix/pull/3742)) - Update `monokai_pro_spectrum` ([#3814](https://github.com/helix-editor/helix/pull/3814)) - Update `nord` ([#3792](https://github.com/helix-editor/helix/pull/3792)) - Update `fleetish` 
([#3844](https://github.com/helix-editor/helix/pull/3844), [#4487](https://github.com/helix-editor/helix/pull/4487), [#4813](https://github.com/helix-editor/helix/pull/4813)) - Update `flatwhite` ([#3843](https://github.com/helix-editor/helix/pull/3843)) - Add `darcula` ([#3739](https://github.com/helix-editor/helix/pull/3739)) - Update `papercolor` ([#3938](https://github.com/helix-editor/helix/pull/3938), [#4317](https://github.com/helix-editor/helix/pull/4317)) - Add bufferline colors to multiple themes ([#3881](https://github.com/helix-editor/helix/pull/3881)) - Add `gruvbox_dark_hard` ([#3948](https://github.com/helix-editor/helix/pull/3948)) - Add `onedarker` ([#3980](https://github.com/helix-editor/helix/pull/3980), [#4060](https://github.com/helix-editor/helix/pull/4060)) - Add `dark_high_contrast` ([#3312](https://github.com/helix-editor/helix/pull/3312)) - Update `bogster` ([#4121](https://github.com/helix-editor/helix/pull/4121), [#4264](https://github.com/helix-editor/helix/pull/4264)) - Update `sonokai` ([#4089](https://github.com/helix-editor/helix/pull/4089)) - Update `ayu_*` themes ([#4140](https://github.com/helix-editor/helix/pull/4140), [#4109](https://github.com/helix-editor/helix/pull/4109), [#4662](https://github.com/helix-editor/helix/pull/4662), [#4764](https://github.com/helix-editor/helix/pull/4764)) - Update `everforest` ([#3998](https://github.com/helix-editor/helix/pull/3998)) - Update `monokai_pro_octagon` ([#4247](https://github.com/helix-editor/helix/pull/4247)) - Add `heisenberg` ([#4209](https://github.com/helix-editor/helix/pull/4209)) - Add `bogster_light` ([#4265](https://github.com/helix-editor/helix/pull/4265)) - Update `pop-dark` ([#4323](https://github.com/helix-editor/helix/pull/4323)) - Update `rose_pine` ([#4221](https://github.com/helix-editor/helix/pull/4221)) - Add `kanagawa` ([#4300](https://github.com/helix-editor/helix/pull/4300)) - Add `hex_steel`, `hex_toxic` and `hex_lavender` 
([#4367](https://github.com/helix-editor/helix/pull/4367), [#4990](https://github.com/helix-editor/helix/pull/4990)) - Update `tokyonight` and `tokyonight_storm` ([#4415](https://github.com/helix-editor/helix/pull/4415)) - Update `gruvbox` ([#4626](https://github.com/helix-editor/helix/pull/4626)) - Update `dark_plus` ([#4661](https://github.com/helix-editor/helix/pull/4661), [#4678](https://github.com/helix-editor/helix/pull/4678)) - Add `zenburn` ([#4613](https://github.com/helix-editor/helix/pull/4613), [#4977](https://github.com/helix-editor/helix/pull/4977)) - Update `monokai_pro` ([#4789](https://github.com/helix-editor/helix/pull/4789)) - Add `mellow` ([#4770](https://github.com/helix-editor/helix/pull/4770)) - Add `nightfox` ([#4769](https://github.com/helix-editor/helix/pull/4769), [#4966](https://github.com/helix-editor/helix/pull/4966)) - Update `doom_acario_dark` ([#4979](https://github.com/helix-editor/helix/pull/4979)) - Update `autumn` ([#4996](https://github.com/helix-editor/helix/pull/4996)) - Update `acme` ([#4999](https://github.com/helix-editor/helix/pull/4999)) - Update `nord_light` ([#4999](https://github.com/helix-editor/helix/pull/4999)) - Update `serika_*` ([#5015](https://github.com/helix-editor/helix/pull/5015)) LSP configurations: - Switch to `openscad-lsp` for OpenScad ([#3750](https://github.com/helix-editor/helix/pull/3750)) - Support Jsonnet ([#3748](https://github.com/helix-editor/helix/pull/3748)) - Support Markdown ([#3499](https://github.com/helix-editor/helix/pull/3499)) - Support Bass ([#3771](https://github.com/helix-editor/helix/pull/3771)) - Set roots configuration for Elixir and HEEx ([#3917](https://github.com/helix-editor/helix/pull/3917), [#3959](https://github.com/helix-editor/helix/pull/3959)) - Support Purescript ([#4242](https://github.com/helix-editor/helix/pull/4242)) - Set roots configuration for Julia ([#4361](https://github.com/helix-editor/helix/pull/4361)) - Support D 
([#4372](https://github.com/helix-editor/helix/pull/4372)) - Increase default language server timeout for Julia ([#4575](https://github.com/helix-editor/helix/pull/4575)) - Use ElixirLS for HEEx ([#4679](https://github.com/helix-editor/helix/pull/4679)) - Support Bicep ([#4403](https://github.com/helix-editor/helix/pull/4403)) - Switch to `nil` for Nix ([433ccef](https://github.com/helix-editor/helix/commit/433ccef)) - Support QML ([#4842](https://github.com/helix-editor/helix/pull/4842)) - Enable auto-format for CSS ([#4987](https://github.com/helix-editor/helix/pull/4987)) - Support CommonLisp ([4176769](https://github.com/helix-editor/helix/commit/4176769)) New languages: - SML ([#3692](https://github.com/helix-editor/helix/pull/3692)) - Jsonnet ([#3714](https://github.com/helix-editor/helix/pull/3714)) - Godot resource ([#3759](https://github.com/helix-editor/helix/pull/3759)) - Astro ([#3829](https://github.com/helix-editor/helix/pull/3829)) - SSH config ([#2455](https://github.com/helix-editor/helix/pull/2455), [#4538](https://github.com/helix-editor/helix/pull/4538)) - Bass ([#3771](https://github.com/helix-editor/helix/pull/3771)) - WAT (WebAssembly text format) ([#4040](https://github.com/helix-editor/helix/pull/4040), [#4542](https://github.com/helix-editor/helix/pull/4542)) - Purescript ([#4242](https://github.com/helix-editor/helix/pull/4242)) - D ([#4372](https://github.com/helix-editor/helix/pull/4372), [#4562](https://github.com/helix-editor/helix/pull/4562)) - VHS ([#4486](https://github.com/helix-editor/helix/pull/4486)) - KDL ([#4481](https://github.com/helix-editor/helix/pull/4481)) - XML ([#4518](https://github.com/helix-editor/helix/pull/4518)) - WIT ([#4525](https://github.com/helix-editor/helix/pull/4525)) - ENV ([#4536](https://github.com/helix-editor/helix/pull/4536)) - INI ([#4538](https://github.com/helix-editor/helix/pull/4538)) - Bicep ([#4403](https://github.com/helix-editor/helix/pull/4403), 
[#4751](https://github.com/helix-editor/helix/pull/4751)) - QML ([#4842](https://github.com/helix-editor/helix/pull/4842)) - CommonLisp ([4176769](https://github.com/helix-editor/helix/commit/4176769)) Updated languages and queries: - Zig ([#3621](https://github.com/helix-editor/helix/pull/3621), [#4745](https://github.com/helix-editor/helix/pull/4745)) - Rust ([#3647](https://github.com/helix-editor/helix/pull/3647), [#3729](https://github.com/helix-editor/helix/pull/3729), [#3927](https://github.com/helix-editor/helix/pull/3927), [#4073](https://github.com/helix-editor/helix/pull/4073), [#4510](https://github.com/helix-editor/helix/pull/4510), [#4659](https://github.com/helix-editor/helix/pull/4659), [#4717](https://github.com/helix-editor/helix/pull/4717)) - Solidity ([20ed8c2](https://github.com/helix-editor/helix/commit/20ed8c2)) - Fish ([#3704](https://github.com/helix-editor/helix/pull/3704)) - Elixir ([#3645](https://github.com/helix-editor/helix/pull/3645), [#4333](https://github.com/helix-editor/helix/pull/4333), [#4821](https://github.com/helix-editor/helix/pull/4821)) - Diff ([#3708](https://github.com/helix-editor/helix/pull/3708)) - Nix ([665e27f](https://github.com/helix-editor/helix/commit/665e27f), [1fe3273](https://github.com/helix-editor/helix/commit/1fe3273)) - Markdown ([#3749](https://github.com/helix-editor/helix/pull/3749), [#4078](https://github.com/helix-editor/helix/pull/4078), [#4483](https://github.com/helix-editor/helix/pull/4483), [#4478](https://github.com/helix-editor/helix/pull/4478)) - GDScript ([#3760](https://github.com/helix-editor/helix/pull/3760)) - JSX and TSX ([#3853](https://github.com/helix-editor/helix/pull/3853), [#3973](https://github.com/helix-editor/helix/pull/3973)) - Ruby ([#3976](https://github.com/helix-editor/helix/pull/3976), [#4601](https://github.com/helix-editor/helix/pull/4601)) - R ([#4031](https://github.com/helix-editor/helix/pull/4031)) - WGSL ([#3996](https://github.com/helix-editor/helix/pull/3996), 
[#4079](https://github.com/helix-editor/helix/pull/4079)) - C# ([#4118](https://github.com/helix-editor/helix/pull/4118), [#4281](https://github.com/helix-editor/helix/pull/4281), [#4213](https://github.com/helix-editor/helix/pull/4213)) - Twig ([#4176](https://github.com/helix-editor/helix/pull/4176)) - Lua ([#3552](https://github.com/helix-editor/helix/pull/3552)) - C/C++ ([#4079](https://github.com/helix-editor/helix/pull/4079), [#4278](https://github.com/helix-editor/helix/pull/4278), [#4282](https://github.com/helix-editor/helix/pull/4282)) - Cairo ([17488f1](https://github.com/helix-editor/helix/commit/17488f1), [431f9c1](https://github.com/helix-editor/helix/commit/431f9c1), [09a6df1](https://github.com/helix-editor/helix/commit/09a6df1)) - Rescript ([#4356](https://github.com/helix-editor/helix/pull/4356)) - Zig ([#4409](https://github.com/helix-editor/helix/pull/4409)) - Scala ([#4353](https://github.com/helix-editor/helix/pull/4353), [#4697](https://github.com/helix-editor/helix/pull/4697), [#4701](https://github.com/helix-editor/helix/pull/4701)) - LaTeX ([#4528](https://github.com/helix-editor/helix/pull/4528), [#4922](https://github.com/helix-editor/helix/pull/4922)) - SQL ([#4529](https://github.com/helix-editor/helix/pull/4529)) - Python ([#4560](https://github.com/helix-editor/helix/pull/4560)) - Bash/Zsh ([#4582](https://github.com/helix-editor/helix/pull/4582)) - Nu ([#4583](https://github.com/helix-editor/helix/pull/4583)) - Julia ([#4588](https://github.com/helix-editor/helix/pull/4588)) - Typescript ([#4703](https://github.com/helix-editor/helix/pull/4703)) - Meson ([#4572](https://github.com/helix-editor/helix/pull/4572)) - Haskell ([#4800](https://github.com/helix-editor/helix/pull/4800)) - CMake ([#4809](https://github.com/helix-editor/helix/pull/4809)) - HTML ([#4829](https://github.com/helix-editor/helix/pull/4829), [#4881](https://github.com/helix-editor/helix/pull/4881)) - Java ([#4886](https://github.com/helix-editor/helix/pull/4886)) - 
Go ([#4906](https://github.com/helix-editor/helix/pull/4906), [#4969](https://github.com/helix-editor/helix/pull/4969), [#5010](https://github.com/helix-editor/helix/pull/5010)) - CSS ([#4882](https://github.com/helix-editor/helix/pull/4882)) - Racket ([#4915](https://github.com/helix-editor/helix/pull/4915)) - SCSS ([#5003](https://github.com/helix-editor/helix/pull/5003)) Packaging: - Filter relevant source files in the Nix flake ([#3657](https://github.com/helix-editor/helix/pull/3657)) - Build a binary for `aarch64-linux` in the release CI ([038a91d](https://github.com/helix-editor/helix/commit/038a91d)) - Build an AppImage for `aarch64-linux` in the release CI ([b738031](https://github.com/helix-editor/helix/commit/b738031)) - Enable CI builds for `riscv64-linux` ([#3685](https://github.com/helix-editor/helix/pull/3685)) - Support preview releases in CI ([0090a2d](https://github.com/helix-editor/helix/commit/0090a2d)) - Strip binaries built in CI ([#3780](https://github.com/helix-editor/helix/pull/3780)) - Fix the development shell for the Nix Flake on `aarch64-darwin` ([#3810](https://github.com/helix-editor/helix/pull/3810)) - Raise the MSRV and create an MSRV policy ([#3896](https://github.com/helix-editor/helix/pull/3896), [#3913](https://github.com/helix-editor/helix/pull/3913), [#3961](https://github.com/helix-editor/helix/pull/3961)) - Fix Fish completions for `--config` and `--log` flags ([#3912](https://github.com/helix-editor/helix/pull/3912)) - Use builtin filenames option in Bash completion ([#4648](https://github.com/helix-editor/helix/pull/4648)) # 22.08.1 (2022-09-01) This is a patch release that fixes a panic caused by closing splits or buffers. ([#3633](https://github.com/helix-editor/helix/pull/3633)) # 22.08 (2022-08-31) A big _thank you_ to our contributors! This release had 87 contributors. As usual, the following is a summary of each of the changes since the last release. 
For the full log, check out the [git log](https://github.com/helix-editor/helix/compare/22.05..22.08). Breaking changes: - Special keymap names for `+`, `;` and `%` have been replaced with those literal characters ([#2677](https://github.com/helix-editor/helix/pull/2677), [#3556](https://github.com/helix-editor/helix/pull/3556)) - `A-Left` and `A-Right` have become `C-Left` and `C-Right` for word-wise motion ([#2500](https://github.com/helix-editor/helix/pull/2500)) - The `catppuccin` theme's name has been corrected from `catpuccin` ([#2713](https://github.com/helix-editor/helix/pull/2713)) - `catppuccin` has been replaced by its variants, `catppuccin_frappe`, `catppuccin_latte`, `catppuccin_macchiato`, `catppuccin_mocha` ([#3281](https://github.com/helix-editor/helix/pull/3281)) - `C-n` and `C-p` have been removed from the default insert mode keymap ([#3340](https://github.com/helix-editor/helix/pull/3340)) - The `extend_line` command has been replaced with `extend_line_below` and a new `extend_line` command now exists ([#3046](https://github.com/helix-editor/helix/pull/3046)) Features: - Add an integration testing harness ([#2359](https://github.com/helix-editor/helix/pull/2359)) - Indent guides ([#1796](https://github.com/helix-editor/helix/pull/1796), [906259c](https://github.com/helix-editor/helix/commit/906259c)) - Cursorline ([#2170](https://github.com/helix-editor/helix/pull/2170), [fde9e03](https://github.com/helix-editor/helix/commit/fde9e03)) - Select all instances of the symbol under the cursor (`<space>h`) ([#2738](https://github.com/helix-editor/helix/pull/2738)) - A picker for document and workspace LSP diagnostics (`<space>g`/`<space>G`) ([#2013](https://github.com/helix-editor/helix/pull/2013), [#2984](https://github.com/helix-editor/helix/pull/2984)) - Allow styling the mode indicator per-mode ([#2676](https://github.com/helix-editor/helix/pull/2676)) - Live preview for the theme picker ([#1798](https://github.com/helix-editor/helix/pull/1798)) - Configurable 
statusline ([#2434](https://github.com/helix-editor/helix/pull/2434)) - LSP SignatureHelp ([#1755](https://github.com/helix-editor/helix/pull/1755), [a8b123f](https://github.com/helix-editor/helix/commit/a8b123f)) - A picker for the jumplist ([#3033](https://github.com/helix-editor/helix/pull/3033)) - Configurable external formatter binaries ([#2942](https://github.com/helix-editor/helix/pull/2942)) - Bracketed paste support ([#3233](https://github.com/helix-editor/helix/pull/3233), [12ddd03](https://github.com/helix-editor/helix/commit/12ddd03)) Commands: - `:insert-output` and `:append-output` which insert/append output from a shell command ([#2589](https://github.com/helix-editor/helix/pull/2589)) - The `t` textobject (`]t`/`[t`/`mit`/`mat`) for navigating tests ([#2807](https://github.com/helix-editor/helix/pull/2807)) - `C-Backspace` and `C-Delete` for word-wise deletion in prompts and pickers ([#2500](https://github.com/helix-editor/helix/pull/2500)) - `A-Delete` for forward word-wise deletion in insert mode ([#2500](https://github.com/helix-editor/helix/pull/2500)) - `C-t` for toggling the preview pane in pickers ([#3021](https://github.com/helix-editor/helix/pull/3021)) - `extend_line` now extends in the direction of the cursor ([#3046](https://github.com/helix-editor/helix/pull/3046)) Usability improvements and fixes: - Fix tree-sitter parser builds on illumos ([#2602](https://github.com/helix-editor/helix/pull/2602)) - Remove empty scratch buffer from jumplists when removing ([5ed6223](https://github.com/helix-editor/helix/commit/5ed6223)) - Fix panic on undo after `shell_append_output` ([#2625](https://github.com/helix-editor/helix/pull/2625)) - Sort LSP edits by start range ([3d91c99](https://github.com/helix-editor/helix/commit/3d91c99)) - Be more defensive about LSP URI conversions ([6de6a3e](https://github.com/helix-editor/helix/commit/6de6a3e), [378f438](https://github.com/helix-editor/helix/commit/378f438)) - Ignore SendErrors when grammar builds 
fail ([#2641](https://github.com/helix-editor/helix/pull/2641)) - Append `set_line_ending` to document history ([#2649](https://github.com/helix-editor/helix/pull/2649)) - Use last prompt entry when empty ([b14c258](https://github.com/helix-editor/helix/commit/b14c258), [#2870](https://github.com/helix-editor/helix/pull/2870)) - Do not add extra line breaks in markdown lists ([#2689](https://github.com/helix-editor/helix/pull/2689)) - Disable dialyzer by default for ElixirLS ([#2710](https://github.com/helix-editor/helix/pull/2710)) - Refactor textobject node capture ([#2741](https://github.com/helix-editor/helix/pull/2741)) - Prevent re-selecting the same range with `expand_selection` ([#2760](https://github.com/helix-editor/helix/pull/2760)) - Introduce `keyword.storage` highlight scope ([#2731](https://github.com/helix-editor/helix/pull/2731)) - Handle symlinks more consistently ([#2718](https://github.com/helix-editor/helix/pull/2718)) - Improve markdown list rendering ([#2687](https://github.com/helix-editor/helix/pull/2687)) - Update auto-pairs and idle-timeout settings when the config is reloaded ([#2736](https://github.com/helix-editor/helix/pull/2736)) - Fix panic on closing last buffer ([#2658](https://github.com/helix-editor/helix/pull/2658)) - Prevent modifying jumplist until jumping to a reference ([#2670](https://github.com/helix-editor/helix/pull/2670)) - Ensure `:quit` and `:quit!` take no arguments ([#2654](https://github.com/helix-editor/helix/pull/2654)) - Fix crash due to cycles when replaying macros ([#2647](https://github.com/helix-editor/helix/pull/2647)) - Pass LSP FormattingOptions ([#2635](https://github.com/helix-editor/helix/pull/2635)) - Prevent showing colors when the health-check is piped ([#2836](https://github.com/helix-editor/helix/pull/2836)) - Use character indexing for mouse selection ([#2839](https://github.com/helix-editor/helix/pull/2839)) - Display the highest severity diagnostic for a line in the gutter 
([#2835](https://github.com/helix-editor/helix/pull/2835)) - Default the ruler color to red background ([#2669](https://github.com/helix-editor/helix/pull/2669)) - Make `move_vertically` aware of tabs and wide characters ([#2620](https://github.com/helix-editor/helix/pull/2620)) - Enable shellwords for Windows ([#2767](https://github.com/helix-editor/helix/pull/2767)) - Add history suggestions to global search ([#2717](https://github.com/helix-editor/helix/pull/2717)) - Fix the scrollbar's length proportional to total menu items ([#2860](https://github.com/helix-editor/helix/pull/2860)) - Reset terminal modifiers for diagnostic text ([#2861](https://github.com/helix-editor/helix/pull/2861), [#2900](https://github.com/helix-editor/helix/pull/2900)) - Redetect indents and line-endings after a Language Server replaces the document ([#2778](https://github.com/helix-editor/helix/pull/2778)) - Check selection's visible width when copying on mouse click ([#2711](https://github.com/helix-editor/helix/pull/2711)) - Fix edge-case in tree-sitter `expand_selection` command ([#2877](https://github.com/helix-editor/helix/pull/2877)) - Add a single-width left margin for the completion popup ([#2728](https://github.com/helix-editor/helix/pull/2728)) - Right-align the scrollbar in the completion popup ([#2754](https://github.com/helix-editor/helix/pull/2754)) - Fix recursive macro crash and empty macro lockout ([#2902](https://github.com/helix-editor/helix/pull/2902)) - Fix backwards character deletion on other whitespaces ([#2855](https://github.com/helix-editor/helix/pull/2855)) - Add search and space/backspace bindings to view modes ([#2803](https://github.com/helix-editor/helix/pull/2803)) - Add `--vsplit` and `--hsplit` CLI arguments for opening in splits ([#2773](https://github.com/helix-editor/helix/pull/2773), [#3073](https://github.com/helix-editor/helix/pull/3073)) - Sort themes, languages and files inputs by score and name 
([#2675](https://github.com/helix-editor/helix/pull/2675)) - Highlight entire rows in ([#2939](https://github.com/helix-editor/helix/pull/2939)) - Fix backwards selection duplication widening bug ([#2945](https://github.com/helix-editor/helix/pull/2945), [#3024](https://github.com/helix-editor/helix/pull/3024)) - Skip serializing Option type DAP fields ([44f5963](https://github.com/helix-editor/helix/commit/44f5963)) - Fix required `cwd` field in DAP `RunTerminalArguments` type ([85411be](https://github.com/helix-editor/helix/commit/85411be), [#3240](https://github.com/helix-editor/helix/pull/3240)) - Add LSP `workspace/applyEdit` to client capabilities ([#3012](https://github.com/helix-editor/helix/pull/3012)) - Respect count for repeating motion ([#3057](https://github.com/helix-editor/helix/pull/3057)) - Respect count for selecting next/previous match ([#3056](https://github.com/helix-editor/helix/pull/3056)) - Respect count for tree-sitter motions ([#3058](https://github.com/helix-editor/helix/pull/3058)) - Make gutters padding optional ([#2996](https://github.com/helix-editor/helix/pull/2996)) - Support pre-filling prompts ([#2459](https://github.com/helix-editor/helix/pull/2459), [#3259](https://github.com/helix-editor/helix/pull/3259)) - Add statusline element to display file line-endings ([#3113](https://github.com/helix-editor/helix/pull/3113)) - Keep jump and file history when using `:split` ([#3031](https://github.com/helix-editor/helix/pull/3031), [#3160](https://github.com/helix-editor/helix/pull/3160)) - Make tree-sitter query `; inherits <language>` feature imperative ([#2470](https://github.com/helix-editor/helix/pull/2470)) - Indent with tabs by default ([#3095](https://github.com/helix-editor/helix/pull/3095)) - Fix non-msvc grammar compilation on Windows ([#3190](https://github.com/helix-editor/helix/pull/3190)) - Add spacer element to the statusline ([#3165](https://github.com/helix-editor/helix/pull/3165), 
[255c173](https://github.com/helix-editor/helix/commit/255c173)) - Make gutters padding automatic ([#3163](https://github.com/helix-editor/helix/pull/3163)) - Add `code` for LSP `Diagnostic` type ([#3096](https://github.com/helix-editor/helix/pull/3096)) - Add position percentage to the statusline ([#3168](https://github.com/helix-editor/helix/pull/3168)) - Add a configurable and themable statusline separator string ([#3175](https://github.com/helix-editor/helix/pull/3175)) - Use OR of all selections when `search_selection` acts on multiple selections ([#3138](https://github.com/helix-editor/helix/pull/3138)) - Add clipboard information to logs and the healthcheck ([#3271](https://github.com/helix-editor/helix/pull/3271)) - Fix align selection behavior on tabs ([#3276](https://github.com/helix-editor/helix/pull/3276)) - Fix terminal cursor shape reset ([#3289](https://github.com/helix-editor/helix/pull/3289)) - Add an `injection.include-unnamed-children` predicate to injections queries ([#3129](https://github.com/helix-editor/helix/pull/3129)) - Add a `-c`/`--config` CLI flag for specifying config file location ([#2666](https://github.com/helix-editor/helix/pull/2666)) - Detect indent-style in `:set-language` command ([#3330](https://github.com/helix-editor/helix/pull/3330)) - Fix non-deterministic highlighting ([#3275](https://github.com/helix-editor/helix/pull/3275)) - Avoid setting the stdin handle when not necessary ([#3248](https://github.com/helix-editor/helix/pull/3248), [#3379](https://github.com/helix-editor/helix/pull/3379)) - Fix indent guide styling ([#3324](https://github.com/helix-editor/helix/pull/3324)) - Fix tab highlight when tab is partially visible ([#3313](https://github.com/helix-editor/helix/pull/3313)) - Add completion for nested settings ([#3183](https://github.com/helix-editor/helix/pull/3183)) - Advertise WorkspaceSymbolClientCapabilities LSP client capability ([#3361](https://github.com/helix-editor/helix/pull/3361)) - Remove duplicate 
entries from the theme picker ([#3439](https://github.com/helix-editor/helix/pull/3439)) - Shorten output for grammar fetching and building ([#3396](https://github.com/helix-editor/helix/pull/3396)) - Add a `tabpad` option for visible tab padding whitespace characters ([#3458](https://github.com/helix-editor/helix/pull/3458)) - Make DAP external terminal provider configurable ([cb7615e](https://github.com/helix-editor/helix/commit/cb7615e)) - Use health checkmark character with shorter width ([#3505](https://github.com/helix-editor/helix/pull/3505)) - Reset document mode to normal on view focus loss ([e4c9d40](https://github.com/helix-editor/helix/commit/e4c9d40)) - Render indented code-blocks in markdown ([#3503](https://github.com/helix-editor/helix/pull/3503)) - Add WezTerm to DAP terminal provider defaults ([#3588](https://github.com/helix-editor/helix/pull/3588)) - Derive `Document` language name from `languages.toml` `name` key ([#3338](https://github.com/helix-editor/helix/pull/3338)) - Fix process spawning error handling ([#3349](https://github.com/helix-editor/helix/pull/3349)) - Don't resolve links for `:o` completion ([8a4fbf6](https://github.com/helix-editor/helix/commit/8a4fbf6)) - Recalculate completion after pasting into prompt ([e77b7d1](https://github.com/helix-editor/helix/commit/e77b7d1)) - Fix extra selections with regex anchors ([#3598](https://github.com/helix-editor/helix/pull/3598)) - Move mode transition logic to `handle_keymap_event` ([#2634](https://github.com/helix-editor/helix/pull/2634)) - Add documents to view history when using the jumplist ([#3593](https://github.com/helix-editor/helix/pull/3593)) - Prevent panic when loading tree-sitter queries ([fa1dc7e](https://github.com/helix-editor/helix/commit/fa1dc7e)) - Discard LSP publishDiagnostic when LS is not initialized ([#3403](https://github.com/helix-editor/helix/pull/3403)) - Refactor tree-sitter textobject motions as repeatable motions 
([#3264](https://github.com/helix-editor/helix/pull/3264)) - Avoid command execution hooks on closed docs ([#3613](https://github.com/helix-editor/helix/pull/3613)) - Share `restore_term` code between panic and normal exits ([#2612](https://github.com/helix-editor/helix/pull/2612)) - Show clipboard info in `--health` output ([#2947](https://github.com/helix-editor/helix/pull/2947)) - Recalculate completion when going through prompt history ([#3193](https://github.com/helix-editor/helix/pull/3193)) Themes: - Update `tokyonight` and `tokyonight_storm` themes ([#2606](https://github.com/helix-editor/helix/pull/2606)) - Update `solarized_light` themes ([#2626](https://github.com/helix-editor/helix/pull/2626)) - Fix `catpuccin` `ui.popup` theme ([#2644](https://github.com/helix-editor/helix/pull/2644)) - Update selection style of `night_owl` ([#2668](https://github.com/helix-editor/helix/pull/2668)) - Fix spelling of `catppuccin` theme ([#2713](https://github.com/helix-editor/helix/pull/2713)) - Update `base16_default`'s `ui.menu` ([#2794](https://github.com/helix-editor/helix/pull/2794)) - Add `noctis_bordo` ([#2830](https://github.com/helix-editor/helix/pull/2830)) - Add `acme` ([#2876](https://github.com/helix-editor/helix/pull/2876)) - Add `meliora` ([#2884](https://github.com/helix-editor/helix/pull/2884), [#2890](https://github.com/helix-editor/helix/pull/2890)) - Add cursorline scopes to various themes ([33d287a](https://github.com/helix-editor/helix/commit/33d287a), [#2892](https://github.com/helix-editor/helix/pull/2892), [#2915](https://github.com/helix-editor/helix/pull/2915), [#2916](https://github.com/helix-editor/helix/pull/2916), [#2918](https://github.com/helix-editor/helix/pull/2918), [#2927](https://github.com/helix-editor/helix/pull/2927), [#2925](https://github.com/helix-editor/helix/pull/2925), [#2938](https://github.com/helix-editor/helix/pull/2938), [#2962](https://github.com/helix-editor/helix/pull/2962), 
[#3054](https://github.com/helix-editor/helix/pull/3054)) - Add mode colors to various themes ([#2926](https://github.com/helix-editor/helix/pull/2926), [#2933](https://github.com/helix-editor/helix/pull/2933), [#2929](https://github.com/helix-editor/helix/pull/2929), [#3098](https://github.com/helix-editor/helix/pull/3098), [#3104](https://github.com/helix-editor/helix/pull/3104), [#3128](https://github.com/helix-editor/helix/pull/3128), [#3135](https://github.com/helix-editor/helix/pull/3135), [#3200](https://github.com/helix-editor/helix/pull/3200)) - Add `nord_light` ([#2908](https://github.com/helix-editor/helix/pull/2908)) - Update `night_owl` ([#2929](https://github.com/helix-editor/helix/pull/2929)) - Update `autumn` ([2e70985](https://github.com/helix-editor/helix/commit/2e70985), [936ed3a](https://github.com/helix-editor/helix/commit/936ed3a)) - Update `one_dark` ([#3011](https://github.com/helix-editor/helix/pull/3011)) - Add `noctis` ([#3043](https://github.com/helix-editor/helix/pull/3043), [#3128](https://github.com/helix-editor/helix/pull/3128)) - Update `boo_berry` ([#3191](https://github.com/helix-editor/helix/pull/3191)) - Update `monokai` ([#3131](https://github.com/helix-editor/helix/pull/3131)) - Add `ayu_dark`, `ayu_light`, `ayu_mirage` ([#3184](https://github.com/helix-editor/helix/pull/3184)) - Update `onelight` ([#3226](https://github.com/helix-editor/helix/pull/3226)) - Add `base16_transparent` ([#3216](https://github.com/helix-editor/helix/pull/3216), [b565fff](https://github.com/helix-editor/helix/commit/b565fff)) - Add `flatwhite` ([#3236](https://github.com/helix-editor/helix/pull/3236)) - Update `dark_plus` ([#3302](https://github.com/helix-editor/helix/pull/3302)) - Add `doom_acario_dark` ([#3308](https://github.com/helix-editor/helix/pull/3308), [#3539](https://github.com/helix-editor/helix/pull/3539)) - Add `rose_pine_moon` ([#3229](https://github.com/helix-editor/helix/pull/3229)) - Update `spacebones_light` 
([#3342](https://github.com/helix-editor/helix/pull/3342)) - Fix typos in themes ([8deaebd](https://github.com/helix-editor/helix/commit/8deaebd), [#3412](https://github.com/helix-editor/helix/pull/3412)) - Add `emacs` ([#3410](https://github.com/helix-editor/helix/pull/3410)) - Add `papercolor-light` ([#3426](https://github.com/helix-editor/helix/pull/3426), [#3470](https://github.com/helix-editor/helix/pull/3470), [#3585](https://github.com/helix-editor/helix/pull/3585)) - Add `penumbra+` ([#3398](https://github.com/helix-editor/helix/pull/3398)) - Add `fleetish` ([#3591](https://github.com/helix-editor/helix/pull/3591), [#3607](https://github.com/helix-editor/helix/pull/3607)) - Add `sonokai` ([#3595](https://github.com/helix-editor/helix/pull/3595)) - Update all themes for theme lints ([#3587](https://github.com/helix-editor/helix/pull/3587)) LSP: - V ([#2526](https://github.com/helix-editor/helix/pull/2526)) - Prisma ([#2703](https://github.com/helix-editor/helix/pull/2703)) - Clojure ([#2780](https://github.com/helix-editor/helix/pull/2780)) - WGSL ([#2872](https://github.com/helix-editor/helix/pull/2872)) - Elvish ([#2948](https://github.com/helix-editor/helix/pull/2948)) - Idris ([#2971](https://github.com/helix-editor/helix/pull/2971)) - Fortran ([#3025](https://github.com/helix-editor/helix/pull/3025)) - Gleam ([#3139](https://github.com/helix-editor/helix/pull/3139)) - Odin ([#3214](https://github.com/helix-editor/helix/pull/3214)) New languages: - V ([#2526](https://github.com/helix-editor/helix/pull/2526)) - EDoc ([#2640](https://github.com/helix-editor/helix/pull/2640)) - JSDoc ([#2650](https://github.com/helix-editor/helix/pull/2650)) - OpenSCAD ([#2680](https://github.com/helix-editor/helix/pull/2680)) - Prisma ([#2703](https://github.com/helix-editor/helix/pull/2703)) - Clojure ([#2780](https://github.com/helix-editor/helix/pull/2780)) - Starlark ([#2903](https://github.com/helix-editor/helix/pull/2903)) - Elvish 
([#2948](https://github.com/helix-editor/helix/pull/2948)) - Fortran ([#3025](https://github.com/helix-editor/helix/pull/3025)) - Ungrammar ([#3048](https://github.com/helix-editor/helix/pull/3048)) - SCSS ([#3074](https://github.com/helix-editor/helix/pull/3074)) - Go Template ([#3091](https://github.com/helix-editor/helix/pull/3091)) - Graphviz dot ([#3241](https://github.com/helix-editor/helix/pull/3241)) - Cue ([#3262](https://github.com/helix-editor/helix/pull/3262)) - Slint ([#3355](https://github.com/helix-editor/helix/pull/3355)) - Beancount ([#3297](https://github.com/helix-editor/helix/pull/3297)) - Taskwarrior ([#3468](https://github.com/helix-editor/helix/pull/3468)) - xit ([#3521](https://github.com/helix-editor/helix/pull/3521)) - ESDL ([#3526](https://github.com/helix-editor/helix/pull/3526)) - Awk ([#3528](https://github.com/helix-editor/helix/pull/3528), [#3535](https://github.com/helix-editor/helix/pull/3535)) - Pascal ([#3542](https://github.com/helix-editor/helix/pull/3542)) Updated languages and queries: - Nix ([#2472](https://github.com/helix-editor/helix/pull/2472)) - Elixir ([#2619](https://github.com/helix-editor/helix/pull/2619)) - CPON ([#2643](https://github.com/helix-editor/helix/pull/2643)) - Textobjects queries for Erlang, Elixir, Gleam ([#2661](https://github.com/helix-editor/helix/pull/2661)) - Capture rust closures as function textobjects ([4a27e2d](https://github.com/helix-editor/helix/commit/4a27e2d)) - Heex ([#2800](https://github.com/helix-editor/helix/pull/2800), [#3170](https://github.com/helix-editor/helix/pull/3170)) - Add `<<=` operator highlighting for Rust ([#2805](https://github.com/helix-editor/helix/pull/2805)) - Fix comment injection in JavaScript/TypeScript ([#2763](https://github.com/helix-editor/helix/pull/2763)) - Nickel ([#2859](https://github.com/helix-editor/helix/pull/2859)) - Add `Rakefile` and `Gemfile` to Ruby file-types ([#2875](https://github.com/helix-editor/helix/pull/2875)) - Erlang 
([#2910](https://github.com/helix-editor/helix/pull/2910), [ac669ad](https://github.com/helix-editor/helix/commit/ac669ad)) - Markdown ([#2910](https://github.com/helix-editor/helix/pull/2910), [#3108](https://github.com/helix-editor/helix/pull/3108), [#3400](https://github.com/helix-editor/helix/pull/3400)) - Bash ([#2910](https://github.com/helix-editor/helix/pull/2910)) - Rust ([#2910](https://github.com/helix-editor/helix/pull/2910), [#3397](https://github.com/helix-editor/helix/pull/3397)) - Edoc ([#2910](https://github.com/helix-editor/helix/pull/2910)) - HTML ([#2910](https://github.com/helix-editor/helix/pull/2910)) - Make ([#2910](https://github.com/helix-editor/helix/pull/2910)) - TSQ ([#2910](https://github.com/helix-editor/helix/pull/2910), [#2960](https://github.com/helix-editor/helix/pull/2960)) - git-commit ([#2910](https://github.com/helix-editor/helix/pull/2910)) - Use default fallback for Python indents ([9ae70cc](https://github.com/helix-editor/helix/commit/9ae70cc)) - Add Haskell LSP roots ([#2954](https://github.com/helix-editor/helix/pull/2954)) - Ledger ([#2936](https://github.com/helix-editor/helix/pull/2936), [#2988](https://github.com/helix-editor/helix/pull/2988)) - Nickel ([#2987](https://github.com/helix-editor/helix/pull/2987)) - JavaScript/TypeScript ([#2961](https://github.com/helix-editor/helix/pull/2961), [#3219](https://github.com/helix-editor/helix/pull/3219), [#3213](https://github.com/helix-editor/helix/pull/3213), [#3280](https://github.com/helix-editor/helix/pull/3280), [#3301](https://github.com/helix-editor/helix/pull/3301)) - GLSL ([#3051](https://github.com/helix-editor/helix/pull/3051)) - Fix locals tracking in Rust ([#3027](https://github.com/helix-editor/helix/pull/3027), [#3212](https://github.com/helix-editor/helix/pull/3212), [#3345](https://github.com/helix-editor/helix/pull/3345)) - Verilog ([#3158](https://github.com/helix-editor/helix/pull/3158)) - Ruby ([#3173](https://github.com/helix-editor/helix/pull/3173), 
[#3527](https://github.com/helix-editor/helix/pull/3527)) - Svelte ([#3147](https://github.com/helix-editor/helix/pull/3147)) - Add Elixir and HEEx comment textobjects ([#3179](https://github.com/helix-editor/helix/pull/3179)) - Python ([#3103](https://github.com/helix-editor/helix/pull/3103), [#3201](https://github.com/helix-editor/helix/pull/3201), [#3284](https://github.com/helix-editor/helix/pull/3284)) - PHP ([#3317](https://github.com/helix-editor/helix/pull/3317)) - Latex ([#3370](https://github.com/helix-editor/helix/pull/3370)) - Clojure ([#3387](https://github.com/helix-editor/helix/pull/3387)) - Swift ([#3461](https://github.com/helix-editor/helix/pull/3461)) - C# ([#3480](https://github.com/helix-editor/helix/pull/3480), [#3494](https://github.com/helix-editor/helix/pull/3494)) - Org ([#3489](https://github.com/helix-editor/helix/pull/3489)) - Elm ([#3497](https://github.com/helix-editor/helix/pull/3497)) - Dart ([#3419](https://github.com/helix-editor/helix/pull/3419)) - Julia ([#3507](https://github.com/helix-editor/helix/pull/3507)) - Fix Rust textobjects ([#3590](https://github.com/helix-editor/helix/pull/3590)) - C ([00d88e5](https://github.com/helix-editor/helix/commit/00d88e5)) - Update Rust ([0ef0ef9](https://github.com/helix-editor/helix/commit/0ef0ef9)) Packaging: - Add `rust-analyzer` to Nix flake devShell ([#2739](https://github.com/helix-editor/helix/pull/2739)) - Add cachix information to the Nix flake ([#2999](https://github.com/helix-editor/helix/pull/2999)) - Pass makeWrapperArgs to wrapProgram in the Nix flake ([#3003](https://github.com/helix-editor/helix/pull/3003)) - Add a way to override which grammars are built by Nix ([#3141](https://github.com/helix-editor/helix/pull/3141)) - Add a GitHub actions release for `aarch64-macos` ([#3137](https://github.com/helix-editor/helix/pull/3137)) - Add shell auto-completions for Elvish ([#3331](https://github.com/helix-editor/helix/pull/3331)) # 22.05 (2022-05-28) An even bigger shout out than 
usual to all the contributors - we had a whopping 110 contributors in this release! That's more than double the number of contributors than in the last release! Check out some of the highlights in the [news section](https://helix-editor.com/news/release-22-05-highlights/). As usual, the following is a summary of each of the changes since the last release. For the full log, check out the [git log](https://github.com/helix-editor/helix/compare/22.03..22.05). Breaking Changes: - Removed `C-j`, `C-k` bindings from file picker ([#1792](https://github.com/helix-editor/helix/pull/1792)) - Replaced `C-f` with `C-d` and `C-b` with `C-u` bindings in file picker ([#1792](https://github.com/helix-editor/helix/pull/1792)) - `A-hjkl` bindings have been moved to `A-pion` ([#2205](https://github.com/helix-editor/helix/pull/2205)) - `A-Left`/`A-Right` have been moved to `C-Left`/`C-Right` ([#2193](https://github.com/helix-editor/helix/pull/2193)) Features: - The indentation mechanism has been reworked ([#1562](https://github.com/helix-editor/helix/pull/1562), [#1908](https://github.com/helix-editor/helix/pull/1908)) - Configurable gutters ([#1967](https://github.com/helix-editor/helix/pull/1967)) - Support for local language configuration ([#1249](https://github.com/helix-editor/helix/pull/1249)) - Configurable themed rulers ([#2060](https://github.com/helix-editor/helix/pull/2060)) - Render visible whitespace ([e6b865e](https://github.com/helix-editor/helix/commit/e6b865e), [#2322](https://github.com/helix-editor/helix/pull/2322), [#2331](https://github.com/helix-editor/helix/pull/2331)) Commands: - Paragraph motion and textobject (`]p`, `[p`) ([#1627](https://github.com/helix-editor/helix/pull/1627), [#1956](https://github.com/helix-editor/helix/pull/1956), [#1969](https://github.com/helix-editor/helix/pull/1969), [#1992](https://github.com/helix-editor/helix/pull/1992), [#2226](https://github.com/helix-editor/helix/pull/2226)) - `:buffer-next`, `:buffer-previous` 
([#1940](https://github.com/helix-editor/helix/pull/1940)) - `:set-language` to set the buffer's language ([#1866](https://github.com/helix-editor/helix/pull/1866), [#1996](https://github.com/helix-editor/helix/pull/1996)) - Command for picking files from the current working directory (`Space-F`) ([#1600](https://github.com/helix-editor/helix/pull/1600), [#2308](https://github.com/helix-editor/helix/pull/2308)) - `:write!` which creates non-existent subdirectories ([#1839](https://github.com/helix-editor/helix/pull/1839)) - Add `m` textobject that selects closest surrounding pair ([de15d70](https://github.com/helix-editor/helix/commit/de15d70), [76175db](https://github.com/helix-editor/helix/commit/76175db)) - `:pipe` typable command for piping selections ([#1972](https://github.com/helix-editor/helix/pull/1972)) - `extend_line_above` which extends to previous lines ([#2117](https://github.com/helix-editor/helix/pull/2117)) - `set_line_ending` which replaces line endings ([#1871](https://github.com/helix-editor/helix/pull/1871)) - `:get-option` for getting the current value of an option (`:get`) ([#2231](https://github.com/helix-editor/helix/pull/2231)) - `:run-shell-command` which does not interact with selections ([#1682](https://github.com/helix-editor/helix/pull/1682)) - `:reflow` which hard-wraps selected text ([#2128](https://github.com/helix-editor/helix/pull/2128)) - `commit_undo_checkpoint` which adds an undo checkpoint ([#2115](https://github.com/helix-editor/helix/pull/2115)) - `:log-open` which opens the log file ([#2422](https://github.com/helix-editor/helix/pull/2422)) - `transpose_view` which transposes window splits ([#2461](https://github.com/helix-editor/helix/pull/2461)) - View-swapping: `swap_view_right`, `swap_view_left`, `swap_view_up`, `swap_view_down` ([#2445](https://github.com/helix-editor/helix/pull/2445)) - `shrink_to_line_bounds` which shrinks selections to line-bounds ([#2450](https://github.com/helix-editor/helix/pull/2450)) Usability 
improvements and fixes: - Handle broken pipes when piping `hx --health` through `head` ([#1876](https://github.com/helix-editor/helix/pull/1876)) - Fix for `copy_selection` on newlines ([ab7885e](https://github.com/helix-editor/helix/commit/ab7885e), [236c6b7](https://github.com/helix-editor/helix/commit/236c6b7)) - Use `win32yank` clipboard provider on WSL2 ([#1912](https://github.com/helix-editor/helix/pull/1912)) - Jump to the next number on the line before incrementing ([#1778](https://github.com/helix-editor/helix/pull/1778)) - Fix start position of next search ([#1904](https://github.com/helix-editor/helix/pull/1904)) - Use check and X marks for health check output ([#1918](https://github.com/helix-editor/helix/pull/1918)) - Clear terminal after switching to alternate screens ([#1944](https://github.com/helix-editor/helix/pull/1944)) - Fix `toggle_comments` command on multiple selections ([#1882](https://github.com/helix-editor/helix/pull/1882)) - Apply `ui.gutter` theming to empty gutter spans ([#2032](https://github.com/helix-editor/helix/pull/2032)) - Use checkboxes in `hx --health` output ([#1947](https://github.com/helix-editor/helix/pull/1947)) - Pass unmapped keys through prompt regardless of modifiers ([764adbd](https://github.com/helix-editor/helix/commit/764adbd)) - LSP: pull formatting options from config ([c18de0e](https://github.com/helix-editor/helix/commit/c18de0e)) - LSP: provide `rootPath` ([84e799f](https://github.com/helix-editor/helix/commit/84e799f)) - LSP: implement `workspace_folders` ([8adf0c1](https://github.com/helix-editor/helix/commit/8adf0c1)) - LSP: fix auto-import ([#2088](https://github.com/helix-editor/helix/pull/2088)) - Send active diagnostic to LSP when requesting code actions ([#2005](https://github.com/helix-editor/helix/pull/2005)) - Prevent panic when parsing malformed LSP `PublishDiagnostic` ([#2160](https://github.com/helix-editor/helix/pull/2160)) - Restore document state on completion cancel 
([#2096](https://github.com/helix-editor/helix/pull/2096)) - Only merge top-level array when merging `languages.toml` ([#2145](https://github.com/helix-editor/helix/pull/2145), [#2215](https://github.com/helix-editor/helix/pull/2215)) - Fix open on multiline selection ([#2161](https://github.com/helix-editor/helix/pull/2161)) - Allow re-binding `0` if it is not used in a count ([#2174](https://github.com/helix-editor/helix/pull/2174)) - Fix `ctrl-u` behavior in insert mode ([#1957](https://github.com/helix-editor/helix/pull/1957)) - Check LSP rename capabilities before sending rename action ([#2203](https://github.com/helix-editor/helix/pull/2203)) - Register the `publish_diagnostics` LSP capability ([#2241](https://github.com/helix-editor/helix/pull/2241)) - Fix paste direction for typed paste commands ([#2288](https://github.com/helix-editor/helix/pull/2288)) - Improve handling of buffer-close ([#1397](https://github.com/helix-editor/helix/pull/1397)) - Extend the tutor file ([#2133](https://github.com/helix-editor/helix/pull/2133)) - Treat slashes as word separators in prompts ([#2315](https://github.com/helix-editor/helix/pull/2315)) - Auto-complete directory members ([#1682](https://github.com/helix-editor/helix/pull/1682)) - Allow disabling format-on-save as a global editor setting ([#2321](https://github.com/helix-editor/helix/pull/2321)) - Wrap command palette in overlay ([#2378](https://github.com/helix-editor/helix/pull/2378)) - Prevent selections from collapsing when inserting newlines ([#2414](https://github.com/helix-editor/helix/pull/2414)) - Allow configuration of LSP request timeout ([#2405](https://github.com/helix-editor/helix/pull/2405)) - Use debug console on Windows for DAP terminal ([#2294](https://github.com/helix-editor/helix/pull/2294)) - Exclude cursor when deleting with `C-w` in insert mode ([#2431](https://github.com/helix-editor/helix/pull/2431)) - Prevent panics from LSP parsing errors 
([7ae6cad](https://github.com/helix-editor/helix/commit/7ae6cad)) - Prevent panics from LSP responses without requests ([#2475](https://github.com/helix-editor/helix/pull/2475)) - Fix scroll rate for documentation popups ([#2497](https://github.com/helix-editor/helix/pull/2497)) - Support inserting into prompts from registers ([#2458](https://github.com/helix-editor/helix/pull/2458)) - Separate theme scopes for diagnostic types ([#2437](https://github.com/helix-editor/helix/pull/2437)) - Use `ui.menu` instead of `ui.statusline` for command completion menu theming ([82fb217](https://github.com/helix-editor/helix/commit/82fb217)) - Fix panic when reloading a shrunk file ([#2506](https://github.com/helix-editor/helix/pull/2506)) - Add theme key for picker separator ([#2523](https://github.com/helix-editor/helix/pull/2523)) Themes: - Remove `ui.text` background from dark_plus ([#1950](https://github.com/helix-editor/helix/pull/1950)) - Add `boo_berry` ([#1962](https://github.com/helix-editor/helix/pull/1962)) - Update `dark_plus` markup colors ([#1989](https://github.com/helix-editor/helix/pull/1989)) - Update `dark_plus` `tag` and `ui.menu.selected` colors ([#2014](https://github.com/helix-editor/helix/pull/2014)) - Add `dracula_at_night` ([#2008](https://github.com/helix-editor/helix/pull/2008)) - Improve `dracula` selection theming ([#2077](https://github.com/helix-editor/helix/pull/2077)) - Remove dim attribute on `onedark` line-number gutter ([#2155](https://github.com/helix-editor/helix/pull/2155)) - Add `tokyonight` ([#2162](https://github.com/helix-editor/helix/pull/2162)) - Use border colors from the original `dark_plus` theme ([#2186](https://github.com/helix-editor/helix/pull/2186)) - Add `autumn` ([#2212](https://github.com/helix-editor/helix/pull/2212), [#2270](https://github.com/helix-editor/helix/pull/2270), [#2531](https://github.com/helix-editor/helix/pull/2531)) - Add `tokyonight_storm` ([#2240](https://github.com/helix-editor/helix/pull/2240)) - Add 
`pop-dark` ([#2189](https://github.com/helix-editor/helix/pull/2189)) - Fix `base16_terminal` theme using incorrect ansi-color ([#2279](https://github.com/helix-editor/helix/pull/2279)) - Add `onelight` ([#2287](https://github.com/helix-editor/helix/pull/2287), [#2323](https://github.com/helix-editor/helix/pull/2323)) - Add `ui.virtual` scopes to `onedark` theme ([3626e38](https://github.com/helix-editor/helix/commit/3626e38)) - Add `night_owl` ([#2330](https://github.com/helix-editor/helix/pull/2330)) - Use yellow foreground and red background for `monokai_pro_spectrum` ([#2433](https://github.com/helix-editor/helix/pull/2433)) - Add `snazzy` ([#2473](https://github.com/helix-editor/helix/pull/2473)) - Update `dark_plus` constructor color ([8e8d4ba](https://github.com/helix-editor/helix/commit/8e8d4ba)) - Add `ui.menu` to the default theme ([e7e13dc](https://github.com/helix-editor/helix/commit/e7e13dc)) - Add `ui.menu` to any themes missing the key ([9be810f](https://github.com/helix-editor/helix/commit/9be810f)) - Add `catppuccin` ([#2546](https://github.com/helix-editor/helix/pull/2546), [7160e74](https://github.com/helix-editor/helix/commit/7160e74)) LSP: - Use texlab for latex ([#1922](https://github.com/helix-editor/helix/pull/1922)) - HTML ([#2018](https://github.com/helix-editor/helix/pull/2018)) - JSON ([#2024](https://github.com/helix-editor/helix/pull/2024)) - CSS ([#2025](https://github.com/helix-editor/helix/pull/2025)) - PHP ([#2031](https://github.com/helix-editor/helix/pull/2031)) - Swift ([#2033](https://github.com/helix-editor/helix/pull/2033)) - OCaml ([#2035](https://github.com/helix-editor/helix/pull/2035)) - Vue ([#2043](https://github.com/helix-editor/helix/pull/2043)) - Yaml ([#2234](https://github.com/helix-editor/helix/pull/2234)) - Vala ([#2243](https://github.com/helix-editor/helix/pull/2243)) - TOML ([#2302](https://github.com/helix-editor/helix/pull/2302)) - Java ([#2511](https://github.com/helix-editor/helix/pull/2511)) - Lua 
([#2560](https://github.com/helix-editor/helix/pull/2560)) - Verilog ([#2552](https://github.com/helix-editor/helix/pull/2552)) New Languages: - JSX ([#1906](https://github.com/helix-editor/helix/pull/1906), [a24fb17](https://github.com/helix-editor/helix/commit/a24fb17), [855e438](https://github.com/helix-editor/helix/commit/855e438), [#1921](https://github.com/helix-editor/helix/pull/1921)) - Rusty Object Notation (RON) ([#1925](https://github.com/helix-editor/helix/pull/1925)) - R and R Markdown ([#1998](https://github.com/helix-editor/helix/pull/1998)) - Swift ([#2033](https://github.com/helix-editor/helix/pull/2033)) - EJS and ERB ([#2055](https://github.com/helix-editor/helix/pull/2055)) - EEx ([9d095e0](https://github.com/helix-editor/helix/commit/9d095e0)) - HEEx ([4836bb3](https://github.com/helix-editor/helix/commit/4836bb3), [#2149](https://github.com/helix-editor/helix/pull/2149)) - SQL ([#2097](https://github.com/helix-editor/helix/pull/2097)) - GDScript ([#1985](https://github.com/helix-editor/helix/pull/1985)) - Nickel ([#2173](https://github.com/helix-editor/helix/pull/2173), [#2320](https://github.com/helix-editor/helix/pull/2320)) - `go.mod` and `go.work` ([#2197](https://github.com/helix-editor/helix/pull/2197)) - Nushell ([#2225](https://github.com/helix-editor/helix/pull/2225)) - Vala ([#2243](https://github.com/helix-editor/helix/pull/2243)) - Hare ([#2289](https://github.com/helix-editor/helix/pull/2289), [#2480](https://github.com/helix-editor/helix/pull/2480)) - DeviceTree ([#2329](https://github.com/helix-editor/helix/pull/2329)) - Cairo ([7387905](https://github.com/helix-editor/helix/commit/7387905)) - CPON ([#2355](https://github.com/helix-editor/helix/pull/2355), [#2424](https://github.com/helix-editor/helix/pull/2424)) - git-ignore ([#2397](https://github.com/helix-editor/helix/pull/2397)) - git-attributes ([#2397](https://github.com/helix-editor/helix/pull/2397)) - Odin ([#2399](https://github.com/helix-editor/helix/pull/2399), 
[#2464](https://github.com/helix-editor/helix/pull/2464)) - Meson ([#2314](https://github.com/helix-editor/helix/pull/2314)) - SSH Client Config ([#2498](https://github.com/helix-editor/helix/pull/2498)) - Scheme ([d25bae8](https://github.com/helix-editor/helix/commit/d25bae8)) - Verilog ([#2552](https://github.com/helix-editor/helix/pull/2552)) Updated Languages and Queries: - Erlang ([e2a5071](https://github.com/helix-editor/helix/commit/e2a5071), [#2149](https://github.com/helix-editor/helix/pull/2149), [82da9bd](https://github.com/helix-editor/helix/commit/82da9bd)) - Elixir ([1819478](https://github.com/helix-editor/helix/commit/1819478), [8c3c901](https://github.com/helix-editor/helix/commit/8c3c901), [4ac94a5](https://github.com/helix-editor/helix/commit/4ac94a5)) - Gleam ([7cd6050](https://github.com/helix-editor/helix/commit/7cd6050), [45dd540](https://github.com/helix-editor/helix/commit/45dd540)) - Bash ([#1917](https://github.com/helix-editor/helix/pull/1917)) - JavaScript ([#2140](https://github.com/helix-editor/helix/pull/2140)) - Ruby textobject queries ([#2143](https://github.com/helix-editor/helix/pull/2143)) - Fix Golang textobject queries ([#2153](https://github.com/helix-editor/helix/pull/2153)) - Add more bash and HCL file extensions ([#2201](https://github.com/helix-editor/helix/pull/2201)) - Divide HCL and tfvars into separate languages ([#2244](https://github.com/helix-editor/helix/pull/2244)) - Use JavaScript for `cjs` files ([#2387](https://github.com/helix-editor/helix/pull/2387)) - Use Perl for `t` files ([#2395](https://github.com/helix-editor/helix/pull/2395)) - Use `markup.list` scopes for lists ([#2401](https://github.com/helix-editor/helix/pull/2401)) - Use PHP for `inc` files ([#2440](https://github.com/helix-editor/helix/pull/2440)) - Improve Rust textobjects ([#2494](https://github.com/helix-editor/helix/pull/2494), [10463fe](https://github.com/helix-editor/helix/commit/10463fe)) - Python 
([#2451](https://github.com/helix-editor/helix/pull/2451)) Packaging: - Use `builtins.fromTOML` in Nix Flake on Nix 2.6+ ([#1892](https://github.com/helix-editor/helix/pull/1892)) - Shell auto-completion files are now available ([#2022](https://github.com/helix-editor/helix/pull/2022)) - Create an AppImage on release ([#2089](https://github.com/helix-editor/helix/pull/2089)) # 22.03 (2022-03-28) A big shout out to all the contributors! We had 51 contributors in this release. This release is particularly large and featureful. Check out some of the highlights in the [news section](https://helix-editor.com/news/release-22-03-highlights/). As usual, the following is a summary of each of the changes since the last release. For the full log, check out the [git log](https://github.com/helix-editor/helix/compare/v0.6.0..22.03). Breaking changes: - LSP config now lives under `editor.lsp` ([#1868](https://github.com/helix-editor/helix/pull/1868)) - Expand-selection was moved from `]o` to `Alt-h` ([#1495](https://github.com/helix-editor/helix/pull/1495)) Features: - Experimental Debug Adapter Protocol (DAP) support ([#574](https://github.com/helix-editor/helix/pull/574)) - Primary cursor shape may now be customized per mode ([#1154](https://github.com/helix-editor/helix/pull/1154)) - Overhaul incremental highlights and enable combined injections ([`6728344..4080341`](https://github.com/helix-editor/helix/compare/6728344..4080341)) - Allow specifying file start position ([#445](https://github.com/helix-editor/helix/pull/445), [#1676](https://github.com/helix-editor/helix/pull/1676)) - Dynamic line numbers ([#1522](https://github.com/helix-editor/helix/pull/1522)) - Show an info box with the contents of registers ([#980](https://github.com/helix-editor/helix/pull/980)) - Wrap-around behavior during search is now configurable ([#1516](https://github.com/helix-editor/helix/pull/1516)) - Tree-sitter textobjects motions for classes, functions, and parameters 
([#1619](https://github.com/helix-editor/helix/pull/1619), [#1708](https://github.com/helix-editor/helix/pull/1708), [#1805](https://github.com/helix-editor/helix/pull/1805)) - Command palette: a picker for available commands ([#1400](https://github.com/helix-editor/helix/pull/1400)) - LSP `workspace/configuration` and `workspace/didChangeConfiguration` support ([#1684](https://github.com/helix-editor/helix/pull/1684)) - `hx --health [LANG]` command ([#1669](https://github.com/helix-editor/helix/pull/1669)) - Refactor of the tree-sitter grammar system ([#1659](https://github.com/helix-editor/helix/pull/1659)) - All submodules have been removed - New `hx --grammar {fetch|build}` flags for fetching and building tree-sitter grammars - A custom grammar selection may now be declared with the `use-grammars` key in `languages.toml` Commands: - `:cquit!` - quit forcefully with a non-zero exit-code ([#1414](https://github.com/helix-editor/helix/pull/1414)) - `shrink_selection` - shrink the selection to a child tree-sitter node (`Alt-j`, [#1340](https://github.com/helix-editor/helix/pull/1340)) - `:tree-sitter-subtree` - show the tree-sitter subtree under the primary selection ([#1453](https://github.com/helix-editor/helix/pull/1453), [#1524](https://github.com/helix-editor/helix/pull/1524)) - Add `Alt-Backspace`, `Alt-<`, `Alt->`, and `Ctrl-j` to insert mode ([#1441](https://github.com/helix-editor/helix/pull/1441)) - `select_next_sibling`, `select_prev_sibling` - select next and previous tree-sitter nodes (`Alt-l` and `Alt-h`, [#1495](https://github.com/helix-editor/helix/pull/1495)) - `:buffer-close-all`, `:buffer-close-all!`, `:buffer-close-others`, and `:buffer-close-others!` ([#1677](https://github.com/helix-editor/helix/pull/1677)) - `:vsplit-new` and `:hsplit-new` - open vertical and horizontal splits with new scratch buffers ([#1763](https://github.com/helix-editor/helix/pull/1763)) - `:open-config` to open the config file and `:refresh-config` to refresh config 
after changes ([#1771](https://github.com/helix-editor/helix/pull/1771), [#1803](https://github.com/helix-editor/helix/pull/1803)) Usability improvements and fixes: - Prevent `:cquit` from ignoring unsaved changes ([#1414](https://github.com/helix-editor/helix/pull/1414)) - Scrolling view keeps selections ([#1420](https://github.com/helix-editor/helix/pull/1420)) - Only use shellwords parsing on unix platforms ([`7767703`](https://github.com/helix-editor/helix/commit/7767703)) - Fix slash in search selector status message ([#1449](https://github.com/helix-editor/helix/pull/1449)) - Use `std::path::MAIN_SEPARATOR` to determine completion ([`3e4f815`](https://github.com/helix-editor/helix/commit/3e4f815)) - Expand to current node with `expand_selection` when the node has no children ([#1454](https://github.com/helix-editor/helix/pull/1454)) - Add vertical and horizontal splits to the buffer picker ([#1502](https://github.com/helix-editor/helix/pull/1502)) - Use the correct language ID for JavaScript & TypeScript LSP ([#1466](https://github.com/helix-editor/helix/pull/1466)) - Run format command for all buffers being written ([#1444](https://github.com/helix-editor/helix/pull/1444)) - Fix panics during resizing ([#1408](https://github.com/helix-editor/helix/pull/1408)) - Fix auto-pairs with CRLF ([#1470](https://github.com/helix-editor/helix/pull/1470)) - Fix picker scrolling when the bottom is reached ([#1567](https://github.com/helix-editor/helix/pull/1567)) - Use markup themes for the markdown component ([#1363](https://github.com/helix-editor/helix/pull/1363)) - Automatically commit changes to history if not in insert mode ([`2a7ae96`](https://github.com/helix-editor/helix/commit/2a7ae96)) - Render code-actions as a menu and add padding to popup ([`094a0aa`](https://github.com/helix-editor/helix/commit/094a0aa)) - Only render menu scrollbar if the menu doesn't fit ([`f10a06f`](https://github.com/helix-editor/helix/commit/f10a06f), 
[`36b975c`](https://github.com/helix-editor/helix/commit/36b975c)) - Parse git revision instead of tag for version ([`d3221b0`](https://github.com/helix-editor/helix/commit/d3221b0), [#1674](https://github.com/helix-editor/helix/pull/1674)) - Fix incorrect last modified buffer ([#1621](https://github.com/helix-editor/helix/pull/1621)) - Add `PageUp`, `PageDown`, `Ctrl-u`, `Ctrl-d`, `Home`, `End` bindings to the file picker ([#1612](https://github.com/helix-editor/helix/pull/1612)) - Display buffer IDs in the buffer picker ([#1134](https://github.com/helix-editor/helix/pull/1134)) - Allow multi-line prompt documentation ([`2af0432`](https://github.com/helix-editor/helix/commit/2af0432)) - Ignore the `.git` directory from the file picker ([#1604](https://github.com/helix-editor/helix/pull/1604)) - Allow separate styling for markup heading levels ([#1618](https://github.com/helix-editor/helix/pull/1618)) - Automatically close popups ([#1285](https://github.com/helix-editor/helix/pull/1285)) - Allow auto-pairs tokens to be configured ([#1624](https://github.com/helix-editor/helix/pull/1624)) - Don't indent empty lines in `indent` command ([#1653](https://github.com/helix-editor/helix/pull/1653)) - Ignore `Enter` keypress when a menu has no selection ([#1704](https://github.com/helix-editor/helix/pull/1704)) - Show errors when surround deletions and replacements fail ([#1709](https://github.com/helix-editor/helix/pull/1709)) - Show infobox hints for `mi` and `ma` ([#1686](https://github.com/helix-editor/helix/pull/1686)) - Highlight matching text in file picker suggestions ([#1635](https://github.com/helix-editor/helix/pull/1635)) - Allow capturing multiple nodes in textobject queries ([#1611](https://github.com/helix-editor/helix/pull/1611)) - Make repeat operator work with completion edits ([#1640](https://github.com/helix-editor/helix/pull/1640)) - Save to the jumplist when searching ([#1718](https://github.com/helix-editor/helix/pull/1718)) - Fix bug with 
auto-replacement of components in compositor ([#1711](https://github.com/helix-editor/helix/pull/1711)) - Use Kakoune logic for `align_selection` ([#1675](https://github.com/helix-editor/helix/pull/1675)) - Fix `follows` for `nixpkgs` in `flake.nix` ([#1729](https://github.com/helix-editor/helix/pull/1729)) - Performance improvements for the picker ([`78fba86`](https://github.com/helix-editor/helix/commit/78fba86)) - Rename infobox theme scopes ([#1741](https://github.com/helix-editor/helix/pull/1741)) - Fallback to broader scopes if a theme scope is not found ([#1714](https://github.com/helix-editor/helix/pull/1714)) - Add arrow-keys bindings for tree-sitter sibling selection commands ([#1724](https://github.com/helix-editor/helix/pull/1724)) - Fix a bug in LSP when creating a file in a folder that does not exist ([#1775](https://github.com/helix-editor/helix/pull/1775)) - Use `^` and `$` regex location assertions for search ([#1793](https://github.com/helix-editor/helix/pull/1793)) - Fix register names in `insert_register` command ([#1751](https://github.com/helix-editor/helix/pull/1751)) - Perform extend line for all selections ([#1804](https://github.com/helix-editor/helix/pull/1804)) - Prevent panic when moving in an empty picker ([#1786](https://github.com/helix-editor/helix/pull/1786)) - Fix line number calculations for non CR/CRLF line breaks ([`b4a282f`](https://github.com/helix-editor/helix/commit/b4a282f), [`0b96201`](https://github.com/helix-editor/helix/commit/0b96201)) - Deploy documentation for `master` builds separately from release docs ([#1783](https://github.com/helix-editor/helix/pull/1783)) Themes: - Add everforest_light ([#1412](https://github.com/helix-editor/helix/pull/1412)) - Add gruvbox_light ([#1509](https://github.com/helix-editor/helix/pull/1509)) - Add modified background to dracula popup ([#1434](https://github.com/helix-editor/helix/pull/1434)) - Markup support for monokai pro themes 
([#1553](https://github.com/helix-editor/helix/pull/1553)) - Markup support for dracula theme ([#1554](https://github.com/helix-editor/helix/pull/1554)) - Add `tag` to gruvbox theme ([#1555](https://github.com/helix-editor/helix/pull/1555)) - Markup support for remaining themes ([#1525](https://github.com/helix-editor/helix/pull/1525)) - Serika light and dark ([#1566](https://github.com/helix-editor/helix/pull/1566)) - Fix rose_pine and rose_pine_dawn popup background color ([#1606](https://github.com/helix-editor/helix/pull/1606)) - Fix hover menu item text color in base16 themes ([#1668](https://github.com/helix-editor/helix/pull/1668)) - Update markup heading styles for everforest ([#1687](https://github.com/helix-editor/helix/pull/1687)) - Update markup heading styles for rose_pine themes ([#1706](https://github.com/helix-editor/helix/pull/1706)) - Style bogster cursors ([`6a6a9ab`](https://github.com/helix-editor/helix/commit/6a6a9ab)) - Fix `ui.selection` in rose_pine themes ([#1716](https://github.com/helix-editor/helix/pull/1716)) - Use distinct colors for cursor and matched pair in gruvbox ([#1791](https://github.com/helix-editor/helix/pull/1791)) - Improve colors for `ui.cursor.match` capture in some themes ([#1862](https://github.com/helix-editor/helix/pull/1862)) LSP: - Add default language server for JavaScript ([#1457](https://github.com/helix-editor/helix/pull/1457)) - Add `pom.xml` as maven root directory marker ([#1496](https://github.com/helix-editor/helix/pull/1496)) - Haskell LSP ([#1556](https://github.com/helix-editor/helix/pull/1556)) - C-sharp LSP support ([#1788](https://github.com/helix-editor/helix/pull/1788)) - Clean up Julia LSP config ([#1811](https://github.com/helix-editor/helix/pull/1811)) New Languages: - llvm-mir ([#1398](https://github.com/helix-editor/helix/pull/1398)) - regex ([#1362](https://github.com/helix-editor/helix/pull/1362)) - Make ([#1433](https://github.com/helix-editor/helix/pull/1433), 
[#1661](https://github.com/helix-editor/helix/pull/1661)) - git-config ([#1426](https://github.com/helix-editor/helix/pull/1426)) - Lean ([#1422](https://github.com/helix-editor/helix/pull/1422)) - Elm ([#1514](https://github.com/helix-editor/helix/pull/1514)) - GraphQL ([#1515](https://github.com/helix-editor/helix/pull/1515)) - Twig ([#1602](https://github.com/helix-editor/helix/pull/1602)) - Rescript ([#1616](https://github.com/helix-editor/helix/pull/1616), [#1863](https://github.com/helix-editor/helix/pull/1863)) - Erlang ([#1657](https://github.com/helix-editor/helix/pull/1657)) - Kotlin ([#1689](https://github.com/helix-editor/helix/pull/1689)) - HCL ([#1705](https://github.com/helix-editor/helix/pull/1705), [#1726](https://github.com/helix-editor/helix/pull/1726)) - Org ([#1845](https://github.com/helix-editor/helix/pull/1845)) - Solidity ([#1848](https://github.com/helix-editor/helix/pull/1848), [#1854](https://github.com/helix-editor/helix/pull/1854)) Updated Languages and Queries: - Textobject and indent queries for c and cpp ([#1293](https://github.com/helix-editor/helix/pull/1293)) - Fix null and boolean constant highlights for nix ([#1428](https://github.com/helix-editor/helix/pull/1428)) - Capture markdown link text as `markup.link.text` ([#1456](https://github.com/helix-editor/helix/pull/1456)) - Update and re-enable Haskell ([#1417](https://github.com/helix-editor/helix/pull/1417), [#1520](https://github.com/helix-editor/helix/pull/1520)) - Update Go with generics support ([`ddbf036`](https://github.com/helix-editor/helix/commit/ddbf036)) - Use `tree-sitter-css` for SCSS files ([#1507](https://github.com/helix-editor/helix/pull/1507)) - Update Zig ([#1501](https://github.com/helix-editor/helix/pull/1501)) - Update PHP ([#1521](https://github.com/helix-editor/helix/pull/1521)) - Expand language support for comment injections ([#1527](https://github.com/helix-editor/helix/pull/1527)) - Use tree-sitter-bash for `.zshrc` and `.bashrc` 
([`7d51042`](https://github.com/helix-editor/helix/commit/7d51042)) - Use tree-sitter-bash for `.bash_profile` ([#1571](https://github.com/helix-editor/helix/pull/1571)) - Use tree-sitter-bash for `.zshenv` and ZSH files ([#1574](https://github.com/helix-editor/helix/pull/1574)) - IEx ([#1576](https://github.com/helix-editor/helix/pull/1576)) - Textobject queries for PHP ([#1601](https://github.com/helix-editor/helix/pull/1601)) - C-sharp highlight query improvements ([#1795](https://github.com/helix-editor/helix/pull/1795)) - Git commit performance has been improved on large verbose commits ([#1838](https://github.com/helix-editor/helix/pull/1838)) Packaging: - The submodules system has been replaced with command-line flags for fetching and building tree-sitter grammars ([#1659](https://github.com/helix-editor/helix/pull/1659)) - Flake outputs are pushed to Cachix on each push to `master` ([#1721](https://github.com/helix-editor/helix/pull/1721)) - Update flake's `nix-cargo-integration` to depend on `dream2nix` ([#1758](https://github.com/helix-editor/helix/pull/1758)) # 0.6.0 (2022-01-04) Happy new year and a big shout out to all the contributors! We had 55 contributors in this release. 
Helix has popped up in DPorts and Fedora Linux via COPR ([#1270](https://github.com/helix-editor/helix/pull/1270)) As usual the following is a brief summary, refer to the git history for a full log: Breaking changes: - fix: Normalize backtab into shift-tab Features: - Macros ([#1234](https://github.com/helix-editor/helix/pull/1234)) - Add reverse search functionality ([#958](https://github.com/helix-editor/helix/pull/958)) - Allow keys to be mapped to sequences of commands ([#589](https://github.com/helix-editor/helix/pull/589)) - Make it possible to keybind TypableCommands ([#1169](https://github.com/helix-editor/helix/pull/1169)) - Detect workspace root using language markers ([#1370](https://github.com/helix-editor/helix/pull/1370)) - Add WORD textobject ([#991](https://github.com/helix-editor/helix/pull/991)) - Add LSP rename_symbol (`space-r`) ([#1011](https://github.com/helix-editor/helix/pull/1011)) - Added workspace_symbol_picker ([#1041](https://github.com/helix-editor/helix/pull/1041)) - Detect filetype from shebang line ([#1001](https://github.com/helix-editor/helix/pull/1001)) - Allow piping from stdin into a buffer on startup ([#996](https://github.com/helix-editor/helix/pull/996)) - Add auto pairs for same-char pairs ([#1219](https://github.com/helix-editor/helix/pull/1219)) - Update settings at runtime ([#798](https://github.com/helix-editor/helix/pull/798)) - Enable thin LTO ([`cccc194`](https://github.com/helix-editor/helix/commit/cccc194)) Commands: - `:wonly` -- window only ([#1057](https://github.com/helix-editor/helix/pull/1057)) - buffer-close (`:bc`, `:bclose`) ([#1035](https://github.com/helix-editor/helix/pull/1035)) - Add `:` and `:goto ` commands ([#1128](https://github.com/helix-editor/helix/pull/1128)) - `:sort` command ([#1288](https://github.com/helix-editor/helix/pull/1288)) - Add m textobject for pair under cursor ([#961](https://github.com/helix-editor/helix/pull/961)) - Implement "Goto next buffer / Goto previous buffer" commands 
([#950](https://github.com/helix-editor/helix/pull/950)) - Implement "Goto last modification" command ([#1067](https://github.com/helix-editor/helix/pull/1067)) - Add trim_selections command ([#1092](https://github.com/helix-editor/helix/pull/1092)) - Add movement shortcut for history ([#1088](https://github.com/helix-editor/helix/pull/1088)) - Add command to inc/dec number under cursor ([#1027](https://github.com/helix-editor/helix/pull/1027)) - Add support for dates for increment/decrement - Align selections (`&`) ([#1101](https://github.com/helix-editor/helix/pull/1101)) - Implement no-yank delete/change ([#1099](https://github.com/helix-editor/helix/pull/1099)) - Implement black hole register ([#1165](https://github.com/helix-editor/helix/pull/1165)) - `gf` as goto_file (`gf`) ([#1102](https://github.com/helix-editor/helix/pull/1102)) - Add last modified file (`gm`) ([#1093](https://github.com/helix-editor/helix/pull/1093)) - ensure_selections_forward ([#1393](https://github.com/helix-editor/helix/pull/1393)) - Readline style insert mode ([#1039](https://github.com/helix-editor/helix/pull/1039)) Usability improvements and fixes: - Detect filetype on `:write` ([#1141](https://github.com/helix-editor/helix/pull/1141)) - Add single and double quotes to matching pairs ([#995](https://github.com/helix-editor/helix/pull/995)) - Launch with defaults upon invalid config/theme (rather than panicking) ([#982](https://github.com/helix-editor/helix/pull/982)) - If switching away from an empty scratch buffer, remove it ([#935](https://github.com/helix-editor/helix/pull/935)) - Truncate the starts of file paths instead of the ends in picker ([#951](https://github.com/helix-editor/helix/pull/951)) - Truncate the start of file paths in the StatusLine ([#1351](https://github.com/helix-editor/helix/pull/1351)) - Prevent picker from previewing binaries or large file ([#939](https://github.com/helix-editor/helix/pull/939)) - Inform when reaching undo/redo bounds 
([#981](https://github.com/helix-editor/helix/pull/981)) - search_impl will only align cursor center when it isn't in view ([#959](https://github.com/helix-editor/helix/pull/959)) - Add ``, ``, ``, Delete in prompt mode ([#1034](https://github.com/helix-editor/helix/pull/1034)) - Restore screen position when aborting search ([#1047](https://github.com/helix-editor/helix/pull/1047)) - Buffer picker: show is_modifier flag ([#1020](https://github.com/helix-editor/helix/pull/1020)) - Add commit hash to version info, if present ([#957](https://github.com/helix-editor/helix/pull/957)) - Implement indent-aware delete ([#1120](https://github.com/helix-editor/helix/pull/1120)) - Jump to end char of surrounding pair from any cursor pos ([#1121](https://github.com/helix-editor/helix/pull/1121)) - File picker configuration ([#988](https://github.com/helix-editor/helix/pull/988)) - Fix surround cursor position calculation ([#1183](https://github.com/helix-editor/helix/pull/1183)) - Accept count for goto_window ([#1033](https://github.com/helix-editor/helix/pull/1033)) - Make kill_to_line_end behave like Emacs ([#1235](https://github.com/helix-editor/helix/pull/1235)) - Only use a single documentation popup ([#1241](https://github.com/helix-editor/helix/pull/1241)) - ui: popup: Don't allow scrolling past the end of content ([`3307f44c`](https://github.com/helix-editor/helix/commit/3307f44c)) - Open files with spaces in filename, allow opening multiple files ([#1231](https://github.com/helix-editor/helix/pull/1231)) - Allow paste commands to take a count ([#1261](https://github.com/helix-editor/helix/pull/1261)) - Auto pairs selection ([#1254](https://github.com/helix-editor/helix/pull/1254)) - Use a fuzzy matcher for commands ([#1386](https://github.com/helix-editor/helix/pull/1386)) - Add `` to pick word under doc cursor to prompt line & search completion ([#831](https://github.com/helix-editor/helix/pull/831)) - Fix `:earlier`/`:later` missing changeset update 
([#1069](https://github.com/helix-editor/helix/pull/1069)) - Support extend for multiple goto ([#909](https://github.com/helix-editor/helix/pull/909)) - Add arrow-key bindings for window switching ([#933](https://github.com/helix-editor/helix/pull/933)) - Implement key ordering for info box ([#952](https://github.com/helix-editor/helix/pull/952)) LSP: - Implement MarkedString rendering ([`e128a8702`](https://github.com/helix-editor/helix/commit/e128a8702)) - Don't panic if init fails ([`d31bef7`](https://github.com/helix-editor/helix/commit/d31bef7)) - Configurable diagnostic severity ([#1325](https://github.com/helix-editor/helix/pull/1325)) - Resolve completion item ([#1315](https://github.com/helix-editor/helix/pull/1315)) - Code action command support ([#1304](https://github.com/helix-editor/helix/pull/1304)) Grammars: - Adds mint language server ([#974](https://github.com/helix-editor/helix/pull/974)) - Perl ([#978](https://github.com/helix-editor/helix/pull/978)) ([#1280](https://github.com/helix-editor/helix/pull/1280)) - GLSL ([#993](https://github.com/helix-editor/helix/pull/993)) - Racket ([#1143](https://github.com/helix-editor/helix/pull/1143)) - WGSL ([#1166](https://github.com/helix-editor/helix/pull/1166)) - LLVM ([#1167](https://github.com/helix-editor/helix/pull/1167)) ([#1388](https://github.com/helix-editor/helix/pull/1388)) ([#1409](https://github.com/helix-editor/helix/pull/1409)) ([#1398](https://github.com/helix-editor/helix/pull/1398)) - Markdown ([`49e06787`](https://github.com/helix-editor/helix/commit/49e06787)) - Scala ([#1278](https://github.com/helix-editor/helix/pull/1278)) - Dart ([#1250](https://github.com/helix-editor/helix/pull/1250)) - Fish ([#1308](https://github.com/helix-editor/helix/pull/1308)) - Dockerfile ([#1303](https://github.com/helix-editor/helix/pull/1303)) - Git (commit, rebase, diff) ([#1338](https://github.com/helix-editor/helix/pull/1338)) ([#1402](https://github.com/helix-editor/helix/pull/1402)) 
([#1373](https://github.com/helix-editor/helix/pull/1373)) - tree-sitter-comment ([#1300](https://github.com/helix-editor/helix/pull/1300)) - Highlight comments in c, cpp, cmake and llvm ([#1309](https://github.com/helix-editor/helix/pull/1309)) - Improve yaml syntax highlighting highlighting ([#1294](https://github.com/helix-editor/helix/pull/1294)) - Improve rust syntax highlighting ([#1295](https://github.com/helix-editor/helix/pull/1295)) - Add textobjects and indents to cmake ([#1307](https://github.com/helix-editor/helix/pull/1307)) - Add textobjects and indents to c and cpp ([#1293](https://github.com/helix-editor/helix/pull/1293)) New themes: - Solarized dark ([#999](https://github.com/helix-editor/helix/pull/999)) - Solarized light ([#1010](https://github.com/helix-editor/helix/pull/1010)) - Spacebones light ([#1131](https://github.com/helix-editor/helix/pull/1131)) - Monokai Pro ([#1206](https://github.com/helix-editor/helix/pull/1206)) - Base16 Light and Terminal ([#1078](https://github.com/helix-editor/helix/pull/1078)) - and a default 16 color theme, truecolor detection - Dracula ([#1258](https://github.com/helix-editor/helix/pull/1258)) # 0.5.0 (2021-10-28) A big shout out to all the contributors! We had 46 contributors in this release. Helix has popped up in [Scoop, FreeBSD Ports and Gentu GURU](https://repology.org/project/helix/versions)! The following is a quick rundown of the larger changes, there were many more (check the git history for more details). Breaking changes: - A couple of keymaps moved to resolve a few conflicting keybinds. 
- Documentation popups were moved from `K` to `space+k` - `K` is now `keep_selections` which filters selections to only keeps ones matching the regex - `keep_primary_selection` moved from `space+space` to `,` - `Alt-,` is now `remove_primary_selection` which keeps all selections except the primary one - Opening files in a split moved from `C-h` to `C-s` - Some configuration options moved from a `[terminal]` section to `[editor]`. [Consult the documentation for more information.](https://docs.helix-editor.com/configuration.html) Features: - LSP compatibility greatly improved for some implementations (Julia, Python, Typescript) - Autocompletion! Completion now triggers automatically after a set idle timeout - Completion documentation is now displayed next to the popup ([#691](https://github.com/helix-editor/helix/pull/691)) - Treesitter textobjects (select a function via `mf`, class via `mc`) ([#728](https://github.com/helix-editor/helix/pull/728)) - Global search across entire workspace `space+/` ([#651](https://github.com/helix-editor/helix/pull/651)) - Relative line number support ([#485](https://github.com/helix-editor/helix/pull/485)) - Prompts now store a history ([`72cf86e`](https://github.com/helix-editor/helix/commit/72cf86e)) - `:vsplit` and `:hsplit` commands ([#639](https://github.com/helix-editor/helix/pull/639)) - `C-w h/j/k/l` can now be used to navigate between splits ([#860](https://github.com/helix-editor/helix/pull/860)) - `C-j` and `C-k` are now alternative keybindings to `C-n` and `C-p` in the UI ([#876](https://github.com/helix-editor/helix/pull/876)) - Shell commands (shell-pipe, pipe-to, shell-insert-output, shell-append-output, keep-pipe) ([#547](https://github.com/helix-editor/helix/pull/547)) - Searching now defaults to smart case search (case insensitive unless uppercase is used) ([#761](https://github.com/helix-editor/helix/pull/761)) - The preview pane was improved to highlight and center line ranges - The user `languages.toml` is now 
merged into defaults, no longer need to copy the entire file ([`dc57f8dc`](https://github.com/helix-editor/helix/commit/dc57f8dc)) - Show hidden files in completions ([#648](https://github.com/helix-editor/helix/pull/648)) - Grammar injections are now properly handled ([`dd0b15e`](https://github.com/helix-editor/helix/commit/dd0b15e)) - `v` in select mode now switches back to normal mode ([#660](https://github.com/helix-editor/helix/pull/660)) - View mode can now be triggered as a "sticky" mode ([#719](https://github.com/helix-editor/helix/pull/719)) - `f`/`t` and object selection motions can now be repeated via `Alt-.` ([#891](https://github.com/helix-editor/helix/pull/891)) - Statusline now displays total selection count and diagnostics counts for both errors and warnings ([#916](https://github.com/helix-editor/helix/pull/916)) New grammars: - Ledger ([#572](https://github.com/helix-editor/helix/pull/572)) - Protobuf ([#614](https://github.com/helix-editor/helix/pull/614)) - Zig ([#631](https://github.com/helix-editor/helix/pull/631)) - YAML ([#667](https://github.com/helix-editor/helix/pull/667)) - Lua ([#665](https://github.com/helix-editor/helix/pull/665)) - OCaml ([#666](https://github.com/helix-editor/helix/pull/666)) - Svelte ([#733](https://github.com/helix-editor/helix/pull/733)) - Vue ([#787](https://github.com/helix-editor/helix/pull/787)) - Tree-sitter queries ([#845](https://github.com/helix-editor/helix/pull/845)) - CMake ([#888](https://github.com/helix-editor/helix/pull/888)) - Elixir (we switched over to the official grammar) ([`6c0786e`](https://github.com/helix-editor/helix/commit/6c0786e)) - Language server definitions for Nix and Elixir ([#725](https://github.com/helix-editor/helix/pull/725)) - Python now uses `pylsp` instead of `pyls` - Python now supports indentation New themes: - Monokai ([#628](https://github.com/helix-editor/helix/pull/628)) - Everforest Dark ([#760](https://github.com/helix-editor/helix/pull/760)) - Nord 
([#799](https://github.com/helix-editor/helix/pull/799)) - Base16 Default Dark ([#833](https://github.com/helix-editor/helix/pull/833)) - Rose Pine ([#897](https://github.com/helix-editor/helix/pull/897)) Fixes: - Fix crash on empty rust file ([#592](https://github.com/helix-editor/helix/pull/592)) - Exit select mode after toggle comment ([#598](https://github.com/helix-editor/helix/pull/598)) - Pin popups with no positioning to the initial position ([`12ea3888`](https://github.com/helix-editor/helix/commit/12ea3888)) - xsel copy should not freeze the editor ([`6dd7dc4`](https://github.com/helix-editor/helix/commit/6dd7dc4)) - `*` now only sets the search register and doesn't jump to the next occurrence ([`3426285`](https://github.com/helix-editor/helix/commit/3426285)) - Goto line start/end commands extend when in select mode ([#739](https://github.com/helix-editor/helix/pull/739)) - Fix documentation popups sometimes not getting fully highlighted ([`066367c`](https://github.com/helix-editor/helix/commit/066367c)) - Refactor apply_workspace_edit to remove assert ([`b02d872`](https://github.com/helix-editor/helix/commit/b02d872)) - Wrap around the top of the picker menu when scrolling ([`c7d6e44`](https://github.com/helix-editor/helix/commit/c7d6e44)) - Don't allow closing the last split if there's unsaved changes ([`3ff5b00`](https://github.com/helix-editor/helix/commit/3ff5b00)) - Indentation used different default on hx vs hx new_file.txt ([`c913bad`](https://github.com/helix-editor/helix/commit/c913bad)) # 0.4.1 (2021-08-14) A minor release that includes: - A fix for rendering glitches that would occur after editing with multiple selections. - CI fix for grammars not being cross-compiled for aarch64 # 0.4.0 (2021-08-13) A big shout out to all the contributors! We had 28 contributors in this release. Two months have passed, so this is another big release. A big thank you to all the contributors and package maintainers! 
Helix has popped up in [Arch, Manjaro, Nix, MacPorts and Parabola and Termux repositories](https://repology.org/project/helix/versions)! A [large scale refactor](https://github.com/helix-editor/helix/pull/376) landed that allows us to support zero width (empty) selections in the future as well as resolves many bugs and edge cases. - Multi-key remapping! Key binds now support much more complex usecases ([#454](https://github.com/helix-editor/helix/pull/454)) - Pending keys are shown in the statusline ([#515](https://github.com/helix-editor/helix/pull/515)) - Object selection / textobjects. `mi(` to select text inside parentheses ([#385](https://github.com/helix-editor/helix/pull/385)) - Autoinfo: `whichkey`-like popups which show available sub-mode shortcuts ([#316](https://github.com/helix-editor/helix/pull/316)) - Added WORD movements (W/B/E) ([#390](https://github.com/helix-editor/helix/pull/390)) - Vertical selections (repeat selection above/below) ([#462](https://github.com/helix-editor/helix/pull/462)) - Selection rotation via `(` and `)` ([`66a90130`](https://github.com/helix-editor/helix/commit/66a90130a5f99d769e9f6034025297f78ecaa3ec)) - Selection contents rotation via `Alt-(` and `Alt-)` ([`02cba2a`](https://github.com/helix-editor/helix/commit/02cba2a7f403f48eccb18100fb751f7b42373dba)) - Completion behavior improvements ([`f917b5a4`](https://github.com/helix-editor/helix/commit/f917b5a441ff3ae582358b6939ffbf889f4aa530), [`627b899`](https://github.com/helix-editor/helix/commit/627b89931576f7af86166ae8d5cbc55537877473)) - Fixed a language server crash ([`385a6b5a`](https://github.com/helix-editor/helix/commit/385a6b5a1adddfc26e917982641530e1a7c7aa81)) - Case change commands (`` ` ``, `~`, ````) ([#441](https://github.com/helix-editor/helix/pull/441)) - File pickers (including goto) now provide a preview! ([#534](https://github.com/helix-editor/helix/pull/534)) - Injection query support. 
Rust macro calls and embedded languages are now properly highlighted ([#430](https://github.com/helix-editor/helix/pull/430)) - Formatting is now asynchronous, and the async job infrastructure has been improved ([#285](https://github.com/helix-editor/helix/pull/285)) - Grammars are now compiled as separate shared libraries and loaded on-demand at runtime ([#432](https://github.com/helix-editor/helix/pull/432)) - Code action support ([#478](https://github.com/helix-editor/helix/pull/478)) - Mouse support ([#509](https://github.com/helix-editor/helix/pull/509), [#548](https://github.com/helix-editor/helix/pull/548)) - Native Windows clipboard support ([#373](https://github.com/helix-editor/helix/pull/373)) - Themes can now use color palettes ([#393](https://github.com/helix-editor/helix/pull/393)) - `:reload` command ([#374](https://github.com/helix-editor/helix/pull/374)) - Ctrl-z to suspend ([#464](https://github.com/helix-editor/helix/pull/464)) - Language servers can now be configured with a custom JSON config ([#460](https://github.com/helix-editor/helix/pull/460)) - Comment toggling now uses a language specific comment token ([#463](https://github.com/helix-editor/helix/pull/463)) - Julia support ([#413](https://github.com/helix-editor/helix/pull/413)) - Java support ([#448](https://github.com/helix-editor/helix/pull/448)) - Prompts have an (in-memory) history ([`63e54e30`](https://github.com/helix-editor/helix/commit/63e54e30a74bb0d1d782877ddbbcf95f2817d061)) # 0.3.0 (2021-06-27) A big shout out to all the contributors! We had 24 contributors in this release. Another big release. Highlights: - Indentation is now automatically detected from file heuristics. ([#245](https://github.com/helix-editor/helix/pull/245)) - Support for other line endings (CRLF). Significantly improved Windows support. ([#224](https://github.com/helix-editor/helix/pull/224)) - Encodings other than UTF-8 are now supported! 
([#228](https://github.com/helix-editor/helix/pull/228)) - Key bindings can now be configured via a `config.toml` file ([#268](https://github.com/helix-editor/helix/pull/268)) - Theme can now be configured and changed at runtime. ([Please feel free to contribute more themes!](https://github.com/helix-editor/helix/tree/master/runtime/themes)) ([#267](https://github.com/helix-editor/helix/pull/267)) - System clipboard yank/paste is now supported! ([#310](https://github.com/helix-editor/helix/pull/310)) - Surround commands were implemented ([#320](https://github.com/helix-editor/helix/pull/320)) Features: - File picker can now be repeatedly filtered ([#232](https://github.com/helix-editor/helix/pull/232)) - LSP progress is now received and rendered as a spinner ([#234](https://github.com/helix-editor/helix/pull/234)) - Current line number can now be themed ([#260](https://github.com/helix-editor/helix/pull/260)) - Arrow keys & home/end now work in insert mode ([#305](https://github.com/helix-editor/helix/pull/305)) - Cursors and selections can now be themed ([#325](https://github.com/helix-editor/helix/pull/325)) - Language servers are now gracefully shut down before `hx` exits ([#287](https://github.com/helix-editor/helix/pull/287)) - `:show-directory`/`:change-directory` ([#335](https://github.com/helix-editor/helix/pull/335)) - File picker is now sorted by access time (before filtering) ([#336](https://github.com/helix-editor/helix/pull/336)) - Code is being migrated from helix-term to helix-view (prerequisite for alternative frontends) ([#366](https://github.com/helix-editor/helix/pull/366)) - `x` and `X` merged ([`f41688d9`](https://github.com/helix-editor/helix/commit/f41688d960ef89c29c4a51c872b8406fb8f81a85)) Fixes: - The IME popup is now correctly positioned ([#273](https://github.com/helix-editor/helix/pull/273)) - A bunch of bugs regarding `o`/`O` behavior ([#281](https://github.com/helix-editor/helix/pull/281)) - `~` expansion now works in file completion 
([#284](https://github.com/helix-editor/helix/pull/284)) - Several UI related overflow crashes ([#318](https://github.com/helix-editor/helix/pull/318)) - Fix a test failure occurring only on `test --release` ([`4f108ab1`](https://github.com/helix-editor/helix/commit/4f108ab1b2197809506bd7305ad903a3525eabfa)) - Prompts now support unicode input ([#295](https://github.com/helix-editor/helix/pull/295)) - Completion documentation no longer overlaps the popup ([#322](https://github.com/helix-editor/helix/pull/322)) - Fix a crash when trying to select `^` ([`9c534614`](https://github.com/helix-editor/helix/commit/9c53461429a3e72e3b1fb87d7ca490e168d7dee2)) - Prompt completions are now paginated ([`39dc09e6`](https://github.com/helix-editor/helix/commit/39dc09e6c4172299bc79de4c1c52288d3f624bd7)) - Goto did not work on Windows ([`503ca112`](https://github.com/helix-editor/helix/commit/503ca112ae57ebdf3ea323baf8940346204b46d2)) # 0.2.1 Includes a fix where wq/wqa could exit before file saving completed. # 0.2.0 A big shout out to all the contributors! We had 18 contributors in this release. Enough has changed to bump the version. We're skipping 0.1.x because previously the CLI would always report version as 0.1.0, and we'd like to distinguish it in bug reports.. - The `runtime/` directory is now properly detected on binary releases and on cargo run. `~/.config/helix/runtime` can also be used. 
- Registers can now be selected via " (for example, `"ay`) - Support for Nix files was added - Movement is now fully tested and matches Kakoune implementation - A per-file LSP symbol picker was added to space+s - Selection can be replaced with yanked text via R - `1g` now correctly goes to line 1 - `ctrl-i` now correctly jumps backwards in history - A small memory leak was fixed, where we tried to reuse tree-sitter query cursors, but always allocated a new one - Auto-formatting is now only on for certain languages - The root directory is now provided in LSP initialization, fixing certain language servers (typescript) - LSP failing to start no longer panics - Elixir language queries were fixed # 0.0.10 Keymaps: - Add mappings to jump to diagnostics - Add gt/gm/gb mappings to jump to top/middle/bottom of screen - ^ and $ are now gh, gl - The runtime/ can now optionally be embedded in the binary - Haskell syntax added - Window mode (ctrl-w) added - Show matching bracket (Vim's matchbrackets) - Themes now support style modifiers - First user contributed theme - Create a document if it doesn't exist yet on save - Detect language on a new file on save - Panic fixes, lots of them helix-cbac4273836f5837cec641ab21f365c79b102a4b/Cargo.lock000066400000000000000000002266751500614314100215340ustar00rootroot00000000000000# This file is automatically @generated by Cargo. # It is not intended for manual editing. 
version = 3 [[package]] name = "addr2line" version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" dependencies = [ "gimli", ] [[package]] name = "adler" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "adler2" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "ahash" version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", "getrandom 0.2.15", "once_cell", "version_check", "zerocopy", ] [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "allocator-api2" version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "android-tzdata" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" [[package]] name = "android_system_properties" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" dependencies = [ "libc", ] [[package]] name = "anyhow" version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" [[package]] name = "arc-swap" version = "1.7.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" [[package]] name = "autocfg" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "backtrace" version = "0.3.73" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" dependencies = [ "addr2line", "cc", "cfg-if", "libc", "miniz_oxide 0.7.4", "object", "rustc-demangle", ] [[package]] name = "bitflags" version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" [[package]] name = "block-buffer" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "bstr" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4" dependencies = [ "memchr", "regex-automata", "serde", ] [[package]] name = "bumpalo" version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "byteorder" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50" [[package]] name = "cassowary" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" [[package]] name = "cc" version = "1.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04da6a0d40b948dfc4fa8f5bbf402b0fc1a64a28dbf7d12ffd683550f2c1b63a" dependencies = [ "shlex", ] [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chardetng" version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "14b8f0b65b7b08ae3c8187e8d77174de20cb6777864c6b832d8ad365999cf1ea" dependencies = [ "cfg-if", "encoding_rs", "memchr", ] [[package]] name = "chrono" version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" dependencies = [ "android-tzdata", "iana-time-zone", "num-traits", "windows-link", ] [[package]] name = "clipboard-win" version = "5.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "15efe7a882b08f34e38556b14f2fb3daa98769d06c7f0c1b076dfd0d983bc892" dependencies = [ "error-code", ] [[package]] name = "clru" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cbd0f76e066e64fdc5631e3bb46381254deab9ef1158292f27c8c57e3bf3fe59" [[package]] name = "content_inspector" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7bda66e858c683005a53a9a60c69a4aca7eeaa45d124526e389f7aec8e62f38" dependencies = [ "memchr", ] [[package]] name = "core-foundation-sys" version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crc32fast" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-deque" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" dependencies = [ "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" [[package]] name = "crossterm" version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6" dependencies = [ "bitflags", "crossterm_winapi", "filedescriptor", "futures-core", "libc", "mio", "parking_lot", "rustix 0.38.44", "signal-hook", "signal-hook-mio", "winapi", ] [[package]] name = "crossterm_winapi" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" dependencies = [ "winapi", ] [[package]] name = "crypto-common" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", "typenum", ] [[package]] name = "dashmap" version = "6.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" dependencies = [ "cfg-if", "crossbeam-utils", "hashbrown 0.14.5", "lock_api", "once_cell", "parking_lot_core", ] [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", ] [[package]] name = "displaydoc" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "dunce" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" [[package]] name = "either" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "encoding_rs" version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] [[package]] name = "encoding_rs_io" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cc3c5651fb62ab8aa3103998dade57efdd028544bd300516baa31840c252a83" dependencies = [ "encoding_rs", ] [[package]] name = "env_home" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7f84e12ccf0a7ddc17a6c41c93326024c42920d7ee630d04950e6926645c0fe" [[package]] name = "equivalent" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", "windows-sys 0.59.0", ] [[package]] name = "error-code" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0474425d51df81997e2f90a21591180b38eccf27292d755f3e30750225c175b" [[package]] name = "etcetera" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26c7b13d0780cb82722fd59f6f57f925e143427e4a75313a6c77243bf5326ae6" dependencies = [ "cfg-if", "home", "windows-sys 0.59.0", ] [[package]] name = "faster-hex" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7223ae2d2f179b803433d9c830478527e92b8117eab39460edae7f1614d9fb73" dependencies = [ "heapless", "serde", ] [[package]] name = "fastrand" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" [[package]] name = "fern" version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4316185f709b23713e41e3195f90edef7fb00c3ed4adc79769cf09cc762a3b29" dependencies = [ "log", ] [[package]] name = "filedescriptor" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7199d965852c3bac31f779ef99cbb4537f80e952e2d6aa0ffeb30cce00f4f46e" dependencies = [ "libc", "thiserror 1.0.69", "winapi", ] [[package]] name = "filetime" version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ "cfg-if", "libc", "libredox", "windows-sys 0.59.0", ] [[package]] name = "flate2" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" dependencies = [ "crc32fast", "libz-rs-sys", "miniz_oxide 0.8.7", ] [[package]] name 
= "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foldhash" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" [[package]] name = "form_urlencoded" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] [[package]] name = "futures-core" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", "futures-util", ] [[package]] name = "futures-task" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-util" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-core", "futures-task", "pin-project-lite", "pin-utils", "slab", ] [[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", ] [[package]] name = "getrandom" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ 
"cfg-if", "libc", "wasi 0.11.0+wasi-snapshot-preview1", ] [[package]] name = "getrandom" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" dependencies = [ "cfg-if", "libc", "wasi 0.13.3+wasi-0.2.2", "windows-targets", ] [[package]] name = "gimli" version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" [[package]] name = "gix" version = "0.72.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01237e8d3d78581f71642be8b0c2ae8c0b2b5c251c9c5d9ebbea3c1ea280dce8" dependencies = [ "gix-actor", "gix-attributes", "gix-command", "gix-commitgraph", "gix-config", "gix-date", "gix-diff", "gix-dir", "gix-discover", "gix-features", "gix-filter", "gix-fs", "gix-glob", "gix-hash", "gix-hashtable", "gix-ignore", "gix-index", "gix-lock", "gix-object", "gix-odb", "gix-pack", "gix-path", "gix-pathspec", "gix-protocol", "gix-ref", "gix-refspec", "gix-revision", "gix-revwalk", "gix-sec", "gix-shallow", "gix-status", "gix-submodule", "gix-tempfile", "gix-trace", "gix-traverse", "gix-url", "gix-utils", "gix-validate", "gix-worktree", "once_cell", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-actor" version = "0.35.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b300e6e4f31f3f6bd2de5e2b0caab192ced00dc0fcd0f7cc56e28c575c8e1ff" dependencies = [ "bstr", "gix-date", "gix-utils", "itoa", "thiserror 2.0.12", "winnow", ] [[package]] name = "gix-attributes" version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7e26b3ac280ddb25bb6980d34f4a82ee326f78bf2c6d4ea45eef2d940048b8e" dependencies = [ "bstr", "gix-glob", "gix-path", "gix-quote", "gix-trace", "kstring", "smallvec", "thiserror 2.0.12", "unicode-bom", ] [[package]] name = "gix-bitmap" version = "0.2.14" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "b1db9765c69502650da68f0804e3dc2b5f8ccc6a2d104ca6c85bc40700d37540" dependencies = [ "thiserror 2.0.12", ] [[package]] name = "gix-chunk" version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b1f1d8764958699dc764e3f727cef280ff4d1bd92c107bbf8acd85b30c1bd6f" dependencies = [ "thiserror 2.0.12", ] [[package]] name = "gix-command" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2f47f3fb4ba33644061e8e0e1030ef2a937d42dc969553118c320a205a9fb28" dependencies = [ "bstr", "gix-path", "gix-quote", "gix-trace", "shell-words", ] [[package]] name = "gix-commitgraph" version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e05050fd6caa6c731fe3bd7f9485b3b520be062d3d139cb2626e052d6c127951" dependencies = [ "bstr", "gix-chunk", "gix-hash", "memmap2", "thiserror 2.0.12", ] [[package]] name = "gix-config" version = "0.45.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48f3c8f357ae049bfb77493c2ec9010f58cfc924ae485e1116c3718fc0f0d881" dependencies = [ "bstr", "gix-config-value", "gix-features", "gix-glob", "gix-path", "gix-ref", "gix-sec", "memchr", "once_cell", "smallvec", "thiserror 2.0.12", "unicode-bom", "winnow", ] [[package]] name = "gix-config-value" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "439d62e241dae2dffd55bfeeabe551275cf9d9f084c5ebc6b48bad49d03285b7" dependencies = [ "bitflags", "bstr", "gix-path", "libc", "thiserror 2.0.12", ] [[package]] name = "gix-date" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a98593f1f1e14b9fa15c5b921b2c465e904d698b9463e21bb377be8376c3c1a" dependencies = [ "bstr", "itoa", "jiff", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-diff" version = "0.52.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "5e9b43e95fe352da82a969f0c84ff860c2de3e724d93f6681fedbcd6c917f252" dependencies = [ "bstr", "gix-attributes", "gix-command", "gix-filter", "gix-fs", "gix-hash", "gix-index", "gix-object", "gix-path", "gix-pathspec", "gix-tempfile", "gix-trace", "gix-traverse", "gix-worktree", "imara-diff", "thiserror 2.0.12", ] [[package]] name = "gix-dir" version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01e6e2dc5b8917142d0ffe272209d1671e45b771e433f90186bc71c016792e87" dependencies = [ "bstr", "gix-discover", "gix-fs", "gix-ignore", "gix-index", "gix-object", "gix-path", "gix-pathspec", "gix-trace", "gix-utils", "gix-worktree", "thiserror 2.0.12", ] [[package]] name = "gix-discover" version = "0.40.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dccfe3e25b4ea46083916c56db3ba9d1e6ef6dce54da485f0463f9fc0fe1837c" dependencies = [ "bstr", "dunce", "gix-fs", "gix-hash", "gix-path", "gix-ref", "gix-sec", "thiserror 2.0.12", ] [[package]] name = "gix-features" version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56f4399af6ec4fd9db84dd4cf9656c5c785ab492ab40a7c27ea92b4241923fed" dependencies = [ "crc32fast", "flate2", "gix-path", "gix-trace", "gix-utils", "libc", "once_cell", "prodash", "thiserror 2.0.12", "walkdir", ] [[package]] name = "gix-filter" version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f90c21f0d61778f518bbb7c431b00247bf4534b2153c3e85bcf383876c55ca6c" dependencies = [ "bstr", "encoding_rs", "gix-attributes", "gix-command", "gix-hash", "gix-object", "gix-packetline-blocking", "gix-path", "gix-quote", "gix-trace", "gix-utils", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-fs" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67a0637149b4ef24d3ea55f81f77231401c8463fae6da27331c987957eb597c7" 
dependencies = [ "bstr", "fastrand", "gix-features", "gix-path", "gix-utils", "thiserror 2.0.12", ] [[package]] name = "gix-glob" version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2926b03666e83b8d01c10cf06e5733521aacbd2d97179a4c9b1fdddabb9e937d" dependencies = [ "bitflags", "bstr", "gix-features", "gix-path", ] [[package]] name = "gix-hash" version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d4900562c662852a6b42e2ef03442eccebf24f047d8eab4f23bc12ef0d785d8" dependencies = [ "faster-hex", "gix-features", "sha1-checked", "thiserror 2.0.12", ] [[package]] name = "gix-hashtable" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5b5cb3c308b4144f2612ff64e32130e641279fcf1a84d8d40dad843b4f64904" dependencies = [ "gix-hash", "hashbrown 0.14.5", "parking_lot", ] [[package]] name = "gix-ignore" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae358c3c96660b10abc7da63c06788dfded603e717edbd19e38c6477911b71c8" dependencies = [ "bstr", "gix-glob", "gix-path", "gix-trace", "unicode-bom", ] [[package]] name = "gix-index" version = "0.40.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6d505aea7d7c4267a3153cb90c712a89970b4dd02a2cb3205be322891f530b5" dependencies = [ "bitflags", "bstr", "filetime", "fnv", "gix-bitmap", "gix-features", "gix-fs", "gix-hash", "gix-lock", "gix-object", "gix-traverse", "gix-utils", "gix-validate", "hashbrown 0.14.5", "itoa", "libc", "memmap2", "rustix 1.0.5", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-lock" version = "17.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "570f8b034659f256366dc90f1a24924902f20acccd6a15be96d44d1269e7a796" dependencies = [ "gix-tempfile", "gix-utils", "thiserror 2.0.12", ] [[package]] name = "gix-object" version = "0.49.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d957ca3640c555d48bb27f8278c67169fa1380ed94f6452c5590742524c40fbb" dependencies = [ "bstr", "gix-actor", "gix-date", "gix-features", "gix-hash", "gix-hashtable", "gix-path", "gix-utils", "gix-validate", "itoa", "smallvec", "thiserror 2.0.12", "winnow", ] [[package]] name = "gix-odb" version = "0.69.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "868f703905fdbcfc1bd750942f82419903ecb7039f5288adb5206d6de405e0c9" dependencies = [ "arc-swap", "gix-date", "gix-features", "gix-fs", "gix-hash", "gix-hashtable", "gix-object", "gix-pack", "gix-path", "gix-quote", "parking_lot", "tempfile", "thiserror 2.0.12", ] [[package]] name = "gix-pack" version = "0.59.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d49c55d69c8449f2a0a5a77eb9cbacfebb6b0e2f1215f0fc23a4cb60528a450" dependencies = [ "clru", "gix-chunk", "gix-features", "gix-hash", "gix-hashtable", "gix-object", "gix-path", "memmap2", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-packetline" version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ddc034bc67c848e4ef7596ab5528cd8fd439d310858dbe1ce8b324f25deb91c" dependencies = [ "bstr", "faster-hex", "gix-trace", "thiserror 2.0.12", ] [[package]] name = "gix-packetline-blocking" version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c44880f028ba46d6cf37a66d27a300310c6b51b8ed0e44918f93df061168e2f3" dependencies = [ "bstr", "faster-hex", "gix-trace", "thiserror 2.0.12", ] [[package]] name = "gix-path" version = "0.10.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c091d2e887e02c3462f52252c5ea61150270c0f2657b642e8d0d6df56c16e642" dependencies = [ "bstr", "gix-trace", "gix-validate", "home", "once_cell", "thiserror 2.0.12", ] [[package]] name = "gix-pathspec" version = "0.11.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ce061c50e5f8f7c830cacb3da3e999ae935e283ce8522249f0ce2256d110979d" dependencies = [ "bitflags", "bstr", "gix-attributes", "gix-config-value", "gix-glob", "gix-path", "thiserror 2.0.12", ] [[package]] name = "gix-protocol" version = "0.50.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f5c17d78bb0414f8d60b5f952196dc2e47ec320dca885de9128ecdb4a0e38401" dependencies = [ "bstr", "gix-date", "gix-features", "gix-hash", "gix-ref", "gix-shallow", "gix-transport", "gix-utils", "maybe-async", "thiserror 2.0.12", "winnow", ] [[package]] name = "gix-quote" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a375a75b4d663e8bafe3bf4940a18a23755644c13582fa326e99f8f987d83fd" dependencies = [ "bstr", "gix-utils", "thiserror 2.0.12", ] [[package]] name = "gix-ref" version = "0.52.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d1b7985657029684d759f656b09abc3e2c73085596d5cdb494428823970a7762" dependencies = [ "gix-actor", "gix-features", "gix-fs", "gix-hash", "gix-lock", "gix-object", "gix-path", "gix-tempfile", "gix-utils", "gix-validate", "memmap2", "thiserror 2.0.12", "winnow", ] [[package]] name = "gix-refspec" version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "445ed14e3db78e8e79980085e3723df94e1c8163b3ae5bc8ed6a8fe6cf983b42" dependencies = [ "bstr", "gix-hash", "gix-revision", "gix-validate", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-revision" version = "0.34.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78d0b8e5cbd1c329e25383e088cb8f17439414021a643b30afa5146b71e3c65d" dependencies = [ "bstr", "gix-commitgraph", "gix-date", "gix-hash", "gix-object", "gix-revwalk", "thiserror 2.0.12", ] [[package]] name = "gix-revwalk" version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1bc756b73225bf005ddeb871d1ca7b3c33e2417d0d53e56effa5a36765b52b28" dependencies = [ "gix-commitgraph", "gix-date", "gix-hash", "gix-hashtable", "gix-object", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-sec" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0dabbc78c759ecc006b970339394951b2c8e1e38a37b072c105b80b84c308fd" dependencies = [ "bitflags", "gix-path", "libc", "windows-sys 0.59.0", ] [[package]] name = "gix-shallow" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b9a6f6e34d6ede08f522d89e5c7990b4f60524b8ae6ebf8e850963828119ad4" dependencies = [ "bstr", "gix-hash", "gix-lock", "thiserror 2.0.12", ] [[package]] name = "gix-status" version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "072099c2415cfa5397df7d47eacbcb6016d2cd17e0d674c74965e6ad1b17289f" dependencies = [ "bstr", "filetime", "gix-diff", "gix-dir", "gix-features", "gix-filter", "gix-fs", "gix-hash", "gix-index", "gix-object", "gix-path", "gix-pathspec", "gix-worktree", "portable-atomic", "thiserror 2.0.12", ] [[package]] name = "gix-submodule" version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f51472f05a450cc61bc91ed2f62fb06e31e2bbb31c420bc4be8793f26c8b0c1" dependencies = [ "bstr", "gix-config", "gix-path", "gix-pathspec", "gix-refspec", "gix-url", "thiserror 2.0.12", ] [[package]] name = "gix-tempfile" version = "17.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c750e8c008453a2dba67a2b0d928b7716e05da31173a3f5e351d5457ad4470aa" dependencies = [ "dashmap", "gix-fs", "libc", "once_cell", "parking_lot", "tempfile", ] [[package]] name = "gix-trace" version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c396a2036920c69695f760a65e7f2677267ccf483f25046977d87e4cb2665f7" [[package]] name = "gix-transport" version = "0.47.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "edfe22ba26d4b65c17879f12b9882eafe65d3c8611c933b272fce2c10f546f59" dependencies = [ "bstr", "gix-command", "gix-features", "gix-packetline", "gix-quote", "gix-sec", "gix-url", "thiserror 2.0.12", ] [[package]] name = "gix-traverse" version = "0.46.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39094185f6d9a4d81101130fbbf7f598a06441d774ae3b3ae7930a613bbe1157" dependencies = [ "bitflags", "gix-commitgraph", "gix-date", "gix-hash", "gix-hashtable", "gix-object", "gix-revwalk", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-url" version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42a1ad0b04a5718b5cb233e6888e52a9b627846296161d81dcc5eb9203ec84b8" dependencies = [ "bstr", "gix-features", "gix-path", "percent-encoding", "thiserror 2.0.12", "url", ] [[package]] name = "gix-utils" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5351af2b172caf41a3728eb4455326d84e0d70fe26fc4de74ab0bd37df4191c5" dependencies = [ "bstr", "fastrand", "unicode-normalization", ] [[package]] name = "gix-validate" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77b9e00cacde5b51388d28ed746c493b18a6add1f19b5e01d686b3b9ece66d4d" dependencies = [ "bstr", "thiserror 2.0.12", ] [[package]] name = "gix-worktree" version = "0.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54f1916f8d928268300c977d773dd70a8746b646873b77add0a34876a8c847e9" dependencies = [ "bstr", "gix-attributes", "gix-features", "gix-fs", "gix-glob", "gix-hash", "gix-ignore", "gix-index", "gix-object", "gix-path", "gix-validate", ] [[package]] name = "globset" version = "0.4.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5" dependencies = [ "aho-corasick", "bstr", 
"log", "regex-automata", "regex-syntax", ] [[package]] name = "grep-matcher" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47a3141a10a43acfedc7c98a60a834d7ba00dfe7bec9071cbfc19b55b292ac02" dependencies = [ "memchr", ] [[package]] name = "grep-regex" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9edd147c7e3296e7a26bd3a81345ce849557d5a8e48ed88f736074e760f91f7e" dependencies = [ "bstr", "grep-matcher", "log", "regex-automata", "regex-syntax", ] [[package]] name = "grep-searcher" version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9b6c14b3fc2e0a107d6604d3231dec0509e691e62447104bc385a46a7892cda" dependencies = [ "bstr", "encoding_rs", "encoding_rs_io", "grep-matcher", "log", "memchr", "memmap2", ] [[package]] name = "hash32" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47d60b12902ba28e2730cd37e95b8c9223af2808df9e902d4df49588d1470606" dependencies = [ "byteorder", ] [[package]] name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash", "allocator-api2", ] [[package]] name = "hashbrown" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" dependencies = [ "allocator-api2", "equivalent", "foldhash", ] [[package]] name = "heapless" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bfb9eb618601c89945a70e254898da93b13be0388091d42117462b265bb3fad" dependencies = [ "hash32", "stable_deref_trait", ] [[package]] name = "helix-core" version = "25.1.1" dependencies = [ "ahash", "anyhow", "arc-swap", "bitflags", "chrono", "encoding_rs", "globset", "hashbrown 0.14.5", "helix-loader", 
"helix-parsec", "helix-stdx", "imara-diff", "indoc", "log", "nucleo", "once_cell", "parking_lot", "quickcheck", "regex", "regex-cursor", "ropey", "serde", "serde_json", "slotmap", "smallvec", "smartstring", "textwrap", "toml", "tree-sitter", "unicode-general-category", "unicode-segmentation", "unicode-width 0.1.12", "url", ] [[package]] name = "helix-dap" version = "25.1.1" dependencies = [ "anyhow", "fern", "helix-core", "helix-stdx", "log", "serde", "serde_json", "thiserror 2.0.12", "tokio", ] [[package]] name = "helix-event" version = "25.1.1" dependencies = [ "anyhow", "foldhash", "futures-executor", "hashbrown 0.15.2", "log", "once_cell", "parking_lot", "tokio", ] [[package]] name = "helix-loader" version = "25.1.1" dependencies = [ "anyhow", "cc", "etcetera", "helix-stdx", "libloading", "log", "once_cell", "serde", "tempfile", "threadpool", "toml", "tree-sitter", ] [[package]] name = "helix-lsp" version = "25.1.1" dependencies = [ "anyhow", "arc-swap", "futures-executor", "futures-util", "globset", "helix-core", "helix-loader", "helix-lsp-types", "helix-stdx", "log", "parking_lot", "serde", "serde_json", "slotmap", "thiserror 2.0.12", "tokio", "tokio-stream", ] [[package]] name = "helix-lsp-types" version = "0.95.1" dependencies = [ "bitflags", "serde", "serde_json", "url", ] [[package]] name = "helix-parsec" version = "25.1.1" [[package]] name = "helix-stdx" version = "25.1.1" dependencies = [ "bitflags", "dunce", "etcetera", "once_cell", "regex-automata", "regex-cursor", "ropey", "rustix 1.0.5", "tempfile", "unicode-segmentation", "which", "windows-sys 0.59.0", ] [[package]] name = "helix-term" version = "25.1.1" dependencies = [ "anyhow", "arc-swap", "chrono", "content_inspector", "crossterm", "fern", "futures-util", "grep-regex", "grep-searcher", "helix-core", "helix-dap", "helix-event", "helix-loader", "helix-lsp", "helix-stdx", "helix-tui", "helix-vcs", "helix-view", "ignore", "indexmap", "indoc", "libc", "log", "nucleo", "once_cell", "open", 
"pulldown-cmark", "same-file", "serde", "serde_json", "signal-hook", "signal-hook-tokio", "smallvec", "tempfile", "termini", "thiserror 2.0.12", "tokio", "tokio-stream", "toml", "url", ] [[package]] name = "helix-tui" version = "25.1.1" dependencies = [ "bitflags", "cassowary", "crossterm", "helix-core", "helix-view", "log", "once_cell", "termini", "unicode-segmentation", ] [[package]] name = "helix-vcs" version = "25.1.1" dependencies = [ "anyhow", "arc-swap", "gix", "helix-core", "helix-event", "imara-diff", "log", "parking_lot", "tempfile", "tokio", ] [[package]] name = "helix-view" version = "25.1.1" dependencies = [ "anyhow", "arc-swap", "bitflags", "chardetng", "clipboard-win", "crossterm", "futures-util", "helix-core", "helix-dap", "helix-event", "helix-loader", "helix-lsp", "helix-stdx", "helix-tui", "helix-vcs", "libc", "log", "once_cell", "parking_lot", "rustix 1.0.5", "serde", "serde_json", "slotmap", "tempfile", "thiserror 2.0.12", "tokio", "tokio-stream", "toml", "url", ] [[package]] name = "hermit-abi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "home" version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "iana-time-zone" version = "0.1.60" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", "windows-core", ] [[package]] name = "iana-time-zone-haiku" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ "cc", ] [[package]] 
name = "icu_collections" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" dependencies = [ "displaydoc", "yoke", "zerofrom", "zerovec", ] [[package]] name = "icu_locid" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" dependencies = [ "displaydoc", "litemap", "tinystr", "writeable", "zerovec", ] [[package]] name = "icu_locid_transform" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" dependencies = [ "displaydoc", "icu_locid", "icu_locid_transform_data", "icu_provider", "tinystr", "zerovec", ] [[package]] name = "icu_locid_transform_data" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" [[package]] name = "icu_normalizer" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" dependencies = [ "displaydoc", "icu_collections", "icu_normalizer_data", "icu_properties", "icu_provider", "smallvec", "utf16_iter", "utf8_iter", "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" [[package]] name = "icu_properties" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" dependencies = [ "displaydoc", "icu_collections", "icu_locid_transform", "icu_properties_data", "icu_provider", "tinystr", "zerovec", ] [[package]] name = "icu_properties_data" version = 
"1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" [[package]] name = "icu_provider" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" dependencies = [ "displaydoc", "icu_locid", "icu_provider_macros", "stable_deref_trait", "tinystr", "writeable", "yoke", "zerofrom", "zerovec", ] [[package]] name = "icu_provider_macros" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "idna" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ "idna_adapter", "smallvec", "utf8_iter", ] [[package]] name = "idna_adapter" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" dependencies = [ "icu_normalizer", "icu_properties", ] [[package]] name = "ignore" version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b" dependencies = [ "crossbeam-deque", "globset", "log", "memchr", "regex-automata", "same-file", "walkdir", "winapi-util", ] [[package]] name = "imara-diff" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17d34b7d42178945f775e84bc4c36dde7c1c6cdfea656d3354d009056f2bb3d2" dependencies = [ "hashbrown 0.15.2", ] [[package]] name = "indexmap" version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" dependencies = [ 
"equivalent", "hashbrown 0.15.2", ] [[package]] name = "indoc" version = "2.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" [[package]] name = "is-docker" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "928bae27f42bc99b60d9ac7334e3a21d10ad8f1835a4e12ec3ec0464765ed1b3" dependencies = [ "once_cell", ] [[package]] name = "is-wsl" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "173609498df190136aa7dea1a91db051746d339e18476eed5ca40521f02d7aa5" dependencies = [ "is-docker", "once_cell", ] [[package]] name = "itoa" version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jiff" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a064218214dc6a10fbae5ec5fa888d80c45d611aba169222fc272072bf7aef6" dependencies = [ "jiff-static", "jiff-tzdb-platform", "log", "portable-atomic", "portable-atomic-util", "serde", "windows-sys 0.59.0", ] [[package]] name = "jiff-static" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "199b7932d97e325aff3a7030e141eafe7f2c6268e1d1b24859b753a627f45254" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "jiff-tzdb" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1283705eb0a21404d2bfd6eef2a7593d240bc42a0bdb39db0ad6fa2ec026524" [[package]] name = "jiff-tzdb-platform" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "875a5a69ac2bab1a891711cf5eccbec1ce0341ea805560dcd90b7a2e925132e8" dependencies = [ "jiff-tzdb", ] [[package]] name = "js-sys" version = "0.3.70" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" dependencies = [ "wasm-bindgen", ] [[package]] name = "kstring" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "558bf9508a558512042d3095138b1f7b8fe90c5467d94f9f1da28b3731c5dbd1" dependencies = [ "static_assertions", ] [[package]] name = "libc" version = "0.2.172" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" [[package]] name = "libloading" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", "windows-targets", ] [[package]] name = "libredox" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags", "libc", "redox_syscall", ] [[package]] name = "libz-rs-sys" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6489ca9bd760fe9642d7644e827b0c9add07df89857b0416ee15c1cc1a3b8c5a" dependencies = [ "zlib-rs", ] [[package]] name = "linux-raw-sys" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "linux-raw-sys" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6db9c683daf087dc577b7506e9695b3d556a9f3849903fa28186283afd6809e9" [[package]] name = "litemap" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704" [[package]] name = "lock_api" version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", ] [[package]] name = "log" version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "maybe-async" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5cf92c10c7e361d6b99666ec1c6f9805b0bea2c3bd8c78dc6fe98ac5bd78db11" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "memchr" version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memmap2" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe751422e4a8caa417e13c3ea66452215d7d63e19e604f4980461212f3ae1322" dependencies = [ "libc", ] [[package]] name = "miniz_oxide" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" dependencies = [ "adler", ] [[package]] name = "miniz_oxide" version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff70ce3e48ae43fa075863cef62e8b43b71a4f2382229920e0df362592919430" dependencies = [ "adler2", ] [[package]] name = "mio" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" dependencies = [ "hermit-abi", "libc", "log", "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.52.0", ] [[package]] name = "nucleo" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5262af4c94921c2646c5ac6ff7900c2af9cbb08dc26a797e18130a7019c039d4" dependencies = [ "nucleo-matcher", "parking_lot", "rayon", ] [[package]] name = "nucleo-matcher" version = "0.3.1" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf33f538733d1a5a3494b836ba913207f14d9d4a1d3cd67030c5061bdd2cac85" dependencies = [ "memchr", "unicode-segmentation", ] [[package]] name = "num-traits" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", ] [[package]] name = "num_cpus" version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ "hermit-abi", "libc", ] [[package]] name = "object" version = "0.36.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" dependencies = [ "memchr", ] [[package]] name = "once_cell" version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "open" version = "5.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2483562e62ea94312f3576a7aca397306df7990b8d89033e18766744377ef95" dependencies = [ "is-wsl", "libc", "pathdiff", ] [[package]] name = "parking_lot" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", ] [[package]] name = "parking_lot_core" version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", "windows-targets", ] [[package]] name = "pathdiff" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" [[package]] name = "percent-encoding" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pin-project-lite" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" [[package]] name = "pin-utils" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "portable-atomic" version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" [[package]] name = "portable-atomic-util" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" dependencies = [ "portable-atomic", ] [[package]] name = "proc-macro2" version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] [[package]] name = "prodash" version = "29.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f04bb108f648884c23b98a0e940ebc2c93c0c3b89f04dbaf7eb8256ce617d1bc" dependencies = [ "log", "parking_lot", ] [[package]] name = "pulldown-cmark" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e8bbe1a966bd2f362681a44f6edce3c2310ac21e4d5067a6e7ec396297a6ea0" dependencies = [ "bitflags", "memchr", "unicase", ] [[package]] name = "quickcheck" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" dependencies = [ "rand", ] [[package]] name = "quote" version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] [[package]] name = "rand" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "rand_core", ] [[package]] name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom 0.2.15", ] [[package]] name = "rayon" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ "either", "rayon-core", ] [[package]] name = "rayon-core" version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ "crossbeam-deque", "crossbeam-utils", ] [[package]] name = "redox_syscall" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" dependencies = [ "bitflags", ] [[package]] name = "regex" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", 
"regex-syntax", ] [[package]] name = "regex-cursor" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0497c781d2f982ae8284d2932aee6a877e58a4541daa5e8fadc18cc75c23a61d" dependencies = [ "log", "memchr", "regex-automata", "regex-syntax", "ropey", ] [[package]] name = "regex-syntax" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "ropey" version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93411e420bcd1a75ddd1dc3caf18c23155eda2c090631a85af21ba19e97093b5" dependencies = [ "smallvec", "str_indices", ] [[package]] name = "rustc-demangle" version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustix" version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys 0.4.14", "windows-sys 0.59.0", ] [[package]] name = "rustix" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys 0.9.2", "windows-sys 0.59.0", ] [[package]] name = "ryu" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "scopeguard" version = "1.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "serde" version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "serde_json" version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" dependencies = [ "itoa", "memchr", "ryu", "serde", ] [[package]] name = "serde_spanned" version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" dependencies = [ "serde", ] [[package]] name = "sha1" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", "digest", ] [[package]] name = "sha1-checked" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89f599ac0c323ebb1c6082821a54962b839832b03984598375bff3975b804423" dependencies = [ "digest", "sha1", ] [[package]] name = "shell-words" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook" version = "0.3.17" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801" dependencies = [ "libc", "signal-hook-registry", ] [[package]] name = "signal-hook-mio" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34db1a06d485c9142248b7a054f034b349b212551f3dfd19c94d45a754a217cd" dependencies = [ "libc", "mio", "signal-hook", ] [[package]] name = "signal-hook-registry" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" dependencies = [ "libc", ] [[package]] name = "signal-hook-tokio" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "213241f76fb1e37e27de3b6aa1b068a2c333233b59cca6634f634b80a27ecf1e" dependencies = [ "futures-core", "libc", "signal-hook", "tokio", ] [[package]] name = "slab" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ "autocfg", ] [[package]] name = "slotmap" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dbff4acf519f630b3a3ddcfaea6c06b42174d9a44bc70c620e9ed1649d58b82a" dependencies = [ "version_check", ] [[package]] name = "smallvec" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" [[package]] name = "smartstring" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fb72c633efbaa2dd666986505016c32c3044395ceaf881518399d2f4127ee29" dependencies = [ "autocfg", "static_assertions", "version_check", ] [[package]] name = "smawk" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" [[package]] name = "socket2" version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" dependencies = [ "libc", "windows-sys 0.52.0", ] [[package]] name = "stable_deref_trait" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "static_assertions" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "str_indices" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9557cb6521e8d009c51a8666f09356f4b817ba9ba0981a305bd86aee47bd35c" [[package]] name = "syn" version = "2.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "synstructure" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "tempfile" version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" dependencies = [ "fastrand", "getrandom 0.3.1", "once_cell", "rustix 1.0.5", "windows-sys 0.59.0", ] [[package]] name = "termini" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ad441d87dd98bc5eeb31cf2fb7e4839968763006b478efb38668a3bf9da0d59" dependencies = [ "home", ] [[package]] name = "textwrap" version = "0.16.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" dependencies = [ "smawk", "unicode-linebreak", "unicode-width 0.2.0", ] [[package]] name = "thiserror" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ "thiserror-impl 1.0.69", ] [[package]] name = "thiserror" version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" dependencies = [ "thiserror-impl 2.0.12", ] [[package]] name = "thiserror-impl" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "thiserror-impl" version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "threadpool" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" dependencies = [ "num_cpus", ] [[package]] name = "tinystr" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" dependencies = [ "displaydoc", "zerovec", ] [[package]] name = "tinyvec" version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] [[package]] name = "tinyvec_macros" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" version = "1.44.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48" dependencies = [ "backtrace", "bytes", "libc", "mio", "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", "windows-sys 0.52.0", ] [[package]] name = "tokio-macros" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "tokio-stream" version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", "tokio", ] [[package]] name = "toml" version = "0.8.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05ae329d1f08c4d17a59bed7ff5b5a769d062e64a62d34a3261b219e62cd5aae" dependencies = [ "serde", "serde_spanned", "toml_datetime", "toml_edit", ] [[package]] name = "toml_datetime" version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3" dependencies = [ "serde", ] [[package]] name = "toml_edit" version = "0.22.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e" dependencies = [ "indexmap", "serde", "serde_spanned", "toml_datetime", "toml_write", "winnow", ] [[package]] name = "toml_write" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfb942dfe1d8e29a7ee7fcbde5bd2b9a25fb89aa70caea2eba3bee836ff41076" [[package]] name = "tree-sitter" version = "0.22.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "df7cc499ceadd4dcdf7ec6d4cbc34ece92c3fa07821e287aedecd4416c516dca" dependencies = [ "cc", "regex", ] [[package]] name = "typenum" version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "unicase" version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" dependencies = [ "version_check", ] [[package]] name = "unicode-bom" version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7eec5d1121208364f6793f7d2e222bf75a915c19557537745b195b253dd64217" [[package]] name = "unicode-general-category" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24adfe8311434967077a6adff125729161e6e4934d76f6b7c55318ac5c9246d3" [[package]] name = "unicode-ident" version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-linebreak" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" [[package]] name = "unicode-normalization" version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" dependencies = [ "tinyvec", ] [[package]] name = "unicode-segmentation" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] name = "unicode-width" version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6" [[package]] name = "unicode-width" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" [[package]] name = "url" version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", "percent-encoding", "serde", ] [[package]] name = "utf16_iter" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" [[package]] name = "utf8_iter" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", ] [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasi" version = "0.13.3+wasi-0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" dependencies = [ "wit-bindgen-rt", ] [[package]] name = "wasm-bindgen" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" dependencies = 
[ "cfg-if", "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" [[package]] name = "which" version = "7.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d643ce3fd3e5b54854602a080f34fb10ab75e0b813ee32d00ca2b44fa74762" dependencies = [ "either", "env_home", "rustix 1.0.5", "winsafe", ] [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-core" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ "windows-targets", ] [[package]] name = "windows-link" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3" [[package]] name = "windows-sys" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", "windows_i686_gnullvm", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6cb8234a863ea0e8cd7284fcdd4f145233eb00fee02bbdd9861aec44e6477bc5" dependencies = [ "memchr", ] [[package]] name = "winsafe" version = "0.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" [[package]] name = "wit-bindgen-rt" version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" dependencies = [ "bitflags", ] [[package]] name = "write16" version = "1.0.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" [[package]] name = "writeable" version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" [[package]] name = "xtask" version = "25.1.1" dependencies = [ "helix-core", "helix-loader", "helix-term", "helix-view", "toml", ] [[package]] name = "yoke" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5" dependencies = [ "serde", "stable_deref_trait", "yoke-derive", "zerofrom", ] [[package]] name = "yoke-derive" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" dependencies = [ "proc-macro2", "quote", "syn", "synstructure", ] [[package]] name = "zerocopy" version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "zerofrom" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" dependencies = [ "proc-macro2", "quote", "syn", "synstructure", ] [[package]] name = "zerovec" version = 
"0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" dependencies = [ "yoke", "zerofrom", "zerovec-derive", ] [[package]] name = "zerovec-derive" version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "zlib-rs" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "868b928d7949e09af2f6086dfc1e01936064cc7a819253bce650d4e2a2d63ba8" helix-cbac4273836f5837cec641ab21f365c79b102a4b/Cargo.toml000066400000000000000000000021211500614314100215310ustar00rootroot00000000000000[workspace] resolver = "2" members = [ "helix-core", "helix-view", "helix-term", "helix-tui", "helix-lsp-types", "helix-lsp", "helix-event", "helix-dap", "helix-loader", "helix-vcs", "helix-parsec", "helix-stdx", "xtask", ] default-members = [ "helix-term" ] [profile.release] lto = "thin" [profile.opt] inherits = "release" lto = "fat" codegen-units = 1 strip = true opt-level = 3 [profile.integration] inherits = "test" package.helix-core.opt-level = 2 package.helix-tui.opt-level = 2 package.helix-term.opt-level = 2 [workspace.dependencies] tree-sitter = { version = "0.22" } nucleo = "0.5.0" slotmap = "1.0.7" thiserror = "2.0" tempfile = "3.19.1" bitflags = "2.9" unicode-segmentation = "1.2" ropey = { version = "1.6.1", default-features = false, features = ["simd"] } foldhash = "0.1" parking_lot = "0.12" [workspace.package] version = "25.1.1" edition = "2021" authors = ["Blaž Hrastnik "] categories = ["editor"] repository = "https://github.com/helix-editor/helix" homepage = "https://helix-editor.com" license = "MPL-2.0" rust-version = "1.82" helix-cbac4273836f5837cec641ab21f365c79b102a4b/LICENSE000066400000000000000000000405251500614314100206200ustar00rootroot00000000000000Mozilla Public License 
Version 2.0 ================================== 1. Definitions -------------- 1.1. "Contributor" means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software. 1.2. "Contributor Version" means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor's Contribution. 1.3. "Contribution" means Covered Software of a particular Contributor. 1.4. "Covered Software" means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof. 1.5. "Incompatible With Secondary Licenses" means (a) that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or (b) that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License. 1.6. "Executable Form" means any form of the work other than Source Code Form. 1.7. "Larger Work" means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software. 1.8. "License" means this document. 1.9. "Licensable" means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License. 1.10. "Modifications" means any of the following: (a) any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or (b) any new file in Source Code Form that contains any Covered Software. 1.11. 
"Patent Claims" of a Contributor means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version. 1.12. "Secondary License" means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. 1.13. "Source Code Form" means the form of the work preferred for making modifications. 1.14. "You" (or "Your") means an individual or a legal entity exercising rights under this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, "control" means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. 2. License Grants and Conditions -------------------------------- 2.1. Grants Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: (a) under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and (b) under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. 2.2. 
Effective Date The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. 2.3. Limitations on Grant Scope The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: (a) for any code that a Contributor has removed from Covered Software; or (b) for infringements caused by: (i) Your and any other third party's modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or (c) under Patent Claims infringed by Covered Software in the absence of its Contributions. This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). 2.4. Subsequent Licenses No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). 2.5. Representation Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. 2.6. Fair Use This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents. 2.7. Conditions Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1. 3. Responsibilities ------------------- 3.1. 
Distribution of Source Form All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under the terms of this License. You must inform recipients that the Source Code Form of the Covered Software is governed by the terms of this License, and how they can obtain a copy of this License. You may not attempt to alter or restrict the recipients' rights in the Source Code Form. 3.2. Distribution of Executable Form If You distribute Covered Software in Executable Form then: (a) such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and (b) You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients' rights in the Source Code Form under this License. 3.3. Distribution of a Larger Work You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for the Covered Software. If the Larger Work is a combination of Covered Software with a work governed by one or more Secondary Licenses, and the Covered Software is not Incompatible With Secondary Licenses, this License permits You to additionally distribute such Covered Software under the terms of such Secondary License(s), so that the recipient of the Larger Work may, at their option, further distribute the Covered Software under the terms of either this License or such Secondary License(s). 3.4. 
Notices You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, or limitations of liability) contained within the Source Code Form of the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies. 3.5. Application of Additional Terms You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, You may do so only on Your own behalf, and not on behalf of any Contributor. You must make it absolutely clear that any such warranty, support, indemnity, or liability obligation is offered by You alone, and You hereby agree to indemnify every Contributor for any liability incurred by such Contributor as a result of warranty, support, indemnity or liability terms You offer. You may include additional disclaimers of warranty and limitations of liability specific to any jurisdiction. 4. Inability to Comply Due to Statute or Regulation --------------------------------------------------- If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to statute, judicial order, or regulation then You must: (a) comply with the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. 5. Termination -------------- 5.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. 
However, if You become compliant, then the rights granted under this License from a particular Contributor are reinstated (a) provisionally, unless and until such Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. Moreover, Your grants from a particular Contributor are reinstated on an ongoing basis if such Contributor notifies You of the non-compliance by some reasonable means, this is the first time You have received notice of non-compliance with this License from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. 5.2. If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. 5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. ************************************************************************ * * * 6. Disclaimer of Warranty * * ------------------------- * * * * Covered Software is provided under this License on an "as is" * * basis, without warranty of any kind, either expressed, implied, or * * statutory, including, without limitation, warranties that the * * Covered Software is free of defects, merchantable, fit for a * * particular purpose or non-infringing. The entire risk as to the * * quality and performance of the Covered Software is with You. 
* * Should any Covered Software prove defective in any respect, You * * (not any Contributor) assume the cost of any necessary servicing, * * repair, or correction. This disclaimer of warranty constitutes an * * essential part of this License. No use of any Covered Software is * * authorized under this License except under this disclaimer. * * * ************************************************************************ ************************************************************************ * * * 7. Limitation of Liability * * -------------------------- * * * * Under no circumstances and under no legal theory, whether tort * * (including negligence), contract, or otherwise, shall any * * Contributor, or anyone who distributes Covered Software as * * permitted above, be liable to You for any direct, indirect, * * special, incidental, or consequential damages of any character * * including, without limitation, damages for lost profits, loss of * * goodwill, work stoppage, computer failure or malfunction, or any * * and all other commercial damages or losses, even if such party * * shall have been informed of the possibility of such damages. This * * limitation of liability shall not apply to liability for death or * * personal injury resulting from such party's negligence to the * * extent applicable law prohibits such limitation. Some * * jurisdictions do not allow the exclusion or limitation of * * incidental or consequential damages, so this exclusion and * * limitation may not apply to You. * * * ************************************************************************ 8. Litigation ------------- Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal place of business and such litigation shall be governed by laws of that jurisdiction, without reference to its conflict-of-law provisions. Nothing in this Section shall prevent a party's ability to bring cross-claims or counter-claims. 9. 
Miscellaneous ---------------- This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. 10. Versions of the License --------------------------- 10.1. New Versions Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. 10.2. Effect of New Versions You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. 10.3. Modified Versions If you create software not governed by this License, and you want to create a new license for such software, you may create and use a modified version of this License if you rename the license and remove any references to the name of the license steward (except to note that such modified license differs from this License). 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. Exhibit A - Source Code Form License Notice ------------------------------------------- This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. 
If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. You may add additional accurate notices of copyright ownership. Exhibit B - "Incompatible With Secondary Licenses" Notice --------------------------------------------------------- This Source Code Form is "Incompatible With Secondary Licenses", as defined by the Mozilla Public License, v. 2.0. helix-cbac4273836f5837cec641ab21f365c79b102a4b/README.md000066400000000000000000000052241500614314100210670ustar00rootroot00000000000000

Helix

[![Build status](https://github.com/helix-editor/helix/actions/workflows/build.yml/badge.svg)](https://github.com/helix-editor/helix/actions) [![GitHub Release](https://img.shields.io/github/v/release/helix-editor/helix)](https://github.com/helix-editor/helix/releases/latest) [![Documentation](https://shields.io/badge/-documentation-452859)](https://docs.helix-editor.com/) [![GitHub contributors](https://img.shields.io/github/contributors/helix-editor/helix)](https://github.com/helix-editor/helix/graphs/contributors) [![Matrix Space](https://img.shields.io/matrix/helix-community:matrix.org)](https://matrix.to/#/#helix-community:matrix.org)
![Screenshot](./screenshot.png) A [Kakoune](https://github.com/mawww/kakoune) / [Neovim](https://github.com/neovim/neovim) inspired editor, written in Rust. The editing model is very heavily based on Kakoune; during development I found myself agreeing with most of Kakoune's design decisions. For more information, see the [website](https://helix-editor.com) or [documentation](https://docs.helix-editor.com/). All shortcuts/keymaps can be found [in the documentation on the website](https://docs.helix-editor.com/keymap.html). [Troubleshooting](https://github.com/helix-editor/helix/wiki/Troubleshooting) # Features - Vim-like modal editing - Multiple selections - Built-in language server support - Smart, incremental syntax highlighting and code editing via tree-sitter Although it's primarily a terminal-based editor, I am interested in exploring a custom renderer (similar to Emacs) using wgpu or skulpin. Note: Only certain languages have indentation definitions at the moment. Check `runtime/queries//` for `indents.scm`. # Installation [Installation documentation](https://docs.helix-editor.com/install.html). [![Packaging status](https://repology.org/badge/vertical-allrepos/helix-editor.svg?exclude_unsupported=1)](https://repology.org/project/helix-editor/versions) # Contributing Contributing guidelines can be found [here](./docs/CONTRIBUTING.md). # Getting help Your question might already be answered on the [FAQ](https://github.com/helix-editor/helix/wiki/FAQ). Discuss the project on the community [Matrix Space](https://matrix.to/#/#helix-community:matrix.org) (make sure to join `#helix-editor:matrix.org` if you're on a client that doesn't support Matrix Spaces yet). # Credits Thanks to [@jakenvac](https://github.com/jakenvac) for designing the logo! 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/base16_theme.toml000066400000000000000000000035341500614314100227520ustar00rootroot00000000000000# Author: NNB "ui.menu" = { fg = "black", bg = "white" } "ui.menu.selected" = { modifiers = ["reversed"] } "ui.linenr" = { fg = "gray", bg = "black" } "ui.popup" = { modifiers = ["reversed"] } "ui.linenr.selected" = { fg = "white", bg = "black", modifiers = ["bold"] } "ui.selection" = { fg = "black", bg = "blue" } "ui.selection.primary" = { fg = "white", bg = "blue" } "ui.text.inactive" = { fg = "gray" } "comment" = { fg = "gray" } "ui.statusline" = { fg = "black", bg = "white" } "ui.statusline.inactive" = { fg = "gray", bg = "white" } "ui.cursor" = { fg = "white", modifiers = ["reversed"] } "variable" = "red" "constant.numeric" = "yellow" "constant" = "yellow" "attributes" = "yellow" "type" = "yellow" "ui.cursor.match" = { fg = "yellow", modifiers = ["underlined"] } "string" = "green" "variable.other.member" = "green" "constant.character.escape" = "cyan" "function" = "blue" "constructor" = "blue" "special" = "blue" "keyword" = "magenta" "label" = "magenta" "namespace" = "magenta" "ui.help" = { fg = "white", bg = "black" } "ui.statusline.insert" = { fg = "black", bg = "green" } "ui.statusline.select" = { fg = "black", bg = "blue" } "ui.virtual" = { fg = "gray", modifiers = ["italic"] } "ui.virtual.jump-label" = { fg = "blue", modifiers = ["bold", "underlined"] } "ui.virtual.ruler" = { bg = "black" } "markup.heading" = "blue" "markup.list" = "red" "markup.bold" = { fg = "yellow", modifiers = ["bold"] } "markup.italic" = { fg = "magenta", modifiers = ["italic"] } "markup.link.url" = { fg = "yellow", modifiers = ["underlined"] } "markup.link.text" = "red" "markup.quote" = "cyan" "markup.raw" = "green" "diff.plus" = "green" "diff.delta" = "yellow" "diff.minus" = "red" "diagnostic" = { modifiers = ["underlined"] } "ui.gutter" = { bg = "black" } "info" = "blue" "hint" = "gray" "debug" = "gray" "warning" = "yellow" "error" = "red" 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/000077500000000000000000000000001500614314100205375ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/.gitignore000066400000000000000000000000051500614314100225220ustar00rootroot00000000000000book helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/book.toml000066400000000000000000000006101500614314100223630ustar00rootroot00000000000000[book] authors = ["Blaž Hrastnik"] language = "en" src = "src" [output.html] cname = "docs.helix-editor.com" default-theme = "colibri" preferred-dark-theme = "colibri" git-repository-url = "https://github.com/helix-editor/helix" edit-url-template = "https://github.com/helix-editor/helix/edit/master/book/{path}" additional-css = ["custom.css"] [output.html.search] use-boolean-and = true helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/custom.css000066400000000000000000000077721500614314100226000ustar00rootroot00000000000000html { font-family: "Inter", sans-serif; } .sidebar .sidebar-scrollbox { padding: 0; } .chapter { margin: 0.25rem 0; } .chapter li.chapter-item { line-height: initial; margin: 0; padding: 1rem 1.5rem; } .chapter .section li.chapter-item { line-height: inherit; padding: .5rem .5rem 0 .5rem; } .content { overflow-y: auto; padding: 0 15px; padding-bottom: 50px; } /* 2 1.75 1.5 1.25 1 .875 */ .content h1 { font-size: 2em } .content h2 { font-size: 1.75em } .content h3 { font-size: 1.5em } .content h4 { font-size: 1.25em } .content h5 { font-size: 1em } .content h6 { font-size: .875em } .content h1, .content h2, .content h3, .content h4 { font-weight: 500; margin-top: 1.275em; margin-bottom: .875em; } .content p, .content ol, .content ul, .content table { margin-top: 0; margin-bottom: .875em; } .content ul li { margin-bottom: .25rem; } .content ul { list-style-type: square; } .content ul ul, .content ol ul { margin-bottom: .5rem; } .content li p { margin-bottom: .5em; } blockquote { margin: 1.5rem 0; padding: 1rem 1.5rem; color: 
var(--fg); opacity: .9; background-color: var(--quote-bg); border-left: 4px solid var(--quote-border); border-top: none; border-bottom: none; } blockquote *:last-child { margin-bottom: 0; } table { width: 100%; } table thead th { padding: .75rem; text-align: left; font-weight: 500; line-height: 1.5; width: auto; } table td { padding: .75rem; border: none; } table thead tr { border: none; border-bottom: 2px var(--table-border-color) solid; } table tbody tr { border-bottom: 1px var(--table-border-line) solid; } table tbody tr:nth-child(2n) { background: unset; } pre code.hljs { display: block; overflow-x: auto; padding: 1em; } code.hljs { padding: 3px 5px; } .colibri { --bg: #3b224c; --fg: #bcbdd0; --heading-fg: #fff; --sidebar-bg: #281733; --sidebar-fg: #c8c9db; --sidebar-non-existent: #505274; --sidebar-active: #a4a0e8; --sidebar-spacer: #2d334f; --scrollbar: var(--sidebar-fg); --icons: #737480; --icons-hover: #b7b9cc; /* --links: #a4a0e8; */ --links: #ECCDBA; --inline-code-color: hsl(48.7, 7.8%, 70%); --theme-popup-bg: #161923; --theme-popup-border: #737480; --theme-hover: rgba(0, 0, 0, .2); --quote-bg: #281733; --quote-border: hsl(226, 15%, 22%); --table-border-color: hsl(226, 23%, 76%); --table-header-bg: hsla(226, 23%, 31%, 0); --table-alternate-bg: hsl(226, 23%, 14%); --table-border-line: hsla(201deg, 20%, 92%, 0.2); --searchbar-border-color: #aaa; --searchbar-bg: #aeaec6; --searchbar-fg: #000; --searchbar-shadow-color: #aaa; --searchresults-header-fg: #5f5f71; --searchresults-border-color: #5c5c68; --searchresults-li-bg: #242430; --search-mark-bg: #a2cff5; } .colibri .content .header { color: #fff; } /* highlight.js theme, :where() is used to avoid increasing specificity */ :where(.colibri) .hljs { background: #2f1e2e; color: #a39e9b; } :where(.colibri) .hljs-comment, :where(.colibri) .hljs-quote { color: #8d8687; } :where(.colibri) .hljs-link, :where(.colibri) .hljs-meta, :where(.colibri) .hljs-name, :where(.colibri) .hljs-regexp, :where(.colibri) 
.hljs-selector-class, :where(.colibri) .hljs-selector-id, :where(.colibri) .hljs-tag, :where(.colibri) .hljs-template-variable, :where(.colibri) .hljs-variable { color: #ef6155; } :where(.colibri) .hljs-built_in, :where(.colibri) .hljs-deletion, :where(.colibri) .hljs-literal, :where(.colibri) .hljs-number, :where(.colibri) .hljs-params, :where(.colibri) .hljs-type { color: #f99b15; } :where(.colibri) .hljs-attribute, :where(.colibri) .hljs-section, :where(.colibri) .hljs-title { color: #fec418; } :where(.colibri) .hljs-addition, :where(.colibri) .hljs-bullet, :where(.colibri) .hljs-string, :where(.colibri) .hljs-symbol { color: #48b685; } :where(.colibri) .hljs-keyword, :where(.colibri) .hljs-selector-tag { color: #815ba4; } :where(.colibri) .hljs-emphasis { font-style: italic; } :where(.colibri) .hljs-strong { font-weight: 700; } helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/000077500000000000000000000000001500614314100213265ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/SUMMARY.md000066400000000000000000000020101500614314100227760ustar00rootroot00000000000000# Summary [Helix](./title-page.md) - [Installation](./install.md) - [Package Managers](./package-managers.md) - [Building from source](./building-from-source.md) - [Usage](./usage.md) - [Registers](./registers.md) - [Surround](./surround.md) - [Textobjects](./textobjects.md) - [Syntax aware motions](./syntax-aware-motions.md) - [Pickers](./pickers.md) - [Keymap](./keymap.md) - [Command line](./command-line.md) - [Commands](./commands.md) - [Language support](./lang-support.md) - [Ecosystem](./ecosystem.md) - [Migrating from Vim](./from-vim.md) - [Helix mode in other software](./other-software.md) - [Configuration](./configuration.md) - [Editor](./editor.md) - [Themes](./themes.md) - [Key remapping](./remapping.md) - [Languages](./languages.md) - [Guides](./guides/README.md) - [Adding languages](./guides/adding_languages.md) - [Adding textobject 
queries](./guides/textobject.md) - [Adding indent queries](./guides/indent.md) - [Adding injection queries](./guides/injection.md) helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/building-from-source.md000066400000000000000000000163401500614314100257100ustar00rootroot00000000000000## Building from source - [Configuring Helix's runtime files](#configuring-helixs-runtime-files) - [Linux and macOS](#linux-and-macos) - [Windows](#windows) - [Multiple runtime directories](#multiple-runtime-directories) - [Note to packagers](#note-to-packagers) - [Validating the installation](#validating-the-installation) - [Configure the desktop shortcut](#configure-the-desktop-shortcut) - [Building the Debian package](#building-the-debian-package) Requirements: Clone the Helix GitHub repository into a directory of your choice. The examples in this documentation assume installation into either `~/src/` on Linux and macOS, or `%userprofile%\src\` on Windows. - The [Rust toolchain](https://www.rust-lang.org/tools/install) - The [Git version control system](https://git-scm.com/) - A C++14 compatible compiler to build the tree-sitter grammars, for example GCC or Clang If you are using the `musl-libc` standard library instead of `glibc` the following environment variable must be set during the build to ensure tree-sitter grammars can be loaded correctly: ```sh RUSTFLAGS="-C target-feature=-crt-static" ``` 1. Clone the repository: ```sh git clone https://github.com/helix-editor/helix cd helix ``` 2. Compile from source: ```sh cargo install --path helix-term --locked ``` This command will create the `hx` executable and construct the tree-sitter grammars in the local `runtime` folder. > 💡 If you do not want to fetch or build grammars, set an environment variable `HELIX_DISABLE_AUTO_GRAMMAR_BUILD` > 💡 Tree-sitter grammars can be fetched and compiled if not pre-packaged. Fetch > grammars with `hx --grammar fetch` and compile them with > `hx --grammar build`. 
This will install them in > the `runtime` directory within the user's helix config directory (more > [details below](#multiple-runtime-directories)). ### Configuring Helix's runtime files #### Linux and macOS The **runtime** directory is one below the Helix source, so either export a `HELIX_RUNTIME` environment variable to point to that directory and add it to your `~/.bashrc` or equivalent: ```sh export HELIX_RUNTIME=~/src/helix/runtime ``` Or, create a symbolic link: ```sh ln -Tsf $PWD/runtime ~/.config/helix/runtime ``` #### Windows Either set the `HELIX_RUNTIME` environment variable to point to the runtime files using the Windows setting (search for `Edit environment variables for your account`) or use the `setx` command in Cmd: ```sh setx HELIX_RUNTIME "%userprofile%\src\helix\runtime" ``` > 💡 `%userprofile%` resolves to your user directory like > `C:\Users\Your-Name\` for example. Or, create a symlink in `%appdata%\helix\` that links to the source code directory: | Method | Command | | ---------- | -------------------------------------------------------------------------------------- | | PowerShell | `New-Item -ItemType Junction -Target "runtime" -Path "$Env:AppData\helix\runtime"` | | Cmd | `cd %appdata%\helix`
`mklink /D runtime "%userprofile%\src\helix\runtime"` | > 💡 On Windows, creating a symbolic link may require running PowerShell or > Cmd as an administrator. #### Multiple runtime directories When Helix finds multiple runtime directories it will search through them for files in the following order: 1. `runtime/` sibling directory to `$CARGO_MANIFEST_DIR` directory (this is intended for developing and testing helix only). 2. `runtime/` subdirectory of OS-dependent helix user config directory. 3. `$HELIX_RUNTIME` 4. Distribution-specific fallback directory (set at compile time—not run time— with the `HELIX_DEFAULT_RUNTIME` environment variable) 5. `runtime/` subdirectory of path to Helix executable. This order also sets the priority for selecting which file will be used if multiple runtime directories have files with the same name. #### Note to packagers If you are making a package of Helix for end users, to provide a good out of the box experience, you should set the `HELIX_DEFAULT_RUNTIME` environment variable at build time (before invoking `cargo build`) to a directory which will store the final runtime files after installation. For example, say you want to package the runtime into `/usr/lib/helix/runtime`. The rough steps a build script could follow are: 1. `export HELIX_DEFAULT_RUNTIME=/usr/lib/helix/runtime` 1. `cargo build --profile opt --locked` 1. `cp -r runtime $BUILD_DIR/usr/lib/helix/` 1. `cp target/opt/hx $BUILD_DIR/usr/bin/hx` This way the resulting `hx` binary will always look for its runtime directory in `/usr/lib/helix/runtime` if the user has no custom runtime in `~/.config/helix` or `HELIX_RUNTIME`. ### Validating the installation To make sure everything is set up as expected you should run the Helix health check: ```sh hx --health ``` For more information on the health check results refer to [Health check](https://github.com/helix-editor/helix/wiki/Healthcheck). 
### Configure the desktop shortcut If your desktop environment supports the [XDG desktop menu](https://specifications.freedesktop.org/menu-spec/menu-spec-latest.html) you can configure Helix to show up in the application menu by copying the provided `.desktop` and icon files to their correct folders: ```sh cp contrib/Helix.desktop ~/.local/share/applications cp contrib/helix.png ~/.icons # or ~/.local/share/icons ``` It is recommended to convert the links in the `.desktop` file to absolute paths to avoid potential problems: ```sh sed -i -e "s|Exec=hx %F|Exec=$(readlink -f ~/.cargo/bin/hx) %F|g" \ -e "s|Icon=helix|Icon=$(readlink -f ~/.icons/helix.png)|g" ~/.local/share/applications/Helix.desktop ``` To use another terminal than the system default, you can modify the `.desktop` file. For example, to use `kitty`: ```sh sed -i "s|Exec=hx %F|Exec=kitty hx %F|g" ~/.local/share/applications/Helix.desktop sed -i "s|Terminal=true|Terminal=false|g" ~/.local/share/applications/Helix.desktop ``` ### Building the Debian package If the `.deb` file provided on the release page uses a `libc` version higher than that used by your Debian, Ubuntu, or Mint system, you can build the package from source to match your system's dependencies. Install `cargo-deb`, the tool used for building the `.deb` file: ```sh cargo install cargo-deb ``` After cloning and entering the Helix repository as previously described, use the following command to build the release binary and package it into a `.deb` file in a single step. ```sh cargo deb -- --locked ``` > 💡 This locks you into the `--release` profile. But you can also build helix in any way you like. > As long as you leave a `target/release/hx` file, it will get packaged with `cargo deb --no-build` > 💡 Don't worry about the following: > ``` > warning: Failed to find dependency specification > ``` > Cargo deb just reports which packaged files it didn't derive dependencies for. 
But > so far the dependency deriving seams very good, even if some of the grammar files are skipped. You can find the resulted `.deb` in `target/debian/`. It should contain everything it needs, including the - completions for bash, fish, zsh - .desktop file - icon (though desktop environments might use their own since the name of the package is correctly `helix`) - launcher to the binary with the runtime helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/command-line.md000066400000000000000000000202211500614314100242100ustar00rootroot00000000000000# Command line - [Quoting](#quoting) - [Flags](#flags) - [Expansions](#expansions) - [Exceptions](#exceptions) The command line is used for executing [typable commands](./commands.md#typable-commands) like `:write` or `:quit`. Press `:` to activate the command line. Typable commands optionally accept arguments. `:write` for example accepts an optional path to write the file contents. The command line also supports a quoting syntax for arguments, flags to modify command behaviors, and _expansions_ - a way to insert values from the editor. Most commands support these features but some have custom parsing rules (see the [exceptions](#exceptions) below). ## Quoting By default, command arguments are split on tabs and space characters. `:open README.md CHANGELOG.md` for example should open two files, `README.md` and `CHANGELOG.md`. Arguments that contain spaces can be surrounded in single quotes (`'`) or backticks (`` ` ``) to prevent the space from separating the argument, like `:open 'a b.txt'`. Double quotes may be used the same way, but double quotes _expand_ their inner content. `:echo "%{cursor_line}"` for example may print `1` because of the expansion for the `cursor_line` variable. `:echo '%{cursor_line}'` though prints `%{cursor_line}` literally: content within single quotes or backticks is interpreted as-is. 
On Unix systems the backslash character may be used to escape certain characters depending on where it is used. Within an argument which isn't surround in quotes, the backslash can be used to escape the space or tab characters: `:open a\ b.txt` is equivalent to `:open 'a b.txt'`. The backslash may also be used to escape quote characters (`'`, `` ` ``, `"`) or the percent token (`%`) when used at the beginning of an argument. `:echo \%%sh{foo}` for example prints `%sh{foo}` instead of invoking a `foo` shell command and `:echo \"quote` prints `"quote`. The backslash character is treated literally in any other situation on Unix systems and always on Windows: `:echo \n` always prints `\n`. ## Flags Command flags are optional switches that can be used to alter the behavior of a command. For example the `:sort` command accepts an optional `--reverse` (or `-r` for short) flag which causes the sort command to reverse the sorting direction. Typing the `-` character shows completions for the current command's flags, if any. The `--` flag specifies the end of flags. All arguments after `--` are treated as positional arguments: `:open -- -a.txt` opens a file called `-a.txt`. ## Expansions Expansions are patterns that Helix recognizes and replaces within the command line. Helix recognizes anything starting with a percent token (`%`) as an expansion, for example `%sh{echo hi!}`. Expansions are particularly useful when used in commands like `:echo` or `:noop` for executing simple scripts. For example: ```toml [keys.normal] # Print the current line's git blame information to the statusline. space.B = ":echo %sh{git blame -L %{cursor_line},+1 %{buffer_name}}" ``` Expansions take the form `%[]`. In `%sh{echo hi!}`, for example, the kind is `sh` - the shell expansion - and the contents are "echo hi!", with `{` and `}` acting as opening and closing delimiters. The following open/close characters are recognized as expansion delimiter pairs: `(`/`)`, `[`/`]`, `{`/`}` and `<`/`>`. 
Plus the single characters `'`, `"` or `|` may be used instead: `%{cursor_line}` is equivalent to `%`, `%[cursor_line]` or `%|cursor_line|`. To escape a percent character instead of treating it as an expansion, use two percent characters consecutively. To execute a shell command like `date -u +'%Y-%m-%d'`, double the percent characters: `:echo %sh{date -u +'%%Y-%%m-%%d'}`. When no `` is provided, Helix will expand a **variable**. For example `%{cursor_line}` can be used as in argument to insert the line number. `:echo %{cursor_line}` for instance may print `1` to the statusline. The following variables are supported: | Name | Description | |--- |--- | | `cursor_line` | The line number of the primary cursor in the currently focused document, starting at 1. | | `cursor_column` | The column number of the primary cursor in the currently focused document, starting at 1. This is counted as the number of grapheme clusters from the start of the line rather than bytes or codepoints. | | `buffer_name` | The relative path of the currently focused document. `[scratch]` is expanded instead for scratch buffers. | | `line_ending` | A string containing the line ending of the currently focused document. For example on Unix systems this is usually a line-feed character (`\n`) but on Windows systems this may be a carriage-return plus a line-feed (`\r\n`). The line ending kind of the currently focused document can be inspected with the `:line-ending` command. | | `language` | A string containing the language name of the currently focused document.| | `selection` | A string containing the contents of the primary selection of the currently focused document. | Aside from editor variables, the following expansions may be used: * Unicode `%u{..}`. The contents may contain up to six hexadecimal numbers corresponding to a Unicode codepoint value. For example `:echo %u{25CF}` prints `●` to the statusline. * Shell `%sh{..}`. The contents are passed to the configured shell command. 
For example `:echo %sh{echo "20 * 5" | bc}` may print `100` on the statusline on when using a shell with `echo` and the `bc` calculator installed. Shell expansions are evaluated recursively. `%sh{echo '%{buffer_name}:%{cursor_line}'}` for example executes a command like `echo 'README.md:1'`: the variables within the `%sh{..}` expansion are evaluated before executing the shell command. As mentioned above, double quotes can be used to surround arguments containing spaces but also support expansions within the quoted content unlike singe quotes or backticks. For example `:echo "circle: %u{25CF}"` prints `circle: ●` to the statusline while `:echo 'circle: %u{25CF}'` prints `circle: %u{25CF}`. Note that expansions are only evaluated once the Enter key is pressed in command mode. ## Exceptions The following commands support expansions but otherwise pass the given argument directly to the shell program without interpreting quotes: * `:insert-output` * `:append-output` * `:pipe` * `:pipe-to` * `:run-shell-command` For example executing `:sh echo "%{buffer_name}:%{cursor_column}"` would pass text like `echo "README.md:1"` as an argument to the shell program: the expansions are evaluated but not the quotes. As mentioned above, percent characters can be used in shell commands by doubling the percent character. To insert the output of a command like `date -u +'%Y-%m-%d'` use `:insert-output date -u +'%%Y-%%m-%%d'`. The `:set-option` and `:toggle-option` commands use regular parsing for the first argument - the config option name - and parse the rest depending on the config option's type. `:set-option` interprets the second argument as a string for string config options and parses everything else as JSON. `:toggle-option`'s behavior depends on the JSON type of the config option supplied as the first argument: * Booleans: only the config option name should be provided. For example `:toggle-option auto-format` will flip the `auto-format` option. 
* Strings: the rest of the command line is parsed with regular quoting rules. For example `:toggle-option indent-heuristic hybrid tree-sitter simple` cycles through "hybrid", "tree-sitter" and "simple" values on each invocation of the command. * Numbers, arrays and objects: the rest of the command line is parsed as a stream of JSON values. For example `:toggle-option rulers [81] [51, 73]` cycles through `[81]` and `[51, 73]`. When providing multiple values to `:toggle-option` there should be no duplicates. `:toggle-option indent-heuristic hybrid simple tree-sitter simple` for example would only toggle between "hybrid" and "tree-sitter" values. `:lsp-workspace-command` works similarly to `:toggle-option`. The first argument (if present) is parsed according to normal rules. The rest of the line is parsed as JSON values. Unlike `:toggle-option`, string arguments for a command must be quoted. For example `:lsp-workspace-command lsp.Command "foo" "bar"`. helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/commands.md000066400000000000000000000010251500614314100234470ustar00rootroot00000000000000# Commands - [Typable commands](#typable-commands) - [Static commands](#static-commands) ## Typable commands Typable commands are used from command mode and may take arguments. Command mode can be activated by pressing `:`. The built-in typable commands are: {{#include ./generated/typable-cmd.md}} ## Static Commands Static commands take no arguments and can be bound to keys. Static commands can also be executed from the command picker (`?`). 
The built-in static commands are: {{#include ./generated/static-cmd.md}} helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/configuration.md000066400000000000000000000021521500614314100245170ustar00rootroot00000000000000# Configuration To override global configuration parameters, create a `config.toml` file located in your config directory: - Linux and Mac: `~/.config/helix/config.toml` - Windows: `%AppData%\helix\config.toml` > 💡 You can easily open the config file by typing `:config-open` within Helix normal mode. Example config: ```toml theme = "onedark" [editor] line-number = "relative" mouse = false [editor.cursor-shape] insert = "bar" normal = "block" select = "underline" [editor.file-picker] hidden = false ``` You can use a custom configuration file by specifying it with the `-c` or `--config` command line argument, for example `hx -c path/to/custom-config.toml`. You can reload the config file by issuing the `:config-reload` command. Alternatively, on Unix operating systems, you can reload it by sending the USR1 signal to the Helix process, such as by using the command `pkill -USR1 hx`. Finally, you can have a `config.toml` local to a project by putting it under a `.helix` directory in your repository. Its settings will be merged with the configuration directory `config.toml` and the built-in configuration. helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/ecosystem.md000066400000000000000000000001211500614314100236550ustar00rootroot00000000000000# Ecosystem This section has information related to the wider Helix ecosystem. 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/editor.md000066400000000000000000000566561500614314100231600ustar00rootroot00000000000000## Editor - [`[editor]` Section](#editor-section) - [`[editor.clipboard-provider]` Section](#editorclipboard-provider-section) - [`[editor.statusline]` Section](#editorstatusline-section) - [`[editor.lsp]` Section](#editorlsp-section) - [`[editor.cursor-shape]` Section](#editorcursor-shape-section) - [`[editor.file-picker]` Section](#editorfile-picker-section) - [`[editor.auto-pairs]` Section](#editorauto-pairs-section) - [`[editor.auto-save]` Section](#editorauto-save-section) - [`[editor.search]` Section](#editorsearch-section) - [`[editor.whitespace]` Section](#editorwhitespace-section) - [`[editor.indent-guides]` Section](#editorindent-guides-section) - [`[editor.gutters]` Section](#editorgutters-section) - [`[editor.gutters.line-numbers]` Section](#editorguttersline-numbers-section) - [`[editor.gutters.diagnostics]` Section](#editorguttersdiagnostics-section) - [`[editor.gutters.diff]` Section](#editorguttersdiff-section) - [`[editor.gutters.spacer]` Section](#editorguttersspacer-section) - [`[editor.soft-wrap]` Section](#editorsoft-wrap-section) - [`[editor.smart-tab]` Section](#editorsmart-tab-section) - [`[editor.inline-diagnostics]` Section](#editorinline-diagnostics-section) ### `[editor]` Section | Key | Description | Default | |--|--|---------| | `scrolloff` | Number of lines of padding around the edge of the screen when scrolling | `5` | | `mouse` | Enable mouse mode | `true` | | `default-yank-register` | Default register used for yank/paste | `"` | | `middle-click-paste` | Middle click paste support | `true` | | `scroll-lines` | Number of lines to scroll per scroll wheel step | `3` | | `shell` | Shell to use when running external commands | Unix: `["sh", "-c"]`
Windows: `["cmd", "/C"]` | | `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers | `absolute` | | `cursorline` | Highlight all lines with a cursor | `false` | | `cursorcolumn` | Highlight all columns with a cursor | `false` | | `continue-comments` | if helix should automatically add a line comment token if you create a new line inside a comment. | `true` | | `gutters` | Gutters to display: Available are `diagnostics` and `diff` and `line-numbers` and `spacer`, note that `diagnostics` also includes other features like breakpoints, 1-width padding will be inserted if gutters is non-empty | `["diagnostics", "spacer", "line-numbers", "spacer", "diff"]` | | `auto-completion` | Enable automatic pop up of auto-completion | `true` | | `path-completion` | Enable filepath completion. Show files and directories if an existing path at the cursor was recognized, either absolute or relative to the current opened document or current working directory (if the buffer is not yet saved). Defaults to true. | `true` | | `auto-format` | Enable automatic formatting on save | `true` | | `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. | `250` | | `completion-timeout` | Time in milliseconds after typing a word character before completions are shown, set to 5 for instant. 
| `250` | | `preview-completion-insert` | Whether to apply completion item instantly when selected | `true` | | `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` | | `completion-replace` | Set to `true` to make completions always replace the entire word and not just the part before the cursor | `false` | | `auto-info` | Whether to display info boxes | `true` | | `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative | `false` | | `undercurl` | Set to `true` to override automatic detection of terminal undercurl support in the event of a false negative | `false` | | `rulers` | List of column positions at which to display the rulers. Can be overridden by language specific `rulers` in `languages.toml` file | `[]` | | `bufferline` | Renders a line at the top of the editor displaying open buffers. Can be `always`, `never` or `multiple` (only shown if more than one buffer is in use) | `never` | | `color-modes` | Whether to color the mode indicator with different colors depending on the mode itself | `false` | | `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap-at-text-width` is set | `80` | | `workspace-lsp-roots` | Directories relative to the workspace root that are treated as LSP roots. Should only be set in `.helix/config.toml` | `[]` | | `default-line-ending` | The line ending to use for new documents. Can be `native`, `lf`, `crlf`, `ff`, `cr` or `nel`. `native` uses the platform's native line ending (`crlf` on Windows, otherwise `lf`). 
| `native` | | `insert-final-newline` | Whether to automatically insert a trailing line-ending on write if missing | `true` | | `trim-final-newlines` | Whether to automatically remove line-endings after the final one on write | `false` | | `trim-trailing-whitespace` | Whether to automatically remove whitespace preceding line endings on write | `false` | | `popup-border` | Draw border around `popup`, `menu`, `all`, or `none` | `none` | | `indent-heuristic` | How the indentation for a newly inserted line is computed: `simple` just copies the indentation level from the previous line, `tree-sitter` computes the indentation based on the syntax tree and `hybrid` combines both approaches. If the chosen heuristic is not available, a different one will be used as a fallback (the fallback order being `hybrid` -> `tree-sitter` -> `simple`). | `hybrid` | `jump-label-alphabet` | The characters that are used to generate two character jump labels. Characters at the start of the alphabet are used first. | `"abcdefghijklmnopqrstuvwxyz"` | `end-of-line-diagnostics` | Minimum severity of diagnostics to render at the end of the line. Set to `disable` to disable entirely. Refer to the setting about `inline-diagnostics` for more details | "disable" | `clipboard-provider` | Which API to use for clipboard interaction. One of `pasteboard` (MacOS), `wayland`, `x-clip`, `x-sel`, `win-32-yank`, `termux`, `tmux`, `windows`, `termcode`, `none`, or a custom command set. | Platform and environment specific. | | `editor-config` | Whether to read settings from [EditorConfig](https://editorconfig.org) files | `true` | ### `[editor.clipboard-provider]` Section Helix can be configured either to use a builtin clipboard configuration or to use a provided command. 
For instance, setting it to use OSC 52 termcodes, the configuration would be: ```toml [editor] clipboard-provider = "termcode" ``` Alternatively, Helix can be configured to use arbitrary commands for clipboard integration: ```toml [editor.clipboard-provider.custom] yank = { command = "cat", args = ["test.txt"] } paste = { command = "tee", args = ["test.txt"] } primary-yank = { command = "cat", args = ["test-primary.txt"] } # optional primary-paste = { command = "tee", args = ["test-primary.txt"] } # optional ``` For custom commands the contents of the yank/paste is communicated over stdin/stdout. ### `[editor.statusline]` Section Allows configuring the statusline at the bottom of the editor. The configuration distinguishes between three areas of the status line: `[ ... ... LEFT ... ... | ... ... ... CENTER ... ... ... | ... ... RIGHT ... ... ]` Statusline elements can be defined as follows: ```toml [editor.statusline] left = ["mode", "spinner"] center = ["file-name"] right = ["diagnostics", "selections", "position", "file-encoding", "file-line-ending", "file-type"] separator = "│" mode.normal = "NORMAL" mode.insert = "INSERT" mode.select = "SELECT" diagnostics = ["warning", "error"] workspace-diagnostics = ["warning", "error"] ``` The `[editor.statusline]` key takes the following sub-keys: | Key | Description | Default | | --- | --- | --- | | `left` | A list of elements aligned to the left of the statusline | `["mode", "spinner", "file-name", "read-only-indicator", "file-modification-indicator"]` | | `center` | A list of elements aligned to the middle of the statusline | `[]` | | `right` | A list of elements aligned to the right of the statusline | `["diagnostics", "selections", "register", "position", "file-encoding"]` | | `separator` | The character used to separate elements in the statusline | `"│"` | | `mode.normal` | The text shown in the `mode` element for normal mode | `"NOR"` | | `mode.insert` | The text shown in the `mode` element for insert mode | `"INS"` 
| | `mode.select` | The text shown in the `mode` element for select mode | `"SEL"` | | `diagnostics` | A list of severities which are displayed for the current buffer | `["warning", "error"]` | | `workspace-diagnostics` | A list of severities which are displayed for the workspace | `["warning", "error"]` | The following statusline elements can be configured: | Key | Description | | ------ | ----------- | | `mode` | The current editor mode (`mode.normal`/`mode.insert`/`mode.select`) | | `spinner` | A progress spinner indicating LSP activity | | `file-name` | The path/name of the opened file | | `file-absolute-path` | The absolute path/name of the opened file | | `file-base-name` | The basename of the opened file | | `file-modification-indicator` | The indicator to show whether the file is modified (a `[+]` appears when there are unsaved changes) | | `file-encoding` | The encoding of the opened file if it differs from UTF-8 | | `file-line-ending` | The file line endings (CRLF or LF) | | `read-only-indicator` | An indicator that shows `[readonly]` when a file cannot be written | | `total-line-numbers` | The total line numbers of the opened file | | `file-type` | The type of the opened file | | `diagnostics` | The number of warnings and/or errors | | `workspace-diagnostics` | The number of warnings and/or errors on workspace | | `selections` | The number of active selections | | `primary-selection-length` | The number of characters currently in primary selection | | `position` | The cursor position | | `position-percentage` | The cursor position as a percentage of the total number of lines | | `separator` | The string defined in `editor.statusline.separator` (defaults to `"│"`) | | `spacer` | Inserts a space between elements (multiple/contiguous spacers may be specified) | | `version-control` | The current branch name or detached commit hash of the opened workspace | | `register` | The current selected register | ### `[editor.lsp]` Section | Key | Description | Default 
| | --- | ----------- | ------- | | `enable` | Enables LSP integration. Setting to false will completely disable language servers regardless of language settings.| `true` | | `display-messages` | Display LSP `window/showMessage` messages below statusline[^1] | `true` | | `display-progress-messages` | Display LSP progress messages below statusline[^1] | `false` | | `auto-signature-help` | Enable automatic popup of signature help (parameter hints) | `true` | | `display-inlay-hints` | Display inlay hints[^2] | `false` | | `display-color-swatches` | Show color swatches next to colors | `true` | | `display-signature-help-docs` | Display docs under signature help popup | `true` | | `snippets` | Enables snippet completions. Requires a server restart (`:lsp-restart`) to take effect after `:config-reload`/`:set`. | `true` | | `goto-reference-include-declaration` | Include declaration in the goto references popup. | `true` | [^1]: By default, a progress spinner is shown in the statusline beside the file path. [^2]: You may also have to activate them in the language server config for them to appear, not just in Helix. Inlay hints in Helix are still being improved on and may be a little bit laggy/janky under some circumstances. Please report any bugs you see so we can fix them! ### `[editor.cursor-shape]` Section Defines the shape of cursor in each mode. Valid values for these options are `block`, `bar`, `underline`, or `hidden`. > 💡 Due to limitations of the terminal environment, only the primary cursor can > change shape. 
| Key | Description | Default | | --- | ----------- | ------- | | `normal` | Cursor shape in [normal mode][normal mode] | `block` | | `insert` | Cursor shape in [insert mode][insert mode] | `block` | | `select` | Cursor shape in [select mode][select mode] | `block` | [normal mode]: ./keymap.md#normal-mode [insert mode]: ./keymap.md#insert-mode [select mode]: ./keymap.md#select--extend-mode ### `[editor.file-picker]` Section Set options for file picker and global search. Ignoring a file means it is not visible in the Helix file picker and global search. All git related options are only enabled in a git repository. | Key | Description | Default | |--|--|---------| |`hidden` | Enables ignoring hidden files | `true` |`follow-symlinks` | Follow symlinks instead of ignoring them | `true` |`deduplicate-links` | Ignore symlinks that point at files already shown in the picker | `true` |`parents` | Enables reading ignore files from parent directories | `true` |`ignore` | Enables reading `.ignore` files | `true` |`git-ignore` | Enables reading `.gitignore` files | `true` |`git-global` | Enables reading global `.gitignore`, whose path is specified in git's config: `core.excludesfile` option | `true` |`git-exclude` | Enables reading `.git/info/exclude` files | `true` |`max-depth` | Set with an integer value for maximum depth to recurse | Unset by default Ignore files can be placed locally as `.ignore` or put in your home directory as `~/.ignore`. They support the usual ignore and negative ignore (unignore) rules used in `.gitignore` files. 
Additionally, you can use Helix-specific ignore files by creating a local `.helix/ignore` file in the current workspace or a global `ignore` file located in your Helix config directory: - Linux and Mac: `~/.config/helix/ignore` - Windows: `%AppData%\helix\ignore` Example: ```ini # unignore in file picker and global search !.github/ !.gitignore !.gitattributes ``` ### `[editor.auto-pairs]` Section Enables automatic insertion of pairs to parentheses, brackets, etc. Can be a simple boolean value, or a specific mapping of pairs of single characters. To disable auto-pairs altogether, set `auto-pairs` to `false`: ```toml [editor] auto-pairs = false # defaults to `true` ``` The default pairs are (){}[]''""``, but these can be customized by setting `auto-pairs` to a TOML table: ```toml [editor.auto-pairs] '(' = ')' '{' = '}' '[' = ']' '"' = '"' '`' = '`' '<' = '>' ``` Additionally, this setting can be used in a language config. Unless the editor setting is `false`, this will override the editor config in documents with this language. Example `languages.toml` that adds `<>` and removes `''` ```toml [[language]] name = "rust" [language.auto-pairs] '(' = ')' '{' = '}' '[' = ']' '"' = '"' '`' = '`' '<' = '>' ``` ### `[editor.auto-save]` Section Control auto save behavior. | Key | Description | Default | |--|--|---------| | `focus-lost` | Enable automatic saving on the focus moving away from Helix. Requires [focus event support](https://github.com/helix-editor/helix/wiki/Terminal-Support) from your terminal | `false` | | `after-delay.enable` | Enable automatic saving after `auto-save.after-delay.timeout` milliseconds have passed since last edit. | `false` | | `after-delay.timeout` | Time in milliseconds since last edit before auto save timer triggers. | `3000` | ### `[editor.search]` Section Search specific options. 
| Key | Description | Default | |--|--|---------| | `smart-case` | Enable smart case regex searching (case-insensitive unless pattern contains upper case characters) | `true` | | `wrap-around`| Whether the search should wrap after depleting the matches | `true` | ### `[editor.whitespace]` Section Options for rendering whitespace with visible characters. Use `:set whitespace.render all` to temporarily enable visible whitespace. | Key | Description | Default | |-----|-------------|---------| | `render` | Whether to render whitespace. May either be `all` or `none`, or a table with sub-keys `space`, `nbsp`, `nnbsp`, `tab`, and `newline` | `none` | | `characters` | Literal characters to use when rendering whitespace. Sub-keys may be any of `tab`, `space`, `nbsp`, `nnbsp`, `newline` or `tabpad` | See example below | Example ```toml [editor.whitespace] render = "all" # or control each character [editor.whitespace.render] space = "all" tab = "all" nbsp = "none" nnbsp = "none" newline = "none" [editor.whitespace.characters] space = "·" nbsp = "⍽" nnbsp = "␣" tab = "→" newline = "⏎" tabpad = "·" # Tabs will look like "→···" (depending on tab width) ``` ### `[editor.indent-guides]` Section Options for rendering vertical indent guides. | Key | Description | Default | | --- | --- | --- | | `render` | Whether to render indent guides | `false` | | `character` | Literal character to use for rendering the indent guide | `│` | | `skip-levels` | Number of indent levels to skip | `0` | Example: ```toml [editor.indent-guides] render = true character = "╎" # Some characters that work well: "▏", "┆", "┊", "⸽" skip-levels = 1 ``` ### `[editor.gutters]` Section For simplicity, `editor.gutters` accepts an array of gutter types, which will use default settings for all gutter components. ```toml [editor] gutters = ["diff", "diagnostics", "line-numbers", "spacer"] ``` To customize the behavior of gutters, the `[editor.gutters]` section must be used. 
This section contains top level settings, as well as settings for specific gutter components as subsections. | Key | Description | Default | | --- | --- | --- | | `layout` | A vector of gutters to display | `["diagnostics", "spacer", "line-numbers", "spacer", "diff"]` | Example: ```toml [editor.gutters] layout = ["diff", "diagnostics", "line-numbers", "spacer"] ``` #### `[editor.gutters.line-numbers]` Section Options for the line number gutter | Key | Description | Default | | --- | --- | --- | | `min-width` | The minimum number of characters to use | `3` | Example: ```toml [editor.gutters.line-numbers] min-width = 1 ``` #### `[editor.gutters.diagnostics]` Section Currently unused #### `[editor.gutters.diff]` Section The `diff` gutter option displays colored bars indicating whether a `git` diff represents that a line was added, removed or changed. These colors are controlled by the theme attributes `diff.plus`, `diff.minus` and `diff.delta`. Other diff providers will eventually be supported by a future plugin system. There are currently no options for this section. #### `[editor.gutters.spacer]` Section Currently unused ### `[editor.soft-wrap]` Section Options for soft wrapping lines that exceed the view width: | Key | Description | Default | | --- | --- | --- | | `enable` | Whether soft wrapping is enabled. | `false` | | `max-wrap` | Maximum free space left at the end of the line. | `20` | | `max-indent-retain` | Maximum indentation to carry over when soft wrapping a line. | `40` | | `wrap-indicator` | Text inserted before soft wrapped lines, highlighted with `ui.virtual.wrap` | `↪ ` | | `wrap-at-text-width` | Soft wrap at `text-width` instead of using the full viewport size. 
| `false` | Example: ```toml [editor.soft-wrap] enable = true max-wrap = 25 # increase value to reduce forced mid-word wrapping max-indent-retain = 0 wrap-indicator = "" # set wrap-indicator to "" to hide it ``` ### `[editor.smart-tab]` Section Options for navigating and editing using tab key. | Key | Description | Default | |------------|-------------|---------| | `enable` | If set to true, then when the cursor is in a position with non-whitespace to its left, instead of inserting a tab, it will run `move_parent_node_end`. If there is only whitespace to the left, then it inserts a tab as normal. With the default bindings, to explicitly insert a tab character, press Shift-tab. | `true` | | `supersede-menu` | Normally, when a menu is on screen, such as when auto complete is triggered, the tab key is bound to cycling through the items. This means when menus are on screen, one cannot use the tab key to trigger the `smart-tab` command. If this option is set to true, the `smart-tab` command always takes precedence, which means one cannot use the tab key to cycle through menu items. One of the other bindings must be used instead, such as arrow keys or `C-n`/`C-p`. | `false` | Due to lack of support for S-tab in some terminals, the default keybindings don't fully embrace smart-tab editing experience. 
If you enjoy smart-tab navigation and a terminal that supports the [Enhanced Keyboard protocol](https://github.com/helix-editor/helix/wiki/Terminal-Support#enhanced-keyboard-protocol), consider setting extra keybindings: ``` [keys.normal] tab = "move_parent_node_end" S-tab = "move_parent_node_start" [keys.insert] S-tab = "move_parent_node_start" [keys.select] tab = "extend_parent_node_end" S-tab = "extend_parent_node_start" ``` ### `[editor.inline-diagnostics]` Section Options for rendering diagnostics inside the text like shown below ``` fn main() { let foo = bar; └─ no such value in this scope } ```` | Key | Description | Default | |------------|-------------|---------| | `cursor-line` | The minimum severity that a diagnostic must have to be shown inline on the line that contains the primary cursor. Set to `disable` to not show any diagnostics inline. This option does not have any effect when in insert-mode and will only take effect 350ms after moving the cursor to a different line. | `"disable"` | | `other-lines` | The minimum severity that a diagnostic must have to be shown inline on a line that does not contain the cursor-line. Set to `disable` to not show any diagnostics inline. | `"disable"` | | `prefix-len` | How many horizontal bars `─` are rendered before the diagnostic text. | `1` | | `max-wrap` | Equivalent of the `editor.soft-wrap.max-wrap` option for diagnostics. | `20` | | `max-diagnostics` | Maximum number of diagnostics to render inline for a given line | `10` | The allowed values for `cursor-line` and `other-lines` are: `error`, `warning`, `info`, `hint`. The (first) diagnostic with the highest severity that is not shown inline is rendered at the end of the line (as long as its severity is higher than the `end-of-line-diagnostics` config option): ``` fn main() { let baz = 1; let foo = bar; a local variable with a similar name exists: baz └─ no such value in this scope } ``` The new diagnostic rendering is not yet enabled by default. 
As soon as end of line or inline diagnostics are enabled the old diagnostics rendering is automatically disabled. The recommended default setting are: ```toml [editor] end-of-line-diagnostics = "hint" [editor.inline-diagnostics] cursor-line = "warning" # show warnings and errors on the cursorline inline ``` helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/from-vim.md000066400000000000000000000012521500614314100234040ustar00rootroot00000000000000# Migrating from Vim Helix's editing model is strongly inspired from Vim and Kakoune, and a notable difference from Vim (and the most striking similarity to Kakoune) is that Helix follows the `selection → action` model. This means that whatever you are going to act on (a word, a paragraph, a line, etc.) is selected first and the action itself (delete, change, yank, etc.) comes second. A cursor is simply a single width selection. See also Kakoune's [Migrating from Vim](https://github.com/mawww/kakoune/wiki/Migrating-from-Vim) and Helix's [Migrating from Vim](https://github.com/helix-editor/helix/wiki/Migrating-from-Vim). 
> TODO: Mention textobjects, surround, registers helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/generated/000077500000000000000000000000001500614314100232645ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/generated/lang-support.md000066400000000000000000000230421500614314100262420ustar00rootroot00000000000000| Language | Syntax Highlighting | Treesitter Textobjects | Auto Indent | Default language servers | | --- | --- | --- | --- | --- | | ada | ✓ | ✓ | | `ada_language_server` | | adl | ✓ | ✓ | ✓ | | | agda | ✓ | | | | | amber | ✓ | | | | | astro | ✓ | | | `astro-ls` | | awk | ✓ | ✓ | | `awk-language-server` | | bash | ✓ | ✓ | ✓ | `bash-language-server` | | bass | ✓ | | | `bass` | | beancount | ✓ | | | `beancount-language-server` | | bibtex | ✓ | | | `texlab` | | bicep | ✓ | | | `bicep-langserver` | | bitbake | ✓ | | | `bitbake-language-server` | | blade | ✓ | | | | | blueprint | ✓ | | | `blueprint-compiler` | | c | ✓ | ✓ | ✓ | `clangd` | | c-sharp | ✓ | ✓ | | `OmniSharp` | | cabal | | | | `haskell-language-server-wrapper` | | cairo | ✓ | ✓ | ✓ | `cairo-language-server` | | capnp | ✓ | | ✓ | | | cel | ✓ | | | | | circom | ✓ | | | `circom-lsp` | | clojure | ✓ | | | `clojure-lsp` | | cmake | ✓ | ✓ | ✓ | `cmake-language-server` | | codeql | ✓ | ✓ | | `codeql` | | comment | ✓ | | | | | common-lisp | ✓ | | ✓ | `cl-lsp` | | cpon | ✓ | | ✓ | | | cpp | ✓ | ✓ | ✓ | `clangd` | | crystal | ✓ | ✓ | | `crystalline` | | css | ✓ | | ✓ | `vscode-css-language-server` | | csv | ✓ | | | | | cue | ✓ | | | `cuelsp` | | cylc | ✓ | ✓ | ✓ | | | d | ✓ | ✓ | ✓ | `serve-d` | | dart | ✓ | ✓ | ✓ | `dart` | | dbml | ✓ | | | | | debian | ✓ | | | | | devicetree | ✓ | | | | | dhall | ✓ | ✓ | | `dhall-lsp-server` | | diff | ✓ | | | | | djot | ✓ | | | | | docker-compose | ✓ | ✓ | ✓ | `docker-compose-langserver`, `yaml-language-server` | | dockerfile | ✓ | ✓ | | `docker-langserver` | | dot | ✓ | | | `dot-language-server` | | dtd | ✓ | | | | | dune | ✓ | 
| | | | dunstrc | ✓ | | | | | earthfile | ✓ | ✓ | ✓ | `earthlyls` | | edoc | ✓ | | | | | eex | ✓ | | | | | ejs | ✓ | | | | | elisp | ✓ | | | | | elixir | ✓ | ✓ | ✓ | `elixir-ls` | | elm | ✓ | ✓ | | `elm-language-server` | | elvish | ✓ | | | `elvish` | | env | ✓ | ✓ | | | | erb | ✓ | | | | | erlang | ✓ | ✓ | | `erlang_ls`, `elp` | | esdl | ✓ | | | | | fennel | ✓ | | | `fennel-ls` | | fga | ✓ | ✓ | ✓ | | | fidl | ✓ | | | | | fish | ✓ | ✓ | ✓ | `fish-lsp` | | forth | ✓ | | | `forth-lsp` | | fortran | ✓ | | ✓ | `fortls` | | fsharp | ✓ | | | `fsautocomplete` | | gas | ✓ | ✓ | | `asm-lsp` | | gdscript | ✓ | ✓ | ✓ | | | gemini | ✓ | | | | | gherkin | ✓ | | | | | ghostty | ✓ | | | | | git-attributes | ✓ | | | | | git-commit | ✓ | ✓ | | | | git-config | ✓ | ✓ | | | | git-ignore | ✓ | | | | | git-rebase | ✓ | | | | | gjs | ✓ | ✓ | ✓ | `typescript-language-server`, `vscode-eslint-language-server`, `ember-language-server` | | gleam | ✓ | ✓ | | `gleam` | | glimmer | ✓ | | | `ember-language-server` | | glsl | ✓ | ✓ | ✓ | `glsl_analyzer` | | gn | ✓ | | | | | go | ✓ | ✓ | ✓ | `gopls`, `golangci-lint-langserver` | | godot-resource | ✓ | ✓ | | | | gomod | ✓ | | | `gopls` | | gotmpl | ✓ | | | `gopls` | | gowork | ✓ | | | `gopls` | | gpr | ✓ | | | `ada_language_server` | | graphql | ✓ | ✓ | | `graphql-lsp` | | gren | ✓ | ✓ | | | | groovy | ✓ | | | | | gts | ✓ | ✓ | ✓ | `typescript-language-server`, `vscode-eslint-language-server`, `ember-language-server` | | hare | ✓ | | | | | haskell | ✓ | ✓ | | `haskell-language-server-wrapper` | | haskell-persistent | ✓ | | | | | hcl | ✓ | ✓ | ✓ | `terraform-ls` | | heex | ✓ | ✓ | | `elixir-ls` | | helm | ✓ | | | `helm_ls` | | hocon | ✓ | ✓ | ✓ | | | hoon | ✓ | | | | | hosts | ✓ | | | | | html | ✓ | | | `vscode-html-language-server`, `superhtml` | | hurl | ✓ | ✓ | ✓ | | | hyprlang | ✓ | | ✓ | `hyprls` | | idris | | | | `idris2-lsp` | | iex | ✓ | | | | | ini | ✓ | | | | | ink | ✓ | | | | | inko | ✓ | ✓ | ✓ | | | janet | ✓ | | | | | java | ✓ | ✓ | ✓ 
| `jdtls` | | javascript | ✓ | ✓ | ✓ | `typescript-language-server` | | jinja | ✓ | | | | | jjdescription | ✓ | | | | | jq | ✓ | ✓ | | `jq-lsp` | | jsdoc | ✓ | | | | | json | ✓ | ✓ | ✓ | `vscode-json-language-server` | | json5 | ✓ | | | | | jsonc | ✓ | | ✓ | `vscode-json-language-server` | | jsonnet | ✓ | | | `jsonnet-language-server` | | jsx | ✓ | ✓ | ✓ | `typescript-language-server` | | julia | ✓ | ✓ | ✓ | `julia` | | just | ✓ | ✓ | ✓ | `just-lsp` | | kdl | ✓ | ✓ | ✓ | | | koka | ✓ | | ✓ | `koka` | | kotlin | ✓ | ✓ | ✓ | `kotlin-language-server` | | koto | ✓ | ✓ | ✓ | `koto-ls` | | latex | ✓ | ✓ | | `texlab` | | ld | ✓ | | ✓ | | | ldif | ✓ | | | | | lean | ✓ | | | `lean` | | ledger | ✓ | | | | | llvm | ✓ | ✓ | ✓ | | | llvm-mir | ✓ | ✓ | ✓ | | | llvm-mir-yaml | ✓ | | ✓ | | | log | ✓ | | | | | lpf | ✓ | | | | | lua | ✓ | ✓ | ✓ | `lua-language-server` | | mail | ✓ | ✓ | | | | make | ✓ | | ✓ | | | markdoc | ✓ | | | `markdoc-ls` | | markdown | ✓ | | | `marksman`, `markdown-oxide` | | markdown.inline | ✓ | | | | | matlab | ✓ | ✓ | ✓ | | | mermaid | ✓ | | | | | meson | ✓ | | ✓ | `mesonlsp` | | mint | | | | `mint` | | mojo | ✓ | ✓ | ✓ | `magic` | | move | ✓ | | | | | msbuild | ✓ | | ✓ | | | nasm | ✓ | ✓ | | `asm-lsp` | | nestedtext | ✓ | ✓ | ✓ | | | nginx | ✓ | | | | | nickel | ✓ | | ✓ | `nls` | | nim | ✓ | ✓ | ✓ | `nimlangserver` | | nix | ✓ | ✓ | ✓ | `nil`, `nixd` | | nu | ✓ | | | `nu` | | nunjucks | ✓ | | | | | ocaml | ✓ | | ✓ | `ocamllsp` | | ocaml-interface | ✓ | | | `ocamllsp` | | odin | ✓ | ✓ | ✓ | `ols` | | ohm | ✓ | ✓ | ✓ | | | opencl | ✓ | ✓ | ✓ | `clangd` | | openscad | ✓ | | | `openscad-lsp` | | org | ✓ | | | | | pascal | ✓ | ✓ | | `pasls` | | passwd | ✓ | | | | | pem | ✓ | | | | | perl | ✓ | ✓ | ✓ | `perlnavigator` | | pest | ✓ | ✓ | ✓ | `pest-language-server` | | php | ✓ | ✓ | ✓ | `intelephense` | | php-only | ✓ | | | | | pkgbuild | ✓ | ✓ | ✓ | `termux-language-server`, `bash-language-server` | | pkl | ✓ | | ✓ | `pkl-lsp` | | po | ✓ | ✓ | | | | pod | ✓ | | 
| | | ponylang | ✓ | ✓ | ✓ | | | powershell | ✓ | | | | | prisma | ✓ | ✓ | | `prisma-language-server` | | prolog | ✓ | | ✓ | `swipl` | | protobuf | ✓ | ✓ | ✓ | `buf`, `pb`, `protols` | | prql | ✓ | | | | | pug | ✓ | | | | | purescript | ✓ | ✓ | | `purescript-language-server` | | python | ✓ | ✓ | ✓ | `ruff`, `jedi-language-server`, `pylsp` | | qml | ✓ | | ✓ | `qmlls` | | quarto | ✓ | | ✓ | | | quint | ✓ | | | `quint-language-server` | | r | ✓ | | | `R` | | racket | ✓ | | ✓ | `racket` | | regex | ✓ | | | | | rego | ✓ | | | `regols` | | rescript | ✓ | ✓ | | `rescript-language-server` | | rmarkdown | ✓ | | ✓ | `R` | | robot | ✓ | | | `robotframework_ls` | | ron | ✓ | | ✓ | | | rst | ✓ | | | | | ruby | ✓ | ✓ | ✓ | `ruby-lsp`, `solargraph` | | rust | ✓ | ✓ | ✓ | `rust-analyzer` | | sage | ✓ | ✓ | | | | scala | ✓ | ✓ | ✓ | `metals` | | scheme | ✓ | | ✓ | | | scss | ✓ | | | `vscode-css-language-server` | | slang | ✓ | ✓ | ✓ | `slangd` | | slint | ✓ | ✓ | ✓ | `slint-lsp` | | smali | ✓ | | ✓ | | | smithy | ✓ | | | `cs` | | sml | ✓ | | | | | snakemake | ✓ | | ✓ | `pylsp` | | solidity | ✓ | ✓ | | `solc` | | sourcepawn | ✓ | ✓ | | `sourcepawn-studio` | | spade | ✓ | | ✓ | `spade-language-server` | | spicedb | ✓ | | | | | sql | ✓ | ✓ | | | | sshclientconfig | ✓ | | | | | starlark | ✓ | ✓ | ✓ | `starpls` | | strace | ✓ | | | | | supercollider | ✓ | | | | | svelte | ✓ | | ✓ | `svelteserver` | | sway | ✓ | ✓ | ✓ | `forc` | | swift | ✓ | ✓ | | `sourcekit-lsp` | | t32 | ✓ | | | | | tablegen | ✓ | ✓ | ✓ | | | tact | ✓ | ✓ | ✓ | | | task | ✓ | | | | | tcl | ✓ | | ✓ | | | teal | ✓ | | | `teal-language-server` | | templ | ✓ | | | `templ` | | tera | ✓ | | | | | textproto | ✓ | ✓ | ✓ | | | tfvars | ✓ | | ✓ | `terraform-ls` | | thrift | ✓ | | | | | tlaplus | ✓ | | | | | todotxt | ✓ | | | | | toml | ✓ | ✓ | | `taplo` | | tsq | ✓ | | | `ts_query_ls` | | tsx | ✓ | ✓ | ✓ | `typescript-language-server` | | twig | ✓ | | | | | typescript | ✓ | ✓ | ✓ | `typescript-language-server` | | typespec | ✓ 
| ✓ | ✓ | `tsp-server` | | typst | ✓ | | | `tinymist` | | ungrammar | ✓ | | | | | unison | ✓ | ✓ | ✓ | | | uxntal | ✓ | | | | | v | ✓ | ✓ | ✓ | `v-analyzer` | | vala | ✓ | ✓ | | `vala-language-server` | | vento | ✓ | | | | | verilog | ✓ | ✓ | | `svlangserver` | | vhdl | ✓ | | | `vhdl_ls` | | vhs | ✓ | | | | | vue | ✓ | | | `vue-language-server` | | wast | ✓ | | | | | wat | ✓ | | | `wat_server` | | webc | ✓ | | | | | werk | ✓ | | | | | wesl | ✓ | ✓ | | | | wgsl | ✓ | | | `wgsl-analyzer` | | wit | ✓ | | ✓ | | | wren | ✓ | ✓ | ✓ | | | xit | ✓ | | | | | xml | ✓ | | ✓ | | | xtc | ✓ | | | | | yaml | ✓ | ✓ | ✓ | `yaml-language-server`, `ansible-language-server` | | yara | ✓ | | | `yls` | | yuck | ✓ | | | | | zig | ✓ | ✓ | ✓ | `zls` | helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/generated/static-cmd.md000066400000000000000000000712031500614314100256410ustar00rootroot00000000000000| Name | Description | Default keybinds | | --- | --- | --- | | `no_op` | Do nothing | | | `move_char_left` | Move left | normal: `` h ``, `` ``, insert: `` `` | | `move_char_right` | Move right | normal: `` l ``, `` ``, insert: `` `` | | `move_line_up` | Move up | normal: `` gk `` | | `move_line_down` | Move down | normal: `` gj `` | | `move_visual_line_up` | Move up | normal: `` k ``, `` ``, insert: `` `` | | `move_visual_line_down` | Move down | normal: `` j ``, `` ``, insert: `` `` | | `extend_char_left` | Extend left | select: `` h ``, `` `` | | `extend_char_right` | Extend right | select: `` l ``, `` `` | | `extend_line_up` | Extend up | select: `` gk `` | | `extend_line_down` | Extend down | select: `` gj `` | | `extend_visual_line_up` | Extend up | select: `` k ``, `` `` | | `extend_visual_line_down` | Extend down | select: `` j ``, `` `` | | `copy_selection_on_next_line` | Copy selection on next line | normal: `` C ``, select: `` C `` | | `copy_selection_on_prev_line` | Copy selection on previous line | normal: `` ``, select: `` `` | | `move_next_word_start` | Move to start of 
next word | normal: `` w `` | | `move_prev_word_start` | Move to start of previous word | normal: `` b `` | | `move_next_word_end` | Move to end of next word | normal: `` e `` | | `move_prev_word_end` | Move to end of previous word | | | `move_next_long_word_start` | Move to start of next long word | normal: `` W `` | | `move_prev_long_word_start` | Move to start of previous long word | normal: `` B `` | | `move_next_long_word_end` | Move to end of next long word | normal: `` E `` | | `move_prev_long_word_end` | Move to end of previous long word | | | `move_next_sub_word_start` | Move to start of next sub word | | | `move_prev_sub_word_start` | Move to start of previous sub word | | | `move_next_sub_word_end` | Move to end of next sub word | | | `move_prev_sub_word_end` | Move to end of previous sub word | | | `move_parent_node_end` | Move to end of the parent node | normal: `` `` | | `move_parent_node_start` | Move to beginning of the parent node | normal: `` `` | | `extend_next_word_start` | Extend to start of next word | select: `` w `` | | `extend_prev_word_start` | Extend to start of previous word | select: `` b `` | | `extend_next_word_end` | Extend to end of next word | select: `` e `` | | `extend_prev_word_end` | Extend to end of previous word | | | `extend_next_long_word_start` | Extend to start of next long word | select: `` W `` | | `extend_prev_long_word_start` | Extend to start of previous long word | select: `` B `` | | `extend_next_long_word_end` | Extend to end of next long word | select: `` E `` | | `extend_prev_long_word_end` | Extend to end of prev long word | | | `extend_next_sub_word_start` | Extend to start of next sub word | | | `extend_prev_sub_word_start` | Extend to start of previous sub word | | | `extend_next_sub_word_end` | Extend to end of next sub word | | | `extend_prev_sub_word_end` | Extend to end of prev sub word | | | `extend_parent_node_end` | Extend to end of the parent node | select: `` `` | | `extend_parent_node_start` | 
Extend to beginning of the parent node | select: `` `` | | `find_till_char` | Move till next occurrence of char | normal: `` t `` | | `find_next_char` | Move to next occurrence of char | normal: `` f `` | | `extend_till_char` | Extend till next occurrence of char | select: `` t `` | | `extend_next_char` | Extend to next occurrence of char | select: `` f `` | | `till_prev_char` | Move till previous occurrence of char | normal: `` T `` | | `find_prev_char` | Move to previous occurrence of char | normal: `` F `` | | `extend_till_prev_char` | Extend till previous occurrence of char | select: `` T `` | | `extend_prev_char` | Extend to previous occurrence of char | select: `` F `` | | `repeat_last_motion` | Repeat last motion | normal: `` ``, select: `` `` | | `replace` | Replace with new char | normal: `` r ``, select: `` r `` | | `switch_case` | Switch (toggle) case | normal: `` ~ ``, select: `` ~ `` | | `switch_to_uppercase` | Switch to uppercase | normal: `` ``, select: `` `` | | `switch_to_lowercase` | Switch to lowercase | normal: `` ` ``, select: `` ` `` | | `page_up` | Move page up | normal: `` ``, `` Z ``, `` z ``, `` ``, `` Z ``, `` z ``, select: `` ``, `` Z ``, `` z ``, `` ``, `` Z ``, `` z ``, insert: `` `` | | `page_down` | Move page down | normal: `` ``, `` Z ``, `` z ``, `` ``, `` Z ``, `` z ``, select: `` ``, `` Z ``, `` z ``, `` ``, `` Z ``, `` z ``, insert: `` `` | | `half_page_up` | Move half page up | | | `half_page_down` | Move half page down | | | `page_cursor_up` | Move page and cursor up | | | `page_cursor_down` | Move page and cursor down | | | `page_cursor_half_up` | Move page and cursor half up | normal: `` ``, `` Z ``, `` z ``, `` Z ``, `` z ``, select: `` ``, `` Z ``, `` z ``, `` Z ``, `` z `` | | `page_cursor_half_down` | Move page and cursor half down | normal: `` ``, `` Z ``, `` z ``, `` Z ``, `` z ``, select: `` ``, `` Z ``, `` z ``, `` Z ``, `` z `` | | `select_all` | Select whole document | normal: `` % ``, select: `` % `` | | 
`select_regex` | Select all regex matches inside selections | normal: `` s ``, select: `` s `` | | `split_selection` | Split selections on regex matches | normal: `` S ``, select: `` S `` | | `split_selection_on_newline` | Split selection on newlines | normal: `` ``, select: `` `` | | `merge_selections` | Merge selections | normal: `` ``, select: `` `` | | `merge_consecutive_selections` | Merge consecutive selections | normal: `` ``, select: `` `` | | `search` | Search for regex pattern | normal: `` / ``, `` Z/ ``, `` z/ ``, select: `` / ``, `` Z/ ``, `` z/ `` | | `rsearch` | Reverse search for regex pattern | normal: `` ? ``, `` Z? ``, `` z? ``, select: `` ? ``, `` Z? ``, `` z? `` | | `search_next` | Select next search match | normal: `` n ``, `` Zn ``, `` zn ``, select: `` Zn ``, `` zn `` | | `search_prev` | Select previous search match | normal: `` N ``, `` ZN ``, `` zN ``, select: `` ZN ``, `` zN `` | | `extend_search_next` | Add next search match to selection | select: `` n `` | | `extend_search_prev` | Add previous search match to selection | select: `` N `` | | `search_selection` | Use current selection as search pattern | normal: `` ``, select: `` `` | | `search_selection_detect_word_boundaries` | Use current selection as the search pattern, automatically wrapping with `\b` on word boundaries | normal: `` * ``, select: `` * `` | | `make_search_word_bounded` | Modify current search to make it word bounded | | | `global_search` | Global search in workspace folder | normal: `` / ``, select: `` / `` | | `extend_line` | Select current line, if already selected, extend to another line based on the anchor | | | `extend_line_below` | Select current line, if already selected, extend to next line | normal: `` x ``, select: `` x `` | | `extend_line_above` | Select current line, if already selected, extend to previous line | | | `select_line_above` | Select current line, if already selected, extend or shrink line above based on the anchor | | | `select_line_below` | 
Select current line, if already selected, extend or shrink line below based on the anchor | | | `extend_to_line_bounds` | Extend selection to line bounds | normal: `` X ``, select: `` X `` | | `shrink_to_line_bounds` | Shrink selection to line bounds | normal: `` ``, select: `` `` | | `delete_selection` | Delete selection | normal: `` d ``, select: `` d `` | | `delete_selection_noyank` | Delete selection without yanking | normal: `` ``, select: `` `` | | `change_selection` | Change selection | normal: `` c ``, select: `` c `` | | `change_selection_noyank` | Change selection without yanking | normal: `` ``, select: `` `` | | `collapse_selection` | Collapse selection into single cursor | normal: `` ; ``, select: `` ; `` | | `flip_selections` | Flip selection cursor and anchor | normal: `` ``, select: `` `` | | `ensure_selections_forward` | Ensure all selections face forward | normal: `` ``, select: `` `` | | `insert_mode` | Insert before selection | normal: `` i ``, select: `` i `` | | `append_mode` | Append after selection | normal: `` a ``, select: `` a `` | | `command_mode` | Enter command mode | normal: `` : ``, select: `` : `` | | `file_picker` | Open file picker | normal: `` f ``, select: `` f `` | | `file_picker_in_current_buffer_directory` | Open file picker at current buffer's directory | | | `file_picker_in_current_directory` | Open file picker at current working directory | normal: `` F ``, select: `` F `` | | `file_explorer` | Open file explorer in workspace root | normal: `` e ``, select: `` e `` | | `file_explorer_in_current_buffer_directory` | Open file explorer at current buffer's directory | normal: `` E ``, select: `` E `` | | `file_explorer_in_current_directory` | Open file explorer at current working directory | | | `code_action` | Perform code action | normal: `` a ``, select: `` a `` | | `buffer_picker` | Open buffer picker | normal: `` b ``, select: `` b `` | | `jumplist_picker` | Open jumplist picker | normal: `` j ``, select: `` j `` | | 
`symbol_picker` | Open symbol picker | normal: `` s ``, select: `` s `` | | `changed_file_picker` | Open changed file picker | normal: `` g ``, select: `` g `` | | `select_references_to_symbol_under_cursor` | Select symbol references | normal: `` h ``, select: `` h `` | | `workspace_symbol_picker` | Open workspace symbol picker | normal: `` S ``, select: `` S `` | | `diagnostics_picker` | Open diagnostic picker | normal: `` d ``, select: `` d `` | | `workspace_diagnostics_picker` | Open workspace diagnostic picker | normal: `` D ``, select: `` D `` | | `last_picker` | Open last picker | normal: `` ' ``, select: `` ' `` | | `insert_at_line_start` | Insert at start of line | normal: `` I ``, select: `` I `` | | `insert_at_line_end` | Insert at end of line | normal: `` A ``, select: `` A `` | | `open_below` | Open new line below selection | normal: `` o ``, select: `` o `` | | `open_above` | Open new line above selection | normal: `` O ``, select: `` O `` | | `normal_mode` | Enter normal mode | normal: `` ``, select: `` v ``, insert: `` `` | | `select_mode` | Enter selection extend mode | normal: `` v `` | | `exit_select_mode` | Exit selection mode | select: `` `` | | `goto_definition` | Goto definition | normal: `` gd ``, select: `` gd `` | | `goto_declaration` | Goto declaration | normal: `` gD ``, select: `` gD `` | | `add_newline_above` | Add newline above | normal: `` [ ``, select: `` [ `` | | `add_newline_below` | Add newline below | normal: `` ] ``, select: `` ] `` | | `goto_type_definition` | Goto type definition | normal: `` gy ``, select: `` gy `` | | `goto_implementation` | Goto implementation | normal: `` gi ``, select: `` gi `` | | `goto_file_start` | Goto line number else file start | normal: `` gg `` | | `goto_file_end` | Goto file end | | | `extend_to_file_start` | Extend to line number else file start | select: `` gg `` | | `extend_to_file_end` | Extend to file end | | | `goto_file` | Goto files/URLs in selections | normal: `` gf ``, select: `` gf `` 
| | `goto_file_hsplit` | Goto files in selections (hsplit) | normal: `` f ``, `` wf ``, select: `` f ``, `` wf `` | | `goto_file_vsplit` | Goto files in selections (vsplit) | normal: `` F ``, `` wF ``, select: `` F ``, `` wF `` | | `goto_reference` | Goto references | normal: `` gr ``, select: `` gr `` | | `goto_window_top` | Goto window top | normal: `` gt ``, select: `` gt `` | | `goto_window_center` | Goto window center | normal: `` gc ``, select: `` gc `` | | `goto_window_bottom` | Goto window bottom | normal: `` gb ``, select: `` gb `` | | `goto_last_accessed_file` | Goto last accessed file | normal: `` ga ``, select: `` ga `` | | `goto_last_modified_file` | Goto last modified file | normal: `` gm ``, select: `` gm `` | | `goto_last_modification` | Goto last modification | normal: `` g. ``, select: `` g. `` | | `goto_line` | Goto line | normal: `` G ``, select: `` G `` | | `goto_last_line` | Goto last line | normal: `` ge `` | | `extend_to_last_line` | Extend to last line | select: `` ge `` | | `goto_first_diag` | Goto first diagnostic | normal: `` [D ``, select: `` [D `` | | `goto_last_diag` | Goto last diagnostic | normal: `` ]D ``, select: `` ]D `` | | `goto_next_diag` | Goto next diagnostic | normal: `` ]d ``, select: `` ]d `` | | `goto_prev_diag` | Goto previous diagnostic | normal: `` [d ``, select: `` [d `` | | `goto_next_change` | Goto next change | normal: `` ]g ``, select: `` ]g `` | | `goto_prev_change` | Goto previous change | normal: `` [g ``, select: `` [g `` | | `goto_first_change` | Goto first change | normal: `` [G ``, select: `` [G `` | | `goto_last_change` | Goto last change | normal: `` ]G ``, select: `` ]G `` | | `goto_line_start` | Goto line start | normal: `` gh ``, `` ``, select: `` gh ``, insert: `` `` | | `goto_line_end` | Goto line end | normal: `` gl ``, `` ``, select: `` gl `` | | `goto_column` | Goto column | normal: `` g\| `` | | `extend_to_column` | Extend to column | select: `` g\| `` | | `goto_next_buffer` | Goto next buffer | 
normal: `` gn ``, select: `` gn `` | | `goto_previous_buffer` | Goto previous buffer | normal: `` gp ``, select: `` gp `` | | `goto_line_end_newline` | Goto newline at line end | insert: `` `` | | `goto_first_nonwhitespace` | Goto first non-blank in line | normal: `` gs ``, select: `` gs `` | | `trim_selections` | Trim whitespace from selections | normal: `` _ ``, select: `` _ `` | | `extend_to_line_start` | Extend to line start | select: `` `` | | `extend_to_first_nonwhitespace` | Extend to first non-blank in line | | | `extend_to_line_end` | Extend to line end | select: `` `` | | `extend_to_line_end_newline` | Extend to line end | | | `signature_help` | Show signature help | | | `smart_tab` | Insert tab if all cursors have all whitespace to their left; otherwise, run a separate command. | insert: `` `` | | `insert_tab` | Insert tab char | insert: `` `` | | `insert_newline` | Insert newline char | insert: `` ``, `` `` | | `delete_char_backward` | Delete previous char | insert: `` ``, `` ``, `` `` | | `delete_char_forward` | Delete next char | insert: `` ``, `` `` | | `delete_word_backward` | Delete previous word | insert: `` ``, `` `` | | `delete_word_forward` | Delete next word | insert: `` ``, `` `` | | `kill_to_line_start` | Delete till start of line | insert: `` `` | | `kill_to_line_end` | Delete till end of line | insert: `` `` | | `undo` | Undo change | normal: `` u ``, select: `` u `` | | `redo` | Redo change | normal: `` U ``, select: `` U `` | | `earlier` | Move backward in history | normal: `` ``, select: `` `` | | `later` | Move forward in history | normal: `` ``, select: `` `` | | `commit_undo_checkpoint` | Commit changes to new checkpoint | insert: `` `` | | `yank` | Yank selection | normal: `` y ``, select: `` y `` | | `yank_to_clipboard` | Yank selections to clipboard | normal: `` y ``, select: `` y `` | | `yank_to_primary_clipboard` | Yank selections to primary clipboard | | | `yank_joined` | Join and yank selections | | | 
`yank_joined_to_clipboard` | Join and yank selections to clipboard | | | `yank_main_selection_to_clipboard` | Yank main selection to clipboard | normal: `` Y ``, select: `` Y `` | | `yank_joined_to_primary_clipboard` | Join and yank selections to primary clipboard | | | `yank_main_selection_to_primary_clipboard` | Yank main selection to primary clipboard | | | `replace_with_yanked` | Replace with yanked text | normal: `` R ``, select: `` R `` | | `replace_selections_with_clipboard` | Replace selections by clipboard content | normal: `` R ``, select: `` R `` | | `replace_selections_with_primary_clipboard` | Replace selections by primary clipboard | | | `paste_after` | Paste after selection | normal: `` p ``, select: `` p `` | | `paste_before` | Paste before selection | normal: `` P ``, select: `` P `` | | `paste_clipboard_after` | Paste clipboard after selections | normal: `` p ``, select: `` p `` | | `paste_clipboard_before` | Paste clipboard before selections | normal: `` P ``, select: `` P `` | | `paste_primary_clipboard_after` | Paste primary clipboard after selections | | | `paste_primary_clipboard_before` | Paste primary clipboard before selections | | | `indent` | Indent selection | normal: `` ``, select: `` `` | | `unindent` | Unindent selection | normal: `` ``, select: `` `` | | `format_selections` | Format selection | normal: `` = ``, select: `` = `` | | `join_selections` | Join lines inside selection | normal: `` J ``, select: `` J `` | | `join_selections_space` | Join lines inside selection and select spaces | normal: `` ``, select: `` `` | | `keep_selections` | Keep selections matching regex | normal: `` K ``, select: `` K `` | | `remove_selections` | Remove selections matching regex | normal: `` ``, select: `` `` | | `align_selections` | Align selections in column | normal: `` & ``, select: `` & `` | | `keep_primary_selection` | Keep primary selection | normal: `` , ``, select: `` , `` | | `remove_primary_selection` | Remove primary selection | normal: 
`` ``, select: `` `` | | `completion` | Invoke completion popup | insert: `` `` | | `hover` | Show docs for item under cursor | normal: `` k ``, select: `` k `` | | `toggle_comments` | Comment/uncomment selections | normal: `` ``, `` c ``, select: `` ``, `` c `` | | `toggle_line_comments` | Line comment/uncomment selections | normal: `` ``, select: `` `` | | `toggle_block_comments` | Block comment/uncomment selections | normal: `` C ``, select: `` C `` | | `rotate_selections_forward` | Rotate selections forward | normal: `` ) ``, select: `` ) `` | | `rotate_selections_backward` | Rotate selections backward | normal: `` ( ``, select: `` ( `` | | `rotate_selection_contents_forward` | Rotate selection contents forward | normal: `` ``, select: `` `` | | `rotate_selection_contents_backward` | Rotate selections contents backward | normal: `` ``, select: `` `` | | `reverse_selection_contents` | Reverse selections contents | | | `expand_selection` | Expand selection to parent syntax node | normal: `` ``, `` ``, select: `` ``, `` `` | | `shrink_selection` | Shrink selection to previously expanded syntax node | normal: `` ``, `` ``, select: `` ``, `` `` | | `select_next_sibling` | Select next sibling in the syntax tree | normal: `` ``, `` ``, select: `` ``, `` `` | | `select_prev_sibling` | Select previous sibling in the syntax tree | normal: `` ``, `` ``, select: `` ``, `` `` | | `select_all_siblings` | Select all siblings of the current node | normal: `` ``, select: `` `` | | `select_all_children` | Select all children of the current node | normal: `` ``, `` ``, select: `` ``, `` `` | | `jump_forward` | Jump forward on jumplist | normal: `` ``, `` ``, select: `` ``, `` `` | | `jump_backward` | Jump backward on jumplist | normal: `` ``, select: `` `` | | `save_selection` | Save current selection to jumplist | normal: `` ``, select: `` `` | | `jump_view_right` | Jump to right split | normal: `` l ``, `` wl ``, `` ``, `` ``, `` w ``, `` w ``, select: `` l ``, `` wl ``, `` ``,
`` ``, `` w ``, `` w `` | | `jump_view_left` | Jump to left split | normal: `` h ``, `` wh ``, `` ``, `` ``, `` w ``, `` w ``, select: `` h ``, `` wh ``, `` ``, `` ``, `` w ``, `` w `` | | `jump_view_up` | Jump to split above | normal: `` k ``, `` ``, `` wk ``, `` ``, `` w ``, `` w ``, select: `` k ``, `` ``, `` wk ``, `` ``, `` w ``, `` w `` | | `jump_view_down` | Jump to split below | normal: `` j ``, `` wj ``, `` ``, `` ``, `` w ``, `` w ``, select: `` j ``, `` wj ``, `` ``, `` ``, `` w ``, `` w `` | | `swap_view_right` | Swap with right split | normal: `` L ``, `` wL ``, select: `` L ``, `` wL `` | | `swap_view_left` | Swap with left split | normal: `` H ``, `` wH ``, select: `` H ``, `` wH `` | | `swap_view_up` | Swap with split above | normal: `` K ``, `` wK ``, select: `` K ``, `` wK `` | | `swap_view_down` | Swap with split below | normal: `` J ``, `` wJ ``, select: `` J ``, `` wJ `` | | `transpose_view` | Transpose splits | normal: `` t ``, `` wt ``, `` ``, `` w ``, select: `` t ``, `` wt ``, `` ``, `` w `` | | `rotate_view` | Goto next window | normal: `` w ``, `` ww ``, `` ``, `` w ``, select: `` w ``, `` ww ``, `` ``, `` w `` | | `rotate_view_reverse` | Goto previous window | | | `hsplit` | Horizontal bottom split | normal: `` s ``, `` ws ``, `` ``, `` w ``, select: `` s ``, `` ws ``, `` ``, `` w `` | | `hsplit_new` | Horizontal bottom split scratch buffer | normal: `` ns ``, `` wns ``, `` n ``, `` wn ``, select: `` ns ``, `` wns ``, `` n ``, `` wn `` | | `vsplit` | Vertical right split | normal: `` v ``, `` wv ``, `` ``, `` w ``, select: `` v ``, `` wv ``, `` ``, `` w `` | | `vsplit_new` | Vertical right split scratch buffer | normal: `` nv ``, `` wnv ``, `` n ``, `` wn ``, select: `` nv ``, `` wnv ``, `` n ``, `` wn `` | | `wclose` | Close window | normal: `` q ``, `` wq ``, `` ``, `` w ``, select: `` q ``, `` wq ``, `` ``, `` w `` | | `wonly` | Close windows except current | normal: `` o ``, `` wo ``, `` ``, `` w ``, select: `` o ``, `` wo ``, `` ``, 
`` w `` | | `select_register` | Select register | normal: `` " ``, select: `` " `` | | `insert_register` | Insert register | insert: `` `` | | `copy_between_registers` | Copy between two registers | | | `align_view_middle` | Align view middle | normal: `` Zm ``, `` zm ``, select: `` Zm ``, `` zm `` | | `align_view_top` | Align view top | normal: `` Zt ``, `` zt ``, select: `` Zt ``, `` zt `` | | `align_view_center` | Align view center | normal: `` Zc ``, `` Zz ``, `` zc ``, `` zz ``, select: `` Zc ``, `` Zz ``, `` zc ``, `` zz `` | | `align_view_bottom` | Align view bottom | normal: `` Zb ``, `` zb ``, select: `` Zb ``, `` zb `` | | `scroll_up` | Scroll view up | normal: `` Zk ``, `` zk ``, `` Z ``, `` z ``, select: `` Zk ``, `` zk ``, `` Z ``, `` z `` | | `scroll_down` | Scroll view down | normal: `` Zj ``, `` zj ``, `` Z ``, `` z ``, select: `` Zj ``, `` zj ``, `` Z ``, `` z `` | | `match_brackets` | Goto matching bracket | normal: `` mm ``, select: `` mm `` | | `surround_add` | Surround add | normal: `` ms ``, select: `` ms `` | | `surround_replace` | Surround replace | normal: `` mr ``, select: `` mr `` | | `surround_delete` | Surround delete | normal: `` md ``, select: `` md `` | | `select_textobject_around` | Select around object | normal: `` ma ``, select: `` ma `` | | `select_textobject_inner` | Select inside object | normal: `` mi ``, select: `` mi `` | | `goto_next_function` | Goto next function | normal: `` ]f ``, select: `` ]f `` | | `goto_prev_function` | Goto previous function | normal: `` [f ``, select: `` [f `` | | `goto_next_class` | Goto next type definition | normal: `` ]t ``, select: `` ]t `` | | `goto_prev_class` | Goto previous type definition | normal: `` [t ``, select: `` [t `` | | `goto_next_parameter` | Goto next parameter | normal: `` ]a ``, select: `` ]a `` | | `goto_prev_parameter` | Goto previous parameter | normal: `` [a ``, select: `` [a `` | | `goto_next_comment` | Goto next comment | normal: `` ]c ``, select: `` ]c `` | | 
`goto_prev_comment` | Goto previous comment | normal: `` [c ``, select: `` [c `` | | `goto_next_test` | Goto next test | normal: `` ]T ``, select: `` ]T `` | | `goto_prev_test` | Goto previous test | normal: `` [T ``, select: `` [T `` | | `goto_next_entry` | Goto next pairing | normal: `` ]e ``, select: `` ]e `` | | `goto_prev_entry` | Goto previous pairing | normal: `` [e ``, select: `` [e `` | | `goto_next_paragraph` | Goto next paragraph | normal: `` ]p ``, select: `` ]p `` | | `goto_prev_paragraph` | Goto previous paragraph | normal: `` [p ``, select: `` [p `` | | `dap_launch` | Launch debug target | normal: `` Gl ``, select: `` Gl `` | | `dap_restart` | Restart debugging session | normal: `` Gr ``, select: `` Gr `` | | `dap_toggle_breakpoint` | Toggle breakpoint | normal: `` Gb ``, select: `` Gb `` | | `dap_continue` | Continue program execution | normal: `` Gc ``, select: `` Gc `` | | `dap_pause` | Pause program execution | normal: `` Gh ``, select: `` Gh `` | | `dap_step_in` | Step in | normal: `` Gi ``, select: `` Gi `` | | `dap_step_out` | Step out | normal: `` Go ``, select: `` Go `` | | `dap_next` | Step to next | normal: `` Gn ``, select: `` Gn `` | | `dap_variables` | List variables | normal: `` Gv ``, select: `` Gv `` | | `dap_terminate` | End debug session | normal: `` Gt ``, select: `` Gt `` | | `dap_edit_condition` | Edit breakpoint condition on current line | normal: `` G ``, select: `` G `` | | `dap_edit_log` | Edit breakpoint log message on current line | normal: `` G ``, select: `` G `` | | `dap_switch_thread` | Switch current thread | normal: `` Gst ``, select: `` Gst `` | | `dap_switch_stack_frame` | Switch stack frame | normal: `` Gsf ``, select: `` Gsf `` | | `dap_enable_exceptions` | Enable exception breakpoints | normal: `` Ge ``, select: `` Ge `` | | `dap_disable_exceptions` | Disable exception breakpoints | normal: `` GE ``, select: `` GE `` | | `shell_pipe` | Pipe selections through shell command | normal: `` \| ``, select: `` \| `` | 
| `shell_pipe_to` | Pipe selections into shell command ignoring output | normal: `` ``, select: `` `` | | `shell_insert_output` | Insert shell command output before selections | normal: `` ! ``, select: `` ! `` | | `shell_append_output` | Append shell command output after selections | normal: `` ``, select: `` `` | | `shell_keep_pipe` | Filter selections with shell predicate | normal: `` $ ``, select: `` $ `` | | `suspend` | Suspend and return to shell | normal: `` ``, select: `` `` | | `rename_symbol` | Rename symbol | normal: `` r ``, select: `` r `` | | `increment` | Increment item under cursor | normal: `` ``, select: `` `` | | `decrement` | Decrement item under cursor | normal: `` ``, select: `` `` | | `record_macro` | Record macro | normal: `` Q ``, select: `` Q `` | | `replay_macro` | Replay macro | normal: `` q ``, select: `` q `` | | `command_palette` | Open command palette | normal: `` ? ``, select: `` ? `` | | `goto_word` | Jump to a two-character label | normal: `` gw `` | | `extend_to_word` | Extend to a two-character label | select: `` gw `` | | `goto_next_tabstop` | goto next snippet placeholder | | | `goto_prev_tabstop` | goto previous snippet placeholder | | helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/generated/typable-cmd.md000066400000000000000000000173741500614314100260170ustar00rootroot00000000000000| Name | Description | | --- | --- | | `:quit`, `:q` | Close the current view. | | `:quit!`, `:q!` | Force close the current view, ignoring unsaved changes. | | `:open`, `:o`, `:edit`, `:e` | Open a file from disk into the current view. | | `:buffer-close`, `:bc`, `:bclose` | Close the current buffer. | | `:buffer-close!`, `:bc!`, `:bclose!` | Close the current buffer forcefully, ignoring unsaved changes. | | `:buffer-close-others`, `:bco`, `:bcloseother` | Close all buffers but the currently focused one. | | `:buffer-close-others!`, `:bco!`, `:bcloseother!` | Force close all buffers but the currently focused one.
| | `:buffer-close-all`, `:bca`, `:bcloseall` | Close all buffers without quitting. | | `:buffer-close-all!`, `:bca!`, `:bcloseall!` | Force close all buffers ignoring unsaved changes without quitting. | | `:buffer-next`, `:bn`, `:bnext` | Goto next buffer. | | `:buffer-previous`, `:bp`, `:bprev` | Goto previous buffer. | | `:write`, `:w` | Write changes to disk. Accepts an optional path (:write some/path.txt) | | `:write!`, `:w!` | Force write changes to disk creating necessary subdirectories. Accepts an optional path (:write! some/path.txt) | | `:write-buffer-close`, `:wbc` | Write changes to disk and closes the buffer. Accepts an optional path (:write-buffer-close some/path.txt) | | `:write-buffer-close!`, `:wbc!` | Force write changes to disk creating necessary subdirectories and closes the buffer. Accepts an optional path (:write-buffer-close! some/path.txt) | | `:new`, `:n` | Create a new scratch buffer. | | `:format`, `:fmt` | Format the file using an external formatter or language server. | | `:indent-style` | Set the indentation style for editing. ('t' for tabs or 1-16 for number of spaces.) | | `:line-ending` | Set the document's default line ending. Options: crlf, lf. | | `:earlier`, `:ear` | Jump back to an earlier point in edit history. Accepts a number of steps or a time span. | | `:later`, `:lat` | Jump to a later point in edit history. Accepts a number of steps or a time span. | | `:write-quit`, `:wq`, `:x` | Write changes to disk and close the current view. Accepts an optional path (:wq some/path.txt) | | `:write-quit!`, `:wq!`, `:x!` | Write changes to disk and close the current view forcefully. Accepts an optional path (:wq! some/path.txt) | | `:write-all`, `:wa` | Write changes from all buffers to disk. | | `:write-all!`, `:wa!` | Forcefully write changes from all buffers to disk creating necessary subdirectories. | | `:write-quit-all`, `:wqa`, `:xa` | Write changes from all buffers to disk and close all views. 
| | `:write-quit-all!`, `:wqa!`, `:xa!` | Write changes from all buffers to disk and close all views forcefully (ignoring unsaved changes). | | `:quit-all`, `:qa` | Close all views. | | `:quit-all!`, `:qa!` | Force close all views ignoring unsaved changes. | | `:cquit`, `:cq` | Quit with exit code (default 1). Accepts an optional integer exit code (:cq 2). | | `:cquit!`, `:cq!` | Force quit with exit code (default 1) ignoring unsaved changes. Accepts an optional integer exit code (:cq! 2). | | `:theme` | Change the editor theme (show current theme if no name specified). | | `:yank-join` | Yank joined selections. A separator can be provided as first argument. Default value is newline. | | `:clipboard-yank` | Yank main selection into system clipboard. | | `:clipboard-yank-join` | Yank joined selections into system clipboard. A separator can be provided as first argument. Default value is newline. | | `:primary-clipboard-yank` | Yank main selection into system primary clipboard. | | `:primary-clipboard-yank-join` | Yank joined selections into system primary clipboard. A separator can be provided as first argument. Default value is newline. | | `:clipboard-paste-after` | Paste system clipboard after selections. | | `:clipboard-paste-before` | Paste system clipboard before selections. | | `:clipboard-paste-replace` | Replace selections with content of system clipboard. | | `:primary-clipboard-paste-after` | Paste primary clipboard after selections. | | `:primary-clipboard-paste-before` | Paste primary clipboard before selections. | | `:primary-clipboard-paste-replace` | Replace selections with content of system primary clipboard. | | `:show-clipboard-provider` | Show clipboard provider name in status bar. | | `:change-current-directory`, `:cd` | Change the current working directory. | | `:show-directory`, `:pwd` | Show the current working directory. | | `:encoding` | Set encoding. Based on `https://encoding.spec.whatwg.org`. 
| | `:character-info`, `:char` | Get info about the character under the primary cursor. | | `:reload`, `:rl` | Discard changes and reload from the source file. | | `:reload-all`, `:rla` | Discard changes and reload all documents from the source files. | | `:update`, `:u` | Write changes only if the file has been modified. | | `:lsp-workspace-command` | Open workspace command picker | | `:lsp-restart` | Restarts the given language servers, or all language servers that are used by the current file if no arguments are supplied | | `:lsp-stop` | Stops the given language servers, or all language servers that are used by the current file if no arguments are supplied | | `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. | | `:tree-sitter-highlight-name` | Display name of tree-sitter highlight scope under the cursor. | | `:debug-start`, `:dbg` | Start a debug session from a given template with given parameters. | | `:debug-remote`, `:dbg-tcp` | Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters. | | `:debug-eval` | Evaluate expression in current debug context. | | `:vsplit`, `:vs` | Open the file in a vertical split. | | `:vsplit-new`, `:vnew` | Open a scratch buffer in a vertical split. | | `:hsplit`, `:hs`, `:sp` | Open the file in a horizontal split. | | `:hsplit-new`, `:hnew` | Open a scratch buffer in a horizontal split. | | `:tutor` | Open the tutorial. | | `:goto`, `:g` | Goto line number. | | `:set-language`, `:lang` | Set the language of current buffer (show current language if no value specified). | | `:set-option`, `:set` | Set a config option at runtime.
For example to disable smart case search, use `:set search.smart-case false`. | | `:toggle-option`, `:toggle` | Toggle a config option at runtime.
For example to toggle smart case search, use `:toggle search.smart-case`. | | `:get-option`, `:get` | Get the current value of a config option. | | `:sort` | Sort ranges in selection. | | `:reflow` | Hard-wrap the current selection of lines to a given width. | | `:tree-sitter-subtree`, `:ts-subtree` | Display the smallest tree-sitter subtree that spans the primary selection, primarily for debugging queries. | | `:config-reload` | Refresh user config. | | `:config-open` | Open the user config.toml file. | | `:config-open-workspace` | Open the workspace config.toml file. | | `:log-open` | Open the helix log file. | | `:insert-output` | Run shell command, inserting output before each selection. | | `:append-output` | Run shell command, appending output after each selection. | | `:pipe`, `:|` | Pipe each selection to the shell command. | | `:pipe-to` | Pipe each selection to the shell command, ignoring output. | | `:run-shell-command`, `:sh`, `:!` | Run a shell command | | `:reset-diff-change`, `:diffget`, `:diffg` | Reset the diff change at the cursor position. | | `:clear-register` | Clear given register. If no argument is provided, clear all registers. | | `:redraw` | Clear and re-render the whole UI | | `:move`, `:mv` | Move the current buffer and its corresponding file to a different path | | `:yank-diagnostic` | Yank diagnostic(s) under primary cursor to register, or clipboard by default | | `:read`, `:r` | Load a file into buffer | | `:echo` | Prints the given arguments to the statusline. | | `:noop` | Does nothing. | helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/guides/000077500000000000000000000000001500614314100226065ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/guides/README.md000066400000000000000000000002311500614314100240610ustar00rootroot00000000000000# Guides This section contains guides for adding new language server configurations, tree-sitter grammars, textobject queries, and other similar items. 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/guides/adding_languages.md000066400000000000000000000045431500614314100264120ustar00rootroot00000000000000## Adding new languages to Helix In order to add a new language to Helix, you will need to follow the steps below. ## Language configuration 1. Add a new `[[language]]` entry in the `languages.toml` file and provide the necessary configuration for the new language. For more information on language configuration, refer to the [language configuration section](../languages.md) of the documentation. A new language server can be added by extending the `[language-server]` table in the same file. 2. If you are adding a new language or updating an existing language server configuration, run the command `cargo xtask docgen` to update the [Language Support](../lang-support.md) documentation. > 💡 If you are adding a new Language Server configuration, make sure to update > the > [Language Server Wiki](https://github.com/helix-editor/helix/wiki/Language-Server-Configurations) > with the installation instructions. ## Grammar configuration 1. If a tree-sitter grammar is available for the new language, add a new `[[grammar]]` entry to the `languages.toml` file. 2. If you are testing the grammar locally, you can use the `source.path` key with an absolute path to the grammar. However, before submitting a pull request, make sure to switch to using `source.git`. ## Queries 1. In order to provide syntax highlighting and indentation for the new language, you will need to add queries. 2. Create a new directory for the language with the path `runtime/queries/<name>/`. 3. Refer to the [tree-sitter website](https://tree-sitter.github.io/tree-sitter/3-syntax-highlighting.html#highlights) for more information on writing queries. 4. A list of highlight captures can be found [on the themes page](https://docs.helix-editor.com/themes.html#scopes).
## Common issues - If you encounter errors when running Helix after switching branches, you may need to update the tree-sitter grammars. Run the command `hx --grammar fetch` to fetch the grammars and `hx --grammar build` to build any out-of-date grammars. - If a parser is causing a segfault, or you want to remove it, make sure to remove the compiled parser located at `runtime/grammars/<name>.so`. - If you are attempting to add queries and Helix is unable to locate them, ensure that the environment variable `HELIX_RUNTIME` is set to the location of the `runtime` folder you're developing in. helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/guides/indent.md000066400000000000000000000363561500614314100244220ustar00rootroot00000000000000## Adding indent queries Helix uses tree-sitter to correctly indent new lines. This requires a tree- sitter grammar and an `indents.scm` query file placed in `runtime/queries/ {language}/indents.scm`. The indentation for a line is calculated by traversing the syntax tree from the lowest node at the beginning of the new line (see [Indent queries](#indent-queries)). Each of these nodes contributes to the total indent when it is captured by the query (in what way depends on the name of the capture). Note that it matters where these added indents begin. For example, multiple indent level increases that start on the same line only increase the total indent level by 1. See [Capture types](#capture-types). By default, Helix uses the `hybrid` indentation heuristic. This means that indent queries are not used to compute the expected absolute indentation of a line but rather the expected difference in indentation between the new and an already existing line. This difference is then added to the actual indentation of the already existing line. Since this makes errors in the indent queries harder to find, it is recommended to disable it when testing via `:set indent-heuristic tree-sitter`.
The rest of this guide assumes that the `tree-sitter` heuristic is used. ## Indent queries When Helix is inserting a new line through `o`, `O`, or ``, to determine the indent level for the new line, the query in `indents.scm` is run on the document. The starting position of the query is the end of the line above where a new line will be inserted. For `o`, the inserted line is the line below the cursor, so that starting position of the query is the end of the current line. ```rust fn need_hero(some_hero: Hero, life: Life) -> { matches!(some_hero, Hero { // ←─────────────────╮ strong: true,//←╮ ↑ ↑ │ fast: true, // │ │ ╰── query start │ sure: true, // │ ╰───── cursor ├─ traversal soon: true, // ╰──────── new line inserted │ start node }) && // │ // ↑ │ // ╰───────────────────────────────────────────────╯ some_hero > life } ``` For `O`, the newly inserted line is the *current* line, so the starting position of the query is the end of the line above the cursor. ```rust fn need_hero(some_hero: Hero, life: Life) -> { // ←─╮ matches!(some_hero, Hero { // ←╮ ↑ │ strong: true,// ↑ ╭───╯ │ │ fast: true, // │ │ query start ─╯ │ sure: true, // ╰───┼ cursor ├─ traversal soon: true, // ╰ new line inserted │ start node }) && // │ some_hero > life // │ } // ←──────────────────────────────────────────────╯ ``` From this starting node, the syntax tree is traversed up until the root node. Each indent capture is collected along the way, and then combined according to their [capture types](#capture-types) and [scopes](#scopes) to a final indent level for the line. ### Capture types - `@indent` (default scope `tail`): Increase the indent level by 1. Multiple occurrences in the same line *do not* stack. If there is at least one `@indent` and one `@outdent` capture on the same line, the indent level isn't changed at all. - `@outdent` (default scope `all`): Decrease the indent level by 1. The same rules as for `@indent` apply. 
- `@indent.always` (default scope `tail`): Increase the indent level by 1. Multiple occurrences on the same line *do* stack. The final indent level is `@indent.always` – `@outdent.always`. If an `@indent` and an `@indent.always` are on the same line, the `@indent` is ignored. - `@outdent.always` (default scope `all`): Decrease the indent level by 1. The same rules as for `@indent.always` apply. - `@align` (default scope `all`): Align everything inside this node to some anchor. The anchor is given by the start of the node captured by `@anchor` in the same pattern. Every pattern with an `@align` should contain exactly one `@anchor`. Indent (and outdent) for nodes below (in terms of their starting line) the `@align` node is added to the indentation required for alignment. - `@extend`: Extend the range of this node to the end of the line and to lines that are indented more than the line that this node starts on. This is useful for languages like Python, where for the purpose of indentation some nodes (like functions or classes) should also contain indented lines that follow them. - `@extend.prevent-once`: Prevents the first extension of an ancestor of this node. For example, in Python a return expression always ends the block that it is in. Note that this only stops the extension of the next `@extend` capture. If multiple ancestors are captured, only the extension of the innermost one is prevented. All other ancestors are unaffected (regardless of whether the innermost ancestor would actually have been extended). 
#### `@indent` / `@outdent` Consider this example: ```rust fn shout(things: Vec) { // ↑ // ├───────────────────────╮ indent level // @indent ├┄┄┄┄┄┄┄┄┄┄┄┄┄┄ // │ let it_all = |out| { things.filter(|thing| { // │ 1 // ↑ ↑ │ // ├───────────────────────┼─────┼┄┄┄┄┄┄┄┄┄┄┄┄┄┄ // @indent @indent │ // │ 2 thing.can_do_with(out) // │ })}; // ├┄┄┄┄┄┄┄┄┄┄┄┄┄┄ //↑↑↑ │ 1 } //╰┼┴──────────────────────────────────────────────┴┄┄┄┄┄┄┄┄┄┄┄┄┄┄ // 3x @outdent ``` ```scm ((block) @indent) ["}" ")"] @outdent ``` Note how on the second line, we have two blocks begin on the same line. In this case, since both captures occur on the same line, they are combined and only result in a net increase of 1. Also note that the closing `}`s are part of the `@indent` captures, but the 3 `@outdent`s also combine into 1 and result in that line losing one indent level. #### `@extend` / `@extend.prevent-once` For an example of where `@extend` can be useful, consider Python, which is whitespace-sensitive. ```scm ] (parenthesized_expression) (function_definition) (class_definition) ] @indent ``` ```python class Hero: def __init__(self, strong, fast, sure, soon):# ←─╮ self.is_strong = strong # │ self.is_fast = fast # ╭─── query start │ self.is_sure = sure # │ ╭─ cursor │ self.is_soon = soon # │ │ │ # ↑ ↑ │ │ │ # │ ╰──────╯ │ │ # ╰─────────────────────╯ │ # ├─ traversal def need_hero(self, life): # │ start node return ( # │ self.is_strong # │ and self.is_fast # │ and self.is_sure # │ and self.is_soon # │ and self > life # │ ) # ←─────────────────────────────────────────╯ ``` Without braces to catch the scope of the function, the smallest descendant of the cursor on a line feed ends up being the entire inside of the class. Because of this, it will miss the entire function node and its indent capture, leading to an indent level one too small. 
To address this case, `@extend` tells helix to "extend" the captured node's span to the line feed and every consecutive line that has a greater indent level than the line of the node. ```scm (parenthesized_expression) @indent [ (function_definition) (class_definition) ] @indent @extend ``` ```python class Hero: def __init__(self, strong, fast, sure, soon):# ←─╮ self.is_strong = strong # │ self.is_fast = fast # ╭─── query start ├─ traversal self.is_sure = sure # │ ╭─ cursor │ start node self.is_soon = soon # │ │ ←───────────────╯ # ↑ ↑ │ │ # │ ╰──────╯ │ # ╰─────────────────────╯ def need_hero(self, life): return ( self.is_strong and self.is_fast and self.is_sure and self.is_soon and self > life ) ``` Furthermore, there are some cases where extending to everything with a greater indent level may not be desirable. Consider the `need_hero` function above, where our cursor is on the last line of the returned expression. ```python class Hero: def __init__(self, strong, fast, sure, soon): self.is_strong = strong self.is_fast = fast self.is_sure = sure self.is_soon = soon def need_hero(self, life): return ( self.is_strong and self.is_fast and self.is_sure and self.is_soon and self > life ) # ←─── cursor #←────────── where cursor should go on new line ``` In Python, there are a few tokens that will always end a scope, such as a return statement. Since the scope ends, so should the indent level. But because the function span is extended to every line with a greater indent level, a new line would just continue on the same level. And an `@outdent` would not help us here either, since it would cause everything in the parentheses to become outdented as well. To help, we need to signal an end to the extension. We can do this with `@extend.prevent-once`. 
```scm (parenthesized_expression) @indent [ (function_definition) (class_definition) ] @indent @extend (return_statement) @extend.prevent-once ``` #### `@indent.always` / `@outdent.always` As mentioned before, normally if there is more than one `@indent` or `@outdent` capture on the same line, they are combined. Sometimes, there are cases when you may want to ensure that every indent capture is additive, regardless of how many occur on the same line. Consider this example in YAML. ```yaml - foo: bar # ↑ ↑ # │ ╰─────────────── start of map # ╰───────────────── start of list element baz: quux # ←─── cursor # ←───────────── where the cursor should go on a new line garply: waldo - quux: bar: baz xyzzy: thud fred: plugh ``` In YAML, you often have lists of maps. In these cases, the syntax is such that the list element and the map both start on the same line. But we really do want to start an indentation for each of these so that subsequent keys in the map hang over the list and align properly. This is where `@indent.always` helps. ```scm ((block_sequence_item) @item @indent.always @extend (#not-one-line? @item)) ((block_mapping_pair key: (_) @key value: (_) @val (#not-same-line? @key @val) ) @indent.always @extend ) ``` ## Predicates In some cases, an S-expression cannot express exactly what pattern should be matched. For that, tree-sitter allows for predicates to appear anywhere within a pattern, similar to how `#set!` declarations work: ```scm (some_kind (child_kind) @indent (#predicate? arg1 arg2 ...) ) ``` The number of arguments depends on the predicate that's used. Each argument is either a capture (`@name`) or a string (`"some string"`). The following predicates are supported by tree-sitter: - `#eq?`/`#not-eq?`: The first argument (a capture) must/must not be equal to the second argument (a capture or a string). - `#match?`/`#not-match?`: The first argument (a capture) must/must not match the regex given in the second argument (a string). 
- `#any-of?`/`#not-any-of?`: The first argument (a capture) must/must not be one of the other arguments (strings). Additionally, we support some custom predicates for indent queries: - `#not-kind-eq?`: The kind of the first argument (a capture) must not be equal to the second argument (a string). - `#same-line?`/`#not-same-line?`: The captures given by the 2 arguments must/must not start on the same line. - `#one-line?`/`#not-one-line?`: The captures given by the first argument must/must not span a total of one line. ### Scopes Added indents don't always apply to the whole node. For example, in most cases when a node should be indented, we actually only want everything except for its first line to be indented. For this, there are several scopes (more scopes may be added in the future if required): - `tail`: This scope applies to everything except for the first line of the captured node. - `all`: This scope applies to the whole captured node. This is only different from `tail` when the captured node is the first node on its line. For example, imagine we have the following function ```rust fn aha() { // ←─────────────────────────────────────╮ let take = "on me"; // ←──────────────╮ scope: │ let take = "me on"; // ├─ "tail" ├─ (block) @indent let ill = be_gone_days(1 || 2); // │ │ } // ←───────────────────────────────────┴──────────┴─ "}" @outdent // scope: "all" ``` We can write the following query with the `#set!` declaration: ```scm ((block) @indent (#set! "scope" "tail")) ("}" @outdent (#set! "scope" "all")) ``` As we can see, the "tail" scope covers the node, except for the first line. Everything up to and including the closing brace gets an indent level of 1. Then, on the closing brace, we encounter an outdent with a scope of "all", which means the first line is included, and the indent level is cancelled out on this line. (Note these scopes are the defaults for `@indent` and `@outdent`—they are written explicitly for demonstration.) 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/guides/injection.md000066400000000000000000000050031500614314100251100ustar00rootroot00000000000000## Adding Injection Queries Writing language injection queries allows one to highlight a specific node as a different language. In addition to the [standard][upstream-docs] language injection options used by tree-sitter, there are a few Helix specific extensions that allow for more control. An example of a simple query that would highlight all strings as bash in Nix: ```scm ((string_expression (string_fragment) @injection.content) (#set! injection.language "bash")) ``` Another example is this query, which highlights links in comments and keywords like "TODO", by reusing the dedicated "comment" language: ``` ((comment) @injection.content (#set! injection.language "comment")) ``` ## Capture Types - `@injection.language` (standard): The captured node may contain the language name used to highlight the node captured by `@injection.content`. - `@injection.content` (standard): Marks the content to be highlighted as the language captured with `@injection.language` _et al_. - `@injection.filename` (extension): The captured node may contain a filename with a file-extension known to Helix, highlighting `@injection.content` as that language. This uses the language extensions defined in both the default languages.toml distributed with Helix, as well as user defined languages. - `@injection.shebang` (extension): The captured node may contain a shebang used to choose a language to highlight as. This also uses the shebangs defined in the default and user `languages.toml`. ## Settings - `injection.combined` (standard): Indicates that all the matching nodes in the tree should have their content parsed as one nested document. 
- `injection.language` (standard): Forces the captured content to be highlighted as the given language - `injection.include-children` (standard): Indicates that the content node’s entire text should be re-parsed, including the text of its child nodes. By default, child nodes’ text will be excluded from the injected document. - `injection.include-unnamed-children` (extension): Same as `injection.include-children` but only for unnamed child nodes. ## Predicates - `#eq?` (standard): The first argument (a capture) must be equal to the second argument (a capture or a string). - `#match?` (standard): The first argument (a capture) must match the regex given in the second argument (a string). - `#any-of?` (standard): The first argument (a capture) must be one of the other arguments (strings). [upstream-docs]: https://tree-sitter.github.io/tree-sitter/3-syntax-highlighting.html#language-injection helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/guides/textobject.md000066400000000000000000000040171500614314100253050ustar00rootroot00000000000000## Adding textobject queries Helix supports textobjects that are language specific, such as functions, classes, etc. These textobjects require an accompanying tree-sitter grammar and a `textobjects.scm` query file to work properly. Tree-sitter allows us to query the source code syntax tree and capture specific parts of it. The queries are written in a lisp dialect. More information on how to write queries can be found in the [official tree-sitter documentation][tree-sitter-queries]. Query files should be placed in `runtime/queries/{language}/textobjects.scm` when contributing to Helix. Note that to test the query files locally you should put them under your local runtime directory (`~/.config/helix/runtime` on Linux for example). 
The following [captures][tree-sitter-captures] are recognized: | Capture Name | | --- | | `function.inside` | | `function.around` | | `class.inside` | | `class.around` | | `test.inside` | | `test.around` | | `parameter.inside` | | `parameter.around` | | `comment.inside` | | `comment.around` | | `entry.inside` | | `entry.around` | [Example query files][textobject-examples] can be found in the helix GitHub repository. ## Queries for textobject based navigation Tree-sitter based navigation in Helix is done using captures in the following order: - `object.movement` - `object.around` - `object.inside` For example if a `function.around` capture has been already defined for a language in its `textobjects.scm` file, function navigation should also work automatically. `function.movement` should be defined only if the node captured by `function.around` doesn't make sense in a navigation context. [tree-sitter-queries]: https://tree-sitter.github.io/tree-sitter/using-parsers/queries/1-syntax.html [tree-sitter-captures]: https://tree-sitter.github.io/tree-sitter/using-parsers/queries/2-operators.html#capturing-nodes [textobject-examples]: https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+path%3A%2A%2A/textobjects.scm&type=Code&ref=advsearch&l=&l= helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/install.md000066400000000000000000000021341500614314100233160ustar00rootroot00000000000000# Installing Helix The typical way to install Helix is via [your operating system's package manager](./package-managers.md). Note that: - To get the latest nightly version of Helix, you need to [build from source](./building-from-source.md). - To take full advantage of Helix, install the language servers for your preferred programming languages. See the [wiki](https://github.com/helix-editor/helix/wiki/Language-Server-Configurations) for instructions. ## Pre-built binaries Download pre-built binaries from the [GitHub Releases page](https://github.com/helix-editor/helix/releases). 
The tarball contents include an `hx` binary and a `runtime` directory. To set up Helix: 1. Add the `hx` binary to your system's `$PATH` to allow it to be used from the command line. 2. Copy the `runtime` directory to a location that `hx` searches for runtime files. A typical location on Linux/macOS is `~/.config/helix/runtime`. To see the runtime directories that `hx` searches, run `hx --health`. If necessary, you can override the default runtime location by setting the `HELIX_RUNTIME` environment variable. helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/keymap.md000066400000000000000000001125171500614314100231450ustar00rootroot00000000000000## Keymap - [Normal mode](#normal-mode) - [Movement](#movement) - [Changes](#changes) - [Shell](#shell) - [Selection manipulation](#selection-manipulation) - [Search](#search) - [Minor modes](#minor-modes) - [View mode](#view-mode) - [Goto mode](#goto-mode) - [Match mode](#match-mode) - [Window mode](#window-mode) - [Space mode](#space-mode) - [Popup](#popup) - [Completion Menu](#completion-menu) - [Signature-help Popup](#signature-help-popup) - [Unimpaired](#unimpaired) - [Insert mode](#insert-mode) - [Select / extend mode](#select--extend-mode) - [Picker](#picker) - [Prompt](#prompt) > 💡 Mappings marked (**LSP**) require an active language server for the file. > 💡 Mappings marked (**TS**) require a tree-sitter grammar for the file type. > ⚠️ Some terminals' default key mappings conflict with Helix's. If any of the mappings described on this page do not work as expected, check your terminal's mappings to ensure they do not conflict. See the [wiki](https://github.com/helix-editor/helix/wiki/Terminal-Support) for known conflicts. ## Normal mode Normal mode is the default mode when you launch helix. You can return to it from other modes by pressing the `Escape` key. ### Movement > NOTE: Unlike Vim, `f`, `F`, `t` and `T` are not confined to the current line. 
| Key | Description | Command | | ----- | ----------- | ------- | | `h`, `Left` | Move left | `move_char_left` | | `j`, `Down` | Move down | `move_visual_line_down` | | `k`, `Up` | Move up | `move_visual_line_up` | | `l`, `Right` | Move right | `move_char_right` | | `w` | Move next word start | `move_next_word_start` | | `b` | Move previous word start | `move_prev_word_start` | | `e` | Move next word end | `move_next_word_end` | | `W` | Move next WORD start | `move_next_long_word_start` | | `B` | Move previous WORD start | `move_prev_long_word_start` | | `E` | Move next WORD end | `move_next_long_word_end` | | `t` | Find 'till next char | `find_till_char` | | `f` | Find next char | `find_next_char` | | `T` | Find 'till previous char | `till_prev_char` | | `F` | Find previous char | `find_prev_char` | | `G` | Go to line number `` | `goto_line` | | `Alt-.` | Repeat last motion (`f`, `t`, `m`, `[` or `]`) | `repeat_last_motion` | | `Home` | Move to the start of the line | `goto_line_start` | | `End` | Move to the end of the line | `goto_line_end` | | `Ctrl-b`, `PageUp` | Move page up | `page_up` | | `Ctrl-f`, `PageDown` | Move page down | `page_down` | | `Ctrl-u` | Move cursor and page half page up | `page_cursor_half_up` | | `Ctrl-d` | Move cursor and page half page down | `page_cursor_half_down` | | `Ctrl-i` | Jump forward on the jumplist | `jump_forward` | | `Ctrl-o` | Jump backward on the jumplist | `jump_backward` | | `Ctrl-s` | Save the current selection to the jumplist | `save_selection` | ### Changes | Key | Description | Command | | ----- | ----------- | ------- | | `r` | Replace with a character | `replace` | | `R` | Replace with yanked text | `replace_with_yanked` | | `~` | Switch case of the selected text | `switch_case` | | `` ` `` | Set the selected text to lower case | `switch_to_lowercase` | | `` Alt-` `` | Set the selected text to upper case | `switch_to_uppercase` | | `i` | Insert before selection | `insert_mode` | | `a` | Insert after selection 
(append) | `append_mode` | | `I` | Insert at the start of the line | `insert_at_line_start` | | `A` | Insert at the end of the line | `insert_at_line_end` | | `o` | Open new line below selection | `open_below` | | `O` | Open new line above selection | `open_above` | | `.` | Repeat last insert | N/A | | `u` | Undo change | `undo` | | `U` | Redo change | `redo` | | `Alt-u` | Move backward in history | `earlier` | | `Alt-U` | Move forward in history | `later` | | `y` | Yank selection | `yank` | | `p` | Paste after selection | `paste_after` | | `P` | Paste before selection | `paste_before` | | `"` `` | Select a register to yank to or paste from | `select_register` | | `>` | Indent selection | `indent` | | `<` | Unindent selection | `unindent` | | `=` | Format selection (**LSP**) | `format_selections` | | `d` | Delete selection | `delete_selection` | | `Alt-d` | Delete selection, without yanking | `delete_selection_noyank` | | `c` | Change selection (delete and enter insert mode) | `change_selection` | | `Alt-c` | Change selection (delete and enter insert mode, without yanking) | `change_selection_noyank` | | `Ctrl-a` | Increment object (number) under cursor | `increment` | | `Ctrl-x` | Decrement object (number) under cursor | `decrement` | | `Q` | Start/stop macro recording to the selected register (experimental) | `record_macro` | | `q` | Play back a recorded macro from the selected register (experimental) | `replay_macro` | #### Shell | Key | Description | Command | | ------ | ----------- | ------- | | | | Pipe each selection through shell command, replacing with output | `shell_pipe` | | Alt-| | Pipe each selection into shell command, ignoring output | `shell_pipe_to` | | `!` | Run shell command, inserting output before each selection | `shell_insert_output` | | `Alt-!` | Run shell command, appending output after each selection | `shell_append_output` | | `$` | Pipe each selection into shell command, keep selections where command returned 0 | `shell_keep_pipe` | ### 
Selection manipulation | Key | Description | Command | | ----- | ----------- | ------- | | `s` | Select all regex matches inside selections | `select_regex` | | `S` | Split selection into sub selections on regex matches | `split_selection` | | `Alt-s` | Split selection on newlines | `split_selection_on_newline` | | `Alt-minus` | Merge selections | `merge_selections` | | `Alt-_` | Merge consecutive selections | `merge_consecutive_selections` | | `&` | Align selection in columns | `align_selections` | | `_` | Trim whitespace from the selection | `trim_selections` | | `;` | Collapse selection onto a single cursor | `collapse_selection` | | `Alt-;` | Flip selection cursor and anchor | `flip_selections` | | `Alt-:` | Ensures the selection is in forward direction | `ensure_selections_forward` | | `,` | Keep only the primary selection | `keep_primary_selection` | | `Alt-,` | Remove the primary selection | `remove_primary_selection` | | `C` | Copy selection onto the next line (Add cursor below) | `copy_selection_on_next_line` | | `Alt-C` | Copy selection onto the previous line (Add cursor above) | `copy_selection_on_prev_line` | | `(` | Rotate main selection backward | `rotate_selections_backward` | | `)` | Rotate main selection forward | `rotate_selections_forward` | | `Alt-(` | Rotate selection contents backward | `rotate_selection_contents_backward` | | `Alt-)` | Rotate selection contents forward | `rotate_selection_contents_forward` | | `%` | Select entire file | `select_all` | | `x` | Select current line, if already selected, extend to next line | `extend_line_below` | | `X` | Extend selection to line bounds (line-wise selection) | `extend_to_line_bounds` | | `Alt-x` | Shrink selection to line bounds (line-wise selection) | `shrink_to_line_bounds` | | `J` | Join lines inside selection | `join_selections` | | `Alt-J` | Join lines inside selection and select the inserted space | `join_selections_space` | | `K` | Keep selections matching the regex | `keep_selections` | | 
`Alt-K` | Remove selections matching the regex | `remove_selections` | | `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` | | `Alt-o`, `Alt-up` | Expand selection to parent syntax node (**TS**) | `expand_selection` | | `Alt-i`, `Alt-down` | Shrink syntax tree object selection (**TS**) | `shrink_selection` | | `Alt-p`, `Alt-left` | Select previous sibling node in syntax tree (**TS**) | `select_prev_sibling` | | `Alt-n`, `Alt-right` | Select next sibling node in syntax tree (**TS**) | `select_next_sibling` | | `Alt-a` | Select all sibling nodes in syntax tree (**TS**) | `select_all_siblings` | | `Alt-I`, `Alt-Shift-down`| Select all children nodes in syntax tree (**TS**) | `select_all_children` | | `Alt-e` | Move to end of parent node in syntax tree (**TS**) | `move_parent_node_end` | | `Alt-b` | Move to start of parent node in syntax tree (**TS**) | `move_parent_node_start` | ### Search Search commands all operate on the `/` register by default. To use a different register, use `"`. | Key | Description | Command | | ----- | ----------- | ------- | | `/` | Search for regex pattern | `search` | | `?` | Search for previous pattern | `rsearch` | | `n` | Select next search match | `search_next` | | `N` | Select previous search match | `search_prev` | | `*` | Use current selection as the search pattern, automatically wrapping with `\b` on word boundaries | `search_selection_detect_word_boundaries` | | `Alt-*` | Use current selection as the search pattern | `search_selection` | ### Minor modes These sub-modes are accessible from normal mode and typically switch back to normal mode after a command. 
| Key | Description | Command | | ----- | ----------- | ------- | | `v` | Enter [select (extend) mode](#select--extend-mode) | `select_mode` | | `g` | Enter [goto mode](#goto-mode) | N/A | | `m` | Enter [match mode](#match-mode) | N/A | | `:` | Enter command mode | `command_mode` | | `z` | Enter [view mode](#view-mode) | N/A | | `Z` | Enter sticky [view mode](#view-mode) | N/A | | `Ctrl-w` | Enter [window mode](#window-mode) | N/A | | `Space` | Enter [space mode](#space-mode) | N/A | These modes (except command mode) can be configured by [remapping keys](https://docs.helix-editor.com/remapping.html#minor-modes). #### View mode Accessed by typing `z` in [normal mode](#normal-mode). View mode is intended for scrolling and manipulating the view without changing the selection. The "sticky" variant of this mode (accessed by typing `Z` in normal mode) is persistent and can be exited using the escape key. This is useful when you're simply looking over text and not actively editing it. | Key | Description | Command | | ----- | ----------- | ------- | | `z`, `c` | Vertically center the line | `align_view_center` | | `t` | Align the line to the top of the screen | `align_view_top` | | `b` | Align the line to the bottom of the screen | `align_view_bottom` | | `m` | Align the line to the middle of the screen (horizontally) | `align_view_middle` | | `j`, `down` | Scroll the view downwards | `scroll_down` | | `k`, `up` | Scroll the view upwards | `scroll_up` | | `Ctrl-f`, `PageDown` | Move page down | `page_down` | | `Ctrl-b`, `PageUp` | Move page up | `page_up` | | `Ctrl-u` | Move cursor and page half page up | `page_cursor_half_up` | | `Ctrl-d` | Move cursor and page half page down | `page_cursor_half_down` | #### Goto mode Accessed by typing `g` in [normal mode](#normal-mode). Jumps to various locations. 
| Key | Description | Command | | ----- | ----------- | ------- | | `g` | Go to line number `` else start of file | `goto_file_start` | | | | Go to column number `` else start of line | `goto_column` | | `e` | Go to the end of the file | `goto_last_line` | | `f` | Go to files in the selections | `goto_file` | | `h` | Go to the start of the line | `goto_line_start` | | `l` | Go to the end of the line | `goto_line_end` | | `s` | Go to first non-whitespace character of the line | `goto_first_nonwhitespace` | | `t` | Go to the top of the screen | `goto_window_top` | | `c` | Go to the middle of the screen | `goto_window_center` | | `b` | Go to the bottom of the screen | `goto_window_bottom` | | `d` | Go to definition (**LSP**) | `goto_definition` | | `y` | Go to type definition (**LSP**) | `goto_type_definition` | | `r` | Go to references (**LSP**) | `goto_reference` | | `i` | Go to implementation (**LSP**) | `goto_implementation` | | `a` | Go to the last accessed/alternate file | `goto_last_accessed_file` | | `m` | Go to the last modified/alternate file | `goto_last_modified_file` | | `n` | Go to next buffer | `goto_next_buffer` | | `p` | Go to previous buffer | `goto_previous_buffer` | | `.` | Go to last modification in current file | `goto_last_modification` | | `j` | Move down textual (instead of visual) line | `move_line_down` | | `k` | Move up textual (instead of visual) line | `move_line_up` | | `w` | Show labels at each word and select the word that belongs to the entered labels | `goto_word` | #### Match mode Accessed by typing `m` in [normal mode](#normal-mode). Please refer to the relevant sections for detailed explanations about [surround](./surround.md) and [textobjects](./textobjects.md). 
| Key | Description | Command | | ----- | ----------- | ------- | | `m` | Goto matching bracket (**TS**) | `match_brackets` | | `s` `` | Surround current selection with `` | `surround_add` | | `r` `` | Replace surround character `` with `` | `surround_replace` | | `d` `` | Delete surround character `` | `surround_delete` | | `a` `` | Select around textobject | `select_textobject_around` | | `i` `` | Select inside textobject | `select_textobject_inner` | TODO: Mappings for selecting syntax nodes (a superset of `[`). #### Window mode Accessed by typing `Ctrl-w` in [normal mode](#normal-mode). This layer is similar to Vim keybindings as Kakoune does not support windows. | Key | Description | Command | | ----- | ------------- | ------- | | `w`, `Ctrl-w` | Switch to next window | `rotate_view` | | `v`, `Ctrl-v` | Vertical right split | `vsplit` | | `s`, `Ctrl-s` | Horizontal bottom split | `hsplit` | | `f` | Go to files in the selections in horizontal splits | `goto_file` | | `F` | Go to files in the selections in vertical splits | `goto_file` | | `h`, `Ctrl-h`, `Left` | Move to left split | `jump_view_left` | | `j`, `Ctrl-j`, `Down` | Move to split below | `jump_view_down` | | `k`, `Ctrl-k`, `Up` | Move to split above | `jump_view_up` | | `l`, `Ctrl-l`, `Right` | Move to right split | `jump_view_right` | | `q`, `Ctrl-q` | Close current window | `wclose` | | `o`, `Ctrl-o` | Only keep the current window, closing all the others | `wonly` | | `H` | Swap window to the left | `swap_view_left` | | `J` | Swap window downwards | `swap_view_down` | | `K` | Swap window upwards | `swap_view_up` | | `L` | Swap window to the right | `swap_view_right` | #### Space mode Accessed by typing `Space` in [normal mode](#normal-mode). This layer is a kludge of mappings, mostly pickers. 
| Key | Description | Command | | ----- | ----------- | ------- | | `f` | Open file picker at LSP workspace root | `file_picker` | | `F` | Open file picker at current working directory | `file_picker_in_current_directory` | | `b` | Open buffer picker | `buffer_picker` | | `j` | Open jumplist picker | `jumplist_picker` | | `g` | Open changed file picker | `changed_file_picker` | | `G` | Debug (experimental) | N/A | | `k` | Show documentation for item under cursor in a [popup](#popup) (**LSP**) | `hover` | | `s` | Open document symbol picker (**LSP**) | `symbol_picker` | | `S` | Open workspace symbol picker (**LSP**) | `workspace_symbol_picker` | | `d` | Open document diagnostics picker (**LSP**) | `diagnostics_picker` | | `D` | Open workspace diagnostics picker (**LSP**) | `workspace_diagnostics_picker` | | `r` | Rename symbol (**LSP**) | `rename_symbol` | | `a` | Apply code action (**LSP**) | `code_action` | | `h` | Select symbol references (**LSP**) | `select_references_to_symbol_under_cursor` | | `'` | Open last fuzzy picker | `last_picker` | | `w` | Enter [window mode](#window-mode) | N/A | | `c` | Comment/uncomment selections | `toggle_comments` | | `C` | Block comment/uncomment selections | `toggle_block_comments` | | `Alt-c` | Line comment/uncomment selections | `toggle_line_comments` | | `p` | Paste system clipboard after selections | `paste_clipboard_after` | | `P` | Paste system clipboard before selections | `paste_clipboard_before` | | `y` | Yank selections to clipboard | `yank_to_clipboard` | | `Y` | Yank main selection to clipboard | `yank_main_selection_to_clipboard` | | `R` | Replace selections by clipboard contents | `replace_selections_with_clipboard` | | `/` | Global search in workspace folder | `global_search` | | `?` | Open command palette | `command_palette` | > 💡 Global search displays results in a fuzzy picker, use `Space + '` to bring it back up after opening a file. ##### Popup Displays documentation for item under cursor. 
Remapping currently not supported. | Key | Description | | ---- | ----------- | | `Ctrl-u` | Scroll up | | `Ctrl-d` | Scroll down | ##### Completion Menu Displays documentation for the selected completion item. Remapping currently not supported. | Key | Description | | ---- | ----------- | | `Shift-Tab`, `Ctrl-p`, `Up` | Previous entry | | `Tab`, `Ctrl-n`, `Down` | Next entry | | `Enter` | Close menu and accept completion | | `Ctrl-c` | Close menu and reject completion | Any other keypresses result in the completion being accepted. ##### Signature-help Popup Displays the signature of the selected completion item. Remapping currently not supported. | Key | Description | | ---- | ----------- | | `Alt-p` | Previous signature | | `Alt-n` | Next signature | #### Unimpaired These mappings are in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaired). | Key | Description | Command | | ----- | ----------- | ------- | | `]d` | Go to next diagnostic (**LSP**) | `goto_next_diag` | | `[d` | Go to previous diagnostic (**LSP**) | `goto_prev_diag` | | `]D` | Go to last diagnostic in document (**LSP**) | `goto_last_diag` | | `[D` | Go to first diagnostic in document (**LSP**) | `goto_first_diag` | | `]f` | Go to next function (**TS**) | `goto_next_function` | | `[f` | Go to previous function (**TS**) | `goto_prev_function` | | `]t` | Go to next type definition (**TS**) | `goto_next_class` | | `[t` | Go to previous type definition (**TS**) | `goto_prev_class` | | `]a` | Go to next argument/parameter (**TS**) | `goto_next_parameter` | | `[a` | Go to previous argument/parameter (**TS**) | `goto_prev_parameter` | | `]c` | Go to next comment (**TS**) | `goto_next_comment` | | `[c` | Go to previous comment (**TS**) | `goto_prev_comment` | | `]T` | Go to next test (**TS**) | `goto_next_test` | | `[T` | Go to previous test (**TS**) | `goto_prev_test` | | `]p` | Go to next paragraph | `goto_next_paragraph` | | `[p` | Go to previous paragraph | `goto_prev_paragraph` | | `]g` 
| Go to next change | `goto_next_change` | | `[g` | Go to previous change | `goto_prev_change` | | `]G` | Go to last change | `goto_last_change` | | `[G` | Go to first change | `goto_first_change` | | `]Space` | Add newline below | `add_newline_below` | | `[Space` | Add newline above | `add_newline_above` | ## Insert mode Accessed by typing `i` in [normal mode](#normal-mode). Insert mode bindings are minimal by default. Helix is designed to be a modal editor, and this is reflected in the user experience and internal mechanics. Changes to the text are only saved for undos when escaping from insert mode to normal mode. > 💡 New users are strongly encouraged to learn the modal editing paradigm > to get the smoothest experience. | Key | Description | Command | | ----- | ----------- | ------- | | `Escape` | Switch to normal mode | `normal_mode` | | `Ctrl-s` | Commit undo checkpoint | `commit_undo_checkpoint` | | `Ctrl-x` | Autocomplete | `completion` | | `Ctrl-r` | Insert a register content | `insert_register` | | `Ctrl-w`, `Alt-Backspace` | Delete previous word | `delete_word_backward` | | `Alt-d`, `Alt-Delete` | Delete next word | `delete_word_forward` | | `Ctrl-u` | Delete to start of line | `kill_to_line_start` | | `Ctrl-k` | Delete to end of line | `kill_to_line_end` | | `Ctrl-h`, `Backspace`, `Shift-Backspace` | Delete previous char | `delete_char_backward` | | `Ctrl-d`, `Delete` | Delete next char | `delete_char_forward` | | `Ctrl-j`, `Enter` | Insert new line | `insert_newline` | These keys are not recommended, but are included for new users less familiar with modal editors. 
| Key | Description | Command | | ----- | ----------- | ------- | | `Up` | Move to previous line | `move_line_up` | | `Down` | Move to next line | `move_line_down` | | `Left` | Backward a char | `move_char_left` | | `Right` | Forward a char | `move_char_right` | | `PageUp` | Move one page up | `page_up` | | `PageDown` | Move one page down | `page_down` | | `Home` | Move to line start | `goto_line_start` | | `End` | Move to line end | `goto_line_end_newline` | As you become more comfortable with modal editing, you may want to disable some insert mode bindings. You can do this by editing your `config.toml` file. ```toml [keys.insert] up = "no_op" down = "no_op" left = "no_op" right = "no_op" pageup = "no_op" pagedown = "no_op" home = "no_op" end = "no_op" ``` ## Select / extend mode Accessed by typing `v` in [normal mode](#normal-mode). Select mode echoes Normal mode, but changes any movements to extend selections rather than replace them. Goto motions are also changed to extend, so that `vgl`, for example, extends the selection to the end of the line. Search is also affected. By default, `n` and `N` will remove the current selection and select the next instance of the search term. Toggling this mode before pressing `n` or `N` makes it possible to keep the current selection. Toggling it on and off during your iterative searching allows you to selectively add search terms to your selections. ## Picker Keys to use within picker. Remapping currently not supported. See the documentation page on [pickers](./pickers.md) for more info. [Prompt](#prompt) keybinds also work in pickers, except where they conflict with picker keybinds. 
| Key | Description | | ----- | ------------- | | `Shift-Tab`, `Up`, `Ctrl-p` | Previous entry | | `Tab`, `Down`, `Ctrl-n` | Next entry | | `PageUp`, `Ctrl-u` | Page up | | `PageDown`, `Ctrl-d` | Page down | | `Home` | Go to first entry | | `End` | Go to last entry | | `Enter` | Open selected | | `Alt-Enter` | Open selected in the background without closing the picker | | `Ctrl-s` | Open horizontally | | `Ctrl-v` | Open vertically | | `Ctrl-t` | Toggle preview | | `Escape`, `Ctrl-c` | Close picker | ## Prompt Keys to use within prompt. Remapping currently not supported. | Key | Description | | ----- | ------------- | | `Escape`, `Ctrl-c` | Close prompt | | `Alt-b`, `Ctrl-Left` | Backward a word | | `Ctrl-b`, `Left` | Backward a char | | `Alt-f`, `Ctrl-Right` | Forward a word | | `Ctrl-f`, `Right` | Forward a char | | `Ctrl-e`, `End` | Move prompt end | | `Ctrl-a`, `Home` | Move prompt start | | `Ctrl-w`, `Alt-Backspace`, `Ctrl-Backspace` | Delete previous word | | `Alt-d`, `Alt-Delete`, `Ctrl-Delete` | Delete next word | | `Ctrl-u` | Delete to start of line | | `Ctrl-k` | Delete to end of line | | `Backspace`, `Ctrl-h`, `Shift-Backspace` | Delete previous char | | `Delete`, `Ctrl-d` | Delete next char | | `Ctrl-s` | Insert a word under doc cursor, may be changed to Ctrl-r Ctrl-w later | | `Ctrl-p`, `Up` | Select previous history | | `Ctrl-n`, `Down` | Select next history | | `Ctrl-r` | Insert the content of the register selected by following input char | | `Tab` | Select next completion item | | `BackTab` | Select previous completion item | | `Enter` | Open selected | helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/lang-support.md000066400000000000000000000012071500614314100243030ustar00rootroot00000000000000## Language Support The following languages and Language Servers are supported. To use Language Server features, you must first [configure][lsp-config-wiki] the appropriate Language Server. 
You can check the language support in your installed helix version with `hx --health`. Also see the [Language Configuration][lang-config] docs and the [Adding Languages][adding-languages] guide for more language configuration information. {{#include ./generated/lang-support.md}} [lsp-config-wiki]: https://github.com/helix-editor/helix/wiki/Language-Server-Configurations [lang-config]: ./languages.md [adding-languages]: ./guides/adding_languages.md helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/languages.md000066400000000000000000000340461500614314100236250ustar00rootroot00000000000000## Languages Language-specific settings and settings for language servers are configured in `languages.toml` files. ## `languages.toml` files There are three possible locations for a `languages.toml` file: 1. In the Helix source code, which lives in the [Helix repository](https://github.com/helix-editor/helix/blob/master/languages.toml). It provides the default configurations for languages and language servers. 2. In your [configuration directory](./configuration.md). This overrides values from the built-in language configuration. For example, to disable auto-formatting for Rust: ```toml # in /helix/languages.toml [language-server.mylang-lsp] command = "mylang-lsp" [[language]] name = "rust" auto-format = false ``` 3. In a `.helix` folder in your project. Language configuration may also be overridden local to a project by creating a `languages.toml` file in a `.helix` folder. Its settings will be merged with the language configuration in the configuration directory and the built-in configuration. ## Language configuration Each language is configured by adding a `[[language]]` section to a `languages.toml` file. 
For example: ```toml [[language]] name = "mylang" scope = "source.mylang" injection-regex = "mylang" file-types = ["mylang", "myl"] comment-tokens = "#" indent = { tab-width = 2, unit = " " } formatter = { command = "mylang-formatter" , args = ["--stdin"] } language-servers = [ "mylang-lsp" ] ``` These configuration keys are available: | Key | Description | | ---- | ----------- | | `name` | The name of the language | | `language-id` | The language-id for language servers, checkout the table at [TextDocumentItem](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentItem) for the right id | | `scope` | A string like `source.js` that identifies the language. Currently, we strive to match the scope names used by popular TextMate grammars and by the Linguist library. Usually `source.` or `text.` in case of markup languages | | `injection-regex` | regex pattern that will be tested against a language name in order to determine whether this language should be used for a potential [language injection][treesitter-language-injection] site. | | `file-types` | The filetypes of the language, for example `["yml", "yaml"]`. See the file-type detection section below. | | `shebangs` | The interpreters from the shebang line, for example `["sh", "bash"]` | | `roots` | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` | | `auto-format` | Whether to autoformat this language when saving | | `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `error`, `warning`, `info`, `hint`) | | `comment-tokens` | The tokens to use as a comment token, either a single token `"//"` or an array `["//", "///", "//!"]` (the first token will be used for commenting). 
Also configurable as `comment-token` for backwards compatibility| | `block-comment-tokens`| The start and end tokens for a multiline comment either an array or single table of `{ start = "/*", end = "*/"}`. The first set of tokens will be used for commenting, any pairs in the array can be uncommented | | `indent` | The indent to use. Has sub keys `unit` (the text inserted into the document when indenting; usually set to N spaces or `"\t"` for tabs) and `tab-width` (the number of spaces rendered for a tab) | | `language-servers` | The Language Servers used for this language. See below for more information in the section [Configuring Language Servers for a language](#configuring-language-servers-for-a-language) | | `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) | | `formatter` | The formatter for the language, it will take precedence over the lsp when defined. The formatter must be able to take the original file as input from stdin and write the formatted file to stdout. The filename of the current buffer can be passed as argument by using the `%{buffer_name}` expansion variable. See below for more information in the [Configuring the formatter command](#configuring-the-formatter-command) | | `soft-wrap` | [editor.softwrap](./editor.md#editorsoft-wrap-section) | `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap-at-text-width` is set, defaults to `editor.text-width` | | `rulers` | Overrides the `editor.rulers` config key for the language. | | `path-completion` | Overrides the `editor.path-completion` config key for the language. | | `workspace-lsp-roots` | Directories relative to the workspace root that are treated as LSP roots. Should only be set in `.helix/config.toml`. Overwrites the setting of the same name in `config.toml` if set. | | `persistent-diagnostic-sources` | An array of LSP diagnostic sources assumed unchanged when the language server resends the same set of diagnostics. 
Helix can track the position for these diagnostics internally instead. Useful for diagnostics that are recomputed on save. ### File-type detection and the `file-types` key Helix determines which language configuration to use based on the `file-types` key from the above section. `file-types` is a list of strings or tables, for example: ```toml file-types = ["toml", { glob = "Makefile" }, { glob = ".git/config" }, { glob = ".github/workflows/*.yaml" } ] ``` When determining a language configuration to use, Helix searches the file-types with the following priorities: 1. Glob: values in `glob` tables are checked against the full path of the given file. Globs are standard Unix-style path globs (e.g. the kind you use in Shell) and can be used to match paths for a specific prefix, suffix, directory, etc. In the above example, the `{ glob = "Makefile" }` config would match files with the name `Makefile`, the `{ glob = ".git/config" }` config would match `config` files in `.git` directories, and the `{ glob = ".github/workflows/*.yaml" }` config would match any `yaml` files in `.github/workflows` directories. Note that globs should always use the Unix path separator `/` even on Windows systems; the matcher will automatically take the machine-specific separators into account. If the glob isn't an absolute path or doesn't already start with a glob prefix, `*/` will automatically be added to ensure it matches for any subdirectory. 2. Extension: if there are no glob matches, any `file-types` string that matches the file extension of a given file wins. In the example above, the `"toml"` config matches files like `Cargo.toml` or `languages.toml`. ### Configuring the formatter command [Command line expansions](./command-line.md#expansions) are supported in the arguments of the formatter command. 
In particular, the `%{buffer_name}` variable can be passed as argument to the formatter: ```toml formatter = { command = "mylang-formatter" , args = ["--stdin", "--stdin-filename %{buffer_name}"] } ``` ## Language Server configuration Language servers are configured separately in the table `language-server` in the same file as the languages `languages.toml` For example: ```toml [language-server.mylang-lsp] command = "mylang-lsp" args = ["--stdio"] config = { provideFormatter = true } environment = { "ENV1" = "value1", "ENV2" = "value2" } [language-server.efm-lsp-prettier] command = "efm-langserver" [language-server.efm-lsp-prettier.config] documentFormatting = true languages = { typescript = [ { formatCommand ="prettier --stdin-filepath ${INPUT}", formatStdin = true } ] } ``` These are the available options for a language server. | Key | Description | | ---- | ----------- | | `command` | The name or path of the language server binary to execute. Binaries must be in `$PATH` | | `args` | A list of arguments to pass to the language server binary | | `config` | Language server initialization options | | `timeout` | The maximum time a request to the language server may take, in seconds. Defaults to `20` | | `environment` | Any environment variables that will be used when starting the language server `{ "KEY1" = "Value1", "KEY2" = "Value2" }` | | `required-root-patterns` | A list of `glob` patterns to look for in the working directory. The language server is started if at least one of them is found. | A `format` sub-table within `config` can be used to pass extra formatting options to [Document Formatting Requests](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_formatting). 
For example, with typescript: ```toml [language-server.typescript-language-server] # pass format options according to https://github.com/typescript-language-server/typescript-language-server#workspacedidchangeconfiguration omitting the "[language].format." prefix. config = { format = { "semicolons" = "insert", "insertSpaceBeforeFunctionParenthesis" = true } } ``` ### Configuring Language Servers for a language The `language-servers` attribute in a language tells helix which language servers are used for this language. They have to be defined in the `[language-server]` table as described in the previous section. Different languages can use the same language server instance, e.g. `typescript-language-server` is used for javascript, jsx, tsx and typescript by default. The definition order of language servers affects the order in the results list of code action menu. In case multiple language servers are specified in the `language-servers` attribute of a `language`, it's often useful to only enable/disable certain language-server features for these language servers. As an example, `efm-lsp-prettier` of the previous example is used only with a formatting command `prettier`, so everything else should be handled by the `typescript-language-server` (which is configured by default). The language configuration for typescript could look like this: ```toml [[language]] name = "typescript" language-servers = [ { name = "efm-lsp-prettier", only-features = [ "format" ] }, "typescript-language-server" ] ``` or equivalent: ```toml [[language]] name = "typescript" language-servers = [ { name = "typescript-language-server", except-features = [ "format" ] }, "efm-lsp-prettier" ] ``` Each requested LSP feature is prioritized in the order of the `language-servers` array. For example, the first `goto-definition` supported language server (in this case `typescript-language-server`) will be taken for the relevant LSP request (command `goto_definition`). 
The features `diagnostics`, `code-action`, `completion`, `document-symbols` and `workspace-symbols` are an exception to that rule, as they are working for all language servers at the same time and are merged together, if enabled for the language. If no `except-features` or `only-features` is given, all features for the language server are enabled. If a language server itself doesn't support a feature, the next language server array entry will be tried (and so on). The list of supported features is: - `format` - `goto-definition` - `goto-declaration` - `goto-type-definition` - `goto-reference` - `goto-implementation` - `signature-help` - `hover` - `document-highlight` - `completion` - `code-action` - `workspace-command` - `document-symbols` - `workspace-symbols` - `diagnostics` - `rename-symbol` - `inlay-hints` ## Tree-sitter grammar configuration The source for a language's tree-sitter grammar is specified in a `[[grammar]]` section in `languages.toml`. For example: ```toml [[grammar]] name = "mylang" source = { git = "https://github.com/example/mylang", rev = "a250c4582510ff34767ec3b7dcdd3c24e8c8aa68" } ``` Grammar configuration takes these keys: | Key | Description | | --- | ----------- | | `name` | The name of the tree-sitter grammar | | `source` | The method of fetching the grammar - a table with a schema defined below | Where `source` is a table with either these keys when using a grammar from a git repository: | Key | Description | | --- | ----------- | | `git` | A git remote URL from which the grammar should be cloned | | `rev` | The revision (commit hash or tag) which should be fetched | | `subpath` | A path within the grammar directory which should be built. Some grammar repositories host multiple grammars (for example `tree-sitter-typescript` and `tree-sitter-ocaml`) in subdirectories. This key is used to point `hx --grammar build` to the correct path for compilation. 
When omitted, the root of repository is used | ### Choosing grammars You may use a top-level `use-grammars` key to control which grammars are fetched and built when using `hx --grammar fetch` and `hx --grammar build`. ```toml # Note: this key must come **before** the [[language]] and [[grammar]] sections use-grammars = { only = [ "rust", "c", "cpp" ] } # or use-grammars = { except = [ "yaml", "json" ] } ``` When omitted, all grammars are fetched and built. [treesitter-language-injection]: https://tree-sitter.github.io/tree-sitter/3-syntax-highlighting.html#language-injection helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/other-software.md000066400000000000000000000052371500614314100246300ustar00rootroot00000000000000# Helix mode in other software Helix' keymap and interaction model ([Using Helix](#usage.md)) is easier to adopt if it can be used consistently in many editing contexts. Yet, certain use cases cannot easily be addressed directly in Helix. Similar to vim, this leads to the creation of "Helix mode" in various other software products, allowing Helix-style editing for a greater variety of use cases. "Helix mode" is frequently still in early stages or missing entirely. For such cases, we also link to relevant bugs or discussions. ## Other editors | Editor | Plugin or feature providing Helix editing | Comments | --- | --- | --- | | [Vim](https://www.vim.org/) | [helix.vim](https://github.com/chtenb/helix.vim) config | | [IntelliJ IDEA](https://www.jetbrains.com/idea/) / [Android Studio](https://developer.android.com/studio)| [IdeaVim](https://plugins.jetbrains.com/plugin/164-ideavim) plugin + [helix.idea.vim](https://github.com/chtenb/helix.vim) config | Minimum recommended version is IdeaVim 2.19.0. 
| [Visual Studio](https://visualstudio.microsoft.com/) | [VsVim](https://marketplace.visualstudio.com/items?itemName=JaredParMSFT.VsVim) plugin + [helix.vs.vim](https://github.com/chtenb/helix.vim) config | | [Visual Studio Code](https://code.visualstudio.com/) | [Dance](https://marketplace.visualstudio.com/items?itemName=gregoire.dance) extension, or its [Helix fork](https://marketplace.visualstudio.com/items?itemName=kend.dancehelixkey) | The Helix fork has diverged. You can also use the original Dance and tweak its keybindings directly (try [this config](https://github.com/71/dance/issues/299#issuecomment-1655509531)). | [Visual Studio Code](https://code.visualstudio.com/) | [Helix for VS Code](https://marketplace.visualstudio.com/items?itemName=jasew.vscode-helix-emulation) extension| | [Zed](https://zed.dev/) | native via keybindings ([Bug](https://github.com/zed-industries/zed/issues/4642)) | | [CodeMirror](https://codemirror.net/) | [codemirror-helix](https://gitlab.com/_rvidal/codemirror-helix) | ## Shells | Shell | Plugin or feature providing Helix editing | --- | --- | Fish | [Feature Request](https://github.com/fish-shell/fish-shell/issues/7748) | Fish | [fish-helix](https://github.com/sshilovsky/fish-helix/tree/main) | Zsh | [helix-zsh](https://github.com/john-h-k/helix-zsh) or [zsh-helix-mode](https://github.com/Multirious/zsh-helix-mode) | Nushell | [Feature Request](https://github.com/nushell/reedline/issues/639) ## Other software | Software | Plugin or feature providing Helix editing. | Comments | --- | --- | --- | | [Obsidian](https://obsidian.md/) | [Obsidian-Helix](https://github.com/Sinono3/obsidian-helix) | Uses `codemirror-helix` listed above. 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/package-managers.md000066400000000000000000000102171500614314100250370ustar00rootroot00000000000000## Package managers - [Linux](#linux) - [Ubuntu/Debian](#ubuntudebian) - [Ubuntu (PPA)](#ubuntu-ppa) - [Fedora/RHEL](#fedorarhel) - [Arch Linux extra](#arch-linux-extra) - [NixOS](#nixos) - [Flatpak](#flatpak) - [Snap](#snap) - [AppImage](#appimage) - [macOS](#macos) - [Homebrew Core](#homebrew-core) - [MacPorts](#macports) - [Windows](#windows) - [Winget](#winget) - [Scoop](#scoop) - [Chocolatey](#chocolatey) - [MSYS2](#msys2) [![Packaging status](https://repology.org/badge/vertical-allrepos/helix-editor.svg)](https://repology.org/project/helix-editor/versions) ## Linux The following third party repositories are available: ### Ubuntu/Debian Install the Debian package from the release page. If you are running a system older than Ubuntu 22.04, Mint 21, or Debian 12, you can build the `.deb` file locally [from source](./building-from-source.md#building-the-debian-package). ### Ubuntu (PPA) Add the `PPA` for Helix: ```sh sudo add-apt-repository ppa:maveonair/helix-editor sudo apt update sudo apt install helix ``` ### Fedora/RHEL ```sh sudo dnf install helix ``` ### Arch Linux extra Releases are available in the `extra` repository: ```sh sudo pacman -S helix ``` > 💡 When installed from the `extra` repository, run Helix with `helix` instead of `hx`. > > For example: > ```sh > helix --health > ``` > to check health Additionally, a [helix-git](https://aur.archlinux.org/packages/helix-git/) package is available in the AUR, which builds the master branch. ### NixOS Helix is available in [nixpkgs](https://github.com/nixos/nixpkgs) through the `helix` attribute, the unstable channel usually carries the latest release. Helix is also available as a [flake](https://wiki.nixos.org/wiki/Flakes) in the project root. Use `nix develop` to spin up a reproducible development shell. 
Outputs are cached for each push to master using [Cachix](https://www.cachix.org/). The flake is configured to automatically make use of this cache assuming the user accepts the new settings on first use. If you are using a version of Nix without flakes enabled, [install Cachix CLI](https://docs.cachix.org/installation) and use `cachix use helix` to configure Nix to use cached outputs when possible. ### Flatpak Helix is available on [Flathub](https://flathub.org/en-GB/apps/com.helix_editor.Helix): ```sh flatpak install flathub com.helix_editor.Helix flatpak run com.helix_editor.Helix ``` ### Snap Helix is available on [Snapcraft](https://snapcraft.io/helix) and can be installed with: ```sh snap install --classic helix ``` This will install Helix as both `/snap/bin/helix` and `/snap/bin/hx`, so make sure `/snap/bin` is in your `PATH`. ### AppImage Install Helix using the Linux [AppImage](https://appimage.org/) format. Download the official Helix AppImage from the [latest releases](https://github.com/helix-editor/helix/releases/latest) page. ```sh chmod +x helix-*.AppImage # change permission for executable mode ./helix-*.AppImage # run helix ``` You can optionally [add the `.desktop` file](./building-from-source.md#configure-the-desktop-shortcut). Helix must be installed in `PATH` with the name `hx`. For example: ```sh mkdir -p "$HOME/.local/bin" mv helix-*.AppImage "$HOME/.local/bin/hx" ``` and make sure `~/.local/bin` is in your `PATH`. ## macOS ### Homebrew Core ```sh brew install helix ``` ### MacPorts ```sh sudo port install helix ``` ## Windows Install on Windows using [Winget](https://learn.microsoft.com/en-us/windows/package-manager/winget/), [Scoop](https://scoop.sh/), [Chocolatey](https://chocolatey.org/) or [MSYS2](https://msys2.org/). ### Winget Windows Package Manager winget command-line tool is by default available on Windows 11 and modern versions of Windows 10 as a part of the App Installer. 
You can get [App Installer from the Microsoft Store](https://www.microsoft.com/p/app-installer/9nblggh4nns1#activetab=pivot:overviewtab). If it's already installed, make sure it is updated with the latest version. ```sh winget install Helix.Helix ``` ### Scoop ```sh scoop install helix ``` ### Chocolatey ```sh choco install helix ``` ### MSYS2 For 64-bit Windows 8.1 or above: ```sh pacman -S mingw-w64-ucrt-x86_64-helix ``` helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/pickers.md000066400000000000000000000031411500614314100233070ustar00rootroot00000000000000## Using pickers Helix has a variety of pickers, which are interactive windows used to select various kinds of items. These include a file picker, global search picker, and more. Most pickers are accessed via keybindings in [space mode](./keymap.md#space-mode). Pickers have their own [keymap](./keymap.md#picker) for navigation. ### Filtering Picker Results Most pickers perform fuzzy matching using [fzf syntax](https://github.com/junegunn/fzf?tab=readme-ov-file#search-syntax). Two exceptions are the global search picker, which uses regex, and the workspace symbol picker, which passes search terms to the language server. Note that OR operations (`|`) are not currently supported. If a picker shows multiple columns, you may apply the filter to a specific column by prefixing the column name with `%`. Column names can be shortened to any prefix, so `%p`, `%pa` or `%pat` all mean the same as `%path`. For example, a query of `helix %p .toml !lang` in the global search picker searches for the term "helix" within files with paths ending in ".toml" but not including "lang". You can insert the contents of a [register](./registers.md) using `Ctrl-r` followed by a register name. For example, one could insert the currently selected text using `Ctrl-r`-`.`, or the directory of the current file using `Ctrl-r`-`%` followed by `Ctrl-w` to remove the last path section. 
The global search picker will use the contents of the [search register](./registers.md#default-registers) if you press `Enter` without typing a filter. For example, pressing `*`-`Space-/`-`Enter` will start a global search for the currently selected text. helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/registers.md000066400000000000000000000046721500614314100236700ustar00rootroot00000000000000## Registers - [User-defined registers](#user-defined-registers) - [Default registers](#default-registers) - [Special registers](#special-registers) In Helix, registers are storage locations for text and other data, such as the result of a search. Registers can be used to cut, copy, and paste text, similar to the clipboard in other text editors. Usage is similar to Vim, with `"` being used to select a register. ### User-defined registers Helix allows you to create your own named registers for storing text, for example: - `"ay` - Yank the current selection to register `a`. - `"op` - Paste the text in register `o` after the selection. If a register is selected before invoking a change or delete command, the selection will be stored in the register and the action will be carried out: - `"hc` - Store the selection in register `h` and then change it (delete and enter insert mode). - `"md` - Store the selection in register `m` and delete it. ### Default registers Commands that use registers, like yank (`y`), use a default register if none is specified. These registers are used as defaults: | Register character | Contains | | --- | --- | | `/` | Last search | | `:` | Last executed command | | `"` | Last yanked text | | `@` | Last recorded macro | ### Special registers Some registers have special behavior when read from and written to. | Register character | When read | When written | | --- | --- | --- | | `_` | No values are returned | All values are discarded | | `#` | Selection indices (first selection is `1`, second is `2`, etc.) 
| This register is not writable | | `.` | Contents of the current selections | This register is not writable | | `%` | Name of the current file | This register is not writable | | `+` | Reads from the system clipboard | Joins and yanks to the system clipboard | | `*` | Reads from the primary clipboard | Joins and yanks to the primary clipboard | When yanking multiple selections to the clipboard registers, the selections are joined with newlines. Pasting from these registers will paste multiple selections if the clipboard was last yanked to by the Helix session. Otherwise the clipboard contents are pasted as one selection. helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/remapping.md000066400000000000000000000117601500614314100236370ustar00rootroot00000000000000## Key remapping Helix currently supports one-way key remapping through a simple TOML configuration file. (More powerful solutions such as rebinding via commands will be available in the future). There are three kinds of commands that can be used in keymaps: * Static commands: commands like `move_char_right` which are usually bound to keys and used for movement and editing. A list of static commands is available in the [Keymap](./keymap.html) documentation and in the source code in [`helix-term/src/commands.rs`](https://github.com/helix-editor/helix/blob/master/helix-term/src/commands.rs) at the invocation of `static_commands!` macro. * Typable commands: commands that can be executed from command mode (`:`), for example `:write!`. See the [Commands](./commands.md) documentation for a list of available typeable commands or the `TypableCommandList` declaration in the source code at [`helix-term/src/commands/typed.rs`](https://github.com/helix-editor/helix/blob/master/helix-term/src/commands/typed.rs). * Macros: sequences of keys that are executed in order. These keybindings start with `@` and then list any number of keys to be executed. For example `@miw` can be used to select the surrounding word. 
For now, macro keybindings are not allowed in keybinding sequences due to limitations in the way that command sequences are executed. Modifier keys (e.g. Alt+o) can be used like `"<A-o>"`, e.g. `"@miw<A-o>"` To remap keys, create a `config.toml` file in your `helix` configuration directory (default `~/.config/helix` on Linux systems) with a structure like this: > 💡 To set a modifier + key as a keymap, type `A-X = ...` or `C-X = ...` for Alt + X or Ctrl + X. Combine with Shift using a dash, e.g. `C-S-esc`. > Within macros, wrap them in `<>`, e.g. `<A-X>` and `<C-X>` to distinguish from the `A` or `C` keys. ```toml # At most one section each of 'keys.normal', 'keys.insert' and 'keys.select' [keys.normal] C-s = ":w" # Maps Ctrl-s to the typable command :w which is an alias for :write (save file) C-o = ":open ~/.config/helix/config.toml" # Maps Ctrl-o to opening of the helix config file a = "move_char_left" # Maps the 'a' key to the move_char_left command w = "move_line_up" # Maps the 'w' key move_line_up "C-S-esc" = "extend_line" # Maps Ctrl-Shift-Escape to extend_line g = { a = "code_action" } # Maps `ga` to show possible code actions "ret" = ["open_below", "normal_mode"] # Maps the enter key to open_below then re-enter normal mode "A-x" = "@x<A-d>" # Maps Alt-x to a macro selecting the whole line and deleting it without yanking it [keys.insert] "A-x" = "normal_mode" # Maps Alt-X to enter normal mode j = { k = "normal_mode" } # Maps `jk` to exit insert mode ``` ## Minor modes Minor modes are accessed by pressing a key (usually from normal mode), giving access to dedicated bindings. Bindings can be modified or added by nesting definitions. 
```toml [keys.insert.j] k = "normal_mode" # Maps `jk` to exit insert mode [keys.normal.g] a = "code_action" # Maps `ga` to show possible code actions # invert `j` and `k` in view mode [keys.normal.z] j = "scroll_up" k = "scroll_down" # create a new minor mode bound to `+` [keys.normal."+"] m = ":run-shell-command make" c = ":run-shell-command cargo build" t = ":run-shell-command cargo test" ``` ## Special keys and modifiers Ctrl, Shift and Alt modifiers are encoded respectively with the prefixes `C-`, `S-` and `A-`. The [Super key](https://en.wikipedia.org/wiki/Super_key_(keyboard_button)) - the Windows/Linux key or the Command key on Mac keyboards - is also supported when using a terminal emulator that supports the [enhanced keyboard protocol](https://github.com/helix-editor/helix/wiki/Terminal-Support#enhanced-keyboard-protocol). The super key is encoded with prefixes `Meta-`, `Cmd-` or `Win-`. These are all synonyms for the super modifier - binding a key with a `Win-` modifier will mean it can be used with the Windows/Linux key or the Command key. ```toml [keys.normal] C-s = ":write" # Ctrl and 's' to write Cmd-s = ":write" # Cmd or Win or Meta and 's' to write ``` Special keys are encoded as follows: | Key name | Representation | | --- | --- | | Backspace | `"backspace"` | | Space | `"space"` | | Return/Enter | `"ret"` | | Left | `"left"` | | Right | `"right"` | | Up | `"up"` | | Down | `"down"` | | Home | `"home"` | | End | `"end"` | | Page Up | `"pageup"` | | Page Down | `"pagedown"` | | Tab | `"tab"` | | Delete | `"del"` | | Insert | `"ins"` | | Null | `"null"` | | Escape | `"esc"` | Keys can be disabled by binding them to the `no_op` command. All other keys such as `?`, `!`, `-` etc. can be used literally: ```toml [keys.normal] "?" = ":write" "!" = ":write" "-" = ":write" ``` Note: `-` can't be used when combined with a modifier, for example `Alt` + `-` should be written as `A-minus`. `A--` is not accepted. 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/surround.md000066400000000000000000000021661500614314100235360ustar00rootroot00000000000000## Surround Helix includes built-in functionality similar to [vim-surround](https://github.com/tpope/vim-surround). The keymappings have been inspired from [vim-sandwich](https://github.com/machakann/vim-sandwich): ![Surround demo](https://user-images.githubusercontent.com/23398472/122865801-97073180-d344-11eb-8142-8f43809982c6.gif) | Key Sequence | Action | | --------------------------------- | --------------------------------------- | | `ms` (after selecting text) | Add surround characters to selection | | `mr` | Replace the closest surround characters | | `md` | Delete the closest surround characters | You can use counts to act on outer pairs. Surround can also act on multiple selections. For example, to change every occurrence of `(use)` to `[use]`: 1. `%` to select the whole file 2. `s` to split the selections on a search term 3. Input `use` and hit Enter 4. `mr([` to replace the parentheses with square brackets Multiple characters are currently not supported, but planned for future release. helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/syntax-aware-motions.md000066400000000000000000000044231500614314100257640ustar00rootroot00000000000000## Moving the selection with syntax-aware motions `Alt-p`, `Alt-o`, `Alt-i`, and `Alt-n` (or `Alt` and arrow keys) allow you to move the selection according to its location in the syntax tree. For example, many languages have the following syntax for function calls: ```js func(arg1, arg2, arg3); ``` A function call might be parsed by tree-sitter into a tree like the following. ```tsq (call function: (identifier) ; func arguments: (arguments ; (arg1, arg2, arg3) (identifier) ; arg1 (identifier) ; arg2 (identifier))) ; arg3 ``` Use `:tree-sitter-subtree` to view the syntax tree of the primary selection. 
In a more intuitive tree format: ``` ┌────┐ │call│ ┌─────┴────┴─────┐ │ │ ┌─────▼────┐ ┌────▼────┐ │identifier│ │arguments│ │ "func" │ ┌────┴───┬─────┴───┐ └──────────┘ │ │ │ │ │ │ ┌─────────▼┐ ┌────▼─────┐ ┌▼─────────┐ │identifier│ │identifier│ │identifier│ │ "arg1" │ │ "arg2" │ │ "arg3" │ └──────────┘ └──────────┘ └──────────┘ ``` If you have a selection that wraps `arg1` (see the tree above), and you use `Alt-n`, it will select the next sibling in the syntax tree: `arg2`. ```js // before func([arg1], arg2, arg3) // after func(arg1, [arg2], arg3); ``` Similarly, `Alt-o` will expand the selection to the parent node, in this case, the arguments node. ```js func[(arg1, arg2, arg3)]; ``` There is also some nuanced behavior that prevents you from getting stuck on a node with no sibling. When using `Alt-p` with a selection on `arg1`, the previous child node will be selected. In the event that `arg1` does not have a previous sibling, the selection will move up the syntax tree and select the previous element. As a result, using `Alt-p` with a selection on `arg1` will move the selection to the "func" `identifier`. [lang-support]: ./lang-support.md helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/textobjects.md000066400000000000000000000043571500614314100242170ustar00rootroot00000000000000## Selecting and manipulating text with textobjects In Helix, textobjects are a way to select, manipulate and operate on a piece of text in a structured way. They allow you to refer to blocks of text based on their structure or purpose, such as a word, sentence, paragraph, or even a function or block of code. 
![Textobject demo](https://user-images.githubusercontent.com/23398472/124231131-81a4bb00-db2d-11eb-9d10-8e577ca7b177.gif) ![Textobject tree-sitter demo](https://user-images.githubusercontent.com/23398472/132537398-2a2e0a54-582b-44ab-a77f-eb818942203d.gif) - `ma` - Select around the object (`va` in Vim, `<a-a>` in Kakoune) - `mi` - Select inside the object (`vi` in Vim, `<a-i>` in Kakoune) | Key after `mi` or `ma` | Textobject selected | | --- | --- | | `w` | Word | | `W` | WORD | | `p` | Paragraph | | `(`, `[`, `'`, etc. | Specified surround pairs | | `m` | The closest surround pair | | `f` | Function | | `t` | Type (or Class) | | `a` | Argument/parameter | | `c` | Comment | | `T` | Test | | `g` | Change | > 💡 `f`, `t`, etc. need a tree-sitter grammar active for the current document and a special tree-sitter query file to work properly. [Only some grammars](./lang-support.md) currently have the query file implemented. Contributions are welcome! ## Navigating using tree-sitter textobjects Navigating between functions, classes, parameters, and other elements is possible using tree-sitter and textobject queries. For example to move to the next function use `]f`, to move to previous type use `[t`, and so on. ![Tree-sitter-nav-demo](https://user-images.githubusercontent.com/23398472/152332550-7dfff043-36a2-4aec-b8f2-77c13eb56d6f.gif) For the full reference see the [unimpaired](./keymap.html#unimpaired) section of the key bind documentation. > 💡 This feature relies on tree-sitter textobjects > and requires the corresponding query file to work properly. helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/themes.md000066400000000000000000000415461500614314100231470ustar00rootroot00000000000000## Themes To use a theme add `theme = "<name>"` to the top of your [`config.toml`](./configuration.md) file, or select it during runtime using `:theme <name>`.
## Creating a theme Create a file with the name of your theme as the file name (i.e `mytheme.toml`) and place it in your `themes` directory (i.e `~/.config/helix/themes` or `%AppData%\helix\themes` on Windows). The directory might have to be created beforehand. > 💡 The names "default" and "base16_default" are reserved for built-in themes > and cannot be overridden by user-defined themes. ### Overview Each line in the theme file is specified as below: ```toml key = { fg = "#ffffff", bg = "#000000", underline = { color = "#ff0000", style = "curl"}, modifiers = ["bold", "italic"] } ``` Where `key` represents what you want to style, `fg` specifies the foreground color, `bg` the background color, `underline` the underline `style`/`color`, and `modifiers` is a list of style modifiers. `bg`, `underline` and `modifiers` can be omitted to defer to the defaults. To specify only the foreground color: ```toml key = "#ffffff" ``` If the key contains a dot `'.'`, it must be quoted to prevent it being parsed as a [dotted key](https://toml.io/en/v1.0.0#keys). ```toml "key.key" = "#ffffff" ``` For inspiration, you can find the default `theme.toml` [here](https://github.com/helix-editor/helix/blob/master/theme.toml) and user-submitted themes [here](https://github.com/helix-editor/helix/blob/master/runtime/themes). ## The details of theme creation ### Color palettes It's recommended to define a palette of named colors, and refer to them in the configuration values in your theme. To do this, add a table called `palette` to your theme file: ```toml "ui.background" = "white" "ui.text" = "black" [palette] white = "#ffffff" black = "#000000" ``` Keep in mind that the `[palette]` table includes all keys after its header, so it should be defined after the normal theme options. The default palette uses the terminal's default 16 colors, and the colors names are listed below. The `[palette]` section in the config file takes precedence over it and is merged into the default palette. 
| Color Name | | --- | | `default` | | `black` | | `red` | | `green` | | `yellow` | | `blue` | | `magenta` | | `cyan` | | `gray` | | `light-red` | | `light-green` | | `light-yellow` | | `light-blue` | | `light-magenta` | | `light-cyan` | | `light-gray` | | `white` | ### Modifiers The following values may be used as modifier, provided they are supported by your terminal emulator. | Modifier | | --- | | `bold` | | `dim` | | `italic` | | `underlined` | | `slow_blink` | | `rapid_blink` | | `reversed` | | `hidden` | | `crossed_out` | > 💡 The `underlined` modifier is deprecated and only available for backwards compatibility. > Its behavior is equivalent to setting `underline.style="line"`. ### Underline style One of the following values may be used as a value for `underline.style`, providing it is supported by your terminal emulator. | Modifier | | --- | | `line` | | `curl` | | `dashed` | | `dotted` | | `double_line` | ### Inheritance Extend other themes by setting the `inherits` property to an existing theme. ```toml inherits = "boo_berry" # Override the theming for "keyword"s: "keyword" = { fg = "gold" } # Override colors in the palette: [palette] berry = "#2A2A4D" ``` ### Scopes The following is a list of scopes available to use for styling: #### Syntax highlighting These keys match [tree-sitter scopes](https://tree-sitter.github.io/tree-sitter/3-syntax-highlighting.html#highlights). When determining styling for a highlight, the longest matching theme key will be used. For example, if the highlight is `function.builtin.static`, the key `function.builtin` will be used instead of `function`. We use a similar set of scopes as [Sublime Text](https://www.sublimetext.com/docs/scope_naming.html). See also [TextMate](https://macromates.com/manual/en/language_grammars) scopes. 
- `attribute` - Class attributes, HTML tag attributes - `type` - Types - `builtin` - Primitive types provided by the language (`int`, `usize`) - `parameter` - Generic type parameters (`T`) - `enum` - `variant` - `constructor` - `constant` (TODO: constant.other.placeholder for `%v`) - `builtin` Special constants provided by the language (`true`, `false`, `nil` etc) - `boolean` - `character` - `escape` - `numeric` (numbers) - `integer` - `float` - `string` (TODO: string.quoted.{single, double}, string.raw/.unquoted)? - `regexp` - Regular expressions - `special` - `path` - `url` - `symbol` - Erlang/Elixir atoms, Ruby symbols, Clojure keywords - `comment` - Code comments - `line` - Single line comments (`//`) - `block` - Block comments (e.g. `/* */`) - `documentation` - Documentation comments (e.g. `///` in Rust) - `variable` - Variables - `builtin` - Reserved language variables (`self`, `this`, `super`, etc.) - `parameter` - Function parameters - `other` - `member` - Fields of composite data types (e.g. structs, unions) - `private` - Private fields that use a unique syntax (currently just ECMAScript-based languages) - `label` - `.class`, `#id` in CSS, etc. - `punctuation` - `delimiter` - Commas, colons - `bracket` - Parentheses, angle brackets, etc. - `special` - String interpolation brackets. - `keyword` - `control` - `conditional` - `if`, `else` - `repeat` - `for`, `while`, `loop` - `import` - `import`, `export` - `return` - `exception` - `operator` - `or`, `in` - `directive` - Preprocessor directives (`#if` in C) - `function` - `fn`, `func` - `storage` - Keywords describing how things are stored - `type` - The type of something, `class`, `function`, `var`, `let`, etc. - `modifier` - Storage modifiers like `static`, `mut`, `const`, `ref`, etc.
- `operator` - `||`, `+=`, `>` - `function` - `builtin` - `method` - `private` - Private methods that use a unique syntax (currently just ECMAScript-based languages) - `macro` - `special` (preprocessor in C) - `tag` - Tags (e.g. `<body>` in HTML) - `builtin` - `namespace` - `special` - `derive` in Rust, etc. - `markup` - `heading` - `marker` - `1`, `2`, `3`, `4`, `5`, `6` - heading text for h1 through h6 - `list` - `unnumbered` - `numbered` - `checked` - `unchecked` - `bold` - `italic` - `strikethrough` - `link` - `url` - URLs pointed to by links - `label` - non-URL link references - `text` - URL and image descriptions in links - `quote` - `raw` - `inline` - `block` - `diff` - version control changes - `plus` - additions - `gutter` - gutter indicator - `minus` - deletions - `gutter` - gutter indicator - `delta` - modifications - `moved` - renamed or moved files/changes - `conflict` - merge conflicts - `gutter` - gutter indicator #### Interface These scopes are used for theming the editor interface: - `markup` - `normal` - `completion` - for completion doc popup UI - `hover` - for hover popup UI - `heading` - `completion` - for completion doc popup UI - `hover` - for hover popup UI - `raw` - `inline` - `completion` - for completion doc popup UI - `hover` - for hover popup UI | Key | Notes | | --- | --- | | `ui.background` | | | `ui.background.separator` | Picker separator below input line | | `ui.cursor` | | | `ui.cursor.normal` | | | `ui.cursor.insert` | | | `ui.cursor.select` | | | `ui.cursor.match` | Matching bracket etc.
| | `ui.cursor.primary` | Cursor with primary selection | | `ui.cursor.primary.normal` | | | `ui.cursor.primary.insert` | | | `ui.cursor.primary.select` | | | `ui.debug.breakpoint` | Breakpoint indicator, found in the gutter | | `ui.debug.active` | Indicator for the line at which debugging execution is paused at, found in the gutter | | `ui.gutter` | Gutter | | `ui.gutter.selected` | Gutter for the line the cursor is on | | `ui.linenr` | Line numbers | | `ui.linenr.selected` | Line number for the line the cursor is on | | `ui.statusline` | Statusline | | `ui.statusline.inactive` | Statusline (unfocused document) | | `ui.statusline.normal` | Statusline mode during normal mode ([only if `editor.color-modes` is enabled][editor-section]) | | `ui.statusline.insert` | Statusline mode during insert mode ([only if `editor.color-modes` is enabled][editor-section]) | | `ui.statusline.select` | Statusline mode during select mode ([only if `editor.color-modes` is enabled][editor-section]) | | `ui.statusline.separator` | Separator character in statusline | | `ui.bufferline` | Style for the buffer line | | `ui.bufferline.active` | Style for the active buffer in buffer line | | `ui.bufferline.background` | Style for bufferline background | | `ui.popup` | Documentation popups (e.g. Space + k) | | `ui.popup.info` | Prompt for multiple key options | | `ui.picker.header` | Header row area in pickers with multiple columns | | `ui.picker.header.column` | Column names in pickers with multiple columns | | `ui.picker.header.column.active` | The column name in pickers with multiple columns where the cursor is entering into. | | `ui.window` | Borderlines separating splits | | `ui.help` | Description box for commands | | `ui.text` | Default text style, command prompts, popup text, etc. | | `ui.text.focus` | The currently selected line in the picker | | `ui.text.inactive` | Same as `ui.text` but when the text is inactive (e.g. 
suggestions) | | `ui.text.info` | The key: command text in `ui.popup.info` boxes | | `ui.text.directory` | Directory names in prompt completion | | `ui.virtual.ruler` | Ruler columns (see the [`editor.rulers` config][editor-section]) | | `ui.virtual.whitespace` | Visible whitespace characters | | `ui.virtual.indent-guide` | Vertical indent width guides | | `ui.virtual.inlay-hint` | Default style for inlay hints of all kinds | | `ui.virtual.inlay-hint.parameter` | Style for inlay hints of kind `parameter` (language servers are not required to set a kind) | | `ui.virtual.inlay-hint.type` | Style for inlay hints of kind `type` (language servers are not required to set a kind) | | `ui.virtual.wrap` | Soft-wrap indicator (see the [`editor.soft-wrap` config][editor-section]) | | `ui.virtual.jump-label` | Style for virtual jump labels | | `ui.menu` | Code and command completion menus | | `ui.menu.selected` | Selected autocomplete item | | `ui.menu.scroll` | `fg` sets thumb color, `bg` sets track color of scrollbar | | `ui.selection` | For selections in the editing area | | `ui.selection.primary` | | | `ui.highlight` | Highlighted lines in the picker preview | | `ui.highlight.frameline` | Line at which debugging execution is paused at | | `ui.cursorline.primary` | The line of the primary cursor ([if cursorline is enabled][editor-section]) | | `ui.cursorline.secondary` | The lines of any other cursors ([if cursorline is enabled][editor-section]) | | `ui.cursorcolumn.primary` | The column of the primary cursor ([if cursorcolumn is enabled][editor-section]) | | `ui.cursorcolumn.secondary` | The columns of any other cursors ([if cursorcolumn is enabled][editor-section]) | | `warning` | Diagnostics warning (gutter) | | `error` | Diagnostics error (gutter) | | `info` | Diagnostics info (gutter) | | `hint` | Diagnostics hint (gutter) | | `diagnostic` | Diagnostics fallback style (editing area) | | `diagnostic.hint` | Diagnostics hint (editing area) | | `diagnostic.info` | 
Diagnostics info (editing area) | | `diagnostic.warning` | Diagnostics warning (editing area) | | `diagnostic.error` | Diagnostics error (editing area) | | `diagnostic.unnecessary` | Diagnostics with unnecessary tag (editing area) | | `diagnostic.deprecated` | Diagnostics with deprecated tag (editing area) | | `tabstop` | Snippet placeholder | [editor-section]: ./configuration.md#editor-section helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/title-page.md000066400000000000000000000012101500614314100236750ustar00rootroot00000000000000# Helix Docs for bleeding edge master can be found at [https://docs.helix-editor.com/master](https://docs.helix-editor.com/master). See the [usage] section for a quick overview of the editor, [keymap] section for all available keybindings and the [configuration] section for defining custom keybindings, setting themes, etc. For everything else (e.g., how to install supported language servers), see the [Helix Wiki]. Refer to the [FAQ] for common questions. [FAQ]: https://github.com/helix-editor/helix/wiki/FAQ [usage]: ./usage.md [keymap]: ./keymap.md [configuration]: ./configuration.md [Helix Wiki]: https://github.com/helix-editor/helix/wikihelix-cbac4273836f5837cec641ab21f365c79b102a4b/book/src/usage.md000066400000000000000000000037121500614314100227570ustar00rootroot00000000000000# Using Helix For a full interactive introduction to Helix, refer to the [tutor](https://github.com/helix-editor/helix/blob/master/runtime/tutor) which can be accessed via the command `hx --tutor` or `:tutor`. > 💡 Currently, not all functionality is fully documented, please refer to the > [key mappings](./keymap.md) list. ## Modes Helix is a modal editor, meaning it has different modes for different tasks. The main modes are: * [Normal mode](./keymap.md#normal-mode): For navigation and editing commands. This is the default mode. * [Insert mode](./keymap.md#insert-mode): For typing text directly into the document. Access by typing `i` in normal mode.
* [Select/extend mode](./keymap.md#select--extend-mode): For making selections and performing operations on them. Access by typing `v` in normal mode. ## Buffers Buffers are in-memory representations of files. You can have multiple buffers open at once. Use [pickers](./pickers.md) or commands like `:buffer-next` and `:buffer-previous` to open buffers or switch between them. ## Selection-first editing Inspired by [Kakoune](http://kakoune.org/), Helix follows the `selection → action` model. This means that whatever you are going to act on (a word, a paragraph, a line, etc.) is selected first and the action itself (delete, change, yank, etc.) comes second. A cursor is simply a single width selection. ## Multiple selections Also inspired by Kakoune, multiple selections are a core mode of interaction in Helix. For example, the standard way of replacing multiple instances of a word is to first select all instances (so there is one selection per instance) and then use the change action (`c`) to edit them all at the same time. ## Motions Motions are commands that move the cursor or modify selections. They're used for navigation and text manipulation. Examples include `w` to move to the next word, or `f` to find a character. See the [Movement](./keymap.md#movement) section of the keymap for more motions. helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/theme/000077500000000000000000000000001500614314100216415ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/theme/favicon.png000066400000000000000000001231371500614314100240030ustar00rootroot00000000000000PNG  IHDR pHYs  ~ IDATxyx9ÑB {~0b̕ a<1D0ꮒ۶0mZeK*~es%\>JR?fZRoט1@*^CJ@/jzYo_^k=(]U7)2P L )=iMmvYBP jj7yRlOSx|yHN^|?Ϟȫ ;z~\W[#@.}Tz! ab+ P Pd-U=^Q(geP mMx>WJ{AjP.QQZNU]]DE*ʖ P mUxܾWJ{A<9Muj[/@ /D5Dď[W_ij۷ʖ' _(3Y9&? 
(p9THN^|?ϞOJwA|wN3F ^/i I@)/֩ /^ڗP*U{eeO' ߛb,>ݹĭ[)@)/2Mkd7*J˩4DďJ5ݲ@.]Sq~P yz̮%~P P5%ݖP~P^94~O30qt&7@)/֙PlbřD)քj_?{"P GڥG1[cƈ6`A,J_, 5([[3gɞOWF~C қ<\o D[SEi -XK< O\jj^;,%(xE;ee4Ź//7 " ]=M_7 "9 噓HkN3F7 P !-baqHGh1x<Œ{" ұՙPlbř ~IbB͹Og8{,t 8 kM5FY  ]s6OIϟtÿzgWXtWBu~9|"Y \D#' ";jz\_7PȌ+IϿ-iz7yUA:]MZ^9@@!/V6Xjyn$zVD:ޕUƕ-MC7r 7e?D:Ce6 Q;r,/\oS= 1OsJN2Bczr)|dIl3s~n= ?5Z}C͍ϝo'P--L_a%:{~^)`A~syZS۾]V|<B(| -n—OH]MU9!ny "WS.?m]ʫ,t,)NU<3[¹rKwo٭ "}ӪW8ߏdz' WVWm 3 $s7,= 1)c\$`IjO2˖  "NJ՞[ 7;_M/x y%HV؊yc oo4:Mϯ+ ,t퓵T2:nrx з+}ffH ұ?Ow.70ښ`zǶ .=Iׯ]v󙼢Hքjϋm:0j idz$Zt_^U`AckP.QQZN70xǷ H\ВtWX{WOT2lo<`$5?ùz{ q]Sq-8GN7n8=͞+ ,t%1ښ_0܌nq\hKl5 >WX:)63'WNr{#w!Ğ%> D^e`AowBuUgak  N.r^i.-DPY9&%2vxC[-4 R)W3J2}ø +9OD iM5|;s/xwXq]m2f_ X#a^z3 f3WXix>vګ+z-cќ G`AV.\/ " oon51B1^ " oRjl D B6R o }>WXi{SWӝ |3O>b #:I7s^}`A;UW#~}>IO7˥{D\p@9 H}۩bh1N/ ,ׯ ;#w_?`A]=f?W[V|<Y!u\dC q 4)}} ] az#  ]3cF+-0KڊQ[sf_ldOXit;)鶄@ .n8Q|QԜ OX)?-ϞUga`t{w,.;^Dʯbߥ % 5{}W]? "_ V**V_ dwX.|X^& ~*HyYkBǟ=D(0CaWDv1D֘1b,P\r)[7 o4 06D%T۬Rھ_%ގ3L_"y z ]  "FuQʸeP8s\p\߂NzA`ABM]uOw./ C27# zyL/~RH[:fX;ru9|]G{d~ZHXCh 57>w _|克7DXG>H}lx`--5rz⪡GpSD*EQtչKU>&|p^SD*:_E`YۖT P Rv :z7_5r_ZzB R[>?SƌW0?l=+,1m~-?A`A9ا)j@cE.[+ ݤ4D*:5Vt} D N.Z]ꀕ fD*b&TXY} "J/1[O#g1yCD`A$JYQ~5-poȅN@/,H) j\w=oet bȂ,DӻDE*ʖ \ bqrCD`A$:Ce\$!&MD.QobȂ,DG\n̕@r;/ z1dAD,ơk ;+bednMr1l/o'eADal:Զo-ϕ@(d&L}%  "Qo7Z(tپK\\ wbȂ,D؞:U+_$7皹`酛Kv1dAD;PV~<=|g\V9CD`A$c:e1[ gTSe1dADaةӫNR2p%/{d.ʅYYht~s-Fg9hr bdADmRWV,FJgiZ3t%X75Y|sW0ĸ΁+,7xL.=o  "Q>cq|2fn( "~>D`A$*eT6s\n[^eX wjeespbb2O<MnbycAD"$T5ϞU (>L2=:3rōD@(ڥG1[cƈ\g1r>)=,l,wtũ q=+N- bv[\R5DQuA'+3gp ζܠR.'],h,P %1`&*ޕRM}0Z&p-N2j3,,g,P4%#9pz D՛<\U ȳ;1RƂ%:7QA֣+ČW克"2ƂI9U.0hjAʄju͍ϝU yFGYr.u,a,PJql"8:XV|$DD8J 0ч8t>qVaPsspD!0O܁brD,pTBM>{45t"b"= dΟfy g1"D8wch@LD&7b"4g.KO2  @D?NA>54ŮW+'F1s9/9,6ĂyWO l9d%clp|bBq~ۧse,=oeAL,4Ă H)7ts1xE;CZawI^m˖g!D( 345=Ky Dt8X .7=9C,PԄ$䢸""OSڪghW`v^( "ʖ ZELw Dtiʡkt%C_UH[np\ >B,Pd?Codr(vk/ U8g2i`y>bA"6b/\"EX;+*[ؚ}ZAdQ!D(FY z}9}T81Jǩ\34p3 @JH}USӦ} DMW8?{"Wq8FV|27_db |bA Z+j&["gnTS细=oet ױh "`DTb/K["w55|nY\ir18fyr9ϒA,F 
z,}Ug/-EvMK!&MDo\ "`Ti轏 2QDyz̮%sL7a D@^V+l9ma%:)63's}[nprd D@z9=n(vkL1 c\^_3fwxmy,Ă(!<3QSiz~Avn(b;Jv]M/,Ă(H+Lr9حc%Ք:t)hr}xL@,H]WҋP`KDu : վ&bnޓRx Rr`>@,"#J\Wn(v1DuC!Y?U{YPn$Mu˚>pKDMWҖVz}:gXp;K @IQUlMMWj`KD5xsnuT倈JZb ^"ᖈ"jD<}aL-N\Z3sΐdqJjT{-Eԛ<4||5w\acr~I&&Rt%}R DQݚb7(JWF:gntß!EpWt61Qq|-fPS)#aGYrny:"D1RɺWu;p(csjeeu!&&s4`~"Dq(/Lc+5ᖈi@Sq~-Yn\ b'bADWO܏PW3؞UϘWƖ<3ζp > "`%fY-EP~P^9XO6%݁^.A}"Dcur9=n(v*4 IDATk:e:s 1])_[.> "` OT{^~["&'bx}uhN6^ v2 @ǩ4%5]vl(:TT8cv_Ka兛Xy(s.T'rg%h> kM5FY L2=:3 {a )W11Ma%( ⮺%kVmvC< "$I չVv)A\":֪2iyh^NĂ(d-$b&,.ջ5m׋ym D,bR:wl;/}b9bo7 ։X0ĸJ%5UOjz/0~5j1O҉X@Q:Nz9 {3.ѼsD,gΕL]u63 Im~Y0 D,mj1A(d`&*⎨[V}y03$ʫ`2dBDMJ9Qke'v_Tj녚Zv5Q13\' G.7QAq@curMC[ DV5V\08gHT@1ݔtCy[NTj]q^C~oz*1 W4wpΐekD,Mqs8Q3[) ƐMT2s1y cQXqag 'ʓ4G-}]]ehd&*_?3q aqvTt<8gbHT$_RS#| @ὛHMJεd؉F3WA lD @.Ϝ#V]ep'VT23$*Mϯ`*ՙ)e OqmqR<m`*ۦʁ)]u2Ep\#f?F[ATKLs-+r]) >13l̈k9gHT4B#{2ShbPfaw ?3LeHoLTVwp$jۥo}Jq[.~Mm,,-#&@(/LT{_~Jz9gHT/pʴ(9}zBunCq;9úb^{/C2Q1̈́(i墸PS>*ss9gHTk/\ #Jb?"Y$[au sD찼PBe&=7r:W󲦤HP1V=k+;8gHD~IOz,t' 9ڕ bm Dv%b!Qj/tA戺+D{gf7C*T=f?*V4FC"$@3y71+tI|P^3lʈ%_c&(|o.8*m_T7P=M4<^m[|s+qg U39ńFJGݶV}eKD 6qG "2Ac3eQQ9gh񄹑V/^p'`iHAVԯM? Dt4Znv|7,[}سaUC -/ԍqwmFH|s[,4Yq[.ls^$.NzB,DM19g:g8w+O8gHD67_Qq,MIMC::su+Eż53$cԜ2y"s.c"93L8uHe%cntMf̝V5V\7 9'7;0y[NTSʅ %;bDCUZ%f>} DCQ#w w] :gkjRS.8iJ7y1 Iz w~;-(Qb]MߦNTB;^ƫ[qR"~Y0*T8<%Ž,PE؊5b9anDY\]"Q^9YWڮU'%|b3lʈ˻D:Qd3C. @_9WSMrbU'n FVY"p7'sTd]M+,ZX#jo[+9C". 
|.26};%%<3QTrxū@=an!EnSήsj&|YhzS42Q,|ČW5՞#a!ˣs6QCth(%Sv؏-Q,nb΋9C"꜡]v/DMijEgaVZ%f>QdL*ew:pYLK:8gXR4lsDY[YF#8;8ns؋u5 9CuK׊+VQTV[~>w2)0iJz~LSwX"(f]~[,DYo92^`X%I չV.8)Ac8iSF4-f(sL7h1{FG ܱ.o9'5dY5m׋ym De=w(0jTgijz 3m!EU.%z;.BYUmS"n5j1OylEczYb,w"wT]qKJMKacF4=%rΐ"[ [M]Yq"w̹rQ8iM/a|lgyalmj1A(,0nڟ&澴YXYQdV.org<3QSiz~A.QUbS0sΐ"kWr9vr1; (}zI.S9XՐMͯ2Q VgwP 5^v_Q3Lo@KDѝ34]sv\[wpA&avqΐ"{po~ Pn$MuB8X=k+;8gHDQ>=?scT9;81uIkWƇ6yk8gHD5([ ܰ'QbMM/K戺k[qR"nY|+?g0 1RIMIMI0f]~[,DYoZࡅ92G!HMJεdGM1󟲢>Y"I7=*C \Quy~M4t0KD34]]2޺qWtTb/KHJG޶Vfaqΐ"* n`^\AYUmSb:{w91Vxc+(vY#X[^=MW咷RS>gؘ3%rΐ"{o=pM<=+5V+lm9s6QCVY"N3 6q&WfQmjrY|ø-jfae9gHDd~(]j7DM1UbSh^9C"*O4gOzv+W`<ۧ'T禪Uo{f(^f?4z{O 6Yr[9 2Z"y{E?nZ">1󟳢j|\ ݚ~BW)aJRsR+>(Ý D9~1nQ;T[&7Ck悙nf%Hra׉b:TT8;(ZFjyan% (smU+ }1d վ&D xC!Y?U{h(sŬ_yX]4KUj}0@a+ng%HvbKEׄx9ڸ? {h(wuBםX ?ZuEq|4 %zz&Y5s~Jtj(+~)Wb9 :;.=ø$< irǬ\6-E~1b?gxm>xGZXϹ an( "~Z"d1b˛E풵BtX ?eI=ϯ/Fs-/\@KD="M4<^$X?9rIK.ϴݍ:03p#!&&s4_~Z"^1󗝢1wͫ:.x@.7كw]3c#dWp[n-EqN_Mڅc;ڪkfU⾦KP6,mmAMJn^ rVW-/cA-E _]E-?'j?\FufMd-t~dzAj(s 53Y-x|bݷ.]>sw6p3fwx~my D9u}bdEU,u檡~|I|bCzߔ-ϝ^8{M/@KDQ3&Q;sY]k?yA|boi|\+4Y^" *Y'ts#ġ$~Z.C\|I_g悔2Q$ lmڕ,n#XM k=%פ T0CqO5I9a%H>N_'j,l.G.d\Zo\cXr_lymr h(Űs^,j&8KڨE#_?iz_$kV[wmAh(p:D"c+FF'XRo7,{4;$ 6sr;PKD<^NQՐa)˳4[,?QoƪJM. @bl+[mZ"] b39QӼe,_&f5Ǿ ~KlnKWF T(`,/\$~Z"b}+ylE1w&69{6w$5](0 6qf r"{lEfhxpHTqΰPΆ_9xgCۿ$kVGbh(s˻DLZXR͂x#Mխ @[/a@KD3-jvX !~ҒxwZX7cKx g[^yl/nuw.tsU}f"]}yMIms' ,t~dzAj(s j9gX,KbYc GeAMJ;BL0}WA^Z"aG+b*$:j;4#lN2 w0Q$'Ŝ6EqΰXTó vS{V"UĻWlhy&Z"y0[E=DBa17S.KqA Pw7l7Qc(8^0Q$'/f?c5 0-+ɪwr_l% D9͇YJ\onL=?pozq9u2Q43 lwD(3wЂ8=ե+p,vwdzf.xYtj(sG;EUC%ԿTŝ # QַM.bl+<酏amZ"39QӼVа?Ӻw]znY w~= 0@KD|sWluz%WeeM# .5m]Oadn7aj^fhxp= mq̑_mݐgOd#bb2O 3Q$ 3W'Vš_?xY-I4g.KO0-Eq~17ݢjg ϬKFoANZhFQU9C*o/Ltta} @c(ĸV.|Ls0Q$nD"i^-5ς޻թ4(˿t%rh(09/oK =I)!6KL^ {Nٿ5V;R{eU⾦]nLꭗ3l y^@KD<sWluK s4|=o0oCL^TLfdnMr1l C-Erp6DU E>#duqM1yRy&3q^Οfz c%H39+fbya6nE5 bS.: 4g.rr1@KDќ3s9gH#S=KÏsWSMf.izfZ"$7Fzgu})YG]4Ә4uFjyan% (smU3|pyr9\V@P59i0FMr1rO\)2Q$']/f5jѪ{ŽMzb=x_Sz. 
`Tr[&@KD,]fQ5YVh俐faM9q]DD2:{Iv D a8[E}"hcYQyŒKV!.ġY#%34pSr;PKD<^ ,~ IDATNQՐaQQ2#1] Y#!5w\a0-E~1i^âBR}-~R[)eA8sSE/6g%Hì/漼Y.Y+J[AE4C8CDkYNp-E؊6z!Ieu[,oɂgz@ɜ?r"9gO4KT5rΐFơ/+/aA!&MDso1QD6k]ǜ},*4_]xgAs૖1ݠa")x>RM$v 9y [P! " FvdzAj(al8sCupV3dADe%"7*9CbKfܻIrizjAhX [%qRDD r{\칒JY΍L\c]7,OGb$+YIYS_O!a\Hi+wl>` sK+˂A,?z 0KD3;d}sؼ+_rΐX`ǻ,>Ϗ;9ȿ/[RW{x#wԜ ǾQ.o,6gbx% -s,xҸc1 'Sqrr-1/ȶZ2m1\(xIYGT{?1?N^ښR~+o%9ڼ3 x`\"xp7Dd9c$-gƜ'W:lĸY/ȱ>pi%V/D%! "-m=]t/f%c.:$nZ\77uc~l;,;s8gȂ8fZ{ɡa%yeܹWkk;++>+oo.1I, `A;QPMX)G]R:\Ueؗ嘕r{h%rΐp[MwM DtԵ>*6DB^Ob ΍L̛g^ zE.wg1\k,Q,,pV{~$- Dta7Tsn.X,+xl{\d}G.g -H! +Vr9ux(j*mMhJ 7`AL9C u_/--%/VYZϕf̍LܠX?e"D$άQ-%;S#V\11{2TbAA.e&{P.JεZMHǴm2|V;XxҠmgCa"9Ҙ,;gu3R"bϒ~Xv 1uŲ0}[2 J"4fBUy_6TbA4Rv/oqg &",,+^H@}+` @-m>^c0&V!DH .2TsK YgβThT;ZۻgqUD>;lbA!rM8%%dQY,+pib\@ _e1 /M=5 DEcRpCC1a!Dor8FbxX Ykk; m}5]1sB,A4!i^$cp&N/9VF t{\9)o4|bfi!D0;\ޒzhd'XUԥ\~blRwXO>dY!DPKv9VhhT 1ҭN@W%X ^<|M4lu5==rCkjϖy 0WĜ8Fطms]Pϕf̍LܠXɁ|?e1!D&J]<44ш}kխ$p Aﱐ " 3 vv^W/7Ɲk \ݐn, ~KW|r`!D86=gXO5q}MsUC=4x՞.צּb " c=Z DïT5Uҭ[=>]P X`P%)Ѱk?<^9F\ɐn"2]5+ |X`T%)7Ѱ?m{oʍFO t3吝'l "@ZYVKa&ٽ2{gsBP)}EX`.w eh1W}P.劅#41.PXC9X?+X`[wY2E1繒C:W+[ea%bCbA,0J{Ɖ2?3\ٶfMU >J 3Xn9cb-sW'ZI}ߐ,Ă,D44^je˹*! ],Ă, DCRATQ"!UC6*94w< "hhaֽ3ʕVYg9[ rhcq D "м0Yw4-+͘+ur9,C, Ă`A$XLʃ(jˢ@,D?ziWi* C?ﳲXhh: {$oq\En|w吻PMX#T?3M:Q-|C.}"D`A$ֲVg|H7cndźP)C> "  AqgL_پrH?1BBW`= "  Iqg\jj&p@U PC= "  M}T?Sq@kjΖClfXhhJΊ.ʀt?gX3+r= OĂ,DC1խ$ "2C1+.< "  9CxQiPMC3  ";Zۻgq@C-ra ah'bAD5UOv}O?Mĸ*Z/ : "  b\ 7DB|n)t"D`A$*97Gʼn|n9_- D,H4TRqwh|ґn'>9r(! 
"  bMSt4m\Ear}Pȉ2,bAD 9˿/[SW{x>H >b/q̬J*e\Fn}3cST#Pl9|N EaD`A#46giYs>f172qb]'ͺj}MUYʋl+ =yٙħCA/'a)C5?`&>/>|ZU XiD{w6Ɲk |ni۾eb9hv0l }j,4Ɯ]e9'VYg>NC5w!AhxHoQh&W5`Aaﳳ):/tӴ U5r|POwE֖U {x+1O+-Pd_f(f*e&)?Rc>QrD>ǝ?s(NSt3-4TSjDZibW:`AQ\Wh?O'm]~dȼaC0P H44κ{|*n&Uz^3Hck fpD؜\Iԙ,8g!s2@oj,i[5ENᓈt[_dMٹrhlfp&U)V_bk?Dʼ_'ƜE+ D=՜7> C138oVWA`Ap?[}|nb[IղFBs%?s5Ds˿/vK!ݪ y{F { '/k;cMOϩ|ڐnuڳL4=l_Ժ" &+RSt3F&nP`YWOFwE-.,XLӅ(P1=RdZX)LzHTwՄ' auZ+@d(&RUX)M Kcή9' T{@LDj sBHi5)k$!ݪjUzPOZ_߷tXip9Wb,"^d].e&/~{xih&WL`A{a7o{Sn4z"[ŒoPLD_G -r WN`AA9ۻoSB Og >KD_b=PBWpDr9-;VuYdO^_f14M*2Y9Cc_"rkWP`A$+9bIc]_vL<'N"Cea%bKDPWVThqDrR#)ɿ~'\7-=>Mpb-^ .E}jRYJ ,9Gd7tÿ.l:l>U8k5'|~Cz2Q>Z]XɅs;Lÿ^{Bɐ\oU~%V(uWiه Dt i\OqR{qNu<$p篦;xO"TU]1Wᮛ).ۘUXS1.1-buz]%7y4|Vfal] 11c(v~#\Y-Kf b.O01ǻ9~&ѭ_jQ!Wc(%O-~*WW`AlEvuY{K7eն_{MM>yOr l8S3D:so"3<<SjUj]Ɂ.`KD.|sӋ?O ,t4hv=]p,-+(n9)_Tt9m! ȍ"jݤibWY`A3:bnqgOJz8C2BhZ̈́ >* D#۪?pD`%ō=bv]gF/_DzBW{EejVC-cLJ UXi (js"]qؿQ %ʧZ^6(r)\*VY"rժ1 C?HjY#ŭ] ?(qOm"sfu=P0+.E6HI:bv< u;n>=Ojf4m\Ear{V-tpؿU1cXH_yK{u]#t1\yɍ1a!lUk͟^R@KD,ayM Biy @R1.1--g v%m>CQ-|.Z"r[ffW͕֞Xi stsT.]0̾ݳU`B?6T59}@KDd ^g"J ,4VD[̴;Y C`fjo8O`c++tzZs0k]5ӋB  GqVïx-~O8g1 IDAT;9Ob/7+@KD.eZH_>QXKzÝ+n>>uuj\-aPU-eHjy#rw8gxyw4>GǘX3+r;@KD.7Uzu9WZ`A39bѮn1snuP.jۯf(/3Pwfȝ̏u|(/\ipΰHbɿO|PM*b3K4^WO-~*WZ`AT{ĭN.GjȥdKXi -nx4Ϳv'{]$5j9Oĸ*Z/ ,4O6Ol " ō=bvwN[=^shȥITkSg^iDk+Q#D8gYzBH|j+9̵0K3e( r& Zn1si0zt;>׏k5'U)b 9C"r[N]J+ԺuDގn1säÞPghjYӴqYC?d%"/+}1b, W3tqG]K#o8Q`fa\O+zM> XiMVfvy2o~4?|N@ @KDn3Us^dsHGuΰs#`Y暶2k̍LܠXg%"tZ^ }) Xi:bn1.qH}}j0q1ߘ䞒 DR>k^,8g,,~xmSqVQC6{'! t@Web%fȥ2suX>ÔPq.q+ GsɬPvKMtN[<]WYr9l;jFj/,/r.q-oqD.q GlB>^6[x`>5fBUy\ _AZ"rIY3 Xڊ#!zMpGj1~0>"2C1+0.-/jLAį|EM=bNSdWL^r>rS0a\IӣbX2XPq#!Sp).?|^db찡GjȥZ \Ag  -;quJeY?y딁~5M >f%"zO.Oi8s1G,iX xceKbVq)Y"rjn kI8sJ9÷:E`Ln}5Η\ ,T9]b1 "g2XвGd7t鼶ҷ(6gvV~ZI}ߐCaZ"rGhNgTo^d`A9b&!Y;rk}oa Dү7Wj_9fT`AIm{ĝ[ym M37E5:Y"rÆjtպZkۓ5M41rXYWw2Y|{Hٌ?O-.@KDnuP̺@q]ENaHjTkqdrXsļ]"ao[Ě9gHD*-t`D[-nqve‚49g/c8gL*EU1k3K׋˙B)τLwu,:fojN^c1$":&] [=i0,\KS"m5b/qΐ\{eE[+֌o? 
0*-uuqsC 1-/nږ&tZ"rᳵ K c}-NhS~2?{U<]Trΐ}] I ƞ7;ۆjA-,AڼG(c%"'=$4&>#ZRԑt/?3o;v_7VEf(2}0,&޷;{n,nq"^#!} qbbպzfȵ ,3 "s+hBki>5?j/ |Mh;D2k}dzQh#r`y4 #~jF{3. -.DZ Ep 6_KׂXvs+ hmboDZbu\hhsg8̍t}}YK$KǂXGQǂ8j>9C"r^C5וsG% vt]]z) " 9߷eN~9 ":`ZwuzY"r+KWз0]S#&,3i;s;{to3}w& }dzQh#rW`X=林Ӣ{ĝ[Y3lK-.0KDnuPQd\=(#P~8q7ƝE9"S17b}o "r-]b-EѶss0 O-:풸GLQqR"ri1Tb>)(pUSocZ1w?V3T&;Ek+ȭ̏u|IW뮮V('.lLݡ6;M}o]=volK-sΐ\{eE[+֌5p*wD_SX;UZ-|mIQqm3$"76]1 6(ߵ&Z:lI\G[A}mŜ[AT3kgz=;!7vJSOO\Mn2z6ʣ#+8gHD}cx(Z>ZM{N];i+}~ITm5bᯚăeY"rs ŜcD.*[MqG @KDn}khv>B}"dEq{]e%.%-n5Yᝯ b\ ȵI+]3F&ĸ=?UzZ-GyΰFQ^9C"rÆjtպZk<;HwZYZtkvdIe&/Īu3$"Sb3|qGcJsjSas%9C"r9]^,8g2Hd]34g[בiѽcXnpVZ"rղuY9;xs4t/uyd-!Y-3{ nQyiΉt>|=ZŸ? y\ko@"r,`ػk363\Mmg "r?JGMqMM\ %^)%O9gHDOkSX? 'gy` pږ9C"rs9LJ _pos'L VyU2K?@Z^`OF)vr\ڼG(!ᡀjm k5fw0͍`ur {4j:_5WFfȭ2sQ9;xɻ咸+#&D1mB1kvo/U{ &bP36Ux!~]UJ1Fj0PwKra{N~p=F6T;$Ûs_= <1$omb\²#ffv[ D͆Ω,=+:K~Ruo8Nf&ݿU9gHDn֫+C/ 0H&/;nq3}X9gHDu dMqd<=WyB<'GcXҭ DVG ŬUsENJ fޚiSk;˥ s"9C"rVC /Bqebl\UY3 &_nIȵE\2`<΀\ p6 5C"rjnҕdD&z53E}2kPZ3+Ӹdu=aC0nO37{CɅ-rP& ~(ge1G!zO띇=pG'r Vs-u"6~#o8Q(fgx/_j;rnm?;14MWQ䞕}@KD3\fw^i_Fr_u:C5z"w0mUk-8Ջ{i~nQ %rcaQ-|.Z"ru+#"&1Q~N캘; x uaSZ"r/y]`o3 $V(uW>`%"7\fG9/_*rW%ɿO &nV7}d*5|Z J0[ _Mdoi[X bQ|N>1NNLfal6ȁ衲X&q{MN&`vܨg֋wL<'N#Xz /f|9̵0+3=:"<,9 + 'M u|; /k5'U)b@9C"r:QF15^[͞PgնQLӴq2+ bWZ9g8|ÞP2$ś5z"wHFB;b/9\ygUԋ{^Z䒸`ǕO`X>Pmjȍ֭g8g8r %Ko뼈;'#T b=+Z"racbf.O01ǻ9~&wQFMO Ųhȍ*mQZ]y9.b8$nʪm UPMӐ\9gxm"{cTLb8 'OLMN 0^ }[WB)Ce[Ţ9Q^[V(Q0rN 0QO]1# DtNzqQ1s 'b''qGo[ujb%/kʈILs%X p~KDGV ]bfR &ߙޚӸp4M_lC0}Ŷsΐg墸)kjj&p`?*0k+ܿMq'a\8'|bJ-RDV "b/5,4VO(VL|тXloC5[Չbcm,4ROnI&BcDTݿ,7|1+qzKڼ(yw4 S]d(c Dÿb[n?1.8gH4&O8#oϴ6* 4TEk-( &u}9`XRj(Vh3o~[: 2xVsPyLfa2w1h *B!6oo(Q vNI&@F}֝b3p s>K|^ ao0;y&12kAT\9te&sorΐhLzCĥL#a("9|g'KDbkh.Y9նTȄo z!(.Ŋ;] D#ÞPgHvFOd: 7*C D8Ջ{^ZEq'xfj1BeJDɁSrhʈILsDN캘IH4 {dEP74Y8gHDLx#`A$Uۢ|r\xym}!wSy55b h36\TL!}'|b`A$A=T$n!ٴyBoM7b,DøZvٜ3$>1 u͘T "Ѱ|Eom]\"re/y$[-3X29uŚ6q^}pKD.NVy˙l,DYbI 4{fyc%4{j;yt˄`A$ʄ_'U ~B/o v|++,]-#6_;+K]0XqCjZQg@7ᖈ\7xNa%"W 'OLMNbJD^\WUC O^:Eva9a%"wVOst9i XF[ "Ye˞of0Kv'O %f=25 "H lҕGgzZm2o0Y 
&e%"XRVjoȸVX>ʊ>^WO->U#3q7xEvᖈ\Fq'(j%,D#ᳵ K G=,o0b Dtb0zt$,DõuzR/G/yÉ eP2pKDy M w|}4z"  Iɿ>M>a~؅++< DRyɍ  erl>Ws?07)-TWVc-osDLP2 r1/ 'w%q-V0̾]aX ܨ&1<4E'X %["rC`fjoۻ 9㻫$@2"<3HUXI#oq9Gac#n`8ig0)ݫ.)('#@b\!v~U}J\3=]733;-MN:VVэ/K| ,Cݣ82&lJ(>Kؔ6:,}&h҃mrӲa Zt ;-M8u;leipr16AoޤJN "c+m^UT'OG9^Mr;16A;!duk4;-wÎ5+XadE[ 7:16A;hN "ccˎc󪝦P,?3SX-S1 YeV/sA l+0Wm.C%p +/Unұ2_د\Or;-qv6^T"^^Ytp*we@d7 VPfji ,ݣJ82&Klx[& "Ǚ `ٳx֧R_'DD$.Ín3wY Y],Ky5JGw(167uq^srcvweڔ6kx)UzMyZx.clܑ>S;Y@dV7ڹོCwrN9qes?Qz]DVUߐ黶{UQg;Qgxqec%ҙQ@d#ﳶ|lú'/߻ C'˕~>8^C6@ >0Vexr7¡16}XƝD6;X7^EP✄^; /2de%l1|`Or|z`riV^Sw}ʍ>)MF?g1;+╃RnBx;comч{&]C?qrom^1N7gxϾ0cq?$}meipwN 9O3; ?Ԕ6jAP˔N,}co/wL ٩cxұ|϶f*AU /~D1Ì_7~h?(q]$ oY>Wj-9'93~aZ@ 1/W\?@-Ru?S^cnw z/wI ;m5-˻2sr'c>'";YpOoWN畀z7Xahxw 7:#%콳{[Pסr8Jk餒)'++~y?)Xa+ /0ܸ=ko3:JewD =aEUUF gFP^tC3c.bXf[; @l䟬c{ v4Q'5̒Rn]F]s]pn{`㛕g}ћ|?3 :VM{}x{7&jsfQ:z?77 kI_pL͝峹j'ʹ/җ sfFAmqWVw6}S~ma|n?1}N6f@ |˾'2Ϸpե/6~9pXM%] Zٰct8\@Rt}HߓXMl< `@ ^*X[\@X w]Zi>XQ3ܹ@tqE}۞ ~l᳟C8cU /;ո?iEUԇѯzʋrguc;մ vt[M3 sE2V9>.;*48] jY|%\}@&=?kPC##@d;jCp Wx.6sJtD wodkl8iP^*uD if~tWfG!@ n0o::;93F X{]ޕiSb2Í6ȡ@ N:V9]o}W1˧-29>*83F X% ?ky\MN\ e#@p,v.x/W4@B9Pbu c[WUqWQgxlqg@z~bD}`7:\9*g:YnxÍsg@qzcw\1-K̔VK$n}D vG$ ip\(]Kρ1 y\,L(_nxQ##@~p |0@RJ[;D0F 81m+|%\j57^#'2F 8o'm9CRBËt`@͂zr\ F6Ax11 `ٳԫR_'⧕DD ymu]3Q=r0~@`".ʴ)mYЈ2EY/Oy+#'+>g@˧i,W^m9,A00hK@׏Y_t4W690ka: DP!oԩó o $;x`"qD|1(Nj^IЇF 8Tʿ_bq'nqb@ o*XkcNVn]#D#Hdu:`ʳwࢬ;p F @Za.+}A\rKz@:'i Rږ] ?Scm%dDV9\@ ĵB]y`u5rAl0m3xFYK_ϳl9@ DWp`΢*Y*r^*G}`"?m8| `ٳxRc:9|?=z&B5.lóի-xѧ"!DQ;gf*ľ{ ԆLQ֏~Pl_/zw,@ȧi,W^m9A0j)LѶzW @mR:kQ̵0D+8Cm@k߶¬r@C0cV.(s7x7ntXa"T1mG%m1yfҙF9jU^DBz$cq_ ,rp!^U_h[w6wW@S:XrhF @qOI/ F҆uO3-a9J0@KH"I!)4trrJyADF{ {m+H|1³KÍoHI0G+tb!tņ/#`mf%vt ?菆)>ȧi,7hF F $I7VU<Pgf%z@IXp5%?qaOvwQGJX~b0 ̗HuʎL+Cuȸ^R 8L1&XwPodÓb;N,ÏZ=.;A1&s{äb.pm3X (gx "@$`ؖRB2y<tt%S:%WL|>m)t7I̽8IG0)ҴY"aEȿL"9k+\xp! 
Lk |D)%C8?f\ ٽ<IEcVĝ8$O ć7wn_# *r"C'$^!D8UqF QBԴ| ,xT5b@SKh4~'/٦G0*=&1*0sD~xұ/9DtZ]\`OKT&D8lʗδ-J>H4ǜ{yDӭ/z&C tJ/ <jN^p`Z#1.X_;+TU:Y-]"c"cԓ /`c]\6y停G?E (O[ ^^oI/ ugQEG/0F_3=W"1-8_VжzT+*Y_tUc#cYZP0;$NN 8G Pn ϺM9?,۱Co}jIDAT+Z-@-Է/*%DUqF].!1ΟH@> ˻G PҴYbd#$30FδpD_ C k7(I<=&nK_l[g%_,tx\&8yH#PX{bÿy% GP2EY/Oy+#gmeV ضH,I$MwGPib=%-F hc>Xo8fp%<2Zt4pvt?(7K쐨!Dm,ݓ.ٰ3x4wࢬ D|43O[x4-H<,q*G hpk+,&+J {"wcpp~֍H Dh߻0[$ѳ#44mYjx*# D RCI==@ /Ћ Mr'A($4|zr%ߖ0z8$48Kia@ڷ0ƦD@rX"iF DhptQZ=.;A @[gfO#"48N>+0^"Dԫ"4trSO)/")c; D@ ]te }"4| ,zT Nh҃mZZ6L @[Y\ x"/6~ "4|zrËnt@J̔VK$n!o;p%'MrK e DhpWT*ۿBlٛ"4k3nFl'0ڽK+K @Kf ="J @S] o#mO)/"CQG:U"q'hʔ:  .-2 G%^#)UzMN+VWzv D@SG9^MxDhpXad-z Dhpeupz#"4|(\4z|")_nxQ#"4+*Y_tUo@wcpp~֍H Mח^E"44m \*1D1\koQ8|z4 DhpmO^t"MJGɿ!g4IENDB`helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/theme/favicon.svg000066400000000000000000000054401500614314100240120ustar00rootroot00000000000000 helix-cbac4273836f5837cec641ab21f365c79b102a4b/book/theme/index.hbs000066400000000000000000000343731500614314100234600ustar00rootroot00000000000000 {{ title }} {{#if is_print }} {{/if}} {{#if base_url}} {{/if}} {{> head}} {{#if favicon_svg}} {{/if}} {{#if favicon_png}} {{/if}} {{#if print_enable}} {{/if}} {{#if copy_fonts}} {{/if}} {{#each additional_css}} {{/each}} {{#if mathjax_support}} {{/if}}
{{> header}} {{#if search_enabled}} {{/if}}
{{{ content }}}
{{#if live_reload_endpoint}} {{/if}} {{#if google_analytics}} {{/if}} {{#if playground_line_numbers}} {{/if}} {{#if playground_copyable}} {{/if}} {{#if playground_js}} {{/if}} {{#if search_js}} {{/if}} {{#each additional_js}} {{/each}} {{#if is_print}} {{#if mathjax_support}} {{else}} {{/if}} {{/if}}
helix-cbac4273836f5837cec641ab21f365c79b102a4b/contrib/000077500000000000000000000000001500614314100212455ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/contrib/Helix.appdata.xml000066400000000000000000000112221500614314100244470ustar00rootroot00000000000000 com.helix_editor.Helix CC0-1.0 MPL-2.0 Helix A post-modern text editor مُحَرِّرُ نُصُوصٍ سَابِقٌ لِعَهدِه Blaž Hrastnik

Helix is a terminal-based text editor inspired by Kakoune / Neovim and written in Rust.

مُحَرِّرُ نُصُوصٍ يَعمَلُ فِي الطَّرَفِيَّة، مُستَلهَمٌ مِن Kakoune وَ Neovim وَمَكتُوبٌ بِلُغَةِ رَست البَرمَجِيَّة.

  • Vim-like modal editing
  • تَحرِيرٌ وَضعِيٌّ شَبيهٌ بِـVim
  • Multiple selections
  • تَحدِيدَاتٌ لِلنَّصِ مُتَعَدِّدَة
  • Built-in language server support
  • دَعْمٌ مُدمَجٌ لِخَوادِمِ اللُّغَات
  • Smart, incremental syntax highlighting and code editing via tree-sitter
  • تَحرِيرُ التَّعلِيمَاتِ البَّرمَجِيَّةِ مَعَ تَمييزٍ لِلتَّركِيبِ النَّحُويِّ بِواسِطَةِ tree-sitter
Helix.desktop Helix with default theme https://github.com/helix-editor/helix/raw/d4565b4404cabc522bd60822abd374755581d751/screenshot.png https://helix-editor.com/ https://opencollective.com/helix-editor https://docs.helix-editor.com/ https://github.com/helix-editor/helix https://github.com/helix-editor/helix/issues https://github.com/helix-editor/helix/releases/tag/25.01.1 https://helix-editor.com/news/release-25-01-highlights/ https://github.com/helix-editor/helix/releases/tag/24.07 https://helix-editor.com/news/release-24-03-highlights/ https://helix-editor.com/news/release-23-10-highlights/ https://github.com/helix-editor/helix/releases/tag/23.05 https://helix-editor.com/news/release-23-03-highlights/ https://helix-editor.com/news/release-22-12-highlights/ https://helix-editor.com/news/release-22-08-highlights/ https://helix-editor.com/news/release-22-05-highlights/ https://helix-editor.com/news/release-22-03-highlights/ keyboard Utility TextEditor text editor development programming hx text/english text/plain text/x-makefile text/x-c++hdr text/x-c++src text/x-chdr text/x-csrc text/x-java text/x-moc text/x-pascal text/x-tcl text/x-tex application/x-shellscript text/x-c text/x-c++
helix-cbac4273836f5837cec641ab21f365c79b102a4b/contrib/Helix.desktop000066400000000000000000000077471500614314100237300ustar00rootroot00000000000000[Desktop Entry] Name=Helix GenericName=Text Editor GenericName[ar]=مُحَرِّرُ نُصُوص GenericName[de]=Texteditor GenericName[fr]=Éditeur de texte GenericName[ru]=Текстовый редактор GenericName[sr]=Едитор текст GenericName[tr]=Metin Düzenleyici Comment=Edit text files Comment[af]=Redigeer tekslêers Comment[am]=የጽሑፍ ፋይሎች ያስተካክሉ Comment[ar]=مُحَرِّرُ مِلَفَّاتٍ نَصِّيَّة Comment[az]=Mətn fayllarını redaktə edin Comment[be]=Рэдагаваньне тэкставых файлаў Comment[bg]=Редактиране на текстови файлове Comment[bn]=টেক্স্ট ফাইল এডিট করুন Comment[bs]=Izmijeni tekstualne datoteke Comment[ca]=Edita fitxers de text Comment[cs]=Úprava textových souborů Comment[cy]=Golygu ffeiliau testun Comment[da]=Redigér tekstfiler Comment[de]=Textdateien bearbeiten Comment[el]=Επεξεργασία αρχείων κειμένου Comment[en_CA]=Edit text files Comment[en_GB]=Edit text files Comment[es]=Edita archivos de texto Comment[et]=Redigeeri tekstifaile Comment[eu]=Editatu testu-fitxategiak Comment[fa]=ویرایش پرونده‌های متنی Comment[fi]=Muokkaa tekstitiedostoja Comment[fr]=Éditer des fichiers texte Comment[ga]=Eagar comhad Téacs Comment[gu]=લખાણ ફાઇલોમાં ફેરફાર કરો Comment[he]=ערוך קבצי טקסט Comment[hi]=पाठ फ़ाइलें संपादित करें Comment[hr]=Uređivanje tekstualne datoteke Comment[hu]=Szövegfájlok szerkesztése Comment[id]=Edit file teks Comment[it]=Modifica file di testo Comment[ja]=テキストファイルを編集します Comment[kn]=ಪಠ್ಯ ಕಡತಗಳನ್ನು ಸಂಪಾದಿಸು Comment[ko]=텍스트 파일을 편집합니다 Comment[lt]=Redaguoti tekstines bylas Comment[lv]=Rediģēt teksta failus Comment[mk]=Уреди текстуални фајлови Comment[ml]=വാചക രചനകള് തിരുത്തുക Comment[mn]=Текст файл боловсруулах Comment[mr]=गद्य फाइल संपादित करा Comment[ms]=Edit fail teks Comment[nb]=Rediger tekstfiler Comment[ne]=पाठ फाइललाई संशोधन गर्नुहोस् Comment[nl]=Tekstbestanden bewerken Comment[nn]=Rediger tekstfiler Comment[no]=Rediger tekstfiler 
Comment[or]=ପାଠ୍ଯ ଫାଇଲଗୁଡ଼ିକୁ ସମ୍ପାଦନ କରନ୍ତୁ Comment[pa]=ਪਾਠ ਫਾਇਲਾਂ ਸੰਪਾਦਨ Comment[pl]=Edytor plików tekstowych Comment[pt]=Editar ficheiros de texto Comment[pt_BR]=Edite arquivos de texto Comment[ro]=Editare fişiere text Comment[ru]=Редактирование текстовых файлов Comment[sk]=Úprava textových súborov Comment[sl]=Urejanje datotek z besedili Comment[sq]=Përpuno files teksti Comment[sr]=Уређујте текст фајлове Comment[sr@Latn]=Izmeni tekstualne datoteke Comment[sv]=Redigera textfiler Comment[ta]=உரை கோப்புகளை தொகுக்கவும் Comment[th]=แก้ไขแฟ้มข้อความ Comment[tk]=Metin faýllary editle Comment[tr]=Metin dosyaları düzenleyin Comment[uk]=Редактор текстових файлів Comment[vi]=Soạn thảo tập tin văn bản Comment[wa]=Asspougnî des fitchîs tecses Comment[zh_CN]=编辑文本文件 Comment[zh_TW]=編輯文字檔 TryExec=hx Exec=hx %F Terminal=true Type=Application Keywords=Text;editor; Keywords[ar]=نص;نصوص;محرر; Keywords[fr]=Texte;éditeur; Keywords[ru]=текст;текстовый редактор; Keywords[sr]=Текст;едитор; Keywords[tr]=Metin;düzenleyici; Icon=helix Categories=Utility;TextEditor;ConsoleOnly StartupNotify=false MimeType=text/english;text/plain;text/x-makefile;text/x-c++hdr;text/x-c++src;text/x-chdr;text/x-csrc;text/x-java;text/x-moc;text/x-pascal;text/x-tcl;text/x-tex;application/x-shellscript;text/x-c;text/x-c++; helix-cbac4273836f5837cec641ab21f365c79b102a4b/contrib/completion/000077500000000000000000000000001500614314100234165ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/contrib/completion/hx.bash000066400000000000000000000016071500614314100247000ustar00rootroot00000000000000#!/usr/bin/env bash # Bash completion script for Helix editor _hx() { local cur prev languages COMPREPLY=() cur="${COMP_WORDS[COMP_CWORD]}" prev="${COMP_WORDS[COMP_CWORD - 1]}" case "$prev" in -g | --grammar) mapfile -t COMPREPLY < <(compgen -W 'fetch build' -- "$cur") return 0 ;; --health) languages=$(hx --health | tail -n '+7' | awk '{print $1}' | sed 's/\x1b\[[0-9;]*m//g') mapfile -t COMPREPLY < 
<(compgen -W """$languages""" -- "$cur") return 0 ;; esac case "$2" in -*) mapfile -t COMPREPLY < <(compgen -W "-h --help --tutor -V --version -v -vv -vvv --health -g --grammar --vsplit --hsplit -c --config --log" -- """$2""") return 0 ;; *) mapfile -t COMPREPLY < <(compgen -fd -- """$2""") return 0 ;; esac } && complete -o filenames -F _hx hx helix-cbac4273836f5837cec641ab21f365c79b102a4b/contrib/completion/hx.elv000066400000000000000000000040331500614314100245450ustar00rootroot00000000000000# You can move it here ~/.config/elvish/lib/hx.elv # Or add `eval (slurp < ~/$REPOS/helix/contrib/completion/hx.elv)` # Be sure to replace `$REPOS` with something that makes sense for you! ### Renders a pretty completion candidate var candidate = { | _stem _desc | edit:complex-candidate $_stem &display=(styled $_stem bold)(styled " "$_desc dim) } ### These commands will invalidate further input (i.e. not react to them) var skips = [ "--tutor" "--help" "--version" "-V" "--health" ] ### Grammar commands var grammar = [ "--grammar" "-g" ] ### Config commands var config = [ "--config" "-c" ] ### Set an arg-completer for the `hx` binary set edit:completion:arg-completer[hx] = {|@args| var n = (count $args) if (>= $n 3) { # Stop completions if passed arg will take precedence # and invalidate further input if (has-value $skips $args[-2]) { return } # If the previous arg == --grammar, then only suggest: if (has-value $grammar $args[-2]) { $candidate "fetch" "Fetch the tree-sitter grammars" $candidate "build" "Build the tree-sitter grammars" return } # When we have --config, we need a file if (has-values $config $args[-2]) { edit:complete-filename $args[-1] | each { |v| put $v[stem] } return } # When we have --log, we need a file if (has-values "log" $args[-2]) { edit:complete-filename $args[-1] | each { |v| put $v[stem] } return } } edit:complete-filename $args[-1] | each { |v| put $v[stem]} $candidate "--help" "(Prints help information)" $candidate "--version" "(Prints version 
information)" $candidate "--tutor" "(Loads the tutorial)" $candidate "--health" "(Checks for errors in editor setup)" $candidate "--grammar" "(Fetch or build the tree-sitter grammars)" $candidate "--vsplit" "(Splits all given files vertically)" $candidate "--hsplit" "(Splits all given files horizontally)" $candidate "--config" "(Specifies a file to use for configuration)" $candidate "--log" "(Specifies a file to write log data into)" } helix-cbac4273836f5837cec641ab21f365c79b102a4b/contrib/completion/hx.fish000066400000000000000000000016631500614314100247160ustar00rootroot00000000000000#!/usr/bin/env fish # Fish completion script for Helix editor complete -c hx -s h -l help -d "Prints help information" complete -c hx -l tutor -d "Loads the tutorial" complete -c hx -l health -xa "(__hx_langs_ops)" -d "Checks for errors" complete -c hx -s g -l grammar -x -a "fetch build" -d "Fetch or build tree-sitter grammars" complete -c hx -s v -o vv -o vvv -d "Increases logging verbosity" complete -c hx -s V -l version -d "Prints version information" complete -c hx -l vsplit -d "Splits all given files vertically" complete -c hx -l hsplit -d "Splits all given files horizontally" complete -c hx -s c -l config -r -d "Specifies a file to use for config" complete -c hx -l log -r -d "Specifies a file to use for logging" complete -c hx -s w -l working-dir -d "Specify initial working directory" -xa "(__fish_complete_directories)" function __hx_langs_ops hx --health languages | tail -n '+2' | string replace -fr '^(\S+) .*' '$1' end helix-cbac4273836f5837cec641ab21f365c79b102a4b/contrib/completion/hx.nu000066400000000000000000000034531500614314100244060ustar00rootroot00000000000000# Completions for Helix: # # NOTE: the `+N` syntax is not supported in Nushell (https://github.com/nushell/nushell/issues/13418) # so it has not been specified here and will not be proposed in the autocompletion of Nushell. 
# The help message won't be overridden though, so it will still be present here def health_categories [] { let languages = ^hx --health languages | detect columns | get Language | filter { $in != null } let completions = [ "all", "clipboard", "languages" ] | append $languages return $completions } def grammar_categories [] { ["fetch", "build"] } # A post-modern text editor. export extern hx [ --help(-h), # Prints help information --tutor, # Loads the tutorial --health: string@health_categories, # Checks for potential errors in editor setup --grammar(-g): string@grammar_categories, # Fetches or builds tree-sitter grammars listed in `languages.toml` --config(-c): glob, # Specifies a file to use for configuration -v, # Increases logging verbosity each use for up to 3 times --log: glob, # Specifies a file to use for logging --version(-V), # Prints version information --vsplit, # Splits all given files vertically into different windows --hsplit, # Splits all given files horizontally into different windows --working-dir(-w): glob, # Specify an initial working directory ...files: glob, # Sets the input file to use, position can also be specified via file[:row[:col]] ] helix-cbac4273836f5837cec641ab21f365c79b102a4b/contrib/completion/hx.zsh000066400000000000000000000022601500614314100245630ustar00rootroot00000000000000#compdef _hx hx # Zsh completion script for Helix editor _hx() { _arguments -C \ "-h[Prints help information]" \ "--help[Prints help information]" \ "-v[Increase logging verbosity]" \ "-vv[Increase logging verbosity]" \ "-vvv[Increase logging verbosity]" \ "-V[Prints version information]" \ "--version[Prints version information]" \ "--tutor[Loads the tutorial]" \ "--health[Checks for errors in editor setup]:language:->health" \ "-g[Fetches or builds tree-sitter grammars]:action:->grammar" \ "--grammar[Fetches or builds tree-sitter grammars]:action:->grammar" \ "--vsplit[Splits all given files vertically]" \ "--hsplit[Splits all given files horizontally]" \ 
"-c[Specifies a file to use for configuration]" \ "--config[Specifies a file to use for configuration]" \ "-w[Specify initial working directory]" \ "--working-dir[Specify initial working directory]" \ "--log[Specifies a file to use for logging]" \ "*:file:_files" case "$state" in health) local languages=($(hx --health | tail -n '+11' | awk '{print $1}' | sed 's/\x1b\[[0-9;]*m//g;s/[✘✓]//g')) _values 'language' $languages ;; grammar) _values 'action' fetch build ;; esac } helix-cbac4273836f5837cec641ab21f365c79b102a4b/contrib/helix-256p.ico000066400000000000000000010200761500614314100235520ustar00rootroot00000000000000 ( ( TUnUPSUDUUUlU,UUUUUlUUvUUUUUUlT UXUUUUUUUUlRU>UUUUUUUUUUlT&UUUUUUUUUUUUlUUnUUUUUUUUUUUUUlUUPUUUUUUUUUUUUUUUlU6UUUUUUUUUUUUUUUUUlU UUUUUUUUUUUUUUUUUUUlTUfUUUUUUUUUUUUUUUUUUUUlVUHUUUUUUUUUUUUUUUUUUUUUUl{|U0UUUUUUUUUUUUUUUUUUUUUUUUlz{&{zUU~UUUUUUUUUUUUUUUUUUUUUUUUUl{"{v{J|U U^UUUUUUUUUUUUUUUUUUUUUUUUUUUl{{h{{zSUBUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{{J{{{{U*UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{,{{{{{jUUtUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUlw{V{{{{{{*S UVUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUlz{h{{{{{{{RU:UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{X{{{{{{{|T$UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{.{{{{{{{{yTUlUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUly{{{{{{{{{zTUNUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{@{{{{{{{{{{U4UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUlw{{{{{{{{{{{:UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUlz{{{{{{{{{{{UUdUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUlz8{{{{{{{{{{{{0TUFUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{V{{{{{{{{{{{{zU.UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{f{{{{{{{{{{{{{{UUzUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{j{{{{{{{{{{{{{{{4U U\UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{j{{{{{{{{{{{{{{{{~zTU@UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{j{{{{{{{{{{{{{{{{{{^z 
T(UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{j{{{{{{{{{{{{{{{{{{{{BzTUrUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{j{{{{{{{{{{{{{{{{{{{{{{*U UTUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{j{{{{{{{{{{{{{{{{{{{{{{{vzQT:UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{j{{{{{{{{{{{{{{{{{{{{{{{{{Xz U"UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUl{j{{{{{{{{{{{{{{{{{{{{{{{{{{{UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUU^{j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{\z U(UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUlT{j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{@yUUrUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUZU{j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{(TURUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUU.{j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{r{TU8UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUNT{j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{Vy T"UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUlU{j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{z:yTUhUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUT${j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{z$UUJUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUU:S{j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{j{U0UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUVT 
{j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{NzUU~UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUtU{j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{2UU`UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUT*{j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{̅$RDUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUBT{j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{ʀئqYTUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUU\U {j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{}АܰhVTUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUU|T{h{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{zʀӘ~߸aUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUT.{Z{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{z˄֢u\TUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUHU{@{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{z{΋ګmXTUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUfU{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{z}ђݳeVTUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUU 
{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{zʁԛ{`UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUU6{R{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{̆פs[TUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUPTz{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{z|ύۭkWTUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUnT{D{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{z~ҕ޵dUTUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUU$z{x{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{z˂՞y^TUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUU{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{X݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄}j[R,z {\{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{0݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݃ޅNޅ{{z{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{zw ݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄6{.{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{yH݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ {{F{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{zs݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄f݄{{d{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{zx݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄J܃{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{y@݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄0{4{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{yb݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄~܄y{N{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{zl݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄^݄ 
{{j{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄B܅{"{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄*w{:{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄t܄y {T{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄X݄ {{r{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄<݄{({{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄${{@{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݃l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄l܄z {\{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l܂l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄N݃z{z{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l~b݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݃4{.{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l~@݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݃z{H{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l|܂݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄d݄{{d{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{lnہ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄Hބ{ {{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{lJ܃݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄.{4{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{lx ܂݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄z݄{|L{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݄0݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄\ރ 
il*murx{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݃Z݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄@݅kpkpxkpkpkpmvtz{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l܃݄j݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄(kp kpZkpkpkpkpkpkpkpoyvz{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݄݀X݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄r݄hokp>kpkpkpkpkpkpkpkpkpkplrp~w{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݄.݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄Tۄ ko(kpkpkpkpkpkpkpkpkpkpkpkpkpkpltry{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݄݅L݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄8ރkqkprkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpnwtz{{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݄݄j݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄$kokpTkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkqo{v{{{{{{{{{{{{{{{{{{{{{{{{{{{{l݄"݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄jޅfjkp8kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkplrqx{{{{{{{{{{{{{{{{{{{{{{{{{{lނ݄8݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄Lۄkp"kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpmtsy{{{{{{{{{{{{{{{{{{{{{{{{lޅ݄T݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄2kokphkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpnxuz{{{{{{{{{{{{{{{{{{{{{{l܄݄r݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄܄lpkpJkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkqp|w{{{{{{{{{{{{{{{{{{{{{l݄(݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄b܄kp2kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkplsqx{{{{{{{{{{{{{{{{{{{l܃݃@݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄Dzs"ho~kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkp
kpkpmusy{{{{{{{{{{{{{{{{{l܃ ݄\݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄߹~skpjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpnyuz{{{{{{{{{{{{{{{l܃݄z݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄΁ءyvrjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkqp~x{{{{{{{{{{{{{{l݄.݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބ܄ŀԔwpqjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpnww{{{{{{{{{{{{zlބ݄F݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބك߹~Їulpjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkomvw{{{{{{{{{{{yf݄݃d݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބԂ۬{}sjpjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpo{z{{{{{{{{{{yH݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބބ́םytrjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkqu{{{{{{{{{zx$݄4݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބ܄Ӑvoqjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkoo{z{{{{{{{{zu݄݃N݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބ؃޵}τtlpjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpltx{{{{{{{{yZ݄݄l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބ҂ڨ{zsjpjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkqv{{{{{{{zw݄$݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބ݄ɀ֚xsqjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkqu{{{{{{{yNރ݄:݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބۄҍvnpjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkqu{{{{{{ys݄ 
݄V݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބ׃ݲ|ρtkpjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkplrv{{{{{zv݄݅t݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބЂ٤zxrjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpmuy{{{{zv݄(݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބ݄ǀՖwqqjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkoq~{{{zyXu݄݃@݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބڃ~щumpjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkplrv{zyw݃ ݄^݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބՃܮ|~skpjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpqzzx2݄݄|݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބބ΁ؠyvrjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpozwyLt݄.݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބ܄ĀԒwpqjpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpmusjx݄݄H݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބك߸}Іulpjpkpkpkpkpkpkpkpkpkpkpkpkpkqoz ݄݃f݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބӂ۪{|sjpjpkpkpkpkpkpkpkpkpkpkp0݃ ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބބ́לytrjpkpkpkpkpkpkpkpJjo݄4݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބ܄ӏvoqjpkpkpkphjoL׃ۄބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބ؃޴}Єtlpco ^mfoLvr֜xˁބބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބ׃ۃkp4kpkpjpjp|s۪{ӂބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄f݃kp 
kpkpkpkpkpkpjplpІu߷}كބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄HބjokpdkpkpkpkpkpkpkpkpkpjppqԒw܄ބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄0jnkpHkpkpkpkpkpkpkpkpkpkpkpkpkpjpvrؠýބބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄~݄kp.kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpkp~sܮ|Ճބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄`݄kpkp|kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpmpщu~ڃބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄Bۅjo kp\kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpqqՖwƀ݄ބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄*kpkp@kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpxr٣zЂބބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄v܃kp(kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpkpρtݲ|׃ބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄N߅kokprkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpnpҌvۄބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄|݃ jo kpTkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpsq֚xɁ݄ބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄܄ 
lskp:kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpjpzsڧz҂ބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄v݂kp"kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjplpτt޵}؃ބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄Dkokpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpoqӏv܄ބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ބkpkpLkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjptrםýބބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄Pkp2kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpjp|s۫{Ԃބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄܃kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjplpЇu߸}كބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄kpkpbkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjppqԓwĀ܄ބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄>jokoFkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpvrؠy΁ބބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄Zko,kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpkpsܮ|Ճބ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄hkpkpxkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpmpъu~ڃ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lko 
kpZkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpqq֙xՃ݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄linkp>kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjpfoFɀ$݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lkp&kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpdko݄2݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lkpkppkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpޅ܄L݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ljokpRkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkp4܄݄j݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lkp8kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpNjo݄"݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lkp"kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkplkp܁݄8݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lkpkphkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpko$܃݄R݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ljnkpJkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjo:gn܃݄r݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄
݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ljp,kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpVko ݄(݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lhokpXkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkptjpރ݃>݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄liokpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjo*݃ ݄\݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lkp\kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkp@ko݄݄x݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lkp2kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkp\kp 
݄,݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ljokpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkp|kp݂݄F݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lkpDkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjo0݄݄b݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ljnkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpHip݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ljokpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpfjo݄4݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄ljp:kpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkp ݄݄N݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lkpXkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkp6܄݄l݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lkphkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpPkp݄"݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lkpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpnkpރ݄:݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lkpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkp&݃ ݄T݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄݄lkpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkp݄݄݄݄݄݄݄݄hޅkpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpZjp 
݃*݄݄݄݄݄݄X݂kpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpxjp݄j݄݄݄݄݄,kpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkp,݄݄݄݄݄NބkpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjoFjo݄݄݄݃l܄kpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpbjoރ݄H݄z݃$kpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjp݂݄݄&ۄkpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkp4؂ڃkpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkoLjokpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpjjokpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkp"kpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpkp:hnkpjkpkpkpkpkpkpkpkpkpkpkpkpkpkpkpTkp kpjkpkpkpkpkpkpkpkpkpkpkpkpkprjokpjkpkpkpkpkpkpkpkpkpkpkpkp(kpjkpkpkpkpkpkpkpkpkpjp@jqkpjkpkpkpkpkpkpkpkp\jo kpjkpkpkpkpkpkpzkpkpjkpkpkpkpkp.kplkpkpkpFkokpRkprjo???????? ???? ??????8??0??????helix-cbac4273836f5837cec641ab21f365c79b102a4b/contrib/helix.png000066400000000000000000000366511500614314100230770ustar00rootroot00000000000000PNG  IHDR\rfgAMA a cHRMz&u0`:pQ<bKGD pHYs  ~tIME .;kq C@o~m&Ƅ&/󁻥tܠpǘ2KH(Jz2 >JKq8'/!i RnSdA|RZgƔ$,HHh' @+%UO#Si+hQۗ9=YZL 7U/!Mz〟?ƪ ̚RCuQۘm6h "pCc3'D * +T`Q 3fƔǣ/!K6Hx?@(3>F QۘTX #FmZ=}S~-K (X>%96d- +lzK]Ԗu9+}Hp%T0crM%t6Bē!E𛛘6j뺔DޙX6<1%A!T a)Y]B""nW哠)Pqє;"\! J]` 6Ǽ۸Qܩ$6ECD pyO0#sa@A`1\\U?Hy9@"?0 gO^/fJB5e5ӧ'BgמU:p`_v pTQ#[5js;D:ơ|P< 3>EFa,>dA`s?Xil%xtNnFG&a,A6kG La+D J|IvD%yI`|z;vK;Bq@q0j8 eЬi5xǬsYˣ/( ȾKB*j;L"{!B-g =S:j Tˋf\f[FwDL@('Y]>̩pM+y3Ҟ@R|g i0ǹ8i[^t(yri 5K)VǏ!w@UB`ߠXO1q.:Kyn}1A%1^>,峊EI0s{nҍ(*:A @! ?kĒDooDd*ҷz Qث: aVsp3&B+(ZJk]ED! 
똖7濲py]xn2U,I:EOP-ȩu :#ESk~zSҍv;VҨf!~Ӿ)#7oê/ML ;1(( ǯ܅Uk" Y``P4\wb6ƒꗷJ*9M"X3Uj#~RVGCe5Iq;6|BU~,.~H'( A%k<ˌ)pu'Emcslֈ) -dlQWYv\ފ|wupm)-X.ca1Q\ 7$ -2Pe\/.-R6Z7'|~2<`To)kM ]sʎUo֮k> )oUPXlږ"c0*j0cZ r2wm- @=+33SG ׵DmIrp|%B0L,j5m㪗j zxƸK"&HInǰ # x 31b+*jhSDmWLB.k0cZ6)VLNZPOOCԸ#>SjP1f g*{X+; MjAKih[9<.F߇֊؈@SV*V%cD |$A=4Wej[K.p`^~ &qĕD,}6a _/jbV6J4z/nf\΁@%(3?yD V#zn'?qSz߁E pG/ ,\:~/>êً?̜ZCkqSe%ʟ5ϫZ>W7"júzVZ11uZV軈$m(HƐy_wI7VzMN,\t"b-AE~ |7"z}- !5>DIw(oǚڇ"`޺Z4#~z 5`Xł9Y LM`*!)LWYBTTTʍP'n4ਨ 1$#YwX \wןׯ|Z*[H "_؈_Zi ͙ɴ'7#%{(lɺڽ~VY?DR<.%n/m# 6*Gfۘ;aL'odi~h^Nwd{G@72孄%`p\y@=8wɽXm;N. `ʰ+S 2;Dv6EX>N%چ6[r/7L"ΤRpWhv$3|xPGɍ]싢/$}@@Z;۔w /)T~Xn.KƆ`at׼Zˎ&W*xr$nnY[ٴ40 hk5hv]@4 +{OY寡ƀJ=@c`F)t2WFWk zx[}غ[֔ާBU\x"ʆ\@ IT,: D1/f\>5dZG5%q!;Uħ2pSךxWg52`bCP)lrך!8- -~-PB5{zU/As? mAӮ*{Ee2pq 5q϶Z!x#ב.nU"M>]Q[1] O\)ٕ=V鸄>Q Zn2"Z d>xɈp_QWD| +(R+Z;@߿WouOn-V Pv AmxO5Qxnů(E]Nkc/6ئK d#Ѧ2H)O[k1FI"=Y720F\~AS)w?NR6Zt 뛜Ce݊nFʀRn$WdFe/jb2C&4_zăPڎU! ķ#\>EmH&["}e/NՑ{a2"_stGH^-W&eSitoݢbWݍv %ݎPAl4z][ TAa3(ھ,h|pyE=^'e\o4N? z}z>.}1:TW"GJ 4l򈾃ÝTFmx2kz69v+ l[QnV "JX s`Օӓ&@y/ct =^mS)U"Ɛ鶇zyoշyT<.{]]޶ˁ^_蒖82ʜ;<-XϺaYa5O @xwe$o3=ba[ )#"kC`@X%5PmƳ? CBǎb}q|e,Mۉ26T!$]x8_AiF̰n8pXKP-*&{p rIgKҨ[F jl bM}xqL*Vk_Zl=ì2Lb*fb9P(Ge%3q lUmy@R$_YzY -CnR7:.'. +;@rHґ("gJ6#bO89jXzi 7 F֦âKHyJ0LNeZV<?JQYET]!Olztb*BhuHҖ-l~d n *DRMWھ'f-嗈ւ`Hsa^,v.(j)gAtI8 ދƳlC;xZ8@ d Em_I^ʥYO7y`Tu4L5]-DŽ6@9]@=ǚZ0EU(uU6]}%Dgp T*88w^V,`Q7O1ĥNGVQiFC"f'g7->:(;jɜBМKi&}, TC) )vǙU3 S|Kf&|a6!y!)ڇFU$9eU0{Q)OBPlW3`l U^1LK{x2]Yh\[6qs!-7s 87c8  $+)fdž^`D !ʐcݧ?! i GI# uVk`_yӶDUXe C=D"j ._M{ea9חШ+z>aWCtD:\h'gh.WO>FB2??~ql*$`Rp0׌3z#ROxvK`]͖͖M|g' 7]dNti1C#u񜚖|!TSJc>dʄZg]%g09R7 72)| z~[Кٲɒ8N>ޭF%nL$e! 
,Cb$,pwڶH`rX2O]~ak5HtێOשr!S7>j" @sAٺݢAJ!C751|c6)f܌r+Q#aa5=XS%k}β>$9}72TkZ@yDn`9~2/ L;6B,@P5'"*m\Ǎyes]H6Ԣ_B9Fo࠭YPQI)P]^ ,"`TـKon{`_ #^k`Ȗ&29,,p*+D|4_WtLԶ%̼Ԋp^_4l.~,C671bCeޟ'QIy ܹ`~ b)UBSy ] T؝U,(mR~r,vP\kH'|Cu9ˆ]ߙczlMMh9k!c$eNRӵ[t s׺sA} Xt3_*w4uU52xK|緓PBז#[5IU/} i>KD> uꌿlMHYc,g"IQ0 !LNy{^P]Y;|Gw[tmfdR+뎚3~F)!㢶Woza+-9aKՑMyUQm}8.W;p[?HvP.ʟͷv_ m)ߛ}^sxv +=P7٣iD~1`Ćl~L7<܀ a-]l.ڶ^M?rPf3 (8nnǁPt2d+-o:s:n^!({8ҫIO*;1t!g*ɬUTKV#Im~XԗիHhPYpKhTG-ڲeo"?4o[&cQc`ps3ٳ>F8 8'_{g" vh)GIohZ` b xM\gB#anLŁ8A6îiiV6? 0-lv=J_Ahھ6<|}q5ίLqT[RH\oiZˁڽ' %Cg",o_Pt ã/.Bsǖr(q~GTX(lt%<5M #3'ݍ(RCJ8ަNNR䅼Nz:MkH{J. } oeN;ʫ_  /5fxԶ(b-Kg\EA'ݥq~Gc "'FAT oOL6 ; 5QP<p%/]l#\UNS_l 42$*E|BMxR9O&k*fD mp)GU_dq~G XZa_C򞟄@,`ɜխez~ $u@k *~p aw~#]gdRn(A鸁eQT4lYe.(!@Eɶ;YFm_l1YX#*)T/B!m0g= )M> &p3 `@0QbD]50 {t.aáZS8K7ף@NJqKhPzO-e=x0`6bvZ|ti IFgN22#y}D2^ Q,B>o|&> JX~܀m,*bB%Bar= ֏$H Wy;FnP84wܳmھ" Sa GƯeߗ7>-V)MmFaMh+&$لqz#ZHu8?4JP~wpp(wq"U-rkb2%Q1A〛pل p͜ы %pZj5P\5jcD:_x A"KpU$o !N[qPո[ mc 0U 9e#[8|'/3ȉ\~ 4+Ɲ WV.jcAB)C,0qFj]u9PN XBΟ;;Y>B+ܤE ' 0UDnƪQDJ8sQץmɜpcږN P e5{C[ /.fOWxx~NK&-bwiR7?pRG7!jQ1" LƌQ*&eMM :KpRhKnf;7>MH NKp2+QY ։>BkyWo R»kuǬM3>~s =o4/+BQhQ.~9ec`z X "3c pVDmKHBg s-1aҼmp'*g="ϩ4Y >N2u|.i 6#$qkBgP -ap?(<&p&0 `f!|AHBF9xB} }IsO߇0+ "ݱ-`.1I›FEmDGI}B(pB֨=c8kYQi5~alhl |נ>jrĒ$h{S5V!ks3w=XߧsIMV&atn#(;%k {GS Vm>=m6t_|x! 9>DWZvvdNB#"* ܧq?PaT v'R_bx hXX> 3VmX]\-.s$SbA"oXd<oiq{;6*xX<|W_0,jG\";qQVz{` K҄rU\K)`2*_,H(aŗ&<;Vaa٘*ٚ%FfIQT(,aapJw exz.,Iq?Hm,/L;] U|  pQNFuvgCc`lc#.Anͨ* ψ| 1N y`܍Ewh6\ÑͫP,-M㼁pv" *qyma0V@,[ԗѫX~Y7AP^:YuY_ zB'KvX_I=2.$DF oUEmco" V\-_Q9F E-Mcm RQ-?_~eƷycOeI#YX+&u9W=KX7R]܃?㬜0?2 Q(k˖[N,kNK,ǻ^0՚esןED D`iRZ@qfĠ@uMNV||jzM z:p KB;T =\|Eph a6pMVN8cG`hڴ!E^P#cH% =pnE ȅ6j67Y6n8%[ȡEgqGùm=sդG!"=x?@$u<=#U|yU-y#>`t뗏Gnz/\=杀`-VxEU63}`VB q_k= \w QXHh傫O"qe'!?60$Ti` for info, more `v`s for higher verbosity) * Want to display the logs in a separate file instead of using the `:log-open` command in your compiled Helix editor? 
Start your debug version with `cargo run -- --log foo.log` and in a new terminal use `tail -f foo.log` - Instead of running a release version of Helix, while developing you may want to run in debug mode with `cargo run` which is way faster to compile - Looking for even faster compile times? Give a try to [mold](https://github.com/rui314/mold) - If your preferred language is missing, integrating a tree-sitter grammar for it and defining syntax highlight queries for it is straight forward and doesn't require much knowledge of the internals. - If you don't use the Nix development shell and are getting your rust-analyzer binary from rustup, you may need to run `rustup component add rust-analyzer`. This is because `rust-toolchain.toml` selects our MSRV for the development toolchain but doesn't download the matching rust-analyzer automatically. We provide an [architecture.md][architecture.md] that should give you a good overview of the internals. # Auto generated documentation Some parts of [the book][docs] are autogenerated from the code itself, like the list of `:commands` and supported languages. To generate these files, run ```shell cargo xtask docgen ``` inside the project. We use [xtask][xtask] as an ad-hoc task runner. To preview the book itself, install [mdbook][mdbook]. Then, run ```shell mdbook serve book ``` and visit [http://localhost:3000](http://localhost:3000). # Testing ## Unit tests/Documentation tests Run `cargo test --workspace` to run unit tests and documentation tests in all packages. ## Integration tests Integration tests for helix-term can be run with `cargo integration-test`. Code contributors are strongly encouraged to write integration tests for their code. Existing tests can be used as examples. Helpers can be found in [helpers.rs][helpers.rs]. The log level can be set with the `HELIX_LOG_LEVEL` environment variable, e.g. `HELIX_LOG_LEVEL=debug cargo integration-test`. 
Contributors using MacOS might encounter `Too many open files (os error 24)` failures while running integration tests. This can be resolved by increasing the default value (e.g. to `10240` from `256`) by running `ulimit -n 10240`. ## Minimum Stable Rust Version (MSRV) Policy Helix keeps an intentionally low MSRV for the sake of easy building and packaging downstream. We follow [Firefox's MSRV policy]. Once Firefox's MSRV increases we may bump ours as well, but be sure to check that popular distributions like Ubuntu package the new MSRV version. When increasing the MSRV, update these three places: * the `workspace.package.rust-version` key in `Cargo.toml` in the repository root * the `env.MSRV` key at the top of `.github/workflows/build.yml` * the `toolchain.channel` key in `rust-toolchain.toml` [Firefox's MSRV policy]: https://firefox-source-docs.mozilla.org/writing-rust-code/update-policy.html [good-first-issue]: https://github.com/helix-editor/helix/labels/E-easy [log-file]: https://github.com/helix-editor/helix/wiki/FAQ#access-the-log-file [architecture.md]: ./architecture.md [docs]: https://docs.helix-editor.com/ [xtask]: https://github.com/matklad/cargo-xtask [mdbook]: https://rust-lang.github.io/mdBook/guide/installation.html [helpers.rs]: ../helix-term/tests/test/helpers.rs helix-cbac4273836f5837cec641ab21f365c79b102a4b/docs/architecture.md000066400000000000000000000115231500614314100235430ustar00rootroot00000000000000 | Crate | Description | | ----------- | ----------- | | helix-core | Core editing primitives, functional. | | helix-lsp | Language server client | | helix-lsp-types | Language Server Protocol type definitions | | helix-dap | Debug Adapter Protocol (DAP) client | | helix-loader | Functions for building, fetching, and loading external resources | | helix-view | UI abstractions for use in backends, imperative shell. 
| | helix-term | Terminal UI | | helix-tui | TUI primitives, forked from tui-rs, inspired by Cursive | This document contains a high-level overview of Helix internals. > NOTE: Use `cargo doc --open` for API documentation as well as dependency > documentation. ## Core The core contains basic building blocks used to construct the editor. It is heavily based on [CodeMirror 6](https://codemirror.net/6/docs/). The primitives are functional: most operations won't modify data in place but instead return a new copy. The main data structure used for representing buffers is a `Rope`. We re-export the excellent [ropey](https://github.com/cessen/ropey) library. Ropes are cheap to clone, and allow us to easily make snapshots of a text state. Multiple selections are a core editing primitive. Document selections are represented by a `Selection`. Each `Range` in the selection consists of a moving `head` and an immovable `anchor`. A single cursor in the editor is simply a selection with a single range, with the head and the anchor in the same position. Ropes are modified by constructing an OT-like `Transaction`. It represents a single coherent change to the document and can be applied to the rope. A transaction can be inverted to produce an undo. Selections and marks can be mapped over a transaction to translate to a position in the new text state after applying the transaction. > NOTE: `Transaction::change`/`Transaction::change_by_selection` is the main > interface used to generate text edits. `Syntax` is the interface used to interact with tree-sitter ASTs for syntax highlighting and other features. ## View The `view` layer was supposed to be a frontend-agnostic imperative library that would build on top of `core` to provide the common editor logic. Currently it's tied to the terminal UI. A `Document` ties together the `Rope`, `Selection`(s), `Syntax`, document `History`, language server (etc.) into a comprehensive representation of an open file. 
A `View` represents an open split in the UI. It holds the currently open document ID and other related state. Views encapsulate the gutter, status line, diagnostics, and the inner area where the code is displayed. > NOTE: Multiple views are able to display the same document, so the document > contains selections for each view. To retrieve, `document.selection()` takes > a `ViewId`. `Info` is the autoinfo box that shows hints when awaiting another key with bindings like `g` and `m`. It is attached to the viewport as a whole. `Surface` is like a buffer to which widgets draw themselves to, and the surface is then rendered on the screen on each cycle. `Rect`s are areas (simply an x and y coordinate with the origin at the screen top left and then a height and width) which are part of a `Surface`. They can be used to limit the area to which a `Component` can render. For example if we wrap a `Markdown` component in a `Popup` (think the documentation popup with space+k), Markdown's render method will get a Rect that is the exact size of the popup. Widgets are called `Component`s internally, and you can see most of them in `helix-term/src/ui`. Some components like `Popup` and `Overlay` can take other components as children. `Layer`s are how multiple components are displayed, and is simply a `Vec`. Layers are managed by the `Compositor`. On each top level render call, the compositor renders each component in the order they were pushed into the stack. This makes multiple components "layer" on top of one another. Hence we get a file picker displayed over the editor, etc. The `Editor` holds the global state: all the open documents, a tree representation of all the view splits, the configuration, and a registry of language servers. To open or close files, interact with the editor. ## LSP A language server protocol client. ## Term The terminal frontend. The `main` function sets up a new `Application` that runs the event loop. `commands.rs` is probably the most interesting file. 
It contains all commands (actions tied to keybindings). `keymap.rs` links commands to key combinations. ## TUI / Term TODO: document Component and rendering related stuff helix-cbac4273836f5837cec641ab21f365c79b102a4b/docs/releases.md000066400000000000000000000061521500614314100226660ustar00rootroot00000000000000## Checklist Helix releases are versioned in the Calendar Versioning scheme: `YY.0M(.MICRO)`, for example, `22.05` for May of 2022, or in a patch release, `22.05.1`. In these instructions we'll use `` as a placeholder for the tag being published. * Merge the PR with the release updates. That branch should: * Update the version: * Update the `workspace.package.version` key in `Cargo.toml`. Cargo only accepts SemVer versions so a CalVer version of `22.07` for example must be formatted as `22.7.0`. Patch/bugfix releases should increment the SemVer patch number. A patch release for 22.07 would be `22.7.1`. * Run `cargo check` and commit the resulting change to `Cargo.lock` * Add changelog notes to `CHANGELOG.md` * Add new `` entry in `contrib/Helix.appdata.xml` with release information according to the [AppStream spec](https://www.freedesktop.org/software/appstream/docs/sect-Metadata-Releases.html) * Tag and push * Switch to master and pull * `git tag -s -m "" -a && git push origin ` (note the `-s` which signs the tag) * Wait for the Release CI to finish * It will automatically turn the git tag into a GitHub release when it uploads artifacts * Edit the new release * Use `` as the title * Link to the changelog and release notes * Merge the release notes PR * Download the macos and linux binaries and update the `sha256`s in the [homebrew formula] * Use `sha256sum` on the downloaded `.tar.xz` files to determine the hash * Link to the release notes in this-week-in-rust * [Example PR](https://github.com/rust-lang/this-week-in-rust/pull/3300) * Post to reddit * [Example post](https://www.reddit.com/r/rust/comments/uzp5ze/helix_editor_2205_released/) [homebrew 
formula]: https://github.com/Homebrew/homebrew-core/blob/master/Formula/h/helix.rb ## Changelog Curation The changelog is currently created manually by reading through commits in the log since the last release. GitHub's compare view is a nice way to approach this. For example, when creating the 22.07 release notes, this compare link may be used ``` https://github.com/helix-editor/helix/compare/22.05...master ``` Either side of the triple-dot may be replaced with an exact revision, so if you wish to incrementally compile the changelog, you can tackle a weeks worth or so, record the revision where you stopped, and use that as a starting point next week: ``` https://github.com/helix-editor/helix/compare/7706a4a0d8b67b943c31d0c5f7b00d357b5d838d...master ``` A work-in-progress commit for a changelog might look like [this example](https://github.com/helix-editor/helix/commit/831adfd4c709ca16b248799bfef19698d5175e55). Not every PR or commit needs a blurb in the changelog. Each release section tends to have a blurb that links to a GitHub comparison between release versions for convenience: > As usual, the following is a summary of each of the changes since the last > release. For the full log, check out the git log. Typically, small changes like dependencies or documentation updates, refactors, or meta changes like GitHub Actions work are left out. helix-cbac4273836f5837cec641ab21f365c79b102a4b/docs/vision.md000066400000000000000000000063121500614314100223700ustar00rootroot00000000000000The Helix project still has a ways to go before reaching its goals. This document outlines some of those goals and the overall vision for the project. # Vision An efficient, batteries-included editor you can take anywhere and be productive... if it's your kind of thing. * **Cross-platform.** Whether on Linux, Windows, or OSX, you should be able to take your editor with you. 
* **Terminal first.** Not all environments have a windowing system, and you shouldn't have to abandon your preferred editor in those cases. * **Native.** No Electron or HTML DOM here. We want an efficient, native-compiled editor that can run with minimal resources when needed. If you're working on a Raspberry Pi, your editor shouldn't consume half of your RAM. * **Batteries included.** Both the default configuration and bundled features should be enough to have a good editing experience and be productive. You shouldn't need a massive custom config or external executables and plugins for basic features and functionality. * **Don't try to be everything for everyone.** There are many great editors out there to choose from. Let's make Helix *one of* those great options, with its own take on things. # Goals Vision statements are all well and good, but are also vague and subjective. Here is a (non-exhaustive) list of some of Helix's more concrete goals, to help give a clearer idea of the project's direction: * **Modal.** Vim is a great idea. * **Selection -> Action**, not Verb -> Object. Interaction models aren't linguistics, and "selection first" lets you see what you're doing (among other benefits). * **We aren't playing code golf.** It's more important for the keymap to be consistent and easy to memorize than it is to save a key stroke or two when editing. * **Built-in tools** for working with code bases efficiently. Most projects aren't a single file, and an editor should handle that as a first-class use case. In Helix's case, this means (among other things) a fuzzy-search file navigator and LSP support. * **Edit anything** that comes up when coding, within reason. Whether it's a 200 MB XML file, a megabyte of minified javascript on a single line, or Japanese text encoded in ShiftJIS, you should be able to open it and edit it without problems. (Note: this doesn't mean handle every esoteric use case. 
Sometimes you do just need a specialized tool, and Helix isn't that.) * **Configurable**, within reason. Although the defaults should be good, not everyone will agree on what "good" is. Within the bounds of Helix's core interaction models, it should be reasonably configurable so that it can be "good" for more people. This means, for example, custom key maps among other things. * **Extensible**, within reason. Although we want Helix to be productive out-of-the-box, it's not practical or desirable to cram every useful feature and use case into the core editor. The basics should be built-in, but you should be able to extend it with additional functionality as needed. * **Clean code base.** Sometimes other factors (e.g. significant performance gains, important features, correctness, etc.) will trump strict readability, but we nevertheless want to keep the code base straightforward and easy to understand to the extent we can. helix-cbac4273836f5837cec641ab21f365c79b102a4b/flake.lock000066400000000000000000000037661500614314100215550ustar00rootroot00000000000000{ "nodes": { "flake-utils": { "inputs": { "systems": "systems" }, "locked": { "lastModified": 1731533236, "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", "owner": "numtide", "repo": "flake-utils", "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", "type": "github" }, "original": { "owner": "numtide", "repo": "flake-utils", "type": "github" } }, "nixpkgs": { "locked": { "lastModified": 1740560979, "narHash": "sha256-Vr3Qi346M+8CjedtbyUevIGDZW8LcA1fTG0ugPY/Hic=", "owner": "nixos", "repo": "nixpkgs", "rev": "5135c59491985879812717f4c9fea69604e7f26f", "type": "github" }, "original": { "owner": "nixos", "ref": "nixos-unstable", "repo": "nixpkgs", "type": "github" } }, "root": { "inputs": { "flake-utils": "flake-utils", "nixpkgs": "nixpkgs", "rust-overlay": "rust-overlay" } }, "rust-overlay": { "inputs": { "nixpkgs": [ "nixpkgs" ] }, "locked": { "lastModified": 1740623427, "narHash": 
"sha256-3SdPQrZoa4odlScFDUHd4CUPQ/R1gtH4Mq9u8CBiK8M=", "owner": "oxalica", "repo": "rust-overlay", "rev": "d342e8b5fd88421ff982f383c853f0fc78a847ab", "type": "github" }, "original": { "owner": "oxalica", "repo": "rust-overlay", "type": "github" } }, "systems": { "locked": { "lastModified": 1681028828, "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", "owner": "nix-systems", "repo": "default", "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", "type": "github" }, "original": { "owner": "nix-systems", "repo": "default", "type": "github" } } }, "root": "root", "version": 7 } helix-cbac4273836f5837cec641ab21f365c79b102a4b/flake.nix000066400000000000000000000054441500614314100214160ustar00rootroot00000000000000{ description = "A post-modern text editor."; inputs = { nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable"; flake-utils.url = "github:numtide/flake-utils"; rust-overlay = { url = "github:oxalica/rust-overlay"; inputs.nixpkgs.follows = "nixpkgs"; }; }; outputs = { self, nixpkgs, flake-utils, rust-overlay, ... }: let gitRev = self.rev or self.dirtyRev or null; in flake-utils.lib.eachDefaultSystem (system: let pkgs = import nixpkgs { inherit system; overlays = [(import rust-overlay)]; }; # Get Helix's MSRV toolchain to build with by default. msrvToolchain = pkgs.pkgsBuildHost.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml; msrvPlatform = pkgs.makeRustPlatform { cargo = msrvToolchain; rustc = msrvToolchain; }; in { packages = rec { helix = pkgs.callPackage ./default.nix {inherit gitRev;}; /** The default Helix build. Uses the latest stable Rust toolchain, and unstable nixpkgs. 
The build inputs can be overriden with the following: packages.${system}.default.override { rustPlatform = newPlatform; }; Overriding a derivation attribute can be done as well: packages.${system}.default.overrideAttrs { buildType = "debug"; }; */ default = helix; }; checks.helix = self.outputs.packages.${system}.helix.override { buildType = "debug"; rustPlatform = msrvPlatform; }; # Devshell behavior is preserved. devShells.default = let commonRustFlagsEnv = "-C link-arg=-fuse-ld=lld -C target-cpu=native --cfg tokio_unstable"; platformRustFlagsEnv = pkgs.lib.optionalString pkgs.stdenv.isLinux "-Clink-arg=-Wl,--no-rosegment"; in pkgs.mkShell { inputsFrom = [self.checks.${system}.helix]; nativeBuildInputs = with pkgs; [ lld cargo-flamegraph rust-bin.nightly.latest.rust-analyzer ] ++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) cargo-tarpaulin) ++ (lib.optional stdenv.isLinux lldb) ++ (lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.CoreFoundation); shellHook = '' export RUST_BACKTRACE="1" export RUSTFLAGS="''${RUSTFLAGS:-""} ${commonRustFlagsEnv} ${platformRustFlagsEnv}" ''; }; }) // { overlays.default = final: prev: { helix = final.callPackage ./default.nix {inherit gitRev;}; }; }; nixConfig = { extra-substituters = ["https://helix.cachix.org"]; extra-trusted-public-keys = ["helix.cachix.org-1:ejp9KQpR1FBI2onstMQ34yogDm4OgU2ru6lIwPvuCVs="]; }; } helix-cbac4273836f5837cec641ab21f365c79b102a4b/grammars.nix000066400000000000000000000101551500614314100221400ustar00rootroot00000000000000{ stdenv, lib, runCommandLocal, runCommand, yj, includeGrammarIf ? _: true, grammarOverlays ? [], ... 
}: let # HACK: nix < 2.6 has a bug in the toml parser, so we convert to JSON # before parsing languages-json = runCommandLocal "languages-toml-to-json" {} '' ${yj}/bin/yj -t < ${./languages.toml} > $out ''; languagesConfig = if lib.versionAtLeast builtins.nixVersion "2.6.0" then builtins.fromTOML (builtins.readFile ./languages.toml) else builtins.fromJSON (builtins.readFile (builtins.toPath languages-json)); isGitGrammar = grammar: builtins.hasAttr "source" grammar && builtins.hasAttr "git" grammar.source && builtins.hasAttr "rev" grammar.source; isGitHubGrammar = grammar: lib.hasPrefix "https://github.com" grammar.source.git; toGitHubFetcher = url: let match = builtins.match "https://github\.com/([^/]*)/([^/]*)/?" url; in { owner = builtins.elemAt match 0; repo = builtins.elemAt match 1; }; # If `use-grammars.only` is set, use only those grammars. # If `use-grammars.except` is set, use all other grammars. # Otherwise use all grammars. useGrammar = grammar: if languagesConfig ? use-grammars.only then builtins.elem grammar.name languagesConfig.use-grammars.only else if languagesConfig ? 
use-grammars.except then !(builtins.elem grammar.name languagesConfig.use-grammars.except) else true; grammarsToUse = builtins.filter useGrammar languagesConfig.grammar; gitGrammars = builtins.filter isGitGrammar grammarsToUse; buildGrammar = grammar: let gh = toGitHubFetcher grammar.source.git; sourceGit = builtins.fetchTree { type = "git"; url = grammar.source.git; rev = grammar.source.rev; ref = grammar.source.ref or "HEAD"; shallow = true; }; sourceGitHub = builtins.fetchTree { type = "github"; owner = gh.owner; repo = gh.repo; inherit (grammar.source) rev; }; source = if isGitHubGrammar grammar then sourceGitHub else sourceGit; in stdenv.mkDerivation { # see https://github.com/NixOS/nixpkgs/blob/fbdd1a7c0bc29af5325e0d7dd70e804a972eb465/pkgs/development/tools/parsing/tree-sitter/grammar.nix pname = "helix-tree-sitter-${grammar.name}"; version = grammar.source.rev; src = source; sourceRoot = if builtins.hasAttr "subpath" grammar.source then "source/${grammar.source.subpath}" else "source"; dontConfigure = true; FLAGS = [ "-Isrc" "-g" "-O3" "-fPIC" "-fno-exceptions" "-Wl,-z,relro,-z,now" ]; NAME = grammar.name; buildPhase = '' runHook preBuild if [[ -e src/scanner.cc ]]; then $CXX -c src/scanner.cc -o scanner.o $FLAGS elif [[ -e src/scanner.c ]]; then $CC -c src/scanner.c -o scanner.o $FLAGS fi $CC -c src/parser.c -o parser.o $FLAGS $CXX -shared -o $NAME.so *.o ls -al runHook postBuild ''; installPhase = '' runHook preInstall mkdir $out mv $NAME.so $out/ runHook postInstall ''; # Strip failed on darwin: strip: error: symbols referenced by indirect symbol table entries that can't be stripped fixupPhase = lib.optionalString stdenv.isLinux '' runHook preFixup $STRIP $out/$NAME.so runHook postFixup ''; }; grammarsToBuild = builtins.filter includeGrammarIf gitGrammars; builtGrammars = builtins.map (grammar: { inherit (grammar) name; value = buildGrammar grammar; }) grammarsToBuild; extensibleGrammars = lib.makeExtensible (self: builtins.listToAttrs builtGrammars); 
overlaidGrammars = lib.pipe extensibleGrammars (builtins.map (overlay: grammar: grammar.extend overlay) grammarOverlays); grammarLinks = lib.mapAttrsToList (name: artifact: "ln -s ${artifact}/${name}.so $out/${name}.so") (lib.filterAttrs (n: v: lib.isDerivation v) overlaidGrammars); in runCommand "consolidated-helix-grammars" {} '' mkdir -p $out ${builtins.concatStringsSep "\n" grammarLinks} '' helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/000077500000000000000000000000001500614314100216445ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/.gitignore000066400000000000000000000000231500614314100236270ustar00rootroot00000000000000/target Cargo.lock helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/Cargo.toml000066400000000000000000000031231500614314100235730ustar00rootroot00000000000000[package] name = "helix-core" description = "Helix editor core editing primitives" include = ["src/**/*", "README.md"] version.workspace = true authors.workspace = true edition.workspace = true license.workspace = true rust-version.workspace = true categories.workspace = true repository.workspace = true homepage.workspace = true [features] unicode-lines = ["ropey/unicode_lines"] integration = [] [dependencies] helix-stdx = { path = "../helix-stdx" } helix-loader = { path = "../helix-loader" } helix-parsec = { path = "../helix-parsec" } ropey.workspace = true smallvec = "1.15" smartstring = "1.0.1" unicode-segmentation.workspace = true # unicode-width is changing width definitions # that both break our logic and disagree with common # width definitions in terminals, we need to replace it. 
# For now lets lock the version to avoid rendering glitches # when installing without `--locked` unicode-width = "=0.1.12" unicode-general-category = "1.0" slotmap.workspace = true tree-sitter.workspace = true once_cell = "1.21" arc-swap = "1" regex = "1" bitflags.workspace = true ahash = "0.8.11" hashbrown = { version = "0.14.5", features = ["raw"] } url = "2.5.4" log = "0.4" anyhow = "1.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" toml = "0.8" imara-diff = "0.1.8" encoding_rs = "0.8" chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] } textwrap = "0.16.2" nucleo.workspace = true parking_lot.workspace = true globset = "0.4.16" regex-cursor = "0.1.5" [dev-dependencies] quickcheck = { version = "1", default-features = false } indoc = "2.0.6" helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/000077500000000000000000000000001500614314100224335ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/auto_pairs.rs000066400000000000000000000303351500614314100251530ustar00rootroot00000000000000//! When typing the opening character of one of the possible pairs defined below, //! this module provides the functionality to insert the paired closing character. use crate::{graphemes, movement::Direction, Range, Rope, Selection, Tendril, Transaction}; use std::collections::HashMap; use smallvec::SmallVec; // Heavily based on https://github.com/codemirror/closebrackets/ pub const DEFAULT_PAIRS: &[(char, char)] = &[ ('(', ')'), ('{', '}'), ('[', ']'), ('\'', '\''), ('"', '"'), ('`', '`'), ]; /// The type that represents the collection of auto pairs, /// keyed by both opener and closer. #[derive(Debug, Clone)] pub struct AutoPairs(HashMap); /// Represents the config for a particular pairing. 
#[derive(Debug, Clone, Copy)] pub struct Pair { pub open: char, pub close: char, } impl Pair { /// true if open == close pub fn same(&self) -> bool { self.open == self.close } /// true if all of the pair's conditions hold for the given document and range pub fn should_close(&self, doc: &Rope, range: &Range) -> bool { let mut should_close = Self::next_is_not_alpha(doc, range); if self.same() { should_close &= Self::prev_is_not_alpha(doc, range); } should_close } pub fn next_is_not_alpha(doc: &Rope, range: &Range) -> bool { let cursor = range.cursor(doc.slice(..)); let next_char = doc.get_char(cursor); next_char.map(|c| !c.is_alphanumeric()).unwrap_or(true) } pub fn prev_is_not_alpha(doc: &Rope, range: &Range) -> bool { let cursor = range.cursor(doc.slice(..)); let prev_char = prev_char(doc, cursor); prev_char.map(|c| !c.is_alphanumeric()).unwrap_or(true) } } impl From<&(char, char)> for Pair { fn from(&(open, close): &(char, char)) -> Self { Self { open, close } } } impl From<(&char, &char)> for Pair { fn from((open, close): (&char, &char)) -> Self { Self { open: *open, close: *close, } } } impl AutoPairs { /// Make a new AutoPairs set with the given pairs and default conditions. pub fn new<'a, V, A>(pairs: V) -> Self where V: IntoIterator + 'a, A: Into, { let mut auto_pairs = HashMap::new(); for pair in pairs.into_iter() { let auto_pair = pair.into(); auto_pairs.insert(auto_pair.open, auto_pair); if auto_pair.open != auto_pair.close { auto_pairs.insert(auto_pair.close, auto_pair); } } Self(auto_pairs) } pub fn get(&self, ch: char) -> Option<&Pair> { self.0.get(&ch) } } impl Default for AutoPairs { fn default() -> Self { AutoPairs::new(DEFAULT_PAIRS.iter()) } } // insert hook: // Fn(doc, selection, char) => Option // problem is, we want to do this per range, so we can call default handler for some ranges // so maybe ret Vec> // but we also need to be able to return transactions... 
// // to simplify, maybe return Option and just reimplement the default // [TODO] // * delete implementation where it erases the whole bracket (|) -> | // * change to multi character pairs to handle cases like placing the cursor in the // middle of triple quotes, and more exotic pairs like Jinja's {% %} #[must_use] pub fn hook(doc: &Rope, selection: &Selection, ch: char, pairs: &AutoPairs) -> Option { log::trace!("autopairs hook selection: {:#?}", selection); if let Some(pair) = pairs.get(ch) { if pair.same() { return Some(handle_same(doc, selection, pair)); } else if pair.open == ch { return Some(handle_open(doc, selection, pair)); } else if pair.close == ch { // && char_at pos == close return Some(handle_close(doc, selection, pair)); } } None } fn prev_char(doc: &Rope, pos: usize) -> Option { if pos == 0 { return None; } doc.get_char(pos - 1) } /// calculate what the resulting range should be for an auto pair insertion fn get_next_range(doc: &Rope, start_range: &Range, offset: usize, len_inserted: usize) -> Range { // When the character under the cursor changes due to complete pair // insertion, we must look backward a grapheme and then add the length // of the insertion to put the resulting cursor in the right place, e.g. 
// // foo[\r\n] - anchor: 3, head: 5 // foo([)]\r\n - anchor: 4, head: 5 // // foo[\r\n] - anchor: 3, head: 5 // foo'[\r\n] - anchor: 4, head: 6 // // foo([)]\r\n - anchor: 4, head: 5 // foo()[\r\n] - anchor: 5, head: 7 // // [foo]\r\n - anchor: 0, head: 3 // [foo(])\r\n - anchor: 0, head: 5 // inserting at the very end of the document after the last newline if start_range.head == doc.len_chars() && start_range.anchor == doc.len_chars() { return Range::new( start_range.anchor + offset + 1, start_range.head + offset + 1, ); } let doc_slice = doc.slice(..); let single_grapheme = start_range.is_single_grapheme(doc_slice); // just skip over graphemes if len_inserted == 0 { let end_anchor = if single_grapheme { graphemes::next_grapheme_boundary(doc_slice, start_range.anchor) + offset // even for backward inserts with multiple grapheme selections, // we want the anchor to stay where it is so that the relative // selection does not change, e.g.: // // foo([) wor]d -> insert ) -> foo()[ wor]d } else { start_range.anchor + offset }; return Range::new( end_anchor, graphemes::next_grapheme_boundary(doc_slice, start_range.head) + offset, ); } // trivial case: only inserted a single-char opener, just move the selection if len_inserted == 1 { let end_anchor = if single_grapheme || start_range.direction() == Direction::Backward { start_range.anchor + offset + 1 } else { start_range.anchor + offset }; return Range::new(end_anchor, start_range.head + offset + 1); } // If the head = 0, then we must be in insert mode with a backward // cursor, which implies the head will just move let end_head = if start_range.head == 0 || start_range.direction() == Direction::Backward { start_range.head + offset + 1 } else { // We must have a forward cursor, which means we must move to the // other end of the grapheme to get to where the new characters // are inserted, then move the head to where it should be let prev_bound = graphemes::prev_grapheme_boundary(doc_slice, start_range.head); 
log::trace!( "prev_bound: {}, offset: {}, len_inserted: {}", prev_bound, offset, len_inserted ); prev_bound + offset + len_inserted }; let end_anchor = match (start_range.len(), start_range.direction()) { // if we have a zero width cursor, it shifts to the same number (0, _) => end_head, // If we are inserting for a regular one-width cursor, the anchor // moves with the head. This is the fast path for ASCII. (1, Direction::Forward) => end_head - 1, (1, Direction::Backward) => end_head + 1, (_, Direction::Forward) => { if single_grapheme { graphemes::prev_grapheme_boundary(doc.slice(..), start_range.head) + 1 // if we are appending, the anchor stays where it is; only offset // for multiple range insertions } else { start_range.anchor + offset } } (_, Direction::Backward) => { if single_grapheme { // if we're backward, then the head is at the first char // of the typed char, so we need to add the length of // the closing char graphemes::prev_grapheme_boundary(doc.slice(..), start_range.anchor) + len_inserted + offset } else { // when we are inserting in front of a selection, we need to move // the anchor over by however many characters were inserted overall start_range.anchor + offset + len_inserted } } }; Range::new(end_anchor, end_head) } fn handle_open(doc: &Rope, selection: &Selection, pair: &Pair) -> Transaction { let mut end_ranges = SmallVec::with_capacity(selection.len()); let mut offs = 0; let transaction = Transaction::change_by_selection(doc, selection, |start_range| { let cursor = start_range.cursor(doc.slice(..)); let next_char = doc.get_char(cursor); let len_inserted; // Since auto pairs are currently limited to single chars, we're either // inserting exactly one or two chars. When arbitrary length pairs are // added, these will need to be changed. 
let change = match next_char { Some(_) if !pair.should_close(doc, start_range) => { len_inserted = 1; let mut tendril = Tendril::new(); tendril.push(pair.open); (cursor, cursor, Some(tendril)) } _ => { // insert open & close let pair_str = Tendril::from_iter([pair.open, pair.close]); len_inserted = 2; (cursor, cursor, Some(pair_str)) } }; let next_range = get_next_range(doc, start_range, offs, len_inserted); end_ranges.push(next_range); offs += len_inserted; change }); let t = transaction.with_selection(Selection::new(end_ranges, selection.primary_index())); log::debug!("auto pair transaction: {:#?}", t); t } fn handle_close(doc: &Rope, selection: &Selection, pair: &Pair) -> Transaction { let mut end_ranges = SmallVec::with_capacity(selection.len()); let mut offs = 0; let transaction = Transaction::change_by_selection(doc, selection, |start_range| { let cursor = start_range.cursor(doc.slice(..)); let next_char = doc.get_char(cursor); let mut len_inserted = 0; let change = if next_char == Some(pair.close) { // return transaction that moves past close (cursor, cursor, None) // no-op } else { len_inserted = 1; let mut tendril = Tendril::new(); tendril.push(pair.close); (cursor, cursor, Some(tendril)) }; let next_range = get_next_range(doc, start_range, offs, len_inserted); end_ranges.push(next_range); offs += len_inserted; change }); let t = transaction.with_selection(Selection::new(end_ranges, selection.primary_index())); log::debug!("auto pair transaction: {:#?}", t); t } /// handle cases where open and close is the same, or in triples ("""docstring""") fn handle_same(doc: &Rope, selection: &Selection, pair: &Pair) -> Transaction { let mut end_ranges = SmallVec::with_capacity(selection.len()); let mut offs = 0; let transaction = Transaction::change_by_selection(doc, selection, |start_range| { let cursor = start_range.cursor(doc.slice(..)); let mut len_inserted = 0; let next_char = doc.get_char(cursor); let change = if next_char == Some(pair.open) { // return 
transaction that moves past close (cursor, cursor, None) // no-op } else { let mut pair_str = Tendril::new(); pair_str.push(pair.open); // for equal pairs, don't insert both open and close if either // side has a non-pair char if pair.should_close(doc, start_range) { pair_str.push(pair.close); } len_inserted += pair_str.chars().count(); (cursor, cursor, Some(pair_str)) }; let next_range = get_next_range(doc, start_range, offs, len_inserted); end_ranges.push(next_range); offs += len_inserted; change }); let t = transaction.with_selection(Selection::new(end_ranges, selection.primary_index())); log::debug!("auto pair transaction: {:#?}", t); t } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/case_conversion.rs000066400000000000000000000034131500614314100261620ustar00rootroot00000000000000use crate::Tendril; // todo: should this be grapheme aware? pub fn to_pascal_case(text: impl Iterator) -> Tendril { let mut res = Tendril::new(); to_pascal_case_with(text, &mut res); res } pub fn to_pascal_case_with(text: impl Iterator, buf: &mut Tendril) { let mut at_word_start = true; for c in text { // we don't count _ as a word char here so case conversions work well if !c.is_alphanumeric() { at_word_start = true; continue; } if at_word_start { at_word_start = false; buf.extend(c.to_uppercase()); } else { buf.push(c) } } } pub fn to_upper_case_with(text: impl Iterator, buf: &mut Tendril) { for c in text { for c in c.to_uppercase() { buf.push(c) } } } pub fn to_lower_case_with(text: impl Iterator, buf: &mut Tendril) { for c in text { for c in c.to_lowercase() { buf.push(c) } } } pub fn to_camel_case(text: impl Iterator) -> Tendril { let mut res = Tendril::new(); to_camel_case_with(text, &mut res); res } pub fn to_camel_case_with(mut text: impl Iterator, buf: &mut Tendril) { for c in &mut text { if c.is_alphanumeric() { buf.extend(c.to_lowercase()) } } let mut at_word_start = false; for c in text { // we don't count _ as a word char here so case conversions work well 
if !c.is_alphanumeric() { at_word_start = true; continue; } if at_word_start { at_word_start = false; buf.extend(c.to_uppercase()); } else { buf.push(c) } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/chars.rs000066400000000000000000000104161500614314100241030ustar00rootroot00000000000000//! Utility functions to categorize a `char`. use crate::LineEnding; #[derive(Debug, Eq, PartialEq)] pub enum CharCategory { Whitespace, Eol, Word, Punctuation, Unknown, } #[inline] pub fn categorize_char(ch: char) -> CharCategory { if char_is_line_ending(ch) { CharCategory::Eol } else if ch.is_whitespace() { CharCategory::Whitespace } else if char_is_word(ch) { CharCategory::Word } else if char_is_punctuation(ch) { CharCategory::Punctuation } else { CharCategory::Unknown } } /// Determine whether a character is a line ending. #[inline] pub fn char_is_line_ending(ch: char) -> bool { LineEnding::from_char(ch).is_some() } /// Determine whether a character qualifies as (non-line-break) /// whitespace. #[inline] pub fn char_is_whitespace(ch: char) -> bool { // TODO: this is a naive binary categorization of whitespace // characters. For display, word wrapping, etc. we'll need a better // categorization based on e.g. breaking vs non-breaking spaces // and whether they're zero-width or not. match ch { //'\u{1680}' | // Ogham Space Mark (here for completeness, but usually displayed as a dash, not as whitespace) '\u{0009}' | // Character Tabulation '\u{0020}' | // Space '\u{00A0}' | // No-break Space '\u{180E}' | // Mongolian Vowel Separator '\u{202F}' | // Narrow No-break Space '\u{205F}' | // Medium Mathematical Space '\u{3000}' | // Ideographic Space '\u{FEFF}' // Zero Width No-break Space => true, // En Quad, Em Quad, En Space, Em Space, Three-per-em Space, // Four-per-em Space, Six-per-em Space, Figure Space, // Punctuation Space, Thin Space, Hair Space, Zero Width Space. 
ch if ('\u{2000}' ..= '\u{200B}').contains(&ch) => true, _ => false, } } #[inline] pub fn char_is_punctuation(ch: char) -> bool { use unicode_general_category::{get_general_category, GeneralCategory}; matches!( get_general_category(ch), GeneralCategory::OtherPunctuation | GeneralCategory::OpenPunctuation | GeneralCategory::ClosePunctuation | GeneralCategory::InitialPunctuation | GeneralCategory::FinalPunctuation | GeneralCategory::ConnectorPunctuation | GeneralCategory::DashPunctuation | GeneralCategory::MathSymbol | GeneralCategory::CurrencySymbol | GeneralCategory::ModifierSymbol ) } #[inline] pub fn char_is_word(ch: char) -> bool { ch.is_alphanumeric() || ch == '_' } #[cfg(test)] mod test { use super::*; #[test] fn test_categorize() { #[cfg(not(feature = "unicode-lines"))] const EOL_TEST_CASE: &str = "\n"; #[cfg(feature = "unicode-lines")] const EOL_TEST_CASE: &str = "\n\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}"; const WORD_TEST_CASE: &str = "_hello_world_あいうえおー12345678901234567890"; const PUNCTUATION_TEST_CASE: &str = "!\"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~!”#$%&’()*+、。:;<=>?@「」^`{|}~"; const WHITESPACE_TEST_CASE: &str = "      "; for ch in EOL_TEST_CASE.chars() { assert_eq!(CharCategory::Eol, categorize_char(ch)); } for ch in WHITESPACE_TEST_CASE.chars() { assert_eq!( CharCategory::Whitespace, categorize_char(ch), "Testing '{}', but got `{:?}` instead of `Category::Whitespace`", ch, categorize_char(ch) ); } for ch in WORD_TEST_CASE.chars() { assert_eq!( CharCategory::Word, categorize_char(ch), "Testing '{}', but got `{:?}` instead of `Category::Word`", ch, categorize_char(ch) ); } for ch in PUNCTUATION_TEST_CASE.chars() { assert_eq!( CharCategory::Punctuation, categorize_char(ch), "Testing '{}', but got `{:?}` instead of `Category::Punctuation`", ch, categorize_char(ch) ); } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/command_line.rs000066400000000000000000001341741500614314100254400ustar00rootroot00000000000000//! 
Types and parsing code for command mode (`:`) input. //! //! Command line parsing is done in steps: //! //! * The `Tokenizer` iterator returns `Token`s from the command line input naively - without //! accounting for a command's signature. //! * When executing a command (pressing `` in command mode), tokens are expanded with //! information from the editor like the current cursor line or column. Otherwise the tokens //! are unwrapped to their inner content. //! * `Args` interprets the contents (potentially expanded) as flags or positional arguments. //! When executing a command, `Args` performs validations like checking the number of positional //! arguments supplied and whether duplicate or unknown flags were supplied. //! //! `Args` is the interface used by typable command implementations. `Args` may be treated as a //! slice of `Cow` or `&str` to access positional arguments, for example `for arg in args` //! iterates over positional args (never flags) and `&args[0]` always corresponds to the first //! positional. Use `Args::has_flag` and `Args::get_flag` to read any specified flags. //! //! `Args` and `Tokenizer` are intertwined. `Args` may ask the `Tokenizer` for the rest of the //! command line as a single token after the configured number of positionals has been reached //! (according to `raw_after`). This is used for the custom parsing in `:set-option` and //! `:toggle-option` for example. Outside of executing commands, the `Tokenizer` can be used //! directly to interpret a string according to the regular tokenization rules. //! //! This module also defines structs for configuring the parsing of the command line for a //! command. See `Flag` and `Signature`. use std::{borrow::Cow, collections::HashMap, error::Error, fmt, ops, slice, vec}; /// Splits a command line into the command and arguments parts. /// /// The third tuple member describes whether the command part is finished. 
When this boolean is /// true the completion code for the command line should complete command names, otherwise /// command arguments. pub fn split(line: &str) -> (&str, &str, bool) { const SEPARATOR_PATTERN: [char; 2] = [' ', '\t']; let (command, rest) = line.split_once(SEPARATOR_PATTERN).unwrap_or((line, "")); let complete_command = command.is_empty() || (rest.trim().is_empty() && !line.ends_with(SEPARATOR_PATTERN)); (command, rest, complete_command) } /// A Unix-like flag that a command may accept. /// /// For example the `:sort` command accepts a `--reverse` (or `-r` for shorthand) boolean flag /// which controls the direction of sorting. Flags may accept an argument by setting the /// `completions` field to `Some`. #[derive(Debug, Clone, Copy)] pub struct Flag { /// The name of the flag. /// /// This value is also used to construct the "longhand" version of the flag. For example a /// flag with a name "reverse" has a longhand `--reverse`. /// /// This value should be supplied when reading a flag out of the [Args] with [Args::get_flag] /// and [Args::has_flag]. The `:sort` command implementation for example should ask for /// `args.has_flag("reverse")`. pub name: &'static str, /// The character that can be used as a shorthand for the flag, optionally. /// /// For example a flag like "reverse" mentioned above might take an alias `Some('r')` to /// allow specifying the flag as `-r`. pub alias: Option, pub doc: &'static str, /// The completion values to use when specifying an argument for a flag. /// /// This should be set to `None` for boolean flags and `Some(&["foo", "bar", "baz"])` for /// example for flags which accept options, with the strings corresponding to values that /// should be shown in completion. pub completions: Option<&'static [&'static str]>, } impl Flag { // This allows defining flags with the `..Flag::DEFAULT` shorthand. The `name` and `doc` // fields should always be overwritten. 
pub const DEFAULT: Self = Self { name: "", doc: "", alias: None, completions: None, }; } /// A description of how a command's input should be handled. /// /// Each typable command defines a signature (with the help of `Signature::DEFAULT`) at least to /// declare how many positional arguments it accepts. Command flags are also declared in this /// struct. The `raw_after` option may be set optionally to avoid evaluating quotes in parts of /// the command line (useful for shell commands for example). #[derive(Debug, Clone, Copy)] #[allow(clippy::manual_non_exhaustive)] pub struct Signature { /// The minimum and (optionally) maximum number of positional arguments a command may take. /// /// For example accepting exactly one positional can be specified with `(1, Some(1))` while /// accepting zero-or-more positionals can be specified as `(0, None)`. /// /// The number of positionals is checked when hitting `` in command mode. If the actual /// number of positionals is outside the declared range then the command is not executed and /// an error is shown instead. For example `:write` accepts zero or one positional arguments /// (`(0, Some(1))`). A command line like `:write a.txt b.txt` is outside the declared range /// and is not accepted. pub positionals: (usize, Option), /// The number of **positional** arguments for the parser to read with normal quoting rules. /// /// Once the number has been exceeded then the tokenizer returns the rest of the input as a /// `TokenKind::Expand` token (see `Tokenizer::rest`), meaning that quoting rules do not apply /// and none of the remaining text may be treated as a flag. /// /// If this is set to `None` then the entire command line is parsed with normal quoting and /// flag rules. /// /// A good example use-case for this option is `:toggle-option` which sets `Some(1)`. /// Everything up to the first positional argument is interpreted according to normal rules /// and the rest of the input is parsed "raw". 
This allows `:toggle-option` to perform custom /// parsing on the rest of the input - namely parsing complicated values as a JSON stream. /// `:toggle-option` could accept a flag in the future. If so, the flag would need to come /// before the first positional argument. /// /// Consider these lines for `:toggle-option` which sets `Some(1)`: /// /// * `:toggle foo` has one positional "foo" and no flags. /// * `:toggle foo bar` has two positionals. Expansions for `bar` are evaluated but quotes /// and anything that looks like a flag are treated literally. /// * `:toggle foo --bar` has two positionals: `["foo", "--bar"]`. `--bar` is not considered /// to be a flag because it comes after the first positional. /// * `:toggle --bar foo` has one positional "foo" and one flag "--bar". /// * `:toggle --bar foo --baz` has two positionals `["foo", "--baz"]` and one flag "--bar". pub raw_after: Option, /// A set of flags that a command may accept. /// /// See the `Flag` struct for more info. pub flags: &'static [Flag], /// Do not set this field. Use `..Signature::DEFAULT` to construct a `Signature` instead. // This field allows adding new fields later with minimal code changes. This works like a // `#[non_exhaustive]` annotation except that it supports the `..Signature::DEFAULT` // shorthand. pub _dummy: (), } impl Signature { // This allows defining signatures with the `..Signature::DEFAULT` shorthand. The // `positionals` field should always be overwritten. 
pub const DEFAULT: Self = Self { positionals: (0, None), raw_after: None, flags: &[], _dummy: (), }; fn check_positional_count(&self, actual: usize) -> Result<(), ParseArgsError<'static>> { let (min, max) = self.positionals; if min <= actual && max.unwrap_or(usize::MAX) >= actual { Ok(()) } else { Err(ParseArgsError::WrongPositionalCount { min, max, actual }) } } } #[derive(Debug, PartialEq, Eq)] pub enum ParseArgsError<'a> { WrongPositionalCount { min: usize, max: Option, actual: usize, }, UnterminatedToken { token: Token<'a>, }, DuplicatedFlag { flag: &'static str, }, UnknownFlag { text: Cow<'a, str>, }, FlagMissingArgument { flag: &'static str, }, MissingExpansionDelimiter { expansion: &'a str, }, UnknownExpansion { kind: &'a str, }, } impl fmt::Display for ParseArgsError<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::WrongPositionalCount { min, max, actual } => { write!(f, "expected ")?; let maybe_plural = |n| if n == 1 { "" } else { "s" }; match (min, max) { (0, Some(0)) => write!(f, "no arguments")?, (min, Some(max)) if min == max => { write!(f, "exactly {min} argument{}", maybe_plural(*min))? } (min, _) if actual < min => { write!(f, "at least {min} argument{}", maybe_plural(*min))? } (_, Some(max)) if actual > max => { write!(f, "at most {max} argument{}", maybe_plural(*max))? } // `actual` must be either less than `min` or greater than `max` for this type // to be constructed. _ => unreachable!(), } write!(f, ", got {actual}") } Self::UnterminatedToken { token } => { write!(f, "unterminated token {}", token.content) } Self::DuplicatedFlag { flag } => { write!(f, "flag '--{flag}' specified more than once") } Self::UnknownFlag { text } => write!(f, "unknown flag '{text}'"), Self::FlagMissingArgument { flag } => { write!(f, "flag '--{flag}' missing an argument") } Self::MissingExpansionDelimiter { expansion } => { if expansion.is_empty() { write!(f, "'%' was not properly escaped. 
Please use '%%'") } else { write!(f, "missing a string delimiter after '%{expansion}'") } } Self::UnknownExpansion { kind } => { write!(f, "unknown expansion '{kind}'") } } } } impl Error for ParseArgsError<'_> {} /// The kind of expansion to use on the token's content. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum ExpansionKind { /// Expand variables from the editor's state. /// /// For example `%{cursor_line}`. Variable, /// Treat the token contents as hexadecimal corresponding to a Unicode codepoint value. /// /// For example `%u{25CF}`. Unicode, /// Run the token's contents via the configured shell program. /// /// For example `%sh{echo hello}`. Shell, } impl ExpansionKind { pub const VARIANTS: &'static [Self] = &[Self::Variable, Self::Unicode, Self::Shell]; pub const fn as_str(&self) -> &'static str { match self { Self::Variable => "", Self::Unicode => "u", Self::Shell => "sh", } } pub fn from_kind(name: &str) -> Option { match name { "" => Some(Self::Variable), "u" => Some(Self::Unicode), "sh" => Some(Self::Shell), _ => None, } } } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Quote { Single, Backtick, } impl Quote { pub const fn char(&self) -> char { match self { Self::Single => '\'', Self::Backtick => '`', } } // Quotes can be escaped by doubling them: `'hello '' world'` becomes `hello ' world`. pub const fn escape(&self) -> &'static str { match self { Self::Single => "''", Self::Backtick => "``", } } } /// The type of argument being written. /// /// The token kind decides how an argument in the command line will be expanded upon hitting /// `` in command mode. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum TokenKind { /// Unquoted text. /// /// For example in `:echo hello world`, "hello" and "world" are raw tokens. Unquoted, /// Quoted text which is interpreted literally. /// /// The purpose of this kind is to avoid splitting arguments on whitespace. 
For example /// `:open 'a b.txt'` will result in opening a file with a single argument `"a b.txt"`. /// /// Using expansions within single quotes or backticks will result in the expansion text /// being shown literally. For example `:echo '%u{0020}'` will print `"%u{0020}"` to the /// statusline. Quoted(Quote), /// Text within double quote delimiters (`"`). /// /// The inner text of a double quoted argument can be further expanded. For example /// `:echo "line: #%{cursor_line}"` could print `"line: #1"` to the statusline. Expand, /// An expansion / "percent token". /// /// These take the form `%[]`. See `ExpansionKind`. Expansion(ExpansionKind), /// A token kind that exists for the sake of completion. /// /// In input like `%foo` this token contains the text `"%foo"`. The content start is the byte /// after the percent token. /// /// When `Tokenizer` is passed `true` for its `validate` parameter this token cannot be /// returned: inputs that would return this token get a validation error instead. ExpansionKind, } #[derive(Debug, Clone, PartialEq, Eq)] pub struct Token<'a> { pub kind: TokenKind, /// The byte index into the input where the token's content starts. /// /// For quoted text this means the byte after the quote. For expansions this means the byte /// after the opening delimiter. pub content_start: usize, /// The inner content of the token. /// /// Usually this content borrows from the input but an owned value may be used in cases of /// escaping. On Unix systems a raw token like `a\ b` has the contents `"a b"`. pub content: Cow<'a, str>, /// Whether the token's opening delimiter is closed. /// /// For example a quote `"foo"` is closed but not `"foo` or an expansion `%sh{..}` is closed /// but not `%sh{echo {}`. 
pub is_terminated: bool, } impl<'a> Token<'a> { pub fn empty_at(content_start: usize) -> Self { Self { kind: TokenKind::Unquoted, content_start, content: Cow::Borrowed(""), is_terminated: false, } } pub fn expand(content: impl Into>) -> Self { Self { kind: TokenKind::Expand, content_start: 0, content: content.into(), is_terminated: true, } } } #[derive(Debug)] pub struct Tokenizer<'a> { input: &'a str, /// Whether to return errors in the iterator for failed validations like unterminated strings /// or expansions. When this is set to `false` the iterator will never return `Err`. validate: bool, /// The current byte index of the input being considered. pos: usize, } impl<'a> Tokenizer<'a> { pub fn new(input: &'a str, validate: bool) -> Self { Self { input, validate, pos: 0, } } /// Returns the current byte index position of the parser in the input. pub fn pos(&self) -> usize { self.pos } /// Returns the rest of the input as a single `TokenKind::Expand` token literally. /// /// Returns `None` if the tokenizer is already at the end of the input or advances the /// tokenizer to the end of the input otherwise. Leading whitespace characters are skipped. /// Quoting is not interpreted. pub fn rest(&mut self) -> Option> { self.skip_blanks(); if self.pos == self.input.len() { return None; } let content_start = self.pos; self.pos = self.input.len(); Some(Token { kind: TokenKind::Expand, content_start, content: Cow::Borrowed(&self.input[content_start..]), is_terminated: false, }) } fn byte(&self) -> Option { self.input.as_bytes().get(self.pos).copied() } fn peek_byte(&self) -> Option { self.input.as_bytes().get(self.pos + 1).copied() } fn prev_byte(&self) -> Option { self.pos .checked_sub(1) .map(|idx| self.input.as_bytes()[idx]) } fn skip_blanks(&mut self) { while let Some(b' ' | b'\t') = self.byte() { self.pos += 1; } } fn parse_unquoted(&mut self) -> Cow<'a, str> { // Note that `String::new` starts with no allocation. 
We only allocate if we see a // backslash escape (on Unix only). let mut escaped = String::new(); let mut start = self.pos; while let Some(byte) = self.byte() { if matches!(byte, b' ' | b'\t') { if cfg!(unix) && self.prev_byte() == Some(b'\\') { // Push everything up to but not including the backslash and then this // whitespace character. escaped.push_str(&self.input[start..self.pos - 1]); escaped.push(byte as char); start = self.pos + 1; } else if escaped.is_empty() { return Cow::Borrowed(&self.input[start..self.pos]); } else { break; } } self.pos += 1; } // Special case for a trailing backslash on Unix: exclude the backslash from the content. // This improves the behavior of completions like `":open a\\"` (trailing backslash). let end = if cfg!(unix) && self.prev_byte() == Some(b'\\') { self.pos - 1 } else { self.pos }; if escaped.is_empty() { assert_eq!(self.pos, self.input.len()); Cow::Borrowed(&self.input[start..end]) } else { escaped.push_str(&self.input[start..end]); Cow::Owned(escaped) } } /// Parses a string quoted by the given grapheme cluster. /// /// The position of the tokenizer is asserted to be immediately after the quote grapheme /// cluster. fn parse_quoted(&mut self, quote: u8) -> (Cow<'a, str>, bool) { assert_eq!(self.byte(), Some(quote)); self.pos += 1; let mut escaped = String::new(); while let Some(offset) = self.input[self.pos..].find(quote as char) { let idx = self.pos + offset; if self.input.as_bytes().get(idx + 1) == Some("e) { // Treat two quotes in a row as an escape. escaped.push_str(&self.input[self.pos..idx + 1]); // Advance past the escaped quote. self.pos = idx + 2; } else { // Otherwise this quote string is finished. let quoted = if escaped.is_empty() { Cow::Borrowed(&self.input[self.pos..idx]) } else { escaped.push_str(&self.input[self.pos..idx]); Cow::Owned(escaped) }; // Advance past the closing quote. 
self.pos = idx + 1; return (quoted, true); } } let quoted = if escaped.is_empty() { Cow::Borrowed(&self.input[self.pos..]) } else { escaped.push_str(&self.input[self.pos..]); Cow::Owned(escaped) }; self.pos = self.input.len(); (quoted, false) } /// Parses the percent token expansion under the tokenizer's cursor. /// /// This function should only be called when the tokenizer's cursor is on a non-escaped /// percent token. pub fn parse_percent_token(&mut self) -> Option, ParseArgsError<'a>>> { assert_eq!(self.byte(), Some(b'%')); self.pos += 1; let kind_start = self.pos; self.pos += self.input[self.pos..] .bytes() .take_while(|b| b.is_ascii_lowercase()) .count(); let kind = &self.input[kind_start..self.pos]; let (open, close) = match self.byte() { // We support a couple of hard-coded chars only to make sure we can provide more // useful errors and avoid weird behavior in case of typos. These should cover // practical cases. Some(b'(') => (b'(', b')'), Some(b'[') => (b'[', b']'), Some(b'{') => (b'{', b'}'), Some(b'<') => (b'<', b'>'), Some(b'\'') => (b'\'', b'\''), Some(b'\"') => (b'\"', b'\"'), Some(b'|') => (b'|', b'|'), Some(_) | None => { return Some(if self.validate { Err(ParseArgsError::MissingExpansionDelimiter { expansion: kind }) } else { Ok(Token { kind: TokenKind::ExpansionKind, content_start: kind_start, content: Cow::Borrowed(kind), is_terminated: false, }) }); } }; // The content start for expansions is the start of the content - after the opening // delimiter grapheme. 
let content_start = self.pos + 1; let kind = match ExpansionKind::from_kind(kind) { Some(kind) => TokenKind::Expansion(kind), None if self.validate => { return Some(Err(ParseArgsError::UnknownExpansion { kind })); } None => TokenKind::Expand, }; let (content, is_terminated) = if open == close { self.parse_quoted(open) } else { self.parse_quoted_balanced(open, close) }; let token = Token { kind, content_start, content, is_terminated, }; if self.validate && !is_terminated { return Some(Err(ParseArgsError::UnterminatedToken { token })); } Some(Ok(token)) } /// Parse the next string under the cursor given an open and closing pair. /// /// The open and closing pair are different ASCII characters. The cursor is asserted to be /// immediately after the opening delimiter. /// /// This function parses with nesting support. `%sh{echo {hello}}` for example should consume /// the entire input and not quit after the first '}' character is found. fn parse_quoted_balanced(&mut self, open: u8, close: u8) -> (Cow<'a, str>, bool) { assert_eq!(self.byte(), Some(open)); self.pos += 1; let start = self.pos; let mut level = 1; while let Some(offset) = self.input[self.pos..].find([open as char, close as char]) { let idx = self.pos + offset; // Move past the delimiter. self.pos = idx + 1; let byte = self.input.as_bytes()[idx]; if byte == open { level += 1; } else if byte == close { level -= 1; if level == 0 { break; } } else { unreachable!() } } let is_terminated = level == 0; let end = if is_terminated { // Exclude the closing delimiter from the token's content. self.pos - 1 } else { // When the token is not closed, advance to the end of the input. 
self.pos = self.input.len(); self.pos }; (Cow::Borrowed(&self.input[start..end]), is_terminated) } } impl<'a> Iterator for Tokenizer<'a> { type Item = Result, ParseArgsError<'a>>; fn next(&mut self) -> Option { self.skip_blanks(); let byte = self.byte()?; match byte { b'"' | b'\'' | b'`' => { let content_start = self.pos + 1; let (content, is_terminated) = self.parse_quoted(byte); let token = Token { kind: match byte { b'"' => TokenKind::Expand, b'\'' => TokenKind::Quoted(Quote::Single), b'`' => TokenKind::Quoted(Quote::Backtick), _ => unreachable!(), }, content_start, content, is_terminated, }; Some(if self.validate && !is_terminated { Err(ParseArgsError::UnterminatedToken { token }) } else { Ok(token) }) } b'%' => self.parse_percent_token(), _ => { let content_start = self.pos; // Allow backslash escaping on Unix for quotes or expansions if cfg!(unix) && byte == b'\\' && matches!(self.peek_byte(), Some(b'"' | b'\'' | b'`' | b'%')) { self.pos += 1; } Some(Ok(Token { kind: TokenKind::Unquoted, content_start, content: self.parse_unquoted(), is_terminated: false, })) } } } } #[derive(Debug, Default, Clone, Copy)] pub enum CompletionState { #[default] Positional, Flag(Option), FlagArgument(Flag), } /// A set of arguments provided to a command on the command line. /// /// Regular arguments are called "positional" arguments (or "positionals" for short). Command line /// input might also specify "flags" which can modify a command's behavior. /// /// ```rust,ignore /// // Say that the command accepts a "bar" flag which doesn't accept an argument itself. /// // This input has two positionals, "foo" and "baz" and one flag "--bar". /// let args = Args::parse("foo --bar baz", /* .. */); /// // `Args` may be treated like a slice to access positionals. /// assert_eq!(args.len(), 2); /// assert_eq!(&args[0], "foo"); /// assert_eq!(&args[1], "baz"); /// // Use `has_flag` or `get_flag` to access flags. 
/// assert!(args.has_flag("bar")); /// ``` /// /// The `Args` type can be treated mostly the same as a slice when accessing positional arguments. /// Common slice methods like `len`, `get`, `first` and `join` only expose positional arguments. /// Additionally, common syntax like `for arg in args` or `&args[idx]` is supported for accessing /// positional arguments. /// /// To look up flags, use `Args::get_flag` for flags which should accept an argument or /// `Args::has_flag` for boolean flags. /// /// The way that `Args` is parsed from the input depends on a command's `Signature`. See the /// `Signature` type for more details. #[derive(Debug)] pub struct Args<'a> { signature: Signature, /// Whether to validate the arguments. /// See the `ParseArgsError` type for the validations. validate: bool, /// Whether args pushed with `Self::push` should be treated as positionals even if they /// start with '-'. only_positionals: bool, state: CompletionState, positionals: Vec>, flags: HashMap<&'static str, Cow<'a, str>>, } impl Default for Args<'_> { fn default() -> Self { Self { signature: Signature::DEFAULT, validate: Default::default(), only_positionals: Default::default(), state: CompletionState::default(), positionals: Default::default(), flags: Default::default(), } } } impl<'a> Args<'a> { pub fn new(signature: Signature, validate: bool) -> Self { Self { signature, validate, only_positionals: false, positionals: Vec::new(), flags: HashMap::new(), state: CompletionState::default(), } } /// Reads the next token out of the given parser. /// /// If the command's signature sets a maximum number of positionals (via `raw_after`) then /// the token may contain the rest of the parser's input. 
pub fn read_token<'p>( &mut self, parser: &mut Tokenizer<'p>, ) -> Result>, ParseArgsError<'p>> { if self .signature .raw_after .is_some_and(|max| self.len() >= max as usize) { self.only_positionals = true; Ok(parser.rest()) } else { parser.next().transpose() } } /// Parses the given command line according to a command's signature. /// /// The `try_map_fn` function can be used to try changing each token before it is considered /// as an argument - this is used for variable expansion. pub fn parse( line: &'a str, signature: Signature, validate: bool, mut try_map_fn: M, ) -> Result> where // Note: this is a `FnMut` in case we decide to allow caching expansions in the future. // The `mut` is not currently used. M: FnMut(Token<'a>) -> Result, Box>, { let mut tokenizer = Tokenizer::new(line, validate); let mut args = Self::new(signature, validate); while let Some(token) = args.read_token(&mut tokenizer)? { let arg = try_map_fn(token)?; args.push(arg)?; } args.finish()?; Ok(args) } /// Adds the given argument token. /// /// Once all arguments have been added, `Self::finish` should be called to perform any /// closing validations. pub fn push(&mut self, arg: Cow<'a, str>) -> Result<(), ParseArgsError<'a>> { if !self.only_positionals && arg == "--" { // "--" marks the end of flags, everything after is a positional even if it starts // with '-'. self.only_positionals = true; self.state = CompletionState::Flag(None); } else if let Some(flag) = self.flag_awaiting_argument() { // If the last token was a flag which accepts an argument, treat this token as a flag // argument. self.flags.insert(flag.name, arg); self.state = CompletionState::FlagArgument(flag); } else if !self.only_positionals && arg.starts_with('-') { // If the token starts with '-' and we are not only accepting positional arguments, // treat this token as a flag. 
let flag = if let Some(longhand) = arg.strip_prefix("--") { self.signature .flags .iter() .find(|flag| flag.name == longhand) } else { let shorthand = arg.strip_prefix('-').unwrap(); self.signature.flags.iter().find(|flag| { flag.alias .is_some_and(|ch| shorthand == ch.encode_utf8(&mut [0; 4])) }) }; let Some(flag) = flag else { if self.validate { return Err(ParseArgsError::UnknownFlag { text: arg }); } self.positionals.push(arg); self.state = CompletionState::Flag(None); return Ok(()); }; if self.validate && self.flags.contains_key(flag.name) { return Err(ParseArgsError::DuplicatedFlag { flag: flag.name }); } self.flags.insert(flag.name, Cow::Borrowed("")); self.state = CompletionState::Flag(Some(*flag)); } else { // Otherwise this token is a positional argument. self.positionals.push(arg); self.state = CompletionState::Positional; } Ok(()) } /// Performs any validations that must be done after the input args are finished being pushed /// with `Self::push`. fn finish(&self) -> Result<(), ParseArgsError<'a>> { if !self.validate { return Ok(()); }; if let Some(flag) = self.flag_awaiting_argument() { return Err(ParseArgsError::FlagMissingArgument { flag: flag.name }); } self.signature .check_positional_count(self.positionals.len())?; Ok(()) } fn flag_awaiting_argument(&self) -> Option { match self.state { CompletionState::Flag(flag) => flag.filter(|f| f.completions.is_some()), _ => None, } } /// Returns the kind of argument the last token is considered to be. /// /// For example if the last argument in the command line is `--foo` then the argument may be /// considered to be a flag. pub fn completion_state(&self) -> CompletionState { self.state } /// Returns the number of positionals supplied in the input. /// /// This number does not account for any flags passed in the input. pub fn len(&self) -> usize { self.positionals.len() } /// Checks whether the arguments contain no positionals. 
/// /// Note that this function returns `true` if there are no positional arguments even if the /// input contained flags. pub fn is_empty(&self) -> bool { self.positionals.is_empty() } /// Gets the first positional argument, if one exists. pub fn first(&'a self) -> Option<&'a str> { self.positionals.first().map(AsRef::as_ref) } /// Gets the positional argument at the given index, if one exists. pub fn get(&'a self, index: usize) -> Option<&'a str> { self.positionals.get(index).map(AsRef::as_ref) } /// Flattens all positional arguments together with the given separator between each /// positional. pub fn join(&self, sep: &str) -> String { self.positionals.join(sep) } /// Returns an iterator over all positional arguments. pub fn iter(&self) -> slice::Iter<'_, Cow<'_, str>> { self.positionals.iter() } /// Gets the value associated with a flag's long name if the flag was provided. /// /// This function should be preferred over [Self::has_flag] when the flag accepts an argument. pub fn get_flag(&'a self, name: &'static str) -> Option<&'a str> { debug_assert!( self.signature.flags.iter().any(|flag| flag.name == name), "flag '--{name}' does not belong to the command's signature" ); debug_assert!( self.signature .flags .iter() .any(|flag| flag.name == name && flag.completions.is_some()), "Args::get_flag was used for '--{name}' but should only be used for flags with arguments, use Args::has_flag instead" ); self.flags.get(name).map(AsRef::as_ref) } /// Checks if a flag was provided in the arguments. /// /// This function should be preferred over [Self::get_flag] for boolean flags - flags that /// either are present or not. 
pub fn has_flag(&self, name: &'static str) -> bool { debug_assert!( self.signature.flags.iter().any(|flag| flag.name == name), "flag '--{name}' does not belong to the command's signature" ); debug_assert!( self.signature .flags .iter() .any(|flag| flag.name == name && flag.completions.is_none()), "Args::has_flag was used for '--{name}' but should only be used for flags without arguments, use Args::get_flag instead" ); self.flags.contains_key(name) } } // `arg[n]` impl ops::Index for Args<'_> { type Output = str; fn index(&self, index: usize) -> &Self::Output { self.positionals[index].as_ref() } } // `for arg in args { .. }` impl<'a> IntoIterator for Args<'a> { type Item = Cow<'a, str>; type IntoIter = vec::IntoIter>; fn into_iter(self) -> Self::IntoIter { self.positionals.into_iter() } } // `for arg in &args { .. }` impl<'i, 'a> IntoIterator for &'i Args<'a> { type Item = &'i Cow<'a, str>; type IntoIter = slice::Iter<'i, Cow<'a, str>>; fn into_iter(self) -> Self::IntoIter { self.positionals.iter() } } #[cfg(test)] mod test { use super::*; #[track_caller] fn assert_tokens(input: &str, expected: &[&str]) { let actual: Vec<_> = Tokenizer::new(input, true) .map(|arg| arg.unwrap().content) .collect(); let actual: Vec<_> = actual.iter().map(|c| c.as_ref()).collect(); assert_eq!(actual.as_slice(), expected); } #[track_caller] fn assert_incomplete_tokens(input: &str, expected: &[&str]) { assert!( Tokenizer::new(input, true).collect::, _>>().is_err(), "`assert_incomplete_tokens` only accepts input that fails validation, consider using `assert_tokens` instead" ); let actual: Vec<_> = Tokenizer::new(input, false) .map(|arg| arg.unwrap().content) .collect(); let actual: Vec<_> = actual.iter().map(|c| c.as_ref()).collect(); assert_eq!(actual.as_slice(), expected); } #[test] fn tokenize_unquoted() { assert_tokens("", &[]); assert_tokens("hello", &["hello"]); assert_tokens("hello world", &["hello", "world"]); // Any amount of whitespace is considered a separator. 
assert_tokens("hello\t \tworld", &["hello", "world"]); } // This escaping behavior is specific to Unix systems. #[cfg(unix)] #[test] fn tokenize_backslash_unix() { assert_tokens(r#"hello\ world"#, &["hello world"]); assert_tokens(r#"one\ two three"#, &["one two", "three"]); assert_tokens(r#"one two\ three"#, &["one", "two three"]); // Trailing backslash is ignored - this improves completions. assert_tokens(r#"hello\"#, &["hello"]); // The backslash at the start of the double quote makes the quote be treated as raw. // For the backslash before the ending quote the token is already considered raw so the // backslash and quote are treated literally. assert_tokens( r#"echo \"hello world\""#, &["echo", r#""hello"#, r#"world\""#], ); } #[test] fn tokenize_backslash() { assert_tokens(r#"\n"#, &["\\n"]); assert_tokens(r#"'\'"#, &["\\"]); } #[test] fn tokenize_quoting() { // Using a quote character twice escapes it. assert_tokens(r#"''"#, &[""]); assert_tokens(r#""""#, &[""]); assert_tokens(r#"``"#, &[""]); assert_tokens(r#"echo """#, &["echo", ""]); assert_tokens(r#"'hello'"#, &["hello"]); assert_tokens(r#"'hello world'"#, &["hello world"]); assert_tokens(r#""hello "" world""#, &["hello \" world"]); } #[test] fn tokenize_percent() { // Pair delimiters: assert_tokens(r#"echo %{hello world}"#, &["echo", "hello world"]); assert_tokens(r#"echo %[hello world]"#, &["echo", "hello world"]); assert_tokens(r#"echo %(hello world)"#, &["echo", "hello world"]); assert_tokens(r#"echo %"#, &["echo", "hello world"]); assert_tokens(r#"echo %|hello world|"#, &["echo", "hello world"]); assert_tokens(r#"echo %'hello world'"#, &["echo", "hello world"]); assert_tokens(r#"echo %"hello world""#, &["echo", "hello world"]); // When invoking a command, double percents can be used within a string as an escape for // the percent. This is done in the expansion code though, not in the parser here. 
assert_tokens(r#"echo "%%hello world""#, &["echo", "%%hello world"]); // Different kinds of quotes nested: assert_tokens( r#"echo "%sh{echo 'hello world'}""#, &["echo", r#"%sh{echo 'hello world'}"#], ); // Nesting of the expansion delimiter: assert_tokens(r#"echo %{hello {x} world}"#, &["echo", "hello {x} world"]); assert_tokens( r#"echo %{hello {{😎}} world}"#, &["echo", "hello {{😎}} world"], ); // Balanced nesting: assert_tokens( r#"echo %{hello {}} world}"#, &["echo", "hello {}", "world}"], ); // Recursive expansions: assert_tokens( r#"echo %sh{echo "%{cursor_line}"}"#, &["echo", r#"echo "%{cursor_line}""#], ); // Completion should provide variable names here. (Unbalanced nesting) assert_incomplete_tokens(r#"echo %sh{echo "%{c"#, &["echo", r#"echo "%{c"#]); assert_incomplete_tokens(r#"echo %{hello {{} world}"#, &["echo", "hello {{} world}"]); } fn parse_signature<'a>( input: &'a str, signature: Signature, ) -> Result, Box> { Args::parse(input, signature, true, |token| Ok(token.content)) } #[test] fn signature_validation_positionals() { let signature = Signature { positionals: (2, Some(3)), ..Signature::DEFAULT }; assert!(parse_signature("hello world", signature).is_ok()); assert!(parse_signature("foo bar baz", signature).is_ok()); assert!(parse_signature(r#"a "b c" d"#, signature).is_ok()); assert!(parse_signature("hello", signature).is_err()); assert!(parse_signature("foo bar baz quiz", signature).is_err()); let signature = Signature { positionals: (1, None), ..Signature::DEFAULT }; assert!(parse_signature("a", signature).is_ok()); assert!(parse_signature("a b", signature).is_ok()); assert!(parse_signature(r#"a "b c" d"#, signature).is_ok()); assert!(parse_signature("", signature).is_err()); } #[test] fn flags() { let signature = Signature { positionals: (1, Some(2)), flags: &[ Flag { name: "foo", alias: Some('f'), doc: "", completions: None, }, Flag { name: "bar", alias: Some('b'), doc: "", completions: Some(&[]), }, ], ..Signature::DEFAULT }; let args = 
parse_signature("hello", signature).unwrap(); assert_eq!(args.len(), 1); assert_eq!(&args[0], "hello"); assert!(!args.has_flag("foo")); assert!(args.get_flag("bar").is_none()); let args = parse_signature("--bar abcd hello world --foo", signature).unwrap(); assert_eq!(args.len(), 2); assert_eq!(&args[0], "hello"); assert_eq!(&args[1], "world"); assert!(args.has_flag("foo")); assert_eq!(args.get_flag("bar"), Some("abcd")); let args = parse_signature("hello -f -b abcd world", signature).unwrap(); assert_eq!(args.len(), 2); assert_eq!(&args[0], "hello"); assert_eq!(&args[1], "world"); assert!(args.has_flag("foo")); assert_eq!(args.get_flag("bar"), Some("abcd")); // The signature requires at least one positional. assert!(parse_signature("--foo", signature).is_err()); // And at most two. assert!(parse_signature("abc --bar baz def efg", signature).is_err()); let args = parse_signature(r#"abc -b "xyz 123" def"#, signature).unwrap(); assert_eq!(args.len(), 2); assert_eq!(&args[0], "abc"); assert_eq!(&args[1], "def"); assert_eq!(args.get_flag("bar"), Some("xyz 123")); // Unknown flags are validation errors. assert!(parse_signature(r#"foo --quiz"#, signature).is_err()); // Duplicated flags are parsing errors. assert!(parse_signature(r#"--foo bar --foo"#, signature).is_err()); assert!(parse_signature(r#"-f bar --foo"#, signature).is_err()); // "--" can be used to mark the end of flags. Everything after is considered a positional. let args = parse_signature(r#"hello --bar baz -- --foo"#, signature).unwrap(); assert_eq!(args.len(), 2); assert_eq!(&args[0], "hello"); assert_eq!(&args[1], "--foo"); assert_eq!(args.get_flag("bar"), Some("baz")); assert!(!args.has_flag("foo")); } #[test] fn raw_after() { let signature = Signature { positionals: (1, Some(1)), raw_after: Some(0), ..Signature::DEFAULT }; // All quoting and escaping is treated literally in raw mode. 
let args = parse_signature(r#"'\'"#, signature).unwrap(); assert_eq!(args.len(), 1); assert_eq!(&args[0], "'\\'"); let args = parse_signature(r#"\''"#, signature).unwrap(); assert_eq!(args.len(), 1); assert_eq!(&args[0], "\\''"); // Leading space is trimmed. let args = parse_signature(r#" %sh{foo}"#, signature).unwrap(); assert_eq!(args.len(), 1); assert_eq!(&args[0], "%sh{foo}"); let signature = Signature { positionals: (1, Some(2)), raw_after: Some(1), ..Signature::DEFAULT }; let args = parse_signature("foo", signature).unwrap(); assert_eq!(args.len(), 1); assert_eq!(&args[0], "foo"); // "--bar" is treated as a positional. let args = parse_signature("foo --bar", signature).unwrap(); assert_eq!(args.len(), 2); assert_eq!(&args[0], "foo"); assert_eq!(&args[1], "--bar"); let args = parse_signature("abc def ghi", signature).unwrap(); assert_eq!(args.len(), 2); assert_eq!(&args[0], "abc"); assert_eq!(&args[1], "def ghi"); let args = parse_signature("rulers [20, 30]", signature).unwrap(); assert_eq!(args.len(), 2); assert_eq!(&args[0], "rulers"); assert_eq!(&args[1], "[20, 30]"); let args = parse_signature(r#"gutters ["diff"] ["diff", "diagnostics"]"#, signature).unwrap(); assert_eq!(args.len(), 2); assert_eq!(&args[0], "gutters"); assert_eq!(&args[1], r#"["diff"] ["diff", "diagnostics"]"#); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/comment.rs000066400000000000000000000410041500614314100244420ustar00rootroot00000000000000//! This module contains the functionality toggle comments on lines over the selection //! using the comment character defined in the user's `languages.toml` use smallvec::SmallVec; use crate::{ syntax::BlockCommentToken, Change, Range, Rope, RopeSlice, Selection, Tendril, Transaction, }; use helix_stdx::rope::RopeSliceExt; use std::borrow::Cow; pub const DEFAULT_COMMENT_TOKEN: &str = "#"; /// Returns the longest matching comment token of the given line (if it exists). 
pub fn get_comment_token<'a, S: AsRef>( text: RopeSlice, tokens: &'a [S], line_num: usize, ) -> Option<&'a str> { let line = text.line(line_num); let start = line.first_non_whitespace_char()?; tokens .iter() .map(AsRef::as_ref) .filter(|token| line.slice(start..).starts_with(token)) .max_by_key(|token| token.len()) } /// Given text, a comment token, and a set of line indices, returns the following: /// - Whether the given lines should be considered commented /// - If any of the lines are uncommented, all lines are considered as such. /// - The lines to change for toggling comments /// - This is all provided lines excluding blanks lines. /// - The column of the comment tokens /// - Column of existing tokens, if the lines are commented; column to place tokens at otherwise. /// - The margin to the right of the comment tokens /// - Defaults to `1`. If any existing comment token is not followed by a space, changes to `0`. fn find_line_comment( token: &str, text: RopeSlice, lines: impl IntoIterator, ) -> (bool, Vec, usize, usize) { let mut commented = true; let mut to_change = Vec::new(); let mut min = usize::MAX; // minimum col for first_non_whitespace_char let mut margin = 1; let token_len = token.chars().count(); for line in lines { let line_slice = text.line(line); if let Some(pos) = line_slice.first_non_whitespace_char() { let len = line_slice.len_chars(); min = std::cmp::min(min, pos); // line can be shorter than pos + token len let fragment = Cow::from(line_slice.slice(pos..std::cmp::min(pos + token.len(), len))); // as soon as one of the non-blank lines doesn't have a comment, the whole block is // considered uncommented. if fragment != token { commented = false; } // determine margin of 0 or 1 for uncommenting; if any comment token is not followed by a space, // a margin of 0 is used for all lines. if !matches!(line_slice.get_char(pos + token_len), Some(c) if c == ' ') { margin = 0; } // blank lines don't get pushed. 
to_change.push(line); } } (commented, to_change, min, margin) } #[must_use] pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&str>) -> Transaction { let text = doc.slice(..); let token = token.unwrap_or(DEFAULT_COMMENT_TOKEN); let comment = Tendril::from(format!("{} ", token)); let mut lines: Vec = Vec::with_capacity(selection.len()); let mut min_next_line = 0; for selection in selection { let (start, end) = selection.line_range(text); let start = start.clamp(min_next_line, text.len_lines()); let end = (end + 1).min(text.len_lines()); lines.extend(start..end); min_next_line = end; } let (commented, to_change, min, margin) = find_line_comment(token, text, lines); let mut changes: Vec = Vec::with_capacity(to_change.len()); for line in to_change { let pos = text.line_to_char(line) + min; if !commented { // comment line changes.push((pos, pos, Some(comment.clone()))); } else { // uncomment line changes.push((pos, pos + token.len() + margin, None)); } } Transaction::change(doc, changes.into_iter()) } #[derive(Debug, PartialEq, Eq)] pub enum CommentChange { Commented { range: Range, start_pos: usize, end_pos: usize, start_margin: bool, end_margin: bool, start_token: String, end_token: String, }, Uncommented { range: Range, start_pos: usize, end_pos: usize, start_token: String, end_token: String, }, Whitespace { range: Range, }, } pub fn find_block_comments( tokens: &[BlockCommentToken], text: RopeSlice, selection: &Selection, ) -> (bool, Vec) { let mut commented = true; let mut only_whitespace = true; let mut comment_changes = Vec::with_capacity(selection.len()); let default_tokens = tokens.first().cloned().unwrap_or_default(); let mut start_token = default_tokens.start.clone(); let mut end_token = default_tokens.end.clone(); let mut tokens = tokens.to_vec(); // sort the tokens by length, so longer tokens will match first tokens.sort_by(|a, b| { if a.start.len() == b.start.len() { b.end.len().cmp(&a.end.len()) } else { 
b.start.len().cmp(&a.start.len()) } }); for range in selection { let selection_slice = range.slice(text); if let (Some(start_pos), Some(end_pos)) = ( selection_slice.first_non_whitespace_char(), selection_slice.last_non_whitespace_char(), ) { let mut line_commented = false; let mut after_start = 0; let mut before_end = 0; let len = (end_pos + 1) - start_pos; for BlockCommentToken { start, end } in &tokens { let start_len = start.chars().count(); let end_len = end.chars().count(); after_start = start_pos + start_len; before_end = end_pos.saturating_sub(end_len); if len >= start_len + end_len { let start_fragment = selection_slice.slice(start_pos..after_start); let end_fragment = selection_slice.slice(before_end + 1..end_pos + 1); // block commented with these tokens if start_fragment == start.as_str() && end_fragment == end.as_str() { start_token = start.to_string(); end_token = end.to_string(); line_commented = true; break; } } } if !line_commented { comment_changes.push(CommentChange::Uncommented { range: *range, start_pos, end_pos, start_token: default_tokens.start.clone(), end_token: default_tokens.end.clone(), }); commented = false; } else { comment_changes.push(CommentChange::Commented { range: *range, start_pos, end_pos, start_margin: selection_slice.get_char(after_start) == Some(' '), end_margin: after_start != before_end && (selection_slice.get_char(before_end) == Some(' ')), start_token: start_token.to_string(), end_token: end_token.to_string(), }); } only_whitespace = false; } else { comment_changes.push(CommentChange::Whitespace { range: *range }); } } if only_whitespace { commented = false; } (commented, comment_changes) } #[must_use] pub fn create_block_comment_transaction( doc: &Rope, selection: &Selection, commented: bool, comment_changes: Vec, ) -> (Transaction, SmallVec<[Range; 1]>) { let mut changes: Vec = Vec::with_capacity(selection.len() * 2); let mut ranges: SmallVec<[Range; 1]> = SmallVec::with_capacity(selection.len()); let mut offs = 0; for 
change in comment_changes { if commented { if let CommentChange::Commented { range, start_pos, end_pos, start_token, end_token, start_margin, end_margin, } = change { let from = range.from(); changes.push(( from + start_pos, from + start_pos + start_token.len() + start_margin as usize, None, )); changes.push(( from + end_pos - end_token.len() - end_margin as usize + 1, from + end_pos + 1, None, )); } } else { // uncommented so manually map ranges through changes match change { CommentChange::Uncommented { range, start_pos, end_pos, start_token, end_token, } => { let from = range.from(); changes.push(( from + start_pos, from + start_pos, Some(Tendril::from(format!("{} ", start_token))), )); changes.push(( from + end_pos + 1, from + end_pos + 1, Some(Tendril::from(format!(" {}", end_token))), )); let offset = start_token.chars().count() + end_token.chars().count() + 2; ranges.push( Range::new(from + offs, from + offs + end_pos + 1 + offset) .with_direction(range.direction()), ); offs += offset; } CommentChange::Commented { range, .. 
} | CommentChange::Whitespace { range } => { ranges.push(Range::new(range.from() + offs, range.to() + offs)); } } } } (Transaction::change(doc, changes.into_iter()), ranges) } #[must_use] pub fn toggle_block_comments( doc: &Rope, selection: &Selection, tokens: &[BlockCommentToken], ) -> Transaction { let text = doc.slice(..); let (commented, comment_changes) = find_block_comments(tokens, text, selection); let (mut transaction, ranges) = create_block_comment_transaction(doc, selection, commented, comment_changes); if !commented { transaction = transaction.with_selection(Selection::new(ranges, selection.primary_index())); } transaction } pub fn split_lines_of_selection(text: RopeSlice, selection: &Selection) -> Selection { let mut ranges = SmallVec::new(); for range in selection.ranges() { let (line_start, line_end) = range.line_range(text.slice(..)); let mut pos = text.line_to_char(line_start); for line in text.slice(pos..text.line_to_char(line_end + 1)).lines() { let start = pos; pos += line.len_chars(); ranges.push(Range::new(start, pos)); } } Selection::new(ranges, 0) } #[cfg(test)] mod test { use super::*; mod find_line_comment { use super::*; #[test] fn not_commented() { // four lines, two space indented, except for line 1 which is blank. let doc = Rope::from(" 1\n\n 2\n 3"); let text = doc.slice(..); let res = find_line_comment("//", text, 0..3); // (commented = false, to_change = [line 0, line 2], min = col 2, margin = 0) assert_eq!(res, (false, vec![0, 2], 2, 0)); } #[test] fn is_commented() { // three lines where the second line is empty. let doc = Rope::from("// hello\n\n// there"); let res = find_line_comment("//", doc.slice(..), 0..3); // (commented = true, to_change = [line 0, line 2], min = col 0, margin = 1) assert_eq!(res, (true, vec![0, 2], 0, 1)); } } // TODO: account for uncommenting with uneven comment indentation mod toggle_line_comment { use super::*; #[test] fn comment() { // four lines, two space indented, except for line 1 which is blank. 
let mut doc = Rope::from(" 1\n\n 2\n 3"); // select whole document let selection = Selection::single(0, doc.len_chars() - 1); let transaction = toggle_line_comments(&doc, &selection, None); transaction.apply(&mut doc); assert_eq!(doc, " # 1\n\n # 2\n # 3"); } #[test] fn uncomment() { let mut doc = Rope::from(" # 1\n\n # 2\n # 3"); let mut selection = Selection::single(0, doc.len_chars() - 1); let transaction = toggle_line_comments(&doc, &selection, None); transaction.apply(&mut doc); selection = selection.map(transaction.changes()); assert_eq!(doc, " 1\n\n 2\n 3"); assert!(selection.len() == 1); // to ignore the selection unused warning } #[test] fn uncomment_0_margin_comments() { let mut doc = Rope::from(" #1\n\n #2\n #3"); let mut selection = Selection::single(0, doc.len_chars() - 1); let transaction = toggle_line_comments(&doc, &selection, None); transaction.apply(&mut doc); selection = selection.map(transaction.changes()); assert_eq!(doc, " 1\n\n 2\n 3"); assert!(selection.len() == 1); // to ignore the selection unused warning } #[test] fn uncomment_0_margin_comments_with_no_space() { let mut doc = Rope::from("#"); let mut selection = Selection::single(0, doc.len_chars() - 1); let transaction = toggle_line_comments(&doc, &selection, None); transaction.apply(&mut doc); selection = selection.map(transaction.changes()); assert_eq!(doc, ""); assert!(selection.len() == 1); // to ignore the selection unused warning } } #[test] fn test_find_block_comments() { // three lines 5 characters. 
let mut doc = Rope::from("1\n2\n3"); // select whole document let selection = Selection::single(0, doc.len_chars()); let text = doc.slice(..); let res = find_block_comments(&[BlockCommentToken::default()], text, &selection); assert_eq!( res, ( false, vec![CommentChange::Uncommented { range: Range::new(0, 5), start_pos: 0, end_pos: 4, start_token: "/*".to_string(), end_token: "*/".to_string(), }] ) ); // comment let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]); transaction.apply(&mut doc); assert_eq!(doc, "/* 1\n2\n3 */"); // uncomment let selection = Selection::single(0, doc.len_chars()); let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]); transaction.apply(&mut doc); assert_eq!(doc, "1\n2\n3"); // don't panic when there is just a space in comment doc = Rope::from("/* */"); let selection = Selection::single(0, doc.len_chars()); let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]); transaction.apply(&mut doc); assert_eq!(doc, ""); } /// Test, if `get_comment_tokens` works, even if the content of the file includes chars, whose /// byte size unequal the amount of chars #[test] fn test_get_comment_with_char_boundaries() { let rope = Rope::from("··"); let tokens = ["//", "///"]; assert_eq!( super::get_comment_token(rope.slice(..), tokens.as_slice(), 0), None ); } /// Test for `get_comment_token`. /// /// Assuming the comment tokens are stored as `["///", "//"]`, `get_comment_token` should still /// return `///` instead of `//` if the user is in a doc-comment section. 
#[test] fn test_use_longest_comment() { let text = Rope::from(" /// amogus"); let tokens = ["///", "//"]; assert_eq!( super::get_comment_token(text.slice(..), tokens.as_slice(), 0), Some("///") ); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/completion.rs000066400000000000000000000011461500614314100251540ustar00rootroot00000000000000use std::borrow::Cow; use crate::{diagnostic::LanguageServerId, Transaction}; #[derive(Debug, PartialEq, Clone)] pub struct CompletionItem { pub transaction: Transaction, pub label: Cow<'static, str>, pub kind: Cow<'static, str>, /// Containing Markdown pub documentation: Option, pub provider: CompletionProvider, } #[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] pub enum CompletionProvider { Lsp(LanguageServerId), Path, } impl From for CompletionProvider { fn from(id: LanguageServerId) -> Self { CompletionProvider::Lsp(id) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/config.rs000066400000000000000000000031571500614314100242540ustar00rootroot00000000000000use crate::syntax::{Configuration, Loader, LoaderError}; /// Language configuration based on built-in languages.toml. pub fn default_lang_config() -> Configuration { helix_loader::config::default_lang_config() .try_into() .expect("Could not deserialize built-in languages.toml") } /// Language configuration loader based on built-in languages.toml. 
pub fn default_lang_loader() -> Loader { Loader::new(default_lang_config()).expect("Could not compile loader for default config") } #[derive(Debug)] pub enum LanguageLoaderError { DeserializeError(toml::de::Error), LoaderError(LoaderError), } impl std::fmt::Display for LanguageLoaderError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::DeserializeError(err) => write!(f, "Failed to parse language config: {err}"), Self::LoaderError(err) => write!(f, "Failed to compile language config: {err}"), } } } impl std::error::Error for LanguageLoaderError {} /// Language configuration based on user configured languages.toml. pub fn user_lang_config() -> Result { helix_loader::config::user_lang_config()?.try_into() } /// Language configuration loader based on user configured languages.toml. pub fn user_lang_loader() -> Result { let config: Configuration = helix_loader::config::user_lang_config() .map_err(LanguageLoaderError::DeserializeError)? .try_into() .map_err(LanguageLoaderError::DeserializeError)?; Loader::new(config).map_err(LanguageLoaderError::LoaderError) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/diagnostic.rs000066400000000000000000000061231500614314100251270ustar00rootroot00000000000000//! LSP diagnostic utility types. use std::{fmt, sync::Arc}; pub use helix_stdx::range::Range; use serde::{Deserialize, Serialize}; /// Describes the severity level of a [`Diagnostic`]. 
#[derive(Debug, Clone, Copy, Eq, PartialEq, PartialOrd, Ord, Serialize, Deserialize)] #[serde(rename_all = "lowercase")] pub enum Severity { Hint, Info, Warning, Error, } impl Default for Severity { fn default() -> Self { Self::Hint } } #[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)] pub enum NumberOrString { Number(i32), String(String), } #[derive(Debug, Clone)] pub enum DiagnosticTag { Unnecessary, Deprecated, } /// Corresponds to [`lsp_types::Diagnostic`](https://docs.rs/lsp-types/0.94.0/lsp_types/struct.Diagnostic.html) #[derive(Debug, Clone)] pub struct Diagnostic { pub range: Range, // whether this diagnostic ends at the end of(or inside) a word pub ends_at_word: bool, pub starts_at_word: bool, pub zero_width: bool, pub line: usize, pub message: String, pub severity: Option, pub code: Option, pub provider: DiagnosticProvider, pub tags: Vec, pub source: Option, pub data: Option, } /// The source of a diagnostic. /// /// This type is cheap to clone: all data is either `Copy` or wrapped in an `Arc`. #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] pub enum DiagnosticProvider { Lsp { /// The ID of the language server which sent the diagnostic. server_id: LanguageServerId, /// An optional identifier under which diagnostics are managed by the client. /// /// `identifier` is a field from the LSP "Pull Diagnostics" feature meant to provide an /// optional "namespace" for diagnostics: a language server can respond to a diagnostics /// pull request with an identifier and these diagnostics should be treated as separate /// from push diagnostics. Rust-analyzer uses this feature for example to provide Cargo /// diagnostics with push and internal diagnostics with pull. The push diagnostics should /// not clear the pull diagnostics and vice-versa. identifier: Option>, }, // Future internal features can go here... } impl DiagnosticProvider { pub fn language_server_id(&self) -> Option { match self { Self::Lsp { server_id, .. 
} => Some(*server_id), // _ => None, } } } // while I would prefer having this in helix-lsp that necessitates a bunch of // conversions I would rather not add. I think its fine since this just a very // trivial newtype wrapper and we would need something similar once we define // completions in core slotmap::new_key_type! { pub struct LanguageServerId; } impl fmt::Display for LanguageServerId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:?}", self.0) } } impl Diagnostic { #[inline] pub fn severity(&self) -> Severity { self.severity.unwrap_or(Severity::Warning) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/diff.rs000066400000000000000000000161041500614314100237130ustar00rootroot00000000000000use std::ops::Range; use std::time::Instant; use imara_diff::intern::InternedInput; use imara_diff::Algorithm; use ropey::RopeSlice; use crate::{ChangeSet, Rope, Tendril, Transaction}; /// A `imara_diff::Sink` that builds a `ChangeSet` for a character diff of a hunk struct CharChangeSetBuilder<'a> { res: &'a mut ChangeSet, hunk: &'a InternedInput, pos: u32, } impl imara_diff::Sink for CharChangeSetBuilder<'_> { type Out = (); fn process_change(&mut self, before: Range, after: Range) { self.res.retain((before.start - self.pos) as usize); self.res.delete(before.len()); self.pos = before.end; let res = self.hunk.after[after.start as usize..after.end as usize] .iter() .map(|&token| self.hunk.interner[token]) .collect(); self.res.insert(res); } fn finish(self) -> Self::Out { self.res.retain(self.hunk.before.len() - self.pos as usize); } } struct LineChangeSetBuilder<'a> { res: ChangeSet, after: RopeSlice<'a>, file: &'a InternedInput>, current_hunk: InternedInput, pos: u32, } impl imara_diff::Sink for LineChangeSetBuilder<'_> { type Out = ChangeSet; fn process_change(&mut self, before: Range, after: Range) { let len = self.file.before[self.pos as usize..before.start as usize] .iter() .map(|&it| self.file.interner[it].len_chars()) .sum(); 
self.res.retain(len); self.pos = before.end; // do not perform diffs on large hunks let len_before = before.end - before.start; let len_after = after.end - after.start; // Pure insertions/removals do not require a character diff. // Very large changes are ignored because their character diff is expensive to compute // TODO adjust heuristic to detect large changes? if len_before == 0 || len_after == 0 || len_after > 5 * len_before || 5 * len_after < len_before && len_before > 10 || len_before + len_after > 200 { let remove = self.file.before[before.start as usize..before.end as usize] .iter() .map(|&it| self.file.interner[it].len_chars()) .sum(); self.res.delete(remove); let mut fragment = Tendril::new(); if len_after > 500 { // copying a rope line by line is slower then copying the entire // rope. Use to_string for very large changes instead.. if self.file.after.len() == after.end as usize { if after.start == 0 { fragment = self.after.to_string().into(); } else { let start = self.after.line_to_char(after.start as usize); fragment = self.after.slice(start..).to_string().into(); } } else if after.start == 0 { let end = self.after.line_to_char(after.end as usize); fragment = self.after.slice(..end).to_string().into(); } else { let start = self.after.line_to_char(after.start as usize); let end = self.after.line_to_char(after.end as usize); fragment = self.after.slice(start..end).to_string().into(); } } else { for &line in &self.file.after[after.start as usize..after.end as usize] { for chunk in self.file.interner[line].chunks() { fragment.push_str(chunk) } } }; self.res.insert(fragment); } else { // for reasonably small hunks, generating a ChangeSet from char diff can save memory // TODO use a tokenizer (word diff?) 
for improved performance let hunk_before = self.file.before[before.start as usize..before.end as usize] .iter() .flat_map(|&it| self.file.interner[it].chars()); let hunk_after = self.file.after[after.start as usize..after.end as usize] .iter() .flat_map(|&it| self.file.interner[it].chars()); self.current_hunk.update_before(hunk_before); self.current_hunk.update_after(hunk_after); // the histogram heuristic does not work as well // for characters because the same characters often reoccur // use myer diff instead imara_diff::diff( Algorithm::Myers, &self.current_hunk, CharChangeSetBuilder { res: &mut self.res, hunk: &self.current_hunk, pos: 0, }, ); self.current_hunk.clear(); } } fn finish(mut self) -> Self::Out { let len = self.file.before[self.pos as usize..] .iter() .map(|&it| self.file.interner[it].len_chars()) .sum(); self.res.retain(len); self.res } } struct RopeLines<'a>(RopeSlice<'a>); impl<'a> imara_diff::intern::TokenSource for RopeLines<'a> { type Token = RopeSlice<'a>; type Tokenizer = ropey::iter::Lines<'a>; fn tokenize(&self) -> Self::Tokenizer { self.0.lines() } fn estimate_tokens(&self) -> u32 { // we can provide a perfect estimate which is very nice for performance self.0.len_lines() as u32 } } /// Compares `old` and `new` to generate a [`Transaction`] describing /// the steps required to get from `old` to `new`. 
pub fn compare_ropes(before: &Rope, after: &Rope) -> Transaction { let start = Instant::now(); let res = ChangeSet::with_capacity(32); let after = after.slice(..); let file = InternedInput::new(RopeLines(before.slice(..)), RopeLines(after)); let builder = LineChangeSetBuilder { res, file: &file, after, pos: 0, current_hunk: InternedInput::default(), }; let res = imara_diff::diff(Algorithm::Histogram, &file, builder).into(); log::debug!( "rope diff took {}s", Instant::now().duration_since(start).as_secs_f64() ); res } #[cfg(test)] mod tests { use super::*; fn test_identity(a: &str, b: &str) { let mut old = Rope::from(a); let new = Rope::from(b); compare_ropes(&old, &new).apply(&mut old); assert_eq!(old, new); } quickcheck::quickcheck! { fn test_compare_ropes(a: String, b: String) -> bool { let mut old = Rope::from(a); let new = Rope::from(b); compare_ropes(&old, &new).apply(&mut old); old == new } } #[test] fn equal_files() { test_identity("foo", "foo"); } #[test] fn trailing_newline() { test_identity("foo\n", "foo"); test_identity("foo", "foo\n"); } #[test] fn new_file() { test_identity("", "foo"); } #[test] fn deleted_file() { test_identity("foo", ""); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/doc_formatter.rs000066400000000000000000000402031500614314100256300ustar00rootroot00000000000000//! The `DocumentFormatter` forms the bridge between the raw document text //! and onscreen positioning. It yields the text graphemes as an iterator //! and traverses (part) of the document text. During that traversal it //! handles grapheme detection, softwrapping and annotations. //! It yields `FormattedGrapheme`s and their corresponding visual coordinates. //! //! As both virtual text and softwrapping can insert additional lines into the document //! it is generally not possible to find the start of the previous visual line. //! Instead the `DocumentFormatter` starts at the last "checkpoint" (usually a linebreak) //! 
called a "block" and the caller must advance it as needed. use std::borrow::Cow; use std::cmp::Ordering; use std::fmt::Debug; use std::mem::replace; #[cfg(test)] mod test; use unicode_segmentation::{Graphemes, UnicodeSegmentation}; use helix_stdx::rope::{RopeGraphemes, RopeSliceExt}; use crate::graphemes::{Grapheme, GraphemeStr}; use crate::syntax::Highlight; use crate::text_annotations::TextAnnotations; use crate::{Position, RopeSlice}; /// TODO make Highlight a u32 to reduce the size of this enum to a single word. #[derive(Debug, Clone, Copy)] pub enum GraphemeSource { Document { codepoints: u32, }, /// Inline virtual text can not be highlighted with a `Highlight` iterator /// because it's not part of the document. Instead the `Highlight` /// is emitted right by the document formatter VirtualText { highlight: Option, }, } impl GraphemeSource { /// Returns whether this grapheme is virtual inline text pub fn is_virtual(self) -> bool { matches!(self, GraphemeSource::VirtualText { .. }) } pub fn is_eof(self) -> bool { // all doc chars except the EOF char have non-zero codepoints matches!(self, GraphemeSource::Document { codepoints: 0 }) } pub fn doc_chars(self) -> usize { match self { GraphemeSource::Document { codepoints } => codepoints as usize, GraphemeSource::VirtualText { .. 
} => 0, } } } #[derive(Debug, Clone)] pub struct FormattedGrapheme<'a> { pub raw: Grapheme<'a>, pub source: GraphemeSource, pub visual_pos: Position, /// Document line at the start of the grapheme pub line_idx: usize, /// Document char position at the start of the grapheme pub char_idx: usize, } impl FormattedGrapheme<'_> { pub fn is_virtual(&self) -> bool { self.source.is_virtual() } pub fn doc_chars(&self) -> usize { self.source.doc_chars() } pub fn is_whitespace(&self) -> bool { self.raw.is_whitespace() } pub fn width(&self) -> usize { self.raw.width() } pub fn is_word_boundary(&self) -> bool { self.raw.is_word_boundary() } } #[derive(Debug, Clone)] struct GraphemeWithSource<'a> { grapheme: Grapheme<'a>, source: GraphemeSource, } impl<'a> GraphemeWithSource<'a> { fn new( g: GraphemeStr<'a>, visual_x: usize, tab_width: u16, source: GraphemeSource, ) -> GraphemeWithSource<'a> { GraphemeWithSource { grapheme: Grapheme::new(g, visual_x, tab_width), source, } } fn placeholder() -> Self { GraphemeWithSource { grapheme: Grapheme::Other { g: " ".into() }, source: GraphemeSource::Document { codepoints: 0 }, } } fn doc_chars(&self) -> usize { self.source.doc_chars() } fn is_whitespace(&self) -> bool { self.grapheme.is_whitespace() } fn is_newline(&self) -> bool { matches!(self.grapheme, Grapheme::Newline) } fn is_eof(&self) -> bool { self.source.is_eof() } fn width(&self) -> usize { self.grapheme.width() } fn is_word_boundary(&self) -> bool { self.grapheme.is_word_boundary() } } #[derive(Debug, Clone)] pub struct TextFormat { pub soft_wrap: bool, pub tab_width: u16, pub max_wrap: u16, pub max_indent_retain: u16, pub wrap_indicator: Box, pub wrap_indicator_highlight: Option, pub viewport_width: u16, pub soft_wrap_at_text_width: bool, } // test implementation is basically only used for testing or when softwrap is always disabled impl Default for TextFormat { fn default() -> Self { TextFormat { soft_wrap: false, tab_width: 4, max_wrap: 3, max_indent_retain: 4, 
wrap_indicator: Box::from(" "), viewport_width: 17, wrap_indicator_highlight: None, soft_wrap_at_text_width: false, } } } #[derive(Debug)] pub struct DocumentFormatter<'t> { text_fmt: &'t TextFormat, annotations: &'t TextAnnotations<'t>, /// The visual position at the end of the last yielded word boundary visual_pos: Position, graphemes: RopeGraphemes<'t>, /// The character pos of the `graphemes` iter used for inserting annotations char_pos: usize, /// The line pos of the `graphemes` iter used for inserting annotations line_pos: usize, exhausted: bool, inline_annotation_graphemes: Option<(Graphemes<'t>, Option)>, // softwrap specific /// The indentation of the current line /// Is set to `None` if the indentation level is not yet known /// because no non-whitespace graphemes have been encountered yet indent_level: Option, /// In case a long word needs to be split a single grapheme might need to be wrapped /// while the rest of the word stays on the same line peeked_grapheme: Option>, /// A first-in first-out (fifo) buffer for the Graphemes of any given word word_buf: Vec>, /// The index of the next grapheme that will be yielded from the `word_buf` word_i: usize, } impl<'t> DocumentFormatter<'t> { /// Creates a new formatter at the last block before `char_idx`. /// A block is a chunk which always ends with a linebreak. /// This is usually just a normal line break. /// However very long lines are always wrapped at constant intervals that can be cheaply calculated /// to avoid pathological behaviour. 
pub fn new_at_prev_checkpoint( text: RopeSlice<'t>, text_fmt: &'t TextFormat, annotations: &'t TextAnnotations, char_idx: usize, ) -> Self { // TODO divide long lines into blocks to avoid bad performance for long lines let block_line_idx = text.char_to_line(char_idx.min(text.len_chars())); let block_char_idx = text.line_to_char(block_line_idx); annotations.reset_pos(block_char_idx); DocumentFormatter { text_fmt, annotations, visual_pos: Position { row: 0, col: 0 }, graphemes: text.slice(block_char_idx..).graphemes(), char_pos: block_char_idx, exhausted: false, indent_level: None, peeked_grapheme: None, word_buf: Vec::with_capacity(64), word_i: 0, line_pos: block_line_idx, inline_annotation_graphemes: None, } } fn next_inline_annotation_grapheme( &mut self, char_pos: usize, ) -> Option<(&'t str, Option)> { loop { if let Some(&mut (ref mut annotation, highlight)) = self.inline_annotation_graphemes.as_mut() { if let Some(grapheme) = annotation.next() { return Some((grapheme, highlight)); } } if let Some((annotation, highlight)) = self.annotations.next_inline_annotation_at(char_pos) { self.inline_annotation_graphemes = Some(( UnicodeSegmentation::graphemes(&*annotation.text, true), highlight, )) } else { return None; } } } fn advance_grapheme(&mut self, col: usize, char_pos: usize) -> Option> { let (grapheme, source) = if let Some((grapheme, highlight)) = self.next_inline_annotation_grapheme(char_pos) { (grapheme.into(), GraphemeSource::VirtualText { highlight }) } else if let Some(grapheme) = self.graphemes.next() { let codepoints = grapheme.len_chars() as u32; let overlay = self.annotations.overlay_at(char_pos); let grapheme = match overlay { Some((overlay, _)) => overlay.grapheme.as_str().into(), None => Cow::from(grapheme).into(), }; (grapheme, GraphemeSource::Document { codepoints }) } else { if self.exhausted { return None; } self.exhausted = true; // EOF grapheme is required for rendering // and correct position computations return Some(GraphemeWithSource { 
grapheme: Grapheme::Other { g: " ".into() }, source: GraphemeSource::Document { codepoints: 0 }, }); }; let grapheme = GraphemeWithSource::new(grapheme, col, self.text_fmt.tab_width, source); Some(grapheme) } /// Move a word to the next visual line fn wrap_word(&mut self) -> usize { // softwrap this word to the next line let indent_carry_over = if let Some(indent) = self.indent_level { if indent as u16 <= self.text_fmt.max_indent_retain { indent as u16 } else { 0 } } else { // ensure the indent stays 0 self.indent_level = Some(0); 0 }; let virtual_lines = self.annotations .virtual_lines_at(self.char_pos, self.visual_pos, self.line_pos); self.visual_pos.col = indent_carry_over as usize; self.visual_pos.row += 1 + virtual_lines; let mut i = 0; let mut word_width = 0; let wrap_indicator = UnicodeSegmentation::graphemes(&*self.text_fmt.wrap_indicator, true) .map(|g| { i += 1; let grapheme = GraphemeWithSource::new( g.into(), self.visual_pos.col + word_width, self.text_fmt.tab_width, GraphemeSource::VirtualText { highlight: self.text_fmt.wrap_indicator_highlight, }, ); word_width += grapheme.width(); grapheme }); self.word_buf.splice(0..0, wrap_indicator); for grapheme in &mut self.word_buf[i..] 
{ let visual_x = self.visual_pos.col + word_width; grapheme .grapheme .change_position(visual_x, self.text_fmt.tab_width); word_width += grapheme.width(); } if let Some(grapheme) = &mut self.peeked_grapheme { let visual_x = self.visual_pos.col + word_width; grapheme .grapheme .change_position(visual_x, self.text_fmt.tab_width); } word_width } fn peek_grapheme(&mut self, col: usize, char_pos: usize) -> Option<&GraphemeWithSource<'t>> { if self.peeked_grapheme.is_none() { self.peeked_grapheme = self.advance_grapheme(col, char_pos); } self.peeked_grapheme.as_ref() } fn next_grapheme(&mut self, col: usize, char_pos: usize) -> Option> { self.peek_grapheme(col, char_pos); self.peeked_grapheme.take() } fn advance_to_next_word(&mut self) { self.word_buf.clear(); let mut word_width = 0; let mut word_chars = 0; if self.exhausted { return; } loop { let mut col = self.visual_pos.col + word_width; let char_pos = self.char_pos + word_chars; match col.cmp(&(self.text_fmt.viewport_width as usize)) { // The EOF char and newline chars are always selectable in helix. That means // that wrapping happens "too-early" if a word fits a line perfectly. This // is intentional so that all selectable graphemes are always visible (and // therefore the cursor never disappears). However if the user manually set a // lower softwrap width then this is undesirable. Just increasing the viewport- // width by one doesn't work because if a line is wrapped multiple times then // some words may extend past the specified width. // // So we special case a word that ends exactly at line bounds and is followed // by a newline/eof character here. 
Ordering::Equal if self.text_fmt.soft_wrap_at_text_width && self .peek_grapheme(col, char_pos) .is_some_and(|grapheme| grapheme.is_newline() || grapheme.is_eof()) => { } Ordering::Equal if word_width > self.text_fmt.max_wrap as usize => return, Ordering::Greater if word_width > self.text_fmt.max_wrap as usize => { self.peeked_grapheme = self.word_buf.pop(); return; } Ordering::Equal | Ordering::Greater => { word_width = self.wrap_word(); col = self.visual_pos.col + word_width; } Ordering::Less => (), } let Some(grapheme) = self.next_grapheme(col, char_pos) else { return; }; word_chars += grapheme.doc_chars(); // Track indentation if !grapheme.is_whitespace() && self.indent_level.is_none() { self.indent_level = Some(self.visual_pos.col); } else if grapheme.grapheme == Grapheme::Newline { self.indent_level = None; } let is_word_boundary = grapheme.is_word_boundary(); word_width += grapheme.width(); self.word_buf.push(grapheme); if is_word_boundary { return; } } } /// returns the char index at the end of the last yielded grapheme pub fn next_char_pos(&self) -> usize { self.char_pos } /// returns the visual position at the end of the last yielded grapheme pub fn next_visual_pos(&self) -> Position { self.visual_pos } } impl<'t> Iterator for DocumentFormatter<'t> { type Item = FormattedGrapheme<'t>; fn next(&mut self) -> Option { let grapheme = if self.text_fmt.soft_wrap { if self.word_i >= self.word_buf.len() { self.advance_to_next_word(); self.word_i = 0; } let grapheme = replace( self.word_buf.get_mut(self.word_i)?, GraphemeWithSource::placeholder(), ); self.word_i += 1; grapheme } else { self.advance_grapheme(self.visual_pos.col, self.char_pos)? 
}; let grapheme = FormattedGrapheme { raw: grapheme.grapheme, source: grapheme.source, visual_pos: self.visual_pos, line_idx: self.line_pos, char_idx: self.char_pos, }; self.char_pos += grapheme.doc_chars(); if !grapheme.is_virtual() { self.annotations.process_virtual_text_anchors(&grapheme); } if grapheme.raw == Grapheme::Newline { // move to end of newline char self.visual_pos.col += 1; let virtual_lines = self.annotations .virtual_lines_at(self.char_pos, self.visual_pos, self.line_pos); self.visual_pos.row += 1 + virtual_lines; self.visual_pos.col = 0; if !grapheme.is_virtual() { self.line_pos += 1; } } else { self.visual_pos.col += grapheme.width(); } Some(grapheme) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/doc_formatter/000077500000000000000000000000001500614314100252635ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/doc_formatter/test.rs000066400000000000000000000136771500614314100266260ustar00rootroot00000000000000use crate::doc_formatter::{DocumentFormatter, TextFormat}; use crate::text_annotations::{InlineAnnotation, Overlay, TextAnnotations}; impl TextFormat { fn new_test(softwrap: bool) -> Self { TextFormat { soft_wrap: softwrap, tab_width: 2, max_wrap: 3, max_indent_retain: 4, wrap_indicator: ".".into(), wrap_indicator_highlight: None, // use a prime number to allow lining up too often with repeat viewport_width: 17, soft_wrap_at_text_width: false, } } } impl<'t> DocumentFormatter<'t> { fn collect_to_str(&mut self) -> String { use std::fmt::Write; let mut res = String::new(); let viewport_width = self.text_fmt.viewport_width; let soft_wrap_at_text_width = self.text_fmt.soft_wrap_at_text_width; let mut line = 0; for grapheme in self { if grapheme.visual_pos.row != line { line += 1; assert_eq!(grapheme.visual_pos.row, line); write!(res, "\n{}", ".".repeat(grapheme.visual_pos.col)).unwrap(); } if !soft_wrap_at_text_width { assert!( grapheme.visual_pos.col <= viewport_width as usize, 
"softwrapped failed {}<={viewport_width}", grapheme.visual_pos.col ); } write!(res, "{}", grapheme.raw).unwrap(); } res } } fn softwrap_text(text: &str) -> String { DocumentFormatter::new_at_prev_checkpoint( text.into(), &TextFormat::new_test(true), &TextAnnotations::default(), 0, ) .collect_to_str() } #[test] fn basic_softwrap() { assert_eq!( softwrap_text(&"foo ".repeat(10)), "foo foo foo foo \n.foo foo foo foo \n.foo foo " ); assert_eq!( softwrap_text(&"fooo ".repeat(10)), "fooo fooo fooo \n.fooo fooo fooo \n.fooo fooo fooo \n.fooo " ); // check that we don't wrap unnecessarily assert_eq!(softwrap_text("\t\txxxx1xxxx2xx\n"), " xxxx1xxxx2xx \n "); } #[test] fn softwrap_indentation() { assert_eq!( softwrap_text("\t\tfoo1 foo2 foo3 foo4 foo5 foo6\n"), " foo1 foo2 \n.....foo3 foo4 \n.....foo5 foo6 \n " ); assert_eq!( softwrap_text("\t\t\tfoo1 foo2 foo3 foo4 foo5 foo6\n"), " foo1 foo2 \n.foo3 foo4 foo5 \n.foo6 \n " ); } #[test] fn long_word_softwrap() { assert_eq!( softwrap_text("\t\txxxx1xxxx2xxxx3xxxx4xxxx5xxxx6xxxx7xxxx8xxxx9xxx\n"), " xxxx1xxxx2xxx\n.....x3xxxx4xxxx5\n.....xxxx6xxxx7xx\n.....xx8xxxx9xxx \n " ); assert_eq!( softwrap_text("xxxxxxxx1xxxx2xxx\n"), "xxxxxxxx1xxxx2xxx\n. 
\n " ); assert_eq!( softwrap_text("\t\txxxx1xxxx 2xxxx3xxxx4xxxx5xxxx6xxxx7xxxx8xxxx9xxx\n"), " xxxx1xxxx \n.....2xxxx3xxxx4x\n.....xxx5xxxx6xxx\n.....x7xxxx8xxxx9\n.....xxx \n " ); assert_eq!( softwrap_text("\t\txxxx1xxx 2xxxx3xxxx4xxxx5xxxx6xxxx7xxxx8xxxx9xxx\n"), " xxxx1xxx 2xxx\n.....x3xxxx4xxxx5\n.....xxxx6xxxx7xx\n.....xx8xxxx9xxx \n " ); } #[test] fn softwrap_multichar_grapheme() { assert_eq!( softwrap_text("xxxx xxxx xxx a\u{0301}bc\n"), "xxxx xxxx xxx \n.ábc \n " ) } fn softwrap_text_at_text_width(text: &str) -> String { let mut text_fmt = TextFormat::new_test(true); text_fmt.soft_wrap_at_text_width = true; let annotations = TextAnnotations::default(); let mut formatter = DocumentFormatter::new_at_prev_checkpoint(text.into(), &text_fmt, &annotations, 0); formatter.collect_to_str() } #[test] fn long_word_softwrap_text_width() { assert_eq!( softwrap_text_at_text_width("xxxxxxxx1xxxx2xxx\nxxxxxxxx1xxxx2xxx"), "xxxxxxxx1xxxx2xxx \nxxxxxxxx1xxxx2xxx " ); } fn overlay_text(text: &str, char_pos: usize, softwrap: bool, overlays: &[Overlay]) -> String { DocumentFormatter::new_at_prev_checkpoint( text.into(), &TextFormat::new_test(softwrap), TextAnnotations::default().add_overlay(overlays, None), char_pos, ) .collect_to_str() } #[test] fn overlay() { assert_eq!( overlay_text( "foobar", 0, false, &[Overlay::new(0, "X"), Overlay::new(2, "\t")], ), "Xo bar " ); assert_eq!( overlay_text( &"foo ".repeat(10), 0, true, &[ Overlay::new(2, "\t"), Overlay::new(5, "\t"), Overlay::new(16, "X"), ] ), "fo f o foo \n.foo Xoo foo foo \n.foo foo foo " ); } fn annotate_text(text: &str, softwrap: bool, annotations: &[InlineAnnotation]) -> String { DocumentFormatter::new_at_prev_checkpoint( text.into(), &TextFormat::new_test(softwrap), TextAnnotations::default().add_inline_annotations(annotations, None), 0, ) .collect_to_str() } #[test] fn annotation() { assert_eq!( annotate_text("bar", false, &[InlineAnnotation::new(0, "foo")]), "foobar " ); assert_eq!( annotate_text( &"foo 
".repeat(10), true, &[InlineAnnotation::new(0, "foo ")] ), "foo foo foo foo \n.foo foo foo foo \n.foo foo foo " ); } #[test] fn annotation_and_overlay() { let annotations = [InlineAnnotation { char_idx: 0, text: "fooo".into(), }]; let overlay = [Overlay { char_idx: 0, grapheme: "\t".into(), }]; assert_eq!( DocumentFormatter::new_at_prev_checkpoint( "bbar".into(), &TextFormat::new_test(false), TextAnnotations::default() .add_inline_annotations(annotations.as_slice(), None) .add_overlay(overlay.as_slice(), None), 0, ) .collect_to_str(), "fooo bar " ); } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/editor_config.rs000066400000000000000000000303361500614314100256210ustar00rootroot00000000000000//! Support for [EditorConfig](https://EditorConfig.org) configuration loading. //! //! EditorConfig is an editor-agnostic format for specifying configuration in an INI-like, human //! friendly syntax in `.editorconfig` files (which are intended to be checked into VCS). This //! module provides functions to search for all `.editorconfig` files that apply to a given path //! and returns an `EditorConfig` type containing any specified configuration options. //! //! At time of writing, this module follows the [spec](https://spec.editorconfig.org/) at //! version 0.17.2. use std::{ collections::HashMap, fs, num::{NonZeroU16, NonZeroU8}, path::Path, str::FromStr, }; use encoding_rs::Encoding; use globset::{GlobBuilder, GlobMatcher}; use crate::{ indent::{IndentStyle, MAX_INDENT}, LineEnding, }; /// Configuration declared for a path in `.editorconfig` files. 
#[derive(Debug, Default, PartialEq, Eq)] pub struct EditorConfig { pub indent_style: Option, pub tab_width: Option, pub line_ending: Option, pub encoding: Option<&'static Encoding>, // pub spelling_language: Option, pub trim_trailing_whitespace: Option, pub insert_final_newline: Option, pub max_line_length: Option, } impl EditorConfig { /// Finds any configuration in `.editorconfig` files which applies to the given path. /// /// If no configuration applies then `EditorConfig::default()` is returned. pub fn find(path: &Path) -> Self { let mut configs = Vec::new(); // for ancestor in path.ancestors() { let editor_config_file = ancestor.join(".editorconfig"); let Ok(contents) = fs::read_to_string(&editor_config_file) else { continue; }; let ini = match contents.parse::() { Ok(ini) => ini, Err(err) => { log::warn!("Ignoring EditorConfig file at '{editor_config_file:?}' because a glob failed to compile: {err}"); continue; } }; let is_root = ini.pairs.get("root").map(AsRef::as_ref) == Some("true"); configs.push((ini, ancestor)); // > The search shall stop if an EditorConfig file is found with the `root` key set to // > `true` in the preamble or when reaching the root filesystem directory. if is_root { break; } } let mut pairs = Pairs::new(); // Reverse the configuration stack so that the `.editorconfig` files closest to `path` // are applied last and overwrite settings in files closer to the search ceiling. // // > If multiple EditorConfig files have matching sections, the pairs from the closer // > EditorConfig file are read last, so pairs in closer files take precedence. 
for (config, dir) in configs.into_iter().rev() { let relative_path = path.strip_prefix(dir).expect("dir is an ancestor of path"); for section in config.sections { if section.glob.is_match(relative_path) { log::info!( "applying EditorConfig from section '{}' in file {:?}", section.glob.glob(), dir.join(".editorconfig") ); pairs.extend(section.pairs); } } } Self::from_pairs(pairs) } fn from_pairs(pairs: Pairs) -> Self { enum IndentSize { Tab, Spaces(NonZeroU8), } // let indent_size = pairs.get("indent_size").and_then(|value| { if value.as_ref() == "tab" { Some(IndentSize::Tab) } else if let Ok(spaces) = value.parse::() { Some(IndentSize::Spaces(spaces)) } else { None } }); let tab_width = pairs .get("tab_width") .and_then(|value| value.parse::().ok()) .or(match indent_size { Some(IndentSize::Spaces(spaces)) => Some(spaces), _ => None, }); let indent_style = pairs .get("indent_style") .and_then(|value| match value.as_ref() { "tab" => Some(IndentStyle::Tabs), "space" => { let spaces = match indent_size { Some(IndentSize::Spaces(spaces)) => spaces.get(), Some(IndentSize::Tab) => tab_width.map(|n| n.get()).unwrap_or(4), None => 4, }; Some(IndentStyle::Spaces(spaces.clamp(1, MAX_INDENT))) } _ => None, }); let line_ending = pairs .get("end_of_line") .and_then(|value| match value.as_ref() { "lf" => Some(LineEnding::LF), "crlf" => Some(LineEnding::Crlf), #[cfg(feature = "unicode-lines")] "cr" => Some(LineEnding::CR), _ => None, }); let encoding = pairs.get("charset").and_then(|value| match value.as_ref() { "latin1" => Some(encoding_rs::WINDOWS_1252), "utf-8" => Some(encoding_rs::UTF_8), // `utf-8-bom` is intentionally ignored. // > `utf-8-bom` is discouraged. 
"utf-16le" => Some(encoding_rs::UTF_16LE), "utf-16be" => Some(encoding_rs::UTF_16BE), _ => None, }); let trim_trailing_whitespace = pairs .get("trim_trailing_whitespace") .and_then(|value| match value.as_ref() { "true" => Some(true), "false" => Some(false), _ => None, }); let insert_final_newline = pairs .get("insert_final_newline") .and_then(|value| match value.as_ref() { "true" => Some(true), "false" => Some(false), _ => None, }); // This option is not in the spec but is supported by some editors. // let max_line_length = pairs .get("max_line_length") .and_then(|value| value.parse::().ok()); Self { indent_style, tab_width, line_ending, encoding, trim_trailing_whitespace, insert_final_newline, max_line_length, } } } type Pairs = HashMap, Box>; #[derive(Debug)] struct Section { glob: GlobMatcher, pairs: Pairs, } #[derive(Debug, Default)] struct Ini { pairs: Pairs, sections: Vec
, } impl FromStr for Ini { type Err = globset::Error; fn from_str(source: &str) -> Result { // let mut ini = Ini::default(); // > EditorConfig files are in an INI-like file format. To read an EditorConfig file, take // > one line at a time, from beginning to end. For each line: for full_line in source.lines() { // > 1. Remove all leading and trailing whitespace. let line = full_line.trim(); // > 2. Process the remaining text as specified for its type below. // > The types of lines are: // > * Blank: contains nothing. Blank lines are ignored. if line.is_empty() { continue; } // > * Comment: starts with a ';' or '#'. Comment lines are ignored. if line.starts_with([';', '#']) { continue; } if let Some(section) = line.strip_prefix('[').and_then(|s| s.strip_suffix(']')) { // > * Section Header: starts with a `[` and ends with a `]`. These lines define // > globs... // // We need to modify the glob string slightly since EditorConfig's glob flavor // doesn't match `globset`'s exactly. `globset` only allows '**' at the beginning // or end of a glob or between two '/'s. (This replacement is not very fancy but // should cover most practical cases.) let mut glob_str = section.replace("**.", "**/*."); if !is_glob_relative(section) { glob_str.insert_str(0, "**/"); } let glob = GlobBuilder::new(&glob_str) .literal_separator(true) .backslash_escape(true) .build()?; ini.sections.push(Section { glob: glob.compile_matcher(), pairs: Pairs::new(), }); } else if let Some((key, value)) = line.split_once('=') { // > * Key-Value Pair (or Pair): contains a key and a value, separated by an `=`. // > * Key: The part before the first `=` on the line. // > * Value: The part, if any, after the first `=` on the line. // > * Keys and values are trimmed of leading and trailing whitespace, but // > include any whitespace that is between non-whitespace characters. // > * If a value is not provided, then the value is an empty string. 
let key = key.trim().to_lowercase().into_boxed_str(); let value = value.trim().to_lowercase().into_boxed_str(); if let Some(section) = ini.sections.last_mut() { section.pairs.insert(key, value); } else { ini.pairs.insert(key, value); } } } Ok(ini) } } /// Determines whether a glob is relative to the directory of the config file. fn is_glob_relative(source: &str) -> bool { // > If the glob contains a path separator (a `/` not inside square brackets), then the // > glob is relative to the directory level of the particular `.editorconfig` file itself. let mut idx = 0; while let Some(open) = source[idx..].find('[').map(|open| idx + open) { if source[..open].contains('/') { return true; } idx = source[open..] .find(']') .map_or(source.len(), |close| idx + close); } source[idx..].contains('/') } #[cfg(test)] mod test { use super::*; #[test] fn is_glob_relative_test() { assert!(is_glob_relative("subdir/*.c")); assert!(!is_glob_relative("*.txt")); assert!(!is_glob_relative("[a/b].c")); } fn editor_config(path: impl AsRef, source: &str) -> EditorConfig { let path = path.as_ref(); let ini = source.parse::().unwrap(); let pairs = ini .sections .into_iter() .filter(|section| section.glob.is_match(path)) .fold(Pairs::new(), |mut acc, section| { acc.extend(section.pairs); acc }); EditorConfig::from_pairs(pairs) } #[test] fn parse_test() { let source = r#" [*] indent_style = space [Makefile] indent_style = tab [docs/**.txt] insert_final_newline = true "#; assert_eq!( editor_config("a.txt", source), EditorConfig { indent_style: Some(IndentStyle::Spaces(4)), ..Default::default() } ); assert_eq!( editor_config("pkg/Makefile", source), EditorConfig { indent_style: Some(IndentStyle::Tabs), ..Default::default() } ); assert_eq!( editor_config("docs/config/editor.txt", source), EditorConfig { indent_style: Some(IndentStyle::Spaces(4)), insert_final_newline: Some(true), ..Default::default() } ); } } 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/fuzzy.rs000066400000000000000000000025351500614314100241750ustar00rootroot00000000000000use std::ops::DerefMut; use nucleo::pattern::{Atom, AtomKind, CaseMatching, Normalization}; use nucleo::Config; use parking_lot::Mutex; pub struct LazyMutex { inner: Mutex>, init: fn() -> T, } impl LazyMutex { pub const fn new(init: fn() -> T) -> Self { Self { inner: Mutex::new(None), init, } } pub fn lock(&self) -> impl DerefMut + '_ { parking_lot::MutexGuard::map(self.inner.lock(), |val| val.get_or_insert_with(self.init)) } } pub static MATCHER: LazyMutex = LazyMutex::new(nucleo::Matcher::default); /// convenience function to easily fuzzy match /// on a (relatively small list of inputs). This is not recommended for building a full tui /// application that can match large numbers of matches as all matching is done on the current /// thread, effectively blocking the UI pub fn fuzzy_match>( pattern: &str, items: impl IntoIterator, path: bool, ) -> Vec<(T, u16)> { let mut matcher = MATCHER.lock(); matcher.config = Config::DEFAULT; if path { matcher.config.set_match_paths(); } let pattern = Atom::new( pattern, CaseMatching::Smart, Normalization::Smart, AtomKind::Fuzzy, false, ); pattern.match_list(items, &mut matcher) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/graphemes.rs000066400000000000000000000277701500614314100247710ustar00rootroot00000000000000//! Utility functions to traverse the unicode graphemes of a `Rope`'s text contents. //! //! 
Based on use ropey::{str_utils::byte_to_char_idx, RopeSlice}; use unicode_segmentation::{GraphemeCursor, GraphemeIncomplete}; use unicode_width::UnicodeWidthStr; use std::borrow::Cow; use std::fmt::{self, Debug, Display}; use std::marker::PhantomData; use std::ops::Deref; use std::ptr::NonNull; use std::{slice, str}; use crate::chars::{char_is_whitespace, char_is_word}; use crate::LineEnding; #[inline] pub fn tab_width_at(visual_x: usize, tab_width: u16) -> usize { tab_width as usize - (visual_x % tab_width as usize) } #[derive(Debug, Clone, PartialEq, Eq)] pub enum Grapheme<'a> { Newline, Tab { width: usize }, Other { g: GraphemeStr<'a> }, } impl<'a> Grapheme<'a> { pub fn new_decoration(g: &'static str) -> Grapheme<'a> { assert_ne!(g, "\t"); Grapheme::new(g.into(), 0, 0) } pub fn new(g: GraphemeStr<'a>, visual_x: usize, tab_width: u16) -> Grapheme<'a> { match g { g if g == "\t" => Grapheme::Tab { width: tab_width_at(visual_x, tab_width), }, _ if LineEnding::from_str(&g).is_some() => Grapheme::Newline, _ => Grapheme::Other { g }, } } pub fn change_position(&mut self, visual_x: usize, tab_width: u16) { if let Grapheme::Tab { width } = self { *width = tab_width_at(visual_x, tab_width) } } /// Returns the a visual width of this grapheme, #[inline] pub fn width(&self) -> usize { match *self { // width is not cached because we are dealing with // ASCII almost all the time which already has a fastpath // it's okay to convert to u16 here because no codepoint has a width larger // than 2 and graphemes are usually atmost two visible codepoints wide Grapheme::Other { ref g } => grapheme_width(g), Grapheme::Tab { width } => width, Grapheme::Newline => 1, } } pub fn is_whitespace(&self) -> bool { !matches!(&self, Grapheme::Other { g } if !g.chars().next().is_some_and(char_is_whitespace)) } // TODO currently word boundaries are used for softwrapping. // This works best for programming languages and well for prose. 
// This could however be improved in the future by considering unicode // character classes but pub fn is_word_boundary(&self) -> bool { !matches!(&self, Grapheme::Other { g,.. } if g.chars().next().is_some_and(char_is_word)) } } impl Display for Grapheme<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { Grapheme::Newline => write!(f, " "), Grapheme::Tab { width } => { for _ in 0..width { write!(f, " ")?; } Ok(()) } Grapheme::Other { ref g } => { write!(f, "{g}") } } } } #[must_use] pub fn grapheme_width(g: &str) -> usize { if g.as_bytes()[0] <= 127 { // Fast-path ascii. // Point 1: theoretically, ascii control characters should have zero // width, but in our case we actually want them to have width: if they // show up in text, we want to treat them as textual elements that can // be edited. So we can get away with making all ascii single width // here. // Point 2: we're only examining the first codepoint here, which means // we're ignoring graphemes formed with combining characters. However, // if it starts with ascii, it's going to be a single-width grapeheme // regardless, so, again, we can get away with that here. // Point 3: we're only examining the first _byte_. But for utf8, when // checking for ascii range values only, that works. 1 } else { // We use max(1) here because all grapeheme clusters--even illformed // ones--should have at least some width so they can be edited // properly. // TODO properly handle unicode width for all codepoints // example of where unicode width is currently wrong: 🤦🏼‍♂️ (taken from https://hsivonen.fi/string-length/) UnicodeWidthStr::width(g).max(1) } } // NOTE: for byte indexing versions of these functions see `RopeSliceExt`'s // `floor_grapheme_boundary` and `ceil_grapheme_boundary` and the rope grapheme iterators. 
#[must_use] pub fn nth_prev_grapheme_boundary(slice: RopeSlice, char_idx: usize, n: usize) -> usize { // Bounds check debug_assert!(char_idx <= slice.len_chars()); // We work with bytes for this, so convert. let mut byte_idx = slice.char_to_byte(char_idx); // Get the chunk with our byte index in it. let (mut chunk, mut chunk_byte_idx, mut chunk_char_idx, _) = slice.chunk_at_byte(byte_idx); // Set up the grapheme cursor. let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true); // Find the previous grapheme cluster boundary. for _ in 0..n { loop { match gc.prev_boundary(chunk, chunk_byte_idx) { Ok(None) => return 0, Ok(Some(n)) => { byte_idx = n; break; } Err(GraphemeIncomplete::PrevChunk) => { let (a, b, c, _) = slice.chunk_at_byte(chunk_byte_idx - 1); chunk = a; chunk_byte_idx = b; chunk_char_idx = c; } Err(GraphemeIncomplete::PreContext(n)) => { let ctx_chunk = slice.chunk_at_byte(n - 1).0; gc.provide_context(ctx_chunk, n - ctx_chunk.len()); } _ => unreachable!(), } } } let tmp = byte_to_char_idx(chunk, byte_idx - chunk_byte_idx); chunk_char_idx + tmp } /// Finds the previous grapheme boundary before the given char position. #[must_use] #[inline(always)] pub fn prev_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> usize { nth_prev_grapheme_boundary(slice, char_idx, 1) } #[must_use] pub fn nth_next_grapheme_boundary(slice: RopeSlice, char_idx: usize, n: usize) -> usize { // Bounds check debug_assert!(char_idx <= slice.len_chars()); // We work with bytes for this, so convert. let mut byte_idx = slice.char_to_byte(char_idx); // Get the chunk with our byte index in it. let (mut chunk, mut chunk_byte_idx, mut chunk_char_idx, _) = slice.chunk_at_byte(byte_idx); // Set up the grapheme cursor. let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true); // Find the nth next grapheme cluster boundary. 
for _ in 0..n { loop { match gc.next_boundary(chunk, chunk_byte_idx) { Ok(None) => return slice.len_chars(), Ok(Some(n)) => { byte_idx = n; break; } Err(GraphemeIncomplete::NextChunk) => { chunk_byte_idx += chunk.len(); let (a, _, c, _) = slice.chunk_at_byte(chunk_byte_idx); chunk = a; chunk_char_idx = c; } Err(GraphemeIncomplete::PreContext(n)) => { let ctx_chunk = slice.chunk_at_byte(n - 1).0; gc.provide_context(ctx_chunk, n - ctx_chunk.len()); } _ => unreachable!(), } } } let tmp = byte_to_char_idx(chunk, byte_idx - chunk_byte_idx); chunk_char_idx + tmp } /// Finds the next grapheme boundary after the given char position. #[must_use] #[inline(always)] pub fn next_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> usize { nth_next_grapheme_boundary(slice, char_idx, 1) } /// Returns the passed char index if it's already a grapheme boundary, /// or the next grapheme boundary char index if not. #[must_use] #[inline] pub fn ensure_grapheme_boundary_next(slice: RopeSlice, char_idx: usize) -> usize { if char_idx == 0 { char_idx } else { next_grapheme_boundary(slice, char_idx - 1) } } /// Returns the passed char index if it's already a grapheme boundary, /// or the prev grapheme boundary char index if not. #[must_use] #[inline] pub fn ensure_grapheme_boundary_prev(slice: RopeSlice, char_idx: usize) -> usize { if char_idx == slice.len_chars() { char_idx } else { prev_grapheme_boundary(slice, char_idx + 1) } } /// Returns whether the given char position is a grapheme boundary. #[must_use] pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool { // Bounds check debug_assert!(char_idx <= slice.len_chars()); // We work with bytes for this, so convert. let byte_idx = slice.char_to_byte(char_idx); // Get the chunk with our byte index in it. let (chunk, chunk_byte_idx, _, _) = slice.chunk_at_byte(byte_idx); // Set up the grapheme cursor. 
let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true); // Determine if the given position is a grapheme cluster boundary. loop { match gc.is_boundary(chunk, chunk_byte_idx) { Ok(n) => return n, Err(GraphemeIncomplete::PreContext(n)) => { let (ctx_chunk, ctx_byte_start, _, _) = slice.chunk_at_byte(n - 1); gc.provide_context(ctx_chunk, ctx_byte_start); } Err(_) => unreachable!(), } } } /// A highly compressed Cow<'a, str> that holds /// atmost u31::MAX bytes and is readonly pub struct GraphemeStr<'a> { ptr: NonNull, len: u32, phantom: PhantomData<&'a str>, } impl GraphemeStr<'_> { const MASK_OWNED: u32 = 1 << 31; fn compute_len(&self) -> usize { (self.len & !Self::MASK_OWNED) as usize } } impl Deref for GraphemeStr<'_> { type Target = str; fn deref(&self) -> &Self::Target { unsafe { let bytes = slice::from_raw_parts(self.ptr.as_ptr(), self.compute_len()); str::from_utf8_unchecked(bytes) } } } impl Drop for GraphemeStr<'_> { fn drop(&mut self) { if self.len & Self::MASK_OWNED != 0 { // free allocation unsafe { drop(Box::from_raw(slice::from_raw_parts_mut( self.ptr.as_ptr(), self.compute_len(), ))); } } } } impl<'a> From<&'a str> for GraphemeStr<'a> { fn from(g: &'a str) -> Self { GraphemeStr { ptr: unsafe { NonNull::new_unchecked(g.as_bytes().as_ptr() as *mut u8) }, len: i32::try_from(g.len()).unwrap() as u32, phantom: PhantomData, } } } impl From for GraphemeStr<'_> { fn from(g: String) -> Self { let len = g.len(); let ptr = Box::into_raw(g.into_bytes().into_boxed_slice()) as *mut u8; GraphemeStr { ptr: unsafe { NonNull::new_unchecked(ptr) }, len: (i32::try_from(len).unwrap() as u32) | Self::MASK_OWNED, phantom: PhantomData, } } } impl<'a> From> for GraphemeStr<'a> { fn from(g: Cow<'a, str>) -> Self { match g { Cow::Borrowed(g) => g.into(), Cow::Owned(g) => g.into(), } } } impl> PartialEq for GraphemeStr<'_> { fn eq(&self, other: &T) -> bool { self.deref() == other.deref() } } impl PartialEq for GraphemeStr<'_> { fn eq(&self, other: &str) -> bool { 
self.deref() == other } } impl Eq for GraphemeStr<'_> {} impl Debug for GraphemeStr<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { Debug::fmt(self.deref(), f) } } impl Display for GraphemeStr<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { Display::fmt(self.deref(), f) } } impl Clone for GraphemeStr<'_> { fn clone(&self) -> Self { self.deref().to_owned().into() } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/history.rs000066400000000000000000000525331500614314100245120ustar00rootroot00000000000000use crate::{Assoc, ChangeSet, Range, Rope, Selection, Transaction}; use once_cell::sync::Lazy; use regex::Regex; use std::num::NonZeroUsize; use std::time::{Duration, Instant}; #[derive(Debug, Clone)] pub struct State { pub doc: Rope, pub selection: Selection, } /// Stores the history of changes to a buffer. /// /// Currently the history is represented as a vector of revisions. The vector /// always has at least one element: the empty root revision. Each revision /// with the exception of the root has a parent revision, a [Transaction] /// that can be applied to its parent to transition from the parent to itself, /// and an inversion of that transaction to transition from the parent to its /// latest child. /// /// When using `u` to undo a change, an inverse of the stored transaction will /// be applied which will transition the buffer to the parent state. /// /// Each revision with the exception of the last in the vector also has a /// last child revision. When using `U` to redo a change, the last child transaction /// will be applied to the current state of the buffer. /// /// The current revision is the one currently displayed in the buffer. /// /// Committing a new revision to the history will update the last child of the /// current revision, and push a new revision to the end of the vector. /// /// Revisions are committed with a timestamp. 
:earlier and :later can be used /// to jump to the closest revision to a moment in time relative to the timestamp /// of the current revision plus (:later) or minus (:earlier) the duration /// given to the command. If a single integer is given, the editor will instead /// jump the given number of revisions in the vector. /// /// Limitations: /// * Changes in selections currently don't commit history changes. The selection /// will only be updated to the state after a committed buffer change. /// * The vector of history revisions is currently unbounded. This might /// cause the memory consumption to grow significantly large during long /// editing sessions. /// * Because delete transactions currently don't store the text that they /// delete, we also store an inversion of the transaction. /// /// Using time to navigate the history: #[derive(Debug)] pub struct History { revisions: Vec, current: usize, } /// A single point in history. See [History] for more information. #[derive(Debug, Clone)] struct Revision { parent: usize, last_child: Option, transaction: Transaction, // We need an inversion for undos because delete transactions don't store // the deleted text. 
inversion: Transaction, timestamp: Instant, } impl Default for History { fn default() -> Self { // Add a dummy root revision with empty transaction Self { revisions: vec![Revision { parent: 0, last_child: None, transaction: Transaction::from(ChangeSet::new("".into())), inversion: Transaction::from(ChangeSet::new("".into())), timestamp: Instant::now(), }], current: 0, } } } impl History { pub fn commit_revision(&mut self, transaction: &Transaction, original: &State) { self.commit_revision_at_timestamp(transaction, original, Instant::now()); } pub fn commit_revision_at_timestamp( &mut self, transaction: &Transaction, original: &State, timestamp: Instant, ) { let inversion = transaction .invert(&original.doc) // Store the current cursor position .with_selection(original.selection.clone()); let new_current = self.revisions.len(); self.revisions[self.current].last_child = NonZeroUsize::new(new_current); self.revisions.push(Revision { parent: self.current, last_child: None, transaction: transaction.clone(), inversion, timestamp, }); self.current = new_current; } #[inline] pub fn current_revision(&self) -> usize { self.current } #[inline] pub const fn at_root(&self) -> bool { self.current == 0 } /// Returns the changes since the given revision composed into a transaction. /// Returns None if there are no changes between the current and given revisions. pub fn changes_since(&self, revision: usize) -> Option { let lca = self.lowest_common_ancestor(revision, self.current); let up = self.path_up(revision, lca); let down = self.path_up(self.current, lca); let up_txns = up .iter() .rev() .map(|&n| self.revisions[n].inversion.clone()); let down_txns = down.iter().map(|&n| self.revisions[n].transaction.clone()); down_txns.chain(up_txns).reduce(|acc, tx| tx.compose(acc)) } /// Undo the last edit. 
pub fn undo(&mut self) -> Option<&Transaction> { if self.at_root() { return None; } let current_revision = &self.revisions[self.current]; self.current = current_revision.parent; Some(¤t_revision.inversion) } /// Redo the last edit. pub fn redo(&mut self) -> Option<&Transaction> { let current_revision = &self.revisions[self.current]; let last_child = current_revision.last_child?; self.current = last_child.get(); Some(&self.revisions[last_child.get()].transaction) } // Get the position of last change pub fn last_edit_pos(&self) -> Option { if self.current == 0 { return None; } let current_revision = &self.revisions[self.current]; let primary_selection = current_revision .inversion .selection() .expect("inversion always contains a selection") .primary(); let (_from, to, _fragment) = current_revision .transaction .changes_iter() // find a change that matches the primary selection .find(|(from, to, _fragment)| Range::new(*from, *to).overlaps(&primary_selection)) // or use the first change .or_else(|| current_revision.transaction.changes_iter().next()) .unwrap(); let pos = current_revision .transaction .changes() .map_pos(to, Assoc::After); Some(pos) } fn lowest_common_ancestor(&self, mut a: usize, mut b: usize) -> usize { use std::collections::HashSet; let mut a_path_set = HashSet::new(); let mut b_path_set = HashSet::new(); loop { a_path_set.insert(a); b_path_set.insert(b); if a_path_set.contains(&b) { return b; } if b_path_set.contains(&a) { return a; } a = self.revisions[a].parent; // Relies on the parent of 0 being 0. b = self.revisions[b].parent; // Same as above. } } /// List of nodes on the way from `n` to 'a`. Doesn't include `a`. /// Includes `n` unless `a == n`. `a` must be an ancestor of `n`. fn path_up(&self, mut n: usize, a: usize) -> Vec { let mut path = Vec::new(); while n != a { path.push(n); n = self.revisions[n].parent; } path } /// Create a [`Transaction`] that will jump to a specific revision in the history. 
fn jump_to(&mut self, to: usize) -> Vec { let lca = self.lowest_common_ancestor(self.current, to); let up = self.path_up(self.current, lca); let down = self.path_up(to, lca); self.current = to; let up_txns = up.iter().map(|&n| self.revisions[n].inversion.clone()); let down_txns = down .iter() .rev() .map(|&n| self.revisions[n].transaction.clone()); up_txns.chain(down_txns).collect() } /// Creates a [`Transaction`] that will undo `delta` revisions. fn jump_backward(&mut self, delta: usize) -> Vec { self.jump_to(self.current.saturating_sub(delta)) } /// Creates a [`Transaction`] that will redo `delta` revisions. fn jump_forward(&mut self, delta: usize) -> Vec { self.jump_to( self.current .saturating_add(delta) .min(self.revisions.len() - 1), ) } /// Helper for a binary search case below. fn revision_closer_to_instant(&self, i: usize, instant: Instant) -> usize { let dur_im1 = instant.duration_since(self.revisions[i - 1].timestamp); let dur_i = self.revisions[i].timestamp.duration_since(instant); use std::cmp::Ordering::*; match dur_im1.cmp(&dur_i) { Less => i - 1, Equal | Greater => i, } } /// Creates a [`Transaction`] that will match a revision created at around /// `instant`. fn jump_instant(&mut self, instant: Instant) -> Vec { let search_result = self .revisions .binary_search_by(|rev| rev.timestamp.cmp(&instant)); let revision = match search_result { Ok(revision) => revision, Err(insert_point) => match insert_point { 0 => 0, n if n == self.revisions.len() => n - 1, i => self.revision_closer_to_instant(i, instant), }, }; self.jump_to(revision) } /// Creates a [`Transaction`] that will match a revision created `duration` ago /// from the timestamp of current revision. 
fn jump_duration_backward(&mut self, duration: Duration) -> Vec { match self.revisions[self.current].timestamp.checked_sub(duration) { Some(instant) => self.jump_instant(instant), None => self.jump_to(0), } } /// Creates a [`Transaction`] that will match a revision created `duration` in /// the future from the timestamp of the current revision. fn jump_duration_forward(&mut self, duration: Duration) -> Vec { match self.revisions[self.current].timestamp.checked_add(duration) { Some(instant) => self.jump_instant(instant), None => self.jump_to(self.revisions.len() - 1), } } /// Creates an undo [`Transaction`]. pub fn earlier(&mut self, uk: UndoKind) -> Vec { use UndoKind::*; match uk { Steps(n) => self.jump_backward(n), TimePeriod(d) => self.jump_duration_backward(d), } } /// Creates a redo [`Transaction`]. pub fn later(&mut self, uk: UndoKind) -> Vec { use UndoKind::*; match uk { Steps(n) => self.jump_forward(n), TimePeriod(d) => self.jump_duration_forward(d), } } } /// Whether to undo by a number of edits or a duration of time. #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum UndoKind { Steps(usize), TimePeriod(std::time::Duration), } /// A subset of systemd.time time span syntax units. const TIME_UNITS: &[(&[&str], &str, u64)] = &[ (&["seconds", "second", "sec", "s"], "seconds", 1), (&["minutes", "minute", "min", "m"], "minutes", 60), (&["hours", "hour", "hr", "h"], "hours", 60 * 60), (&["days", "day", "d"], "days", 24 * 60 * 60), ]; /// Checks if the duration input can be turned into a valid duration. It must be a /// positive integer and denote the [unit of time.](`TIME_UNITS`) /// Examples of valid durations: /// * `5 sec` /// * `5 min` /// * `5 hr` /// * `5 days` static DURATION_VALIDATION_REGEX: Lazy = Lazy::new(|| Regex::new(r"^(?:\d+\s*[a-z]+\s*)+$").unwrap()); /// Captures both the number and unit as separate capture groups. static NUMBER_UNIT_REGEX: Lazy = Lazy::new(|| Regex::new(r"(\d+)\s*([a-z]+)").unwrap()); /// Parse a string (e.g. 
"5 sec") and try to convert it into a [`Duration`]. fn parse_human_duration(s: &str) -> Result { if !DURATION_VALIDATION_REGEX.is_match(s) { return Err("duration should be composed \ of positive integers followed by time units" .to_string()); } let mut specified = [false; TIME_UNITS.len()]; let mut seconds = 0u64; for cap in NUMBER_UNIT_REGEX.captures_iter(s) { let (n, unit_str) = (&cap[1], &cap[2]); let n: u64 = n.parse().map_err(|_| format!("integer too large: {}", n))?; let time_unit = TIME_UNITS .iter() .enumerate() .find(|(_, (forms, _, _))| forms.iter().any(|f| f == &unit_str)); if let Some((i, (_, unit, mul))) = time_unit { if specified[i] { return Err(format!("{} specified more than once", unit)); } specified[i] = true; let new_seconds = n.checked_mul(*mul).and_then(|s| seconds.checked_add(s)); match new_seconds { Some(ns) => seconds = ns, None => return Err("duration too large".to_string()), } } else { return Err(format!("incorrect time unit: {}", unit_str)); } } Ok(Duration::from_secs(seconds)) } impl std::str::FromStr for UndoKind { type Err = String; fn from_str(s: &str) -> Result { let s = s.trim(); if s.is_empty() { Ok(Self::Steps(1usize)) } else if let Ok(n) = s.parse::() { Ok(UndoKind::Steps(n)) } else { Ok(Self::TimePeriod(parse_human_duration(s)?)) } } } #[cfg(test)] mod test { use super::*; use crate::Selection; #[test] fn test_undo_redo() { let mut history = History::default(); let doc = Rope::from("hello"); let mut state = State { doc, selection: Selection::point(0), }; let transaction1 = Transaction::change(&state.doc, vec![(5, 5, Some(" world!".into()))].into_iter()); // Need to commit before applying! history.commit_revision(&transaction1, &state); transaction1.apply(&mut state.doc); assert_eq!("hello world!", state.doc); // --- let transaction2 = Transaction::change(&state.doc, vec![(6, 11, Some("世界".into()))].into_iter()); // Need to commit before applying! 
history.commit_revision(&transaction2, &state); transaction2.apply(&mut state.doc); assert_eq!("hello 世界!", state.doc); // --- fn undo(history: &mut History, state: &mut State) { if let Some(transaction) = history.undo() { transaction.apply(&mut state.doc); } } fn redo(history: &mut History, state: &mut State) { if let Some(transaction) = history.redo() { transaction.apply(&mut state.doc); } } undo(&mut history, &mut state); assert_eq!("hello world!", state.doc); redo(&mut history, &mut state); assert_eq!("hello 世界!", state.doc); undo(&mut history, &mut state); undo(&mut history, &mut state); assert_eq!("hello", state.doc); // undo at root is a no-op undo(&mut history, &mut state); assert_eq!("hello", state.doc); } #[test] fn test_earlier_later() { let mut history = History::default(); let doc = Rope::from("a\n"); let mut state = State { doc, selection: Selection::point(0), }; fn undo(history: &mut History, state: &mut State) { if let Some(transaction) = history.undo() { transaction.apply(&mut state.doc); } } fn earlier(history: &mut History, state: &mut State, uk: UndoKind) { let txns = history.earlier(uk); for txn in txns { txn.apply(&mut state.doc); } } fn later(history: &mut History, state: &mut State, uk: UndoKind) { let txns = history.later(uk); for txn in txns { txn.apply(&mut state.doc); } } fn commit_change( history: &mut History, state: &mut State, change: crate::transaction::Change, instant: Instant, ) { let txn = Transaction::change(&state.doc, vec![change].into_iter()); history.commit_revision_at_timestamp(&txn, state, instant); txn.apply(&mut state.doc); } let t0 = Instant::now(); let t = |n| t0.checked_add(Duration::from_secs(n)).unwrap(); commit_change(&mut history, &mut state, (1, 1, Some(" b".into())), t(0)); assert_eq!("a b\n", state.doc); commit_change(&mut history, &mut state, (3, 3, Some(" c".into())), t(10)); assert_eq!("a b c\n", state.doc); commit_change(&mut history, &mut state, (5, 5, Some(" d".into())), t(20)); assert_eq!("a b c d\n", 
state.doc); undo(&mut history, &mut state); assert_eq!("a b c\n", state.doc); commit_change(&mut history, &mut state, (5, 5, Some(" e".into())), t(30)); assert_eq!("a b c e\n", state.doc); undo(&mut history, &mut state); undo(&mut history, &mut state); assert_eq!("a b\n", state.doc); commit_change(&mut history, &mut state, (1, 3, None), t(40)); assert_eq!("a\n", state.doc); commit_change(&mut history, &mut state, (1, 1, Some(" f".into())), t(50)); assert_eq!("a f\n", state.doc); use UndoKind::*; earlier(&mut history, &mut state, Steps(3)); assert_eq!("a b c d\n", state.doc); later(&mut history, &mut state, TimePeriod(Duration::new(20, 0))); assert_eq!("a\n", state.doc); earlier(&mut history, &mut state, TimePeriod(Duration::new(19, 0))); assert_eq!("a b c d\n", state.doc); earlier( &mut history, &mut state, TimePeriod(Duration::new(10000, 0)), ); assert_eq!("a\n", state.doc); later(&mut history, &mut state, Steps(50)); assert_eq!("a f\n", state.doc); earlier(&mut history, &mut state, Steps(4)); assert_eq!("a b c\n", state.doc); later(&mut history, &mut state, TimePeriod(Duration::new(1, 0))); assert_eq!("a b c\n", state.doc); later(&mut history, &mut state, TimePeriod(Duration::new(5, 0))); assert_eq!("a b c d\n", state.doc); later(&mut history, &mut state, TimePeriod(Duration::new(6, 0))); assert_eq!("a b c e\n", state.doc); later(&mut history, &mut state, Steps(1)); assert_eq!("a\n", state.doc); } #[test] fn test_parse_undo_kind() { use UndoKind::*; // Default is one step. assert_eq!("".parse(), Ok(Steps(1))); // An integer means the number of steps. assert_eq!("1".parse(), Ok(Steps(1))); assert_eq!(" 16 ".parse(), Ok(Steps(16))); // Duration has a strict format. 
let validation_err = Err("duration should be composed \ of positive integers followed by time units" .to_string()); assert_eq!(" 16 33".parse::(), validation_err); assert_eq!(" seconds 22 ".parse::(), validation_err); assert_eq!(" -4 m".parse::(), validation_err); assert_eq!("5s 3".parse::(), validation_err); // Units are u64. assert_eq!( "18446744073709551616minutes".parse::(), Err("integer too large: 18446744073709551616".to_string()) ); // Units are validated. assert_eq!( "1 millennium".parse::(), Err("incorrect time unit: millennium".to_string()) ); // Units can't be specified twice. assert_eq!( "2 seconds 6s".parse::(), Err("seconds specified more than once".to_string()) ); // Various formats are correctly handled. assert_eq!( "4s".parse::(), Ok(TimePeriod(Duration::from_secs(4))) ); assert_eq!( "2m".parse::(), Ok(TimePeriod(Duration::from_secs(120))) ); assert_eq!( "5h".parse::(), Ok(TimePeriod(Duration::from_secs(5 * 60 * 60))) ); assert_eq!( "3d".parse::(), Ok(TimePeriod(Duration::from_secs(3 * 24 * 60 * 60))) ); assert_eq!( "1m30s".parse::(), Ok(TimePeriod(Duration::from_secs(90))) ); assert_eq!( "1m 20 seconds".parse::(), Ok(TimePeriod(Duration::from_secs(80))) ); assert_eq!( " 2 minute 1day".parse::(), Ok(TimePeriod(Duration::from_secs(24 * 60 * 60 + 2 * 60))) ); assert_eq!( "3 d 2hour 5 minutes 30sec".parse::(), Ok(TimePeriod(Duration::from_secs( 3 * 24 * 60 * 60 + 2 * 60 * 60 + 5 * 60 + 30 ))) ); // Sum overflow is handled. 
assert_eq!( "18446744073709551615minutes".parse::(), Err("duration too large".to_string()) ); assert_eq!( "1 minute 18446744073709551615 seconds".parse::(), Err("duration too large".to_string()) ); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/increment/000077500000000000000000000000001500614314100244175ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/increment/date_time.rs000066400000000000000000000233331500614314100267240ustar00rootroot00000000000000use chrono::{Duration, NaiveDate, NaiveDateTime, NaiveTime}; use once_cell::sync::Lazy; use regex::Regex; use std::fmt::Write; /// Increment a Date or DateTime /// /// If just a Date is selected the day will be incremented. /// If a DateTime is selected the second will be incremented. pub fn increment(selected_text: &str, amount: i64) -> Option { if selected_text.is_empty() { return None; } FORMATS.iter().find_map(|format| { let captures = format.regex.captures(selected_text)?; if captures.len() - 1 != format.fields.len() { return None; } let date_time = captures.get(0)?; let has_date = format.fields.iter().any(|f| f.unit.is_date()); let has_time = format.fields.iter().any(|f| f.unit.is_time()); let date_time = &selected_text[date_time.start()..date_time.end()]; match (has_date, has_time) { (true, true) => { let date_time = NaiveDateTime::parse_from_str(date_time, format.fmt).ok()?; Some( date_time .checked_add_signed(Duration::try_minutes(amount)?)? .format(format.fmt) .to_string(), ) } (true, false) => { let date = NaiveDate::parse_from_str(date_time, format.fmt).ok()?; Some( date.checked_add_signed(Duration::try_days(amount)?)? 
.format(format.fmt) .to_string(), ) } (false, true) => { let time = NaiveTime::parse_from_str(date_time, format.fmt).ok()?; let (adjusted_time, _) = time.overflowing_add_signed(Duration::try_minutes(amount)?); Some(adjusted_time.format(format.fmt).to_string()) } (false, false) => None, } }) } static FORMATS: Lazy> = Lazy::new(|| { vec![ Format::new("%Y-%m-%d %H:%M:%S"), // 2021-11-24 07:12:23 Format::new("%Y/%m/%d %H:%M:%S"), // 2021/11/24 07:12:23 Format::new("%Y-%m-%d %H:%M"), // 2021-11-24 07:12 Format::new("%Y/%m/%d %H:%M"), // 2021/11/24 07:12 Format::new("%Y-%m-%d"), // 2021-11-24 Format::new("%Y/%m/%d"), // 2021/11/24 Format::new("%a %b %d %Y"), // Wed Nov 24 2021 Format::new("%d-%b-%Y"), // 24-Nov-2021 Format::new("%Y %b %d"), // 2021 Nov 24 Format::new("%b %d, %Y"), // Nov 24, 2021 Format::new("%-I:%M:%S %P"), // 7:21:53 am Format::new("%-I:%M %P"), // 7:21 am Format::new("%-I:%M:%S %p"), // 7:21:53 AM Format::new("%-I:%M %p"), // 7:21 AM Format::new("%H:%M:%S"), // 23:24:23 Format::new("%H:%M"), // 23:24 ] }); #[derive(Debug)] struct Format { fmt: &'static str, fields: Vec, regex: Regex, max_len: usize, } impl Format { fn new(fmt: &'static str) -> Self { let mut remaining = fmt; let mut fields = Vec::new(); let mut regex = "^".to_string(); let mut max_len = 0; while let Some(i) = remaining.find('%') { let after = &remaining[i + 1..]; let mut chars = after.chars(); let c = chars.next().unwrap(); let spec_len = if c == '-' { 1 + chars.next().unwrap().len_utf8() } else { c.len_utf8() }; let specifier = &after[..spec_len]; let field = DateField::from_specifier(specifier).unwrap(); fields.push(field); max_len += field.max_len + remaining[..i].len(); regex += &remaining[..i]; write!(regex, "({})", field.regex).unwrap(); remaining = &after[spec_len..]; } regex += "$"; let regex = Regex::new(®ex).unwrap(); Self { fmt, fields, regex, max_len, } } } impl PartialEq for Format { fn eq(&self, other: &Self) -> bool { self.fmt == other.fmt && self.fields == other.fields 
&& self.max_len == other.max_len } } impl Eq for Format {} #[derive(Copy, Clone, Debug, PartialEq, Eq)] struct DateField { regex: &'static str, unit: DateUnit, max_len: usize, } impl DateField { fn from_specifier(specifier: &str) -> Option { match specifier { "Y" => Some(Self { regex: r"\d{4}", unit: DateUnit::Years, max_len: 5, }), "y" => Some(Self { regex: r"\d\d", unit: DateUnit::Years, max_len: 2, }), "m" => Some(Self { regex: r"[0-1]\d", unit: DateUnit::Months, max_len: 2, }), "d" => Some(Self { regex: r"[0-3]\d", unit: DateUnit::Days, max_len: 2, }), "-d" => Some(Self { regex: r"[1-3]?\d", unit: DateUnit::Days, max_len: 2, }), "a" => Some(Self { regex: r"Sun|Mon|Tue|Wed|Thu|Fri|Sat", unit: DateUnit::Days, max_len: 3, }), "A" => Some(Self { regex: r"Sunday|Monday|Tuesday|Wednesday|Thursday|Friday|Saturday", unit: DateUnit::Days, max_len: 9, }), "b" | "h" => Some(Self { regex: r"Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec", unit: DateUnit::Months, max_len: 3, }), "B" => Some(Self { regex: r"January|February|March|April|May|June|July|August|September|October|November|December", unit: DateUnit::Months, max_len: 9, }), "H" => Some(Self { regex: r"[0-2]\d", unit: DateUnit::Hours, max_len: 2, }), "M" => Some(Self { regex: r"[0-5]\d", unit: DateUnit::Minutes, max_len: 2, }), "S" => Some(Self { regex: r"[0-5]\d", unit: DateUnit::Seconds, max_len: 2, }), "I" => Some(Self { regex: r"[0-1]\d", unit: DateUnit::Hours, max_len: 2, }), "-I" => Some(Self { regex: r"1?\d", unit: DateUnit::Hours, max_len: 2, }), "P" => Some(Self { regex: r"am|pm", unit: DateUnit::AmPm, max_len: 2, }), "p" => Some(Self { regex: r"AM|PM", unit: DateUnit::AmPm, max_len: 2, }), _ => None, } } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] enum DateUnit { Years, Months, Days, Hours, Minutes, Seconds, AmPm, } impl DateUnit { fn is_date(self) -> bool { matches!(self, DateUnit::Years | DateUnit::Months | DateUnit::Days) } fn is_time(self) -> bool { matches!( self, DateUnit::Hours | 
DateUnit::Minutes | DateUnit::Seconds ) } } #[cfg(test)] mod test { use super::*; #[test] fn test_increment_date_times() { let tests = [ // (original, cursor, amount, expected) ("2020-02-28", 1, "2020-02-29"), ("2020-02-29", 1, "2020-03-01"), ("2020-01-31", 1, "2020-02-01"), ("2020-01-20", 1, "2020-01-21"), ("2021-01-01", -1, "2020-12-31"), ("2021-01-31", -2, "2021-01-29"), ("2020-02-28", 1, "2020-02-29"), ("2021-02-28", 1, "2021-03-01"), ("2021-03-01", -1, "2021-02-28"), ("2020-02-29", -1, "2020-02-28"), ("2020-02-20", -1, "2020-02-19"), ("2021-03-01", -1, "2021-02-28"), ("1980/12/21", 100, "1981/03/31"), ("1980/12/21", -100, "1980/09/12"), ("1980/12/21", 1000, "1983/09/17"), ("1980/12/21", -1000, "1978/03/27"), ("2021-11-24 07:12:23", 1, "2021-11-24 07:13:23"), ("2021-11-24 07:12", 1, "2021-11-24 07:13"), ("Wed Nov 24 2021", 1, "Thu Nov 25 2021"), ("24-Nov-2021", 1, "25-Nov-2021"), ("2021 Nov 24", 1, "2021 Nov 25"), ("Nov 24, 2021", 1, "Nov 25, 2021"), ("7:21:53 am", 1, "7:22:53 am"), ("7:21:53 AM", 1, "7:22:53 AM"), ("7:21 am", 1, "7:22 am"), ("23:24:23", 1, "23:25:23"), ("23:24", 1, "23:25"), ("23:59", 1, "00:00"), ("23:59:59", 1, "00:00:59"), ]; for (original, amount, expected) in tests { assert_eq!(increment(original, amount).unwrap(), expected); } } #[test] fn test_invalid_date_times() { let tests = [ "0000-00-00", "1980-2-21", "1980-12-1", "12345", "2020-02-30", "1999-12-32", "19-12-32", "1-2-3", "0000/00/00", "1980/2/21", "1980/12/1", "12345", "2020/02/30", "1999/12/32", "19/12/32", "1/2/3", "123:456:789", "11:61", "2021-55-12 08:12:54", ]; for invalid in tests { assert_eq!(increment(invalid, 1), None) } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/increment/integer.rs000066400000000000000000000175611500614314100264340ustar00rootroot00000000000000const SEPARATOR: char = '_'; /// Increment an integer. 
/// /// Supported bases: /// 2 with prefix 0b /// 8 with prefix 0o /// 10 with no prefix /// 16 with prefix 0x /// /// An integer can contain `_` as a separator but may not start or end with a separator. /// Base 10 integers can go negative, but bases 2, 8, and 16 cannot. /// All addition and subtraction is saturating. pub fn increment(selected_text: &str, amount: i64) -> Option { if selected_text.is_empty() || selected_text.ends_with(SEPARATOR) || selected_text.starts_with(SEPARATOR) { return None; } let radix = if selected_text.starts_with("0x") { 16 } else if selected_text.starts_with("0o") { 8 } else if selected_text.starts_with("0b") { 2 } else { 10 }; // Get separator indexes from right to left. let separator_rtl_indexes: Vec = selected_text .chars() .rev() .enumerate() .filter_map(|(i, c)| if c == SEPARATOR { Some(i) } else { None }) .collect(); let word: String = selected_text.chars().filter(|&c| c != SEPARATOR).collect(); let mut new_text = if radix == 10 { let number = &word; let value = i128::from_str_radix(number, radix).ok()?; let new_value = value.saturating_add(amount as i128); let format_length = match (value.is_negative(), new_value.is_negative()) { (true, false) => number.len() - 1, (false, true) => number.len() + 1, _ => number.len(), } - separator_rtl_indexes.len(); if number.starts_with('0') || number.starts_with("-0") { format!("{:01$}", new_value, format_length) } else { format!("{}", new_value) } } else { let number = &word[2..]; let value = u128::from_str_radix(number, radix).ok()?; let new_value = (value as i128).saturating_add(amount as i128); let new_value = if new_value < 0 { 0 } else { new_value }; let format_length = selected_text.len() - 2 - separator_rtl_indexes.len(); match radix { 2 => format!("0b{:01$b}", new_value, format_length), 8 => format!("0o{:01$o}", new_value, format_length), 16 => { let (lower_count, upper_count): (usize, usize) = number.chars().fold((0, 0), |(lower, upper), c| { ( lower + c.is_ascii_lowercase() as 
usize, upper + c.is_ascii_uppercase() as usize, ) }); if upper_count > lower_count { format!("0x{:01$X}", new_value, format_length) } else { format!("0x{:01$x}", new_value, format_length) } } _ => unimplemented!("radix not supported: {}", radix), } }; // Add separators from original number. for &rtl_index in &separator_rtl_indexes { if rtl_index < new_text.len() { let new_index = new_text.len().saturating_sub(rtl_index); if new_index > 0 { new_text.insert(new_index, SEPARATOR); } } } // Add in additional separators if necessary. if new_text.len() > selected_text.len() && !separator_rtl_indexes.is_empty() { let spacing = match separator_rtl_indexes.as_slice() { [.., b, a] => a - b - 1, _ => separator_rtl_indexes[0], }; let prefix_length = if radix == 10 { 0 } else { 2 }; if let Some(mut index) = new_text.find(SEPARATOR) { while index - prefix_length > spacing { index -= spacing; new_text.insert(index, SEPARATOR); } } } Some(new_text) } #[cfg(test)] mod test { use super::*; #[test] fn test_increment_basic_decimal_numbers() { let tests = [ ("100", 1, "101"), ("100", -1, "99"), ("99", 1, "100"), ("100", 1000, "1100"), ("100", -1000, "-900"), ("-1", 1, "0"), ("-1", 2, "1"), ("1", -1, "0"), ("1", -2, "-1"), ]; for (original, amount, expected) in tests { assert_eq!(increment(original, amount).unwrap(), expected); } } #[test] fn test_increment_basic_hexadecimal_numbers() { let tests = [ ("0x0100", 1, "0x0101"), ("0x0100", -1, "0x00ff"), ("0x0001", -1, "0x0000"), ("0x0000", -1, "0x0000"), ("0xffffffffffffffff", 1, "0x10000000000000000"), ("0xffffffffffffffff", 2, "0x10000000000000001"), ("0xffffffffffffffff", -1, "0xfffffffffffffffe"), ("0xABCDEF1234567890", 1, "0xABCDEF1234567891"), ("0xabcdef1234567890", 1, "0xabcdef1234567891"), ]; for (original, amount, expected) in tests { assert_eq!(increment(original, amount).unwrap(), expected); } } #[test] fn test_increment_basic_octal_numbers() { let tests = [ ("0o0107", 1, "0o0110"), ("0o0110", -1, "0o0107"), ("0o0001", -1, 
"0o0000"), ("0o7777", 1, "0o10000"), ("0o1000", -1, "0o0777"), ("0o0107", 10, "0o0121"), ("0o0000", -1, "0o0000"), ("0o1777777777777777777777", 1, "0o2000000000000000000000"), ("0o1777777777777777777777", 2, "0o2000000000000000000001"), ("0o1777777777777777777777", -1, "0o1777777777777777777776"), ]; for (original, amount, expected) in tests { assert_eq!(increment(original, amount).unwrap(), expected); } } #[test] fn test_increment_basic_binary_numbers() { let tests = [ ("0b00000100", 1, "0b00000101"), ("0b00000100", -1, "0b00000011"), ("0b00000100", 2, "0b00000110"), ("0b00000100", -2, "0b00000010"), ("0b00000001", -1, "0b00000000"), ("0b00111111", 10, "0b01001001"), ("0b11111111", 1, "0b100000000"), ("0b10000000", -1, "0b01111111"), ("0b0000", -1, "0b0000"), ( "0b1111111111111111111111111111111111111111111111111111111111111111", 1, "0b10000000000000000000000000000000000000000000000000000000000000000", ), ( "0b1111111111111111111111111111111111111111111111111111111111111111", 2, "0b10000000000000000000000000000000000000000000000000000000000000001", ), ( "0b1111111111111111111111111111111111111111111111111111111111111111", -1, "0b1111111111111111111111111111111111111111111111111111111111111110", ), ]; for (original, amount, expected) in tests { assert_eq!(increment(original, amount).unwrap(), expected); } } #[test] fn test_increment_with_separators() { let tests = [ ("999_999", 1, "1_000_000"), ("1_000_000", -1, "999_999"), ("-999_999", -1, "-1_000_000"), ("0x0000_0000_0001", 0x1_ffff_0000, "0x0001_ffff_0001"), ("0x0000_0000", -1, "0x0000_0000"), ("0x0000_0000_0000", -1, "0x0000_0000_0000"), ("0b01111111_11111111", 1, "0b10000000_00000000"), ("0b11111111_11111111", 1, "0b1_00000000_00000000"), ]; for (original, amount, expected) in tests { assert_eq!(increment(original, amount).unwrap(), expected); } } #[test] fn test_leading_and_trailing_separators_arent_a_match() { assert_eq!(increment("9_", 1), None); assert_eq!(increment("_9", 1), None); 
assert_eq!(increment("_9_", 1), None); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/increment/mod.rs000066400000000000000000000004141500614314100255430ustar00rootroot00000000000000mod date_time; mod integer; pub fn integer(selected_text: &str, amount: i64) -> Option { integer::increment(selected_text, amount) } pub fn date_time(selected_text: &str, amount: i64) -> Option { date_time::increment(selected_text, amount) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/indent.rs000066400000000000000000001352211500614314100242660ustar00rootroot00000000000000use std::{borrow::Cow, collections::HashMap, iter}; use helix_stdx::rope::RopeSliceExt; use tree_sitter::{Query, QueryCursor, QueryPredicateArg}; use crate::{ chars::{char_is_line_ending, char_is_whitespace}, graphemes::{grapheme_width, tab_width_at}, syntax::{IndentationHeuristic, LanguageConfiguration, RopeProvider, Syntax}, tree_sitter::Node, Position, Rope, RopeSlice, Tendril, }; /// Enum representing indentation style. /// /// Only values 1-8 are valid for the `Spaces` variant. #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum IndentStyle { Tabs, Spaces(u8), } // 16 spaces const INDENTS: &str = " "; pub const MAX_INDENT: u8 = 16; impl IndentStyle { /// Creates an `IndentStyle` from an indentation string. /// /// For example, passing `" "` (four spaces) will create `IndentStyle::Spaces(4)`. #[allow(clippy::should_implement_trait)] #[inline] pub fn from_str(indent: &str) -> Self { // XXX: do we care about validating the input more than this? Probably not...? debug_assert!(!indent.is_empty() && indent.len() <= MAX_INDENT as usize); if indent.starts_with(' ') { IndentStyle::Spaces(indent.len().clamp(1, MAX_INDENT as usize) as u8) } else { IndentStyle::Tabs } } #[inline] pub fn as_str(&self) -> &'static str { match *self { IndentStyle::Tabs => "\t", IndentStyle::Spaces(n) => { // Unsupported indentation style. 
This should never happen, debug_assert!(n > 0 && n <= MAX_INDENT); // Either way, clamp to the nearest supported value let closest_n = n.clamp(1, MAX_INDENT) as usize; &INDENTS[0..closest_n] } } } #[inline] pub fn indent_width(&self, tab_width: usize) -> usize { match *self { IndentStyle::Tabs => tab_width, IndentStyle::Spaces(width) => width as usize, } } } /// Attempts to detect the indentation style used in a document. /// /// Returns the indentation style if the auto-detect confidence is /// reasonably high, otherwise returns `None`. pub fn auto_detect_indent_style(document_text: &Rope) -> Option { // Build a histogram of the indentation *increases* between // subsequent lines, ignoring lines that are all whitespace. // // Index 0 is for tabs, the rest are 1-MAX_INDENT spaces. let histogram: [usize; MAX_INDENT as usize + 1] = { let mut histogram = [0; MAX_INDENT as usize + 1]; let mut prev_line_is_tabs = false; let mut prev_line_leading_count = 0usize; // Loop through the lines, checking for and recording indentation // increases as we go. 'outer: for line in document_text.lines().take(1000) { let mut c_iter = line.chars(); // Is first character a tab or space? let is_tabs = match c_iter.next() { Some('\t') => true, Some(' ') => false, // Ignore blank lines. Some(c) if char_is_line_ending(c) => continue, _ => { prev_line_is_tabs = false; prev_line_leading_count = 0; continue; } }; // Count the line's total leading tab/space characters. let mut leading_count = 1; let mut count_is_done = false; for c in c_iter { match c { '\t' if is_tabs && !count_is_done => leading_count += 1, ' ' if !is_tabs && !count_is_done => leading_count += 1, // We stop counting if we hit whitespace that doesn't // qualify as indent or doesn't match the leading // whitespace, but we don't exit the loop yet because // we still want to determine if the line is blank. c if char_is_whitespace(c) => count_is_done = true, // Ignore blank lines. 
c if char_is_line_ending(c) => continue 'outer, _ => break, } // Bound the worst-case execution time for weird text files. if leading_count > 256 { continue 'outer; } } // If there was an increase in indentation over the previous // line, update the histogram with that increase. if (prev_line_is_tabs == is_tabs || prev_line_leading_count == 0) && prev_line_leading_count < leading_count { if is_tabs { histogram[0] += 1; } else { let amount = leading_count - prev_line_leading_count; if amount <= MAX_INDENT as usize { histogram[amount] += 1; } } } // Store this line's leading whitespace info for use with // the next line. prev_line_is_tabs = is_tabs; prev_line_leading_count = leading_count; } // Give more weight to tabs, because their presence is a very // strong indicator. histogram[0] *= 2; histogram }; // Find the most frequent indent, its frequency, and the frequency of // the next-most frequent indent. let indent = histogram .iter() .enumerate() .max_by_key(|kv| kv.1) .unwrap() .0; let indent_freq = histogram[indent]; let indent_freq_2 = *histogram .iter() .enumerate() .filter(|kv| kv.0 != indent) .map(|kv| kv.1) .max() .unwrap(); // Return the the auto-detected result if we're confident enough in its // accuracy, based on some heuristics. if indent_freq >= 1 && (indent_freq_2 as f64 / indent_freq as f64) < 0.66 { Some(match indent { 0 => IndentStyle::Tabs, _ => IndentStyle::Spaces(indent as u8), }) } else { None } } /// To determine indentation of a newly inserted line, figure out the indentation at the last col /// of the previous line. pub fn indent_level_for_line(line: RopeSlice, tab_width: usize, indent_width: usize) -> usize { let mut len = 0; for ch in line.chars() { match ch { '\t' => len += tab_width_at(len, tab_width as u16), ' ' => len += 1, _ => break, } } len / indent_width } /// Create a string of tabs & spaces that has the same visual width as the given RopeSlice (independent of the tab width). 
fn whitespace_with_same_width(text: RopeSlice) -> String { let mut s = String::new(); for grapheme in text.graphemes() { if grapheme == "\t" { s.push('\t'); } else { s.extend(std::iter::repeat(' ').take(grapheme_width(&Cow::from(grapheme)))); } } s } /// normalizes indentation to tabs/spaces based on user configuration /// This function does not change the actual indentation width, just the character /// composition. pub fn normalize_indentation( prefix: RopeSlice<'_>, line: RopeSlice<'_>, dst: &mut Tendril, indent_style: IndentStyle, tab_width: usize, ) -> usize { #[allow(deprecated)] let off = crate::visual_coords_at_pos(prefix, prefix.len_chars(), tab_width).col; let mut len = 0; let mut original_len = 0; for ch in line.chars() { match ch { '\t' => len += tab_width_at(len + off, tab_width as u16), ' ' => len += 1, _ => break, } original_len += 1; } if indent_style == IndentStyle::Tabs { dst.extend(iter::repeat('\t').take(len / tab_width)); len %= tab_width; } dst.extend(iter::repeat(' ').take(len)); original_len } fn add_indent_level( mut base_indent: String, added_indent_level: isize, indent_style: &IndentStyle, tab_width: usize, ) -> String { if added_indent_level >= 0 { // Adding a non-negative indent is easy, we can simply append the indent string base_indent.push_str(&indent_style.as_str().repeat(added_indent_level as usize)); base_indent } else { // In this case, we want to return a prefix of `base_indent`. // Since the width of a tab depends on its offset, we cannot simply iterate over // the chars of `base_indent` in reverse until we have the desired indent reduction, // instead we iterate over them twice in forward direction. 
let base_indent_rope = RopeSlice::from(base_indent.as_str()); #[allow(deprecated)] let base_indent_width = crate::visual_coords_at_pos(base_indent_rope, base_indent_rope.len_chars(), tab_width) .col; let target_indent_width = base_indent_width .saturating_sub((-added_indent_level) as usize * indent_style.indent_width(tab_width)); #[allow(deprecated)] let char_end_idx = crate::pos_at_visual_coords( base_indent_rope, Position { row: 0, col: target_indent_width, }, tab_width, ); let byte_end_idx = base_indent_rope.char_to_byte(char_end_idx); base_indent.truncate(byte_end_idx); base_indent } } /// Return true if only whitespace comes before the node on its line. /// If given, new_line_byte_pos is treated the same way as any existing newline. fn is_first_in_line(node: Node, text: RopeSlice, new_line_byte_pos: Option) -> bool { let mut line_start_byte_pos = text.line_to_byte(node.start_position().row); if let Some(pos) = new_line_byte_pos { if line_start_byte_pos < pos && pos <= node.start_byte() { line_start_byte_pos = pos; } } text.byte_slice(line_start_byte_pos..node.start_byte()) .chars() .all(|c| c.is_whitespace()) } /// The total indent for some line of code. /// This is usually constructed in one of 2 ways: /// - Successively add indent captures to get the (added) indent from a single line /// - Successively add the indent results for each line /// The string that this indentation defines starts with the string contained in the align field (unless it is None), followed by: /// - max(0, indent - outdent) tabs, if tabs are used for indentation /// - max(0, indent - outdent)*indent_width spaces, if spaces are used for indentation #[derive(Default, Debug, PartialEq, Eq, Clone)] pub struct Indentation<'a> { indent: usize, indent_always: usize, outdent: usize, outdent_always: usize, /// The alignment, as a string containing only tabs & spaces. Storing this as a string instead of e.g. 
/// the (visual) width ensures that the alignment is preserved even if the tab width changes. align: Option>, } impl<'a> Indentation<'a> { /// Add some other [Indentation] to this. /// The added indent should be the total added indent from one line. /// Indent should always be added starting from the bottom (or equivalently, the innermost tree-sitter node). fn add_line(&mut self, added: Indentation<'a>) { // Align overrides the indent from outer scopes. if self.align.is_some() { return; } if added.align.is_some() { self.align = added.align; return; } self.indent += added.indent; self.indent_always += added.indent_always; self.outdent += added.outdent; self.outdent_always += added.outdent_always; } /// Add an indent capture to this indent. /// Only captures that apply to the same line should be added together in this way (otherwise use `add_line`) /// and the captures should be added starting from the innermost tree-sitter node (currently this only matters /// if multiple `@align` patterns occur on the same line). fn add_capture(&mut self, added: IndentCaptureType<'a>) { match added { IndentCaptureType::Indent => { if self.indent_always == 0 { self.indent = 1; } } IndentCaptureType::IndentAlways => { // any time we encounter an `indent.always` on the same line, we // want to cancel out all regular indents self.indent_always += 1; self.indent = 0; } IndentCaptureType::Outdent => { if self.outdent_always == 0 { self.outdent = 1; } } IndentCaptureType::OutdentAlways => { self.outdent_always += 1; self.outdent = 0; } IndentCaptureType::Align(align) => { if self.align.is_none() { self.align = Some(align); } } } } fn net_indent(&self) -> isize { (self.indent + self.indent_always) as isize - ((self.outdent + self.outdent_always) as isize) } /// Convert `self` into a string, taking into account the computed and actual indentation of some other line. 
fn relative_indent( &self, other_computed_indent: &Self, other_leading_whitespace: RopeSlice, indent_style: &IndentStyle, tab_width: usize, ) -> Option { if self.align == other_computed_indent.align { // If self and baseline are either not aligned to anything or both aligned the same way, // we can simply take `other_leading_whitespace` and add some indent / outdent to it (in the second // case, the alignment should already be accounted for in `other_leading_whitespace`). let indent_diff = self.net_indent() - other_computed_indent.net_indent(); Some(add_indent_level( String::from(other_leading_whitespace), indent_diff, indent_style, tab_width, )) } else { // If the alignment of both lines is different, we cannot compare their indentation in any meaningful way None } } pub fn to_string(&self, indent_style: &IndentStyle, tab_width: usize) -> String { add_indent_level( self.align .map_or_else(String::new, whitespace_with_same_width), self.net_indent(), indent_style, tab_width, ) } } /// An indent definition which corresponds to a capture from the indent query #[derive(Debug)] struct IndentCapture<'a> { capture_type: IndentCaptureType<'a>, scope: IndentScope, } #[derive(Debug, Clone, PartialEq)] enum IndentCaptureType<'a> { Indent, IndentAlways, Outdent, OutdentAlways, /// Alignment given as a string of whitespace Align(RopeSlice<'a>), } impl IndentCaptureType<'_> { fn default_scope(&self) -> IndentScope { match self { IndentCaptureType::Indent | IndentCaptureType::IndentAlways => IndentScope::Tail, IndentCaptureType::Outdent | IndentCaptureType::OutdentAlways => IndentScope::All, IndentCaptureType::Align(_) => IndentScope::All, } } } /// This defines which part of a node an [IndentCapture] applies to. /// Each [IndentCaptureType] has a default scope, but the scope can be changed /// with `#set!` property declarations. 
#[derive(Debug, Clone, Copy)] enum IndentScope { /// The indent applies to the whole node All, /// The indent applies to everything except for the first line of the node Tail, } /// A capture from the indent query which does not define an indent but extends /// the range of a node. This is used before the indent is calculated. #[derive(Debug)] enum ExtendCapture { Extend, PreventOnce, } /// The result of running a tree-sitter indent query. This stores for /// each node (identified by its ID) the relevant captures (already filtered /// by predicates). #[derive(Debug)] struct IndentQueryResult<'a> { indent_captures: HashMap>>, extend_captures: HashMap>, } fn get_node_start_line(node: Node, new_line_byte_pos: Option) -> usize { let mut node_line = node.start_position().row; // Adjust for the new line that will be inserted if new_line_byte_pos.is_some_and(|pos| node.start_byte() >= pos) { node_line += 1; } node_line } fn get_node_end_line(node: Node, new_line_byte_pos: Option) -> usize { let mut node_line = node.end_position().row; // Adjust for the new line that will be inserted (with a strict inequality since end_byte is exclusive) if new_line_byte_pos.is_some_and(|pos| node.end_byte() > pos) { node_line += 1; } node_line } fn query_indents<'a>( query: &Query, syntax: &Syntax, cursor: &mut QueryCursor, text: RopeSlice<'a>, range: std::ops::Range, new_line_byte_pos: Option, ) -> IndentQueryResult<'a> { let mut indent_captures: HashMap> = HashMap::new(); let mut extend_captures: HashMap> = HashMap::new(); cursor.set_byte_range(range); // Iterate over all captures from the query for m in cursor.matches(query, syntax.tree().root_node(), RopeProvider(text)) { // Skip matches where not all custom predicates are fulfilled if !query.general_predicates(m.pattern_index).iter().all(|pred| { match pred.operator.as_ref() { "not-kind-eq?" 
=> match (pred.args.first(), pred.args.get(1)) { ( Some(QueryPredicateArg::Capture(capture_idx)), Some(QueryPredicateArg::String(kind)), ) => { let node = m.nodes_for_capture_index(*capture_idx).next(); match node { Some(node) => node.kind()!=kind.as_ref(), _ => true, } } _ => { panic!("Invalid indent query: Arguments to \"not-kind-eq?\" must be a capture and a string"); } }, "same-line?" | "not-same-line?" => { match (pred.args.first(), pred.args.get(1)) { ( Some(QueryPredicateArg::Capture(capt1)), Some(QueryPredicateArg::Capture(capt2)) ) => { let n1 = m.nodes_for_capture_index(*capt1).next(); let n2 = m.nodes_for_capture_index(*capt2).next(); match (n1, n2) { (Some(n1), Some(n2)) => { let n1_line = get_node_start_line(n1, new_line_byte_pos); let n2_line = get_node_start_line(n2, new_line_byte_pos); let same_line = n1_line == n2_line; same_line==(pred.operator.as_ref()=="same-line?") } _ => true, } } _ => { panic!("Invalid indent query: Arguments to \"{}\" must be 2 captures", pred.operator); } } } "one-line?" | "not-one-line?" => match pred.args.first() { Some(QueryPredicateArg::Capture(capture_idx)) => { let node = m.nodes_for_capture_index(*capture_idx).next(); match node { Some(node) => { let (start_line, end_line) = (get_node_start_line(node,new_line_byte_pos), get_node_end_line(node, new_line_byte_pos)); let one_line = end_line == start_line; one_line != (pred.operator.as_ref() == "not-one-line?") }, _ => true, } } _ => { panic!("Invalid indent query: Arguments to \"not-kind-eq?\" must be a capture and a string"); } }, _ => { panic!( "Invalid indent query: Unknown predicate (\"{}\")", pred.operator ); } } }) { continue; } // A list of pairs (node_id, indent_capture) that are added by this match. // They cannot be added to indent_captures immediately since they may depend on other captures (such as an @anchor). 
let mut added_indent_captures: Vec<(usize, IndentCapture)> = Vec::new(); // The row/column position of the optional anchor in this query let mut anchor: Option = None; for capture in m.captures { let capture_name = query.capture_names()[capture.index as usize]; let capture_type = match capture_name { "indent" => IndentCaptureType::Indent, "indent.always" => IndentCaptureType::IndentAlways, "outdent" => IndentCaptureType::Outdent, "outdent.always" => IndentCaptureType::OutdentAlways, // The alignment will be updated to the correct value at the end, when the anchor is known. "align" => IndentCaptureType::Align(RopeSlice::from("")), "anchor" => { if anchor.is_some() { log::error!("Invalid indent query: Encountered more than one @anchor in the same match.") } else { anchor = Some(capture.node); } continue; } "extend" => { extend_captures .entry(capture.node.id()) .or_insert_with(|| Vec::with_capacity(1)) .push(ExtendCapture::Extend); continue; } "extend.prevent-once" => { extend_captures .entry(capture.node.id()) .or_insert_with(|| Vec::with_capacity(1)) .push(ExtendCapture::PreventOnce); continue; } _ => { // Ignore any unknown captures (these may be needed for predicates such as #match?) 
continue; } }; let scope = capture_type.default_scope(); let mut indent_capture = IndentCapture { capture_type, scope, }; // Apply additional settings for this capture for property in query.property_settings(m.pattern_index) { match property.key.as_ref() { "scope" => { indent_capture.scope = match property.value.as_deref() { Some("all") => IndentScope::All, Some("tail") => IndentScope::Tail, Some(s) => { panic!("Invalid indent query: Unknown value for \"scope\" property (\"{}\")", s); } None => { panic!( "Invalid indent query: Missing value for \"scope\" property" ); } } } _ => { panic!( "Invalid indent query: Unknown property \"{}\"", property.key ); } } } added_indent_captures.push((capture.node.id(), indent_capture)) } for (node_id, mut capture) in added_indent_captures { // Set the anchor for all align queries. if let IndentCaptureType::Align(_) = capture.capture_type { let anchor = match anchor { None => { log::error!( "Invalid indent query: @align requires an accompanying @anchor." ); continue; } Some(anchor) => anchor, }; capture.capture_type = IndentCaptureType::Align( text.line(anchor.start_position().row) .byte_slice(0..anchor.start_position().column), ); } indent_captures .entry(node_id) .or_insert_with(|| Vec::with_capacity(1)) .push(capture); } } let result = IndentQueryResult { indent_captures, extend_captures, }; log::trace!("indent result = {:?}", result); result } /// Handle extend queries. deepest_preceding is the deepest descendant of node that directly precedes the cursor position. /// Any ancestor of deepest_preceding which is also a descendant of node may be "extended". In that case, node will be updated, /// so that the indent computation starts with the correct syntax node. 
fn extend_nodes<'a>( node: &mut Node<'a>, mut deepest_preceding: Node<'a>, extend_captures: &HashMap>, text: RopeSlice, line: usize, tab_width: usize, indent_width: usize, ) { let mut stop_extend = false; while deepest_preceding != *node { let mut extend_node = false; // This will be set to true if this node is captured, regardless of whether // it actually will be extended (e.g. because the cursor isn't indented // more than the node). let mut node_captured = false; if let Some(captures) = extend_captures.get(&deepest_preceding.id()) { for capture in captures { match capture { ExtendCapture::PreventOnce => { stop_extend = true; } ExtendCapture::Extend => { node_captured = true; // We extend the node if // - the cursor is on the same line as the end of the node OR // - the line that the cursor is on is more indented than the // first line of the node if deepest_preceding.end_position().row == line { extend_node = true; } else { let cursor_indent = indent_level_for_line(text.line(line), tab_width, indent_width); let node_indent = indent_level_for_line( text.line(deepest_preceding.start_position().row), tab_width, indent_width, ); if cursor_indent > node_indent { extend_node = true; } } } } } } // If we encountered some `StopExtend` capture before, we don't // extend the node even if we otherwise would if node_captured && stop_extend { stop_extend = false; } else if extend_node && !stop_extend { *node = deepest_preceding; break; } // If the tree contains a syntax error, `deepest_preceding` may not // have a parent despite being a descendant of `node`. deepest_preceding = match deepest_preceding.parent() { Some(parent) => parent, None => return, } } } /// Prepare an indent query by computing: /// - The node from which to start the query (this is non-trivial due to `@extend` captures) /// - The indent captures for all relevant nodes. 
#[allow(clippy::too_many_arguments)] fn init_indent_query<'a, 'b>( query: &Query, syntax: &'a Syntax, text: RopeSlice<'b>, tab_width: usize, indent_width: usize, line: usize, byte_pos: usize, new_line_byte_pos: Option, ) -> Option<(Node<'a>, HashMap>>)> { // The innermost tree-sitter node which is considered for the indent // computation. It may change if some predeceding node is extended let mut node = syntax .tree() .root_node() .descendant_for_byte_range(byte_pos, byte_pos)?; let (query_result, deepest_preceding) = { // The query range should intersect with all nodes directly preceding // the position of the indent query in case one of them is extended. let mut deepest_preceding = None; // The deepest node preceding the indent query position let mut tree_cursor = node.walk(); for child in node.children(&mut tree_cursor) { if child.byte_range().end <= byte_pos { deepest_preceding = Some(child); } } deepest_preceding = deepest_preceding.map(|mut prec| { // Get the deepest directly preceding node while prec.child_count() > 0 { prec = prec.child(prec.child_count() - 1).unwrap(); } prec }); let query_range = deepest_preceding .map(|prec| prec.byte_range().end - 1..byte_pos + 1) .unwrap_or(byte_pos..byte_pos + 1); crate::syntax::PARSER.with(|ts_parser| { let mut ts_parser = ts_parser.borrow_mut(); let mut cursor = ts_parser.cursors.pop().unwrap_or_default(); let query_result = query_indents( query, syntax, &mut cursor, text, query_range, new_line_byte_pos, ); ts_parser.cursors.push(cursor); (query_result, deepest_preceding) }) }; let extend_captures = query_result.extend_captures; // Check for extend captures, potentially changing the node that the indent calculation starts with if let Some(deepest_preceding) = deepest_preceding { extend_nodes( &mut node, deepest_preceding, &extend_captures, text, line, tab_width, indent_width, ); } Some((node, query_result.indent_captures)) } /// Use the syntax tree to determine the indentation for a given position. 
/// This can be used in 2 ways: /// /// - To get the correct indentation for an existing line (new_line=false), not necessarily equal to the current indentation. /// - In this case, pos should be inside the first tree-sitter node on that line. /// In most cases, this can just be the first non-whitespace on that line. /// - To get the indentation for a new line (new_line=true). This behaves like the first usecase if the part of the current line /// after pos were moved to a new line. /// /// The indentation is determined by traversing all the tree-sitter nodes containing the position. /// Each of these nodes produces some [Indentation] for: /// /// - The line of the (beginning of the) node. This is defined by the scope `all` if this is the first node on its line. /// - The line after the node. This is defined by: /// - The scope `tail`. /// - The scope `all` if this node is not the first node on its line. /// /// Intuitively, `all` applies to everything contained in this node while `tail` applies to everything except for the first line of the node. /// The indents from different nodes for the same line are then combined. /// The result [Indentation] is simply the sum of the [Indentation] for all lines. /// /// Specifying which line exactly an [Indentation] applies to is important because indents on the same line combine differently than indents on different lines: /// ```ignore /// some_function(|| { /// // Both the function parameters as well as the contained block should be indented. 
/// // Because they are on the same line, this only yields one indent level /// }); /// ``` /// /// ```ignore /// some_function( /// param1, /// || { /// // Here we get 2 indent levels because the 'parameters' and the 'block' node begin on different lines /// }, /// ); /// ``` #[allow(clippy::too_many_arguments)] pub fn treesitter_indent_for_pos<'a>( query: &Query, syntax: &Syntax, tab_width: usize, indent_width: usize, text: RopeSlice<'a>, line: usize, pos: usize, new_line: bool, ) -> Option> { let byte_pos = text.char_to_byte(pos); let new_line_byte_pos = new_line.then_some(byte_pos); let (mut node, mut indent_captures) = init_indent_query( query, syntax, text, tab_width, indent_width, line, byte_pos, new_line_byte_pos, )?; let mut result = Indentation::default(); // We always keep track of all the indent changes on one line, in order to only indent once // even if there are multiple "indent" nodes on the same line let mut indent_for_line = Indentation::default(); let mut indent_for_line_below = Indentation::default(); loop { let is_first = is_first_in_line(node, text, new_line_byte_pos); // Apply all indent definitions for this node. // Since we only iterate over each node once, we can remove the // corresponding captures from the HashMap to avoid cloning them. 
if let Some(definitions) = indent_captures.remove(&node.id()) { for definition in definitions { match definition.scope { IndentScope::All => { if is_first { indent_for_line.add_capture(definition.capture_type); } else { indent_for_line_below.add_capture(definition.capture_type); } } IndentScope::Tail => { indent_for_line_below.add_capture(definition.capture_type); } } } } if let Some(parent) = node.parent() { let node_line = get_node_start_line(node, new_line_byte_pos); let parent_line = get_node_start_line(parent, new_line_byte_pos); if node_line != parent_line { // Don't add indent for the line below the line of the query if node_line < line + (new_line as usize) { result.add_line(indent_for_line_below); } if node_line == parent_line + 1 { indent_for_line_below = indent_for_line; } else { result.add_line(indent_for_line); indent_for_line_below = Indentation::default(); } indent_for_line = Indentation::default(); } node = parent; } else { // Only add the indentation for the line below if that line // is not after the line that the indentation is calculated for. if (node.start_position().row < line) || (new_line && node.start_position().row == line && node.start_byte() < byte_pos) { result.add_line(indent_for_line_below); } result.add_line(indent_for_line); break; } } Some(result) } /// Returns the indentation for a new line. 
/// This is done either using treesitter, or if that's not available by copying the indentation from the current line #[allow(clippy::too_many_arguments)] pub fn indent_for_newline( language_config: Option<&LanguageConfiguration>, syntax: Option<&Syntax>, indent_heuristic: &IndentationHeuristic, indent_style: &IndentStyle, tab_width: usize, text: RopeSlice, line_before: usize, line_before_end_pos: usize, current_line: usize, ) -> String { let indent_width = indent_style.indent_width(tab_width); if let ( IndentationHeuristic::TreeSitter | IndentationHeuristic::Hybrid, Some(query), Some(syntax), ) = ( indent_heuristic, language_config.and_then(|config| config.indent_query()), syntax, ) { if let Some(indent) = treesitter_indent_for_pos( query, syntax, tab_width, indent_width, text, line_before, line_before_end_pos, true, ) { if *indent_heuristic == IndentationHeuristic::Hybrid { // We want to compute the indentation not only based on the // syntax tree but also on the actual indentation of a previous // line. This makes indentation computation more resilient to // incomplete queries, incomplete source code & differing indentation // styles for the same language. // However, using the indent of a previous line as a baseline may not // make sense, e.g. if it has a different alignment than the new line. // In order to prevent edge cases with long running times, we only try // a constant number of (non-empty) lines. 
const MAX_ATTEMPTS: usize = 4; let mut num_attempts = 0; for line_idx in (0..=line_before).rev() { let line = text.line(line_idx); let first_non_whitespace_char = match line.first_non_whitespace_char() { Some(i) => i, None => { continue; } }; if let Some(indent) = (|| { let computed_indent = treesitter_indent_for_pos( query, syntax, tab_width, indent_width, text, line_idx, text.line_to_char(line_idx) + first_non_whitespace_char, false, )?; let leading_whitespace = line.slice(0..first_non_whitespace_char); indent.relative_indent( &computed_indent, leading_whitespace, indent_style, tab_width, ) })() { return indent; } num_attempts += 1; if num_attempts == MAX_ATTEMPTS { break; } } } return indent.to_string(indent_style, tab_width); }; } // Fallback in case we either don't have indent queries or they failed for some reason let indent_level = indent_level_for_line(text.line(current_line), tab_width, indent_width); indent_style.as_str().repeat(indent_level) } pub fn get_scopes(syntax: Option<&Syntax>, text: RopeSlice, pos: usize) -> Vec<&'static str> { let mut scopes = Vec::new(); if let Some(syntax) = syntax { let pos = text.char_to_byte(pos); let mut node = match syntax .tree() .root_node() .descendant_for_byte_range(pos, pos) { Some(node) => node, None => return scopes, }; scopes.push(node.kind()); while let Some(parent) = node.parent() { scopes.push(parent.kind()); node = parent; } } scopes.reverse(); scopes } #[cfg(test)] mod test { use super::*; use crate::Rope; #[test] fn test_indent_level() { let tab_width = 4; let indent_width = 4; let line = Rope::from(" fn new"); // 8 spaces assert_eq!( indent_level_for_line(line.slice(..), tab_width, indent_width), 2 ); let line = Rope::from("\t\t\tfn new"); // 3 tabs assert_eq!( indent_level_for_line(line.slice(..), tab_width, indent_width), 3 ); // mixed indentation let line = Rope::from("\t \tfn new"); // 1 tab, 4 spaces, tab assert_eq!( indent_level_for_line(line.slice(..), tab_width, indent_width), 3 ); } #[test] fn 
test_large_indent_level() { let tab_width = 16; let indent_width = 16; let line = Rope::from(" fn new"); // 16 spaces assert_eq!( indent_level_for_line(line.slice(..), tab_width, indent_width), 1 ); let line = Rope::from(" fn new"); // 32 spaces assert_eq!( indent_level_for_line(line.slice(..), tab_width, indent_width), 2 ); } #[test] fn add_capture() { let indent = || Indentation { indent: 1, ..Default::default() }; let indent_always = || Indentation { indent_always: 1, ..Default::default() }; let outdent = || Indentation { outdent: 1, ..Default::default() }; let outdent_always = || Indentation { outdent_always: 1, ..Default::default() }; fn add_capture<'a>( mut indent: Indentation<'a>, capture: IndentCaptureType<'a>, ) -> Indentation<'a> { indent.add_capture(capture); indent } // adding an indent to no indent makes an indent assert_eq!( indent(), add_capture(Indentation::default(), IndentCaptureType::Indent) ); assert_eq!( indent_always(), add_capture(Indentation::default(), IndentCaptureType::IndentAlways) ); assert_eq!( outdent(), add_capture(Indentation::default(), IndentCaptureType::Outdent) ); assert_eq!( outdent_always(), add_capture(Indentation::default(), IndentCaptureType::OutdentAlways) ); // adding an indent to an already indented has no effect assert_eq!(indent(), add_capture(indent(), IndentCaptureType::Indent)); assert_eq!( outdent(), add_capture(outdent(), IndentCaptureType::Outdent) ); // adding an always to a regular makes it always assert_eq!( indent_always(), add_capture(indent(), IndentCaptureType::IndentAlways) ); assert_eq!( outdent_always(), add_capture(outdent(), IndentCaptureType::OutdentAlways) ); // adding an always to an always is additive assert_eq!( Indentation { indent_always: 2, ..Default::default() }, add_capture(indent_always(), IndentCaptureType::IndentAlways) ); assert_eq!( Indentation { outdent_always: 2, ..Default::default() }, add_capture(outdent_always(), IndentCaptureType::OutdentAlways) ); // adding regular to always 
should be associative assert_eq!( Indentation { indent_always: 1, ..Default::default() }, add_capture( add_capture(indent(), IndentCaptureType::Indent), IndentCaptureType::IndentAlways ) ); assert_eq!( Indentation { indent_always: 1, ..Default::default() }, add_capture( add_capture(indent(), IndentCaptureType::IndentAlways), IndentCaptureType::Indent ) ); assert_eq!( Indentation { outdent_always: 1, ..Default::default() }, add_capture( add_capture(outdent(), IndentCaptureType::Outdent), IndentCaptureType::OutdentAlways ) ); assert_eq!( Indentation { outdent_always: 1, ..Default::default() }, add_capture( add_capture(outdent(), IndentCaptureType::OutdentAlways), IndentCaptureType::Outdent ) ); } #[test] fn test_relative_indent() { let indent_style = IndentStyle::Spaces(4); let tab_width: usize = 4; let no_align = [ Indentation::default(), Indentation { indent: 1, ..Default::default() }, Indentation { indent: 5, outdent: 1, ..Default::default() }, ]; let align = no_align.clone().map(|indent| Indentation { align: Some(RopeSlice::from("12345")), ..indent }); let different_align = Indentation { align: Some(RopeSlice::from("123456")), ..Default::default() }; // Check that relative and absolute indentation computation are the same when the line we compare to is // indented as we expect. 
let check_consistency = |indent: &Indentation, other: &Indentation| { assert_eq!( indent.relative_indent( other, RopeSlice::from(other.to_string(&indent_style, tab_width).as_str()), &indent_style, tab_width ), Some(indent.to_string(&indent_style, tab_width)) ); }; for a in &no_align { for b in &no_align { check_consistency(a, b); } } for a in &align { for b in &align { check_consistency(a, b); } } // Relative indent computation makes no sense if the alignment differs assert_eq!( align[0].relative_indent( &no_align[0], RopeSlice::from(" "), &indent_style, tab_width ), None ); assert_eq!( align[0].relative_indent( &different_align, RopeSlice::from(" "), &indent_style, tab_width ), None ); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/lib.rs000066400000000000000000000033151500614314100235510ustar00rootroot00000000000000pub use encoding_rs as encoding; pub mod auto_pairs; pub mod case_conversion; pub mod chars; pub mod command_line; pub mod comment; pub mod completion; pub mod config; pub mod diagnostic; pub mod diff; pub mod doc_formatter; pub mod editor_config; pub mod fuzzy; pub mod graphemes; pub mod history; pub mod increment; pub mod indent; pub mod line_ending; pub mod macros; pub mod match_brackets; pub mod movement; pub mod object; mod position; pub mod search; pub mod selection; pub mod snippets; pub mod surround; pub mod syntax; pub mod test; pub mod text_annotations; pub mod textobject; mod transaction; pub mod uri; pub mod wrap; pub mod unicode { pub use unicode_general_category as category; pub use unicode_segmentation as segmentation; pub use unicode_width as width; } pub use helix_loader::find_workspace; mod rope_reader; pub use rope_reader::RopeReader; pub use ropey::{self, str_utils, Rope, RopeBuilder, RopeSlice}; // pub use tendril::StrTendril as Tendril; pub use smartstring::SmartString; pub type Tendril = SmartString; #[doc(inline)] pub use {regex, tree_sitter}; pub use position::{ char_idx_at_visual_offset, coords_at_pos, 
pos_at_coords, softwrapped_dimensions, visual_offset_from_anchor, visual_offset_from_block, Position, VisualOffsetError, }; #[allow(deprecated)] pub use position::{pos_at_visual_coords, visual_coords_at_pos}; pub use selection::{Range, Selection}; pub use smallvec::{smallvec, SmallVec}; pub use syntax::Syntax; pub use completion::CompletionItem; pub use diagnostic::Diagnostic; pub use line_ending::{LineEnding, NATIVE_LINE_ENDING}; pub use transaction::{Assoc, Change, ChangeSet, Deletion, Operation, Transaction}; pub use uri::Uri; helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/line_ending.rs000066400000000000000000000262471500614314100252670ustar00rootroot00000000000000use crate::{Rope, RopeSlice}; #[cfg(target_os = "windows")] pub const NATIVE_LINE_ENDING: LineEnding = LineEnding::Crlf; #[cfg(not(target_os = "windows"))] pub const NATIVE_LINE_ENDING: LineEnding = LineEnding::LF; /// Represents one of the valid Unicode line endings. #[derive(PartialEq, Eq, Copy, Clone, Debug)] pub enum LineEnding { Crlf, // CarriageReturn followed by LineFeed LF, // U+000A -- LineFeed #[cfg(feature = "unicode-lines")] VT, // U+000B -- VerticalTab #[cfg(feature = "unicode-lines")] FF, // U+000C -- FormFeed #[cfg(feature = "unicode-lines")] CR, // U+000D -- CarriageReturn #[cfg(feature = "unicode-lines")] Nel, // U+0085 -- NextLine #[cfg(feature = "unicode-lines")] LS, // U+2028 -- Line Separator #[cfg(feature = "unicode-lines")] PS, // U+2029 -- ParagraphSeparator } impl LineEnding { #[inline] pub const fn len_chars(&self) -> usize { match self { Self::Crlf => 2, _ => 1, } } #[inline] pub const fn as_str(&self) -> &'static str { match self { Self::Crlf => "\u{000D}\u{000A}", Self::LF => "\u{000A}", #[cfg(feature = "unicode-lines")] Self::VT => "\u{000B}", #[cfg(feature = "unicode-lines")] Self::FF => "\u{000C}", #[cfg(feature = "unicode-lines")] Self::CR => "\u{000D}", #[cfg(feature = "unicode-lines")] Self::Nel => "\u{0085}", #[cfg(feature = "unicode-lines")] Self::LS 
=> "\u{2028}", #[cfg(feature = "unicode-lines")] Self::PS => "\u{2029}", } } #[inline] pub const fn from_char(ch: char) -> Option { match ch { '\u{000A}' => Some(LineEnding::LF), #[cfg(feature = "unicode-lines")] '\u{000B}' => Some(LineEnding::VT), #[cfg(feature = "unicode-lines")] '\u{000C}' => Some(LineEnding::FF), #[cfg(feature = "unicode-lines")] '\u{000D}' => Some(LineEnding::CR), #[cfg(feature = "unicode-lines")] '\u{0085}' => Some(LineEnding::Nel), #[cfg(feature = "unicode-lines")] '\u{2028}' => Some(LineEnding::LS), #[cfg(feature = "unicode-lines")] '\u{2029}' => Some(LineEnding::PS), // Not a line ending _ => None, } } // Normally we'd want to implement the FromStr trait, but in this case // that would force us into a different return type than from_char or // or from_rope_slice, which would be weird. #[allow(clippy::should_implement_trait)] #[inline] pub fn from_str(g: &str) -> Option { match g { "\u{000D}\u{000A}" => Some(LineEnding::Crlf), "\u{000A}" => Some(LineEnding::LF), #[cfg(feature = "unicode-lines")] "\u{000B}" => Some(LineEnding::VT), #[cfg(feature = "unicode-lines")] "\u{000C}" => Some(LineEnding::FF), #[cfg(feature = "unicode-lines")] "\u{000D}" => Some(LineEnding::CR), #[cfg(feature = "unicode-lines")] "\u{0085}" => Some(LineEnding::Nel), #[cfg(feature = "unicode-lines")] "\u{2028}" => Some(LineEnding::LS), #[cfg(feature = "unicode-lines")] "\u{2029}" => Some(LineEnding::PS), // Not a line ending _ => None, } } #[inline] pub fn from_rope_slice(g: &RopeSlice) -> Option { if let Some(text) = g.as_str() { LineEnding::from_str(text) } else { // Non-contiguous, so it can't be a line ending. // Specifically, Ropey guarantees that CRLF is always // contiguous. And the remaining line endings are all // single `char`s, and therefore trivially contiguous. 
None } } } #[inline] pub fn str_is_line_ending(s: &str) -> bool { LineEnding::from_str(s).is_some() } #[inline] pub fn rope_is_line_ending(r: RopeSlice) -> bool { r.chunks().all(str_is_line_ending) } /// Attempts to detect what line ending the passed document uses. pub fn auto_detect_line_ending(doc: &Rope) -> Option { // Return first matched line ending. Not all possible line endings // are being matched, as they might be special-use only for line in doc.lines().take(100) { match get_line_ending(&line) { None => {} #[cfg(feature = "unicode-lines")] Some(LineEnding::VT) | Some(LineEnding::FF) | Some(LineEnding::PS) => {} ending => return ending, } } None } /// Returns the passed line's line ending, if any. pub fn get_line_ending(line: &RopeSlice) -> Option { // Last character as str. let g1 = line .slice(line.len_chars().saturating_sub(1)..) .as_str() .unwrap(); // Last two characters as str, or empty str if they're not contiguous. // It's fine to punt on the non-contiguous case, because Ropey guarantees // that CRLF is always contiguous. let g2 = line .slice(line.len_chars().saturating_sub(2)..) .as_str() .unwrap_or(""); // First check the two-character case for CRLF, then check the single-character case. LineEnding::from_str(g2).or_else(|| LineEnding::from_str(g1)) } #[cfg(not(feature = "unicode-lines"))] /// Returns the passed line's line ending, if any. pub fn get_line_ending_of_str(line: &str) -> Option { if line.ends_with("\u{000D}\u{000A}") { Some(LineEnding::Crlf) } else if line.ends_with('\u{000A}') { Some(LineEnding::LF) } else { None } } #[cfg(feature = "unicode-lines")] /// Returns the passed line's line ending, if any. 
pub fn get_line_ending_of_str(line: &str) -> Option { if line.ends_with("\u{000D}\u{000A}") { Some(LineEnding::Crlf) } else if line.ends_with('\u{000A}') { Some(LineEnding::LF) } else if line.ends_with('\u{000B}') { Some(LineEnding::VT) } else if line.ends_with('\u{000C}') { Some(LineEnding::FF) } else if line.ends_with('\u{000D}') { Some(LineEnding::CR) } else if line.ends_with('\u{0085}') { Some(LineEnding::Nel) } else if line.ends_with('\u{2028}') { Some(LineEnding::LS) } else if line.ends_with('\u{2029}') { Some(LineEnding::PS) } else { None } } /// Returns the char index of the end of the given line, not including its line ending. pub fn line_end_char_index(slice: &RopeSlice, line: usize) -> usize { slice.line_to_char(line + 1) - get_line_ending(&slice.line(line)) .map(|le| le.len_chars()) .unwrap_or(0) } pub fn line_end_byte_index(slice: &RopeSlice, line: usize) -> usize { slice.line_to_byte(line + 1) - get_line_ending(&slice.line(line)) .map(|le| le.as_str().len()) .unwrap_or(0) } /// Fetches line `line_idx` from the passed rope slice, sans any line ending. pub fn line_without_line_ending<'a>(slice: &'a RopeSlice, line_idx: usize) -> RopeSlice<'a> { let start = slice.line_to_char(line_idx); let end = line_end_char_index(slice, line_idx); slice.slice(start..end) } /// Returns the char index of the end of the given RopeSlice, not including /// any final line ending. 
pub fn rope_end_without_line_ending(slice: &RopeSlice) -> usize { slice.len_chars() - get_line_ending(slice).map(|le| le.len_chars()).unwrap_or(0) } #[cfg(test)] mod line_ending_tests { use super::*; #[test] fn line_ending_autodetect() { assert_eq!( auto_detect_line_ending(&Rope::from_str("\n")), Some(LineEnding::LF) ); assert_eq!( auto_detect_line_ending(&Rope::from_str("\r\n")), Some(LineEnding::Crlf) ); assert_eq!(auto_detect_line_ending(&Rope::from_str("hello")), None); assert_eq!(auto_detect_line_ending(&Rope::from_str("")), None); assert_eq!( auto_detect_line_ending(&Rope::from_str("hello\nhelix\r\n")), Some(LineEnding::LF) ); assert_eq!( auto_detect_line_ending(&Rope::from_str("a formfeed\u{000C}")), None ); assert_eq!( auto_detect_line_ending(&Rope::from_str("\n\u{000A}\n \u{000A}")), Some(LineEnding::LF) ); assert_eq!( auto_detect_line_ending(&Rope::from_str( "a formfeed\u{000C} with a\u{000C} linefeed\u{000A}" )), Some(LineEnding::LF) ); assert_eq!(auto_detect_line_ending(&Rope::from_str("a formfeed\u{000C} with a\u{000C} carriage return linefeed\u{000D}\u{000A} and a linefeed\u{000A}")), Some(LineEnding::Crlf)); } #[test] fn str_to_line_ending() { #[cfg(feature = "unicode-lines")] assert_eq!(LineEnding::from_str("\r"), Some(LineEnding::CR)); assert_eq!(LineEnding::from_str("\n"), Some(LineEnding::LF)); assert_eq!(LineEnding::from_str("\r\n"), Some(LineEnding::Crlf)); assert_eq!(LineEnding::from_str("hello\n"), None); } #[test] fn rope_slice_to_line_ending() { let r = Rope::from_str("hello\r\n"); #[cfg(feature = "unicode-lines")] assert_eq!( LineEnding::from_rope_slice(&r.slice(5..6)), Some(LineEnding::CR) ); assert_eq!( LineEnding::from_rope_slice(&r.slice(6..7)), Some(LineEnding::LF) ); assert_eq!( LineEnding::from_rope_slice(&r.slice(5..7)), Some(LineEnding::Crlf) ); assert_eq!(LineEnding::from_rope_slice(&r.slice(..)), None); } #[test] fn get_line_ending_rope_slice() { let r = Rope::from_str("Hello\rworld\nhow\r\nare you?"); #[cfg(feature = 
"unicode-lines")] assert_eq!(get_line_ending(&r.slice(..6)), Some(LineEnding::CR)); assert_eq!(get_line_ending(&r.slice(..12)), Some(LineEnding::LF)); assert_eq!(get_line_ending(&r.slice(..17)), Some(LineEnding::Crlf)); assert_eq!(get_line_ending(&r.slice(..)), None); } #[test] fn get_line_ending_str() { let text = "Hello\rworld\nhow\r\nare you?"; #[cfg(feature = "unicode-lines")] assert_eq!(get_line_ending_of_str(&text[..6]), Some(LineEnding::CR)); assert_eq!(get_line_ending_of_str(&text[..12]), Some(LineEnding::LF)); assert_eq!(get_line_ending_of_str(&text[..17]), Some(LineEnding::Crlf)); assert_eq!(get_line_ending_of_str(text), None); } #[test] fn line_end_char_index_rope_slice() { let r = Rope::from_str("Hello\rworld\nhow\r\nare you?"); let s = &r.slice(..); #[cfg(not(feature = "unicode-lines"))] { assert_eq!(line_end_char_index(s, 0), 11); assert_eq!(line_end_char_index(s, 1), 15); assert_eq!(line_end_char_index(s, 2), 25); } #[cfg(feature = "unicode-lines")] { assert_eq!(line_end_char_index(s, 0), 5); assert_eq!(line_end_char_index(s, 1), 11); assert_eq!(line_end_char_index(s, 2), 15); } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/macros.rs000066400000000000000000000010131500614314100242600ustar00rootroot00000000000000#[macro_export] macro_rules! 
hashmap { (@single $($x:tt)*) => (()); (@count $($rest:expr),*) => (<[()]>::len(&[$(hashmap!(@single $rest)),*])); ($($key:expr => $value:expr,)+) => { hashmap!($($key => $value),+) }; ($($key:expr => $value:expr),*) => { { let _cap = hashmap!(@count $($key),*); let mut _map = ::std::collections::HashMap::with_capacity(_cap); $( let _ = _map.insert($key, $value); )* _map } }; } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/match_brackets.rs000066400000000000000000000251641500614314100257630ustar00rootroot00000000000000use std::iter; use ropey::RopeSlice; use tree_sitter::Node; use crate::movement::Direction::{self, Backward, Forward}; use crate::Syntax; const MAX_PLAINTEXT_SCAN: usize = 10000; const MATCH_LIMIT: usize = 16; pub const BRACKETS: [(char, char); 9] = [ ('(', ')'), ('{', '}'), ('[', ']'), ('<', '>'), ('‘', '’'), ('“', '”'), ('«', '»'), ('「', '」'), ('(', ')'), ]; // The difference between BRACKETS and PAIRS is that we can find matching // BRACKETS in a plain text file, but we can't do the same for PAIRs. // PAIRS also contains all BRACKETS. pub const PAIRS: [(char, char); BRACKETS.len() + 3] = { let mut pairs = [(' ', ' '); BRACKETS.len() + 3]; let mut idx = 0; while idx < BRACKETS.len() { pairs[idx] = BRACKETS[idx]; idx += 1; } pairs[idx] = ('"', '"'); pairs[idx + 1] = ('\'', '\''); pairs[idx + 2] = ('`', '`'); pairs }; /// Returns the position of the matching bracket under cursor. /// /// If the cursor is on the opening bracket, the position of /// the closing bracket is returned. If the cursor on the closing /// bracket, the position of the opening bracket is returned. /// /// If the cursor is not on a bracket, `None` is returned. /// /// If no matching bracket is found, `None` is returned. 
#[must_use] pub fn find_matching_bracket(syntax: &Syntax, doc: RopeSlice, pos: usize) -> Option { if pos >= doc.len_chars() || !is_valid_pair(doc.char(pos)) { return None; } find_pair(syntax, doc, pos, false) } // Returns the position of the bracket that is closing the current scope. // // If the cursor is on an opening or closing bracket, the function // behaves equivalent to [`find_matching_bracket`]. // // If the cursor position is within a scope, the function searches // for the surrounding scope that is surrounded by brackets and // returns the position of the closing bracket for that scope. // // If no surrounding scope is found, the function returns `None`. #[must_use] pub fn find_matching_bracket_fuzzy(syntax: &Syntax, doc: RopeSlice, pos: usize) -> Option { find_pair(syntax, doc, pos, true) } fn find_pair( syntax: &Syntax, doc: RopeSlice, pos_: usize, traverse_parents: bool, ) -> Option { let pos = doc.char_to_byte(pos_); let root = syntax.tree_for_byte_range(pos, pos).root_node(); let mut node = root.descendant_for_byte_range(pos, pos)?; loop { if node.is_named() && node.child_count() >= 2 { let open = node.child(0).unwrap(); let close = node.child(node.child_count() - 1).unwrap(); if let (Some((start_pos, open)), Some((end_pos, close))) = (as_char(doc, &open), as_char(doc, &close)) { if PAIRS.contains(&(open, close)) { if end_pos == pos_ { return Some(start_pos); } // We return the end char if the cursor is either on the start char // or at some arbitrary position between start and end char. 
if traverse_parents || start_pos == pos_ { return Some(end_pos); } } } } // this node itselt wasn't a pair but maybe its siblings are if let Some((start_char, end_char)) = as_close_pair(doc, &node) { if let Some(pair_start) = find_pair_end(doc, node.prev_sibling(), start_char, end_char, Backward) { return Some(pair_start); } } if let Some((start_char, end_char)) = as_open_pair(doc, &node) { if let Some(pair_end) = find_pair_end(doc, node.next_sibling(), start_char, end_char, Forward) { return Some(pair_end); } } if traverse_parents { for sibling in iter::successors(node.next_sibling(), |node| node.next_sibling()).take(MATCH_LIMIT) { let Some((start_char, end_char)) = as_close_pair(doc, &sibling) else { continue; }; if find_pair_end(doc, sibling.prev_sibling(), start_char, end_char, Backward) .is_some() { return doc.try_byte_to_char(sibling.start_byte()).ok(); } } } else if node.is_named() { break; } let Some(parent) = node.parent() else { break; }; node = parent; } let node = root.named_descendant_for_byte_range(pos, pos + 1)?; if node.child_count() != 0 { return None; } let node_start = doc.byte_to_char(node.start_byte()); find_matching_bracket_plaintext(doc.byte_slice(node.byte_range()), pos_ - node_start) .map(|pos| pos + node_start) } /// Returns the position of the matching bracket under cursor. /// This function works on plain text and ignores tree-sitter grammar. /// The search is limited to `MAX_PLAINTEXT_SCAN` characters /// /// If the cursor is on the opening bracket, the position of /// the closing bracket is returned. If the cursor on the closing /// bracket, the position of the opening bracket is returned. /// /// If the cursor is not on a bracket, `None` is returned. /// /// If no matching bracket is found, `None` is returned. 
#[must_use] pub fn find_matching_bracket_plaintext(doc: RopeSlice, cursor_pos: usize) -> Option { let bracket = doc.get_char(cursor_pos)?; let matching_bracket = { let pair = get_pair(bracket); if pair.0 == bracket { pair.1 } else { pair.0 } }; // Don't do anything when the cursor is not on top of a bracket. if !is_valid_bracket(bracket) { return None; } // Determine the direction of the matching. let is_fwd = is_open_bracket(bracket); let chars_iter = if is_fwd { doc.chars_at(cursor_pos + 1) } else { doc.chars_at(cursor_pos).reversed() }; let mut open_cnt = 1; for (i, candidate) in chars_iter.take(MAX_PLAINTEXT_SCAN).enumerate() { if candidate == bracket { open_cnt += 1; } else if candidate == matching_bracket { // Return when all pending brackets have been closed. if open_cnt == 1 { return Some(if is_fwd { cursor_pos + i + 1 } else { cursor_pos - i - 1 }); } open_cnt -= 1; } } None } /// Returns the open and closing chars pair. If not found in /// [`BRACKETS`] returns (ch, ch). /// /// ``` /// use helix_core::match_brackets::get_pair; /// /// assert_eq!(get_pair('['), ('[', ']')); /// assert_eq!(get_pair('}'), ('{', '}')); /// assert_eq!(get_pair('"'), ('"', '"')); /// ``` pub fn get_pair(ch: char) -> (char, char) { PAIRS .iter() .find(|(open, close)| *open == ch || *close == ch) .copied() .unwrap_or((ch, ch)) } pub fn is_open_bracket(ch: char) -> bool { BRACKETS.iter().any(|(l, _)| *l == ch) } pub fn is_close_bracket(ch: char) -> bool { BRACKETS.iter().any(|(_, r)| *r == ch) } pub fn is_valid_bracket(ch: char) -> bool { BRACKETS.iter().any(|(l, r)| *l == ch || *r == ch) } pub fn is_open_pair(ch: char) -> bool { PAIRS.iter().any(|(l, _)| *l == ch) } pub fn is_close_pair(ch: char) -> bool { PAIRS.iter().any(|(_, r)| *r == ch) } pub fn is_valid_pair(ch: char) -> bool { PAIRS.iter().any(|(l, r)| *l == ch || *r == ch) } /// Tests if this node is a pair close char and returns the expected open char /// and close char contained in this node fn as_close_pair(doc: 
RopeSlice, node: &Node) -> Option<(char, char)> { let close = as_char(doc, node)?.1; PAIRS .iter() .find_map(|&(open, close_)| (close_ == close).then_some((close, open))) } /// Checks if `node` or its siblings (at most MATCH_LIMIT nodes) is the specified closing char /// /// # Returns /// /// The position of the found node or `None` otherwise fn find_pair_end( doc: RopeSlice, node: Option, start_char: char, end_char: char, direction: Direction, ) -> Option { let advance = match direction { Forward => Node::next_sibling, Backward => Node::prev_sibling, }; let mut depth = 0; iter::successors(node, advance) .take(MATCH_LIMIT) .find_map(|node| { let (pos, c) = as_char(doc, &node)?; if c == end_char { if depth == 0 { return Some(pos); } depth -= 1; } else if c == start_char { depth += 1; } None }) } /// Tests if this node is a pair open char and returns the expected close char /// and open char contained in this node fn as_open_pair(doc: RopeSlice, node: &Node) -> Option<(char, char)> { let open = as_char(doc, node)?.1; PAIRS .iter() .find_map(|&(open_, close)| (open_ == open).then_some((open, close))) } /// If node is a single char return it (and its char position) fn as_char(doc: RopeSlice, node: &Node) -> Option<(usize, char)> { // TODO: multi char/non ASCII pairs if node.byte_range().len() != 1 { return None; } let pos = doc.try_byte_to_char(node.start_byte()).ok()?; Some((pos, doc.char(pos))) } #[cfg(test)] mod tests { use super::*; #[test] fn find_matching_bracket_empty_file() { let actual = find_matching_bracket_plaintext("".into(), 0); assert_eq!(actual, None); } #[test] fn test_find_matching_bracket_current_line_plaintext() { let assert = |input: &str, pos, expected| { let input = RopeSlice::from(input); let actual = find_matching_bracket_plaintext(input, pos); assert_eq!(expected, actual.unwrap()); let actual = find_matching_bracket_plaintext(input, expected); assert_eq!(pos, actual.unwrap(), "expected symmetrical behaviour"); }; assert("(hello)", 0, 6); 
assert("((hello))", 0, 8); assert("((hello))", 1, 7); assert("(((hello)))", 2, 8); assert("key: ${value}", 6, 12); assert("key: ${value} # (some comment)", 16, 29); assert("(paren (paren {bracket}))", 0, 24); assert("(paren (paren {bracket}))", 7, 23); assert("(paren (paren {bracket}))", 14, 22); assert("(prev line\n ) (middle) ( \n next line)", 0, 12); assert("(prev line\n ) (middle) ( \n next line)", 14, 21); assert("(prev line\n ) (middle) ( \n next line)", 23, 36); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/movement.rs000066400000000000000000002455671500614314100246560ustar00rootroot00000000000000use std::{cmp::Reverse, iter}; use ropey::iter::Chars; use tree_sitter::{Node, QueryCursor}; use crate::{ char_idx_at_visual_offset, chars::{categorize_char, char_is_line_ending, CharCategory}, doc_formatter::TextFormat, graphemes::{ next_grapheme_boundary, nth_next_grapheme_boundary, nth_prev_grapheme_boundary, prev_grapheme_boundary, }, line_ending::rope_is_line_ending, position::char_idx_at_visual_block_offset, syntax::LanguageConfiguration, text_annotations::TextAnnotations, textobject::TextObject, visual_offset_from_block, Range, RopeSlice, Selection, Syntax, }; #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum Direction { Forward, Backward, } #[derive(Copy, Clone, PartialEq, Eq)] pub enum Movement { Extend, Move, } pub fn move_horizontally( slice: RopeSlice, range: Range, dir: Direction, count: usize, behaviour: Movement, _: &TextFormat, _: &mut TextAnnotations, ) -> Range { let pos = range.cursor(slice); // Compute the new position. let new_pos = match dir { Direction::Forward => nth_next_grapheme_boundary(slice, pos, count), Direction::Backward => nth_prev_grapheme_boundary(slice, pos, count), }; // Compute the final new range. 
range.put_cursor(slice, new_pos, behaviour == Movement::Extend) } pub fn move_vertically_visual( slice: RopeSlice, range: Range, dir: Direction, count: usize, behaviour: Movement, text_fmt: &TextFormat, annotations: &mut TextAnnotations, ) -> Range { if !text_fmt.soft_wrap { return move_vertically(slice, range, dir, count, behaviour, text_fmt, annotations); } annotations.clear_line_annotations(); let pos = range.cursor(slice); // Compute the current position's 2d coordinates. let (visual_pos, block_off) = visual_offset_from_block(slice, pos, pos, text_fmt, annotations); let new_col = range .old_visual_position .map_or(visual_pos.col as u32, |(_, col)| col); // Compute the new position. let mut row_off = match dir { Direction::Forward => count as isize, Direction::Backward => -(count as isize), }; // Compute visual offset relative to block start to avoid trasversing the block twice row_off += visual_pos.row as isize; let (mut new_pos, virtual_rows) = char_idx_at_visual_offset( slice, block_off, row_off, new_col as usize, text_fmt, annotations, ); if dir == Direction::Forward { new_pos += (virtual_rows != 0) as usize; } // Special-case to avoid moving to the end of the last non-empty line. if behaviour == Movement::Extend && slice.line(slice.char_to_line(new_pos)).len_chars() == 0 { return range; } let mut new_range = range.put_cursor(slice, new_pos, behaviour == Movement::Extend); new_range.old_visual_position = Some((0, new_col)); new_range } pub fn move_vertically( slice: RopeSlice, range: Range, dir: Direction, count: usize, behaviour: Movement, text_fmt: &TextFormat, annotations: &mut TextAnnotations, ) -> Range { annotations.clear_line_annotations(); let pos = range.cursor(slice); let line_idx = slice.char_to_line(pos); let line_start = slice.line_to_char(line_idx); // Compute the current position's 2d coordinates. 
let visual_pos = visual_offset_from_block(slice, line_start, pos, text_fmt, annotations).0; let (mut new_row, new_col) = range .old_visual_position .map_or((visual_pos.row as u32, visual_pos.col as u32), |pos| pos); new_row = new_row.max(visual_pos.row as u32); let line_idx = slice.char_to_line(pos); // Compute the new position. let mut new_line_idx = match dir { Direction::Forward => line_idx.saturating_add(count), Direction::Backward => line_idx.saturating_sub(count), }; let line = if new_line_idx >= slice.len_lines() - 1 { // there is no line terminator for the last line // so the logic below is not necessary here new_line_idx = slice.len_lines() - 1; slice } else { // char_idx_at_visual_block_offset returns a one-past-the-end index // in case it reaches the end of the slice // to avoid moving to the nextline in that case the line terminator is removed from the line let new_line_end = prev_grapheme_boundary(slice, slice.line_to_char(new_line_idx + 1)); slice.slice(..new_line_end) }; let new_line_start = line.line_to_char(new_line_idx); let (new_pos, _) = char_idx_at_visual_block_offset( line, new_line_start, new_row as usize, new_col as usize, text_fmt, annotations, ); // Special-case to avoid moving to the end of the last non-empty line. 
if behaviour == Movement::Extend && slice.line(new_line_idx).len_chars() == 0 { return range; } let mut new_range = range.put_cursor(slice, new_pos, behaviour == Movement::Extend); new_range.old_visual_position = Some((new_row, new_col)); new_range } pub fn move_next_word_start(slice: RopeSlice, range: Range, count: usize) -> Range { word_move(slice, range, count, WordMotionTarget::NextWordStart) } pub fn move_next_word_end(slice: RopeSlice, range: Range, count: usize) -> Range { word_move(slice, range, count, WordMotionTarget::NextWordEnd) } pub fn move_prev_word_start(slice: RopeSlice, range: Range, count: usize) -> Range { word_move(slice, range, count, WordMotionTarget::PrevWordStart) } pub fn move_prev_word_end(slice: RopeSlice, range: Range, count: usize) -> Range { word_move(slice, range, count, WordMotionTarget::PrevWordEnd) } pub fn move_next_long_word_start(slice: RopeSlice, range: Range, count: usize) -> Range { word_move(slice, range, count, WordMotionTarget::NextLongWordStart) } pub fn move_next_long_word_end(slice: RopeSlice, range: Range, count: usize) -> Range { word_move(slice, range, count, WordMotionTarget::NextLongWordEnd) } pub fn move_prev_long_word_start(slice: RopeSlice, range: Range, count: usize) -> Range { word_move(slice, range, count, WordMotionTarget::PrevLongWordStart) } pub fn move_prev_long_word_end(slice: RopeSlice, range: Range, count: usize) -> Range { word_move(slice, range, count, WordMotionTarget::PrevLongWordEnd) } pub fn move_next_sub_word_start(slice: RopeSlice, range: Range, count: usize) -> Range { word_move(slice, range, count, WordMotionTarget::NextSubWordStart) } pub fn move_next_sub_word_end(slice: RopeSlice, range: Range, count: usize) -> Range { word_move(slice, range, count, WordMotionTarget::NextSubWordEnd) } pub fn move_prev_sub_word_start(slice: RopeSlice, range: Range, count: usize) -> Range { word_move(slice, range, count, WordMotionTarget::PrevSubWordStart) } pub fn move_prev_sub_word_end(slice: RopeSlice, 
range: Range, count: usize) -> Range { word_move(slice, range, count, WordMotionTarget::PrevSubWordEnd) } fn word_move(slice: RopeSlice, range: Range, count: usize, target: WordMotionTarget) -> Range { let is_prev = matches!( target, WordMotionTarget::PrevWordStart | WordMotionTarget::PrevLongWordStart | WordMotionTarget::PrevSubWordStart | WordMotionTarget::PrevWordEnd | WordMotionTarget::PrevLongWordEnd | WordMotionTarget::PrevSubWordEnd ); // Special-case early-out. if (is_prev && range.head == 0) || (!is_prev && range.head == slice.len_chars()) { return range; } // Prepare the range appropriately based on the target movement // direction. This is addressing two things at once: // // 1. Block-cursor semantics. // 2. The anchor position being irrelevant to the output result. #[allow(clippy::collapsible_else_if)] // Makes the structure clearer in this case. let start_range = if is_prev { if range.anchor < range.head { Range::new(range.head, prev_grapheme_boundary(slice, range.head)) } else { Range::new(next_grapheme_boundary(slice, range.head), range.head) } } else { if range.anchor < range.head { Range::new(prev_grapheme_boundary(slice, range.head), range.head) } else { Range::new(range.head, next_grapheme_boundary(slice, range.head)) } }; // Do the main work. 
let mut range = start_range; for _ in 0..count { let next_range = slice.chars_at(range.head).range_to_target(target, range); if range == next_range { break; } range = next_range; } range } pub fn move_prev_paragraph( slice: RopeSlice, range: Range, count: usize, behavior: Movement, ) -> Range { let mut line = range.cursor_line(slice); let first_char = slice.line_to_char(line) == range.cursor(slice); let prev_line_empty = rope_is_line_ending(slice.line(line.saturating_sub(1))); let curr_line_empty = rope_is_line_ending(slice.line(line)); let prev_empty_to_line = prev_line_empty && !curr_line_empty; // skip character before paragraph boundary if prev_empty_to_line && !first_char { line += 1; } let mut lines = slice.lines_at(line); lines.reverse(); let mut lines = lines.map(rope_is_line_ending).peekable(); let mut last_line = line; for _ in 0..count { while lines.next_if(|&e| e).is_some() { line -= 1; } while lines.next_if(|&e| !e).is_some() { line -= 1; } if line == last_line { break; } last_line = line; } let head = slice.line_to_char(line); let anchor = if behavior == Movement::Move { // exclude first character after paragraph boundary if prev_empty_to_line && first_char { range.cursor(slice) } else { range.head } } else { range.put_cursor(slice, head, true).anchor }; Range::new(anchor, head) } pub fn move_next_paragraph( slice: RopeSlice, range: Range, count: usize, behavior: Movement, ) -> Range { let mut line = range.cursor_line(slice); let last_char = prev_grapheme_boundary(slice, slice.line_to_char(line + 1)) == range.cursor(slice); let curr_line_empty = rope_is_line_ending(slice.line(line)); let next_line_empty = rope_is_line_ending(slice.line(slice.len_lines().saturating_sub(1).min(line + 1))); let curr_empty_to_line = curr_line_empty && !next_line_empty; // skip character after paragraph boundary if curr_empty_to_line && last_char { line += 1; } let mut lines = slice.lines_at(line).map(rope_is_line_ending).peekable(); let mut last_line = line; for _ in 
0..count { while lines.next_if(|&e| !e).is_some() { line += 1; } while lines.next_if(|&e| e).is_some() { line += 1; } if line == last_line { break; } last_line = line; } let head = slice.line_to_char(line); let anchor = if behavior == Movement::Move { if curr_empty_to_line && last_char { range.head } else { range.cursor(slice) } } else { range.put_cursor(slice, head, true).anchor }; Range::new(anchor, head) } // ---- util ------------ #[inline] /// Returns first index that doesn't satisfy a given predicate when /// advancing the character index. /// /// Returns none if all characters satisfy the predicate. pub fn skip_while(slice: RopeSlice, pos: usize, fun: F) -> Option where F: Fn(char) -> bool, { let mut chars = slice.chars_at(pos).enumerate(); chars.find_map(|(i, c)| if !fun(c) { Some(pos + i) } else { None }) } #[inline] /// Returns first index that doesn't satisfy a given predicate when /// retreating the character index, saturating if all elements satisfy /// the condition. pub fn backwards_skip_while(slice: RopeSlice, pos: usize, fun: F) -> Option where F: Fn(char) -> bool, { let mut chars_starting_from_next = slice.chars_at(pos); let mut backwards = iter::from_fn(|| chars_starting_from_next.prev()).enumerate(); backwards.find_map(|(i, c)| { if !fun(c) { Some(pos.saturating_sub(i)) } else { None } }) } /// Possible targets of a word motion #[derive(Copy, Clone, Debug)] pub enum WordMotionTarget { NextWordStart, NextWordEnd, PrevWordStart, PrevWordEnd, // A "Long word" (also known as a WORD in Vim/Kakoune) is strictly // delimited by whitespace, and can consist of punctuation as well // as alphanumerics. NextLongWordStart, NextLongWordEnd, PrevLongWordStart, PrevLongWordEnd, // A sub word is similar to a regular word, except it is also delimited by // underscores and transitions from lowercase to uppercase. 
NextSubWordStart, NextSubWordEnd, PrevSubWordStart, PrevSubWordEnd, } pub trait CharHelpers { fn range_to_target(&mut self, target: WordMotionTarget, origin: Range) -> Range; } impl CharHelpers for Chars<'_> { /// Note: this only changes the anchor of the range if the head is effectively /// starting on a boundary (either directly or after skipping newline characters). /// Any other changes to the anchor should be handled by the calling code. fn range_to_target(&mut self, target: WordMotionTarget, origin: Range) -> Range { let is_prev = matches!( target, WordMotionTarget::PrevWordStart | WordMotionTarget::PrevLongWordStart | WordMotionTarget::PrevSubWordStart | WordMotionTarget::PrevWordEnd | WordMotionTarget::PrevLongWordEnd | WordMotionTarget::PrevSubWordEnd ); // Reverse the iterator if needed for the motion direction. if is_prev { self.reverse(); } // Function to advance index in the appropriate motion direction. let advance: &dyn Fn(&mut usize) = if is_prev { &|idx| *idx = idx.saturating_sub(1) } else { &|idx| *idx += 1 }; // Initialize state variables. let mut anchor = origin.anchor; let mut head = origin.head; let mut prev_ch = { let ch = self.prev(); if ch.is_some() { self.next(); } ch }; // Skip any initial newline characters. while let Some(ch) = self.next() { if char_is_line_ending(ch) { prev_ch = Some(ch); advance(&mut head); } else { self.prev(); break; } } if prev_ch.map(char_is_line_ending).unwrap_or(false) { anchor = head; } // Find our target position(s). let head_start = head; #[allow(clippy::while_let_on_iterator)] // Clippy's suggestion to fix doesn't work here. while let Some(next_ch) = self.next() { if prev_ch.is_none() || reached_target(target, prev_ch.unwrap(), next_ch) { if head == head_start { anchor = head; } else { break; } } prev_ch = Some(next_ch); advance(&mut head); } // Un-reverse the iterator if needed. 
if is_prev { self.reverse(); } Range::new(anchor, head) } } fn is_word_boundary(a: char, b: char) -> bool { categorize_char(a) != categorize_char(b) } fn is_long_word_boundary(a: char, b: char) -> bool { match (categorize_char(a), categorize_char(b)) { (CharCategory::Word, CharCategory::Punctuation) | (CharCategory::Punctuation, CharCategory::Word) => false, (a, b) if a != b => true, _ => false, } } fn is_sub_word_boundary(a: char, b: char, dir: Direction) -> bool { match (categorize_char(a), categorize_char(b)) { (CharCategory::Word, CharCategory::Word) => { if (a == '_') != (b == '_') { return true; } // Subword boundaries are directional: in 'fooBar', there is a // boundary between 'o' and 'B', but not between 'B' and 'a'. match dir { Direction::Forward => a.is_lowercase() && b.is_uppercase(), Direction::Backward => a.is_uppercase() && b.is_lowercase(), } } (a, b) if a != b => true, _ => false, } } fn reached_target(target: WordMotionTarget, prev_ch: char, next_ch: char) -> bool { match target { WordMotionTarget::NextWordStart | WordMotionTarget::PrevWordEnd => { is_word_boundary(prev_ch, next_ch) && (char_is_line_ending(next_ch) || !next_ch.is_whitespace()) } WordMotionTarget::NextWordEnd | WordMotionTarget::PrevWordStart => { is_word_boundary(prev_ch, next_ch) && (!prev_ch.is_whitespace() || char_is_line_ending(next_ch)) } WordMotionTarget::NextLongWordStart | WordMotionTarget::PrevLongWordEnd => { is_long_word_boundary(prev_ch, next_ch) && (char_is_line_ending(next_ch) || !next_ch.is_whitespace()) } WordMotionTarget::NextLongWordEnd | WordMotionTarget::PrevLongWordStart => { is_long_word_boundary(prev_ch, next_ch) && (!prev_ch.is_whitespace() || char_is_line_ending(next_ch)) } WordMotionTarget::NextSubWordStart => { is_sub_word_boundary(prev_ch, next_ch, Direction::Forward) && (char_is_line_ending(next_ch) || !(next_ch.is_whitespace() || next_ch == '_')) } WordMotionTarget::PrevSubWordEnd => { is_sub_word_boundary(prev_ch, next_ch, Direction::Backward) && 
(char_is_line_ending(next_ch) || !(next_ch.is_whitespace() || next_ch == '_')) } WordMotionTarget::NextSubWordEnd => { is_sub_word_boundary(prev_ch, next_ch, Direction::Forward) && (!(prev_ch.is_whitespace() || prev_ch == '_') || char_is_line_ending(next_ch)) } WordMotionTarget::PrevSubWordStart => { is_sub_word_boundary(prev_ch, next_ch, Direction::Backward) && (!(prev_ch.is_whitespace() || prev_ch == '_') || char_is_line_ending(next_ch)) } } } /// Finds the range of the next or previous textobject in the syntax sub-tree of `node`. /// Returns the range in the forwards direction. pub fn goto_treesitter_object( slice: RopeSlice, range: Range, object_name: &str, dir: Direction, slice_tree: Node, lang_config: &LanguageConfiguration, count: usize, ) -> Range { let get_range = move |range: Range| -> Option { let byte_pos = slice.char_to_byte(range.cursor(slice)); let cap_name = |t: TextObject| format!("{}.{}", object_name, t); let mut cursor = QueryCursor::new(); let nodes = lang_config.textobject_query()?.capture_nodes_any( &[ &cap_name(TextObject::Movement), &cap_name(TextObject::Around), &cap_name(TextObject::Inside), ], slice_tree, slice, &mut cursor, )?; let node = match dir { Direction::Forward => nodes .filter(|n| n.start_byte() > byte_pos) .min_by_key(|n| (n.start_byte(), Reverse(n.end_byte())))?, Direction::Backward => nodes .filter(|n| n.end_byte() < byte_pos) .max_by_key(|n| (n.end_byte(), Reverse(n.start_byte())))?, }; let len = slice.len_bytes(); let start_byte = node.start_byte(); let end_byte = node.end_byte(); if start_byte >= len || end_byte >= len { return None; } let start_char = slice.byte_to_char(start_byte); let end_char = slice.byte_to_char(end_byte); // head of range should be at beginning Some(Range::new(start_char, end_char)) }; let mut last_range = range; for _ in 0..count { match get_range(last_range) { Some(r) if r != last_range => last_range = r, _ => break, } } last_range } fn find_parent_start(mut node: Node) -> Option { let start = 
node.start_byte(); while node.start_byte() >= start || !node.is_named() { node = node.parent()?; } Some(node) } pub fn move_parent_node_end( syntax: &Syntax, text: RopeSlice, selection: Selection, dir: Direction, movement: Movement, ) -> Selection { selection.transform(|range| { let start_from = text.char_to_byte(range.from()); let start_to = text.char_to_byte(range.to()); let mut node = match syntax.named_descendant_for_byte_range(start_from, start_to) { Some(node) => node, None => { log::debug!( "no descendant found for byte range: {} - {}", start_from, start_to ); return range; } }; let mut end_head = match dir { // moving forward, we always want to move one past the end of the // current node, so use the end byte of the current node, which is an exclusive // end of the range Direction::Forward => text.byte_to_char(node.end_byte()), // moving backward, we want the cursor to land on the start char of // the current node, or if it is already at the start of a node, to traverse up to // the parent Direction::Backward => { let end_head = text.byte_to_char(node.start_byte()); // if we're already on the beginning, look up to the parent if end_head == range.cursor(text) { node = find_parent_start(node).unwrap_or(node); text.byte_to_char(node.start_byte()) } else { end_head } } }; if movement == Movement::Move { // preserve direction of original range if range.direction() == Direction::Forward { Range::new(end_head, end_head + 1) } else { Range::new(end_head + 1, end_head) } } else { // if we end up with a forward range, then adjust it to be one past // where we want if end_head >= range.anchor { end_head += 1; } Range::new(range.anchor, end_head) } }) } #[cfg(test)] mod test { use ropey::Rope; use crate::{coords_at_pos, pos_at_coords}; use super::*; const SINGLE_LINE_SAMPLE: &str = "This is a simple alphabetic line"; const MULTILINE_SAMPLE: &str = "\ Multiline\n\ text sample\n\ which\n\ is merely alphabetic\n\ and whitespaced\n\ "; const MULTIBYTE_CHARACTER_SAMPLE: 
&str = "\ パーティーへ行かないか\n\ The text above is Japanese\n\ "; #[test] fn test_vertical_move() { let text = Rope::from("abcd\nefg\nwrs"); let slice = text.slice(..); let pos = pos_at_coords(slice, (0, 4).into(), true); let range = Range::new(pos, pos); assert_eq!( coords_at_pos( slice, move_vertically_visual( slice, range, Direction::Forward, 1, Movement::Move, &TextFormat::default(), &mut TextAnnotations::default(), ) .head ), (1, 3).into() ); } #[test] fn horizontal_moves_through_single_line_text() { let text = Rope::from(SINGLE_LINE_SAMPLE); let slice = text.slice(..); let position = pos_at_coords(slice, (0, 0).into(), true); let mut range = Range::point(position); let moves_and_expected_coordinates = [ ((Direction::Forward, 1usize), (0, 1)), // T|his is a simple alphabetic line ((Direction::Forward, 2usize), (0, 3)), // Thi|s is a simple alphabetic line ((Direction::Forward, 0usize), (0, 3)), // Thi|s is a simple alphabetic line ((Direction::Forward, 999usize), (0, 32)), // This is a simple alphabetic line| ((Direction::Forward, 999usize), (0, 32)), // This is a simple alphabetic line| ((Direction::Backward, 999usize), (0, 0)), // |This is a simple alphabetic line ]; for ((direction, amount), coordinates) in moves_and_expected_coordinates { range = move_horizontally( slice, range, direction, amount, Movement::Move, &TextFormat::default(), &mut TextAnnotations::default(), ); assert_eq!(coords_at_pos(slice, range.head), coordinates.into()) } } #[test] fn horizontal_moves_through_multiline_text() { let text = Rope::from(MULTILINE_SAMPLE); let slice = text.slice(..); let position = pos_at_coords(slice, (0, 0).into(), true); let mut range = Range::point(position); let moves_and_expected_coordinates = [ ((Direction::Forward, 11usize), (1, 1)), // Multiline\nt|ext sample\n... ((Direction::Backward, 1usize), (1, 0)), // Multiline\n|text sample\n... ((Direction::Backward, 5usize), (0, 5)), // Multi|line\ntext sample\n... 
((Direction::Backward, 999usize), (0, 0)), // |Multiline\ntext sample\n... ((Direction::Forward, 3usize), (0, 3)), // Mul|tiline\ntext sample\n... ((Direction::Forward, 0usize), (0, 3)), // Mul|tiline\ntext sample\n... ((Direction::Backward, 0usize), (0, 3)), // Mul|tiline\ntext sample\n... ((Direction::Forward, 999usize), (5, 0)), // ...and whitespaced\n| ((Direction::Forward, 999usize), (5, 0)), // ...and whitespaced\n| ]; for ((direction, amount), coordinates) in moves_and_expected_coordinates { range = move_horizontally( slice, range, direction, amount, Movement::Move, &TextFormat::default(), &mut TextAnnotations::default(), ); assert_eq!(coords_at_pos(slice, range.head), coordinates.into()); assert_eq!(range.head, range.anchor); } } #[test] fn selection_extending_moves_in_single_line_text() { let text = Rope::from(SINGLE_LINE_SAMPLE); let slice = text.slice(..); let position = pos_at_coords(slice, (0, 0).into(), true); let mut range = Range::point(position); let original_anchor = range.anchor; let moves = [ (Direction::Forward, 1usize), (Direction::Forward, 5usize), (Direction::Backward, 3usize), ]; for (direction, amount) in moves { range = move_horizontally( slice, range, direction, amount, Movement::Extend, &TextFormat::default(), &mut TextAnnotations::default(), ); assert_eq!(range.anchor, original_anchor); } } #[test] fn vertical_moves_in_single_column() { let text = Rope::from(MULTILINE_SAMPLE); let slice = text.slice(..); let position = pos_at_coords(slice, (0, 0).into(), true); let mut range = Range::point(position); let moves_and_expected_coordinates = [ ((Direction::Forward, 1usize), (1, 0)), ((Direction::Forward, 2usize), (3, 0)), ((Direction::Forward, 1usize), (4, 0)), ((Direction::Backward, 999usize), (0, 0)), ((Direction::Forward, 4usize), (4, 0)), ((Direction::Forward, 0usize), (4, 0)), ((Direction::Backward, 0usize), (4, 0)), ((Direction::Forward, 5), (5, 0)), ((Direction::Forward, 999usize), (5, 0)), ]; for ((direction, amount), coordinates) 
in moves_and_expected_coordinates { range = move_vertically_visual( slice, range, direction, amount, Movement::Move, &TextFormat::default(), &mut TextAnnotations::default(), ); assert_eq!(coords_at_pos(slice, range.head), coordinates.into()); assert_eq!(range.head, range.anchor); } } #[test] fn vertical_moves_jumping_column() { let text = Rope::from(MULTILINE_SAMPLE); let slice = text.slice(..); let position = pos_at_coords(slice, (0, 0).into(), true); let mut range = Range::point(position); enum Axis { H, V, } let moves_and_expected_coordinates = [ // Places cursor at the end of line ((Axis::H, Direction::Forward, 8usize), (0, 8)), // First descent preserves column as the target line is wider ((Axis::V, Direction::Forward, 1usize), (1, 8)), // Second descent clamps column as the target line is shorter ((Axis::V, Direction::Forward, 1usize), (2, 5)), // Third descent restores the original column ((Axis::V, Direction::Forward, 1usize), (3, 8)), // Behaviour is preserved even through long jumps ((Axis::V, Direction::Backward, 999usize), (0, 8)), ((Axis::V, Direction::Forward, 4usize), (4, 8)), ((Axis::V, Direction::Forward, 999usize), (5, 0)), ]; for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates { range = match axis { Axis::H => move_horizontally( slice, range, direction, amount, Movement::Move, &TextFormat::default(), &mut TextAnnotations::default(), ), Axis::V => move_vertically_visual( slice, range, direction, amount, Movement::Move, &TextFormat::default(), &mut TextAnnotations::default(), ), }; assert_eq!(coords_at_pos(slice, range.head), coordinates.into()); assert_eq!(range.head, range.anchor); } } #[test] fn multibyte_character_wide_column_jumps() { let text = Rope::from(MULTIBYTE_CHARACTER_SAMPLE); let slice = text.slice(..); let position = pos_at_coords(slice, (0, 0).into(), true); let mut range = Range::point(position); // FIXME: The behaviour captured in this test diverges from both Kakoune and Vim. 
These // will attempt to preserve the horizontal position of the cursor, rather than // placing it at the same character index. enum Axis { H, V, } let moves_and_expected_coordinates = [ // Places cursor at the fourth kana. ((Axis::H, Direction::Forward, 4), (0, 4)), // Descent places cursor at the 8th character. ((Axis::V, Direction::Forward, 1usize), (1, 8)), // Moving back 2 characters. ((Axis::H, Direction::Backward, 2usize), (1, 6)), // Jumping back up 1 line. ((Axis::V, Direction::Backward, 1usize), (0, 3)), ]; for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates { range = match axis { Axis::H => move_horizontally( slice, range, direction, amount, Movement::Move, &TextFormat::default(), &mut TextAnnotations::default(), ), Axis::V => move_vertically_visual( slice, range, direction, amount, Movement::Move, &TextFormat::default(), &mut TextAnnotations::default(), ), }; assert_eq!(coords_at_pos(slice, range.head), coordinates.into()); assert_eq!(range.head, range.anchor); } } #[test] #[should_panic] fn nonsensical_ranges_panic_on_forward_movement_attempt_in_debug_mode() { move_next_word_start(Rope::from("Sample").slice(..), Range::point(99999999), 1); } #[test] #[should_panic] fn nonsensical_ranges_panic_on_forward_to_end_movement_attempt_in_debug_mode() { move_next_word_end(Rope::from("Sample").slice(..), Range::point(99999999), 1); } #[test] #[should_panic] fn nonsensical_ranges_panic_on_backwards_movement_attempt_in_debug_mode() { move_prev_word_start(Rope::from("Sample").slice(..), Range::point(99999999), 1); } #[test] fn test_behaviour_when_moving_to_start_of_next_words() { let tests = [ ("Basic forward motion stops at the first space", vec![(1, Range::new(0, 0), Range::new(0, 6))]), (" Starting from a boundary advances the anchor", vec![(1, Range::new(0, 0), Range::new(1, 10))]), ("Long whitespace gap is bridged by the head", vec![(1, Range::new(0, 0), Range::new(0, 11))]), ("Previous anchor is irrelevant for forward motions", 
vec![(1, Range::new(12, 0), Range::new(0, 9))]), (" Starting from whitespace moves to last space in sequence", vec![(1, Range::new(0, 0), Range::new(0, 4))]), ("Starting from mid-word leaves anchor at start position and moves head", vec![(1, Range::new(3, 3), Range::new(3, 9))]), ("Identifiers_with_underscores are considered a single word", vec![(1, Range::new(0, 0), Range::new(0, 29))]), ("Jumping\n into starting whitespace selects the spaces before 'into'", vec![(1, Range::new(0, 7), Range::new(8, 12))]), ("alphanumeric.!,and.?=punctuation are considered 'words' for the purposes of word motion", vec![ (1, Range::new(0, 0), Range::new(0, 12)), (1, Range::new(0, 12), Range::new(12, 15)), (1, Range::new(12, 15), Range::new(15, 18)) ]), ("... ... punctuation and spaces behave as expected", vec![ (1, Range::new(0, 0), Range::new(0, 6)), (1, Range::new(0, 6), Range::new(6, 10)), ]), (".._.._ punctuation is not joined by underscores into a single block", vec![(1, Range::new(0, 0), Range::new(0, 2))]), ("Newlines\n\nare bridged seamlessly.", vec![ (1, Range::new(0, 0), Range::new(0, 8)), (1, Range::new(0, 8), Range::new(10, 14)), ]), ("Jumping\n\n\n\n\n\n from newlines to whitespace selects whitespace.", vec![ (1, Range::new(0, 9), Range::new(13, 16)), ]), ("A failed motion does not modify the range", vec![ (3, Range::new(37, 41), Range::new(37, 41)), ]), ("oh oh oh two character words!", vec![ (1, Range::new(0, 0), Range::new(0, 3)), (1, Range::new(0, 3), Range::new(3, 6)), (1, Range::new(0, 2), Range::new(1, 3)), ]), ("Multiple motions at once resolve correctly", vec![ (3, Range::new(0, 0), Range::new(17, 20)), ]), ("Excessive motions are performed partially", vec![ (999, Range::new(0, 0), Range::new(32, 41)), ]), ("", // Edge case of moving forward in empty string vec![ (1, Range::new(0, 0), Range::new(0, 0)), ]), ("\n\n\n\n\n", // Edge case of moving forward in all newlines vec![ (1, Range::new(0, 0), Range::new(5, 5)), ]), ("\n \n \n Jumping through alternated space 
blocks and newlines selects the space blocks", vec![ (1, Range::new(0, 0), Range::new(1, 4)), (1, Range::new(1, 4), Range::new(5, 8)), ]), ("ヒーリクス multibyte characters behave as normal characters", vec![ (1, Range::new(0, 0), Range::new(0, 6)), ]), ]; for (sample, scenario) in tests { for (count, begin, expected_end) in scenario.into_iter() { let range = move_next_word_start(Rope::from(sample).slice(..), begin, count); assert_eq!(range, expected_end, "Case failed: [{}]", sample); } } } #[test] fn test_behaviour_when_moving_to_start_of_next_sub_words() { let tests = [ ( "NextSubwordStart", vec![ (1, Range::new(0, 0), Range::new(0, 4)), (1, Range::new(4, 4), Range::new(4, 11)), ], ), ( "next_subword_start", vec![ (1, Range::new(0, 0), Range::new(0, 5)), (1, Range::new(4, 4), Range::new(5, 13)), ], ), ( "Next_Subword_Start", vec![ (1, Range::new(0, 0), Range::new(0, 5)), (1, Range::new(4, 4), Range::new(5, 13)), ], ), ( "NEXT_SUBWORD_START", vec![ (1, Range::new(0, 0), Range::new(0, 5)), (1, Range::new(4, 4), Range::new(5, 13)), ], ), ( "next subword start", vec![ (1, Range::new(0, 0), Range::new(0, 5)), (1, Range::new(4, 4), Range::new(5, 13)), ], ), ( "Next Subword Start", vec![ (1, Range::new(0, 0), Range::new(0, 5)), (1, Range::new(4, 4), Range::new(5, 13)), ], ), ( "NEXT SUBWORD START", vec![ (1, Range::new(0, 0), Range::new(0, 5)), (1, Range::new(4, 4), Range::new(5, 13)), ], ), ( "next__subword__start", vec![ (1, Range::new(0, 0), Range::new(0, 6)), (1, Range::new(4, 4), Range::new(4, 6)), (1, Range::new(5, 5), Range::new(6, 15)), ], ), ( "Next__Subword__Start", vec![ (1, Range::new(0, 0), Range::new(0, 6)), (1, Range::new(4, 4), Range::new(4, 6)), (1, Range::new(5, 5), Range::new(6, 15)), ], ), ( "NEXT__SUBWORD__START", vec![ (1, Range::new(0, 0), Range::new(0, 6)), (1, Range::new(4, 4), Range::new(4, 6)), (1, Range::new(5, 5), Range::new(6, 15)), ], ), ]; for (sample, scenario) in tests { for (count, begin, expected_end) in scenario.into_iter() { let range = 
move_next_sub_word_start(Rope::from(sample).slice(..), begin, count); assert_eq!(range, expected_end, "Case failed: [{}]", sample); } } } #[test] fn test_behaviour_when_moving_to_end_of_next_sub_words() { let tests = [ ( "NextSubwordEnd", vec![ (1, Range::new(0, 0), Range::new(0, 4)), (1, Range::new(4, 4), Range::new(4, 11)), ], ), ( "next subword end", vec![ (1, Range::new(0, 0), Range::new(0, 4)), (1, Range::new(4, 4), Range::new(4, 12)), ], ), ( "Next Subword End", vec![ (1, Range::new(0, 0), Range::new(0, 4)), (1, Range::new(4, 4), Range::new(4, 12)), ], ), ( "NEXT SUBWORD END", vec![ (1, Range::new(0, 0), Range::new(0, 4)), (1, Range::new(4, 4), Range::new(4, 12)), ], ), ( "next_subword_end", vec![ (1, Range::new(0, 0), Range::new(0, 4)), (1, Range::new(4, 4), Range::new(4, 12)), ], ), ( "Next_Subword_End", vec![ (1, Range::new(0, 0), Range::new(0, 4)), (1, Range::new(4, 4), Range::new(4, 12)), ], ), ( "NEXT_SUBWORD_END", vec![ (1, Range::new(0, 0), Range::new(0, 4)), (1, Range::new(4, 4), Range::new(4, 12)), ], ), ( "next__subword__end", vec![ (1, Range::new(0, 0), Range::new(0, 4)), (1, Range::new(4, 4), Range::new(4, 13)), (1, Range::new(5, 5), Range::new(5, 13)), ], ), ( "Next__Subword__End", vec![ (1, Range::new(0, 0), Range::new(0, 4)), (1, Range::new(4, 4), Range::new(4, 13)), (1, Range::new(5, 5), Range::new(5, 13)), ], ), ( "NEXT__SUBWORD__END", vec![ (1, Range::new(0, 0), Range::new(0, 4)), (1, Range::new(4, 4), Range::new(4, 13)), (1, Range::new(5, 5), Range::new(5, 13)), ], ), ]; for (sample, scenario) in tests { for (count, begin, expected_end) in scenario.into_iter() { let range = move_next_sub_word_end(Rope::from(sample).slice(..), begin, count); assert_eq!(range, expected_end, "Case failed: [{}]", sample); } } } #[test] fn test_behaviour_when_moving_to_start_of_next_long_words() { let tests = [ ("Basic forward motion stops at the first space", vec![(1, Range::new(0, 0), Range::new(0, 6))]), (" Starting from a boundary advances the anchor", 
vec![(1, Range::new(0, 0), Range::new(1, 10))]), ("Long whitespace gap is bridged by the head", vec![(1, Range::new(0, 0), Range::new(0, 11))]), ("Previous anchor is irrelevant for forward motions", vec![(1, Range::new(12, 0), Range::new(0, 9))]), (" Starting from whitespace moves to last space in sequence", vec![(1, Range::new(0, 0), Range::new(0, 4))]), ("Starting from mid-word leaves anchor at start position and moves head", vec![(1, Range::new(3, 3), Range::new(3, 9))]), ("Identifiers_with_underscores are considered a single word", vec![(1, Range::new(0, 0), Range::new(0, 29))]), ("Jumping\n into starting whitespace selects the spaces before 'into'", vec![(1, Range::new(0, 7), Range::new(8, 12))]), ("alphanumeric.!,and.?=punctuation are not treated any differently than alphanumerics", vec![ (1, Range::new(0, 0), Range::new(0, 33)), ]), ("... ... punctuation and spaces behave as expected", vec![ (1, Range::new(0, 0), Range::new(0, 6)), (1, Range::new(0, 6), Range::new(6, 10)), ]), (".._.._ punctuation is joined by underscores into a single word, as it behaves like alphanumerics", vec![(1, Range::new(0, 0), Range::new(0, 7))]), ("Newlines\n\nare bridged seamlessly.", vec![ (1, Range::new(0, 0), Range::new(0, 8)), (1, Range::new(0, 8), Range::new(10, 14)), ]), ("Jumping\n\n\n\n\n\n from newlines to whitespace selects whitespace.", vec![ (1, Range::new(0, 9), Range::new(13, 16)), ]), ("A failed motion does not modify the range", vec![ (3, Range::new(37, 41), Range::new(37, 41)), ]), ("oh oh oh two character words!", vec![ (1, Range::new(0, 0), Range::new(0, 3)), (1, Range::new(0, 3), Range::new(3, 6)), (1, Range::new(0, 1), Range::new(0, 3)), ]), ("Multiple motions at once resolve correctly", vec![ (3, Range::new(0, 0), Range::new(17, 20)), ]), ("Excessive motions are performed partially", vec![ (999, Range::new(0, 0), Range::new(32, 41)), ]), ("", // Edge case of moving forward in empty string vec![ (1, Range::new(0, 0), Range::new(0, 0)), ]), ("\n\n\n\n\n", // 
Edge case of moving forward in all newlines vec![ (1, Range::new(0, 0), Range::new(5, 5)), ]), ("\n \n \n Jumping through alternated space blocks and newlines selects the space blocks", vec![ (1, Range::new(0, 0), Range::new(1, 4)), (1, Range::new(1, 4), Range::new(5, 8)), ]), ("ヒー..リクス multibyte characters behave as normal characters, including their interaction with punctuation", vec![ (1, Range::new(0, 0), Range::new(0, 8)), ]), ]; for (sample, scenario) in tests { for (count, begin, expected_end) in scenario.into_iter() { let range = move_next_long_word_start(Rope::from(sample).slice(..), begin, count); assert_eq!(range, expected_end, "Case failed: [{}]", sample); } } } #[test] fn test_behaviour_when_moving_to_start_of_previous_words() { let tests = [ ("Basic backward motion from the middle of a word", vec![(1, Range::new(3, 3), Range::new(4, 0))]), // // Why do we want this behavior? The current behavior fails this // // test, but seems better and more consistent. // ("Starting from after boundary retreats the anchor", // vec![(1, Range::new(0, 9), Range::new(8, 0))]), (" Jump to start of a word preceded by whitespace", vec![(1, Range::new(5, 5), Range::new(6, 4))]), (" Jump to start of line from start of word preceded by whitespace", vec![(1, Range::new(4, 4), Range::new(4, 0))]), ("Previous anchor is irrelevant for backward motions", vec![(1, Range::new(12, 5), Range::new(6, 0))]), (" Starting from whitespace moves to first space in sequence", vec![(1, Range::new(0, 4), Range::new(4, 0))]), ("Identifiers_with_underscores are considered a single word", vec![(1, Range::new(0, 20), Range::new(20, 0))]), ("Jumping\n \nback through a newline selects whitespace", vec![(1, Range::new(0, 13), Range::new(12, 8))]), ("Jumping to start of word from the end selects the word", vec![(1, Range::new(6, 7), Range::new(7, 0))]), ("alphanumeric.!,and.?=punctuation are considered 'words' for the purposes of word motion", vec![ (1, Range::new(29, 30), Range::new(30, 21)), (1, 
Range::new(30, 21), Range::new(21, 18)), (1, Range::new(21, 18), Range::new(18, 15)) ]), ("... ... punctuation and spaces behave as expected", vec![ (1, Range::new(0, 10), Range::new(10, 6)), (1, Range::new(10, 6), Range::new(6, 0)), ]), (".._.._ punctuation is not joined by underscores into a single block", vec![(1, Range::new(0, 6), Range::new(5, 3))]), ("Newlines\n\nare bridged seamlessly.", vec![ (1, Range::new(0, 10), Range::new(8, 0)), ]), ("Jumping \n\n\n\n\nback from within a newline group selects previous block", vec![ (1, Range::new(0, 13), Range::new(11, 0)), ]), ("Failed motions do not modify the range", vec![ (0, Range::new(3, 0), Range::new(3, 0)), ]), ("Multiple motions at once resolve correctly", vec![ (3, Range::new(18, 18), Range::new(9, 0)), ]), ("Excessive motions are performed partially", vec![ (999, Range::new(40, 40), Range::new(10, 0)), ]), ("", // Edge case of moving backwards in empty string vec![ (1, Range::new(0, 0), Range::new(0, 0)), ]), ("\n\n\n\n\n", // Edge case of moving backwards in all newlines vec![ (1, Range::new(5, 5), Range::new(0, 0)), ]), (" \n \nJumping back through alternated space blocks and newlines selects the space blocks", vec![ (1, Range::new(0, 8), Range::new(7, 4)), (1, Range::new(7, 4), Range::new(3, 0)), ]), ("ヒーリクス multibyte characters behave as normal characters", vec![ (1, Range::new(0, 6), Range::new(6, 0)), ]), ]; for (sample, scenario) in tests { for (count, begin, expected_end) in scenario.into_iter() { let range = move_prev_word_start(Rope::from(sample).slice(..), begin, count); assert_eq!(range, expected_end, "Case failed: [{}]", sample); } } } #[test] fn test_behaviour_when_moving_to_start_of_previous_sub_words() { let tests = [ ( "PrevSubwordEnd", vec![ (1, Range::new(13, 13), Range::new(14, 11)), (1, Range::new(11, 11), Range::new(11, 4)), ], ), ( "prev subword end", vec![ (1, Range::new(15, 15), Range::new(16, 13)), (1, Range::new(12, 12), Range::new(13, 5)), ], ), ( "Prev Subword End", vec![ (1, 
Range::new(15, 15), Range::new(16, 13)), (1, Range::new(12, 12), Range::new(13, 5)), ], ), ( "PREV SUBWORD END", vec![ (1, Range::new(15, 15), Range::new(16, 13)), (1, Range::new(12, 12), Range::new(13, 5)), ], ), ( "prev_subword_end", vec![ (1, Range::new(15, 15), Range::new(16, 13)), (1, Range::new(12, 12), Range::new(13, 5)), ], ), ( "Prev_Subword_End", vec![ (1, Range::new(15, 15), Range::new(16, 13)), (1, Range::new(12, 12), Range::new(13, 5)), ], ), ( "PREV_SUBWORD_END", vec![ (1, Range::new(15, 15), Range::new(16, 13)), (1, Range::new(12, 12), Range::new(13, 5)), ], ), ( "prev__subword__end", vec![ (1, Range::new(17, 17), Range::new(18, 15)), (1, Range::new(13, 13), Range::new(14, 6)), (1, Range::new(14, 14), Range::new(15, 6)), ], ), ( "Prev__Subword__End", vec![ (1, Range::new(17, 17), Range::new(18, 15)), (1, Range::new(13, 13), Range::new(14, 6)), (1, Range::new(14, 14), Range::new(15, 6)), ], ), ( "PREV__SUBWORD__END", vec![ (1, Range::new(17, 17), Range::new(18, 15)), (1, Range::new(13, 13), Range::new(14, 6)), (1, Range::new(14, 14), Range::new(15, 6)), ], ), ]; for (sample, scenario) in tests { for (count, begin, expected_end) in scenario.into_iter() { let range = move_prev_sub_word_start(Rope::from(sample).slice(..), begin, count); assert_eq!(range, expected_end, "Case failed: [{}]", sample); } } } #[test] fn test_behaviour_when_moving_to_start_of_previous_long_words() { let tests = [ ( "Basic backward motion from the middle of a word", vec![(1, Range::new(3, 3), Range::new(4, 0))], ), // // Why do we want this behavior? The current behavior fails this // // test, but seems better and more consistent. 
// ("Starting from after boundary retreats the anchor", // vec![(1, Range::new(0, 9), Range::new(8, 0))]), ( " Jump to start of a word preceded by whitespace", vec![(1, Range::new(5, 5), Range::new(6, 4))], ), ( " Jump to start of line from start of word preceded by whitespace", vec![(1, Range::new(3, 4), Range::new(4, 0))], ), ("Previous anchor is irrelevant for backward motions", vec![(1, Range::new(12, 5), Range::new(6, 0))]), ( " Starting from whitespace moves to first space in sequence", vec![(1, Range::new(0, 4), Range::new(4, 0))], ), ("Identifiers_with_underscores are considered a single word", vec![(1, Range::new(0, 20), Range::new(20, 0))]), ( "Jumping\n \nback through a newline selects whitespace", vec![(1, Range::new(0, 13), Range::new(12, 8))], ), ( "Jumping to start of word from the end selects the word", vec![(1, Range::new(6, 7), Range::new(7, 0))], ), ( "alphanumeric.!,and.?=punctuation are treated exactly the same", vec![(1, Range::new(29, 30), Range::new(30, 0))], ), ( "... ... 
punctuation and spaces behave as expected", vec![ (1, Range::new(0, 10), Range::new(10, 6)), (1, Range::new(10, 6), Range::new(6, 0)), ], ), (".._.._ punctuation is joined by underscores into a single block", vec![(1, Range::new(0, 6), Range::new(6, 0))]), ( "Newlines\n\nare bridged seamlessly.", vec![(1, Range::new(0, 10), Range::new(8, 0))], ), ( "Jumping \n\n\n\n\nback from within a newline group selects previous block", vec![(1, Range::new(0, 13), Range::new(11, 0))], ), ( "Failed motions do not modify the range", vec![(0, Range::new(3, 0), Range::new(3, 0))], ), ( "Multiple motions at once resolve correctly", vec![(3, Range::new(19, 19), Range::new(9, 0))], ), ( "Excessive motions are performed partially", vec![(999, Range::new(40, 40), Range::new(10, 0))], ), ( "", // Edge case of moving backwards in empty string vec![(1, Range::new(0, 0), Range::new(0, 0))], ), ( "\n\n\n\n\n", // Edge case of moving backwards in all newlines vec![(1, Range::new(5, 5), Range::new(0, 0))], ), (" \n \nJumping back through alternated space blocks and newlines selects the space blocks", vec![ (1, Range::new(0, 8), Range::new(7, 4)), (1, Range::new(7, 4), Range::new(3, 0)), ]), ("ヒーリ..クス multibyte characters behave as normal characters, including when interacting with punctuation", vec![ (1, Range::new(0, 8), Range::new(8, 0)), ]), ]; for (sample, scenario) in tests { for (count, begin, expected_end) in scenario.into_iter() { let range = move_prev_long_word_start(Rope::from(sample).slice(..), begin, count); assert_eq!(range, expected_end, "Case failed: [{}]", sample); } } } #[test] fn test_behaviour_when_moving_to_end_of_next_words() { let tests = [ ("Basic forward motion from the start of a word to the end of it", vec![(1, Range::new(0, 0), Range::new(0, 5))]), ("Basic forward motion from the end of a word to the end of the next", vec![(1, Range::new(0, 5), Range::new(5, 13))]), ("Basic forward motion from the middle of a word to the end of it", vec![(1, Range::new(2, 2), 
Range::new(2, 5))]), (" Jumping to end of a word preceded by whitespace", vec![(1, Range::new(0, 0), Range::new(0, 11))]), // // Why do we want this behavior? The current behavior fails this // // test, but seems better and more consistent. // (" Starting from a boundary advances the anchor", // vec![(1, Range::new(0, 0), Range::new(1, 9))]), ("Previous anchor is irrelevant for end of word motion", vec![(1, Range::new(12, 2), Range::new(2, 8))]), ("Identifiers_with_underscores are considered a single word", vec![(1, Range::new(0, 0), Range::new(0, 28))]), ("Jumping\n into starting whitespace selects up to the end of next word", vec![(1, Range::new(0, 7), Range::new(8, 16))]), ("alphanumeric.!,and.?=punctuation are considered 'words' for the purposes of word motion", vec![ (1, Range::new(0, 0), Range::new(0, 12)), (1, Range::new(0, 12), Range::new(12, 15)), (1, Range::new(12, 15), Range::new(15, 18)) ]), ("... ... punctuation and spaces behave as expected", vec![ (1, Range::new(0, 0), Range::new(0, 3)), (1, Range::new(0, 3), Range::new(3, 9)), ]), (".._.._ punctuation is not joined by underscores into a single block", vec![(1, Range::new(0, 0), Range::new(0, 2))]), ("Newlines\n\nare bridged seamlessly.", vec![ (1, Range::new(0, 0), Range::new(0, 8)), (1, Range::new(0, 8), Range::new(10, 13)), ]), ("Jumping\n\n\n\n\n\n from newlines to whitespace selects to end of next word.", vec![ (1, Range::new(0, 8), Range::new(13, 20)), ]), ("A failed motion does not modify the range", vec![ (3, Range::new(37, 41), Range::new(37, 41)), ]), ("Multiple motions at once resolve correctly", vec![ (3, Range::new(0, 0), Range::new(16, 19)), ]), ("Excessive motions are performed partially", vec![ (999, Range::new(0, 0), Range::new(31, 41)), ]), ("", // Edge case of moving forward in empty string vec![ (1, Range::new(0, 0), Range::new(0, 0)), ]), ("\n\n\n\n\n", // Edge case of moving forward in all newlines vec![ (1, Range::new(0, 0), Range::new(5, 5)), ]), ("\n \n \n Jumping through 
alternated space blocks and newlines selects the space blocks", vec![ (1, Range::new(0, 0), Range::new(1, 4)), (1, Range::new(1, 4), Range::new(5, 8)), ]), ("ヒーリクス multibyte characters behave as normal characters", vec![ (1, Range::new(0, 0), Range::new(0, 5)), ]), ]; for (sample, scenario) in tests { for (count, begin, expected_end) in scenario.into_iter() { let range = move_next_word_end(Rope::from(sample).slice(..), begin, count); assert_eq!(range, expected_end, "Case failed: [{}]", sample); } } } #[test] fn test_behaviour_when_moving_to_end_of_previous_words() { let tests = [ ("Basic backward motion from the middle of a word", vec![(1, Range::new(9, 9), Range::new(10, 5))]), ("Starting from after boundary retreats the anchor", vec![(1, Range::new(0, 14), Range::new(13, 8))]), ("Jump to end of a word succeeded by whitespace", vec![(1, Range::new(11, 11), Range::new(11, 4))]), (" Jump to start of line from end of word preceded by whitespace", vec![(1, Range::new(8, 8), Range::new(8, 0))]), ("Previous anchor is irrelevant for backward motions", vec![(1, Range::new(26, 12), Range::new(13, 8))]), (" Starting from whitespace moves to first space in sequence", vec![(1, Range::new(0, 4), Range::new(4, 0))]), ("Test identifiers_with_underscores are considered a single word", vec![(1, Range::new(0, 25), Range::new(25, 4))]), ("Jumping\n \nback through a newline selects whitespace", vec![(1, Range::new(0, 13), Range::new(12, 8))]), ("Jumping to start of word from the end selects the whole word", vec![(1, Range::new(16, 16), Range::new(16, 10))]), ("alphanumeric.!,and.?=punctuation are considered 'words' for the purposes of word motion", vec![ (1, Range::new(30, 30), Range::new(31, 21)), (1, Range::new(31, 21), Range::new(21, 18)), (1, Range::new(21, 18), Range::new(18, 15)) ]), ("... ... 
punctuation and spaces behave as expected", vec![ (1, Range::new(0, 10), Range::new(9, 3)), (1, Range::new(9, 3), Range::new(3, 0)), ]), (".._.._ punctuation is not joined by underscores into a single block", vec![(1, Range::new(0, 5), Range::new(5, 3))]), ("Newlines\n\nare bridged seamlessly.", vec![ (1, Range::new(0, 10), Range::new(8, 0)), ]), ("Jumping \n\n\n\n\nback from within a newline group selects previous block", vec![ (1, Range::new(0, 13), Range::new(11, 7)), ]), ("Failed motions do not modify the range", vec![ (0, Range::new(3, 0), Range::new(3, 0)), ]), ("Multiple motions at once resolve correctly", vec![ (3, Range::new(24, 24), Range::new(16, 8)), ]), ("Excessive motions are performed partially", vec![ (999, Range::new(40, 40), Range::new(9, 0)), ]), ("", // Edge case of moving backwards in empty string vec![ (1, Range::new(0, 0), Range::new(0, 0)), ]), ("\n\n\n\n\n", // Edge case of moving backwards in all newlines vec![ (1, Range::new(5, 5), Range::new(0, 0)), ]), (" \n \nJumping back through alternated space blocks and newlines selects the space blocks", vec![ (1, Range::new(0, 8), Range::new(7, 4)), (1, Range::new(7, 4), Range::new(3, 0)), ]), ("Test ヒーリクス multibyte characters behave as normal characters", vec![ (1, Range::new(0, 10), Range::new(10, 4)), ]), ]; for (sample, scenario) in tests { for (count, begin, expected_end) in scenario.into_iter() { let range = move_prev_word_end(Rope::from(sample).slice(..), begin, count); assert_eq!(range, expected_end, "Case failed: [{}]", sample); } } } #[test] fn test_behaviour_when_moving_to_end_of_previous_sub_words() { let tests = [ ( "PrevSubwordEnd", vec![ (1, Range::new(13, 13), Range::new(14, 11)), (1, Range::new(11, 11), Range::new(11, 4)), ], ), ( "prev subword end", vec![ (1, Range::new(15, 15), Range::new(16, 12)), (1, Range::new(12, 12), Range::new(12, 4)), ], ), ( "Prev Subword End", vec![ (1, Range::new(15, 15), Range::new(16, 12)), (1, Range::new(12, 12), Range::new(12, 4)), ], ), ( "PREV 
SUBWORD END", vec![ (1, Range::new(15, 15), Range::new(16, 12)), (1, Range::new(12, 12), Range::new(12, 4)), ], ), ( "prev_subword_end", vec![ (1, Range::new(15, 15), Range::new(16, 12)), (1, Range::new(12, 12), Range::new(12, 4)), ], ), ( "Prev_Subword_End", vec![ (1, Range::new(15, 15), Range::new(16, 12)), (1, Range::new(12, 12), Range::new(12, 4)), ], ), ( "PREV_SUBWORD_END", vec![ (1, Range::new(15, 15), Range::new(16, 12)), (1, Range::new(12, 12), Range::new(12, 4)), ], ), ( "prev__subword__end", vec![ (1, Range::new(17, 17), Range::new(18, 13)), (1, Range::new(13, 13), Range::new(13, 4)), (1, Range::new(14, 14), Range::new(15, 13)), ], ), ( "Prev__Subword__End", vec![ (1, Range::new(17, 17), Range::new(18, 13)), (1, Range::new(13, 13), Range::new(13, 4)), (1, Range::new(14, 14), Range::new(15, 13)), ], ), ( "PREV__SUBWORD__END", vec![ (1, Range::new(17, 17), Range::new(18, 13)), (1, Range::new(13, 13), Range::new(13, 4)), (1, Range::new(14, 14), Range::new(15, 13)), ], ), ]; for (sample, scenario) in tests { for (count, begin, expected_end) in scenario.into_iter() { let range = move_prev_sub_word_end(Rope::from(sample).slice(..), begin, count); assert_eq!(range, expected_end, "Case failed: [{}]", sample); } } } #[test] fn test_behaviour_when_moving_to_end_of_next_long_words() { let tests = [ ("Basic forward motion from the start of a word to the end of it", vec![(1, Range::new(0, 0), Range::new(0, 5))]), ("Basic forward motion from the end of a word to the end of the next", vec![(1, Range::new(0, 5), Range::new(5, 13))]), ("Basic forward motion from the middle of a word to the end of it", vec![(1, Range::new(2, 2), Range::new(2, 5))]), (" Jumping to end of a word preceded by whitespace", vec![(1, Range::new(0, 0), Range::new(0, 11))]), // // Why do we want this behavior? The current behavior fails this // // test, but seems better and more consistent. 
// (" Starting from a boundary advances the anchor", // vec![(1, Range::new(0, 0), Range::new(1, 9))]), ("Previous anchor is irrelevant for end of word motion", vec![(1, Range::new(12, 2), Range::new(2, 8))]), ("Identifiers_with_underscores are considered a single word", vec![(1, Range::new(0, 0), Range::new(0, 28))]), ("Jumping\n into starting whitespace selects up to the end of next word", vec![(1, Range::new(0, 7), Range::new(8, 16))]), ("alphanumeric.!,and.?=punctuation are treated the same way", vec![ (1, Range::new(0, 0), Range::new(0, 32)), ]), ("... ... punctuation and spaces behave as expected", vec![ (1, Range::new(0, 0), Range::new(0, 3)), (1, Range::new(0, 3), Range::new(3, 9)), ]), (".._.._ punctuation is joined by underscores into a single block", vec![(1, Range::new(0, 0), Range::new(0, 6))]), ("Newlines\n\nare bridged seamlessly.", vec![ (1, Range::new(0, 0), Range::new(0, 8)), (1, Range::new(0, 8), Range::new(10, 13)), ]), ("Jumping\n\n\n\n\n\n from newlines to whitespace selects to end of next word.", vec![ (1, Range::new(0, 9), Range::new(13, 20)), ]), ("A failed motion does not modify the range", vec![ (3, Range::new(37, 41), Range::new(37, 41)), ]), ("Multiple motions at once resolve correctly", vec![ (3, Range::new(0, 0), Range::new(16, 19)), ]), ("Excessive motions are performed partially", vec![ (999, Range::new(0, 0), Range::new(31, 41)), ]), ("", // Edge case of moving forward in empty string vec![ (1, Range::new(0, 0), Range::new(0, 0)), ]), ("\n\n\n\n\n", // Edge case of moving forward in all newlines vec![ (1, Range::new(0, 0), Range::new(5, 5)), ]), ("\n \n \n Jumping through alternated space blocks and newlines selects the space blocks", vec![ (1, Range::new(0, 0), Range::new(1, 4)), (1, Range::new(1, 4), Range::new(5, 8)), ]), ("ヒーリ..クス multibyte characters behave as normal characters, including when they interact with punctuation", vec![ (1, Range::new(0, 0), Range::new(0, 7)), ]), ]; for (sample, scenario) in tests { for (count, 
begin, expected_end) in scenario.into_iter() { let range = move_next_long_word_end(Rope::from(sample).slice(..), begin, count); assert_eq!(range, expected_end, "Case failed: [{}]", sample); } } } #[test] fn test_behaviour_when_moving_to_end_of_prev_long_words() { let tests = [ ( "Basic backward motion from the middle of a word", vec![(1, Range::new(3, 3), Range::new(4, 0))], ), ("Starting from after boundary retreats the anchor", vec![(1, Range::new(0, 9), Range::new(8, 0))], ), ( "Jump to end of a word succeeded by whitespace", vec![(1, Range::new(10, 10), Range::new(10, 4))], ), ( " Jump to start of line from end of word preceded by whitespace", vec![(1, Range::new(3, 4), Range::new(4, 0))], ), ("Previous anchor is irrelevant for backward motions", vec![(1, Range::new(12, 5), Range::new(6, 0))]), ( " Starting from whitespace moves to first space in sequence", vec![(1, Range::new(0, 4), Range::new(4, 0))], ), ("Identifiers_with_underscores are considered a single word", vec![(1, Range::new(0, 20), Range::new(20, 0))]), ( "Jumping\n \nback through a newline selects whitespace", vec![(1, Range::new(0, 13), Range::new(12, 8))], ), ( "Jumping to start of word from the end selects the word", vec![(1, Range::new(6, 7), Range::new(7, 0))], ), ( "alphanumeric.!,and.?=punctuation are treated exactly the same", vec![(1, Range::new(29, 30), Range::new(30, 0))], ), ( "... ... 
punctuation and spaces behave as expected", vec![ (1, Range::new(0, 10), Range::new(9, 3)), (1, Range::new(10, 6), Range::new(7, 3)), ], ), (".._.._ punctuation is joined by underscores into a single block", vec![(1, Range::new(0, 6), Range::new(6, 0))]), ( "Newlines\n\nare bridged seamlessly.", vec![(1, Range::new(0, 10), Range::new(8, 0))], ), ( "Jumping \n\n\n\n\nback from within a newline group selects previous block", vec![(1, Range::new(0, 13), Range::new(11, 7))], ), ( "Failed motions do not modify the range", vec![(0, Range::new(3, 0), Range::new(3, 0))], ), ( "Multiple motions at once resolve correctly", vec![(3, Range::new(19, 19), Range::new(8, 0))], ), ( "Excessive motions are performed partially", vec![(999, Range::new(40, 40), Range::new(9, 0))], ), ( "", // Edge case of moving backwards in empty string vec![(1, Range::new(0, 0), Range::new(0, 0))], ), ( "\n\n\n\n\n", // Edge case of moving backwards in all newlines vec![(1, Range::new(5, 5), Range::new(0, 0))], ), (" \n \nJumping back through alternated space blocks and newlines selects the space blocks", vec![ (1, Range::new(0, 8), Range::new(7, 4)), (1, Range::new(7, 4), Range::new(3, 0)), ]), ("ヒーリ..クス multibyte characters behave as normal characters, including when interacting with punctuation", vec![ (1, Range::new(0, 8), Range::new(7, 0)), ]), ]; for (sample, scenario) in tests { for (count, begin, expected_end) in scenario.into_iter() { let range = move_prev_long_word_end(Rope::from(sample).slice(..), begin, count); assert_eq!(range, expected_end, "Case failed: [{}]", sample); } } } #[test] fn test_behaviour_when_moving_to_prev_paragraph_single() { let tests = [ ("#[|]#", "#[|]#"), ("#[s|]#tart at\nfirst char\n", "#[|s]#tart at\nfirst char\n"), ("start at\nlast char#[\n|]#", "#[|start at\nlast char\n]#"), ( "goto\nfirst\n\n#[p|]#aragraph", "#[|goto\nfirst\n\n]#paragraph", ), ( "goto\nfirst\n#[\n|]#paragraph", "#[|goto\nfirst\n\n]#paragraph", ), ( "goto\nsecond\n\np#[a|]#ragraph", 
"goto\nsecond\n\n#[|pa]#ragraph", ), ( "here\n\nhave\nmultiple\nparagraph\n\n\n\n\n#[|]#", "here\n\n#[|have\nmultiple\nparagraph\n\n\n\n\n]#", ), ]; for (before, expected) in tests { let (s, selection) = crate::test::print(before); let text = Rope::from(s.as_str()); let selection = selection.transform(|r| move_prev_paragraph(text.slice(..), r, 1, Movement::Move)); let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } #[test] fn test_behaviour_when_moving_to_prev_paragraph_double() { let tests = [ ( "on#[e|]#\n\ntwo\n\nthree\n\n", "#[|one]#\n\ntwo\n\nthree\n\n", ), ( "one\n\ntwo\n\nth#[r|]#ee\n\n", "one\n\n#[|two\n\nthr]#ee\n\n", ), ]; for (before, expected) in tests { let (s, selection) = crate::test::print(before); let text = Rope::from(s.as_str()); let selection = selection.transform(|r| move_prev_paragraph(text.slice(..), r, 2, Movement::Move)); let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } #[test] fn test_behaviour_when_moving_to_prev_paragraph_extend() { let tests = [ ( "one\n\n#[|two\n\n]#three\n\n", "#[|one\n\ntwo\n\n]#three\n\n", ), ( "#[|one\n\ntwo\n\n]#three\n\n", "#[|one\n\ntwo\n\n]#three\n\n", ), ]; for (before, expected) in tests { let (s, selection) = crate::test::print(before); let text = Rope::from(s.as_str()); let selection = selection .transform(|r| move_prev_paragraph(text.slice(..), r, 1, Movement::Extend)); let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } #[test] fn test_behaviour_when_moving_to_next_paragraph_single() { let tests = [ ("#[|]#", "#[|]#"), ("#[s|]#tart at\nfirst char\n", "#[start at\nfirst char\n|]#"), ("start at\nlast char#[\n|]#", "start at\nlast char#[\n|]#"), ( "a\nb\n\n#[g|]#oto\nthird\n\nparagraph", "a\nb\n\n#[goto\nthird\n\n|]#paragraph", ), ( "a\nb\n#[\n|]#goto\nthird\n\nparagraph", 
"a\nb\n\n#[goto\nthird\n\n|]#paragraph", ), ( "a\nb#[\n|]#\n\ngoto\nsecond\n\nparagraph", "a\nb#[\n\n|]#goto\nsecond\n\nparagraph", ), ( "here\n\nhave\n#[m|]#ultiple\nparagraph\n\n\n\n\n", "here\n\nhave\n#[multiple\nparagraph\n\n\n\n\n|]#", ), ( "#[t|]#ext\n\n\nafter two blank lines\n\nmore text\n", "#[text\n\n\n|]#after two blank lines\n\nmore text\n", ), ( "#[text\n\n\n|]#after two blank lines\n\nmore text\n", "text\n\n\n#[after two blank lines\n\n|]#more text\n", ), ]; for (before, expected) in tests { let (s, selection) = crate::test::print(before); let text = Rope::from(s.as_str()); let selection = selection.transform(|r| move_next_paragraph(text.slice(..), r, 1, Movement::Move)); let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } #[test] fn test_behaviour_when_moving_to_next_paragraph_double() { let tests = [ ( "one\n\ntwo\n\nth#[r|]#ee\n\n", "one\n\ntwo\n\nth#[ree\n\n|]#", ), ( "on#[e|]#\n\ntwo\n\nthree\n\n", "on#[e\n\ntwo\n\n|]#three\n\n", ), ]; for (before, expected) in tests { let (s, selection) = crate::test::print(before); let text = Rope::from(s.as_str()); let selection = selection.transform(|r| move_next_paragraph(text.slice(..), r, 2, Movement::Move)); let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } #[test] fn test_behaviour_when_moving_to_next_paragraph_extend() { let tests = [ ( "one\n\n#[two\n\n|]#three\n\n", "one\n\n#[two\n\nthree\n\n|]#", ), ( "one\n\n#[two\n\nthree\n\n|]#", "one\n\n#[two\n\nthree\n\n|]#", ), ]; for (before, expected) in tests { let (s, selection) = crate::test::print(before); let text = Rope::from(s.as_str()); let selection = selection .transform(|r| move_next_paragraph(text.slice(..), r, 1, Movement::Extend)); let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } } 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/object.rs000066400000000000000000000072051500614314100242530ustar00rootroot00000000000000use crate::{movement::Direction, syntax::TreeCursor, Range, RopeSlice, Selection, Syntax}; pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection { let cursor = &mut syntax.walk(); selection.transform(|range| { let from = text.char_to_byte(range.from()); let to = text.char_to_byte(range.to()); let byte_range = from..to; cursor.reset_to_byte_range(from, to); while cursor.node().byte_range() == byte_range { if !cursor.goto_parent() { break; } } let node = cursor.node(); let from = text.byte_to_char(node.start_byte()); let to = text.byte_to_char(node.end_byte()); Range::new(to, from).with_direction(range.direction()) }) } pub fn shrink_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection { select_node_impl( syntax, text, selection, |cursor| { cursor.goto_first_child(); }, None, ) } pub fn select_next_sibling(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection { select_node_impl( syntax, text, selection, |cursor| { while !cursor.goto_next_sibling() { if !cursor.goto_parent() { break; } } }, Some(Direction::Forward), ) } pub fn select_all_siblings(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection { selection.transform_iter(|range| { let mut cursor = syntax.walk(); let (from, to) = range.into_byte_range(text); cursor.reset_to_byte_range(from, to); if !cursor.goto_parent_with(|parent| parent.child_count() > 1) { return vec![range].into_iter(); } select_children(&mut cursor, text, range).into_iter() }) } pub fn select_all_children(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection { selection.transform_iter(|range| { let mut cursor = syntax.walk(); let (from, to) = range.into_byte_range(text); cursor.reset_to_byte_range(from, to); select_children(&mut cursor, text, range).into_iter() }) } fn 
select_children<'n>( cursor: &'n mut TreeCursor<'n>, text: RopeSlice, range: Range, ) -> Vec { let children = cursor .named_children() .map(|child| Range::from_node(child, text, range.direction())) .collect::>(); if !children.is_empty() { children } else { vec![range] } } pub fn select_prev_sibling(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection { select_node_impl( syntax, text, selection, |cursor| { while !cursor.goto_prev_sibling() { if !cursor.goto_parent() { break; } } }, Some(Direction::Backward), ) } fn select_node_impl( syntax: &Syntax, text: RopeSlice, selection: Selection, motion: F, direction: Option, ) -> Selection where F: Fn(&mut TreeCursor), { let cursor = &mut syntax.walk(); selection.transform(|range| { let from = text.char_to_byte(range.from()); let to = text.char_to_byte(range.to()); cursor.reset_to_byte_range(from, to); motion(cursor); let node = cursor.node(); let from = text.byte_to_char(node.start_byte()); let to = text.byte_to_char(node.end_byte()); Range::new(from, to).with_direction(direction.unwrap_or_else(|| range.direction())) }) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/position.rs000066400000000000000000001041541500614314100246520ustar00rootroot00000000000000use std::{ borrow::Cow, cmp::Ordering, ops::{Add, AddAssign, Sub, SubAssign}, }; use helix_stdx::rope::RopeSliceExt; use crate::{ chars::char_is_line_ending, doc_formatter::{DocumentFormatter, TextFormat}, graphemes::{ensure_grapheme_boundary_prev, grapheme_width}, line_ending::line_end_char_index, text_annotations::TextAnnotations, RopeSlice, }; /// Represents a single point in a text buffer. Zero indexed. 
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub struct Position { pub row: usize, pub col: usize, } impl AddAssign for Position { fn add_assign(&mut self, rhs: Self) { self.row += rhs.row; self.col += rhs.col; } } impl SubAssign for Position { fn sub_assign(&mut self, rhs: Self) { self.row -= rhs.row; self.col -= rhs.col; } } impl Sub for Position { type Output = Position; fn sub(mut self, rhs: Self) -> Self::Output { self -= rhs; self } } impl Add for Position { type Output = Position; fn add(mut self, rhs: Self) -> Self::Output { self += rhs; self } } impl Position { pub const fn new(row: usize, col: usize) -> Self { Self { row, col } } pub const fn is_zero(self) -> bool { self.row == 0 && self.col == 0 } // TODO: generalize pub fn traverse(self, text: &crate::Tendril) -> Self { let Self { mut row, mut col } = self; // TODO: there should be a better way here let mut chars = text.chars().peekable(); while let Some(ch) = chars.next() { if char_is_line_ending(ch) && !(ch == '\r' && chars.peek() == Some(&'\n')) { row += 1; col = 0; } else { col += 1; } } Self { row, col } } } impl From<(usize, usize)> for Position { fn from(tuple: (usize, usize)) -> Self { Self { row: tuple.0, col: tuple.1, } } } impl From for tree_sitter::Point { fn from(pos: Position) -> Self { Self::new(pos.row, pos.col) } } /// Convert a character index to (line, column) coordinates. /// /// column in `char` count which can be used for row:column display in /// status line. See [`visual_coords_at_pos`] for a visual one. pub fn coords_at_pos(text: RopeSlice, pos: usize) -> Position { let line = text.char_to_line(pos); let line_start = text.line_to_char(line); let pos = ensure_grapheme_boundary_prev(text, pos); let col = text.slice(line_start..pos).graphemes().count(); Position::new(line, col) } /// Convert a character index to (line, column) coordinates visually. 
/// /// Takes \t, double-width characters (CJK) into account as well as text /// not in the document in the future. /// See [`coords_at_pos`] for an "objective" one. /// /// This function should be used very rarely. Usually `visual_offset_from_anchor` /// or `visual_offset_from_block` is preferable. However when you want to compute the /// actual visual row/column in the text (not what is actually shown on screen) /// then you should use this function. For example aligning text should ignore virtual /// text and softwrap. #[deprecated = "Doesn't account for softwrap or decorations, use visual_offset_from_anchor instead"] pub fn visual_coords_at_pos(text: RopeSlice, pos: usize, tab_width: usize) -> Position { let line = text.char_to_line(pos); let line_start = text.line_to_char(line); let pos = ensure_grapheme_boundary_prev(text, pos); let mut col = 0; for grapheme in text.slice(line_start..pos).graphemes() { if grapheme == "\t" { col += tab_width - (col % tab_width); } else { let grapheme = Cow::from(grapheme); col += grapheme_width(&grapheme); } } Position::new(line, col) } /// Returns the visual offset from the start of the first visual line /// in the block that contains anchor. 
/// Text is always wrapped at blocks, they usually correspond to /// actual line breaks but for very long lines /// softwrapping positions are estimated with an O(1) algorithm /// to ensure consistent performance for large lines (currently unimplemented) /// /// Usually you want to use `visual_offset_from_anchor` instead but this function /// can be useful (and faster) if /// * You already know the visual position of the block /// * You only care about the horizontal offset (column) and not the vertical offset (row) pub fn visual_offset_from_block( text: RopeSlice, anchor: usize, pos: usize, text_fmt: &TextFormat, annotations: &TextAnnotations, ) -> (Position, usize) { let mut last_pos = Position::default(); let mut formatter = DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, annotations, anchor); let block_start = formatter.next_char_pos(); while let Some(grapheme) = formatter.next() { last_pos = grapheme.visual_pos; if formatter.next_char_pos() > pos { return (grapheme.visual_pos, block_start); } } (last_pos, block_start) } /// Returns the height of the given text when softwrapping pub fn softwrapped_dimensions(text: RopeSlice, text_fmt: &TextFormat) -> (usize, u16) { let last_pos = visual_offset_from_block(text, 0, usize::MAX, text_fmt, &TextAnnotations::default()).0; if last_pos.row == 0 { (1, last_pos.col as u16) } else { (last_pos.row + 1, text_fmt.viewport_width) } } #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum VisualOffsetError { PosBeforeAnchorRow, PosAfterMaxRow, } /// Returns the visual offset from the start of the visual line /// that contains anchor. 
pub fn visual_offset_from_anchor( text: RopeSlice, anchor: usize, pos: usize, text_fmt: &TextFormat, annotations: &TextAnnotations, max_rows: usize, ) -> Result<(Position, usize), VisualOffsetError> { let mut formatter = DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, annotations, anchor); let mut anchor_line = None; let mut found_pos = None; let mut last_pos = Position::default(); let block_start = formatter.next_char_pos(); if pos < block_start { return Err(VisualOffsetError::PosBeforeAnchorRow); } while let Some(grapheme) = formatter.next() { last_pos = grapheme.visual_pos; if formatter.next_char_pos() > pos { if let Some(anchor_line) = anchor_line { last_pos.row -= anchor_line; return Ok((last_pos, block_start)); } else { found_pos = Some(last_pos); } } if formatter.next_char_pos() > anchor && anchor_line.is_none() { if let Some(mut found_pos) = found_pos { return if found_pos.row == last_pos.row { found_pos.row = 0; Ok((found_pos, block_start)) } else { Err(VisualOffsetError::PosBeforeAnchorRow) }; } else { anchor_line = Some(last_pos.row); } } if let Some(anchor_line) = anchor_line { if grapheme.visual_pos.row >= anchor_line + max_rows { return Err(VisualOffsetError::PosAfterMaxRow); } } } let anchor_line = anchor_line.unwrap_or(last_pos.row); last_pos.row -= anchor_line; Ok((last_pos, block_start)) } /// Convert (line, column) coordinates to a character index. /// /// If the `line` coordinate is beyond the end of the file, the EOF /// position will be returned. /// /// If the `column` coordinate is past the end of the given line, the /// line-end position will be returned. What constitutes the "line-end /// position" depends on the parameter `limit_before_line_ending`. If it's /// `true`, the line-end position will be just *before* the line ending /// character. If `false` it will be just *after* the line ending /// character--on the border between the current line and the next. 
/// /// Usually you only want `limit_before_line_ending` to be `true` if you're working /// with left-side block-cursor positions, as this prevents the the block cursor /// from jumping to the next line. Otherwise you typically want it to be `false`, /// such as when dealing with raw anchor/head positions. pub fn pos_at_coords(text: RopeSlice, coords: Position, limit_before_line_ending: bool) -> usize { let Position { mut row, col } = coords; if limit_before_line_ending { row = row.min(text.len_lines() - 1); }; let line_start = text.line_to_char(row); let line_end = if limit_before_line_ending { line_end_char_index(&text, row) } else { text.line_to_char((row + 1).min(text.len_lines())) }; let mut col_char_offset = 0; for (i, g) in text.slice(line_start..line_end).graphemes().enumerate() { if i == col { break; } col_char_offset += g.chars().count(); } line_start + col_char_offset } /// Convert visual (line, column) coordinates to a character index. /// /// If the `line` coordinate is beyond the end of the file, the EOF /// position will be returned. /// /// If the `column` coordinate is past the end of the given line, the /// line-end position (in this case, just before the line ending /// character) will be returned. /// This function should be used very rarely. Usually `char_idx_at_visual_offset` is preferable. /// However when you want to compute a char position from the visual row/column in the text /// (not what is actually shown on screen) then you should use this function. /// For example aligning text should ignore virtual text and softwrap. 
#[deprecated = "Doesn't account for softwrap or decorations, use char_idx_at_visual_offset instead"] pub fn pos_at_visual_coords(text: RopeSlice, coords: Position, tab_width: usize) -> usize { let Position { mut row, col } = coords; row = row.min(text.len_lines() - 1); let line_start = text.line_to_char(row); let line_end = line_end_char_index(&text, row); let mut col_char_offset = 0; let mut cols_remaining = col; for grapheme in text.slice(line_start..line_end).graphemes() { let grapheme_width = if grapheme == "\t" { tab_width - ((col - cols_remaining) % tab_width) } else { let grapheme = Cow::from(grapheme); grapheme_width(&grapheme) }; // If pos is in the middle of a wider grapheme (tab for example) // return the starting offset. if grapheme_width > cols_remaining { break; } cols_remaining -= grapheme_width; col_char_offset += grapheme.chars().count(); } line_start + col_char_offset } /// Returns the char index on the visual line `row_offset` below the visual line of /// the provided char index `anchor` that is closest to the supplied visual `column`. /// /// If the targeted visual line is entirely covered by virtual text the last /// char position before the virtual text and a virtual offset is returned instead. /// /// If no (text) grapheme starts at exactly at the specified column the /// start of the grapheme to the left is returned. If there is no grapheme /// to the left (for example if the line starts with virtual text) then the positioning /// of the next grapheme to the right is returned. /// /// If the `line` coordinate is beyond the end of the file, the EOF /// position will be returned. /// /// If the `column` coordinate is past the end of the given line, the /// line-end position (in this case, just before the line ending /// character) will be returned. 
/// /// # Returns /// /// `(real_char_idx, virtual_lines)` /// /// The nearest character idx "closest" (see above) to the specified visual offset /// on the visual line is returned if the visual line contains any text: /// If the visual line at the specified offset is a virtual line generated by a `LineAnnotation` /// the previous char_index is returned, together with the remaining vertical offset (`virtual_lines`) pub fn char_idx_at_visual_offset( text: RopeSlice, mut anchor: usize, mut row_offset: isize, column: usize, text_fmt: &TextFormat, annotations: &TextAnnotations, ) -> (usize, usize) { let mut pos = anchor; // convert row relative to visual line containing anchor to row relative to a block containing anchor (anchor may change) loop { let (visual_pos_in_block, block_char_offset) = visual_offset_from_block(text, anchor, pos, text_fmt, annotations); row_offset += visual_pos_in_block.row as isize; anchor = block_char_offset; if row_offset >= 0 { break; } if block_char_offset == 0 { row_offset = 0; break; } // the row_offset is negative so we need to look at the previous block // set the anchor to the last char before the current block so that we can compute // the distance of this block from the start of the previous block pos = anchor; anchor -= 1; } char_idx_at_visual_block_offset( text, anchor, row_offset as usize, column, text_fmt, annotations, ) } /// This function behaves the same as `char_idx_at_visual_offset`, except that /// the vertical offset `row` is always computed relative to the block that contains `anchor` /// instead of the visual line that contains `anchor`. 
/// Usually `char_idx_at_visual_offset` is more useful but this function can be /// used in some situations as an optimization when `visual_offset_from_block` was used /// /// # Returns /// /// `(real_char_idx, virtual_lines)` /// /// See `char_idx_at_visual_offset` for details pub fn char_idx_at_visual_block_offset( text: RopeSlice, anchor: usize, row: usize, column: usize, text_fmt: &TextFormat, annotations: &TextAnnotations, ) -> (usize, usize) { let mut formatter = DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, annotations, anchor); let mut last_char_idx = formatter.next_char_pos(); let mut found_non_virtual_on_row = false; let mut last_row = 0; for grapheme in &mut formatter { match grapheme.visual_pos.row.cmp(&row) { Ordering::Equal => { if grapheme.visual_pos.col + grapheme.width() > column { if !grapheme.is_virtual() { return (grapheme.char_idx, 0); } else if found_non_virtual_on_row { return (last_char_idx, 0); } } else if !grapheme.is_virtual() { found_non_virtual_on_row = true; last_char_idx = grapheme.char_idx; } } Ordering::Greater if found_non_virtual_on_row => return (last_char_idx, 0), Ordering::Greater => return (last_char_idx, row - last_row), Ordering::Less => { if !grapheme.is_virtual() { last_row = grapheme.visual_pos.row; last_char_idx = grapheme.char_idx; } } } } (formatter.next_char_pos(), 0) } #[cfg(test)] mod test { use super::*; use crate::text_annotations::InlineAnnotation; use crate::Rope; #[test] fn test_ordering() { // (0, 5) is less than (1, 0) assert!(Position::new(0, 5) < Position::new(1, 0)); } #[test] fn test_coords_at_pos() { let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ"); let slice = text.slice(..); assert_eq!(coords_at_pos(slice, 0), (0, 0).into()); assert_eq!(coords_at_pos(slice, 5), (0, 5).into()); // position on \n assert_eq!(coords_at_pos(slice, 6), (1, 0).into()); // position on w assert_eq!(coords_at_pos(slice, 7), (1, 1).into()); // position on o assert_eq!(coords_at_pos(slice, 10), (1, 4).into()); // position on d // 
Test with wide characters. let text = Rope::from("今日はいい\n"); let slice = text.slice(..); assert_eq!(coords_at_pos(slice, 0), (0, 0).into()); assert_eq!(coords_at_pos(slice, 1), (0, 1).into()); assert_eq!(coords_at_pos(slice, 2), (0, 2).into()); assert_eq!(coords_at_pos(slice, 3), (0, 3).into()); assert_eq!(coords_at_pos(slice, 4), (0, 4).into()); assert_eq!(coords_at_pos(slice, 5), (0, 5).into()); assert_eq!(coords_at_pos(slice, 6), (1, 0).into()); // Test with grapheme clusters. let text = Rope::from("a̐éö̲\r\n"); let slice = text.slice(..); assert_eq!(coords_at_pos(slice, 0), (0, 0).into()); assert_eq!(coords_at_pos(slice, 2), (0, 1).into()); assert_eq!(coords_at_pos(slice, 4), (0, 2).into()); assert_eq!(coords_at_pos(slice, 7), (0, 3).into()); assert_eq!(coords_at_pos(slice, 9), (1, 0).into()); // Test with wide-character grapheme clusters. let text = Rope::from("किमपि\n"); let slice = text.slice(..); assert_eq!(coords_at_pos(slice, 0), (0, 0).into()); assert_eq!(coords_at_pos(slice, 2), (0, 1).into()); assert_eq!(coords_at_pos(slice, 3), (0, 2).into()); assert_eq!(coords_at_pos(slice, 5), (0, 3).into()); assert_eq!(coords_at_pos(slice, 6), (1, 0).into()); // Test with tabs. let text = Rope::from("\tHello\n"); let slice = text.slice(..); assert_eq!(coords_at_pos(slice, 0), (0, 0).into()); assert_eq!(coords_at_pos(slice, 1), (0, 1).into()); assert_eq!(coords_at_pos(slice, 2), (0, 2).into()); } #[test] #[allow(deprecated)] fn test_visual_coords_at_pos() { let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ"); let slice = text.slice(..); assert_eq!(visual_coords_at_pos(slice, 0, 8), (0, 0).into()); assert_eq!(visual_coords_at_pos(slice, 5, 8), (0, 5).into()); // position on \n assert_eq!(visual_coords_at_pos(slice, 6, 8), (1, 0).into()); // position on w assert_eq!(visual_coords_at_pos(slice, 7, 8), (1, 1).into()); // position on o assert_eq!(visual_coords_at_pos(slice, 10, 8), (1, 4).into()); // position on d // Test with wide characters. 
let text = Rope::from("今日はいい\n"); let slice = text.slice(..); assert_eq!(visual_coords_at_pos(slice, 0, 8), (0, 0).into()); assert_eq!(visual_coords_at_pos(slice, 1, 8), (0, 2).into()); assert_eq!(visual_coords_at_pos(slice, 2, 8), (0, 4).into()); assert_eq!(visual_coords_at_pos(slice, 3, 8), (0, 6).into()); assert_eq!(visual_coords_at_pos(slice, 4, 8), (0, 8).into()); assert_eq!(visual_coords_at_pos(slice, 5, 8), (0, 10).into()); assert_eq!(visual_coords_at_pos(slice, 6, 8), (1, 0).into()); // Test with grapheme clusters. let text = Rope::from("a̐éö̲\r\n"); let slice = text.slice(..); assert_eq!(visual_coords_at_pos(slice, 0, 8), (0, 0).into()); assert_eq!(visual_coords_at_pos(slice, 2, 8), (0, 1).into()); assert_eq!(visual_coords_at_pos(slice, 4, 8), (0, 2).into()); assert_eq!(visual_coords_at_pos(slice, 7, 8), (0, 3).into()); assert_eq!(visual_coords_at_pos(slice, 9, 8), (1, 0).into()); // Test with wide-character grapheme clusters. // TODO: account for cluster. let text = Rope::from("किमपि\n"); let slice = text.slice(..); assert_eq!(visual_coords_at_pos(slice, 0, 8), (0, 0).into()); assert_eq!(visual_coords_at_pos(slice, 2, 8), (0, 2).into()); assert_eq!(visual_coords_at_pos(slice, 3, 8), (0, 3).into()); assert_eq!(visual_coords_at_pos(slice, 5, 8), (0, 5).into()); assert_eq!(visual_coords_at_pos(slice, 6, 8), (1, 0).into()); // Test with tabs. 
let text = Rope::from("\tHello\n"); let slice = text.slice(..); assert_eq!(visual_coords_at_pos(slice, 0, 8), (0, 0).into()); assert_eq!(visual_coords_at_pos(slice, 1, 8), (0, 8).into()); assert_eq!(visual_coords_at_pos(slice, 2, 8), (0, 9).into()); } #[test] fn test_visual_off_from_block() { let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ"); let slice = text.slice(..); let annot = TextAnnotations::default(); let text_fmt = TextFormat::default(); assert_eq!( visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0, (0, 0).into() ); assert_eq!( visual_offset_from_block(slice, 0, 5, &text_fmt, &annot).0, (0, 5).into() ); // position on \n assert_eq!( visual_offset_from_block(slice, 0, 6, &text_fmt, &annot).0, (1, 0).into() ); // position on w assert_eq!( visual_offset_from_block(slice, 0, 7, &text_fmt, &annot).0, (1, 1).into() ); // position on o assert_eq!( visual_offset_from_block(slice, 0, 10, &text_fmt, &annot).0, (1, 4).into() ); // position on d // Test with wide characters. let text = Rope::from("今日はいい\n"); let slice = text.slice(..); assert_eq!( visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0, (0, 0).into() ); assert_eq!( visual_offset_from_block(slice, 0, 1, &text_fmt, &annot).0, (0, 2).into() ); assert_eq!( visual_offset_from_block(slice, 0, 2, &text_fmt, &annot).0, (0, 4).into() ); assert_eq!( visual_offset_from_block(slice, 0, 3, &text_fmt, &annot).0, (0, 6).into() ); assert_eq!( visual_offset_from_block(slice, 0, 4, &text_fmt, &annot).0, (0, 8).into() ); assert_eq!( visual_offset_from_block(slice, 0, 5, &text_fmt, &annot).0, (0, 10).into() ); assert_eq!( visual_offset_from_block(slice, 0, 6, &text_fmt, &annot).0, (1, 0).into() ); // Test with grapheme clusters. 
let text = Rope::from("a̐éö̲\r\n"); let slice = text.slice(..); assert_eq!( visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0, (0, 0).into() ); assert_eq!( visual_offset_from_block(slice, 0, 2, &text_fmt, &annot).0, (0, 1).into() ); assert_eq!( visual_offset_from_block(slice, 0, 4, &text_fmt, &annot).0, (0, 2).into() ); assert_eq!( visual_offset_from_block(slice, 0, 7, &text_fmt, &annot).0, (0, 3).into() ); assert_eq!( visual_offset_from_block(slice, 0, 9, &text_fmt, &annot).0, (1, 0).into() ); // Test with wide-character grapheme clusters. // TODO: account for cluster. let text = Rope::from("किमपि\n"); let slice = text.slice(..); assert_eq!( visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0, (0, 0).into() ); assert_eq!( visual_offset_from_block(slice, 0, 2, &text_fmt, &annot).0, (0, 2).into() ); assert_eq!( visual_offset_from_block(slice, 0, 3, &text_fmt, &annot).0, (0, 3).into() ); assert_eq!( visual_offset_from_block(slice, 0, 5, &text_fmt, &annot).0, (0, 5).into() ); assert_eq!( visual_offset_from_block(slice, 0, 6, &text_fmt, &annot).0, (1, 0).into() ); // Test with tabs. 
let text = Rope::from("\tHello\n"); let slice = text.slice(..); assert_eq!( visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0, (0, 0).into() ); assert_eq!( visual_offset_from_block(slice, 0, 1, &text_fmt, &annot).0, (0, 4).into() ); assert_eq!( visual_offset_from_block(slice, 0, 2, &text_fmt, &annot).0, (0, 5).into() ); } #[test] fn test_pos_at_coords() { let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ"); let slice = text.slice(..); assert_eq!(pos_at_coords(slice, (0, 0).into(), false), 0); assert_eq!(pos_at_coords(slice, (0, 5).into(), false), 5); // position on \n assert_eq!(pos_at_coords(slice, (0, 6).into(), false), 6); // position after \n assert_eq!(pos_at_coords(slice, (0, 6).into(), true), 5); // position after \n assert_eq!(pos_at_coords(slice, (1, 0).into(), false), 6); // position on w assert_eq!(pos_at_coords(slice, (1, 1).into(), false), 7); // position on o assert_eq!(pos_at_coords(slice, (1, 4).into(), false), 10); // position on d // Test with wide characters. // TODO: account for character width. let text = Rope::from("今日はいい\n"); let slice = text.slice(..); assert_eq!(pos_at_coords(slice, (0, 0).into(), false), 0); assert_eq!(pos_at_coords(slice, (0, 1).into(), false), 1); assert_eq!(pos_at_coords(slice, (0, 2).into(), false), 2); assert_eq!(pos_at_coords(slice, (0, 3).into(), false), 3); assert_eq!(pos_at_coords(slice, (0, 4).into(), false), 4); assert_eq!(pos_at_coords(slice, (0, 5).into(), false), 5); assert_eq!(pos_at_coords(slice, (0, 6).into(), false), 6); assert_eq!(pos_at_coords(slice, (0, 6).into(), true), 5); assert_eq!(pos_at_coords(slice, (1, 0).into(), false), 6); // Test with grapheme clusters. 
let text = Rope::from("a̐éö̲\r\n"); let slice = text.slice(..); assert_eq!(pos_at_coords(slice, (0, 0).into(), false), 0); assert_eq!(pos_at_coords(slice, (0, 1).into(), false), 2); assert_eq!(pos_at_coords(slice, (0, 2).into(), false), 4); assert_eq!(pos_at_coords(slice, (0, 3).into(), false), 7); // \r\n is one char here assert_eq!(pos_at_coords(slice, (0, 4).into(), false), 9); assert_eq!(pos_at_coords(slice, (0, 4).into(), true), 7); assert_eq!(pos_at_coords(slice, (1, 0).into(), false), 9); // Test with wide-character grapheme clusters. // TODO: account for character width. let text = Rope::from("किमपि"); // 2 - 1 - 2 codepoints // TODO: delete handling as per https://news.ycombinator.com/item?id=20058454 let slice = text.slice(..); assert_eq!(pos_at_coords(slice, (0, 0).into(), false), 0); assert_eq!(pos_at_coords(slice, (0, 1).into(), false), 2); assert_eq!(pos_at_coords(slice, (0, 2).into(), false), 3); assert_eq!(pos_at_coords(slice, (0, 3).into(), false), 5); assert_eq!(pos_at_coords(slice, (0, 3).into(), true), 5); // Test with tabs. // Todo: account for tab stops. let text = Rope::from("\tHello\n"); let slice = text.slice(..); assert_eq!(pos_at_coords(slice, (0, 0).into(), false), 0); assert_eq!(pos_at_coords(slice, (0, 1).into(), false), 1); assert_eq!(pos_at_coords(slice, (0, 2).into(), false), 2); // Test out of bounds. 
let text = Rope::new(); let slice = text.slice(..); assert_eq!(pos_at_coords(slice, (10, 0).into(), true), 0); assert_eq!(pos_at_coords(slice, (0, 10).into(), true), 0); assert_eq!(pos_at_coords(slice, (10, 10).into(), true), 0); } #[test] #[allow(deprecated)] fn test_pos_at_visual_coords() { let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ"); let slice = text.slice(..); assert_eq!(pos_at_visual_coords(slice, (0, 0).into(), 4), 0); assert_eq!(pos_at_visual_coords(slice, (0, 5).into(), 4), 5); // position on \n assert_eq!(pos_at_visual_coords(slice, (0, 6).into(), 4), 5); // position after \n assert_eq!(pos_at_visual_coords(slice, (1, 0).into(), 4), 6); // position on w assert_eq!(pos_at_visual_coords(slice, (1, 1).into(), 4), 7); // position on o assert_eq!(pos_at_visual_coords(slice, (1, 4).into(), 4), 10); // position on d // Test with wide characters. let text = Rope::from("今日はいい\n"); let slice = text.slice(..); assert_eq!(pos_at_visual_coords(slice, (0, 0).into(), 4), 0); assert_eq!(pos_at_visual_coords(slice, (0, 1).into(), 4), 0); assert_eq!(pos_at_visual_coords(slice, (0, 2).into(), 4), 1); assert_eq!(pos_at_visual_coords(slice, (0, 3).into(), 4), 1); assert_eq!(pos_at_visual_coords(slice, (0, 4).into(), 4), 2); assert_eq!(pos_at_visual_coords(slice, (0, 5).into(), 4), 2); assert_eq!(pos_at_visual_coords(slice, (0, 6).into(), 4), 3); assert_eq!(pos_at_visual_coords(slice, (0, 7).into(), 4), 3); assert_eq!(pos_at_visual_coords(slice, (0, 8).into(), 4), 4); assert_eq!(pos_at_visual_coords(slice, (0, 9).into(), 4), 4); // assert_eq!(pos_at_visual_coords(slice, (0, 10).into(), 4, false), 5); // assert_eq!(pos_at_visual_coords(slice, (0, 10).into(), 4, true), 5); assert_eq!(pos_at_visual_coords(slice, (1, 0).into(), 4), 6); // Test with grapheme clusters. 
let text = Rope::from("a̐éö̲\r\n"); let slice = text.slice(..); assert_eq!(pos_at_visual_coords(slice, (0, 0).into(), 4), 0); assert_eq!(pos_at_visual_coords(slice, (0, 1).into(), 4), 2); assert_eq!(pos_at_visual_coords(slice, (0, 2).into(), 4), 4); assert_eq!(pos_at_visual_coords(slice, (0, 3).into(), 4), 7); // \r\n is one char here assert_eq!(pos_at_visual_coords(slice, (0, 4).into(), 4), 7); assert_eq!(pos_at_visual_coords(slice, (1, 0).into(), 4), 9); // Test with wide-character grapheme clusters. let text = Rope::from("किमपि"); // 2 - 1 - 2 codepoints // TODO: delete handling as per https://news.ycombinator.com/item?id=20058454 let slice = text.slice(..); assert_eq!(pos_at_visual_coords(slice, (0, 0).into(), 4), 0); assert_eq!(pos_at_visual_coords(slice, (0, 1).into(), 4), 0); assert_eq!(pos_at_visual_coords(slice, (0, 2).into(), 4), 2); assert_eq!(pos_at_visual_coords(slice, (0, 3).into(), 4), 3); // Test with tabs. let text = Rope::from("\tHello\n"); let slice = text.slice(..); assert_eq!(pos_at_visual_coords(slice, (0, 0).into(), 4), 0); assert_eq!(pos_at_visual_coords(slice, (0, 1).into(), 4), 0); assert_eq!(pos_at_visual_coords(slice, (0, 2).into(), 4), 0); assert_eq!(pos_at_visual_coords(slice, (0, 3).into(), 4), 0); assert_eq!(pos_at_visual_coords(slice, (0, 4).into(), 4), 1); assert_eq!(pos_at_visual_coords(slice, (0, 5).into(), 4), 2); // Test out of bounds. 
let text = Rope::new(); let slice = text.slice(..); assert_eq!(pos_at_visual_coords(slice, (10, 0).into(), 4), 0); assert_eq!(pos_at_visual_coords(slice, (0, 10).into(), 4), 0); assert_eq!(pos_at_visual_coords(slice, (10, 10).into(), 4), 0); } #[test] fn test_char_idx_at_visual_row_offset_inline_annotation() { let text = Rope::from("foo\nbar"); let slice = text.slice(..); let mut text_fmt = TextFormat::default(); let annotations = [InlineAnnotation { text: "x".repeat(100).into(), char_idx: 3, }]; text_fmt.soft_wrap = true; assert_eq!( char_idx_at_visual_offset( slice, 0, 1, 0, &text_fmt, TextAnnotations::default().add_inline_annotations(&annotations, None) ), (2, 1) ); } #[test] fn test_char_idx_at_visual_row_offset() { let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ\nfoo"); let slice = text.slice(..); let mut text_fmt = TextFormat::default(); for i in 0isize..3isize { for j in -2isize..=2isize { if !(0..3).contains(&(i + j)) { continue; } println!("{i} {j}"); assert_eq!( char_idx_at_visual_offset( slice, slice.line_to_char(i as usize), j, 3, &text_fmt, &TextAnnotations::default(), ) .0, slice.line_to_char((i + j) as usize) + 3 ); } } text_fmt.soft_wrap = true; let mut softwrapped_text = "foo ".repeat(10); softwrapped_text.push('\n'); let last_char = softwrapped_text.len() - 1; let text = Rope::from(softwrapped_text.repeat(3)); let slice = text.slice(..); assert_eq!( char_idx_at_visual_offset( slice, last_char, 0, 0, &text_fmt, &TextAnnotations::default(), ) .0, 32 ); assert_eq!( char_idx_at_visual_offset( slice, last_char, -1, 0, &text_fmt, &TextAnnotations::default(), ) .0, 16 ); assert_eq!( char_idx_at_visual_offset( slice, last_char, -2, 0, &text_fmt, &TextAnnotations::default(), ) .0, 0 ); assert_eq!( char_idx_at_visual_offset( slice, softwrapped_text.len() + last_char, -2, 0, &text_fmt, &TextAnnotations::default(), ) .0, softwrapped_text.len() ); assert_eq!( char_idx_at_visual_offset( slice, softwrapped_text.len() + last_char, -5, 0, &text_fmt, 
&TextAnnotations::default(), ) .0, 0 ); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/rope_reader.rs000066400000000000000000000015711500614314100252740ustar00rootroot00000000000000use std::io; use ropey::iter::Chunks; use ropey::RopeSlice; pub struct RopeReader<'a> { current_chunk: &'a [u8], chunks: Chunks<'a>, } impl<'a> RopeReader<'a> { pub fn new(rope: RopeSlice<'a>) -> RopeReader<'a> { RopeReader { current_chunk: &[], chunks: rope.chunks(), } } } impl io::Read for RopeReader<'_> { fn read(&mut self, mut buf: &mut [u8]) -> io::Result { let buf_len = buf.len(); loop { let read_bytes = self.current_chunk.read(buf)?; buf = &mut buf[read_bytes..]; if buf.is_empty() { return Ok(buf_len); } if let Some(next_chunk) = self.chunks.next() { self.current_chunk = next_chunk.as_bytes(); } else { return Ok(buf_len - buf.len()); } } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/search.rs000066400000000000000000000022751500614314100242540ustar00rootroot00000000000000use crate::RopeSlice; // TODO: switch to std::str::Pattern when it is stable. 
pub trait CharMatcher { fn char_match(&self, ch: char) -> bool; } impl CharMatcher for char { fn char_match(&self, ch: char) -> bool { *self == ch } } impl bool> CharMatcher for F { fn char_match(&self, ch: char) -> bool { (*self)(&ch) } } pub fn find_nth_next( text: RopeSlice, char_matcher: M, mut pos: usize, n: usize, ) -> Option { if pos >= text.len_chars() || n == 0 { return None; } let mut chars = text.chars_at(pos); for _ in 0..n { loop { let c = chars.next()?; pos += 1; if char_matcher.char_match(c) { break; } } } Some(pos - 1) } pub fn find_nth_prev(text: RopeSlice, ch: char, mut pos: usize, n: usize) -> Option { if pos == 0 || n == 0 { return None; } let mut chars = text.chars_at(pos); for _ in 0..n { loop { let c = chars.prev()?; pos -= 1; if c == ch { break; } } } Some(pos) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/selection.rs000066400000000000000000001334321500614314100247740ustar00rootroot00000000000000//! Selections are the primary editing construct. Even cursors are //! defined as a selection range. //! //! All positioning is done via `char` offsets into the buffer. use crate::{ graphemes::{ ensure_grapheme_boundary_next, ensure_grapheme_boundary_prev, next_grapheme_boundary, prev_grapheme_boundary, }, line_ending::get_line_ending, movement::Direction, Assoc, ChangeSet, RopeSlice, }; use helix_stdx::range::is_subset; use helix_stdx::rope::{self, RopeSliceExt}; use smallvec::{smallvec, SmallVec}; use std::{borrow::Cow, iter, slice}; use tree_sitter::Node; /// A single selection range. /// /// A range consists of an "anchor" and "head" position in /// the text. The head is the part that the user moves when /// directly extending a selection. The head and anchor /// can be in any order, or even share the same position. /// /// The anchor and head positions use gap indexing, meaning /// that their indices represent the gaps *between* `char`s /// rather than the `char`s themselves. 
For example, 1 /// represents the position between the first and second `char`. /// /// Below are some examples of `Range` configurations. /// The anchor and head indices are shown as "(anchor, head)" /// tuples, followed by example text with "[" and "]" symbols /// representing the anchor and head positions: /// /// - (0, 3): `[Som]e text`. /// - (3, 0): `]Som[e text`. /// - (2, 7): `So[me te]xt`. /// - (1, 1): `S[]ome text`. /// /// Ranges are considered to be inclusive on the left and /// exclusive on the right, regardless of anchor-head ordering. /// This means, for example, that non-zero-width ranges that /// are directly adjacent, sharing an edge, do not overlap. /// However, a zero-width range will overlap with the shared /// left-edge of another range. /// /// By convention, user-facing ranges are considered to have /// a block cursor on the head-side of the range that spans a /// single grapheme inward from the range's edge. There are a /// variety of helper methods on `Range` for working in terms of /// that block cursor, all of which have `cursor` in their name. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct Range { /// The anchor of the range: the side that doesn't move when extending. pub anchor: usize, /// The head of the range, moved when extending. pub head: usize, /// The previous visual offset (softwrapped lines and columns) from /// the start of the line pub old_visual_position: Option<(u32, u32)>, } impl Range { pub fn new(anchor: usize, head: usize) -> Self { Self { anchor, head, old_visual_position: None, } } pub fn point(head: usize) -> Self { Self::new(head, head) } pub fn from_node(node: Node, text: RopeSlice, direction: Direction) -> Self { let from = text.byte_to_char(node.start_byte()); let to = text.byte_to_char(node.end_byte()); Range::new(from, to).with_direction(direction) } /// Start of the range. #[inline] #[must_use] pub fn from(&self) -> usize { std::cmp::min(self.anchor, self.head) } /// End of the range. 
#[inline] #[must_use] pub fn to(&self) -> usize { std::cmp::max(self.anchor, self.head) } /// Total length of the range. #[inline] #[must_use] pub fn len(&self) -> usize { self.to() - self.from() } /// The (inclusive) range of lines that the range overlaps. #[inline] #[must_use] pub fn line_range(&self, text: RopeSlice) -> (usize, usize) { let from = self.from(); let to = if self.is_empty() { self.to() } else { prev_grapheme_boundary(text, self.to()).max(from) }; (text.char_to_line(from), text.char_to_line(to)) } /// `true` when head and anchor are at the same position. #[inline] pub fn is_empty(&self) -> bool { self.anchor == self.head } /// `Direction::Backward` when head < anchor. /// `Direction::Forward` otherwise. #[inline] #[must_use] pub fn direction(&self) -> Direction { if self.head < self.anchor { Direction::Backward } else { Direction::Forward } } /// Flips the direction of the selection pub fn flip(&self) -> Self { Self { anchor: self.head, head: self.anchor, old_visual_position: self.old_visual_position, } } /// Returns the selection if it goes in the direction of `direction`, /// flipping the selection otherwise. pub fn with_direction(self, direction: Direction) -> Self { if self.direction() == direction { self } else { self.flip() } } /// Check two ranges for overlap. #[must_use] pub fn overlaps(&self, other: &Self) -> bool { // To my eye, it's non-obvious why this works, but I arrived // at it after transforming the slower version that explicitly // enumerated more cases. The unit tests are thorough. self.from() == other.from() || (self.to() > other.from() && other.to() > self.from()) } #[inline] pub fn contains_range(&self, other: &Self) -> bool { self.from() <= other.from() && self.to() >= other.to() } pub fn contains(&self, pos: usize) -> bool { self.from() <= pos && pos < self.to() } /// Map a range through a set of changes. Returns a new range representing /// the same position after the changes are applied. 
Note that this /// function runs in O(N) (N is number of changes) and can therefore /// cause performance problems if run for a large number of ranges as the /// complexity is then O(MN) (for multicuror M=N usually). Instead use /// [Selection::map] or [ChangeSet::update_positions]. pub fn map(mut self, changes: &ChangeSet) -> Self { use std::cmp::Ordering; if changes.is_empty() { return self; } let positions_to_map = match self.anchor.cmp(&self.head) { Ordering::Equal => [ (&mut self.anchor, Assoc::AfterSticky), (&mut self.head, Assoc::AfterSticky), ], Ordering::Less => [ (&mut self.anchor, Assoc::AfterSticky), (&mut self.head, Assoc::BeforeSticky), ], Ordering::Greater => [ (&mut self.head, Assoc::AfterSticky), (&mut self.anchor, Assoc::BeforeSticky), ], }; changes.update_positions(positions_to_map.into_iter()); self.old_visual_position = None; self } /// Extend the range to cover at least `from` `to`. #[must_use] pub fn extend(&self, from: usize, to: usize) -> Self { debug_assert!(from <= to); if self.anchor <= self.head { Self { anchor: self.anchor.min(from), head: self.head.max(to), old_visual_position: None, } } else { Self { anchor: self.anchor.max(to), head: self.head.min(from), old_visual_position: None, } } } /// Returns a range that encompasses both input ranges. /// /// This is like `extend()`, but tries to negotiate the /// anchor/head ordering between the two input ranges. #[must_use] pub fn merge(&self, other: Self) -> Self { if self.anchor > self.head && other.anchor > other.head { Range { anchor: self.anchor.max(other.anchor), head: self.head.min(other.head), old_visual_position: None, } } else { Range { anchor: self.from().min(other.from()), head: self.to().max(other.to()), old_visual_position: None, } } } // groupAt /// Returns the text inside this range given the text of the whole buffer. /// /// The returned `Cow` is a reference if the range of text is inside a single /// chunk of the rope. Otherwise a copy of the text is returned. 
Consider /// using `slice` instead if you do not need a `Cow` or `String` to avoid copying. #[inline] pub fn fragment<'a, 'b: 'a>(&'a self, text: RopeSlice<'b>) -> Cow<'b, str> { self.slice(text).into() } /// Returns the text inside this range given the text of the whole buffer. /// /// The returned value is a reference to the passed slice. This method never /// copies any contents. #[inline] pub fn slice<'a, 'b: 'a>(&'a self, text: RopeSlice<'b>) -> RopeSlice<'b> { text.slice(self.from()..self.to()) } //-------------------------------- // Alignment methods. /// Compute a possibly new range from this range, with its ends /// shifted as needed to align with grapheme boundaries. /// /// Zero-width ranges will always stay zero-width, and non-zero-width /// ranges will never collapse to zero-width. #[must_use] pub fn grapheme_aligned(&self, slice: RopeSlice) -> Self { use std::cmp::Ordering; let (new_anchor, new_head) = match self.anchor.cmp(&self.head) { Ordering::Equal => { let pos = ensure_grapheme_boundary_prev(slice, self.anchor); (pos, pos) } Ordering::Less => ( ensure_grapheme_boundary_prev(slice, self.anchor), ensure_grapheme_boundary_next(slice, self.head), ), Ordering::Greater => ( ensure_grapheme_boundary_next(slice, self.anchor), ensure_grapheme_boundary_prev(slice, self.head), ), }; Range { anchor: new_anchor, head: new_head, old_visual_position: if new_anchor == self.anchor { self.old_visual_position } else { None }, } } /// Compute a possibly new range from this range, attempting to ensure /// a minimum range width of 1 char by shifting the head in the forward /// direction as needed. /// /// This method will never shift the anchor, and will only shift the /// head in the forward direction. Therefore, this method can fail /// at ensuring the minimum width if and only if the passed range is /// both zero-width and at the end of the `RopeSlice`. /// /// If the input range is grapheme-boundary aligned, the returned range /// will also be. 
Specifically, if the head needs to shift to achieve /// the minimum width, it will shift to the next grapheme boundary. #[must_use] #[inline] pub fn min_width_1(&self, slice: RopeSlice) -> Self { if self.anchor == self.head { Range { anchor: self.anchor, head: next_grapheme_boundary(slice, self.head), old_visual_position: self.old_visual_position, } } else { *self } } //-------------------------------- // Block-cursor methods. /// Gets the left-side position of the block cursor. #[must_use] #[inline] pub fn cursor(self, text: RopeSlice) -> usize { if self.head > self.anchor { prev_grapheme_boundary(text, self.head) } else { self.head } } /// Puts the left side of the block cursor at `char_idx`, optionally extending. /// /// This follows "1-width" semantics, and therefore does a combination of anchor /// and head moves to behave as if both the front and back of the range are 1-width /// blocks /// /// This method assumes that the range and `char_idx` are already properly /// grapheme-aligned. #[must_use] #[inline] pub fn put_cursor(self, text: RopeSlice, char_idx: usize, extend: bool) -> Range { if extend { let anchor = if self.head >= self.anchor && char_idx < self.anchor { next_grapheme_boundary(text, self.anchor) } else if self.head < self.anchor && char_idx >= self.anchor { prev_grapheme_boundary(text, self.anchor) } else { self.anchor }; if anchor <= char_idx { Range::new(anchor, next_grapheme_boundary(text, char_idx)) } else { Range::new(anchor, char_idx) } } else { Range::point(char_idx) } } /// The line number that the block-cursor is on. 
#[inline] #[must_use] pub fn cursor_line(&self, text: RopeSlice) -> usize { text.char_to_line(self.cursor(text)) } /// Returns true if this Range covers a single grapheme in the given text pub fn is_single_grapheme(&self, doc: RopeSlice) -> bool { let mut graphemes = doc.slice(self.from()..self.to()).graphemes(); let first = graphemes.next(); let second = graphemes.next(); first.is_some() && second.is_none() } /// Converts this char range into an in order byte range, discarding /// direction. pub fn into_byte_range(&self, text: RopeSlice) -> (usize, usize) { (text.char_to_byte(self.from()), text.char_to_byte(self.to())) } } impl From<(usize, usize)> for Range { fn from((anchor, head): (usize, usize)) -> Self { Self { anchor, head, old_visual_position: None, } } } impl From for helix_stdx::Range { fn from(range: Range) -> Self { Self { start: range.from(), end: range.to(), } } } /// A selection consists of one or more selection ranges. /// invariant: A selection can never be empty (always contains at least primary range). #[derive(Debug, Clone, PartialEq, Eq)] pub struct Selection { ranges: SmallVec<[Range; 1]>, primary_index: usize, } #[allow(clippy::len_without_is_empty)] // a Selection is never empty impl Selection { // eq #[inline] #[must_use] pub fn primary(&self) -> Range { self.ranges[self.primary_index] } #[inline] #[must_use] pub fn primary_mut(&mut self) -> &mut Range { &mut self.ranges[self.primary_index] } /// Ensure selection containing only the primary selection. pub fn into_single(self) -> Self { if self.ranges.len() == 1 { self } else { Self { ranges: smallvec![self.ranges[self.primary_index]], primary_index: 0, } } } /// Adds a new range to the selection and makes it the primary range. pub fn push(mut self, range: Range) -> Self { self.ranges.push(range); self.set_primary_index(self.ranges().len() - 1); self.normalize() } /// Removes a range from the selection. 
pub fn remove(mut self, index: usize) -> Self { assert!( self.ranges.len() > 1, "can't remove the last range from a selection!" ); self.ranges.remove(index); if index < self.primary_index || self.primary_index == self.ranges.len() { self.primary_index -= 1; } self } /// Replace a range in the selection with a new range. pub fn replace(mut self, index: usize, range: Range) -> Self { self.ranges[index] = range; self.normalize() } /// Map selections over a set of changes. Useful for adjusting the selection position after /// applying changes to a document. pub fn map(self, changes: &ChangeSet) -> Self { self.map_no_normalize(changes).normalize() } /// Map selections over a set of changes. Useful for adjusting the selection position after /// applying changes to a document. Doesn't normalize the selection pub fn map_no_normalize(mut self, changes: &ChangeSet) -> Self { if changes.is_empty() { return self; } let positions_to_map = self.ranges.iter_mut().flat_map(|range| { use std::cmp::Ordering; range.old_visual_position = None; match range.anchor.cmp(&range.head) { Ordering::Equal => [ (&mut range.anchor, Assoc::AfterSticky), (&mut range.head, Assoc::AfterSticky), ], Ordering::Less => [ (&mut range.anchor, Assoc::AfterSticky), (&mut range.head, Assoc::BeforeSticky), ], Ordering::Greater => [ (&mut range.head, Assoc::AfterSticky), (&mut range.anchor, Assoc::BeforeSticky), ], } }); changes.update_positions(positions_to_map); self } pub fn ranges(&self) -> &[Range] { &self.ranges } /// Returns an iterator over the line ranges of each range in the selection. /// /// Adjacent and overlapping line ranges of the [Range]s in the selection are merged. 
pub fn line_ranges<'a>(&'a self, text: RopeSlice<'a>) -> LineRangeIter<'a> { LineRangeIter { ranges: self.ranges.iter().peekable(), text, } } pub fn range_bounds(&self) -> impl Iterator + '_ { self.ranges.iter().map(|&range| range.into()) } pub fn primary_index(&self) -> usize { self.primary_index } pub fn set_primary_index(&mut self, idx: usize) { assert!(idx < self.ranges.len()); self.primary_index = idx; } #[must_use] /// Constructs a selection holding a single range. pub fn single(anchor: usize, head: usize) -> Self { Self { ranges: smallvec![Range { anchor, head, old_visual_position: None }], primary_index: 0, } } /// Constructs a selection holding a single cursor. pub fn point(pos: usize) -> Self { Self::single(pos, pos) } /// Normalizes a `Selection`. /// /// Ranges are sorted by [Range::from], with overlapping ranges merged. fn normalize(mut self) -> Self { if self.len() < 2 { return self; } let mut primary = self.ranges[self.primary_index]; self.ranges.sort_unstable_by_key(Range::from); self.ranges.dedup_by(|curr_range, prev_range| { if prev_range.overlaps(curr_range) { let new_range = curr_range.merge(*prev_range); if prev_range == &primary || curr_range == &primary { primary = new_range; } *prev_range = new_range; true } else { false } }); self.primary_index = self .ranges .iter() .position(|&range| range == primary) .unwrap(); self } /// Replaces ranges with one spanning from first to last range. pub fn merge_ranges(self) -> Self { let first = self.ranges.first().unwrap(); let last = self.ranges.last().unwrap(); Selection::new(smallvec![first.merge(*last)], 0) } /// Merges all ranges that are consecutive. 
pub fn merge_consecutive_ranges(mut self) -> Self { let mut primary = self.ranges[self.primary_index]; self.ranges.dedup_by(|curr_range, prev_range| { if prev_range.to() == curr_range.from() { let new_range = curr_range.merge(*prev_range); if prev_range == &primary || curr_range == &primary { primary = new_range; } *prev_range = new_range; true } else { false } }); self.primary_index = self .ranges .iter() .position(|&range| range == primary) .unwrap(); self } #[must_use] pub fn new(ranges: SmallVec<[Range; 1]>, primary_index: usize) -> Self { assert!(!ranges.is_empty()); debug_assert!(primary_index < ranges.len()); let selection = Self { ranges, primary_index, }; selection.normalize() } /// Takes a closure and maps each `Range` over the closure. pub fn transform(mut self, mut f: F) -> Self where F: FnMut(Range) -> Range, { for range in self.ranges.iter_mut() { *range = f(*range) } self.normalize() } /// Takes a closure and maps each `Range` over the closure to multiple `Range`s. pub fn transform_iter(mut self, f: F) -> Self where F: FnMut(Range) -> I, I: Iterator, { self.ranges = self.ranges.into_iter().flat_map(f).collect(); self.normalize() } // Ensures the selection adheres to the following invariants: // 1. All ranges are grapheme aligned. // 2. All ranges are at least 1 character wide, unless at the // very end of the document. // 3. Ranges are non-overlapping. // 4. Ranges are sorted by their position in the text. pub fn ensure_invariants(self, text: RopeSlice) -> Self { self.transform(|r| r.min_width_1(text).grapheme_aligned(text)) .normalize() } /// Transforms the selection into all of the left-side head positions, /// using block-cursor semantics. 
pub fn cursors(self, text: RopeSlice) -> Self { self.transform(|range| Range::point(range.cursor(text))) } pub fn fragments<'a>( &'a self, text: RopeSlice<'a>, ) -> impl DoubleEndedIterator> + ExactSizeIterator> { self.ranges.iter().map(move |range| range.fragment(text)) } pub fn slices<'a>( &'a self, text: RopeSlice<'a>, ) -> impl DoubleEndedIterator> + ExactSizeIterator> + 'a { self.ranges.iter().map(move |range| range.slice(text)) } #[inline(always)] pub fn iter(&self) -> std::slice::Iter<'_, Range> { self.ranges.iter() } #[inline(always)] pub fn len(&self) -> usize { self.ranges.len() } /// returns true if self ⊇ other pub fn contains(&self, other: &Selection) -> bool { is_subset::(self.range_bounds(), other.range_bounds()) } } impl<'a> IntoIterator for &'a Selection { type Item = &'a Range; type IntoIter = std::slice::Iter<'a, Range>; fn into_iter(self) -> std::slice::Iter<'a, Range> { self.ranges().iter() } } impl IntoIterator for Selection { type Item = Range; type IntoIter = smallvec::IntoIter<[Range; 1]>; fn into_iter(self) -> smallvec::IntoIter<[Range; 1]> { self.ranges.into_iter() } } impl FromIterator for Selection { fn from_iter>(ranges: T) -> Self { Self::new(ranges.into_iter().collect(), 0) } } impl From for Selection { fn from(range: Range) -> Self { Self { ranges: smallvec![range], primary_index: 0, } } } pub struct LineRangeIter<'a> { ranges: iter::Peekable>, text: RopeSlice<'a>, } impl Iterator for LineRangeIter<'_> { type Item = (usize, usize); fn next(&mut self) -> Option { let (start, mut end) = self.ranges.next()?.line_range(self.text); while let Some((next_start, next_end)) = self.ranges.peek().map(|range| range.line_range(self.text)) { // Merge overlapping and adjacent ranges. // This subtraction cannot underflow because the ranges are sorted. 
if next_start - end <= 1 { end = next_end; self.ranges.next(); } else { break; } } Some((start, end)) } } // TODO: checkSelection -> check if valid for doc length && sorted pub fn keep_or_remove_matches( text: RopeSlice, selection: &Selection, regex: &rope::Regex, remove: bool, ) -> Option { let result: SmallVec<_> = selection .iter() .filter(|range| regex.is_match(text.regex_input_at(range.from()..range.to())) ^ remove) .copied() .collect(); // TODO: figure out a new primary index if !result.is_empty() { return Some(Selection::new(result, 0)); } None } // TODO: support to split on capture #N instead of whole match pub fn select_on_matches( text: RopeSlice, selection: &Selection, regex: &rope::Regex, ) -> Option { let mut result = SmallVec::with_capacity(selection.len()); for sel in selection { for mat in regex.find_iter(text.regex_input_at(sel.from()..sel.to())) { // TODO: retain range direction let start = text.byte_to_char(mat.start()); let end = text.byte_to_char(mat.end()); let range = Range::new(start, end); // Make sure the match is not right outside of the selection. // These invalid matches can come from using RegEx anchors like `^`, `$` if range != Range::point(sel.to()) { result.push(range); } } } // TODO: figure out a new primary index if !result.is_empty() { return Some(Selection::new(result, 0)); } None } pub fn split_on_newline(text: RopeSlice, selection: &Selection) -> Selection { let mut result = SmallVec::with_capacity(selection.len()); for sel in selection { // Special case: zero-width selection. 
if sel.from() == sel.to() { result.push(*sel); continue; } let sel_start = sel.from(); let sel_end = sel.to(); let mut start = sel_start; for line in sel.slice(text).lines() { let Some(line_ending) = get_line_ending(&line) else { break; }; let line_end = start + line.len_chars(); // TODO: retain range direction result.push(Range::new(start, line_end - line_ending.len_chars())); start = line_end; } if start < sel_end { result.push(Range::new(start, sel_end)); } } // TODO: figure out a new primary index Selection::new(result, 0) } pub fn split_on_matches(text: RopeSlice, selection: &Selection, regex: &rope::Regex) -> Selection { let mut result = SmallVec::with_capacity(selection.len()); for sel in selection { // Special case: zero-width selection. if sel.from() == sel.to() { result.push(*sel); continue; } let sel_start = sel.from(); let sel_end = sel.to(); let mut start = sel_start; for mat in regex.find_iter(text.regex_input_at(sel_start..sel_end)) { // TODO: retain range direction let end = text.byte_to_char(mat.start()); result.push(Range::new(start, end)); start = text.byte_to_char(mat.end()); } if start < sel_end { result.push(Range::new(start, sel_end)); } } // TODO: figure out a new primary index Selection::new(result, 0) } #[cfg(test)] mod test { use super::*; use crate::Rope; #[test] #[should_panic] fn test_new_empty() { let _ = Selection::new(smallvec![], 0); } #[test] fn test_create_normalizes_and_merges() { let sel = Selection::new( smallvec![ Range::new(10, 12), Range::new(6, 7), Range::new(4, 5), Range::new(3, 4), Range::new(0, 6), Range::new(7, 8), Range::new(9, 13), Range::new(13, 14), ], 0, ); let res = sel .ranges .into_iter() .map(|range| format!("{}/{}", range.anchor, range.head)) .collect::>() .join(","); assert_eq!(res, "0/6,6/7,7/8,9/13,13/14"); // it correctly calculates a new primary index let sel = Selection::new( smallvec![Range::new(0, 2), Range::new(1, 5), Range::new(4, 7)], 2, ); let res = sel .ranges .into_iter() .map(|range| 
format!("{}/{}", range.anchor, range.head)) .collect::>() .join(","); assert_eq!(res, "0/7"); assert_eq!(sel.primary_index, 0); } #[test] fn test_create_merges_adjacent_points() { let sel = Selection::new( smallvec![ Range::new(10, 12), Range::new(12, 12), Range::new(12, 12), Range::new(10, 10), Range::new(8, 10), ], 0, ); let res = sel .ranges .into_iter() .map(|range| format!("{}/{}", range.anchor, range.head)) .collect::>() .join(","); assert_eq!(res, "8/10,10/12,12/12"); } #[test] fn test_contains() { let range = Range::new(10, 12); assert!(!range.contains(9)); assert!(range.contains(10)); assert!(range.contains(11)); assert!(!range.contains(12)); assert!(!range.contains(13)); let range = Range::new(9, 6); assert!(!range.contains(9)); assert!(range.contains(7)); assert!(range.contains(6)); } #[test] fn test_overlaps() { fn overlaps(a: (usize, usize), b: (usize, usize)) -> bool { Range::new(a.0, a.1).overlaps(&Range::new(b.0, b.1)) } // Two non-zero-width ranges, no overlap. assert!(!overlaps((0, 3), (3, 6))); assert!(!overlaps((0, 3), (6, 3))); assert!(!overlaps((3, 0), (3, 6))); assert!(!overlaps((3, 0), (6, 3))); assert!(!overlaps((3, 6), (0, 3))); assert!(!overlaps((3, 6), (3, 0))); assert!(!overlaps((6, 3), (0, 3))); assert!(!overlaps((6, 3), (3, 0))); // Two non-zero-width ranges, overlap. assert!(overlaps((0, 4), (3, 6))); assert!(overlaps((0, 4), (6, 3))); assert!(overlaps((4, 0), (3, 6))); assert!(overlaps((4, 0), (6, 3))); assert!(overlaps((3, 6), (0, 4))); assert!(overlaps((3, 6), (4, 0))); assert!(overlaps((6, 3), (0, 4))); assert!(overlaps((6, 3), (4, 0))); // Zero-width and non-zero-width range, no overlap. assert!(!overlaps((0, 3), (3, 3))); assert!(!overlaps((3, 0), (3, 3))); assert!(!overlaps((3, 3), (0, 3))); assert!(!overlaps((3, 3), (3, 0))); // Zero-width and non-zero-width range, overlap. 
assert!(overlaps((1, 4), (1, 1))); assert!(overlaps((4, 1), (1, 1))); assert!(overlaps((1, 1), (1, 4))); assert!(overlaps((1, 1), (4, 1))); assert!(overlaps((1, 4), (3, 3))); assert!(overlaps((4, 1), (3, 3))); assert!(overlaps((3, 3), (1, 4))); assert!(overlaps((3, 3), (4, 1))); // Two zero-width ranges, no overlap. assert!(!overlaps((0, 0), (1, 1))); assert!(!overlaps((1, 1), (0, 0))); // Two zero-width ranges, overlap. assert!(overlaps((1, 1), (1, 1))); } #[test] fn test_grapheme_aligned() { let r = Rope::from_str("\r\nHi\r\n"); let s = r.slice(..); // Zero-width. assert_eq!(Range::new(0, 0).grapheme_aligned(s), Range::new(0, 0)); assert_eq!(Range::new(1, 1).grapheme_aligned(s), Range::new(0, 0)); assert_eq!(Range::new(2, 2).grapheme_aligned(s), Range::new(2, 2)); assert_eq!(Range::new(3, 3).grapheme_aligned(s), Range::new(3, 3)); assert_eq!(Range::new(4, 4).grapheme_aligned(s), Range::new(4, 4)); assert_eq!(Range::new(5, 5).grapheme_aligned(s), Range::new(4, 4)); assert_eq!(Range::new(6, 6).grapheme_aligned(s), Range::new(6, 6)); // Forward. assert_eq!(Range::new(0, 1).grapheme_aligned(s), Range::new(0, 2)); assert_eq!(Range::new(1, 2).grapheme_aligned(s), Range::new(0, 2)); assert_eq!(Range::new(2, 3).grapheme_aligned(s), Range::new(2, 3)); assert_eq!(Range::new(3, 4).grapheme_aligned(s), Range::new(3, 4)); assert_eq!(Range::new(4, 5).grapheme_aligned(s), Range::new(4, 6)); assert_eq!(Range::new(5, 6).grapheme_aligned(s), Range::new(4, 6)); assert_eq!(Range::new(0, 2).grapheme_aligned(s), Range::new(0, 2)); assert_eq!(Range::new(1, 3).grapheme_aligned(s), Range::new(0, 3)); assert_eq!(Range::new(2, 4).grapheme_aligned(s), Range::new(2, 4)); assert_eq!(Range::new(3, 5).grapheme_aligned(s), Range::new(3, 6)); assert_eq!(Range::new(4, 6).grapheme_aligned(s), Range::new(4, 6)); // Reverse. 
assert_eq!(Range::new(1, 0).grapheme_aligned(s), Range::new(2, 0)); assert_eq!(Range::new(2, 1).grapheme_aligned(s), Range::new(2, 0)); assert_eq!(Range::new(3, 2).grapheme_aligned(s), Range::new(3, 2)); assert_eq!(Range::new(4, 3).grapheme_aligned(s), Range::new(4, 3)); assert_eq!(Range::new(5, 4).grapheme_aligned(s), Range::new(6, 4)); assert_eq!(Range::new(6, 5).grapheme_aligned(s), Range::new(6, 4)); assert_eq!(Range::new(2, 0).grapheme_aligned(s), Range::new(2, 0)); assert_eq!(Range::new(3, 1).grapheme_aligned(s), Range::new(3, 0)); assert_eq!(Range::new(4, 2).grapheme_aligned(s), Range::new(4, 2)); assert_eq!(Range::new(5, 3).grapheme_aligned(s), Range::new(6, 3)); assert_eq!(Range::new(6, 4).grapheme_aligned(s), Range::new(6, 4)); } #[test] fn test_min_width_1() { let r = Rope::from_str("\r\nHi\r\n"); let s = r.slice(..); // Zero-width. assert_eq!(Range::new(0, 0).min_width_1(s), Range::new(0, 2)); assert_eq!(Range::new(1, 1).min_width_1(s), Range::new(1, 2)); assert_eq!(Range::new(2, 2).min_width_1(s), Range::new(2, 3)); assert_eq!(Range::new(3, 3).min_width_1(s), Range::new(3, 4)); assert_eq!(Range::new(4, 4).min_width_1(s), Range::new(4, 6)); assert_eq!(Range::new(5, 5).min_width_1(s), Range::new(5, 6)); assert_eq!(Range::new(6, 6).min_width_1(s), Range::new(6, 6)); // Forward. assert_eq!(Range::new(0, 1).min_width_1(s), Range::new(0, 1)); assert_eq!(Range::new(1, 2).min_width_1(s), Range::new(1, 2)); assert_eq!(Range::new(2, 3).min_width_1(s), Range::new(2, 3)); assert_eq!(Range::new(3, 4).min_width_1(s), Range::new(3, 4)); assert_eq!(Range::new(4, 5).min_width_1(s), Range::new(4, 5)); assert_eq!(Range::new(5, 6).min_width_1(s), Range::new(5, 6)); // Reverse. 
assert_eq!(Range::new(1, 0).min_width_1(s), Range::new(1, 0)); assert_eq!(Range::new(2, 1).min_width_1(s), Range::new(2, 1)); assert_eq!(Range::new(3, 2).min_width_1(s), Range::new(3, 2)); assert_eq!(Range::new(4, 3).min_width_1(s), Range::new(4, 3)); assert_eq!(Range::new(5, 4).min_width_1(s), Range::new(5, 4)); assert_eq!(Range::new(6, 5).min_width_1(s), Range::new(6, 5)); } #[test] fn test_select_on_matches() { let r = Rope::from_str("Nobody expects the Spanish inquisition"); let s = r.slice(..); let selection = Selection::single(0, r.len_chars()); assert_eq!( select_on_matches(s, &selection, &rope::Regex::new(r"[A-Z][a-z]*").unwrap()), Some(Selection::new( smallvec![Range::new(0, 6), Range::new(19, 26)], 0 )) ); let r = Rope::from_str("This\nString\n\ncontains multiple\nlines"); let s = r.slice(..); let start_of_line = rope::RegexBuilder::new() .syntax(rope::Config::new().multi_line(true)) .build(r"^") .unwrap(); let end_of_line = rope::RegexBuilder::new() .syntax(rope::Config::new().multi_line(true)) .build(r"$") .unwrap(); // line without ending assert_eq!( select_on_matches(s, &Selection::single(0, 4), &start_of_line), Some(Selection::single(0, 0)) ); assert_eq!( select_on_matches(s, &Selection::single(0, 4), &end_of_line), None ); // line with ending assert_eq!( select_on_matches(s, &Selection::single(0, 5), &start_of_line), Some(Selection::single(0, 0)) ); assert_eq!( select_on_matches(s, &Selection::single(0, 5), &end_of_line), Some(Selection::single(4, 4)) ); // line with start of next line assert_eq!( select_on_matches(s, &Selection::single(0, 6), &start_of_line), Some(Selection::new( smallvec![Range::point(0), Range::point(5)], 0 )) ); assert_eq!( select_on_matches(s, &Selection::single(0, 6), &end_of_line), Some(Selection::single(4, 4)) ); // multiple lines assert_eq!( select_on_matches( s, &Selection::single(0, s.len_chars()), &rope::RegexBuilder::new() .syntax(rope::Config::new().multi_line(true)) .build(r"^[a-z ]*$") .unwrap() ), 
Some(Selection::new( smallvec![Range::point(12), Range::new(13, 30), Range::new(31, 36)], 0 )) ); } #[test] fn test_line_range() { let r = Rope::from_str("\r\nHi\r\nthere!"); let s = r.slice(..); // Zero-width ranges. assert_eq!(Range::new(0, 0).line_range(s), (0, 0)); assert_eq!(Range::new(1, 1).line_range(s), (0, 0)); assert_eq!(Range::new(2, 2).line_range(s), (1, 1)); assert_eq!(Range::new(3, 3).line_range(s), (1, 1)); // Forward ranges. assert_eq!(Range::new(0, 1).line_range(s), (0, 0)); assert_eq!(Range::new(0, 2).line_range(s), (0, 0)); assert_eq!(Range::new(0, 3).line_range(s), (0, 1)); assert_eq!(Range::new(1, 2).line_range(s), (0, 0)); assert_eq!(Range::new(2, 3).line_range(s), (1, 1)); assert_eq!(Range::new(3, 8).line_range(s), (1, 2)); assert_eq!(Range::new(0, 12).line_range(s), (0, 2)); // Reverse ranges. assert_eq!(Range::new(1, 0).line_range(s), (0, 0)); assert_eq!(Range::new(2, 0).line_range(s), (0, 0)); assert_eq!(Range::new(3, 0).line_range(s), (0, 1)); assert_eq!(Range::new(2, 1).line_range(s), (0, 0)); assert_eq!(Range::new(3, 2).line_range(s), (1, 1)); assert_eq!(Range::new(8, 3).line_range(s), (1, 2)); assert_eq!(Range::new(12, 0).line_range(s), (0, 2)); } #[test] fn selection_line_ranges() { let (text, selection) = crate::test::print( r#" L0 #[|these]# line #(|ranges)# are #(|merged)# L1 L2 single one-line #(|range)# L3 L4 single #(|multiline L5 range)# L6 L7 these #(|multiline L8 ranges)# are #(|also L9 merged)# L10 L11 adjacent #(|ranges)# L12 are merged #(|the same way)# L13 "#, ); let rope = Rope::from_str(&text); assert_eq!( vec![(1, 1), (3, 3), (5, 6), (8, 10), (12, 13)], selection.line_ranges(rope.slice(..)).collect::>(), ); } #[test] fn test_cursor() { let r = Rope::from_str("\r\nHi\r\nthere!"); let s = r.slice(..); // Zero-width ranges. assert_eq!(Range::new(0, 0).cursor(s), 0); assert_eq!(Range::new(2, 2).cursor(s), 2); assert_eq!(Range::new(3, 3).cursor(s), 3); // Forward ranges. 
assert_eq!(Range::new(0, 2).cursor(s), 0); assert_eq!(Range::new(0, 3).cursor(s), 2); assert_eq!(Range::new(3, 6).cursor(s), 4); // Reverse ranges. assert_eq!(Range::new(2, 0).cursor(s), 0); assert_eq!(Range::new(6, 2).cursor(s), 2); assert_eq!(Range::new(6, 3).cursor(s), 3); } #[test] fn test_put_cursor() { let r = Rope::from_str("\r\nHi\r\nthere!"); let s = r.slice(..); // Zero-width ranges. assert_eq!(Range::new(0, 0).put_cursor(s, 0, true), Range::new(0, 2)); assert_eq!(Range::new(0, 0).put_cursor(s, 2, true), Range::new(0, 3)); assert_eq!(Range::new(2, 3).put_cursor(s, 4, true), Range::new(2, 6)); assert_eq!(Range::new(2, 8).put_cursor(s, 4, true), Range::new(2, 6)); assert_eq!(Range::new(8, 8).put_cursor(s, 4, true), Range::new(9, 4)); // Forward ranges. assert_eq!(Range::new(3, 6).put_cursor(s, 0, true), Range::new(4, 0)); assert_eq!(Range::new(3, 6).put_cursor(s, 2, true), Range::new(4, 2)); assert_eq!(Range::new(3, 6).put_cursor(s, 3, true), Range::new(3, 4)); assert_eq!(Range::new(3, 6).put_cursor(s, 4, true), Range::new(3, 6)); assert_eq!(Range::new(3, 6).put_cursor(s, 6, true), Range::new(3, 7)); assert_eq!(Range::new(3, 6).put_cursor(s, 8, true), Range::new(3, 9)); // Reverse ranges. 
assert_eq!(Range::new(6, 3).put_cursor(s, 0, true), Range::new(6, 0)); assert_eq!(Range::new(6, 3).put_cursor(s, 2, true), Range::new(6, 2)); assert_eq!(Range::new(6, 3).put_cursor(s, 3, true), Range::new(6, 3)); assert_eq!(Range::new(6, 3).put_cursor(s, 4, true), Range::new(6, 4)); assert_eq!(Range::new(6, 3).put_cursor(s, 6, true), Range::new(4, 7)); assert_eq!(Range::new(6, 3).put_cursor(s, 8, true), Range::new(4, 9)); } #[test] fn test_split_on_matches() { let text = Rope::from(" abcd efg wrs xyz 123 456"); let selection = Selection::new(smallvec![Range::new(0, 9), Range::new(11, 20),], 0); let result = split_on_matches( text.slice(..), &selection, &rope::Regex::new(r"\s+").unwrap(), ); assert_eq!( result.ranges(), &[ // TODO: rather than this behavior, maybe we want it // to be based on which side is the anchor? // // We get a leading zero-width range when there's // a leading match because ranges are inclusive on // the left. Imagine, for example, if the entire // selection range were matched: you'd still want // at least one range to remain after the split. Range::new(0, 0), Range::new(1, 5), Range::new(6, 9), Range::new(11, 13), Range::new(16, 19), // In contrast to the comment above, there is no // _trailing_ zero-width range despite the trailing // match, because ranges are exclusive on the right. 
] ); assert_eq!( result.fragments(text.slice(..)).collect::>(), &["", "abcd", "efg", "rs", "xyz"] ); } #[test] fn test_merge_consecutive_ranges() { let selection = Selection::new( smallvec![ Range::new(0, 1), Range::new(1, 10), Range::new(15, 20), Range::new(25, 26), Range::new(26, 30) ], 4, ); let result = selection.merge_consecutive_ranges(); assert_eq!( result.ranges(), &[Range::new(0, 10), Range::new(15, 20), Range::new(25, 30)] ); assert_eq!(result.primary_index, 2); let selection = Selection::new(smallvec![Range::new(0, 1)], 0); let result = selection.merge_consecutive_ranges(); assert_eq!(result.ranges(), &[Range::new(0, 1)]); assert_eq!(result.primary_index, 0); let selection = Selection::new( smallvec![ Range::new(0, 1), Range::new(1, 5), Range::new(5, 8), Range::new(8, 10), Range::new(10, 15), Range::new(18, 25) ], 3, ); let result = selection.merge_consecutive_ranges(); assert_eq!(result.ranges(), &[Range::new(0, 15), Range::new(18, 25)]); assert_eq!(result.primary_index, 0); } #[test] fn test_selection_contains() { fn contains(a: Vec<(usize, usize)>, b: Vec<(usize, usize)>) -> bool { let sela = Selection::new(a.iter().map(|a| Range::new(a.0, a.1)).collect(), 0); let selb = Selection::new(b.iter().map(|b| Range::new(b.0, b.1)).collect(), 0); sela.contains(&selb) } // exact match assert!(contains(vec!((1, 1)), vec!((1, 1)))); // larger set contains smaller assert!(contains(vec!((1, 1), (2, 2), (3, 3)), vec!((2, 2)))); // multiple matches assert!(contains(vec!((1, 1), (2, 2)), vec!((1, 1), (2, 2)))); // smaller set can't contain bigger assert!(!contains(vec!((1, 1)), vec!((1, 1), (2, 2)))); assert!(contains( vec!((1, 1), (2, 4), (5, 6), (7, 9), (10, 13)), vec!((3, 4), (7, 9)) )); assert!(!contains(vec!((1, 1), (5, 6)), vec!((1, 6)))); // multiple ranges of other are all contained in some ranges of self, assert!(contains( vec!((1, 4), (7, 10)), vec!((1, 2), (3, 4), (7, 9)) )); } } 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/snippets.rs000066400000000000000000000005631500614314100246520ustar00rootroot00000000000000mod active; mod elaborate; mod parser; mod render; #[derive(PartialEq, Eq, Hash, Debug, PartialOrd, Ord, Clone, Copy)] pub struct TabstopIdx(usize); pub const LAST_TABSTOP_IDX: TabstopIdx = TabstopIdx(usize::MAX); pub use active::ActiveSnippet; pub use elaborate::{Snippet, SnippetElement, Transform}; pub use render::RenderedSnippet; pub use render::SnippetRenderCtx; helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/snippets/000077500000000000000000000000001500614314100243005ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/snippets/active.rs000066400000000000000000000240661500614314100261310ustar00rootroot00000000000000use std::ops::{Index, IndexMut}; use hashbrown::HashSet; use helix_stdx::range::{is_exact_subset, is_subset}; use helix_stdx::Range; use ropey::Rope; use crate::movement::Direction; use crate::snippets::render::{RenderedSnippet, Tabstop}; use crate::snippets::TabstopIdx; use crate::{Assoc, ChangeSet, Selection, Transaction}; pub struct ActiveSnippet { ranges: Vec, active_tabstops: HashSet, current_tabstop: TabstopIdx, tabstops: Vec, } impl Index for ActiveSnippet { type Output = Tabstop; fn index(&self, index: TabstopIdx) -> &Tabstop { &self.tabstops[index.0] } } impl IndexMut for ActiveSnippet { fn index_mut(&mut self, index: TabstopIdx) -> &mut Tabstop { &mut self.tabstops[index.0] } } impl ActiveSnippet { pub fn new(snippet: RenderedSnippet) -> Option { let snippet = Self { ranges: snippet.ranges, tabstops: snippet.tabstops, active_tabstops: HashSet::new(), current_tabstop: TabstopIdx(0), }; (snippet.tabstops.len() != 1).then_some(snippet) } pub fn is_valid(&self, new_selection: &Selection) -> bool { is_subset::(self.ranges.iter().copied(), new_selection.range_bounds()) } pub fn tabstops(&self) -> impl Iterator { self.tabstops.iter() } pub fn 
delete_placeholder(&self, doc: &Rope) -> Transaction { Transaction::delete( doc, self[self.current_tabstop] .ranges .iter() .map(|range| (range.start, range.end)), ) } /// maps the active snippets through a `ChangeSet` updating all tabstop ranges pub fn map(&mut self, changes: &ChangeSet) -> bool { let positions_to_map = self.ranges.iter_mut().flat_map(|range| { [ (&mut range.start, Assoc::After), (&mut range.end, Assoc::Before), ] }); changes.update_positions(positions_to_map); for (i, tabstop) in self.tabstops.iter_mut().enumerate() { if self.active_tabstops.contains(&TabstopIdx(i)) { let positions_to_map = tabstop.ranges.iter_mut().flat_map(|range| { let end_assoc = if range.start == range.end { Assoc::Before } else { Assoc::After }; [ (&mut range.start, Assoc::Before), (&mut range.end, end_assoc), ] }); changes.update_positions(positions_to_map); } else { let positions_to_map = tabstop.ranges.iter_mut().flat_map(|range| { let end_assoc = if range.start == range.end { Assoc::After } else { Assoc::Before }; [ (&mut range.start, Assoc::After), (&mut range.end, end_assoc), ] }); changes.update_positions(positions_to_map); } let mut snippet_ranges = self.ranges.iter(); let mut snippet_range = snippet_ranges.next().unwrap(); let mut tabstop_i = 0; let mut prev = Range { start: 0, end: 0 }; let num_ranges = tabstop.ranges.len() / self.ranges.len(); tabstop.ranges.retain_mut(|range| { if tabstop_i == num_ranges { snippet_range = snippet_ranges.next().unwrap(); tabstop_i = 0; } tabstop_i += 1; let retain = snippet_range.start <= snippet_range.end; if retain { range.start = range.start.max(snippet_range.start); range.end = range.end.max(range.start).min(snippet_range.end); // guaranteed by assoc debug_assert!(prev.start <= range.start); debug_assert!(range.start <= range.end); if prev.end > range.start { // not really sure what to do in this case. 
It shouldn't // really occur in practice, the below just ensures // our invariants hold range.start = prev.end; range.end = range.end.max(range.start) } prev = *range; } retain }); } self.ranges.iter().all(|range| range.end <= range.start) } pub fn next_tabstop(&mut self, current_selection: &Selection) -> (Selection, bool) { let primary_idx = self.primary_idx(current_selection); while self.current_tabstop.0 + 1 < self.tabstops.len() { self.current_tabstop.0 += 1; if self.activate_tabstop() { let selection = self.tabstop_selection(primary_idx, Direction::Forward); return (selection, self.current_tabstop.0 + 1 == self.tabstops.len()); } } ( self.tabstop_selection(primary_idx, Direction::Forward), true, ) } pub fn prev_tabstop(&mut self, current_selection: &Selection) -> Option { let primary_idx = self.primary_idx(current_selection); while self.current_tabstop.0 != 0 { self.current_tabstop.0 -= 1; if self.activate_tabstop() { return Some(self.tabstop_selection(primary_idx, Direction::Forward)); } } None } // computes the primary idx adjusted for the number of cursors in the current tabstop fn primary_idx(&self, current_selection: &Selection) -> usize { let primary: Range = current_selection.primary().into(); let res = self .ranges .iter() .position(|&range| range.contains(primary)); res.unwrap_or_else(|| { unreachable!( "active snippet must be valid {current_selection:?} {:?}", self.ranges ) }) } fn activate_tabstop(&mut self) -> bool { let tabstop = &self[self.current_tabstop]; if tabstop.has_placeholder() && tabstop.ranges.iter().all(|range| range.is_empty()) { return false; } self.active_tabstops.clear(); self.active_tabstops.insert(self.current_tabstop); let mut parent = self[self.current_tabstop].parent; while let Some(tabstop) = parent { self.active_tabstops.insert(tabstop); parent = self[tabstop].parent; } true // TODO: if the user removes the selection(s) in one snippet (but // there are still other cursors in other snippets) and jumps to the // next tabstop 
the selection in that tabstop is restored (at the // next tabstop). This could be annoying since its not possible to // remove a snippet cursor until the snippet is complete. On the other // hand it may be useful since the user may just have meant to edit // a subselection (like with s) of the tabstops and so the selection // removal was just temporary. Potentially this could have some sort of // separate keymap } pub fn tabstop_selection(&self, primary_idx: usize, direction: Direction) -> Selection { let tabstop = &self[self.current_tabstop]; tabstop.selection(direction, primary_idx, self.ranges.len()) } pub fn insert_subsnippet(mut self, snippet: RenderedSnippet) -> Option { if snippet.ranges.len() % self.ranges.len() != 0 || !is_exact_subset(self.ranges.iter().copied(), snippet.ranges.iter().copied()) { log::warn!("number of subsnippets did not match, discarding outer snippet"); return ActiveSnippet::new(snippet); } let mut cnt = 0; let parent = self[self.current_tabstop].parent; let tabstops = snippet.tabstops.into_iter().map(|mut tabstop| { cnt += 1; if let Some(parent) = &mut tabstop.parent { parent.0 += self.current_tabstop.0; } else { tabstop.parent = parent; } tabstop }); self.tabstops .splice(self.current_tabstop.0..=self.current_tabstop.0, tabstops); self.activate_tabstop(); Some(self) } } #[cfg(test)] mod tests { use std::iter::{self}; use ropey::Rope; use crate::snippets::{ActiveSnippet, Snippet, SnippetRenderCtx}; use crate::{Selection, Transaction}; #[test] fn fully_remove() { let snippet = Snippet::parse("foo(${1:bar})$0").unwrap(); let mut doc = Rope::from("bar.\n"); let (transaction, _, snippet) = snippet.render( &doc, &Selection::point(4), |_| (4, 4), &mut SnippetRenderCtx::test_ctx(), ); assert!(transaction.apply(&mut doc)); assert_eq!(doc, "bar.foo(bar)\n"); let mut snippet = ActiveSnippet::new(snippet).unwrap(); let edit = Transaction::change(&doc, iter::once((4, 12, None))); assert!(edit.apply(&mut doc)); snippet.map(edit.changes()); 
assert!(!snippet.is_valid(&Selection::point(4))) } #[test] fn tabstop_zero_with_placeholder() { // The `$0` tabstop should not have placeholder text. When we receive a snippet like this // (from older versions of clangd for example) we should discard the placeholder text. let snippet = Snippet::parse("sizeof(${0:expression-or-type})").unwrap(); let mut doc = Rope::from("\n"); let (transaction, _, snippet) = snippet.render( &doc, &Selection::point(0), |_| (0, 0), &mut SnippetRenderCtx::test_ctx(), ); assert!(transaction.apply(&mut doc)); assert_eq!(doc, "sizeof()\n"); assert!(ActiveSnippet::new(snippet).is_none()); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/snippets/elaborate.rs000066400000000000000000000312161500614314100266070ustar00rootroot00000000000000use std::mem::swap; use std::ops::Index; use std::sync::Arc; use anyhow::{anyhow, Result}; use helix_stdx::rope::RopeSliceExt; use helix_stdx::Range; use regex_cursor::engines::meta::Builder as RegexBuilder; use regex_cursor::engines::meta::Regex; use regex_cursor::regex_automata::util::syntax::Config as RegexConfig; use ropey::RopeSlice; use crate::case_conversion::to_lower_case_with; use crate::case_conversion::to_upper_case_with; use crate::case_conversion::{to_camel_case_with, to_pascal_case_with}; use crate::snippets::parser::{self, CaseChange, FormatItem}; use crate::snippets::{TabstopIdx, LAST_TABSTOP_IDX}; use crate::Tendril; #[derive(Debug)] pub struct Snippet { elements: Vec, tabstops: Vec, } impl Snippet { pub fn parse(snippet: &str) -> Result { let parsed_snippet = parser::parse(snippet) .map_err(|rest| anyhow!("Failed to parse snippet. 
Remaining input: {}", rest))?; Ok(Snippet::new(parsed_snippet)) } pub fn new(elements: Vec) -> Snippet { let mut res = Snippet { elements: Vec::new(), tabstops: Vec::new(), }; res.elements = res.elaborate(elements, None).into(); res.fixup_tabstops(); res.ensure_last_tabstop(); res.renumber_tabstops(); res } pub fn elements(&self) -> &[SnippetElement] { &self.elements } pub fn tabstops(&self) -> impl Iterator { self.tabstops.iter() } fn renumber_tabstops(&mut self) { Self::renumber_tabstops_in(&self.tabstops, &mut self.elements); for i in 0..self.tabstops.len() { if let Some(parent) = self.tabstops[i].parent { let parent = self .tabstops .binary_search_by_key(&parent, |tabstop| tabstop.idx) .expect("all tabstops have been resolved"); self.tabstops[i].parent = Some(TabstopIdx(parent)); } let tabstop = &mut self.tabstops[i]; if let TabstopKind::Placeholder { default } = &tabstop.kind { let mut default = default.clone(); tabstop.kind = TabstopKind::Empty; Self::renumber_tabstops_in(&self.tabstops, Arc::get_mut(&mut default).unwrap()); self.tabstops[i].kind = TabstopKind::Placeholder { default }; } } } fn renumber_tabstops_in(tabstops: &[Tabstop], elements: &mut [SnippetElement]) { for elem in elements { match elem { SnippetElement::Tabstop { idx } => { idx.0 = tabstops .binary_search_by_key(&*idx, |tabstop| tabstop.idx) .expect("all tabstops have been resolved") } SnippetElement::Variable { default, .. 
} => { if let Some(default) = default { Self::renumber_tabstops_in(tabstops, default); } } SnippetElement::Text(_) => (), } } } fn fixup_tabstops(&mut self) { self.tabstops.sort_by_key(|tabstop| tabstop.idx); self.tabstops.dedup_by(|tabstop1, tabstop2| { if tabstop1.idx != tabstop2.idx { return false; } // use the first non empty tabstop for all multicursor tabstops if tabstop2.kind.is_empty() { swap(tabstop2, tabstop1) } true }) } fn ensure_last_tabstop(&mut self) { if matches!(self.tabstops.last(), Some(tabstop) if tabstop.idx == LAST_TABSTOP_IDX) { return; } self.tabstops.push(Tabstop { idx: LAST_TABSTOP_IDX, parent: None, kind: TabstopKind::Empty, }); self.elements.push(SnippetElement::Tabstop { idx: LAST_TABSTOP_IDX, }) } fn elaborate( &mut self, default: Vec, parent: Option, ) -> Box<[SnippetElement]> { default .into_iter() .map(|val| match val { parser::SnippetElement::Tabstop { tabstop, transform: None, } => SnippetElement::Tabstop { idx: self.elaborate_placeholder(tabstop, parent, Vec::new()), }, parser::SnippetElement::Tabstop { tabstop, transform: Some(transform), } => SnippetElement::Tabstop { idx: self.elaborate_transform(tabstop, parent, transform), }, parser::SnippetElement::Placeholder { tabstop, value } => SnippetElement::Tabstop { idx: self.elaborate_placeholder(tabstop, parent, value), }, parser::SnippetElement::Choice { tabstop, choices } => SnippetElement::Tabstop { idx: self.elaborate_choice(tabstop, parent, choices), }, parser::SnippetElement::Variable { name, default, transform, } => SnippetElement::Variable { name, default: default.map(|default| self.elaborate(default, parent)), // TODO: error for invalid transforms transform: transform.and_then(Transform::new).map(Box::new), }, parser::SnippetElement::Text(text) => SnippetElement::Text(text), }) .collect() } fn elaborate_choice( &mut self, idx: usize, parent: Option, choices: Vec, ) -> TabstopIdx { let idx = TabstopIdx::elaborate(idx); self.tabstops.push(Tabstop { idx, parent, kind: 
TabstopKind::Choice { choices: choices.into(), }, }); idx } fn elaborate_placeholder( &mut self, idx: usize, parent: Option, mut default: Vec, ) -> TabstopIdx { let idx = TabstopIdx::elaborate(idx); if idx == LAST_TABSTOP_IDX && !default.is_empty() { // Older versions of clangd for example may send a snippet like `${0:placeholder}` // which is considered by VSCode to be a misuse of the `$0` tabstop. log::warn!("Discarding placeholder text for the `$0` tabstop ({default:?}). \ The `$0` tabstop signifies the final cursor position and should not include placeholder text."); default.clear(); } let default = self.elaborate(default, Some(idx)); self.tabstops.push(Tabstop { idx, parent, kind: TabstopKind::Placeholder { default: default.into(), }, }); idx } fn elaborate_transform( &mut self, idx: usize, parent: Option, transform: parser::Transform, ) -> TabstopIdx { let idx = TabstopIdx::elaborate(idx); if let Some(transform) = Transform::new(transform) { self.tabstops.push(Tabstop { idx, parent, kind: TabstopKind::Transform(Arc::new(transform)), }) } else { // TODO: proper error self.tabstops.push(Tabstop { idx, parent, kind: TabstopKind::Empty, }) } idx } } impl Index for Snippet { type Output = Tabstop; fn index(&self, index: TabstopIdx) -> &Tabstop { &self.tabstops[index.0] } } #[derive(Debug)] pub enum SnippetElement { Tabstop { idx: TabstopIdx, }, Variable { name: Tendril, default: Option>, transform: Option>, }, Text(Tendril), } #[derive(Debug)] pub struct Tabstop { idx: TabstopIdx, pub parent: Option, pub kind: TabstopKind, } #[derive(Debug)] pub enum TabstopKind { Choice { choices: Arc<[Tendril]> }, Placeholder { default: Arc<[SnippetElement]> }, Empty, Transform(Arc), } impl TabstopKind { pub fn is_empty(&self) -> bool { matches!(self, TabstopKind::Empty) } } #[derive(Debug)] pub struct Transform { regex: Regex, regex_str: Box, global: bool, replacement: Box<[FormatItem]>, } impl PartialEq for Transform { fn eq(&self, other: &Self) -> bool { self.replacement == 
other.replacement && self.global == other.global // doens't compare m and i setting but close enough && self.regex_str == other.regex_str } } impl Transform { fn new(transform: parser::Transform) -> Option { let mut config = RegexConfig::new(); let mut global = false; let mut invalid_config = false; for c in transform.options.chars() { match c { 'i' => { config = config.case_insensitive(true); } 'm' => { config = config.multi_line(true); } 'g' => { global = true; } // we ignore 'u' since we always want to // do unicode aware matching _ => invalid_config = true, } } if invalid_config { log::error!("invalid transform configuration characters {transform:?}"); } let regex = match RegexBuilder::new().syntax(config).build(&transform.regex) { Ok(regex) => regex, Err(err) => { log::error!("invalid transform {err} {transform:?}"); return None; } }; Some(Transform { regex, regex_str: transform.regex.as_str().into(), global, replacement: transform.replacement.into(), }) } pub fn apply(&self, mut doc: RopeSlice<'_>, range: Range) -> Tendril { let mut buf = Tendril::new(); let it = self .regex .captures_iter(doc.regex_input_at(range)) .enumerate(); doc = doc.slice(range); let mut last_match = 0; for (_, cap) in it { // unwrap on 0 is OK because captures only reports matches let m = cap.get_group(0).unwrap(); buf.extend(doc.byte_slice(last_match..m.start).chunks()); last_match = m.end; for fmt in &*self.replacement { match *fmt { FormatItem::Text(ref text) => { buf.push_str(text); } FormatItem::Capture(i) => { if let Some(cap) = cap.get_group(i) { buf.extend(doc.byte_slice(cap.range()).chunks()); } } FormatItem::CaseChange(i, change) => { if let Some(cap) = cap.get_group(i).filter(|i| !i.is_empty()) { let mut chars = doc.byte_slice(cap.range()).chars(); match change { CaseChange::Upcase => to_upper_case_with(chars, &mut buf), CaseChange::Downcase => to_lower_case_with(chars, &mut buf), CaseChange::Capitalize => { let first_char = chars.next().unwrap(); 
buf.extend(first_char.to_uppercase()); buf.extend(chars); } CaseChange::PascalCase => to_pascal_case_with(chars, &mut buf), CaseChange::CamelCase => to_camel_case_with(chars, &mut buf), } } } FormatItem::Conditional(i, ref if_, ref else_) => { if cap.get_group(i).map_or(true, |mat| mat.is_empty()) { buf.push_str(else_) } else { buf.push_str(if_) } } } } if !self.global { break; } } buf.extend(doc.byte_slice(last_match..).chunks()); buf } } impl TabstopIdx { fn elaborate(idx: usize) -> Self { TabstopIdx(idx.wrapping_sub(1)) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/snippets/parser.rs000066400000000000000000000676161500614314100261620ustar00rootroot00000000000000/*! A parser for LSP/VSCode style snippet syntax See . ``` text any ::= tabstop | placeholder | choice | variable | text tabstop ::= '$' int | '${' int '}' placeholder ::= '${' int ':' any '}' choice ::= '${' int '|' text (',' text)* '|}' variable ::= '$' var | '${' var }' | '${' var ':' any '}' | '${' var '/' regex '/' (format | text)+ '/' options '}' format ::= '$' int | '${' int '}' | '${' int ':' '/upcase' | '/downcase' | '/capitalize' '}' | '${' int ':+' if '}' | '${' int ':?' 
if ':' else '}' | '${' int ':-' else '}' | '${' int ':' else '}' regex ::= Regular Expression value (ctor-string) options ::= Regular Expression option (ctor-options) var ::= [_a-zA-Z] [_a-zA-Z0-9]* int ::= [0-9]+ text ::= .* if ::= text else ::= text ``` */ use crate::Tendril; use helix_parsec::*; #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum CaseChange { Upcase, Downcase, Capitalize, PascalCase, CamelCase, } #[derive(Debug, PartialEq, Eq)] pub enum FormatItem { Text(Tendril), Capture(usize), CaseChange(usize, CaseChange), Conditional(usize, Tendril, Tendril), } #[derive(Debug, PartialEq, Eq)] pub struct Transform { pub regex: Tendril, pub replacement: Vec, pub options: Tendril, } #[derive(Debug, PartialEq, Eq)] pub enum SnippetElement { Tabstop { tabstop: usize, transform: Option, }, Placeholder { tabstop: usize, value: Vec, }, Choice { tabstop: usize, choices: Vec, }, Variable { name: Tendril, default: Option>, transform: Option, }, Text(Tendril), } pub fn parse(s: &str) -> Result, &str> { snippet().parse(s).and_then(|(remainder, snippet)| { if remainder.is_empty() { Ok(snippet) } else { Err(remainder) } }) } fn var<'a>() -> impl Parser<'a, Output = &'a str> { // var = [_a-zA-Z][_a-zA-Z0-9]* move |input: &'a str| { input .char_indices() .take_while(|(p, c)| { *c == '_' || if *p == 0 { c.is_ascii_alphabetic() } else { c.is_ascii_alphanumeric() } }) .last() .map(|(index, c)| { let index = index + c.len_utf8(); (&input[index..], &input[0..index]) }) .ok_or(input) } } const TEXT_ESCAPE_CHARS: &[char] = &['\\', '}', '$']; const CHOICE_TEXT_ESCAPE_CHARS: &[char] = &['\\', '|', ',']; fn text<'a>( escape_chars: &'static [char], term_chars: &'static [char], ) -> impl Parser<'a, Output = Tendril> { move |input: &'a str| { let mut chars = input.char_indices().peekable(); let mut res = Tendril::new(); while let Some((i, c)) = chars.next() { match c { '\\' => { if let Some(&(_, c)) = chars.peek() { if escape_chars.contains(&c) { chars.next(); res.push(c); continue; } 
} res.push('\\'); } c if term_chars.contains(&c) => return Ok((&input[i..], res)), c => res.push(c), } } Ok(("", res)) } } fn digit<'a>() -> impl Parser<'a, Output = usize> { filter_map(take_while(|c| c.is_ascii_digit()), |s| s.parse().ok()) } fn case_change<'a>() -> impl Parser<'a, Output = CaseChange> { use CaseChange::*; choice!( map("upcase", |_| Upcase), map("downcase", |_| Downcase), map("capitalize", |_| Capitalize), map("pascalcase", |_| PascalCase), map("camelcase", |_| CamelCase), ) } fn format<'a>() -> impl Parser<'a, Output = FormatItem> { use FormatItem::*; choice!( // '$' int map(right("$", digit()), Capture), // '${' int '}' map(seq!("${", digit(), "}"), |seq| Capture(seq.1)), // '${' int ':' '/upcase' | '/downcase' | '/capitalize' '}' map(seq!("${", digit(), ":/", case_change(), "}"), |seq| { CaseChange(seq.1, seq.3) }), // '${' int ':+' if '}' map( seq!("${", digit(), ":+", text(TEXT_ESCAPE_CHARS, &['}']), "}"), |seq| { Conditional(seq.1, seq.3, Tendril::new()) } ), // '${' int ':?' 
if ':' else '}' map( seq!( "${", digit(), ":?", text(TEXT_ESCAPE_CHARS, &[':']), ":", text(TEXT_ESCAPE_CHARS, &['}']), "}" ), |seq| { Conditional(seq.1, seq.3, seq.5) } ), // '${' int ':-' else '}' | '${' int ':' else '}' map( seq!( "${", digit(), ":", optional("-"), text(TEXT_ESCAPE_CHARS, &['}']), "}" ), |seq| { Conditional(seq.1, Tendril::new(), seq.4) } ), ) } fn regex<'a>() -> impl Parser<'a, Output = Transform> { map( seq!( "/", // TODO parse as ECMAScript and convert to rust regex text(&['/'], &['/']), "/", zero_or_more(choice!( format(), // text doesn't parse $, if format fails we just accept the $ as text map("$", |_| FormatItem::Text("$".into())), map(text(&['\\', '/'], &['/', '$']), FormatItem::Text), )), "/", // vscode really doesn't allow escaping } here // so it's impossible to write a regex escape containing a } // we can consider deviating here and allowing the escape text(&[], &['}']), ), |(_, value, _, replacement, _, options)| Transform { regex: value, replacement, options, }, ) } fn tabstop<'a>() -> impl Parser<'a, Output = SnippetElement> { map( or( map(right("$", digit()), |i| (i, None)), map( seq!("${", digit(), optional(regex()), "}"), |(_, i, transform, _)| (i, transform), ), ), |(tabstop, transform)| SnippetElement::Tabstop { tabstop, transform }, ) } fn placeholder<'a>() -> impl Parser<'a, Output = SnippetElement> { map( seq!( "${", digit(), ":", // according to the grammar there is just a single anything here. // However in the prose it is explained that placeholders can be nested. // The example there contains both a placeholder text and a nested placeholder // which indicates a list. 
Looking at the VSCode sourcecode, the placeholder // is indeed parsed as zero_or_more so the grammar is simply incorrect here zero_or_more(anything(TEXT_ESCAPE_CHARS, true)), "}" ), |seq| SnippetElement::Placeholder { tabstop: seq.1, value: seq.3, }, ) } fn choice<'a>() -> impl Parser<'a, Output = SnippetElement> { map( seq!( "${", digit(), "|", sep(text(CHOICE_TEXT_ESCAPE_CHARS, &['|', ',']), ","), "|}", ), |seq| SnippetElement::Choice { tabstop: seq.1, choices: seq.3, }, ) } fn variable<'a>() -> impl Parser<'a, Output = SnippetElement> { choice!( // $var map(right("$", var()), |name| SnippetElement::Variable { name: name.into(), default: None, transform: None, }), // ${var} map(seq!("${", var(), "}",), |values| SnippetElement::Variable { name: values.1.into(), default: None, transform: None, }), // ${var:default} map( seq!( "${", var(), ":", zero_or_more(anything(TEXT_ESCAPE_CHARS, true)), "}", ), |values| SnippetElement::Variable { name: values.1.into(), default: Some(values.3), transform: None, } ), // ${var/value/format/options} map(seq!("${", var(), regex(), "}"), |values| { SnippetElement::Variable { name: values.1.into(), default: None, transform: Some(values.2), } }), ) } fn anything<'a>( escape_chars: &'static [char], end_at_brace: bool, ) -> impl Parser<'a, Output = SnippetElement> { let term_chars: &[_] = if end_at_brace { &['$', '}'] } else { &['$'] }; move |input: &'a str| { let parser = choice!( tabstop(), placeholder(), choice(), variable(), map("$", |_| SnippetElement::Text("$".into())), map(text(escape_chars, term_chars), SnippetElement::Text), ); parser.parse(input) } } fn snippet<'a>() -> impl Parser<'a, Output = Vec> { one_or_more(anything(TEXT_ESCAPE_CHARS, false)) } #[cfg(test)] mod test { use crate::snippets::{Snippet, SnippetRenderCtx}; use super::SnippetElement::*; use super::*; #[test] fn empty_string_is_error() { assert_eq!(Err(""), parse("")); } #[test] fn parse_placeholders_in_function_call() { assert_eq!( Ok(vec![ 
Text("match(".into()), Placeholder { tabstop: 1, value: vec![Text("Arg1".into())], }, Text(")".into()), ]), parse("match(${1:Arg1})") ); // The `$0` tabstop should not have placeholder text. The parser should handle this case // normally and then the placeholder text should be discarded during elaboration. assert_eq!( Ok(vec![ Text("sizeof(".into()), Placeholder { tabstop: 0, value: vec![Text("expression-or-type".into())], }, Text(")".into()), ]), parse("sizeof(${0:expression-or-type})") ); } #[test] fn unterminated_placeholder() { assert_eq!( Ok(vec![ Text("match(".into()), Text("$".into()), Text("{1:)".into()) ]), parse("match(${1:)") ) } #[test] fn parse_empty_placeholder() { assert_eq!( Ok(vec![ Text("match(".into()), Placeholder { tabstop: 1, value: vec![], }, Text(")".into()), ]), parse("match(${1:})") ) } #[test] fn parse_placeholders_in_statement() { assert_eq!( Ok(vec![ Text("local ".into()), Placeholder { tabstop: 1, value: vec![Text("var".into())], }, Text(" = ".into()), Placeholder { tabstop: 1, value: vec![Text("value".into())], }, ]), parse("local ${1:var} = ${1:value}") ) } #[test] fn parse_tabstop_nested_in_placeholder() { assert_eq!( Ok(vec![Placeholder { tabstop: 1, value: vec![ Text("var, ".into()), Tabstop { tabstop: 2, transform: None } ], }]), parse("${1:var, $2}") ) } #[test] fn parse_placeholder_nested_in_placeholder() { assert_eq!( Ok({ vec![Placeholder { tabstop: 1, value: vec![ Text("foo ".into()), Placeholder { tabstop: 2, value: vec![Text("bar".into())], }, ], }] }), parse("${1:foo ${2:bar}}") ) } #[test] fn parse_all() { assert_eq!( Ok(vec![ Text("hello ".into()), Tabstop { tabstop: 1, transform: None }, Tabstop { tabstop: 2, transform: None }, Text(" ".into()), Choice { tabstop: 1, choices: vec!["one".into(), "two".into(), "three".into()], }, Text(" ".into()), Variable { name: "name".into(), default: Some(vec![Text("foo".into())]), transform: None, }, Text(" ".into()), Variable { name: "var".into(), default: None, transform: None, }, 
Text(" ".into()), Variable { name: "TM".into(), default: None, transform: None, }, ]), parse("hello $1${2} ${1|one,two,three|} ${name:foo} $var $TM") ); } #[test] fn regex_capture_replace() { assert_eq!( Ok({ vec![Variable { name: "TM_FILENAME".into(), default: None, transform: Some(Transform { regex: "(.*).+$".into(), replacement: vec![FormatItem::Capture(1), FormatItem::Text("$".into())], options: Tendril::new(), }), }] }), parse("${TM_FILENAME/(.*).+$/$1$/}") ); } #[test] fn rust_macro() { assert_eq!( Ok({ vec![ Text("macro_rules! ".into()), Tabstop { tabstop: 1, transform: None, }, Text(" {\n (".into()), Tabstop { tabstop: 2, transform: None, }, Text(") => {\n ".into()), Tabstop { tabstop: 0, transform: None, }, Text("\n };\n}".into()), ] }), parse("macro_rules! $1 {\n ($2) => {\n $0\n };\n}") ); } fn assert_text(snippet: &str, parsed_text: &str) { let snippet = Snippet::parse(snippet).unwrap(); let mut rendered_snippet = snippet.prepare_render(); let rendered_text = snippet .render_at( &mut rendered_snippet, "".into(), false, &mut SnippetRenderCtx::test_ctx(), 0, ) .0; assert_eq!(rendered_text, parsed_text) } #[test] fn robust_parsing() { assert_text("$", "$"); assert_text("\\\\$", "\\$"); assert_text("{", "{"); assert_text("\\}", "}"); assert_text("\\abc", "\\abc"); assert_text("foo${f:\\}}bar", "foo}bar"); assert_text("\\{", "\\{"); assert_text("I need \\\\\\$", "I need \\$"); assert_text("\\", "\\"); assert_text("\\{{", "\\{{"); assert_text("{{", "{{"); assert_text("{{dd", "{{dd"); assert_text("}}", "}}"); assert_text("ff}}", "ff}}"); assert_text("farboo", "farboo"); assert_text("far{{}}boo", "far{{}}boo"); assert_text("far{{123}}boo", "far{{123}}boo"); assert_text("far\\{{123}}boo", "far\\{{123}}boo"); assert_text("far{{id:bern}}boo", "far{{id:bern}}boo"); assert_text("far{{id:bern {{basel}}}}boo", "far{{id:bern {{basel}}}}boo"); assert_text( "far{{id:bern {{id:basel}}}}boo", "far{{id:bern {{id:basel}}}}boo", ); assert_text( "far{{id:bern 
{{id2:basel}}}}boo", "far{{id:bern {{id2:basel}}}}boo", ); assert_text("${}$\\a\\$\\}\\\\", "${}$\\a$}\\"); assert_text("farboo", "farboo"); assert_text("far{{}}boo", "far{{}}boo"); assert_text("far{{123}}boo", "far{{123}}boo"); assert_text("far\\{{123}}boo", "far\\{{123}}boo"); assert_text("far`123`boo", "far`123`boo"); assert_text("far\\`123\\`boo", "far\\`123\\`boo"); assert_text("\\$far-boo", "$far-boo"); } fn assert_snippet(snippet: &str, expect: &[SnippetElement]) { let elements = parse(snippet).unwrap(); assert_eq!(elements, expect.to_owned()) } #[test] fn parse_variable() { use SnippetElement::*; assert_snippet( "$far-boo", &[ Variable { name: "far".into(), default: None, transform: None, }, Text("-boo".into()), ], ); assert_snippet( "far$farboo", &[ Text("far".into()), Variable { name: "farboo".into(), transform: None, default: None, }, ], ); assert_snippet( "far${farboo}", &[ Text("far".into()), Variable { name: "farboo".into(), transform: None, default: None, }, ], ); assert_snippet( "$123", &[Tabstop { tabstop: 123, transform: None, }], ); assert_snippet( "$farboo", &[Variable { name: "farboo".into(), transform: None, default: None, }], ); assert_snippet( "$far12boo", &[Variable { name: "far12boo".into(), transform: None, default: None, }], ); assert_snippet( "000_${far}_000", &[ Text("000_".into()), Variable { name: "far".into(), transform: None, default: None, }, Text("_000".into()), ], ); } #[test] fn parse_variable_transform() { assert_snippet( "${foo///}", &[Variable { name: "foo".into(), transform: Some(Transform { regex: Tendril::new(), replacement: Vec::new(), options: Tendril::new(), }), default: None, }], ); assert_snippet( "${foo/regex/format/gmi}", &[Variable { name: "foo".into(), transform: Some(Transform { regex: "regex".into(), replacement: vec![FormatItem::Text("format".into())], options: "gmi".into(), }), default: None, }], ); assert_snippet( "${foo/([A-Z][a-z])/format/}", &[Variable { name: "foo".into(), transform: Some(Transform { 
regex: "([A-Z][a-z])".into(), replacement: vec![FormatItem::Text("format".into())], options: Tendril::new(), }), default: None, }], ); // invalid regex TODO: reneable tests once we actually parse this regex flavor // assert_text( // "${foo/([A-Z][a-z])/format/GMI}", // "${foo/([A-Z][a-z])/format/GMI}", // ); // assert_text( // "${foo/([A-Z][a-z])/format/funky}", // "${foo/([A-Z][a-z])/format/funky}", // ); // assert_text("${foo/([A-Z][a-z]/format/}", "${foo/([A-Z][a-z]/format/}"); assert_text( "${foo/regex\\/format/options}", "${foo/regex\\/format/options}", ); // tricky regex assert_snippet( "${foo/m\\/atch/$1/i}", &[Variable { name: "foo".into(), transform: Some(Transform { regex: "m/atch".into(), replacement: vec![FormatItem::Capture(1)], options: "i".into(), }), default: None, }], ); // incomplete assert_text("${foo///", "${foo///"); assert_text("${foo/regex/format/options", "${foo/regex/format/options"); // format string assert_snippet( "${foo/.*/${0:fooo}/i}", &[Variable { name: "foo".into(), transform: Some(Transform { regex: ".*".into(), replacement: vec![FormatItem::Conditional(0, Tendril::new(), "fooo".into())], options: "i".into(), }), default: None, }], ); assert_snippet( "${foo/.*/${1}/i}", &[Variable { name: "foo".into(), transform: Some(Transform { regex: ".*".into(), replacement: vec![FormatItem::Capture(1)], options: "i".into(), }), default: None, }], ); assert_snippet( "${foo/.*/$1/i}", &[Variable { name: "foo".into(), transform: Some(Transform { regex: ".*".into(), replacement: vec![FormatItem::Capture(1)], options: "i".into(), }), default: None, }], ); assert_snippet( "${foo/.*/This-$1-encloses/i}", &[Variable { name: "foo".into(), transform: Some(Transform { regex: ".*".into(), replacement: vec![ FormatItem::Text("This-".into()), FormatItem::Capture(1), FormatItem::Text("-encloses".into()), ], options: "i".into(), }), default: None, }], ); assert_snippet( "${foo/.*/complex${1:else}/i}", &[Variable { name: "foo".into(), transform: Some(Transform 
{ regex: ".*".into(), replacement: vec![ FormatItem::Text("complex".into()), FormatItem::Conditional(1, Tendril::new(), "else".into()), ], options: "i".into(), }), default: None, }], ); assert_snippet( "${foo/.*/complex${1:-else}/i}", &[Variable { name: "foo".into(), transform: Some(Transform { regex: ".*".into(), replacement: vec![ FormatItem::Text("complex".into()), FormatItem::Conditional(1, Tendril::new(), "else".into()), ], options: "i".into(), }), default: None, }], ); assert_snippet( "${foo/.*/complex${1:+if}/i}", &[Variable { name: "foo".into(), transform: Some(Transform { regex: ".*".into(), replacement: vec![ FormatItem::Text("complex".into()), FormatItem::Conditional(1, "if".into(), Tendril::new()), ], options: "i".into(), }), default: None, }], ); assert_snippet( "${foo/.*/complex${1:?if:else}/i}", &[Variable { name: "foo".into(), transform: Some(Transform { regex: ".*".into(), replacement: vec![ FormatItem::Text("complex".into()), FormatItem::Conditional(1, "if".into(), "else".into()), ], options: "i".into(), }), default: None, }], ); assert_snippet( "${foo/.*/complex${1:/upcase}/i}", &[Variable { name: "foo".into(), transform: Some(Transform { regex: ".*".into(), replacement: vec![ FormatItem::Text("complex".into()), FormatItem::CaseChange(1, CaseChange::Upcase), ], options: "i".into(), }), default: None, }], ); assert_snippet( "${TM_DIRECTORY/src\\//$1/}", &[Variable { name: "TM_DIRECTORY".into(), transform: Some(Transform { regex: "src/".into(), replacement: vec![FormatItem::Capture(1)], options: Tendril::new(), }), default: None, }], ); assert_snippet( "${TM_SELECTED_TEXT/a/\\/$1/g}", &[Variable { name: "TM_SELECTED_TEXT".into(), transform: Some(Transform { regex: "a".into(), replacement: vec![FormatItem::Text("/".into()), FormatItem::Capture(1)], options: "g".into(), }), default: None, }], ); assert_snippet( "${TM_SELECTED_TEXT/a/in\\/$1ner/g}", &[Variable { name: "TM_SELECTED_TEXT".into(), transform: Some(Transform { regex: "a".into(), 
replacement: vec![ FormatItem::Text("in/".into()), FormatItem::Capture(1), FormatItem::Text("ner".into()), ], options: "g".into(), }), default: None, }], ); assert_snippet( "${TM_SELECTED_TEXT/a/end\\//g}", &[Variable { name: "TM_SELECTED_TEXT".into(), transform: Some(Transform { regex: "a".into(), replacement: vec![FormatItem::Text("end/".into())], options: "g".into(), }), default: None, }], ); } // TODO port more tests from https://github.com/microsoft/vscode/blob/dce493cb6e36346ef2714e82c42ce14fc461b15c/src/vs/editor/contrib/snippet/test/browser/snippetParser.test.ts } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/snippets/render.rs000066400000000000000000000261441500614314100261340ustar00rootroot00000000000000use std::borrow::Cow; use std::ops::{Index, IndexMut}; use std::sync::Arc; use helix_stdx::Range; use ropey::{Rope, RopeSlice}; use smallvec::SmallVec; use crate::indent::{normalize_indentation, IndentStyle}; use crate::movement::Direction; use crate::snippets::elaborate; use crate::snippets::TabstopIdx; use crate::snippets::{Snippet, SnippetElement, Transform}; use crate::{selection, Selection, Tendril, Transaction}; #[derive(Debug, Clone, PartialEq)] pub enum TabstopKind { Choice { choices: Arc<[Tendril]> }, Placeholder, Empty, Transform(Arc), } #[derive(Debug, PartialEq)] pub struct Tabstop { pub ranges: SmallVec<[Range; 1]>, pub parent: Option, pub kind: TabstopKind, } impl Tabstop { pub fn has_placeholder(&self) -> bool { matches!( self.kind, TabstopKind::Choice { .. 
} | TabstopKind::Placeholder ) } pub fn selection( &self, direction: Direction, primary_idx: usize, snippet_ranges: usize, ) -> Selection { Selection::new( self.ranges .iter() .map(|&range| { let mut range = selection::Range::new(range.start, range.end); if direction == Direction::Backward { range = range.flip() } range }) .collect(), primary_idx * (self.ranges.len() / snippet_ranges), ) } } #[derive(Debug, Default, PartialEq)] pub struct RenderedSnippet { pub tabstops: Vec, pub ranges: Vec, } impl RenderedSnippet { pub fn first_selection(&self, direction: Direction, primary_idx: usize) -> Selection { self.tabstops[0].selection(direction, primary_idx, self.ranges.len()) } } impl Index for RenderedSnippet { type Output = Tabstop; fn index(&self, index: TabstopIdx) -> &Tabstop { &self.tabstops[index.0] } } impl IndexMut for RenderedSnippet { fn index_mut(&mut self, index: TabstopIdx) -> &mut Tabstop { &mut self.tabstops[index.0] } } impl Snippet { pub fn prepare_render(&self) -> RenderedSnippet { let tabstops = self.tabstops() .map(|tabstop| Tabstop { ranges: SmallVec::new(), parent: tabstop.parent, kind: match &tabstop.kind { elaborate::TabstopKind::Choice { choices } => TabstopKind::Choice { choices: choices.clone(), }, // start out as empty: the first non-empty placeholder will change this to // a placeholder automatically elaborate::TabstopKind::Empty | elaborate::TabstopKind::Placeholder { .. 
} => TabstopKind::Empty, elaborate::TabstopKind::Transform(transform) => { TabstopKind::Transform(transform.clone()) } }, }) .collect(); RenderedSnippet { tabstops, ranges: Vec::new(), } } pub fn render_at( &self, snippet: &mut RenderedSnippet, indent: RopeSlice<'_>, at_newline: bool, ctx: &mut SnippetRenderCtx, pos: usize, ) -> (Tendril, usize) { let mut ctx = SnippetRender { dst: snippet, src: self, indent, text: Tendril::new(), off: pos, ctx, at_newline, }; ctx.render_elements(self.elements()); let end = ctx.off; let text = ctx.text; snippet.ranges.push(Range { start: pos, end }); (text, end - pos) } pub fn render( &self, doc: &Rope, selection: &Selection, change_range: impl FnMut(&selection::Range) -> (usize, usize), ctx: &mut SnippetRenderCtx, ) -> (Transaction, Selection, RenderedSnippet) { let mut snippet = self.prepare_render(); let mut off = 0; let (transaction, selection) = Transaction::change_by_selection_ignore_overlapping( doc, selection, change_range, |replacement_start, replacement_end| { let line_idx = doc.char_to_line(replacement_start); let line_start = doc.line_to_char(line_idx); let prefix = doc.slice(line_start..replacement_start); let indent_len = prefix.chars().take_while(|c| c.is_whitespace()).count(); let indent = prefix.slice(..indent_len); let at_newline = indent_len == replacement_start - line_start; let (replacement, replacement_len) = self.render_at( &mut snippet, indent, at_newline, ctx, (replacement_start as i128 + off) as usize, ); off += replacement_start as i128 - replacement_end as i128 + replacement_len as i128; Some(replacement) }, ); (transaction, selection, snippet) } } pub type VariableResolver = dyn FnMut(&str) -> Option>; pub struct SnippetRenderCtx { pub resolve_var: Box, pub tab_width: usize, pub indent_style: IndentStyle, pub line_ending: &'static str, } impl SnippetRenderCtx { #[cfg(test)] pub(super) fn test_ctx() -> SnippetRenderCtx { SnippetRenderCtx { resolve_var: Box::new(|_| None), tab_width: 4, indent_style: 
IndentStyle::Spaces(4), line_ending: "\n", } } } struct SnippetRender<'a> { ctx: &'a mut SnippetRenderCtx, dst: &'a mut RenderedSnippet, src: &'a Snippet, indent: RopeSlice<'a>, text: Tendril, off: usize, at_newline: bool, } impl SnippetRender<'_> { fn render_elements(&mut self, elements: &[SnippetElement]) { for element in elements { self.render_element(element) } } fn render_element(&mut self, element: &SnippetElement) { match *element { SnippetElement::Tabstop { idx } => self.render_tabstop(idx), SnippetElement::Variable { ref name, ref default, ref transform, } => { // TODO: allow resolve_var access to the doc and make it return rope slice // so we can access selections and other document content without allocating if let Some(val) = (self.ctx.resolve_var)(name) { if let Some(transform) = transform { self.push_multiline_str(&transform.apply( (&*val).into(), Range { start: 0, end: val.chars().count(), }, )); } else { self.push_multiline_str(&val) } } else if let Some(default) = default { self.render_elements(default) } } SnippetElement::Text(ref text) => self.push_multiline_str(text), } } fn push_multiline_str(&mut self, text: &str) { let mut lines = text .split('\n') .map(|line| line.strip_suffix('\r').unwrap_or(line)); let first_line = lines.next().unwrap(); self.push_str(first_line, self.at_newline); for line in lines { self.push_newline(); self.push_str(line, true); } } fn push_str(&mut self, mut text: &str, at_newline: bool) { if at_newline { let old_len = self.text.len(); let old_indent_len = normalize_indentation( self.indent, text.into(), &mut self.text, self.ctx.indent_style, self.ctx.tab_width, ); // this is ok because indentation can only be ascii chars (' ' and '\t') self.off += self.text.len() - old_len; text = &text[old_indent_len..]; if text.is_empty() { self.at_newline = true; return; } } self.text.push_str(text); self.off += text.chars().count(); } fn push_newline(&mut self) { self.off += self.ctx.line_ending.chars().count() + 
self.indent.len_chars(); self.text.push_str(self.ctx.line_ending); self.text.extend(self.indent.chunks()); } fn render_tabstop(&mut self, tabstop: TabstopIdx) { let start = self.off; let end = match &self.src[tabstop].kind { elaborate::TabstopKind::Placeholder { default } if !default.is_empty() => { self.render_elements(default); self.dst[tabstop].kind = TabstopKind::Placeholder; self.off } _ => start, }; self.dst[tabstop].ranges.push(Range { start, end }); } } #[cfg(test)] mod tests { use helix_stdx::Range; use crate::snippets::render::Tabstop; use crate::snippets::{Snippet, SnippetRenderCtx}; use super::TabstopKind; fn assert_snippet(snippet: &str, expect: &str, tabstops: &[Tabstop]) { let snippet = Snippet::parse(snippet).unwrap(); let mut rendered_snippet = snippet.prepare_render(); let rendered_text = snippet .render_at( &mut rendered_snippet, "\t".into(), false, &mut SnippetRenderCtx::test_ctx(), 0, ) .0; assert_eq!(rendered_text, expect); assert_eq!(&rendered_snippet.tabstops, tabstops); assert_eq!( rendered_snippet.ranges.last().unwrap().end, rendered_text.chars().count() ); assert_eq!(rendered_snippet.ranges.last().unwrap().start, 0) } #[test] fn rust_macro() { assert_snippet( "macro_rules! ${1:name} {\n\t($3) => {\n\t\t$2\n\t};\n}", "macro_rules! 
name {\n\t () => {\n\t \n\t };\n\t}", &[ Tabstop { ranges: vec![Range { start: 13, end: 17 }].into(), parent: None, kind: TabstopKind::Placeholder, }, Tabstop { ranges: vec![Range { start: 42, end: 42 }].into(), parent: None, kind: TabstopKind::Empty, }, Tabstop { ranges: vec![Range { start: 26, end: 26 }].into(), parent: None, kind: TabstopKind::Empty, }, Tabstop { ranges: vec![Range { start: 53, end: 53 }].into(), parent: None, kind: TabstopKind::Empty, }, ], ); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/surround.rs000066400000000000000000000340421500614314100246650ustar00rootroot00000000000000use std::fmt::Display; use crate::{ graphemes::next_grapheme_boundary, match_brackets::{ find_matching_bracket, find_matching_bracket_fuzzy, get_pair, is_close_bracket, is_open_bracket, }, movement::Direction, search, Range, Selection, Syntax, }; use ropey::RopeSlice; #[derive(Debug, PartialEq, Eq)] pub enum Error { PairNotFound, CursorOverlap, RangeExceedsText, CursorOnAmbiguousPair, } impl Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str(match *self { Error::PairNotFound => "Surround pair not found around all cursors", Error::CursorOverlap => "Cursors overlap for a single surround pair range", Error::RangeExceedsText => "Cursor range exceeds text length", Error::CursorOnAmbiguousPair => "Cursor on ambiguous surround pair", }) } } type Result = std::result::Result; /// Finds the position of surround pairs of any [`crate::match_brackets::PAIRS`] /// using tree-sitter when possible. /// /// # Returns /// /// Tuple `(anchor, head)`, meaning it is not always ordered. 
pub fn find_nth_closest_pairs_pos( syntax: Option<&Syntax>, text: RopeSlice, range: Range, skip: usize, ) -> Result<(usize, usize)> { match syntax { Some(syntax) => find_nth_closest_pairs_ts(syntax, text, range, skip), None => find_nth_closest_pairs_plain(text, range, skip), } } fn find_nth_closest_pairs_ts( syntax: &Syntax, text: RopeSlice, range: Range, mut skip: usize, ) -> Result<(usize, usize)> { let mut opening = range.from(); // We want to expand the selection if we are already on the found pair, // otherwise we would need to subtract "-1" from "range.to()". let mut closing = range.to(); while skip > 0 { closing = find_matching_bracket_fuzzy(syntax, text, closing).ok_or(Error::PairNotFound)?; opening = find_matching_bracket(syntax, text, closing).ok_or(Error::PairNotFound)?; // If we're already on a closing bracket "find_matching_bracket_fuzzy" will return // the position of the opening bracket. if closing < opening { (opening, closing) = (closing, opening); } // In case found brackets are partially inside current selection. if range.from() < opening || closing < range.to() - 1 { closing = next_grapheme_boundary(text, closing); } else { skip -= 1; if skip != 0 { closing = next_grapheme_boundary(text, closing); } } } // Keep the original direction. if let Direction::Forward = range.direction() { Ok((opening, closing)) } else { Ok((closing, opening)) } } fn find_nth_closest_pairs_plain( text: RopeSlice, range: Range, mut skip: usize, ) -> Result<(usize, usize)> { let mut stack = Vec::with_capacity(2); let pos = range.from(); let mut close_pos = pos.saturating_sub(1); for ch in text.chars_at(pos) { close_pos += 1; if is_open_bracket(ch) { // Track open pairs encountered so that we can step over // the corresponding close pairs that will come up further // down the loop. We want to find a lone close pair whose // open pair is before the cursor position. 
stack.push(ch); continue; } if !is_close_bracket(ch) { // We don't care if this character isn't a brace pair item, // so short circuit here. continue; } let (open, close) = get_pair(ch); if stack.last() == Some(&open) { // If we are encountering the closing pair for an opener // we just found while traversing, then its inside the // selection and should be skipped over. stack.pop(); continue; } match find_nth_open_pair(text, open, close, close_pos, 1) { // Before we accept this pair, we want to ensure that the // pair encloses the range rather than just the cursor. Some(open_pos) if open_pos <= pos.saturating_add(1) && close_pos >= range.to().saturating_sub(1) => { // Since we have special conditions for when to // accept, we can't just pass the skip parameter on // through to the find_nth_*_pair methods, so we // track skips manually here. if skip > 1 { skip -= 1; continue; } return match range.direction() { Direction::Forward => Ok((open_pos, close_pos)), Direction::Backward => Ok((close_pos, open_pos)), }; } _ => continue, } } Err(Error::PairNotFound) } /// Find the position of surround pairs of `ch` which can be either a closing /// or opening pair. `n` will skip n - 1 pairs (eg. n=2 will discard (only) /// the first pair found and keep looking) pub fn find_nth_pairs_pos( text: RopeSlice, ch: char, range: Range, n: usize, ) -> Result<(usize, usize)> { if text.len_chars() < 2 { return Err(Error::PairNotFound); } if range.to() >= text.len_chars() { return Err(Error::RangeExceedsText); } let (open, close) = get_pair(ch); let pos = range.cursor(text); let (open, close) = if open == close { if Some(open) == text.get_char(pos) { // Cursor is directly on match char. We return no match // because there's no way to know which side of the char // we should be searching on. 
return Err(Error::CursorOnAmbiguousPair); } ( search::find_nth_prev(text, open, pos, n), search::find_nth_next(text, close, pos, n), ) } else { ( find_nth_open_pair(text, open, close, pos, n), find_nth_close_pair(text, open, close, pos, n), ) }; // preserve original direction match range.direction() { Direction::Forward => Option::zip(open, close).ok_or(Error::PairNotFound), Direction::Backward => Option::zip(close, open).ok_or(Error::PairNotFound), } } fn find_nth_open_pair( text: RopeSlice, open: char, close: char, mut pos: usize, n: usize, ) -> Option { if pos >= text.len_chars() { return None; } let mut chars = text.chars_at(pos + 1); // Adjusts pos for the first iteration, and handles the case of the // cursor being *on* the close character which will get falsely stepped over // if not skipped here if chars.prev()? == open { return Some(pos); } for _ in 0..n { let mut step_over: usize = 0; loop { let c = chars.prev()?; pos = pos.saturating_sub(1); // ignore other surround pairs that are enclosed *within* our search scope if c == close { step_over += 1; } else if c == open { if step_over == 0 { break; } step_over = step_over.saturating_sub(1); } } } Some(pos) } fn find_nth_close_pair( text: RopeSlice, open: char, close: char, mut pos: usize, n: usize, ) -> Option { if pos >= text.len_chars() { return None; } let mut chars = text.chars_at(pos); if chars.next()? == close { return Some(pos); } for _ in 0..n { let mut step_over: usize = 0; loop { let c = chars.next()?; pos += 1; if c == open { step_over += 1; } else if c == close { if step_over == 0 { break; } step_over = step_over.saturating_sub(1); } } } Some(pos) } /// Find position of surround characters around every cursor. Returns None /// if any positions overlap. Note that the positions are in a flat Vec. /// Use get_surround_pos().chunks(2) to get matching pairs of surround positions. /// `ch` can be either closing or opening pair. 
If `ch` is None, surround pairs /// are automatically detected around each cursor (note that this may result /// in them selecting different surround characters for each selection). pub fn get_surround_pos( syntax: Option<&Syntax>, text: RopeSlice, selection: &Selection, ch: Option, skip: usize, ) -> Result> { let mut change_pos = Vec::new(); for &range in selection { let (open_pos, close_pos) = { let range_raw = match ch { Some(ch) => find_nth_pairs_pos(text, ch, range, skip)?, None => find_nth_closest_pairs_pos(syntax, text, range, skip)?, }; let range = Range::new(range_raw.0, range_raw.1); (range.from(), range.to()) }; if change_pos.contains(&open_pos) || change_pos.contains(&close_pos) { return Err(Error::CursorOverlap); } // ensure the positions are always paired in the forward direction change_pos.extend_from_slice(&[open_pos.min(close_pos), close_pos.max(open_pos)]); } Ok(change_pos) } #[cfg(test)] mod test { use super::*; use crate::Range; use ropey::Rope; use smallvec::SmallVec; #[test] fn test_get_surround_pos() { #[rustfmt::skip] let (doc, selection, expectations) = rope_with_selections_and_expectations( "(some) (chars)\n(newline)", "_ ^ _ _ ^ _\n_ ^ _" ); assert_eq!( get_surround_pos(None, doc.slice(..), &selection, Some('('), 1).unwrap(), expectations ); } #[test] fn test_get_surround_pos_bail_different_surround_chars() { #[rustfmt::skip] let (doc, selection, _) = rope_with_selections_and_expectations( "[some]\n(chars)xx\n(newline)", " ^ \n ^ \n " ); assert_eq!( get_surround_pos(None, doc.slice(..), &selection, Some('('), 1), Err(Error::PairNotFound) ); } #[test] fn test_get_surround_pos_bail_overlapping_surround_chars() { #[rustfmt::skip] let (doc, selection, _) = rope_with_selections_and_expectations( "[some]\n(chars)xx\n(newline)", " \n ^ \n ^ " ); assert_eq!( get_surround_pos(None, doc.slice(..), &selection, Some('('), 1), Err(Error::PairNotFound) // overlapping surround chars ); } #[test] fn test_get_surround_pos_bail_cursor_overlap() { 
#[rustfmt::skip] let (doc, selection, _) = rope_with_selections_and_expectations( "[some]\n(chars)xx\n(newline)", " ^^ \n \n " ); assert_eq!( get_surround_pos(None, doc.slice(..), &selection, Some('['), 1), Err(Error::CursorOverlap) ); } #[test] fn test_find_nth_pairs_pos_quote_success() { #[rustfmt::skip] let (doc, selection, expectations) = rope_with_selections_and_expectations( "some 'quoted text' on this 'line'\n'and this one'", " _ ^ _ \n " ); assert_eq!(2, expectations.len()); assert_eq!( find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 1) .expect("find should succeed"), (expectations[0], expectations[1]) ) } #[test] fn test_find_nth_pairs_pos_nested_quote_success() { #[rustfmt::skip] let (doc, selection, expectations) = rope_with_selections_and_expectations( "some 'nested 'quoted' text' on this 'line'\n'and this one'", " _ ^ _ \n " ); assert_eq!(2, expectations.len()); assert_eq!( find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 2) .expect("find should succeed"), (expectations[0], expectations[1]) ) } #[test] fn test_find_nth_pairs_pos_inside_quote_ambiguous() { #[rustfmt::skip] let (doc, selection, _) = rope_with_selections_and_expectations( "some 'nested 'quoted' text' on this 'line'\n'and this one'", " ^ \n " ); assert_eq!( find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 1), Err(Error::CursorOnAmbiguousPair) ) } #[test] fn test_find_nth_closest_pairs_pos_index_range_panic() { #[rustfmt::skip] let (doc, selection, _) = rope_with_selections_and_expectations( "(a)c)", "^^^^^" ); assert_eq!( find_nth_closest_pairs_pos(None, doc.slice(..), selection.primary(), 1), Err(Error::PairNotFound) ) } // Create a Rope and a matching Selection using a specification language. // ^ is a single-point selection. // _ is an expected index. These are returned as a Vec for use in assertions. 
fn rope_with_selections_and_expectations( text: &str, spec: &str, ) -> (Rope, Selection, Vec) { if text.len() != spec.len() { panic!("specification must match text length -- are newlines aligned?"); } let rope = Rope::from(text); let selections: SmallVec<[Range; 1]> = spec .match_indices('^') .map(|(i, _)| Range::point(i)) .collect(); let expectations: Vec = spec.match_indices('_').map(|(i, _)| i).collect(); (rope, Selection::new(selections, 0), expectations) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/syntax.rs000066400000000000000000003321671500614314100243430ustar00rootroot00000000000000mod tree_cursor; use crate::{ auto_pairs::AutoPairs, chars::char_is_line_ending, diagnostic::Severity, regex::Regex, transaction::{ChangeSet, Operation}, RopeSlice, Tendril, }; use ahash::RandomState; use arc_swap::{ArcSwap, Guard}; use bitflags::bitflags; use globset::GlobSet; use hashbrown::raw::RawTable; use helix_stdx::rope::{self, RopeSliceExt}; use slotmap::{DefaultKey as LayerId, HopSlotMap}; use std::{ borrow::Cow, cell::RefCell, collections::{HashMap, HashSet, VecDeque}, fmt::{self, Display, Write}, hash::{Hash, Hasher}, mem::replace, path::{Path, PathBuf}, str::FromStr, sync::Arc, }; use once_cell::sync::{Lazy, OnceCell}; use serde::{ser::SerializeSeq, Deserialize, Serialize}; use helix_loader::grammar::{get_language, load_runtime_file}; pub use tree_cursor::TreeCursor; fn deserialize_regex<'de, D>(deserializer: D) -> Result, D::Error> where D: serde::Deserializer<'de>, { Option::::deserialize(deserializer)? .map(|buf| rope::Regex::new(&buf).map_err(serde::de::Error::custom)) .transpose() } fn deserialize_lsp_config<'de, D>(deserializer: D) -> Result, D::Error> where D: serde::Deserializer<'de>, { Option::::deserialize(deserializer)? 
.map(|toml| toml.try_into().map_err(serde::de::Error::custom)) .transpose() } fn deserialize_tab_width<'de, D>(deserializer: D) -> Result where D: serde::Deserializer<'de>, { usize::deserialize(deserializer).and_then(|n| { if n > 0 && n <= 16 { Ok(n) } else { Err(serde::de::Error::custom( "tab width must be a value from 1 to 16 inclusive", )) } }) } pub fn deserialize_auto_pairs<'de, D>(deserializer: D) -> Result, D::Error> where D: serde::Deserializer<'de>, { Ok(Option::::deserialize(deserializer)?.and_then(AutoPairConfig::into)) } fn default_timeout() -> u64 { 20 } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "kebab-case")] pub struct Configuration { pub language: Vec, #[serde(default)] pub language_server: HashMap, } // largely based on tree-sitter/cli/src/loader.rs #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "kebab-case", deny_unknown_fields)] pub struct LanguageConfiguration { #[serde(rename = "name")] pub language_id: String, // c-sharp, rust, tsx #[serde(rename = "language-id")] // see the table under https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentItem pub language_server_language_id: Option, // csharp, rust, typescriptreact, for the language-server pub scope: String, // source.rust pub file_types: Vec, // filename extension or ends_with? #[serde(default)] pub shebangs: Vec, // interpreter(s) associated with language #[serde(default)] pub roots: Vec, // these indicate project roots <.git, Cargo.toml> #[serde( default, skip_serializing, deserialize_with = "from_comment_tokens", alias = "comment-token" )] pub comment_tokens: Option>, #[serde( default, skip_serializing, deserialize_with = "from_block_comment_tokens" )] pub block_comment_tokens: Option>, pub text_width: Option, pub soft_wrap: Option, #[serde(default)] pub auto_format: bool, #[serde(skip_serializing_if = "Option::is_none")] pub formatter: Option, /// If set, overrides `editor.path-completion`. 
pub path_completion: Option, #[serde(default)] pub diagnostic_severity: Severity, pub grammar: Option, // tree-sitter grammar name, defaults to language_id // content_regex #[serde(default, skip_serializing, deserialize_with = "deserialize_regex")] pub injection_regex: Option, // first_line_regex // #[serde(skip)] pub(crate) highlight_config: OnceCell>>, // tags_config OnceCell<> https://github.com/tree-sitter/tree-sitter/pull/583 #[serde( default, skip_serializing_if = "Vec::is_empty", serialize_with = "serialize_lang_features", deserialize_with = "deserialize_lang_features" )] pub language_servers: Vec, #[serde(skip_serializing_if = "Option::is_none")] pub indent: Option, #[serde(skip)] pub(crate) indent_query: OnceCell>, #[serde(skip)] pub(crate) textobject_query: OnceCell>, #[serde(skip_serializing_if = "Option::is_none")] pub debugger: Option, /// Automatic insertion of pairs to parentheses, brackets, /// etc. Defaults to true. Optionally, this can be a list of 2-tuples /// to specify a list of characters to pair. This overrides the /// global setting. #[serde(default, skip_serializing, deserialize_with = "deserialize_auto_pairs")] pub auto_pairs: Option, pub rulers: Option>, // if set, override editor's rulers /// Hardcoded LSP root directories relative to the workspace root, like `examples` or `tools/fuzz`. /// Falling back to the current working directory if none are configured. pub workspace_lsp_roots: Option>, #[serde(default)] pub persistent_diagnostic_sources: Vec, } #[derive(Debug, PartialEq, Eq, Hash)] pub enum FileType { /// The extension of the file, either the `Path::extension` or the full /// filename if the file does not have an extension. Extension(String), /// A Unix-style path glob. This is compared to the file's absolute path, so /// it can be used to detect files based on their directories. If the glob /// is not an absolute path and does not already start with a glob pattern, /// a glob pattern will be prepended to it. 
Glob(globset::Glob), } impl Serialize for FileType { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, { use serde::ser::SerializeMap; match self { FileType::Extension(extension) => serializer.serialize_str(extension), FileType::Glob(glob) => { let mut map = serializer.serialize_map(Some(1))?; map.serialize_entry("glob", glob.glob())?; map.end() } } } } impl<'de> Deserialize<'de> for FileType { fn deserialize(deserializer: D) -> Result where D: serde::de::Deserializer<'de>, { struct FileTypeVisitor; impl<'de> serde::de::Visitor<'de> for FileTypeVisitor { type Value = FileType; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("string or table") } fn visit_str(self, value: &str) -> Result where E: serde::de::Error, { Ok(FileType::Extension(value.to_string())) } fn visit_map(self, mut map: M) -> Result where M: serde::de::MapAccess<'de>, { match map.next_entry::()? { Some((key, mut glob)) if key == "glob" => { // If the glob isn't an absolute path or already starts // with a glob pattern, add a leading glob so we // properly match relative paths. 
if !glob.starts_with('/') && !glob.starts_with("*/") { glob.insert_str(0, "*/"); } globset::Glob::new(glob.as_str()) .map(FileType::Glob) .map_err(|err| { serde::de::Error::custom(format!("invalid `glob` pattern: {}", err)) }) } Some((key, _value)) => Err(serde::de::Error::custom(format!( "unknown key in `file-types` list: {}", key ))), None => Err(serde::de::Error::custom( "expected a `suffix` key in the `file-types` entry", )), } } } deserializer.deserialize_any(FileTypeVisitor) } } fn from_comment_tokens<'de, D>(deserializer: D) -> Result>, D::Error> where D: serde::Deserializer<'de>, { #[derive(Deserialize)] #[serde(untagged)] enum CommentTokens { Multiple(Vec), Single(String), } Ok( Option::::deserialize(deserializer)?.map(|tokens| match tokens { CommentTokens::Single(val) => vec![val], CommentTokens::Multiple(vals) => vals, }), ) } #[derive(Clone, Debug, Serialize, Deserialize)] pub struct BlockCommentToken { pub start: String, pub end: String, } impl Default for BlockCommentToken { fn default() -> Self { BlockCommentToken { start: "/*".to_string(), end: "*/".to_string(), } } } fn from_block_comment_tokens<'de, D>( deserializer: D, ) -> Result>, D::Error> where D: serde::Deserializer<'de>, { #[derive(Deserialize)] #[serde(untagged)] enum BlockCommentTokens { Multiple(Vec), Single(BlockCommentToken), } Ok( Option::::deserialize(deserializer)?.map(|tokens| match tokens { BlockCommentTokens::Single(val) => vec![val], BlockCommentTokens::Multiple(vals) => vals, }), ) } #[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] #[serde(rename_all = "kebab-case")] pub enum LanguageServerFeature { Format, GotoDeclaration, GotoDefinition, GotoTypeDefinition, GotoReference, GotoImplementation, // Goto, use bitflags, combining previous Goto members? SignatureHelp, Hover, DocumentHighlight, Completion, CodeAction, WorkspaceCommand, DocumentSymbols, WorkspaceSymbols, // Symbols, use bitflags, see above? 
Diagnostics, RenameSymbol, InlayHints, DocumentColors, } impl Display for LanguageServerFeature { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use LanguageServerFeature::*; let feature = match self { Format => "format", GotoDeclaration => "goto-declaration", GotoDefinition => "goto-definition", GotoTypeDefinition => "goto-type-definition", GotoReference => "goto-reference", GotoImplementation => "goto-implementation", SignatureHelp => "signature-help", Hover => "hover", DocumentHighlight => "document-highlight", Completion => "completion", CodeAction => "code-action", WorkspaceCommand => "workspace-command", DocumentSymbols => "document-symbols", WorkspaceSymbols => "workspace-symbols", Diagnostics => "diagnostics", RenameSymbol => "rename-symbol", InlayHints => "inlay-hints", DocumentColors => "document-colors", }; write!(f, "{feature}",) } } #[derive(Debug, Serialize, Deserialize)] #[serde(untagged, rename_all = "kebab-case", deny_unknown_fields)] enum LanguageServerFeatureConfiguration { #[serde(rename_all = "kebab-case")] Features { #[serde(default, skip_serializing_if = "HashSet::is_empty")] only_features: HashSet, #[serde(default, skip_serializing_if = "HashSet::is_empty")] except_features: HashSet, name: String, }, Simple(String), } #[derive(Debug, Default)] pub struct LanguageServerFeatures { pub name: String, pub only: HashSet, pub excluded: HashSet, } impl LanguageServerFeatures { pub fn has_feature(&self, feature: LanguageServerFeature) -> bool { (self.only.is_empty() || self.only.contains(&feature)) && !self.excluded.contains(&feature) } } fn deserialize_lang_features<'de, D>( deserializer: D, ) -> Result, D::Error> where D: serde::Deserializer<'de>, { let raw: Vec = Deserialize::deserialize(deserializer)?; let res = raw .into_iter() .map(|config| match config { LanguageServerFeatureConfiguration::Simple(name) => LanguageServerFeatures { name, ..Default::default() }, LanguageServerFeatureConfiguration::Features { only_features, 
except_features, name, } => LanguageServerFeatures { name, only: only_features, excluded: except_features, }, }) .collect(); Ok(res) } fn serialize_lang_features( map: &Vec, serializer: S, ) -> Result where S: serde::Serializer, { let mut serializer = serializer.serialize_seq(Some(map.len()))?; for features in map { let features = if features.only.is_empty() && features.excluded.is_empty() { LanguageServerFeatureConfiguration::Simple(features.name.to_owned()) } else { LanguageServerFeatureConfiguration::Features { only_features: features.only.clone(), except_features: features.excluded.clone(), name: features.name.to_owned(), } }; serializer.serialize_element(&features)?; } serializer.end() } fn deserialize_required_root_patterns<'de, D>(deserializer: D) -> Result, D::Error> where D: serde::Deserializer<'de>, { let patterns = Vec::::deserialize(deserializer)?; if patterns.is_empty() { return Ok(None); } let mut builder = globset::GlobSetBuilder::new(); for pattern in patterns { let glob = globset::Glob::new(&pattern).map_err(serde::de::Error::custom)?; builder.add(glob); } builder.build().map(Some).map_err(serde::de::Error::custom) } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "kebab-case")] pub struct LanguageServerConfiguration { pub command: String, #[serde(default)] #[serde(skip_serializing_if = "Vec::is_empty")] pub args: Vec, #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub environment: HashMap, #[serde(default, skip_serializing, deserialize_with = "deserialize_lsp_config")] pub config: Option, #[serde(default = "default_timeout")] pub timeout: u64, #[serde( default, skip_serializing, deserialize_with = "deserialize_required_root_patterns" )] pub required_root_patterns: Option, } #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "kebab-case")] pub struct FormatterConfiguration { pub command: String, #[serde(default)] #[serde(skip_serializing_if = "Vec::is_empty")] pub args: Vec, } #[derive(Debug, PartialEq, 
Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "kebab-case")] pub struct AdvancedCompletion { pub name: Option, pub completion: Option, pub default: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "kebab-case", untagged)] pub enum DebugConfigCompletion { Named(String), Advanced(AdvancedCompletion), } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum DebugArgumentValue { String(String), Array(Vec), Boolean(bool), } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "kebab-case")] pub struct DebugTemplate { pub name: String, pub request: String, #[serde(default)] pub completion: Vec, pub args: HashMap, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "kebab-case")] pub struct DebugAdapterConfig { pub name: String, pub transport: String, #[serde(default)] pub command: String, #[serde(default)] pub args: Vec, pub port_arg: Option, pub templates: Vec, #[serde(default)] pub quirks: DebuggerQuirks, } // Different workarounds for adapters' differences #[derive(Debug, Default, PartialEq, Eq, Clone, Serialize, Deserialize)] pub struct DebuggerQuirks { #[serde(default)] pub absolute_paths: bool, } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "kebab-case")] pub struct IndentationConfiguration { #[serde(deserialize_with = "deserialize_tab_width")] pub tab_width: usize, pub unit: String, } /// How the indentation for a newly inserted line should be determined. /// If the selected heuristic is not available (e.g. because the current /// language has no tree-sitter indent queries), a simpler one will be used. #[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "kebab-case")] pub enum IndentationHeuristic { /// Just copy the indentation of the line that the cursor is currently on. 
Simple, /// Use tree-sitter indent queries to compute the expected absolute indentation level of the new line. TreeSitter, /// Use tree-sitter indent queries to compute the expected difference in indentation between the new line /// and the line before. Add this to the actual indentation level of the line before. #[default] Hybrid, } /// Configuration for auto pairs #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "kebab-case", deny_unknown_fields, untagged)] pub enum AutoPairConfig { /// Enables or disables auto pairing. False means disabled. True means to use the default pairs. Enable(bool), /// The mappings of pairs. Pairs(HashMap), } impl Default for AutoPairConfig { fn default() -> Self { AutoPairConfig::Enable(true) } } impl From<&AutoPairConfig> for Option { fn from(auto_pair_config: &AutoPairConfig) -> Self { match auto_pair_config { AutoPairConfig::Enable(false) => None, AutoPairConfig::Enable(true) => Some(AutoPairs::default()), AutoPairConfig::Pairs(pairs) => Some(AutoPairs::new(pairs.iter())), } } } impl From for Option { fn from(auto_pairs_config: AutoPairConfig) -> Self { (&auto_pairs_config).into() } } impl FromStr for AutoPairConfig { type Err = std::str::ParseBoolError; // only do bool parsing for runtime setting fn from_str(s: &str) -> Result { let enable: bool = s.parse()?; Ok(AutoPairConfig::Enable(enable)) } } #[derive(Debug)] pub struct TextObjectQuery { pub query: Query, } #[derive(Debug)] pub enum CapturedNode<'a> { Single(Node<'a>), /// Guaranteed to be not empty Grouped(Vec>), } impl CapturedNode<'_> { pub fn start_byte(&self) -> usize { match self { Self::Single(n) => n.start_byte(), Self::Grouped(ns) => ns[0].start_byte(), } } pub fn end_byte(&self) -> usize { match self { Self::Single(n) => n.end_byte(), Self::Grouped(ns) => ns.last().unwrap().end_byte(), } } pub fn byte_range(&self) -> std::ops::Range { self.start_byte()..self.end_byte() } } /// The maximum number of in-progress matches a TS cursor 
can consider at once. /// This is set to a constant in order to avoid performance problems for medium to large files. Set with `set_match_limit`. /// Using such a limit means that we lose valid captures, so there is fundamentally a tradeoff here. /// /// /// Old tree sitter versions used a limit of 32 by default until this limit was removed in version `0.19.5` (must now be set manually). /// However, this causes performance issues for medium to large files. /// In helix, this problem caused treesitter motions to take multiple seconds to complete in medium-sized rust files (3k loc). /// /// /// Neovim also encountered this problem and reintroduced this limit after it was removed upstream /// (see and ). /// The number used here is fundamentally a tradeoff between breaking some obscure edge cases and performance. /// /// /// Neovim chose 64 for this value somewhat arbitrarily (). /// 64 is too low for some languages though. In particular, it breaks some highlighting for record fields in Erlang record definitions. /// This number can be increased if new syntax highlight breakages are found, as long as the performance penalty is not too high. const TREE_SITTER_MATCH_LIMIT: u32 = 256; impl TextObjectQuery { /// Run the query on the given node and return sub nodes which match given /// capture ("function.inside", "class.around", etc). /// /// Captures may contain multiple nodes by using quantifiers (+, *, etc), /// and support for this is partial and could use improvement. /// /// ```query /// (comment)+ @capture /// /// ; OR /// ( /// (comment)* /// . /// (function) /// ) @capture /// ``` pub fn capture_nodes<'a>( &'a self, capture_name: &str, node: Node<'a>, slice: RopeSlice<'a>, cursor: &'a mut QueryCursor, ) -> Option>> { self.capture_nodes_any(&[capture_name], node, slice, cursor) } /// Find the first capture that exists out of all given `capture_names` /// and return sub nodes that match this capture. 
pub fn capture_nodes_any<'a>( &'a self, capture_names: &[&str], node: Node<'a>, slice: RopeSlice<'a>, cursor: &'a mut QueryCursor, ) -> Option>> { let capture_idx = capture_names .iter() .find_map(|cap| self.query.capture_index_for_name(cap))?; cursor.set_match_limit(TREE_SITTER_MATCH_LIMIT); let nodes = cursor .captures(&self.query, node, RopeProvider(slice)) .filter_map(move |(mat, _)| { let nodes: Vec<_> = mat .captures .iter() .filter_map(|cap| (cap.index == capture_idx).then_some(cap.node)) .collect(); if nodes.len() > 1 { Some(CapturedNode::Grouped(nodes)) } else { nodes.into_iter().map(CapturedNode::Single).next() } }); Some(nodes) } } pub fn read_query(language: &str, filename: &str) -> String { static INHERITS_REGEX: Lazy = Lazy::new(|| Regex::new(r";+\s*inherits\s*:?\s*([a-z_,()-]+)\s*").unwrap()); let query = load_runtime_file(language, filename).unwrap_or_default(); // replaces all "; inherits (,)*" with the queries of the given language(s) INHERITS_REGEX .replace_all(&query, |captures: ®ex::Captures| { captures[1] .split(',') .fold(String::new(), |mut output, language| { // `write!` to a String cannot fail. 
write!(output, "\n{}\n", read_query(language, filename)).unwrap(); output }) }) .to_string() } impl LanguageConfiguration { fn initialize_highlight(&self, scopes: &[String]) -> Option> { let highlights_query = read_query(&self.language_id, "highlights.scm"); // always highlight syntax errors // highlights_query += "\n(ERROR) @error"; let injections_query = read_query(&self.language_id, "injections.scm"); let locals_query = read_query(&self.language_id, "locals.scm"); if highlights_query.is_empty() { None } else { let language = get_language(self.grammar.as_deref().unwrap_or(&self.language_id)) .map_err(|err| { log::error!( "Failed to load tree-sitter parser for language {:?}: {:#}", self.language_id, err ) }) .ok()?; let config = HighlightConfiguration::new( language, &highlights_query, &injections_query, &locals_query, ) .map_err(|err| log::error!("Could not parse queries for language {:?}. Are your grammars out of sync? Try running 'hx --grammar fetch' and 'hx --grammar build'. This query could not be parsed: {:?}", self.language_id, err)) .ok()?; config.configure(scopes); Some(Arc::new(config)) } } pub fn reconfigure(&self, scopes: &[String]) { if let Some(Some(config)) = self.highlight_config.get() { config.configure(scopes); } } pub fn highlight_config(&self, scopes: &[String]) -> Option> { self.highlight_config .get_or_init(|| self.initialize_highlight(scopes)) .clone() } pub fn is_highlight_initialized(&self) -> bool { self.highlight_config.get().is_some() } pub fn indent_query(&self) -> Option<&Query> { self.indent_query .get_or_init(|| self.load_query("indents.scm")) .as_ref() } pub fn textobject_query(&self) -> Option<&TextObjectQuery> { self.textobject_query .get_or_init(|| { self.load_query("textobjects.scm") .map(|query| TextObjectQuery { query }) }) .as_ref() } pub fn scope(&self) -> &str { &self.scope } fn load_query(&self, kind: &str) -> Option { let query_text = read_query(&self.language_id, kind); if query_text.is_empty() { return None; } let lang 
= &self.highlight_config.get()?.as_ref()?.language; Query::new(lang, &query_text) .map_err(|e| { log::error!( "Failed to parse {} queries for {}: {}", kind, self.language_id, e ) }) .ok() } } #[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] #[serde(default, rename_all = "kebab-case", deny_unknown_fields)] pub struct SoftWrap { /// Soft wrap lines that exceed viewport width. Default to off // NOTE: Option on purpose because the struct is shared between language config and global config. // By default the option is None so that the language config falls back to the global config unless explicitly set. pub enable: Option, /// Maximum space left free at the end of the line. /// This space is used to wrap text at word boundaries. If that is not possible within this limit /// the word is simply split at the end of the line. /// /// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views. /// /// Default to 20 pub max_wrap: Option, /// Maximum number of indentation that can be carried over from the previous line when softwrapping. /// If a line is indented further then this limit it is rendered at the start of the viewport instead. /// /// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views. 
/// /// Default to 40 pub max_indent_retain: Option, /// Indicator placed at the beginning of softwrapped lines /// /// Defaults to ↪ pub wrap_indicator: Option, /// Softwrap at `text_width` instead of viewport width if it is shorter pub wrap_at_text_width: Option, } #[derive(Debug)] struct FileTypeGlob { glob: globset::Glob, language_id: usize, } impl FileTypeGlob { fn new(glob: globset::Glob, language_id: usize) -> Self { Self { glob, language_id } } } #[derive(Debug)] struct FileTypeGlobMatcher { matcher: globset::GlobSet, file_types: Vec, } impl FileTypeGlobMatcher { fn new(file_types: Vec) -> Result { let mut builder = globset::GlobSetBuilder::new(); for file_type in &file_types { builder.add(file_type.glob.clone()); } Ok(Self { matcher: builder.build()?, file_types, }) } fn language_id_for_path(&self, path: &Path) -> Option<&usize> { self.matcher .matches(path) .iter() .filter_map(|idx| self.file_types.get(*idx)) .max_by_key(|file_type| file_type.glob.glob().len()) .map(|file_type| &file_type.language_id) } } // Expose loader as Lazy<> global since it's always static? #[derive(Debug)] pub struct Loader { // highlight_names ? 
language_configs: Vec>, language_config_ids_by_extension: HashMap, // Vec language_config_ids_glob_matcher: FileTypeGlobMatcher, language_config_ids_by_shebang: HashMap, language_server_configs: HashMap, scopes: ArcSwap>, } pub type LoaderError = globset::Error; impl Loader { pub fn new(config: Configuration) -> Result { let mut language_configs = Vec::new(); let mut language_config_ids_by_extension = HashMap::new(); let mut language_config_ids_by_shebang = HashMap::new(); let mut file_type_globs = Vec::new(); for config in config.language { // get the next id let language_id = language_configs.len(); for file_type in &config.file_types { // entry().or_insert(Vec::new).push(language_id); match file_type { FileType::Extension(extension) => { language_config_ids_by_extension.insert(extension.clone(), language_id); } FileType::Glob(glob) => { file_type_globs.push(FileTypeGlob::new(glob.to_owned(), language_id)); } }; } for shebang in &config.shebangs { language_config_ids_by_shebang.insert(shebang.clone(), language_id); } language_configs.push(Arc::new(config)); } Ok(Self { language_configs, language_config_ids_by_extension, language_config_ids_glob_matcher: FileTypeGlobMatcher::new(file_type_globs)?, language_config_ids_by_shebang, language_server_configs: config.language_server, scopes: ArcSwap::from_pointee(Vec::new()), }) } pub fn language_config_for_file_name(&self, path: &Path) -> Option> { // Find all the language configurations that match this file name // or a suffix of the file name. 
let configuration_id = self .language_config_ids_glob_matcher .language_id_for_path(path) .or_else(|| { path.extension() .and_then(|extension| extension.to_str()) .and_then(|extension| self.language_config_ids_by_extension.get(extension)) }); configuration_id.and_then(|&id| self.language_configs.get(id).cloned()) // TODO: content_regex handling conflict resolution } pub fn language_config_for_shebang( &self, source: RopeSlice, ) -> Option> { let line = Cow::from(source.line(0)); static SHEBANG_REGEX: Lazy = Lazy::new(|| Regex::new(&["^", SHEBANG].concat()).unwrap()); let configuration_id = SHEBANG_REGEX .captures(&line) .and_then(|cap| self.language_config_ids_by_shebang.get(&cap[1])); configuration_id.and_then(|&id| self.language_configs.get(id).cloned()) } pub fn language_config_for_scope(&self, scope: &str) -> Option> { self.language_configs .iter() .find(|config| config.scope == scope) .cloned() } pub fn language_config_for_language_id( &self, id: impl PartialEq, ) -> Option> { self.language_configs .iter() .find(|config| id.eq(&config.language_id)) .cloned() } /// Unlike `language_config_for_language_id`, which only returns Some for an exact id, this /// function will perform a regex match on the given string to find the closest language match. pub fn language_config_for_name(&self, slice: RopeSlice) -> Option> { // PERF: If the name matches up with the id, then this saves the need to do expensive regex. let shortcircuit = self.language_config_for_language_id(slice); if shortcircuit.is_some() { return shortcircuit; } // If the name did not match up with a known id, then match on injection regex. 
let mut best_match_length = 0; let mut best_match_position = None; for (i, configuration) in self.language_configs.iter().enumerate() { if let Some(injection_regex) = &configuration.injection_regex { if let Some(mat) = injection_regex.find(slice.regex_input()) { let length = mat.end() - mat.start(); if length > best_match_length { best_match_position = Some(i); best_match_length = length; } } } } best_match_position.map(|i| self.language_configs[i].clone()) } pub fn language_configuration_for_injection_string( &self, capture: &InjectionLanguageMarker, ) -> Option> { match capture { InjectionLanguageMarker::LanguageId(id) => self.language_config_for_language_id(*id), InjectionLanguageMarker::Name(name) => self.language_config_for_name(*name), InjectionLanguageMarker::Filename(file) => { let path_str: Cow = (*file).into(); self.language_config_for_file_name(Path::new(path_str.as_ref())) } InjectionLanguageMarker::Shebang(shebang) => { let shebang_str: Cow = (*shebang).into(); self.language_config_ids_by_shebang .get(shebang_str.as_ref()) .and_then(|&id| self.language_configs.get(id).cloned()) } } } pub fn language_configs(&self) -> impl Iterator> { self.language_configs.iter() } pub fn language_server_configs(&self) -> &HashMap { &self.language_server_configs } pub fn set_scopes(&self, scopes: Vec) { self.scopes.store(Arc::new(scopes)); // Reconfigure existing grammars for config in self .language_configs .iter() .filter(|cfg| cfg.is_highlight_initialized()) { config.reconfigure(&self.scopes()); } } pub fn scopes(&self) -> Guard>> { self.scopes.load() } } pub struct TsParser { parser: tree_sitter::Parser, pub cursors: Vec, } // could also just use a pool, or a single instance? thread_local! 
{ pub static PARSER: RefCell = RefCell::new(TsParser { parser: Parser::new(), cursors: Vec::new(), }) } #[derive(Debug)] pub struct Syntax { layers: HopSlotMap, root: LayerId, loader: Arc>, } fn byte_range_to_str(range: std::ops::Range, source: RopeSlice) -> Cow { Cow::from(source.byte_slice(range)) } impl Syntax { pub fn new( source: RopeSlice, config: Arc, loader: Arc>, ) -> Option { let root_layer = LanguageLayer { tree: None, config, depth: 0, flags: LayerUpdateFlags::empty(), ranges: vec![Range { start_byte: 0, end_byte: usize::MAX, start_point: Point::new(0, 0), end_point: Point::new(usize::MAX, usize::MAX), }], parent: None, }; // track scope_descriptor: a Vec of scopes for item in tree let mut layers = HopSlotMap::default(); let root = layers.insert(root_layer); let mut syntax = Self { root, layers, loader, }; let res = syntax.update(source, source, &ChangeSet::new(source)); if res.is_err() { log::error!("TS parser failed, disabling TS for the current buffer: {res:?}"); return None; } Some(syntax) } pub fn update( &mut self, old_source: RopeSlice, source: RopeSlice, changeset: &ChangeSet, ) -> Result<(), Error> { let mut queue = VecDeque::new(); queue.push_back(self.root); let loader = self.loader.load(); let scopes = loader.scopes.load(); let injection_callback = |language: &InjectionLanguageMarker| { loader .language_configuration_for_injection_string(language) .and_then(|language_config| language_config.highlight_config(&scopes)) }; // Convert the changeset into tree sitter edits. let edits = generate_edits(old_source, changeset); // This table allows inverse indexing of `layers`. // That is by hashing a `Layer` you can find // the `LayerId` of an existing equivalent `Layer` in `layers`. // // It is used to determine if a new layer exists for an injection // or if an existing layer needs to be updated. 
let mut layers_table = RawTable::with_capacity(self.layers.len()); let layers_hasher = RandomState::new(); // Use the edits to update all layers markers fn point_add(a: Point, b: Point) -> Point { if b.row > 0 { Point::new(a.row.saturating_add(b.row), b.column) } else { Point::new(0, a.column.saturating_add(b.column)) } } fn point_sub(a: Point, b: Point) -> Point { if a.row > b.row { Point::new(a.row.saturating_sub(b.row), a.column) } else { Point::new(0, a.column.saturating_sub(b.column)) } } for (layer_id, layer) in self.layers.iter_mut() { // The root layer always covers the whole range (0..usize::MAX) if layer.depth == 0 { layer.flags = LayerUpdateFlags::MODIFIED; continue; } if !edits.is_empty() { for range in &mut layer.ranges { // Roughly based on https://github.com/tree-sitter/tree-sitter/blob/ddeaa0c7f534268b35b4f6cb39b52df082754413/lib/src/subtree.c#L691-L720 for edit in edits.iter().rev() { let is_pure_insertion = edit.old_end_byte == edit.start_byte; // if edit is after range, skip if edit.start_byte > range.end_byte { // TODO: || (is_noop && edit.start_byte == range.end_byte) continue; } // if edit is before range, shift entire range by len if edit.old_end_byte < range.start_byte { range.start_byte = edit.new_end_byte + (range.start_byte - edit.old_end_byte); range.start_point = point_add( edit.new_end_position, point_sub(range.start_point, edit.old_end_position), ); range.end_byte = edit .new_end_byte .saturating_add(range.end_byte - edit.old_end_byte); range.end_point = point_add( edit.new_end_position, point_sub(range.end_point, edit.old_end_position), ); layer.flags |= LayerUpdateFlags::MOVED; } // if the edit starts in the space before and extends into the range else if edit.start_byte < range.start_byte { range.start_byte = edit.new_end_byte; range.start_point = edit.new_end_position; range.end_byte = range .end_byte .saturating_sub(edit.old_end_byte) .saturating_add(edit.new_end_byte); range.end_point = point_add( edit.new_end_position, 
                        point_sub(range.end_point, edit.old_end_position),
                    );

                    layer.flags = LayerUpdateFlags::MODIFIED;
                }
                // If the edit is an insertion at the start of the tree, shift
                else if edit.start_byte == range.start_byte && is_pure_insertion {
                    range.start_byte = edit.new_end_byte;
                    range.start_point = edit.new_end_position;

                    layer.flags |= LayerUpdateFlags::MOVED;
                } else {
                    // Edit lies inside the range: only the end moves.
                    range.end_byte = range
                        .end_byte
                        .saturating_sub(edit.old_end_byte)
                        .saturating_add(edit.new_end_byte);
                    range.end_point = point_add(
                        edit.new_end_position,
                        point_sub(range.end_point, edit.old_end_position),
                    );

                    layer.flags = LayerUpdateFlags::MODIFIED;
                }
            }
        }
    }

    // Index the (updated) layer so injections found below can reuse it.
    let hash = layers_hasher.hash_one(layer);
    // Safety: insert_no_grow is unsafe because it assumes that the table
    // has enough capacity to hold additional elements.
    // This is always the case as we reserved enough capacity above.
    unsafe { layers_table.insert_no_grow(hash, layer_id) };
}

PARSER.with(|ts_parser| {
    let ts_parser = &mut ts_parser.borrow_mut();
    ts_parser.parser.set_timeout_micros(1000 * 500); // half a second is pretty generous
    // Reuse a pooled cursor when available.
    let mut cursor = ts_parser.cursors.pop().unwrap_or_default();
    // TODO: might need to set cursor range
    cursor.set_byte_range(0..usize::MAX);
    cursor.set_match_limit(TREE_SITTER_MATCH_LIMIT);

    let source_slice = source.slice(..);

    // Process layers breadth-first; injections push child layers onto the queue.
    while let Some(layer_id) = queue.pop_front() {
        let layer = &mut self.layers[layer_id];

        // Mark the layer as touched
        layer.flags |= LayerUpdateFlags::TOUCHED;

        // If a tree already exists, notify it of changes.
        if let Some(tree) = &mut layer.tree {
            if layer
                .flags
                .intersects(LayerUpdateFlags::MODIFIED | LayerUpdateFlags::MOVED)
            {
                for edit in edits.iter().rev() {
                    // Apply the edits in reverse.
                    // If we applied them in order then edit 1 would disrupt the positioning of edit 2.
                    tree.edit(edit);
                }
            }

            if layer.flags.contains(LayerUpdateFlags::MODIFIED) {
                // Re-parse the tree.
                layer.parse(&mut ts_parser.parser, source)?;
            }
        } else {
            // always parse if this layer has never been parsed before
            layer.parse(&mut ts_parser.parser, source)?;
        }

        // Switch to an immutable borrow.
        let layer = &self.layers[layer_id];

        // Process injections.
        let matches = cursor.matches(
            &layer.config.injections_query,
            layer.tree().root_node(),
            RopeProvider(source_slice),
        );
        // One accumulator slot per combined-injection pattern:
        // (language marker, content nodes, included-children mode).
        let mut combined_injections = vec![
            (None, Vec::new(), IncludedChildren::default());
            layer.config.combined_injections_patterns.len()
        ];
        let mut injections = Vec::new();
        // Tracks the end of the last accepted injection so overlapping
        // matches are skipped.
        let mut last_injection_end = 0;
        for mat in matches {
            let (injection_capture, content_node, included_children) = layer
                .config
                .injection_for_match(&layer.config.injections_query, &mat, source_slice);

            // in case this is a combined injection save it for more processing later
            if let Some(combined_injection_idx) = layer
                .config
                .combined_injections_patterns
                .iter()
                .position(|&pattern| pattern == mat.pattern_index)
            {
                let entry = &mut combined_injections[combined_injection_idx];
                if injection_capture.is_some() {
                    entry.0 = injection_capture;
                }
                if let Some(content_node) = content_node {
                    // Only accept nodes that don't overlap a previous injection.
                    if content_node.start_byte() >= last_injection_end {
                        entry.1.push(content_node);
                        last_injection_end = content_node.end_byte();
                    }
                }
                entry.2 = included_children;
                continue;
            }

            // Explicitly remove this match so that none of its other captures will remain
            // in the stream of captures.
            mat.remove();

            // If a language is found with the given name, then add a new language layer
            // to the highlighted document.
            if let (Some(injection_capture), Some(content_node)) =
                (injection_capture, content_node)
            {
                if let Some(config) = (injection_callback)(&injection_capture) {
                    // Clip the injection to this layer's own ranges.
                    let ranges =
                        intersect_ranges(&layer.ranges, &[content_node], included_children);

                    if !ranges.is_empty() {
                        // Skip matches overlapping an earlier injection.
                        if content_node.start_byte() < last_injection_end {
                            continue;
                        }
                        last_injection_end = content_node.end_byte();
                        injections.push((config, ranges));
                    }
                }
            }
        }

        // Flush the combined injections gathered above: all content nodes of
        // one pattern are parsed together as a single layer.
        for (lang_name, content_nodes, included_children) in combined_injections {
            if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty()) {
                if let Some(config) = (injection_callback)(&lang_name) {
                    let ranges =
                        intersect_ranges(&layer.ranges, &content_nodes, included_children);
                    if !ranges.is_empty() {
                        injections.push((config, ranges));
                    }
                }
            }
        }

        let depth = layer.depth + 1;
        // TODO: can't inline this since matches borrows self.layers
        for (config, ranges) in injections {
            let parent = Some(layer_id);
            let new_layer = LanguageLayer {
                tree: None,
                config,
                depth,
                ranges,
                flags: LayerUpdateFlags::empty(),
                parent: None,
            };

            // Find an identical existing layer
            let layer = layers_table
                .get(layers_hasher.hash_one(&new_layer), |&it| {
                    self.layers[it] == new_layer
                })
                .copied();

            // ...or insert a new one.
            let layer_id = layer.unwrap_or_else(|| self.layers.insert(new_layer));
            self.layers[layer_id].parent = parent;

            queue.push_back(layer_id);
        }

        // TODO: pre-process local scopes at this time, rather than highlight?
        // would solve problems with locals not working across boundaries
    }

    // Return the cursor back in the pool.
    ts_parser.cursors.push(cursor);

    // Reset all `LayerUpdateFlags` and remove all untouched layers
    self.layers.retain(|_, layer| {
        replace(&mut layer.flags, LayerUpdateFlags::empty())
            .contains(LayerUpdateFlags::TOUCHED)
    });

    Ok(())
})
}

/// The root layer's parse tree.
pub fn tree(&self) -> &Tree {
    self.layers[self.root].tree()
}

/// Iterate over the highlighted regions for a given slice of source code.
pub fn highlight_iter<'a>(
    &'a self,
    source: RopeSlice<'a>,
    range: Option>,
    cancellation_flag: Option<&'a AtomicUsize>,
) -> impl Iterator> + 'a {
    // Build one HighlightIterLayer per syntax layer that has captures.
    let mut layers = self
        .layers
        .iter()
        .filter_map(|(_, layer)| {
            // TODO: if range doesn't overlap layer range, skip it

            // Reuse a cursor from the pool if available.
            let mut cursor = PARSER.with(|ts_parser| {
                let highlighter = &mut ts_parser.borrow_mut();
                highlighter.cursors.pop().unwrap_or_default()
            });

            // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
            // prevents them from being moved. But both of these values are really just
            // pointers, so it's actually ok to move them.
            let cursor_ref = unsafe {
                mem::transmute::<&mut tree_sitter::QueryCursor, &mut tree_sitter::QueryCursor>(
                    &mut cursor,
                )
            };

            // if reusing cursors & no range this resets to whole range
            cursor_ref.set_byte_range(range.clone().unwrap_or(0..usize::MAX));
            cursor_ref.set_match_limit(TREE_SITTER_MATCH_LIMIT);

            let mut captures = cursor_ref
                .captures(
                    &layer.config.query,
                    layer.tree().root_node(),
                    RopeProvider(source),
                )
                .peekable();

            // If there's no captures, skip the layer
            captures.peek()?;

            Some(HighlightIterLayer {
                highlight_end_stack: Vec::new(),
                // Root local scope spans the entire document.
                scope_stack: vec![LocalScope {
                    inherits: false,
                    range: 0..usize::MAX,
                    local_defs: Vec::new(),
                }],
                cursor,
                _tree: None,
                captures: RefCell::new(captures),
                config: layer.config.as_ref(), // TODO: just reuse `layer`
                depth: layer.depth,            // TODO: just reuse `layer`
            })
        })
        .collect::>();

    // Order layers by their next event (see HighlightIterLayer::sort_key).
    layers.sort_unstable_by_key(|layer| layer.sort_key());

    let mut result = HighlightIter {
        source,
        byte_offset: range.map_or(0, |r| r.start),
        cancellation_flag,
        iter_count: 0,
        layers,
        next_event: None,
        last_highlight_range: None,
    };
    result.sort_layers();
    result
}

/// The parse tree of the deepest layer that contains the byte range,
/// falling back to the root tree.
pub fn tree_for_byte_range(&self, start: usize, end: usize) -> &Tree {
    let mut container_id = self.root;

    for (layer_id, layer) in self.layers.iter() {
        if layer.depth > self.layers[container_id].depth
            && layer.contains_byte_range(start, end)
        {
            container_id =
                layer_id;
        }
    }

    self.layers[container_id].tree()
}

/// Smallest *named* node spanning `start..end`, searched in the deepest
/// layer containing that range.
pub fn named_descendant_for_byte_range(&self, start: usize, end: usize) -> Option> {
    self.tree_for_byte_range(start, end)
        .root_node()
        .named_descendant_for_byte_range(start, end)
}

/// Smallest node (named or anonymous) spanning `start..end`, searched in
/// the deepest layer containing that range.
pub fn descendant_for_byte_range(&self, start: usize, end: usize) -> Option> {
    self.tree_for_byte_range(start, end)
        .root_node()
        .descendant_for_byte_range(start, end)
}

/// A cursor that can walk across all layers of this syntax tree.
pub fn walk(&self) -> TreeCursor<'_> {
    // data structure to find the smallest range that contains a point
    // when some of the ranges in the structure can overlap.
    TreeCursor::new(&self.layers, self.root)
}

// Commenting
// comment_strings_for_pos
// is_commented

// Indentation
// suggested_indent_for_line_at_buffer_row
// suggested_indent_for_buffer_row
// indent_level_for_line

// TODO: Folding
}

bitflags! {
    /// Flags that track the status of a layer
    /// in the `Syntax::update` function
    #[derive(Debug)]
    struct LayerUpdateFlags : u32{
        const MODIFIED = 0b001;
        const MOVED = 0b010;
        const TOUCHED = 0b100;
    }
}

/// One language's parse state over a set of byte ranges of the document
/// (the root layer, or an injection).
#[derive(Debug)]
pub struct LanguageLayer {
    // mode
    // grammar
    pub config: Arc,
    pub(crate) tree: Option,
    pub ranges: Vec,
    pub depth: u32,
    flags: LayerUpdateFlags,
    parent: Option,
}

/// This PartialEq implementation only checks whether
/// two layers are theoretically identical (meaning they highlight the same text range with the same language).
/// It does not check whether the layers have the same internal treesitter
/// state.
impl PartialEq for LanguageLayer {
    fn eq(&self, other: &Self) -> bool {
        self.depth == other.depth
            && self.config.language == other.config.language
            && self.ranges == other.ranges
    }
}

/// Hash implementation belongs to PartialEq implementation above.
/// See its documentation for details.
impl Hash for LanguageLayer {
    // Must hash exactly the fields PartialEq compares (depth, language, ranges).
    fn hash(&self, state: &mut H) {
        self.depth.hash(state);
        self.config.language.hash(state);
        self.ranges.hash(state);
    }
}

impl LanguageLayer {
    /// The layer's parse tree. Panics if the layer was never parsed.
    pub fn tree(&self) -> &Tree {
        // TODO: no unwrap
        self.tree.as_ref().unwrap()
    }

    /// (Re)parse this layer's ranges, reusing the old tree incrementally
    /// when one exists.
    fn parse(&mut self, parser: &mut Parser, source: RopeSlice) -> Result<(), Error> {
        parser
            .set_included_ranges(&self.ranges)
            .map_err(|_| Error::InvalidRanges)?;

        parser
            .set_language(&self.config.language)
            .map_err(|_| Error::InvalidLanguage)?;

        // unsafe { syntax.parser.set_cancellation_flag(cancellation_flag) };
        // Feed the rope to tree-sitter chunk by chunk.
        let tree = parser
            .parse_with(
                &mut |byte, _| {
                    if byte <= source.len_bytes() {
                        let (chunk, start_byte, _, _) = source.chunk_at_byte(byte);
                        &chunk.as_bytes()[byte - start_byte..]
                    } else {
                        // out of range
                        &[]
                    }
                },
                self.tree.as_ref(),
            )
            .ok_or(Error::Cancelled)?;
        // unsafe { ts_parser.parser.set_cancellation_flag(None) };
        self.tree = Some(tree);
        Ok(())
    }

    /// Whether the layer contains the given byte range.
    ///
    /// If the layer has multiple ranges (i.e. combined injections), the
    /// given range is considered contained if it is within the start and
    /// end bytes of the first and last ranges **and** if the given range
    /// starts or ends within any of the layer's ranges.
    fn contains_byte_range(&self, start: usize, end: usize) -> bool {
        let layer_start = self
            .ranges
            .first()
            .expect("ranges should not be empty")
            .start_byte;
        let layer_end = self
            .ranges
            .last()
            .expect("ranges should not be empty")
            .end_byte;

        layer_start <= start
            && layer_end >= end
            && self.ranges.iter().any(|range| {
                let byte_range = range.start_byte..range.end_byte;
                byte_range.contains(&start) || byte_range.contains(&end)
            })
    }
}

/// Translate a helix `ChangeSet` into the `tree_sitter::InputEdit`s that
/// tree-sitter needs to update its trees incrementally.
pub(crate) fn generate_edits(
    old_text: RopeSlice,
    changeset: &ChangeSet,
) -> Vec {
    use Operation::*;
    let mut old_pos = 0;

    let mut edits = Vec::new();

    if changeset.changes.is_empty() {
        return edits;
    }

    let mut iter = changeset.changes.iter().peekable();

    // TODO; this is a lot easier with Change instead of Operation.
    // Convert a char position in `text` into (byte offset, row/col Point).
    fn point_at_pos(text: RopeSlice, pos: usize) -> (usize, Point) {
        let byte = text.char_to_byte(pos); // <- attempted to index past end
        let line = text.char_to_line(pos);
        let line_start_byte = text.line_to_byte(line);
        let col = byte - line_start_byte;

        (byte, Point::new(line, col))
    }

    // Advance `point` over every char of `text`, tracking line breaks
    // (CRLF counts as a single line ending).
    fn traverse(point: Point, text: &Tendril) -> Point {
        let Point {
            mut row,
            mut column,
        } = point;

        // TODO: there should be a better way here.
        let mut chars = text.chars().peekable();
        while let Some(ch) = chars.next() {
            if char_is_line_ending(ch) && !(ch == '\r' && chars.peek() == Some(&'\n')) {
                row += 1;
                column = 0;
            } else {
                column += 1;
            }
        }
        Point { row, column }
    }

    // Walk the operations, tracking the position in the *old* text.
    while let Some(change) = iter.next() {
        let len = match change {
            Delete(i) | Retain(i) => *i,
            Insert(_) => 0,
        };
        let mut old_end = old_pos + len;

        match change {
            Retain(_) => {}
            Delete(_) => {
                let (start_byte, start_position) = point_at_pos(old_text, old_pos);
                let (old_end_byte, old_end_position) = point_at_pos(old_text, old_end);

                // deletion
                edits.push(tree_sitter::InputEdit {
                    start_byte,                       // old_pos to byte
                    old_end_byte,                     // old_end to byte
                    new_end_byte: start_byte,         // old_pos to byte
                    start_position,                   // old pos to coords
                    old_end_position,                 // old_end to coords
                    new_end_position: start_position, // old pos to coords
                });
            }
            Insert(s) => {
                let (start_byte, start_position) = point_at_pos(old_text, old_pos);

                // a subsequent delete means a replace, consume it
                if let Some(Delete(len)) = iter.peek() {
                    old_end = old_pos + len;
                    let (old_end_byte, old_end_position) = point_at_pos(old_text, old_end);
                    iter.next();

                    // replacement
                    edits.push(tree_sitter::InputEdit {
                        start_byte,                                    // old_pos to byte
                        old_end_byte,                                  // old_end to byte
                        new_end_byte: start_byte + s.len(),            // old_pos to byte + s.len()
                        start_position,                                // old pos to coords
                        old_end_position,                              // old_end to coords
                        new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over)
                    });
                } else {
                    // insert
                    edits.push(tree_sitter::InputEdit {
                        start_byte, // old_pos to
byte old_end_byte: start_byte, // same new_end_byte: start_byte + s.len(), // old_pos + s.len() start_position, // old pos to coords old_end_position: start_position, // same new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over) }); } } } old_pos = old_end; } edits } use std::sync::atomic::{AtomicUsize, Ordering}; use std::{iter, mem, ops, str}; use tree_sitter::{ Language as Grammar, Node, Parser, Point, Query, QueryCaptures, QueryCursor, QueryError, QueryMatch, Range, TextProvider, Tree, }; const CANCELLATION_CHECK_INTERVAL: usize = 100; /// Indicates which highlight should be applied to a region of source code. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Highlight(pub usize); /// Represents the reason why syntax highlighting failed. #[derive(Debug, PartialEq, Eq)] pub enum Error { Cancelled, InvalidLanguage, InvalidRanges, Unknown, } /// Represents a single step in rendering a syntax-highlighted document. #[derive(Copy, Clone, Debug)] pub enum HighlightEvent { Source { start: usize, end: usize }, HighlightStart(Highlight), HighlightEnd, } /// Contains the data needed to highlight code written in a particular language. /// /// This struct is immutable and can be shared between threads. 
#[derive(Debug)]
pub struct HighlightConfiguration {
    pub language: Grammar,
    // Combined locals + highlights query (locals patterns come first).
    pub query: Query,
    injections_query: Query,
    // Pattern indices of injections marked `injection.combined`.
    combined_injections_patterns: Vec,
    // Index of the first pattern belonging to the highlights section of `query`.
    highlights_pattern_index: usize,
    // Capture index -> recognized Highlight, rebuilt by `configure`.
    highlight_indices: ArcSwap>>,
    // Per-pattern flag: pattern is disabled for local variables (`(#is-not? local)`).
    non_local_variable_patterns: Vec,
    injection_content_capture_index: Option,
    injection_language_capture_index: Option,
    injection_filename_capture_index: Option,
    injection_shebang_capture_index: Option,
    local_scope_capture_index: Option,
    local_def_capture_index: Option,
    local_def_value_capture_index: Option,
    local_ref_capture_index: Option,
}

/// A single local-variable definition tracked during highlighting.
#[derive(Debug)]
struct LocalDef<'a> {
    name: Cow<'a, str>,
    value_range: ops::Range,
    highlight: Option,
}

/// A lexical scope holding local definitions found inside it.
#[derive(Debug)]
struct LocalScope<'a> {
    inherits: bool,
    range: ops::Range,
    local_defs: Vec>,
}

/// Merges per-layer capture streams into a single ordered event stream.
#[derive(Debug)]
struct HighlightIter<'a> {
    source: RopeSlice<'a>,
    byte_offset: usize,
    cancellation_flag: Option<&'a AtomicUsize>,
    layers: Vec>,
    iter_count: usize,
    next_event: Option,
    last_highlight_range: Option<(usize, usize, u32)>,
}

// Adapter to convert rope chunks to bytes
pub struct ChunksBytes<'a> {
    chunks: ropey::iter::Chunks<'a>,
}
impl<'a> Iterator for ChunksBytes<'a> {
    type Item = &'a [u8];
    fn next(&mut self) -> Option {
        self.chunks.next().map(str::as_bytes)
    }
}

/// Lets tree-sitter queries read node text straight out of a rope slice.
pub struct RopeProvider<'a>(pub RopeSlice<'a>);

impl<'a> TextProvider<&'a [u8]> for RopeProvider<'a> {
    type I = ChunksBytes<'a>;

    fn text(&mut self, node: Node) -> Self::I {
        let fragment = self.0.byte_slice(node.start_byte()..node.end_byte());
        ChunksBytes {
            chunks: fragment.chunks(),
        }
    }
}

/// Per-layer state of the highlight iterator: pending captures plus the
/// stacks tracking open highlights and local scopes.
struct HighlightIterLayer<'a> {
    _tree: Option,
    cursor: QueryCursor,
    captures: RefCell, &'a [u8]>>>,
    config: &'a HighlightConfiguration,
    highlight_end_stack: Vec,
    scope_stack: Vec>,
    depth: u32,
}

impl fmt::Debug for HighlightIterLayer<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("HighlightIterLayer").finish()
    }
}

impl HighlightConfiguration {
    /// Creates a `HighlightConfiguration` for a given `Grammar` and set of highlighting
    ///
/// queries.
///
/// # Parameters
///
/// * `language` - The Tree-sitter `Grammar` that should be used for parsing.
/// * `highlights_query` - A string containing tree patterns for syntax highlighting. This
///   should be non-empty, otherwise no syntax highlights will be added.
/// * `injections_query` - A string containing tree patterns for injecting other languages
///   into the document. This can be empty if no injections are desired.
/// * `locals_query` - A string containing tree patterns for tracking local variable
///   definitions and references. This can be empty if local variable tracking is not needed.
///
/// Returns a `HighlightConfiguration` that can then be used with the `highlight` method.
pub fn new(
    language: Grammar,
    highlights_query: &str,
    injection_query: &str,
    locals_query: &str,
) -> Result {
    // Concatenate the query strings, keeping track of the start offset of each section.
    let mut query_source = String::new();
    query_source.push_str(locals_query);
    let highlights_query_offset = query_source.len();
    query_source.push_str(highlights_query);

    // Construct a single query by concatenating the three query strings, but record the
    // range of pattern indices that belong to each individual string.
    let query = Query::new(&language, &query_source)?;
    // Count patterns that came from the locals section: everything starting
    // before `highlights_query_offset`.
    let mut highlights_pattern_index = 0;
    for i in 0..(query.pattern_count()) {
        let pattern_offset = query.start_byte_for_pattern(i);
        if pattern_offset < highlights_query_offset {
            highlights_pattern_index += 1;
        }
    }

    let injections_query = Query::new(&language, injection_query)?;
    // Remember which injection patterns carry `(#set! injection.combined)`.
    let combined_injections_patterns = (0..injections_query.pattern_count())
        .filter(|&i| {
            injections_query
                .property_settings(i)
                .iter()
                .any(|s| &*s.key == "injection.combined")
        })
        .collect();

    // Find all of the highlighting patterns that are disabled for nodes that
    // have been identified as local variables.
    let non_local_variable_patterns = (0..query.pattern_count())
        .map(|i| {
            query
                .property_predicates(i)
                .iter()
                .any(|(prop, positive)| !*positive && prop.key.as_ref() == "local")
        })
        .collect();

    // Store the numeric ids for all of the special captures.
    let mut injection_content_capture_index = None;
    let mut injection_language_capture_index = None;
    let mut injection_filename_capture_index = None;
    let mut injection_shebang_capture_index = None;
    let mut local_def_capture_index = None;
    let mut local_def_value_capture_index = None;
    let mut local_ref_capture_index = None;
    let mut local_scope_capture_index = None;
    // Locals-related captures live in the combined highlights query...
    for (i, name) in query.capture_names().iter().enumerate() {
        let i = Some(i as u32);
        match *name {
            "local.definition" => local_def_capture_index = i,
            "local.definition-value" => local_def_value_capture_index = i,
            "local.reference" => local_ref_capture_index = i,
            "local.scope" => local_scope_capture_index = i,
            _ => {}
        }
    }

    // ...while injection captures live in the separate injections query.
    for (i, name) in injections_query.capture_names().iter().enumerate() {
        let i = Some(i as u32);
        match *name {
            "injection.content" => injection_content_capture_index = i,
            "injection.language" => injection_language_capture_index = i,
            "injection.filename" => injection_filename_capture_index = i,
            "injection.shebang" => injection_shebang_capture_index = i,
            _ => {}
        }
    }

    // No highlights are recognized until `configure` is called.
    let highlight_indices = ArcSwap::from_pointee(vec![None; query.capture_names().len()]);
    Ok(Self {
        language,
        query,
        injections_query,
        combined_injections_patterns,
        highlights_pattern_index,
        highlight_indices,
        non_local_variable_patterns,
        injection_content_capture_index,
        injection_language_capture_index,
        injection_filename_capture_index,
        injection_shebang_capture_index,
        local_scope_capture_index,
        local_def_capture_index,
        local_def_value_capture_index,
        local_ref_capture_index,
    })
}

/// Get a slice containing all of the highlight names used in the configuration.
pub fn names(&self) -> &[&str] {
    self.query.capture_names()
}

/// Set the list of recognized highlight names.
///
/// Tree-sitter syntax-highlighting queries specify highlights in the form of dot-separated
/// highlight names like `punctuation.bracket` and `function.method.builtin`. Consumers of
/// these queries can choose to recognize highlights with different levels of specificity.
/// For example, the string `function.builtin` will match against `function.builtin.constructor`
/// but will not match `function.method.builtin` and `function.method`.
///
/// When highlighting, results are returned as `Highlight` values, which contain the index
/// of the matched highlight in this list of highlight names.
pub fn configure(&self, recognized_names: &[String]) {
    let mut capture_parts = Vec::new();
    let indices: Vec<_> = self
        .query
        .capture_names()
        .iter()
        .map(move |capture_name| {
            capture_parts.clear();
            capture_parts.extend(capture_name.split('.'));

            // For each capture, find the recognized name that is the longest
            // dot-separated prefix of the capture name.
            let mut best_index = None;
            let mut best_match_len = 0;
            for (i, recognized_name) in recognized_names.iter().enumerate() {
                let mut len = 0;
                let mut matches = true;
                for (i, part) in recognized_name.split('.').enumerate() {
                    match capture_parts.get(i) {
                        Some(capture_part) if *capture_part == part => len += 1,
                        _ => {
                            matches = false;
                            break;
                        }
                    }
                }
                if matches && len > best_match_len {
                    best_index = Some(i);
                    best_match_len = len;
                }
            }
            best_index.map(Highlight)
        })
        .collect();

    self.highlight_indices.store(Arc::new(indices));
}

/// Extract the (language marker, content node) pair from one match of the
/// injections query, if present.
fn injection_pair<'a>(
    &self,
    query_match: &QueryMatch<'a, 'a>,
    source: RopeSlice<'a>,
) -> (Option>, Option>) {
    let mut injection_capture = None;
    let mut content_node = None;

    for capture in query_match.captures {
        let index = Some(capture.index);
        if index == self.injection_language_capture_index {
            injection_capture = Some(InjectionLanguageMarker::Name(
                source.byte_slice(capture.node.byte_range()),
            ));
        } else if index == self.injection_filename_capture_index {
            injection_capture = Some(InjectionLanguageMarker::Filename(
                source.byte_slice(capture.node.byte_range()),
            ));
        } else if index == self.injection_shebang_capture_index {
            let node_slice = source.byte_slice(capture.node.byte_range());

            // some languages allow space and newlines before the actual string content
            // so a shebang could be on either the first or second line
            let lines = if let Ok(end) = node_slice.try_line_to_byte(2) {
                node_slice.byte_slice(..end)
            } else {
                node_slice
            };

            // Compiled once and reused for every shebang capture.
            static SHEBANG_REGEX: Lazy = Lazy::new(|| rope::Regex::new(SHEBANG).unwrap());

            // The interpreter name is the regex's first capture group.
            injection_capture = SHEBANG_REGEX
                .captures_iter(lines.regex_input())
                .map(|cap| {
                    let cap = lines.byte_slice(cap.get_group(1).unwrap().range());
                    InjectionLanguageMarker::Shebang(cap)
                })
                .next()
        } else if index == self.injection_content_capture_index {
            content_node = Some(capture.node);
        }
    }

    (injection_capture, content_node)
}

/// Resolve a full injection match: the language marker, the content node,
/// and how the content node's children should be treated, taking
/// `#set!` pattern properties into account.
fn injection_for_match<'a>(
    &self,
    query: &'a Query,
    query_match: &QueryMatch<'a, 'a>,
    source: RopeSlice<'a>,
) -> (
    Option>,
    Option>,
    IncludedChildren,
) {
    let (mut injection_capture, content_node) = self.injection_pair(query_match, source);

    let mut included_children = IncludedChildren::default();
    for prop in query.property_settings(query_match.pattern_index) {
        match prop.key.as_ref() {
            // In addition to specifying the language name via the text of a
            // captured node, it can also be hard-coded via a `#set!` predicate
            // that sets the injection.language key.
            "injection.language" if injection_capture.is_none() => {
                injection_capture = prop
                    .value
                    .as_deref()
                    .map(InjectionLanguageMarker::LanguageId);
            }

            // By default, injections do not include the *children* of an
            // `injection.content` node - only the ranges that belong to the
            // node itself. This can be changed using a `#set!` predicate that
            // sets the `injection.include-children` key.
            "injection.include-children" => included_children = IncludedChildren::All,

            // Some queries might only exclude named children but include unnamed
            // children in their `injection.content` node. This can be enabled using
            // a `#set!` predicate that sets the `injection.include-unnamed-children` key.
            "injection.include-unnamed-children" => {
                included_children = IncludedChildren::Unnamed
            }
            _ => {}
        }
    }

    (injection_capture, content_node, included_children)
}
}

impl HighlightIterLayer<'_> {
    // First, sort scope boundaries by their byte offset in the document. At a
    // given position, emit scope endings before scope beginnings. Finally, emit
    // scope boundaries from deeper layers first.
    fn sort_key(&self) -> Option<(usize, bool, isize)> {
        // Depth is negated so that deeper layers sort first.
        let depth = -(self.depth as isize);
        let next_start = self
            .captures
            .borrow_mut()
            .peek()
            .map(|(m, i)| m.captures[*i].node.start_byte());
        let next_end = self.highlight_end_stack.last().cloned();
        match (next_start, next_end) {
            (Some(start), Some(end)) => {
                if start < end {
                    Some((start, true, depth))
                } else {
                    // Ties break toward the end event (`false` sorts before `true`).
                    Some((end, false, depth))
                }
            }
            (Some(i), None) => Some((i, true, depth)),
            (None, Some(j)) => Some((j, false, depth)),
            _ => None,
        }
    }
}

/// How an `injection.content` node's children are handled when computing
/// injection ranges (see `intersect_ranges`).
#[derive(Clone)]
enum IncludedChildren {
    None,
    All,
    Unnamed,
}

impl Default for IncludedChildren {
    fn default() -> Self {
        Self::None
    }
}

// Compute the ranges that should be included when parsing an injection.
// This takes into account three things:
// * `parent_ranges` - The ranges must all fall within the *current* layer's ranges.
// * `nodes` - Every injection takes place within a set of nodes. The injection ranges
//   are the ranges of those nodes.
// * `includes_children` - For some injections, the content nodes' children should be
//   excluded from the nested document, so that only the content nodes' *own* content
//   is reparsed. For other injections, the content nodes' entire ranges should be
//   reparsed, including the ranges of their children.
fn intersect_ranges( parent_ranges: &[Range], nodes: &[Node], included_children: IncludedChildren, ) -> Vec { let mut cursor = nodes[0].walk(); let mut result = Vec::new(); let mut parent_range_iter = parent_ranges.iter(); let mut parent_range = parent_range_iter .next() .expect("Layers should only be constructed with non-empty ranges vectors"); for node in nodes.iter() { let mut preceding_range = Range { start_byte: 0, start_point: Point::new(0, 0), end_byte: node.start_byte(), end_point: node.start_position(), }; let following_range = Range { start_byte: node.end_byte(), start_point: node.end_position(), end_byte: usize::MAX, end_point: Point::new(usize::MAX, usize::MAX), }; for excluded_range in node .children(&mut cursor) .filter_map(|child| match included_children { IncludedChildren::None => Some(child.range()), IncludedChildren::All => None, IncludedChildren::Unnamed => { if child.is_named() { Some(child.range()) } else { None } } }) .chain([following_range].iter().cloned()) { let mut range = Range { start_byte: preceding_range.end_byte, start_point: preceding_range.end_point, end_byte: excluded_range.start_byte, end_point: excluded_range.start_point, }; preceding_range = excluded_range; if range.end_byte < parent_range.start_byte { continue; } while parent_range.start_byte <= range.end_byte { if parent_range.end_byte > range.start_byte { if range.start_byte < parent_range.start_byte { range.start_byte = parent_range.start_byte; range.start_point = parent_range.start_point; } if parent_range.end_byte < range.end_byte { if range.start_byte < parent_range.end_byte { result.push(Range { start_byte: range.start_byte, start_point: range.start_point, end_byte: parent_range.end_byte, end_point: parent_range.end_point, }); } range.start_byte = parent_range.end_byte; range.start_point = parent_range.end_point; } else { if range.start_byte < range.end_byte { result.push(range); } break; } } if let Some(next_range) = parent_range_iter.next() { parent_range = next_range; 
} else { return result; } } } } result } impl HighlightIter<'_> { fn emit_event( &mut self, offset: usize, event: Option, ) -> Option> { let result; if self.byte_offset < offset { result = Some(Ok(HighlightEvent::Source { start: self.byte_offset, end: offset, })); self.byte_offset = offset; self.next_event = event; } else { result = event.map(Ok); } self.sort_layers(); result } fn sort_layers(&mut self) { while !self.layers.is_empty() { if let Some(sort_key) = self.layers[0].sort_key() { let mut i = 0; while i + 1 < self.layers.len() { if let Some(next_offset) = self.layers[i + 1].sort_key() { if next_offset < sort_key { i += 1; continue; } } else { let layer = self.layers.remove(i + 1); PARSER.with(|ts_parser| { let highlighter = &mut ts_parser.borrow_mut(); highlighter.cursors.push(layer.cursor); }); } break; } if i > 0 { self.layers[0..(i + 1)].rotate_left(1); } break; } else { let layer = self.layers.remove(0); PARSER.with(|ts_parser| { let highlighter = &mut ts_parser.borrow_mut(); highlighter.cursors.push(layer.cursor); }); } } } } impl Iterator for HighlightIter<'_> { type Item = Result; fn next(&mut self) -> Option { 'main: loop { // If we've already determined the next highlight boundary, just return it. if let Some(e) = self.next_event.take() { return Some(Ok(e)); } // Periodically check for cancellation, returning `Cancelled` error if the // cancellation flag was flipped. if let Some(cancellation_flag) = self.cancellation_flag { self.iter_count += 1; if self.iter_count >= CANCELLATION_CHECK_INTERVAL { self.iter_count = 0; if cancellation_flag.load(Ordering::Relaxed) != 0 { return Some(Err(Error::Cancelled)); } } } // If none of the layers have any more highlight boundaries, terminate. 
if self.layers.is_empty() { let len = self.source.len_bytes(); return if self.byte_offset < len { let result = Some(Ok(HighlightEvent::Source { start: self.byte_offset, end: len, })); self.byte_offset = len; result } else { None }; } // Get the next capture from whichever layer has the earliest highlight boundary. let range; let layer = &mut self.layers[0]; let captures = layer.captures.get_mut(); if let Some((next_match, capture_index)) = captures.peek() { let next_capture = next_match.captures[*capture_index]; range = next_capture.node.byte_range(); // If any previous highlight ends before this node starts, then before // processing this capture, emit the source code up until the end of the // previous highlight, and an end event for that highlight. if let Some(end_byte) = layer.highlight_end_stack.last().cloned() { if end_byte <= range.start { layer.highlight_end_stack.pop(); return self.emit_event(end_byte, Some(HighlightEvent::HighlightEnd)); } } } // If there are no more captures, then emit any remaining highlight end events. // And if there are none of those, then just advance to the end of the document. else if let Some(end_byte) = layer.highlight_end_stack.last().cloned() { layer.highlight_end_stack.pop(); return self.emit_event(end_byte, Some(HighlightEvent::HighlightEnd)); } else { return self.emit_event(self.source.len_bytes(), None); }; let (mut match_, capture_index) = captures.next().unwrap(); let mut capture = match_.captures[capture_index]; // Remove from the local scope stack any local scopes that have already ended. while range.start > layer.scope_stack.last().unwrap().range.end { layer.scope_stack.pop(); } // If this capture is for tracking local variables, then process the // local variable info. let mut reference_highlight = None; let mut definition_highlight = None; while match_.pattern_index < layer.config.highlights_pattern_index { // If the node represents a local scope, push a new local scope onto // the scope stack. 
if Some(capture.index) == layer.config.local_scope_capture_index { definition_highlight = None; let mut scope = LocalScope { inherits: true, range: range.clone(), local_defs: Vec::new(), }; for prop in layer.config.query.property_settings(match_.pattern_index) { if let "local.scope-inherits" = prop.key.as_ref() { scope.inherits = prop.value.as_ref().map_or(true, |r| r.as_ref() == "true"); } } layer.scope_stack.push(scope); } // If the node represents a definition, add a new definition to the // local scope at the top of the scope stack. else if Some(capture.index) == layer.config.local_def_capture_index { reference_highlight = None; let scope = layer.scope_stack.last_mut().unwrap(); let mut value_range = 0..0; for capture in match_.captures { if Some(capture.index) == layer.config.local_def_value_capture_index { value_range = capture.node.byte_range(); } } let name = byte_range_to_str(range.clone(), self.source); scope.local_defs.push(LocalDef { name, value_range, highlight: None, }); definition_highlight = scope.local_defs.last_mut().map(|s| &mut s.highlight); } // If the node represents a reference, then try to find the corresponding // definition in the scope stack. else if Some(capture.index) == layer.config.local_ref_capture_index && definition_highlight.is_none() { definition_highlight = None; let name = byte_range_to_str(range.clone(), self.source); for scope in layer.scope_stack.iter().rev() { if let Some(highlight) = scope.local_defs.iter().rev().find_map(|def| { if def.name == name && range.start >= def.value_range.end { Some(def.highlight) } else { None } }) { reference_highlight = highlight; break; } if !scope.inherits { break; } } } // Continue processing any additional matches for the same node. 
if let Some((next_match, next_capture_index)) = captures.peek() { let next_capture = next_match.captures[*next_capture_index]; if next_capture.node == capture.node { capture = next_capture; match_ = captures.next().unwrap().0; continue; } } self.sort_layers(); continue 'main; } // Otherwise, this capture must represent a highlight. // If this exact range has already been highlighted by an earlier pattern, or by // a different layer, then skip over this one. if let Some((last_start, last_end, last_depth)) = self.last_highlight_range { if range.start == last_start && range.end == last_end && layer.depth < last_depth { self.sort_layers(); continue 'main; } } // If the current node was found to be a local variable, then skip over any // highlighting patterns that are disabled for local variables. if definition_highlight.is_some() || reference_highlight.is_some() { while layer.config.non_local_variable_patterns[match_.pattern_index] { match_.remove(); if let Some((next_match, next_capture_index)) = captures.peek() { let next_capture = next_match.captures[*next_capture_index]; if next_capture.node == capture.node { capture = next_capture; match_ = captures.next().unwrap().0; continue; } } self.sort_layers(); continue 'main; } } // Use the last capture found for the current node, skipping over any // highlight patterns that also match this node. Captures // for a given node are ordered by pattern index, so these subsequent // captures are guaranteed to be for highlighting, not injections or // local variables. 
while let Some((next_match, next_capture_index)) = captures.peek() { let next_capture = next_match.captures[*next_capture_index]; if next_capture.node == capture.node { match_.remove(); capture = next_capture; match_ = captures.next().unwrap().0; } else { break; } } let current_highlight = layer.config.highlight_indices.load()[capture.index as usize]; // If this node represents a local definition, then store the current // highlight value on the local scope entry representing this node. if let Some(definition_highlight) = definition_highlight { *definition_highlight = current_highlight; } // Emit a scope start event and push the node's end position to the stack. if let Some(highlight) = reference_highlight.or(current_highlight) { self.last_highlight_range = Some((range.start, range.end, layer.depth)); layer.highlight_end_stack.push(range.end); return self .emit_event(range.start, Some(HighlightEvent::HighlightStart(highlight))); } self.sort_layers(); } } } #[derive(Debug, Clone)] pub enum InjectionLanguageMarker<'a> { /// The language is specified by `LanguageConfiguration`'s `language_id` field. /// /// This marker is used when a pattern sets the `injection.language` property, for example /// `(#set! injection.language "rust")`. LanguageId(&'a str), /// The language is specified in the document and captured by `@injection.language`. /// /// This is used for markdown code fences for example. While the `LanguageId` variant can be /// looked up by finding the language config that sets an `language_id`, this variant contains /// text from the document being highlighted, so the text is checked against each language's /// `injection_regex`. Name(RopeSlice<'a>), Filename(RopeSlice<'a>), Shebang(RopeSlice<'a>), } const SHEBANG: &str = r"#!\s*(?:\S*[/\\](?:env\s+(?:\-\S+\s+)*)?)?([^\s\.\d]+)"; pub struct Merge { iter: I, spans: Box)>>, next_event: Option, next_span: Option<(usize, std::ops::Range)>, queue: Vec, } /// Merge a list of spans into the highlight event stream. 
pub fn merge>( iter: I, spans: Vec<(usize, std::ops::Range)>, ) -> Merge { let spans = Box::new(spans.into_iter()); let mut merge = Merge { iter, spans, next_event: None, next_span: None, queue: Vec::new(), }; merge.next_event = merge.iter.next(); merge.next_span = merge.spans.next(); merge } impl> Iterator for Merge { type Item = HighlightEvent; fn next(&mut self) -> Option { use HighlightEvent::*; if let Some(event) = self.queue.pop() { return Some(event); } loop { match (self.next_event, &self.next_span) { // this happens when range is partially or fully offscreen (Some(Source { start, .. }), Some((span, range))) if start > range.start => { if start > range.end { self.next_span = self.spans.next(); } else { self.next_span = Some((*span, start..range.end)); }; } _ => break, } } match (self.next_event, &self.next_span) { (Some(HighlightStart(i)), _) => { self.next_event = self.iter.next(); Some(HighlightStart(i)) } (Some(HighlightEnd), _) => { self.next_event = self.iter.next(); Some(HighlightEnd) } (Some(Source { start, end }), Some((_, range))) if start < range.start => { let intersect = range.start.min(end); let event = Source { start, end: intersect, }; if end == intersect { // the event is complete self.next_event = self.iter.next(); } else { // subslice the event self.next_event = Some(Source { start: intersect, end, }); }; Some(event) } (Some(Source { start, end }), Some((span, range))) if start == range.start => { let intersect = range.end.min(end); let event = HighlightStart(Highlight(*span)); // enqueue in reverse order self.queue.push(HighlightEnd); self.queue.push(Source { start, end: intersect, }); if end == intersect { // the event is complete self.next_event = self.iter.next(); } else { // subslice the event self.next_event = Some(Source { start: intersect, end, }); }; if intersect == range.end { self.next_span = self.spans.next(); } else { self.next_span = Some((*span, intersect..range.end)); } Some(event) } (Some(event), None) => { self.next_event 
= self.iter.next(); Some(event) } // Can happen if cursor at EOF and/or diagnostic reaches past the end. // We need to actually emit events for the cursor-at-EOF situation, // even though the range is past the end of the text. This needs to be // handled appropriately by the drawing code by not assuming that // all `Source` events point to valid indices in the rope. (None, Some((span, range))) => { let event = HighlightStart(Highlight(*span)); self.queue.push(HighlightEnd); self.queue.push(Source { start: range.start, end: range.end, }); self.next_span = self.spans.next(); Some(event) } (None, None) => None, e => unreachable!("{:?}", e), } } } fn node_is_visible(node: &Node) -> bool { node.is_missing() || (node.is_named() && node.language().node_kind_is_visible(node.kind_id())) } fn format_anonymous_node_kind(kind: &str) -> Cow { if kind.contains('"') { Cow::Owned(kind.replace('"', "\\\"")) } else { Cow::Borrowed(kind) } } pub fn pretty_print_tree(fmt: &mut W, node: Node) -> fmt::Result { if node.child_count() == 0 { if node_is_visible(&node) { write!(fmt, "({})", node.kind()) } else { write!(fmt, "\"{}\"", format_anonymous_node_kind(node.kind())) } } else { pretty_print_tree_impl(fmt, &mut node.walk(), 0) } } fn pretty_print_tree_impl( fmt: &mut W, cursor: &mut tree_sitter::TreeCursor, depth: usize, ) -> fmt::Result { let node = cursor.node(); let visible = node_is_visible(&node); if visible { let indentation_columns = depth * 2; write!(fmt, "{:indentation_columns$}", "")?; if let Some(field_name) = cursor.field_name() { write!(fmt, "{}: ", field_name)?; } write!(fmt, "({}", node.kind())?; } else { write!(fmt, " \"{}\"", format_anonymous_node_kind(node.kind()))?; } // Handle children. 
if cursor.goto_first_child() { loop { if node_is_visible(&cursor.node()) { fmt.write_char('\n')?; } pretty_print_tree_impl(fmt, cursor, depth + 1)?; if !cursor.goto_next_sibling() { break; } } let moved = cursor.goto_parent(); // The parent of the first child must exist, and must be `node`. debug_assert!(moved); debug_assert!(cursor.node() == node); } if visible { fmt.write_char(')')?; } Ok(()) } #[cfg(test)] mod test { use super::*; use crate::{Rope, Transaction}; #[test] fn test_textobject_queries() { let query_str = r#" (line_comment)+ @quantified_nodes ((line_comment)+) @quantified_nodes_grouped ((line_comment) (line_comment)) @multiple_nodes_grouped "#; let source = Rope::from_str( r#" /// a comment on /// multiple lines "#, ); let loader = Loader::new(Configuration { language: vec![], language_server: HashMap::new(), }) .unwrap(); let language = get_language("rust").unwrap(); let query = Query::new(&language, query_str).unwrap(); let textobject = TextObjectQuery { query }; let mut cursor = QueryCursor::new(); let config = HighlightConfiguration::new(language, "", "", "").unwrap(); let syntax = Syntax::new( source.slice(..), Arc::new(config), Arc::new(ArcSwap::from_pointee(loader)), ) .unwrap(); let root = syntax.tree().root_node(); let mut test = |capture, range| { let matches: Vec<_> = textobject .capture_nodes(capture, root, source.slice(..), &mut cursor) .unwrap() .collect(); assert_eq!( matches[0].byte_range(), range, "@{} expected {:?}", capture, range ) }; test("quantified_nodes", 1..37); // NOTE: Enable after implementing proper node group capturing // test("quantified_nodes_grouped", 1..37); // test("multiple_nodes_grouped", 1..37); } #[test] fn test_parser() { let highlight_names: Vec = [ "attribute", "constant", "function.builtin", "function", "keyword", "operator", "property", "punctuation", "punctuation.bracket", "punctuation.delimiter", "string", "string.special", "tag", "type", "type.builtin", "variable", "variable.builtin", 
"variable.parameter", ] .iter() .cloned() .map(String::from) .collect(); let loader = Loader::new(Configuration { language: vec![], language_server: HashMap::new(), }) .unwrap(); let language = get_language("rust").unwrap(); let config = HighlightConfiguration::new( language, &std::fs::read_to_string("../runtime/grammars/sources/rust/queries/highlights.scm") .unwrap(), &std::fs::read_to_string("../runtime/grammars/sources/rust/queries/injections.scm") .unwrap(), "", // locals.scm ) .unwrap(); config.configure(&highlight_names); let source = Rope::from_str( " struct Stuff {} fn main() {} ", ); let syntax = Syntax::new( source.slice(..), Arc::new(config), Arc::new(ArcSwap::from_pointee(loader)), ) .unwrap(); let tree = syntax.tree(); let root = tree.root_node(); assert_eq!(root.kind(), "source_file"); assert_eq!( root.to_sexp(), concat!( "(source_file ", "(struct_item name: (type_identifier) body: (field_declaration_list)) ", "(function_item name: (identifier) parameters: (parameters) body: (block)))" ) ); let struct_node = root.child(0).unwrap(); assert_eq!(struct_node.kind(), "struct_item"); } #[test] fn test_input_edits() { use tree_sitter::InputEdit; let doc = Rope::from("hello world!\ntest 123"); let transaction = Transaction::change( &doc, vec![(6, 11, Some("test".into())), (12, 17, None)].into_iter(), ); let edits = generate_edits(doc.slice(..), transaction.changes()); // transaction.apply(&mut state); assert_eq!( edits, &[ InputEdit { start_byte: 6, old_end_byte: 11, new_end_byte: 10, start_position: Point { row: 0, column: 6 }, old_end_position: Point { row: 0, column: 11 }, new_end_position: Point { row: 0, column: 10 } }, InputEdit { start_byte: 12, old_end_byte: 17, new_end_byte: 12, start_position: Point { row: 0, column: 12 }, old_end_position: Point { row: 1, column: 4 }, new_end_position: Point { row: 0, column: 12 } } ] ); // Testing with the official example from tree-sitter let mut doc = Rope::from("fn test() {}"); let transaction = 
Transaction::change(&doc, vec![(8, 8, Some("a: u32".into()))].into_iter()); let edits = generate_edits(doc.slice(..), transaction.changes()); transaction.apply(&mut doc); assert_eq!(doc, "fn test(a: u32) {}"); assert_eq!( edits, &[InputEdit { start_byte: 8, old_end_byte: 8, new_end_byte: 14, start_position: Point { row: 0, column: 8 }, old_end_position: Point { row: 0, column: 8 }, new_end_position: Point { row: 0, column: 14 } }] ); } #[track_caller] fn assert_pretty_print( language_name: &str, source: &str, expected: &str, start: usize, end: usize, ) { let source = Rope::from_str(source); let loader = Loader::new(Configuration { language: vec![], language_server: HashMap::new(), }) .unwrap(); let language = get_language(language_name).unwrap(); let config = HighlightConfiguration::new(language, "", "", "").unwrap(); let syntax = Syntax::new( source.slice(..), Arc::new(config), Arc::new(ArcSwap::from_pointee(loader)), ) .unwrap(); let root = syntax .tree() .root_node() .descendant_for_byte_range(start, end) .unwrap(); let mut output = String::new(); pretty_print_tree(&mut output, root).unwrap(); assert_eq!(expected, output); } #[test] fn test_pretty_print() { let source = r#"// Hello"#; assert_pretty_print("rust", source, "(line_comment \"//\")", 0, source.len()); // A large tree should be indented with fields: let source = r#"fn main() { println!("Hello, World!"); }"#; assert_pretty_print( "rust", source, concat!( "(function_item \"fn\"\n", " name: (identifier)\n", " parameters: (parameters \"(\" \")\")\n", " body: (block \"{\"\n", " (expression_statement\n", " (macro_invocation\n", " macro: (identifier) \"!\"\n", " (token_tree \"(\"\n", " (string_literal \"\\\"\"\n", " (string_content) \"\\\"\") \")\")) \";\") \"}\"))", ), 0, source.len(), ); // Selecting a token should print just that token: let source = r#"fn main() {}"#; assert_pretty_print("rust", source, r#""fn""#, 0, 1); // Error nodes are printed as errors: let source = r#"}{"#; 
assert_pretty_print("rust", source, "(ERROR \"}\" \"{\")", 0, source.len()); // Fields broken under unnamed nodes are determined correctly. // In the following source, `object` belongs to the `singleton_method` // rule but `name` and `body` belong to an unnamed helper `_method_rest`. // This can cause a bug with a pretty-printing implementation that // uses `Node::field_name_for_child` to determine field names but is // fixed when using `tree_sitter::TreeCursor::field_name`. let source = "def self.method_name true end"; assert_pretty_print( "ruby", source, concat!( "(singleton_method \"def\"\n", " object: (self) \".\"\n", " name: (identifier)\n", " body: (body_statement\n", " (true)) \"end\")" ), 0, source.len(), ); } #[test] fn test_load_runtime_file() { // Test to make sure we can load some data from the runtime directory. let contents = load_runtime_file("rust", "indents.scm").unwrap(); assert!(!contents.is_empty()); let results = load_runtime_file("rust", "does-not-exist"); assert!(results.is_err()); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/syntax/000077500000000000000000000000001500614314100237615ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/syntax/tree_cursor.rs000066400000000000000000000167101500614314100266700ustar00rootroot00000000000000use std::{cmp::Reverse, ops::Range}; use super::{LanguageLayer, LayerId}; use slotmap::HopSlotMap; use tree_sitter::Node; /// The byte range of an injection layer. /// /// Injection ranges may overlap, but all overlapping parts are subsets of their parent ranges. /// This allows us to sort the ranges ahead of time in order to efficiently find a range that /// contains a point with maximum depth. 
#[derive(Debug)] struct InjectionRange { start: usize, end: usize, layer_id: LayerId, depth: u32, } pub struct TreeCursor<'a> { layers: &'a HopSlotMap, root: LayerId, current: LayerId, injection_ranges: Vec, // TODO: Ideally this would be a `tree_sitter::TreeCursor<'a>` but // that returns very surprising results in testing. cursor: Node<'a>, } impl<'a> TreeCursor<'a> { pub(super) fn new(layers: &'a HopSlotMap, root: LayerId) -> Self { let mut injection_ranges = Vec::new(); for (layer_id, layer) in layers.iter() { // Skip the root layer if layer.parent.is_none() { continue; } for byte_range in layer.ranges.iter() { let range = InjectionRange { start: byte_range.start_byte, end: byte_range.end_byte, layer_id, depth: layer.depth, }; injection_ranges.push(range); } } injection_ranges.sort_unstable_by_key(|range| (range.end, Reverse(range.depth))); let cursor = layers[root].tree().root_node(); Self { layers, root, current: root, injection_ranges, cursor, } } pub fn node(&self) -> Node<'a> { self.cursor } pub fn goto_parent(&mut self) -> bool { if let Some(parent) = self.node().parent() { self.cursor = parent; return true; } // If we are already on the root layer, we cannot ascend. if self.current == self.root { return false; } // Ascend to the parent layer. let range = self.node().byte_range(); let parent_id = self.layers[self.current] .parent .expect("non-root layers have a parent"); self.current = parent_id; let root = self.layers[self.current].tree().root_node(); self.cursor = root .descendant_for_byte_range(range.start, range.end) .unwrap_or(root); true } pub fn goto_parent_with

(&mut self, predicate: P) -> bool where P: Fn(&Node) -> bool, { while self.goto_parent() { if predicate(&self.node()) { return true; } } false } /// Finds the injection layer that has exactly the same range as the given `range`. fn layer_id_of_byte_range(&self, search_range: Range) -> Option { let start_idx = self .injection_ranges .partition_point(|range| range.end < search_range.end); self.injection_ranges[start_idx..] .iter() .take_while(|range| range.end == search_range.end) .find_map(|range| (range.start == search_range.start).then_some(range.layer_id)) } fn goto_first_child_impl(&mut self, named: bool) -> bool { // Check if the current node's range is an exact injection layer range. if let Some(layer_id) = self .layer_id_of_byte_range(self.node().byte_range()) .filter(|&layer_id| layer_id != self.current) { // Switch to the child layer. self.current = layer_id; self.cursor = self.layers[self.current].tree().root_node(); return true; } let child = if named { self.cursor.named_child(0) } else { self.cursor.child(0) }; if let Some(child) = child { // Otherwise descend in the current tree. 
self.cursor = child; true } else { false } } pub fn goto_first_child(&mut self) -> bool { self.goto_first_child_impl(false) } pub fn goto_first_named_child(&mut self) -> bool { self.goto_first_child_impl(true) } fn goto_next_sibling_impl(&mut self, named: bool) -> bool { let sibling = if named { self.cursor.next_named_sibling() } else { self.cursor.next_sibling() }; if let Some(sibling) = sibling { self.cursor = sibling; true } else { false } } pub fn goto_next_sibling(&mut self) -> bool { self.goto_next_sibling_impl(false) } pub fn goto_next_named_sibling(&mut self) -> bool { self.goto_next_sibling_impl(true) } fn goto_prev_sibling_impl(&mut self, named: bool) -> bool { let sibling = if named { self.cursor.prev_named_sibling() } else { self.cursor.prev_sibling() }; if let Some(sibling) = sibling { self.cursor = sibling; true } else { false } } pub fn goto_prev_sibling(&mut self) -> bool { self.goto_prev_sibling_impl(false) } pub fn goto_prev_named_sibling(&mut self) -> bool { self.goto_prev_sibling_impl(true) } /// Finds the injection layer that contains the given start-end range. fn layer_id_containing_byte_range(&self, start: usize, end: usize) -> LayerId { let start_idx = self .injection_ranges .partition_point(|range| range.end < end); self.injection_ranges[start_idx..] .iter() .take_while(|range| range.start < end || range.depth > 1) .find_map(|range| (range.start <= start).then_some(range.layer_id)) .unwrap_or(self.root) } pub fn reset_to_byte_range(&mut self, start: usize, end: usize) { self.current = self.layer_id_containing_byte_range(start, end); let root = self.layers[self.current].tree().root_node(); self.cursor = root.descendant_for_byte_range(start, end).unwrap_or(root); } /// Returns an iterator over the children of the node the TreeCursor is on /// at the time this is called. 
pub fn children(&'a mut self) -> ChildIter<'a> { let parent = self.node(); ChildIter { cursor: self, parent, named: false, } } /// Returns an iterator over the named children of the node the TreeCursor is on /// at the time this is called. pub fn named_children(&'a mut self) -> ChildIter<'a> { let parent = self.node(); ChildIter { cursor: self, parent, named: true, } } } pub struct ChildIter<'n> { cursor: &'n mut TreeCursor<'n>, parent: Node<'n>, named: bool, } impl<'n> Iterator for ChildIter<'n> { type Item = Node<'n>; fn next(&mut self) -> Option { // first iteration, just visit the first child if self.cursor.node() == self.parent { self.cursor .goto_first_child_impl(self.named) .then(|| self.cursor.node()) } else { self.cursor .goto_next_sibling_impl(self.named) .then(|| self.cursor.node()) } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/test.rs000066400000000000000000000272421500614314100237670ustar00rootroot00000000000000//! Test helpers. use crate::{Range, Selection}; use ropey::Rope; use smallvec::SmallVec; use std::cmp::Reverse; use unicode_segmentation::UnicodeSegmentation; /// Convert annotated test string to test string and selection. /// /// `#[|` for primary selection with head before anchor followed by `]#`. /// `#(|` for secondary selection with head before anchor followed by `)#`. /// `#[` for primary selection with head after anchor followed by `|]#`. /// `#(` for secondary selection with head after anchor followed by `|)#`. /// /// If the selection contains any LF or CRLF sequences, which are immediately /// followed by the same grapheme, then the subsequent one is removed. This is /// to allow representing having the cursor over the end of the line. 
/// /// # Examples /// /// ``` /// use helix_core::{Range, Selection, test::print}; /// use smallvec::smallvec; /// /// assert_eq!( /// print("#[a|]#b#(|c)#"), /// ("abc".to_owned(), Selection::new(smallvec![Range::new(0, 1), Range::new(3, 2)], 0)) /// ); /// ``` /// /// # Panics /// /// Panics when missing primary or appeared more than once. /// Panics when missing head or anchor. /// Panics when head come after head or anchor come after anchor. pub fn print(s: &str) -> (String, Selection) { let mut primary_idx = None; let mut ranges = SmallVec::new(); let mut iter = UnicodeSegmentation::graphemes(s, true).peekable(); let mut left = String::with_capacity(s.len()); 'outer: while let Some(c) = iter.next() { let start = left.chars().count(); if c != "#" { left.push_str(c); continue; } let (is_primary, close_pair) = match iter.next() { Some("[") => (true, "]"), Some("(") => (false, ")"), Some(ch) => { left.push('#'); left.push_str(ch); continue; } None => break, }; if is_primary && primary_idx.is_some() { panic!("primary `#[` already appeared {:?} {:?}", left, s); } let head_at_beg = iter.next_if_eq(&"|").is_some(); let last_grapheme = |s: &str| { UnicodeSegmentation::graphemes(s, true) .next_back() .map(String::from) }; while let Some(c) = iter.next() { let next = iter.peek(); let mut prev = last_grapheme(left.as_str()); if !(c == close_pair && next == Some(&"#")) { left.push_str(c); continue; } if !head_at_beg { match &prev { Some(p) if p != "|" => { left.push_str(c); continue; } Some(p) if p == "|" => { left.pop().unwrap(); // pop the | prev = last_grapheme(left.as_str()); } _ => (), } } iter.next(); // skip "#" let next = iter.peek(); // skip explicit line end inside selection if (prev == Some(String::from("\r\n")) || prev == Some(String::from("\n"))) && next.map(|n| String::from(*n)) == prev { iter.next(); } if is_primary { primary_idx = Some(ranges.len()); } let (anchor, head) = match head_at_beg { true => (left.chars().count(), start), false => (start, 
left.chars().count()), }; ranges.push(Range::new(anchor, head)); continue 'outer; } if head_at_beg { panic!("missing end `{}#` {:?} {:?}", close_pair, left, s); } else { panic!("missing end `|{}#` {:?} {:?}", close_pair, left, s); } } let primary = match primary_idx { Some(i) => i, None => panic!("missing primary `#[|]#` {:?}", s), }; let selection = Selection::new(ranges, primary); (left, selection) } /// Convert test string and selection to annotated test string. /// /// `#[|` for primary selection with head before anchor followed by `]#`. /// `#(|` for secondary selection with head before anchor followed by `)#`. /// `#[` for primary selection with head after anchor followed by `|]#`. /// `#(` for secondary selection with head after anchor followed by `|)#`. /// /// # Examples /// /// ``` /// use helix_core::{Range, Selection, test::plain}; /// use smallvec::smallvec; /// /// assert_eq!( /// plain("abc", &Selection::new(smallvec![Range::new(0, 1), Range::new(3, 2)], 0)), /// "#[a|]#b#(|c)#".to_owned() /// ); /// ``` pub fn plain>(s: R, selection: &Selection) -> String { let s = s.into(); let primary = selection.primary_index(); let mut out = String::with_capacity(s.len_bytes() + 5 * selection.len()); out.push_str(&s.to_string()); let mut insertion: Vec<_> = selection .iter() .enumerate() .flat_map(|(i, range)| { // sort like this before reversed so anchor < head later match (range.anchor < range.head, i == primary) { (true, true) => [(range.anchor, "#["), (range.head, "|]#")], (true, false) => [(range.anchor, "#("), (range.head, "|)#")], (false, true) => [(range.anchor, "]#"), (range.head, "#[|")], (false, false) => [(range.anchor, ")#"), (range.head, "#(|")], } }) .map(|(char_idx, marker)| (s.char_to_byte(char_idx), marker)) .collect(); // insert in reverse order insertion.sort_unstable_by_key(|k| Reverse(k.0)); for (i, s) in insertion { out.insert_str(i, s); } out } #[cfg(test)] #[allow(clippy::module_inception)] mod test { use super::*; #[test] fn 
print_single() { assert_eq!( (String::from("hello"), Selection::single(1, 0)), print("#[|h]#ello") ); assert_eq!( (String::from("hello"), Selection::single(0, 1)), print("#[h|]#ello") ); assert_eq!( (String::from("hello"), Selection::single(4, 0)), print("#[|hell]#o") ); assert_eq!( (String::from("hello"), Selection::single(0, 4)), print("#[hell|]#o") ); assert_eq!( (String::from("hello"), Selection::single(5, 0)), print("#[|hello]#") ); assert_eq!( (String::from("hello"), Selection::single(0, 5)), print("#[hello|]#") ); } #[test] fn print_multi() { assert_eq!( ( String::from("hello"), Selection::new( SmallVec::from_slice(&[Range::new(1, 0), Range::new(5, 4)]), 0 ) ), print("#[|h]#ell#(|o)#") ); assert_eq!( ( String::from("hello"), Selection::new( SmallVec::from_slice(&[Range::new(0, 1), Range::new(4, 5)]), 0 ) ), print("#[h|]#ell#(o|)#") ); assert_eq!( ( String::from("hello"), Selection::new( SmallVec::from_slice(&[Range::new(2, 0), Range::new(5, 3)]), 0 ) ), print("#[|he]#l#(|lo)#") ); assert_eq!( ( String::from("hello\r\nhello\r\nhello\r\n"), Selection::new( SmallVec::from_slice(&[ Range::new(7, 5), Range::new(21, 19), Range::new(14, 12) ]), 0 ) ), print("hello#[|\r\n]#hello#(|\r\n)#hello#(|\r\n)#") ); } #[test] fn print_multi_byte_code_point() { assert_eq!( (String::from("„“"), Selection::single(1, 0)), print("#[|„]#“") ); assert_eq!( (String::from("„“"), Selection::single(2, 1)), print("„#[|“]#") ); assert_eq!( (String::from("„“"), Selection::single(0, 1)), print("#[„|]#“") ); assert_eq!( (String::from("„“"), Selection::single(1, 2)), print("„#[“|]#") ); assert_eq!( (String::from("they said „hello“"), Selection::single(11, 10)), print("they said #[|„]#hello“") ); } #[test] fn print_multi_code_point_grapheme() { assert_eq!( ( String::from("hello 👨‍👩‍👧‍👦 goodbye"), Selection::single(13, 6) ), print("hello #[|👨‍👩‍👧‍👦]# goodbye") ); } #[test] fn plain_single() { assert_eq!("#[|h]#ello", plain("hello", &Selection::single(1, 0))); assert_eq!("#[h|]#ello", 
plain("hello", &Selection::single(0, 1))); assert_eq!("#[|hell]#o", plain("hello", &Selection::single(4, 0))); assert_eq!("#[hell|]#o", plain("hello", &Selection::single(0, 4))); assert_eq!("#[|hello]#", plain("hello", &Selection::single(5, 0))); assert_eq!("#[hello|]#", plain("hello", &Selection::single(0, 5))); } #[test] fn plain_multi() { assert_eq!( plain( "hello", &Selection::new( SmallVec::from_slice(&[Range::new(1, 0), Range::new(5, 4)]), 0 ) ), String::from("#[|h]#ell#(|o)#") ); assert_eq!( plain( "hello", &Selection::new( SmallVec::from_slice(&[Range::new(0, 1), Range::new(4, 5)]), 0 ) ), String::from("#[h|]#ell#(o|)#") ); assert_eq!( plain( "hello", &Selection::new( SmallVec::from_slice(&[Range::new(2, 0), Range::new(5, 3)]), 0 ) ), String::from("#[|he]#l#(|lo)#") ); assert_eq!( plain( "hello\r\nhello\r\nhello\r\n", &Selection::new( SmallVec::from_slice(&[ Range::new(7, 5), Range::new(21, 19), Range::new(14, 12) ]), 0 ) ), String::from("hello#[|\r\n]#hello#(|\r\n)#hello#(|\r\n)#") ); } #[test] fn plain_multi_byte_code_point() { assert_eq!( plain("„“", &Selection::single(1, 0)), String::from("#[|„]#“") ); assert_eq!( plain("„“", &Selection::single(2, 1)), String::from("„#[|“]#") ); assert_eq!( plain("„“", &Selection::single(0, 1)), String::from("#[„|]#“") ); assert_eq!( plain("„“", &Selection::single(1, 2)), String::from("„#[“|]#") ); assert_eq!( plain("they said „hello“", &Selection::single(11, 10)), String::from("they said #[|„]#hello“") ); } #[test] fn plain_multi_code_point_grapheme() { assert_eq!( plain("hello 👨‍👩‍👧‍👦 goodbye", &Selection::single(13, 6)), String::from("hello #[|👨‍👩‍👧‍👦]# goodbye") ); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/text_annotations.rs000066400000000000000000000366621500614314100264170ustar00rootroot00000000000000use std::cell::Cell; use std::cmp::Ordering; use std::fmt::Debug; use std::ops::Range; use std::ptr::NonNull; use crate::doc_formatter::FormattedGrapheme; use crate::syntax::Highlight; use 
crate::{Position, Tendril}; /// An inline annotation is continuous text shown /// on the screen before the grapheme that starts at /// `char_idx` #[derive(Debug, Clone)] pub struct InlineAnnotation { pub text: Tendril, pub char_idx: usize, } impl InlineAnnotation { pub fn new(char_idx: usize, text: impl Into) -> Self { Self { char_idx, text: text.into(), } } } /// Represents a **single Grapheme** that is part of the document /// that start at `char_idx` that will be replaced with /// a different `grapheme`. /// If `grapheme` contains multiple graphemes the text /// will render incorrectly. /// If you want to overlay multiple graphemes simply /// use multiple `Overlays`. /// /// # Examples /// /// The following examples are valid overlays for the following text: /// /// `aX͎̊͢͜͝͡bc` /// /// ``` /// use helix_core::text_annotations::Overlay; /// /// // replaces a /// Overlay::new(0, "X"); /// /// // replaces X͎̊͢͜͝͡ /// Overlay::new(1, "\t"); /// /// // replaces b /// Overlay::new(6, "X̢̢̟͖̲͌̋̇͑͝"); /// ``` /// /// The following examples are invalid uses /// /// ``` /// use helix_core::text_annotations::Overlay; /// /// // overlay is not aligned at grapheme boundary /// Overlay::new(3, "x"); /// /// // overlay contains multiple graphemes /// Overlay::new(0, "xy"); /// ``` #[derive(Debug, Clone)] pub struct Overlay { pub char_idx: usize, pub grapheme: Tendril, } impl Overlay { pub fn new(char_idx: usize, grapheme: impl Into) -> Self { Self { char_idx, grapheme: grapheme.into(), } } } /// Line annotations allow inserting virtual text lines between normal text /// lines. These lines can be filled with text in the rendering code as their /// contents have no effect beyond visual appearance. /// /// The height of virtual text is usually not known ahead of time as virtual /// text often requires softwrapping. Furthermore the height of some virtual /// text like side-by-side diffs depends on the height of the text (again /// influenced by softwrap) and other virtual text. 
Therefore line annotations /// are computed on the fly instead of ahead of time like other annotations. /// /// The core of this trait `insert_virtual_lines` function. It is called at the /// end of every visual line and allows the `LineAnnotation` to insert empty /// virtual lines. Apart from that the `LineAnnotation` trait has multiple /// methods that allow it to track anchors in the document. /// /// When a new traversal of a document starts `reset_pos` is called. Afterwards /// the other functions are called with indices that are larger then the /// one passed to `reset_pos`. This allows performing a binary search (use /// `partition_point`) in `reset_pos` once and then to only look at the next /// anchor during each method call. /// /// The `reset_pos`, `skip_conceal` and `process_anchor` functions all return a /// `char_idx` anchor. This anchor is stored when transversing the document and /// when the grapheme at the anchor is traversed the `process_anchor` function /// is called. /// /// # Note /// /// All functions only receive immutable references to `self`. /// `LineAnnotation`s that want to store an internal position or /// state of some kind should use `Cell`. Using interior mutability for /// caches is preferable as otherwise a lot of lifetimes become invariant /// which complicates APIs a lot. pub trait LineAnnotation { /// Resets the internal position to `char_idx`. This function is called /// when a new traversal of a document starts. /// /// All `char_idx` passed to `insert_virtual_lines` are strictly monotonically increasing /// with the first `char_idx` greater or equal to the `char_idx` /// passed to this function. /// /// # Returns /// /// The `char_idx` of the next anchor this `LineAnnotation` is interested in, /// replaces the currently registered anchor. 
Return `usize::MAX` to ignore fn reset_pos(&mut self, _char_idx: usize) -> usize { usize::MAX } /// Called when a text is concealed that contains an anchor registered by this `LineAnnotation`. /// In this case the line decorations **must** ensure that virtual text anchored within that /// char range is skipped. /// /// # Returns /// /// The `char_idx` of the next anchor this `LineAnnotation` is interested in, /// **after the end of conceal_end_char_idx** /// replaces the currently registered anchor. Return `usize::MAX` to ignore fn skip_concealed_anchors(&mut self, conceal_end_char_idx: usize) -> usize { self.reset_pos(conceal_end_char_idx) } /// Process an anchor (horizontal position is provided) and returns the next anchor. /// /// # Returns /// /// The `char_idx` of the next anchor this `LineAnnotation` is interested in, /// replaces the currently registered anchor. Return `usize::MAX` to ignore fn process_anchor(&mut self, _grapheme: &FormattedGrapheme) -> usize { usize::MAX } /// This function is called at the end of a visual line to insert virtual text /// /// # Returns /// /// The number of additional virtual lines to reserve /// /// # Note /// /// The `line_end_visual_pos` parameter indicates the visual vertical distance /// from the start of block where the traversal starts. This includes the offset /// from other `LineAnnotations`. This allows inline annotations to consider /// the height of the text and "align" two different documents (like for side /// by side diffs). 
These annotations that want to "align" two documents should /// therefore be added last so that other virtual text is also considered while aligning fn insert_virtual_lines( &mut self, line_end_char_idx: usize, line_end_visual_pos: Position, doc_line: usize, ) -> Position; } #[derive(Debug)] struct Layer<'a, A, M> { annotations: &'a [A], current_index: Cell, metadata: M, } impl Clone for Layer<'_, A, M> { fn clone(&self) -> Self { Layer { annotations: self.annotations, current_index: self.current_index.clone(), metadata: self.metadata.clone(), } } } impl Layer<'_, A, M> { pub fn reset_pos(&self, char_idx: usize, get_char_idx: impl Fn(&A) -> usize) { let new_index = self .annotations .partition_point(|annot| get_char_idx(annot) < char_idx); self.current_index.set(new_index); } pub fn consume(&self, char_idx: usize, get_char_idx: impl Fn(&A) -> usize) -> Option<&A> { let annot = self.annotations.get(self.current_index.get())?; debug_assert!(get_char_idx(annot) >= char_idx); if get_char_idx(annot) == char_idx { self.current_index.set(self.current_index.get() + 1); Some(annot) } else { None } } } impl<'a, A, M> From<(&'a [A], M)> for Layer<'a, A, M> { fn from((annotations, metadata): (&'a [A], M)) -> Layer<'a, A, M> { Layer { annotations, current_index: Cell::new(0), metadata, } } } fn reset_pos(layers: &[Layer], pos: usize, get_pos: impl Fn(&A) -> usize) { for layer in layers { layer.reset_pos(pos, &get_pos) } } /// Safety: We store LineAnnotation in a NonNull pointer. This is necessary to work /// around an unfortunate inconsistency in rusts variance system that unnnecesarily /// makes the lifetime invariant if implemented with safe code. This makes the /// DocFormatter API very cumbersome/basically impossible to work with. /// /// Normally object types `dyn Foo + 'a` are covariant so if we used `Box` below /// everything would be alright. However we want to use `Cell>` /// to be able to call the mutable function on `LineAnnotation`. 
The problem is that /// some types like `Cell` make all their arguments invariant. This is important for soundness /// normally for the same reasons that `&'a mut T` is invariant over `T` /// (see ). However for `&'a mut` (`dyn Foo + 'b`) /// there is a specical rule in the language to make `'b` covariant (otherwise trait objects would be /// super annoying to use). See for /// why this is sound. Sadly that rule doesn't apply to `Cell` /// (or other invariant types like `UnsafeCell` or `*mut (dyn Foo + 'a)`). /// /// We sidestep the problem by using `NonNull` which is covariant. In the /// special case of trait objects this is sound (easily checked by adding a /// `PhantomData<&'a mut Foo + 'a)>` field). We don't need an explicit `Cell` /// type here because we never hand out any refereces to the trait objects. That /// means any reference to the pointer can create a valid multable reference /// that is covariant over `'a` (or in other words it's a raw pointer, as long as /// we don't hand out references we are free to do whatever we want). struct RawBox(NonNull); impl RawBox { /// Safety: Only a single mutable reference /// created by this function may exist at a given time. #[allow(clippy::mut_from_ref)] unsafe fn get(&self) -> &mut T { &mut *self.0.as_ptr() } } impl From> for RawBox { fn from(box_: Box) -> Self { // obviously safe because Box::into_raw never returns null unsafe { Self(NonNull::new_unchecked(Box::into_raw(box_))) } } } impl Drop for RawBox { fn drop(&mut self) { unsafe { drop(Box::from_raw(self.0.as_ptr())) } } } /// Annotations that change that is displayed when the document is render. /// Also commonly called virtual text. 
#[derive(Default)] pub struct TextAnnotations<'a> { inline_annotations: Vec>>, overlays: Vec>>, line_annotations: Vec<(Cell, RawBox)>, } impl Debug for TextAnnotations<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("TextAnnotations") .field("inline_annotations", &self.inline_annotations) .field("overlays", &self.overlays) .finish_non_exhaustive() } } impl<'a> TextAnnotations<'a> { /// Prepare the TextAnnotations for iteration starting at char_idx pub fn reset_pos(&self, char_idx: usize) { reset_pos(&self.inline_annotations, char_idx, |annot| annot.char_idx); reset_pos(&self.overlays, char_idx, |annot| annot.char_idx); for (next_anchor, layer) in &self.line_annotations { next_anchor.set(unsafe { layer.get().reset_pos(char_idx) }); } } pub fn collect_overlay_highlights( &self, char_range: Range, ) -> Vec<(usize, Range)> { let mut highlights = Vec::new(); self.reset_pos(char_range.start); for char_idx in char_range { if let Some((_, Some(highlight))) = self.overlay_at(char_idx) { // we don't know the number of chars the original grapheme takes // however it doesn't matter as highlight boundaries are automatically // aligned to grapheme boundaries in the rendering code highlights.push((highlight.0, char_idx..char_idx + 1)) } } highlights } /// Add new inline annotations. /// /// The annotations grapheme will be rendered with `highlight` /// patched on top of `ui.text`. /// /// The annotations **must be sorted** by their `char_idx`. /// Multiple annotations with the same `char_idx` are allowed, /// they will be display in the order that they are present in the layer. /// /// If multiple layers contain annotations at the same position /// the annotations that belong to the layers added first will be shown first. 
pub fn add_inline_annotations( &mut self, layer: &'a [InlineAnnotation], highlight: Option, ) -> &mut Self { if !layer.is_empty() { self.inline_annotations.push((layer, highlight).into()); } self } /// Add new grapheme overlays. /// /// The overlaid grapheme will be rendered with `highlight` /// patched on top of `ui.text`. /// /// The overlays **must be sorted** by their `char_idx`. /// Multiple overlays with the same `char_idx` **are allowed**. /// /// If multiple layers contain overlay at the same position /// the overlay from the layer added last will be show. pub fn add_overlay(&mut self, layer: &'a [Overlay], highlight: Option) -> &mut Self { if !layer.is_empty() { self.overlays.push((layer, highlight).into()); } self } /// Add new annotation lines. /// /// The line annotations **must be sorted** by their `char_idx`. /// Multiple line annotations with the same `char_idx` **are not allowed**. pub fn add_line_annotation(&mut self, layer: Box) -> &mut Self { self.line_annotations .push((Cell::new(usize::MAX), layer.into())); self } /// Removes all line annotations, useful for vertical motions /// so that virtual text lines are automatically skipped. 
pub fn clear_line_annotations(&mut self) { self.line_annotations.clear(); } pub(crate) fn next_inline_annotation_at( &self, char_idx: usize, ) -> Option<(&InlineAnnotation, Option)> { self.inline_annotations.iter().find_map(|layer| { let annotation = layer.consume(char_idx, |annot| annot.char_idx)?; Some((annotation, layer.metadata)) }) } pub(crate) fn overlay_at(&self, char_idx: usize) -> Option<(&Overlay, Option)> { let mut overlay = None; for layer in &self.overlays { while let Some(new_overlay) = layer.consume(char_idx, |annot| annot.char_idx) { overlay = Some((new_overlay, layer.metadata)); } } overlay } pub(crate) fn process_virtual_text_anchors(&self, grapheme: &FormattedGrapheme) { for (next_anchor, layer) in &self.line_annotations { loop { match next_anchor.get().cmp(&grapheme.char_idx) { Ordering::Less => next_anchor .set(unsafe { layer.get().skip_concealed_anchors(grapheme.char_idx) }), Ordering::Equal => { next_anchor.set(unsafe { layer.get().process_anchor(grapheme) }) } Ordering::Greater => break, }; } } } pub(crate) fn virtual_lines_at( &self, char_idx: usize, line_end_visual_pos: Position, doc_line: usize, ) -> usize { let mut virt_off = Position::new(0, 0); for (_, layer) in &self.line_annotations { virt_off += unsafe { layer .get() .insert_virtual_lines(char_idx, line_end_visual_pos + virt_off, doc_line) }; } virt_off.row } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/textobject.rs000066400000000000000000000476521500614314100251720ustar00rootroot00000000000000use std::fmt::Display; use ropey::RopeSlice; use tree_sitter::{Node, QueryCursor}; use crate::chars::{categorize_char, char_is_whitespace, CharCategory}; use crate::graphemes::{next_grapheme_boundary, prev_grapheme_boundary}; use crate::line_ending::rope_is_line_ending; use crate::movement::Direction; use crate::syntax::LanguageConfiguration; use crate::Range; use crate::{surround, Syntax}; fn find_word_boundary(slice: RopeSlice, mut pos: usize, direction: Direction, long: 
bool) -> usize { use CharCategory::{Eol, Whitespace}; let iter = match direction { Direction::Forward => slice.chars_at(pos), Direction::Backward => { let mut iter = slice.chars_at(pos); iter.reverse(); iter } }; let mut prev_category = match direction { Direction::Forward if pos == 0 => Whitespace, Direction::Forward => categorize_char(slice.char(pos - 1)), Direction::Backward if pos == slice.len_chars() => Whitespace, Direction::Backward => categorize_char(slice.char(pos)), }; for ch in iter { match categorize_char(ch) { Eol | Whitespace => return pos, category => { if !long && category != prev_category && pos != 0 && pos != slice.len_chars() { return pos; } else { match direction { Direction::Forward => pos += 1, Direction::Backward => pos = pos.saturating_sub(1), } prev_category = category; } } } } pos } #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum TextObject { Around, Inside, /// Used for moving between objects. Movement, } impl Display for TextObject { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str(match self { Self::Around => "around", Self::Inside => "inside", Self::Movement => "movement", }) } } // count doesn't do anything yet pub fn textobject_word( slice: RopeSlice, range: Range, textobject: TextObject, _count: usize, long: bool, ) -> Range { let pos = range.cursor(slice); let word_start = find_word_boundary(slice, pos, Direction::Backward, long); let word_end = match slice.get_char(pos).map(categorize_char) { None | Some(CharCategory::Whitespace | CharCategory::Eol) => pos, _ => find_word_boundary(slice, pos + 1, Direction::Forward, long), }; // Special case. 
if word_start == word_end { return Range::new(word_start, word_end); } match textobject { TextObject::Inside => Range::new(word_start, word_end), TextObject::Around => { let whitespace_count_right = slice .chars_at(word_end) .take_while(|c| char_is_whitespace(*c)) .count(); if whitespace_count_right > 0 { Range::new(word_start, word_end + whitespace_count_right) } else { let whitespace_count_left = { let mut iter = slice.chars_at(word_start); iter.reverse(); iter.take_while(|c| char_is_whitespace(*c)).count() }; Range::new(word_start - whitespace_count_left, word_end) } } TextObject::Movement => unreachable!(), } } pub fn textobject_paragraph( slice: RopeSlice, range: Range, textobject: TextObject, count: usize, ) -> Range { let mut line = range.cursor_line(slice); let prev_line_empty = rope_is_line_ending(slice.line(line.saturating_sub(1))); let curr_line_empty = rope_is_line_ending(slice.line(line)); let next_line_empty = rope_is_line_ending(slice.line(line.saturating_sub(1))); let last_char = prev_grapheme_boundary(slice, slice.line_to_char(line + 1)) == range.cursor(slice); let prev_empty_to_line = prev_line_empty && !curr_line_empty; let curr_empty_to_line = curr_line_empty && !next_line_empty; // skip character before paragraph boundary let mut line_back = line; // line but backwards if prev_empty_to_line || curr_empty_to_line { line_back += 1; } // do not include current paragraph on paragraph end (include next) if !(curr_empty_to_line && last_char) { let mut lines = slice.lines_at(line_back); lines.reverse(); let mut lines = lines.map(rope_is_line_ending).peekable(); while lines.next_if(|&e| e).is_some() { line_back -= 1; } while lines.next_if(|&e| !e).is_some() { line_back -= 1; } } // skip character after paragraph boundary if curr_empty_to_line && last_char { line += 1; } let mut lines = slice.lines_at(line).map(rope_is_line_ending).peekable(); let mut count_done = 0; // count how many non-whitespace paragraphs done for _ in 0..count { let mut done = 
false; while lines.next_if(|&e| !e).is_some() { line += 1; done = true; } while lines.next_if(|&e| e).is_some() { line += 1; } count_done += done as usize; } // search one paragraph backwards for last paragraph // makes `map` at the end of the paragraph with trailing newlines useful let last_paragraph = count_done != count && lines.peek().is_none(); if last_paragraph { let mut lines = slice.lines_at(line_back); lines.reverse(); let mut lines = lines.map(rope_is_line_ending).peekable(); while lines.next_if(|&e| e).is_some() { line_back -= 1; } while lines.next_if(|&e| !e).is_some() { line_back -= 1; } } // handle last whitespaces part separately depending on textobject match textobject { TextObject::Around => {} TextObject::Inside => { // remove last whitespace paragraph let mut lines = slice.lines_at(line); lines.reverse(); let mut lines = lines.map(rope_is_line_ending).peekable(); while lines.next_if(|&e| e).is_some() { line -= 1; } } TextObject::Movement => unreachable!(), } let anchor = slice.line_to_char(line_back); let head = slice.line_to_char(line); Range::new(anchor, head) } pub fn textobject_pair_surround( syntax: Option<&Syntax>, slice: RopeSlice, range: Range, textobject: TextObject, ch: char, count: usize, ) -> Range { textobject_pair_surround_impl(syntax, slice, range, textobject, Some(ch), count) } pub fn textobject_pair_surround_closest( syntax: Option<&Syntax>, slice: RopeSlice, range: Range, textobject: TextObject, count: usize, ) -> Range { textobject_pair_surround_impl(syntax, slice, range, textobject, None, count) } fn textobject_pair_surround_impl( syntax: Option<&Syntax>, slice: RopeSlice, range: Range, textobject: TextObject, ch: Option, count: usize, ) -> Range { let pair_pos = match ch { Some(ch) => surround::find_nth_pairs_pos(slice, ch, range, count), None => surround::find_nth_closest_pairs_pos(syntax, slice, range, count), }; pair_pos .map(|(anchor, head)| match textobject { TextObject::Inside => { if anchor < head { 
Range::new(next_grapheme_boundary(slice, anchor), head) } else { Range::new(anchor, next_grapheme_boundary(slice, head)) } } TextObject::Around => { if anchor < head { Range::new(anchor, next_grapheme_boundary(slice, head)) } else { Range::new(next_grapheme_boundary(slice, anchor), head) } } TextObject::Movement => unreachable!(), }) .unwrap_or(range) } /// Transform the given range to select text objects based on tree-sitter. /// `object_name` is a query capture base name like "function", "class", etc. /// `slice_tree` is the tree-sitter node corresponding to given text slice. pub fn textobject_treesitter( slice: RopeSlice, range: Range, textobject: TextObject, object_name: &str, slice_tree: Node, lang_config: &LanguageConfiguration, _count: usize, ) -> Range { let get_range = move || -> Option { let byte_pos = slice.char_to_byte(range.cursor(slice)); let capture_name = format!("{}.{}", object_name, textobject); // eg. function.inner let mut cursor = QueryCursor::new(); let node = lang_config .textobject_query()? .capture_nodes(&capture_name, slice_tree, slice, &mut cursor)? 
.filter(|node| node.byte_range().contains(&byte_pos)) .min_by_key(|node| node.byte_range().len())?; let len = slice.len_bytes(); let start_byte = node.start_byte(); let end_byte = node.end_byte(); if start_byte >= len || end_byte >= len { return None; } let start_char = slice.byte_to_char(start_byte); let end_char = slice.byte_to_char(end_byte); Some(Range::new(start_char, end_char)) }; get_range().unwrap_or(range) } #[cfg(test)] mod test { use super::TextObject::*; use super::*; use crate::Range; use ropey::Rope; #[test] fn test_textobject_word() { // (text, [(char position, textobject, final range), ...]) let tests = &[ ( "cursor at beginning of doc", vec![(0, Inside, (0, 6)), (0, Around, (0, 7))], ), ( "cursor at middle of word", vec![ (13, Inside, (10, 16)), (10, Inside, (10, 16)), (15, Inside, (10, 16)), (13, Around, (10, 17)), (10, Around, (10, 17)), (15, Around, (10, 17)), ], ), ( "cursor between word whitespace", vec![(6, Inside, (6, 6)), (6, Around, (6, 6))], ), ( "cursor on word before newline\n", vec![ (22, Inside, (22, 29)), (28, Inside, (22, 29)), (25, Inside, (22, 29)), (22, Around, (21, 29)), (28, Around, (21, 29)), (25, Around, (21, 29)), ], ), ( "cursor on newline\nnext line", vec![(17, Inside, (17, 17)), (17, Around, (17, 17))], ), ( "cursor on word after newline\nnext line", vec![ (29, Inside, (29, 33)), (30, Inside, (29, 33)), (32, Inside, (29, 33)), (29, Around, (29, 34)), (30, Around, (29, 34)), (32, Around, (29, 34)), ], ), ( "cursor on #$%:;* punctuation", vec![ (13, Inside, (10, 16)), (10, Inside, (10, 16)), (15, Inside, (10, 16)), (13, Around, (10, 17)), (10, Around, (10, 17)), (15, Around, (10, 17)), ], ), ( "cursor on punc%^#$:;.tuation", vec![ (14, Inside, (14, 21)), (20, Inside, (14, 21)), (17, Inside, (14, 21)), (14, Around, (14, 21)), (20, Around, (14, 21)), (17, Around, (14, 21)), ], ), ( "cursor in extra whitespace", vec![ (9, Inside, (9, 9)), (10, Inside, (10, 10)), (11, Inside, (11, 11)), (9, Around, (9, 9)), (10, Around, (10, 
10)), (11, Around, (11, 11)), ], ), ( "cursor on word with extra whitespace", vec![(11, Inside, (10, 14)), (11, Around, (10, 17))], ), ( "cursor at end with extra whitespace", vec![(28, Inside, (27, 37)), (28, Around, (24, 37))], ), ( "cursor at end of doc", vec![(19, Inside, (17, 20)), (19, Around, (16, 20))], ), ]; for (sample, scenario) in tests { let doc = Rope::from(*sample); let slice = doc.slice(..); for &case in scenario { let (pos, objtype, expected_range) = case; // cursor is a single width selection let range = Range::new(pos, pos + 1); let result = textobject_word(slice, range, objtype, 1, false); assert_eq!( result, expected_range.into(), "\nCase failed: {:?} - {:?}", sample, case ); } } } #[test] fn test_textobject_paragraph_inside_single() { let tests = [ ("#[|]#", "#[|]#"), ("firs#[t|]#\n\nparagraph\n\n", "#[first\n|]#\nparagraph\n\n"), ( "second\n\npa#[r|]#agraph\n\n", "second\n\n#[paragraph\n|]#\n", ), ("#[f|]#irst char\n\n", "#[first char\n|]#\n"), ("last char\n#[\n|]#", "#[last char\n|]#\n"), ( "empty to line\n#[\n|]#paragraph boundary\n\n", "empty to line\n\n#[paragraph boundary\n|]#\n", ), ( "line to empty\n\n#[p|]#aragraph boundary\n\n", "line to empty\n\n#[paragraph boundary\n|]#\n", ), ]; for (before, expected) in tests { let (s, selection) = crate::test::print(before); let text = Rope::from(s.as_str()); let selection = selection .transform(|r| textobject_paragraph(text.slice(..), r, TextObject::Inside, 1)); let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } #[test] fn test_textobject_paragraph_inside_double() { let tests = [ ( "last two\n\n#[p|]#aragraph\n\nwithout whitespaces\n\n", "last two\n\n#[paragraph\n\nwithout whitespaces\n|]#\n", ), ( "last two\n#[\n|]#paragraph\n\nwithout whitespaces\n\n", "last two\n\n#[paragraph\n\nwithout whitespaces\n|]#\n", ), ]; for (before, expected) in tests { let (s, selection) = crate::test::print(before); let text = 
Rope::from(s.as_str()); let selection = selection .transform(|r| textobject_paragraph(text.slice(..), r, TextObject::Inside, 2)); let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } #[test] fn test_textobject_paragraph_around_single() { let tests = [ ("#[|]#", "#[|]#"), ("firs#[t|]#\n\nparagraph\n\n", "#[first\n\n|]#paragraph\n\n"), ( "second\n\npa#[r|]#agraph\n\n", "second\n\n#[paragraph\n\n|]#", ), ("#[f|]#irst char\n\n", "#[first char\n\n|]#"), ("last char\n#[\n|]#", "#[last char\n\n|]#"), ( "empty to line\n#[\n|]#paragraph boundary\n\n", "empty to line\n\n#[paragraph boundary\n\n|]#", ), ( "line to empty\n\n#[p|]#aragraph boundary\n\n", "line to empty\n\n#[paragraph boundary\n\n|]#", ), ]; for (before, expected) in tests { let (s, selection) = crate::test::print(before); let text = Rope::from(s.as_str()); let selection = selection .transform(|r| textobject_paragraph(text.slice(..), r, TextObject::Around, 1)); let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } #[test] fn test_textobject_surround() { // (text, [(cursor position, textobject, final range, surround char, count), ...]) let tests = &[ ( "simple (single) surround pairs", vec![ (3, Inside, (3, 3), '(', 1), (7, Inside, (8, 14), ')', 1), (10, Inside, (8, 14), '(', 1), (14, Inside, (8, 14), ')', 1), (3, Around, (3, 3), '(', 1), (7, Around, (7, 15), ')', 1), (10, Around, (7, 15), '(', 1), (14, Around, (7, 15), ')', 1), ], ), ( "samexx 'single' surround pairs", vec![ (3, Inside, (3, 3), '\'', 1), (7, Inside, (7, 7), '\'', 1), (10, Inside, (8, 14), '\'', 1), (14, Inside, (14, 14), '\'', 1), (3, Around, (3, 3), '\'', 1), (7, Around, (7, 7), '\'', 1), (10, Around, (7, 15), '\'', 1), (14, Around, (14, 14), '\'', 1), ], ), ( "(nested (surround (pairs)) 3 levels)", vec![ (0, Inside, (1, 35), '(', 1), (6, Inside, (1, 35), ')', 1), (8, Inside, (9, 25), '(', 1), (8, Inside, (9, 35), 
')', 2), (20, Inside, (9, 25), '(', 2), (20, Inside, (1, 35), ')', 3), (0, Around, (0, 36), '(', 1), (6, Around, (0, 36), ')', 1), (8, Around, (8, 26), '(', 1), (8, Around, (8, 36), ')', 2), (20, Around, (8, 26), '(', 2), (20, Around, (0, 36), ')', 3), ], ), ( "(mixed {surround [pair] same} line)", vec![ (2, Inside, (1, 34), '(', 1), (9, Inside, (8, 28), '{', 1), (18, Inside, (18, 22), '[', 1), (2, Around, (0, 35), '(', 1), (9, Around, (7, 29), '{', 1), (18, Around, (17, 23), '[', 1), ], ), ( "(stepped (surround) pairs (should) skip)", vec![(22, Inside, (1, 39), '(', 1), (22, Around, (0, 40), '(', 1)], ), ( "[surround pairs{\non different]\nlines}", vec![ (7, Inside, (1, 29), '[', 1), (15, Inside, (16, 36), '{', 1), (7, Around, (0, 30), '[', 1), (15, Around, (15, 37), '{', 1), ], ), ]; for (sample, scenario) in tests { let doc = Rope::from(*sample); let slice = doc.slice(..); for &case in scenario { let (pos, objtype, expected_range, ch, count) = case; let result = textobject_pair_surround(None, slice, Range::point(pos), objtype, ch, count); assert_eq!( result, expected_range.into(), "\nCase failed: {:?} - {:?}", sample, case ); } } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/transaction.rs000066400000000000000000001060541500614314100253340ustar00rootroot00000000000000use ropey::RopeSlice; use smallvec::SmallVec; use crate::{chars::char_is_word, Range, Rope, Selection, Tendril}; use std::{borrow::Cow, iter::once}; /// (from, to, replacement) pub type Change = (usize, usize, Option); pub type Deletion = (usize, usize); // TODO: pub(crate) #[derive(Debug, Clone, PartialEq, Eq)] pub enum Operation { /// Move cursor by n characters. Retain(usize), /// Delete n characters. Delete(usize), /// Insert text at position. 
Insert(Tendril), } #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum Assoc { Before, After, /// Acts like `After` if a word character is inserted /// after the position, otherwise acts like `Before` AfterWord, /// Acts like `Before` if a word character is inserted /// before the position, otherwise acts like `After` BeforeWord, /// Acts like `Before` but if the position is within an exact replacement /// (exact size) the offset to the start of the replacement is kept BeforeSticky, /// Acts like `After` but if the position is within an exact replacement /// (exact size) the offset to the start of the replacement is kept AfterSticky, } impl Assoc { /// Whether to stick to gaps fn stay_at_gaps(self) -> bool { !matches!(self, Self::BeforeWord | Self::AfterWord) } fn insert_offset(self, s: &str) -> usize { let chars = s.chars().count(); match self { Assoc::After | Assoc::AfterSticky => chars, Assoc::AfterWord => s.chars().take_while(|&c| char_is_word(c)).count(), // return position before inserted text Assoc::Before | Assoc::BeforeSticky => 0, Assoc::BeforeWord => chars - s.chars().rev().take_while(|&c| char_is_word(c)).count(), } } pub fn sticky(self) -> bool { matches!(self, Assoc::BeforeSticky | Assoc::AfterSticky) } } #[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct ChangeSet { pub(crate) changes: Vec, /// The required document length. Will refuse to apply changes unless it matches. 
len: usize, len_after: usize, } impl ChangeSet { pub fn with_capacity(capacity: usize) -> Self { Self { changes: Vec::with_capacity(capacity), len: 0, len_after: 0, } } #[must_use] pub fn new(doc: RopeSlice) -> Self { let len = doc.len_chars(); Self { changes: Vec::new(), len, len_after: len, } } // TODO: from iter #[doc(hidden)] // used by lsp to convert to LSP changes pub fn changes(&self) -> &[Operation] { &self.changes } // Changeset builder operations: delete/insert/retain pub(crate) fn delete(&mut self, n: usize) { use Operation::*; if n == 0 { return; } self.len += n; if let Some(Delete(count)) = self.changes.last_mut() { *count += n; } else { self.changes.push(Delete(n)); } } pub(crate) fn insert(&mut self, fragment: Tendril) { use Operation::*; if fragment.is_empty() { return; } // Avoiding std::str::len() to account for UTF-8 characters. self.len_after += fragment.chars().count(); let new_last = match self.changes.as_mut_slice() { [.., Insert(prev)] | [.., Insert(prev), Delete(_)] => { prev.push_str(&fragment); return; } [.., last @ Delete(_)] => std::mem::replace(last, Insert(fragment)), _ => Insert(fragment), }; self.changes.push(new_last); } pub(crate) fn retain(&mut self, n: usize) { use Operation::*; if n == 0 { return; } self.len += n; self.len_after += n; if let Some(Retain(count)) = self.changes.last_mut() { *count += n; } else { self.changes.push(Retain(n)); } } /// Combine two changesets together. /// In other words, If `this` goes `docA` → `docB` and `other` represents `docB` → `docC`, the /// returned value will represent the change `docA` → `docC`. 
pub fn compose(self, other: Self) -> Self { assert!(self.len_after == other.len); // composing fails in weird ways if one of the sets is empty // a: [] len: 0 len_after: 1 | b: [Insert(Tendril(inline: "\n")), Retain(1)] len 1 if self.changes.is_empty() { return other; } if other.changes.is_empty() { return self; } let len = self.changes.len(); let mut changes_a = self.changes.into_iter(); let mut changes_b = other.changes.into_iter(); let mut head_a = changes_a.next(); let mut head_b = changes_b.next(); let mut changes = Self::with_capacity(len); // TODO: max(a, b), shrink_to_fit() afterwards loop { use std::cmp::Ordering; use Operation::*; match (head_a, head_b) { // we are done (None, None) => { break; } // deletion in A (Some(Delete(i)), b) => { changes.delete(i); head_a = changes_a.next(); head_b = b; } // insertion in B (a, Some(Insert(current))) => { changes.insert(current); head_a = a; head_b = changes_b.next(); } (None, val) | (val, None) => unreachable!("({:?})", val), (Some(Retain(i)), Some(Retain(j))) => match i.cmp(&j) { Ordering::Less => { changes.retain(i); head_a = changes_a.next(); head_b = Some(Retain(j - i)); } Ordering::Equal => { changes.retain(i); head_a = changes_a.next(); head_b = changes_b.next(); } Ordering::Greater => { changes.retain(j); head_a = Some(Retain(i - j)); head_b = changes_b.next(); } }, (Some(Insert(mut s)), Some(Delete(j))) => { let len = s.chars().count(); match len.cmp(&j) { Ordering::Less => { head_a = changes_a.next(); head_b = Some(Delete(j - len)); } Ordering::Equal => { head_a = changes_a.next(); head_b = changes_b.next(); } Ordering::Greater => { // TODO: cover this with a test // figure out the byte index of the truncated string end let (pos, _) = s.char_indices().nth(j).unwrap(); s.replace_range(0..pos, ""); head_a = Some(Insert(s)); head_b = changes_b.next(); } } } (Some(Insert(s)), Some(Retain(j))) => { let len = s.chars().count(); match len.cmp(&j) { Ordering::Less => { changes.insert(s); head_a = 
changes_a.next(); head_b = Some(Retain(j - len)); } Ordering::Equal => { changes.insert(s); head_a = changes_a.next(); head_b = changes_b.next(); } Ordering::Greater => { // figure out the byte index of the truncated string end let (pos, _) = s.char_indices().nth(j).unwrap(); let mut before = s; let after = before.split_off(pos); changes.insert(before); head_a = Some(Insert(after)); head_b = changes_b.next(); } } } (Some(Retain(i)), Some(Delete(j))) => match i.cmp(&j) { Ordering::Less => { changes.delete(i); head_a = changes_a.next(); head_b = Some(Delete(j - i)); } Ordering::Equal => { changes.delete(j); head_a = changes_a.next(); head_b = changes_b.next(); } Ordering::Greater => { changes.delete(j); head_a = Some(Retain(i - j)); head_b = changes_b.next(); } }, }; } // starting len should still equal original starting len debug_assert!(changes.len == self.len); changes } /// Given another change set starting in the same document, maps this /// change set over the other, producing a new change set that can be /// applied to the document produced by applying `other`. When /// `before` is `true`, order changes as if `this` comes before /// `other`, otherwise (the default) treat `other` as coming first. /// /// Given two changes `A` and `B`, `A.compose(B.map(A))` and /// `B.compose(A.map(B, true))` will produce the same document. This /// provides a basic form of [operational /// transformation](https://en.wikipedia.org/wiki/Operational_transformation), /// and can be used for collaborative editing. pub fn map(self, _other: Self) -> Self { unimplemented!() } /// Returns a new changeset that reverts this one. Useful for `undo` implementation. /// The document parameter expects the original document before this change was applied. 
pub fn invert(&self, original_doc: &Rope) -> Self { assert!(original_doc.len_chars() == self.len); let mut changes = Self::with_capacity(self.changes.len()); let mut pos = 0; for change in &self.changes { use Operation::*; match change { Retain(n) => { changes.retain(*n); pos += n; } Delete(n) => { let text = Cow::from(original_doc.slice(pos..pos + *n)); changes.insert(Tendril::from(text.as_ref())); pos += n; } Insert(s) => { let chars = s.chars().count(); changes.delete(chars); } } } changes } /// Returns true if applied successfully. pub fn apply(&self, text: &mut Rope) -> bool { if text.len_chars() != self.len { return false; } let mut pos = 0; for change in &self.changes { use Operation::*; match change { Retain(n) => { pos += n; } Delete(n) => { text.remove(pos..pos + *n); // pos += n; } Insert(s) => { text.insert(pos, s); pos += s.chars().count(); } } } true } /// `true` when the set is empty. #[inline] pub fn is_empty(&self) -> bool { self.changes.is_empty() || self.changes == [Operation::Retain(self.len)] } /// Map a (mostly) *sorted* list of positions through the changes. /// /// This is equivalent to updating each position with `map_pos`: /// /// ``` no-compile /// for (pos, assoc) in positions { /// *pos = changes.map_pos(*pos, assoc); /// } /// ``` /// However this function is significantly faster for sorted lists running /// in `O(N+M)` instead of `O(NM)`. This function also handles unsorted/ /// partially sorted lists. However, in that case worst case complexity is /// again `O(MN)`. For lists that are often/mostly sorted (like the end of diagnostic ranges) /// performance is usally close to `O(N + M)` pub fn update_positions<'a>(&self, positions: impl Iterator) { use Operation::*; let mut positions = positions.peekable(); let mut old_pos = 0; let mut new_pos = 0; let mut iter = self.changes.iter().enumerate().peekable(); 'outer: loop { macro_rules! 
map { ($map: expr, $i: expr) => { loop { let Some((pos, assoc)) = positions.peek_mut() else { return; }; if **pos < old_pos { // Positions are not sorted, revert to the last Operation that // contains this position and continue iterating from there. // We can unwrap here since `pos` can not be negative // (unsigned integer) and iterating backwards to the start // should always move us back to the start for (i, change) in self.changes[..$i].iter().enumerate().rev() { match change { Retain(i) => { old_pos -= i; new_pos -= i; } Delete(i) => { old_pos -= i; } Insert(ins) => { new_pos -= ins.chars().count(); } } if old_pos <= **pos { iter = self.changes[i..].iter().enumerate().peekable(); } } debug_assert!(old_pos <= **pos, "Reverse Iter across changeset works"); continue 'outer; } #[allow(clippy::redundant_closure_call)] let Some(new_pos) = $map(**pos, *assoc) else { break; }; **pos = new_pos; positions.next(); } }; } let Some((i, change)) = iter.next() else { map!( |pos, _| (old_pos == pos).then_some(new_pos), self.changes.len() ); break; }; let len = match change { Delete(i) | Retain(i) => *i, Insert(_) => 0, }; let mut old_end = old_pos + len; match change { Retain(_) => { map!( |pos, _| (old_end > pos).then_some(new_pos + (pos - old_pos)), i ); new_pos += len; } Delete(_) => { // in range map!(|pos, _| (old_end > pos).then_some(new_pos), i); } Insert(s) => { // a subsequent delete means a replace, consume it if let Some((_, Delete(len))) = iter.peek() { iter.next(); old_end = old_pos + len; // in range of replaced text map!( |pos, assoc: Assoc| (old_end > pos).then(|| { // at point or tracking before if pos == old_pos && assoc.stay_at_gaps() { new_pos } else { let ins = assoc.insert_offset(s); // if the deleted and inserted text have the exact same size // keep the relative offset into the new text if *len == ins && assoc.sticky() { new_pos + (pos - old_pos) } else { new_pos + assoc.insert_offset(s) } } }), i ); } else { // at insert point map!( |pos, assoc: Assoc| 
(old_pos == pos).then(|| { // return position before inserted text new_pos + assoc.insert_offset(s) }), i ); } new_pos += s.chars().count(); } } old_pos = old_end; } let out_of_bounds: Vec<_> = positions.collect(); panic!("Positions {out_of_bounds:?} are out of range for changeset len {old_pos}!",) } /// Map a position through the changes. /// /// `assoc` indicates which side to associate the position with. `Before` will keep the /// position close to the character before, and will place it before insertions over that /// range, or at that point. `After` will move it forward, placing it at the end of such /// insertions. pub fn map_pos(&self, mut pos: usize, assoc: Assoc) -> usize { self.update_positions(once((&mut pos, assoc))); pos } pub fn changes_iter(&self) -> ChangeIterator { ChangeIterator::new(self) } } /// Transaction represents a single undoable unit of changes. Several changes can be grouped into /// a single transaction. #[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct Transaction { changes: ChangeSet, selection: Option, } impl Transaction { /// Create a new, empty transaction. pub fn new(doc: &Rope) -> Self { Self { changes: ChangeSet::new(doc.slice(..)), selection: None, } } /// Changes made to the buffer. pub fn changes(&self) -> &ChangeSet { &self.changes } /// When set, explicitly updates the selection. pub fn selection(&self) -> Option<&Selection> { self.selection.as_ref() } /// Returns true if applied successfully. pub fn apply(&self, doc: &mut Rope) -> bool { if self.changes.is_empty() { return true; } // apply changes to the document self.changes.apply(doc) } /// Generate a transaction that reverts this one. 
pub fn invert(&self, original: &Rope) -> Self { let changes = self.changes.invert(original); Self { changes, selection: None, } } pub fn compose(mut self, other: Self) -> Self { self.changes = self.changes.compose(other.changes); // Other selection takes precedence self.selection = other.selection; self } pub fn with_selection(mut self, selection: Selection) -> Self { self.selection = Some(selection); self } /// Generate a transaction from a set of potentially overlapping changes. The `change_ranges` /// iterator yield the range (of removed text) in the old document for each edit. If any change /// overlaps with a range overlaps with a previous range then that range is ignored. /// /// The `process_change` callback is called for each edit that is not ignored (in the order /// yielded by `changes`) and should return the new text that the associated range will be /// replaced with. /// /// To make this function more flexible the iterator can yield additional data for each change /// that is passed to `process_change` pub fn change_ignore_overlapping( doc: &Rope, change_ranges: impl Iterator, mut process_change: impl FnMut(usize, usize, T) -> Option, ) -> Self { let mut last = 0; let changes = change_ranges.filter_map(|(from, to, data)| { if from < last { return None; } let tendril = process_change(from, to, data); last = to; Some((from, to, tendril)) }); Self::change(doc, changes) } /// Generate a transaction from a set of changes. 
pub fn change(doc: &Rope, changes: I) -> Self where I: Iterator, { let len = doc.len_chars(); let (lower, upper) = changes.size_hint(); let size = upper.unwrap_or(lower); let mut changeset = ChangeSet::with_capacity(2 * size + 1); // rough estimate let mut last = 0; for (from, to, tendril) in changes { // Verify ranges are ordered and not overlapping debug_assert!(last <= from); // Verify ranges are correct debug_assert!( from <= to, "Edit end must end before it starts (should {from} <= {to})" ); // Retain from last "to" to current "from" changeset.retain(from - last); let span = to - from; match tendril { Some(text) => { changeset.insert(text); changeset.delete(span); } None => changeset.delete(span), } last = to; } changeset.retain(len - last); Self::from(changeset) } /// Generate a transaction from a set of potentially overlapping deletions /// by merging overlapping deletions together. pub fn delete(doc: &Rope, deletions: I) -> Self where I: Iterator, { let len = doc.len_chars(); let (lower, upper) = deletions.size_hint(); let size = upper.unwrap_or(lower); let mut changeset = ChangeSet::with_capacity(2 * size + 1); // rough estimate let mut last = 0; for (mut from, to) in deletions { if last > to { continue; } if last > from { from = last } debug_assert!( from <= to, "Edit end must end before it starts (should {from} <= {to})" ); // Retain from last "to" to current "from" changeset.retain(from - last); changeset.delete(to - from); last = to; } changeset.retain(len - last); Self::from(changeset) } pub fn insert_at_eof(mut self, text: Tendril) -> Transaction { self.changes.insert(text); self } /// Generate a transaction with a change per selection range. 
pub fn change_by_selection(doc: &Rope, selection: &Selection, f: F) -> Self where F: FnMut(&Range) -> Change, { Self::change(doc, selection.iter().map(f)) } pub fn change_by_selection_ignore_overlapping( doc: &Rope, selection: &Selection, mut change_range: impl FnMut(&Range) -> (usize, usize), mut create_tendril: impl FnMut(usize, usize) -> Option, ) -> (Transaction, Selection) { let mut last_selection_idx = None; let mut new_primary_idx = None; let mut ranges: SmallVec<[Range; 1]> = SmallVec::new(); let process_change = |change_start, change_end, (idx, range): (usize, &Range)| { // update the primary idx if idx == selection.primary_index() { new_primary_idx = Some(idx); } else if new_primary_idx.is_none() { if idx > selection.primary_index() { new_primary_idx = last_selection_idx; } else { last_selection_idx = Some(idx); } } ranges.push(*range); create_tendril(change_start, change_end) }; let transaction = Self::change_ignore_overlapping( doc, selection.iter().enumerate().map(|range| { let (change_start, change_end) = change_range(range.1); (change_start, change_end, range) }), process_change, ); ( transaction, Selection::new(ranges, new_primary_idx.unwrap_or(0)), ) } /// Generate a transaction with a deletion per selection range. /// Compared to using `change_by_selection` directly these ranges may overlap. /// In that case they are merged pub fn delete_by_selection(doc: &Rope, selection: &Selection, f: F) -> Self where F: FnMut(&Range) -> Deletion, { Self::delete(doc, selection.iter().map(f)) } /// Insert text at each selection head. 
pub fn insert(doc: &Rope, selection: &Selection, text: Tendril) -> Self { Self::change_by_selection(doc, selection, |range| { (range.head, range.head, Some(text.clone())) }) } pub fn changes_iter(&self) -> ChangeIterator { self.changes.changes_iter() } } impl From for Transaction { fn from(changes: ChangeSet) -> Self { Self { changes, selection: None, } } } pub struct ChangeIterator<'a> { iter: std::iter::Peekable>, pos: usize, } impl<'a> ChangeIterator<'a> { fn new(changeset: &'a ChangeSet) -> Self { let iter = changeset.changes.iter().peekable(); Self { iter, pos: 0 } } } impl Iterator for ChangeIterator<'_> { type Item = Change; fn next(&mut self) -> Option { use Operation::*; loop { match self.iter.next()? { Retain(len) => { self.pos += len; } Delete(len) => { let start = self.pos; self.pos += len; return Some((start, self.pos, None)); } Insert(s) => { let start = self.pos; // a subsequent delete means a replace, consume it if let Some(Delete(len)) = self.iter.peek() { self.iter.next(); self.pos += len; return Some((start, self.pos, Some(s.clone()))); } else { return Some((start, start, Some(s.clone()))); } } } } } } #[cfg(test)] mod test { use super::*; use crate::history::State; #[test] fn composition() { use Operation::*; let a = ChangeSet { changes: vec![ Retain(5), Insert(" test!".into()), Retain(1), Delete(2), Insert("abc".into()), ], len: 8, len_after: 15, }; let b = ChangeSet { changes: vec![Delete(10), Insert("世orld".into()), Retain(5)], len: 15, len_after: 10, }; let mut text = Rope::from("hello xz"); // should probably return cloned text let composed = a.compose(b); assert_eq!(composed.len, 8); assert!(composed.apply(&mut text)); assert_eq!(text, "世orld! 
abc"); } #[test] fn invert() { use Operation::*; let changes = ChangeSet { changes: vec![Retain(4), Insert("test".into()), Delete(5), Retain(3)], len: 12, len_after: 11, }; let doc = Rope::from("世界3 hello xz"); let revert = changes.invert(&doc); let mut doc2 = doc.clone(); changes.apply(&mut doc2); // a revert is different assert_ne!(changes, revert); assert_ne!(doc, doc2); // but inverting a revert will give us the original assert_eq!(changes, revert.invert(&doc2)); // applying a revert gives us back the original revert.apply(&mut doc2); assert_eq!(doc, doc2); } #[test] fn map_pos() { use Operation::*; // maps inserts let cs = ChangeSet { changes: vec![Retain(4), Insert("!!".into()), Retain(4)], len: 8, len_after: 10, }; assert_eq!(cs.map_pos(0, Assoc::Before), 0); // before insert region assert_eq!(cs.map_pos(4, Assoc::Before), 4); // at insert, track before assert_eq!(cs.map_pos(4, Assoc::After), 6); // at insert, track after assert_eq!(cs.map_pos(5, Assoc::Before), 7); // after insert region // maps deletes let cs = ChangeSet { changes: vec![Retain(4), Delete(4), Retain(4)], len: 12, len_after: 8, }; assert_eq!(cs.map_pos(0, Assoc::Before), 0); // at start assert_eq!(cs.map_pos(4, Assoc::Before), 4); // before a delete assert_eq!(cs.map_pos(5, Assoc::Before), 4); // inside a delete assert_eq!(cs.map_pos(5, Assoc::After), 4); // inside a delete // TODO: delete tracking // stays inbetween replacements let cs = ChangeSet { changes: vec![ Insert("ab".into()), Delete(2), Insert("cd".into()), Delete(2), ], len: 4, len_after: 4, }; assert_eq!(cs.map_pos(2, Assoc::Before), 2); assert_eq!(cs.map_pos(2, Assoc::After), 2); // unsorted selection let cs = ChangeSet { changes: vec![ Insert("ab".into()), Delete(2), Insert("cd".into()), Delete(2), ], len: 4, len_after: 4, }; let mut positions = [4, 2]; cs.update_positions(positions.iter_mut().map(|pos| (pos, Assoc::After))); assert_eq!(positions, [4, 2]); // stays at word boundary let cs = ChangeSet { changes: vec![ Retain(2), 
// Insert(" ab".into()), Retain(2), // cd Insert("de ".into()), ], len: 4, len_after: 10, }; assert_eq!(cs.map_pos(2, Assoc::BeforeWord), 3); assert_eq!(cs.map_pos(4, Assoc::AfterWord), 9); let cs = ChangeSet { changes: vec![ Retain(1), // Insert(" b".into()), Delete(1), // c Retain(1), // d Insert("e ".into()), Delete(1), // ], len: 5, len_after: 7, }; assert_eq!(cs.map_pos(1, Assoc::BeforeWord), 2); assert_eq!(cs.map_pos(3, Assoc::AfterWord), 5); let cs = ChangeSet { changes: vec![ Retain(1), // Insert("a".into()), Delete(2), // b Retain(1), // d Insert("e".into()), Delete(1), // f Retain(1), // ], len: 5, len_after: 7, }; assert_eq!(cs.map_pos(2, Assoc::BeforeWord), 1); assert_eq!(cs.map_pos(4, Assoc::AfterWord), 4); } #[test] fn transaction_change() { let mut doc = Rope::from("hello world!\ntest 123"); let transaction = Transaction::change( &doc, // (1, 1, None) is a useless 0-width delete that gets factored out vec![(1, 1, None), (6, 11, Some("void".into())), (12, 17, None)].into_iter(), ); transaction.apply(&mut doc); assert_eq!(doc, Rope::from_str("hello void! 
123")); } #[test] fn changes_iter() { let doc = Rope::from("hello world!\ntest 123"); let changes = vec![(6, 11, Some("void".into())), (12, 17, None)]; let transaction = Transaction::change(&doc, changes.clone().into_iter()); assert_eq!(transaction.changes_iter().collect::>(), changes); } #[test] fn optimized_composition() { let mut state = State { doc: "".into(), selection: Selection::point(0), }; let t1 = Transaction::insert(&state.doc, &state.selection, Tendril::from("h")); t1.apply(&mut state.doc); state.selection = state.selection.clone().map(t1.changes()); let t2 = Transaction::insert(&state.doc, &state.selection, Tendril::from("e")); t2.apply(&mut state.doc); state.selection = state.selection.clone().map(t2.changes()); let t3 = Transaction::insert(&state.doc, &state.selection, Tendril::from("l")); t3.apply(&mut state.doc); state.selection = state.selection.clone().map(t3.changes()); let t4 = Transaction::insert(&state.doc, &state.selection, Tendril::from("l")); t4.apply(&mut state.doc); state.selection = state.selection.clone().map(t4.changes()); let t5 = Transaction::insert(&state.doc, &state.selection, Tendril::from("o")); t5.apply(&mut state.doc); state.selection = state.selection.clone().map(t5.changes()); assert_eq!(state.doc, Rope::from_str("hello")); // changesets as follows: // h // retain 1, e // retain 2, l let changes = t1 .changes .compose(t2.changes) .compose(t3.changes) .compose(t4.changes) .compose(t5.changes); use Operation::*; assert_eq!(changes.changes, &[Insert("hello".into())]); // instead of insert h, insert e, insert l, insert l, insert o } #[test] fn combine_with_empty() { let empty = Rope::from(""); let a = ChangeSet::new(empty.slice(..)); let mut b = ChangeSet::new(empty.slice(..)); b.insert("a".into()); let changes = a.compose(b); use Operation::*; assert_eq!(changes.changes, &[Insert("a".into())]); } #[test] fn combine_with_utf8() { const TEST_CASE: &str = "Hello, これはヘリックスエディターです!"; let empty = Rope::from(""); let a = 
ChangeSet::new(empty.slice(..)); let mut b = ChangeSet::new(empty.slice(..)); b.insert(TEST_CASE.into()); let changes = a.compose(b); use Operation::*; assert_eq!(changes.changes, &[Insert(TEST_CASE.into())]); assert_eq!(changes.len_after, TEST_CASE.chars().count()); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/uri.rs000066400000000000000000000060561500614314100236070ustar00rootroot00000000000000use std::{ fmt, path::{Path, PathBuf}, sync::Arc, }; /// A generic pointer to a file location. /// /// Currently this type only supports paths to local files. /// /// Cloning this type is cheap: the internal representation uses an Arc. #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] #[non_exhaustive] pub enum Uri { File(Arc), } impl Uri { // This clippy allow mirrors url::Url::from_file_path #[allow(clippy::result_unit_err)] pub fn to_url(&self) -> Result { match self { Uri::File(path) => url::Url::from_file_path(path), } } pub fn as_path(&self) -> Option<&Path> { match self { Self::File(path) => Some(path), } } } impl From for Uri { fn from(path: PathBuf) -> Self { Self::File(path.into()) } } impl fmt::Display for Uri { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::File(path) => write!(f, "{}", path.display()), } } } #[derive(Debug)] pub struct UrlConversionError { source: url::Url, kind: UrlConversionErrorKind, } #[derive(Debug)] pub enum UrlConversionErrorKind { UnsupportedScheme, UnableToConvert, } impl fmt::Display for UrlConversionError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self.kind { UrlConversionErrorKind::UnsupportedScheme => { write!( f, "unsupported scheme '{}' in URL {}", self.source.scheme(), self.source ) } UrlConversionErrorKind::UnableToConvert => { write!(f, "unable to convert URL to file path: {}", self.source) } } } } impl std::error::Error for UrlConversionError {} fn convert_url_to_uri(url: &url::Url) -> Result { if url.scheme() == "file" { url.to_file_path() 
.map(|path| Uri::File(helix_stdx::path::normalize(path).into())) .map_err(|_| UrlConversionErrorKind::UnableToConvert) } else { Err(UrlConversionErrorKind::UnsupportedScheme) } } impl TryFrom for Uri { type Error = UrlConversionError; fn try_from(url: url::Url) -> Result { convert_url_to_uri(&url).map_err(|kind| Self::Error { source: url, kind }) } } impl TryFrom<&url::Url> for Uri { type Error = UrlConversionError; fn try_from(url: &url::Url) -> Result { convert_url_to_uri(url).map_err(|kind| Self::Error { source: url.clone(), kind, }) } } #[cfg(test)] mod test { use super::*; use url::Url; #[test] fn unknown_scheme() { let url = Url::parse("csharp:/metadata/foo/bar/Baz.cs").unwrap(); assert!(matches!( Uri::try_from(url), Err(UrlConversionError { kind: UrlConversionErrorKind::UnsupportedScheme, .. }) )); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/src/wrap.rs000066400000000000000000000007211500614314100237520ustar00rootroot00000000000000use smartstring::{LazyCompact, SmartString}; use textwrap::{Options, WordSplitter::NoHyphenation}; /// Given a slice of text, return the text re-wrapped to fit it /// within the given width. 
pub fn reflow_hard_wrap(text: &str, text_width: usize) -> SmartString { let options = Options::new(text_width) .word_splitter(NoHyphenation) .word_separator(textwrap::WordSeparator::AsciiSpace); textwrap::refill(text, options).into() } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/tests/000077500000000000000000000000001500614314100230065ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/tests/data/000077500000000000000000000000001500614314100237175ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/tests/data/indent/000077500000000000000000000000001500614314100252005ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/tests/data/indent/cpp.cpp000066400000000000000000000020231500614314100264630ustar00rootroot00000000000000std::vector fn_with_many_parameters(int parm1, long parm2, float parm3, double parm4, char* parm5, bool parm6); std::vector fn_with_many_parameters(int parm1, long parm2, float parm3, double parm4, char* parm5, bool parm6) { auto lambda = []() { return 0; }; auto lambda_with_a_really_long_name_that_uses_a_whole_line = [](int some_more_aligned_parameters, std::string parm2) { do_smth(); }; if (brace_on_same_line) { do_smth(); } else if (brace_on_next_line) { do_smth(); } else if (another_condition) { do_smth(); } else { do_smth(); } if (inline_if_statement) do_smth(); if (another_inline_if_statement) return [](int parm1, char* parm2) { this_is_a_really_pointless_lambda(); }; switch (var) { case true: return -1; case false: return 42; } } class MyClass : public MyBaseClass { public: MyClass(); void public_fn(); private: super_secret_private_fn(); } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/tests/data/indent/languages.toml000066400000000000000000000015051500614314100300440ustar00rootroot00000000000000# This languages.toml should contain definitions for all languages for which we have indent tests [[language]] name = 
"rust" scope = "source.rust" injection-regex = "rust" file-types = ["rs"] comment-token = "//" roots = ["Cargo.toml", "Cargo.lock"] indent = { tab-width = 4, unit = " " } [[grammar]] name = "rust" source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "0431a2c60828731f27491ee9fdefe25e250ce9c9" } [[language]] name = "cpp" scope = "source.cpp" injection-regex = "cpp" file-types = ["cc", "hh", "c++", "cpp", "hpp", "h", "ipp", "tpp", "cxx", "hxx", "ixx", "txx", "ino", "C", "H"] roots = [] comment-token = "//" indent = { tab-width = 2, unit = " " } [[grammar]] name = "cpp" source = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "2d2c4aee8672af4c7c8edff68e7dd4c07e88d2b1" } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/tests/data/indent/rust.rs000066400000000000000000000046461500614314100265550ustar00rootroot00000000000000use std::{ io::{self, stdout, Stdout, Write}, path::PathBuf, sync::Arc, time::Duration, }; mod test { fn hello_world() { 1 + 1; let does_indentation_work = 1; let mut really_long_variable_name_using_up_the_line = really_long_fn_that_should_definitely_go_on_the_next_line(); really_long_variable_name_using_up_the_line = really_long_fn_that_should_definitely_go_on_the_next_line(); really_long_variable_name_using_up_the_line |= really_long_fn_that_should_definitely_go_on_the_next_line(); let ( a_long_variable_name_in_this_tuple, b_long_variable_name_in_this_tuple, c_long_variable_name_in_this_tuple, d_long_variable_name_in_this_tuple, e_long_variable_name_in_this_tuple, ): (usize, usize, usize, usize, usize) = if really_long_fn_that_should_definitely_go_on_the_next_line() { ( 03294239434, 1213412342314, 21231234134, 834534234549898789, 9879234234543853457, ) } else { (0, 1, 2, 3, 4) }; let test_function = function_with_param(this_param, that_param ); let test_function = function_with_param( this_param, that_param ); let test_function = function_with_proper_indent(param1, param2, ); let selection = 
Selection::new( changes .clone() .map(|(start, end, text): (usize, usize, Option)| { let len = text.map(|text| text.len()).unwrap() - 1; // minus newline let pos = start + len; Range::new(pos, pos) }) .collect(), 0, ); return; } } impl MyTrait for YourType where A: TraitB + TraitC, D: TraitE + TraitF, { } #[test] // match test { Some(a) => 1, None => { unimplemented!() } } std::panic::set_hook(Box::new(move |info| { hook(info); })); { { { 1 }}} pub fn change(document: &Document, changes: I) -> Self where I: IntoIterator + ExactSizeIterator, { [ 1, 2, 3, ]; ( 1, 2 ); true } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-core/tests/indent.rs000066400000000000000000000206451500614314100246440ustar00rootroot00000000000000use arc_swap::ArcSwap; use helix_core::{ indent::{indent_level_for_line, treesitter_indent_for_pos, IndentStyle}, syntax::{Configuration, Loader}, Syntax, }; use helix_stdx::rope::RopeSliceExt; use ropey::Rope; use std::{ops::Range, path::PathBuf, process::Command, sync::Arc}; #[test] fn test_treesitter_indent_rust() { standard_treesitter_test("rust.rs", "source.rust"); } #[test] fn test_treesitter_indent_cpp() { standard_treesitter_test("cpp.cpp", "source.cpp"); } #[test] fn test_treesitter_indent_rust_helix() { // We pin a specific git revision to prevent unrelated changes from causing the indent tests to fail. // Ideally, someone updates this once in a while and fixes any errors that occur. 
let rev = "af382768cdaf89ff547dbd8f644a1bddd90e7c8f"; let files = Command::new("git") .args([ "ls-tree", "-r", "--name-only", "--full-tree", rev, "helix-term/src", ]) .output() .unwrap(); let files = String::from_utf8(files.stdout).unwrap(); let ignored_files = [ // Contains many macros that tree-sitter does not parse in a meaningful way and is otherwise not very interesting "helix-term/src/health.rs", ]; for file in files.split_whitespace() { if ignored_files.contains(&file) { continue; } #[allow(clippy::single_range_in_vec_init)] let ignored_lines: Vec> = match file { "helix-term/src/application.rs" => vec![ // We can't handle complicated indent rules inside macros (`json!` in this case) since // the tree-sitter grammar only parses them as `token_tree` and `identifier` nodes. 1045..1051, ], "helix-term/src/commands.rs" => vec![ // This is broken because of the current handling of `call_expression` // (i.e. having an indent query for it but outdenting again in specific cases). // The indent query is needed to correctly handle multi-line arguments in function calls // inside indented `field_expression` nodes (which occurs fairly often). // // Once we have the `@indent.always` capture type, it might be possible to just have an indent // capture for the `arguments` field of a call expression. That could enable us to correctly // handle this. 2226..2230, ], "helix-term/src/commands/dap.rs" => vec![ // Complex `format!` macro 46..52, ], "helix-term/src/commands/lsp.rs" => vec![ // Macro 624..627, // Return type declaration of a closure. `cargo fmt` adds an additional space here, // which we cannot (yet) model with our indent queries. 
878..879, // Same as in `helix-term/src/commands.rs` 1335..1343, ], "helix-term/src/config.rs" => vec![ // Multiline string 146..152, ], "helix-term/src/keymap.rs" => vec![ // Complex macro (see above) 456..470, // Multiline string without indent 563..567, ], "helix-term/src/main.rs" => vec![ // Multiline string 44..70, ], "helix-term/src/ui/completion.rs" => vec![ // Macro 218..232, ], "helix-term/src/ui/editor.rs" => vec![ // The chained function calls here are not indented, probably because of the comment // in between. Since `cargo fmt` doesn't even attempt to format it, there's probably // no point in trying to indent this correctly. 342..350, ], "helix-term/src/ui/lsp.rs" => vec![ // Macro 56..61, ], "helix-term/src/ui/statusline.rs" => vec![ // Same as in `helix-term/src/commands.rs` 436..442, 450..456, ], _ => Vec::new(), }; let git_object = rev.to_string() + ":" + file; let content = Command::new("git") .args(["cat-file", "blob", &git_object]) .output() .unwrap(); let doc = Rope::from_reader(&mut content.stdout.as_slice()).unwrap(); test_treesitter_indent(file, doc, "source.rust", ignored_lines); } } #[test] fn test_indent_level_for_line_with_spaces() { let tab_width: usize = 4; let indent_width: usize = 4; let line = ropey::Rope::from_str(" Indented with 8 spaces"); let indent_level = indent_level_for_line(line.slice(0..), tab_width, indent_width); assert_eq!(indent_level, 2) } #[test] fn test_indent_level_for_line_with_tabs() { let tab_width: usize = 4; let indent_width: usize = 4; let line = ropey::Rope::from_str("\t\tIndented with 2 tabs"); let indent_level = indent_level_for_line(line.slice(0..), tab_width, indent_width); assert_eq!(indent_level, 2) } #[test] fn test_indent_level_for_line_with_spaces_and_tabs() { let tab_width: usize = 4; let indent_width: usize = 4; let line = ropey::Rope::from_str(" \t \tIndented with mix of spaces and tabs"); let indent_level = indent_level_for_line(line.slice(0..), tab_width, indent_width); 
assert_eq!(indent_level, 2) } fn indent_tests_dir() -> PathBuf { let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); test_dir.push("tests/data/indent"); test_dir } fn indent_test_path(name: &str) -> PathBuf { let mut path = indent_tests_dir(); path.push(name); path } fn indent_tests_config() -> Configuration { let mut config_path = indent_tests_dir(); config_path.push("languages.toml"); let config = std::fs::read_to_string(config_path).unwrap(); toml::from_str(&config).unwrap() } fn standard_treesitter_test(file_name: &str, lang_scope: &str) { let test_path = indent_test_path(file_name); let test_file = std::fs::File::open(test_path).unwrap(); let doc = ropey::Rope::from_reader(test_file).unwrap(); test_treesitter_indent(file_name, doc, lang_scope, Vec::new()) } /// Test that all the lines in the given file are indented as expected. /// ignored_lines is a list of (1-indexed) line ranges that are excluded from this test. fn test_treesitter_indent( test_name: &str, doc: Rope, lang_scope: &str, ignored_lines: Vec>, ) { let loader = Loader::new(indent_tests_config()).unwrap(); // set runtime path so we can find the queries let mut runtime = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")); runtime.push("../runtime"); std::env::set_var("HELIX_RUNTIME", runtime.to_str().unwrap()); let language_config = loader.language_config_for_scope(lang_scope).unwrap(); let indent_style = IndentStyle::from_str(&language_config.indent.as_ref().unwrap().unit); let highlight_config = language_config.highlight_config(&[]).unwrap(); let text = doc.slice(..); let syntax = Syntax::new( text, highlight_config, Arc::new(ArcSwap::from_pointee(loader)), ) .unwrap(); let indent_query = language_config.indent_query().unwrap(); for i in 0..doc.len_lines() { let line = text.line(i); if ignored_lines.iter().any(|range| range.contains(&(i + 1))) { continue; } if let Some(pos) = line.first_non_whitespace_char() { let tab_width: usize = 4; let suggested_indent = treesitter_indent_for_pos( 
indent_query, &syntax, tab_width, indent_style.indent_width(tab_width), text, i, text.line_to_char(i) + pos, false, ) .unwrap() .to_string(&indent_style, tab_width); assert!( line.get_slice(..pos).map_or(false, |s| s == suggested_indent), "Wrong indentation for file {:?} on line {}:\n\"{}\" (original line)\n\"{}\" (suggested indentation)\n", test_name, i+1, line.slice(..line.len_chars()-1), suggested_indent, ); } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-dap/000077500000000000000000000000001500614314100214605ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-dap/Cargo.toml000066400000000000000000000014431500614314100234120ustar00rootroot00000000000000[package] name = "helix-dap" description = "DAP client implementation for Helix project" version.workspace = true authors.workspace = true edition.workspace = true license.workspace = true rust-version.workspace = true categories.workspace = true repository.workspace = true homepage.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] helix-stdx = { path = "../helix-stdx" } helix-core = { path = "../helix-core" } anyhow = "1.0" log = "0.4" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] } thiserror.workspace = true [dev-dependencies] fern = "0.7" helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-dap/src/000077500000000000000000000000001500614314100222475ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-dap/src/client.rs000066400000000000000000000370171500614314100241030ustar00rootroot00000000000000use crate::{ requests::DisconnectArguments, transport::{Payload, Request, Response, Transport}, types::*, Error, Result, }; use helix_core::syntax::DebuggerQuirks; use serde_json::Value; use 
anyhow::anyhow; use std::{ collections::HashMap, future::Future, net::{IpAddr, Ipv4Addr, SocketAddr}, path::PathBuf, process::Stdio, sync::atomic::{AtomicU64, Ordering}, }; use tokio::{ io::{AsyncBufRead, AsyncWrite, BufReader, BufWriter}, net::TcpStream, process::{Child, Command}, sync::mpsc::{channel, unbounded_channel, UnboundedReceiver, UnboundedSender}, time, }; #[derive(Debug)] pub struct Client { id: usize, _process: Option, server_tx: UnboundedSender, request_counter: AtomicU64, connection_type: Option, starting_request_args: Option, pub caps: Option, // thread_id -> frames pub stack_frames: HashMap>, pub thread_states: ThreadStates, pub thread_id: Option, /// Currently active frame for the current thread. pub active_frame: Option, pub quirks: DebuggerQuirks, } #[derive(Clone, Copy, Debug)] pub enum ConnectionType { Launch, Attach, } impl Client { // Spawn a process and communicate with it by either TCP or stdio pub async fn process( transport: &str, command: &str, args: Vec<&str>, port_arg: Option<&str>, id: usize, ) -> Result<(Self, UnboundedReceiver)> { if command.is_empty() { return Result::Err(Error::Other(anyhow!("Command not provided"))); } match (transport, port_arg) { ("tcp", Some(port_arg)) => Self::tcp_process(command, args, port_arg, id).await, ("stdio", _) => Self::stdio(command, args, id), _ => Result::Err(Error::Other(anyhow!("Incorrect transport {}", transport))), } } pub fn streams( rx: Box, tx: Box, err: Option>, id: usize, process: Option, ) -> Result<(Self, UnboundedReceiver)> { let (server_rx, server_tx) = Transport::start(rx, tx, err, id); let (client_tx, client_rx) = unbounded_channel(); let client = Self { id, _process: process, server_tx, request_counter: AtomicU64::new(0), caps: None, connection_type: None, starting_request_args: None, stack_frames: HashMap::new(), thread_states: HashMap::new(), thread_id: None, active_frame: None, quirks: DebuggerQuirks::default(), }; tokio::spawn(Self::recv(server_rx, client_tx)); Ok((client, 
client_rx)) } pub async fn tcp( addr: std::net::SocketAddr, id: usize, ) -> Result<(Self, UnboundedReceiver)> { let stream = TcpStream::connect(addr).await?; let (rx, tx) = stream.into_split(); Self::streams(Box::new(BufReader::new(rx)), Box::new(tx), None, id, None) } pub fn stdio( cmd: &str, args: Vec<&str>, id: usize, ) -> Result<(Self, UnboundedReceiver)> { // Resolve path to the binary let cmd = helix_stdx::env::which(cmd)?; let process = Command::new(cmd) .args(args) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) // make sure the process is reaped on drop .kill_on_drop(true) .spawn(); let mut process = process?; // TODO: do we need bufreader/writer here? or do we use async wrappers on unblock? let writer = BufWriter::new(process.stdin.take().expect("Failed to open stdin")); let reader = BufReader::new(process.stdout.take().expect("Failed to open stdout")); let stderr = BufReader::new(process.stderr.take().expect("Failed to open stderr")); Self::streams( Box::new(reader), Box::new(writer), Some(Box::new(stderr)), id, Some(process), ) } async fn get_port() -> Option { Some( tokio::net::TcpListener::bind(SocketAddr::new( IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 0, )) .await .ok()? .local_addr() .ok()? 
.port(), ) } pub fn starting_request_args(&self) -> Option<&Value> { self.starting_request_args.as_ref() } pub async fn tcp_process( cmd: &str, args: Vec<&str>, port_format: &str, id: usize, ) -> Result<(Self, UnboundedReceiver)> { let port = Self::get_port().await.unwrap(); let process = Command::new(cmd) .args(args) .args(port_format.replace("{}", &port.to_string()).split(' ')) // silence messages .stdin(Stdio::null()) .stdout(Stdio::null()) .stderr(Stdio::null()) // Do not kill debug adapter when leaving, it should exit automatically .spawn()?; // Wait for adapter to become ready for connection time::sleep(time::Duration::from_millis(500)).await; let stream = TcpStream::connect(SocketAddr::new( IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), port, )) .await?; let (rx, tx) = stream.into_split(); Self::streams( Box::new(BufReader::new(rx)), Box::new(tx), None, id, Some(process), ) } async fn recv(mut server_rx: UnboundedReceiver, client_tx: UnboundedSender) { while let Some(msg) = server_rx.recv().await { match msg { Payload::Event(ev) => { client_tx.send(Payload::Event(ev)).expect("Failed to send"); } Payload::Response(_) => unreachable!(), Payload::Request(req) => { client_tx .send(Payload::Request(req)) .expect("Failed to send"); } } } } pub fn id(&self) -> usize { self.id } pub fn connection_type(&self) -> Option { self.connection_type } fn next_request_id(&self) -> u64 { self.request_counter.fetch_add(1, Ordering::Relaxed) } // Internal, called by specific DAP commands when resuming pub fn resume_application(&mut self) { if let Some(thread_id) = self.thread_id { self.thread_states.insert(thread_id, "running".to_string()); self.stack_frames.remove(&thread_id); } self.active_frame = None; self.thread_id = None; } /// Execute a RPC request on the debugger. 
pub fn call( &self, arguments: R::Arguments, ) -> impl Future> where R::Arguments: serde::Serialize, { let server_tx = self.server_tx.clone(); let id = self.next_request_id(); async move { use std::time::Duration; use tokio::time::timeout; let arguments = Some(serde_json::to_value(arguments)?); let (callback_tx, mut callback_rx) = channel(1); let req = Request { back_ch: Some(callback_tx), seq: id, command: R::COMMAND.to_string(), arguments, }; server_tx .send(Payload::Request(req)) .map_err(|e| Error::Other(e.into()))?; // TODO: specifiable timeout, delay other calls until initialize success timeout(Duration::from_secs(20), callback_rx.recv()) .await .map_err(|_| Error::Timeout(id))? // return Timeout .ok_or(Error::StreamClosed)? .map(|response| response.body.unwrap_or_default()) // TODO: check response.success } } pub async fn request(&self, params: R::Arguments) -> Result where R::Arguments: serde::Serialize, R::Result: core::fmt::Debug, // TODO: temporary { // a future that resolves into the response let json = self.call::(params).await?; let response = serde_json::from_value(json)?; Ok(response) } pub fn reply( &self, request_seq: u64, command: &str, result: core::result::Result, ) -> impl Future> { let server_tx = self.server_tx.clone(); let command = command.to_string(); async move { let response = match result { Ok(result) => Response { request_seq, command, success: true, message: None, body: Some(result), }, Err(error) => Response { request_seq, command, success: false, message: Some(error.to_string()), body: None, }, }; server_tx .send(Payload::Response(response)) .map_err(|e| Error::Other(e.into()))?; Ok(()) } } pub fn capabilities(&self) -> &DebuggerCapabilities { self.caps.as_ref().expect("debugger not yet initialized!") } pub async fn initialize(&mut self, adapter_id: String) -> Result<()> { let args = requests::InitializeArguments { client_id: Some("hx".to_owned()), client_name: Some("helix".to_owned()), adapter_id, locale: Some("en-us".to_owned()), 
lines_start_at_one: Some(true), columns_start_at_one: Some(true), path_format: Some("path".to_owned()), supports_variable_type: Some(true), supports_variable_paging: Some(false), supports_run_in_terminal_request: Some(true), supports_memory_references: Some(false), supports_progress_reporting: Some(false), supports_invalidated_event: Some(false), }; let response = self.request::(args).await?; self.caps = Some(response); Ok(()) } pub fn disconnect( &mut self, args: Option, ) -> impl Future> { self.connection_type = None; self.call::(args) } pub fn launch(&mut self, args: serde_json::Value) -> impl Future> { self.connection_type = Some(ConnectionType::Launch); self.starting_request_args = Some(args.clone()); self.call::(args) } pub fn attach(&mut self, args: serde_json::Value) -> impl Future> { self.connection_type = Some(ConnectionType::Attach); self.starting_request_args = Some(args.clone()); self.call::(args) } pub fn restart(&self) -> impl Future> { let args = if let Some(args) = &self.starting_request_args { args.clone() } else { Value::Null }; self.call::(args) } pub async fn set_breakpoints( &self, file: PathBuf, breakpoints: Vec, ) -> Result>> { let args = requests::SetBreakpointsArguments { source: Source { path: Some(file), name: None, source_reference: None, presentation_hint: None, origin: None, sources: None, adapter_data: None, checksums: None, }, breakpoints: Some(breakpoints), source_modified: Some(false), }; let response = self.request::(args).await?; Ok(response.breakpoints) } pub async fn configuration_done(&self) -> Result<()> { self.request::(()).await } pub fn continue_thread(&self, thread_id: ThreadId) -> impl Future> { let args = requests::ContinueArguments { thread_id }; self.call::(args) } pub async fn stack_trace( &self, thread_id: ThreadId, ) -> Result<(Vec, Option)> { let args = requests::StackTraceArguments { thread_id, start_frame: None, levels: None, format: None, }; let response = self.request::(args).await?; 
Ok((response.stack_frames, response.total_frames)) } pub fn threads(&self) -> impl Future> { self.call::(()) } pub async fn scopes(&self, frame_id: usize) -> Result> { let args = requests::ScopesArguments { frame_id }; let response = self.request::(args).await?; Ok(response.scopes) } pub async fn variables(&self, variables_reference: usize) -> Result> { let args = requests::VariablesArguments { variables_reference, filter: None, start: None, count: None, format: None, }; let response = self.request::(args).await?; Ok(response.variables) } pub fn step_in(&self, thread_id: ThreadId) -> impl Future> { let args = requests::StepInArguments { thread_id, target_id: None, granularity: None, }; self.call::(args) } pub fn step_out(&self, thread_id: ThreadId) -> impl Future> { let args = requests::StepOutArguments { thread_id, granularity: None, }; self.call::(args) } pub fn next(&self, thread_id: ThreadId) -> impl Future> { let args = requests::NextArguments { thread_id, granularity: None, }; self.call::(args) } pub fn pause(&self, thread_id: ThreadId) -> impl Future> { let args = requests::PauseArguments { thread_id }; self.call::(args) } pub async fn eval( &self, expression: String, frame_id: Option, ) -> Result { let args = requests::EvaluateArguments { expression, frame_id, context: None, format: None, }; self.request::(args).await } pub fn set_exception_breakpoints( &self, filters: Vec, ) -> impl Future> { let args = requests::SetExceptionBreakpointsArguments { filters }; self.call::(args) } pub fn current_stack_frame(&self) -> Option<&StackFrame> { self.stack_frames .get(&self.thread_id?)? .get(self.active_frame?) 
} } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-dap/src/lib.rs000066400000000000000000000070231500614314100233650ustar00rootroot00000000000000mod client; mod transport; mod types; pub use client::{Client, ConnectionType}; pub use transport::{Payload, Response, Transport}; pub use types::*; use serde::de::DeserializeOwned; use thiserror::Error; #[derive(Error, Debug)] pub enum Error { #[error("failed to parse: {0}")] Parse(#[from] serde_json::Error), #[error("IO Error: {0}")] IO(#[from] std::io::Error), #[error("request {0} timed out")] Timeout(u64), #[error("server closed the stream")] StreamClosed, #[error("Unhandled")] Unhandled, #[error(transparent)] ExecutableNotFound(#[from] helix_stdx::env::ExecutableNotFoundError), #[error(transparent)] Other(#[from] anyhow::Error), } pub type Result = core::result::Result; #[derive(Debug)] pub enum Request { RunInTerminal(::Arguments), } impl Request { pub fn parse(command: &str, arguments: Option) -> Result { use crate::types::Request as _; let arguments = arguments.unwrap_or_default(); let request = match command { requests::RunInTerminal::COMMAND => Self::RunInTerminal(parse_value(arguments)?), _ => return Err(Error::Unhandled), }; Ok(request) } } #[derive(Debug)] pub enum Event { Initialized(::Body), Stopped(::Body), Continued(::Body), Exited(::Body), Terminated(::Body), Thread(::Body), Output(::Body), Breakpoint(::Body), Module(::Body), LoadedSource(::Body), Process(::Body), Capabilities(::Body), // ProgressStart(), // ProgressUpdate(), // ProgressEnd(), // Invalidated(), Memory(::Body), } impl Event { pub fn parse(event: &str, body: Option) -> Result { use crate::events::Event as _; let body = body.unwrap_or_default(); let event = match event { events::Initialized::EVENT => Self::Initialized(parse_value(body)?), events::Stopped::EVENT => Self::Stopped(parse_value(body)?), events::Continued::EVENT => Self::Continued(parse_value(body)?), events::Exited::EVENT => Self::Exited(parse_value(body)?), 
events::Terminated::EVENT => Self::Terminated(parse_value(body)?), events::Thread::EVENT => Self::Thread(parse_value(body)?), events::Output::EVENT => Self::Output(parse_value(body)?), events::Breakpoint::EVENT => Self::Breakpoint(parse_value(body)?), events::Module::EVENT => Self::Module(parse_value(body)?), events::LoadedSource::EVENT => Self::LoadedSource(parse_value(body)?), events::Process::EVENT => Self::Process(parse_value(body)?), events::Capabilities::EVENT => Self::Capabilities(parse_value(body)?), events::Memory::EVENT => Self::Memory(parse_value(body)?), _ => return Err(Error::Unhandled), }; Ok(event) } } fn parse_value(value: serde_json::Value) -> Result where T: DeserializeOwned, { serde_json::from_value(value).map_err(|err| err.into()) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-dap/src/transport.rs000066400000000000000000000230501500614314100246510ustar00rootroot00000000000000use crate::{Error, Result}; use anyhow::Context; use log::{error, info, warn}; use serde::{Deserialize, Serialize}; use serde_json::Value; use std::collections::HashMap; use std::sync::Arc; use tokio::{ io::{AsyncBufRead, AsyncBufReadExt, AsyncReadExt, AsyncWrite, AsyncWriteExt}, sync::{ mpsc::{unbounded_channel, Sender, UnboundedReceiver, UnboundedSender}, Mutex, }, }; #[derive(Debug, Clone, Deserialize, Serialize)] pub struct Request { #[serde(skip)] pub back_ch: Option>>, pub seq: u64, pub command: String, pub arguments: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] pub struct Response { // seq is omitted as unused and is not sent by some implementations pub request_seq: u64, pub success: bool, pub command: String, pub message: Option, pub body: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] pub struct Event { pub event: String, pub body: Option, } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(tag = "type", rename_all = "camelCase")] pub enum Payload { // type = "event" Event(Event), // type = 
"response" Response(Response), // type = "request" Request(Request), } #[derive(Debug)] pub struct Transport { #[allow(unused)] id: usize, pending_requests: Mutex>>>, } impl Transport { pub fn start( server_stdout: Box, server_stdin: Box, server_stderr: Option>, id: usize, ) -> (UnboundedReceiver, UnboundedSender) { let (client_tx, rx) = unbounded_channel(); let (tx, client_rx) = unbounded_channel(); let transport = Self { id, pending_requests: Mutex::new(HashMap::default()), }; let transport = Arc::new(transport); tokio::spawn(Self::recv(transport.clone(), server_stdout, client_tx)); tokio::spawn(Self::send(transport, server_stdin, client_rx)); if let Some(stderr) = server_stderr { tokio::spawn(Self::err(stderr)); } (rx, tx) } async fn recv_server_message( reader: &mut Box, buffer: &mut String, ) -> Result { let mut content_length = None; loop { buffer.truncate(0); if reader.read_line(buffer).await? == 0 { return Err(Error::StreamClosed); }; if buffer == "\r\n" { // look for an empty CRLF line break; } let header = buffer.trim(); let parts = header.split_once(": "); match parts { Some(("Content-Length", value)) => { content_length = Some(value.parse().context("invalid content length")?); } Some((_, _)) => {} None => { // Workaround: Some non-conformant language servers will output logging and other garbage // into the same stream as JSON-RPC messages. This can also happen from shell scripts that spawn // the server. Skip such lines and log a warning. // warn!("Failed to parse header: {:?}", header); } } } let content_length = content_length.context("missing content length")?; //TODO: reuse vector let mut content = vec![0; content_length]; reader.read_exact(&mut content).await?; let msg = std::str::from_utf8(&content).context("invalid utf8 from server")?; info!("<- DAP {}", msg); // try parsing as output (server response) or call (server request) let output: serde_json::Result = serde_json::from_str(msg); Ok(output?) 
} async fn recv_server_error( err: &mut (impl AsyncBufRead + Unpin + Send), buffer: &mut String, ) -> Result<()> { buffer.truncate(0); if err.read_line(buffer).await? == 0 { return Err(Error::StreamClosed); }; error!("err <- {}", buffer); Ok(()) } async fn send_payload_to_server( &self, server_stdin: &mut Box, mut payload: Payload, ) -> Result<()> { if let Payload::Request(request) = &mut payload { if let Some(back) = request.back_ch.take() { self.pending_requests.lock().await.insert(request.seq, back); } } let json = serde_json::to_string(&payload)?; self.send_string_to_server(server_stdin, json).await } async fn send_string_to_server( &self, server_stdin: &mut Box, request: String, ) -> Result<()> { info!("-> DAP {}", request); // send the headers server_stdin .write_all(format!("Content-Length: {}\r\n\r\n", request.len()).as_bytes()) .await?; // send the body server_stdin.write_all(request.as_bytes()).await?; server_stdin.flush().await?; Ok(()) } fn process_response(res: Response) -> Result { if res.success { info!("<- DAP success in response to {}", res.request_seq); Ok(res) } else { error!( "<- DAP error {:?} ({:?}) for command #{} {}", res.message, res.body, res.request_seq, res.command ); Err(Error::Other(anyhow::format_err!("{:?}", res.body))) } } async fn process_server_message( &self, client_tx: &UnboundedSender, msg: Payload, ) -> Result<()> { match msg { Payload::Response(res) => { let request_seq = res.request_seq; let tx = self.pending_requests.lock().await.remove(&request_seq); match tx { Some(tx) => match tx.send(Self::process_response(res)).await { Ok(_) => (), Err(_) => error!( "Tried sending response into a closed channel (id={:?}), original request likely timed out", request_seq ), } None => { warn!("Response to nonexistent request #{}", res.request_seq); client_tx.send(Payload::Response(res)).expect("Failed to send"); } } Ok(()) } Payload::Request(Request { ref command, ref seq, .. 
}) => { info!("<- DAP request {} #{}", command, seq); client_tx.send(msg).expect("Failed to send"); Ok(()) } Payload::Event(ref event) => { info!("<- DAP event {:?}", event); client_tx.send(msg).expect("Failed to send"); Ok(()) } } } async fn recv( transport: Arc, mut server_stdout: Box, client_tx: UnboundedSender, ) { let mut recv_buffer = String::new(); loop { match Self::recv_server_message(&mut server_stdout, &mut recv_buffer).await { Ok(msg) => match transport.process_server_message(&client_tx, msg).await { Ok(_) => (), Err(err) => { error!("err: <- {err:?}"); break; } }, Err(err) => { if !matches!(err, Error::StreamClosed) { error!("Exiting after unexpected error: {err:?}"); } // Close any outstanding requests. for (id, tx) in transport.pending_requests.lock().await.drain() { match tx.send(Err(Error::StreamClosed)).await { Ok(_) => (), Err(_) => { error!("Could not close request on a closed channel (id={id})"); } } } } } } } async fn send_inner( transport: Arc, mut server_stdin: Box, mut client_rx: UnboundedReceiver, ) -> Result<()> { while let Some(payload) = client_rx.recv().await { transport .send_payload_to_server(&mut server_stdin, payload) .await?; } Ok(()) } async fn send( transport: Arc, server_stdin: Box, client_rx: UnboundedReceiver, ) { if let Err(err) = Self::send_inner(transport, server_stdin, client_rx).await { error!("err: <- {:?}", err); } } async fn err(mut server_stderr: Box) { let mut recv_buffer = String::new(); loop { match Self::recv_server_error(&mut server_stderr, &mut recv_buffer).await { Ok(_) => {} Err(err) => { error!("err: <- {:?}", err); break; } } } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-dap/src/types.rs000066400000000000000000001030161500614314100237620ustar00rootroot00000000000000use serde::{Deserialize, Deserializer, Serialize}; use serde_json::Value; use std::collections::HashMap; use std::path::PathBuf; #[derive( Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, )] 
pub struct ThreadId(isize); impl std::fmt::Display for ThreadId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.0.fmt(f) } } pub type ThreadStates = HashMap; pub trait Request { type Arguments: serde::de::DeserializeOwned + serde::Serialize; type Result: serde::de::DeserializeOwned + serde::Serialize; const COMMAND: &'static str; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ColumnDescriptor { pub attribute_name: String, pub label: String, #[serde(skip_serializing_if = "Option::is_none")] pub format: Option, #[serde(rename = "type", skip_serializing_if = "Option::is_none")] pub ty: Option, #[serde(skip_serializing_if = "Option::is_none")] pub width: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ExceptionBreakpointsFilter { pub filter: String, pub label: String, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, #[serde(skip_serializing_if = "Option::is_none")] pub default: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_condition: Option, #[serde(skip_serializing_if = "Option::is_none")] pub condition_description: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DebuggerCapabilities { #[serde(skip_serializing_if = "Option::is_none")] pub supports_configuration_done_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_function_breakpoints: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_conditional_breakpoints: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_hit_conditional_breakpoints: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_evaluate_for_hovers: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_step_back: Option, #[serde(skip_serializing_if = "Option::is_none")] 
pub supports_set_variable: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_restart_frame: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_goto_targets_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_step_in_targets_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_completions_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_modules_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_restart_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_exception_options: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_value_formatting_options: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_exception_info_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub support_terminate_debuggee: Option, #[serde(skip_serializing_if = "Option::is_none")] pub support_suspend_debuggee: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_delayed_stack_trace_loading: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_loaded_sources_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_log_points: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_terminate_threads_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_set_expression: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_terminate_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_data_breakpoints: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_read_memory_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_write_memory_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_disassemble_request: Option, #[serde(skip_serializing_if = 
"Option::is_none")] pub supports_cancel_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_breakpoint_locations_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_clipboard_context: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_stepping_granularity: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_instruction_breakpoints: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_exception_filter_options: Option, #[serde(skip_serializing_if = "Option::is_none")] pub exception_breakpoint_filters: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub completion_trigger_characters: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub additional_module_columns: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub supported_checksum_algorithms: Option>, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct Checksum { pub algorithm: String, pub checksum: String, } #[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct Source { #[serde(skip_serializing_if = "Option::is_none")] pub name: Option, #[serde(skip_serializing_if = "Option::is_none")] pub path: Option, #[serde(skip_serializing_if = "Option::is_none")] pub source_reference: Option, #[serde(skip_serializing_if = "Option::is_none")] pub presentation_hint: Option, #[serde(skip_serializing_if = "Option::is_none")] pub origin: Option, #[serde(skip_serializing_if = "Option::is_none")] pub sources: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub adapter_data: Option, #[serde(skip_serializing_if = "Option::is_none")] pub checksums: Option>, } #[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SourceBreakpoint { pub line: usize, #[serde(skip_serializing_if = "Option::is_none")] pub 
column: Option, #[serde(skip_serializing_if = "Option::is_none")] pub condition: Option, #[serde(skip_serializing_if = "Option::is_none")] pub hit_condition: Option, #[serde(skip_serializing_if = "Option::is_none")] pub log_message: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct Breakpoint { #[serde(skip_serializing_if = "Option::is_none")] pub id: Option, pub verified: bool, #[serde(skip_serializing_if = "Option::is_none")] pub message: Option, #[serde(skip_serializing_if = "Option::is_none")] pub source: Option, #[serde(skip_serializing_if = "Option::is_none")] pub line: Option, #[serde(skip_serializing_if = "Option::is_none")] pub column: Option, #[serde(skip_serializing_if = "Option::is_none")] pub end_line: Option, #[serde(skip_serializing_if = "Option::is_none")] pub end_column: Option, #[serde(skip_serializing_if = "Option::is_none")] pub instruction_reference: Option, #[serde(skip_serializing_if = "Option::is_none")] pub offset: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct StackFrameFormat { #[serde(skip_serializing_if = "Option::is_none")] pub parameters: Option, #[serde(skip_serializing_if = "Option::is_none")] pub parameter_types: Option, #[serde(skip_serializing_if = "Option::is_none")] pub parameter_names: Option, #[serde(skip_serializing_if = "Option::is_none")] pub parameter_values: Option, #[serde(skip_serializing_if = "Option::is_none")] pub line: Option, #[serde(skip_serializing_if = "Option::is_none")] pub module: Option, #[serde(skip_serializing_if = "Option::is_none")] pub include_all: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct StackFrame { pub id: usize, pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub source: Option, pub line: usize, pub column: usize, #[serde(skip_serializing_if = "Option::is_none")] 
pub end_line: Option, #[serde(skip_serializing_if = "Option::is_none")] pub end_column: Option, #[serde(skip_serializing_if = "Option::is_none")] pub can_restart: Option, #[serde(skip_serializing_if = "Option::is_none")] pub instruction_pointer_reference: Option, #[serde(skip_serializing_if = "Option::is_none")] pub module_id: Option, #[serde(skip_serializing_if = "Option::is_none")] pub presentation_hint: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct Thread { pub id: ThreadId, pub name: String, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct Scope { pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub presentation_hint: Option, pub variables_reference: usize, #[serde(skip_serializing_if = "Option::is_none")] pub named_variables: Option, #[serde(skip_serializing_if = "Option::is_none")] pub indexed_variables: Option, pub expensive: bool, #[serde(skip_serializing_if = "Option::is_none")] pub source: Option, #[serde(skip_serializing_if = "Option::is_none")] pub line: Option, #[serde(skip_serializing_if = "Option::is_none")] pub column: Option, #[serde(skip_serializing_if = "Option::is_none")] pub end_line: Option, #[serde(skip_serializing_if = "Option::is_none")] pub end_column: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ValueFormat { #[serde(skip_serializing_if = "Option::is_none")] pub hex: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct VariablePresentationHint { #[serde(skip_serializing_if = "Option::is_none")] pub kind: Option, #[serde(skip_serializing_if = "Option::is_none")] pub attributes: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub visibility: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = 
"camelCase")] pub struct Variable { pub name: String, pub value: String, #[serde(rename = "type", skip_serializing_if = "Option::is_none")] pub ty: Option, #[serde(skip_serializing_if = "Option::is_none")] pub presentation_hint: Option, #[serde(skip_serializing_if = "Option::is_none")] pub evaluate_name: Option, pub variables_reference: usize, #[serde(skip_serializing_if = "Option::is_none")] pub named_variables: Option, #[serde(skip_serializing_if = "Option::is_none")] pub indexed_variables: Option, #[serde(skip_serializing_if = "Option::is_none")] pub memory_reference: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct Module { #[serde(deserialize_with = "from_number")] pub id: String, pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub path: Option, #[serde(skip_serializing_if = "Option::is_none")] pub is_optimized: Option, #[serde(skip_serializing_if = "Option::is_none")] pub is_user_code: Option, #[serde(skip_serializing_if = "Option::is_none")] pub version: Option, #[serde(skip_serializing_if = "Option::is_none")] pub symbol_status: Option, #[serde(skip_serializing_if = "Option::is_none")] pub symbol_file_path: Option, #[serde(skip_serializing_if = "Option::is_none")] pub date_time_stamp: Option, #[serde(skip_serializing_if = "Option::is_none")] pub address_range: Option, } fn from_number<'de, D>(deserializer: D) -> Result where D: Deserializer<'de>, { #[derive(Deserialize)] #[serde(untagged)] enum NumberOrString { Number(i64), String(String), } match NumberOrString::deserialize(deserializer)? 
{ NumberOrString::Number(n) => Ok(n.to_string()), NumberOrString::String(s) => Ok(s), } } pub mod requests { use super::*; #[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InitializeArguments { #[serde(rename = "clientID", skip_serializing_if = "Option::is_none")] pub client_id: Option, #[serde(skip_serializing_if = "Option::is_none")] pub client_name: Option, #[serde(rename = "adapterID")] pub adapter_id: String, #[serde(skip_serializing_if = "Option::is_none")] pub locale: Option, #[serde(rename = "linesStartAt1", skip_serializing_if = "Option::is_none")] pub lines_start_at_one: Option, #[serde(rename = "columnsStartAt1", skip_serializing_if = "Option::is_none")] pub columns_start_at_one: Option, #[serde(skip_serializing_if = "Option::is_none")] pub path_format: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_variable_type: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_variable_paging: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_run_in_terminal_request: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_memory_references: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_progress_reporting: Option, #[serde(skip_serializing_if = "Option::is_none")] pub supports_invalidated_event: Option, } #[derive(Debug)] pub enum Initialize {} impl Request for Initialize { type Arguments = InitializeArguments; type Result = DebuggerCapabilities; const COMMAND: &'static str = "initialize"; } #[derive(Debug)] pub enum Launch {} impl Request for Launch { type Arguments = Value; type Result = (); const COMMAND: &'static str = "launch"; } #[derive(Debug)] pub enum Attach {} impl Request for Attach { type Arguments = Value; type Result = (); const COMMAND: &'static str = "attach"; } #[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct 
DisconnectArguments { #[serde(skip_serializing_if = "Option::is_none")] pub restart: Option, #[serde(skip_serializing_if = "Option::is_none")] pub terminate_debuggee: Option, #[serde(skip_serializing_if = "Option::is_none")] pub suspend_debuggee: Option, } #[derive(Debug)] pub enum Restart {} impl Request for Restart { type Arguments = Value; type Result = (); const COMMAND: &'static str = "restart"; } #[derive(Debug)] pub enum Disconnect {} impl Request for Disconnect { type Arguments = Option; type Result = (); const COMMAND: &'static str = "disconnect"; } #[derive(Debug)] pub enum ConfigurationDone {} impl Request for ConfigurationDone { type Arguments = (); type Result = (); const COMMAND: &'static str = "configurationDone"; } #[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SetBreakpointsArguments { pub source: Source, #[serde(skip_serializing_if = "Option::is_none")] pub breakpoints: Option>, // lines is deprecated #[serde(skip_serializing_if = "Option::is_none")] pub source_modified: Option, } #[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SetBreakpointsResponse { #[serde(skip_serializing_if = "Option::is_none")] pub breakpoints: Option>, } #[derive(Debug)] pub enum SetBreakpoints {} impl Request for SetBreakpoints { type Arguments = SetBreakpointsArguments; type Result = SetBreakpointsResponse; const COMMAND: &'static str = "setBreakpoints"; } #[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ContinueArguments { pub thread_id: ThreadId, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ContinueResponse { #[serde(skip_serializing_if = "Option::is_none")] pub all_threads_continued: Option, } #[derive(Debug)] pub enum Continue {} impl Request for Continue { type Arguments = ContinueArguments; 
type Result = ContinueResponse; const COMMAND: &'static str = "continue"; } #[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct StackTraceArguments { pub thread_id: ThreadId, #[serde(skip_serializing_if = "Option::is_none")] pub start_frame: Option, #[serde(skip_serializing_if = "Option::is_none")] pub levels: Option, #[serde(skip_serializing_if = "Option::is_none")] pub format: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct StackTraceResponse { #[serde(skip_serializing_if = "Option::is_none")] pub total_frames: Option, pub stack_frames: Vec, } #[derive(Debug)] pub enum StackTrace {} impl Request for StackTrace { type Arguments = StackTraceArguments; type Result = StackTraceResponse; const COMMAND: &'static str = "stackTrace"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ThreadsResponse { pub threads: Vec, } #[derive(Debug)] pub enum Threads {} impl Request for Threads { type Arguments = (); type Result = ThreadsResponse; const COMMAND: &'static str = "threads"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ScopesArguments { pub frame_id: usize, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ScopesResponse { pub scopes: Vec, } #[derive(Debug)] pub enum Scopes {} impl Request for Scopes { type Arguments = ScopesArguments; type Result = ScopesResponse; const COMMAND: &'static str = "scopes"; } #[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct VariablesArguments { pub variables_reference: usize, #[serde(skip_serializing_if = "Option::is_none")] pub filter: Option, #[serde(skip_serializing_if = "Option::is_none")] pub start: Option, #[serde(skip_serializing_if = 
"Option::is_none")] pub count: Option, #[serde(skip_serializing_if = "Option::is_none")] pub format: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct VariablesResponse { pub variables: Vec, } #[derive(Debug)] pub enum Variables {} impl Request for Variables { type Arguments = VariablesArguments; type Result = VariablesResponse; const COMMAND: &'static str = "variables"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct StepInArguments { pub thread_id: ThreadId, #[serde(skip_serializing_if = "Option::is_none")] pub target_id: Option, #[serde(skip_serializing_if = "Option::is_none")] pub granularity: Option, } #[derive(Debug)] pub enum StepIn {} impl Request for StepIn { type Arguments = StepInArguments; type Result = (); const COMMAND: &'static str = "stepIn"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct StepOutArguments { pub thread_id: ThreadId, #[serde(skip_serializing_if = "Option::is_none")] pub granularity: Option, } #[derive(Debug)] pub enum StepOut {} impl Request for StepOut { type Arguments = StepOutArguments; type Result = (); const COMMAND: &'static str = "stepOut"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct NextArguments { pub thread_id: ThreadId, #[serde(skip_serializing_if = "Option::is_none")] pub granularity: Option, } #[derive(Debug)] pub enum Next {} impl Request for Next { type Arguments = NextArguments; type Result = (); const COMMAND: &'static str = "next"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct PauseArguments { pub thread_id: ThreadId, } #[derive(Debug)] pub enum Pause {} impl Request for Pause { type Arguments = PauseArguments; type Result = (); const COMMAND: &'static str = "pause"; } #[derive(Debug, PartialEq, 
Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct EvaluateArguments { pub expression: String, #[serde(skip_serializing_if = "Option::is_none")] pub frame_id: Option, #[serde(skip_serializing_if = "Option::is_none")] pub context: Option, #[serde(skip_serializing_if = "Option::is_none")] pub format: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct EvaluateResponse { pub result: String, #[serde(rename = "type", skip_serializing_if = "Option::is_none")] pub ty: Option, #[serde(skip_serializing_if = "Option::is_none")] pub presentation_hint: Option, pub variables_reference: usize, #[serde(skip_serializing_if = "Option::is_none")] pub named_variables: Option, #[serde(skip_serializing_if = "Option::is_none")] pub indexed_variables: Option, #[serde(skip_serializing_if = "Option::is_none")] pub memory_reference: Option, } #[derive(Debug)] pub enum Evaluate {} impl Request for Evaluate { type Arguments = EvaluateArguments; type Result = EvaluateResponse; const COMMAND: &'static str = "evaluate"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SetExceptionBreakpointsArguments { pub filters: Vec, // pub filterOptions: Option>, // needs capability // pub exceptionOptions: Option>, // needs capability } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SetExceptionBreakpointsResponse { #[serde(skip_serializing_if = "Option::is_none")] pub breakpoints: Option>, } #[derive(Debug)] pub enum SetExceptionBreakpoints {} impl Request for SetExceptionBreakpoints { type Arguments = SetExceptionBreakpointsArguments; type Result = SetExceptionBreakpointsResponse; const COMMAND: &'static str = "setExceptionBreakpoints"; } // Reverse Requests #[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct 
RunInTerminalResponse { #[serde(skip_serializing_if = "Option::is_none")] pub process_id: Option, #[serde(skip_serializing_if = "Option::is_none")] pub shell_process_id: Option, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct RunInTerminalArguments { #[serde(skip_serializing_if = "Option::is_none")] pub kind: Option, #[serde(skip_serializing_if = "Option::is_none")] pub title: Option, pub cwd: String, pub args: Vec, #[serde(skip_serializing_if = "Option::is_none")] pub env: Option>>, } #[derive(Debug)] pub enum RunInTerminal {} impl Request for RunInTerminal { type Arguments = RunInTerminalArguments; type Result = RunInTerminalResponse; const COMMAND: &'static str = "runInTerminal"; } } // Events pub mod events { use super::*; pub trait Event { type Body: serde::de::DeserializeOwned + serde::Serialize; const EVENT: &'static str; } #[derive(Debug)] pub enum Initialized {} impl Event for Initialized { type Body = Option; const EVENT: &'static str = "initialized"; } #[derive(Debug)] pub enum Stopped {} impl Event for Stopped { type Body = StoppedBody; const EVENT: &'static str = "stopped"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct StoppedBody { pub reason: String, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, #[serde(skip_serializing_if = "Option::is_none")] pub thread_id: Option, #[serde(skip_serializing_if = "Option::is_none")] pub preserve_focus_hint: Option, #[serde(skip_serializing_if = "Option::is_none")] pub text: Option, #[serde(skip_serializing_if = "Option::is_none")] pub all_threads_stopped: Option, #[serde(skip_serializing_if = "Option::is_none")] pub hit_breakpoint_ids: Option>, } #[derive(Debug)] pub enum Continued {} impl Event for Continued { type Body = ContinuedBody; const EVENT: &'static str = "continued"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = 
"camelCase")] pub struct ContinuedBody { pub thread_id: ThreadId, #[serde(skip_serializing_if = "Option::is_none")] pub all_threads_continued: Option, } #[derive(Debug)] pub enum Exited {} impl Event for Exited { type Body = ExitedBody; const EVENT: &'static str = "exited"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ExitedBody { pub exit_code: usize, } #[derive(Debug)] pub enum Terminated {} impl Event for Terminated { type Body = Option; const EVENT: &'static str = "terminated"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct TerminatedBody { #[serde(skip_serializing_if = "Option::is_none")] pub restart: Option, } #[derive(Debug)] pub enum Thread {} impl Event for Thread { type Body = ThreadBody; const EVENT: &'static str = "thread"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ThreadBody { pub reason: String, pub thread_id: ThreadId, } #[derive(Debug)] pub enum Output {} impl Event for Output { type Body = OutputBody; const EVENT: &'static str = "output"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct OutputBody { pub output: String, #[serde(skip_serializing_if = "Option::is_none")] pub category: Option, #[serde(skip_serializing_if = "Option::is_none")] pub group: Option, #[serde(skip_serializing_if = "Option::is_none")] pub line: Option, #[serde(skip_serializing_if = "Option::is_none")] pub column: Option, #[serde(skip_serializing_if = "Option::is_none")] pub variables_reference: Option, #[serde(skip_serializing_if = "Option::is_none")] pub source: Option, #[serde(skip_serializing_if = "Option::is_none")] pub data: Option, } #[derive(Debug)] pub enum Breakpoint {} impl Event for Breakpoint { type Body = BreakpointBody; const EVENT: &'static str = "breakpoint"; } #[derive(Debug, PartialEq, Eq, Clone, 
Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct BreakpointBody { pub reason: String, pub breakpoint: super::Breakpoint, } #[derive(Debug)] pub enum Module {} impl Event for Module { type Body = ModuleBody; const EVENT: &'static str = "module"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ModuleBody { pub reason: String, pub module: super::Module, } #[derive(Debug)] pub enum LoadedSource {} impl Event for LoadedSource { type Body = LoadedSourceBody; const EVENT: &'static str = "loadedSource"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct LoadedSourceBody { pub reason: String, pub source: super::Source, } #[derive(Debug)] pub enum Process {} impl Event for Process { type Body = ProcessBody; const EVENT: &'static str = "process"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ProcessBody { pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub system_process_id: Option, #[serde(skip_serializing_if = "Option::is_none")] pub is_local_process: Option, #[serde(skip_serializing_if = "Option::is_none")] pub start_method: Option, // TODO: use enum #[serde(skip_serializing_if = "Option::is_none")] pub pointer_size: Option, } #[derive(Debug)] pub enum Capabilities {} impl Event for Capabilities { type Body = CapabilitiesBody; const EVENT: &'static str = "capabilities"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CapabilitiesBody { pub capabilities: super::DebuggerCapabilities, } // #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] // #[serde(rename_all = "camelCase")] // pub struct InvalidatedBody { // pub areas: Vec, // pub thread_id: Option, // pub stack_frame_id: Option, // } #[derive(Debug)] pub enum Memory {} impl Event for Memory { type Body = MemoryBody; const 
EVENT: &'static str = "memory"; } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct MemoryBody { pub memory_reference: String, pub offset: usize, pub count: usize, } } #[test] fn test_deserialize_module_id_from_number() { let raw = r#"{"id": 0, "name": "Name"}"#; let module: Module = serde_json::from_str(raw).expect("Error!"); assert_eq!(module.id, "0"); } #[test] fn test_deserialize_module_id_from_string() { let raw = r#"{"id": "0", "name": "Name"}"#; let module: Module = serde_json::from_str(raw).expect("Error!"); assert_eq!(module.id, "0"); } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-event/000077500000000000000000000000001500614314100220355ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-event/Cargo.toml000066400000000000000000000016431500614314100237710ustar00rootroot00000000000000[package] name = "helix-event" version.workspace = true authors.workspace = true edition.workspace = true license.workspace = true rust-version.workspace = true categories.workspace = true repository.workspace = true homepage.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] foldhash.workspace = true hashbrown = "0.15" tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] } # the event registry is essentially read only but must be an rwlock so we can # setup new events on initialization, hardware-lock-elision hugely benefits this case # as it essentially makes the lock entirely free as long as there is no writes parking_lot = { workspace = true, features = ["hardware-lock-elision"] } once_cell = "1.21" anyhow = "1" log = "0.4" futures-executor = "0.3.31" [features] integration_test = [] 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-event/src/000077500000000000000000000000001500614314100226245ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-event/src/cancel.rs000066400000000000000000000211071500614314100244200ustar00rootroot00000000000000use std::borrow::Borrow; use std::future::Future; use std::sync::atomic::AtomicU64; use std::sync::atomic::Ordering::Relaxed; use std::sync::Arc; use tokio::sync::Notify; pub async fn cancelable_future( future: impl Future, cancel: impl Borrow, ) -> Option { tokio::select! { biased; _ = cancel.borrow().canceled() => { None } res = future => { Some(res) } } } #[derive(Default, Debug)] struct Shared { state: AtomicU64, // `Notify` has some features that we don't really need here because it // supports waking single tasks (`notify_one`) and does its own (more // complicated) state tracking, we could reimplement the waiter linked list // with modest effort and reduce memory consumption by one word/8 bytes and // reduce code complexity/number of atomic operations. // // I don't think that's worth the complexity (unsafe code). 
// // if we only cared about async code then we could also only use a notify // (without the generation count), this would be equivalent (or maybe more // correct if we want to allow cloning the TX) but it would be extremly slow // to frequently check for cancelation from sync code notify: Notify, } impl Shared { fn generation(&self) -> u32 { self.state.load(Relaxed) as u32 } fn num_running(&self) -> u32 { (self.state.load(Relaxed) >> 32) as u32 } /// Increments the generation count and sets `num_running` /// to the provided value, this operation is not with /// regard to the generation counter (doesn't use `fetch_add`) /// so the calling code must ensure it cannot execute concurrently /// to maintain correctness (but not safety) fn inc_generation(&self, num_running: u32) -> (u32, u32) { let state = self.state.load(Relaxed); let generation = state as u32; let prev_running = (state >> 32) as u32; // no need to create a new generation if the refcount is zero (fastpath) if prev_running == 0 && num_running == 0 { return (generation, 0); } let new_generation = generation.saturating_add(1); self.state.store( new_generation as u64 | ((num_running as u64) << 32), Relaxed, ); self.notify.notify_waiters(); (new_generation, prev_running) } fn inc_running(&self, generation: u32) { let mut state = self.state.load(Relaxed); loop { let current_generation = state as u32; if current_generation != generation { break; } let off = 1 << 32; let res = self.state.compare_exchange_weak( state, state.saturating_add(off), Relaxed, Relaxed, ); match res { Ok(_) => break, Err(new_state) => state = new_state, } } } fn dec_running(&self, generation: u32) { let mut state = self.state.load(Relaxed); loop { let current_generation = state as u32; if current_generation != generation { break; } let num_running = (state >> 32) as u32; // running can't be zero here, that would mean we miscounted somewhere assert_ne!(num_running, 0); let off = 1 << 32; let res = self .state .compare_exchange_weak(state, 
state - off, Relaxed, Relaxed); match res { Ok(_) => break, Err(new_state) => state = new_state, } } } } // This intentionally doesn't implement `Clone` and requires a mutable reference // for cancelation to avoid races (in inc_generation). /// A task controller allows managing a single subtask enabling the controller /// to cancel the subtask and to check whether it is still running. /// /// For efficiency reasons the controller can be reused/restarted, /// in that case the previous task is automatically canceled. /// /// If the controller is dropped, the subtasks are automatically canceled. #[derive(Default, Debug)] pub struct TaskController { shared: Arc, } impl TaskController { pub fn new() -> Self { TaskController::default() } /// Cancels the active task (handle). /// /// Returns whether any tasks were still running before the cancelation. pub fn cancel(&mut self) -> bool { self.shared.inc_generation(0).1 != 0 } /// Checks whether there are any task handles /// that haven't been dropped (or canceled) yet. pub fn is_running(&self) -> bool { self.shared.num_running() != 0 } /// Starts a new task and cancels the previous task (handles). pub fn restart(&mut self) -> TaskHandle { TaskHandle { generation: self.shared.inc_generation(1).0, shared: self.shared.clone(), } } } impl Drop for TaskController { fn drop(&mut self) { self.cancel(); } } /// A handle that is used to link a task with a task controller. /// /// It can be used to cancel async futures very efficiently but can also be checked for /// cancelation very quickly (single atomic read) in blocking code. /// The handle can be cheaply cloned (reference counted). /// /// The TaskController can check whether a task is "running" by inspecting the /// refcount of the (current) tasks handles. Therefore, if that information /// is important, ensure that the handle is not dropped until the task fully /// completes. 
pub struct TaskHandle { shared: Arc, generation: u32, } impl Clone for TaskHandle { fn clone(&self) -> Self { self.shared.inc_running(self.generation); TaskHandle { shared: self.shared.clone(), generation: self.generation, } } } impl Drop for TaskHandle { fn drop(&mut self) { self.shared.dec_running(self.generation); } } impl TaskHandle { /// Waits until [`TaskController::cancel`] is called for the corresponding /// [`TaskController`]. Immediately returns if `cancel` was already called since pub async fn canceled(&self) { let notified = self.shared.notify.notified(); if !self.is_canceled() { notified.await } } pub fn is_canceled(&self) -> bool { self.generation != self.shared.generation() } } #[cfg(test)] mod tests { use std::future::poll_fn; use futures_executor::block_on; use tokio::task::yield_now; use crate::{cancelable_future, TaskController}; #[test] fn immediate_cancel() { let mut controller = TaskController::new(); let handle = controller.restart(); controller.cancel(); assert!(handle.is_canceled()); controller.restart(); assert!(handle.is_canceled()); let res = block_on(cancelable_future( poll_fn(|_cx| std::task::Poll::Ready(())), handle, )); assert!(res.is_none()); } #[test] fn running_count() { let mut controller = TaskController::new(); let handle = controller.restart(); assert!(controller.is_running()); assert!(!handle.is_canceled()); drop(handle); assert!(!controller.is_running()); assert!(!controller.cancel()); let handle = controller.restart(); assert!(!handle.is_canceled()); assert!(controller.is_running()); let handle2 = handle.clone(); assert!(!handle.is_canceled()); assert!(controller.is_running()); drop(handle2); assert!(!handle.is_canceled()); assert!(controller.is_running()); assert!(controller.cancel()); assert!(handle.is_canceled()); assert!(!controller.is_running()); } #[test] fn no_cancel() { let mut controller = TaskController::new(); let handle = controller.restart(); assert!(!handle.is_canceled()); let res = block_on(cancelable_future( 
poll_fn(|_cx| std::task::Poll::Ready(())), handle, )); assert!(res.is_some()); } #[test] fn delayed_cancel() { let mut controller = TaskController::new(); let handle = controller.restart(); let mut hit = false; let res = block_on(cancelable_future( async { controller.cancel(); hit = true; yield_now().await; }, handle, )); assert!(res.is_none()); assert!(hit); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-event/src/debounce.rs000066400000000000000000000056511500614314100247650ustar00rootroot00000000000000//! Utilities for declaring an async (usually debounced) hook use std::time::Duration; use futures_executor::block_on; use tokio::sync::mpsc::{self, error::TrySendError, Sender}; use tokio::time::Instant; /// Async hooks provide a convenient framework for implementing (debounced) /// async event handlers. Most synchronous event hooks will likely need to /// debounce their events, coordinate multiple different hooks and potentially /// track some state. `AsyncHooks` facilitate these use cases by running as /// a background tokio task that waits for events (usually an enum) to be /// sent through a channel. pub trait AsyncHook: Sync + Send + 'static + Sized { type Event: Sync + Send + 'static; /// Called immediately whenever an event is received, this function can /// consume the event immediately or debounce it. In case of debouncing, /// it can either define a new debounce timeout or continue the current one fn handle_event(&mut self, event: Self::Event, timeout: Option) -> Option; /// Called whenever the debounce timeline is reached fn finish_debounce(&mut self); fn spawn(self) -> mpsc::Sender { // the capacity doesn't matter too much here, unless the cpu is totally overwhelmed // the cap will never be reached since we always immediately drain the channel // so it should only be reached in case of total CPU overload. 
// However, a bounded channel is much more efficient so it's nice to use here let (tx, rx) = mpsc::channel(128); // only spawn worker if we are inside runtime to avoid having to spawn a runtime for unrelated unit tests if tokio::runtime::Handle::try_current().is_ok() { tokio::spawn(run(self, rx)); } tx } } async fn run(mut hook: Hook, mut rx: mpsc::Receiver) { let mut deadline = None; loop { let event = match deadline { Some(deadline_) => { let res = tokio::time::timeout_at(deadline_, rx.recv()).await; match res { Ok(event) => event, Err(_) => { hook.finish_debounce(); deadline = None; continue; } } } None => rx.recv().await, }; let Some(event) = event else { break; }; deadline = hook.handle_event(event, deadline); } } pub fn send_blocking(tx: &Sender, data: T) { // block_on has some overhead and in practice the channel should basically // never be full anyway so first try sending without blocking if let Err(TrySendError::Full(data)) = tx.try_send(data) { // set a timeout so that we just drop a message instead of freezing the editor in the worst case let _ = block_on(tx.send_timeout(data, Duration::from_millis(10))); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-event/src/hook.rs000066400000000000000000000063301500614314100241340ustar00rootroot00000000000000//! rust dynamic dispatch is extremely limited so we have to build our //! own vtable implementation. Otherwise implementing the event system would not be possible. //! A nice bonus of this approach is that we can optimize the vtable a bit more. Normally //! a dyn Trait fat pointer contains two pointers: A pointer to the data itself and a //! pointer to a global (static) vtable entry which itself contains multiple other pointers //! (the various functions of the trait, drop, size and align). That makes dynamic //! dispatch pretty slow (double pointer indirections). However, we only have a single function //! in the hook trait and don't need a drop implementation (event system is global anyway //! 
and never dropped) so we can just store the entire vtable inline. use anyhow::Result; use std::ptr::{self, NonNull}; use crate::Event; /// Opaque handle type that represents an erased type parameter. /// /// If extern types were stable, this could be implemented as `extern { pub type Opaque; }` but /// until then we can use this. /// /// Care should be taken that we don't use a concrete instance of this. It should only be used /// through a reference, so we can maintain something else's lifetime. struct Opaque(()); pub(crate) struct ErasedHook { data: NonNull, call: unsafe fn(NonNull, NonNull, NonNull), } impl ErasedHook { pub(crate) fn new_dynamic Result<()> + 'static + Send + Sync>( hook: H, ) -> ErasedHook { unsafe fn call Result<()> + 'static + Send + Sync>( hook: NonNull, _event: NonNull, result: NonNull, ) { let hook: NonNull = hook.cast(); let result: NonNull> = result.cast(); let hook: &F = hook.as_ref(); let res = hook(); ptr::write(result.as_ptr(), res) } unsafe { ErasedHook { data: NonNull::new_unchecked(Box::into_raw(Box::new(hook)) as *mut Opaque), call: call::, } } } pub(crate) fn new Result<()>>(hook: F) -> ErasedHook { unsafe fn call Result<()>>( hook: NonNull, event: NonNull, result: NonNull, ) { let hook: NonNull = hook.cast(); let mut event: NonNull = event.cast(); let result: NonNull> = result.cast(); let hook: &F = hook.as_ref(); let res = hook(event.as_mut()); ptr::write(result.as_ptr(), res) } unsafe { ErasedHook { data: NonNull::new_unchecked(Box::into_raw(Box::new(hook)) as *mut Opaque), call: call::, } } } pub(crate) unsafe fn call(&self, event: &mut E) -> Result<()> { let mut res = Ok(()); unsafe { (self.call)( self.data, NonNull::from(event).cast(), NonNull::from(&mut res).cast(), ); } res } } unsafe impl Sync for ErasedHook {} unsafe impl Send for ErasedHook {} helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-event/src/lib.rs000066400000000000000000000216711500614314100237470ustar00rootroot00000000000000//! 
`helix-event` contains systems that allow (often async) communication between //! different editor components without strongly coupling them. Specifically //! it allows defining synchronous hooks that run when certain editor events //! occur. //! //! The core of the event system are hook callbacks and the [`Event`] trait. A //! hook is essentially just a closure `Fn(event: &mut impl Event) -> Result<()>` //! that gets called every time an appropriate event is dispatched. The implementation //! details of the [`Event`] trait are considered private. The [`events`] macro is //! provided which automatically declares event types. Similarly the `register_hook` //! macro should be used to (safely) declare event hooks. //! //! Hooks run synchronously which can be advantageous since they can modify the //! current editor state right away (for example to immediately hide the completion //! popup). However, they can not contain their own state without locking since //! they only receive immutable references. For handler that want to track state, do //! expensive background computations or debouncing an [`AsyncHook`] is preferable. //! Async hooks are based around a channels that receive events specific to //! that `AsyncHook` (usually an enum). These events can be sent by synchronous //! hooks. Due to some limitations around tokio channels the [`send_blocking`] //! function exported in this crate should be used instead of the builtin //! `blocking_send`. //! //! In addition to the core event system, this crate contains some message queues //! that allow transfer of data back to the main event loop from async hooks and //! hooks that may not have access to all application data (for example in helix-view). //! This include the ability to control rendering ([`lock_frame`], [`request_redraw`]) and //! display status messages ([`status`]). //! //! Hooks declared in helix-term can furthermore dispatch synchronous jobs to be run on the //! 
main loop (including access to the compositor). Ideally that queue will be moved //! to helix-view in the future if we manage to detach the compositor from its rendering backend. use anyhow::Result; pub use cancel::{cancelable_future, TaskController, TaskHandle}; pub use debounce::{send_blocking, AsyncHook}; pub use redraw::{ lock_frame, redraw_requested, request_redraw, start_frame, RenderLockGuard, RequestRedrawOnDrop, }; pub use registry::Event; mod cancel; mod debounce; mod hook; mod redraw; mod registry; #[doc(hidden)] pub mod runtime; pub mod status; #[cfg(test)] mod test; pub fn register_event() { registry::with_mut(|registry| registry.register_event::()) } /// Registers a hook that will be called when an event of type `E` is dispatched. /// This function should usually not be used directly, use the [`register_hook`] /// macro instead. /// /// /// # Safety /// /// `hook` must be totally generic over all lifetime parameters of `E`. For /// example if `E` was a known type `Foo<'a, 'b>`, then the correct trait bound /// would be `F: for<'a, 'b, 'c> Fn(&'a mut Foo<'b, 'c>)`, but there is no way to /// express that kind of constraint for a generic type with the Rust type system /// as of this writing. pub unsafe fn register_hook_raw( hook: impl Fn(&mut E) -> Result<()> + 'static + Send + Sync, ) { registry::with_mut(|registry| registry.register_hook(hook)) } /// Register a hook solely by event name pub fn register_dynamic_hook( hook: impl Fn() -> Result<()> + 'static + Send + Sync, id: &str, ) -> Result<()> { registry::with_mut(|reg| reg.register_dynamic_hook(hook, id)) } pub fn dispatch(e: impl Event) { registry::with(|registry| registry.dispatch(e)); } /// Macro to declare events /// /// # Examples /// /// ``` no-compile /// events! 
{ /// FileWrite(&Path) /// ViewScrolled{ view: View, new_pos: ViewOffset } /// DocumentChanged<'a> { old_doc: &'a Rope, doc: &'a mut Document, changes: &'a ChangeSet } /// } /// /// fn init() { /// register_event::(); /// register_event::(); /// register_event::(); /// } /// /// fn save(path: &Path, content: &str){ /// std::fs::write(path, content); /// dispatch(FileWrite(path)); /// } /// ``` #[macro_export] macro_rules! events { ($name: ident<$($lt: lifetime),*> { $($data:ident : $data_ty:ty),* } $($rem:tt)*) => { pub struct $name<$($lt),*> { $(pub $data: $data_ty),* } unsafe impl<$($lt),*> $crate::Event for $name<$($lt),*> { const ID: &'static str = stringify!($name); const LIFETIMES: usize = $crate::events!(@sum $(1, $lt),*); type Static = $crate::events!(@replace_lt $name, $('static, $lt),*); } $crate::events!{ $($rem)* } }; ($name: ident { $($data:ident : $data_ty:ty),* } $($rem:tt)*) => { pub struct $name { $(pub $data: $data_ty),* } unsafe impl $crate::Event for $name { const ID: &'static str = stringify!($name); const LIFETIMES: usize = 0; type Static = Self; } $crate::events!{ $($rem)* } }; () => {}; (@replace_lt $name: ident, $($lt1: lifetime, $lt2: lifetime),* ) => {$name<$($lt1),*>}; (@sum $($val: expr, $lt1: lifetime),* ) => {0 $(+ $val)*}; } /// Safely register statically typed event hooks #[macro_export] macro_rules! register_hook { // Safety: this is safe because we fully control the type of the event here and // ensure all lifetime arguments are fully generic and the correct number of lifetime arguments // is present (move |$event:ident: &mut $event_ty: ident<$($lt: lifetime),*>| $body: expr) => { let val = move |$event: &mut $event_ty<$($lt),*>| $body; unsafe { // Lifetimes are a bit of a pain. We want to allow events being // non-static. Lifetimes don't actually exist at runtime so its // fine to essentially transmute the lifetimes as long as we can // prove soundness. The hook must therefore accept any combination // of lifetimes. 
In other words fn(&'_ mut Event<'_, '_>) is ok // but examples like fn(&'_ mut Event<'_, 'static>) or fn<'a>(&'a // mut Event<'a, 'a>) are not. To make this safe we use a macro to // forbid the user from specifying lifetimes manually (all lifetimes // specified are always function generics and passed to the event so // lifetimes can't be used multiple times and using 'static causes a // syntax error). // // There is one soundness hole tough: Type Aliases allow // "accidentally" creating these problems. For example: // // type Event2 = Event<'static>. // type Event2<'a> = Event<'a, a>. // // These cases can be caught by counting the number of lifetimes // parameters at the parameter declaration site and then at the hook // declaration site. By asserting the number of lifetime parameters // are equal we can catch all bad type aliases under one assumption: // There are no unused lifetime parameters. Introducing a static // would reduce the number of arguments of the alias by one in the // above example Event2 has zero lifetime arguments while the original // event has one lifetime argument. Similar logic applies to using // a lifetime argument multiple times. The ASSERT below performs a // a compile time assertion to ensure exactly this property. // // With unused lifetime arguments it is still one way to cause unsound code: // // type Event2<'a, 'b> = Event<'a, 'a>; // // However, this case will always emit a compiler warning/cause CI // failures so a user would have to introduce #[allow(unused)] which // is easily caught in review (and a very theoretical case anyway). // If we want to be pedantic we can simply compile helix with // forbid(unused). All of this is just a safety net to prevent // very theoretical misuse. This won't come up in real code (and is // easily caught in review). 
#[allow(unused)] const ASSERT: () = { if <$event_ty as $crate::Event>::LIFETIMES != 0 + $crate::events!(@sum $(1, $lt),*){ panic!("invalid type alias"); } }; $crate::register_hook_raw::<$crate::events!(@replace_lt $event_ty, $('static, $lt),*)>(val); } }; (move |$event:ident: &mut $event_ty: ident| $body: expr) => { let val = move |$event: &mut $event_ty| $body; unsafe { #[allow(unused)] const ASSERT: () = { if <$event_ty as $crate::Event>::LIFETIMES != 0{ panic!("invalid type alias"); } }; $crate::register_hook_raw::<$event_ty>(val); } }; } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-event/src/redraw.rs000066400000000000000000000040121500614314100244530ustar00rootroot00000000000000//! Signals that control when/if the editor redraws use std::future::Future; use parking_lot::{RwLock, RwLockReadGuard}; use tokio::sync::Notify; use crate::runtime_local; runtime_local! { /// A `Notify` instance that can be used to (asynchronously) request /// the editor to render a new frame. static REDRAW_NOTIFY: Notify = Notify::const_new(); /// A `RwLock` that prevents the next frame from being /// drawn until an exclusive (write) lock can be acquired. /// This allows asynchronous tasks to acquire `non-exclusive` /// locks (read) to prevent the next frame from being drawn /// until a certain computation has finished. static RENDER_LOCK: RwLock<()> = RwLock::new(()); } pub type RenderLockGuard = RwLockReadGuard<'static, ()>; /// Requests that the editor is redrawn. The redraws are debounced (currently to /// 30FPS) so this can be called many times without causing a ton of frames to /// be rendered. pub fn request_redraw() { REDRAW_NOTIFY.notify_one(); } /// Returns a future that will yield once a redraw has been asynchronously /// requested using [`request_redraw`]. pub fn redraw_requested() -> impl Future { REDRAW_NOTIFY.notified() } /// Wait until all locks acquired with [`lock_frame`] have been released. 
/// This function is called before rendering and is intended to allow the frame /// to wait for async computations that should be included in the current frame. pub fn start_frame() { drop(RENDER_LOCK.write()); // exhaust any leftover redraw notifications let notify = REDRAW_NOTIFY.notified(); tokio::pin!(notify); notify.enable(); } /// Acquires the render lock which will prevent the next frame from being drawn /// until the returned guard is dropped. pub fn lock_frame() -> RenderLockGuard { RENDER_LOCK.read() } /// A zero sized type that requests a redraw via [request_redraw] when the type [Drop]s. pub struct RequestRedrawOnDrop; impl Drop for RequestRedrawOnDrop { fn drop(&mut self) { request_redraw(); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-event/src/registry.rs000066400000000000000000000110451500614314100250430ustar00rootroot00000000000000//! A global registry where events are registered and can be //! subscribed to by registering hooks. The registry identifies event //! types using their type name so multiple event with the same type name //! 
may not be registered (will cause a panic to ensure soundness) use std::any::TypeId; use anyhow::{bail, Result}; use hashbrown::hash_map::Entry; use hashbrown::HashMap; use parking_lot::RwLock; use crate::hook::ErasedHook; use crate::runtime_local; pub struct Registry { events: HashMap<&'static str, TypeId, foldhash::fast::FixedState>, handlers: HashMap<&'static str, Vec, foldhash::fast::FixedState>, } impl Registry { pub fn register_event(&mut self) { let ty = TypeId::of::(); assert_eq!(ty, TypeId::of::()); match self.events.entry(E::ID) { Entry::Occupied(entry) => { if entry.get() == &ty { // don't warn during tests to avoid log spam #[cfg(not(feature = "integration_test"))] panic!("Event {} was registered multiple times", E::ID); } else { panic!("Multiple events with ID {} were registered", E::ID); } } Entry::Vacant(ent) => { ent.insert(ty); self.handlers.insert(E::ID, Vec::new()); } } } /// # Safety /// /// `hook` must be totally generic over all lifetime parameters of `E`. For /// example if `E` was a known type `Foo<'a, 'b> then the correct trait bound /// would be `F: for<'a, 'b, 'c> Fn(&'a mut Foo<'b, 'c>)` but there is no way to /// express that kind of constraint for a generic type with the rust type system /// right now. 
pub unsafe fn register_hook( &mut self, hook: impl Fn(&mut E) -> Result<()> + 'static + Send + Sync, ) { // ensure event type ids match so we can rely on them always matching let id = E::ID; let Some(&event_id) = self.events.get(id) else { panic!("Tried to register handler for unknown event {id}"); }; assert!( TypeId::of::() == event_id, "Tried to register invalid hook for event {id}" ); let hook = ErasedHook::new(hook); self.handlers.get_mut(id).unwrap().push(hook); } pub fn register_dynamic_hook( &mut self, hook: impl Fn() -> Result<()> + 'static + Send + Sync, id: &str, ) -> Result<()> { // ensure event type ids match so we can rely on them always matching if self.events.get(id).is_none() { bail!("Tried to register handler for unknown event {id}"); }; let hook = ErasedHook::new_dynamic(hook); self.handlers.get_mut(id).unwrap().push(hook); Ok(()) } pub fn dispatch(&self, mut event: E) { let Some(hooks) = self.handlers.get(E::ID) else { log::error!("Dispatched unknown event {}", E::ID); return; }; let event_id = self.events[E::ID]; assert_eq!( TypeId::of::(), event_id, "Tried to dispatch invalid event {}", E::ID ); for hook in hooks { // safety: event type is the same if let Err(err) = unsafe { hook.call(&mut event) } { log::error!("{} hook failed: {err:#?}", E::ID); crate::status::report_blocking(err); } } } } runtime_local! { static REGISTRY: RwLock = RwLock::new(Registry { // hardcoded random number is good enough here we don't care about DOS resistance // and avoids the additional complexity of `Option` events: HashMap::with_hasher(foldhash::fast::FixedState::with_seed(72536814787)), handlers: HashMap::with_hasher(foldhash::fast::FixedState::with_seed(72536814787)), }); } pub(crate) fn with(f: impl FnOnce(&Registry) -> T) -> T { f(®ISTRY.read()) } pub(crate) fn with_mut(f: impl FnOnce(&mut Registry) -> T) -> T { f(&mut REGISTRY.write()) } /// # Safety /// The number of specified lifetimes and the static type *must* be correct. 
/// This is ensured automatically by the [`events`](crate::events) /// macro. pub unsafe trait Event: Sized { /// Globally unique (case sensitive) string that identifies this type. /// A good candidate is the events type name const ID: &'static str; const LIFETIMES: usize; type Static: Event + 'static; } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-event/src/runtime.rs000066400000000000000000000057261500614314100246670ustar00rootroot00000000000000//! The event system makes use of global to decouple different systems. //! However, this can cause problems for the integration test system because //! it runs multiple helix applications in parallel. Making the globals //! thread-local does not work because a applications can/does have multiple //! runtime threads. Instead this crate implements a similar notion to a thread //! local but instead of being local to a single thread, the statics are local to //! a single tokio-runtime. The implementation requires locking so it's not exactly efficient. //! //! Therefore this function is only enabled during integration tests and behaves like //! a normal static otherwise. I would prefer this module to be fully private and to only //! export the macro but the macro still need to construct these internals so it's marked //! `doc(hidden)` instead use std::ops::Deref; #[cfg(not(feature = "integration_test"))] pub struct RuntimeLocal { /// inner API used in the macro, not part of public API #[doc(hidden)] pub __data: T, } #[cfg(not(feature = "integration_test"))] impl Deref for RuntimeLocal { type Target = T; fn deref(&self) -> &Self::Target { &self.__data } } #[cfg(not(feature = "integration_test"))] #[macro_export] macro_rules! 
runtime_local { ($($(#[$attr:meta])* $vis: vis static $name:ident: $ty: ty = $init: expr;)*) => { $($(#[$attr])* $vis static $name: $crate::runtime::RuntimeLocal<$ty> = $crate::runtime::RuntimeLocal { __data: $init };)* }; } #[cfg(feature = "integration_test")] pub struct RuntimeLocal { data: parking_lot::RwLock< hashbrown::HashMap, >, init: fn() -> T, } #[cfg(feature = "integration_test")] impl RuntimeLocal { /// inner API used in the macro, not part of public API #[doc(hidden)] pub const fn __new(init: fn() -> T) -> Self { Self { data: parking_lot::RwLock::new(hashbrown::HashMap::with_hasher( foldhash::fast::FixedState::with_seed(12345678910), )), init, } } } #[cfg(feature = "integration_test")] impl Deref for RuntimeLocal { type Target = T; fn deref(&self) -> &T { let id = tokio::runtime::Handle::current().id(); let guard = self.data.read(); match guard.get(&id) { Some(res) => res, None => { drop(guard); let data = Box::leak(Box::new((self.init)())); let mut guard = self.data.write(); guard.insert(id, data); data } } } } #[cfg(feature = "integration_test")] #[macro_export] macro_rules! runtime_local { ($($(#[$attr:meta])* $vis: vis static $name:ident: $ty: ty = $init: expr;)*) => { $($(#[$attr])* $vis static $name: $crate::runtime::RuntimeLocal<$ty> = $crate::runtime::RuntimeLocal::__new(|| $init);)* }; } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-event/src/status.rs000066400000000000000000000032161500614314100245170ustar00rootroot00000000000000//! A queue of async messages/errors that will be shown in the editor use std::borrow::Cow; use std::time::Duration; use crate::{runtime_local, send_blocking}; use once_cell::sync::OnceCell; use tokio::sync::mpsc::{Receiver, Sender}; /// Describes the severity level of a [`StatusMessage`]. 
#[derive(Debug, Clone, Copy, Eq, PartialEq, PartialOrd, Ord)] pub enum Severity { Hint, Info, Warning, Error, } pub struct StatusMessage { pub severity: Severity, pub message: Cow<'static, str>, } impl From for StatusMessage { fn from(err: anyhow::Error) -> Self { StatusMessage { severity: Severity::Error, message: err.to_string().into(), } } } impl From<&'static str> for StatusMessage { fn from(msg: &'static str) -> Self { StatusMessage { severity: Severity::Info, message: msg.into(), } } } runtime_local! { static MESSAGES: OnceCell> = OnceCell::new(); } pub async fn report(msg: impl Into) { // if the error channel overflows just ignore it let _ = MESSAGES .wait() .send_timeout(msg.into(), Duration::from_millis(10)) .await; } pub fn report_blocking(msg: impl Into) { let messages = MESSAGES.wait(); send_blocking(messages, msg.into()) } /// Must be called once during editor startup exactly once /// before any of the messages in this module can be used /// /// # Panics /// If called multiple times pub fn setup() -> Receiver { let (tx, rx) = tokio::sync::mpsc::channel(128); let _ = MESSAGES.set(tx); rx } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-event/src/test.rs000066400000000000000000000042601500614314100241530ustar00rootroot00000000000000use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use std::time::Duration; use parking_lot::Mutex; use crate::{dispatch, events, register_dynamic_hook, register_event, register_hook}; #[test] fn smoke_test() { events! 
{ Event1 { content: String } Event2 { content: usize } } register_event::(); register_event::(); // setup hooks let res1: Arc> = Arc::default(); let acc = Arc::clone(&res1); register_hook!(move |event: &mut Event1| { acc.lock().push_str(&event.content); Ok(()) }); let res2: Arc = Arc::default(); let acc = Arc::clone(&res2); register_hook!(move |event: &mut Event2| { acc.fetch_add(event.content, Ordering::Relaxed); Ok(()) }); // triggers events let thread = std::thread::spawn(|| { for i in 0..1000 { dispatch(Event2 { content: i }); } }); std::thread::sleep(Duration::from_millis(1)); dispatch(Event1 { content: "foo".to_owned(), }); dispatch(Event2 { content: 42 }); dispatch(Event1 { content: "bar".to_owned(), }); dispatch(Event1 { content: "hello world".to_owned(), }); thread.join().unwrap(); // check output assert_eq!(&**res1.lock(), "foobarhello world"); assert_eq!( res2.load(Ordering::Relaxed), 42 + (0..1000usize).sum::() ); } #[test] #[allow(dead_code)] fn dynamic() { events! { Event3 {} Event4 { count: usize } }; register_event::(); register_event::(); let count = Arc::new(AtomicUsize::new(0)); let count1 = count.clone(); let count2 = count.clone(); register_dynamic_hook( move || { count1.fetch_add(2, Ordering::Relaxed); Ok(()) }, "Event3", ) .unwrap(); register_dynamic_hook( move || { count2.fetch_add(3, Ordering::Relaxed); Ok(()) }, "Event4", ) .unwrap(); dispatch(Event3 {}); dispatch(Event4 { count: 0 }); dispatch(Event3 {}); assert_eq!(count.load(Ordering::Relaxed), 7) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-loader/000077500000000000000000000000001500614314100221625ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-loader/Cargo.toml000066400000000000000000000014441500614314100241150ustar00rootroot00000000000000[package] name = "helix-loader" description = "Build bootstrapping for Helix crates" version.workspace = true authors.workspace = true edition.workspace = true license.workspace = true 
rust-version.workspace = true categories.workspace = true repository.workspace = true homepage.workspace = true [[bin]] name = "hx-loader" path = "src/main.rs" [dependencies] helix-stdx = { path = "../helix-stdx" } anyhow = "1" serde = { version = "1.0", features = ["derive"] } toml = "0.8" etcetera = "0.10" tree-sitter.workspace = true once_cell = "1.21" log = "0.4" # TODO: these two should be on !wasm32 only # cloning/compiling tree-sitter grammars cc = { version = "1" } threadpool = { version = "1.0" } tempfile.workspace = true [target.'cfg(not(target_arch = "wasm32"))'.dependencies] libloading = "0.8" helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-loader/build.rs000066400000000000000000000052241500614314100236320ustar00rootroot00000000000000use std::borrow::Cow; use std::path::Path; use std::process::Command; const MAJOR: &str = env!("CARGO_PKG_VERSION_MAJOR"); const MINOR: &str = env!("CARGO_PKG_VERSION_MINOR"); const PATCH: &str = env!("CARGO_PKG_VERSION_PATCH"); fn main() { let git_hash = Command::new("git") .args(["rev-parse", "HEAD"]) .output() .ok() .filter(|output| output.status.success()) .and_then(|x| String::from_utf8(x.stdout).ok()) .or_else(|| option_env!("HELIX_NIX_BUILD_REV").map(|s| s.to_string())); let minor = if MINOR.len() == 1 { // Print single-digit months in '0M' format format!("0{MINOR}") } else { MINOR.to_string() }; let calver = if PATCH == "0" { format!("{MAJOR}.{minor}") } else { format!("{MAJOR}.{minor}.{PATCH}") }; let version: Cow<_> = match &git_hash { Some(git_hash) => format!("{} ({})", calver, &git_hash[..8]).into(), None => calver.into(), }; println!( "cargo:rustc-env=BUILD_TARGET={}", std::env::var("TARGET").unwrap() ); println!("cargo:rustc-env=VERSION_AND_GIT_HASH={}", version); if git_hash.is_none() { return; } // we need to revparse because the git dir could be anywhere if you are // using detached worktrees but there is no good way to obtain an OsString // from command output so for now we can't accept non-utf8 
paths here // probably rare enouch where it doesn't matter tough we could use gitoxide // here but that would be make it a hard dependency and slow compile times let Some(git_dir): Option = Command::new("git") .args(["rev-parse", "--git-dir"]) .output() .ok() .filter(|output| output.status.success()) .and_then(|x| String::from_utf8(x.stdout).ok()) .map(|x| x.trim().to_string()) else { return; }; // If heads starts pointing at something else (different branch) // we need to return let head = Path::new(&git_dir).join("HEAD"); if head.exists() { println!("cargo:rerun-if-changed={}", head.display()); } // if the thing head points to (branch) itself changes // we need to return let Some(head_ref): Option = Command::new("git") .args(["symbolic-ref", "HEAD"]) .output() .ok() .filter(|output| output.status.success()) .and_then(|x| String::from_utf8(x.stdout).ok()) .map(|x| x.trim().to_string()) else { return; }; let head_ref = Path::new(&git_dir).join(head_ref); if head_ref.exists() { println!("cargo:rerun-if-changed={}", head_ref.display()); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-loader/src/000077500000000000000000000000001500614314100227515ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-loader/src/config.rs000066400000000000000000000016231500614314100245660ustar00rootroot00000000000000use std::str::from_utf8; /// Default built-in languages.toml. pub fn default_lang_config() -> toml::Value { let default_config = include_bytes!("../../languages.toml"); toml::from_str(from_utf8(default_config).unwrap()) .expect("Could not parse built-in languages.toml to valid toml") } /// User configured languages.toml file, merged with the default config. 
pub fn user_lang_config() -> Result { let config = [ crate::config_dir(), crate::find_workspace().0.join(".helix"), ] .into_iter() .map(|path| path.join("languages.toml")) .filter_map(|file| { std::fs::read_to_string(file) .map(|config| toml::from_str(&config)) .ok() }) .collect::, _>>()? .into_iter() .fold(default_lang_config(), |a, b| { crate::merge_toml_values(a, b, 3) }); Ok(config) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-loader/src/grammar.rs000066400000000000000000000452001500614314100247460ustar00rootroot00000000000000use anyhow::{anyhow, bail, Context, Result}; use serde::{Deserialize, Serialize}; use std::fs; use std::time::SystemTime; use std::{ collections::HashSet, path::{Path, PathBuf}, process::Command, sync::mpsc::channel, }; use tempfile::TempPath; use tree_sitter::Language; #[cfg(unix)] const DYLIB_EXTENSION: &str = "so"; #[cfg(windows)] const DYLIB_EXTENSION: &str = "dll"; #[cfg(target_arch = "wasm32")] const DYLIB_EXTENSION: &str = "wasm"; #[derive(Debug, Serialize, Deserialize)] struct Configuration { #[serde(rename = "use-grammars")] pub grammar_selection: Option, pub grammar: Vec, } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "lowercase", untagged)] pub enum GrammarSelection { Only { only: HashSet }, Except { except: HashSet }, } #[derive(Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] pub struct GrammarConfiguration { #[serde(rename = "name")] pub grammar_id: String, pub source: GrammarSource, } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "lowercase", untagged)] pub enum GrammarSource { Local { path: String, }, Git { #[serde(rename = "git")] remote: String, #[serde(rename = "rev")] revision: String, subpath: Option, }, } const BUILD_TARGET: &str = env!("BUILD_TARGET"); const REMOTE_NAME: &str = "origin"; #[cfg(target_arch = "wasm32")] pub fn get_language(name: &str) -> Result { unimplemented!() } #[cfg(not(target_arch = "wasm32"))] pub fn get_language(name: &str) -> Result { use 
libloading::{Library, Symbol}; let mut rel_library_path = PathBuf::new().join("grammars").join(name); rel_library_path.set_extension(DYLIB_EXTENSION); let library_path = crate::runtime_file(&rel_library_path); let library = unsafe { Library::new(&library_path) } .with_context(|| format!("Error opening dynamic library {:?}", library_path))?; let language_fn_name = format!("tree_sitter_{}", name.replace('-', "_")); let language = unsafe { let language_fn: Symbol Language> = library .get(language_fn_name.as_bytes()) .with_context(|| format!("Failed to load symbol {}", language_fn_name))?; language_fn() }; std::mem::forget(library); Ok(language) } fn ensure_git_is_available() -> Result<()> { helix_stdx::env::which("git")?; Ok(()) } pub fn fetch_grammars() -> Result<()> { ensure_git_is_available()?; // We do not need to fetch local grammars. let mut grammars = get_grammar_configs()?; grammars.retain(|grammar| !matches!(grammar.source, GrammarSource::Local { .. })); println!("Fetching {} grammars", grammars.len()); let results = run_parallel(grammars, fetch_grammar); let mut errors = Vec::new(); let mut git_updated = Vec::new(); let mut git_up_to_date = 0; let mut non_git = Vec::new(); for (grammar_id, res) in results { match res { Ok(FetchStatus::GitUpToDate) => git_up_to_date += 1, Ok(FetchStatus::GitUpdated { revision }) => git_updated.push((grammar_id, revision)), Ok(FetchStatus::NonGit) => non_git.push(grammar_id), Err(e) => errors.push((grammar_id, e)), } } non_git.sort_unstable(); git_updated.sort_unstable_by(|a, b| a.0.cmp(&b.0)); if git_up_to_date != 0 { println!("{} up to date git grammars", git_up_to_date); } if !non_git.is_empty() { println!("{} non git grammars", non_git.len()); println!("\t{:?}", non_git); } if !git_updated.is_empty() { println!("{} updated grammars", git_updated.len()); // We checked the vec is not empty, unwrapping will not panic let longest_id = git_updated.iter().map(|x| x.0.len()).max().unwrap(); for (id, rev) in git_updated { 
println!( "\t{id:width$} now on {rev}", id = id, width = longest_id, rev = rev ); } } if !errors.is_empty() { let len = errors.len(); for (i, (grammar, error)) in errors.into_iter().enumerate() { println!("Failure {}/{len}: {grammar} {error}", i + 1); } bail!("{len} grammars failed to fetch"); } Ok(()) } pub fn build_grammars(target: Option) -> Result<()> { ensure_git_is_available()?; let grammars = get_grammar_configs()?; println!("Building {} grammars", grammars.len()); let results = run_parallel(grammars, move |grammar| { build_grammar(grammar, target.as_deref()) }); let mut errors = Vec::new(); let mut already_built = 0; let mut built = Vec::new(); for (grammar_id, res) in results { match res { Ok(BuildStatus::AlreadyBuilt) => already_built += 1, Ok(BuildStatus::Built) => built.push(grammar_id), Err(e) => errors.push((grammar_id, e)), } } built.sort_unstable(); if already_built != 0 { println!("{} grammars already built", already_built); } if !built.is_empty() { println!("{} grammars built now", built.len()); println!("\t{:?}", built); } if !errors.is_empty() { let len = errors.len(); for (i, (grammar_id, error)) in errors.into_iter().enumerate() { println!("Failure {}/{len}: {grammar_id} {error}", i + 1); } bail!("{len} grammars failed to build"); } Ok(()) } // Returns the set of grammar configurations the user requests. // Grammars are configured in the default and user `languages.toml` and are // merged. The `grammar_selection` key of the config is then used to filter // down all grammars into a subset of the user's choosing. fn get_grammar_configs() -> Result> { let config: Configuration = crate::config::user_lang_config() .context("Could not parse languages.toml")? 
.try_into()?; let grammars = match config.grammar_selection { Some(GrammarSelection::Only { only: selections }) => config .grammar .into_iter() .filter(|grammar| selections.contains(&grammar.grammar_id)) .collect(), Some(GrammarSelection::Except { except: rejections }) => config .grammar .into_iter() .filter(|grammar| !rejections.contains(&grammar.grammar_id)) .collect(), None => config.grammar, }; Ok(grammars) } fn run_parallel(grammars: Vec, job: F) -> Vec<(String, Result)> where F: Fn(GrammarConfiguration) -> Result + Send + 'static + Clone, Res: Send + 'static, { let pool = threadpool::Builder::new().build(); let (tx, rx) = channel(); for grammar in grammars { let tx = tx.clone(); let job = job.clone(); pool.execute(move || { // Ignore any SendErrors, if any job in another thread has encountered an // error the Receiver will be closed causing this send to fail. let _ = tx.send((grammar.grammar_id.clone(), job(grammar))); }); } drop(tx); rx.iter().collect() } enum FetchStatus { GitUpToDate, GitUpdated { revision: String }, NonGit, } fn fetch_grammar(grammar: GrammarConfiguration) -> Result { if let GrammarSource::Git { remote, revision, .. } = grammar.source { let grammar_dir = crate::runtime_dirs() .first() .expect("No runtime directories provided") // guaranteed by post-condition .join("grammars") .join("sources") .join(&grammar.grammar_id); fs::create_dir_all(&grammar_dir).context(format!( "Could not create grammar directory {:?}", grammar_dir ))?; // create the grammar dir contains a git directory if !grammar_dir.join(".git").exists() { git(&grammar_dir, ["init"])?; } // ensure the remote matches the configured remote if get_remote_url(&grammar_dir).as_ref() != Some(&remote) { set_remote(&grammar_dir, &remote)?; } // ensure the revision matches the configured revision if get_revision(&grammar_dir).as_ref() != Some(&revision) { // Fetch the exact revision from the remote. 
// Supported by server-side git since v2.5.0 (July 2015), // enabled by default on major git hosts. git( &grammar_dir, ["fetch", "--depth", "1", REMOTE_NAME, &revision], )?; git(&grammar_dir, ["checkout", &revision])?; Ok(FetchStatus::GitUpdated { revision }) } else { Ok(FetchStatus::GitUpToDate) } } else { Ok(FetchStatus::NonGit) } } // Sets the remote for a repository to the given URL, creating the remote if // it does not yet exist. fn set_remote(repository_dir: &Path, remote_url: &str) -> Result { git( repository_dir, ["remote", "set-url", REMOTE_NAME, remote_url], ) .or_else(|_| git(repository_dir, ["remote", "add", REMOTE_NAME, remote_url])) } fn get_remote_url(repository_dir: &Path) -> Option { git(repository_dir, ["remote", "get-url", REMOTE_NAME]).ok() } fn get_revision(repository_dir: &Path) -> Option { git(repository_dir, ["rev-parse", "HEAD"]).ok() } // A wrapper around 'git' commands which returns stdout in success and a // helpful error message showing the command, stdout, and stderr in error. 
fn git(repository_dir: &Path, args: I) -> Result where I: IntoIterator, S: AsRef, { let output = Command::new("git") .args(args) .current_dir(repository_dir) .output()?; if output.status.success() { Ok(String::from_utf8_lossy(&output.stdout) .trim_end() .to_owned()) } else { // TODO: figure out how to display the git command using `args` Err(anyhow!( "Git command failed.\nStdout: {}\nStderr: {}", String::from_utf8_lossy(&output.stdout), String::from_utf8_lossy(&output.stderr), )) } } enum BuildStatus { AlreadyBuilt, Built, } fn build_grammar(grammar: GrammarConfiguration, target: Option<&str>) -> Result { let grammar_dir = if let GrammarSource::Local { path } = &grammar.source { PathBuf::from(&path) } else { crate::runtime_dirs() .first() .expect("No runtime directories provided") // guaranteed by post-condition .join("grammars") .join("sources") .join(&grammar.grammar_id) }; let grammar_dir_entries = grammar_dir.read_dir().with_context(|| { format!( "Failed to read directory {:?}. Did you use 'hx --grammar fetch'?", grammar_dir ) })?; if grammar_dir_entries.count() == 0 { return Err(anyhow!( "Directory {:?} is empty. Did you use 'hx --grammar fetch'?", grammar_dir )); }; let path = match &grammar.source { GrammarSource::Git { subpath: Some(subpath), .. 
} => grammar_dir.join(subpath), _ => grammar_dir, } .join("src"); build_tree_sitter_library(&path, grammar, target) } fn build_tree_sitter_library( src_path: &Path, grammar: GrammarConfiguration, target: Option<&str>, ) -> Result { let header_path = src_path; let parser_path = src_path.join("parser.c"); let mut scanner_path = src_path.join("scanner.c"); let scanner_path = if scanner_path.exists() { Some(scanner_path) } else { scanner_path.set_extension("cc"); if scanner_path.exists() { Some(scanner_path) } else { None } }; let parser_lib_path = crate::runtime_dirs() .first() .expect("No runtime directories provided") // guaranteed by post-condition .join("grammars"); let mut library_path = parser_lib_path.join(&grammar.grammar_id); library_path.set_extension(DYLIB_EXTENSION); // if we are running inside a buildscript emit cargo metadata // to detect if we are running from a buildscript check some env variables // that cargo only sets for build scripts if std::env::var("OUT_DIR").is_ok() && std::env::var("CARGO").is_ok() { if let Some(scanner_path) = scanner_path.as_ref().and_then(|path| path.to_str()) { println!("cargo:rerun-if-changed={scanner_path}"); } if let Some(parser_path) = parser_path.to_str() { println!("cargo:rerun-if-changed={parser_path}"); } } let recompile = needs_recompile(&library_path, &parser_path, scanner_path.as_ref()) .context("Failed to compare source and binary timestamps")?; if !recompile { return Ok(BuildStatus::AlreadyBuilt); } let mut config = cc::Build::new(); config .cpp(true) .opt_level(3) .cargo_metadata(false) .host(BUILD_TARGET) .target(target.unwrap_or(BUILD_TARGET)); let compiler = config.get_compiler(); let mut command = Command::new(compiler.path()); command.current_dir(src_path); for (key, value) in compiler.env() { command.env(key, value); } command.args(compiler.args()); // used to delay dropping the temporary object file until after the compilation is complete let _path_guard; if compiler.is_like_msvc() { command 
.args(["/nologo", "/LD", "/I"]) .arg(header_path) .arg("/utf-8") .arg("/std:c11"); if let Some(scanner_path) = scanner_path.as_ref() { if scanner_path.extension() == Some("c".as_ref()) { command.arg(scanner_path); } else { let mut cpp_command = Command::new(compiler.path()); cpp_command.current_dir(src_path); for (key, value) in compiler.env() { cpp_command.env(key, value); } cpp_command.args(compiler.args()); let object_file = library_path.with_file_name(format!("{}_scanner.obj", &grammar.grammar_id)); cpp_command .args(["/nologo", "/LD", "/I"]) .arg(header_path) .arg("/utf-8") .arg("/std:c++14") .arg(format!("/Fo{}", object_file.display())) .arg("/c") .arg(scanner_path); let output = cpp_command .output() .context("Failed to execute C++ compiler")?; if !output.status.success() { return Err(anyhow!( "Parser compilation failed.\nStdout: {}\nStderr: {}", String::from_utf8_lossy(&output.stdout), String::from_utf8_lossy(&output.stderr) )); } command.arg(&object_file); _path_guard = TempPath::from_path(object_file); } } command .arg(parser_path) .arg("/link") .arg(format!("/out:{}", library_path.to_str().unwrap())); } else { #[cfg(not(windows))] command.arg("-fPIC"); command .arg("-shared") .arg("-fno-exceptions") .arg("-I") .arg(header_path) .arg("-o") .arg(&library_path); if let Some(scanner_path) = scanner_path.as_ref() { if scanner_path.extension() == Some("c".as_ref()) { command.arg("-xc").arg("-std=c11").arg(scanner_path); } else { let mut cpp_command = Command::new(compiler.path()); cpp_command.current_dir(src_path); for (key, value) in compiler.env() { cpp_command.env(key, value); } cpp_command.args(compiler.args()); let object_file = library_path.with_file_name(format!("{}_scanner.o", &grammar.grammar_id)); #[cfg(not(windows))] cpp_command.arg("-fPIC"); cpp_command .arg("-fno-exceptions") .arg("-I") .arg(header_path) .arg("-o") .arg(&object_file) .arg("-std=c++14") .arg("-c") .arg(scanner_path); let output = cpp_command .output() .context("Failed to execute 
C++ compiler")?; if !output.status.success() { return Err(anyhow!( "Parser compilation failed.\nStdout: {}\nStderr: {}", String::from_utf8_lossy(&output.stdout), String::from_utf8_lossy(&output.stderr) )); } command.arg(&object_file); _path_guard = TempPath::from_path(object_file); } } command.arg("-xc").arg("-std=c11").arg(parser_path); if cfg!(all( unix, not(any(target_os = "macos", target_os = "illumos")) )) { command.arg("-Wl,-z,relro,-z,now"); } } let output = command .output() .context("Failed to execute C/C++ compiler")?; if !output.status.success() { return Err(anyhow!( "Parser compilation failed.\nStdout: {}\nStderr: {}", String::from_utf8_lossy(&output.stdout), String::from_utf8_lossy(&output.stderr) )); } Ok(BuildStatus::Built) } fn needs_recompile( lib_path: &Path, parser_c_path: &Path, scanner_path: Option<&PathBuf>, ) -> Result { if !lib_path.exists() { return Ok(true); } let lib_mtime = mtime(lib_path)?; if mtime(parser_c_path)? > lib_mtime { return Ok(true); } if let Some(scanner_path) = scanner_path { if mtime(scanner_path)? > lib_mtime { return Ok(true); } } Ok(false) } fn mtime(path: &Path) -> Result { Ok(fs::metadata(path)?.modified()?) 
} /// Gives the contents of a file from a language's `runtime/queries/` /// directory pub fn load_runtime_file(language: &str, filename: &str) -> Result { let path = crate::runtime_file(PathBuf::new().join("queries").join(language).join(filename)); std::fs::read_to_string(path) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-loader/src/lib.rs000066400000000000000000000272451500614314100240770ustar00rootroot00000000000000pub mod config; pub mod grammar; use helix_stdx::{env::current_working_dir, path}; use etcetera::base_strategy::{choose_base_strategy, BaseStrategy}; use std::path::{Path, PathBuf}; pub const VERSION_AND_GIT_HASH: &str = env!("VERSION_AND_GIT_HASH"); static RUNTIME_DIRS: once_cell::sync::Lazy> = once_cell::sync::Lazy::new(prioritize_runtime_dirs); static CONFIG_FILE: once_cell::sync::OnceCell = once_cell::sync::OnceCell::new(); static LOG_FILE: once_cell::sync::OnceCell = once_cell::sync::OnceCell::new(); pub fn initialize_config_file(specified_file: Option) { let config_file = specified_file.unwrap_or_else(default_config_file); ensure_parent_dir(&config_file); CONFIG_FILE.set(config_file).ok(); } pub fn initialize_log_file(specified_file: Option) { let log_file = specified_file.unwrap_or_else(default_log_file); ensure_parent_dir(&log_file); LOG_FILE.set(log_file).ok(); } /// A list of runtime directories from highest to lowest priority /// /// The priority is: /// /// 1. sibling directory to `CARGO_MANIFEST_DIR` (if environment variable is set) /// 2. subdirectory of user config directory (always included) /// 3. `HELIX_RUNTIME` (if environment variable is set) /// 4. `HELIX_DEFAULT_RUNTIME` (if environment variable is set *at build time*) /// 5. subdirectory of path to helix executable (always included) /// /// Postcondition: returns at least two paths (they might not exist). 
fn prioritize_runtime_dirs() -> Vec { const RT_DIR: &str = "runtime"; // Adding higher priority first let mut rt_dirs = Vec::new(); if let Ok(dir) = std::env::var("CARGO_MANIFEST_DIR") { // this is the directory of the crate being run by cargo, we need the workspace path so we take the parent let path = PathBuf::from(dir).parent().unwrap().join(RT_DIR); log::debug!("runtime dir: {}", path.to_string_lossy()); rt_dirs.push(path); } let conf_rt_dir = config_dir().join(RT_DIR); rt_dirs.push(conf_rt_dir); if let Ok(dir) = std::env::var("HELIX_RUNTIME") { let dir = path::expand_tilde(Path::new(&dir)); rt_dirs.push(path::normalize(dir)); } // If this variable is set during build time, it will always be included // in the lookup list. This allows downstream packagers to set a fallback // directory to a location that is conventional on their distro so that they // need not resort to a wrapper script or a global environment variable. if let Some(dir) = std::option_env!("HELIX_DEFAULT_RUNTIME") { rt_dirs.push(dir.into()); } // fallback to location of the executable being run // canonicalize the path in case the executable is symlinked let exe_rt_dir = std::env::current_exe() .ok() .and_then(|path| std::fs::canonicalize(path).ok()) .and_then(|path| path.parent().map(|path| path.to_path_buf().join(RT_DIR))) .unwrap(); rt_dirs.push(exe_rt_dir); rt_dirs } /// Runtime directories ordered from highest to lowest priority /// /// All directories should be checked when looking for files. /// /// Postcondition: returns at least one path (it might not exist). pub fn runtime_dirs() -> &'static [PathBuf] { &RUNTIME_DIRS } /// Find file with path relative to runtime directory /// /// `rel_path` should be the relative path from within the `runtime/` directory. /// The valid runtime directories are searched in priority order and the first /// file found to exist is returned, otherwise None. 
fn find_runtime_file(rel_path: &Path) -> Option { RUNTIME_DIRS.iter().find_map(|rt_dir| { let path = rt_dir.join(rel_path); if path.exists() { Some(path) } else { None } }) } /// Find file with path relative to runtime directory /// /// `rel_path` should be the relative path from within the `runtime/` directory. /// The valid runtime directories are searched in priority order and the first /// file found to exist is returned, otherwise the path to the final attempt /// that failed. pub fn runtime_file(rel_path: impl AsRef) -> PathBuf { find_runtime_file(rel_path.as_ref()).unwrap_or_else(|| { RUNTIME_DIRS .last() .map(|dir| dir.join(rel_path)) .unwrap_or_default() }) } pub fn config_dir() -> PathBuf { // TODO: allow env var override let strategy = choose_base_strategy().expect("Unable to find the config directory!"); let mut path = strategy.config_dir(); path.push("helix"); path } pub fn cache_dir() -> PathBuf { // TODO: allow env var override let strategy = choose_base_strategy().expect("Unable to find the cache directory!"); let mut path = strategy.cache_dir(); path.push("helix"); path } pub fn config_file() -> PathBuf { CONFIG_FILE.get().map(|path| path.to_path_buf()).unwrap() } pub fn log_file() -> PathBuf { LOG_FILE.get().map(|path| path.to_path_buf()).unwrap() } pub fn workspace_config_file() -> PathBuf { find_workspace().0.join(".helix").join("config.toml") } pub fn lang_config_file() -> PathBuf { config_dir().join("languages.toml") } pub fn default_log_file() -> PathBuf { cache_dir().join("helix.log") } /// Merge two TOML documents, merging values from `right` onto `left` /// /// `merge_depth` sets the nesting depth up to which values are merged instead /// of overridden. /// /// When a table exists in both `left` and `right`, the merged table consists of /// all keys in `left`'s table unioned with all keys in `right` with the values /// of `right` being merged recursively onto values of `left`. 
/// /// `crate::merge_toml_values(a, b, 3)` combines, for example: /// /// b: /// ```toml /// [[language]] /// name = "toml" /// language-server = { command = "taplo", args = ["lsp", "stdio"] } /// ``` /// a: /// ```toml /// [[language]] /// language-server = { command = "/usr/bin/taplo" } /// ``` /// /// into: /// ```toml /// [[language]] /// name = "toml" /// language-server = { command = "/usr/bin/taplo" } /// ``` /// /// thus it overrides the third depth-level of b with values of a if they exist, /// but otherwise merges their values pub fn merge_toml_values(left: toml::Value, right: toml::Value, merge_depth: usize) -> toml::Value { use toml::Value; fn get_name(v: &Value) -> Option<&str> { v.get("name").and_then(Value::as_str) } match (left, right) { (Value::Array(mut left_items), Value::Array(right_items)) => { if merge_depth > 0 { left_items.reserve(right_items.len()); for rvalue in right_items { let lvalue = get_name(&rvalue) .and_then(|rname| { left_items.iter().position(|v| get_name(v) == Some(rname)) }) .map(|lpos| left_items.remove(lpos)); let mvalue = match lvalue { Some(lvalue) => merge_toml_values(lvalue, rvalue, merge_depth - 1), None => rvalue, }; left_items.push(mvalue); } Value::Array(left_items) } else { Value::Array(right_items) } } (Value::Table(mut left_map), Value::Table(right_map)) => { if merge_depth > 0 { for (rname, rvalue) in right_map { match left_map.remove(&rname) { Some(lvalue) => { let merged_value = merge_toml_values(lvalue, rvalue, merge_depth - 1); left_map.insert(rname, merged_value); } None => { left_map.insert(rname, rvalue); } } } Value::Table(left_map) } else { Value::Table(right_map) } } // Catch everything else we didn't handle, and use the right value (_, value) => value, } } /// Finds the current workspace folder. 
/// Used as a ceiling dir for LSP root resolution, the filepicker and potentially as a future filewatching root /// /// This function starts searching the FS upward from the CWD /// and returns the first directory that contains either `.git`, `.svn`, `.jj` or `.helix`. /// If no workspace was found returns (CWD, true). /// Otherwise (workspace, false) is returned pub fn find_workspace() -> (PathBuf, bool) { let current_dir = current_working_dir(); for ancestor in current_dir.ancestors() { if ancestor.join(".git").exists() || ancestor.join(".svn").exists() || ancestor.join(".jj").exists() || ancestor.join(".helix").exists() { return (ancestor.to_owned(), false); } } (current_dir, true) } fn default_config_file() -> PathBuf { config_dir().join("config.toml") } fn ensure_parent_dir(path: &Path) { if let Some(parent) = path.parent() { if !parent.exists() { std::fs::create_dir_all(parent).ok(); } } } #[cfg(test)] mod merge_toml_tests { use std::str; use super::merge_toml_values; use toml::Value; #[test] fn language_toml_map_merges() { const USER: &str = r#" [[language]] name = "nix" test = "bbb" indent = { tab-width = 4, unit = " ", test = "aaa" } "#; let base = include_bytes!("../../languages.toml"); let base = str::from_utf8(base).expect("Couldn't parse built-in languages config"); let base: Value = toml::from_str(base).expect("Couldn't parse built-in languages config"); let user: Value = toml::from_str(USER).unwrap(); let merged = merge_toml_values(base, user, 3); let languages = merged.get("language").unwrap().as_array().unwrap(); let nix = languages .iter() .find(|v| v.get("name").unwrap().as_str().unwrap() == "nix") .unwrap(); let nix_indent = nix.get("indent").unwrap(); // We changed tab-width and unit in indent so check them if they are the new values assert_eq!( nix_indent.get("tab-width").unwrap().as_integer().unwrap(), 4 ); assert_eq!(nix_indent.get("unit").unwrap().as_str().unwrap(), " "); // We added a new keys, so check them 
assert_eq!(nix.get("test").unwrap().as_str().unwrap(), "bbb"); assert_eq!(nix_indent.get("test").unwrap().as_str().unwrap(), "aaa"); // We didn't change comment-token so it should be same assert_eq!(nix.get("comment-token").unwrap().as_str().unwrap(), "#"); } #[test] fn language_toml_nested_array_merges() { const USER: &str = r#" [[language]] name = "typescript" language-server = { command = "deno", args = ["lsp"] } "#; let base = include_bytes!("../../languages.toml"); let base = str::from_utf8(base).expect("Couldn't parse built-in languages config"); let base: Value = toml::from_str(base).expect("Couldn't parse built-in languages config"); let user: Value = toml::from_str(USER).unwrap(); let merged = merge_toml_values(base, user, 3); let languages = merged.get("language").unwrap().as_array().unwrap(); let ts = languages .iter() .find(|v| v.get("name").unwrap().as_str().unwrap() == "typescript") .unwrap(); assert_eq!( ts.get("language-server") .unwrap() .get("args") .unwrap() .as_array() .unwrap(), &vec![Value::String("lsp".into())] ) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-loader/src/main.rs000066400000000000000000000003701500614314100242430ustar00rootroot00000000000000use anyhow::Result; use helix_loader::grammar::fetch_grammars; // This binary is used in the Release CI as an optimization to cut down on // compilation time. This is not meant to be run manually. 
fn main() -> Result<()> { fetch_grammars() } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/000077500000000000000000000000001500614314100226545ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/Cargo.toml000066400000000000000000000015411500614314100246050ustar00rootroot00000000000000[package] name = "helix-lsp-types" version = "0.95.1" authors = [ # Original authors "Markus Westerlind ", "Bruno Medeiros ", # Since forking "Helix contributors" ] edition = "2018" description = "Types for interaction with a language server, using VSCode's Language Server Protocol" repository = "https://github.com/gluon-lang/lsp-types" documentation = "https://docs.rs/lsp-types" readme = "README.md" keywords = ["language", "server", "lsp", "vscode", "lsif"] license = "MIT" [dependencies] bitflags.workspace = true serde = { version = "1.0.219", features = ["derive"] } serde_json = "1.0.140" url = {version = "2.5.4", features = ["serde"]} [features] default = [] # Enables proposed LSP extensions. # NOTE: No semver compatibility is guaranteed for types enabled by this feature. proposed = [] helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/LICENSE000066400000000000000000000020751500614314100236650ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2016 Markus Westerlind Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/README.md000066400000000000000000000010251500614314100241310ustar00rootroot00000000000000# Helix's `lsp-types` This is a fork of the [`lsp-types`](https://crates.io/crates/lsp-types) crate ([`gluon-lang/lsp-types`](https://github.com/gluon-lang/lsp-types)) taken at version v0.95.1 (commit [3e6daee](https://github.com/gluon-lang/lsp-types/commit/3e6daee771d14db4094a554b8d03e29c310dfcbe)). This fork focuses usability improvements that make the types easier to work with for the Helix codebase. For example the URL type - the `uri` crate at this version of `lsp-types` - will be replaced with a wrapper around a string. 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/000077500000000000000000000000001500614314100234435ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/call_hierarchy.rs000066400000000000000000000102471500614314100267660ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; use serde_json::Value; use crate::{ DynamicRegistrationClientCapabilities, PartialResultParams, Range, SymbolKind, SymbolTag, TextDocumentPositionParams, Url, WorkDoneProgressOptions, WorkDoneProgressParams, }; pub type CallHierarchyClientCapabilities = DynamicRegistrationClientCapabilities; #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize, Copy)] #[serde(rename_all = "camelCase")] pub struct CallHierarchyOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize, Copy)] #[serde(untagged)] pub enum CallHierarchyServerCapability { Simple(bool), Options(CallHierarchyOptions), } impl From for CallHierarchyServerCapability { fn from(from: CallHierarchyOptions) -> Self { Self::Options(from) } } impl From for CallHierarchyServerCapability { fn from(from: bool) -> Self { Self::Simple(from) } } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CallHierarchyPrepareParams { #[serde(flatten)] pub text_document_position_params: TextDocumentPositionParams, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, } #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] #[serde(rename_all = "camelCase")] pub struct CallHierarchyItem { /// The name of this item. pub name: String, /// The kind of this item. pub kind: SymbolKind, /// Tags for this item. #[serde(skip_serializing_if = "Option::is_none")] pub tags: Option>, /// More detail for this item, e.g. the signature of a function. 
#[serde(skip_serializing_if = "Option::is_none")] pub detail: Option, /// The resource identifier of this item. pub uri: Url, /// The range enclosing this symbol not including leading/trailing whitespace but everything else, e.g. comments and code. pub range: Range, /// The range that should be selected and revealed when this symbol is being picked, e.g. the name of a function. /// Must be contained by the [`range`](#CallHierarchyItem.range). pub selection_range: Range, /// A data entry field that is preserved between a call hierarchy prepare and incoming calls or outgoing calls requests. #[serde(skip_serializing_if = "Option::is_none")] pub data: Option, } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CallHierarchyIncomingCallsParams { pub item: CallHierarchyItem, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } /// Represents an incoming call, e.g. a caller of a method or constructor. #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] #[serde(rename_all = "camelCase")] pub struct CallHierarchyIncomingCall { /// The item that makes the call. pub from: CallHierarchyItem, /// The range at which at which the calls appears. This is relative to the caller /// denoted by [`this.from`](#CallHierarchyIncomingCall.from). pub from_ranges: Vec, } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CallHierarchyOutgoingCallsParams { pub item: CallHierarchyItem, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } /// Represents an outgoing call, e.g. calling a getter from a method or a method from a constructor etc. #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] #[serde(rename_all = "camelCase")] pub struct CallHierarchyOutgoingCall { /// The item that is called. 
pub to: CallHierarchyItem, /// The range at which this item is called. This is the range relative to the caller, e.g the item /// passed to [`provideCallHierarchyOutgoingCalls`](#CallHierarchyItemProvider.provideCallHierarchyOutgoingCalls) /// and not [`this.to`](#CallHierarchyOutgoingCall.to). pub from_ranges: Vec, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/code_action.rs000066400000000000000000000322531500614314100262650ustar00rootroot00000000000000use crate::{ Command, Diagnostic, PartialResultParams, Range, TextDocumentIdentifier, WorkDoneProgressOptions, WorkDoneProgressParams, WorkspaceEdit, }; use serde::{Deserialize, Serialize}; use serde_json::Value; use std::borrow::Cow; #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum CodeActionProviderCapability { Simple(bool), Options(CodeActionOptions), } impl From for CodeActionProviderCapability { fn from(from: CodeActionOptions) -> Self { Self::Options(from) } } impl From for CodeActionProviderCapability { fn from(from: bool) -> Self { Self::Simple(from) } } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeActionClientCapabilities { /// /// This capability supports dynamic registration. /// #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// The client support code action literals as a valid /// response of the `textDocument/codeAction` request. #[serde(skip_serializing_if = "Option::is_none")] pub code_action_literal_support: Option, /// Whether code action supports the `isPreferred` property. /// /// @since 3.15.0 #[serde(skip_serializing_if = "Option::is_none")] pub is_preferred_support: Option, /// Whether code action supports the `disabled` property. 
/// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub disabled_support: Option, /// Whether code action supports the `data` property which is /// preserved between a `textDocument/codeAction` and a /// `codeAction/resolve` request. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub data_support: Option, /// Whether the client supports resolving additional code action /// properties via a separate `codeAction/resolve` request. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub resolve_support: Option, /// Whether the client honors the change annotations in /// text edits and resource operations returned via the /// `CodeAction#edit` property by for example presenting /// the workspace edit in the user interface and asking /// for confirmation. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub honors_change_annotations: Option, } /// Whether the client supports resolving additional code action /// properties via a separate `codeAction/resolve` request. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeActionCapabilityResolveSupport { /// The properties that a client can resolve lazily. pub properties: Vec, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeActionLiteralSupport { /// The code action kind is support with the following value set. pub code_action_kind: CodeActionKindLiteralSupport, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeActionKindLiteralSupport { /// The code action kind values the client supports. When this /// property exists the client also guarantees that it will /// handle values outside its set gracefully and falls back /// to a default value when unknown. 
pub value_set: Vec, } /// Params for the CodeActionRequest #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeActionParams { /// The document in which the command was invoked. pub text_document: TextDocumentIdentifier, /// The range for which the command was invoked. pub range: Range, /// Context carrying additional information. pub context: CodeActionContext, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } /// response for CodeActionRequest pub type CodeActionResponse = Vec; #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] #[serde(untagged)] pub enum CodeActionOrCommand { Command(Command), CodeAction(CodeAction), } impl From for CodeActionOrCommand { fn from(command: Command) -> Self { CodeActionOrCommand::Command(command) } } impl From for CodeActionOrCommand { fn from(action: CodeAction) -> Self { CodeActionOrCommand::CodeAction(action) } } #[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)] pub struct CodeActionKind(Cow<'static, str>); impl CodeActionKind { /// Empty kind. pub const EMPTY: CodeActionKind = CodeActionKind::new(""); /// Base kind for quickfix actions: 'quickfix' pub const QUICKFIX: CodeActionKind = CodeActionKind::new("quickfix"); /// Base kind for refactoring actions: 'refactor' pub const REFACTOR: CodeActionKind = CodeActionKind::new("refactor"); /// Base kind for refactoring extraction actions: 'refactor.extract' /// /// Example extract actions: /// /// - Extract method /// - Extract function /// - Extract variable /// - Extract interface from class /// - ... pub const REFACTOR_EXTRACT: CodeActionKind = CodeActionKind::new("refactor.extract"); /// Base kind for refactoring inline actions: 'refactor.inline' /// /// Example inline actions: /// /// - Inline function /// - Inline variable /// - Inline constant /// - ... 
pub const REFACTOR_INLINE: CodeActionKind = CodeActionKind::new("refactor.inline"); /// Base kind for refactoring rewrite actions: 'refactor.rewrite' /// /// Example rewrite actions: /// /// - Convert JavaScript function to class /// - Add or remove parameter /// - Encapsulate field /// - Make method static /// - Move method to base class /// - ... pub const REFACTOR_REWRITE: CodeActionKind = CodeActionKind::new("refactor.rewrite"); /// Base kind for source actions: `source` /// /// Source code actions apply to the entire file. pub const SOURCE: CodeActionKind = CodeActionKind::new("source"); /// Base kind for an organize imports source action: `source.organizeImports` pub const SOURCE_ORGANIZE_IMPORTS: CodeActionKind = CodeActionKind::new("source.organizeImports"); /// Base kind for a 'fix all' source action: `source.fixAll`. /// /// 'Fix all' actions automatically fix errors that have a clear fix that /// do not require user input. They should not suppress errors or perform /// unsafe fixes such as generating new types or classes. /// /// @since 3.17.0 pub const SOURCE_FIX_ALL: CodeActionKind = CodeActionKind::new("source.fixAll"); pub const fn new(tag: &'static str) -> Self { CodeActionKind(Cow::Borrowed(tag)) } pub fn as_str(&self) -> &str { &self.0 } } impl From for CodeActionKind { fn from(from: String) -> Self { CodeActionKind(Cow::from(from)) } } impl From<&'static str> for CodeActionKind { fn from(from: &'static str) -> Self { CodeActionKind::new(from) } } #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeAction { /// A short, human-readable, title for this code action. pub title: String, /// The kind of the code action. /// Used to filter code actions. #[serde(skip_serializing_if = "Option::is_none")] pub kind: Option, /// The diagnostics that this code action resolves. 
#[serde(skip_serializing_if = "Option::is_none")] pub diagnostics: Option>, /// The workspace edit this code action performs. #[serde(skip_serializing_if = "Option::is_none")] pub edit: Option, /// A command this code action executes. If a code action /// provides an edit and a command, first the edit is /// executed and then the command. #[serde(skip_serializing_if = "Option::is_none")] pub command: Option, /// Marks this as a preferred action. Preferred actions are used by the `auto fix` command and can be targeted /// by keybindings. /// A quick fix should be marked preferred if it properly addresses the underlying error. /// A refactoring should be marked preferred if it is the most reasonable choice of actions to take. /// /// @since 3.15.0 #[serde(skip_serializing_if = "Option::is_none")] pub is_preferred: Option, /// Marks that the code action cannot currently be applied. /// /// Clients should follow the following guidelines regarding disabled code actions: /// /// - Disabled code actions are not shown in automatic /// [lightbulb](https://code.visualstudio.com/docs/editor/editingevolved#_code-action) /// code action menu. /// /// - Disabled actions are shown as faded out in the code action menu when the user request /// a more specific type of code action, such as refactorings. /// /// - If the user has a keybinding that auto applies a code action and only a disabled code /// actions are returned, the client should show the user an error message with `reason` /// in the editor. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub disabled: Option, /// A data entry field that is preserved on a code action between /// a `textDocument/codeAction` and a `codeAction/resolve` request. 
/// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub data: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeActionDisabled { /// Human readable description of why the code action is currently disabled. /// /// This is displayed in the code actions UI. pub reason: String, } /// The reason why code actions were requested. /// /// @since 3.17.0 #[derive(Eq, PartialEq, Clone, Copy, Deserialize, Serialize)] #[serde(transparent)] pub struct CodeActionTriggerKind(i32); lsp_enum! { impl CodeActionTriggerKind { /// Code actions were explicitly requested by the user or by an extension. pub const INVOKED: CodeActionTriggerKind = CodeActionTriggerKind(1); /// Code actions were requested automatically. /// /// This typically happens when current selection in a file changes, but can /// also be triggered when file content changes. pub const AUTOMATIC: CodeActionTriggerKind = CodeActionTriggerKind(2); } } /// Contains additional diagnostic information about the context in which /// a code action is run. #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeActionContext { /// An array of diagnostics. pub diagnostics: Vec, /// Requested kind of actions to return. /// /// Actions not of this kind are filtered out by the client before being shown. So servers /// can omit computing them. #[serde(skip_serializing_if = "Option::is_none")] pub only: Option>, /// The reason why code actions were requested. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub trigger_kind: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize, Default)] #[serde(rename_all = "camelCase")] pub struct CodeActionOptions { /// CodeActionKinds that this server may return. 
/// /// The list of kinds may be generic, such as `CodeActionKind.Refactor`, or the server /// may list out every specific kind they provide. #[serde(skip_serializing_if = "Option::is_none")] pub code_action_kinds: Option>, #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, /// The server provides support to resolve additional /// information for a code action. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub resolve_provider: Option, } #[cfg(test)] mod tests { use super::*; use crate::tests::test_serialization; #[test] fn test_code_action_response() { test_serialization( &vec![ CodeActionOrCommand::Command(Command { title: "title".to_string(), command: "command".to_string(), arguments: None, }), CodeActionOrCommand::CodeAction(CodeAction { title: "title".to_string(), kind: Some(CodeActionKind::QUICKFIX), command: None, diagnostics: None, edit: None, is_preferred: None, ..CodeAction::default() }), ], r#"[{"title":"title","command":"command"},{"title":"title","kind":"quickfix"}]"#, ) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/code_lens.rs000066400000000000000000000047141500614314100257520ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; use serde_json::Value; use crate::{ Command, DynamicRegistrationClientCapabilities, PartialResultParams, Range, TextDocumentIdentifier, WorkDoneProgressParams, }; pub type CodeLensClientCapabilities = DynamicRegistrationClientCapabilities; /// Code Lens options. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize, Copy)] #[serde(rename_all = "camelCase")] pub struct CodeLensOptions { /// Code lens has a resolve provider as well. #[serde(skip_serializing_if = "Option::is_none")] pub resolve_provider: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeLensParams { /// The document to request code lens for. 
pub text_document: TextDocumentIdentifier, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } /// A code lens represents a command that should be shown along with /// source text, like the number of references, a way to run tests, etc. /// /// A code lens is _unresolved_ when no command is associated to it. For performance /// reasons the creation of a code lens and resolving should be done in two stages. #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeLens { /// The range in which this code lens is valid. Should only span a single line. pub range: Range, /// The command this code lens represents. #[serde(skip_serializing_if = "Option::is_none")] pub command: Option, /// A data entry field that is preserved on a code lens item between /// a code lens and a code lens resolve request. #[serde(skip_serializing_if = "Option::is_none")] pub data: Option, } #[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeLensWorkspaceClientCapabilities { /// Whether the client implementation supports a refresh request sent from the /// server to the client. /// /// Note that this event is global and will force the client to refresh all /// code lenses currently shown. It should be used with absolute care and is /// useful for situation where a server for example detect a project wide /// change that requires such a calculation. 
#[serde(skip_serializing_if = "Option::is_none")] pub refresh_support: Option, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/color.rs000066400000000000000000000101031500614314100251220ustar00rootroot00000000000000use crate::{ DocumentSelector, DynamicRegistrationClientCapabilities, PartialResultParams, Range, TextDocumentIdentifier, TextEdit, WorkDoneProgressParams, }; use serde::{Deserialize, Serialize}; pub type DocumentColorClientCapabilities = DynamicRegistrationClientCapabilities; #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ColorProviderOptions {} #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct StaticTextDocumentColorProviderOptions { /// A document selector to identify the scope of the registration. If set to null /// the document selector provided on the client side will be used. pub document_selector: Option, #[serde(skip_serializing_if = "Option::is_none")] pub id: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum ColorProviderCapability { Simple(bool), ColorProvider(ColorProviderOptions), Options(StaticTextDocumentColorProviderOptions), } impl From for ColorProviderCapability { fn from(from: ColorProviderOptions) -> Self { Self::ColorProvider(from) } } impl From for ColorProviderCapability { fn from(from: StaticTextDocumentColorProviderOptions) -> Self { Self::Options(from) } } impl From for ColorProviderCapability { fn from(from: bool) -> Self { Self::Simple(from) } } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentColorParams { /// The text document pub text_document: TextDocumentIdentifier, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] 
#[serde(rename_all = "camelCase")] pub struct ColorInformation { /// The range in the document where this color appears. pub range: Range, /// The actual color value for this color range. pub color: Color, } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize, Copy)] #[serde(rename_all = "camelCase")] pub struct Color { /// The red component of this color in the range [0-1]. pub red: f32, /// The green component of this color in the range [0-1]. pub green: f32, /// The blue component of this color in the range [0-1]. pub blue: f32, /// The alpha component of this color in the range [0-1]. pub alpha: f32, } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ColorPresentationParams { /// The text document. pub text_document: TextDocumentIdentifier, /// The color information to request presentations for. pub color: Color, /// The range where the color would be inserted. Serves as a context. pub range: Range, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } #[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Default, Clone)] #[serde(rename_all = "camelCase")] pub struct ColorPresentation { /// The label of this color presentation. It will be shown on the color /// picker header. By default this is also the text that is inserted when selecting /// this color presentation. pub label: String, /// An [edit](#TextEdit) which is applied to a document when selecting /// this presentation for the color. When `falsy` the [label](#ColorPresentation.label) /// is used. #[serde(skip_serializing_if = "Option::is_none")] pub text_edit: Option, /// An optional array of additional [text edits](#TextEdit) that are applied when /// selecting this color presentation. Edits must not overlap with the main [edit](#ColorPresentation.textEdit) nor with themselves. 
#[serde(skip_serializing_if = "Option::is_none")] pub additional_text_edits: Option>, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/completion.rs000066400000000000000000000572511500614314100261740ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; use crate::{ Command, Documentation, MarkupKind, PartialResultParams, TagSupport, TextDocumentPositionParams, TextDocumentRegistrationOptions, TextEdit, WorkDoneProgressOptions, WorkDoneProgressParams, }; use crate::Range; use serde_json::Value; use std::fmt::Debug; /// Defines how to interpret the insert text in a completion item #[derive(Eq, PartialEq, Clone, Copy, Serialize, Deserialize)] #[serde(transparent)] pub struct InsertTextFormat(i32); lsp_enum! { impl InsertTextFormat { pub const PLAIN_TEXT: InsertTextFormat = InsertTextFormat(1); pub const SNIPPET: InsertTextFormat = InsertTextFormat(2); } } /// The kind of a completion entry. #[derive(Eq, PartialEq, Clone, Copy, Serialize, Deserialize)] #[serde(transparent)] pub struct CompletionItemKind(i32); lsp_enum! 
{ impl CompletionItemKind { pub const TEXT: CompletionItemKind = CompletionItemKind(1); pub const METHOD: CompletionItemKind = CompletionItemKind(2); pub const FUNCTION: CompletionItemKind = CompletionItemKind(3); pub const CONSTRUCTOR: CompletionItemKind = CompletionItemKind(4); pub const FIELD: CompletionItemKind = CompletionItemKind(5); pub const VARIABLE: CompletionItemKind = CompletionItemKind(6); pub const CLASS: CompletionItemKind = CompletionItemKind(7); pub const INTERFACE: CompletionItemKind = CompletionItemKind(8); pub const MODULE: CompletionItemKind = CompletionItemKind(9); pub const PROPERTY: CompletionItemKind = CompletionItemKind(10); pub const UNIT: CompletionItemKind = CompletionItemKind(11); pub const VALUE: CompletionItemKind = CompletionItemKind(12); pub const ENUM: CompletionItemKind = CompletionItemKind(13); pub const KEYWORD: CompletionItemKind = CompletionItemKind(14); pub const SNIPPET: CompletionItemKind = CompletionItemKind(15); pub const COLOR: CompletionItemKind = CompletionItemKind(16); pub const FILE: CompletionItemKind = CompletionItemKind(17); pub const REFERENCE: CompletionItemKind = CompletionItemKind(18); pub const FOLDER: CompletionItemKind = CompletionItemKind(19); pub const ENUM_MEMBER: CompletionItemKind = CompletionItemKind(20); pub const CONSTANT: CompletionItemKind = CompletionItemKind(21); pub const STRUCT: CompletionItemKind = CompletionItemKind(22); pub const EVENT: CompletionItemKind = CompletionItemKind(23); pub const OPERATOR: CompletionItemKind = CompletionItemKind(24); pub const TYPE_PARAMETER: CompletionItemKind = CompletionItemKind(25); } } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionItemCapability { /// Client supports snippets as insert text. /// /// A snippet can define tab stops and placeholders with `$1`, `$2` /// and `${3:foo}`. `$0` defines the final tab stop, it defaults to /// the end of the snippet. 
Placeholders with equal identifiers are linked, /// that is typing in one will update others too. #[serde(skip_serializing_if = "Option::is_none")] pub snippet_support: Option, /// Client supports commit characters on a completion item. #[serde(skip_serializing_if = "Option::is_none")] pub commit_characters_support: Option, /// Client supports the follow content formats for the documentation /// property. The order describes the preferred format of the client. #[serde(skip_serializing_if = "Option::is_none")] pub documentation_format: Option>, /// Client supports the deprecated property on a completion item. #[serde(skip_serializing_if = "Option::is_none")] pub deprecated_support: Option, /// Client supports the preselect property on a completion item. #[serde(skip_serializing_if = "Option::is_none")] pub preselect_support: Option, /// Client supports the tag property on a completion item. Clients supporting /// tags have to handle unknown tags gracefully. Clients especially need to /// preserve unknown tags when sending a completion item back to the server in /// a resolve call. #[serde( default, skip_serializing_if = "Option::is_none", deserialize_with = "TagSupport::deserialize_compat" )] pub tag_support: Option>, /// Client support insert replace edit to control different behavior if a /// completion item is inserted in the text or should replace text. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub insert_replace_support: Option, /// Indicates which properties a client can resolve lazily on a completion /// item. Before version 3.16.0 only the predefined properties `documentation` /// and `details` could be resolved lazily. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub resolve_support: Option, /// The client supports the `insertTextMode` property on /// a completion item to override the whitespace handling mode /// as defined by the client. 
/// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub insert_text_mode_support: Option, /// The client has support for completion item label /// details (see also `CompletionItemLabelDetails`). /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub label_details_support: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionItemCapabilityResolveSupport { /// The properties that a client can resolve lazily. pub properties: Vec, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InsertTextModeSupport { pub value_set: Vec, } /// How whitespace and indentation is handled during completion /// item insertion. /// /// @since 3.16.0 #[derive(Eq, PartialEq, Clone, Copy, Serialize, Deserialize)] #[serde(transparent)] pub struct InsertTextMode(i32); lsp_enum! { impl InsertTextMode { /// The insertion or replace strings is taken as it is. If the /// value is multi line the lines below the cursor will be /// inserted using the indentation defined in the string value. /// The client will not apply any kind of adjustments to the /// string. pub const AS_IS: InsertTextMode = InsertTextMode(1); /// The editor adjusts leading whitespace of new lines so that /// they match the indentation up to the cursor of the line for /// which the item is accepted. /// /// Consider a line like this: `<2tabs><3tabs>foo`. Accepting a /// multi line completion item is indented using 2 tabs all /// following lines inserted will be indented using 2 tabs as well. pub const ADJUST_INDENTATION: InsertTextMode = InsertTextMode(2); } } #[derive(Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(transparent)] pub struct CompletionItemTag(i32); lsp_enum! 
{ impl CompletionItemTag { pub const DEPRECATED: CompletionItemTag = CompletionItemTag(1); } } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionItemKindCapability { /// The completion item kind values the client supports. When this /// property exists the client also guarantees that it will /// handle values outside its set gracefully and falls back /// to a default value when unknown. /// /// If this property is not present the client only supports /// the completion items kinds from `Text` to `Reference` as defined in /// the initial version of the protocol. #[serde(skip_serializing_if = "Option::is_none")] pub value_set: Option>, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionListCapability { /// The client supports the following itemDefaults on /// a completion list. /// /// The value lists the supported property names of the /// `CompletionList.itemDefaults` object. If omitted /// no properties are supported. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub item_defaults: Option>, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionClientCapabilities { /// Whether completion supports dynamic registration. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// The client supports the following `CompletionItem` specific /// capabilities. #[serde(skip_serializing_if = "Option::is_none")] pub completion_item: Option, #[serde(skip_serializing_if = "Option::is_none")] pub completion_item_kind: Option, /// The client supports to send additional context information for a /// `textDocument/completion` request. 
#[serde(skip_serializing_if = "Option::is_none")] pub context_support: Option, /// The client's default when the completion item doesn't provide a /// `insertTextMode` property. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub insert_text_mode: Option, /// The client supports the following `CompletionList` specific /// capabilities. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub completion_list: Option, } /// A special text edit to provide an insert and a replace operation. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InsertReplaceEdit { /// The string to be inserted. pub new_text: String, /// The range if the insert is requested pub insert: Range, /// The range if the replace is requested. pub replace: Range, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum CompletionTextEdit { Edit(TextEdit), InsertAndReplace(InsertReplaceEdit), } impl From for CompletionTextEdit { fn from(edit: TextEdit) -> Self { CompletionTextEdit::Edit(edit) } } impl From for CompletionTextEdit { fn from(edit: InsertReplaceEdit) -> Self { CompletionTextEdit::InsertAndReplace(edit) } } /// Completion options. #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionOptions { /// The server provides support to resolve additional information for a completion item. #[serde(skip_serializing_if = "Option::is_none")] pub resolve_provider: Option, /// Most tools trigger completion request automatically without explicitly /// requesting it using a keyboard shortcut (e.g. Ctrl+Space). Typically they /// do so when the user starts to type an identifier. For example if the user /// types `c` in a JavaScript file code complete will automatically pop up /// present `console` besides others as a completion item. 
Characters that /// make up identifiers don't need to be listed here. /// /// If code complete should automatically be trigger on characters not being /// valid inside an identifier (for example `.` in JavaScript) list them in /// `triggerCharacters`. #[serde(skip_serializing_if = "Option::is_none")] pub trigger_characters: Option>, /// The list of all possible characters that commit a completion. This field /// can be used if clients don't support individual commit characters per /// completion item. See client capability /// `completion.completionItem.commitCharactersSupport`. /// /// If a server provides both `allCommitCharacters` and commit characters on /// an individual completion item the ones on the completion item win. /// /// @since 3.2.0 #[serde(skip_serializing_if = "Option::is_none")] pub all_commit_characters: Option>, #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, /// The server supports the following `CompletionItem` specific /// capabilities. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub completion_item: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionOptionsCompletionItem { /// The server has support for completion item label /// details (see also `CompletionItemLabelDetails`) when receiving /// a completion item in a resolve call. 
/// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub label_details_support: Option, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct CompletionRegistrationOptions { #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, #[serde(flatten)] pub completion_options: CompletionOptions, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum CompletionResponse { Array(Vec), List(CompletionList), } impl From> for CompletionResponse { fn from(items: Vec) -> Self { CompletionResponse::Array(items) } } impl From for CompletionResponse { fn from(list: CompletionList) -> Self { CompletionResponse::List(list) } } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionParams { // This field was "mixed-in" from TextDocumentPositionParams #[serde(flatten)] pub text_document_position: TextDocumentPositionParams, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, // CompletionParams properties: #[serde(skip_serializing_if = "Option::is_none")] pub context: Option, } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionContext { /// How the completion was triggered. pub trigger_kind: CompletionTriggerKind, /// The trigger character (a single character) that has trigger code complete. /// Is undefined if `triggerKind !== CompletionTriggerKind.TriggerCharacter` #[serde(skip_serializing_if = "Option::is_none")] pub trigger_character: Option, } /// How a completion was triggered. #[derive(Eq, PartialEq, Clone, Copy, Deserialize, Serialize)] #[serde(transparent)] pub struct CompletionTriggerKind(i32); lsp_enum! 
{ impl CompletionTriggerKind { pub const INVOKED: CompletionTriggerKind = CompletionTriggerKind(1); pub const TRIGGER_CHARACTER: CompletionTriggerKind = CompletionTriggerKind(2); pub const TRIGGER_FOR_INCOMPLETE_COMPLETIONS: CompletionTriggerKind = CompletionTriggerKind(3); } } /// Represents a collection of [completion items](#CompletionItem) to be presented /// in the editor. #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionList { /// This list it not complete. Further typing should result in recomputing /// this list. pub is_incomplete: bool, /// The completion items. pub items: Vec, } #[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct CompletionItem { /// The label of this completion item. By default /// also the text that is inserted when selecting /// this completion. pub label: String, /// Additional details for the label /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub label_details: Option, /// The kind of this completion item. Based of the kind /// an icon is chosen by the editor. #[serde(skip_serializing_if = "Option::is_none")] pub kind: Option, /// A human-readable string with additional information /// about this item, like type or symbol information. #[serde(skip_serializing_if = "Option::is_none")] pub detail: Option, /// A human-readable string that represents a doc-comment. #[serde(skip_serializing_if = "Option::is_none")] pub documentation: Option, /// Indicates if this item is deprecated. #[serde(skip_serializing_if = "Option::is_none")] pub deprecated: Option, /// Select this item when showing. #[serde(skip_serializing_if = "Option::is_none")] pub preselect: Option, /// A string that should be used when comparing this item /// with other items. When `falsy` the label is used /// as the sort text for this item. 
#[serde(skip_serializing_if = "Option::is_none")] pub sort_text: Option, /// A string that should be used when filtering a set of /// completion items. When `falsy` the label is used as the /// filter text for this item. #[serde(skip_serializing_if = "Option::is_none")] pub filter_text: Option, /// A string that should be inserted into a document when selecting /// this completion. When `falsy` the label is used as the insert text /// for this item. /// /// The `insertText` is subject to interpretation by the client side. /// Some tools might not take the string literally. For example /// VS Code when code complete is requested in this example /// `con` and a completion item with an `insertText` of /// `console` is provided it will only insert `sole`. Therefore it is /// recommended to use `textEdit` instead since it avoids additional client /// side interpretation. #[serde(skip_serializing_if = "Option::is_none")] pub insert_text: Option, /// The format of the insert text. The format applies to both the `insertText` property /// and the `newText` property of a provided `textEdit`. If omitted defaults to `InsertTextFormat.PlainText`. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub insert_text_format: Option, /// How whitespace and indentation is handled during completion /// item insertion. If not provided the client's default value depends on /// the `textDocument.completion.insertTextMode` client capability. /// /// @since 3.16.0 /// @since 3.17.0 - support for `textDocument.completion.insertTextMode` #[serde(skip_serializing_if = "Option::is_none")] pub insert_text_mode: Option, /// An edit which is applied to a document when selecting /// this completion. When an edit is provided the value of /// insertText is ignored. /// /// Most editors support two different operation when accepting a completion item. One is to insert a /// completion text and the other is to replace an existing text with a completion text. 
Since this can /// usually not predetermined by a server it can report both ranges. Clients need to signal support for /// `InsertReplaceEdits` via the `textDocument.completion.insertReplaceSupport` client capability /// property. /// /// *Note 1:* The text edit's range as well as both ranges from a insert replace edit must be a /// [single line] and they must contain the position at which completion has been requested. /// *Note 2:* If an `InsertReplaceEdit` is returned the edit's insert range must be a prefix of /// the edit's replace range, that means it must be contained and starting at the same position. /// /// @since 3.16.0 additional type `InsertReplaceEdit` #[serde(skip_serializing_if = "Option::is_none")] pub text_edit: Option, /// An optional array of additional text edits that are applied when /// selecting this completion. Edits must not overlap with the main edit /// nor with themselves. #[serde(skip_serializing_if = "Option::is_none")] pub additional_text_edits: Option>, /// An optional command that is executed *after* inserting this completion. *Note* that /// additional modifications to the current document should be described with the /// additionalTextEdits-property. #[serde(skip_serializing_if = "Option::is_none")] pub command: Option, /// An optional set of characters that when pressed while this completion is /// active will accept it first and then type that character. *Note* that all /// commit characters should have `length=1` and that superfluous characters /// will be ignored. #[serde(skip_serializing_if = "Option::is_none")] pub commit_characters: Option>, /// An data entry field that is preserved on a completion item between /// a completion and a completion resolve request. #[serde(skip_serializing_if = "Option::is_none")] pub data: Option, /// Tags for this completion item. 
#[serde(skip_serializing_if = "Option::is_none")] pub tags: Option>, } impl CompletionItem { /// Create a CompletionItem with the minimum possible info (label and detail). pub fn new_simple(label: String, detail: String) -> CompletionItem { CompletionItem { label, detail: Some(detail), ..Self::default() } } } /// Additional details for a completion item label. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct CompletionItemLabelDetails { /// An optional string which is rendered less prominently directly after /// {@link CompletionItemLabel.label label}, without any spacing. Should be /// used for function signatures or type annotations. #[serde(skip_serializing_if = "Option::is_none")] pub detail: Option, /// An optional string which is rendered less prominently after /// {@link CompletionItemLabel.detail}. Should be used for fully qualified /// names or file path. #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, } #[cfg(test)] mod tests { use super::*; use crate::tests::test_deserialization; #[test] fn test_tag_support_deserialization() { let empty = CompletionItemCapability { tag_support: None, ..CompletionItemCapability::default() }; test_deserialization(r#"{}"#, &empty); test_deserialization(r#"{"tagSupport": false}"#, &empty); let t = CompletionItemCapability { tag_support: Some(TagSupport { value_set: vec![] }), ..CompletionItemCapability::default() }; test_deserialization(r#"{"tagSupport": true}"#, &t); let t = CompletionItemCapability { tag_support: Some(TagSupport { value_set: vec![CompletionItemTag::DEPRECATED], }), ..CompletionItemCapability::default() }; test_deserialization(r#"{"tagSupport": {"valueSet": [1]}}"#, &t); } #[test] fn test_debug_enum() { assert_eq!(format!("{:?}", CompletionItemKind::TEXT), "Text"); assert_eq!( format!("{:?}", CompletionItemKind::TYPE_PARAMETER), "TypeParameter" ); } #[test] fn test_try_from_enum() { use 
std::convert::TryInto; assert_eq!("Text".try_into(), Ok(CompletionItemKind::TEXT)); assert_eq!( "TypeParameter".try_into(), Ok(CompletionItemKind::TYPE_PARAMETER) ); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/document_diagnostic.rs000066400000000000000000000244701500614314100300420ustar00rootroot00000000000000use std::{collections::HashMap, sync::Arc}; use serde::{Deserialize, Serialize}; use crate::{ Diagnostic, PartialResultParams, StaticRegistrationOptions, TextDocumentIdentifier, TextDocumentRegistrationOptions, Url, WorkDoneProgressOptions, WorkDoneProgressParams, }; /// Client capabilities specific to diagnostic pull requests. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DiagnosticClientCapabilities { /// Whether implementation supports dynamic registration. /// /// If this is set to `true` the client supports the new `(TextDocumentRegistrationOptions & /// StaticRegistrationOptions)` return value for the corresponding server capability as well. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// Whether the clients supports related documents for document diagnostic pulls. #[serde(skip_serializing_if = "Option::is_none")] pub related_document_support: Option, } /// Diagnostic options. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DiagnosticOptions { /// An optional identifier under which the diagnostics are /// managed by the client. #[serde( default, skip_serializing_if = "Option::is_none", serialize_with = "serialize_option_arc_str", deserialize_with = "deserialize_option_arc_str" )] pub identifier: Option>, /// Whether the language has inter file dependencies, meaning that editing code in one file can /// result in a different diagnostic set in another file. 
Inter file dependencies are common /// for most programming languages and typically uncommon for linters. pub inter_file_dependencies: bool, /// The server provides support for workspace diagnostics as well. pub workspace_diagnostics: bool, #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } fn serialize_option_arc_str( val: &Option>, serializer: S, ) -> Result { serializer.serialize_str(val.as_ref().unwrap()) } fn deserialize_option_arc_str<'de, D: serde::Deserializer<'de>>( deserializer: D, ) -> Result>, D::Error> { Option::::deserialize(deserializer).map(|opt| opt.map(|s| s.into())) } /// Diagnostic registration options. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DiagnosticRegistrationOptions { #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, #[serde(flatten)] pub diagnostic_options: DiagnosticOptions, #[serde(flatten)] pub static_registration_options: StaticRegistrationOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum DiagnosticServerCapabilities { Options(DiagnosticOptions), RegistrationOptions(DiagnosticRegistrationOptions), } /// Parameters of the document diagnostic request. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentDiagnosticParams { /// The text document. pub text_document: TextDocumentIdentifier, /// The additional identifier provided during registration. #[serde( default, skip_serializing_if = "Option::is_none", serialize_with = "serialize_option_arc_str", deserialize_with = "deserialize_option_arc_str" )] pub identifier: Option>, /// The result ID of a previous response if provided. 
pub previous_result_id: Option, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } /// A diagnostic report with a full set of problems. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct FullDocumentDiagnosticReport { /// An optional result ID. If provided it will be sent on the next diagnostic request for the /// same document. #[serde(skip_serializing_if = "Option::is_none")] pub result_id: Option, /// The actual items. pub items: Vec, } /// A diagnostic report indicating that the last returned report is still accurate. /// /// A server can only return `unchanged` if result ids are provided. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct UnchangedDocumentDiagnosticReport { /// A result ID which will be sent on the next diagnostic request for the same document. pub result_id: String, } /// The document diagnostic report kinds. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(tag = "kind", rename_all = "lowercase")] pub enum DocumentDiagnosticReportKind { /// A diagnostic report with a full set of problems. Full(FullDocumentDiagnosticReport), /// A report indicating that the last returned report is still accurate. Unchanged(UnchangedDocumentDiagnosticReport), } impl From for DocumentDiagnosticReportKind { fn from(from: FullDocumentDiagnosticReport) -> Self { DocumentDiagnosticReportKind::Full(from) } } impl From for DocumentDiagnosticReportKind { fn from(from: UnchangedDocumentDiagnosticReport) -> Self { DocumentDiagnosticReportKind::Unchanged(from) } } /// A full diagnostic report with a set of related documents. 
/// /// @since 3.17.0 #[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct RelatedFullDocumentDiagnosticReport { /// Diagnostics of related documents. /// /// This information is useful in programming languages where code in a file A can generate /// diagnostics in a file B which A depends on. An example of such a language is C/C++ where /// macro definitions in a file `a.cpp` result in errors in a header file `b.hpp`. /// /// @since 3.17.0 #[serde(with = "crate::url_map")] #[serde(skip_serializing_if = "Option::is_none")] #[serde(default)] pub related_documents: Option>, // relatedDocuments?: { [uri: string]: FullDocumentDiagnosticReport | UnchangedDocumentDiagnosticReport; }; #[serde(flatten)] pub full_document_diagnostic_report: FullDocumentDiagnosticReport, } /// An unchanged diagnostic report with a set of related documents. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct RelatedUnchangedDocumentDiagnosticReport { /// Diagnostics of related documents. /// /// This information is useful in programming languages where code in a file A can generate /// diagnostics in a file B which A depends on. An example of such a language is C/C++ where /// macro definitions in a file `a.cpp` result in errors in a header file `b.hpp`. /// /// @since 3.17.0 #[serde(with = "crate::url_map")] #[serde(skip_serializing_if = "Option::is_none")] #[serde(default)] pub related_documents: Option>, // relatedDocuments?: { [uri: string]: FullDocumentDiagnosticReport | UnchangedDocumentDiagnosticReport; }; #[serde(flatten)] pub unchanged_document_diagnostic_report: UnchangedDocumentDiagnosticReport, } /// The result of a document diagnostic pull request. 
/// /// A report can either be a full report containing all diagnostics for the requested document or /// an unchanged report indicating that nothing has changed in terms of diagnostics in comparison /// to the last pull request. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(tag = "kind", rename_all = "lowercase")] pub enum DocumentDiagnosticReport { /// A diagnostic report with a full set of problems. Full(RelatedFullDocumentDiagnosticReport), /// A report indicating that the last returned report is still accurate. Unchanged(RelatedUnchangedDocumentDiagnosticReport), } impl From for DocumentDiagnosticReport { fn from(from: RelatedFullDocumentDiagnosticReport) -> Self { DocumentDiagnosticReport::Full(from) } } impl From for DocumentDiagnosticReport { fn from(from: RelatedUnchangedDocumentDiagnosticReport) -> Self { DocumentDiagnosticReport::Unchanged(from) } } /// A partial result for a document diagnostic report. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct DocumentDiagnosticReportPartialResult { #[serde(with = "crate::url_map")] #[serde(skip_serializing_if = "Option::is_none")] #[serde(default)] pub related_documents: Option>, // relatedDocuments?: { [uri: string]: FullDocumentDiagnosticReport | UnchangedDocumentDiagnosticReport; }; } #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(untagged)] pub enum DocumentDiagnosticReportResult { Report(DocumentDiagnosticReport), Partial(DocumentDiagnosticReportPartialResult), } impl From for DocumentDiagnosticReportResult { fn from(from: DocumentDiagnosticReport) -> Self { DocumentDiagnosticReportResult::Report(from) } } impl From for DocumentDiagnosticReportResult { fn from(from: DocumentDiagnosticReportPartialResult) -> Self { DocumentDiagnosticReportResult::Partial(from) } } /// Cancellation data returned from a diagnostic request. 
/// /// If no data is provided, it defaults to `{ retrigger_request: true }`. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct DiagnosticServerCancellationData { pub retrigger_request: bool, } impl Default for DiagnosticServerCancellationData { fn default() -> Self { DiagnosticServerCancellationData { retrigger_request: true, } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/document_highlight.rs000066400000000000000000000033111500614314100276540ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; use crate::{ DynamicRegistrationClientCapabilities, PartialResultParams, Range, TextDocumentPositionParams, WorkDoneProgressParams, }; pub type DocumentHighlightClientCapabilities = DynamicRegistrationClientCapabilities; #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentHighlightParams { #[serde(flatten)] pub text_document_position_params: TextDocumentPositionParams, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } /// A document highlight is a range inside a text document which deserves /// special attention. Usually a document highlight is visualized by changing /// the background color of its range. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct DocumentHighlight { /// The range this highlight applies to. pub range: Range, /// The highlight kind, default is DocumentHighlightKind.Text. #[serde(skip_serializing_if = "Option::is_none")] pub kind: Option, } /// A document highlight kind. #[derive(Eq, PartialEq, Copy, Clone, Deserialize, Serialize)] #[serde(transparent)] pub struct DocumentHighlightKind(i32); lsp_enum! { impl DocumentHighlightKind { /// A textual occurrence. 
pub const TEXT: DocumentHighlightKind = DocumentHighlightKind(1); /// Read-access of a symbol, like reading a variable. pub const READ: DocumentHighlightKind = DocumentHighlightKind(2); /// Write-access of a symbol, like writing to a variable. pub const WRITE: DocumentHighlightKind = DocumentHighlightKind(3); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/document_link.rs000066400000000000000000000046351500614314100266540ustar00rootroot00000000000000use crate::{ PartialResultParams, Range, TextDocumentIdentifier, Url, WorkDoneProgressOptions, WorkDoneProgressParams, }; use serde::{Deserialize, Serialize}; use serde_json::Value; #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentLinkClientCapabilities { /// Whether document link supports dynamic registration. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// Whether the client support the `tooltip` property on `DocumentLink`. #[serde(skip_serializing_if = "Option::is_none")] pub tooltip_support: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentLinkOptions { /// Document links have a resolve provider as well. #[serde(skip_serializing_if = "Option::is_none")] pub resolve_provider: Option, #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentLinkParams { /// The document to provide document links for. pub text_document: TextDocumentIdentifier, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } /// A document link is a range in a text document that links to an internal or external resource, like another /// text document or a web site. 
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct DocumentLink { /// The range this link applies to. pub range: Range, /// The uri this link points to. #[serde(skip_serializing_if = "Option::is_none")] pub target: Option, /// The tooltip text when you hover over this link. /// /// If a tooltip is provided, is will be displayed in a string that includes instructions on how to /// trigger the link, such as `{0} (ctrl + click)`. The specific instructions vary depending on OS, /// user settings, and localization. #[serde(skip_serializing_if = "Option::is_none")] pub tooltip: Option, /// A data entry field that is preserved on a document link between a DocumentLinkRequest /// and a DocumentLinkResolveRequest. #[serde(skip_serializing_if = "Option::is_none")] pub data: Option, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/document_symbols.rs000066400000000000000000000113421500614314100274000ustar00rootroot00000000000000use crate::{ Location, PartialResultParams, Range, SymbolKind, SymbolKindCapability, TextDocumentIdentifier, WorkDoneProgressParams, }; use crate::{SymbolTag, TagSupport}; use serde::{Deserialize, Serialize}; #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentSymbolClientCapabilities { /// This capability supports dynamic registration. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// Specific capabilities for the `SymbolKind`. #[serde(skip_serializing_if = "Option::is_none")] pub symbol_kind: Option, /// The client support hierarchical document symbols. #[serde(skip_serializing_if = "Option::is_none")] pub hierarchical_document_symbol_support: Option, /// The client supports tags on `SymbolInformation`. Tags are supported on /// `DocumentSymbol` if `hierarchicalDocumentSymbolSupport` is set to true. /// Clients supporting tags have to handle unknown tags gracefully. 
/// /// @since 3.16.0 #[serde( default, skip_serializing_if = "Option::is_none", deserialize_with = "TagSupport::deserialize_compat" )] pub tag_support: Option>, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum DocumentSymbolResponse { Flat(Vec), Nested(Vec), } impl From> for DocumentSymbolResponse { fn from(info: Vec) -> Self { DocumentSymbolResponse::Flat(info) } } impl From> for DocumentSymbolResponse { fn from(symbols: Vec) -> Self { DocumentSymbolResponse::Nested(symbols) } } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentSymbolParams { /// The text document. pub text_document: TextDocumentIdentifier, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } /// Represents programming constructs like variables, classes, interfaces etc. /// that appear in a document. Document symbols can be hierarchical and they have two ranges: /// one that encloses its definition and one that points to its most interesting range, /// e.g. the range of an identifier. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentSymbol { /// The name of this symbol. pub name: String, /// More detail for this symbol, e.g the signature of a function. If not provided the /// name is used. #[serde(skip_serializing_if = "Option::is_none")] pub detail: Option, /// The kind of this symbol. pub kind: SymbolKind, /// Tags for this completion item. /// /// @since 3.15.0 #[serde(skip_serializing_if = "Option::is_none")] pub tags: Option>, /// Indicates if this symbol is deprecated. #[serde(skip_serializing_if = "Option::is_none")] #[deprecated(note = "Use tags instead")] pub deprecated: Option, /// The range enclosing this symbol not including leading/trailing whitespace but everything else /// like comments. 
This information is typically used to determine if the the clients cursor is /// inside the symbol to reveal in the symbol in the UI. pub range: Range, /// The range that should be selected and revealed when this symbol is being picked, e.g the name of a function. /// Must be contained by the the `range`. pub selection_range: Range, /// Children of this symbol, e.g. properties of a class. #[serde(skip_serializing_if = "Option::is_none")] pub children: Option>, } /// Represents information about programming constructs like variables, classes, /// interfaces etc. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SymbolInformation { /// The name of this symbol. pub name: String, /// The kind of this symbol. pub kind: SymbolKind, /// Tags for this completion item. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub tags: Option>, /// Indicates if this symbol is deprecated. #[serde(skip_serializing_if = "Option::is_none")] #[deprecated(note = "Use tags instead")] pub deprecated: Option, /// The location of this symbol. pub location: Location, /// The name of the symbol containing this symbol. #[serde(skip_serializing_if = "Option::is_none")] pub container_name: Option, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/error_codes.rs000066400000000000000000000043201500614314100263160ustar00rootroot00000000000000//! In this module we only define constants for lsp specific error codes. //! There are other error codes that are defined in the //! [JSON RPC specification](https://www.jsonrpc.org/specification#error_object). /// Defined in the LSP specification but in the range reserved for JSON-RPC error codes, /// namely the -32099 to -32000 "Reserved for implementation-defined server-errors." range. /// The code has, nonetheless, been left in this range for backwards compatibility reasons. 
pub const SERVER_NOT_INITIALIZED: i64 = -32002; /// Defined in the LSP specification but in the range reserved for JSON-RPC error codes, /// namely the -32099 to -32000 "Reserved for implementation-defined server-errors." range. /// The code has, nonetheless, left in this range for backwards compatibility reasons. pub const UNKNOWN_ERROR_CODE: i64 = -32001; /// This is the start range of LSP reserved error codes. /// It doesn't denote a real error code. /// /// @since 3.16.0 pub const LSP_RESERVED_ERROR_RANGE_START: i64 = -32899; /// A request failed but it was syntactically correct, e.g the /// method name was known and the parameters were valid. The error /// message should contain human readable information about why /// the request failed. /// /// @since 3.17.0 pub const REQUEST_FAILED: i64 = -32803; /// The server cancelled the request. This error code should /// only be used for requests that explicitly support being /// server cancellable. /// /// @since 3.17.0 pub const SERVER_CANCELLED: i64 = -32802; /// The server detected that the content of a document got /// modified outside normal conditions. A server should /// NOT send this error code if it detects a content change /// in it unprocessed messages. The result even computed /// on an older state might still be useful for the client. /// /// If a client decides that a result is not of any use anymore /// the client should cancel the request. pub const CONTENT_MODIFIED: i64 = -32801; /// The client has canceled a request and a server as detected /// the cancel. pub const REQUEST_CANCELLED: i64 = -32800; /// This is the end range of LSP reserved error codes. /// It doesn't denote a real error code. 
/// /// @since 3.16.0 pub const LSP_RESERVED_ERROR_RANGE_END: i64 = -32800; helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/file_operations.rs000066400000000000000000000170471500614314100272040ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WorkspaceFileOperationsClientCapabilities { /// Whether the client supports dynamic registration for file /// requests/notifications. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// The client has support for sending didCreateFiles notifications. #[serde(skip_serializing_if = "Option::is_none")] pub did_create: Option, /// The server is interested in receiving willCreateFiles requests. #[serde(skip_serializing_if = "Option::is_none")] pub will_create: Option, /// The server is interested in receiving didRenameFiles requests. #[serde(skip_serializing_if = "Option::is_none")] pub did_rename: Option, /// The server is interested in receiving willRenameFiles requests. #[serde(skip_serializing_if = "Option::is_none")] pub will_rename: Option, /// The server is interested in receiving didDeleteFiles requests. #[serde(skip_serializing_if = "Option::is_none")] pub did_delete: Option, /// The server is interested in receiving willDeleteFiles requests. #[serde(skip_serializing_if = "Option::is_none")] pub will_delete: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WorkspaceFileOperationsServerCapabilities { /// The server is interested in receiving didCreateFiles /// notifications. #[serde(skip_serializing_if = "Option::is_none")] pub did_create: Option, /// The server is interested in receiving willCreateFiles requests. 
#[serde(skip_serializing_if = "Option::is_none")] pub will_create: Option, /// The server is interested in receiving didRenameFiles /// notifications. #[serde(skip_serializing_if = "Option::is_none")] pub did_rename: Option, /// The server is interested in receiving willRenameFiles requests. #[serde(skip_serializing_if = "Option::is_none")] pub will_rename: Option, /// The server is interested in receiving didDeleteFiles file /// notifications. #[serde(skip_serializing_if = "Option::is_none")] pub did_delete: Option, /// The server is interested in receiving willDeleteFiles file /// requests. #[serde(skip_serializing_if = "Option::is_none")] pub will_delete: Option, } /// The options to register for file operations. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FileOperationRegistrationOptions { /// The actual filters. pub filters: Vec, } /// A filter to describe in which file operation requests or notifications /// the server is interested in. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FileOperationFilter { /// A Uri like `file` or `untitled`. pub scheme: Option, /// The actual file operation pattern. pub pattern: FileOperationPattern, } /// A pattern kind describing if a glob pattern matches a file a folder or /// both. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)] #[serde(rename_all = "lowercase")] pub enum FileOperationPatternKind { /// The pattern matches a file only. File, /// The pattern matches a folder only. Folder, } /// Matching options for the file operation pattern. /// /// @since 3.16.0 /// #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FileOperationPatternOptions { /// The pattern should be matched ignoring casing. 
#[serde(skip_serializing_if = "Option::is_none")] pub ignore_case: Option, } /// A pattern to describe in which file operation requests or notifications /// the server is interested in. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FileOperationPattern { /// The glob pattern to match. Glob patterns can have the following syntax: /// - `*` to match one or more characters in a path segment /// - `?` to match on one character in a path segment /// - `**` to match any number of path segments, including none /// - `{}` to group conditions (e.g. `**​/*.{ts,js}` matches all TypeScript /// and JavaScript files) /// - `[]` to declare a range of characters to match in a path segment /// (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …) /// - `[!...]` to negate a range of characters to match in a path segment /// (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but /// not `example.0`) pub glob: String, /// Whether to match files or folders with this pattern. /// /// Matches both if undefined. #[serde(skip_serializing_if = "Option::is_none")] pub matches: Option, /// Additional options used during matching. #[serde(skip_serializing_if = "Option::is_none")] pub options: Option, } /// The parameters sent in notifications/requests for user-initiated creation /// of files. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CreateFilesParams { /// An array of all files/folders created in this operation. pub files: Vec, } /// Represents information on a file/folder create. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FileCreate { /// A file:// URI for the location of the file/folder being created. 
pub uri: String, } /// The parameters sent in notifications/requests for user-initiated renames /// of files. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct RenameFilesParams { /// An array of all files/folders renamed in this operation. When a folder /// is renamed, only the folder will be included, and not its children. pub files: Vec, } /// Represents information on a file/folder rename. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FileRename { /// A file:// URI for the original location of the file/folder being renamed. pub old_uri: String, /// A file:// URI for the new location of the file/folder being renamed. pub new_uri: String, } /// The parameters sent in notifications/requests for user-initiated deletes /// of files. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DeleteFilesParams { /// An array of all files/folders deleted in this operation. pub files: Vec, } /// Represents information on a file/folder delete. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FileDelete { /// A file:// URI for the location of the file/folder being deleted. pub uri: String, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/folding_range.rs000066400000000000000000000127201500614314100266110ustar00rootroot00000000000000use crate::{ PartialResultParams, StaticTextDocumentColorProviderOptions, TextDocumentIdentifier, WorkDoneProgressParams, }; use serde::{Deserialize, Serialize}; #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FoldingRangeParams { /// The text document. 
pub text_document: TextDocumentIdentifier, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum FoldingRangeProviderCapability { Simple(bool), FoldingProvider(FoldingProviderOptions), Options(StaticTextDocumentColorProviderOptions), } impl From for FoldingRangeProviderCapability { fn from(from: StaticTextDocumentColorProviderOptions) -> Self { Self::Options(from) } } impl From for FoldingRangeProviderCapability { fn from(from: FoldingProviderOptions) -> Self { Self::FoldingProvider(from) } } impl From for FoldingRangeProviderCapability { fn from(from: bool) -> Self { Self::Simple(from) } } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct FoldingProviderOptions {} #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FoldingRangeKindCapability { /// The folding range kind values the client supports. When this /// property exists the client also guarantees that it will /// handle values outside its set gracefully and falls back /// to a default value when unknown. #[serde(skip_serializing_if = "Option::is_none")] pub value_set: Option>, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FoldingRangeCapability { /// If set, the client signals that it supports setting collapsedText on /// folding ranges to display custom labels instead of the default text. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub collapsed_text: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FoldingRangeClientCapabilities { /// Whether implementation supports dynamic registration for folding range providers. 
If this is set to `true` /// the client supports the new `(FoldingRangeProviderOptions & TextDocumentRegistrationOptions & StaticRegistrationOptions)` /// return value for the corresponding server capability as well. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// The maximum number of folding ranges that the client prefers to receive per document. The value serves as a /// hint, servers are free to follow the limit. #[serde(skip_serializing_if = "Option::is_none")] pub range_limit: Option, /// If set, the client signals that it only supports folding complete lines. If set, client will /// ignore specified `startCharacter` and `endCharacter` properties in a FoldingRange. #[serde(skip_serializing_if = "Option::is_none")] pub line_folding_only: Option, /// Specific options for the folding range kind. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub folding_range_kind: Option, /// Specific options for the folding range. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub folding_range: Option, } /// Enum of known range kinds #[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)] #[serde(rename_all = "lowercase")] pub enum FoldingRangeKind { /// Folding range for a comment Comment, /// Folding range for a imports or includes Imports, /// Folding range for a region (e.g. `#region`) Region, } /// Represents a folding range. #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FoldingRange { /// The zero-based line number from where the folded range starts. pub start_line: u32, /// The zero-based character offset from where the folded range starts. If not defined, defaults to the length of the start line. #[serde(skip_serializing_if = "Option::is_none")] pub start_character: Option, /// The zero-based line number where the folded range ends. 
pub end_line: u32, /// The zero-based character offset before the folded range ends. If not defined, defaults to the length of the end line. #[serde(skip_serializing_if = "Option::is_none")] pub end_character: Option, /// Describes the kind of the folding range such as `comment' or 'region'. The kind /// is used to categorize folding ranges and used by commands like 'Fold all comments'. See /// [FoldingRangeKind](#FoldingRangeKind) for an enumeration of standardized kinds. #[serde(skip_serializing_if = "Option::is_none")] pub kind: Option, /// The text that the client should show when the specified range is /// collapsed. If not defined or not supported by the client, a default /// will be chosen by the client. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub collapsed_text: Option, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/formatting.rs000066400000000000000000000115061500614314100261660ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; use crate::{ DocumentSelector, DynamicRegistrationClientCapabilities, Range, TextDocumentIdentifier, TextDocumentPositionParams, WorkDoneProgressParams, }; use std::collections::HashMap; pub type DocumentFormattingClientCapabilities = DynamicRegistrationClientCapabilities; pub type DocumentRangeFormattingClientCapabilities = DynamicRegistrationClientCapabilities; pub type DocumentOnTypeFormattingClientCapabilities = DynamicRegistrationClientCapabilities; /// Format document on type options #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentOnTypeFormattingOptions { /// A character on which formatting should be triggered, like `}`. pub first_trigger_character: String, /// More trigger characters. 
#[serde(skip_serializing_if = "Option::is_none")] pub more_trigger_character: Option>, } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentFormattingParams { /// The document to format. pub text_document: TextDocumentIdentifier, /// The format options. pub options: FormattingOptions, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, } /// Value-object describing what options formatting should use. #[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct FormattingOptions { /// Size of a tab in spaces. pub tab_size: u32, /// Prefer spaces over tabs. pub insert_spaces: bool, /// Signature for further properties. #[serde(flatten)] pub properties: HashMap, /// Trim trailing whitespace on a line. #[serde(skip_serializing_if = "Option::is_none")] pub trim_trailing_whitespace: Option, /// Insert a newline character at the end of the file if one does not exist. #[serde(skip_serializing_if = "Option::is_none")] pub insert_final_newline: Option, /// Trim all newlines after the final newline at the end of the file. #[serde(skip_serializing_if = "Option::is_none")] pub trim_final_newlines: Option, } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum FormattingProperty { Bool(bool), Number(i32), String(String), } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentRangeFormattingParams { /// The document to format. pub text_document: TextDocumentIdentifier, /// The range to format pub range: Range, /// The format options pub options: FormattingOptions, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentOnTypeFormattingParams { /// Text Document and Position fields. 
#[serde(flatten)] pub text_document_position: TextDocumentPositionParams, /// The character that has been typed. pub ch: String, /// The format options. pub options: FormattingOptions, } /// Extends TextDocumentRegistrationOptions #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentOnTypeFormattingRegistrationOptions { /// A document selector to identify the scope of the registration. If set to null /// the document selector provided on the client side will be used. pub document_selector: Option, /// A character on which formatting should be triggered, like `}`. pub first_trigger_character: String, /// More trigger characters. #[serde(skip_serializing_if = "Option::is_none")] pub more_trigger_character: Option>, } #[cfg(test)] mod tests { use super::*; use crate::tests::test_serialization; #[test] fn formatting_options() { test_serialization( &FormattingOptions { tab_size: 123, insert_spaces: true, properties: HashMap::new(), trim_trailing_whitespace: None, insert_final_newline: None, trim_final_newlines: None, }, r#"{"tabSize":123,"insertSpaces":true}"#, ); test_serialization( &FormattingOptions { tab_size: 123, insert_spaces: true, properties: vec![("prop".to_string(), FormattingProperty::Number(1))] .into_iter() .collect(), trim_trailing_whitespace: None, insert_final_newline: None, trim_final_newlines: None, }, r#"{"tabSize":123,"insertSpaces":true,"prop":1}"#, ); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/hover.rs000066400000000000000000000051761500614314100251450ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; use crate::{ MarkedString, MarkupContent, MarkupKind, Range, TextDocumentPositionParams, TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams, }; #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct HoverClientCapabilities { /// Whether 
completion supports dynamic registration. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// Client supports the follow content formats for the content /// property. The order describes the preferred format of the client. #[serde(skip_serializing_if = "Option::is_none")] pub content_format: Option>, } /// Hover options. #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct HoverOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct HoverRegistrationOptions { #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, #[serde(flatten)] pub hover_options: HoverOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum HoverProviderCapability { Simple(bool), Options(HoverOptions), } impl From for HoverProviderCapability { fn from(from: HoverOptions) -> Self { Self::Options(from) } } impl From for HoverProviderCapability { fn from(from: bool) -> Self { Self::Simple(from) } } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct HoverParams { #[serde(flatten)] pub text_document_position_params: TextDocumentPositionParams, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, } /// The result of a hover request. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct Hover { /// The hover's content pub contents: HoverContents, /// An optional range is a range inside a text document /// that is used to visualize a hover, e.g. by changing the background color. #[serde(skip_serializing_if = "Option::is_none")] pub range: Option, } /// Hover contents could be single entry or multiple entries. 
#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum HoverContents { Scalar(MarkedString), Array(Vec), Markup(MarkupContent), } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/inlay_hint.rs000066400000000000000000000222641500614314100261550ustar00rootroot00000000000000use crate::{ Command, LSPAny, Location, MarkupContent, Position, Range, StaticRegistrationOptions, TextDocumentIdentifier, TextDocumentRegistrationOptions, TextEdit, WorkDoneProgressOptions, WorkDoneProgressParams, }; use serde::{Deserialize, Serialize}; #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] #[serde(untagged)] pub enum InlayHintServerCapabilities { Options(InlayHintOptions), RegistrationOptions(InlayHintRegistrationOptions), } /// Inlay hint client capabilities. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlayHintClientCapabilities { /// Whether inlay hints support dynamic registration. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// Indicates which properties a client can resolve lazily on a inlay /// hint. #[serde(skip_serializing_if = "Option::is_none")] pub resolve_support: Option, } /// Inlay hint options used during static registration. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlayHintOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, /// The server provides support to resolve additional /// information for an inlay hint item. #[serde(skip_serializing_if = "Option::is_none")] pub resolve_provider: Option, } /// Inlay hint options used during static or dynamic registration. 
/// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlayHintRegistrationOptions { #[serde(flatten)] pub inlay_hint_options: InlayHintOptions, #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, #[serde(flatten)] pub static_registration_options: StaticRegistrationOptions, } /// A parameter literal used in inlay hint requests. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlayHintParams { #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, /// The text document. pub text_document: TextDocumentIdentifier, /// The visible document range for which inlay hints should be computed. pub range: Range, } /// Inlay hint information. /// /// @since 3.17.0 #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlayHint { /// The position of this hint. pub position: Position, /// The label of this hint. A human readable string or an array of /// InlayHintLabelPart label parts. /// /// *Note* that neither the string nor the label part can be empty. pub label: InlayHintLabel, /// The kind of this hint. Can be omitted in which case the client /// should fall back to a reasonable default. #[serde(skip_serializing_if = "Option::is_none")] pub kind: Option, /// Optional text edits that are performed when accepting this inlay hint. /// /// *Note* that edits are expected to change the document so that the inlay /// hint (or its nearest variant) is now part of the document and the inlay /// hint itself is now obsolete. /// /// Depending on the client capability `inlayHint.resolveSupport` clients /// might resolve this property late using the resolve request. #[serde(skip_serializing_if = "Option::is_none")] pub text_edits: Option>, /// The tooltip text when you hover over this item. 
/// /// Depending on the client capability `inlayHint.resolveSupport` clients /// might resolve this property late using the resolve request. #[serde(skip_serializing_if = "Option::is_none")] pub tooltip: Option, /// Render padding before the hint. /// /// Note: Padding should use the editor's background color, not the /// background color of the hint itself. That means padding can be used /// to visually align/separate an inlay hint. #[serde(skip_serializing_if = "Option::is_none")] pub padding_left: Option, /// Render padding after the hint. /// /// Note: Padding should use the editor's background color, not the /// background color of the hint itself. That means padding can be used /// to visually align/separate an inlay hint. #[serde(skip_serializing_if = "Option::is_none")] pub padding_right: Option, /// A data entry field that is preserved on a inlay hint between /// a `textDocument/inlayHint` and a `inlayHint/resolve` request. #[serde(skip_serializing_if = "Option::is_none")] pub data: Option, } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum InlayHintLabel { String(String), LabelParts(Vec), } impl From for InlayHintLabel { #[inline] fn from(from: String) -> Self { Self::String(from) } } impl From> for InlayHintLabel { #[inline] fn from(from: Vec) -> Self { Self::LabelParts(from) } } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum InlayHintTooltip { String(String), MarkupContent(MarkupContent), } impl From for InlayHintTooltip { #[inline] fn from(from: String) -> Self { Self::String(from) } } impl From for InlayHintTooltip { #[inline] fn from(from: MarkupContent) -> Self { Self::MarkupContent(from) } } /// An inlay hint label part allows for interactive and composite labels /// of inlay hints. #[derive(Debug, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlayHintLabelPart { /// The value of this label part. 
pub value: String, /// The tooltip text when you hover over this label part. Depending on /// the client capability `inlayHint.resolveSupport` clients might resolve /// this property late using the resolve request. #[serde(skip_serializing_if = "Option::is_none")] pub tooltip: Option, /// An optional source code location that represents this /// label part. /// /// The editor will use this location for the hover and for code navigation /// features: This part will become a clickable link that resolves to the /// definition of the symbol at the given location (not necessarily the /// location itself), it shows the hover that shows at the given location, /// and it shows a context menu with further code navigation commands. /// /// Depending on the client capability `inlayHint.resolveSupport` clients /// might resolve this property late using the resolve request. #[serde(skip_serializing_if = "Option::is_none")] pub location: Option, /// An optional command for this label part. /// /// Depending on the client capability `inlayHint.resolveSupport` clients /// might resolve this property late using the resolve request. #[serde(skip_serializing_if = "Option::is_none")] pub command: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum InlayHintLabelPartTooltip { String(String), MarkupContent(MarkupContent), } impl From for InlayHintLabelPartTooltip { #[inline] fn from(from: String) -> Self { Self::String(from) } } impl From for InlayHintLabelPartTooltip { #[inline] fn from(from: MarkupContent) -> Self { Self::MarkupContent(from) } } /// Inlay hint kinds. /// /// @since 3.17.0 #[derive(Eq, PartialEq, Copy, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct InlayHintKind(i32); lsp_enum! { impl InlayHintKind { /// An inlay hint that for a type annotation. pub const TYPE: InlayHintKind = InlayHintKind(1); /// An inlay hint that is for a parameter. 
pub const PARAMETER: InlayHintKind = InlayHintKind(2); } } /// Inlay hint client capabilities. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlayHintResolveClientCapabilities { /// The properties that a client can resolve lazily. pub properties: Vec, } /// Client workspace capabilities specific to inlay hints. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlayHintWorkspaceClientCapabilities { /// Whether the client implementation supports a refresh request sent from /// the server to the client. /// /// Note that this event is global and will force the client to refresh all /// inlay hints currently shown. It should be used with absolute care and /// is useful for situation where a server for example detects a project wide /// change that requires such a calculation. #[serde(skip_serializing_if = "Option::is_none")] pub refresh_support: Option, } // TODO(sno2): add tests once stabilized helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/inline_completion.rs000066400000000000000000000140651500614314100275260ustar00rootroot00000000000000use crate::{ Command, InsertTextFormat, Range, StaticRegistrationOptions, TextDocumentPositionParams, TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams, }; use serde::{Deserialize, Serialize}; /// Client capabilities specific to inline completions. /// /// @since 3.18.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlineCompletionClientCapabilities { /// Whether implementation supports dynamic registration for inline completion providers. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, } /// Inline completion options used during static registration. 
/// /// @since 3.18.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct InlineCompletionOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } /// Inline completion options used during static or dynamic registration. /// // @since 3.18.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct InlineCompletionRegistrationOptions { #[serde(flatten)] pub inline_completion_options: InlineCompletionOptions, #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, #[serde(flatten)] pub static_registration_options: StaticRegistrationOptions, } /// A parameter literal used in inline completion requests. /// /// @since 3.18.0 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlineCompletionParams { #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub text_document_position: TextDocumentPositionParams, /// Additional information about the context in which inline completions were requested. pub context: InlineCompletionContext, } /// Describes how an [`InlineCompletionItemProvider`] was triggered. /// /// @since 3.18.0 #[derive(Eq, PartialEq, Clone, Copy, Deserialize, Serialize)] pub struct InlineCompletionTriggerKind(i32); lsp_enum! { impl InlineCompletionTriggerKind { /// Completion was triggered explicitly by a user gesture. /// Return multiple completion items to enable cycling through them. pub const Invoked: InlineCompletionTriggerKind = InlineCompletionTriggerKind(1); /// Completion was triggered automatically while editing. /// It is sufficient to return a single completion item in this case. pub const Automatic: InlineCompletionTriggerKind = InlineCompletionTriggerKind(2); } } /// Describes the currently selected completion item. 
/// /// @since 3.18.0 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct SelectedCompletionInfo { /// The range that will be replaced if this completion item is accepted. pub range: Range, /// The text the range will be replaced with if this completion is /// accepted. pub text: String, } /// Provides information about the context in which an inline completion was /// requested. /// /// @since 3.18.0 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlineCompletionContext { /// Describes how the inline completion was triggered. pub trigger_kind: InlineCompletionTriggerKind, /// Provides information about the currently selected item in the /// autocomplete widget if it is visible. /// /// If set, provided inline completions must extend the text of the /// selected item and use the same range, otherwise they are not shown as /// preview. /// As an example, if the document text is `console.` and the selected item /// is `.log` replacing the `.` in the document, the inline completion must /// also replace `.` and start with `.log`, for example `.log()`. /// /// Inline completion providers are requested again whenever the selected /// item changes. #[serde(skip_serializing_if = "Option::is_none")] pub selected_completion_info: Option, } /// InlineCompletion response can be multiple completion items, or a list of completion items #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum InlineCompletionResponse { Array(Vec), List(InlineCompletionList), } /// Represents a collection of [`InlineCompletionItem`] to be presented in the editor. /// /// @since 3.18.0 #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct InlineCompletionList { /// The inline completion items pub items: Vec, } /// An inline completion item represents a text snippet that is proposed inline /// to complete text that is being typed. 
/// /// @since 3.18.0 #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct InlineCompletionItem { /// The text to replace the range with. Must be set. /// Is used both for the preview and the accept operation. pub insert_text: String, /// A text that is used to decide if this inline completion should be /// shown. When `falsy` the [`InlineCompletionItem::insertText`] is /// used. /// /// An inline completion is shown if the text to replace is a prefix of the /// filter text. #[serde(skip_serializing_if = "Option::is_none")] pub filter_text: Option, /// The range to replace. /// Must begin and end on the same line. /// /// Prefer replacements over insertions to provide a better experience when /// the user deletes typed text. #[serde(skip_serializing_if = "Option::is_none")] pub range: Option, /// An optional command that is executed *after* inserting this /// completion. #[serde(skip_serializing_if = "Option::is_none")] pub command: Option, /// The format of the insert text. The format applies to the `insertText`. /// If omitted defaults to `InsertTextFormat.PlainText`. #[serde(skip_serializing_if = "Option::is_none")] pub insert_text_format: Option, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/inline_value.rs000066400000000000000000000162021500614314100264640ustar00rootroot00000000000000use crate::{ DynamicRegistrationClientCapabilities, Range, StaticRegistrationOptions, TextDocumentIdentifier, TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams, }; use serde::{Deserialize, Serialize}; pub type InlineValueClientCapabilities = DynamicRegistrationClientCapabilities; #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum InlineValueServerCapabilities { Options(InlineValueOptions), RegistrationOptions(InlineValueRegistrationOptions), } /// Inline value options used during static registration. 
/// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct InlineValueOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } /// Inline value options used during static or dynamic registration. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct InlineValueRegistrationOptions { #[serde(flatten)] pub inline_value_options: InlineValueOptions, #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, #[serde(flatten)] pub static_registration_options: StaticRegistrationOptions, } /// A parameter literal used in inline value requests. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlineValueParams { #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, /// The text document. pub text_document: TextDocumentIdentifier, /// The document range for which inline values should be computed. pub range: Range, /// Additional information about the context in which inline values were /// requested. pub context: InlineValueContext, } /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlineValueContext { /// The stack frame (as a DAP Id) where the execution has stopped. pub frame_id: i32, /// The document range where execution has stopped. /// Typically the end position of the range denotes the line where the /// inline values are shown. pub stopped_location: Range, } /// Provide inline value as text. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct InlineValueText { /// The document range for which the inline value applies. pub range: Range, /// The text of the inline value. pub text: String, } /// Provide inline value through a variable lookup. 
/// /// If only a range is specified, the variable name will be extracted from /// the underlying document. /// /// An optional variable name can be used to override the extracted name. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlineValueVariableLookup { /// The document range for which the inline value applies. /// The range is used to extract the variable name from the underlying /// document. pub range: Range, /// If specified the name of the variable to look up. #[serde(skip_serializing_if = "Option::is_none")] pub variable_name: Option, /// How to perform the lookup. pub case_sensitive_lookup: bool, } /// Provide an inline value through an expression evaluation. /// /// If only a range is specified, the expression will be extracted from the /// underlying document. /// /// An optional expression can be used to override the extracted expression. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InlineValueEvaluatableExpression { /// The document range for which the inline value applies. /// The range is used to extract the evaluatable expression from the /// underlying document. pub range: Range, /// If specified the expression overrides the extracted expression. #[serde(skip_serializing_if = "Option::is_none")] pub expression: Option, } /// Inline value information can be provided by different means: /// - directly as a text value (class InlineValueText). /// - as a name to use for a variable lookup (class InlineValueVariableLookup) /// - as an evaluatable expression (class InlineValueEvaluatableExpression) /// /// The InlineValue types combines all inline value types into one type. 
/// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum InlineValue { Text(InlineValueText), VariableLookup(InlineValueVariableLookup), EvaluatableExpression(InlineValueEvaluatableExpression), } impl From for InlineValue { #[inline] fn from(from: InlineValueText) -> Self { Self::Text(from) } } impl From for InlineValue { #[inline] fn from(from: InlineValueVariableLookup) -> Self { Self::VariableLookup(from) } } impl From for InlineValue { #[inline] fn from(from: InlineValueEvaluatableExpression) -> Self { Self::EvaluatableExpression(from) } } /// Client workspace capabilities specific to inline values. #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] /// /// @since 3.17.0 #[serde(rename_all = "camelCase")] pub struct InlineValueWorkspaceClientCapabilities { /// Whether the client implementation supports a refresh request sent from /// the server to the client. /// /// Note that this event is global and will force the client to refresh all /// inline values currently shown. It should be used with absolute care and /// is useful for situation where a server for example detect a project wide /// change that requires such a calculation. 
#[serde(skip_serializing_if = "Option::is_none")] pub refresh_support: Option, } #[cfg(test)] mod tests { use super::*; use crate::tests::test_serialization; use crate::Position; #[test] fn inline_values() { test_serialization( &InlineValueText { range: Range::new(Position::new(0, 0), Position::new(0, 4)), text: "one".to_owned(), }, r#"{"range":{"start":{"line":0,"character":0},"end":{"line":0,"character":4}},"text":"one"}"#, ); test_serialization( &InlineValue::VariableLookup(InlineValueVariableLookup { range: Range::new(Position::new(1, 0), Position::new(1, 4)), variable_name: None, case_sensitive_lookup: false, }), r#"{"range":{"start":{"line":1,"character":0},"end":{"line":1,"character":4}},"caseSensitiveLookup":false}"#, ); test_serialization( &InlineValue::EvaluatableExpression(InlineValueEvaluatableExpression { range: Range::new(Position::new(2, 0), Position::new(2, 4)), expression: None, }), r#"{"range":{"start":{"line":2,"character":0},"end":{"line":2,"character":4}}}"#, ); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/lib.rs000066400000000000000000003015501500614314100245630ustar00rootroot00000000000000/*! Language Server Protocol types for Rust. Based on: This library uses the URL crate for parsing URIs. Note that there is some confusion on the meaning of URLs vs URIs: . According to that information, on the classical sense of "URLs", "URLs" are a subset of URIs, But on the modern/new meaning of URLs, they are the same as URIs. The important take-away aspect is that the URL crate should be able to parse any URI, such as `urn:isbn:0451450523`. 
*/ #![allow(non_upper_case_globals)] #![forbid(unsafe_code)] use bitflags::bitflags; use std::{collections::HashMap, fmt::Debug}; use serde::{de, de::Error as Error_, Deserialize, Serialize}; use serde_json::Value; pub use url::Url; // Large enough to contain any enumeration name defined in this crate type PascalCaseBuf = [u8; 32]; const fn fmt_pascal_case_const(name: &str) -> (PascalCaseBuf, usize) { let mut buf = [0; 32]; let mut buf_i = 0; let mut name_i = 0; let name = name.as_bytes(); while name_i < name.len() { let first = name[name_i]; name_i += 1; buf[buf_i] = first; buf_i += 1; while name_i < name.len() { let rest = name[name_i]; name_i += 1; if rest == b'_' { break; } buf[buf_i] = rest.to_ascii_lowercase(); buf_i += 1; } } (buf, buf_i) } fn fmt_pascal_case(f: &mut std::fmt::Formatter<'_>, name: &str) -> std::fmt::Result { for word in name.split('_') { let mut chars = word.chars(); let first = chars.next().unwrap(); write!(f, "{}", first)?; for rest in chars { write!(f, "{}", rest.to_lowercase())?; } } Ok(()) } macro_rules! 
lsp_enum { (impl $typ: ident { $( $(#[$attr:meta])* pub const $name: ident : $enum_type: ty = $value: expr; )* }) => { impl $typ { $( $(#[$attr])* pub const $name: $enum_type = $value; )* } impl std::fmt::Debug for $typ { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match *self { $( Self::$name => crate::fmt_pascal_case(f, stringify!($name)), )* _ => write!(f, "{}({})", stringify!($typ), self.0), } } } impl std::convert::TryFrom<&str> for $typ { type Error = &'static str; fn try_from(value: &str) -> Result { match () { $( _ if { const X: (crate::PascalCaseBuf, usize) = crate::fmt_pascal_case_const(stringify!($name)); let (buf, len) = X; &buf[..len] == value.as_bytes() } => Ok(Self::$name), )* _ => Err("unknown enum variant"), } } } } } pub mod error_codes; pub mod notification; pub mod request; mod call_hierarchy; pub use call_hierarchy::*; mod code_action; pub use code_action::*; mod code_lens; pub use code_lens::*; mod color; pub use color::*; mod completion; pub use completion::*; mod document_diagnostic; pub use document_diagnostic::*; mod document_highlight; pub use document_highlight::*; mod document_link; pub use document_link::*; mod document_symbols; pub use document_symbols::*; mod file_operations; pub use file_operations::*; mod folding_range; pub use folding_range::*; mod formatting; pub use formatting::*; mod hover; pub use hover::*; mod inlay_hint; pub use inlay_hint::*; mod inline_value; pub use inline_value::*; #[cfg(feature = "proposed")] mod inline_completion; #[cfg(feature = "proposed")] pub use inline_completion::*; mod moniker; pub use moniker::*; mod progress; pub use progress::*; mod references; pub use references::*; mod rename; pub use rename::*; pub mod selection_range; pub use selection_range::*; mod semantic_tokens; pub use semantic_tokens::*; mod signature_help; pub use signature_help::*; mod type_hierarchy; pub use type_hierarchy::*; mod linked_editing; pub use linked_editing::*; mod window; pub use window::*; 
mod workspace_diagnostic; pub use workspace_diagnostic::*; mod workspace_folders; pub use workspace_folders::*; mod workspace_symbols; pub use workspace_symbols::*; pub mod lsif; mod trace; pub use trace::*; /* ----------------- Auxiliary types ----------------- */ #[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum NumberOrString { Number(i32), String(String), } /* ----------------- Cancel support ----------------- */ #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct CancelParams { /// The request id to cancel. pub id: NumberOrString, } /* ----------------- Basic JSON Structures ----------------- */ /// The LSP any type /// /// @since 3.17.0 pub type LSPAny = serde_json::Value; /// LSP object definition. /// /// @since 3.17.0 pub type LSPObject = serde_json::Map; /// LSP arrays. /// /// @since 3.17.0 pub type LSPArray = Vec; /// Position in a text document expressed as zero-based line and character offset. /// A position is between two characters like an 'insert' cursor in a editor. #[derive( Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Default, Deserialize, Serialize, Hash, )] pub struct Position { /// Line position in a document (zero-based). pub line: u32, /// Character offset on a line in a document (zero-based). The meaning of this /// offset is determined by the negotiated `PositionEncodingKind`. /// /// If the character value is greater than the line length it defaults back /// to the line length. pub character: u32, } impl Position { pub fn new(line: u32, character: u32) -> Position { Position { line, character } } } /// A range in a text document expressed as (zero-based) start and end positions. /// A range is comparable to a selection in an editor. Therefore the end position is exclusive. #[derive(Debug, Eq, PartialEq, Copy, Clone, Default, Deserialize, Serialize, Hash)] pub struct Range { /// The range's start position. pub start: Position, /// The range's end position. 
pub end: Position, } impl Range { pub fn new(start: Position, end: Position) -> Range { Range { start, end } } } /// Represents a location inside a resource, such as a line inside a text file. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize, Hash)] pub struct Location { pub uri: Url, pub range: Range, } impl Location { pub fn new(uri: Url, range: Range) -> Location { Location { uri, range } } } /// Represents a link between a source and a target location. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct LocationLink { /// Span of the origin of this link. /// /// Used as the underlined span for mouse interaction. Defaults to the word range at /// the mouse position. #[serde(skip_serializing_if = "Option::is_none")] pub origin_selection_range: Option, /// The target resource identifier of this link. pub target_uri: Url, /// The full target range of this link. pub target_range: Range, /// The span of this link. pub target_selection_range: Range, } /// A type indicating how positions are encoded, /// specifically what column offsets mean. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)] pub struct PositionEncodingKind(std::borrow::Cow<'static, str>); impl PositionEncodingKind { /// Character offsets count UTF-8 code units. pub const UTF8: PositionEncodingKind = PositionEncodingKind::new("utf-8"); /// Character offsets count UTF-16 code units. /// /// This is the default and must always be supported /// by servers pub const UTF16: PositionEncodingKind = PositionEncodingKind::new("utf-16"); /// Character offsets count UTF-32 code units. /// /// Implementation note: these are the same as Unicode code points, /// so this `PositionEncodingKind` may also be used for an /// encoding-agnostic representation of character offsets. 
pub const UTF32: PositionEncodingKind = PositionEncodingKind::new("utf-32"); pub const fn new(tag: &'static str) -> Self { PositionEncodingKind(std::borrow::Cow::Borrowed(tag)) } pub fn as_str(&self) -> &str { &self.0 } } impl From for PositionEncodingKind { fn from(from: String) -> Self { PositionEncodingKind(std::borrow::Cow::from(from)) } } impl From<&'static str> for PositionEncodingKind { fn from(from: &'static str) -> Self { PositionEncodingKind::new(from) } } /// Represents a diagnostic, such as a compiler error or warning. /// Diagnostic objects are only valid in the scope of a resource. #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct Diagnostic { /// The range at which the message applies. pub range: Range, /// The diagnostic's severity. Can be omitted. If omitted it is up to the /// client to interpret diagnostics as error, warning, info or hint. #[serde(skip_serializing_if = "Option::is_none")] pub severity: Option, /// The diagnostic's code. Can be omitted. #[serde(skip_serializing_if = "Option::is_none")] pub code: Option, /// An optional property to describe the error code. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub code_description: Option, /// A human-readable string describing the source of this /// diagnostic, e.g. 'typescript' or 'super lint'. #[serde(skip_serializing_if = "Option::is_none")] pub source: Option, /// The diagnostic's message. pub message: String, /// An array of related diagnostic information, e.g. when symbol-names within /// a scope collide all definitions can be marked via this property. #[serde(skip_serializing_if = "Option::is_none")] pub related_information: Option>, /// Additional metadata about the diagnostic. #[serde(skip_serializing_if = "Option::is_none")] pub tags: Option>, /// A data entry field that is preserved between a `textDocument/publishDiagnostics` /// notification and `textDocument/codeAction` request. 
/// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub data: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeDescription { pub href: Url, } impl Diagnostic { pub fn new( range: Range, severity: Option, code: Option, source: Option, message: String, related_information: Option>, tags: Option>, ) -> Diagnostic { Diagnostic { range, severity, code, source, message, related_information, tags, ..Diagnostic::default() } } pub fn new_simple(range: Range, message: String) -> Diagnostic { Self::new(range, None, None, None, message, None, None) } pub fn new_with_code_number( range: Range, severity: DiagnosticSeverity, code_number: i32, source: Option, message: String, ) -> Diagnostic { let code = Some(NumberOrString::Number(code_number)); Self::new(range, Some(severity), code, source, message, None, None) } } /// The protocol currently supports the following diagnostic severities: #[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Deserialize, Serialize)] #[serde(transparent)] pub struct DiagnosticSeverity(i32); lsp_enum! { impl DiagnosticSeverity { /// Reports an error. pub const ERROR: DiagnosticSeverity = DiagnosticSeverity(1); /// Reports a warning. pub const WARNING: DiagnosticSeverity = DiagnosticSeverity(2); /// Reports an information. pub const INFORMATION: DiagnosticSeverity = DiagnosticSeverity(3); /// Reports a hint. pub const HINT: DiagnosticSeverity = DiagnosticSeverity(4); } } /// Represents a related message and source code location for a diagnostic. This /// should be used to point to code locations that cause or related to a /// diagnostics, e.g when duplicating a symbol in a scope. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct DiagnosticRelatedInformation { /// The location of this related diagnostic information. pub location: Location, /// The message of this related diagnostic information. 
pub message: String, } /// The diagnostic tags. #[derive(Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(transparent)] pub struct DiagnosticTag(i32); lsp_enum! { impl DiagnosticTag { /// Unused or unnecessary code. /// Clients are allowed to render diagnostics with this tag faded out instead of having /// an error squiggle. pub const UNNECESSARY: DiagnosticTag = DiagnosticTag(1); /// Deprecated or obsolete code. /// Clients are allowed to rendered diagnostics with this tag strike through. pub const DEPRECATED: DiagnosticTag = DiagnosticTag(2); } } /// Represents a reference to a command. Provides a title which will be used to represent a command in the UI. /// Commands are identified by a string identifier. The recommended way to handle commands is to implement /// their execution on the server side if the client and server provides the corresponding capabilities. /// Alternatively the tool extension code could handle the command. /// The protocol currently doesn’t specify a set of well-known commands. #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct Command { /// Title of the command, like `save`. pub title: String, /// The identifier of the actual command handler. pub command: String, /// Arguments that the command handler should be /// invoked with. #[serde(skip_serializing_if = "Option::is_none")] pub arguments: Option>, } impl Command { pub fn new(title: String, command: String, arguments: Option>) -> Command { Command { title, command, arguments, } } } /// A textual edit applicable to a text document. /// /// If n `TextEdit`s are applied to a text document all text edits describe changes to the initial document version. /// Execution wise text edits should applied from the bottom to the top of the text document. Overlapping text edits /// are not supported. 
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct TextEdit { /// The range of the text document to be manipulated. To insert /// text into a document create a range where start === end. pub range: Range, /// The string to be inserted. For delete operations use an /// empty string. pub new_text: String, } impl TextEdit { pub fn new(range: Range, new_text: String) -> TextEdit { TextEdit { range, new_text } } } /// An identifier referring to a change annotation managed by a workspace /// edit. /// /// @since 3.16.0 pub type ChangeAnnotationIdentifier = String; /// A special text edit with an additional change annotation. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct AnnotatedTextEdit { #[serde(flatten)] pub text_edit: TextEdit, /// The actual annotation pub annotation_id: ChangeAnnotationIdentifier, } /// Describes textual changes on a single text document. The text document is referred to as a /// `OptionalVersionedTextDocumentIdentifier` to allow clients to check the text document version before an /// edit is applied. A `TextDocumentEdit` describes all changes on a version Si and after they are /// applied move the document to version Si+1. So the creator of a `TextDocumentEdit` doesn't need to /// sort the array or do any kind of ordering. However the edits must be non overlapping. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct TextDocumentEdit { /// The text document to change. pub text_document: OptionalVersionedTextDocumentIdentifier, /// The edits to be applied. /// /// @since 3.16.0 - support for AnnotatedTextEdit. This is guarded by the /// client capability `workspace.workspaceEdit.changeAnnotationSupport` pub edits: Vec>, } /// Additional information that describes document changes. 
/// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ChangeAnnotation { /// A human-readable string describing the actual change. The string /// is rendered prominent in the user interface. pub label: String, /// A flag which indicates that user confirmation is needed /// before applying the change. #[serde(skip_serializing_if = "Option::is_none")] pub needs_confirmation: Option, /// A human-readable string which is rendered less prominent in /// the user interface. #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ChangeAnnotationWorkspaceEditClientCapabilities { /// Whether the client groups edits with equal labels into tree nodes, /// for instance all edits labelled with "Changes in Strings" would /// be a tree node. #[serde(skip_serializing_if = "Option::is_none")] pub groups_on_label: Option, } /// Options to create a file. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CreateFileOptions { /// Overwrite existing file. Overwrite wins over `ignoreIfExists` #[serde(skip_serializing_if = "Option::is_none")] pub overwrite: Option, /// Ignore if exists. #[serde(skip_serializing_if = "Option::is_none")] pub ignore_if_exists: Option, } /// Create file operation #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CreateFile { /// The resource to create. pub uri: Url, /// Additional options #[serde(skip_serializing_if = "Option::is_none")] pub options: Option, /// An optional annotation identifier describing the operation. 
/// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub annotation_id: Option, } /// Rename file options #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct RenameFileOptions { /// Overwrite target if existing. Overwrite wins over `ignoreIfExists` #[serde(skip_serializing_if = "Option::is_none")] pub overwrite: Option, /// Ignores if target exists. #[serde(skip_serializing_if = "Option::is_none")] pub ignore_if_exists: Option, } /// Rename file operation #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct RenameFile { /// The old (existing) location. pub old_uri: Url, /// The new location. pub new_uri: Url, /// Rename options. #[serde(skip_serializing_if = "Option::is_none")] pub options: Option, /// An optional annotation identifier describing the operation. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub annotation_id: Option, } /// Delete file options #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DeleteFileOptions { /// Delete the content recursively if a folder is denoted. #[serde(skip_serializing_if = "Option::is_none")] pub recursive: Option, /// Ignore the operation if the file doesn't exist. #[serde(skip_serializing_if = "Option::is_none")] pub ignore_if_not_exists: Option, /// An optional annotation identifier describing the operation. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub annotation_id: Option, } /// Delete file operation #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DeleteFile { /// The file to delete. pub uri: Url, /// Delete options. #[serde(skip_serializing_if = "Option::is_none")] pub options: Option, } /// A workspace edit represents changes to many resources managed in the workspace. 
/// The edit should either provide `changes` or `documentChanges`. /// If the client can handle versioned document edits and if `documentChanges` are present, /// the latter are preferred over `changes`. #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WorkspaceEdit { /// Holds changes to existing resources. #[serde(with = "url_map")] #[serde(skip_serializing_if = "Option::is_none")] #[serde(default)] pub changes: Option>>, // changes?: { [uri: string]: TextEdit[]; }; /// Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes /// are either an array of `TextDocumentEdit`s to express changes to n different text documents /// where each text document edit addresses a specific version of a text document. Or it can contain /// above `TextDocumentEdit`s mixed with create, rename and delete file / folder operations. /// /// Whether a client supports versioned document edits is expressed via /// `workspace.workspaceEdit.documentChanges` client capability. /// /// If a client neither supports `documentChanges` nor `workspace.workspaceEdit.resourceOperations` then /// only plain `TextEdit`s using the `changes` property are supported. #[serde(skip_serializing_if = "Option::is_none")] pub document_changes: Option, /// A map of change annotations that can be referenced in /// `AnnotatedTextEdit`s or create, rename and delete file / folder /// operations. /// /// Whether clients honor this property depends on the client capability /// `workspace.changeAnnotationSupport`. 
/// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub change_annotations: Option>, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum DocumentChanges { Edits(Vec), Operations(Vec), } // TODO: Once https://github.com/serde-rs/serde/issues/912 is solved // we can remove ResourceOp and switch to the following implementation // of DocumentChangeOperation: // // #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] // #[serde(tag = "kind", rename_all="lowercase" )] // pub enum DocumentChangeOperation { // Create(CreateFile), // Rename(RenameFile), // Delete(DeleteFile), // // #[serde(other)] // Edit(TextDocumentEdit), // } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged, rename_all = "lowercase")] pub enum DocumentChangeOperation { Op(ResourceOp), Edit(TextDocumentEdit), } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(tag = "kind", rename_all = "lowercase")] pub enum ResourceOp { Create(CreateFile), Rename(RenameFile), Delete(DeleteFile), } pub type DidChangeConfigurationClientCapabilities = DynamicRegistrationClientCapabilities; #[derive(Debug, Default, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ConfigurationParams { pub items: Vec, } #[derive(Debug, Default, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ConfigurationItem { /// The scope to get the configuration section for. #[serde(skip_serializing_if = "Option::is_none")] pub scope_uri: Option, ///The configuration section asked for. 
#[serde(skip_serializing_if = "Option::is_none")] pub section: Option, } mod url_map { use std::fmt; use std::marker::PhantomData; use super::*; pub fn deserialize<'de, D, V>(deserializer: D) -> Result>, D::Error> where D: serde::Deserializer<'de>, V: de::DeserializeOwned, { struct UrlMapVisitor { _marker: PhantomData, } impl Default for UrlMapVisitor { fn default() -> Self { UrlMapVisitor { _marker: PhantomData, } } } impl<'de, V: de::DeserializeOwned> de::Visitor<'de> for UrlMapVisitor { type Value = HashMap; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("map") } fn visit_map(self, mut visitor: M) -> Result where M: de::MapAccess<'de>, { let mut values = HashMap::with_capacity(visitor.size_hint().unwrap_or(0)); // While there are entries remaining in the input, add them // into our map. while let Some((key, value)) = visitor.next_entry::()? { values.insert(key, value); } Ok(values) } } struct OptionUrlMapVisitor { _marker: PhantomData, } impl Default for OptionUrlMapVisitor { fn default() -> Self { OptionUrlMapVisitor { _marker: PhantomData, } } } impl<'de, V: de::DeserializeOwned> de::Visitor<'de> for OptionUrlMapVisitor { type Value = Option>; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("option") } #[inline] fn visit_unit(self) -> Result where E: serde::de::Error, { Ok(None) } #[inline] fn visit_none(self) -> Result where E: serde::de::Error, { Ok(None) } #[inline] fn visit_some(self, deserializer: D) -> Result where D: serde::Deserializer<'de>, { deserializer .deserialize_map(UrlMapVisitor::::default()) .map(Some) } } // Instantiate our Visitor and ask the Deserializer to drive // it over the input data, resulting in an instance of MyMap. 
deserializer.deserialize_option(OptionUrlMapVisitor::default()) } pub fn serialize( changes: &Option>, serializer: S, ) -> Result where S: serde::Serializer, V: serde::Serialize, { use serde::ser::SerializeMap; match *changes { Some(ref changes) => { let mut map = serializer.serialize_map(Some(changes.len()))?; for (k, v) in changes { map.serialize_entry(k.as_str(), v)?; } map.end() } None => serializer.serialize_none(), } } } impl WorkspaceEdit { pub fn new(changes: HashMap>) -> WorkspaceEdit { WorkspaceEdit { changes: Some(changes), document_changes: None, ..Default::default() } } } /// Text documents are identified using a URI. On the protocol level, URIs are passed as strings. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct TextDocumentIdentifier { // !!!!!! Note: // In the spec VersionedTextDocumentIdentifier extends TextDocumentIdentifier // This modelled by "mixing-in" TextDocumentIdentifier in VersionedTextDocumentIdentifier, // so any changes to this type must be effected in the sub-type as well. /// The text document's URI. pub uri: Url, } impl TextDocumentIdentifier { pub fn new(uri: Url) -> TextDocumentIdentifier { TextDocumentIdentifier { uri } } } /// An item to transfer a text document from the client to the server. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct TextDocumentItem { /// The text document's URI. pub uri: Url, /// The text document's language identifier. pub language_id: String, /// The version number of this document (it will strictly increase after each /// change, including undo/redo). pub version: i32, /// The content of the opened text document. pub text: String, } impl TextDocumentItem { pub fn new(uri: Url, language_id: String, version: i32, text: String) -> TextDocumentItem { TextDocumentItem { uri, language_id, version, text, } } } /// An identifier to denote a specific version of a text document. 
This information usually flows from the client to the server. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct VersionedTextDocumentIdentifier { // This field was "mixed-in" from TextDocumentIdentifier /// The text document's URI. pub uri: Url, /// The version number of this document. /// /// The version number of a document will increase after each change, /// including undo/redo. The number doesn't need to be consecutive. pub version: i32, } impl VersionedTextDocumentIdentifier { pub fn new(uri: Url, version: i32) -> VersionedTextDocumentIdentifier { VersionedTextDocumentIdentifier { uri, version } } } /// An identifier which optionally denotes a specific version of a text document. This information usually flows from the server to the client #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct OptionalVersionedTextDocumentIdentifier { // This field was "mixed-in" from TextDocumentIdentifier /// The text document's URI. pub uri: Url, /// The version number of this document. If an optional versioned text document /// identifier is sent from the server to the client and the file is not /// open in the editor (the server has not received an open notification /// before) the server can send `null` to indicate that the version is /// known and the content on disk is the master (as specified with document /// content ownership). /// /// The version number of a document will increase after each change, /// including undo/redo. The number doesn't need to be consecutive. pub version: Option, } impl OptionalVersionedTextDocumentIdentifier { pub fn new(uri: Url, version: i32) -> OptionalVersionedTextDocumentIdentifier { OptionalVersionedTextDocumentIdentifier { uri, version: Some(version), } } } /// A parameter literal used in requests to pass a text document and a position inside that document. 
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TextDocumentPositionParams {
    // !!!!!! Note:
    // In the spec ReferenceParams extends TextDocumentPositionParams
    // This modelled by "mixing-in" TextDocumentPositionParams in ReferenceParams,
    // so any changes to this type must be effected in sub-type as well.
    /// The text document.
    pub text_document: TextDocumentIdentifier,

    /// The position inside the text document.
    pub position: Position,
}

impl TextDocumentPositionParams {
    pub fn new(
        text_document: TextDocumentIdentifier,
        position: Position,
    ) -> TextDocumentPositionParams {
        TextDocumentPositionParams {
            text_document,
            position,
        }
    }
}

/// A document filter denotes a document through properties like language, schema or pattern.
/// Examples are a filter that applies to TypeScript files on disk or a filter the applies to JSON
/// files with name package.json:
///
/// { language: 'typescript', scheme: 'file' }
/// { language: 'json', pattern: '**/package.json' }
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
pub struct DocumentFilter {
    /// A language id, like `typescript`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub language: Option<String>,

    /// A Uri [scheme](#Uri.scheme), like `file` or `untitled`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub scheme: Option<String>,

    /// A glob pattern, like `*.{ts,js}`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pattern: Option<String>,
}

/// A document selector is the combination of one or many document filters.
pub type DocumentSelector = Vec<DocumentFilter>;

// ========================= Actual Protocol =========================

#[derive(Debug, PartialEq, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct InitializeParams {
    /// The process Id of the parent process that started
    /// the server. Is null if the process has not been started by another process.
/// If the parent process is not alive then the server should exit (see exit notification) its process. pub process_id: Option, /// The rootPath of the workspace. Is null /// if no folder is open. #[serde(skip_serializing_if = "Option::is_none")] #[deprecated(note = "Use `root_uri` instead when possible")] pub root_path: Option, /// The rootUri of the workspace. Is null if no /// folder is open. If both `rootPath` and `rootUri` are set /// `rootUri` wins. #[serde(default)] #[deprecated(note = "Use `workspace_folders` instead when possible")] pub root_uri: Option, /// User provided initialization options. #[serde(skip_serializing_if = "Option::is_none")] pub initialization_options: Option, /// The capabilities provided by the client (editor or tool) pub capabilities: ClientCapabilities, /// The initial trace setting. If omitted trace is disabled ('off'). #[serde(default)] #[serde(skip_serializing_if = "Option::is_none")] pub trace: Option, /// The workspace folders configured in the client when the server starts. /// This property is only available if the client supports workspace folders. /// It can be `null` if the client supports workspace folders but none are /// configured. #[serde(skip_serializing_if = "Option::is_none")] pub workspace_folders: Option>, /// Information about the client. #[serde(skip_serializing_if = "Option::is_none")] pub client_info: Option, /// The locale the client is currently showing the user interface /// in. This must not necessarily be the locale of the operating /// system. /// /// Uses IETF language tags as the value's syntax /// (See ) /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub locale: Option, /// The LSP server may report about initialization progress to the client /// by using the following work done token if it was passed by the client. 
#[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] pub struct ClientInfo { /// The name of the client as defined by the client. pub name: String, /// The client's version as defined by the client. #[serde(skip_serializing_if = "Option::is_none")] pub version: Option, } #[derive(Debug, PartialEq, Clone, Copy, Deserialize, Serialize)] pub struct InitializedParams {} #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct GenericRegistrationOptions { #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, #[serde(flatten)] pub options: GenericOptions, #[serde(flatten)] pub static_registration_options: StaticRegistrationOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct GenericOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct GenericParams { #[serde(flatten)] pub text_document_position_params: TextDocumentPositionParams, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } #[derive(Debug, Eq, PartialEq, Clone, Copy, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DynamicRegistrationClientCapabilities { /// This capability supports dynamic registration. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, } #[derive(Debug, Eq, PartialEq, Clone, Copy, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct GotoCapability { #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// The client supports additional metadata in the form of definition links. 
#[serde(skip_serializing_if = "Option::is_none")] pub link_support: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WorkspaceEditClientCapabilities { /// The client supports versioned document changes in `WorkspaceEdit`s #[serde(skip_serializing_if = "Option::is_none")] pub document_changes: Option, /// The resource operations the client supports. Clients should at least /// support 'create', 'rename' and 'delete' files and folders. #[serde(skip_serializing_if = "Option::is_none")] pub resource_operations: Option>, /// The failure handling strategy of a client if applying the workspace edit fails. #[serde(skip_serializing_if = "Option::is_none")] pub failure_handling: Option, /// Whether the client normalizes line endings to the client specific /// setting. /// If set to `true` the client will normalize line ending characters /// in a workspace edit to the client specific new line character(s). /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub normalizes_line_endings: Option, /// Whether the client in general supports change annotations on text edits, /// create file, rename file and delete file changes. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub change_annotation_support: Option, } #[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Copy, Clone)] #[serde(rename_all = "lowercase")] pub enum ResourceOperationKind { Create, Rename, Delete, } #[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Copy, Clone)] #[serde(rename_all = "camelCase")] pub enum FailureHandlingKind { Abort, Transactional, TextOnlyTransactional, Undo, } /// A symbol kind. #[derive(Eq, PartialEq, Copy, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct SymbolKind(i32); lsp_enum! 
{
impl SymbolKind {
    pub const FILE: SymbolKind = SymbolKind(1);
    pub const MODULE: SymbolKind = SymbolKind(2);
    pub const NAMESPACE: SymbolKind = SymbolKind(3);
    pub const PACKAGE: SymbolKind = SymbolKind(4);
    pub const CLASS: SymbolKind = SymbolKind(5);
    pub const METHOD: SymbolKind = SymbolKind(6);
    pub const PROPERTY: SymbolKind = SymbolKind(7);
    pub const FIELD: SymbolKind = SymbolKind(8);
    pub const CONSTRUCTOR: SymbolKind = SymbolKind(9);
    pub const ENUM: SymbolKind = SymbolKind(10);
    pub const INTERFACE: SymbolKind = SymbolKind(11);
    pub const FUNCTION: SymbolKind = SymbolKind(12);
    pub const VARIABLE: SymbolKind = SymbolKind(13);
    pub const CONSTANT: SymbolKind = SymbolKind(14);
    pub const STRING: SymbolKind = SymbolKind(15);
    pub const NUMBER: SymbolKind = SymbolKind(16);
    pub const BOOLEAN: SymbolKind = SymbolKind(17);
    pub const ARRAY: SymbolKind = SymbolKind(18);
    pub const OBJECT: SymbolKind = SymbolKind(19);
    pub const KEY: SymbolKind = SymbolKind(20);
    pub const NULL: SymbolKind = SymbolKind(21);
    pub const ENUM_MEMBER: SymbolKind = SymbolKind(22);
    pub const STRUCT: SymbolKind = SymbolKind(23);
    pub const EVENT: SymbolKind = SymbolKind(24);
    pub const OPERATOR: SymbolKind = SymbolKind(25);
    pub const TYPE_PARAMETER: SymbolKind = SymbolKind(26);
}
}

/// Specific capabilities for the `SymbolKind` in the `workspace/symbol` request.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SymbolKindCapability {
    /// The symbol kind values the client supports. When this
    /// property exists the client also guarantees that it will
    /// handle values outside its set gracefully and falls back
    /// to a default value when unknown.
    ///
    /// If this property is not present the client only supports
    /// the symbol kinds from `File` to `Array` as defined in
    /// the initial version of the protocol.
    pub value_set: Option<Vec<SymbolKind>>,
}

/// Workspace specific client capabilities.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct WorkspaceClientCapabilities {
    /// The client supports applying batch edits to the workspace by supporting
    /// the request 'workspace/applyEdit'
    #[serde(skip_serializing_if = "Option::is_none")]
    pub apply_edit: Option<bool>,

    /// Capabilities specific to `WorkspaceEdit`s
    #[serde(skip_serializing_if = "Option::is_none")]
    pub workspace_edit: Option<WorkspaceEditClientCapabilities>,

    /// Capabilities specific to the `workspace/didChangeConfiguration` notification.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub did_change_configuration: Option<DidChangeConfigurationClientCapabilities>,

    /// Capabilities specific to the `workspace/didChangeWatchedFiles` notification.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub did_change_watched_files: Option<DidChangeWatchedFilesClientCapabilities>,

    /// Capabilities specific to the `workspace/symbol` request.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub symbol: Option<WorkspaceSymbolClientCapabilities>,

    /// Capabilities specific to the `workspace/executeCommand` request.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub execute_command: Option<ExecuteCommandClientCapabilities>,

    /// The client has support for workspace folders.
    ///
    /// @since 3.6.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub workspace_folders: Option<bool>,

    /// The client supports `workspace/configuration` requests.
    ///
    /// @since 3.6.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub configuration: Option<bool>,

    /// Capabilities specific to the semantic token requests scoped to the workspace.
    ///
    /// @since 3.16.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub semantic_tokens: Option<SemanticTokensWorkspaceClientCapabilities>,

    /// Capabilities specific to the code lens requests scoped to the workspace.
    ///
    /// @since 3.16.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub code_lens: Option<CodeLensWorkspaceClientCapabilities>,

    /// The client has support for file requests/notifications.
    ///
    /// @since 3.16.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub file_operations: Option<WorkspaceFileOperationsClientCapabilities>,

    /// Client workspace capabilities specific to inline values.
    ///
    /// @since 3.17.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub inline_value: Option<InlineValueWorkspaceClientCapabilities>,

    /// Client workspace capabilities specific to inlay hints.
    ///
    /// @since 3.17.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub inlay_hint: Option<InlayHintWorkspaceClientCapabilities>,

    /// Client workspace capabilities specific to diagnostics.
    /// since 3.17.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub diagnostic: Option<DiagnosticWorkspaceClientCapabilities>,
}

#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TextDocumentSyncClientCapabilities {
    /// Whether text document synchronization supports dynamic registration.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dynamic_registration: Option<bool>,

    /// The client supports sending will save notifications.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub will_save: Option<bool>,

    /// The client supports sending a will save request and
    /// waits for a response providing text edits which will
    /// be applied to the document before it is saved.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub will_save_wait_until: Option<bool>,

    /// The client supports did save notifications.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub did_save: Option<bool>,
}

#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct PublishDiagnosticsClientCapabilities {
    /// Whether the clients accepts diagnostics with related information.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub related_information: Option<bool>,

    /// Client supports the tag property to provide meta data about a diagnostic.
    /// Clients supporting tags have to handle unknown tags gracefully.
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        deserialize_with = "TagSupport::deserialize_compat"
    )]
    pub tag_support: Option<TagSupport<DiagnosticTag>>,

    /// Whether the client interprets the version property of the
    /// `textDocument/publishDiagnostics` notification's parameter.
/// /// @since 3.15.0 #[serde(skip_serializing_if = "Option::is_none")] pub version_support: Option, /// Client supports a codeDescription property /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub code_description_support: Option, /// Whether code action supports the `data` property which is /// preserved between a `textDocument/publishDiagnostics` and /// `textDocument/codeAction` request. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub data_support: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct TagSupport { /// The tags supported by the client. pub value_set: Vec, } impl TagSupport { /// Support for deserializing a boolean tag Support, in case it's present. /// /// This is currently the case for vscode 1.41.1 fn deserialize_compat<'de, S>(serializer: S) -> Result>, S::Error> where S: serde::Deserializer<'de>, T: serde::Deserialize<'de>, { Ok( match Option::::deserialize(serializer).map_err(serde::de::Error::custom)? { Some(Value::Bool(false)) => None, Some(Value::Bool(true)) => Some(TagSupport { value_set: vec![] }), Some(other) => { Some(TagSupport::::deserialize(other).map_err(serde::de::Error::custom)?) } None => None, }, ) } } /// Text document specific client capabilities. 
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TextDocumentClientCapabilities {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub synchronization: Option<TextDocumentSyncClientCapabilities>,
    /// Capabilities specific to the `textDocument/completion`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub completion: Option<CompletionClientCapabilities>,

    /// Capabilities specific to the `textDocument/hover`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hover: Option<HoverClientCapabilities>,

    /// Capabilities specific to the `textDocument/signatureHelp`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub signature_help: Option<SignatureHelpClientCapabilities>,

    /// Capabilities specific to the `textDocument/references`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub references: Option<ReferenceClientCapabilities>,

    /// Capabilities specific to the `textDocument/documentHighlight`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub document_highlight: Option<DocumentHighlightClientCapabilities>,

    /// Capabilities specific to the `textDocument/documentSymbol`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub document_symbol: Option<DocumentSymbolClientCapabilities>,

    /// Capabilities specific to the `textDocument/formatting`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub formatting: Option<DocumentFormattingClientCapabilities>,

    /// Capabilities specific to the `textDocument/rangeFormatting`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range_formatting: Option<DocumentRangeFormattingClientCapabilities>,

    /// Capabilities specific to the `textDocument/onTypeFormatting`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub on_type_formatting: Option<DocumentOnTypeFormattingClientCapabilities>,

    /// Capabilities specific to the `textDocument/declaration`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub declaration: Option<GotoCapability>,

    /// Capabilities specific to the `textDocument/definition`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub definition: Option<GotoCapability>,

    /// Capabilities specific to the `textDocument/typeDefinition`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub type_definition: Option<GotoCapability>,

    /// Capabilities specific to the `textDocument/implementation`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub implementation: Option<GotoCapability>,

    /// Capabilities specific to the `textDocument/codeAction`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub code_action: Option<CodeActionClientCapabilities>,

    /// Capabilities specific to the `textDocument/codeLens`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub code_lens: Option<CodeLensClientCapabilities>,

    /// Capabilities specific to the `textDocument/documentLink`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub document_link: Option<DocumentLinkClientCapabilities>,

    /// Capabilities specific to the `textDocument/documentColor` and the
    /// `textDocument/colorPresentation` request.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub color_provider: Option<DocumentColorClientCapabilities>,

    /// Capabilities specific to the `textDocument/rename`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub rename: Option<RenameClientCapabilities>,

    /// Capabilities specific to `textDocument/publishDiagnostics`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub publish_diagnostics: Option<PublishDiagnosticsClientCapabilities>,

    /// Capabilities specific to `textDocument/foldingRange` requests.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub folding_range: Option<FoldingRangeClientCapabilities>,

    /// Capabilities specific to the `textDocument/selectionRange` request.
    ///
    /// @since 3.15.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub selection_range: Option<SelectionRangeClientCapabilities>,

    /// Capabilities specific to `textDocument/linkedEditingRange` requests.
    ///
    /// @since 3.16.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub linked_editing_range: Option<LinkedEditingRangeClientCapabilities>,

    /// Capabilities specific to the various call hierarchy requests.
    ///
    /// @since 3.16.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub call_hierarchy: Option<CallHierarchyClientCapabilities>,

    /// Capabilities specific to the `textDocument/semanticTokens/*` requests.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub semantic_tokens: Option<SemanticTokensClientCapabilities>,

    /// Capabilities specific to the `textDocument/moniker` request.
    ///
    /// @since 3.16.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub moniker: Option<MonikerClientCapabilities>,

    /// Capabilities specific to the various type hierarchy requests.
    ///
    /// @since 3.17.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub type_hierarchy: Option<TypeHierarchyClientCapabilities>,

    /// Capabilities specific to the `textDocument/inlineValue` request.
    ///
    /// @since 3.17.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub inline_value: Option<InlineValueClientCapabilities>,

    /// Capabilities specific to the `textDocument/inlayHint` request.
    ///
    /// @since 3.17.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub inlay_hint: Option<InlayHintClientCapabilities>,

    /// Capabilities specific to the diagnostic pull model.
    ///
    /// @since 3.17.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub diagnostic: Option<DiagnosticClientCapabilities>,

    /// Capabilities specific to the `textDocument/inlineCompletion` request.
    ///
    /// @since 3.18.0
    #[serde(skip_serializing_if = "Option::is_none")]
    #[cfg(feature = "proposed")]
    pub inline_completion: Option<InlineCompletionClientCapabilities>,
}

/// Where ClientCapabilities are currently empty:
#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ClientCapabilities {
    /// Workspace specific client capabilities.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub workspace: Option<WorkspaceClientCapabilities>,

    /// Text document specific client capabilities.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub text_document: Option<TextDocumentClientCapabilities>,

    /// Window specific client capabilities.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub window: Option<WindowClientCapabilities>,

    /// General client capabilities.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub general: Option<GeneralClientCapabilities>,

    /// Unofficial UT8-offsets extension.
    ///
    /// See https://clangd.llvm.org/extensions.html#utf-8-offsets.
    #[serde(skip_serializing_if = "Option::is_none")]
    #[cfg(feature = "proposed")]
    pub offset_encoding: Option<Vec<String>>,

    /// Experimental client capabilities.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub experimental: Option<Value>,
}

#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct GeneralClientCapabilities {
    /// Client capabilities specific to regular expressions.
/// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub regular_expressions: Option, /// Client capabilities specific to the client's markdown parser. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub markdown: Option, /// Client capability that signals how the client handles stale requests (e.g. a request for /// which the client will not process the response anymore since the information is outdated). /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub stale_request_support: Option, /// The position encodings supported by the client. Client and server /// have to agree on the same position encoding to ensure that offsets /// (e.g. character position in a line) are interpreted the same on both /// side. /// /// To keep the protocol backwards compatible the following applies: if /// the value 'utf-16' is missing from the array of position encodings /// servers can assume that the client supports UTF-16. UTF-16 is /// therefore a mandatory encoding. /// /// If omitted it defaults to ['utf-16']. /// /// Implementation considerations: since the conversion from one encoding /// into another requires the content of the file / line the conversion /// is best done where the file is read which is usually on the server /// side. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub position_encodings: Option>, } /// Client capability that signals how the client /// handles stale requests (e.g. a request /// for which the client will not process the response /// anymore since the information is outdated). /// /// @since 3.17.0 #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct StaleRequestSupportClientCapabilities { /// The client will actively cancel the request. 
pub cancel: bool, /// The list of requests for which the client /// will retry the request if it receives a /// response with error code `ContentModified`` pub retry_on_content_modified: Vec, } #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct RegularExpressionsClientCapabilities { /// The engine's name. pub engine: String, /// The engine's version #[serde(skip_serializing_if = "Option::is_none")] pub version: Option, } #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct MarkdownClientCapabilities { /// The name of the parser. pub parser: String, /// The version of the parser. #[serde(skip_serializing_if = "Option::is_none")] pub version: Option, /// A list of HTML tags that the client allows / supports in /// Markdown. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub allowed_tags: Option>, } #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct InitializeResult { /// The capabilities the language server provides. pub capabilities: ServerCapabilities, /// Information about the server. #[serde(skip_serializing_if = "Option::is_none")] pub server_info: Option, /// Unofficial UT8-offsets extension. /// /// See https://clangd.llvm.org/extensions.html#utf-8-offsets. #[serde(skip_serializing_if = "Option::is_none")] #[cfg(feature = "proposed")] pub offset_encoding: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct ServerInfo { /// The name of the server as defined by the server. pub name: String, /// The servers's version as defined by the server. 
#[serde(skip_serializing_if = "Option::is_none")] pub version: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct InitializeError { /// Indicates whether the client execute the following retry logic: /// /// - (1) show the message provided by the ResponseError to the user /// - (2) user selects retry or cancel /// - (3) if user selected retry the initialize method is sent again. pub retry: bool, } // The server can signal the following capabilities: /// Defines how the host (editor) should sync document changes to the language server. #[derive(Eq, PartialEq, Clone, Copy, Deserialize, Serialize)] #[serde(transparent)] pub struct TextDocumentSyncKind(i32); lsp_enum! { impl TextDocumentSyncKind { /// Documents should not be synced at all. pub const NONE: TextDocumentSyncKind = TextDocumentSyncKind(0); /// Documents are synced by always sending the full content of the document. pub const FULL: TextDocumentSyncKind = TextDocumentSyncKind(1); /// Documents are synced by sending the full content on open. After that only /// incremental updates to the document are sent. pub const INCREMENTAL: TextDocumentSyncKind = TextDocumentSyncKind(2); } } pub type ExecuteCommandClientCapabilities = DynamicRegistrationClientCapabilities; /// Execute command options. #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct ExecuteCommandOptions { /// The commands to be executed on the server pub commands: Vec, #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } /// Save options. #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SaveOptions { /// The client is supposed to include the content on save. 
#[serde(skip_serializing_if = "Option::is_none")] pub include_text: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum TextDocumentSyncSaveOptions { Supported(bool), SaveOptions(SaveOptions), } impl From for TextDocumentSyncSaveOptions { fn from(from: SaveOptions) -> Self { Self::SaveOptions(from) } } impl From for TextDocumentSyncSaveOptions { fn from(from: bool) -> Self { Self::Supported(from) } } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct TextDocumentSyncOptions { /// Open and close notifications are sent to the server. #[serde(skip_serializing_if = "Option::is_none")] pub open_close: Option, /// Change notifications are sent to the server. See TextDocumentSyncKind.None, TextDocumentSyncKind.Full /// and TextDocumentSyncKindIncremental. #[serde(skip_serializing_if = "Option::is_none")] pub change: Option, /// Will save notifications are sent to the server. #[serde(skip_serializing_if = "Option::is_none")] pub will_save: Option, /// Will save wait until requests are sent to the server. #[serde(skip_serializing_if = "Option::is_none")] pub will_save_wait_until: Option, /// Save notifications are sent to the server. 
#[serde(skip_serializing_if = "Option::is_none")] pub save: Option, } #[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum OneOf { Left(A), Right(B), } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum TextDocumentSyncCapability { Kind(TextDocumentSyncKind), Options(TextDocumentSyncOptions), } impl From for TextDocumentSyncCapability { fn from(from: TextDocumentSyncOptions) -> Self { Self::Options(from) } } impl From for TextDocumentSyncCapability { fn from(from: TextDocumentSyncKind) -> Self { Self::Kind(from) } } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum ImplementationProviderCapability { Simple(bool), Options(StaticTextDocumentRegistrationOptions), } impl From for ImplementationProviderCapability { fn from(from: StaticTextDocumentRegistrationOptions) -> Self { Self::Options(from) } } impl From for ImplementationProviderCapability { fn from(from: bool) -> Self { Self::Simple(from) } } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum TypeDefinitionProviderCapability { Simple(bool), Options(StaticTextDocumentRegistrationOptions), } impl From for TypeDefinitionProviderCapability { fn from(from: StaticTextDocumentRegistrationOptions) -> Self { Self::Options(from) } } impl From for TypeDefinitionProviderCapability { fn from(from: bool) -> Self { Self::Simple(from) } } #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ServerCapabilities { /// The position encoding the server picked from the encodings offered /// by the client via the client capability `general.positionEncodings`. /// /// If the client didn't provide any position encodings the only valid /// value that a server can return is 'utf-16'. /// /// If omitted it defaults to 'utf-16'. 
/// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub position_encoding: Option, /// Defines how text documents are synced. #[serde(skip_serializing_if = "Option::is_none")] pub text_document_sync: Option, /// Capabilities specific to `textDocument/selectionRange` requests. #[serde(skip_serializing_if = "Option::is_none")] pub selection_range_provider: Option, /// The server provides hover support. #[serde(skip_serializing_if = "Option::is_none")] pub hover_provider: Option, /// The server provides completion support. #[serde(skip_serializing_if = "Option::is_none")] pub completion_provider: Option, /// The server provides signature help support. #[serde(skip_serializing_if = "Option::is_none")] pub signature_help_provider: Option, /// The server provides goto definition support. #[serde(skip_serializing_if = "Option::is_none")] pub definition_provider: Option>, /// The server provides goto type definition support. #[serde(skip_serializing_if = "Option::is_none")] pub type_definition_provider: Option, /// The server provides goto implementation support. #[serde(skip_serializing_if = "Option::is_none")] pub implementation_provider: Option, /// The server provides find references support. #[serde(skip_serializing_if = "Option::is_none")] pub references_provider: Option>, /// The server provides document highlight support. #[serde(skip_serializing_if = "Option::is_none")] pub document_highlight_provider: Option>, /// The server provides document symbol support. #[serde(skip_serializing_if = "Option::is_none")] pub document_symbol_provider: Option>, /// The server provides workspace symbol support. #[serde(skip_serializing_if = "Option::is_none")] pub workspace_symbol_provider: Option>, /// The server provides code actions. #[serde(skip_serializing_if = "Option::is_none")] pub code_action_provider: Option, /// The server provides code lens. 
#[serde(skip_serializing_if = "Option::is_none")] pub code_lens_provider: Option, /// The server provides document formatting. #[serde(skip_serializing_if = "Option::is_none")] pub document_formatting_provider: Option>, /// The server provides document range formatting. #[serde(skip_serializing_if = "Option::is_none")] pub document_range_formatting_provider: Option>, /// The server provides document formatting on typing. #[serde(skip_serializing_if = "Option::is_none")] pub document_on_type_formatting_provider: Option, /// The server provides rename support. #[serde(skip_serializing_if = "Option::is_none")] pub rename_provider: Option>, /// The server provides document link support. #[serde(skip_serializing_if = "Option::is_none")] pub document_link_provider: Option, /// The server provides color provider support. #[serde(skip_serializing_if = "Option::is_none")] pub color_provider: Option, /// The server provides folding provider support. #[serde(skip_serializing_if = "Option::is_none")] pub folding_range_provider: Option, /// The server provides go to declaration support. #[serde(skip_serializing_if = "Option::is_none")] pub declaration_provider: Option, /// The server provides execute command support. #[serde(skip_serializing_if = "Option::is_none")] pub execute_command_provider: Option, /// Workspace specific server capabilities #[serde(skip_serializing_if = "Option::is_none")] pub workspace: Option, /// Call hierarchy provider capabilities. #[serde(skip_serializing_if = "Option::is_none")] pub call_hierarchy_provider: Option, /// Semantic tokens server capabilities. #[serde(skip_serializing_if = "Option::is_none")] pub semantic_tokens_provider: Option, /// Whether server provides moniker support. #[serde(skip_serializing_if = "Option::is_none")] pub moniker_provider: Option>, /// The server provides linked editing range support. 
/// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub linked_editing_range_provider: Option, /// The server provides inline values. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub inline_value_provider: Option>, /// The server provides inlay hints. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub inlay_hint_provider: Option>, /// The server has support for pull model diagnostics. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub diagnostic_provider: Option, /// The server provides inline completions. /// /// @since 3.18.0 #[serde(skip_serializing_if = "Option::is_none")] #[cfg(feature = "proposed")] pub inline_completion_provider: Option>, /// Experimental server capabilities. #[serde(skip_serializing_if = "Option::is_none")] pub experimental: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WorkspaceServerCapabilities { /// The server supports workspace folder. #[serde(skip_serializing_if = "Option::is_none")] pub workspace_folders: Option, #[serde(skip_serializing_if = "Option::is_none")] pub file_operations: Option, } /// General parameters to to register for a capability. #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct Registration { /// The id used to register the request. The id can be used to deregister /// the request again. pub id: String, /// The method / capability to register for. pub method: String, /// Options necessary for the registration. #[serde(skip_serializing_if = "Option::is_none")] pub register_options: Option, } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] pub struct RegistrationParams { pub registrations: Vec, } /// Since most of the registration options require to specify a document selector there is a base /// interface that can be used. 
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct TextDocumentRegistrationOptions { /// A document selector to identify the scope of the registration. If set to null /// the document selector provided on the client side will be used. pub document_selector: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum DeclarationCapability { Simple(bool), RegistrationOptions(DeclarationRegistrationOptions), Options(DeclarationOptions), } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DeclarationRegistrationOptions { #[serde(flatten)] pub declaration_options: DeclarationOptions, #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, #[serde(flatten)] pub static_registration_options: StaticRegistrationOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DeclarationOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct StaticRegistrationOptions { #[serde(skip_serializing_if = "Option::is_none")] pub id: Option, } #[derive(Debug, Default, Eq, PartialEq, Clone, Deserialize, Serialize, Copy)] #[serde(rename_all = "camelCase")] pub struct WorkDoneProgressOptions { #[serde(skip_serializing_if = "Option::is_none")] pub work_done_progress: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentFormattingOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentRangeFormattingOptions { #[serde(flatten)] pub work_done_progress_options: 
WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DefinitionOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentSymbolOptions { /// A human-readable string that is shown when multiple outlines trees are /// shown for the same document. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub label: Option, #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ReferencesOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DocumentHighlightOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WorkspaceSymbolOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, /// The server provides support to resolve additional /// information for a workspace symbol. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub resolve_provider: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct StaticTextDocumentRegistrationOptions { /// A document selector to identify the scope of the registration. If set to null /// the document selector provided on the client side will be used. pub document_selector: Option, #[serde(skip_serializing_if = "Option::is_none")] pub id: Option, } /// General parameters to unregister a capability. 
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct Unregistration { /// The id used to unregister the request or notification. Usually an id /// provided during the register request. pub id: String, /// The method / capability to unregister for. pub method: String, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct UnregistrationParams { pub unregisterations: Vec, } #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] pub struct DidChangeConfigurationParams { /// The actual changed settings pub settings: Value, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DidOpenTextDocumentParams { /// The document that was opened. pub text_document: TextDocumentItem, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DidChangeTextDocumentParams { /// The document that did change. The version number points /// to the version after all provided content changes have /// been applied. pub text_document: VersionedTextDocumentIdentifier, /// The actual content changes. pub content_changes: Vec, } /// An event describing a change to a text document. If range and rangeLength are omitted /// the new text is considered to be the full content of the document. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct TextDocumentContentChangeEvent { /// The range of the document that changed. #[serde(skip_serializing_if = "Option::is_none")] pub range: Option, /// The length of the range that got replaced. /// /// Deprecated: Use range instead #[serde(skip_serializing_if = "Option::is_none")] pub range_length: Option, /// The new text of the document. pub text: String, } /// Describe options to be used when registering for text document change events. 
/// /// Extends TextDocumentRegistrationOptions #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct TextDocumentChangeRegistrationOptions { /// A document selector to identify the scope of the registration. If set to null /// the document selector provided on the client side will be used. pub document_selector: Option, /// How documents are synced to the server. See TextDocumentSyncKind.Full /// and TextDocumentSyncKindIncremental. pub sync_kind: i32, } /// The parameters send in a will save text document notification. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WillSaveTextDocumentParams { /// The document that will be saved. pub text_document: TextDocumentIdentifier, /// The 'TextDocumentSaveReason'. pub reason: TextDocumentSaveReason, } /// Represents reasons why a text document is saved. #[derive(Copy, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(transparent)] pub struct TextDocumentSaveReason(i32); lsp_enum! { impl TextDocumentSaveReason { /// Manually triggered, e.g. by the user pressing save, by starting debugging, /// or by an API call. pub const MANUAL: TextDocumentSaveReason = TextDocumentSaveReason(1); /// Automatic after a delay. pub const AFTER_DELAY: TextDocumentSaveReason = TextDocumentSaveReason(2); /// When the editor lost focus. pub const FOCUS_OUT: TextDocumentSaveReason = TextDocumentSaveReason(3); } } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DidCloseTextDocumentParams { /// The document that was closed. pub text_document: TextDocumentIdentifier, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DidSaveTextDocumentParams { /// The document that was saved. pub text_document: TextDocumentIdentifier, /// Optional the content when saved. 
Depends on the includeText value /// when the save notification was requested. #[serde(skip_serializing_if = "Option::is_none")] pub text: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct TextDocumentSaveRegistrationOptions { /// The client is supposed to include the content on save. #[serde(skip_serializing_if = "Option::is_none")] pub include_text: Option, #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, } #[derive(Debug, Eq, PartialEq, Clone, Copy, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DidChangeWatchedFilesClientCapabilities { /// Did change watched files notification supports dynamic registration. /// Please note that the current protocol doesn't support static /// configuration for file changes from the server side. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// Whether the client has support for relative patterns /// or not. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub relative_pattern_support: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct DidChangeWatchedFilesParams { /// The actual file events. pub changes: Vec, } /// The file event type. #[derive(Eq, PartialEq, Hash, Copy, Clone, Deserialize, Serialize)] #[serde(transparent)] pub struct FileChangeType(i32); lsp_enum! { impl FileChangeType { /// The file got created. pub const CREATED: FileChangeType = FileChangeType(1); /// The file got changed. pub const CHANGED: FileChangeType = FileChangeType(2); /// The file got deleted. pub const DELETED: FileChangeType = FileChangeType(3); } } /// An event describing a file change. #[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)] pub struct FileEvent { /// The file's URI. pub uri: Url, /// The change type. 
#[serde(rename = "type")] pub typ: FileChangeType, } impl FileEvent { pub fn new(uri: Url, typ: FileChangeType) -> FileEvent { FileEvent { uri, typ } } } /// Describe options to be used when registered for text document change events. #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Deserialize, Serialize)] pub struct DidChangeWatchedFilesRegistrationOptions { /// The watchers to register. pub watchers: Vec, } #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FileSystemWatcher { /// The glob pattern to watch. See {@link GlobPattern glob pattern} /// for more detail. /// /// @since 3.17.0 support for relative patterns. pub glob_pattern: GlobPattern, /// The kind of events of interest. If omitted it defaults to WatchKind.Create | /// WatchKind.Change | WatchKind.Delete which is 7. #[serde(skip_serializing_if = "Option::is_none")] pub kind: Option, } /// The glob pattern. Either a string pattern or a relative pattern. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum GlobPattern { String(Pattern), Relative(RelativePattern), } impl From for GlobPattern { #[inline] fn from(from: Pattern) -> Self { Self::String(from) } } impl From for GlobPattern { #[inline] fn from(from: RelativePattern) -> Self { Self::Relative(from) } } /// A relative pattern is a helper to construct glob patterns that are matched /// relatively to a base URI. The common value for a `baseUri` is a workspace /// folder root, but it can be another absolute URI as well. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct RelativePattern { /// A workspace folder or a base URI to which this pattern will be matched /// against relatively. pub base_uri: OneOf, /// The actual glob pattern. 
pub pattern: Pattern, } /// The glob pattern to watch relative to the base path. Glob patterns can have /// the following syntax: /// - `*` to match one or more characters in a path segment /// - `?` to match on one character in a path segment /// - `**` to match any number of path segments, including none /// - `{}` to group conditions (e.g. `**​/*.{ts,js}` matches all TypeScript /// and JavaScript files) /// - `[]` to declare a range of characters to match in a path segment /// (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …) /// - `[!...]` to negate a range of characters to match in a path segment /// (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, /// but not `example.0`) /// /// @since 3.17.0 pub type Pattern = String; bitflags! { #[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Clone, Copy)] pub struct WatchKind: u8 { /// Interested in create events. const Create = 1; /// Interested in change events const Change = 2; /// Interested in delete events const Delete = 4; } } impl<'de> serde::Deserialize<'de> for WatchKind { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { let i = u8::deserialize(deserializer)?; WatchKind::from_bits(i).ok_or_else(|| { D::Error::invalid_value(de::Unexpected::Unsigned(u64::from(i)), &"Unknown flag") }) } } impl serde::Serialize for WatchKind { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, { serializer.serialize_u8(self.bits()) } } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct PublishDiagnosticsParams { /// The URI for which diagnostic information is reported. pub uri: Url, /// An array of diagnostic information items. pub diagnostics: Vec, /// Optional the version number of the document the diagnostics are published for. 
#[serde(skip_serializing_if = "Option::is_none")] pub version: Option, } impl PublishDiagnosticsParams { pub fn new( uri: Url, diagnostics: Vec, version: Option, ) -> PublishDiagnosticsParams { PublishDiagnosticsParams { uri, diagnostics, version, } } } #[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)] #[serde(untagged)] pub enum Documentation { String(String), MarkupContent(MarkupContent), } /// MarkedString can be used to render human readable text. It is either a /// markdown string or a code-block that provides a language and a code snippet. /// The language identifier is semantically equal to the optional language /// identifier in fenced code blocks in GitHub issues. /// /// The pair of a language and a value is an equivalent to markdown: /// ///

```${language}
/// ${value}
/// ```
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum MarkedString { String(String), LanguageString(LanguageString), } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct LanguageString { pub language: String, pub value: String, } impl MarkedString { pub fn from_markdown(markdown: String) -> MarkedString { MarkedString::String(markdown) } pub fn from_language_code(language: String, code_block: String) -> MarkedString { MarkedString::LanguageString(LanguageString { language, value: code_block, }) } } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct GotoDefinitionParams { #[serde(flatten)] pub text_document_position_params: TextDocumentPositionParams, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } /// GotoDefinition response can be single location, or multiple Locations or a link. #[derive(Debug, PartialEq, Serialize, Deserialize, Clone)] #[serde(untagged)] pub enum GotoDefinitionResponse { Scalar(Location), Array(Vec), Link(Vec), } impl From for GotoDefinitionResponse { fn from(location: Location) -> Self { GotoDefinitionResponse::Scalar(location) } } impl From> for GotoDefinitionResponse { fn from(locations: Vec) -> Self { GotoDefinitionResponse::Array(locations) } } impl From> for GotoDefinitionResponse { fn from(locations: Vec) -> Self { GotoDefinitionResponse::Link(locations) } } #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct ExecuteCommandParams { /// The identifier of the actual command handler. pub command: String, /// Arguments that the command should be invoked with. #[serde(default)] pub arguments: Vec, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, } /// Execute command registration options. 
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct ExecuteCommandRegistrationOptions { /// The commands to be executed on the server pub commands: Vec, #[serde(flatten)] pub execute_command_options: ExecuteCommandOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ApplyWorkspaceEditParams { /// An optional label of the workspace edit. This label is /// presented in the user interface for example on an undo /// stack to undo the workspace edit. #[serde(skip_serializing_if = "Option::is_none")] pub label: Option, /// The edits to apply. pub edit: WorkspaceEdit, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ApplyWorkspaceEditResponse { /// Indicates whether the edit was applied or not. pub applied: bool, /// An optional textual description for why the edit was not applied. /// This may be used may be used by the server for diagnostic /// logging or to provide a suitable error for a request that /// triggered the edit #[serde(skip_serializing_if = "Option::is_none")] pub failure_reason: Option, /// Depending on the client's failure handling strategy `failedChange` might /// contain the index of the change that failed. This property is only available /// if the client signals a `failureHandlingStrategy` in its client capabilities. #[serde(skip_serializing_if = "Option::is_none")] pub failed_change: Option, } /// Describes the content type that a client supports in various /// result literals like `Hover`, `ParameterInfo` or `CompletionItem`. /// /// Please note that `MarkupKinds` must not start with a `$`. This kinds /// are reserved for internal usage. 
#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)] #[serde(rename_all = "lowercase")] pub enum MarkupKind { /// Plain text is supported as a content format PlainText, /// Markdown is supported as a content format Markdown, } /// A `MarkupContent` literal represents a string value which content can be represented in different formats. /// Currently `plaintext` and `markdown` are supported formats. A `MarkupContent` is usually used in /// documentation properties of result literals like `CompletionItem` or `SignatureInformation`. /// If the format is `markdown` the content should follow the [GitHub Flavored Markdown Specification](https://github.github.com/gfm/). /// /// Here is an example how such a string can be constructed using JavaScript / TypeScript: /// /// ```ignore /// let markdown: MarkupContent = { /// kind: MarkupKind::Markdown, /// value: [ /// "# Header", /// "Some text", /// "```typescript", /// "someCode();", /// "```" /// ] /// .join("\n"), /// }; /// ``` /// /// Please *Note* that clients might sanitize the return markdown. A client could decide to /// remove HTML from the markdown to avoid script execution. #[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)] pub struct MarkupContent { pub kind: MarkupKind, pub value: String, } /// A parameter literal used to pass a partial result token. #[derive(Debug, Eq, PartialEq, Default, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct PartialResultParams { #[serde(skip_serializing_if = "Option::is_none")] pub partial_result_token: Option, } /// Symbol tags are extra annotations that tweak the rendering of a symbol. /// /// @since 3.16.0 #[derive(Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(transparent)] pub struct SymbolTag(i32); lsp_enum! { impl SymbolTag { /// Render a symbol as obsolete, usually using a strike-out. 
pub const DEPRECATED: SymbolTag = SymbolTag(1); } } #[cfg(test)] mod tests { use serde::{Deserialize, Serialize}; use super::*; pub(crate) fn test_serialization(ms: &SER, expected: &str) where SER: Serialize + for<'de> Deserialize<'de> + PartialEq + std::fmt::Debug, { let json_str = serde_json::to_string(ms).unwrap(); assert_eq!(&json_str, expected); let deserialized: SER = serde_json::from_str(&json_str).unwrap(); assert_eq!(&deserialized, ms); } pub(crate) fn test_deserialization(json: &str, expected: &T) where T: for<'de> Deserialize<'de> + PartialEq + std::fmt::Debug, { let value = serde_json::from_str::(json).unwrap(); assert_eq!(&value, expected); } #[test] fn one_of() { test_serialization(&OneOf::::Left(true), r#"true"#); test_serialization(&OneOf::::Left("abcd".into()), r#""abcd""#); test_serialization( &OneOf::::Right(WorkDoneProgressOptions { work_done_progress: Some(false), }), r#"{"workDoneProgress":false}"#, ); } #[test] fn number_or_string() { test_serialization(&NumberOrString::Number(123), r#"123"#); test_serialization(&NumberOrString::String("abcd".into()), r#""abcd""#); } #[test] fn marked_string() { test_serialization(&MarkedString::from_markdown("xxx".into()), r#""xxx""#); test_serialization( &MarkedString::from_language_code("lang".into(), "code".into()), r#"{"language":"lang","value":"code"}"#, ); } #[test] fn language_string() { test_serialization( &LanguageString { language: "LL".into(), value: "VV".into(), }, r#"{"language":"LL","value":"VV"}"#, ); } #[test] fn workspace_edit() { test_serialization( &WorkspaceEdit { changes: Some(vec![].into_iter().collect()), document_changes: None, ..Default::default() }, r#"{"changes":{}}"#, ); test_serialization( &WorkspaceEdit { changes: None, document_changes: None, ..Default::default() }, r#"{}"#, ); test_serialization( &WorkspaceEdit { changes: Some( vec![(Url::parse("file://test").unwrap(), vec![])] .into_iter() .collect(), ), document_changes: None, ..Default::default() }, 
r#"{"changes":{"file://test/":[]}}"#, ); } #[test] fn root_uri_can_be_missing() { serde_json::from_str::(r#"{ "capabilities": {} }"#).unwrap(); } #[test] fn test_watch_kind() { test_serialization(&WatchKind::Create, "1"); test_serialization(&(WatchKind::Create | WatchKind::Change), "3"); test_serialization( &(WatchKind::Create | WatchKind::Change | WatchKind::Delete), "7", ); } #[test] fn test_resource_operation_kind() { test_serialization( &vec![ ResourceOperationKind::Create, ResourceOperationKind::Rename, ResourceOperationKind::Delete, ], r#"["create","rename","delete"]"#, ); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/linked_editing.rs000066400000000000000000000042061500614314100267640ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; use crate::{ DynamicRegistrationClientCapabilities, Range, StaticRegistrationOptions, TextDocumentPositionParams, TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams, }; pub type LinkedEditingRangeClientCapabilities = DynamicRegistrationClientCapabilities; #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct LinkedEditingRangeOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct LinkedEditingRangeRegistrationOptions { #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, #[serde(flatten)] pub linked_editing_range_options: LinkedEditingRangeOptions, #[serde(flatten)] pub static_registration_options: StaticRegistrationOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum LinkedEditingRangeServerCapabilities { Simple(bool), Options(LinkedEditingRangeOptions), RegistrationOptions(LinkedEditingRangeRegistrationOptions), } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, 
Serialize)] #[serde(rename_all = "camelCase")] pub struct LinkedEditingRangeParams { #[serde(flatten)] pub text_document_position_params: TextDocumentPositionParams, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct LinkedEditingRanges { /// A list of ranges that can be renamed together. The ranges must have /// identical length and contain identical text content. The ranges cannot overlap. pub ranges: Vec, /// An optional word pattern (regular expression) that describes valid contents for /// the given ranges. If no pattern is provided, the client configuration's word /// pattern will be used. #[serde(skip_serializing_if = "Option::is_none")] pub word_pattern: Option, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/lsif.rs000066400000000000000000000234131500614314100247510ustar00rootroot00000000000000//! Types of Language Server Index Format (LSIF). LSIF is a standard format //! for language servers or other programming tools to dump their knowledge //! about a workspace. //! //! 
Based on use crate::{Range, Url}; use serde::{Deserialize, Serialize}; pub type Id = crate::NumberOrString; #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(untagged)] pub enum LocationOrRangeId { Location(crate::Location), RangeId(Id), } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Entry { pub id: Id, #[serde(flatten)] pub data: Element, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] #[serde(tag = "type")] pub enum Element { Vertex(Vertex), Edge(Edge), } #[derive(Debug, PartialEq, Serialize, Deserialize)] pub struct ToolInfo { pub name: String, #[serde(default = "Default::default")] #[serde(skip_serializing_if = "Vec::is_empty")] pub args: Vec, #[serde(skip_serializing_if = "Option::is_none")] pub version: Option, } #[derive(Debug, PartialEq, Serialize, Deserialize, Clone, Copy)] pub enum Encoding { /// Currently only 'utf-16' is supported due to the limitations in LSP. #[serde(rename = "utf-16")] Utf16, } #[derive(Debug, PartialEq, Serialize, Deserialize)] pub struct RangeBasedDocumentSymbol { pub id: Id, #[serde(default = "Default::default")] #[serde(skip_serializing_if = "Vec::is_empty")] pub children: Vec, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] #[serde(untagged)] pub enum DocumentSymbolOrRangeBasedVec { DocumentSymbol(Vec), RangeBased(Vec), } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct DefinitionTag { /// The text covered by the range text: String, /// The symbol kind. kind: crate::SymbolKind, /// Indicates if this symbol is deprecated. #[serde(default)] #[serde(skip_serializing_if = "std::ops::Not::not")] deprecated: bool, /// The full range of the definition not including leading/trailing whitespace but everything else, e.g comments and code. /// The range must be included in fullRange. 
full_range: Range, /// Optional detail information for the definition. #[serde(skip_serializing_if = "Option::is_none")] detail: Option, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct DeclarationTag { /// The text covered by the range text: String, /// The symbol kind. kind: crate::SymbolKind, /// Indicates if this symbol is deprecated. #[serde(default)] deprecated: bool, /// The full range of the definition not including leading/trailing whitespace but everything else, e.g comments and code. /// The range must be included in fullRange. full_range: Range, /// Optional detail information for the definition. #[serde(skip_serializing_if = "Option::is_none")] detail: Option, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ReferenceTag { text: String, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct UnknownTag { text: String, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] #[serde(tag = "type")] pub enum RangeTag { Definition(DefinitionTag), Declaration(DeclarationTag), Reference(ReferenceTag), Unknown(UnknownTag), } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] #[serde(tag = "label")] pub enum Vertex { MetaData(MetaData), /// Project(Project), Document(Document), /// Range { #[serde(flatten)] range: Range, #[serde(skip_serializing_if = "Option::is_none")] tag: Option, }, /// ResultSet(ResultSet), Moniker(crate::Moniker), PackageInformation(PackageInformation), #[serde(rename = "$event")] Event(Event), DefinitionResult, DeclarationResult, TypeDefinitionResult, ReferenceResult, ImplementationResult, FoldingRangeResult { result: Vec, }, HoverResult { result: crate::Hover, }, DocumentSymbolResult { result: DocumentSymbolOrRangeBasedVec, }, DocumentLinkResult { result: Vec, }, DiagnosticResult { result: Vec, }, } #[derive(Debug, PartialEq, 
Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub enum EventKind { Begin, End, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub enum EventScope { Document, Project, } #[derive(Debug, PartialEq, Serialize, Deserialize)] pub struct Event { pub kind: EventKind, pub scope: EventScope, pub data: Id, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] #[serde(tag = "label")] pub enum Edge { Contains(EdgeDataMultiIn), Moniker(EdgeData), NextMoniker(EdgeData), Next(EdgeData), PackageInformation(EdgeData), Item(Item), // Methods #[serde(rename = "textDocument/definition")] Definition(EdgeData), #[serde(rename = "textDocument/declaration")] Declaration(EdgeData), #[serde(rename = "textDocument/hover")] Hover(EdgeData), #[serde(rename = "textDocument/references")] References(EdgeData), #[serde(rename = "textDocument/implementation")] Implementation(EdgeData), #[serde(rename = "textDocument/typeDefinition")] TypeDefinition(EdgeData), #[serde(rename = "textDocument/foldingRange")] FoldingRange(EdgeData), #[serde(rename = "textDocument/documentLink")] DocumentLink(EdgeData), #[serde(rename = "textDocument/documentSymbol")] DocumentSymbol(EdgeData), #[serde(rename = "textDocument/diagnostic")] Diagnostic(EdgeData), } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct EdgeData { pub in_v: Id, pub out_v: Id, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct EdgeDataMultiIn { pub in_vs: Vec, pub out_v: Id, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(untagged)] pub enum DefinitionResultType { Scalar(LocationOrRangeId), Array(LocationOrRangeId), } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub enum ItemKind { Declarations, Definitions, References, ReferenceResults, ImplementationResults, } #[derive(Debug, PartialEq, Serialize, 
Deserialize)] #[serde(rename_all = "camelCase")] pub struct Item { pub document: Id, #[serde(skip_serializing_if = "Option::is_none")] pub property: Option, #[serde(flatten)] pub edge_data: EdgeDataMultiIn, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Document { pub uri: Url, pub language_id: String, } /// #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ResultSet { #[serde(skip_serializing_if = "Option::is_none")] pub key: Option, } /// #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Project { #[serde(skip_serializing_if = "Option::is_none")] pub resource: Option, #[serde(skip_serializing_if = "Option::is_none")] pub content: Option, pub kind: String, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct MetaData { /// The version of the LSIF format using semver notation. See . Please note /// the version numbers starting with 0 don't adhere to semver and adopters have to assume /// that each new version is breaking. pub version: String, /// The project root (in form of an URI) used to compute this dump. pub project_root: Url, /// The string encoding used to compute line and character values in /// positions and ranges. 
pub position_encoding: Encoding, /// Information about the tool that created the dump #[serde(skip_serializing_if = "Option::is_none")] pub tool_info: Option, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Repository { pub r#type: String, pub url: String, #[serde(skip_serializing_if = "Option::is_none")] pub commit_id: Option, } #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct PackageInformation { pub name: String, pub manager: String, #[serde(skip_serializing_if = "Option::is_none")] pub uri: Option, #[serde(skip_serializing_if = "Option::is_none")] pub content: Option, #[serde(skip_serializing_if = "Option::is_none")] pub repository: Option, #[serde(skip_serializing_if = "Option::is_none")] pub version: Option, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/moniker.rs000066400000000000000000000057661500614314100254730ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; use crate::{ DynamicRegistrationClientCapabilities, PartialResultParams, TextDocumentPositionParams, TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams, }; pub type MonikerClientCapabilities = DynamicRegistrationClientCapabilities; #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum MonikerServerCapabilities { Options(MonikerOptions), RegistrationOptions(MonikerRegistrationOptions), } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct MonikerOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct MonikerRegistrationOptions { #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, #[serde(flatten)] pub moniker_options: MonikerOptions, } /// Moniker uniqueness level to define scope of the moniker. 
#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Copy, Clone)] #[serde(rename_all = "camelCase")] pub enum UniquenessLevel { /// The moniker is only unique inside a document Document, /// The moniker is unique inside a project for which a dump got created Project, /// The moniker is unique inside the group to which a project belongs Group, /// The moniker is unique inside the moniker scheme. Scheme, /// The moniker is globally unique Global, } /// The moniker kind. #[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Copy, Clone)] #[serde(rename_all = "camelCase")] pub enum MonikerKind { /// The moniker represent a symbol that is imported into a project Import, /// The moniker represent a symbol that is exported into a project Export, /// The moniker represents a symbol that is local to a project (e.g. a local /// variable of a function, a class not visible outside the project, ...) Local, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct MonikerParams { #[serde(flatten)] pub text_document_position_params: TextDocumentPositionParams, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } /// Moniker definition to match LSIF 0.5 moniker definition. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct Moniker { /// The scheme of the moniker. For example tsc or .Net pub scheme: String, /// The identifier of the moniker. The value is opaque in LSIF however /// schema owners are allowed to define the structure if they want. pub identifier: String, /// The scope in which the moniker is unique pub unique: UniquenessLevel, /// The moniker kind if known. 
#[serde(skip_serializing_if = "Option::is_none")] pub kind: Option, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/notification.rs000066400000000000000000000302271500614314100265030ustar00rootroot00000000000000use super::*; use serde::{de::DeserializeOwned, Serialize}; pub trait Notification { type Params: DeserializeOwned + Serialize + Send + Sync + 'static; const METHOD: &'static str; } #[macro_export] macro_rules! lsp_notification { ("$/cancelRequest") => { $crate::notification::Cancel }; ("$/setTrace") => { $crate::notification::SetTrace }; ("$/logTrace") => { $crate::notification::LogTrace }; ("initialized") => { $crate::notification::Initialized }; ("exit") => { $crate::notification::Exit }; ("window/showMessage") => { $crate::notification::ShowMessage }; ("window/logMessage") => { $crate::notification::LogMessage }; ("window/workDoneProgress/cancel") => { $crate::notification::WorkDoneProgressCancel }; ("telemetry/event") => { $crate::notification::TelemetryEvent }; ("textDocument/didOpen") => { $crate::notification::DidOpenTextDocument }; ("textDocument/didChange") => { $crate::notification::DidChangeTextDocument }; ("textDocument/willSave") => { $crate::notification::WillSaveTextDocument }; ("textDocument/didSave") => { $crate::notification::DidSaveTextDocument }; ("textDocument/didClose") => { $crate::notification::DidCloseTextDocument }; ("textDocument/publishDiagnostics") => { $crate::notification::PublishDiagnostics }; ("workspace/didChangeConfiguration") => { $crate::notification::DidChangeConfiguration }; ("workspace/didChangeWatchedFiles") => { $crate::notification::DidChangeWatchedFiles }; ("workspace/didChangeWorkspaceFolders") => { $crate::notification::DidChangeWorkspaceFolders }; ("$/progress") => { $crate::notification::Progress }; ("workspace/didCreateFiles") => { $crate::notification::DidCreateFiles }; ("workspace/didRenameFiles") => { $crate::notification::DidRenameFiles }; ("workspace/didDeleteFiles") => { 
$crate::notification::DidDeleteFiles }; } /// The base protocol now offers support for request cancellation. To cancel a request, /// a notification message with the following properties is sent: /// /// A request that got canceled still needs to return from the server and send a response back. /// It can not be left open / hanging. This is in line with the JSON RPC protocol that requires /// that every request sends a response back. In addition it allows for returning partial results on cancel. #[derive(Debug)] pub enum Cancel {} impl Notification for Cancel { type Params = CancelParams; const METHOD: &'static str = "$/cancelRequest"; } /// A notification that should be used by the client to modify the trace /// setting of the server. #[derive(Debug)] pub enum SetTrace {} impl Notification for SetTrace { type Params = SetTraceParams; const METHOD: &'static str = "$/setTrace"; } /// A notification to log the trace of the server’s execution. /// The amount and content of these notifications depends on the current trace configuration. /// /// `LogTrace` should be used for systematic trace reporting. For single debugging messages, /// the server should send `LogMessage` notifications. #[derive(Debug)] pub enum LogTrace {} impl Notification for LogTrace { type Params = LogTraceParams; const METHOD: &'static str = "$/logTrace"; } /// The initialized notification is sent from the client to the server after the client received /// the result of the initialize request but before the client is sending any other request or /// notification to the server. The server can use the initialized notification for example to /// dynamically register capabilities. #[derive(Debug)] pub enum Initialized {} impl Notification for Initialized { type Params = InitializedParams; const METHOD: &'static str = "initialized"; } /// A notification to ask the server to exit its process. 
/// The server should exit with success code 0 if the shutdown request has been received before; /// otherwise with error code 1. #[derive(Debug)] pub enum Exit {} impl Notification for Exit { type Params = (); const METHOD: &'static str = "exit"; } /// The show message notification is sent from a server to a client to ask the client to display a particular message /// in the user interface. #[derive(Debug)] pub enum ShowMessage {} impl Notification for ShowMessage { type Params = ShowMessageParams; const METHOD: &'static str = "window/showMessage"; } /// The log message notification is sent from the server to the client to ask the client to log a particular message. #[derive(Debug)] pub enum LogMessage {} impl Notification for LogMessage { type Params = LogMessageParams; const METHOD: &'static str = "window/logMessage"; } /// The telemetry notification is sent from the server to the client to ask the client to log a telemetry event. /// The protocol doesn't specify the payload since no interpretation of the data happens in the protocol. Most clients even don't handle /// the event directly but forward them to the extensions owning the corresponding server issuing the event. #[derive(Debug)] pub enum TelemetryEvent {} impl Notification for TelemetryEvent { type Params = OneOf; const METHOD: &'static str = "telemetry/event"; } /// A notification sent from the client to the server to signal the change of configuration settings. #[derive(Debug)] pub enum DidChangeConfiguration {} impl Notification for DidChangeConfiguration { type Params = DidChangeConfigurationParams; const METHOD: &'static str = "workspace/didChangeConfiguration"; } /// The document open notification is sent from the client to the server to signal newly opened text documents. /// The document's truth is now managed by the client and the server must not try to read the document's truth /// using the document's uri. 
#[derive(Debug)] pub enum DidOpenTextDocument {} impl Notification for DidOpenTextDocument { type Params = DidOpenTextDocumentParams; const METHOD: &'static str = "textDocument/didOpen"; } /// The document change notification is sent from the client to the server to signal changes to a text document. /// In 2.0 the shape of the params has changed to include proper version numbers and language ids. #[derive(Debug)] pub enum DidChangeTextDocument {} impl Notification for DidChangeTextDocument { type Params = DidChangeTextDocumentParams; const METHOD: &'static str = "textDocument/didChange"; } /// The document will save notification is sent from the client to the server before the document /// is actually saved. #[derive(Debug)] pub enum WillSaveTextDocument {} impl Notification for WillSaveTextDocument { type Params = WillSaveTextDocumentParams; const METHOD: &'static str = "textDocument/willSave"; } /// The document close notification is sent from the client to the server when the document got closed in the client. /// The document's truth now exists where the document's uri points to (e.g. if the document's uri is a file uri /// the truth now exists on disk). #[derive(Debug)] pub enum DidCloseTextDocument {} impl Notification for DidCloseTextDocument { type Params = DidCloseTextDocumentParams; const METHOD: &'static str = "textDocument/didClose"; } /// The document save notification is sent from the client to the server when the document was saved in the client. #[derive(Debug)] pub enum DidSaveTextDocument {} impl Notification for DidSaveTextDocument { type Params = DidSaveTextDocumentParams; const METHOD: &'static str = "textDocument/didSave"; } /// The watched files notification is sent from the client to the server when the client detects changes to files and folders /// watched by the language client (note although the name suggest that only file events are sent it is about file system events which include folders as well). 
/// It is recommended that servers register for these file system events using the registration mechanism. /// In former implementations clients pushed file events without the server actively asking for it. #[derive(Debug)] pub enum DidChangeWatchedFiles {} impl Notification for DidChangeWatchedFiles { type Params = DidChangeWatchedFilesParams; const METHOD: &'static str = "workspace/didChangeWatchedFiles"; } /// The workspace/didChangeWorkspaceFolders notification is sent from the client to the server to inform the server /// about workspace folder configuration changes #[derive(Debug)] pub enum DidChangeWorkspaceFolders {} impl Notification for DidChangeWorkspaceFolders { type Params = DidChangeWorkspaceFoldersParams; const METHOD: &'static str = "workspace/didChangeWorkspaceFolders"; } /// Diagnostics notification are sent from the server to the client to signal results of validation runs. #[derive(Debug)] pub enum PublishDiagnostics {} impl Notification for PublishDiagnostics { type Params = PublishDiagnosticsParams; const METHOD: &'static str = "textDocument/publishDiagnostics"; } /// The progress notification is sent from the server to the client to ask /// the client to indicate progress. #[derive(Debug)] pub enum Progress {} impl Notification for Progress { type Params = ProgressParams; const METHOD: &'static str = "$/progress"; } /// The `window/workDoneProgress/cancel` notification is sent from the client /// to the server to cancel a progress initiated on the server side using the `window/workDoneProgress/create`. #[derive(Debug)] pub enum WorkDoneProgressCancel {} impl Notification for WorkDoneProgressCancel { type Params = WorkDoneProgressCancelParams; const METHOD: &'static str = "window/workDoneProgress/cancel"; } /// The did create files notification is sent from the client to the server when files were created from within the client. 
#[derive(Debug)] pub enum DidCreateFiles {} impl Notification for DidCreateFiles { type Params = CreateFilesParams; const METHOD: &'static str = "workspace/didCreateFiles"; } /// The did rename files notification is sent from the client to the server when files were renamed from within the client. #[derive(Debug)] pub enum DidRenameFiles {} impl Notification for DidRenameFiles { type Params = RenameFilesParams; const METHOD: &'static str = "workspace/didRenameFiles"; } /// The did delete files notification is sent from the client to the server when files were deleted from within the client. #[derive(Debug)] pub enum DidDeleteFiles {} impl Notification for DidDeleteFiles { type Params = DeleteFilesParams; const METHOD: &'static str = "workspace/didDeleteFiles"; } #[cfg(test)] mod test { use super::*; fn fake_call() where N: Notification, N::Params: serde::Serialize, { } macro_rules! check_macro { ($name:tt) => { // check whether the macro name matches the method assert_eq!(::METHOD, $name); // test whether type checking passes for each component fake_call::(); }; } #[test] fn check_macro_definitions() { check_macro!("$/cancelRequest"); check_macro!("$/progress"); check_macro!("$/logTrace"); check_macro!("$/setTrace"); check_macro!("initialized"); check_macro!("exit"); check_macro!("window/showMessage"); check_macro!("window/logMessage"); check_macro!("window/workDoneProgress/cancel"); check_macro!("telemetry/event"); check_macro!("textDocument/didOpen"); check_macro!("textDocument/didChange"); check_macro!("textDocument/willSave"); check_macro!("textDocument/didSave"); check_macro!("textDocument/didClose"); check_macro!("textDocument/publishDiagnostics"); check_macro!("workspace/didChangeConfiguration"); check_macro!("workspace/didChangeWatchedFiles"); check_macro!("workspace/didChangeWorkspaceFolders"); check_macro!("workspace/didCreateFiles"); check_macro!("workspace/didRenameFiles"); check_macro!("workspace/didDeleteFiles"); } #[test] #[cfg(feature = "proposed")] 
fn check_proposed_macro_definitions() {} } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/progress.rs000066400000000000000000000126271500614314100256650ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; use crate::NumberOrString; pub type ProgressToken = NumberOrString; /// The progress notification is sent from the server to the client to ask /// the client to indicate progress. #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct ProgressParams { /// The progress token provided by the client. pub token: ProgressToken, /// The progress data. pub value: ProgressParamsValue, } #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(untagged)] pub enum ProgressParamsValue { WorkDone(WorkDoneProgress), } /// The `window/workDoneProgress/create` request is sent /// from the server to the client to ask the client to create a work done progress. #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct WorkDoneProgressCreateParams { /// The token to be used to report progress. pub token: ProgressToken, } /// The `window/workDoneProgress/cancel` notification is sent from the client /// to the server to cancel a progress initiated on the server side using the `window/workDoneProgress/create`. #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct WorkDoneProgressCancelParams { /// The token to be used to report progress. pub token: ProgressToken, } /// Options to signal work done progress support in server capabilities. 
#[derive(Debug, Eq, PartialEq, Default, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct WorkDoneProgressOptions { #[serde(skip_serializing_if = "Option::is_none")] pub work_done_progress: Option, } /// An optional token that a server can use to report work done progress #[derive(Debug, Eq, PartialEq, Default, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct WorkDoneProgressParams { #[serde(skip_serializing_if = "Option::is_none")] pub work_done_token: Option, } #[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct WorkDoneProgressBegin { /// Mandatory title of the progress operation. Used to briefly inform /// about the kind of operation being performed. /// Examples: "Indexing" or "Linking dependencies". pub title: String, /// Controls if a cancel button should show to allow the user to cancel the /// long running operation. Clients that don't support cancellation are allowed /// to ignore the setting. #[serde(skip_serializing_if = "Option::is_none")] pub cancellable: Option, /// Optional, more detailed associated progress message. Contains /// complementary information to the `title`. /// /// Examples: "3/25 files", "project/src/module2", "node_modules/some_dep". /// If unset, the previous progress message (if any) is still valid. #[serde(skip_serializing_if = "Option::is_none")] pub message: Option, /// Optional progress percentage to display (value 100 is considered 100%). /// If not provided infinite progress is assumed and clients are allowed /// to ignore the `percentage` value in subsequent in report notifications. /// /// The value should be steadily rising. Clients are free to ignore values /// that are not following this rule. 
The value range is [0, 100] #[serde(skip_serializing_if = "Option::is_none")] pub percentage: Option, } #[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct WorkDoneProgressReport { /// Controls if a cancel button should show to allow the user to cancel the /// long running operation. Clients that don't support cancellation are allowed /// to ignore the setting. #[serde(skip_serializing_if = "Option::is_none")] pub cancellable: Option, /// Optional, more detailed associated progress message. Contains /// complementary information to the `title`. /// Examples: "3/25 files", "project/src/module2", "node_modules/some_dep". /// If unset, the previous progress message (if any) is still valid. #[serde(skip_serializing_if = "Option::is_none")] pub message: Option, /// Optional progress percentage to display (value 100 is considered 100%). /// If not provided infinite progress is assumed and clients are allowed /// to ignore the `percentage` value in subsequent in report notifications. /// /// The value should be steadily rising. Clients are free to ignore values /// that are not following this rule. The value range is [0, 100] #[serde(skip_serializing_if = "Option::is_none")] pub percentage: Option, } #[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct WorkDoneProgressEnd { /// Optional, more detailed associated progress message. Contains /// complementary information to the `title`. /// Examples: "3/25 files", "project/src/module2", "node_modules/some_dep". /// If unset, the previous progress message (if any) is still valid. 
#[serde(skip_serializing_if = "Option::is_none")] pub message: Option, } #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(tag = "kind", rename_all = "lowercase")] pub enum WorkDoneProgress { Begin(WorkDoneProgressBegin), Report(WorkDoneProgressReport), End(WorkDoneProgressEnd), } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/references.rs000066400000000000000000000016771500614314100261450ustar00rootroot00000000000000use crate::{ DynamicRegistrationClientCapabilities, PartialResultParams, TextDocumentPositionParams, WorkDoneProgressParams, }; use serde::{Deserialize, Serialize}; pub type ReferenceClientCapabilities = DynamicRegistrationClientCapabilities; #[derive(Debug, Eq, PartialEq, Clone, Copy, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ReferenceContext { /// Include the declaration of the current symbol. pub include_declaration: bool, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ReferenceParams { // Text Document and Position fields #[serde(flatten)] pub text_document_position: TextDocumentPositionParams, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, // ReferenceParams properties: pub context: ReferenceContext, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/rename.rs000066400000000000000000000056771500614314100252770ustar00rootroot00000000000000use crate::{Range, TextDocumentPositionParams, WorkDoneProgressOptions, WorkDoneProgressParams}; use serde::{Deserialize, Serialize}; #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct RenameParams { /// Text Document and Position fields #[serde(flatten)] pub text_document_position: TextDocumentPositionParams, /// The new name of the symbol. 
If the given name is not valid the /// request must return a [ResponseError](#ResponseError) with an /// appropriate message set. pub new_name: String, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct RenameOptions { /// Renames should be checked and tested before being executed. #[serde(skip_serializing_if = "Option::is_none")] pub prepare_provider: Option, #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct RenameClientCapabilities { /// Whether rename supports dynamic registration. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// Client supports testing for validity of rename operations before execution. /// /// @since 3.12.0 #[serde(skip_serializing_if = "Option::is_none")] pub prepare_support: Option, /// Client supports the default behavior result. /// /// The value indicates the default behavior used by the /// client. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub prepare_support_default_behavior: Option, /// Whether the client honors the change annotations in /// text edits and resource operations returned via the /// rename request's workspace edit by for example presenting /// the workspace edit in the user interface and asking /// for confirmation. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub honors_change_annotations: Option, } #[derive(Eq, PartialEq, Copy, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct PrepareSupportDefaultBehavior(i32); lsp_enum! 
{ impl PrepareSupportDefaultBehavior { /// The client's default behavior is to select the identifier /// according the to language's syntax rule pub const IDENTIFIER: PrepareSupportDefaultBehavior = PrepareSupportDefaultBehavior(1); } } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] #[serde(rename_all = "camelCase")] pub enum PrepareRenameResponse { Range(Range), RangeWithPlaceholder { range: Range, placeholder: String, }, #[serde(rename_all = "camelCase")] DefaultBehavior { default_behavior: bool, }, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/request.rs000066400000000000000000001224211500614314100255030ustar00rootroot00000000000000use super::*; use serde::{de::DeserializeOwned, Serialize}; pub trait Request { type Params: DeserializeOwned + Serialize + Send + Sync + 'static; type Result: DeserializeOwned + Serialize + Send + Sync + 'static; const METHOD: &'static str; } #[macro_export] macro_rules! lsp_request { ("initialize") => { $crate::request::Initialize }; ("shutdown") => { $crate::request::Shutdown }; ("window/showMessageRequest") => { $crate::request::ShowMessageRequest }; ("client/registerCapability") => { $crate::request::RegisterCapability }; ("client/unregisterCapability") => { $crate::request::UnregisterCapability }; ("workspace/symbol") => { $crate::request::WorkspaceSymbolRequest }; ("workspaceSymbol/resolve") => { $crate::request::WorkspaceSymbolResolve }; ("workspace/executeCommand") => { $crate::request::ExecuteCommand }; ("textDocument/willSaveWaitUntil") => { $crate::request::WillSaveWaitUntil }; ("textDocument/completion") => { $crate::request::Completion }; ("completionItem/resolve") => { $crate::request::ResolveCompletionItem }; ("textDocument/hover") => { $crate::request::HoverRequest }; ("textDocument/signatureHelp") => { $crate::request::SignatureHelpRequest }; ("textDocument/declaration") => { $crate::request::GotoDeclaration }; ("textDocument/definition") => { 
$crate::request::GotoDefinition }; ("textDocument/references") => { $crate::request::References }; ("textDocument/documentHighlight") => { $crate::request::DocumentHighlightRequest }; ("textDocument/documentSymbol") => { $crate::request::DocumentSymbolRequest }; ("textDocument/codeAction") => { $crate::request::CodeActionRequest }; ("textDocument/codeLens") => { $crate::request::CodeLensRequest }; ("codeLens/resolve") => { $crate::request::CodeLensResolve }; ("textDocument/documentLink") => { $crate::request::DocumentLinkRequest }; ("documentLink/resolve") => { $crate::request::DocumentLinkResolve }; ("workspace/applyEdit") => { $crate::request::ApplyWorkspaceEdit }; ("textDocument/rangeFormatting") => { $crate::request::RangeFormatting }; ("textDocument/onTypeFormatting") => { $crate::request::OnTypeFormatting }; ("textDocument/formatting") => { $crate::request::Formatting }; ("textDocument/rename") => { $crate::request::Rename }; ("textDocument/documentColor") => { $crate::request::DocumentColor }; ("textDocument/colorPresentation") => { $crate::request::ColorPresentationRequest }; ("textDocument/foldingRange") => { $crate::request::FoldingRangeRequest }; ("textDocument/prepareRename") => { $crate::request::PrepareRenameRequest }; ("textDocument/implementation") => { $crate::request::GotoImplementation }; ("textDocument/typeDefinition") => { $crate::request::GotoTypeDefinition }; ("textDocument/selectionRange") => { $crate::request::SelectionRangeRequest }; ("workspace/workspaceFolders") => { $crate::request::WorkspaceFoldersRequest }; ("workspace/configuration") => { $crate::request::WorkspaceConfiguration }; ("window/workDoneProgress/create") => { $crate::request::WorkDoneProgressCreate }; ("callHierarchy/incomingCalls") => { $crate::request::CallHierarchyIncomingCalls }; ("callHierarchy/outgoingCalls") => { $crate::request::CallHierarchyOutgoingCalls }; ("textDocument/moniker") => { $crate::request::MonikerRequest }; ("textDocument/linkedEditingRange") => { 
$crate::request::LinkedEditingRange }; ("textDocument/prepareCallHierarchy") => { $crate::request::CallHierarchyPrepare }; ("textDocument/prepareTypeHierarchy") => { $crate::request::TypeHierarchyPrepare }; ("textDocument/semanticTokens/full") => { $crate::request::SemanticTokensFullRequest }; ("textDocument/semanticTokens/full/delta") => { $crate::request::SemanticTokensFullDeltaRequest }; ("textDocument/semanticTokens/range") => { $crate::request::SemanticTokensRangeRequest }; ("textDocument/inlayHint") => { $crate::request::InlayHintRequest }; ("textDocument/inlineValue") => { $crate::request::InlineValueRequest }; ("textDocument/diagnostic") => { $crate::request::DocumentDiagnosticRequest }; ("workspace/diagnostic") => { $crate::request::WorkspaceDiagnosticRequest }; ("workspace/diagnostic/refresh") => { $crate::request::WorkspaceDiagnosticRefresh }; ("typeHierarchy/supertypes") => { $crate::request::TypeHierarchySupertypes }; ("typeHierarchy/subtypes") => { $crate::request::TypeHierarchySubtypes }; ("workspace/willCreateFiles") => { $crate::request::WillCreateFiles }; ("workspace/willRenameFiles") => { $crate::request::WillRenameFiles }; ("workspace/willDeleteFiles") => { $crate::request::WillDeleteFiles }; ("workspace/semanticTokens/refresh") => { $crate::request::SemanticTokensRefresh }; ("workspace/codeLens/refresh") => { $crate::request::CodeLensRefresh }; ("workspace/inlayHint/refresh") => { $crate::request::InlayHintRefreshRequest }; ("workspace/inlineValue/refresh") => { $crate::request::InlineValueRefreshRequest }; ("codeAction/resolve") => { $crate::request::CodeActionResolveRequest }; ("inlayHint/resolve") => { $crate::request::InlayHintResolveRequest }; ("window/showDocument") => { $crate::request::ShowDocument }; } /// The initialize request is sent as the first request from the client to the server. 
/// If the server receives request or notification before the `initialize` request it should act as follows: /// /// * for a request the respond should be errored with `code: -32001`. The message can be picked by the server. /// * notifications should be dropped. #[derive(Debug)] pub enum Initialize {} impl Request for Initialize { type Params = InitializeParams; type Result = InitializeResult; const METHOD: &'static str = "initialize"; } /// The shutdown request is sent from the client to the server. It asks the server to shut down, /// but to not exit (otherwise the response might not be delivered correctly to the client). /// There is a separate exit notification that asks the server to exit. #[derive(Debug)] pub enum Shutdown {} impl Request for Shutdown { type Params = (); type Result = (); const METHOD: &'static str = "shutdown"; } /// The show message request is sent from a server to a client to ask the client to display a particular message /// in the user interface. In addition to the show message notification the request allows to pass actions and to /// wait for an answer from the client. #[derive(Debug)] pub enum ShowMessageRequest {} impl Request for ShowMessageRequest { type Params = ShowMessageRequestParams; type Result = Option; const METHOD: &'static str = "window/showMessageRequest"; } /// The client/registerCapability request is sent from the server to the client to register for a new capability /// on the client side. Not all clients need to support dynamic capability registration. A client opts in via the /// ClientCapabilities.GenericCapability property. #[derive(Debug)] pub enum RegisterCapability {} impl Request for RegisterCapability { type Params = RegistrationParams; type Result = (); const METHOD: &'static str = "client/registerCapability"; } /// The client/unregisterCapability request is sent from the server to the client to unregister a /// previously register capability. 
#[derive(Debug)] pub enum UnregisterCapability {} impl Request for UnregisterCapability { type Params = UnregistrationParams; type Result = (); const METHOD: &'static str = "client/unregisterCapability"; } /// The Completion request is sent from the client to the server to compute completion items at a given cursor position. /// Completion items are presented in the IntelliSense user interface. If computing full completion items is expensive, /// servers can additionally provide a handler for the completion item resolve request ('completionItem/resolve'). /// This request is sent when a completion item is selected in the user interface. A typical use case is for example: /// the 'textDocument/completion' request doesn’t fill in the documentation property for returned completion items /// since it is expensive to compute. When the item is selected in the user interface then a ‘completionItem/resolve’ /// request is sent with the selected completion item as a param. The returned completion item should have the /// documentation property filled in. The request can delay the computation of the detail and documentation properties. /// However, properties that are needed for the initial sorting and filtering, like sortText, filterText, insertText, /// and textEdit must be provided in the textDocument/completion request and must not be changed during resolve. #[derive(Debug)] pub enum Completion {} impl Request for Completion { type Params = CompletionParams; type Result = Option; const METHOD: &'static str = "textDocument/completion"; } /// The request is sent from the client to the server to resolve additional information for a given completion item. 
#[derive(Debug)] pub enum ResolveCompletionItem {} impl Request for ResolveCompletionItem { type Params = CompletionItem; type Result = CompletionItem; const METHOD: &'static str = "completionItem/resolve"; } /// The hover request is sent from the client to the server to request hover information at a given text /// document position. #[derive(Debug)] pub enum HoverRequest {} impl Request for HoverRequest { type Params = HoverParams; type Result = Option; const METHOD: &'static str = "textDocument/hover"; } /// The signature help request is sent from the client to the server to request signature information at /// a given cursor position. #[derive(Debug)] pub enum SignatureHelpRequest {} impl Request for SignatureHelpRequest { type Params = SignatureHelpParams; type Result = Option; const METHOD: &'static str = "textDocument/signatureHelp"; } #[derive(Debug)] pub enum GotoDeclaration {} pub type GotoDeclarationParams = GotoDefinitionParams; pub type GotoDeclarationResponse = GotoDefinitionResponse; /// The goto declaration request is sent from the client to the server to resolve the declaration location of /// a symbol at a given text document position. impl Request for GotoDeclaration { type Params = GotoDeclarationParams; type Result = Option; const METHOD: &'static str = "textDocument/declaration"; } /// The goto definition request is sent from the client to the server to resolve the definition location of /// a symbol at a given text document position. #[derive(Debug)] pub enum GotoDefinition {} impl Request for GotoDefinition { type Params = GotoDefinitionParams; type Result = Option; const METHOD: &'static str = "textDocument/definition"; } /// The references request is sent from the client to the server to resolve project-wide references for the /// symbol denoted by the given text document position. 
#[derive(Debug)] pub enum References {} impl Request for References { type Params = ReferenceParams; type Result = Option>; const METHOD: &'static str = "textDocument/references"; } /// The goto type definition request is sent from the client to the /// server to resolve the type definition location of a symbol at a /// given text document position. #[derive(Debug)] pub enum GotoTypeDefinition {} pub type GotoTypeDefinitionParams = GotoDefinitionParams; pub type GotoTypeDefinitionResponse = GotoDefinitionResponse; impl Request for GotoTypeDefinition { type Params = GotoTypeDefinitionParams; type Result = Option; const METHOD: &'static str = "textDocument/typeDefinition"; } /// The goto implementation request is sent from the client to the /// server to resolve the implementation location of a symbol at a /// given text document position. #[derive(Debug)] pub enum GotoImplementation {} pub type GotoImplementationParams = GotoTypeDefinitionParams; pub type GotoImplementationResponse = GotoDefinitionResponse; impl Request for GotoImplementation { type Params = GotoImplementationParams; type Result = Option; const METHOD: &'static str = "textDocument/implementation"; } /// The document highlight request is sent from the client to the server to resolve a document highlights /// for a given text document position. /// For programming languages this usually highlights all references to the symbol scoped to this file. /// However we kept 'textDocument/documentHighlight' and 'textDocument/references' separate requests since /// the first one is allowed to be more fuzzy. /// Symbol matches usually have a DocumentHighlightKind of Read or Write whereas fuzzy or textual matches /// use Text as the kind. 
#[derive(Debug)] pub enum DocumentHighlightRequest {} impl Request for DocumentHighlightRequest { type Params = DocumentHighlightParams; type Result = Option>; const METHOD: &'static str = "textDocument/documentHighlight"; } /// The document symbol request is sent from the client to the server to list all symbols found in a given /// text document. #[derive(Debug)] pub enum DocumentSymbolRequest {} impl Request for DocumentSymbolRequest { type Params = DocumentSymbolParams; type Result = Option; const METHOD: &'static str = "textDocument/documentSymbol"; } /// The workspace symbol request is sent from the client to the server to list project-wide symbols /// matching the query string. #[derive(Debug)] pub enum WorkspaceSymbolRequest {} impl Request for WorkspaceSymbolRequest { type Params = WorkspaceSymbolParams; type Result = Option; const METHOD: &'static str = "workspace/symbol"; } /// The `workspaceSymbol/resolve` request is sent from the client to the server to resolve /// additional information for a given workspace symbol. #[derive(Debug)] pub enum WorkspaceSymbolResolve {} impl Request for WorkspaceSymbolResolve { type Params = WorkspaceSymbol; type Result = WorkspaceSymbol; const METHOD: &'static str = "workspaceSymbol/resolve"; } /// The workspace/executeCommand request is sent from the client to the server to trigger command execution on the server. /// In most cases the server creates a WorkspaceEdit structure and applies the changes to the workspace using the request /// workspace/applyEdit which is sent from the server to the client. #[derive(Debug)] pub enum ExecuteCommand {} impl Request for ExecuteCommand { type Params = ExecuteCommandParams; type Result = Option; const METHOD: &'static str = "workspace/executeCommand"; } /// The document will save request is sent from the client to the server before the document is /// actually saved. The request can return an array of TextEdits which will be applied to the text /// document before it is saved. 
Please note that clients might drop results if computing the text /// edits took too long or if a server constantly fails on this request. This is done to keep the /// save fast and reliable. #[derive(Debug)] pub enum WillSaveWaitUntil {} impl Request for WillSaveWaitUntil { type Params = WillSaveTextDocumentParams; type Result = Option>; const METHOD: &'static str = "textDocument/willSaveWaitUntil"; } /// The workspace/applyEdit request is sent from the server to the client to modify resource on the /// client side. #[derive(Debug)] pub enum ApplyWorkspaceEdit {} impl Request for ApplyWorkspaceEdit { type Params = ApplyWorkspaceEditParams; type Result = ApplyWorkspaceEditResponse; const METHOD: &'static str = "workspace/applyEdit"; } /// The workspace/configuration request is sent from the server to the client to fetch configuration settings /// from the client. The request can fetch several configuration settings in one roundtrip. /// The order of the returned configuration settings correspond to the order of the passed ConfigurationItems /// (e.g. the first item in the response is the result for the first configuration item in the params). /// /// A ConfigurationItem consists of the configuration section to ask for and an additional scope URI. /// The configuration section ask for is defined by the server and doesn’t necessarily need to correspond to /// the configuration store used be the client. So a server might ask for a configuration cpp.formatterOptions /// but the client stores the configuration in a XML store layout differently. /// It is up to the client to do the necessary conversion. If a scope URI is provided the client should return /// the setting scoped to the provided resource. If the client for example uses EditorConfig to manage its /// settings the configuration should be returned for the passed resource URI. If the client can’t provide a /// configuration setting for a given scope then null need to be present in the returned array. 
#[derive(Debug)] pub enum WorkspaceConfiguration {} impl Request for WorkspaceConfiguration { type Params = ConfigurationParams; type Result = Vec; const METHOD: &'static str = "workspace/configuration"; } /// The code action request is sent from the client to the server to compute commands for a given text document /// and range. The request is triggered when the user moves the cursor into a problem marker in the editor or /// presses the lightbulb associated with a marker. #[derive(Debug)] pub enum CodeActionRequest {} impl Request for CodeActionRequest { type Params = CodeActionParams; type Result = Option; const METHOD: &'static str = "textDocument/codeAction"; } /// The request is sent from the client to the server to resolve additional information for a given code action. /// This is usually used to compute the `edit` property of a code action to avoid its unnecessary computation /// during the `textDocument/codeAction` request. /// /// @since 3.16.0 #[derive(Debug)] pub enum CodeActionResolveRequest {} impl Request for CodeActionResolveRequest { type Params = CodeAction; type Result = CodeAction; const METHOD: &'static str = "codeAction/resolve"; } /// The code lens request is sent from the client to the server to compute code lenses for a given text document. #[derive(Debug)] pub enum CodeLensRequest {} impl Request for CodeLensRequest { type Params = CodeLensParams; type Result = Option>; const METHOD: &'static str = "textDocument/codeLens"; } /// The code lens resolve request is sent from the client to the server to resolve the command for a /// given code lens item. #[derive(Debug)] pub enum CodeLensResolve {} impl Request for CodeLensResolve { type Params = CodeLens; type Result = CodeLens; const METHOD: &'static str = "codeLens/resolve"; } /// The document links request is sent from the client to the server to request the location of links in a document. 
#[derive(Debug)] pub enum DocumentLinkRequest {} impl Request for DocumentLinkRequest { type Params = DocumentLinkParams; type Result = Option>; const METHOD: &'static str = "textDocument/documentLink"; } /// The document link resolve request is sent from the client to the server to resolve the target of /// a given document link. #[derive(Debug)] pub enum DocumentLinkResolve {} impl Request for DocumentLinkResolve { type Params = DocumentLink; type Result = DocumentLink; const METHOD: &'static str = "documentLink/resolve"; } /// The document formatting request is sent from the server to the client to format a whole document. #[derive(Debug)] pub enum Formatting {} impl Request for Formatting { type Params = DocumentFormattingParams; type Result = Option>; const METHOD: &'static str = "textDocument/formatting"; } /// The document range formatting request is sent from the client to the server to format a given range in a document. #[derive(Debug)] pub enum RangeFormatting {} impl Request for RangeFormatting { type Params = DocumentRangeFormattingParams; type Result = Option>; const METHOD: &'static str = "textDocument/rangeFormatting"; } /// The document on type formatting request is sent from the client to the server to format parts of /// the document during typing. #[derive(Debug)] pub enum OnTypeFormatting {} impl Request for OnTypeFormatting { type Params = DocumentOnTypeFormattingParams; type Result = Option>; const METHOD: &'static str = "textDocument/onTypeFormatting"; } /// The linked editing request is sent from the client to the server to return for a given position in a document /// the range of the symbol at the position and all ranges that have the same content. /// Optionally a word pattern can be returned to describe valid contents. A rename to one of the ranges can be applied /// to all other ranges if the new content is valid. If no result-specific word pattern is provided, the word pattern from /// the client’s language configuration is used. 
#[derive(Debug)] pub enum LinkedEditingRange {} impl Request for LinkedEditingRange { type Params = LinkedEditingRangeParams; type Result = Option; const METHOD: &'static str = "textDocument/linkedEditingRange"; } /// The rename request is sent from the client to the server to perform a workspace-wide rename of a symbol. #[derive(Debug)] pub enum Rename {} impl Request for Rename { type Params = RenameParams; type Result = Option; const METHOD: &'static str = "textDocument/rename"; } /// The document color request is sent from the client to the server to list all color references found in a given text document. /// Along with the range, a color value in RGB is returned. #[derive(Debug)] pub enum DocumentColor {} impl Request for DocumentColor { type Params = DocumentColorParams; type Result = Vec; const METHOD: &'static str = "textDocument/documentColor"; } /// The color presentation request is sent from the client to the server to obtain a list of presentations for a color value /// at a given location. #[derive(Debug)] pub enum ColorPresentationRequest {} impl Request for ColorPresentationRequest { type Params = ColorPresentationParams; type Result = Vec; const METHOD: &'static str = "textDocument/colorPresentation"; } /// The folding range request is sent from the client to the server to return all folding ranges found in a given text document. #[derive(Debug)] pub enum FoldingRangeRequest {} impl Request for FoldingRangeRequest { type Params = FoldingRangeParams; type Result = Option>; const METHOD: &'static str = "textDocument/foldingRange"; } /// The prepare rename request is sent from the client to the server to setup and test the validity of a rename operation /// at a given location. 
#[derive(Debug)] pub enum PrepareRenameRequest {} impl Request for PrepareRenameRequest { type Params = TextDocumentPositionParams; type Result = Option; const METHOD: &'static str = "textDocument/prepareRename"; } #[derive(Debug)] #[cfg(feature = "proposed")] pub enum InlineCompletionRequest {} #[cfg(feature = "proposed")] impl Request for InlineCompletionRequest { type Params = InlineCompletionParams; type Result = Option; const METHOD: &'static str = "textDocument/inlineCompletion"; } /// The workspace/workspaceFolders request is sent from the server to the client to fetch the current open list of /// workspace folders. Returns null in the response if only a single file is open in the tool. /// Returns an empty array if a workspace is open but no folders are configured. #[derive(Debug)] pub enum WorkspaceFoldersRequest {} impl Request for WorkspaceFoldersRequest { type Params = (); type Result = Option>; const METHOD: &'static str = "workspace/workspaceFolders"; } /// The `window/workDoneProgress/create` request is sent from the server /// to the client to ask the client to create a work done progress. #[derive(Debug)] pub enum WorkDoneProgressCreate {} impl Request for WorkDoneProgressCreate { type Params = WorkDoneProgressCreateParams; type Result = (); const METHOD: &'static str = "window/workDoneProgress/create"; } /// The selection range request is sent from the client to the server to return /// suggested selection ranges at given positions. A selection range is a range /// around the cursor position which the user might be interested in selecting. /// /// A selection range in the return array is for the position in the provided parameters at the same index. /// Therefore `positions[i]` must be contained in `result[i].range`. /// /// Typically, but not necessary, selection ranges correspond to the nodes of the /// syntax tree. 
pub enum SelectionRangeRequest {} impl Request for SelectionRangeRequest { type Params = SelectionRangeParams; type Result = Option>; const METHOD: &'static str = "textDocument/selectionRange"; } pub enum CallHierarchyPrepare {} impl Request for CallHierarchyPrepare { type Params = CallHierarchyPrepareParams; type Result = Option>; const METHOD: &'static str = "textDocument/prepareCallHierarchy"; } pub enum CallHierarchyIncomingCalls {} impl Request for CallHierarchyIncomingCalls { type Params = CallHierarchyIncomingCallsParams; type Result = Option>; const METHOD: &'static str = "callHierarchy/incomingCalls"; } pub enum CallHierarchyOutgoingCalls {} impl Request for CallHierarchyOutgoingCalls { type Params = CallHierarchyOutgoingCallsParams; type Result = Option>; const METHOD: &'static str = "callHierarchy/outgoingCalls"; } pub enum SemanticTokensFullRequest {} impl Request for SemanticTokensFullRequest { type Params = SemanticTokensParams; type Result = Option; const METHOD: &'static str = "textDocument/semanticTokens/full"; } pub enum SemanticTokensFullDeltaRequest {} impl Request for SemanticTokensFullDeltaRequest { type Params = SemanticTokensDeltaParams; type Result = Option; const METHOD: &'static str = "textDocument/semanticTokens/full/delta"; } pub enum SemanticTokensRangeRequest {} impl Request for SemanticTokensRangeRequest { type Params = SemanticTokensRangeParams; type Result = Option; const METHOD: &'static str = "textDocument/semanticTokens/range"; } /// The `workspace/semanticTokens/refresh` request is sent from the server to the client. /// Servers can use it to ask clients to refresh the editors for which this server provides semantic tokens. /// As a result the client should ask the server to recompute the semantic tokens for these editors. /// This is useful if a server detects a project wide configuration change which requires a re-calculation of all semantic tokens. 
/// Note that the client still has the freedom to delay the re-calculation of the semantic tokens if for example an editor is currently not visible. pub enum SemanticTokensRefresh {} impl Request for SemanticTokensRefresh { type Params = (); type Result = (); const METHOD: &'static str = "workspace/semanticTokens/refresh"; } /// The workspace/codeLens/refresh request is sent from the server to the client. /// Servers can use it to ask clients to refresh the code lenses currently shown in editors. /// As a result the client should ask the server to recompute the code lenses for these editors. /// This is useful if a server detects a configuration change which requires a re-calculation of all code lenses. /// Note that the client still has the freedom to delay the re-calculation of the code lenses if for example an editor is currently not visible. pub enum CodeLensRefresh {} impl Request for CodeLensRefresh { type Params = (); type Result = (); const METHOD: &'static str = "workspace/codeLens/refresh"; } /// The will create files request is sent from the client to the server before files are actually created as long as the creation is triggered from within the client. The request can return a WorkspaceEdit which will be applied to workspace before the files are created. Please note that clients might drop results if computing the edit took too long or if a server constantly fails on this request. This is done to keep creates fast and reliable. pub enum WillCreateFiles {} impl Request for WillCreateFiles { type Params = CreateFilesParams; type Result = Option; const METHOD: &'static str = "workspace/willCreateFiles"; } /// The will rename files request is sent from the client to the server before files are actually renamed as long as the rename is triggered from within the client. The request can return a WorkspaceEdit which will be applied to workspace before the files are renamed. 
Please note that clients might drop results if computing the edit took too long or if a server constantly fails on this request. This is done to keep renames fast and reliable. pub enum WillRenameFiles {} impl Request for WillRenameFiles { type Params = RenameFilesParams; type Result = Option; const METHOD: &'static str = "workspace/willRenameFiles"; } /// The will delete files request is sent from the client to the server before files are actually deleted as long as the deletion is triggered from within the client. The request can return a WorkspaceEdit which will be applied to workspace before the files are deleted. Please note that clients might drop results if computing the edit took too long or if a server constantly fails on this request. This is done to keep deletes fast and reliable. pub enum WillDeleteFiles {} impl Request for WillDeleteFiles { type Params = DeleteFilesParams; type Result = Option; const METHOD: &'static str = "workspace/willDeleteFiles"; } /// The show document request is sent from a server to a client to ask the client to display a particular document in the user interface. pub enum ShowDocument {} impl Request for ShowDocument { type Params = ShowDocumentParams; type Result = ShowDocumentResult; const METHOD: &'static str = "window/showDocument"; } pub enum MonikerRequest {} impl Request for MonikerRequest { type Params = MonikerParams; type Result = Option>; const METHOD: &'static str = "textDocument/moniker"; } /// The inlay hints request is sent from the client to the server to compute inlay hints for a given /// [text document, range] tuple that may be rendered in the editor in place with other text. pub enum InlayHintRequest {} impl Request for InlayHintRequest { type Params = InlayHintParams; type Result = Option>; const METHOD: &'static str = "textDocument/inlayHint"; } /// The `inlayHint/resolve` request is sent from the client to the server to resolve additional /// information for a given inlay hint. 
This is usually used to compute the tooltip, location or /// command properties of a inlay hint’s label part to avoid its unnecessary computation during the /// `textDocument/inlayHint` request. pub enum InlayHintResolveRequest {} impl Request for InlayHintResolveRequest { type Params = InlayHint; type Result = InlayHint; const METHOD: &'static str = "inlayHint/resolve"; } /// The `workspace/inlayHint/refresh` request is sent from the server to the client. Servers can use /// it to ask clients to refresh the inlay hints currently shown in editors. As a result the client /// should ask the server to recompute the inlay hints for these editors. This is useful if a server /// detects a configuration change which requires a re-calculation of all inlay hints. Note that the /// client still has the freedom to delay the re-calculation of the inlay hints if for example an /// editor is currently not visible. pub enum InlayHintRefreshRequest {} impl Request for InlayHintRefreshRequest { type Params = (); type Result = (); const METHOD: &'static str = "workspace/inlayHint/refresh"; } /// The inline value request is sent from the client to the server to compute inline values for a /// given text document that may be rendered in the editor at the end of lines. pub enum InlineValueRequest {} impl Request for InlineValueRequest { type Params = InlineValueParams; type Result = Option; const METHOD: &'static str = "textDocument/inlineValue"; } /// The `workspace/inlineValue/refresh` request is sent from the server to the client. Servers can /// use it to ask clients to refresh the inline values currently shown in editors. As a result the /// client should ask the server to recompute the inline values for these editors. This is useful if /// a server detects a configuration change which requires a re-calculation of all inline values. /// Note that the client still has the freedom to delay the re-calculation of the inline values if /// for example an editor is currently not visible. 
pub enum InlineValueRefreshRequest {} impl Request for InlineValueRefreshRequest { type Params = (); type Result = (); const METHOD: &'static str = "workspace/inlineValue/refresh"; } /// The text document diagnostic request is sent from the client to the server to ask the server to /// compute the diagnostics for a given document. As with other pull requests the server is asked /// to compute the diagnostics for the currently synced version of the document. #[derive(Debug)] pub enum DocumentDiagnosticRequest {} impl Request for DocumentDiagnosticRequest { type Params = DocumentDiagnosticParams; type Result = DocumentDiagnosticReportResult; const METHOD: &'static str = "textDocument/diagnostic"; } /// The workspace diagnostic request is sent from the client to the server to ask the server to /// compute workspace wide diagnostics which previously where pushed from the server to the client. /// In contrast to the document diagnostic request the workspace request can be long running and is /// not bound to a specific workspace or document state. If the client supports streaming for the /// workspace diagnostic pull it is legal to provide a document diagnostic report multiple times /// for the same document URI. The last one reported will win over previous reports. #[derive(Debug)] pub enum WorkspaceDiagnosticRequest {} impl Request for WorkspaceDiagnosticRequest { type Params = WorkspaceDiagnosticParams; const METHOD: &'static str = "workspace/diagnostic"; type Result = WorkspaceDiagnosticReportResult; } /// The `workspace/diagnostic/refresh` request is sent from the server to the client. Servers can /// use it to ask clients to refresh all needed document and workspace diagnostics. This is useful /// if a server detects a project wide configuration change which requires a re-calculation of all /// diagnostics. 
#[derive(Debug)] pub enum WorkspaceDiagnosticRefresh {} impl Request for WorkspaceDiagnosticRefresh { type Params = (); type Result = (); const METHOD: &'static str = "workspace/diagnostic/refresh"; } /// The type hierarchy request is sent from the client to the server to return a type hierarchy for /// the language element of given text document positions. Will return null if the server couldn’t /// infer a valid type from the position. The type hierarchy requests are executed in two steps: /// /// 1. first a type hierarchy item is prepared for the given text document position. /// 2. for a type hierarchy item the supertype or subtype type hierarchy items are resolved. pub enum TypeHierarchyPrepare {} impl Request for TypeHierarchyPrepare { type Params = TypeHierarchyPrepareParams; type Result = Option>; const METHOD: &'static str = "textDocument/prepareTypeHierarchy"; } /// The `typeHierarchy/supertypes` request is sent from the client to the server to resolve the /// supertypes for a given type hierarchy item. Will return null if the server couldn’t infer a /// valid type from item in the params. The request doesn’t define its own client and server /// capabilities. It is only issued if a server registers for the /// `textDocument/prepareTypeHierarchy` request. pub enum TypeHierarchySupertypes {} impl Request for TypeHierarchySupertypes { type Params = TypeHierarchySupertypesParams; type Result = Option>; const METHOD: &'static str = "typeHierarchy/supertypes"; } /// The `typeHierarchy/subtypes` request is sent from the client to the server to resolve the /// subtypes for a given type hierarchy item. Will return null if the server couldn’t infer a valid /// type from item in the params. The request doesn’t define its own client and server capabilities. /// It is only issued if a server registers for the textDocument/prepareTypeHierarchy request. 
pub enum TypeHierarchySubtypes {} impl Request for TypeHierarchySubtypes { type Params = TypeHierarchySubtypesParams; type Result = Option>; const METHOD: &'static str = "typeHierarchy/subtypes"; } #[cfg(test)] mod test { use super::*; fn fake_call() where R: Request, R::Params: serde::Serialize, R::Result: serde::de::DeserializeOwned, { } macro_rules! check_macro { ($name:tt) => { // check whether the macro name matches the method assert_eq!(::METHOD, $name); // test whether type checking passes for each component fake_call::(); }; } #[test] fn check_macro_definitions() { check_macro!("initialize"); check_macro!("shutdown"); check_macro!("window/showDocument"); check_macro!("window/showMessageRequest"); check_macro!("window/workDoneProgress/create"); check_macro!("client/registerCapability"); check_macro!("client/unregisterCapability"); check_macro!("textDocument/willSaveWaitUntil"); check_macro!("textDocument/completion"); check_macro!("textDocument/hover"); check_macro!("textDocument/signatureHelp"); check_macro!("textDocument/declaration"); check_macro!("textDocument/definition"); check_macro!("textDocument/references"); check_macro!("textDocument/documentHighlight"); check_macro!("textDocument/documentSymbol"); check_macro!("textDocument/codeAction"); check_macro!("textDocument/codeLens"); check_macro!("textDocument/documentLink"); check_macro!("textDocument/rangeFormatting"); check_macro!("textDocument/onTypeFormatting"); check_macro!("textDocument/formatting"); check_macro!("textDocument/rename"); check_macro!("textDocument/documentColor"); check_macro!("textDocument/colorPresentation"); check_macro!("textDocument/foldingRange"); check_macro!("textDocument/prepareRename"); check_macro!("textDocument/implementation"); check_macro!("textDocument/selectionRange"); check_macro!("textDocument/typeDefinition"); check_macro!("textDocument/moniker"); check_macro!("textDocument/linkedEditingRange"); check_macro!("textDocument/prepareCallHierarchy"); 
check_macro!("textDocument/prepareTypeHierarchy"); check_macro!("textDocument/semanticTokens/full"); check_macro!("textDocument/semanticTokens/full/delta"); check_macro!("textDocument/semanticTokens/range"); check_macro!("textDocument/inlayHint"); check_macro!("textDocument/inlineValue"); check_macro!("textDocument/diagnostic"); check_macro!("workspace/applyEdit"); check_macro!("workspace/symbol"); check_macro!("workspace/executeCommand"); check_macro!("workspace/configuration"); check_macro!("workspace/diagnostic"); check_macro!("workspace/diagnostic/refresh"); check_macro!("workspace/willCreateFiles"); check_macro!("workspace/willRenameFiles"); check_macro!("workspace/willDeleteFiles"); check_macro!("workspace/workspaceFolders"); check_macro!("workspace/semanticTokens/refresh"); check_macro!("workspace/codeLens/refresh"); check_macro!("workspace/inlayHint/refresh"); check_macro!("workspace/inlineValue/refresh"); check_macro!("callHierarchy/incomingCalls"); check_macro!("callHierarchy/outgoingCalls"); check_macro!("codeAction/resolve"); check_macro!("codeLens/resolve"); check_macro!("completionItem/resolve"); check_macro!("documentLink/resolve"); check_macro!("inlayHint/resolve"); check_macro!("typeHierarchy/subtypes"); check_macro!("typeHierarchy/supertypes"); check_macro!("workspaceSymbol/resolve"); } #[test] #[cfg(feature = "proposed")] fn check_proposed_macro_definitions() {} } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/selection_range.rs000066400000000000000000000054071500614314100271600ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; use crate::{ PartialResultParams, Position, Range, StaticTextDocumentRegistrationOptions, TextDocumentIdentifier, WorkDoneProgressOptions, WorkDoneProgressParams, }; #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SelectionRangeClientCapabilities { /// Whether implementation supports dynamic registration for selection 
range /// providers. If this is set to `true` the client supports the new /// `SelectionRangeRegistrationOptions` return value for the corresponding /// server capability as well. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct SelectionRangeOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct SelectionRangeRegistrationOptions { #[serde(flatten)] pub selection_range_options: SelectionRangeOptions, #[serde(flatten)] pub registration_options: StaticTextDocumentRegistrationOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum SelectionRangeProviderCapability { Simple(bool), Options(SelectionRangeOptions), RegistrationOptions(SelectionRangeRegistrationOptions), } impl From for SelectionRangeProviderCapability { fn from(from: SelectionRangeRegistrationOptions) -> Self { Self::RegistrationOptions(from) } } impl From for SelectionRangeProviderCapability { fn from(from: SelectionRangeOptions) -> Self { Self::Options(from) } } impl From for SelectionRangeProviderCapability { fn from(from: bool) -> Self { Self::Simple(from) } } /// A parameter literal used in selection range requests. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SelectionRangeParams { /// The text document. pub text_document: TextDocumentIdentifier, /// The positions inside the text document. pub positions: Vec, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } /// Represents a selection range. #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SelectionRange { /// Range of the selection. 
pub range: Range, /// The parent selection range containing this range. #[serde(skip_serializing_if = "Option::is_none")] pub parent: Option>, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/semantic_tokens.rs000066400000000000000000000612161500614314100272050ustar00rootroot00000000000000use std::borrow::Cow; use serde::ser::SerializeSeq; use serde::{Deserialize, Serialize}; use crate::{ PartialResultParams, Range, StaticRegistrationOptions, TextDocumentIdentifier, TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams, }; /// A set of predefined token types. This set is not fixed /// and clients can specify additional token types via the /// corresponding client capabilities. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)] pub struct SemanticTokenType(Cow<'static, str>); impl SemanticTokenType { pub const NAMESPACE: SemanticTokenType = SemanticTokenType::new("namespace"); pub const TYPE: SemanticTokenType = SemanticTokenType::new("type"); pub const CLASS: SemanticTokenType = SemanticTokenType::new("class"); pub const ENUM: SemanticTokenType = SemanticTokenType::new("enum"); pub const INTERFACE: SemanticTokenType = SemanticTokenType::new("interface"); pub const STRUCT: SemanticTokenType = SemanticTokenType::new("struct"); pub const TYPE_PARAMETER: SemanticTokenType = SemanticTokenType::new("typeParameter"); pub const PARAMETER: SemanticTokenType = SemanticTokenType::new("parameter"); pub const VARIABLE: SemanticTokenType = SemanticTokenType::new("variable"); pub const PROPERTY: SemanticTokenType = SemanticTokenType::new("property"); pub const ENUM_MEMBER: SemanticTokenType = SemanticTokenType::new("enumMember"); pub const EVENT: SemanticTokenType = SemanticTokenType::new("event"); pub const FUNCTION: SemanticTokenType = SemanticTokenType::new("function"); pub const METHOD: SemanticTokenType = SemanticTokenType::new("method"); pub const MACRO: SemanticTokenType = 
SemanticTokenType::new("macro"); pub const KEYWORD: SemanticTokenType = SemanticTokenType::new("keyword"); pub const MODIFIER: SemanticTokenType = SemanticTokenType::new("modifier"); pub const COMMENT: SemanticTokenType = SemanticTokenType::new("comment"); pub const STRING: SemanticTokenType = SemanticTokenType::new("string"); pub const NUMBER: SemanticTokenType = SemanticTokenType::new("number"); pub const REGEXP: SemanticTokenType = SemanticTokenType::new("regexp"); pub const OPERATOR: SemanticTokenType = SemanticTokenType::new("operator"); /// @since 3.17.0 pub const DECORATOR: SemanticTokenType = SemanticTokenType::new("decorator"); pub const fn new(tag: &'static str) -> Self { SemanticTokenType(Cow::Borrowed(tag)) } pub fn as_str(&self) -> &str { &self.0 } } impl From for SemanticTokenType { fn from(from: String) -> Self { SemanticTokenType(Cow::from(from)) } } impl From<&'static str> for SemanticTokenType { fn from(from: &'static str) -> Self { SemanticTokenType::new(from) } } /// A set of predefined token modifiers. This set is not fixed /// and clients can specify additional token types via the /// corresponding client capabilities. 
/// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)] pub struct SemanticTokenModifier(Cow<'static, str>); impl SemanticTokenModifier { pub const DECLARATION: SemanticTokenModifier = SemanticTokenModifier::new("declaration"); pub const DEFINITION: SemanticTokenModifier = SemanticTokenModifier::new("definition"); pub const READONLY: SemanticTokenModifier = SemanticTokenModifier::new("readonly"); pub const STATIC: SemanticTokenModifier = SemanticTokenModifier::new("static"); pub const DEPRECATED: SemanticTokenModifier = SemanticTokenModifier::new("deprecated"); pub const ABSTRACT: SemanticTokenModifier = SemanticTokenModifier::new("abstract"); pub const ASYNC: SemanticTokenModifier = SemanticTokenModifier::new("async"); pub const MODIFICATION: SemanticTokenModifier = SemanticTokenModifier::new("modification"); pub const DOCUMENTATION: SemanticTokenModifier = SemanticTokenModifier::new("documentation"); pub const DEFAULT_LIBRARY: SemanticTokenModifier = SemanticTokenModifier::new("defaultLibrary"); pub const fn new(tag: &'static str) -> Self { SemanticTokenModifier(Cow::Borrowed(tag)) } pub fn as_str(&self) -> &str { &self.0 } } impl From for SemanticTokenModifier { fn from(from: String) -> Self { SemanticTokenModifier(Cow::from(from)) } } impl From<&'static str> for SemanticTokenModifier { fn from(from: &'static str) -> Self { SemanticTokenModifier::new(from) } } #[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)] pub struct TokenFormat(Cow<'static, str>); impl TokenFormat { pub const RELATIVE: TokenFormat = TokenFormat::new("relative"); pub const fn new(tag: &'static str) -> Self { TokenFormat(Cow::Borrowed(tag)) } pub fn as_str(&self) -> &str { &self.0 } } impl From for TokenFormat { fn from(from: String) -> Self { TokenFormat(Cow::from(from)) } } impl From<&'static str> for TokenFormat { fn from(from: &'static str) -> Self { TokenFormat::new(from) } } /// @since 3.16.0 #[derive(Debug, 
Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SemanticTokensLegend { /// The token types a server uses. pub token_types: Vec, /// The token modifiers a server uses. pub token_modifiers: Vec, } /// The actual tokens. #[derive(Debug, Eq, PartialEq, Copy, Clone, Default)] pub struct SemanticToken { pub delta_line: u32, pub delta_start: u32, pub length: u32, pub token_type: u32, pub token_modifiers_bitset: u32, } impl SemanticToken { fn deserialize_tokens<'de, D>(deserializer: D) -> Result, D::Error> where D: serde::Deserializer<'de>, { let data = Vec::::deserialize(deserializer)?; let chunks = data.chunks_exact(5); if !chunks.remainder().is_empty() { return Result::Err(serde::de::Error::custom("Length is not divisible by 5")); } Result::Ok( chunks .map(|chunk| SemanticToken { delta_line: chunk[0], delta_start: chunk[1], length: chunk[2], token_type: chunk[3], token_modifiers_bitset: chunk[4], }) .collect(), ) } fn serialize_tokens(tokens: &[SemanticToken], serializer: S) -> Result where S: serde::Serializer, { let mut seq = serializer.serialize_seq(Some(tokens.len() * 5))?; for token in tokens.iter() { seq.serialize_element(&token.delta_line)?; seq.serialize_element(&token.delta_start)?; seq.serialize_element(&token.length)?; seq.serialize_element(&token.token_type)?; seq.serialize_element(&token.token_modifiers_bitset)?; } seq.end() } fn deserialize_tokens_opt<'de, D>( deserializer: D, ) -> Result>, D::Error> where D: serde::Deserializer<'de>, { #[derive(Deserialize)] #[serde(transparent)] struct Wrapper { #[serde(deserialize_with = "SemanticToken::deserialize_tokens")] tokens: Vec, } Ok(Option::::deserialize(deserializer)?.map(|wrapper| wrapper.tokens)) } fn serialize_tokens_opt( data: &Option>, serializer: S, ) -> Result where S: serde::Serializer, { #[derive(Serialize)] #[serde(transparent)] struct Wrapper { #[serde(serialize_with = "SemanticToken::serialize_tokens")] tokens: Vec, } let opt = 
data.as_ref().map(|t| Wrapper { tokens: t.to_vec() }); opt.serialize(serializer) } } /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SemanticTokens { /// An optional result id. If provided and clients support delta updating /// the client will include the result id in the next semantic token request. /// A server can then instead of computing all semantic tokens again simply /// send a delta. #[serde(skip_serializing_if = "Option::is_none")] pub result_id: Option, /// The actual tokens. For a detailed description about how the data is /// structured please see /// #[serde( deserialize_with = "SemanticToken::deserialize_tokens", serialize_with = "SemanticToken::serialize_tokens" )] pub data: Vec, } /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SemanticTokensPartialResult { #[serde( deserialize_with = "SemanticToken::deserialize_tokens", serialize_with = "SemanticToken::serialize_tokens" )] pub data: Vec, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] #[serde(untagged)] pub enum SemanticTokensResult { Tokens(SemanticTokens), Partial(SemanticTokensPartialResult), } impl From for SemanticTokensResult { fn from(from: SemanticTokens) -> Self { SemanticTokensResult::Tokens(from) } } impl From for SemanticTokensResult { fn from(from: SemanticTokensPartialResult) -> Self { SemanticTokensResult::Partial(from) } } /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SemanticTokensEdit { pub start: u32, pub delete_count: u32, #[serde( default, skip_serializing_if = "Option::is_none", deserialize_with = "SemanticToken::deserialize_tokens_opt", serialize_with = "SemanticToken::serialize_tokens_opt" )] pub data: Option>, } #[derive(Debug, Eq, PartialEq, Clone, 
Deserialize, Serialize)] #[serde(rename_all = "camelCase")] #[serde(untagged)] pub enum SemanticTokensFullDeltaResult { Tokens(SemanticTokens), TokensDelta(SemanticTokensDelta), PartialTokensDelta { edits: Vec }, } impl From for SemanticTokensFullDeltaResult { fn from(from: SemanticTokens) -> Self { SemanticTokensFullDeltaResult::Tokens(from) } } impl From for SemanticTokensFullDeltaResult { fn from(from: SemanticTokensDelta) -> Self { SemanticTokensFullDeltaResult::TokensDelta(from) } } /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SemanticTokensDelta { #[serde(skip_serializing_if = "Option::is_none")] pub result_id: Option, /// For a detailed description how these edits are structured please see /// pub edits: Vec, } /// Capabilities specific to the `textDocument/semanticTokens/*` requests. /// /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SemanticTokensClientCapabilities { /// Whether implementation supports dynamic registration. If this is set to `true` /// the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` /// return value for the corresponding server capability as well. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// Which requests the client supports and might send to the server /// depending on the server's capability. Please note that clients might not /// show semantic tokens or degrade some of the user experience if a range /// or full request is advertised by the client but not provided by the /// server. If for example the client capability `requests.full` and /// `request.range` are both set to true but the server only provides a /// range provider the client might not render a minimap correctly or might /// even decide to not show any semantic tokens at all. 
pub requests: SemanticTokensClientCapabilitiesRequests, /// The token types that the client supports. pub token_types: Vec, /// The token modifiers that the client supports. pub token_modifiers: Vec, /// The token formats the clients supports. pub formats: Vec, /// Whether the client supports tokens that can overlap each other. #[serde(skip_serializing_if = "Option::is_none")] pub overlapping_token_support: Option, /// Whether the client supports tokens that can span multiple lines. #[serde(skip_serializing_if = "Option::is_none")] pub multiline_token_support: Option, /// Whether the client allows the server to actively cancel a /// semantic token request, e.g. supports returning /// ErrorCodes.ServerCancelled. If a server does the client /// needs to retrigger the request. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub server_cancel_support: Option, /// Whether the client uses semantic tokens to augment existing /// syntax tokens. If set to `true` client side created syntax /// tokens and semantic tokens are both used for colorization. If /// set to `false` the client only uses the returned semantic tokens /// for colorization. /// /// If the value is `undefined` then the client behavior is not /// specified. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub augments_syntax_tokens: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SemanticTokensClientCapabilitiesRequests { /// The client will send the `textDocument/semanticTokens/range` request if the server provides a corresponding handler. #[serde(skip_serializing_if = "Option::is_none")] pub range: Option, /// The client will send the `textDocument/semanticTokens/full` request if the server provides a corresponding handler. 
#[serde(skip_serializing_if = "Option::is_none")] pub full: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] #[serde(untagged)] pub enum SemanticTokensFullOptions { Bool(bool), Delta { /// The client will send the `textDocument/semanticTokens/full/delta` request if the server provides a corresponding handler. /// The server supports deltas for full documents. #[serde(skip_serializing_if = "Option::is_none")] delta: Option, }, } /// @since 3.16.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SemanticTokensOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, /// The legend used by the server pub legend: SemanticTokensLegend, /// Server supports providing semantic tokens for a specific range /// of a document. #[serde(skip_serializing_if = "Option::is_none")] pub range: Option, /// Server supports providing semantic tokens for a full document. 
#[serde(skip_serializing_if = "Option::is_none")] pub full: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SemanticTokensRegistrationOptions { #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, #[serde(flatten)] pub semantic_tokens_options: SemanticTokensOptions, #[serde(flatten)] pub static_registration_options: StaticRegistrationOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] #[serde(untagged)] pub enum SemanticTokensServerCapabilities { SemanticTokensOptions(SemanticTokensOptions), SemanticTokensRegistrationOptions(SemanticTokensRegistrationOptions), } impl From for SemanticTokensServerCapabilities { fn from(from: SemanticTokensOptions) -> Self { SemanticTokensServerCapabilities::SemanticTokensOptions(from) } } impl From for SemanticTokensServerCapabilities { fn from(from: SemanticTokensRegistrationOptions) -> Self { SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(from) } } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SemanticTokensWorkspaceClientCapabilities { /// Whether the client implementation supports a refresh request sent from /// the server to the client. /// /// Note that this event is global and will force the client to refresh all /// semantic tokens currently shown. It should be used with absolute care /// and is useful for situation where a server for example detect a project /// wide change that requires such a calculation. pub refresh_support: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SemanticTokensParams { #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, /// The text document. 
pub text_document: TextDocumentIdentifier, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SemanticTokensDeltaParams { #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, /// The text document. pub text_document: TextDocumentIdentifier, /// The result id of a previous response. The result Id can either point to a full response /// or a delta response depending on what was received last. pub previous_result_id: String, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SemanticTokensRangeParams { #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, /// The text document. pub text_document: TextDocumentIdentifier, /// The range the semantic tokens are requested for. pub range: Range, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] #[serde(untagged)] pub enum SemanticTokensRangeResult { Tokens(SemanticTokens), Partial(SemanticTokensPartialResult), } impl From for SemanticTokensRangeResult { fn from(tokens: SemanticTokens) -> Self { SemanticTokensRangeResult::Tokens(tokens) } } impl From for SemanticTokensRangeResult { fn from(partial: SemanticTokensPartialResult) -> Self { SemanticTokensRangeResult::Partial(partial) } } #[cfg(test)] mod tests { use super::*; use crate::tests::{test_deserialization, test_serialization}; #[test] fn test_semantic_tokens_support_serialization() { test_serialization( &SemanticTokens { result_id: None, data: vec![], }, r#"{"data":[]}"#, ); test_serialization( &SemanticTokens { result_id: None, data: vec![SemanticToken { delta_line: 2, delta_start: 5, length: 3, token_type: 0, token_modifiers_bitset: 3, }], }, r#"{"data":[2,5,3,0,3]}"#, ); test_serialization( &SemanticTokens { result_id: None, data: 
vec![ SemanticToken { delta_line: 2, delta_start: 5, length: 3, token_type: 0, token_modifiers_bitset: 3, }, SemanticToken { delta_line: 0, delta_start: 5, length: 4, token_type: 1, token_modifiers_bitset: 0, }, ], }, r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#, ); } #[test] fn test_semantic_tokens_support_deserialization() { test_deserialization( r#"{"data":[]}"#, &SemanticTokens { result_id: None, data: vec![], }, ); test_deserialization( r#"{"data":[2,5,3,0,3]}"#, &SemanticTokens { result_id: None, data: vec![SemanticToken { delta_line: 2, delta_start: 5, length: 3, token_type: 0, token_modifiers_bitset: 3, }], }, ); test_deserialization( r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#, &SemanticTokens { result_id: None, data: vec![ SemanticToken { delta_line: 2, delta_start: 5, length: 3, token_type: 0, token_modifiers_bitset: 3, }, SemanticToken { delta_line: 0, delta_start: 5, length: 4, token_type: 1, token_modifiers_bitset: 0, }, ], }, ); } #[test] #[should_panic] fn test_semantic_tokens_support_deserialization_err() { test_deserialization( r#"{"data":[1]}"#, &SemanticTokens { result_id: None, data: vec![], }, ); } #[test] fn test_semantic_tokens_edit_support_deserialization() { test_deserialization( r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#, &SemanticTokensEdit { start: 0, delete_count: 1, data: Some(vec![ SemanticToken { delta_line: 2, delta_start: 5, length: 3, token_type: 0, token_modifiers_bitset: 3, }, SemanticToken { delta_line: 0, delta_start: 5, length: 4, token_type: 1, token_modifiers_bitset: 0, }, ]), }, ); test_deserialization( r#"{"start":0,"deleteCount":1}"#, &SemanticTokensEdit { start: 0, delete_count: 1, data: None, }, ); } #[test] fn test_semantic_tokens_edit_support_serialization() { test_serialization( &SemanticTokensEdit { start: 0, delete_count: 1, data: Some(vec![ SemanticToken { delta_line: 2, delta_start: 5, length: 3, token_type: 0, token_modifiers_bitset: 3, }, SemanticToken { delta_line: 0, delta_start: 5, length: 4, token_type: 
1, token_modifiers_bitset: 0, }, ]), }, r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#, ); test_serialization( &SemanticTokensEdit { start: 0, delete_count: 1, data: None, }, r#"{"start":0,"deleteCount":1}"#, ); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/signature_help.rs000066400000000000000000000177551500614314100270410ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; use crate::{ Documentation, MarkupKind, TextDocumentPositionParams, TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams, }; #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SignatureInformationSettings { /// Client supports the follow content formats for the documentation /// property. The order describes the preferred format of the client. #[serde(skip_serializing_if = "Option::is_none")] pub documentation_format: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub parameter_information: Option, /// The client support the `activeParameter` property on `SignatureInformation` /// literal. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub active_parameter_support: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ParameterInformationSettings { /// The client supports processing label offsets instead of a /// simple label string. /// /// @since 3.14.0 #[serde(skip_serializing_if = "Option::is_none")] pub label_offset_support: Option, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SignatureHelpClientCapabilities { /// Whether completion supports dynamic registration. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// The client supports the following `SignatureInformation` /// specific properties. 
#[serde(skip_serializing_if = "Option::is_none")] pub signature_information: Option, /// The client supports to send additional context information for a /// `textDocument/signatureHelp` request. A client that opts into /// contextSupport will also support the `retriggerCharacters` on /// `SignatureHelpOptions`. /// /// @since 3.15.0 #[serde(skip_serializing_if = "Option::is_none")] pub context_support: Option, } /// Signature help options. #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SignatureHelpOptions { /// The characters that trigger signature help automatically. #[serde(skip_serializing_if = "Option::is_none")] pub trigger_characters: Option>, /// List of characters that re-trigger signature help. /// These trigger characters are only active when signature help is already showing. All trigger characters /// are also counted as re-trigger characters. #[serde(skip_serializing_if = "Option::is_none")] pub retrigger_characters: Option>, #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } /// Signature help options. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct SignatureHelpRegistrationOptions { #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, } /// Signature help options. #[derive(Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(transparent)] pub struct SignatureHelpTriggerKind(i32); lsp_enum! { impl SignatureHelpTriggerKind { /// Signature help was invoked manually by the user or by a command. pub const INVOKED: SignatureHelpTriggerKind = SignatureHelpTriggerKind(1); /// Signature help was triggered by a trigger character. pub const TRIGGER_CHARACTER: SignatureHelpTriggerKind = SignatureHelpTriggerKind(2); /// Signature help was triggered by the cursor moving or by the document content changing. 
pub const CONTENT_CHANGE: SignatureHelpTriggerKind = SignatureHelpTriggerKind(3); } } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SignatureHelpParams { /// The signature help context. This is only available if the client specifies /// to send this using the client capability `textDocument.signatureHelp.contextSupport === true` #[serde(skip_serializing_if = "Option::is_none")] pub context: Option, #[serde(flatten)] pub text_document_position_params: TextDocumentPositionParams, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SignatureHelpContext { /// Action that caused signature help to be triggered. pub trigger_kind: SignatureHelpTriggerKind, /// Character that caused signature help to be triggered. /// This is undefined when `triggerKind !== SignatureHelpTriggerKind.TriggerCharacter` #[serde(skip_serializing_if = "Option::is_none")] pub trigger_character: Option, /// `true` if signature help was already showing when it was triggered. /// Retriggers occur when the signature help is already active and can be caused by actions such as /// typing a trigger character, a cursor move, or document content changes. pub is_retrigger: bool, /// The currently active `SignatureHelp`. /// The `activeSignatureHelp` has its `SignatureHelp.activeSignature` field updated based on /// the user navigating through available signatures. #[serde(skip_serializing_if = "Option::is_none")] pub active_signature_help: Option, } /// Signature help represents the signature of something /// callable. There can be multiple signature but only one /// active and only one active parameter. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SignatureHelp { /// One or more signatures. pub signatures: Vec, /// The active signature. 
#[serde(skip_serializing_if = "Option::is_none")] pub active_signature: Option, /// The active parameter of the active signature. #[serde(skip_serializing_if = "Option::is_none")] pub active_parameter: Option, } /// Represents the signature of something callable. A signature /// can have a label, like a function-name, a doc-comment, and /// a set of parameters. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SignatureInformation { /// The label of this signature. Will be shown in /// the UI. pub label: String, /// The human-readable doc-comment of this signature. Will be shown /// in the UI but can be omitted. #[serde(skip_serializing_if = "Option::is_none")] pub documentation: Option, /// The parameters of this signature. #[serde(skip_serializing_if = "Option::is_none")] pub parameters: Option>, /// The index of the active parameter. /// /// If provided, this is used in place of `SignatureHelp.activeParameter`. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub active_parameter: Option, } /// Represents a parameter of a callable-signature. A parameter can /// have a label and a doc-comment. #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ParameterInformation { /// The label of this parameter information. /// /// Either a string or an inclusive start and exclusive end offsets within its containing /// signature label. (see SignatureInformation.label). *Note*: A label of type string must be /// a substring of its containing signature label. pub label: ParameterLabel, /// The human-readable doc-comment of this parameter. Will be shown /// in the UI but can be omitted. 
#[serde(skip_serializing_if = "Option::is_none")] pub documentation: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum ParameterLabel { Simple(String), LabelOffsets([u32; 2]), } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/trace.rs000066400000000000000000000042651500614314100251160ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct SetTraceParams { /// The new value that should be assigned to the trace setting. pub value: TraceValue, } /// A TraceValue represents the level of verbosity with which the server systematically /// reports its execution trace using `LogTrace` notifications. /// /// The initial trace value is set by the client at initialization and can be modified /// later using the `SetTrace` notification. #[derive(Debug, Eq, PartialEq, Clone, Copy, Deserialize, Serialize, Default)] #[serde(rename_all = "camelCase")] pub enum TraceValue { /// The server should not send any `$/logTrace` notification #[default] Off, /// The server should not add the 'verbose' field in the `LogTraceParams` Messages, Verbose, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct LogTraceParams { /// The message to be logged. 
pub message: String, /// Additional information that can be computed if the `trace` configuration /// is set to `'verbose'` #[serde(skip_serializing_if = "Option::is_none")] pub verbose: Option, } #[cfg(test)] mod tests { use super::*; use crate::tests::test_serialization; #[test] fn test_set_trace_params() { test_serialization( &SetTraceParams { value: TraceValue::Off, }, r#"{"value":"off"}"#, ); } #[test] fn test_log_trace_params() { test_serialization( &LogTraceParams { message: "message".into(), verbose: None, }, r#"{"message":"message"}"#, ); test_serialization( &LogTraceParams { message: "message".into(), verbose: Some("verbose".into()), }, r#"{"message":"message","verbose":"verbose"}"#, ); } #[test] fn test_trace_value() { test_serialization( &vec![TraceValue::Off, TraceValue::Messages, TraceValue::Verbose], r#"["off","messages","verbose"]"#, ); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/type_hierarchy.rs000066400000000000000000000061321500614314100270320ustar00rootroot00000000000000use crate::{ DynamicRegistrationClientCapabilities, LSPAny, PartialResultParams, Range, StaticRegistrationOptions, SymbolKind, SymbolTag, TextDocumentPositionParams, TextDocumentRegistrationOptions, Url, WorkDoneProgressOptions, WorkDoneProgressParams, }; use serde::{Deserialize, Serialize}; pub type TypeHierarchyClientCapabilities = DynamicRegistrationClientCapabilities; #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct TypeHierarchyOptions { #[serde(flatten)] pub work_done_progress_options: WorkDoneProgressOptions, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct TypeHierarchyRegistrationOptions { #[serde(flatten)] pub text_document_registration_options: TextDocumentRegistrationOptions, #[serde(flatten)] pub type_hierarchy_options: TypeHierarchyOptions, #[serde(flatten)] pub static_registration_options: StaticRegistrationOptions, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, 
Serialize)] pub struct TypeHierarchyPrepareParams { #[serde(flatten)] pub text_document_position_params: TextDocumentPositionParams, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct TypeHierarchySupertypesParams { pub item: TypeHierarchyItem, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct TypeHierarchySubtypesParams { pub item: TypeHierarchyItem, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct TypeHierarchyItem { /// The name of this item. pub name: String, /// The kind of this item. pub kind: SymbolKind, /// Tags for this item. #[serde(skip_serializing_if = "Option::is_none")] pub tags: Option, /// More detail for this item, e.g. the signature of a function. #[serde(skip_serializing_if = "Option::is_none")] pub detail: Option, /// The resource identifier of this item. pub uri: Url, /// The range enclosing this symbol not including leading/trailing whitespace /// but everything else, e.g. comments and code. pub range: Range, /// The range that should be selected and revealed when this symbol is being /// picked, e.g. the name of a function. Must be contained by the /// [`range`](#TypeHierarchyItem.range). pub selection_range: Range, /// A data entry field that is preserved between a type hierarchy prepare and /// supertypes or subtypes requests. It could also be used to identify the /// type hierarchy in the server, helping improve the performance on /// resolving supertypes and subtypes. 
#[serde(skip_serializing_if = "Option::is_none")] pub data: Option, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/window.rs000066400000000000000000000126551500614314100253310ustar00rootroot00000000000000use std::collections::HashMap; use serde::{Deserialize, Serialize}; use serde_json::Value; use crate::{Range, Url}; #[derive(Eq, PartialEq, Clone, Copy, Deserialize, Serialize)] #[serde(transparent)] pub struct MessageType(i32); lsp_enum! { impl MessageType { /// An error message. pub const ERROR: MessageType = MessageType(1); /// A warning message. pub const WARNING: MessageType = MessageType(2); /// An information message; pub const INFO: MessageType = MessageType(3); /// A log message. pub const LOG: MessageType = MessageType(4); } } /// Window specific client capabilities. #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WindowClientCapabilities { /// Whether client supports handling progress notifications. If set /// servers are allowed to report in `workDoneProgress` property in the /// request specific server capabilities. /// /// @since 3.15.0 #[serde(skip_serializing_if = "Option::is_none")] pub work_done_progress: Option, /// Capabilities specific to the showMessage request. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub show_message: Option, /// Client capabilities for the show document request. /// /// @since 3.16.0 #[serde(skip_serializing_if = "Option::is_none")] pub show_document: Option, } /// Show message request client capabilities #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ShowMessageRequestClientCapabilities { /// Capabilities specific to the `MessageActionItem` type. 
#[serde(skip_serializing_if = "Option::is_none")] pub message_action_item: Option, } #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct MessageActionItemCapabilities { /// Whether the client supports additional attributes which /// are preserved and send back to the server in the /// request's response. #[serde(skip_serializing_if = "Option::is_none")] pub additional_properties_support: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct MessageActionItem { /// A short title like 'Retry', 'Open Log' etc. pub title: String, /// Additional attributes that the client preserves and /// sends back to the server. This depends on the client /// capability window.messageActionItem.additionalPropertiesSupport #[serde(flatten)] pub properties: HashMap, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum MessageActionItemProperty { String(String), Boolean(bool), Integer(i32), Object(Value), } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct LogMessageParams { /// The message type. See {@link MessageType} #[serde(rename = "type")] pub typ: MessageType, /// The actual message pub message: String, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct ShowMessageParams { /// The message type. See {@link MessageType}. #[serde(rename = "type")] pub typ: MessageType, /// The actual message. pub message: String, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct ShowMessageRequestParams { /// The message type. See {@link MessageType} #[serde(rename = "type")] pub typ: MessageType, /// The actual message pub message: String, /// The message action items to present. #[serde(skip_serializing_if = "Option::is_none")] pub actions: Option>, } /// Client capabilities for the show document request. 
#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ShowDocumentClientCapabilities { /// The client has support for the show document request. pub support: bool, } /// Params to show a document. /// /// @since 3.16.0 #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ShowDocumentParams { /// The document uri to show. pub uri: Url, /// Indicates to show the resource in an external program. /// To show for example `https://code.visualstudio.com/` /// in the default WEB browser set `external` to `true`. #[serde(skip_serializing_if = "Option::is_none")] pub external: Option, /// An optional property to indicate whether the editor /// showing the document should take focus or not. /// Clients might ignore this property if an external /// program in started. #[serde(skip_serializing_if = "Option::is_none")] pub take_focus: Option, /// An optional selection range if the document is a text /// document. Clients might ignore the property if an /// external program is started or the file is not a text /// file. #[serde(skip_serializing_if = "Option::is_none")] pub selection: Option, } /// The result of an show document request. /// /// @since 3.16.0 #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ShowDocumentResult { /// A boolean indicating if the show was successful. pub success: bool, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/workspace_diagnostic.rs000066400000000000000000000115471500614314100302230ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; use crate::{ FullDocumentDiagnosticReport, PartialResultParams, UnchangedDocumentDiagnosticReport, Url, WorkDoneProgressParams, }; /// Workspace client capabilities specific to diagnostic pull requests. 
/// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DiagnosticWorkspaceClientCapabilities { /// Whether the client implementation supports a refresh request sent from /// the server to the client. /// /// Note that this event is global and will force the client to refresh all /// pulled diagnostics currently shown. It should be used with absolute care /// and is useful for situation where a server for example detects a project /// wide change that requires such a calculation. #[serde(skip_serializing_if = "Option::is_none")] pub refresh_support: Option, } /// A previous result ID in a workspace pull request. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct PreviousResultId { /// The URI for which the client knows a result ID. pub uri: Url, /// The value of the previous result ID. pub value: String, } /// Parameters of the workspace diagnostic request. /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WorkspaceDiagnosticParams { /// The additional identifier provided during registration. pub identifier: Option, /// The currently known diagnostic reports with their /// previous result ids. pub previous_result_ids: Vec, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, #[serde(flatten)] pub partial_result_params: PartialResultParams, } /// A full document diagnostic report for a workspace diagnostic result. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct WorkspaceFullDocumentDiagnosticReport { /// The URI for which diagnostic information is reported. pub uri: Url, /// The version number for which the diagnostics are reported. /// /// If the document is not marked as open, `None` can be provided. 
pub version: Option, #[serde(flatten)] pub full_document_diagnostic_report: FullDocumentDiagnosticReport, } /// An unchanged document diagnostic report for a workspace diagnostic result. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct WorkspaceUnchangedDocumentDiagnosticReport { /// The URI for which diagnostic information is reported. pub uri: Url, /// The version number for which the diagnostics are reported. /// /// If the document is not marked as open, `None` can be provided. pub version: Option, #[serde(flatten)] pub unchanged_document_diagnostic_report: UnchangedDocumentDiagnosticReport, } /// A workspace diagnostic document report. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(tag = "kind", rename_all = "lowercase")] pub enum WorkspaceDocumentDiagnosticReport { Full(WorkspaceFullDocumentDiagnosticReport), Unchanged(WorkspaceUnchangedDocumentDiagnosticReport), } impl From for WorkspaceDocumentDiagnosticReport { fn from(from: WorkspaceFullDocumentDiagnosticReport) -> Self { WorkspaceDocumentDiagnosticReport::Full(from) } } impl From for WorkspaceDocumentDiagnosticReport { fn from(from: WorkspaceUnchangedDocumentDiagnosticReport) -> Self { WorkspaceDocumentDiagnosticReport::Unchanged(from) } } /// A workspace diagnostic report. /// /// @since 3.17.0 #[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)] pub struct WorkspaceDiagnosticReport { pub items: Vec, } /// A partial result for a workspace diagnostic report. 
/// /// @since 3.17.0 #[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)] pub struct WorkspaceDiagnosticReportPartialResult { pub items: Vec, } #[derive(Debug, PartialEq, Deserialize, Serialize, Clone)] #[serde(untagged)] pub enum WorkspaceDiagnosticReportResult { Report(WorkspaceDiagnosticReport), Partial(WorkspaceDiagnosticReportPartialResult), } impl From for WorkspaceDiagnosticReportResult { fn from(from: WorkspaceDiagnosticReport) -> Self { WorkspaceDiagnosticReportResult::Report(from) } } impl From for WorkspaceDiagnosticReportResult { fn from(from: WorkspaceDiagnosticReportPartialResult) -> Self { WorkspaceDiagnosticReportResult::Partial(from) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/workspace_folders.rs000066400000000000000000000033271500614314100275320ustar00rootroot00000000000000use serde::{Deserialize, Serialize}; use crate::{OneOf, Url}; #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WorkspaceFoldersServerCapabilities { /// The server has support for workspace folders #[serde(skip_serializing_if = "Option::is_none")] pub supported: Option, /// Whether the server wants to receive workspace folder /// change notifications. /// /// If a string is provided, the string is treated as an ID /// under which the notification is registered on the client /// side. The ID can be used to unregister for these events /// using the `client/unregisterCapability` request. #[serde(skip_serializing_if = "Option::is_none")] pub change_notifications: Option>, } #[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WorkspaceFolder { /// The associated URI for this workspace folder. pub uri: Url, /// The name of the workspace folder. Defaults to the uri's basename. 
pub name: String, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct DidChangeWorkspaceFoldersParams { /// The actual workspace folder change event. pub event: WorkspaceFoldersChangeEvent, } /// The workspace folder change event. #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WorkspaceFoldersChangeEvent { /// The array of added workspace folders pub added: Vec, /// The array of the removed workspace folders pub removed: Vec, } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp-types/src/workspace_symbols.rs000066400000000000000000000071531500614314100275650ustar00rootroot00000000000000use crate::{ LSPAny, Location, OneOf, PartialResultParams, SymbolInformation, SymbolKind, SymbolKindCapability, SymbolTag, TagSupport, Url, WorkDoneProgressParams, }; use serde::{Deserialize, Serialize}; #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WorkspaceSymbolClientCapabilities { /// This capability supports dynamic registration. #[serde(skip_serializing_if = "Option::is_none")] pub dynamic_registration: Option, /// Specific capabilities for the `SymbolKind` in the `workspace/symbol` request. #[serde(skip_serializing_if = "Option::is_none")] pub symbol_kind: Option, /// The client supports tags on `SymbolInformation`. /// Clients supporting tags have to handle unknown tags gracefully. /// /// @since 3.16.0 #[serde( default, skip_serializing_if = "Option::is_none", deserialize_with = "TagSupport::deserialize_compat" )] pub tag_support: Option>, /// The client support partial workspace symbols. The client will send the /// request `workspaceSymbol/resolve` to the server to resolve additional /// properties. /// /// @since 3.17.0 #[serde(skip_serializing_if = "Option::is_none")] pub resolve_support: Option, } /// The parameters of a Workspace Symbol Request. 
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct WorkspaceSymbolParams { #[serde(flatten)] pub partial_result_params: PartialResultParams, #[serde(flatten)] pub work_done_progress_params: WorkDoneProgressParams, /// A non-empty query string pub query: String, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct WorkspaceSymbolResolveSupportCapability { /// The properties that a client can resolve lazily. Usually /// `location.range` pub properties: Vec, } /// A special workspace symbol that supports locations without a range /// /// @since 3.17.0 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct WorkspaceSymbol { /// The name of this symbol. pub name: String, /// The kind of this symbol. pub kind: SymbolKind, /// Tags for this completion item. #[serde(skip_serializing_if = "Option::is_none")] pub tags: Option>, /// The name of the symbol containing this symbol. This information is for /// user interface purposes (e.g. to render a qualifier in the user interface /// if necessary). It can't be used to re-infer a hierarchy for the document /// symbols. #[serde(skip_serializing_if = "Option::is_none")] pub container_name: Option, /// The location of this symbol. Whether a server is allowed to /// return a location without a range depends on the client /// capability `workspace.symbol.resolveSupport`. /// /// See also `SymbolInformation.location`. pub location: OneOf, /// A data entry field that is preserved on a workspace symbol between a /// workspace symbol request and a workspace symbol resolve request. 
#[serde(skip_serializing_if = "Option::is_none")] pub data: Option, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] pub struct WorkspaceLocation { pub uri: Url, } #[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum WorkspaceSymbolResponse { Flat(Vec), Nested(Vec), } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp/000077500000000000000000000000001500614314100215125ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp/Cargo.toml000066400000000000000000000021051500614314100234400ustar00rootroot00000000000000[package] name = "helix-lsp" description = "LSP client implementation for Helix project" version.workspace = true authors.workspace = true edition.workspace = true license.workspace = true rust-version.workspace = true categories.workspace = true repository.workspace = true homepage.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] helix-stdx = { path = "../helix-stdx" } helix-core = { path = "../helix-core" } helix-loader = { path = "../helix-loader" } helix-lsp-types = { path = "../helix-lsp-types" } anyhow = "1.0" futures-executor = "0.3" futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } globset = "0.4.16" log = "0.4" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" tokio = { version = "1.44", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] } tokio-stream = "0.1.17" parking_lot.workspace = true arc-swap = "1" slotmap.workspace = true thiserror.workspace = true helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp/src/000077500000000000000000000000001500614314100223015ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp/src/client.rs000066400000000000000000001610601500614314100241310ustar00rootroot00000000000000use 
crate::{ file_operations::FileOperationsInterest, find_lsp_workspace, jsonrpc, transport::{Payload, Transport}, Call, Error, LanguageServerId, OffsetEncoding, Result, }; use crate::lsp::{ self, notification::DidChangeWorkspaceFolders, CodeActionCapabilityResolveSupport, DidChangeWorkspaceFoldersParams, OneOf, PositionEncodingKind, SignatureHelp, Url, WorkspaceFolder, WorkspaceFoldersChangeEvent, }; use helix_core::{find_workspace, syntax::LanguageServerFeature, ChangeSet, Rope}; use helix_loader::VERSION_AND_GIT_HASH; use helix_stdx::path; use parking_lot::Mutex; use serde::Deserialize; use serde_json::Value; use std::{collections::HashMap, path::PathBuf}; use std::{ ffi::OsStr, sync::{ atomic::{AtomicU64, Ordering}, Arc, }, }; use std::{future::Future, sync::OnceLock}; use std::{path::Path, process::Stdio}; use tokio::{ io::{BufReader, BufWriter}, process::{Child, Command}, sync::{ mpsc::{channel, UnboundedReceiver, UnboundedSender}, Notify, OnceCell, }, }; fn workspace_for_uri(uri: lsp::Url) -> WorkspaceFolder { lsp::WorkspaceFolder { name: uri .path_segments() .and_then(|mut segments| segments.next_back()) .map(|basename| basename.to_string()) .unwrap_or_default(), uri, } } #[derive(Debug)] pub struct Client { id: LanguageServerId, name: String, _process: Child, server_tx: UnboundedSender, request_counter: AtomicU64, pub(crate) capabilities: OnceCell, pub(crate) file_operation_interest: OnceLock, config: Option, root_path: std::path::PathBuf, root_uri: Option, workspace_folders: Mutex>, initialize_notify: Arc, /// workspace folders added while the server is still initializing req_timeout: u64, } impl Client { pub fn try_add_doc( self: &Arc, root_markers: &[String], manual_roots: &[PathBuf], doc_path: Option<&std::path::PathBuf>, may_support_workspace: bool, ) -> bool { let (workspace, workspace_is_cwd) = find_workspace(); let workspace = path::normalize(workspace); let root = find_lsp_workspace( doc_path .and_then(|x| x.parent().and_then(|x| x.to_str())) 
.unwrap_or("."), root_markers, manual_roots, &workspace, workspace_is_cwd, ); let root_uri = root .as_ref() .and_then(|root| lsp::Url::from_file_path(root).ok()); if self.root_path == root.unwrap_or(workspace) || root_uri.as_ref().is_some_and(|root_uri| { self.workspace_folders .lock() .iter() .any(|workspace| &workspace.uri == root_uri) }) { // workspace URI is already registered so we can use this client return true; } // this server definitely doesn't support multiple workspace, no need to check capabilities if !may_support_workspace { return false; } let Some(capabilities) = self.capabilities.get() else { let client = Arc::clone(self); // initialization hasn't finished yet, deal with this new root later // TODO: In the edgecase that a **new root** is added // for an LSP that **doesn't support workspace_folders** before initaliation is finished // the new roots are ignored. // That particular edgecase would require retroactively spawning new LSP // clients and therefore also require us to retroactively update the corresponding // documents LSP client handle. It's doable but a pretty weird edgecase so let's // wait and see if anyone ever runs into it. 
tokio::spawn(async move { client.initialize_notify.notified().await; if let Some(workspace_folders_caps) = client .capabilities() .workspace .as_ref() .and_then(|cap| cap.workspace_folders.as_ref()) .filter(|cap| cap.supported.unwrap_or(false)) { client.add_workspace_folder( root_uri, workspace_folders_caps.change_notifications.as_ref(), ); } }); return true; }; if let Some(workspace_folders_caps) = capabilities .workspace .as_ref() .and_then(|cap| cap.workspace_folders.as_ref()) .filter(|cap| cap.supported.unwrap_or(false)) { self.add_workspace_folder( root_uri, workspace_folders_caps.change_notifications.as_ref(), ); true } else { // the server doesn't support multi workspaces, we need a new client false } } fn add_workspace_folder( &self, root_uri: Option, change_notifications: Option<&OneOf>, ) { // root_uri is None just means that there isn't really any LSP workspace // associated with this file. For servers that support multiple workspaces // there is just one server so we can always just use that shared instance. 
// No need to add a new workspace root here as there is no logical root for this file // let the server deal with this let Some(root_uri) = root_uri else { return; }; // server supports workspace folders, let's add the new root to the list self.workspace_folders .lock() .push(workspace_for_uri(root_uri.clone())); if Some(&OneOf::Left(false)) == change_notifications { // server specifically opted out of DidWorkspaceChange notifications // let's assume the server will request the workspace folders itself // and that we can therefore reuse the client (but are done now) return; } self.did_change_workspace(vec![workspace_for_uri(root_uri)], Vec::new()) } #[allow(clippy::type_complexity, clippy::too_many_arguments)] pub fn start( cmd: &str, args: &[String], config: Option, server_environment: impl IntoIterator, impl AsRef)>, root_path: PathBuf, root_uri: Option, id: LanguageServerId, name: String, req_timeout: u64, ) -> Result<( Self, UnboundedReceiver<(LanguageServerId, Call)>, Arc, )> { // Resolve path to the binary let cmd = helix_stdx::env::which(cmd)?; let process = Command::new(cmd) .envs(server_environment) .args(args) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) // make sure the process is reaped on drop .kill_on_drop(true) .spawn(); let mut process = process?; // TODO: do we need bufreader/writer here? or do we use async wrappers on unblock? 
let writer = BufWriter::new(process.stdin.take().expect("Failed to open stdin")); let reader = BufReader::new(process.stdout.take().expect("Failed to open stdout")); let stderr = BufReader::new(process.stderr.take().expect("Failed to open stderr")); let (server_rx, server_tx, initialize_notify) = Transport::start(reader, writer, stderr, id, name.clone()); let workspace_folders = root_uri .clone() .map(|root| vec![workspace_for_uri(root)]) .unwrap_or_default(); let client = Self { id, name, _process: process, server_tx, request_counter: AtomicU64::new(0), capabilities: OnceCell::new(), file_operation_interest: OnceLock::new(), config, req_timeout, root_path, root_uri, workspace_folders: Mutex::new(workspace_folders), initialize_notify: initialize_notify.clone(), }; Ok((client, server_rx, initialize_notify)) } pub fn name(&self) -> &str { &self.name } pub fn id(&self) -> LanguageServerId { self.id } fn next_request_id(&self) -> jsonrpc::Id { let id = self.request_counter.fetch_add(1, Ordering::Relaxed); jsonrpc::Id::Num(id) } fn value_into_params(value: Value) -> jsonrpc::Params { use jsonrpc::Params; match value { Value::Null => Params::None, Value::Bool(_) | Value::Number(_) | Value::String(_) => Params::Array(vec![value]), Value::Array(vec) => Params::Array(vec), Value::Object(map) => Params::Map(map), } } pub fn is_initialized(&self) -> bool { self.capabilities.get().is_some() } pub fn capabilities(&self) -> &lsp::ServerCapabilities { self.capabilities .get() .expect("language server not yet initialized!") } pub(crate) fn file_operations_intests(&self) -> &FileOperationsInterest { self.file_operation_interest .get_or_init(|| FileOperationsInterest::new(self.capabilities())) } /// Client has to be initialized otherwise this function panics #[inline] pub fn supports_feature(&self, feature: LanguageServerFeature) -> bool { let capabilities = self.capabilities(); use lsp::*; match feature { LanguageServerFeature::Format => matches!( 
capabilities.document_formatting_provider, Some(OneOf::Left(true) | OneOf::Right(_)) ), LanguageServerFeature::GotoDeclaration => matches!( capabilities.declaration_provider, Some( DeclarationCapability::Simple(true) | DeclarationCapability::RegistrationOptions(_) | DeclarationCapability::Options(_), ) ), LanguageServerFeature::GotoDefinition => matches!( capabilities.definition_provider, Some(OneOf::Left(true) | OneOf::Right(_)) ), LanguageServerFeature::GotoTypeDefinition => matches!( capabilities.type_definition_provider, Some( TypeDefinitionProviderCapability::Simple(true) | TypeDefinitionProviderCapability::Options(_), ) ), LanguageServerFeature::GotoReference => matches!( capabilities.references_provider, Some(OneOf::Left(true) | OneOf::Right(_)) ), LanguageServerFeature::GotoImplementation => matches!( capabilities.implementation_provider, Some( ImplementationProviderCapability::Simple(true) | ImplementationProviderCapability::Options(_), ) ), LanguageServerFeature::SignatureHelp => capabilities.signature_help_provider.is_some(), LanguageServerFeature::Hover => matches!( capabilities.hover_provider, Some(HoverProviderCapability::Simple(true) | HoverProviderCapability::Options(_),) ), LanguageServerFeature::DocumentHighlight => matches!( capabilities.document_highlight_provider, Some(OneOf::Left(true) | OneOf::Right(_)) ), LanguageServerFeature::Completion => capabilities.completion_provider.is_some(), LanguageServerFeature::CodeAction => matches!( capabilities.code_action_provider, Some( CodeActionProviderCapability::Simple(true) | CodeActionProviderCapability::Options(_), ) ), LanguageServerFeature::WorkspaceCommand => { capabilities.execute_command_provider.is_some() } LanguageServerFeature::DocumentSymbols => matches!( capabilities.document_symbol_provider, Some(OneOf::Left(true) | OneOf::Right(_)) ), LanguageServerFeature::WorkspaceSymbols => matches!( capabilities.workspace_symbol_provider, Some(OneOf::Left(true) | OneOf::Right(_)) ), 
LanguageServerFeature::Diagnostics => true, // there's no extra server capability LanguageServerFeature::RenameSymbol => matches!( capabilities.rename_provider, Some(OneOf::Left(true)) | Some(OneOf::Right(_)) ), LanguageServerFeature::InlayHints => matches!( capabilities.inlay_hint_provider, Some(OneOf::Left(true) | OneOf::Right(InlayHintServerCapabilities::Options(_))) ), LanguageServerFeature::DocumentColors => matches!( capabilities.color_provider, Some( ColorProviderCapability::Simple(true) | ColorProviderCapability::ColorProvider(_) | ColorProviderCapability::Options(_) ) ), } } pub fn offset_encoding(&self) -> OffsetEncoding { self.capabilities() .position_encoding .as_ref() .and_then(|encoding| match encoding.as_str() { "utf-8" => Some(OffsetEncoding::Utf8), "utf-16" => Some(OffsetEncoding::Utf16), "utf-32" => Some(OffsetEncoding::Utf32), encoding => { log::error!("Server provided invalid position encoding {encoding}, defaulting to utf-16"); None }, }) .unwrap_or_default() } pub fn config(&self) -> Option<&Value> { self.config.as_ref() } pub async fn workspace_folders( &self, ) -> parking_lot::MutexGuard<'_, Vec> { self.workspace_folders.lock() } /// Execute a RPC request on the language server. fn call( &self, params: R::Params, ) -> impl Future> where R::Params: serde::Serialize, { self.call_with_ref::(¶ms) } fn call_with_ref( &self, params: &R::Params, ) -> impl Future> where R::Params: serde::Serialize, { self.call_with_timeout::(params, self.req_timeout) } fn call_with_timeout( &self, params: &R::Params, timeout_secs: u64, ) -> impl Future> where R::Params: serde::Serialize, { let server_tx = self.server_tx.clone(); let id = self.next_request_id(); // It's important that this is not part of the future so that it gets executed right away // and the request order stays consistent. 
let rx = serde_json::to_value(params) .map_err(Error::from) .and_then(|params| { let request = jsonrpc::MethodCall { jsonrpc: Some(jsonrpc::Version::V2), id: id.clone(), method: R::METHOD.to_string(), params: Self::value_into_params(params), }; let (tx, rx) = channel::>(1); server_tx .send(Payload::Request { chan: tx, value: request, }) .map_err(|e| Error::Other(e.into()))?; Ok(rx) }); async move { use std::time::Duration; use tokio::time::timeout; // TODO: delay other calls until initialize success timeout(Duration::from_secs(timeout_secs), rx?.recv()) .await .map_err(|_| Error::Timeout(id))? // return Timeout .ok_or(Error::StreamClosed)? .and_then(|value| serde_json::from_value(value).map_err(Into::into)) } } /// Send a RPC notification to the language server. pub fn notify(&self, params: R::Params) where R::Params: serde::Serialize, { let server_tx = self.server_tx.clone(); let params = match serde_json::to_value(params) { Ok(params) => params, Err(err) => { log::error!( "Failed to serialize params for notification '{}' for server '{}': {err}", R::METHOD, self.name, ); return; } }; let notification = jsonrpc::Notification { jsonrpc: Some(jsonrpc::Version::V2), method: R::METHOD.to_string(), params: Self::value_into_params(params), }; if let Err(err) = server_tx.send(Payload::Notification(notification)) { log::error!( "Failed to send notification '{}' to server '{}': {err}", R::METHOD, self.name ); } } /// Reply to a language server RPC call. 
pub fn reply( &self, id: jsonrpc::Id, result: core::result::Result, ) -> Result<()> { use jsonrpc::{Failure, Output, Success, Version}; let server_tx = self.server_tx.clone(); let output = match result { Ok(result) => Output::Success(Success { jsonrpc: Some(Version::V2), id, result, }), Err(error) => Output::Failure(Failure { jsonrpc: Some(Version::V2), id, error, }), }; server_tx .send(Payload::Response(output)) .map_err(|e| Error::Other(e.into()))?; Ok(()) } // ------------------------------------------------------------------------------------------- // General messages // ------------------------------------------------------------------------------------------- pub(crate) async fn initialize(&self, enable_snippets: bool) -> Result { if let Some(config) = &self.config { log::info!("Using custom LSP config: {}", config); } #[allow(deprecated)] let params = lsp::InitializeParams { process_id: Some(std::process::id()), workspace_folders: Some(self.workspace_folders.lock().clone()), // root_path is obsolete, but some clients like pyright still use it so we specify both. 
// clients will prefer _uri if possible root_path: self.root_path.to_str().map(|path| path.to_owned()), root_uri: self.root_uri.clone(), initialization_options: self.config.clone(), capabilities: lsp::ClientCapabilities { workspace: Some(lsp::WorkspaceClientCapabilities { configuration: Some(true), did_change_configuration: Some(lsp::DynamicRegistrationClientCapabilities { dynamic_registration: Some(false), }), workspace_folders: Some(true), apply_edit: Some(true), symbol: Some(lsp::WorkspaceSymbolClientCapabilities { dynamic_registration: Some(false), ..Default::default() }), execute_command: Some(lsp::DynamicRegistrationClientCapabilities { dynamic_registration: Some(false), }), inlay_hint: Some(lsp::InlayHintWorkspaceClientCapabilities { refresh_support: Some(false), }), workspace_edit: Some(lsp::WorkspaceEditClientCapabilities { document_changes: Some(true), resource_operations: Some(vec![ lsp::ResourceOperationKind::Create, lsp::ResourceOperationKind::Rename, lsp::ResourceOperationKind::Delete, ]), failure_handling: Some(lsp::FailureHandlingKind::Abort), normalizes_line_endings: Some(false), change_annotation_support: None, }), did_change_watched_files: Some(lsp::DidChangeWatchedFilesClientCapabilities { dynamic_registration: Some(true), relative_pattern_support: Some(false), }), file_operations: Some(lsp::WorkspaceFileOperationsClientCapabilities { will_rename: Some(true), did_rename: Some(true), ..Default::default() }), ..Default::default() }), text_document: Some(lsp::TextDocumentClientCapabilities { completion: Some(lsp::CompletionClientCapabilities { completion_item: Some(lsp::CompletionItemCapability { snippet_support: Some(enable_snippets), resolve_support: Some(lsp::CompletionItemCapabilityResolveSupport { properties: vec![ String::from("documentation"), String::from("detail"), String::from("additionalTextEdits"), ], }), insert_replace_support: Some(true), deprecated_support: Some(true), tag_support: Some(lsp::TagSupport { value_set: 
vec![lsp::CompletionItemTag::DEPRECATED], }), ..Default::default() }), completion_item_kind: Some(lsp::CompletionItemKindCapability { ..Default::default() }), context_support: None, // additional context information Some(true) ..Default::default() }), hover: Some(lsp::HoverClientCapabilities { // if not specified, rust-analyzer returns plaintext marked as markdown but // badly formatted. content_format: Some(vec![lsp::MarkupKind::Markdown]), ..Default::default() }), signature_help: Some(lsp::SignatureHelpClientCapabilities { signature_information: Some(lsp::SignatureInformationSettings { documentation_format: Some(vec![lsp::MarkupKind::Markdown]), parameter_information: Some(lsp::ParameterInformationSettings { label_offset_support: Some(true), }), active_parameter_support: Some(true), }), ..Default::default() }), rename: Some(lsp::RenameClientCapabilities { dynamic_registration: Some(false), prepare_support: Some(true), prepare_support_default_behavior: None, honors_change_annotations: Some(false), }), formatting: Some(lsp::DocumentFormattingClientCapabilities { dynamic_registration: Some(false), }), code_action: Some(lsp::CodeActionClientCapabilities { code_action_literal_support: Some(lsp::CodeActionLiteralSupport { code_action_kind: lsp::CodeActionKindLiteralSupport { value_set: [ lsp::CodeActionKind::EMPTY, lsp::CodeActionKind::QUICKFIX, lsp::CodeActionKind::REFACTOR, lsp::CodeActionKind::REFACTOR_EXTRACT, lsp::CodeActionKind::REFACTOR_INLINE, lsp::CodeActionKind::REFACTOR_REWRITE, lsp::CodeActionKind::SOURCE, lsp::CodeActionKind::SOURCE_ORGANIZE_IMPORTS, ] .iter() .map(|kind| kind.as_str().to_string()) .collect(), }, }), is_preferred_support: Some(true), disabled_support: Some(true), data_support: Some(true), resolve_support: Some(CodeActionCapabilityResolveSupport { properties: vec!["edit".to_owned(), "command".to_owned()], }), ..Default::default() }), publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities { version_support: Some(true), 
tag_support: Some(lsp::TagSupport { value_set: vec![ lsp::DiagnosticTag::UNNECESSARY, lsp::DiagnosticTag::DEPRECATED, ], }), ..Default::default() }), inlay_hint: Some(lsp::InlayHintClientCapabilities { dynamic_registration: Some(false), resolve_support: None, }), ..Default::default() }), window: Some(lsp::WindowClientCapabilities { work_done_progress: Some(true), ..Default::default() }), general: Some(lsp::GeneralClientCapabilities { position_encodings: Some(vec![ PositionEncodingKind::UTF8, PositionEncodingKind::UTF32, PositionEncodingKind::UTF16, ]), ..Default::default() }), ..Default::default() }, trace: None, client_info: Some(lsp::ClientInfo { name: String::from("helix"), version: Some(String::from(VERSION_AND_GIT_HASH)), }), locale: None, // TODO work_done_progress_params: lsp::WorkDoneProgressParams::default(), }; self.call::(params).await } pub async fn shutdown(&self) -> Result<()> { self.call::(()).await } pub fn exit(&self) { self.notify::(()) } /// Tries to shut down the language server but returns /// early if server responds with an error. pub async fn shutdown_and_exit(&self) -> Result<()> { self.shutdown().await?; self.exit(); Ok(()) } /// Forcefully shuts down the language server ignoring any errors. 
pub async fn force_shutdown(&self) -> Result<()> { if let Err(e) = self.shutdown().await { log::warn!("language server failed to terminate gracefully - {}", e); } self.exit(); Ok(()) } // ------------------------------------------------------------------------------------------- // Workspace // ------------------------------------------------------------------------------------------- pub fn did_change_configuration(&self, settings: Value) { self.notify::( lsp::DidChangeConfigurationParams { settings }, ) } pub fn did_change_workspace(&self, added: Vec, removed: Vec) { self.notify::(DidChangeWorkspaceFoldersParams { event: WorkspaceFoldersChangeEvent { added, removed }, }) } pub fn will_rename( &self, old_path: &Path, new_path: &Path, is_dir: bool, ) -> Option>>> { let capabilities = self.file_operations_intests(); if !capabilities.will_rename.has_interest(old_path, is_dir) { return None; } let url_from_path = |path| { let url = if is_dir { Url::from_directory_path(path) } else { Url::from_file_path(path) }; Some(url.ok()?.to_string()) }; let files = vec![lsp::FileRename { old_uri: url_from_path(old_path)?, new_uri: url_from_path(new_path)?, }]; Some(self.call_with_timeout::( &lsp::RenameFilesParams { files }, 5, )) } pub fn did_rename(&self, old_path: &Path, new_path: &Path, is_dir: bool) -> Option<()> { let capabilities = self.file_operations_intests(); if !capabilities.did_rename.has_interest(new_path, is_dir) { return None; } let url_from_path = |path| { let url = if is_dir { Url::from_directory_path(path) } else { Url::from_file_path(path) }; Some(url.ok()?.to_string()) }; let files = vec![lsp::FileRename { old_uri: url_from_path(old_path)?, new_uri: url_from_path(new_path)?, }]; self.notify::(lsp::RenameFilesParams { files }); Some(()) } // ------------------------------------------------------------------------------------------- // Text document // ------------------------------------------------------------------------------------------- pub fn 
text_document_did_open( &self, uri: lsp::Url, version: i32, doc: &Rope, language_id: String, ) { self.notify::(lsp::DidOpenTextDocumentParams { text_document: lsp::TextDocumentItem { uri, language_id, version, text: String::from(doc), }, }) } pub fn changeset_to_changes( old_text: &Rope, new_text: &Rope, changeset: &ChangeSet, offset_encoding: OffsetEncoding, ) -> Vec { let mut iter = changeset.changes().iter().peekable(); let mut old_pos = 0; let mut new_pos = 0; let mut changes = Vec::new(); use crate::util::pos_to_lsp_pos; use helix_core::Operation::*; // this is dumb. TextEdit describes changes to the initial doc (concurrent), but // TextDocumentContentChangeEvent describes a series of changes (sequential). // So S -> S1 -> S2, meaning positioning depends on the previous edits. // // Calculation is therefore a bunch trickier. use helix_core::RopeSlice; fn traverse( pos: lsp::Position, text: RopeSlice, offset_encoding: OffsetEncoding, ) -> lsp::Position { let lsp::Position { mut line, mut character, } = pos; let mut chars = text.chars().peekable(); while let Some(ch) = chars.next() { // LSP only considers \n, \r or \r\n as line endings if ch == '\n' || ch == '\r' { // consume a \r\n if ch == '\r' && chars.peek() == Some(&'\n') { chars.next(); } line += 1; character = 0; } else { character += match offset_encoding { OffsetEncoding::Utf8 => ch.len_utf8() as u32, OffsetEncoding::Utf16 => ch.len_utf16() as u32, OffsetEncoding::Utf32 => 1, }; } } lsp::Position { line, character } } let old_text = old_text.slice(..); while let Some(change) = iter.next() { let len = match change { Delete(i) | Retain(i) => *i, Insert(_) => 0, }; let mut old_end = old_pos + len; match change { Retain(i) => { new_pos += i; } Delete(_) => { let start = pos_to_lsp_pos(new_text, new_pos, offset_encoding); let end = traverse(start, old_text.slice(old_pos..old_end), offset_encoding); // deletion changes.push(lsp::TextDocumentContentChangeEvent { range: Some(lsp::Range::new(start, end)), text: 
"".to_string(), range_length: None, }); } Insert(s) => { let start = pos_to_lsp_pos(new_text, new_pos, offset_encoding); new_pos += s.chars().count(); // a subsequent delete means a replace, consume it let end = if let Some(Delete(len)) = iter.peek() { old_end = old_pos + len; let end = traverse(start, old_text.slice(old_pos..old_end), offset_encoding); iter.next(); // replacement end } else { // insert start }; changes.push(lsp::TextDocumentContentChangeEvent { range: Some(lsp::Range::new(start, end)), text: s.to_string(), range_length: None, }); } } old_pos = old_end; } changes } pub fn text_document_did_change( &self, text_document: lsp::VersionedTextDocumentIdentifier, old_text: &Rope, new_text: &Rope, changes: &ChangeSet, ) -> Option<()> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support document sync. let sync_capabilities = match capabilities.text_document_sync { Some( lsp::TextDocumentSyncCapability::Kind(kind) | lsp::TextDocumentSyncCapability::Options(lsp::TextDocumentSyncOptions { change: Some(kind), .. 
}), ) => kind, // None | SyncOptions { changes: None } _ => return None, }; let changes = match sync_capabilities { lsp::TextDocumentSyncKind::FULL => { vec![lsp::TextDocumentContentChangeEvent { // range = None -> whole document range: None, //Some(Range) range_length: None, // u64 apparently deprecated text: new_text.to_string(), }] } lsp::TextDocumentSyncKind::INCREMENTAL => { Self::changeset_to_changes(old_text, new_text, changes, self.offset_encoding()) } lsp::TextDocumentSyncKind::NONE => return None, kind => unimplemented!("{:?}", kind), }; self.notify::(lsp::DidChangeTextDocumentParams { text_document, content_changes: changes, }); Some(()) } pub fn text_document_did_close(&self, text_document: lsp::TextDocumentIdentifier) { self.notify::(lsp::DidCloseTextDocumentParams { text_document, }) } // will_save / will_save_wait_until pub fn text_document_did_save( &self, text_document: lsp::TextDocumentIdentifier, text: &Rope, ) -> Option<()> { let capabilities = self.capabilities.get().unwrap(); let include_text = match &capabilities.text_document_sync.as_ref()? { lsp::TextDocumentSyncCapability::Options(lsp::TextDocumentSyncOptions { save: options, .. }) => match options.as_ref()? { lsp::TextDocumentSyncSaveOptions::Supported(true) => false, lsp::TextDocumentSyncSaveOptions::SaveOptions(lsp::SaveOptions { include_text, }) => include_text.unwrap_or(false), lsp::TextDocumentSyncSaveOptions::Supported(false) => return None, }, // see: https://github.com/microsoft/language-server-protocol/issues/288 lsp::TextDocumentSyncCapability::Kind(..) => false, }; self.notify::(lsp::DidSaveTextDocumentParams { text_document, text: include_text.then_some(text.into()), }); Some(()) } pub fn completion( &self, text_document: lsp::TextDocumentIdentifier, position: lsp::Position, work_done_token: Option, context: lsp::CompletionContext, ) -> Option>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support completion. 
capabilities.completion_provider.as_ref()?; let params = lsp::CompletionParams { text_document_position: lsp::TextDocumentPositionParams { text_document, position, }, context: Some(context), // TODO: support these tokens by async receiving and updating the choice list work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token }, partial_result_params: lsp::PartialResultParams { partial_result_token: None, }, }; Some(self.call::(params)) } pub fn resolve_completion_item( &self, completion_item: &lsp::CompletionItem, ) -> impl Future> { self.call_with_ref::(completion_item) } pub fn resolve_code_action( &self, code_action: &lsp::CodeAction, ) -> Option>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support resolving code actions. match capabilities.code_action_provider { Some(lsp::CodeActionProviderCapability::Options(lsp::CodeActionOptions { resolve_provider: Some(true), .. })) => (), _ => return None, } Some(self.call_with_ref::(code_action)) } pub fn text_document_signature_help( &self, text_document: lsp::TextDocumentIdentifier, position: lsp::Position, work_done_token: Option, ) -> Option>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support signature help. 
capabilities.signature_help_provider.as_ref()?; let params = lsp::SignatureHelpParams { text_document_position_params: lsp::TextDocumentPositionParams { text_document, position, }, work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token }, context: None, // lsp::SignatureHelpContext }; Some(self.call::(params)) } pub fn text_document_range_inlay_hints( &self, text_document: lsp::TextDocumentIdentifier, range: lsp::Range, work_done_token: Option, ) -> Option>>>> { let capabilities = self.capabilities.get().unwrap(); match capabilities.inlay_hint_provider { Some( lsp::OneOf::Left(true) | lsp::OneOf::Right(lsp::InlayHintServerCapabilities::Options(_)), ) => (), _ => return None, } let params = lsp::InlayHintParams { text_document, range, work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token }, }; Some(self.call::(params)) } pub fn text_document_document_color( &self, text_document: lsp::TextDocumentIdentifier, work_done_token: Option, ) -> Option>>> { self.capabilities.get().unwrap().color_provider.as_ref()?; let params = lsp::DocumentColorParams { text_document, work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token: work_done_token.clone(), }, partial_result_params: helix_lsp_types::PartialResultParams { partial_result_token: work_done_token, }, }; Some(self.call::(params)) } pub fn text_document_hover( &self, text_document: lsp::TextDocumentIdentifier, position: lsp::Position, work_done_token: Option, ) -> Option>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support hover. 
match capabilities.hover_provider { Some( lsp::HoverProviderCapability::Simple(true) | lsp::HoverProviderCapability::Options(_), ) => (), _ => return None, } let params = lsp::HoverParams { text_document_position_params: lsp::TextDocumentPositionParams { text_document, position, }, work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token }, // lsp::SignatureHelpContext }; Some(self.call::(params)) } // formatting pub fn text_document_formatting( &self, text_document: lsp::TextDocumentIdentifier, options: lsp::FormattingOptions, work_done_token: Option, ) -> Option>>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support formatting. match capabilities.document_formatting_provider { Some(lsp::OneOf::Left(true) | lsp::OneOf::Right(_)) => (), _ => return None, }; // merge FormattingOptions with 'config.format' let config_format = self .config .as_ref() .and_then(|cfg| cfg.get("format")) .and_then(|fmt| HashMap::::deserialize(fmt).ok()); let options = if let Some(mut properties) = config_format { // passed in options take precedence over 'config.format' properties.extend(options.properties); lsp::FormattingOptions { properties, ..options } } else { options }; let params = lsp::DocumentFormattingParams { text_document, options, work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token }, }; Some(self.call::(params)) } pub fn text_document_range_formatting( &self, text_document: lsp::TextDocumentIdentifier, range: lsp::Range, options: lsp::FormattingOptions, work_done_token: Option, ) -> Option>>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support range formatting. 
match capabilities.document_range_formatting_provider { Some(lsp::OneOf::Left(true) | lsp::OneOf::Right(_)) => (), _ => return None, }; let params = lsp::DocumentRangeFormattingParams { text_document, range, options, work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token }, }; Some(self.call::(params)) } pub fn text_document_document_highlight( &self, text_document: lsp::TextDocumentIdentifier, position: lsp::Position, work_done_token: Option, ) -> Option>>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support document highlight. match capabilities.document_highlight_provider { Some(lsp::OneOf::Left(true) | lsp::OneOf::Right(_)) => (), _ => return None, } let params = lsp::DocumentHighlightParams { text_document_position_params: lsp::TextDocumentPositionParams { text_document, position, }, work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token }, partial_result_params: lsp::PartialResultParams { partial_result_token: None, }, }; Some(self.call::(params)) } fn goto_request< T: lsp::request::Request< Params = lsp::GotoDefinitionParams, Result = Option, >, >( &self, text_document: lsp::TextDocumentIdentifier, position: lsp::Position, work_done_token: Option, ) -> impl Future> { let params = lsp::GotoDefinitionParams { text_document_position_params: lsp::TextDocumentPositionParams { text_document, position, }, work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token }, partial_result_params: lsp::PartialResultParams { partial_result_token: None, }, }; self.call::(params) } pub fn goto_definition( &self, text_document: lsp::TextDocumentIdentifier, position: lsp::Position, work_done_token: Option, ) -> Option>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support goto-definition. 
match capabilities.definition_provider { Some(lsp::OneOf::Left(true) | lsp::OneOf::Right(_)) => (), _ => return None, } Some(self.goto_request::( text_document, position, work_done_token, )) } pub fn goto_declaration( &self, text_document: lsp::TextDocumentIdentifier, position: lsp::Position, work_done_token: Option, ) -> Option>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support goto-declaration. match capabilities.declaration_provider { Some( lsp::DeclarationCapability::Simple(true) | lsp::DeclarationCapability::RegistrationOptions(_) | lsp::DeclarationCapability::Options(_), ) => (), _ => return None, } Some(self.goto_request::( text_document, position, work_done_token, )) } pub fn goto_type_definition( &self, text_document: lsp::TextDocumentIdentifier, position: lsp::Position, work_done_token: Option, ) -> Option>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support goto-type-definition. match capabilities.type_definition_provider { Some( lsp::TypeDefinitionProviderCapability::Simple(true) | lsp::TypeDefinitionProviderCapability::Options(_), ) => (), _ => return None, } Some(self.goto_request::( text_document, position, work_done_token, )) } pub fn goto_implementation( &self, text_document: lsp::TextDocumentIdentifier, position: lsp::Position, work_done_token: Option, ) -> Option>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support goto-definition. 
match capabilities.implementation_provider { Some( lsp::ImplementationProviderCapability::Simple(true) | lsp::ImplementationProviderCapability::Options(_), ) => (), _ => return None, } Some(self.goto_request::( text_document, position, work_done_token, )) } pub fn goto_reference( &self, text_document: lsp::TextDocumentIdentifier, position: lsp::Position, include_declaration: bool, work_done_token: Option, ) -> Option>>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support goto-reference. match capabilities.references_provider { Some(lsp::OneOf::Left(true) | lsp::OneOf::Right(_)) => (), _ => return None, } let params = lsp::ReferenceParams { text_document_position: lsp::TextDocumentPositionParams { text_document, position, }, context: lsp::ReferenceContext { include_declaration, }, work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token }, partial_result_params: lsp::PartialResultParams { partial_result_token: None, }, }; Some(self.call::(params)) } pub fn document_symbols( &self, text_document: lsp::TextDocumentIdentifier, ) -> Option>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support document symbols. match capabilities.document_symbol_provider { Some(lsp::OneOf::Left(true) | lsp::OneOf::Right(_)) => (), _ => return None, } let params = lsp::DocumentSymbolParams { text_document, work_done_progress_params: lsp::WorkDoneProgressParams::default(), partial_result_params: lsp::PartialResultParams::default(), }; Some(self.call::(params)) } pub fn prepare_rename( &self, text_document: lsp::TextDocumentIdentifier, position: lsp::Position, ) -> Option>>> { let capabilities = self.capabilities.get().unwrap(); match capabilities.rename_provider { Some(lsp::OneOf::Right(lsp::RenameOptions { prepare_provider: Some(true), .. 
})) => (), _ => return None, } let params = lsp::TextDocumentPositionParams { text_document, position, }; Some(self.call::(params)) } // empty string to get all symbols pub fn workspace_symbols( &self, query: String, ) -> Option>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support workspace symbols. match capabilities.workspace_symbol_provider { Some(lsp::OneOf::Left(true) | lsp::OneOf::Right(_)) => (), _ => return None, } let params = lsp::WorkspaceSymbolParams { query, work_done_progress_params: lsp::WorkDoneProgressParams::default(), partial_result_params: lsp::PartialResultParams::default(), }; Some(self.call::(params)) } pub fn code_actions( &self, text_document: lsp::TextDocumentIdentifier, range: lsp::Range, context: lsp::CodeActionContext, ) -> Option>>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support code actions. match capabilities.code_action_provider { Some( lsp::CodeActionProviderCapability::Simple(true) | lsp::CodeActionProviderCapability::Options(_), ) => (), _ => return None, } let params = lsp::CodeActionParams { text_document, range, context, work_done_progress_params: lsp::WorkDoneProgressParams::default(), partial_result_params: lsp::PartialResultParams::default(), }; Some(self.call::(params)) } pub fn rename_symbol( &self, text_document: lsp::TextDocumentIdentifier, position: lsp::Position, new_name: String, ) -> Option>>> { if !self.supports_feature(LanguageServerFeature::RenameSymbol) { return None; } let params = lsp::RenameParams { text_document_position: lsp::TextDocumentPositionParams { text_document, position, }, new_name, work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token: None, }, }; Some(self.call::(params)) } pub fn command( &self, command: lsp::Command, ) -> Option>>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the language server does not support executing commands. 
capabilities.execute_command_provider.as_ref()?; let params = lsp::ExecuteCommandParams { command: command.command, arguments: command.arguments.unwrap_or_default(), work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token: None, }, }; Some(self.call::(params)) } pub fn did_change_watched_files(&self, changes: Vec) { self.notify::(lsp::DidChangeWatchedFilesParams { changes, }) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp/src/file_event.rs000066400000000000000000000151471500614314100247770ustar00rootroot00000000000000use std::{collections::HashMap, path::PathBuf, sync::Weak}; use globset::{GlobBuilder, GlobSetBuilder}; use tokio::sync::mpsc; use crate::{lsp, Client, LanguageServerId}; enum Event { FileChanged { path: PathBuf, }, Register { client_id: LanguageServerId, client: Weak, registration_id: String, options: lsp::DidChangeWatchedFilesRegistrationOptions, }, Unregister { client_id: LanguageServerId, registration_id: String, }, RemoveClient { client_id: LanguageServerId, }, } #[derive(Default)] struct ClientState { client: Weak, registered: HashMap, } /// The Handler uses a dedicated tokio task to respond to file change events by /// forwarding changes to LSPs that have registered for notifications with a /// matching glob. /// /// When an LSP registers for the DidChangeWatchedFiles notification, the /// Handler is notified by sending the registration details in addition to a /// weak reference to the LSP client. This is done so that the Handler can have /// access to the client without preventing the client from being dropped if it /// is closed and the Handler isn't properly notified. 
#[derive(Clone, Debug)] pub struct Handler { tx: mpsc::UnboundedSender, } impl Default for Handler { fn default() -> Self { Self::new() } } impl Handler { pub fn new() -> Self { let (tx, rx) = mpsc::unbounded_channel(); tokio::spawn(Self::run(rx)); Self { tx } } pub fn register( &self, client_id: LanguageServerId, client: Weak, registration_id: String, options: lsp::DidChangeWatchedFilesRegistrationOptions, ) { let _ = self.tx.send(Event::Register { client_id, client, registration_id, options, }); } pub fn unregister(&self, client_id: LanguageServerId, registration_id: String) { let _ = self.tx.send(Event::Unregister { client_id, registration_id, }); } pub fn file_changed(&self, path: PathBuf) { let _ = self.tx.send(Event::FileChanged { path }); } pub fn remove_client(&self, client_id: LanguageServerId) { let _ = self.tx.send(Event::RemoveClient { client_id }); } async fn run(mut rx: mpsc::UnboundedReceiver) { let mut state: HashMap = HashMap::new(); while let Some(event) = rx.recv().await { match event { Event::FileChanged { path } => { log::debug!("Received file event for {:?}", &path); state.retain(|id, client_state| { if !client_state .registered .values() .any(|glob| glob.is_match(&path)) { return true; } let Some(client) = client_state.client.upgrade() else { log::warn!("LSP client was dropped: {id}"); return false; }; let Ok(uri) = lsp::Url::from_file_path(&path) else { return true; }; log::debug!( "Sending didChangeWatchedFiles notification to client '{}'", client.name() ); client.did_change_watched_files(vec![lsp::FileEvent { uri, // We currently always send the CHANGED state // since we don't actually have more context at // the moment. 
typ: lsp::FileChangeType::CHANGED, }]); true }); } Event::Register { client_id, client, registration_id, options: ops, } => { log::debug!( "Registering didChangeWatchedFiles for client '{}' with id '{}'", client_id, registration_id ); let entry = state.entry(client_id).or_default(); entry.client = client; let mut builder = GlobSetBuilder::new(); for watcher in ops.watchers { if let lsp::GlobPattern::String(pattern) = watcher.glob_pattern { if let Ok(glob) = GlobBuilder::new(&pattern).build() { builder.add(glob); } } } match builder.build() { Ok(globset) => { entry.registered.insert(registration_id, globset); } Err(err) => { // Remove any old state for that registration id and // remove the entire client if it's now empty. entry.registered.remove(®istration_id); if entry.registered.is_empty() { state.remove(&client_id); } log::warn!( "Unable to build globset for LSP didChangeWatchedFiles {err}" ) } } } Event::Unregister { client_id, registration_id, } => { log::debug!( "Unregistering didChangeWatchedFiles with id '{}' for client '{}'", registration_id, client_id ); if let Some(client_state) = state.get_mut(&client_id) { client_state.registered.remove(®istration_id); if client_state.registered.is_empty() { state.remove(&client_id); } } } Event::RemoveClient { client_id } => { log::debug!("Removing LSP client: {client_id}"); state.remove(&client_id); } } } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp/src/file_operations.rs000066400000000000000000000066211500614314100260360ustar00rootroot00000000000000use std::path::Path; use globset::{GlobBuilder, GlobSet}; use crate::lsp; #[derive(Default, Debug)] pub(crate) struct FileOperationFilter { dir_globs: GlobSet, file_globs: GlobSet, } impl FileOperationFilter { fn new(capability: Option<&lsp::FileOperationRegistrationOptions>) -> FileOperationFilter { let Some(cap) = capability else { return FileOperationFilter::default(); }; let mut dir_globs = GlobSet::builder(); let mut file_globs = GlobSet::builder(); 
for filter in &cap.filters { // TODO: support other url schemes let is_non_file_schema = filter .scheme .as_ref() .is_some_and(|schema| schema != "file"); if is_non_file_schema { continue; } let ignore_case = filter .pattern .options .as_ref() .and_then(|opts| opts.ignore_case) .unwrap_or(false); let mut glob_builder = GlobBuilder::new(&filter.pattern.glob); glob_builder.case_insensitive(!ignore_case); let glob = match glob_builder.build() { Ok(glob) => glob, Err(err) => { log::error!("invalid glob send by LS: {err}"); continue; } }; match filter.pattern.matches { Some(lsp::FileOperationPatternKind::File) => { file_globs.add(glob); } Some(lsp::FileOperationPatternKind::Folder) => { dir_globs.add(glob); } None => { file_globs.add(glob.clone()); dir_globs.add(glob); } }; } let file_globs = file_globs.build().unwrap_or_else(|err| { log::error!("invalid globs send by LS: {err}"); GlobSet::empty() }); let dir_globs = dir_globs.build().unwrap_or_else(|err| { log::error!("invalid globs send by LS: {err}"); GlobSet::empty() }); FileOperationFilter { dir_globs, file_globs, } } pub(crate) fn has_interest(&self, path: &Path, is_dir: bool) -> bool { if is_dir { self.dir_globs.is_match(path) } else { self.file_globs.is_match(path) } } } #[derive(Default, Debug)] pub(crate) struct FileOperationsInterest { // TODO: support other notifications // did_create: FileOperationFilter, // will_create: FileOperationFilter, pub did_rename: FileOperationFilter, pub will_rename: FileOperationFilter, // did_delete: FileOperationFilter, // will_delete: FileOperationFilter, } impl FileOperationsInterest { pub fn new(capabilities: &lsp::ServerCapabilities) -> FileOperationsInterest { let capabilities = capabilities .workspace .as_ref() .and_then(|capabilities| capabilities.file_operations.as_ref()); let Some(capabilities) = capabilities else { return FileOperationsInterest::default(); }; FileOperationsInterest { did_rename: FileOperationFilter::new(capabilities.did_rename.as_ref()), will_rename: 
FileOperationFilter::new(capabilities.will_rename.as_ref()), } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp/src/jsonrpc.rs000066400000000000000000000271401500614314100243310ustar00rootroot00000000000000//! An implementation of the JSONRPC 2.0 spec types // Upstream implementation: https://github.com/paritytech/jsonrpc/tree/38af3c9439aa75481805edf6c05c6622a5ab1e70/core/src/types // Changes from upstream: // * unused functions (almost all non-trait-implementation functions) have been removed // * `#[serde(deny_unknown_fields)]` annotations have been removed on response types // for compatibility with non-strict language server implementations like Ruby Sorbet // (see https://github.com/helix-editor/helix/issues/2786) // * some variable names have been lengthened for readability use serde::de::{self, DeserializeOwned, Visitor}; use serde::{Deserialize, Serialize}; use serde_json::Value; // https://www.jsonrpc.org/specification#error_object #[derive(Debug, PartialEq, Eq, Clone)] pub enum ErrorCode { ParseError, InvalidRequest, MethodNotFound, InvalidParams, InternalError, ServerError(i64), } impl ErrorCode { pub fn code(&self) -> i64 { match *self { ErrorCode::ParseError => -32700, ErrorCode::InvalidRequest => -32600, ErrorCode::MethodNotFound => -32601, ErrorCode::InvalidParams => -32602, ErrorCode::InternalError => -32603, ErrorCode::ServerError(code) => code, } } } impl From for ErrorCode { fn from(code: i64) -> Self { match code { -32700 => ErrorCode::ParseError, -32600 => ErrorCode::InvalidRequest, -32601 => ErrorCode::MethodNotFound, -32602 => ErrorCode::InvalidParams, -32603 => ErrorCode::InternalError, code => ErrorCode::ServerError(code), } } } impl<'de> Deserialize<'de> for ErrorCode { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { let code: i64 = Deserialize::deserialize(deserializer)?; Ok(ErrorCode::from(code)) } } impl Serialize for ErrorCode { fn serialize(&self, serializer: S) -> Result where S: 
serde::Serializer, { serializer.serialize_i64(self.code()) } } #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] pub struct Error { pub code: ErrorCode, pub message: String, #[serde(skip_serializing_if = "Option::is_none")] pub data: Option, } impl Error { pub fn invalid_params(message: M) -> Self where M: Into, { Error { code: ErrorCode::InvalidParams, message: message.into(), data: None, } } } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}: {}", self.code, self.message) } } impl std::error::Error for Error {} // https://www.jsonrpc.org/specification#request_object /// Request ID #[derive(Debug, PartialEq, Eq, Clone, Hash, Deserialize, Serialize)] #[serde(untagged)] pub enum Id { Null, Num(#[serde(deserialize_with = "deserialize_jsonrpc_id_num")] u64), Str(String), } fn deserialize_jsonrpc_id_num<'de, D>(deserializer: D) -> Result where D: serde::Deserializer<'de>, { let num = serde_json::Number::deserialize(deserializer)?; if let Some(val) = num.as_u64() { return Ok(val); }; // Accept floats as long as they represent positive whole numbers. // The JSONRPC spec says "Numbers SHOULD NOT contain fractional parts" so we should try to // accept them if possible. The JavaScript type system lumps integers and floats together so // some languages may serialize integer IDs as floats with a zeroed fractional part. // See . 
if let Some(val) = num .as_f64() .filter(|f| f.is_sign_positive() && f.fract() == 0.0) { return Ok(val as u64); } Err(de::Error::custom( "number must be integer or float representing a whole number in valid u64 range", )) } impl std::fmt::Display for Id { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Id::Null => f.write_str("null"), Id::Num(num) => write!(f, "{}", num), Id::Str(string) => f.write_str(string), } } } /// Protocol Version #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] pub enum Version { V2, } impl Serialize for Version { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, { match *self { Version::V2 => serializer.serialize_str("2.0"), } } } struct VersionVisitor; impl Visitor<'_> for VersionVisitor { type Value = Version; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("a string") } fn visit_str(self, value: &str) -> Result where E: de::Error, { match value { "2.0" => Ok(Version::V2), _ => Err(de::Error::custom("invalid version")), } } } impl<'de> Deserialize<'de> for Version { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { deserializer.deserialize_identifier(VersionVisitor) } } #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum Params { None, Array(Vec), Map(serde_json::Map), } impl Params { pub fn parse(self) -> Result where D: DeserializeOwned, { let value: Value = self.into(); serde_json::from_value(value) .map_err(|err| Error::invalid_params(format!("Invalid params: {}.", err))) } pub fn is_none(&self) -> bool { self == &Params::None } } impl From for Value { fn from(params: Params) -> Value { match params { Params::Array(vec) => Value::Array(vec), Params::Map(map) => Value::Object(map), Params::None => Value::Null, } } } #[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] #[serde(deny_unknown_fields)] pub struct MethodCall { pub jsonrpc: Option, pub 
method: String, #[serde(default = "default_params", skip_serializing_if = "Params::is_none")] pub params: Params, pub id: Id, } #[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] #[serde(deny_unknown_fields)] pub struct Notification { pub jsonrpc: Option, pub method: String, #[serde(default = "default_params", skip_serializing_if = "Params::is_none")] pub params: Params, } #[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] #[serde(deny_unknown_fields)] #[serde(untagged)] pub enum Call { MethodCall(MethodCall), Notification(Notification), Invalid { // We can attempt to salvage the id out of the invalid request // for better debugging #[serde(default = "default_id")] id: Id, }, } fn default_params() -> Params { Params::None } fn default_id() -> Id { Id::Null } impl From for Call { fn from(method_call: MethodCall) -> Self { Call::MethodCall(method_call) } } impl From for Call { fn from(notification: Notification) -> Self { Call::Notification(notification) } } #[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] #[serde(deny_unknown_fields)] #[serde(untagged)] pub enum Request { Single(Call), Batch(Vec), } // https://www.jsonrpc.org/specification#response_object #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] pub struct Success { #[serde(skip_serializing_if = "Option::is_none")] pub jsonrpc: Option, pub result: Value, pub id: Id, } #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] pub struct Failure { #[serde(skip_serializing_if = "Option::is_none")] pub jsonrpc: Option, pub error: Error, pub id: Id, } // Note that failure comes first because we're not using // #[serde(deny_unknown_field)]: we want a request that contains // both `result` and `error` to be a `Failure`. 
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum Output { Failure(Failure), Success(Success), } impl From for Result { fn from(output: Output) -> Self { match output { Output::Success(success) => Ok(success.result), Output::Failure(failure) => Err(failure.error), } } } #[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] #[serde(untagged)] pub enum Response { Single(Output), Batch(Vec), } impl From for Response { fn from(failure: Failure) -> Self { Response::Single(Output::Failure(failure)) } } impl From for Response { fn from(success: Success) -> Self { Response::Single(Output::Success(success)) } } #[test] fn method_call_serialize() { use serde_json; let m = MethodCall { jsonrpc: Some(Version::V2), method: "update".to_owned(), params: Params::Array(vec![Value::from(1), Value::from(2)]), id: Id::Num(1), }; let serialized = serde_json::to_string(&m).unwrap(); assert_eq!( serialized, r#"{"jsonrpc":"2.0","method":"update","params":[1,2],"id":1}"# ); } #[test] fn notification_serialize() { use serde_json; let n = Notification { jsonrpc: Some(Version::V2), method: "update".to_owned(), params: Params::Array(vec![Value::from(1), Value::from(2)]), }; let serialized = serde_json::to_string(&n).unwrap(); assert_eq!( serialized, r#"{"jsonrpc":"2.0","method":"update","params":[1,2]}"# ); } #[test] fn serialize_skip_none_params() { use serde_json; let m = MethodCall { jsonrpc: Some(Version::V2), method: "shutdown".to_owned(), params: Params::None, id: Id::Num(1), }; let serialized = serde_json::to_string(&m).unwrap(); assert_eq!( serialized, r#"{"jsonrpc":"2.0","method":"shutdown","id":1}"# ); let n = Notification { jsonrpc: Some(Version::V2), method: "exit".to_owned(), params: Params::None, }; let serialized = serde_json::to_string(&n).unwrap(); assert_eq!(serialized, r#"{"jsonrpc":"2.0","method":"exit"}"#); } #[test] fn id_deserialize() { use serde_json; let id = r#"8"#; let deserialized: Id = 
serde_json::from_str(id).unwrap(); assert_eq!(deserialized, Id::Num(8)); let id = r#"4.0"#; let deserialized: Id = serde_json::from_str(id).unwrap(); assert_eq!(deserialized, Id::Num(4)); let id = r#"0.01"#; assert!(serde_json::from_str::(id).is_err()); } #[test] fn success_output_deserialize() { use serde_json; let dso = r#"{"jsonrpc":"2.0","result":1,"id":1}"#; let deserialized: Output = serde_json::from_str(dso).unwrap(); assert_eq!( deserialized, Output::Success(Success { jsonrpc: Some(Version::V2), result: Value::from(1), id: Id::Num(1) }) ); } #[test] fn success_output_deserialize_with_extra_fields() { use serde_json; // https://github.com/helix-editor/helix/issues/2786 let dso = r#"{"jsonrpc":"2.0","result":1,"id":1,"requestMethod":"initialize"}"#; let deserialized: Output = serde_json::from_str(dso).unwrap(); assert_eq!( deserialized, Output::Success(Success { jsonrpc: Some(Version::V2), result: Value::from(1), id: Id::Num(1) }) ); } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp/src/lib.rs000066400000000000000000001131771500614314100234270ustar00rootroot00000000000000mod client; pub mod file_event; mod file_operations; pub mod jsonrpc; mod transport; use arc_swap::ArcSwap; pub use client::Client; pub use futures_executor::block_on; pub use helix_lsp_types as lsp; pub use jsonrpc::Call; pub use lsp::{Position, Url}; use futures_util::stream::select_all::SelectAll; use helix_core::syntax::{ LanguageConfiguration, LanguageServerConfiguration, LanguageServerFeatures, }; use helix_stdx::path; use slotmap::SlotMap; use tokio::sync::mpsc::UnboundedReceiver; use std::{ collections::HashMap, path::{Path, PathBuf}, sync::Arc, }; use thiserror::Error; use tokio_stream::wrappers::UnboundedReceiverStream; pub type Result = core::result::Result; pub type LanguageServerName = String; pub use helix_core::diagnostic::LanguageServerId; #[derive(Error, Debug)] pub enum Error { #[error("protocol error: {0}")] Rpc(#[from] jsonrpc::Error), #[error("failed to parse: 
{0}")] Parse(#[from] serde_json::Error), #[error("IO Error: {0}")] IO(#[from] std::io::Error), #[error("request {0} timed out")] Timeout(jsonrpc::Id), #[error("server closed the stream")] StreamClosed, #[error("Unhandled")] Unhandled, #[error(transparent)] ExecutableNotFound(#[from] helix_stdx::env::ExecutableNotFoundError), #[error(transparent)] Other(#[from] anyhow::Error), } #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] pub enum OffsetEncoding { /// UTF-8 code units aka bytes Utf8, /// UTF-32 code units aka chars Utf32, /// UTF-16 code units #[default] Utf16, } pub mod util { use super::*; use helix_core::line_ending::{line_end_byte_index, line_end_char_index}; use helix_core::snippets::{RenderedSnippet, Snippet, SnippetRenderCtx}; use helix_core::{chars, RopeSlice}; use helix_core::{diagnostic::NumberOrString, Range, Rope, Selection, Tendril, Transaction}; /// Converts a diagnostic in the document to [`lsp::Diagnostic`]. /// /// Panics when [`pos_to_lsp_pos`] would for an invalid range on the diagnostic. 
pub fn diagnostic_to_lsp_diagnostic( doc: &Rope, diag: &helix_core::diagnostic::Diagnostic, offset_encoding: OffsetEncoding, ) -> lsp::Diagnostic { use helix_core::diagnostic::Severity::*; let range = Range::new(diag.range.start, diag.range.end); let severity = diag.severity.map(|s| match s { Hint => lsp::DiagnosticSeverity::HINT, Info => lsp::DiagnosticSeverity::INFORMATION, Warning => lsp::DiagnosticSeverity::WARNING, Error => lsp::DiagnosticSeverity::ERROR, }); let code = match diag.code.clone() { Some(x) => match x { NumberOrString::Number(x) => Some(lsp::NumberOrString::Number(x)), NumberOrString::String(x) => Some(lsp::NumberOrString::String(x)), }, None => None, }; let new_tags: Vec<_> = diag .tags .iter() .map(|tag| match tag { helix_core::diagnostic::DiagnosticTag::Unnecessary => { lsp::DiagnosticTag::UNNECESSARY } helix_core::diagnostic::DiagnosticTag::Deprecated => lsp::DiagnosticTag::DEPRECATED, }) .collect(); let tags = if !new_tags.is_empty() { Some(new_tags) } else { None }; lsp::Diagnostic { range: range_to_lsp_range(doc, range, offset_encoding), severity, code, source: diag.source.clone(), message: diag.message.to_owned(), related_information: None, tags, data: diag.data.to_owned(), ..Default::default() } } /// Converts [`lsp::Position`] to a position in the document. /// /// Returns `None` if position.line is out of bounds or an overflow occurs pub fn lsp_pos_to_pos( doc: &Rope, pos: lsp::Position, offset_encoding: OffsetEncoding, ) -> Option { let pos_line = pos.line as usize; if pos_line > doc.len_lines() - 1 { // If it extends past the end, truncate it to the end. This is because the // way the LSP describes the range including the last newline is by // specifying a line number after what we would call the last line. log::warn!("LSP position {pos:?} out of range assuming EOF"); return Some(doc.len_chars()); } // We need to be careful here to fully comply ith the LSP spec. 
// Two relevant quotes from the spec: // // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#position // > If the character value is greater than the line length it defaults back // > to the line length. // // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocuments // > To ensure that both client and server split the string into the same // > line representation the protocol specifies the following end-of-line sequences: // > ‘\n’, ‘\r\n’ and ‘\r’. Positions are line end character agnostic. // > So you can not specify a position that denotes \r|\n or \n| where | represents the character offset. // // This means that while the line must be in bounds the `character` // must be capped to the end of the line. // Note that the end of the line here is **before** the line terminator // so we must use `line_end_char_index` instead of `doc.line_to_char(pos_line + 1)` // // FIXME: Helix does not fully comply with the LSP spec for line terminators. // The LSP standard requires that line terminators are ['\n', '\r\n', '\r']. // Without the unicode-linebreak feature disabled, the `\r` terminator is not handled by helix. // With the unicode-linebreak feature, helix recognizes multiple extra line break chars // which means that positions will be decoded/encoded incorrectly in their presence let line = match offset_encoding { OffsetEncoding::Utf8 => { let line_start = doc.line_to_byte(pos_line); let line_end = line_end_byte_index(&doc.slice(..), pos_line); line_start..line_end } OffsetEncoding::Utf16 => { // TODO directly translate line index to char-idx // ropey can do this just as easily as utf-8 byte translation // but the functions are just missing. 
// Translate to char first and then utf-16 as a workaround let line_start = doc.line_to_char(pos_line); let line_end = line_end_char_index(&doc.slice(..), pos_line); doc.char_to_utf16_cu(line_start)..doc.char_to_utf16_cu(line_end) } OffsetEncoding::Utf32 => { let line_start = doc.line_to_char(pos_line); let line_end = line_end_char_index(&doc.slice(..), pos_line); line_start..line_end } }; // The LSP spec demands that the offset is capped to the end of the line let pos = line .start .checked_add(pos.character as usize) .unwrap_or(line.end) .min(line.end); match offset_encoding { OffsetEncoding::Utf8 => doc.try_byte_to_char(pos).ok(), OffsetEncoding::Utf16 => doc.try_utf16_cu_to_char(pos).ok(), OffsetEncoding::Utf32 => Some(pos), } } /// Converts position in the document to [`lsp::Position`]. /// /// Panics when `pos` is out of `doc` bounds or operation overflows. pub fn pos_to_lsp_pos( doc: &Rope, pos: usize, offset_encoding: OffsetEncoding, ) -> lsp::Position { match offset_encoding { OffsetEncoding::Utf8 => { let line = doc.char_to_line(pos); let line_start = doc.line_to_byte(line); let col = doc.char_to_byte(pos) - line_start; lsp::Position::new(line as u32, col as u32) } OffsetEncoding::Utf16 => { let line = doc.char_to_line(pos); let line_start = doc.char_to_utf16_cu(doc.line_to_char(line)); let col = doc.char_to_utf16_cu(pos) - line_start; lsp::Position::new(line as u32, col as u32) } OffsetEncoding::Utf32 => { let line = doc.char_to_line(pos); let line_start = doc.line_to_char(line); let col = pos - line_start; lsp::Position::new(line as u32, col as u32) } } } /// Converts a range in the document to [`lsp::Range`]. 
pub fn range_to_lsp_range( doc: &Rope, range: Range, offset_encoding: OffsetEncoding, ) -> lsp::Range { let start = pos_to_lsp_pos(doc, range.from(), offset_encoding); let end = pos_to_lsp_pos(doc, range.to(), offset_encoding); lsp::Range::new(start, end) } pub fn lsp_range_to_range( doc: &Rope, mut range: lsp::Range, offset_encoding: OffsetEncoding, ) -> Option { // This is sort of an edgecase. It's not clear from the spec how to deal with // ranges where end < start. They don't make much sense but vscode simply caps start to end // and because it's not specified quite a few LS rely on this as a result (for example the TS server) if range.start > range.end { log::error!( "Invalid LSP range start {:?} > end {:?}, using an empty range at the end instead", range.start, range.end ); range.start = range.end; } let start = lsp_pos_to_pos(doc, range.start, offset_encoding)?; let end = lsp_pos_to_pos(doc, range.end, offset_encoding)?; Some(Range::new(start, end)) } /// If the LS did not provide a range for the completion or the range of the /// primary cursor can not be used for the secondary cursor, this function /// can be used to find the completion range for a cursor fn find_completion_range(text: RopeSlice, replace_mode: bool, cursor: usize) -> (usize, usize) { let start = cursor - text .chars_at(cursor) .reversed() .take_while(|ch| chars::char_is_word(*ch)) .count(); let mut end = cursor; if replace_mode { end += text .chars_at(cursor) .take_while(|ch| chars::char_is_word(*ch)) .count(); } (start, end) } fn completion_range( text: RopeSlice, edit_offset: Option<(i128, i128)>, replace_mode: bool, cursor: usize, ) -> Option<(usize, usize)> { let res = match edit_offset { Some((start_offset, end_offset)) => { let start_offset = cursor as i128 + start_offset; if start_offset < 0 { return None; } let end_offset = cursor as i128 + end_offset; if end_offset > text.len_chars() as i128 { return None; } (start_offset as usize, end_offset as usize) } None => 
find_completion_range(text, replace_mode, cursor), }; Some(res) } /// Creates a [Transaction] from the [lsp::TextEdit] in a completion response. /// The transaction applies the edit to all cursors. pub fn generate_transaction_from_completion_edit( doc: &Rope, selection: &Selection, edit_offset: Option<(i128, i128)>, replace_mode: bool, new_text: String, ) -> Transaction { let replacement: Option = if new_text.is_empty() { None } else { Some(new_text.into()) }; let text = doc.slice(..); let (removed_start, removed_end) = completion_range( text, edit_offset, replace_mode, selection.primary().cursor(text), ) .expect("transaction must be valid for primary selection"); let removed_text = text.slice(removed_start..removed_end); let (transaction, mut selection) = Transaction::change_by_selection_ignore_overlapping( doc, selection, |range| { let cursor = range.cursor(text); completion_range(text, edit_offset, replace_mode, cursor) .filter(|(start, end)| text.slice(start..end) == removed_text) .unwrap_or_else(|| find_completion_range(text, replace_mode, cursor)) }, |_, _| replacement.clone(), ); if transaction.changes().is_empty() { return transaction; } selection = selection.map(transaction.changes()); transaction.with_selection(selection) } /// Creates a [Transaction] from the [Snippet] in a completion response. /// The transaction applies the edit to all cursors. 
pub fn generate_transaction_from_snippet( doc: &Rope, selection: &Selection, edit_offset: Option<(i128, i128)>, replace_mode: bool, snippet: Snippet, cx: &mut SnippetRenderCtx, ) -> (Transaction, RenderedSnippet) { let text = doc.slice(..); let (removed_start, removed_end) = completion_range( text, edit_offset, replace_mode, selection.primary().cursor(text), ) .expect("transaction must be valid for primary selection"); let removed_text = text.slice(removed_start..removed_end); let (transaction, mapped_selection, snippet) = snippet.render( doc, selection, |range| { let cursor = range.cursor(text); completion_range(text, edit_offset, replace_mode, cursor) .filter(|(start, end)| text.slice(start..end) == removed_text) .unwrap_or_else(|| find_completion_range(text, replace_mode, cursor)) }, cx, ); let transaction = transaction.with_selection(snippet.first_selection( // we keep the direction of the old primary selection in case it changed during mapping // but use the primary idx from the mapped selection in case ranges had to be merged selection.primary().direction(), mapped_selection.primary_index(), )); (transaction, snippet) } pub fn generate_transaction_from_edits( doc: &Rope, mut edits: Vec, offset_encoding: OffsetEncoding, ) -> Transaction { // Sort edits by start range, since some LSPs (Omnisharp) send them // in reverse order. edits.sort_by_key(|edit| edit.range.start); // Generate a diff if the edit is a full document replacement. 
#[allow(clippy::collapsible_if)] if edits.len() == 1 { let is_document_replacement = edits.first().and_then(|edit| { let start = lsp_pos_to_pos(doc, edit.range.start, offset_encoding)?; let end = lsp_pos_to_pos(doc, edit.range.end, offset_encoding)?; Some(start..end) }) == Some(0..doc.len_chars()); if is_document_replacement { let new_text = Rope::from(edits.pop().unwrap().new_text); return helix_core::diff::compare_ropes(doc, &new_text); } } Transaction::change( doc, edits.into_iter().map(|edit| { // simplify "" into None for cleaner changesets let replacement = if !edit.new_text.is_empty() { Some(edit.new_text.into()) } else { None }; let start = if let Some(start) = lsp_pos_to_pos(doc, edit.range.start, offset_encoding) { start } else { return (0, 0, None); }; let end = if let Some(end) = lsp_pos_to_pos(doc, edit.range.end, offset_encoding) { end } else { return (0, 0, None); }; if start > end { log::error!( "Invalid LSP text edit start {:?} > end {:?}, discarding", start, end ); return (0, 0, None); } (start, end, replacement) }), ) } } #[derive(Debug, PartialEq, Clone)] pub enum MethodCall { WorkDoneProgressCreate(lsp::WorkDoneProgressCreateParams), ApplyWorkspaceEdit(lsp::ApplyWorkspaceEditParams), WorkspaceFolders, WorkspaceConfiguration(lsp::ConfigurationParams), RegisterCapability(lsp::RegistrationParams), UnregisterCapability(lsp::UnregistrationParams), ShowDocument(lsp::ShowDocumentParams), } impl MethodCall { pub fn parse(method: &str, params: jsonrpc::Params) -> Result { use lsp::request::Request; let request = match method { lsp::request::WorkDoneProgressCreate::METHOD => { let params: lsp::WorkDoneProgressCreateParams = params.parse()?; Self::WorkDoneProgressCreate(params) } lsp::request::ApplyWorkspaceEdit::METHOD => { let params: lsp::ApplyWorkspaceEditParams = params.parse()?; Self::ApplyWorkspaceEdit(params) } lsp::request::WorkspaceFoldersRequest::METHOD => Self::WorkspaceFolders, lsp::request::WorkspaceConfiguration::METHOD => { let params: 
lsp::ConfigurationParams = params.parse()?; Self::WorkspaceConfiguration(params) } lsp::request::RegisterCapability::METHOD => { let params: lsp::RegistrationParams = params.parse()?; Self::RegisterCapability(params) } lsp::request::UnregisterCapability::METHOD => { let params: lsp::UnregistrationParams = params.parse()?; Self::UnregisterCapability(params) } lsp::request::ShowDocument::METHOD => { let params: lsp::ShowDocumentParams = params.parse()?; Self::ShowDocument(params) } _ => { return Err(Error::Unhandled); } }; Ok(request) } } #[derive(Debug, PartialEq, Clone)] pub enum Notification { // we inject this notification to signal the LSP is ready Initialized, // and this notification to signal that the LSP exited Exit, PublishDiagnostics(lsp::PublishDiagnosticsParams), ShowMessage(lsp::ShowMessageParams), LogMessage(lsp::LogMessageParams), ProgressMessage(lsp::ProgressParams), } impl Notification { pub fn parse(method: &str, params: jsonrpc::Params) -> Result { use lsp::notification::Notification as _; let notification = match method { lsp::notification::Initialized::METHOD => Self::Initialized, lsp::notification::Exit::METHOD => Self::Exit, lsp::notification::PublishDiagnostics::METHOD => { let params: lsp::PublishDiagnosticsParams = params.parse()?; Self::PublishDiagnostics(params) } lsp::notification::ShowMessage::METHOD => { let params: lsp::ShowMessageParams = params.parse()?; Self::ShowMessage(params) } lsp::notification::LogMessage::METHOD => { let params: lsp::LogMessageParams = params.parse()?; Self::LogMessage(params) } lsp::notification::Progress::METHOD => { let params: lsp::ProgressParams = params.parse()?; Self::ProgressMessage(params) } _ => { return Err(Error::Unhandled); } }; Ok(notification) } } #[derive(Debug)] pub struct Registry { inner: SlotMap>, inner_by_name: HashMap>>, syn_loader: Arc>, pub incoming: SelectAll>, pub file_event_handler: file_event::Handler, } impl Registry { pub fn new(syn_loader: Arc>) -> Self { Self { inner: 
SlotMap::with_key(), inner_by_name: HashMap::new(), syn_loader, incoming: SelectAll::new(), file_event_handler: file_event::Handler::new(), } } pub fn get_by_id(&self, id: LanguageServerId) -> Option<&Arc> { self.inner.get(id) } pub fn remove_by_id(&mut self, id: LanguageServerId) { let Some(client) = self.inner.remove(id) else { log::debug!("client was already removed"); return; }; self.file_event_handler.remove_client(id); let instances = self .inner_by_name .get_mut(client.name()) .expect("inner and inner_by_name must be synced"); instances.retain(|ls| id != ls.id()); if instances.is_empty() { self.inner_by_name.remove(client.name()); } } fn start_client( &mut self, name: String, ls_config: &LanguageConfiguration, doc_path: Option<&std::path::PathBuf>, root_dirs: &[PathBuf], enable_snippets: bool, ) -> Result, StartupError> { let syn_loader = self.syn_loader.load(); let config = syn_loader .language_server_configs() .get(&name) .ok_or_else(|| anyhow::anyhow!("Language server '{name}' not defined"))?; let id = self.inner.try_insert_with_key(|id| { start_client( id, name, ls_config, config, doc_path, root_dirs, enable_snippets, ) .map(|client| { self.incoming.push(UnboundedReceiverStream::new(client.1)); client.0 }) })?; Ok(self.inner[id].clone()) } /// If this method is called, all documents that have a reference to the language server have to refresh their language servers, /// See helix_view::editor::Editor::refresh_language_servers pub fn restart_server( &mut self, name: &str, language_config: &LanguageConfiguration, doc_path: Option<&std::path::PathBuf>, root_dirs: &[PathBuf], enable_snippets: bool, ) -> Option>> { if let Some(old_clients) = self.inner_by_name.remove(name) { if old_clients.is_empty() { log::info!("restarting client for '{name}' which was manually stopped"); } else { log::info!("stopping existing clients for '{name}'"); } for old_client in old_clients { self.file_event_handler.remove_client(old_client.id()); self.inner.remove(old_client.id()); 
tokio::spawn(async move { let _ = old_client.force_shutdown().await; }); } } let client = match self.start_client( name.to_string(), language_config, doc_path, root_dirs, enable_snippets, ) { Ok(client) => client, Err(StartupError::NoRequiredRootFound) => return None, Err(StartupError::Error(err)) => return Some(Err(err)), }; self.inner_by_name .insert(name.to_owned(), vec![client.clone()]); Some(Ok(client)) } pub fn stop(&mut self, name: &str) { if let Some(clients) = self.inner_by_name.get_mut(name) { // Drain the clients vec so that the entry in `inner_by_name` remains // empty. We use the empty vec as a "tombstone" to mean that a server // has been manually stopped with :lsp-stop and shouldn't be automatically // restarted by `get`. :lsp-restart can be used to restart the server // manually. for client in clients.drain(..) { self.file_event_handler.remove_client(client.id()); self.inner.remove(client.id()); tokio::spawn(async move { let _ = client.force_shutdown().await; }); } } } pub fn get<'a>( &'a mut self, language_config: &'a LanguageConfiguration, doc_path: Option<&'a std::path::PathBuf>, root_dirs: &'a [PathBuf], enable_snippets: bool, ) -> impl Iterator>)> + 'a { language_config.language_servers.iter().filter_map( move |LanguageServerFeatures { name, .. }| { if let Some(clients) = self.inner_by_name.get(name) { // If the clients vec is empty, do not automatically start a client // for this server. The empty vec is a tombstone left to mean that a // server has been manually stopped and shouldn't be started automatically. // See `stop`. 
if clients.is_empty() { return None; } if let Some((_, client)) = clients.iter().enumerate().find(|(i, client)| { let manual_roots = language_config .workspace_lsp_roots .as_deref() .unwrap_or(root_dirs); client.try_add_doc(&language_config.roots, manual_roots, doc_path, *i == 0) }) { return Some((name.to_owned(), Ok(client.clone()))); } } match self.start_client( name.clone(), language_config, doc_path, root_dirs, enable_snippets, ) { Ok(client) => { self.inner_by_name .entry(name.to_owned()) .or_default() .push(client.clone()); Some((name.clone(), Ok(client))) } Err(StartupError::NoRequiredRootFound) => None, Err(StartupError::Error(err)) => Some((name.to_owned(), Err(err))), } }, ) } pub fn iter_clients(&self) -> impl Iterator> { self.inner.values() } } #[derive(Debug)] pub enum ProgressStatus { Created, Started { title: String, progress: lsp::WorkDoneProgress, }, } impl ProgressStatus { pub fn progress(&self) -> Option<&lsp::WorkDoneProgress> { match &self { ProgressStatus::Created => None, ProgressStatus::Started { title: _, progress } => Some(progress), } } } #[derive(Default, Debug)] /// Acts as a container for progress reported by language servers. Each server /// has a unique id assigned at creation through [`Registry`]. This id is then used /// to store the progress in this map. pub struct LspProgressMap(HashMap>); impl LspProgressMap { pub fn new() -> Self { Self::default() } /// Returns a map of all tokens corresponding to the language server with `id`. pub fn progress_map( &self, id: LanguageServerId, ) -> Option<&HashMap> { self.0.get(&id) } pub fn is_progressing(&self, id: LanguageServerId) -> bool { self.0.get(&id).map(|it| !it.is_empty()).unwrap_or_default() } /// Returns last progress status for a given server with `id` and `token`. 
pub fn progress( &self, id: LanguageServerId, token: &lsp::ProgressToken, ) -> Option<&ProgressStatus> { self.0.get(&id).and_then(|values| values.get(token)) } pub fn title(&self, id: LanguageServerId, token: &lsp::ProgressToken) -> Option<&String> { self.progress(id, token).and_then(|p| match p { ProgressStatus::Created => None, ProgressStatus::Started { title, .. } => Some(title), }) } /// Checks if progress `token` for server with `id` is created. pub fn is_created(&mut self, id: LanguageServerId, token: &lsp::ProgressToken) -> bool { self.0 .get(&id) .map(|values| values.get(token).is_some()) .unwrap_or_default() } pub fn create(&mut self, id: LanguageServerId, token: lsp::ProgressToken) { self.0 .entry(id) .or_default() .insert(token, ProgressStatus::Created); } /// Ends the progress by removing the `token` from server with `id`, if removed returns the value. pub fn end_progress( &mut self, id: LanguageServerId, token: &lsp::ProgressToken, ) -> Option { self.0.get_mut(&id).and_then(|vals| vals.remove(token)) } /// Updates the progress of `token` for server with `id` to begin state `status` pub fn begin( &mut self, id: LanguageServerId, token: lsp::ProgressToken, status: lsp::WorkDoneProgressBegin, ) { self.0.entry(id).or_default().insert( token, ProgressStatus::Started { title: status.title.clone(), progress: lsp::WorkDoneProgress::Begin(status), }, ); } /// Updates the progress of `token` for server with `id` to report state `status`. pub fn update( &mut self, id: LanguageServerId, token: lsp::ProgressToken, status: lsp::WorkDoneProgressReport, ) { self.0 .entry(id) .or_default() .entry(token) .and_modify(|e| match e { ProgressStatus::Created => (), ProgressStatus::Started { progress, .. 
} => { *progress = lsp::WorkDoneProgress::Report(status) } }); } } struct NewClient(Arc, UnboundedReceiver<(LanguageServerId, Call)>); enum StartupError { NoRequiredRootFound, Error(Error), } impl> From for StartupError { fn from(value: T) -> Self { StartupError::Error(value.into()) } } /// start_client takes both a LanguageConfiguration and a LanguageServerConfiguration to ensure that /// it is only called when it makes sense. fn start_client( id: LanguageServerId, name: String, config: &LanguageConfiguration, ls_config: &LanguageServerConfiguration, doc_path: Option<&std::path::PathBuf>, root_dirs: &[PathBuf], enable_snippets: bool, ) -> Result { let (workspace, workspace_is_cwd) = helix_loader::find_workspace(); let workspace = path::normalize(workspace); let root = find_lsp_workspace( doc_path .and_then(|x| x.parent().and_then(|x| x.to_str())) .unwrap_or("."), &config.roots, config.workspace_lsp_roots.as_deref().unwrap_or(root_dirs), &workspace, workspace_is_cwd, ); // `root_uri` and `workspace_folder` can be empty in case there is no workspace // `root_url` can not, use `workspace` as a fallback let root_path = root.clone().unwrap_or_else(|| workspace.clone()); let root_uri = root.and_then(|root| lsp::Url::from_file_path(root).ok()); if let Some(globset) = &ls_config.required_root_patterns { if !root_path .read_dir()? 
.flatten() .map(|entry| entry.file_name()) .any(|entry| globset.is_match(entry)) { return Err(StartupError::NoRequiredRootFound); } } let (client, incoming, initialize_notify) = Client::start( &ls_config.command, &ls_config.args, ls_config.config.clone(), &ls_config.environment, root_path, root_uri, id, name, ls_config.timeout, )?; let client = Arc::new(client); // Initialize the client asynchronously let _client = client.clone(); tokio::spawn(async move { use futures_util::TryFutureExt; let value = _client .capabilities .get_or_try_init(|| { _client .initialize(enable_snippets) .map_ok(|response| response.capabilities) }) .await; if let Err(e) = value { log::error!("failed to initialize language server: {}", e); return; } // next up, notify _client.notify::(lsp::InitializedParams {}); initialize_notify.notify_one(); }); Ok(NewClient(client, incoming)) } /// Find an LSP workspace of a file using the following mechanism: /// * if the file is outside `workspace` return `None` /// * start at `file` and search the file tree upward /// * stop the search at the first `root_dirs` entry that contains `file` /// * if no `root_dirs` matches `file` stop at workspace /// * Returns the top most directory that contains a `root_marker` /// * If no root marker and we stopped at a `root_dirs` entry, return the directory we stopped at /// * If we stopped at `workspace` instead and `workspace_is_cwd == false` return `None` /// * If we stopped at `workspace` instead and `workspace_is_cwd == true` return `workspace` pub fn find_lsp_workspace( file: &str, root_markers: &[String], root_dirs: &[PathBuf], workspace: &Path, workspace_is_cwd: bool, ) -> Option { let file = std::path::Path::new(file); let mut file = if file.is_absolute() { file.to_path_buf() } else { let current_dir = helix_stdx::env::current_working_dir(); current_dir.join(file) }; file = path::normalize(&file); if !file.starts_with(workspace) { return None; } let mut top_marker = None; for ancestor in file.ancestors() { if 
root_markers .iter() .any(|marker| ancestor.join(marker).exists()) { top_marker = Some(ancestor); } if root_dirs .iter() .any(|root_dir| path::normalize(workspace.join(root_dir)) == ancestor) { // if the worskapce is the cwd do not search any higher for workspaces // but specify return Some(top_marker.unwrap_or(workspace).to_owned()); } if ancestor == workspace { // if the workspace is the CWD, let the LSP decide what the workspace // is return top_marker .or_else(|| (!workspace_is_cwd).then_some(workspace)) .map(Path::to_owned); } } debug_assert!(false, "workspace must be an ancestor of "); None } #[cfg(test)] mod tests { use super::{lsp, util::*, OffsetEncoding}; use helix_core::Rope; #[test] fn converts_lsp_pos_to_pos() { macro_rules! test_case { ($doc:expr, ($x:expr, $y:expr) => $want:expr) => { let doc = Rope::from($doc); let pos = lsp::Position::new($x, $y); assert_eq!($want, lsp_pos_to_pos(&doc, pos, OffsetEncoding::Utf16)); assert_eq!($want, lsp_pos_to_pos(&doc, pos, OffsetEncoding::Utf8)) }; } test_case!("", (0, 0) => Some(0)); test_case!("", (0, 1) => Some(0)); test_case!("", (1, 0) => Some(0)); test_case!("\n\n", (0, 0) => Some(0)); test_case!("\n\n", (1, 0) => Some(1)); test_case!("\n\n", (1, 1) => Some(1)); test_case!("\n\n", (2, 0) => Some(2)); test_case!("\n\n", (3, 0) => Some(2)); test_case!("test\n\n\n\ncase", (4, 3) => Some(11)); test_case!("test\n\n\n\ncase", (4, 4) => Some(12)); test_case!("test\n\n\n\ncase", (4, 5) => Some(12)); test_case!("", (u32::MAX, u32::MAX) => Some(0)); } #[test] fn emoji_format_gh_4791() { use lsp::{Position, Range, TextEdit}; let edits = vec![ TextEdit { range: Range { start: Position { line: 0, character: 1, }, end: Position { line: 1, character: 0, }, }, new_text: "\n ".to_string(), }, TextEdit { range: Range { start: Position { line: 1, character: 7, }, end: Position { line: 2, character: 0, }, }, new_text: "\n ".to_string(), }, ]; let mut source = Rope::from_str("[\n\"🇺🇸\",\n\"🎄\",\n]"); let transaction = 
generate_transaction_from_edits(&source, edits, OffsetEncoding::Utf16); assert!(transaction.apply(&mut source)); assert_eq!(source, "[\n \"🇺🇸\",\n \"🎄\",\n]"); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-lsp/src/transport.rs000066400000000000000000000350571500614314100247150ustar00rootroot00000000000000use crate::{ jsonrpc, lsp::{self, notification::Notification as _}, Error, LanguageServerId, Result, }; use anyhow::Context; use log::{error, info}; use serde::{Deserialize, Serialize}; use serde_json::Value; use std::collections::HashMap; use std::sync::Arc; use tokio::{ io::{AsyncBufRead, AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader, BufWriter}, process::{ChildStderr, ChildStdin, ChildStdout}, sync::{ mpsc::{unbounded_channel, Sender, UnboundedReceiver, UnboundedSender}, Mutex, Notify, }, }; #[derive(Debug)] pub enum Payload { Request { chan: Sender>, value: jsonrpc::MethodCall, }, Notification(jsonrpc::Notification), Response(jsonrpc::Output), } /// A type representing all possible values sent from the server to the client. #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(deny_unknown_fields)] #[serde(untagged)] enum ServerMessage { /// A regular JSON-RPC request output (single response). Output(jsonrpc::Output), /// A JSON-RPC request or notification. 
Call(jsonrpc::Call), } #[derive(Debug)] pub struct Transport { id: LanguageServerId, name: String, pending_requests: Mutex>>>, } impl Transport { pub fn start( server_stdout: BufReader, server_stdin: BufWriter, server_stderr: BufReader, id: LanguageServerId, name: String, ) -> ( UnboundedReceiver<(LanguageServerId, jsonrpc::Call)>, UnboundedSender, Arc, ) { let (client_tx, rx) = unbounded_channel(); let (tx, client_rx) = unbounded_channel(); let notify = Arc::new(Notify::new()); let transport = Self { id, name, pending_requests: Mutex::new(HashMap::default()), }; let transport = Arc::new(transport); tokio::spawn(Self::recv( transport.clone(), server_stdout, client_tx.clone(), )); tokio::spawn(Self::err(transport.clone(), server_stderr)); tokio::spawn(Self::send( transport, server_stdin, client_tx, client_rx, notify.clone(), )); (rx, tx, notify) } async fn recv_server_message( reader: &mut (impl AsyncBufRead + Unpin + Send), buffer: &mut String, language_server_name: &str, ) -> Result { let mut content_length = None; loop { buffer.truncate(0); if reader.read_line(buffer).await? == 0 { return Err(Error::StreamClosed); }; // debug!("<- header {:?}", buffer); if buffer == "\r\n" { // look for an empty CRLF line break; } let header = buffer.trim(); let parts = header.split_once(": "); match parts { Some(("Content-Length", value)) => { content_length = Some(value.parse().context("invalid content length")?); } Some((_, _)) => {} None => { // Workaround: Some non-conformant language servers will output logging and other garbage // into the same stream as JSON-RPC messages. This can also happen from shell scripts that spawn // the server. Skip such lines and log a warning. 
// warn!("Failed to parse header: {:?}", header); } } } let content_length = content_length.context("missing content length")?; //TODO: reuse vector let mut content = vec![0; content_length]; reader.read_exact(&mut content).await?; let msg = std::str::from_utf8(&content).context("invalid utf8 from server")?; info!("{language_server_name} <- {msg}"); // try parsing as output (server response) or call (server request) let output: serde_json::Result = serde_json::from_str(msg); Ok(output?) } async fn recv_server_error( err: &mut (impl AsyncBufRead + Unpin + Send), buffer: &mut String, language_server_name: &str, ) -> Result<()> { buffer.truncate(0); if err.read_line(buffer).await? == 0 { return Err(Error::StreamClosed); }; error!("{language_server_name} err <- {buffer:?}"); Ok(()) } async fn send_payload_to_server( &self, server_stdin: &mut BufWriter, payload: Payload, ) -> Result<()> { //TODO: reuse string let json = match payload { Payload::Request { chan, value } => { self.pending_requests .lock() .await .insert(value.id.clone(), chan); serde_json::to_string(&value)? } Payload::Notification(value) => serde_json::to_string(&value)?, Payload::Response(error) => serde_json::to_string(&error)?, }; self.send_string_to_server(server_stdin, json, &self.name) .await } async fn send_string_to_server( &self, server_stdin: &mut BufWriter, request: String, language_server_name: &str, ) -> Result<()> { info!("{language_server_name} -> {request}"); // send the headers server_stdin .write_all(format!("Content-Length: {}\r\n\r\n", request.len()).as_bytes()) .await?; // send the body server_stdin.write_all(request.as_bytes()).await?; server_stdin.flush().await?; Ok(()) } async fn process_server_message( &self, client_tx: &UnboundedSender<(LanguageServerId, jsonrpc::Call)>, msg: ServerMessage, language_server_name: &str, ) -> Result<()> { match msg { ServerMessage::Output(output) => { self.process_request_response(output, language_server_name) .await? 
} ServerMessage::Call(call) => { client_tx .send((self.id, call)) .context("failed to send a message to server")?; // let notification = Notification::parse(&method, params); } }; Ok(()) } async fn process_request_response( &self, output: jsonrpc::Output, language_server_name: &str, ) -> Result<()> { let (id, result) = match output { jsonrpc::Output::Success(jsonrpc::Success { id, result, .. }) => (id, Ok(result)), jsonrpc::Output::Failure(jsonrpc::Failure { id, error, .. }) => { error!("{language_server_name} <- {error}"); (id, Err(error.into())) } }; if let Some(tx) = self.pending_requests.lock().await.remove(&id) { match tx.send(result).await { Ok(_) => (), Err(_) => error!( "Tried sending response into a closed channel (id={:?}), original request likely timed out", id ), }; } else { log::error!( "Discarding Language Server response without a request (id={:?}) {:?}", id, result ); } Ok(()) } async fn recv( transport: Arc, mut server_stdout: BufReader, client_tx: UnboundedSender<(LanguageServerId, jsonrpc::Call)>, ) { let mut recv_buffer = String::new(); loop { match Self::recv_server_message(&mut server_stdout, &mut recv_buffer, &transport.name) .await { Ok(msg) => { match transport .process_server_message(&client_tx, msg, &transport.name) .await { Ok(_) => {} Err(err) => { error!("{} err: <- {err:?}", transport.name); break; } }; } Err(err) => { if !matches!(err, Error::StreamClosed) { error!( "Exiting {} after unexpected error: {err:?}", &transport.name ); } // Close any outstanding requests. 
for (id, tx) in transport.pending_requests.lock().await.drain() { match tx.send(Err(Error::StreamClosed)).await { Ok(_) => (), Err(_) => { error!("Could not close request on a closed channel (id={:?})", id) } } } // Hack: inject a terminated notification so we trigger code that needs to happen after exit let notification = ServerMessage::Call(jsonrpc::Call::Notification(jsonrpc::Notification { jsonrpc: None, method: lsp::notification::Exit::METHOD.to_string(), params: jsonrpc::Params::None, })); match transport .process_server_message(&client_tx, notification, &transport.name) .await { Ok(_) => {} Err(err) => { error!("err: <- {:?}", err); } } break; } } } } async fn err(transport: Arc, mut server_stderr: BufReader) { let mut recv_buffer = String::new(); loop { match Self::recv_server_error(&mut server_stderr, &mut recv_buffer, &transport.name) .await { Ok(_) => {} Err(err) => { error!("{} err: <- {err:?}", transport.name); break; } } } } async fn send( transport: Arc, mut server_stdin: BufWriter, client_tx: UnboundedSender<(LanguageServerId, jsonrpc::Call)>, mut client_rx: UnboundedReceiver, initialize_notify: Arc, ) { let mut pending_messages: Vec = Vec::new(); let mut is_pending = true; // Determine if a message is allowed to be sent early fn is_initialize(payload: &Payload) -> bool { use lsp::{ notification::Initialized, request::{Initialize, Request}, }; match payload { Payload::Request { value: jsonrpc::MethodCall { method, .. }, .. } if method == Initialize::METHOD => true, Payload::Notification(jsonrpc::Notification { method, .. }) if method == Initialized::METHOD => { true } _ => false, } } fn is_shutdown(payload: &Payload) -> bool { use lsp::request::{Request, Shutdown}; matches!(payload, Payload::Request { value: jsonrpc::MethodCall { method, .. }, .. } if method == Shutdown::METHOD) } // TODO: events that use capabilities need to do the right thing loop { tokio::select! 
{ biased; _ = initialize_notify.notified() => { // TODO: notified is technically not cancellation safe // server successfully initialized is_pending = false; // Hack: inject an initialized notification so we trigger code that needs to happen after init let notification = ServerMessage::Call(jsonrpc::Call::Notification(jsonrpc::Notification { jsonrpc: None, method: lsp::notification::Initialized::METHOD.to_string(), params: jsonrpc::Params::None, })); let language_server_name = &transport.name; match transport.process_server_message(&client_tx, notification, language_server_name).await { Ok(_) => {} Err(err) => { error!("{language_server_name} err: <- {err:?}"); } } // drain the pending queue and send payloads to server for msg in pending_messages.drain(..) { log::info!("Draining pending message {:?}", msg); match transport.send_payload_to_server(&mut server_stdin, msg).await { Ok(_) => {} Err(err) => { error!("{language_server_name} err: <- {err:?}"); } } } } msg = client_rx.recv() => { if let Some(msg) = msg { if is_pending && is_shutdown(&msg) { log::info!("Language server not initialized, shutting down"); break; } else if is_pending && !is_initialize(&msg) { // ignore notifications if let Payload::Notification(_) = msg { continue; } log::info!("Language server not initialized, delaying request"); pending_messages.push(msg); } else { match transport.send_payload_to_server(&mut server_stdin, msg).await { Ok(_) => {} Err(err) => { error!("{} err: <- {err:?}", transport.name); } } } } else { // channel closed break; } } } } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-parsec/000077500000000000000000000000001500614314100221715ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-parsec/Cargo.toml000066400000000000000000000005251500614314100241230ustar00rootroot00000000000000[package] name = "helix-parsec" description = "Parser combinators for Helix" include = ["src/**/*", "README.md"] version.workspace = true authors.workspace 
= true edition.workspace = true license.workspace = true rust-version.workspace = true categories.workspace = true repository.workspace = true homepage.workspace = true [dependencies] helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-parsec/src/000077500000000000000000000000001500614314100227605ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-parsec/src/lib.rs000066400000000000000000000423511500614314100241010ustar00rootroot00000000000000//! Parser-combinator functions //! //! This module provides parsers and parser combinators which can be used //! together to build parsers by functional composition. // This module implements parser combinators following https://bodil.lol/parser-combinators/. // `sym` (trait implementation for `&'static str`), `map`, `pred` (filter), `one_or_more`, // `zero_or_more`, as well as the `Parser` trait originate mostly from that post. // The remaining parsers and parser combinators are either based on // https://github.com/archseer/snippets.nvim/blob/a583da6ef130d2a4888510afd8c4e5ffd62d0dce/lua/snippet/parser.lua#L5-L138 // or are novel. // When a parser matches the input successfully, it returns `Ok((next_input, some_value))` // where the type of the returned value depends on the parser. If the parser fails to match, // it returns `Err(input)`. type ParseResult<'a, Output> = Result<(&'a str, Output), &'a str>; /// A parser or parser-combinator. /// /// Parser-combinators compose multiple parsers together to parse input. /// For example, two basic parsers (`&'static str`s) may be combined with /// a parser-combinator like [or] to produce a new parser. 
/// /// ``` /// use helix_parsec::{or, Parser}; /// let foo = "foo"; // matches "foo" literally /// let bar = "bar"; // matches "bar" literally /// let foo_or_bar = or(foo, bar); // matches either "foo" or "bar" /// assert_eq!(Ok(("", "foo")), foo_or_bar.parse("foo")); /// assert_eq!(Ok(("", "bar")), foo_or_bar.parse("bar")); /// assert_eq!(Err("baz"), foo_or_bar.parse("baz")); /// ``` pub trait Parser<'a> { type Output; fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output>; } // Most parser-combinators are written as higher-order functions which take some // parser(s) as input and return a new parser: a function that takes input and returns // a parse result. The underlying implementation of [Parser::parse] for these functions // is simply application. #[doc(hidden)] impl<'a, F, T> Parser<'a> for F where F: Fn(&'a str) -> ParseResult<'a, T>, { type Output = T; fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> { self(input) } } /// A parser which matches the string literal exactly. /// /// This parser succeeds if the next characters in the input are equal to the given /// string literal. /// /// Note that [str::parse] interferes with calling [Parser::parse] on string literals /// directly; this trait implementation works when used within any parser combinator /// but does not work on its own. To call [Parser::parse] on a parser for a string /// literal, use the [token] parser. 
/// /// # Examples /// /// ``` /// use helix_parsec::{or, Parser}; /// let parser = or("foo", "bar"); /// assert_eq!(Ok(("", "foo")), parser.parse("foo")); /// assert_eq!(Ok(("", "bar")), parser.parse("bar")); /// assert_eq!(Err("baz"), parser.parse("baz")); /// ``` impl<'a> Parser<'a> for &'static str { type Output = &'a str; fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> { match input.get(0..self.len()) { Some(actual) if actual == *self => Ok((&input[self.len()..], &input[0..self.len()])), _ => Err(input), } } } // Parsers /// A parser which matches the given string literally. /// /// This function is a convenience for interpreting string literals as parsers /// and is only necessary to avoid conflict with [str::parse]. See the documentation /// for the `&'static str` implementation of [Parser]. /// /// # Examples /// /// ``` /// use helix_parsec::{token, Parser}; /// let parser = token("foo"); /// assert_eq!(Ok(("", "foo")), parser.parse("foo")); /// assert_eq!(Err("bar"), parser.parse("bar")); /// ``` pub fn token<'a>(literal: &'static str) -> impl Parser<'a, Output = &'a str> { literal } /// A parser which matches all values until the specified pattern is found. /// /// If the pattern is not found, this parser does not match. The input up to the /// character which returns `true` is returned but not that character itself. /// /// If the pattern function returns true on the first input character, this /// parser fails. 
/// /// # Examples /// /// ``` /// use helix_parsec::{take_until, Parser}; /// let parser = take_until(|c| c == '.'); /// assert_eq!(Ok((".bar", "foo")), parser.parse("foo.bar")); /// assert_eq!(Err(".foo"), parser.parse(".foo")); /// assert_eq!(Err("foo"), parser.parse("foo")); /// ``` pub fn take_until<'a, F>(pattern: F) -> impl Parser<'a, Output = &'a str> where F: Fn(char) -> bool, { move |input: &'a str| match input.find(&pattern) { Some(index) if index != 0 => Ok((&input[index..], &input[0..index])), _ => Err(input), } } /// A parser which matches all values until the specified pattern no longer match. /// /// This parser only ever fails if the input has a length of zero. /// /// # Examples /// /// ``` /// use helix_parsec::{take_while, Parser}; /// let parser = take_while(|c| c == '1'); /// assert_eq!(Ok(("2", "11")), parser.parse("112")); /// assert_eq!(Err("22"), parser.parse("22")); /// ``` pub fn take_while<'a, F>(pattern: F) -> impl Parser<'a, Output = &'a str> where F: Fn(char) -> bool, { move |input: &'a str| match input .char_indices() .take_while(|(_p, c)| pattern(*c)) .last() { Some((index, c)) => { let index = index + c.len_utf8(); Ok((&input[index..], &input[0..index])) } _ => Err(input), } } // Variadic parser combinators /// A parser combinator which matches a sequence of parsers in an all-or-nothing fashion. /// /// The returned value is a tuple containing the outputs of all parsers in order. Each /// parser in the sequence may be typed differently. /// /// # Examples /// /// ``` /// use helix_parsec::{seq, Parser}; /// let parser = seq!("<", "a", ">"); /// assert_eq!(Ok(("", ("<", "a", ">"))), parser.parse("")); /// assert_eq!(Err(""), parser.parse("")); /// ``` #[macro_export] macro_rules! seq { ($($parsers: expr),+ $(,)?) => { ($($parsers),+) } } // Seq is implemented using trait-implementations of Parser for various size tuples. // This allows sequences to be typed heterogeneously. macro_rules! 
seq_impl { ($($parser:ident),+) => { #[allow(non_snake_case)] impl<'a, $($parser),+> Parser<'a> for ($($parser),+) where $($parser: Parser<'a>),+ { type Output = ($($parser::Output),+); fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> { let ($($parser),+) = self; seq_body_impl!(input, input, $($parser),+ ; ) } } } } macro_rules! seq_body_impl { ($input:expr, $next_input:expr, $head:ident, $($tail:ident),+ ; $(,)? $($acc:ident),*) => { match $head.parse($next_input) { Ok((next_input, $head)) => seq_body_impl!($input, next_input, $($tail),+ ; $($acc),*, $head), Err(_) => Err($input), } }; ($input:expr, $next_input:expr, $last:ident ; $(,)? $($acc:ident),*) => { match $last.parse($next_input) { Ok((next_input, last)) => Ok((next_input, ($($acc),+, last))), Err(_) => Err($input), } } } seq_impl!(A, B); seq_impl!(A, B, C); seq_impl!(A, B, C, D); seq_impl!(A, B, C, D, E); seq_impl!(A, B, C, D, E, F); seq_impl!(A, B, C, D, E, F, G); seq_impl!(A, B, C, D, E, F, G, H); seq_impl!(A, B, C, D, E, F, G, H, I); seq_impl!(A, B, C, D, E, F, G, H, I, J); /// A parser combinator which chooses the first of the input parsers which matches /// successfully. /// /// All input parsers must have the same output type. This is a variadic form for [or]. /// /// # Examples /// /// ``` /// use helix_parsec::{choice, or, Parser}; /// let parser = choice!("foo", "bar", "baz"); /// assert_eq!(Ok(("", "foo")), parser.parse("foo")); /// assert_eq!(Ok(("", "bar")), parser.parse("bar")); /// assert_eq!(Err("quiz"), parser.parse("quiz")); /// ``` #[macro_export] macro_rules! choice { ($parser: expr $(,)?) => { $parser }; ($parser: expr, $($rest: expr),+ $(,)?) => { or($parser, choice!($($rest),+)) } } // Ordinary parser combinators /// A parser combinator which takes a parser as input and maps the output using the /// given transformation function. /// /// This corresponds to [Result::map]. The value is only mapped if the input parser /// matches against input. 
/// /// # Examples /// /// ``` /// use helix_parsec::{map, Parser}; /// let parser = map("123", |s| s.parse::().unwrap()); /// assert_eq!(Ok(("", 123)), parser.parse("123")); /// assert_eq!(Err("abc"), parser.parse("abc")); /// ``` pub fn map<'a, P, F, T>(parser: P, map_fn: F) -> impl Parser<'a, Output = T> where P: Parser<'a>, F: Fn(P::Output) -> T, { move |input| { parser .parse(input) .map(|(next_input, result)| (next_input, map_fn(result))) } } /// A parser combinator which succeeds if the given parser matches the input and /// the given `filter_map_fn` returns `Some`. /// /// # Examples /// /// ``` /// use helix_parsec::{filter_map, take_until, Parser}; /// let parser = filter_map(take_until(|c| c == '.'), |s| s.parse::().ok()); /// assert_eq!(Ok((".456", 123)), parser.parse("123.456")); /// assert_eq!(Err("abc.def"), parser.parse("abc.def")); /// ``` pub fn filter_map<'a, P, F, T>(parser: P, filter_map_fn: F) -> impl Parser<'a, Output = T> where P: Parser<'a>, F: Fn(P::Output) -> Option, { move |input| match parser.parse(input) { Ok((next_input, value)) => match filter_map_fn(value) { Some(value) => Ok((next_input, value)), None => Err(input), }, Err(_) => Err(input), } } /// A parser combinator which succeeds if the first given parser matches the input and /// the second given parse also matches. /// /// # Examples /// /// ``` /// use helix_parsec::{reparse_as, take_until, one_or_more, Parser}; /// let parser = reparse_as(take_until(|c| c == '/'), one_or_more("a")); /// assert_eq!(Ok(("/bb", vec!["a", "a"])), parser.parse("aa/bb")); /// ``` pub fn reparse_as<'a, P1, P2, T>(parser1: P1, parser2: P2) -> impl Parser<'a, Output = T> where P1: Parser<'a, Output = &'a str>, P2: Parser<'a, Output = T>, { filter_map(parser1, move |str| { parser2.parse(str).map(|(_, value)| value).ok() }) } /// A parser combinator which only matches the input when the predicate function /// returns true. 
///
/// # Examples
///
/// ```
/// use helix_parsec::{filter, take_until, Parser};
/// let parser = filter(take_until(|c| c == '.'), |s| s == &"123");
/// assert_eq!(Ok((".456", "123")), parser.parse("123.456"));
/// assert_eq!(Err("456.123"), parser.parse("456.123"));
/// ```
pub fn filter<'a, P, F, T>(parser: P, pred_fn: F) -> impl Parser<'a, Output = T>
where
    P: Parser<'a, Output = T>,
    F: Fn(&P::Output) -> bool,
{
    // Succeed only when the inner parser matches *and* the predicate accepts
    // its output; every other outcome fails at the original input position.
    move |input| match parser.parse(input) {
        Ok((rest, value)) if pred_fn(&value) => Ok((rest, value)),
        _ => Err(input),
    }
}

/// A parser combinator which matches either of the input parsers.
///
/// Both parsers must have the same output type. For a variadic form which
/// can take any number of parsers, use `choice!`.
///
/// # Examples
///
/// ```
/// use helix_parsec::{or, Parser};
/// let parser = or("foo", "bar");
/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
/// assert_eq!(Ok(("", "bar")), parser.parse("bar"));
/// assert_eq!(Err("baz"), parser.parse("baz"));
/// ```
pub fn or<'a, P1, P2, T>(parser1: P1, parser2: P2) -> impl Parser<'a, Output = T>
where
    P1: Parser<'a, Output = T>,
    P2: Parser<'a, Output = T>,
{
    // Try the first parser; fall back to the second on failure. Both attempts
    // start from the same input position.
    move |input| parser1.parse(input).or_else(|_| parser2.parse(input))
}

/// A parser combinator which attempts to match the given parser, returning a
/// `None` output value if the parser does not match.
///
/// The parser produced with this combinator always succeeds. If the given parser
/// succeeds, `Some(value)` is returned where `value` is the output of the given
/// parser. Otherwise, `None`.
///
/// # Examples
///
/// ```
/// use helix_parsec::{optional, Parser};
/// let parser = optional("foo");
/// assert_eq!(Ok(("bar", Some("foo"))), parser.parse("foobar"));
/// assert_eq!(Ok(("bar", None)), parser.parse("bar"));
/// ```
pub fn optional<'a, P, T>(parser: P) -> impl Parser<'a, Output = Option<T>>
where
    P: Parser<'a, Output = T>,
{
    move |input| match parser.parse(input) {
        Ok((next_input, value)) => Ok((next_input, Some(value))),
        // Failure is converted into a success with `None`, consuming nothing.
        Err(_) => Ok((input, None)),
    }
}

/// A parser combinator which runs the given parsers in sequence and returns the
/// value of `left` if both are matched.
///
/// This is useful for two-element sequences in which you only want the output
/// value of the `left` parser.
///
/// # Examples
///
/// ```
/// use helix_parsec::{left, Parser};
/// let parser = left("foo", "bar");
/// assert_eq!(Ok(("", "foo")), parser.parse("foobar"));
/// ```
pub fn left<'a, L, R, T>(left: L, right: R) -> impl Parser<'a, Output = T>
where
    L: Parser<'a, Output = T>,
    R: Parser<'a>,
{
    map(seq!(left, right), |(left_value, _)| left_value)
}

/// A parser combinator which runs the given parsers in sequence and returns the
/// value of `right` if both are matched.
///
/// This is useful for two-element sequences in which you only want the output
/// value of the `right` parser.
///
/// # Examples
///
/// ```
/// use helix_parsec::{right, Parser};
/// let parser = right("foo", "bar");
/// assert_eq!(Ok(("", "bar")), parser.parse("foobar"));
/// ```
pub fn right<'a, L, R, T>(left: L, right: R) -> impl Parser<'a, Output = T>
where
    L: Parser<'a>,
    R: Parser<'a, Output = T>,
{
    map(seq!(left, right), |(_, right_value)| right_value)
}

/// A parser combinator which matches the given parser against the input zero or
/// more times.
///
/// This parser always succeeds and returns the empty Vec when it matched zero
/// times.
///
/// # Examples
///
/// ```
/// use helix_parsec::{zero_or_more, Parser};
/// let parser = zero_or_more("a");
/// assert_eq!(Ok(("", vec![])), parser.parse(""));
/// assert_eq!(Ok(("", vec!["a"])), parser.parse("a"));
/// assert_eq!(Ok(("", vec!["a", "a"])), parser.parse("aa"));
/// assert_eq!(Ok(("bb", vec![])), parser.parse("bb"));
/// ```
pub fn zero_or_more<'a, P, T>(parser: P) -> impl Parser<'a, Output = Vec<T>>
where
    P: Parser<'a, Output = T>,
{
    // Wrapping in `non_empty` guarantees each iteration consumes input, so the
    // loop below cannot spin forever on a parser that matches the empty string.
    let parser = non_empty(parser);
    move |mut input| {
        let mut values = Vec::new();

        while let Ok((next_input, value)) = parser.parse(input) {
            input = next_input;
            values.push(value);
        }

        Ok((input, values))
    }
}

/// A parser combinator which matches the given parser against the input one or
/// more times.
///
/// This parser combinator acts the same as [zero_or_more] but must match at
/// least once.
///
/// # Examples
///
/// ```
/// use helix_parsec::{one_or_more, Parser};
/// let parser = one_or_more("a");
/// assert_eq!(Err(""), parser.parse(""));
/// assert_eq!(Ok(("", vec!["a"])), parser.parse("a"));
/// assert_eq!(Ok(("", vec!["a", "a"])), parser.parse("aa"));
/// assert_eq!(Err("bb"), parser.parse("bb"));
/// ```
pub fn one_or_more<'a, P, T>(parser: P) -> impl Parser<'a, Output = Vec<T>>
where
    P: Parser<'a, Output = T>,
{
    // See `zero_or_more`: `non_empty` prevents infinite loops on empty matches.
    let parser = non_empty(parser);
    move |mut input| {
        let mut values = Vec::new();

        // The first match is mandatory; its failure fails the whole parser.
        match parser.parse(input) {
            Ok((next_input, value)) => {
                input = next_input;
                values.push(value);
            }
            Err(err) => return Err(err),
        }

        while let Ok((next_input, value)) = parser.parse(input) {
            input = next_input;
            values.push(value);
        }

        Ok((input, values))
    }
}

/// A parser combinator which matches one or more instances of the given parser
/// interspersed with the separator parser.
///
/// Output values of the separator parser are discarded.
///
/// This is typically used to parse function arguments or list items.
/// /// # Examples /// /// ```rust /// use helix_parsec::{sep, Parser}; /// let parser = sep("a", ","); /// assert_eq!(Ok(("", vec!["a", "a", "a"])), parser.parse("a,a,a")); /// ``` pub fn sep<'a, P, S, T>(parser: P, separator: S) -> impl Parser<'a, Output = Vec> where P: Parser<'a, Output = T>, S: Parser<'a>, { move |mut input| { let mut values = Vec::new(); match parser.parse(input) { Ok((next_input, value)) => { input = next_input; values.push(value); } Err(err) => return Err(err), } loop { match separator.parse(input) { Ok((next_input, _)) => input = next_input, Err(_) => break, } match parser.parse(input) { Ok((next_input, value)) => { input = next_input; values.push(value); } Err(_) => break, } } Ok((input, values)) } } pub fn non_empty<'a, T>(p: impl Parser<'a, Output = T>) -> impl Parser<'a, Output = T> { move |input| { let (new_input, res) = p.parse(input)?; if new_input.len() == input.len() { Err(input) } else { Ok((new_input, res)) } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-stdx/000077500000000000000000000000001500614314100216765ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-stdx/Cargo.toml000066400000000000000000000015551500614314100236340ustar00rootroot00000000000000[package] name = "helix-stdx" description = "Standard library extensions" include = ["src/**/*", "README.md"] version.workspace = true authors.workspace = true edition.workspace = true license.workspace = true rust-version.workspace = true categories.workspace = true repository.workspace = true homepage.workspace = true [dependencies] dunce = "1.0" etcetera = "0.10" ropey.workspace = true which = "7.0" regex-cursor = "0.1.5" bitflags.workspace = true once_cell = "1.21" regex-automata = "0.4.9" unicode-segmentation.workspace = true [target.'cfg(windows)'.dependencies] windows-sys = { version = "0.59", features = ["Win32_Foundation", "Win32_Security", "Win32_Security_Authorization", "Win32_Storage_FileSystem", "Win32_System_Threading"] } 
[target.'cfg(unix)'.dependencies]
rustix = { version = "1.0", features = ["fs"] }

[dev-dependencies]
tempfile.workspace = true
helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-stdx/src/000077500000000000000000000000001500614314100224655ustar00rootroot00000000000000
helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-stdx/src/env.rs000066400000000000000000000163101500614314100236240ustar00rootroot00000000000000
use std::{
    borrow::Cow,
    ffi::{OsStr, OsString},
    path::{Path, PathBuf},
    sync::RwLock,
};

use once_cell::sync::Lazy;

// We keep the CWD as a static so that we can access it in places where we don't have access to the Editor
static CWD: RwLock<Option<PathBuf>> = RwLock::new(None);

// Get the current working directory.
// This information is managed internally as the call to std::env::current_dir
// might fail if the cwd has been deleted.
pub fn current_working_dir() -> PathBuf {
    if let Some(path) = &*CWD.read().unwrap() {
        return path.clone();
    }

    // implementation of crossplatform pwd -L
    // we want pwd -L so that symlinked directories are handled correctly
    let mut cwd = std::env::current_dir().expect("Couldn't determine current working directory");

    let pwd = std::env::var_os("PWD");
    #[cfg(windows)]
    let pwd = pwd.or_else(|| std::env::var_os("CD"));

    if let Some(pwd) = pwd.map(PathBuf::from) {
        // Only trust $PWD when it actually refers to the same directory.
        if pwd.canonicalize().ok().as_ref() == Some(&cwd) {
            cwd = pwd;
        }
    }
    let mut dst = CWD.write().unwrap();
    *dst = Some(cwd.clone());

    cwd
}

/// Sets the process working directory (canonicalized) and caches it, returning
/// the previously cached working directory, if any.
pub fn set_current_working_dir(path: impl AsRef<Path>) -> std::io::Result<Option<PathBuf>> {
    let path = crate::path::canonicalize(path);
    std::env::set_current_dir(&path)?;

    let mut cwd = CWD.write().unwrap();
    Ok(cwd.replace(path))
}

pub fn env_var_is_set(env_var_name: &str) -> bool {
    std::env::var_os(env_var_name).is_some()
}

pub fn binary_exists<T: AsRef<OsStr>>(binary_name: T) -> bool {
    which::which(binary_name).is_ok()
}

/// Resolves `binary_name` on `$PATH`, wrapping failures in an error that
/// remembers the command name for display.
pub fn which<T: AsRef<OsStr>>(
    binary_name: T,
) -> Result<PathBuf, ExecutableNotFoundError> {
    let binary_name = binary_name.as_ref();
    which::which(binary_name).map_err(|err| ExecutableNotFoundError {
        command: binary_name.to_string_lossy().into_owned(),
        inner: err,
    })
}

/// Finds the byte position of the `}` that closes the brace expression
/// starting at the beginning of `src`, skipping over nested `{...}` pairs.
fn find_brace_end(src: &[u8]) -> Option<usize> {
    use regex_automata::meta::Regex;

    static REGEX: Lazy<Regex> = Lazy::new(|| Regex::builder().build("[{}]").unwrap());
    let mut depth = 0;
    for mat in REGEX.find_iter(src) {
        let pos = mat.start();
        match src[pos] {
            b'{' => depth += 1,
            // depth == 0 means this `}` closes the outermost expression.
            b'}' if depth == 0 => return Some(pos),
            b'}' => depth -= 1,
            _ => unreachable!(),
        }
    }
    None
}

/// Shared implementation of environment-variable expansion; `resolve` supplies
/// the value for a variable name (tests inject a fake resolver here).
fn expand_impl(src: &OsStr, mut resolve: impl FnMut(&OsStr) -> Option<OsString>) -> Cow<'_, OsStr> {
    use regex_automata::meta::Regex;

    // One pattern per supported POSIX form; the pattern index (`pattern_id`)
    // below tells us which form matched.
    static REGEX: Lazy<Regex> = Lazy::new(|| {
        Regex::builder()
            .build_many(&[
                r"\$\{([^\}:]+):-",
                r"\$\{([^\}:]+):=",
                r"\$\{([^\}-]+)-",
                r"\$\{([^\}=]+)=",
                r"\$\{([^\}]+)",
                r"\$(\w+)",
            ])
            .unwrap()
    });
    let bytes = src.as_encoded_bytes();
    let mut res = Vec::with_capacity(bytes.len());
    let mut pos = 0;
    for captures in REGEX.captures_iter(bytes) {
        let mat = captures.get_match().unwrap();
        let pattern_id = mat.pattern().as_usize();
        let mut range = mat.range();
        // A pattern may match multiple times on a single variable, for example `${HOME:-$HOME}`:
        // `${HOME:-` matches and also the default value (`$HOME`). Skip past any variables which
        // have already been expanded.
        if range.start < pos {
            continue;
        }
        let var = &bytes[captures.get_group(1).unwrap().range()];
        let default = if pattern_id != 5 {
            // Braced forms carry a default value up to the matching `}`.
            let Some(bracket_pos) = find_brace_end(&bytes[range.end..]) else {
                break;
            };
            let default = &bytes[range.end..range.end + bracket_pos];
            range.end += bracket_pos + 1;
            default
        } else {
            &[]
        };
        // safety: this is a codepoint aligned substring of an osstr (always valid)
        let var = unsafe { OsStr::from_encoded_bytes_unchecked(var) };
        let expansion = resolve(var);
        let expansion = match &expansion {
            Some(val) => {
                // Patterns 0 and 1 (`:-`/`:=`) also substitute the default for
                // *empty* values, not just unset ones.
                if val.is_empty() && pattern_id < 2 {
                    default
                } else {
                    val.as_encoded_bytes()
                }
            }
            None => default,
        };
        res.extend_from_slice(&bytes[pos..range.start]);
        pos = range.end;
        res.extend_from_slice(expansion);
    }
    if pos == 0 {
        // No expansion happened: borrow the input unchanged.
        src.into()
    } else {
        res.extend_from_slice(&bytes[pos..]);
        // safety: this is a composition of valid osstr (and codepoint aligned slices which are also valid)
        unsafe { OsString::from_encoded_bytes_unchecked(res) }.into()
    }
}

/// Performs substitution of environment variables.
Supports the following (POSIX) syntax: /// /// * `$`, `${}` /// * `${:-}`, `${-}` /// * `${:=}`, `${=default}` /// pub fn expand + ?Sized>(src: &S) -> Cow { expand_impl(src.as_ref(), |var| std::env::var_os(var)) } #[derive(Debug)] pub struct ExecutableNotFoundError { command: String, inner: which::Error, } impl std::fmt::Display for ExecutableNotFoundError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "command '{}' not found: {}", self.command, self.inner) } } impl std::error::Error for ExecutableNotFoundError {} #[cfg(test)] mod tests { use std::ffi::{OsStr, OsString}; use super::{current_working_dir, expand_impl, set_current_working_dir}; #[test] fn current_dir_is_set() { let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap(); let cwd = current_working_dir(); assert_ne!(cwd, new_path); set_current_working_dir(&new_path).expect("Couldn't set new path"); let cwd = current_working_dir(); assert_eq!(cwd, new_path); } macro_rules! assert_env_expand { ($env: expr, $lhs: expr, $rhs: expr) => { assert_eq!(&*expand_impl($lhs.as_ref(), $env), OsStr::new($rhs)); }; } /// paths that should work on all platforms #[test] fn test_env_expand() { let env = |var: &OsStr| -> Option { match var.to_str().unwrap() { "FOO" => Some("foo".into()), "EMPTY" => Some("".into()), _ => None, } }; assert_env_expand!(env, "pass_trough", "pass_trough"); assert_env_expand!(env, "$FOO", "foo"); assert_env_expand!(env, "bar/$FOO/baz", "bar/foo/baz"); assert_env_expand!(env, "bar/${FOO}/baz", "bar/foo/baz"); assert_env_expand!(env, "baz/${BAR:-bar}/foo", "baz/bar/foo"); assert_env_expand!(env, "baz/${FOO:-$FOO}/foo", "baz/foo/foo"); assert_env_expand!(env, "baz/${BAR:=bar}/foo", "baz/bar/foo"); assert_env_expand!(env, "baz/${BAR-bar}/foo", "baz/bar/foo"); assert_env_expand!(env, "baz/${BAR=bar}/foo", "baz/bar/foo"); assert_env_expand!(env, "baz/${EMPTY:-bar}/foo", "baz/bar/foo"); assert_env_expand!(env, "baz/${EMPTY:=bar}/foo", "baz/bar/foo"); 
assert_env_expand!(env, "baz/${EMPTY-bar}/foo", "baz//foo"); assert_env_expand!(env, "baz/${EMPTY=bar}/foo", "baz//foo"); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-stdx/src/faccess.rs000066400000000000000000000343341500614314100244510ustar00rootroot00000000000000//! From use std::io; use std::path::Path; use bitflags::bitflags; // Licensed under MIT from faccess bitflags! { /// Access mode flags for `access` function to test for. pub struct AccessMode: u8 { /// Path exists const EXISTS = 0b0001; /// Path can likely be read const READ = 0b0010; /// Path can likely be written to const WRITE = 0b0100; /// Path can likely be executed const EXECUTE = 0b1000; } } #[cfg(unix)] mod imp { use super::*; use rustix::fs::Access; use std::os::unix::fs::{MetadataExt, PermissionsExt}; pub fn access(p: &Path, mode: AccessMode) -> io::Result<()> { let mut imode = Access::empty(); if mode.contains(AccessMode::EXISTS) { imode |= Access::EXISTS; } if mode.contains(AccessMode::READ) { imode |= Access::READ_OK; } if mode.contains(AccessMode::WRITE) { imode |= Access::WRITE_OK; } if mode.contains(AccessMode::EXECUTE) { imode |= Access::EXEC_OK; } rustix::fs::access(p, imode)?; Ok(()) } fn chown(p: &Path, uid: Option, gid: Option) -> io::Result<()> { let uid = uid.map(rustix::fs::Uid::from_raw); let gid = gid.map(rustix::fs::Gid::from_raw); rustix::fs::chown(p, uid, gid)?; Ok(()) } pub fn copy_metadata(from: &Path, to: &Path) -> io::Result<()> { let from_meta = std::fs::metadata(from)?; let to_meta = std::fs::metadata(to)?; let from_gid = from_meta.gid(); let to_gid = to_meta.gid(); let mut perms = from_meta.permissions(); perms.set_mode(perms.mode() & 0o0777); if from_gid != to_gid && chown(to, None, Some(from_gid)).is_err() { let new_perms = (perms.mode() & 0o0707) | ((perms.mode() & 0o07) << 3); perms.set_mode(new_perms); } std::fs::set_permissions(to, perms)?; Ok(()) } pub fn hardlink_count(p: &Path) -> std::io::Result { let metadata = p.metadata()?; 
Ok(metadata.nlink()) } } // Licensed under MIT from faccess except for `chown`, `copy_metadata` and `is_acl_inherited` #[cfg(windows)] mod imp { use windows_sys::Win32::Foundation::{CloseHandle, LocalFree, ERROR_SUCCESS, HANDLE}; use windows_sys::Win32::Security::Authorization::{ GetNamedSecurityInfoW, SetNamedSecurityInfoW, SE_FILE_OBJECT, }; use windows_sys::Win32::Security::{ AccessCheck, AclSizeInformation, GetAce, GetAclInformation, GetSidIdentifierAuthority, ImpersonateSelf, IsValidAcl, IsValidSid, MapGenericMask, RevertToSelf, SecurityImpersonation, ACCESS_ALLOWED_CALLBACK_ACE, ACL, ACL_SIZE_INFORMATION, DACL_SECURITY_INFORMATION, GENERIC_MAPPING, GROUP_SECURITY_INFORMATION, INHERITED_ACE, LABEL_SECURITY_INFORMATION, OBJECT_SECURITY_INFORMATION, OWNER_SECURITY_INFORMATION, PRIVILEGE_SET, PROTECTED_DACL_SECURITY_INFORMATION, PSECURITY_DESCRIPTOR, PSID, SID_IDENTIFIER_AUTHORITY, TOKEN_DUPLICATE, TOKEN_QUERY, }; use windows_sys::Win32::Storage::FileSystem::{ GetFileInformationByHandle, BY_HANDLE_FILE_INFORMATION, FILE_ACCESS_RIGHTS, FILE_ALL_ACCESS, FILE_GENERIC_EXECUTE, FILE_GENERIC_READ, FILE_GENERIC_WRITE, }; use windows_sys::Win32::System::Threading::{GetCurrentThread, OpenThreadToken}; use super::*; use std::ffi::c_void; use std::os::windows::{ffi::OsStrExt, fs::OpenOptionsExt, io::AsRawHandle}; struct SecurityDescriptor { sd: PSECURITY_DESCRIPTOR, owner: PSID, group: PSID, dacl: *mut ACL, } impl Drop for SecurityDescriptor { fn drop(&mut self) { if !self.sd.is_null() { unsafe { LocalFree(self.sd); } } } } impl SecurityDescriptor { fn for_path(p: &Path) -> io::Result { let path = std::fs::canonicalize(p)?; let pathos = path.into_os_string(); let mut pathw: Vec = Vec::with_capacity(pathos.len() + 1); pathw.extend(pathos.encode_wide()); pathw.push(0); let mut sd = std::ptr::null_mut(); let mut owner = std::ptr::null_mut(); let mut group = std::ptr::null_mut(); let mut dacl = std::ptr::null_mut(); let err = unsafe { GetNamedSecurityInfoW( pathw.as_ptr(), 
SE_FILE_OBJECT, OWNER_SECURITY_INFORMATION | GROUP_SECURITY_INFORMATION | DACL_SECURITY_INFORMATION | LABEL_SECURITY_INFORMATION, &mut owner, &mut group, &mut dacl, std::ptr::null_mut(), &mut sd, ) }; if err == ERROR_SUCCESS { Ok(SecurityDescriptor { sd, owner, group, dacl, }) } else { Err(io::Error::last_os_error()) } } fn is_acl_inherited(&self) -> bool { let mut acl_info: ACL_SIZE_INFORMATION = unsafe { ::core::mem::zeroed() }; let acl_info_ptr: *mut c_void = &mut acl_info as *mut _ as *mut c_void; let mut ace: ACCESS_ALLOWED_CALLBACK_ACE = unsafe { ::core::mem::zeroed() }; unsafe { GetAclInformation( self.dacl, acl_info_ptr, std::mem::size_of_val(&acl_info) as u32, AclSizeInformation, ) }; for i in 0..acl_info.AceCount { let mut ptr = &mut ace as *mut _ as *mut c_void; unsafe { GetAce(self.dacl, i, &mut ptr) }; if (ace.Header.AceFlags as u32 & INHERITED_ACE) != 0 { return true; } } false } fn descriptor(&self) -> &PSECURITY_DESCRIPTOR { &self.sd } fn owner(&self) -> &PSID { &self.owner } } struct ThreadToken(HANDLE); impl Drop for ThreadToken { fn drop(&mut self) { unsafe { CloseHandle(self.0); } } } impl ThreadToken { fn new() -> io::Result { unsafe { if ImpersonateSelf(SecurityImpersonation) == 0 { return Err(io::Error::last_os_error()); } let token: *mut HANDLE = std::ptr::null_mut(); let err = OpenThreadToken(GetCurrentThread(), TOKEN_DUPLICATE | TOKEN_QUERY, 0, token); RevertToSelf(); if err == 0 { return Err(io::Error::last_os_error()); } Ok(Self(*token)) } } fn as_handle(&self) -> &HANDLE { &self.0 } } // Based roughly on Tcl's NativeAccess() // https://github.com/tcltk/tcl/blob/2ee77587e4dc2150deb06b48f69db948b4ab0584/win/tclWinFile.c fn eaccess(p: &Path, mut mode: FILE_ACCESS_RIGHTS) -> io::Result<()> { let md = p.metadata()?; if !md.is_dir() { // Read Only is ignored for directories if mode & FILE_GENERIC_WRITE == FILE_GENERIC_WRITE && md.permissions().readonly() { return Err(io::Error::new( io::ErrorKind::PermissionDenied, "File is read only", )); } 
// If it doesn't have the correct extension it isn't executable if mode & FILE_GENERIC_EXECUTE == FILE_GENERIC_EXECUTE { if let Some(ext) = p.extension().and_then(|s| s.to_str()) { match ext { "exe" | "com" | "bat" | "cmd" => (), _ => { return Err(io::Error::new( io::ErrorKind::InvalidData, "File not executable", )) } } } } return std::fs::OpenOptions::new() .access_mode(mode) .open(p) .map(|_| ()); } let sd = SecurityDescriptor::for_path(p)?; // Unmapped Samba users are assigned a top level authority of 22 // ACL tests are likely to be misleading const SAMBA_UNMAPPED: SID_IDENTIFIER_AUTHORITY = SID_IDENTIFIER_AUTHORITY { Value: [0, 0, 0, 0, 0, 22], }; unsafe { let owner = sd.owner(); if IsValidSid(*owner) != 0 && (*GetSidIdentifierAuthority(*owner)).Value == SAMBA_UNMAPPED.Value { return Ok(()); } } let token = ThreadToken::new()?; let mut privileges: PRIVILEGE_SET = unsafe { std::mem::zeroed() }; let mut granted_access: u32 = 0; let mut privileges_length = std::mem::size_of::() as u32; let mut result = 0; let mapping = GENERIC_MAPPING { GenericRead: FILE_GENERIC_READ, GenericWrite: FILE_GENERIC_WRITE, GenericExecute: FILE_GENERIC_EXECUTE, GenericAll: FILE_ALL_ACCESS, }; unsafe { MapGenericMask(&mut mode, &mapping) }; if unsafe { AccessCheck( *sd.descriptor(), *token.as_handle(), mode, &mapping, &mut privileges, &mut privileges_length, &mut granted_access, &mut result, ) } != 0 { if result == 0 { Err(io::Error::new( io::ErrorKind::PermissionDenied, "Permission Denied", )) } else { Ok(()) } } else { Err(io::Error::last_os_error()) } } pub fn access(p: &Path, mode: AccessMode) -> io::Result<()> { let mut imode = 0; if mode.contains(AccessMode::READ) { imode |= FILE_GENERIC_READ; } if mode.contains(AccessMode::WRITE) { imode |= FILE_GENERIC_WRITE; } if mode.contains(AccessMode::EXECUTE) { imode |= FILE_GENERIC_EXECUTE; } if imode == 0 { if p.exists() { Ok(()) } else { Err(io::Error::new(io::ErrorKind::NotFound, "Not Found")) } } else { eaccess(p, imode) } } fn 
chown(p: &Path, sd: SecurityDescriptor) -> io::Result<()> { let path = std::fs::canonicalize(p)?; let pathos = path.as_os_str(); let mut pathw = Vec::with_capacity(pathos.len() + 1); pathw.extend(pathos.encode_wide()); pathw.push(0); let mut owner = std::ptr::null_mut(); let mut group = std::ptr::null_mut(); let mut dacl = std::ptr::null(); let mut si = OBJECT_SECURITY_INFORMATION::default(); if unsafe { IsValidSid(sd.owner) } != 0 { si |= OWNER_SECURITY_INFORMATION; owner = sd.owner; } if unsafe { IsValidSid(sd.group) } != 0 { si |= GROUP_SECURITY_INFORMATION; group = sd.group; } if unsafe { IsValidAcl(sd.dacl) } != 0 { si |= DACL_SECURITY_INFORMATION; if !sd.is_acl_inherited() { si |= PROTECTED_DACL_SECURITY_INFORMATION; } dacl = sd.dacl as *const _; } let err = unsafe { SetNamedSecurityInfoW( pathw.as_ptr(), SE_FILE_OBJECT, si, owner, group, dacl, std::ptr::null(), ) }; if err == ERROR_SUCCESS { Ok(()) } else { Err(io::Error::last_os_error()) } } pub fn copy_metadata(from: &Path, to: &Path) -> io::Result<()> { let sd = SecurityDescriptor::for_path(from)?; chown(to, sd)?; let meta = std::fs::metadata(from)?; let perms = meta.permissions(); std::fs::set_permissions(to, perms)?; Ok(()) } pub fn hardlink_count(p: &Path) -> std::io::Result { let file = std::fs::File::open(p)?; let handle = file.as_raw_handle(); let mut info: BY_HANDLE_FILE_INFORMATION = unsafe { std::mem::zeroed() }; if unsafe { GetFileInformationByHandle(handle, &mut info) } == 0 { Err(std::io::Error::last_os_error()) } else { Ok(info.nNumberOfLinks as u64) } } } // Licensed under MIT from faccess except for `copy_metadata` #[cfg(not(any(unix, windows)))] mod imp { use super::*; pub fn access(p: &Path, mode: AccessMode) -> io::Result<()> { if mode.contains(AccessMode::WRITE) { if std::fs::metadata(p)?.permissions().readonly() { return Err(io::Error::new( io::ErrorKind::PermissionDenied, "Path is read only", )); } else { return Ok(()); } } if p.exists() { Ok(()) } else { 
Err(io::Error::new(io::ErrorKind::NotFound, "Path not found")) } } pub fn copy_metadata(from: &path, to: &Path) -> io::Result<()> { let meta = std::fs::metadata(from)?; let perms = meta.permissions(); std::fs::set_permissions(to, perms)?; Ok(()) } } pub fn readonly(p: &Path) -> bool { match imp::access(p, AccessMode::WRITE) { Ok(_) => false, Err(err) if err.kind() == std::io::ErrorKind::NotFound => false, Err(_) => true, } } pub fn copy_metadata(from: &Path, to: &Path) -> io::Result<()> { imp::copy_metadata(from, to) } pub fn hardlink_count(p: &Path) -> io::Result { imp::hardlink_count(p) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-stdx/src/lib.rs000066400000000000000000000001401500614314100235740ustar00rootroot00000000000000pub mod env; pub mod faccess; pub mod path; pub mod range; pub mod rope; pub use range::Range; helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-stdx/src/path.rs000066400000000000000000000364711500614314100240020ustar00rootroot00000000000000pub use etcetera::home_dir; use once_cell::sync::Lazy; use regex_cursor::{engines::meta::Regex, Input}; use ropey::RopeSlice; use std::{ borrow::Cow, ffi::OsString, ops::Range, path::{Component, Path, PathBuf, MAIN_SEPARATOR_STR}, }; use crate::env::current_working_dir; /// Replaces users home directory from `path` with tilde `~` if the directory /// is available, otherwise returns the path unchanged. pub fn fold_home_dir<'a, P>(path: P) -> Cow<'a, Path> where P: Into>, { let path = path.into(); if let Ok(home) = home_dir() { if let Ok(stripped) = path.strip_prefix(&home) { let mut path = OsString::with_capacity(2 + stripped.as_os_str().len()); path.push("~"); path.push(MAIN_SEPARATOR_STR); path.push(stripped); return Cow::Owned(PathBuf::from(path)); } } path } /// Expands tilde `~` into users home directory if available, otherwise returns the path /// unchanged. /// /// The tilde will only be expanded when present as the first component of the path /// and only slash follows it. 
pub fn expand_tilde<'a, P>(path: P) -> Cow<'a, Path> where P: Into>, { let path = path.into(); let mut components = path.components(); if let Some(Component::Normal(c)) = components.next() { if c == "~" { if let Ok(mut buf) = home_dir() { buf.push(components); return Cow::Owned(buf); } } } path } /// Normalize a path without resolving symlinks. // Strategy: start from the first component and move up. Canonicalize previous path, // join component, canonicalize new path, strip prefix and join to the final result. pub fn normalize(path: impl AsRef) -> PathBuf { let mut components = path.as_ref().components().peekable(); let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().copied() { components.next(); PathBuf::from(c.as_os_str()) } else { PathBuf::new() }; for component in components { match component { Component::Prefix(..) => unreachable!(), Component::RootDir => { ret.push(component.as_os_str()); } Component::CurDir => {} #[cfg(not(windows))] Component::ParentDir => { ret.pop(); } #[cfg(windows)] Component::ParentDir => { if let Some(head) = ret.components().next_back() { match head { Component::Prefix(_) | Component::RootDir => {} Component::CurDir => unreachable!(), // If we left previous component as ".." it means we met a symlink before and we can't pop path. 
Component::ParentDir => { ret.push(".."); } Component::Normal(_) => { if ret.is_symlink() { ret.push(".."); } else { ret.pop(); } } } } } #[cfg(not(windows))] Component::Normal(c) => { ret.push(c); } #[cfg(windows)] Component::Normal(c) => 'normal: { use std::fs::canonicalize; let new_path = ret.join(c); if new_path.is_symlink() { ret = new_path; break 'normal; } let (can_new, can_old) = (canonicalize(&new_path), canonicalize(&ret)); match (can_new, can_old) { (Ok(can_new), Ok(can_old)) => { let striped = can_new.strip_prefix(can_old); ret.push(striped.unwrap_or_else(|_| c.as_ref())); } _ => ret.push(c), } } } } dunce::simplified(&ret).to_path_buf() } /// Returns the canonical, absolute form of a path with all intermediate components normalized. /// /// This function is used instead of [`std::fs::canonicalize`] because we don't want to verify /// here if the path exists, just normalize it's components. pub fn canonicalize(path: impl AsRef) -> PathBuf { let path = expand_tilde(path.as_ref()); let path = if path.is_relative() { Cow::Owned(current_working_dir().join(path)) } else { path }; normalize(path) } pub fn get_relative_path<'a, P>(path: P) -> Cow<'a, Path> where P: Into>, { let path = path.into(); if path.is_absolute() { let cwdir = normalize(current_working_dir()); if let Ok(stripped) = normalize(&path).strip_prefix(cwdir) { return Cow::Owned(PathBuf::from(stripped)); } return fold_home_dir(path); } path } /// Returns a truncated filepath where the basepart of the path is reduced to the first /// char of the folder and the whole filename appended. /// /// Also strip the current working directory from the beginning of the path. /// Note that this function does not check if the truncated path is unambiguous. 
/// /// ``` /// use helix_stdx::path::get_truncated_path; /// use std::path::Path; /// /// assert_eq!( /// get_truncated_path("/home/cnorris/documents/jokes.txt").as_path(), /// Path::new("/h/c/d/jokes.txt") /// ); /// assert_eq!( /// get_truncated_path("jokes.txt").as_path(), /// Path::new("jokes.txt") /// ); /// assert_eq!( /// get_truncated_path("/jokes.txt").as_path(), /// Path::new("/jokes.txt") /// ); /// assert_eq!( /// get_truncated_path("/h/c/d/jokes.txt").as_path(), /// Path::new("/h/c/d/jokes.txt") /// ); /// assert_eq!(get_truncated_path("").as_path(), Path::new("")); /// ``` /// pub fn get_truncated_path(path: impl AsRef) -> PathBuf { let cwd = current_working_dir(); let path = path.as_ref(); let path = path.strip_prefix(cwd).unwrap_or(path); let file = path.file_name().unwrap_or_default(); let base = path.parent().unwrap_or_else(|| Path::new("")); let mut ret = PathBuf::with_capacity(file.len()); // A char can't be directly pushed to a PathBuf let mut first_char_buffer = String::new(); for d in base { let Some(first_char) = d.to_string_lossy().chars().next() else { break; }; first_char_buffer.push(first_char); ret.push(&first_char_buffer); first_char_buffer.clear(); } ret.push(file); ret } fn path_component_regex(windows: bool) -> String { // TODO: support backslash path escape on windows (when using git bash for example) let space_escape = if windows { r"[\^`]\s" } else { r"[\\]\s" }; // partially baesd on what's allowed in an url but with some care to avoid // false positives (like any kind of brackets or quotes) r"[\w@.\-+#$%?!,;~&]|".to_owned() + space_escape } /// Regex for delimited environment captures like `${HOME}`. 
fn braced_env_regex(windows: bool) -> String { r"\$\{(?:".to_owned() + &path_component_regex(windows) + r"|[/:=])+\}" } fn compile_path_regex( prefix: &str, postfix: &str, match_single_file: bool, windows: bool, ) -> Regex { let first_component = format!( "(?:{}|(?:{}))", braced_env_regex(windows), path_component_regex(windows) ); // For all components except the first we allow an equals so that `foo=/ // bar/baz` does not include foo. This is primarily intended for url queries // (where an equals is never in the first component) let component = format!("(?:{first_component}|=)"); let sep = if windows { r"[/\\]" } else { "/" }; let url_prefix = r"[\w+\-.]+://??"; let path_prefix = if windows { // single slash handles most windows prefixes (like\\server\...) but `\ // \?\C:\..` (and C:\) needs special handling, since we don't allow : in path // components (so that colon separated paths and : work) r"\\\\\?\\\w:|\w:|\\|" } else { "" }; let path_start = format!("(?:{first_component}+|~|{path_prefix}{url_prefix})"); let optional = if match_single_file { format!("|{path_start}") } else { String::new() }; let path_regex = format!( "{prefix}(?:{path_start}?(?:(?:{sep}{component}+)+{sep}?|{sep}){optional}){postfix}" ); Regex::new(&path_regex).unwrap() } /// If `src` ends with a path then this function returns the part of the slice. pub fn get_path_suffix(src: RopeSlice<'_>, match_single_file: bool) -> Option> { let regex = if match_single_file { static REGEX: Lazy = Lazy::new(|| compile_path_regex("", "$", true, cfg!(windows))); &*REGEX } else { static REGEX: Lazy = Lazy::new(|| compile_path_regex("", "$", false, cfg!(windows))); &*REGEX }; regex .find(Input::new(src)) .map(|mat| src.byte_slice(mat.range())) } /// Returns an iterator of the **byte** ranges in src that contain a path. 
pub fn find_paths( src: RopeSlice<'_>, match_single_file: bool, ) -> impl Iterator> + '_ { let regex = if match_single_file { static REGEX: Lazy = Lazy::new(|| compile_path_regex("", "", true, cfg!(windows))); &*REGEX } else { static REGEX: Lazy = Lazy::new(|| compile_path_regex("", "", false, cfg!(windows))); &*REGEX }; regex.find_iter(Input::new(src)).map(|mat| mat.range()) } /// Performs substitution of `~` and environment variables, see [`env::expand`](crate::env::expand) and [`expand_tilde`] pub fn expand + ?Sized>(path: &T) -> Cow<'_, Path> { let path = path.as_ref(); let path = expand_tilde(path); match crate::env::expand(&*path) { Cow::Borrowed(_) => path, Cow::Owned(path) => PathBuf::from(path).into(), } } #[cfg(test)] mod tests { use std::{ ffi::OsStr, path::{Component, Path}, }; use regex_cursor::Input; use ropey::RopeSlice; use crate::path::{self, compile_path_regex}; #[test] fn expand_tilde() { for path in ["~", "~/foo"] { let expanded = path::expand_tilde(Path::new(path)); let tilde = Component::Normal(OsStr::new("~")); let mut component_count = 0; for component in expanded.components() { // No tilde left. assert_ne!(component, tilde); component_count += 1; } // The path was at least expanded to something. assert_ne!(component_count, 0); } } macro_rules! assert_match { ($regex: expr, $haystack: expr) => { let haystack = Input::new(RopeSlice::from($haystack)); assert!( $regex.is_match(haystack), "regex should match {}", $haystack ); }; } macro_rules! assert_no_match { ($regex: expr, $haystack: expr) => { let haystack = Input::new(RopeSlice::from($haystack)); assert!( !$regex.is_match(haystack), "regex should not match {}", $haystack ); }; } macro_rules! 
assert_matches { ($regex: expr, $haystack: expr, [$($matches: expr),*]) => { let src = $haystack; let matches: Vec<_> = $regex .find_iter(Input::new(RopeSlice::from(src))) .map(|it| &src[it.range()]) .collect(); assert_eq!(matches, vec![$($matches),*]); }; } /// Linux-only path #[test] fn path_regex_unix() { // due to ambiguity with the `\` path separator we can't support space escapes `\ ` on windows let regex = compile_path_regex("^", "$", false, false); assert_match!(regex, "${FOO}/hello\\ world"); assert_match!(regex, "${FOO}/\\ "); } /// Windows-only paths #[test] fn path_regex_windows() { let regex = compile_path_regex("^", "$", false, true); assert_match!(regex, "${FOO}/hello^ world"); assert_match!(regex, "${FOO}/hello` world"); assert_match!(regex, "${FOO}/^ "); assert_match!(regex, "${FOO}/` "); assert_match!(regex, r"foo\bar"); assert_match!(regex, r"foo\bar"); assert_match!(regex, r"..\bar"); assert_match!(regex, r"..\"); assert_match!(regex, r"C:\"); assert_match!(regex, r"\\?\C:\foo"); assert_match!(regex, r"\\server\foo"); } /// Paths that should work on all platforms #[test] fn path_regex() { for windows in [false, true] { let regex = compile_path_regex("^", "$", false, windows); assert_no_match!(regex, "foo"); assert_no_match!(regex, ""); assert_match!(regex, "https://github.com/notifications/query=foo"); assert_match!(regex, "file:///foo/bar"); assert_match!(regex, "foo/bar"); assert_match!(regex, "$HOME/foo"); assert_match!(regex, "${FOO:-bar}/baz"); assert_match!(regex, "foo/bar_"); assert_match!(regex, "/home/bar"); assert_match!(regex, "foo/"); assert_match!(regex, "./"); assert_match!(regex, "../"); assert_match!(regex, "../.."); assert_match!(regex, "./foo"); assert_match!(regex, "./foo.rs"); assert_match!(regex, "/"); assert_match!(regex, "~/"); assert_match!(regex, "~/foo"); assert_match!(regex, "~/foo"); assert_match!(regex, "~/foo/../baz"); assert_match!(regex, "${HOME}/foo"); assert_match!(regex, "$HOME/foo"); assert_match!(regex, 
"/$FOO"); assert_match!(regex, "/${FOO}"); assert_match!(regex, "/${FOO}/${BAR}"); assert_match!(regex, "/${FOO}/${BAR}/foo"); assert_match!(regex, "/${FOO}/${BAR}"); assert_match!(regex, "${FOO}/hello_$WORLD"); assert_match!(regex, "${FOO}/hello_${WORLD}"); let regex = compile_path_regex("", "", false, windows); assert_no_match!(regex, ""); assert_matches!( regex, r#"${FOO}/hello_${WORLD} ${FOO}/hello_${WORLD} foo("./bar", "/home/foo")""#, [ "${FOO}/hello_${WORLD}", "${FOO}/hello_${WORLD}", "./bar", "/home/foo" ] ); assert_matches!( regex, r#"--> helix-stdx/src/path.rs:427:13"#, ["helix-stdx/src/path.rs"] ); assert_matches!( regex, r#"PATH=/foo/bar:/bar/baz:${foo:-/foo}/bar:${PATH}"#, ["/foo/bar", "/bar/baz", "${foo:-/foo}/bar"] ); let regex = compile_path_regex("^", "$", true, windows); assert_no_match!(regex, ""); assert_match!(regex, "foo"); assert_match!(regex, "foo/"); assert_match!(regex, "$FOO"); assert_match!(regex, "${BAR}"); } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-stdx/src/range.rs000066400000000000000000000064761500614314100241440ustar00rootroot00000000000000use std::ops::{self, RangeBounds}; /// A range of `char`s within the text. #[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq)] pub struct Range { pub start: T, pub end: T, } impl Range { pub fn contains(&self, other: Self) -> bool { self.start <= other.start && other.end <= self.end } pub fn is_empty(&self) -> bool { self.end <= self.start } } impl RangeBounds for Range { fn start_bound(&self) -> ops::Bound<&T> { ops::Bound::Included(&self.start) } fn end_bound(&self) -> ops::Bound<&T> { ops::Bound::Excluded(&self.end) } } /// Returns true if all ranges yielded by `sub_set` are contained by /// `super_set`. This is essentially an optimized implementation of /// `sub_set.all(|rb| super_set.any(|ra| ra.contains(rb)))` that runs in O(m+n) /// instead of O(mn) (and in many cases faster). 
/// /// Both iterators must uphold a the following invariants: /// * ranges must not overlap (but they can be adjacent) /// * ranges must be sorted pub fn is_subset( mut super_set: impl Iterator, mut sub_set: impl Iterator, ) -> bool { let (mut super_range, mut sub_range) = (super_set.next(), sub_set.next()); loop { match (super_range, sub_range) { // skip over irrelevant ranges (Some(ra), Some(rb)) if ra.end <= rb.start && (ra.start != rb.start || !ALLOW_EMPTY) => { super_range = super_set.next(); } (Some(ra), Some(rb)) => { if ra.contains(rb) { sub_range = sub_set.next(); } else { return false; } } (None, Some(_)) => { // exhausted `super_set`, we can't match the reminder of `sub_set` return false; } (_, None) => { // no elements from `sub_sut` left to match, `super_set` contains `sub_set` return true; } } } } pub fn is_exact_subset( mut super_set: impl Iterator, mut sub_set: impl Iterator, ) -> bool { let (mut super_range, mut sub_range) = (super_set.next(), sub_set.next()); let mut super_range_matched = true; loop { match (super_range, sub_range) { // skip over irrelevant ranges (Some(ra), Some(rb)) if ra.end <= rb.start && ra.start < rb.start => { if !super_range_matched { return false; } super_range_matched = false; super_range = super_set.next(); } (Some(ra), Some(rb)) => { if ra.contains(rb) { super_range_matched = true; sub_range = sub_set.next(); } else { return false; } } (None, Some(_)) => { // exhausted `super_set`, we can't match the reminder of `sub_set` return false; } (_, None) => { // no elements from `sub_sut` left to match, `super_set` contains `sub_set` return super_set.next().is_none(); } } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-stdx/src/rope.rs000066400000000000000000000511171500614314100240050ustar00rootroot00000000000000use std::fmt; use std::ops::{Bound, RangeBounds}; pub use regex_cursor::engines::meta::{Builder as RegexBuilder, Regex}; pub use regex_cursor::regex_automata::util::syntax::Config; use regex_cursor::{Input 
as RegexInput, RopeyCursor}; use ropey::iter::Chunks; use ropey::RopeSlice; use unicode_segmentation::{GraphemeCursor, GraphemeIncomplete}; pub trait RopeSliceExt<'a>: Sized { fn ends_with(self, text: &str) -> bool; fn starts_with(self, text: &str) -> bool; fn regex_input(self) -> RegexInput>; fn regex_input_at_bytes>( self, byte_range: R, ) -> RegexInput>; fn regex_input_at>(self, char_range: R) -> RegexInput>; fn first_non_whitespace_char(self) -> Option; fn last_non_whitespace_char(self) -> Option; /// Finds the closest byte index not exceeding `byte_idx` which lies on a character boundary. /// /// If `byte_idx` already lies on a character boundary then it is returned as-is. When /// `byte_idx` lies between two character boundaries, this function returns the byte index of /// the lesser / earlier / left-hand-side boundary. /// /// # Example /// /// ``` /// # use ropey::RopeSlice; /// # use helix_stdx::rope::RopeSliceExt; /// let text = RopeSlice::from("⌚"); // three bytes: e2 8c 9a /// assert_eq!(text.floor_char_boundary(0), 0); /// assert_eq!(text.floor_char_boundary(1), 0); /// assert_eq!(text.floor_char_boundary(2), 0); /// assert_eq!(text.floor_char_boundary(3), 3); /// ``` fn floor_char_boundary(self, byte_idx: usize) -> usize; /// Finds the closest byte index not below `byte_idx` which lies on a character boundary. /// /// If `byte_idx` already lies on a character boundary then it is returned as-is. When /// `byte_idx` lies between two character boundaries, this function returns the byte index of /// the greater / later / right-hand-side boundary. 
/// /// # Example /// /// ``` /// # use ropey::RopeSlice; /// # use helix_stdx::rope::RopeSliceExt; /// let text = RopeSlice::from("⌚"); // three bytes: e2 8c 9a /// assert_eq!(text.ceil_char_boundary(0), 0); /// assert_eq!(text.ceil_char_boundary(1), 3); /// assert_eq!(text.ceil_char_boundary(2), 3); /// assert_eq!(text.ceil_char_boundary(3), 3); /// ``` fn ceil_char_boundary(self, byte_idx: usize) -> usize; /// Checks whether the given `byte_idx` lies on a character boundary. /// /// # Example /// /// ``` /// # use ropey::RopeSlice; /// # use helix_stdx::rope::RopeSliceExt; /// let text = RopeSlice::from("⌚"); // three bytes: e2 8c 9a /// assert!(text.is_char_boundary(0)); /// assert!(!text.is_char_boundary(1)); /// assert!(!text.is_char_boundary(2)); /// assert!(text.is_char_boundary(3)); /// ``` #[allow(clippy::wrong_self_convention)] fn is_char_boundary(self, byte_idx: usize) -> bool; /// Finds the closest byte index not exceeding `byte_idx` which lies on a grapheme cluster /// boundary. /// /// If `byte_idx` already lies on a grapheme cluster boundary then it is returned as-is. When /// `byte_idx` lies between two grapheme cluster boundaries, this function returns the byte /// index of the lesser / earlier / left-hand-side boundary. /// /// `byte_idx` does not need to be aligned to a character boundary. /// /// # Example /// /// ``` /// # use ropey::RopeSlice; /// # use helix_stdx::rope::RopeSliceExt; /// let text = RopeSlice::from("\r\n"); // U+000D U+000A, hex: 0d 0a /// assert_eq!(text.floor_grapheme_boundary(0), 0); /// assert_eq!(text.floor_grapheme_boundary(1), 0); /// assert_eq!(text.floor_grapheme_boundary(2), 2); /// ``` fn floor_grapheme_boundary(self, byte_idx: usize) -> usize; /// Finds the closest byte index not exceeding `byte_idx` which lies on a grapheme cluster /// boundary. /// /// If `byte_idx` already lies on a grapheme cluster boundary then it is returned as-is. 
When /// `byte_idx` lies between two grapheme cluster boundaries, this function returns the byte /// index of the greater / later / right-hand-side boundary. /// /// `byte_idx` does not need to be aligned to a character boundary. /// /// # Example /// /// ``` /// # use ropey::RopeSlice; /// # use helix_stdx::rope::RopeSliceExt; /// let text = RopeSlice::from("\r\n"); // U+000D U+000A, hex: 0d 0a /// assert_eq!(text.ceil_grapheme_boundary(0), 0); /// assert_eq!(text.ceil_grapheme_boundary(1), 2); /// assert_eq!(text.ceil_grapheme_boundary(2), 2); /// ``` fn ceil_grapheme_boundary(self, byte_idx: usize) -> usize; /// Checks whether the `byte_idx` lies on a grapheme cluster boundary. /// /// # Example /// /// ``` /// # use ropey::RopeSlice; /// # use helix_stdx::rope::RopeSliceExt; /// let text = RopeSlice::from("\r\n"); // U+000D U+000A, hex: 0d 0a /// assert!(text.is_grapheme_boundary(0)); /// assert!(!text.is_grapheme_boundary(1)); /// assert!(text.is_grapheme_boundary(2)); /// ``` #[allow(clippy::wrong_self_convention)] fn is_grapheme_boundary(self, byte_idx: usize) -> bool; /// Returns an iterator over the grapheme clusters in the slice. /// /// # Example /// /// ``` /// # use ropey::RopeSlice; /// # use helix_stdx::rope::RopeSliceExt; /// let text = RopeSlice::from("😶‍🌫️🏴‍☠️🖼️"); /// let graphemes: Vec<_> = text.graphemes().collect(); /// assert_eq!(graphemes.as_slice(), &["😶‍🌫️", "🏴‍☠️", "🖼️"]); /// ``` fn graphemes(self) -> RopeGraphemes<'a>; /// Returns an iterator over the grapheme clusters in the slice, reversed. /// /// The returned iterator starts at the end of the slice and ends at the beginning of the /// slice. 
/// /// # Example /// /// ``` /// # use ropey::RopeSlice; /// # use helix_stdx::rope::RopeSliceExt; /// let text = RopeSlice::from("😶‍🌫️🏴‍☠️🖼️"); /// let graphemes: Vec<_> = text.graphemes_rev().collect(); /// assert_eq!(graphemes.as_slice(), &["🖼️", "🏴‍☠️", "😶‍🌫️"]); /// ``` fn graphemes_rev(self) -> RevRopeGraphemes<'a>; } impl<'a> RopeSliceExt<'a> for RopeSlice<'a> { fn ends_with(self, text: &str) -> bool { let len = self.len_bytes(); if len < text.len() { return false; } self.get_byte_slice(len - text.len()..) .is_some_and(|end| end == text) } fn starts_with(self, text: &str) -> bool { let len = self.len_bytes(); if len < text.len() { return false; } self.get_byte_slice(..text.len()) .is_some_and(|start| start == text) } fn regex_input(self) -> RegexInput> { RegexInput::new(self) } fn regex_input_at>(self, char_range: R) -> RegexInput> { let start_bound = match char_range.start_bound() { Bound::Included(&val) => Bound::Included(self.char_to_byte(val)), Bound::Excluded(&val) => Bound::Excluded(self.char_to_byte(val)), Bound::Unbounded => Bound::Unbounded, }; let end_bound = match char_range.end_bound() { Bound::Included(&val) => Bound::Included(self.char_to_byte(val)), Bound::Excluded(&val) => Bound::Excluded(self.char_to_byte(val)), Bound::Unbounded => Bound::Unbounded, }; self.regex_input_at_bytes((start_bound, end_bound)) } fn regex_input_at_bytes>( self, byte_range: R, ) -> RegexInput> { let input = match byte_range.start_bound() { Bound::Included(&pos) | Bound::Excluded(&pos) => { RegexInput::new(RopeyCursor::at(self, pos)) } Bound::Unbounded => RegexInput::new(self), }; input.range(byte_range) } fn first_non_whitespace_char(self) -> Option { self.chars().position(|ch| !ch.is_whitespace()) } fn last_non_whitespace_char(self) -> Option { self.chars_at(self.len_chars()) .reversed() .position(|ch| !ch.is_whitespace()) .map(|pos| self.len_chars() - pos - 1) } // These three are adapted from std: fn floor_char_boundary(self, byte_idx: usize) -> usize { if 
byte_idx >= self.len_bytes() { self.len_bytes() } else { let offset = self .bytes_at(byte_idx + 1) .reversed() .take(4) .position(is_utf8_char_boundary) // A char can only be four bytes long so we are guaranteed to find a boundary. .unwrap(); byte_idx - offset } } fn ceil_char_boundary(self, byte_idx: usize) -> usize { if byte_idx > self.len_bytes() { self.len_bytes() } else { let upper_bound = self.len_bytes().min(byte_idx + 4); self.bytes_at(byte_idx) .position(is_utf8_char_boundary) .map_or(upper_bound, |pos| pos + byte_idx) } } fn is_char_boundary(self, byte_idx: usize) -> bool { if byte_idx == 0 { return true; } if byte_idx >= self.len_bytes() { byte_idx == self.len_bytes() } else { is_utf8_char_boundary(self.bytes_at(byte_idx).next().unwrap()) } } fn floor_grapheme_boundary(self, mut byte_idx: usize) -> usize { if byte_idx >= self.len_bytes() { return self.len_bytes(); } byte_idx = self.ceil_char_boundary(byte_idx + 1); let (mut chunk, mut chunk_byte_idx, _, _) = self.chunk_at_byte(byte_idx); let mut cursor = GraphemeCursor::new(byte_idx, self.len_bytes(), true); loop { match cursor.prev_boundary(chunk, chunk_byte_idx) { Ok(None) => return 0, Ok(Some(boundary)) => return boundary, Err(GraphemeIncomplete::PrevChunk) => { let (ch, ch_byte_idx, _, _) = self.chunk_at_byte(chunk_byte_idx - 1); chunk = ch; chunk_byte_idx = ch_byte_idx; } Err(GraphemeIncomplete::PreContext(n)) => { let ctx_chunk = self.chunk_at_byte(n - 1).0; cursor.provide_context(ctx_chunk, n - ctx_chunk.len()); } _ => unreachable!(), } } } fn ceil_grapheme_boundary(self, mut byte_idx: usize) -> usize { if byte_idx >= self.len_bytes() { return self.len_bytes(); } if byte_idx == 0 { return 0; } byte_idx = self.floor_char_boundary(byte_idx - 1); let (mut chunk, mut chunk_byte_idx, _, _) = self.chunk_at_byte(byte_idx); let mut cursor = GraphemeCursor::new(byte_idx, self.len_bytes(), true); loop { match cursor.next_boundary(chunk, chunk_byte_idx) { Ok(None) => return self.len_bytes(), 
Ok(Some(boundary)) => return boundary, Err(GraphemeIncomplete::NextChunk) => { chunk_byte_idx += chunk.len(); chunk = self.chunk_at_byte(chunk_byte_idx).0; } Err(GraphemeIncomplete::PreContext(n)) => { let ctx_chunk = self.chunk_at_byte(n - 1).0; cursor.provide_context(ctx_chunk, n - ctx_chunk.len()); } _ => unreachable!(), } } } fn is_grapheme_boundary(self, byte_idx: usize) -> bool { // The byte must lie on a character boundary to lie on a grapheme cluster boundary. if !self.is_char_boundary(byte_idx) { return false; } let (chunk, chunk_byte_idx, _, _) = self.chunk_at_byte(byte_idx); let mut cursor = GraphemeCursor::new(byte_idx, self.len_bytes(), true); loop { match cursor.is_boundary(chunk, chunk_byte_idx) { Ok(n) => return n, Err(GraphemeIncomplete::PreContext(n)) => { let (ctx_chunk, ctx_byte_start, _, _) = self.chunk_at_byte(n - 1); cursor.provide_context(ctx_chunk, ctx_byte_start); } Err(_) => unreachable!(), } } } fn graphemes(self) -> RopeGraphemes<'a> { let mut chunks = self.chunks(); let first_chunk = chunks.next().unwrap_or(""); RopeGraphemes { text: self, chunks, cur_chunk: first_chunk, cur_chunk_start: 0, cursor: GraphemeCursor::new(0, self.len_bytes(), true), } } fn graphemes_rev(self) -> RevRopeGraphemes<'a> { let (mut chunks, mut cur_chunk_start, _, _) = self.chunks_at_byte(self.len_bytes()); chunks.reverse(); let first_chunk = chunks.next().unwrap_or(""); cur_chunk_start -= first_chunk.len(); RevRopeGraphemes { text: self, chunks, cur_chunk: first_chunk, cur_chunk_start, cursor: GraphemeCursor::new(self.len_bytes(), self.len_bytes(), true), } } } // copied from std #[inline] const fn is_utf8_char_boundary(b: u8) -> bool { // This is bit magic equivalent to: b < 128 || b >= 192 (b as i8) >= -0x40 } /// An iterator over the graphemes of a `RopeSlice`. 
#[derive(Clone)] pub struct RopeGraphemes<'a> { text: RopeSlice<'a>, chunks: Chunks<'a>, cur_chunk: &'a str, cur_chunk_start: usize, cursor: GraphemeCursor, } impl fmt::Debug for RopeGraphemes<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RopeGraphemes") .field("text", &self.text) .field("chunks", &self.chunks) .field("cur_chunk", &self.cur_chunk) .field("cur_chunk_start", &self.cur_chunk_start) // .field("cursor", &self.cursor) .finish() } } impl<'a> Iterator for RopeGraphemes<'a> { type Item = RopeSlice<'a>; fn next(&mut self) -> Option { let a = self.cursor.cur_cursor(); let b; loop { match self .cursor .next_boundary(self.cur_chunk, self.cur_chunk_start) { Ok(None) => { return None; } Ok(Some(n)) => { b = n; break; } Err(GraphemeIncomplete::NextChunk) => { self.cur_chunk_start += self.cur_chunk.len(); self.cur_chunk = self.chunks.next().unwrap_or(""); } Err(GraphemeIncomplete::PreContext(idx)) => { let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1)); self.cursor.provide_context(chunk, byte_idx); } _ => unreachable!(), } } if a < self.cur_chunk_start { Some(self.text.byte_slice(a..b)) } else { let a2 = a - self.cur_chunk_start; let b2 = b - self.cur_chunk_start; Some((&self.cur_chunk[a2..b2]).into()) } } } /// An iterator over the graphemes of a `RopeSlice` in reverse. 
#[derive(Clone)] pub struct RevRopeGraphemes<'a> { text: RopeSlice<'a>, chunks: Chunks<'a>, cur_chunk: &'a str, cur_chunk_start: usize, cursor: GraphemeCursor, } impl fmt::Debug for RevRopeGraphemes<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RevRopeGraphemes") .field("text", &self.text) .field("chunks", &self.chunks) .field("cur_chunk", &self.cur_chunk) .field("cur_chunk_start", &self.cur_chunk_start) // .field("cursor", &self.cursor) .finish() } } impl<'a> Iterator for RevRopeGraphemes<'a> { type Item = RopeSlice<'a>; fn next(&mut self) -> Option { let a = self.cursor.cur_cursor(); let b; loop { match self .cursor .prev_boundary(self.cur_chunk, self.cur_chunk_start) { Ok(None) => { return None; } Ok(Some(n)) => { b = n; break; } Err(GraphemeIncomplete::PrevChunk) => { self.cur_chunk = self.chunks.next().unwrap_or(""); self.cur_chunk_start -= self.cur_chunk.len(); } Err(GraphemeIncomplete::PreContext(idx)) => { let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1)); self.cursor.provide_context(chunk, byte_idx); } _ => unreachable!(), } } if a >= self.cur_chunk_start + self.cur_chunk.len() { Some(self.text.byte_slice(b..a)) } else { let a2 = a - self.cur_chunk_start; let b2 = b - self.cur_chunk_start; Some((&self.cur_chunk[b2..a2]).into()) } } } #[cfg(test)] mod tests { use ropey::RopeSlice; use crate::rope::RopeSliceExt; #[test] fn starts_with() { assert!(RopeSlice::from("asdf").starts_with("a")); } #[test] fn ends_with() { assert!(RopeSlice::from("asdf").ends_with("f")); } #[test] fn char_boundaries() { let ascii = RopeSlice::from("ascii"); // When the given index lies on a character boundary, the index should not change. 
for byte_idx in 0..=ascii.len_bytes() { assert_eq!(ascii.floor_char_boundary(byte_idx), byte_idx); assert_eq!(ascii.ceil_char_boundary(byte_idx), byte_idx); assert!(ascii.is_char_boundary(byte_idx)); } // This is a polyfill of a method of this trait which was replaced by ceil_char_boundary. // It returns the _character index_ of the given byte index, rounding up if it does not // already lie on a character boundary. fn byte_to_next_char(slice: RopeSlice, byte_idx: usize) -> usize { slice.byte_to_char(slice.ceil_char_boundary(byte_idx)) } for i in 0..=6 { assert_eq!(byte_to_next_char(RopeSlice::from("foobar"), i), i); } for char_idx in 0..10 { let len = "😆".len(); assert_eq!( byte_to_next_char(RopeSlice::from("😆😆😆😆😆😆😆😆😆😆"), char_idx * len), char_idx ); for i in 1..=len { assert_eq!( byte_to_next_char(RopeSlice::from("😆😆😆😆😆😆😆😆😆😆"), char_idx * len + i), char_idx + 1 ); } } } #[test] fn grapheme_boundaries() { let ascii = RopeSlice::from("ascii"); // When the given index lies on a grapheme boundary, the index should not change. 
for byte_idx in 0..=ascii.len_bytes() { assert_eq!(ascii.floor_char_boundary(byte_idx), byte_idx); assert_eq!(ascii.ceil_char_boundary(byte_idx), byte_idx); assert!(ascii.is_grapheme_boundary(byte_idx)); } // 🏴‍☠️: U+1F3F4 U+200D U+2620 U+FE0F // 13 bytes, hex: f0 9f 8f b4 + e2 80 8d + e2 98 a0 + ef b8 8f let g = RopeSlice::from("🏴‍☠️\r\n"); let emoji_len = "🏴‍☠️".len(); let end = g.len_bytes(); for byte_idx in 0..emoji_len { assert_eq!(g.floor_grapheme_boundary(byte_idx), 0); } for byte_idx in emoji_len..end { assert_eq!(g.floor_grapheme_boundary(byte_idx), emoji_len); } assert_eq!(g.floor_grapheme_boundary(end), end); assert_eq!(g.ceil_grapheme_boundary(0), 0); for byte_idx in 1..=emoji_len { assert_eq!(g.ceil_grapheme_boundary(byte_idx), emoji_len); } for byte_idx in emoji_len + 1..=end { assert_eq!(g.ceil_grapheme_boundary(byte_idx), end); } assert!(g.is_grapheme_boundary(0)); assert!(g.is_grapheme_boundary(emoji_len)); assert!(g.is_grapheme_boundary(end)); for byte_idx in (1..emoji_len).chain(emoji_len + 1..end) { assert!(!g.is_grapheme_boundary(byte_idx)); } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-stdx/tests/000077500000000000000000000000001500614314100230405ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-stdx/tests/path.rs000066400000000000000000000064501500614314100243470ustar00rootroot00000000000000#![cfg(windows)] use std::{env::set_current_dir, error::Error, path::Component}; use helix_stdx::path; use tempfile::Builder; // Paths on Windows are almost always case-insensitive. // Normalization should return the original path. // E.g. mkdir `CaSe`, normalize(`case`) = `CaSe`. 
#[test] fn test_case_folding_windows() -> Result<(), Box> { // tmp/root/case let tmp_prefix = std::env::temp_dir(); set_current_dir(&tmp_prefix)?; let root = Builder::new().prefix("root-").tempdir()?; let case = Builder::new().prefix("CaSe-").tempdir_in(&root)?; let root_without_prefix = root.path().strip_prefix(&tmp_prefix)?; let lowercase_case = format!( "case-{}", case.path() .file_name() .unwrap() .to_string_lossy() .split_at(5) .1 ); let test_path = root_without_prefix.join(lowercase_case); assert_eq!( path::normalize(&test_path), case.path().strip_prefix(&tmp_prefix)? ); Ok(()) } #[test] fn test_normalize_path() -> Result<(), Box> { /* tmp/root/ ├── link -> dir1/orig_file ├── dir1/ │ └── orig_file └── dir2/ └── dir_link -> ../dir1/ */ let tmp_prefix = std::env::temp_dir(); set_current_dir(&tmp_prefix)?; // Create a tree structure as shown above let root = Builder::new().prefix("root-").tempdir()?; let dir1 = Builder::new().prefix("dir1-").tempdir_in(&root)?; let orig_file = Builder::new().prefix("orig_file-").tempfile_in(&dir1)?; let dir2 = Builder::new().prefix("dir2-").tempdir_in(&root)?; // Create path and delete existing file let dir_link = Builder::new() .prefix("dir_link-") .tempfile_in(&dir2)? .path() .to_owned(); let link = Builder::new() .prefix("link-") .tempfile_in(&root)? .path() .to_owned(); use std::os::windows; windows::fs::symlink_dir(&dir1, &dir_link)?; windows::fs::symlink_file(&orig_file, &link)?; // root/link let path = link.strip_prefix(&tmp_prefix)?; assert_eq!( path::normalize(path), path, "input {:?} and symlink last component shouldn't be resolved", path ); // root/dir2/dir_link/orig_file/../.. 
let path = dir_link .strip_prefix(&tmp_prefix) .unwrap() .join(orig_file.path().file_name().unwrap()) .join(Component::ParentDir) .join(Component::ParentDir); let expected = dir_link .strip_prefix(&tmp_prefix) .unwrap() .join(Component::ParentDir); assert_eq!( path::normalize(&path), expected, "input {:?} and \"..\" should not erase the symlink that goes ahead", &path ); // root/link/.././../dir2/../ let path = link .strip_prefix(&tmp_prefix) .unwrap() .join(Component::ParentDir) .join(Component::CurDir) .join(Component::ParentDir) .join(dir2.path().file_name().unwrap()) .join(Component::ParentDir); let expected = link .strip_prefix(&tmp_prefix) .unwrap() .join(Component::ParentDir) .join(Component::ParentDir); assert_eq!(path::normalize(&path), expected, "input {:?}", &path); Ok(()) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/000077500000000000000000000000001500614314100216635ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/.gitignore000066400000000000000000000000101500614314100236420ustar00rootroot00000000000000/target helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/Cargo.toml000066400000000000000000000072671500614314100236270ustar00rootroot00000000000000[package] name = "helix-term" description = "A post-modern text editor." 
include = ["src/**/*", "README.md"] default-run = "hx" version.workspace = true authors.workspace = true edition.workspace = true license.workspace = true rust-version.workspace = true categories.workspace = true repository.workspace = true homepage.workspace = true [package.metadata.deb] # generate a .deb in target/debian/ with the command: cargo deb --no-build name = "helix" assets = [ { source = "target/release/hx", dest = "/usr/lib/helix/", mode = "755" }, { source = "../contrib/hx_launcher.sh", dest = "/usr/bin/hx", mode = "755" }, { source = "../runtime/*", dest = "/usr/lib/helix/runtime/", mode = "644" }, { source = "../runtime/grammars/*", dest = "/usr/lib/helix/runtime/grammars/", mode = "644" }, # to avoid sources/ { source = "../runtime/queries/**/*", dest = "/usr/lib/helix/runtime/queries/", mode = "644" }, { source = "../runtime/themes/**/*", dest = "/usr/lib/helix/runtime/themes/", mode = "644" }, { source = "../README.md", dest = "/usr/share/doc/helix/", mode = "644" }, { source = "../contrib/completion/hx.bash", dest = "/usr/share/bash-completion/completions/hx", mode = "644" }, { source = "../contrib/completion/hx.fish", dest = "/usr/share/fish/vendor_completions.d/hx.fish", mode = "644" }, { source = "../contrib/completion/hx.zsh", dest = "/usr/share/zsh/vendor-completions/_hx", mode = "644" }, { source = "../contrib/Helix.desktop", dest = "/usr/share/applications/Helix.desktop", mode = "644" }, { source = "../contrib/helix.png", dest = "/usr/share/icons/hicolor/256x256/apps/helix.png", mode = "644" }, ] [features] default = ["git"] unicode-lines = ["helix-core/unicode-lines", "helix-view/unicode-lines"] integration = ["helix-event/integration_test"] git = ["helix-vcs/git"] [[bin]] name = "hx" path = "src/main.rs" [dependencies] helix-stdx = { path = "../helix-stdx" } helix-core = { path = "../helix-core" } helix-event = { path = "../helix-event" } helix-view = { path = "../helix-view" } helix-lsp = { path = "../helix-lsp" } helix-dap = { path = 
"../helix-dap" } helix-vcs = { path = "../helix-vcs" } helix-loader = { path = "../helix-loader" } anyhow = "1" once_cell = "1.21" tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] } tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] } crossterm = { version = "0.28", features = ["event-stream"] } signal-hook = "0.3" tokio-stream = "0.1" futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } arc-swap = { version = "1.7.1" } termini = "1" indexmap = "2.9" # Logging fern = "0.7" chrono = { version = "0.4", default-features = false, features = ["clock"] } log = "0.4" # File picker nucleo.workspace = true ignore = "0.4" # markdown doc rendering pulldown-cmark = { version = "0.13", default-features = false } # file type detection content_inspector = "0.2.4" thiserror.workspace = true # opening URLs open = "5.3.2" url = "2.5.4" # config toml = "0.8" serde_json = "1.0" serde = { version = "1.0", features = ["derive"] } # ripgrep for global search grep-regex = "0.1.13" grep-searcher = "0.1.14" [target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100 signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] } libc = "0.2.172" [target.'cfg(target_os = "macos")'.dependencies] crossterm = { version = "0.28", features = ["event-stream", "use-dev-tty", "libc"] } [build-dependencies] helix-loader = { path = "../helix-loader" } [dev-dependencies] smallvec = "1.15" indoc = "2.0.6" tempfile.workspace = true same-file = "1.0.1" helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/build.rs000066400000000000000000000135641500614314100233410ustar00rootroot00000000000000use helix_loader::grammar::{build_grammars, fetch_grammars}; fn main() { if std::env::var("HELIX_DISABLE_AUTO_GRAMMAR_BUILD").is_err() { fetch_grammars().expect("Failed to fetch tree-sitter 
grammars"); build_grammars(Some(std::env::var("TARGET").unwrap())) .expect("Failed to compile tree-sitter grammars"); } #[cfg(windows)] windows_rc::link_icon_in_windows_exe("../contrib/helix-256p.ico"); } #[cfg(windows)] mod windows_rc { use std::io::prelude::Write; use std::{env, io, path::Path, path::PathBuf, process}; pub(crate) fn link_icon_in_windows_exe(icon_path: &str) { let rc_exe = find_rc_exe().expect("Windows SDK is to be installed along with MSVC"); let output = env::var("OUT_DIR").expect("Env var OUT_DIR should have been set by compiler"); let output_dir = PathBuf::from(output); let rc_path = output_dir.join("resource.rc"); write_resource_file(&rc_path, icon_path).unwrap(); let resource_file = PathBuf::from(&output_dir).join("resource.lib"); compile_with_toolkit_msvc(rc_exe, resource_file, rc_path); println!("cargo:rustc-link-search=native={}", output_dir.display()); println!("cargo:rustc-link-lib=dylib=resource"); } fn compile_with_toolkit_msvc(rc_exe: PathBuf, output: PathBuf, input: PathBuf) { let mut command = process::Command::new(rc_exe); let command = command.arg(format!( "/I{}", env::var("CARGO_MANIFEST_DIR") .expect("CARGO_MANIFEST_DIR should have been set by Cargo") )); let status = command .arg(format!("/fo{}", output.display())) .arg(format!("{}", input.display())) .output() .unwrap(); println!( "RC Output:\n{}\n------", String::from_utf8_lossy(&status.stdout) ); println!( "RC Error:\n{}\n------", String::from_utf8_lossy(&status.stderr) ); } fn find_rc_exe() -> io::Result { let find_reg_key = process::Command::new("reg") .arg("query") .arg(r"HKLM\SOFTWARE\Microsoft\Windows Kits\Installed Roots") .arg("/reg:32") .arg("/v") .arg("KitsRoot10") .output(); match find_reg_key { Err(find_reg_key) => Err(io::Error::new( io::ErrorKind::Other, format!("Failed to run registry query: {}", find_reg_key), )), Ok(find_reg_key) => { if find_reg_key.status.code().unwrap() != 0 { Err(io::Error::new( io::ErrorKind::Other, "Can not find Windows SDK", )) } else 
{ let lines = String::from_utf8(find_reg_key.stdout) .expect("Should be able to parse the output"); let mut lines: Vec<&str> = lines.lines().collect(); let mut rc_exe_paths: Vec = Vec::new(); lines.reverse(); for line in lines { if line.trim().starts_with("KitsRoot") { let kit: String = line .chars() .skip(line.find("REG_SZ").unwrap() + 6) .skip_while(|c| c.is_whitespace()) .collect(); let p = PathBuf::from(&kit); let rc = if cfg!(target_arch = "x86_64") { p.join(r"bin\x64\rc.exe") } else { p.join(r"bin\x86\rc.exe") }; if rc.exists() { println!("{:?}", rc); rc_exe_paths.push(rc.to_owned()); } if let Ok(bin) = p.join("bin").read_dir() { for e in bin.filter_map(|e| e.ok()) { let p = if cfg!(target_arch = "x86_64") { e.path().join(r"x64\rc.exe") } else { e.path().join(r"x86\rc.exe") }; if p.exists() { println!("{:?}", p); rc_exe_paths.push(p.to_owned()); } } } } } if rc_exe_paths.is_empty() { return Err(io::Error::new( io::ErrorKind::Other, "Can not find Windows SDK", )); } println!("{:?}", rc_exe_paths); let rc_path = rc_exe_paths.pop().unwrap(); let rc_exe = if !rc_path.exists() { if cfg!(target_arch = "x86_64") { PathBuf::from(rc_path.parent().unwrap()).join(r"bin\x64\rc.exe") } else { PathBuf::from(rc_path.parent().unwrap()).join(r"bin\x86\rc.exe") } } else { rc_path }; println!("Selected RC path: '{}'", rc_exe.display()); Ok(rc_exe) } } } } fn write_resource_file(rc_path: &Path, icon_path: &str) -> io::Result<()> { let mut f = std::fs::File::create(rc_path)?; writeln!(f, "{} ICON \"{}\"", 1, icon_path)?; Ok(()) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/000077500000000000000000000000001500614314100224525ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/application.rs000066400000000000000000001362571500614314100253410ustar00rootroot00000000000000use arc_swap::{access::Map, ArcSwap}; use futures_util::Stream; use helix_core::{diagnostic::Severity, pos_at_coords, syntax, Range, Selection}; use 
helix_lsp::{ lsp::{self, notification::Notification}, util::lsp_range_to_range, LanguageServerId, LspProgressMap, }; use helix_stdx::path::get_relative_path; use helix_view::{ align_view, document::{DocumentOpenError, DocumentSavedEventResult}, editor::{ConfigEvent, EditorEvent}, graphics::Rect, theme, tree::Layout, Align, Editor, }; use serde_json::json; use tui::backend::Backend; use crate::{ args::Args, compositor::{Compositor, Event}, config::Config, handlers, job::Jobs, keymap::Keymaps, ui::{self, overlay::overlaid}, }; use log::{debug, error, info, warn}; #[cfg(not(feature = "integration"))] use std::io::stdout; use std::{io::stdin, path::Path, sync::Arc}; #[cfg(not(windows))] use anyhow::Context; use anyhow::Error; use crossterm::{event::Event as CrosstermEvent, tty::IsTty}; #[cfg(not(windows))] use {signal_hook::consts::signal, signal_hook_tokio::Signals}; #[cfg(windows)] type Signals = futures_util::stream::Empty<()>; #[cfg(not(feature = "integration"))] use tui::backend::CrosstermBackend; #[cfg(feature = "integration")] use tui::backend::TestBackend; #[cfg(not(feature = "integration"))] type TerminalBackend = CrosstermBackend; #[cfg(feature = "integration")] type TerminalBackend = TestBackend; type Terminal = tui::terminal::Terminal; pub struct Application { compositor: Compositor, terminal: Terminal, pub editor: Editor, config: Arc>, signals: Signals, jobs: Jobs, lsp_progress: LspProgressMap, } #[cfg(feature = "integration")] fn setup_integration_logging() { let level = std::env::var("HELIX_LOG_LEVEL") .map(|lvl| lvl.parse().unwrap()) .unwrap_or(log::LevelFilter::Info); // Separate file config so we can include year, month and day in file logs let _ = fern::Dispatch::new() .format(|out, message, record| { out.finish(format_args!( "{} {} [{}] {}", chrono::Local::now().format("%Y-%m-%dT%H:%M:%S%.3f"), record.target(), record.level(), message )) }) .level(level) .chain(std::io::stdout()) .apply(); } impl Application { pub fn new(args: Args, config: Config, 
lang_loader: syntax::Loader) -> Result { #[cfg(feature = "integration")] setup_integration_logging(); use helix_view::editor::Action; let mut theme_parent_dirs = vec![helix_loader::config_dir()]; theme_parent_dirs.extend(helix_loader::runtime_dirs().iter().cloned()); let theme_loader = theme::Loader::new(&theme_parent_dirs); #[cfg(not(feature = "integration"))] let backend = CrosstermBackend::new(stdout(), &config.editor); #[cfg(feature = "integration")] let backend = TestBackend::new(120, 150); let terminal = Terminal::new(backend)?; let area = terminal.size().expect("couldn't get terminal size"); let mut compositor = Compositor::new(area); let config = Arc::new(ArcSwap::from_pointee(config)); let handlers = handlers::setup(config.clone()); let mut editor = Editor::new( area, Arc::new(theme_loader), Arc::new(ArcSwap::from_pointee(lang_loader)), Arc::new(Map::new(Arc::clone(&config), |config: &Config| { &config.editor })), handlers, ); Self::load_configured_theme(&mut editor, &config.load()); let keys = Box::new(Map::new(Arc::clone(&config), |config: &Config| { &config.keys })); let editor_view = Box::new(ui::EditorView::new(Keymaps::new(keys))); compositor.push(editor_view); if args.load_tutor { let path = helix_loader::runtime_file(Path::new("tutor")); editor.open(&path, Action::VerticalSplit)?; // Unset path to prevent accidentally saving to the original tutor file. doc_mut!(editor).set_path(None); } else if !args.files.is_empty() { let mut files_it = args.files.into_iter().peekable(); // If the first file is a directory, skip it and open a picker if let Some((first, _)) = files_it.next_if(|(p, _)| p.is_dir()) { let picker = ui::file_picker(&editor, first); compositor.push(Box::new(overlaid(picker))); } // If there are any more files specified, open them if files_it.peek().is_some() { let mut nr_of_files = 0; for (file, pos) in files_it { nr_of_files += 1; if file.is_dir() { return Err(anyhow::anyhow!( "expected a path to file, but found a directory: {file:?}. 
(to open a directory pass it as first argument)" )); } else { // If the user passes in either `--vsplit` or // `--hsplit` as a command line argument, all the given // files will be opened according to the selected // option. If neither of those two arguments are passed // in, just load the files normally. let action = match args.split { _ if nr_of_files == 1 => Action::VerticalSplit, Some(Layout::Vertical) => Action::VerticalSplit, Some(Layout::Horizontal) => Action::HorizontalSplit, None => Action::Load, }; let old_id = editor.document_id_by_path(&file); let doc_id = match editor.open(&file, action) { // Ignore irregular files during application init. Err(DocumentOpenError::IrregularFile) => { nr_of_files -= 1; continue; } Err(err) => return Err(anyhow::anyhow!(err)), // We can't open more than 1 buffer for 1 file, in this case we already have opened this file previously Ok(doc_id) if old_id == Some(doc_id) => { nr_of_files -= 1; doc_id } Ok(doc_id) => doc_id, }; // with Action::Load all documents have the same view // NOTE: this isn't necessarily true anymore. If // `--vsplit` or `--hsplit` are used, the file which is // opened last is focused on. let view_id = editor.tree.focus; let doc = doc_mut!(editor, &doc_id); let selection = pos .into_iter() .map(|coords| { Range::point(pos_at_coords(doc.text().slice(..), coords, true)) }) .collect(); doc.set_selection(view_id, selection); } } // if all files were invalid, replace with empty buffer if nr_of_files == 0 { editor.new_file(Action::VerticalSplit); } else { editor.set_status(format!( "Loaded {} file{}.", nr_of_files, if nr_of_files == 1 { "" } else { "s" } // avoid "Loaded 1 files." 
grammo )); // align the view to center after all files are loaded, // does not affect views without pos since it is at the top let (view, doc) = current!(editor); align_view(doc, view, Align::Center); } } else { editor.new_file(Action::VerticalSplit); } } else if stdin().is_tty() || cfg!(feature = "integration") { editor.new_file(Action::VerticalSplit); } else { editor .new_file_from_stdin(Action::VerticalSplit) .unwrap_or_else(|_| editor.new_file(Action::VerticalSplit)); } #[cfg(windows)] let signals = futures_util::stream::empty(); #[cfg(not(windows))] let signals = Signals::new([ signal::SIGTSTP, signal::SIGCONT, signal::SIGUSR1, signal::SIGTERM, signal::SIGINT, ]) .context("build signal handler")?; let app = Self { compositor, terminal, editor, config, signals, jobs: Jobs::new(), lsp_progress: LspProgressMap::new(), }; Ok(app) } async fn render(&mut self) { if self.compositor.full_redraw { self.terminal.clear().expect("Cannot clear the terminal"); self.compositor.full_redraw = false; } let mut cx = crate::compositor::Context { editor: &mut self.editor, jobs: &mut self.jobs, scroll: None, }; helix_event::start_frame(); cx.editor.needs_redraw = false; let area = self .terminal .autoresize() .expect("Unable to determine terminal size"); // TODO: need to recalculate view tree if necessary let surface = self.terminal.current_buffer_mut(); self.compositor.render(area, surface, &mut cx); let (pos, kind) = self.compositor.cursor(area, &self.editor); // reset cursor cache self.editor.cursor_cache.reset(); let pos = pos.map(|pos| (pos.col as u16, pos.row as u16)); self.terminal.draw(pos, kind).unwrap(); } pub async fn event_loop(&mut self, input_stream: &mut S) where S: Stream> + Unpin, { self.render().await; loop { if !self.event_loop_until_idle(input_stream).await { break; } } } pub async fn event_loop_until_idle(&mut self, input_stream: &mut S) -> bool where S: Stream> + Unpin, { loop { if self.editor.should_close() { return false; } use futures_util::StreamExt; 
tokio::select! { biased; Some(signal) = self.signals.next() => { if !self.handle_signals(signal).await { return false; }; } Some(event) = input_stream.next() => { self.handle_terminal_events(event).await; } Some(callback) = self.jobs.callbacks.recv() => { self.jobs.handle_callback(&mut self.editor, &mut self.compositor, Ok(Some(callback))); self.render().await; } Some(msg) = self.jobs.status_messages.recv() => { let severity = match msg.severity{ helix_event::status::Severity::Hint => Severity::Hint, helix_event::status::Severity::Info => Severity::Info, helix_event::status::Severity::Warning => Severity::Warning, helix_event::status::Severity::Error => Severity::Error, }; // TODO: show multiple status messages at once to avoid clobbering self.editor.status_msg = Some((msg.message, severity)); helix_event::request_redraw(); } Some(callback) = self.jobs.wait_futures.next() => { self.jobs.handle_callback(&mut self.editor, &mut self.compositor, callback); self.render().await; } event = self.editor.wait_event() => { let _idle_handled = self.handle_editor_event(event).await; #[cfg(feature = "integration")] { if _idle_handled { return true; } } } } // for integration tests only, reset the idle timer after every // event to signal when test events are done processing #[cfg(feature = "integration")] { self.editor.reset_idle_timer(); } } } pub fn handle_config_events(&mut self, config_event: ConfigEvent) { match config_event { ConfigEvent::Refresh => self.refresh_config(), // Since only the Application can make changes to Editor's config, // the Editor must send up a new copy of a modified config so that // the Application can apply it. 
ConfigEvent::Update(editor_config) => { let mut app_config = (*self.config.load().clone()).clone(); app_config.editor = *editor_config; if let Err(err) = self.terminal.reconfigure(app_config.editor.clone().into()) { self.editor.set_error(err.to_string()); }; self.config.store(Arc::new(app_config)); } } // Update all the relevant members in the editor after updating // the configuration. self.editor.refresh_config(); // reset view position in case softwrap was enabled/disabled let scrolloff = self.editor.config().scrolloff; for (view, _) in self.editor.tree.views() { let doc = doc_mut!(self.editor, &view.doc); view.ensure_cursor_in_view(doc, scrolloff); } } /// refresh language config after config change fn refresh_language_config(&mut self) -> Result<(), Error> { let lang_loader = helix_core::config::user_lang_loader()?; self.editor.syn_loader.store(Arc::new(lang_loader)); for document in self.editor.documents.values_mut() { document.detect_language(self.editor.syn_loader.clone()); let diagnostics = Editor::doc_diagnostics( &self.editor.language_servers, &self.editor.diagnostics, document, ); document.replace_diagnostics(diagnostics, &[], None); } Ok(()) } fn refresh_config(&mut self) { let mut refresh_config = || -> Result<(), Error> { let default_config = Config::load_default() .map_err(|err| anyhow::anyhow!("Failed to load config: {}", err))?; self.refresh_language_config()?; // Refresh theme after config change Self::load_configured_theme(&mut self.editor, &default_config); self.terminal .reconfigure(default_config.editor.clone().into())?; // Store new config self.config.store(Arc::new(default_config)); Ok(()) }; match refresh_config() { Ok(_) => { self.editor.set_status("Config refreshed"); } Err(err) => { self.editor.set_error(err.to_string()); } } } /// Load the theme set in configuration fn load_configured_theme(editor: &mut Editor, config: &Config) { let true_color = config.editor.true_color || crate::true_color(); let theme = config .theme .as_ref() 
.and_then(|theme| { editor .theme_loader .load(theme) .map_err(|e| { log::warn!("failed to load theme `{}` - {}", theme, e); e }) .ok() .filter(|theme| { let colors_ok = true_color || theme.is_16_color(); if !colors_ok { log::warn!( "loaded theme `{}` but cannot use it because true color \ support is not enabled", theme.name() ); } colors_ok }) }) .unwrap_or_else(|| editor.theme_loader.default_theme(true_color)); editor.set_theme(theme); } #[cfg(windows)] // no signal handling available on windows pub async fn handle_signals(&mut self, _signal: ()) -> bool { true } #[cfg(not(windows))] pub async fn handle_signals(&mut self, signal: i32) -> bool { match signal { signal::SIGTSTP => { self.restore_term().unwrap(); // SAFETY: // // - helix must have permissions to send signals to all processes in its signal // group, either by already having the requisite permission, or by having the // user's UID / EUID / SUID match that of the receiving process(es). let res = unsafe { // A pid of 0 sends the signal to the entire process group, allowing the user to // regain control of their terminal if the editor was spawned under another process // (e.g. when running `git commit`). // // We have to send SIGSTOP (not SIGTSTP) to the entire process group, because, // as mentioned above, the terminal will get stuck if `helix` was spawned from // an external process and that process waits for `helix` to complete. 
This may // be an issue with signal-hook-tokio, but the author of signal-hook believes it // could be a tokio issue instead: // https://github.com/vorner/signal-hook/issues/132 libc::kill(0, signal::SIGSTOP) }; if res != 0 { let err = std::io::Error::last_os_error(); eprintln!("{}", err); let res = err.raw_os_error().unwrap_or(1); std::process::exit(res); } } signal::SIGCONT => { // Copy/Paste from same issue from neovim: // https://github.com/neovim/neovim/issues/12322 // https://github.com/neovim/neovim/pull/13084 for retries in 1..=10 { match self.claim_term().await { Ok(()) => break, Err(err) if retries == 10 => panic!("Failed to claim terminal: {}", err), Err(_) => continue, } } // redraw the terminal let area = self.terminal.size().expect("couldn't get terminal size"); self.compositor.resize(area); self.terminal.clear().expect("couldn't clear terminal"); self.render().await; } signal::SIGUSR1 => { self.refresh_config(); self.render().await; } signal::SIGTERM | signal::SIGINT => { self.restore_term().unwrap(); return false; } _ => unreachable!(), } true } pub async fn handle_idle_timeout(&mut self) { let mut cx = crate::compositor::Context { editor: &mut self.editor, jobs: &mut self.jobs, scroll: None, }; let should_render = self.compositor.handle_event(&Event::IdleTimeout, &mut cx); if should_render || self.editor.needs_redraw { self.render().await; } } pub fn handle_document_write(&mut self, doc_save_event: DocumentSavedEventResult) { let doc_save_event = match doc_save_event { Ok(event) => event, Err(err) => { self.editor.set_error(err.to_string()); return; } }; let doc = match self.editor.document_mut(doc_save_event.doc_id) { None => { warn!( "received document saved event for non-existent doc id: {}", doc_save_event.doc_id ); return; } Some(doc) => doc, }; debug!( "document {:?} saved with revision {}", doc.path(), doc_save_event.revision ); doc.set_last_saved_revision(doc_save_event.revision, doc_save_event.save_time); let lines = 
doc_save_event.text.len_lines(); let bytes = doc_save_event.text.len_bytes(); self.editor .set_doc_path(doc_save_event.doc_id, &doc_save_event.path); // TODO: fix being overwritten by lsp self.editor.set_status(format!( "'{}' written, {}L {}B", get_relative_path(&doc_save_event.path).to_string_lossy(), lines, bytes )); } #[inline(always)] pub async fn handle_editor_event(&mut self, event: EditorEvent) -> bool { log::debug!("received editor event: {:?}", event); match event { EditorEvent::DocumentSaved(event) => { self.handle_document_write(event); self.render().await; } EditorEvent::ConfigEvent(event) => { self.handle_config_events(event); self.render().await; } EditorEvent::LanguageServerMessage((id, call)) => { self.handle_language_server_message(call, id).await; // limit render calls for fast language server messages helix_event::request_redraw(); } EditorEvent::DebuggerEvent(payload) => { let needs_render = self.editor.handle_debugger_message(payload).await; if needs_render { self.render().await; } } EditorEvent::Redraw => { self.render().await; } EditorEvent::IdleTimer => { self.editor.clear_idle_timer(); self.handle_idle_timeout().await; #[cfg(feature = "integration")] { return true; } } } false } pub async fn handle_terminal_events(&mut self, event: std::io::Result) { let mut cx = crate::compositor::Context { editor: &mut self.editor, jobs: &mut self.jobs, scroll: None, }; // Handle key events let should_redraw = match event.unwrap() { CrosstermEvent::Resize(width, height) => { self.terminal .resize(Rect::new(0, 0, width, height)) .expect("Unable to resize terminal"); let area = self.terminal.size().expect("couldn't get terminal size"); self.compositor.resize(area); self.compositor .handle_event(&Event::Resize(width, height), &mut cx) } // Ignore keyboard release events. CrosstermEvent::Key(crossterm::event::KeyEvent { kind: crossterm::event::KeyEventKind::Release, .. 
}) => false, event => self.compositor.handle_event(&event.into(), &mut cx), }; if should_redraw && !self.editor.should_close() { self.render().await; } } pub async fn handle_language_server_message( &mut self, call: helix_lsp::Call, server_id: LanguageServerId, ) { use helix_lsp::{Call, MethodCall, Notification}; macro_rules! language_server { () => { match self.editor.language_server_by_id(server_id) { Some(language_server) => language_server, None => { warn!("can't find language server with id `{}`", server_id); return; } } }; } match call { Call::Notification(helix_lsp::jsonrpc::Notification { method, params, .. }) => { let notification = match Notification::parse(&method, params) { Ok(notification) => notification, Err(helix_lsp::Error::Unhandled) => { info!("Ignoring Unhandled notification from Language Server"); return; } Err(err) => { error!( "Ignoring unknown notification from Language Server: {}", err ); return; } }; match notification { Notification::Initialized => { let language_server = language_server!(); // Trigger a workspace/didChangeConfiguration notification after initialization. // This might not be required by the spec but Neovim does this as well, so it's // probably a good idea for compatibility. 
if let Some(config) = language_server.config() { language_server.did_change_configuration(config.clone()); } helix_event::dispatch(helix_view::events::LanguageServerInitialized { editor: &mut self.editor, server_id, }); } Notification::PublishDiagnostics(params) => { let uri = match helix_core::Uri::try_from(params.uri) { Ok(uri) => uri, Err(err) => { log::error!("{err}"); return; } }; let language_server = language_server!(); if !language_server.is_initialized() { log::error!("Discarding publishDiagnostic notification sent by an uninitialized server: {}", language_server.name()); return; } let provider = helix_core::diagnostic::DiagnosticProvider::Lsp { server_id, identifier: None, }; self.editor.handle_lsp_diagnostics( &provider, uri, params.version, params.diagnostics, ); } Notification::ShowMessage(params) => { if self.config.load().editor.lsp.display_messages { match params.typ { lsp::MessageType::ERROR => self.editor.set_error(params.message), lsp::MessageType::WARNING => { self.editor.set_warning(params.message) } _ => self.editor.set_status(params.message), } } } Notification::LogMessage(params) => { log::info!("window/logMessage: {:?}", params); } Notification::ProgressMessage(params) if !self .compositor .has_component(std::any::type_name::()) => { let editor_view = self .compositor .find::() .expect("expected at least one EditorView"); let lsp::ProgressParams { token, value: lsp::ProgressParamsValue::WorkDone(work), } = params; let (title, message, percentage) = match &work { lsp::WorkDoneProgress::Begin(lsp::WorkDoneProgressBegin { title, message, percentage, .. }) => (Some(title), message, percentage), lsp::WorkDoneProgress::Report(lsp::WorkDoneProgressReport { message, percentage, .. 
}) => (None, message, percentage), lsp::WorkDoneProgress::End(lsp::WorkDoneProgressEnd { message }) => { if message.is_some() { (None, message, &None) } else { self.lsp_progress.end_progress(server_id, &token); if !self.lsp_progress.is_progressing(server_id) { editor_view.spinners_mut().get_or_create(server_id).stop(); } self.editor.clear_status(); // we want to render to clear any leftover spinners or messages return; } } }; if self.editor.config().lsp.display_progress_messages { let title = title.or_else(|| self.lsp_progress.title(server_id, &token)); if title.is_some() || percentage.is_some() || message.is_some() { use std::fmt::Write as _; let mut status = format!("{}: ", language_server!().name()); if let Some(percentage) = percentage { write!(status, "{percentage:>2}% ").unwrap(); } if let Some(title) = title { status.push_str(title); } if title.is_some() && message.is_some() { status.push_str(" ⋅ "); } if let Some(message) = message { status.push_str(message); } self.editor.set_status(status); } } match work { lsp::WorkDoneProgress::Begin(begin_status) => { self.lsp_progress .begin(server_id, token.clone(), begin_status); } lsp::WorkDoneProgress::Report(report_status) => { self.lsp_progress .update(server_id, token.clone(), report_status); } lsp::WorkDoneProgress::End(_) => { self.lsp_progress.end_progress(server_id, &token); if !self.lsp_progress.is_progressing(server_id) { editor_view.spinners_mut().get_or_create(server_id).stop(); }; } } } Notification::ProgressMessage(_params) => { // do nothing } Notification::Exit => { self.editor.set_status("Language server exited"); // LSPs may produce diagnostics for files that haven't been opened in helix, // we need to clear those and remove the entries from the list if this leads to // an empty diagnostic list for said files for diags in self.editor.diagnostics.values_mut() { diags.retain(|(_, provider)| { provider.language_server_id() != Some(server_id) }); } self.editor.diagnostics.retain(|_, diags| 
!diags.is_empty()); // Clear any diagnostics for documents with this server open. for doc in self.editor.documents_mut() { doc.clear_diagnostics_for_language_server(server_id); } helix_event::dispatch(helix_view::events::LanguageServerExited { editor: &mut self.editor, server_id, }); // Remove the language server from the registry. self.editor.language_servers.remove_by_id(server_id); } } } Call::MethodCall(helix_lsp::jsonrpc::MethodCall { method, params, id, .. }) => { let reply = match MethodCall::parse(&method, params) { Err(helix_lsp::Error::Unhandled) => { error!( "Language Server: Method {} not found in request {}", method, id ); Err(helix_lsp::jsonrpc::Error { code: helix_lsp::jsonrpc::ErrorCode::MethodNotFound, message: format!("Method not found: {}", method), data: None, }) } Err(err) => { log::error!( "Language Server: Received malformed method call {} in request {}: {}", method, id, err ); Err(helix_lsp::jsonrpc::Error { code: helix_lsp::jsonrpc::ErrorCode::ParseError, message: format!("Malformed method call: {}", method), data: None, }) } Ok(MethodCall::WorkDoneProgressCreate(params)) => { self.lsp_progress.create(server_id, params.token); let editor_view = self .compositor .find::() .expect("expected at least one EditorView"); let spinner = editor_view.spinners_mut().get_or_create(server_id); if spinner.is_stopped() { spinner.start(); } Ok(serde_json::Value::Null) } Ok(MethodCall::ApplyWorkspaceEdit(params)) => { let language_server = language_server!(); if language_server.is_initialized() { let offset_encoding = language_server.offset_encoding(); let res = self .editor .apply_workspace_edit(offset_encoding, ¶ms.edit); Ok(json!(lsp::ApplyWorkspaceEditResponse { applied: res.is_ok(), failure_reason: res.as_ref().err().map(|err| err.kind.to_string()), failed_change: res .as_ref() .err() .map(|err| err.failed_change_idx as u32), })) } else { Err(helix_lsp::jsonrpc::Error { code: helix_lsp::jsonrpc::ErrorCode::InvalidRequest, message: "Server must be 
initialized to request workspace edits" .to_string(), data: None, }) } } Ok(MethodCall::WorkspaceFolders) => { Ok(json!(&*language_server!().workspace_folders().await)) } Ok(MethodCall::WorkspaceConfiguration(params)) => { let language_server = language_server!(); let result: Vec<_> = params .items .iter() .map(|item| { let mut config = language_server.config()?; if let Some(section) = item.section.as_ref() { // for some reason some lsps send an empty string (observed in 'vscode-eslint-language-server') if !section.is_empty() { for part in section.split('.') { config = config.get(part)?; } } } Some(config) }) .collect(); Ok(json!(result)) } Ok(MethodCall::RegisterCapability(params)) => { if let Some(client) = self.editor.language_servers.get_by_id(server_id) { for reg in params.registrations { match reg.method.as_str() { lsp::notification::DidChangeWatchedFiles::METHOD => { let Some(options) = reg.register_options else { continue; }; let ops: lsp::DidChangeWatchedFilesRegistrationOptions = match serde_json::from_value(options) { Ok(ops) => ops, Err(err) => { log::warn!("Failed to deserialize DidChangeWatchedFilesRegistrationOptions: {err}"); continue; } }; self.editor.language_servers.file_event_handler.register( client.id(), Arc::downgrade(client), reg.id, ops, ) } _ => { // Language Servers based on the `vscode-languageserver-node` library often send // client/registerCapability even though we do not enable dynamic registration // for most capabilities. We should send a MethodNotFound JSONRPC error in this // case but that rejects the registration promise in the server which causes an // exit. So we work around this by ignoring the request and sending back an OK // response. log::warn!("Ignoring a client/registerCapability request because dynamic capability registration is not enabled. 
Please report this upstream to the language server"); } } } } Ok(serde_json::Value::Null) } Ok(MethodCall::UnregisterCapability(params)) => { for unreg in params.unregisterations { match unreg.method.as_str() { lsp::notification::DidChangeWatchedFiles::METHOD => { self.editor .language_servers .file_event_handler .unregister(server_id, unreg.id); } _ => { log::warn!("Received unregistration request for unsupported method: {}", unreg.method); } } } Ok(serde_json::Value::Null) } Ok(MethodCall::ShowDocument(params)) => { let language_server = language_server!(); let offset_encoding = language_server.offset_encoding(); let result = self.handle_show_document(params, offset_encoding); Ok(json!(result)) } }; let language_server = language_server!(); if let Err(err) = language_server.reply(id.clone(), reply) { log::error!( "Failed to send reply to server '{}' request {id}: {err}", language_server.name() ); } } Call::Invalid { id } => log::error!("LSP invalid method call id={:?}", id), } } fn handle_show_document( &mut self, params: lsp::ShowDocumentParams, offset_encoding: helix_lsp::OffsetEncoding, ) -> lsp::ShowDocumentResult { if let lsp::ShowDocumentParams { external: Some(true), uri, .. } = params { self.jobs.callback(crate::open_external_url_callback(uri)); return lsp::ShowDocumentResult { success: true }; }; let lsp::ShowDocumentParams { uri, selection, take_focus, .. } = params; let uri = match helix_core::Uri::try_from(uri) { Ok(uri) => uri, Err(err) => { log::error!("{err}"); return lsp::ShowDocumentResult { success: false }; } }; // If `Uri` gets another variant other than `Path` this may not be valid. 
let path = uri.as_path().expect("URIs are valid paths"); let action = match take_focus { Some(true) => helix_view::editor::Action::Replace, _ => helix_view::editor::Action::VerticalSplit, }; let doc_id = match self.editor.open(path, action) { Ok(id) => id, Err(err) => { log::error!("failed to open path: {:?}: {:?}", uri, err); return lsp::ShowDocumentResult { success: false }; } }; let doc = doc_mut!(self.editor, &doc_id); if let Some(range) = selection { // TODO: convert inside server if let Some(new_range) = lsp_range_to_range(doc.text(), range, offset_encoding) { let view = view_mut!(self.editor); // we flip the range so that the cursor sits on the start of the symbol // (for example start of the function). doc.set_selection(view.id, Selection::single(new_range.head, new_range.anchor)); if action.align_view(view, doc.id()) { align_view(doc, view, Align::Center); } } else { log::warn!("lsp position out of bounds - {:?}", range); }; }; lsp::ShowDocumentResult { success: true } } async fn claim_term(&mut self) -> std::io::Result<()> { let terminal_config = self.config.load().editor.clone().into(); self.terminal.claim(terminal_config) } fn restore_term(&mut self) -> std::io::Result<()> { let terminal_config = self.config.load().editor.clone().into(); use helix_view::graphics::CursorKind; self.terminal .backend_mut() .show_cursor(CursorKind::Block) .ok(); self.terminal.restore(terminal_config) } pub async fn run(&mut self, input_stream: &mut S) -> Result where S: Stream> + Unpin, { self.claim_term().await?; // Exit the alternate screen and disable raw mode before panicking let hook = std::panic::take_hook(); std::panic::set_hook(Box::new(move |info| { // We can't handle errors properly inside this closure. And it's // probably not a good idea to `unwrap()` inside a panic handler. // So we just ignore the `Result`. 
let _ = TerminalBackend::force_restore(); hook(info); })); self.event_loop(input_stream).await; let close_errs = self.close().await; self.restore_term()?; for err in close_errs { self.editor.exit_code = 1; eprintln!("Error: {}", err); } Ok(self.editor.exit_code) } pub async fn close(&mut self) -> Vec { // [NOTE] we intentionally do not return early for errors because we // want to try to run as much cleanup as we can, regardless of // errors along the way let mut errs = Vec::new(); if let Err(err) = self .jobs .finish(&mut self.editor, Some(&mut self.compositor)) .await { log::error!("Error executing job: {}", err); errs.push(err); }; if let Err(err) = self.editor.flush_writes().await { log::error!("Error writing: {}", err); errs.push(err); } if self.editor.close_language_servers(None).await.is_err() { log::error!("Timed out waiting for language servers to shutdown"); errs.push(anyhow::format_err!( "Timed out waiting for language servers to shutdown" )); } errs } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/args.rs000066400000000000000000000144041500614314100237570ustar00rootroot00000000000000use anyhow::Result; use helix_core::Position; use helix_view::tree::Layout; use indexmap::IndexMap; use std::path::{Path, PathBuf}; #[derive(Default)] pub struct Args { pub display_help: bool, pub display_version: bool, pub health: bool, pub health_arg: Option, pub load_tutor: bool, pub fetch_grammars: bool, pub build_grammars: bool, pub split: Option, pub verbosity: u64, pub log_file: Option, pub config_file: Option, pub files: IndexMap>, pub working_directory: Option, } impl Args { pub fn parse_args() -> Result { let mut args = Args::default(); let mut argv = std::env::args().peekable(); let mut line_number = 0; let mut insert_file_with_position = |file_with_position: &str| { let (filename, position) = parse_file(file_with_position); // Before setting the working directory, resolve all the paths in args.files let filename = 
helix_stdx::path::canonicalize(filename); args.files .entry(filename) .and_modify(|positions| positions.push(position)) .or_insert_with(|| vec![position]); }; argv.next(); // skip the program, we don't care about that while let Some(arg) = argv.next() { match arg.as_str() { "--" => break, // stop parsing at this point treat the remaining as files "--version" => args.display_version = true, "--help" => args.display_help = true, "--tutor" => args.load_tutor = true, "--vsplit" => match args.split { Some(_) => anyhow::bail!("can only set a split once of a specific type"), None => args.split = Some(Layout::Vertical), }, "--hsplit" => match args.split { Some(_) => anyhow::bail!("can only set a split once of a specific type"), None => args.split = Some(Layout::Horizontal), }, "--health" => { args.health = true; args.health_arg = argv.next_if(|opt| !opt.starts_with('-')); } "-g" | "--grammar" => match argv.next().as_deref() { Some("fetch") => args.fetch_grammars = true, Some("build") => args.build_grammars = true, _ => { anyhow::bail!("--grammar must be followed by either 'fetch' or 'build'") } }, "-c" | "--config" => match argv.next().as_deref() { Some(path) => args.config_file = Some(path.into()), None => anyhow::bail!("--config must specify a path to read"), }, "--log" => match argv.next().as_deref() { Some(path) => args.log_file = Some(path.into()), None => anyhow::bail!("--log must specify a path to write"), }, "-w" | "--working-dir" => match argv.next().as_deref() { Some(path) => { args.working_directory = if Path::new(path).is_dir() { Some(PathBuf::from(path)) } else { anyhow::bail!( "--working-dir specified does not exist or is not a directory" ) } } None => { anyhow::bail!("--working-dir must specify an initial working directory") } }, arg if arg.starts_with("--") => { anyhow::bail!("unexpected double dash argument: {}", arg) } arg if arg.starts_with('-') => { let arg = arg.get(1..).unwrap().chars(); for chr in arg { match chr { 'v' => args.verbosity += 1, 'V' => 
args.display_version = true, 'h' => args.display_help = true, _ => anyhow::bail!("unexpected short arg {}", chr), } } } arg if arg.starts_with('+') => { match arg[1..].parse::() { Ok(n) => line_number = n.saturating_sub(1), _ => insert_file_with_position(arg), }; } arg => insert_file_with_position(arg), } } // push the remaining args, if any to the files for arg in argv { insert_file_with_position(&arg); } if line_number != 0 { if let Some(first_position) = args .files .first_mut() .and_then(|(_, positions)| positions.first_mut()) { first_position.row = line_number; } } Ok(args) } } /// Parse arg into [`PathBuf`] and position. pub(crate) fn parse_file(s: &str) -> (PathBuf, Position) { let def = || (PathBuf::from(s), Position::default()); if Path::new(s).exists() { return def(); } split_path_row_col(s) .or_else(|| split_path_row(s)) .unwrap_or_else(def) } /// Split file.rs:10:2 into [`PathBuf`], row and col. /// /// Does not validate if file.rs is a file or directory. fn split_path_row_col(s: &str) -> Option<(PathBuf, Position)> { let mut s = s.trim_end_matches(':').rsplitn(3, ':'); let col: usize = s.next()?.parse().ok()?; let row: usize = s.next()?.parse().ok()?; let path = s.next()?.into(); let pos = Position::new(row.saturating_sub(1), col.saturating_sub(1)); Some((path, pos)) } /// Split file.rs:10 into [`PathBuf`] and row. /// /// Does not validate if file.rs is a file or directory. 
fn split_path_row(s: &str) -> Option<(PathBuf, Position)> { let (path, row) = s.trim_end_matches(':').rsplit_once(':')?; let row: usize = row.parse().ok()?; let path = path.into(); let pos = Position::new(row.saturating_sub(1), 0); Some((path, pos)) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/commands.rs000066400000000000000000007000311500614314100246220ustar00rootroot00000000000000pub(crate) mod dap; pub(crate) mod lsp; pub(crate) mod typed; pub use dap::*; use futures_util::FutureExt; use helix_event::status; use helix_stdx::{ path::{self, find_paths}, rope::{self, RopeSliceExt}, }; use helix_vcs::{FileChange, Hunk}; pub use lsp::*; use tui::{ text::{Span, Spans}, widgets::Cell, }; pub use typed::*; use helix_core::{ char_idx_at_visual_offset, chars::char_is_word, command_line, comment, doc_formatter::TextFormat, encoding, find_workspace, graphemes::{self, next_grapheme_boundary}, history::UndoKind, increment, indent::{self, IndentStyle}, line_ending::{get_line_ending_of_str, line_end_char_index}, match_brackets, movement::{self, move_vertically_visual, Direction}, object, pos_at_coords, regex::{self, Regex}, search::{self, CharMatcher}, selection, surround, syntax::{BlockCommentToken, LanguageServerFeature}, text_annotations::{Overlay, TextAnnotations}, textobject, unicode::width::UnicodeWidthChar, visual_offset_from_block, Deletion, LineEnding, Position, Range, Rope, RopeReader, RopeSlice, Selection, SmallVec, Syntax, Tendril, Transaction, }; use helix_view::{ document::{FormatterError, Mode, SCRATCH_BUFFER_NAME}, editor::Action, info::Info, input::KeyEvent, keyboard::KeyCode, theme::Style, tree, view::View, Document, DocumentId, Editor, ViewId, }; use anyhow::{anyhow, bail, ensure, Context as _}; use insert::*; use movement::Movement; use crate::{ compositor::{self, Component, Compositor}, filter_picker_entry, job::Callback, ui::{self, overlay::overlaid, Picker, PickerColumn, Popup, Prompt, PromptEvent}, }; use crate::job::{self, Jobs}; use 
std::{ char::{ToLowercase, ToUppercase}, cmp::Ordering, collections::{HashMap, HashSet}, error::Error, fmt, future::Future, io::Read, num::NonZeroUsize, }; use std::{ borrow::Cow, path::{Path, PathBuf}, }; use once_cell::sync::Lazy; use serde::de::{self, Deserialize, Deserializer}; use url::Url; use grep_regex::RegexMatcherBuilder; use grep_searcher::{sinks, BinaryDetection, SearcherBuilder}; use ignore::{DirEntry, WalkBuilder, WalkState}; pub type OnKeyCallback = Box; #[derive(PartialEq, Eq, Clone, Copy, Debug)] pub enum OnKeyCallbackKind { PseudoPending, Fallback, } pub struct Context<'a> { pub register: Option, pub count: Option, pub editor: &'a mut Editor, pub callback: Vec, pub on_next_key_callback: Option<(OnKeyCallback, OnKeyCallbackKind)>, pub jobs: &'a mut Jobs, } impl Context<'_> { /// Push a new component onto the compositor. pub fn push_layer(&mut self, component: Box) { self.callback .push(Box::new(|compositor: &mut Compositor, _| { compositor.push(component) })); } /// Call `replace_or_push` on the Compositor pub fn replace_or_push_layer(&mut self, id: &'static str, component: T) { self.callback .push(Box::new(move |compositor: &mut Compositor, _| { compositor.replace_or_push(id, component); })); } #[inline] pub fn on_next_key( &mut self, on_next_key_callback: impl FnOnce(&mut Context, KeyEvent) + 'static, ) { self.on_next_key_callback = Some(( Box::new(on_next_key_callback), OnKeyCallbackKind::PseudoPending, )); } #[inline] pub fn on_next_key_fallback( &mut self, on_next_key_callback: impl FnOnce(&mut Context, KeyEvent) + 'static, ) { self.on_next_key_callback = Some((Box::new(on_next_key_callback), OnKeyCallbackKind::Fallback)); } #[inline] pub fn callback( &mut self, call: impl Future> + 'static + Send, callback: F, ) where T: Send + 'static, F: FnOnce(&mut Editor, &mut Compositor, T) + Send + 'static, { self.jobs.callback(make_job_callback(call, callback)); } /// Returns 1 if no explicit count was provided #[inline] pub fn count(&self) -> usize { 
self.count.map_or(1, |v| v.get()) } /// Waits on all pending jobs, and then tries to flush all pending write /// operations for all documents. pub fn block_try_flush_writes(&mut self) -> anyhow::Result<()> { compositor::Context { editor: self.editor, jobs: self.jobs, scroll: None, } .block_try_flush_writes() } } #[inline] fn make_job_callback( call: impl Future> + 'static + Send, callback: F, ) -> std::pin::Pin>>> where T: Send + 'static, F: FnOnce(&mut Editor, &mut Compositor, T) + Send + 'static, { Box::pin(async move { let response = call.await?; let call: job::Callback = Callback::EditorCompositor(Box::new( move |editor: &mut Editor, compositor: &mut Compositor| { callback(editor, compositor, response) }, )); Ok(call) }) } use helix_view::{align_view, Align}; /// MappableCommands are commands that can be bound to keys, executable in /// normal, insert or select mode. /// /// There are three kinds: /// /// * Static: commands usually bound to keys and used for editing, movement, /// etc., for example `move_char_left`. /// * Typable: commands executable from command mode, prefixed with a `:`, /// for example `:write!`. /// * Macro: a sequence of keys to execute, for example `@miw`. #[derive(Clone)] pub enum MappableCommand { Typable { name: String, args: String, doc: String, }, Static { name: &'static str, fun: fn(cx: &mut Context), doc: &'static str, }, Macro { name: String, keys: Vec, }, } macro_rules! 
static_commands { ( $($name:ident, $doc:literal,)* ) => { $( #[allow(non_upper_case_globals)] pub const $name: Self = Self::Static { name: stringify!($name), fun: $name, doc: $doc }; )* pub const STATIC_COMMAND_LIST: &'static [Self] = &[ $( Self::$name, )* ]; } } impl MappableCommand { pub fn execute(&self, cx: &mut Context) { match &self { Self::Typable { name, args, doc: _ } => { if let Some(command) = typed::TYPABLE_COMMAND_MAP.get(name.as_str()) { let mut cx = compositor::Context { editor: cx.editor, jobs: cx.jobs, scroll: None, }; if let Err(e) = typed::execute_command(&mut cx, command, args, PromptEvent::Validate) { cx.editor.set_error(format!("{}", e)); } } else { cx.editor.set_error(format!("no such command: '{name}'")); } } Self::Static { fun, .. } => (fun)(cx), Self::Macro { keys, .. } => { // Protect against recursive macros. if cx.editor.macro_replaying.contains(&'@') { cx.editor.set_error( "Cannot execute macro because the [@] register is already playing a macro", ); return; } cx.editor.macro_replaying.push('@'); let keys = keys.clone(); cx.callback.push(Box::new(move |compositor, cx| { for key in keys.into_iter() { compositor.handle_event(&compositor::Event::Key(key), cx); } cx.editor.macro_replaying.pop(); })); } } } pub fn name(&self) -> &str { match &self { Self::Typable { name, .. } => name, Self::Static { name, .. } => name, Self::Macro { name, .. } => name, } } pub fn doc(&self) -> &str { match &self { Self::Typable { doc, .. } => doc, Self::Static { doc, .. } => doc, Self::Macro { name, .. 
} => name, } } #[rustfmt::skip] static_commands!( no_op, "Do nothing", move_char_left, "Move left", move_char_right, "Move right", move_line_up, "Move up", move_line_down, "Move down", move_visual_line_up, "Move up", move_visual_line_down, "Move down", extend_char_left, "Extend left", extend_char_right, "Extend right", extend_line_up, "Extend up", extend_line_down, "Extend down", extend_visual_line_up, "Extend up", extend_visual_line_down, "Extend down", copy_selection_on_next_line, "Copy selection on next line", copy_selection_on_prev_line, "Copy selection on previous line", move_next_word_start, "Move to start of next word", move_prev_word_start, "Move to start of previous word", move_next_word_end, "Move to end of next word", move_prev_word_end, "Move to end of previous word", move_next_long_word_start, "Move to start of next long word", move_prev_long_word_start, "Move to start of previous long word", move_next_long_word_end, "Move to end of next long word", move_prev_long_word_end, "Move to end of previous long word", move_next_sub_word_start, "Move to start of next sub word", move_prev_sub_word_start, "Move to start of previous sub word", move_next_sub_word_end, "Move to end of next sub word", move_prev_sub_word_end, "Move to end of previous sub word", move_parent_node_end, "Move to end of the parent node", move_parent_node_start, "Move to beginning of the parent node", extend_next_word_start, "Extend to start of next word", extend_prev_word_start, "Extend to start of previous word", extend_next_word_end, "Extend to end of next word", extend_prev_word_end, "Extend to end of previous word", extend_next_long_word_start, "Extend to start of next long word", extend_prev_long_word_start, "Extend to start of previous long word", extend_next_long_word_end, "Extend to end of next long word", extend_prev_long_word_end, "Extend to end of prev long word", extend_next_sub_word_start, "Extend to start of next sub word", extend_prev_sub_word_start, "Extend to start of 
previous sub word", extend_next_sub_word_end, "Extend to end of next sub word", extend_prev_sub_word_end, "Extend to end of prev sub word", extend_parent_node_end, "Extend to end of the parent node", extend_parent_node_start, "Extend to beginning of the parent node", find_till_char, "Move till next occurrence of char", find_next_char, "Move to next occurrence of char", extend_till_char, "Extend till next occurrence of char", extend_next_char, "Extend to next occurrence of char", till_prev_char, "Move till previous occurrence of char", find_prev_char, "Move to previous occurrence of char", extend_till_prev_char, "Extend till previous occurrence of char", extend_prev_char, "Extend to previous occurrence of char", repeat_last_motion, "Repeat last motion", replace, "Replace with new char", switch_case, "Switch (toggle) case", switch_to_uppercase, "Switch to uppercase", switch_to_lowercase, "Switch to lowercase", page_up, "Move page up", page_down, "Move page down", half_page_up, "Move half page up", half_page_down, "Move half page down", page_cursor_up, "Move page and cursor up", page_cursor_down, "Move page and cursor down", page_cursor_half_up, "Move page and cursor half up", page_cursor_half_down, "Move page and cursor half down", select_all, "Select whole document", select_regex, "Select all regex matches inside selections", split_selection, "Split selections on regex matches", split_selection_on_newline, "Split selection on newlines", merge_selections, "Merge selections", merge_consecutive_selections, "Merge consecutive selections", search, "Search for regex pattern", rsearch, "Reverse search for regex pattern", search_next, "Select next search match", search_prev, "Select previous search match", extend_search_next, "Add next search match to selection", extend_search_prev, "Add previous search match to selection", search_selection, "Use current selection as search pattern", search_selection_detect_word_boundaries, "Use current selection as the search pattern, 
automatically wrapping with `\\b` on word boundaries", make_search_word_bounded, "Modify current search to make it word bounded", global_search, "Global search in workspace folder", extend_line, "Select current line, if already selected, extend to another line based on the anchor", extend_line_below, "Select current line, if already selected, extend to next line", extend_line_above, "Select current line, if already selected, extend to previous line", select_line_above, "Select current line, if already selected, extend or shrink line above based on the anchor", select_line_below, "Select current line, if already selected, extend or shrink line below based on the anchor", extend_to_line_bounds, "Extend selection to line bounds", shrink_to_line_bounds, "Shrink selection to line bounds", delete_selection, "Delete selection", delete_selection_noyank, "Delete selection without yanking", change_selection, "Change selection", change_selection_noyank, "Change selection without yanking", collapse_selection, "Collapse selection into single cursor", flip_selections, "Flip selection cursor and anchor", ensure_selections_forward, "Ensure all selections face forward", insert_mode, "Insert before selection", append_mode, "Append after selection", command_mode, "Enter command mode", file_picker, "Open file picker", file_picker_in_current_buffer_directory, "Open file picker at current buffer's directory", file_picker_in_current_directory, "Open file picker at current working directory", file_explorer, "Open file explorer in workspace root", file_explorer_in_current_buffer_directory, "Open file explorer at current buffer's directory", file_explorer_in_current_directory, "Open file explorer at current working directory", code_action, "Perform code action", buffer_picker, "Open buffer picker", jumplist_picker, "Open jumplist picker", symbol_picker, "Open symbol picker", changed_file_picker, "Open changed file picker", select_references_to_symbol_under_cursor, "Select symbol 
references", workspace_symbol_picker, "Open workspace symbol picker", diagnostics_picker, "Open diagnostic picker", workspace_diagnostics_picker, "Open workspace diagnostic picker", last_picker, "Open last picker", insert_at_line_start, "Insert at start of line", insert_at_line_end, "Insert at end of line", open_below, "Open new line below selection", open_above, "Open new line above selection", normal_mode, "Enter normal mode", select_mode, "Enter selection extend mode", exit_select_mode, "Exit selection mode", goto_definition, "Goto definition", goto_declaration, "Goto declaration", add_newline_above, "Add newline above", add_newline_below, "Add newline below", goto_type_definition, "Goto type definition", goto_implementation, "Goto implementation", goto_file_start, "Goto line number else file start", goto_file_end, "Goto file end", extend_to_file_start, "Extend to line number else file start", extend_to_file_end, "Extend to file end", goto_file, "Goto files/URLs in selections", goto_file_hsplit, "Goto files in selections (hsplit)", goto_file_vsplit, "Goto files in selections (vsplit)", goto_reference, "Goto references", goto_window_top, "Goto window top", goto_window_center, "Goto window center", goto_window_bottom, "Goto window bottom", goto_last_accessed_file, "Goto last accessed file", goto_last_modified_file, "Goto last modified file", goto_last_modification, "Goto last modification", goto_line, "Goto line", goto_last_line, "Goto last line", extend_to_last_line, "Extend to last line", goto_first_diag, "Goto first diagnostic", goto_last_diag, "Goto last diagnostic", goto_next_diag, "Goto next diagnostic", goto_prev_diag, "Goto previous diagnostic", goto_next_change, "Goto next change", goto_prev_change, "Goto previous change", goto_first_change, "Goto first change", goto_last_change, "Goto last change", goto_line_start, "Goto line start", goto_line_end, "Goto line end", goto_column, "Goto column", extend_to_column, "Extend to column", goto_next_buffer, "Goto 
next buffer", goto_previous_buffer, "Goto previous buffer", goto_line_end_newline, "Goto newline at line end", goto_first_nonwhitespace, "Goto first non-blank in line", trim_selections, "Trim whitespace from selections", extend_to_line_start, "Extend to line start", extend_to_first_nonwhitespace, "Extend to first non-blank in line", extend_to_line_end, "Extend to line end", extend_to_line_end_newline, "Extend to line end", signature_help, "Show signature help", smart_tab, "Insert tab if all cursors have all whitespace to their left; otherwise, run a separate command.", insert_tab, "Insert tab char", insert_newline, "Insert newline char", delete_char_backward, "Delete previous char", delete_char_forward, "Delete next char", delete_word_backward, "Delete previous word", delete_word_forward, "Delete next word", kill_to_line_start, "Delete till start of line", kill_to_line_end, "Delete till end of line", undo, "Undo change", redo, "Redo change", earlier, "Move backward in history", later, "Move forward in history", commit_undo_checkpoint, "Commit changes to new checkpoint", yank, "Yank selection", yank_to_clipboard, "Yank selections to clipboard", yank_to_primary_clipboard, "Yank selections to primary clipboard", yank_joined, "Join and yank selections", yank_joined_to_clipboard, "Join and yank selections to clipboard", yank_main_selection_to_clipboard, "Yank main selection to clipboard", yank_joined_to_primary_clipboard, "Join and yank selections to primary clipboard", yank_main_selection_to_primary_clipboard, "Yank main selection to primary clipboard", replace_with_yanked, "Replace with yanked text", replace_selections_with_clipboard, "Replace selections by clipboard content", replace_selections_with_primary_clipboard, "Replace selections by primary clipboard", paste_after, "Paste after selection", paste_before, "Paste before selection", paste_clipboard_after, "Paste clipboard after selections", paste_clipboard_before, "Paste clipboard before selections", 
paste_primary_clipboard_after, "Paste primary clipboard after selections", paste_primary_clipboard_before, "Paste primary clipboard before selections", indent, "Indent selection", unindent, "Unindent selection", format_selections, "Format selection", join_selections, "Join lines inside selection", join_selections_space, "Join lines inside selection and select spaces", keep_selections, "Keep selections matching regex", remove_selections, "Remove selections matching regex", align_selections, "Align selections in column", keep_primary_selection, "Keep primary selection", remove_primary_selection, "Remove primary selection", completion, "Invoke completion popup", hover, "Show docs for item under cursor", toggle_comments, "Comment/uncomment selections", toggle_line_comments, "Line comment/uncomment selections", toggle_block_comments, "Block comment/uncomment selections", rotate_selections_forward, "Rotate selections forward", rotate_selections_backward, "Rotate selections backward", rotate_selection_contents_forward, "Rotate selection contents forward", rotate_selection_contents_backward, "Rotate selections contents backward", reverse_selection_contents, "Reverse selections contents", expand_selection, "Expand selection to parent syntax node", shrink_selection, "Shrink selection to previously expanded syntax node", select_next_sibling, "Select next sibling in the syntax tree", select_prev_sibling, "Select previous sibling the in syntax tree", select_all_siblings, "Select all siblings of the current node", select_all_children, "Select all children of the current node", jump_forward, "Jump forward on jumplist", jump_backward, "Jump backward on jumplist", save_selection, "Save current selection to jumplist", jump_view_right, "Jump to right split", jump_view_left, "Jump to left split", jump_view_up, "Jump to split above", jump_view_down, "Jump to split below", swap_view_right, "Swap with right split", swap_view_left, "Swap with left split", swap_view_up, "Swap with split 
above", swap_view_down, "Swap with split below", transpose_view, "Transpose splits", rotate_view, "Goto next window", rotate_view_reverse, "Goto previous window", hsplit, "Horizontal bottom split", hsplit_new, "Horizontal bottom split scratch buffer", vsplit, "Vertical right split", vsplit_new, "Vertical right split scratch buffer", wclose, "Close window", wonly, "Close windows except current", select_register, "Select register", insert_register, "Insert register", copy_between_registers, "Copy between two registers", align_view_middle, "Align view middle", align_view_top, "Align view top", align_view_center, "Align view center", align_view_bottom, "Align view bottom", scroll_up, "Scroll view up", scroll_down, "Scroll view down", match_brackets, "Goto matching bracket", surround_add, "Surround add", surround_replace, "Surround replace", surround_delete, "Surround delete", select_textobject_around, "Select around object", select_textobject_inner, "Select inside object", goto_next_function, "Goto next function", goto_prev_function, "Goto previous function", goto_next_class, "Goto next type definition", goto_prev_class, "Goto previous type definition", goto_next_parameter, "Goto next parameter", goto_prev_parameter, "Goto previous parameter", goto_next_comment, "Goto next comment", goto_prev_comment, "Goto previous comment", goto_next_test, "Goto next test", goto_prev_test, "Goto previous test", goto_next_entry, "Goto next pairing", goto_prev_entry, "Goto previous pairing", goto_next_paragraph, "Goto next paragraph", goto_prev_paragraph, "Goto previous paragraph", dap_launch, "Launch debug target", dap_restart, "Restart debugging session", dap_toggle_breakpoint, "Toggle breakpoint", dap_continue, "Continue program execution", dap_pause, "Pause program execution", dap_step_in, "Step in", dap_step_out, "Step out", dap_next, "Step to next", dap_variables, "List variables", dap_terminate, "End debug session", dap_edit_condition, "Edit breakpoint condition on current 
line", dap_edit_log, "Edit breakpoint log message on current line", dap_switch_thread, "Switch current thread", dap_switch_stack_frame, "Switch stack frame", dap_enable_exceptions, "Enable exception breakpoints", dap_disable_exceptions, "Disable exception breakpoints", shell_pipe, "Pipe selections through shell command", shell_pipe_to, "Pipe selections into shell command ignoring output", shell_insert_output, "Insert shell command output before selections", shell_append_output, "Append shell command output after selections", shell_keep_pipe, "Filter selections with shell predicate", suspend, "Suspend and return to shell", rename_symbol, "Rename symbol", increment, "Increment item under cursor", decrement, "Decrement item under cursor", record_macro, "Record macro", replay_macro, "Replay macro", command_palette, "Open command palette", goto_word, "Jump to a two-character label", extend_to_word, "Extend to a two-character label", goto_next_tabstop, "goto next snippet placeholder", goto_prev_tabstop, "goto next snippet placeholder", ); } impl fmt::Debug for MappableCommand { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { MappableCommand::Static { name, .. } => { f.debug_tuple("MappableCommand").field(name).finish() } MappableCommand::Typable { name, args, .. } => f .debug_tuple("MappableCommand") .field(name) .field(args) .finish(), MappableCommand::Macro { name, keys, .. 
} => f
                .debug_tuple("MappableCommand")
                .field(name)
                .field(keys)
                .finish(),
        }
    }
}

impl fmt::Display for MappableCommand {
    /// User-facing form: just the command name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.name())
    }
}

impl std::str::FromStr for MappableCommand {
    type Err = anyhow::Error;

    /// Parse a keymap command string:
    /// - `":name args"` → a typable command looked up in `TYPABLE_COMMAND_MAP`,
    /// - `"@keys"` → a keyboard macro,
    /// - anything else → a static command from `STATIC_COMMAND_LIST`.
    // NOTE(review): restored `<Self, Self::Err>` on the return type; the
    // generic arguments were stripped when this source was flattened.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if let Some(suffix) = s.strip_prefix(':') {
            let (name, args, _) = command_line::split(suffix);
            ensure!(!name.is_empty(), "Expected typable command name");
            typed::TYPABLE_COMMAND_MAP
                .get(name)
                .map(|cmd| {
                    // When args are given, show the literal invocation as the doc.
                    let doc = if args.is_empty() {
                        cmd.doc.to_string()
                    } else {
                        format!(":{} {:?}", cmd.name, args)
                    };
                    MappableCommand::Typable {
                        name: cmd.name.to_owned(),
                        doc,
                        args: args.to_string(),
                    }
                })
                .ok_or_else(|| anyhow!("No TypableCommand named '{}'", s))
        } else if let Some(suffix) = s.strip_prefix('@') {
            helix_view::input::parse_macro(suffix).map(|keys| Self::Macro {
                name: s.to_string(),
                keys,
            })
        } else {
            MappableCommand::STATIC_COMMAND_LIST
                .iter()
                .find(|cmd| cmd.name() == s)
                .cloned()
                .ok_or_else(|| anyhow!("No command named '{}'", s))
        }
    }
}

impl<'de> Deserialize<'de> for MappableCommand {
    // NOTE(review): restored the stripped `<D>` parameter and the `Result`
    // generic arguments; the `where D: Deserializer<'de>` bound requires them.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        s.parse().map_err(de::Error::custom)
    }
}

// Commands compare equal by name (typable commands also compare args);
// mismatched variants fall through to the catch-all below.
impl PartialEq for MappableCommand {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (
                MappableCommand::Typable {
                    name: first_name,
                    args: first_args,
                    ..
                },
                MappableCommand::Typable {
                    name: second_name,
                    args: second_args,
                    ..
                },
            ) => first_name == second_name && first_args == second_args,
            (
                MappableCommand::Static {
                    name: first_name, ..
                },
                MappableCommand::Static {
                    name: second_name, ..
},
        ) => first_name == second_name,
            _ => false,
        }
    }
}

/// Command that intentionally does nothing (useful for explicitly unbinding a key).
fn no_op(_cx: &mut Context) {}

/// Shared signature of the movement primitives that `move_impl` dispatches to
/// (e.g. `move_horizontally`, `move_vertically`).
type MoveFn =
    fn(RopeSlice, Range, Direction, usize, Movement, &TextFormat, &mut TextAnnotations) -> Range;

/// Transform every range of the current selection with `move_fn`, applying the
/// pending count and either moving or extending according to `behaviour`.
fn move_impl(cx: &mut Context, move_fn: MoveFn, dir: Direction, behaviour: Movement) {
    let count = cx.count();
    let (view, doc) = current!(cx.editor);
    let text = doc.text().slice(..);
    let text_fmt = doc.text_format(view.inner_area(doc).width, None);
    let mut annotations = view.text_annotations(doc, None);

    let selection = doc.selection(view.id).clone().transform(|range| {
        move_fn(
            text,
            range,
            dir,
            count,
            behaviour,
            &text_fmt,
            &mut annotations,
        )
    });
    // `annotations` was derived from `view`/`doc`; drop it before mutating the
    // document's selection.
    drop(annotations);
    doc.set_selection(view.id, selection);
}

use helix_core::movement::{move_horizontally, move_vertically};

// Thin wrappers binding `move_impl` to a direction and move/extend behaviour.

fn move_char_left(cx: &mut Context) {
    move_impl(cx, move_horizontally, Direction::Backward, Movement::Move)
}

fn move_char_right(cx: &mut Context) {
    move_impl(cx, move_horizontally, Direction::Forward, Movement::Move)
}

fn move_line_up(cx: &mut Context) {
    move_impl(cx, move_vertically, Direction::Backward, Movement::Move)
}

fn move_line_down(cx: &mut Context) {
    move_impl(cx, move_vertically, Direction::Forward, Movement::Move)
}

fn move_visual_line_up(cx: &mut Context) {
    move_impl(
        cx,
        move_vertically_visual,
        Direction::Backward,
        Movement::Move,
    )
}

fn move_visual_line_down(cx: &mut Context) {
    move_impl(
        cx,
        move_vertically_visual,
        Direction::Forward,
        Movement::Move,
    )
}

fn extend_char_left(cx: &mut Context) {
    move_impl(cx, move_horizontally, Direction::Backward, Movement::Extend)
}

fn extend_char_right(cx: &mut Context) {
    move_impl(cx, move_horizontally, Direction::Forward, Movement::Extend)
}

fn extend_line_up(cx: &mut Context) {
    move_impl(cx, move_vertically, Direction::Backward, Movement::Extend)
}

fn extend_line_down(cx: &mut Context) {
    move_impl(cx, move_vertically, Direction::Forward, Movement::Extend)
}

fn extend_visual_line_up(cx: &mut Context) {
    move_impl(
        cx,
        move_vertically_visual,
Direction::Backward, Movement::Extend, ) } fn extend_visual_line_down(cx: &mut Context) { move_impl( cx, move_vertically_visual, Direction::Forward, Movement::Extend, ) } fn goto_line_end_impl(view: &mut View, doc: &mut Document, movement: Movement) { let text = doc.text().slice(..); let selection = doc.selection(view.id).clone().transform(|range| { let line = range.cursor_line(text); let line_start = text.line_to_char(line); let pos = graphemes::prev_grapheme_boundary(text, line_end_char_index(&text, line)) .max(line_start); range.put_cursor(text, pos, movement == Movement::Extend) }); doc.set_selection(view.id, selection); } fn goto_line_end(cx: &mut Context) { let (view, doc) = current!(cx.editor); goto_line_end_impl( view, doc, if cx.editor.mode == Mode::Select { Movement::Extend } else { Movement::Move }, ) } fn extend_to_line_end(cx: &mut Context) { let (view, doc) = current!(cx.editor); goto_line_end_impl(view, doc, Movement::Extend) } fn goto_line_end_newline_impl(view: &mut View, doc: &mut Document, movement: Movement) { let text = doc.text().slice(..); let selection = doc.selection(view.id).clone().transform(|range| { let line = range.cursor_line(text); let pos = line_end_char_index(&text, line); range.put_cursor(text, pos, movement == Movement::Extend) }); doc.set_selection(view.id, selection); } fn goto_line_end_newline(cx: &mut Context) { let (view, doc) = current!(cx.editor); goto_line_end_newline_impl( view, doc, if cx.editor.mode == Mode::Select { Movement::Extend } else { Movement::Move }, ) } fn extend_to_line_end_newline(cx: &mut Context) { let (view, doc) = current!(cx.editor); goto_line_end_newline_impl(view, doc, Movement::Extend) } fn goto_line_start_impl(view: &mut View, doc: &mut Document, movement: Movement) { let text = doc.text().slice(..); let selection = doc.selection(view.id).clone().transform(|range| { let line = range.cursor_line(text); // adjust to start of the line let pos = text.line_to_char(line); range.put_cursor(text, pos, 
movement == Movement::Extend) }); doc.set_selection(view.id, selection); } fn goto_line_start(cx: &mut Context) { let (view, doc) = current!(cx.editor); goto_line_start_impl( view, doc, if cx.editor.mode == Mode::Select { Movement::Extend } else { Movement::Move }, ) } fn goto_next_buffer(cx: &mut Context) { goto_buffer(cx.editor, Direction::Forward, cx.count()); } fn goto_previous_buffer(cx: &mut Context) { goto_buffer(cx.editor, Direction::Backward, cx.count()); } fn goto_buffer(editor: &mut Editor, direction: Direction, count: usize) { let current = view!(editor).doc; let id = match direction { Direction::Forward => { let iter = editor.documents.keys(); // skip 'count' times past current buffer iter.cycle().skip_while(|id| *id != ¤t).nth(count) } Direction::Backward => { let iter = editor.documents.keys(); // skip 'count' times past current buffer iter.rev() .cycle() .skip_while(|id| *id != ¤t) .nth(count) } } .unwrap(); let id = *id; editor.switch(id, Action::Replace); } fn extend_to_line_start(cx: &mut Context) { let (view, doc) = current!(cx.editor); goto_line_start_impl(view, doc, Movement::Extend) } fn kill_to_line_start(cx: &mut Context) { delete_by_selection_insert_mode( cx, move |text, range| { let line = range.cursor_line(text); let first_char = text.line_to_char(line); let anchor = range.cursor(text); let head = if anchor == first_char && line != 0 { // select until previous line line_end_char_index(&text, line - 1) } else if let Some(pos) = text.line(line).first_non_whitespace_char() { if first_char + pos < anchor { // select until first non-blank in line if cursor is after it first_char + pos } else { // select until start of line first_char } } else { // select until start of line first_char }; (head, anchor) }, Direction::Backward, ); } fn kill_to_line_end(cx: &mut Context) { delete_by_selection_insert_mode( cx, |text, range| { let line = range.cursor_line(text); let line_end_pos = line_end_char_index(&text, line); let pos = range.cursor(text); // 
if the cursor is on the newline char delete that if pos == line_end_pos { (pos, text.line_to_char(line + 1)) } else { (pos, line_end_pos) } }, Direction::Forward, ); } fn goto_first_nonwhitespace(cx: &mut Context) { let (view, doc) = current!(cx.editor); goto_first_nonwhitespace_impl( view, doc, if cx.editor.mode == Mode::Select { Movement::Extend } else { Movement::Move }, ) } fn extend_to_first_nonwhitespace(cx: &mut Context) { let (view, doc) = current!(cx.editor); goto_first_nonwhitespace_impl(view, doc, Movement::Extend) } fn goto_first_nonwhitespace_impl(view: &mut View, doc: &mut Document, movement: Movement) { let text = doc.text().slice(..); let selection = doc.selection(view.id).clone().transform(|range| { let line = range.cursor_line(text); if let Some(pos) = text.line(line).first_non_whitespace_char() { let pos = pos + text.line_to_char(line); range.put_cursor(text, pos, movement == Movement::Extend) } else { range } }); doc.set_selection(view.id, selection); } fn trim_selections(cx: &mut Context) { let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let ranges: SmallVec<[Range; 1]> = doc .selection(view.id) .iter() .filter_map(|range| { if range.is_empty() || range.slice(text).chars().all(|ch| ch.is_whitespace()) { return None; } let mut start = range.from(); let mut end = range.to(); start = movement::skip_while(text, start, |x| x.is_whitespace()).unwrap_or(start); end = movement::backwards_skip_while(text, end, |x| x.is_whitespace()).unwrap_or(end); Some(Range::new(start, end).with_direction(range.direction())) }) .collect(); if !ranges.is_empty() { let primary = doc.selection(view.id).primary(); let idx = ranges .iter() .position(|range| range.overlaps(&primary)) .unwrap_or(ranges.len() - 1); doc.set_selection(view.id, Selection::new(ranges, idx)); } else { collapse_selection(cx); keep_primary_selection(cx); }; } // align text in selection #[allow(deprecated)] fn align_selections(cx: &mut Context) { use 
helix_core::visual_coords_at_pos; let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let selection = doc.selection(view.id); let tab_width = doc.tab_width(); let mut column_widths: Vec> = Vec::new(); let mut last_line = text.len_lines() + 1; let mut col = 0; for range in selection { let coords = visual_coords_at_pos(text, range.head, tab_width); let anchor_coords = visual_coords_at_pos(text, range.anchor, tab_width); if coords.row != anchor_coords.row { cx.editor .set_error("align cannot work with multi line selections"); return; } col = if coords.row == last_line { col + 1 } else { 0 }; if col >= column_widths.len() { column_widths.push(Vec::new()); } column_widths[col].push((range.from(), coords.col)); last_line = coords.row; } let mut changes = Vec::with_capacity(selection.len()); // Account for changes on each row let len = column_widths.first().map(|cols| cols.len()).unwrap_or(0); let mut offs = vec![0; len]; for col in column_widths { let max_col = col .iter() .enumerate() .map(|(row, (_, cursor))| *cursor + offs[row]) .max() .unwrap_or(0); for (row, (insert_pos, last_col)) in col.into_iter().enumerate() { let ins_count = max_col - (last_col + offs[row]); if ins_count == 0 { continue; } offs[row] += ins_count; changes.push((insert_pos, insert_pos, Some(" ".repeat(ins_count).into()))); } } // The changeset has to be sorted changes.sort_unstable_by_key(|(from, _, _)| *from); let transaction = Transaction::change(doc.text(), changes.into_iter()); doc.apply(&transaction, view.id); exit_select_mode(cx); } fn goto_window(cx: &mut Context, align: Align) { let count = cx.count() - 1; let config = cx.editor.config(); let (view, doc) = current!(cx.editor); let view_offset = doc.view_offset(view.id); let height = view.inner_height(); // respect user given count if any // - 1 so we have at least one gap in the middle. 
// a height of 6 with padding of 3 on each side will keep shifting the view back and forth // as we type let scrolloff = config.scrolloff.min(height.saturating_sub(1) / 2); let last_visual_line = view.last_visual_line(doc); let visual_line = match align { Align::Top => view_offset.vertical_offset + scrolloff + count, Align::Center => view_offset.vertical_offset + (last_visual_line / 2), Align::Bottom => { view_offset.vertical_offset + last_visual_line.saturating_sub(scrolloff + count) } }; let visual_line = visual_line .max(view_offset.vertical_offset + scrolloff) .min(view_offset.vertical_offset + last_visual_line.saturating_sub(scrolloff)); let pos = view .pos_at_visual_coords(doc, visual_line as u16, 0, false) .expect("visual_line was constrained to the view area"); let text = doc.text().slice(..); let selection = doc .selection(view.id) .clone() .transform(|range| range.put_cursor(text, pos, cx.editor.mode == Mode::Select)); doc.set_selection(view.id, selection); } fn goto_window_top(cx: &mut Context) { goto_window(cx, Align::Top) } fn goto_window_center(cx: &mut Context) { goto_window(cx, Align::Center) } fn goto_window_bottom(cx: &mut Context) { goto_window(cx, Align::Bottom) } fn move_word_impl(cx: &mut Context, move_fn: F) where F: Fn(RopeSlice, Range, usize) -> Range, { let count = cx.count(); let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let selection = doc .selection(view.id) .clone() .transform(|range| move_fn(text, range, count)); doc.set_selection(view.id, selection); } fn move_next_word_start(cx: &mut Context) { move_word_impl(cx, movement::move_next_word_start) } fn move_prev_word_start(cx: &mut Context) { move_word_impl(cx, movement::move_prev_word_start) } fn move_prev_word_end(cx: &mut Context) { move_word_impl(cx, movement::move_prev_word_end) } fn move_next_word_end(cx: &mut Context) { move_word_impl(cx, movement::move_next_word_end) } fn move_next_long_word_start(cx: &mut Context) { move_word_impl(cx, 
movement::move_next_long_word_start) } fn move_prev_long_word_start(cx: &mut Context) { move_word_impl(cx, movement::move_prev_long_word_start) } fn move_prev_long_word_end(cx: &mut Context) { move_word_impl(cx, movement::move_prev_long_word_end) } fn move_next_long_word_end(cx: &mut Context) { move_word_impl(cx, movement::move_next_long_word_end) } fn move_next_sub_word_start(cx: &mut Context) { move_word_impl(cx, movement::move_next_sub_word_start) } fn move_prev_sub_word_start(cx: &mut Context) { move_word_impl(cx, movement::move_prev_sub_word_start) } fn move_prev_sub_word_end(cx: &mut Context) { move_word_impl(cx, movement::move_prev_sub_word_end) } fn move_next_sub_word_end(cx: &mut Context) { move_word_impl(cx, movement::move_next_sub_word_end) } fn goto_para_impl(cx: &mut Context, move_fn: F) where F: Fn(RopeSlice, Range, usize, Movement) -> Range + 'static, { let count = cx.count(); let motion = move |editor: &mut Editor| { let (view, doc) = current!(editor); let text = doc.text().slice(..); let behavior = if editor.mode == Mode::Select { Movement::Extend } else { Movement::Move }; let selection = doc .selection(view.id) .clone() .transform(|range| move_fn(text, range, count, behavior)); doc.set_selection(view.id, selection); }; cx.editor.apply_motion(motion) } fn goto_prev_paragraph(cx: &mut Context) { goto_para_impl(cx, movement::move_prev_paragraph) } fn goto_next_paragraph(cx: &mut Context) { goto_para_impl(cx, movement::move_next_paragraph) } fn goto_file_start(cx: &mut Context) { goto_file_start_impl(cx, Movement::Move); } fn extend_to_file_start(cx: &mut Context) { goto_file_start_impl(cx, Movement::Extend); } fn goto_file_start_impl(cx: &mut Context, movement: Movement) { if cx.count.is_some() { goto_line_impl(cx, movement); } else { let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let selection = doc .selection(view.id) .clone() .transform(|range| range.put_cursor(text, 0, movement == Movement::Extend)); push_jump(view, 
doc); doc.set_selection(view.id, selection); } } fn goto_file_end(cx: &mut Context) { goto_file_end_impl(cx, Movement::Move); } fn extend_to_file_end(cx: &mut Context) { goto_file_end_impl(cx, Movement::Extend) } fn goto_file_end_impl(cx: &mut Context, movement: Movement) { let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let pos = doc.text().len_chars(); let selection = doc .selection(view.id) .clone() .transform(|range| range.put_cursor(text, pos, movement == Movement::Extend)); push_jump(view, doc); doc.set_selection(view.id, selection); } fn goto_file(cx: &mut Context) { goto_file_impl(cx, Action::Replace); } fn goto_file_hsplit(cx: &mut Context) { goto_file_impl(cx, Action::HorizontalSplit); } fn goto_file_vsplit(cx: &mut Context) { goto_file_impl(cx, Action::VerticalSplit); } /// Goto files in selection. fn goto_file_impl(cx: &mut Context, action: Action) { let (view, doc) = current_ref!(cx.editor); let text = doc.text().slice(..); let selections = doc.selection(view.id); let primary = selections.primary(); let rel_path = doc .relative_path() .map(|path| path.parent().unwrap().to_path_buf()) .unwrap_or_default(); let paths: Vec<_> = if selections.len() == 1 && primary.len() == 1 { // Cap the search at roughly 1k bytes around the cursor. 
let lookaround = 1000; let pos = text.char_to_byte(primary.cursor(text)); let search_start = text .line_to_byte(text.byte_to_line(pos)) .max(text.floor_char_boundary(pos.saturating_sub(lookaround))); let search_end = text .line_to_byte(text.byte_to_line(pos) + 1) .min(text.ceil_char_boundary(pos + lookaround)); let search_range = text.byte_slice(search_start..search_end); // we also allow paths that are next to the cursor (can be ambiguous but // rarely so in practice) so that gf on quoted/braced path works (not sure about this // but apparently that is how gf has worked historically in helix) let path = find_paths(search_range, true) .take_while(|range| search_start + range.start <= pos + 1) .find(|range| pos <= search_start + range.end) .map(|range| Cow::from(search_range.byte_slice(range))); log::debug!("goto_file auto-detected path: {path:?}"); let path = path.unwrap_or_else(|| primary.fragment(text)); vec![path.into_owned()] } else { // Otherwise use each selection, trimmed. selections .fragments(text) .map(|sel| sel.trim().to_owned()) .filter(|sel| !sel.is_empty()) .collect() }; for sel in paths { if let Ok(url) = Url::parse(&sel) { open_url(cx, url, action); continue; } let path = path::expand(&sel); let path = &rel_path.join(path); if path.is_dir() { let picker = ui::file_picker(cx.editor, path.into()); cx.push_layer(Box::new(overlaid(picker))); } else if let Err(e) = cx.editor.open(path, action) { cx.editor.set_error(format!("Open file failed: {:?}", e)); } } } /// Opens the given url. If the URL points to a valid textual file it is open in helix. // Otherwise, the file is open using external program. 
fn open_url(cx: &mut Context, url: Url, action: Action) { let doc = doc!(cx.editor); let rel_path = doc .relative_path() .map(|path| path.parent().unwrap().to_path_buf()) .unwrap_or_default(); if url.scheme() != "file" { return cx.jobs.callback(crate::open_external_url_callback(url)); } let content_type = std::fs::File::open(url.path()).and_then(|file| { // Read up to 1kb to detect the content type let mut read_buffer = Vec::new(); let n = file.take(1024).read_to_end(&mut read_buffer)?; Ok(content_inspector::inspect(&read_buffer[..n])) }); // we attempt to open binary files - files that can't be open in helix - using external // program as well, e.g. pdf files or images match content_type { Ok(content_inspector::ContentType::BINARY) => { cx.jobs.callback(crate::open_external_url_callback(url)) } Ok(_) | Err(_) => { let path = &rel_path.join(url.path()); if path.is_dir() { let picker = ui::file_picker(cx.editor, path.into()); cx.push_layer(Box::new(overlaid(picker))); } else if let Err(e) = cx.editor.open(path, action) { cx.editor.set_error(format!("Open file failed: {:?}", e)); } } } } fn extend_word_impl(cx: &mut Context, extend_fn: F) where F: Fn(RopeSlice, Range, usize) -> Range, { let count = cx.count(); let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let selection = doc.selection(view.id).clone().transform(|range| { let word = extend_fn(text, range, count); let pos = word.cursor(text); range.put_cursor(text, pos, true) }); doc.set_selection(view.id, selection); } fn extend_next_word_start(cx: &mut Context) { extend_word_impl(cx, movement::move_next_word_start) } fn extend_prev_word_start(cx: &mut Context) { extend_word_impl(cx, movement::move_prev_word_start) } fn extend_next_word_end(cx: &mut Context) { extend_word_impl(cx, movement::move_next_word_end) } fn extend_prev_word_end(cx: &mut Context) { extend_word_impl(cx, movement::move_prev_word_end) } fn extend_next_long_word_start(cx: &mut Context) { extend_word_impl(cx, 
movement::move_next_long_word_start) } fn extend_prev_long_word_start(cx: &mut Context) { extend_word_impl(cx, movement::move_prev_long_word_start) } fn extend_prev_long_word_end(cx: &mut Context) { extend_word_impl(cx, movement::move_prev_long_word_end) } fn extend_next_long_word_end(cx: &mut Context) { extend_word_impl(cx, movement::move_next_long_word_end) } fn extend_next_sub_word_start(cx: &mut Context) { extend_word_impl(cx, movement::move_next_sub_word_start) } fn extend_prev_sub_word_start(cx: &mut Context) { extend_word_impl(cx, movement::move_prev_sub_word_start) } fn extend_prev_sub_word_end(cx: &mut Context) { extend_word_impl(cx, movement::move_prev_sub_word_end) } fn extend_next_sub_word_end(cx: &mut Context) { extend_word_impl(cx, movement::move_next_sub_word_end) } /// Separate branch to find_char designed only for `` char. // // This is necessary because the one document can have different line endings inside. And we // cannot predict what character to find when is pressed. On the current line it can be `lf` // but on the next line it can be `crlf`. That's why [`find_char_impl`] cannot be applied here. fn find_char_line_ending( cx: &mut Context, count: usize, direction: Direction, inclusive: bool, extend: bool, ) { let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let selection = doc.selection(view.id).clone().transform(|range| { let cursor = range.cursor(text); let cursor_line = range.cursor_line(text); // Finding the line where we're going to find . 
Depends mostly on // `count`, but also takes into account edge cases where we're already at the end // of a line or the beginning of a line let find_on_line = match direction { Direction::Forward => { let on_edge = line_end_char_index(&text, cursor_line) == cursor; let line = cursor_line + count - 1 + (on_edge as usize); if line >= text.len_lines() - 1 { return range; } else { line } } Direction::Backward => { let on_edge = text.line_to_char(cursor_line) == cursor && !inclusive; let line = cursor_line as isize - (count as isize - 1 + on_edge as isize); if line <= 0 { return range; } else { line as usize } } }; let pos = match (direction, inclusive) { (Direction::Forward, true) => line_end_char_index(&text, find_on_line), (Direction::Forward, false) => line_end_char_index(&text, find_on_line) - 1, (Direction::Backward, true) => line_end_char_index(&text, find_on_line - 1), (Direction::Backward, false) => text.line_to_char(find_on_line), }; if extend { range.put_cursor(text, pos, true) } else { Range::point(range.cursor(text)).put_cursor(text, pos, true) } }); doc.set_selection(view.id, selection); } fn find_char(cx: &mut Context, direction: Direction, inclusive: bool, extend: bool) { // TODO: count is reset to 1 before next key so we move it into the closure here. // Would be nice to carry over. let count = cx.count(); // need to wait for next key // TODO: should this be done by grapheme rather than char? For example, // we can't properly handle the line-ending CRLF case here in terms of char. cx.on_next_key(move |cx, event| { let ch = match event { KeyEvent { code: KeyCode::Enter, .. } => { find_char_line_ending(cx, count, direction, inclusive, extend); return; } KeyEvent { code: KeyCode::Tab, .. } => '\t', KeyEvent { code: KeyCode::Char(ch), .. 
} => ch,
            _ => return,
        };
        let motion = move |editor: &mut Editor| {
            match direction {
                Direction::Forward => {
                    find_char_impl(editor, &find_next_char_impl, inclusive, extend, ch, count)
                }
                Direction::Backward => {
                    find_char_impl(editor, &find_prev_char_impl, inclusive, extend, ch, count)
                }
            };
        };
        cx.editor.apply_motion(motion);
    })
}

/// Move/extend each selection range to the `count`-th match found by `search_fn`.
// NOTE(review): restored the `<F, M>` generic parameters and the `Option<usize>`
// in the bound, stripped during markup flattening. `M: Copy` because
// `char_matcher` is passed by value to `search_fn` once per range.
// #[inline]
fn find_char_impl<F, M: Copy>(
    editor: &mut Editor,
    search_fn: &F,
    inclusive: bool,
    extend: bool,
    char_matcher: M,
    count: usize,
) where
    F: Fn(RopeSlice, M, usize, usize, bool) -> Option<usize> + 'static,
{
    let (view, doc) = current!(editor);
    let text = doc.text().slice(..);

    let selection = doc.selection(view.id).clone().transform(|range| {
        // TODO: use `Range::cursor()` here instead. However, that works in terms of
        // graphemes, whereas this function doesn't yet. So we're doing the same logic
        // here, but just in terms of chars instead.
        let search_start_pos = if range.anchor < range.head {
            range.head - 1
        } else {
            range.head
        };

        search_fn(text, char_matcher, search_start_pos, count, inclusive).map_or(range, |pos| {
            if extend {
                range.put_cursor(text, pos, true)
            } else {
                Range::point(range.cursor(text)).put_cursor(text, pos, true)
            }
        })
    });
    doc.set_selection(view.id, selection);
}

/// Find the `n`-th occurrence of `ch` after `pos`; exclusive mode stops one
/// char before the match (`t` motion semantics).
fn find_next_char_impl(
    text: RopeSlice,
    ch: char,
    pos: usize,
    n: usize,
    inclusive: bool,
) -> Option<usize> {
    let pos = (pos + 1).min(text.len_chars());
    if inclusive {
        search::find_nth_next(text, ch, pos, n)
    } else {
        // If the next char is already the target, skip one more match so the
        // exclusive motion still makes progress.
        let n = match text.get_char(pos) {
            Some(next_ch) if next_ch == ch => n + 1,
            _ => n,
        };
        search::find_nth_next(text, ch, pos, n).map(|n| n.saturating_sub(1))
    }
}

/// Backward counterpart of [`find_next_char_impl`]; exclusive mode stops one
/// char after the match.
fn find_prev_char_impl(
    text: RopeSlice,
    ch: char,
    pos: usize,
    n: usize,
    inclusive: bool,
) -> Option<usize> {
    if inclusive {
        search::find_nth_prev(text, ch, pos, n)
    } else {
        let n = match text.get_char(pos.saturating_sub(1)) {
            Some(next_ch) if next_ch == ch => n + 1,
            _ => n,
        };
        search::find_nth_prev(text, ch, pos, n).map(|n| (n + 1).min(text.len_chars()))
    }
}

fn find_till_char(cx: &mut Context) {
    find_char(cx,
Direction::Forward, false, false); } fn find_next_char(cx: &mut Context) { find_char(cx, Direction::Forward, true, false) } fn extend_till_char(cx: &mut Context) { find_char(cx, Direction::Forward, false, true) } fn extend_next_char(cx: &mut Context) { find_char(cx, Direction::Forward, true, true) } fn till_prev_char(cx: &mut Context) { find_char(cx, Direction::Backward, false, false) } fn find_prev_char(cx: &mut Context) { find_char(cx, Direction::Backward, true, false) } fn extend_till_prev_char(cx: &mut Context) { find_char(cx, Direction::Backward, false, true) } fn extend_prev_char(cx: &mut Context) { find_char(cx, Direction::Backward, true, true) } fn repeat_last_motion(cx: &mut Context) { cx.editor.repeat_last_motion(cx.count()) } fn replace(cx: &mut Context) { let mut buf = [0u8; 4]; // To hold utf8 encoded char. // need to wait for next key cx.on_next_key(move |cx, event| { let (view, doc) = current!(cx.editor); let ch: Option<&str> = match event { KeyEvent { code: KeyCode::Char(ch), .. } => Some(ch.encode_utf8(&mut buf[..])), KeyEvent { code: KeyCode::Enter, .. } => Some(doc.line_ending.as_str()), KeyEvent { code: KeyCode::Tab, .. } => Some("\t"), _ => None, }; let selection = doc.selection(view.id); if let Some(ch) = ch { let transaction = Transaction::change_by_selection(doc.text(), selection, |range| { if !range.is_empty() { let text: Tendril = doc .text() .slice(range.from()..range.to()) .graphemes() .map(|_g| ch) .collect(); (range.from(), range.to(), Some(text)) } else { // No change. 
(range.from(), range.to(), None) } }); doc.apply(&transaction, view.id); exit_select_mode(cx); } }) } fn switch_case_impl(cx: &mut Context, change_fn: F) where F: Fn(RopeSlice) -> Tendril, { let (view, doc) = current!(cx.editor); let selection = doc.selection(view.id); let transaction = Transaction::change_by_selection(doc.text(), selection, |range| { let text: Tendril = change_fn(range.slice(doc.text().slice(..))); (range.from(), range.to(), Some(text)) }); doc.apply(&transaction, view.id); exit_select_mode(cx); } enum CaseSwitcher { Upper(ToUppercase), Lower(ToLowercase), Keep(Option), } impl Iterator for CaseSwitcher { type Item = char; fn next(&mut self) -> Option { match self { CaseSwitcher::Upper(upper) => upper.next(), CaseSwitcher::Lower(lower) => lower.next(), CaseSwitcher::Keep(ch) => ch.take(), } } fn size_hint(&self) -> (usize, Option) { match self { CaseSwitcher::Upper(upper) => upper.size_hint(), CaseSwitcher::Lower(lower) => lower.size_hint(), CaseSwitcher::Keep(ch) => { let n = if ch.is_some() { 1 } else { 0 }; (n, Some(n)) } } } } impl ExactSizeIterator for CaseSwitcher {} fn switch_case(cx: &mut Context) { switch_case_impl(cx, |string| { string .chars() .flat_map(|ch| { if ch.is_lowercase() { CaseSwitcher::Upper(ch.to_uppercase()) } else if ch.is_uppercase() { CaseSwitcher::Lower(ch.to_lowercase()) } else { CaseSwitcher::Keep(Some(ch)) } }) .collect() }); } fn switch_to_uppercase(cx: &mut Context) { switch_case_impl(cx, |string| { string.chunks().map(|chunk| chunk.to_uppercase()).collect() }); } fn switch_to_lowercase(cx: &mut Context) { switch_case_impl(cx, |string| { string.chunks().map(|chunk| chunk.to_lowercase()).collect() }); } pub fn scroll(cx: &mut Context, offset: usize, direction: Direction, sync_cursor: bool) { use Direction::*; let config = cx.editor.config(); let (view, doc) = current!(cx.editor); let mut view_offset = doc.view_offset(view.id); let range = doc.selection(view.id).primary(); let text = doc.text().slice(..); let cursor = 
range.cursor(text); let height = view.inner_height(); let scrolloff = config.scrolloff.min(height.saturating_sub(1) / 2); let offset = match direction { Forward => offset as isize, Backward => -(offset as isize), }; let doc_text = doc.text().slice(..); let viewport = view.inner_area(doc); let text_fmt = doc.text_format(viewport.width, None); (view_offset.anchor, view_offset.vertical_offset) = char_idx_at_visual_offset( doc_text, view_offset.anchor, view_offset.vertical_offset as isize + offset, 0, &text_fmt, // &annotations, &view.text_annotations(&*doc, None), ); doc.set_view_offset(view.id, view_offset); let doc_text = doc.text().slice(..); let mut annotations = view.text_annotations(&*doc, None); if sync_cursor { let movement = match cx.editor.mode { Mode::Select => Movement::Extend, _ => Movement::Move, }; // TODO: When inline diagnostics gets merged- 1. move_vertically_visual removes // line annotations/diagnostics so the cursor may jump further than the view. // 2. If the cursor lands on a complete line of virtual text, the cursor will // jump a different distance than the view. 
let selection = doc.selection(view.id).clone().transform(|range| { move_vertically_visual( doc_text, range, direction, offset.unsigned_abs(), movement, &text_fmt, &mut annotations, ) }); drop(annotations); doc.set_selection(view.id, selection); return; } let view_offset = doc.view_offset(view.id); let mut head; match direction { Forward => { let off; (head, off) = char_idx_at_visual_offset( doc_text, view_offset.anchor, (view_offset.vertical_offset + scrolloff) as isize, 0, &text_fmt, &annotations, ); head += (off != 0) as usize; if head <= cursor { return; } } Backward => { head = char_idx_at_visual_offset( doc_text, view_offset.anchor, (view_offset.vertical_offset + height - scrolloff - 1) as isize, 0, &text_fmt, &annotations, ) .0; if head >= cursor { return; } } } let anchor = if cx.editor.mode == Mode::Select { range.anchor } else { head }; // replace primary selection with an empty selection at cursor pos let prim_sel = Range::new(anchor, head); let mut sel = doc.selection(view.id).clone(); let idx = sel.primary_index(); sel = sel.replace(idx, prim_sel); drop(annotations); doc.set_selection(view.id, sel); } fn page_up(cx: &mut Context) { let view = view!(cx.editor); let offset = view.inner_height(); scroll(cx, offset, Direction::Backward, false); } fn page_down(cx: &mut Context) { let view = view!(cx.editor); let offset = view.inner_height(); scroll(cx, offset, Direction::Forward, false); } fn half_page_up(cx: &mut Context) { let view = view!(cx.editor); let offset = view.inner_height() / 2; scroll(cx, offset, Direction::Backward, false); } fn half_page_down(cx: &mut Context) { let view = view!(cx.editor); let offset = view.inner_height() / 2; scroll(cx, offset, Direction::Forward, false); } fn page_cursor_up(cx: &mut Context) { let view = view!(cx.editor); let offset = view.inner_height(); scroll(cx, offset, Direction::Backward, true); } fn page_cursor_down(cx: &mut Context) { let view = view!(cx.editor); let offset = view.inner_height(); scroll(cx, offset, 
Direction::Forward, true); } fn page_cursor_half_up(cx: &mut Context) { let view = view!(cx.editor); let offset = view.inner_height() / 2; scroll(cx, offset, Direction::Backward, true); } fn page_cursor_half_down(cx: &mut Context) { let view = view!(cx.editor); let offset = view.inner_height() / 2; scroll(cx, offset, Direction::Forward, true); } #[allow(deprecated)] // currently uses the deprecated `visual_coords_at_pos`/`pos_at_visual_coords` functions // as this function ignores softwrapping (and virtual text) and instead only cares // about "text visual position" // // TODO: implement a variant of that uses visual lines and respects virtual text fn copy_selection_on_line(cx: &mut Context, direction: Direction) { use helix_core::{pos_at_visual_coords, visual_coords_at_pos}; let count = cx.count(); let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let selection = doc.selection(view.id); let mut ranges = SmallVec::with_capacity(selection.ranges().len() * (count + 1)); ranges.extend_from_slice(selection.ranges()); let mut primary_index = 0; for range in selection.iter() { let is_primary = *range == selection.primary(); // The range is always head exclusive let (head, anchor) = if range.anchor < range.head { (range.head - 1, range.anchor) } else { (range.head, range.anchor.saturating_sub(1)) }; let tab_width = doc.tab_width(); let head_pos = visual_coords_at_pos(text, head, tab_width); let anchor_pos = visual_coords_at_pos(text, anchor, tab_width); let height = std::cmp::max(head_pos.row, anchor_pos.row) - std::cmp::min(head_pos.row, anchor_pos.row) + 1; if is_primary { primary_index = ranges.len(); } ranges.push(*range); let mut sels = 0; let mut i = 0; while sels < count { let offset = (i + 1) * height; let anchor_row = match direction { Direction::Forward => anchor_pos.row + offset, Direction::Backward => anchor_pos.row.saturating_sub(offset), }; let head_row = match direction { Direction::Forward => head_pos.row + offset, Direction::Backward 
=> head_pos.row.saturating_sub(offset), }; if anchor_row >= text.len_lines() || head_row >= text.len_lines() { break; } let anchor = pos_at_visual_coords(text, Position::new(anchor_row, anchor_pos.col), tab_width); let head = pos_at_visual_coords(text, Position::new(head_row, head_pos.col), tab_width); // skip lines that are too short if visual_coords_at_pos(text, anchor, tab_width).col == anchor_pos.col && visual_coords_at_pos(text, head, tab_width).col == head_pos.col { if is_primary { primary_index = ranges.len(); } // This is Range::new(anchor, head), but it will place the cursor on the correct column ranges.push(Range::point(anchor).put_cursor(text, head, true)); sels += 1; } if anchor_row == 0 && head_row == 0 { break; } i += 1; } } let selection = Selection::new(ranges, primary_index); doc.set_selection(view.id, selection); } fn copy_selection_on_prev_line(cx: &mut Context) { copy_selection_on_line(cx, Direction::Backward) } fn copy_selection_on_next_line(cx: &mut Context) { copy_selection_on_line(cx, Direction::Forward) } fn select_all(cx: &mut Context) { let (view, doc) = current!(cx.editor); let end = doc.text().len_chars(); doc.set_selection(view.id, Selection::single(0, end)) } fn select_regex(cx: &mut Context) { let reg = cx.register.unwrap_or('/'); ui::regex_prompt( cx, "select:".into(), Some(reg), ui::completers::none, move |cx, regex, event| { let (view, doc) = current!(cx.editor); if !matches!(event, PromptEvent::Update | PromptEvent::Validate) { return; } let text = doc.text().slice(..); if let Some(selection) = selection::select_on_matches(text, doc.selection(view.id), ®ex) { doc.set_selection(view.id, selection); } else { cx.editor.set_error("nothing selected"); } }, ); } fn split_selection(cx: &mut Context) { let reg = cx.register.unwrap_or('/'); ui::regex_prompt( cx, "split:".into(), Some(reg), ui::completers::none, move |cx, regex, event| { let (view, doc) = current!(cx.editor); if !matches!(event, PromptEvent::Update | PromptEvent::Validate) 
{ return; } let text = doc.text().slice(..); let selection = selection::split_on_matches(text, doc.selection(view.id), ®ex); doc.set_selection(view.id, selection); }, ); } fn split_selection_on_newline(cx: &mut Context) { let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let selection = selection::split_on_newline(text, doc.selection(view.id)); doc.set_selection(view.id, selection); } fn merge_selections(cx: &mut Context) { let (view, doc) = current!(cx.editor); let selection = doc.selection(view.id).clone().merge_ranges(); doc.set_selection(view.id, selection); } fn merge_consecutive_selections(cx: &mut Context) { let (view, doc) = current!(cx.editor); let selection = doc.selection(view.id).clone().merge_consecutive_ranges(); doc.set_selection(view.id, selection); } #[allow(clippy::too_many_arguments)] fn search_impl( editor: &mut Editor, regex: &rope::Regex, movement: Movement, direction: Direction, scrolloff: usize, wrap_around: bool, show_warnings: bool, ) { let (view, doc) = current!(editor); let text = doc.text().slice(..); let selection = doc.selection(view.id); // Get the right side of the primary block cursor for forward search, or the // grapheme before the start of the selection for reverse search. let start = match direction { Direction::Forward => text.char_to_byte(graphemes::ensure_grapheme_boundary_next( text, selection.primary().to(), )), Direction::Backward => text.char_to_byte(graphemes::ensure_grapheme_boundary_prev( text, selection.primary().from(), )), }; // A regex::Match returns byte-positions in the str. In the case where we // do a reverse search and wraparound to the end, we don't need to search // the text before the current cursor position for matches, but by slicing // it out, we need to add it back to the position of the selection. let doc = doc!(editor).text().slice(..); // use find_at to find the next match after the cursor, loop around the end // Careful, `Regex` uses `bytes` as offsets, not character indices! 
let mut mat = match direction { Direction::Forward => regex.find(doc.regex_input_at_bytes(start..)), Direction::Backward => regex.find_iter(doc.regex_input_at_bytes(..start)).last(), }; if mat.is_none() { if wrap_around { mat = match direction { Direction::Forward => regex.find(doc.regex_input()), Direction::Backward => regex.find_iter(doc.regex_input_at_bytes(start..)).last(), }; } if show_warnings { if wrap_around && mat.is_some() { editor.set_status("Wrapped around document"); } else { editor.set_error("No more matches"); } } } let (view, doc) = current!(editor); let text = doc.text().slice(..); let selection = doc.selection(view.id); if let Some(mat) = mat { let start = text.byte_to_char(mat.start()); let end = text.byte_to_char(mat.end()); if end == 0 { // skip empty matches that don't make sense return; } // Determine range direction based on the primary range let primary = selection.primary(); let range = Range::new(start, end).with_direction(primary.direction()); let selection = match movement { Movement::Extend => selection.clone().push(range), Movement::Move => selection.clone().replace(selection.primary_index(), range), }; doc.set_selection(view.id, selection); view.ensure_cursor_in_view_center(doc, scrolloff); }; } fn search_completions(cx: &mut Context, reg: Option) -> Vec { let mut items = reg .and_then(|reg| cx.editor.registers.read(reg, cx.editor)) .map_or(Vec::new(), |reg| reg.take(200).collect()); items.sort_unstable(); items.dedup(); items.into_iter().map(|value| value.to_string()).collect() } fn search(cx: &mut Context) { searcher(cx, Direction::Forward) } fn rsearch(cx: &mut Context) { searcher(cx, Direction::Backward) } fn searcher(cx: &mut Context, direction: Direction) { let reg = cx.register.unwrap_or('/'); let config = cx.editor.config(); let scrolloff = config.scrolloff; let wrap_around = config.search.wrap_around; let movement = if cx.editor.mode() == Mode::Select { Movement::Extend } else { Movement::Move }; // TODO: could probably 
share with select_on_matches? let completions = search_completions(cx, Some(reg)); ui::regex_prompt( cx, "search:".into(), Some(reg), move |_editor: &Editor, input: &str| { completions .iter() .filter(|comp| comp.starts_with(input)) .map(|comp| (0.., comp.clone().into())) .collect() }, move |cx, regex, event| { if event == PromptEvent::Validate { cx.editor.registers.last_search_register = reg; } else if event != PromptEvent::Update { return; } search_impl( cx.editor, ®ex, movement, direction, scrolloff, wrap_around, false, ); }, ); } fn search_next_or_prev_impl(cx: &mut Context, movement: Movement, direction: Direction) { let count = cx.count(); let register = cx .register .unwrap_or(cx.editor.registers.last_search_register); let config = cx.editor.config(); let scrolloff = config.scrolloff; if let Some(query) = cx.editor.registers.first(register, cx.editor) { let search_config = &config.search; let case_insensitive = if search_config.smart_case { !query.chars().any(char::is_uppercase) } else { false }; let wrap_around = search_config.wrap_around; if let Ok(regex) = rope::RegexBuilder::new() .syntax( rope::Config::new() .case_insensitive(case_insensitive) .multi_line(true), ) .build(&query) { for _ in 0..count { search_impl( cx.editor, ®ex, movement, direction, scrolloff, wrap_around, true, ); } } else { let error = format!("Invalid regex: {}", query); cx.editor.set_error(error); } } } fn search_next(cx: &mut Context) { search_next_or_prev_impl(cx, Movement::Move, Direction::Forward); } fn search_prev(cx: &mut Context) { search_next_or_prev_impl(cx, Movement::Move, Direction::Backward); } fn extend_search_next(cx: &mut Context) { search_next_or_prev_impl(cx, Movement::Extend, Direction::Forward); } fn extend_search_prev(cx: &mut Context) { search_next_or_prev_impl(cx, Movement::Extend, Direction::Backward); } fn search_selection(cx: &mut Context) { search_selection_impl(cx, false) } fn search_selection_detect_word_boundaries(cx: &mut Context) { 
search_selection_impl(cx, true) } fn search_selection_impl(cx: &mut Context, detect_word_boundaries: bool) { fn is_at_word_start(text: RopeSlice, index: usize) -> bool { // This can happen when the cursor is at the last character in // the document +1 (ge + j), in this case text.char(index) will panic as // it will index out of bounds. See https://github.com/helix-editor/helix/issues/12609 if index == text.len_chars() { return false; } let ch = text.char(index); if index == 0 { return char_is_word(ch); } let prev_ch = text.char(index - 1); !char_is_word(prev_ch) && char_is_word(ch) } fn is_at_word_end(text: RopeSlice, index: usize) -> bool { if index == 0 || index == text.len_chars() { return false; } let ch = text.char(index); let prev_ch = text.char(index - 1); char_is_word(prev_ch) && !char_is_word(ch) } let register = cx.register.unwrap_or('/'); let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let regex = doc .selection(view.id) .iter() .map(|selection| { let add_boundary_prefix = detect_word_boundaries && is_at_word_start(text, selection.from()); let add_boundary_suffix = detect_word_boundaries && is_at_word_end(text, selection.to()); let prefix = if add_boundary_prefix { "\\b" } else { "" }; let suffix = if add_boundary_suffix { "\\b" } else { "" }; let word = regex::escape(&selection.fragment(text)); format!("{}{}{}", prefix, word, suffix) }) .collect::>() // Collect into hashset to deduplicate identical regexes .into_iter() .collect::>() .join("|"); let msg = format!("register '{}' set to '{}'", register, ®ex); match cx.editor.registers.push(register, regex) { Ok(_) => { cx.editor.registers.last_search_register = register; cx.editor.set_status(msg) } Err(err) => cx.editor.set_error(err.to_string()), } } fn make_search_word_bounded(cx: &mut Context) { // Defaults to the active search register instead `/` to be more ergonomic assuming most people // would use this command following `search_selection`. 
This avoids selecting the register // twice. let register = cx .register .unwrap_or(cx.editor.registers.last_search_register); let regex = match cx.editor.registers.first(register, cx.editor) { Some(regex) => regex, None => return, }; let start_anchored = regex.starts_with("\\b"); let end_anchored = regex.ends_with("\\b"); if start_anchored && end_anchored { return; } let mut new_regex = String::with_capacity( regex.len() + if start_anchored { 0 } else { 2 } + if end_anchored { 0 } else { 2 }, ); if !start_anchored { new_regex.push_str("\\b"); } new_regex.push_str(®ex); if !end_anchored { new_regex.push_str("\\b"); } let msg = format!("register '{}' set to '{}'", register, &new_regex); match cx.editor.registers.push(register, new_regex) { Ok(_) => { cx.editor.registers.last_search_register = register; cx.editor.set_status(msg) } Err(err) => cx.editor.set_error(err.to_string()), } } fn global_search(cx: &mut Context) { #[derive(Debug)] struct FileResult { path: PathBuf, /// 0 indexed lines line_num: usize, } impl FileResult { fn new(path: &Path, line_num: usize) -> Self { Self { path: path.to_path_buf(), line_num, } } } struct GlobalSearchConfig { smart_case: bool, file_picker_config: helix_view::editor::FilePickerConfig, directory_style: Style, number_style: Style, colon_style: Style, } let config = cx.editor.config(); let config = GlobalSearchConfig { smart_case: config.search.smart_case, file_picker_config: config.file_picker.clone(), directory_style: cx.editor.theme.get("ui.text.directory"), number_style: cx.editor.theme.get("constant.numeric.integer"), colon_style: cx.editor.theme.get("punctuation"), }; let columns = [ PickerColumn::new("path", |item: &FileResult, config: &GlobalSearchConfig| { let path = helix_stdx::path::get_relative_path(&item.path); let directories = path .parent() .filter(|p| !p.as_os_str().is_empty()) .map(|p| format!("{}{}", p.display(), std::path::MAIN_SEPARATOR)) .unwrap_or_default(); let filename = item .path .file_name() 
.expect("global search paths are normalized (can't end in `..`)") .to_string_lossy(); Cell::from(Spans::from(vec![ Span::styled(directories, config.directory_style), Span::raw(filename), Span::styled(":", config.colon_style), Span::styled((item.line_num + 1).to_string(), config.number_style), ])) }), PickerColumn::hidden("contents"), ]; let get_files = |query: &str, editor: &mut Editor, config: std::sync::Arc, injector: &ui::picker::Injector<_, _>| { if query.is_empty() { return async { Ok(()) }.boxed(); } let search_root = helix_stdx::env::current_working_dir(); if !search_root.exists() { return async { Err(anyhow::anyhow!("Current working directory does not exist")) } .boxed(); } let documents: Vec<_> = editor .documents() .map(|doc| (doc.path().cloned(), doc.text().to_owned())) .collect(); let matcher = match RegexMatcherBuilder::new() .case_smart(config.smart_case) .build(query) { Ok(matcher) => { // Clear any "Failed to compile regex" errors out of the statusline. editor.clear_status(); matcher } Err(err) => { log::info!("Failed to compile search pattern in global search: {}", err); return async { Err(anyhow::anyhow!("Failed to compile regex")) }.boxed(); } }; let dedup_symlinks = config.file_picker_config.deduplicate_links; let absolute_root = search_root .canonicalize() .unwrap_or_else(|_| search_root.clone()); let injector = injector.clone(); async move { let searcher = SearcherBuilder::new() .binary_detection(BinaryDetection::quit(b'\x00')) .build(); WalkBuilder::new(search_root) .hidden(config.file_picker_config.hidden) .parents(config.file_picker_config.parents) .ignore(config.file_picker_config.ignore) .follow_links(config.file_picker_config.follow_symlinks) .git_ignore(config.file_picker_config.git_ignore) .git_global(config.file_picker_config.git_global) .git_exclude(config.file_picker_config.git_exclude) .max_depth(config.file_picker_config.max_depth) .filter_entry(move |entry| { filter_picker_entry(entry, &absolute_root, dedup_symlinks) }) 
.add_custom_ignore_filename(helix_loader::config_dir().join("ignore")) .add_custom_ignore_filename(".helix/ignore") .build_parallel() .run(|| { let mut searcher = searcher.clone(); let matcher = matcher.clone(); let injector = injector.clone(); let documents = &documents; Box::new(move |entry: Result| -> WalkState { let entry = match entry { Ok(entry) => entry, Err(_) => return WalkState::Continue, }; match entry.file_type() { Some(entry) if entry.is_file() => {} // skip everything else _ => return WalkState::Continue, }; let mut stop = false; let sink = sinks::UTF8(|line_num, _line_content| { stop = injector .push(FileResult::new(entry.path(), line_num as usize - 1)) .is_err(); Ok(!stop) }); let doc = documents.iter().find(|&(doc_path, _)| { doc_path .as_ref() .is_some_and(|doc_path| doc_path == entry.path()) }); let result = if let Some((_, doc)) = doc { // there is already a buffer for this file // search the buffer instead of the file because it's faster // and captures new edits without requiring a save if searcher.multi_line_with_matcher(&matcher) { // in this case a continuous buffer is required // convert the rope to a string let text = doc.to_string(); searcher.search_slice(&matcher, text.as_bytes(), sink) } else { searcher.search_reader( &matcher, RopeReader::new(doc.slice(..)), sink, ) } } else { searcher.search_path(&matcher, entry.path(), sink) }; if let Err(err) = result { log::error!("Global search error: {}, {}", entry.path().display(), err); } if stop { WalkState::Quit } else { WalkState::Continue } }) }); Ok(()) } .boxed() }; let reg = cx.register.unwrap_or('/'); cx.editor.registers.last_search_register = reg; let picker = Picker::new( columns, 1, // contents [], config, move |cx, FileResult { path, line_num, .. 
}, action| { let doc = match cx.editor.open(path, action) { Ok(id) => doc_mut!(cx.editor, &id), Err(e) => { cx.editor .set_error(format!("Failed to open file '{}': {}", path.display(), e)); return; } }; let line_num = *line_num; let view = view_mut!(cx.editor); let text = doc.text(); if line_num >= text.len_lines() { cx.editor.set_error( "The line you jumped to does not exist anymore because the file has changed.", ); return; } let start = text.line_to_char(line_num); let end = text.line_to_char((line_num + 1).min(text.len_lines())); doc.set_selection(view.id, Selection::single(start, end)); if action.align_view(view, doc.id()) { align_view(doc, view, Align::Center); } }, ) .with_preview(|_editor, FileResult { path, line_num, .. }| { Some((path.as_path().into(), Some((*line_num, *line_num)))) }) .with_history_register(Some(reg)) .with_dynamic_query(get_files, Some(275)); cx.push_layer(Box::new(overlaid(picker))); } enum Extend { Above, Below, } fn extend_line(cx: &mut Context) { let (view, doc) = current_ref!(cx.editor); let extend = match doc.selection(view.id).primary().direction() { Direction::Forward => Extend::Below, Direction::Backward => Extend::Above, }; extend_line_impl(cx, extend); } fn extend_line_below(cx: &mut Context) { extend_line_impl(cx, Extend::Below); } fn extend_line_above(cx: &mut Context) { extend_line_impl(cx, Extend::Above); } fn extend_line_impl(cx: &mut Context, extend: Extend) { let count = cx.count(); let (view, doc) = current!(cx.editor); let text = doc.text(); let selection = doc.selection(view.id).clone().transform(|range| { let (start_line, end_line) = range.line_range(text.slice(..)); let start = text.line_to_char(start_line); let end = text.line_to_char( (end_line + 1) // newline of end_line .min(text.len_lines()), ); // extend to previous/next line if current line is selected let (anchor, head) = if range.from() == start && range.to() == end { match extend { Extend::Above => (end, 
text.line_to_char(start_line.saturating_sub(count))), Extend::Below => ( start, text.line_to_char((end_line + count + 1).min(text.len_lines())), ), } } else { match extend { Extend::Above => (end, text.line_to_char(start_line.saturating_sub(count - 1))), Extend::Below => ( start, text.line_to_char((end_line + count).min(text.len_lines())), ), } }; Range::new(anchor, head) }); doc.set_selection(view.id, selection); } fn select_line_below(cx: &mut Context) { select_line_impl(cx, Extend::Below); } fn select_line_above(cx: &mut Context) { select_line_impl(cx, Extend::Above); } fn select_line_impl(cx: &mut Context, extend: Extend) { let mut count = cx.count(); let (view, doc) = current!(cx.editor); let text = doc.text(); let saturating_add = |a: usize, b: usize| (a + b).min(text.len_lines()); let selection = doc.selection(view.id).clone().transform(|range| { let (start_line, end_line) = range.line_range(text.slice(..)); let start = text.line_to_char(start_line); let end = text.line_to_char(saturating_add(end_line, 1)); let direction = range.direction(); // Extending to line bounds is counted as one step if range.from() != start || range.to() != end { count = count.saturating_sub(1) } let (anchor_line, head_line) = match (&extend, direction) { (Extend::Above, Direction::Forward) => (start_line, end_line.saturating_sub(count)), (Extend::Above, Direction::Backward) => (end_line, start_line.saturating_sub(count)), (Extend::Below, Direction::Forward) => (start_line, saturating_add(end_line, count)), (Extend::Below, Direction::Backward) => (end_line, saturating_add(start_line, count)), }; let (anchor, head) = match anchor_line.cmp(&head_line) { Ordering::Less => ( text.line_to_char(anchor_line), text.line_to_char(saturating_add(head_line, 1)), ), Ordering::Equal => match extend { Extend::Above => ( text.line_to_char(saturating_add(anchor_line, 1)), text.line_to_char(head_line), ), Extend::Below => ( text.line_to_char(head_line), text.line_to_char(saturating_add(anchor_line, 
1)), ), }, Ordering::Greater => ( text.line_to_char(saturating_add(anchor_line, 1)), text.line_to_char(head_line), ), }; Range::new(anchor, head) }); doc.set_selection(view.id, selection); } fn extend_to_line_bounds(cx: &mut Context) { let (view, doc) = current!(cx.editor); doc.set_selection( view.id, doc.selection(view.id).clone().transform(|range| { let text = doc.text(); let (start_line, end_line) = range.line_range(text.slice(..)); let start = text.line_to_char(start_line); let end = text.line_to_char((end_line + 1).min(text.len_lines())); Range::new(start, end).with_direction(range.direction()) }), ); } fn shrink_to_line_bounds(cx: &mut Context) { let (view, doc) = current!(cx.editor); doc.set_selection( view.id, doc.selection(view.id).clone().transform(|range| { let text = doc.text(); let (start_line, end_line) = range.line_range(text.slice(..)); // Do nothing if the selection is within one line to prevent // conditional logic for the behavior of this command if start_line == end_line { return range; } let mut start = text.line_to_char(start_line); // line_to_char gives us the start position of the line, so // we need to get the start position of the next line. In // the editor, this will correspond to the cursor being on // the EOL whitespace character, which is what we want. let mut end = text.line_to_char((end_line + 1).min(text.len_lines())); if start != range.from() { start = text.line_to_char((start_line + 1).min(text.len_lines())); } if end != range.to() { end = text.line_to_char(end_line); } Range::new(start, end).with_direction(range.direction()) }), ); } enum Operation { Delete, Change, } fn selection_is_linewise(selection: &Selection, text: &Rope) -> bool { selection.ranges().iter().all(|range| { let text = text.slice(..); if range.slice(text).len_lines() < 2 { return false; } // If the start of the selection is at the start of a line and the end at the end of a line. 
let (start_line, end_line) = range.line_range(text); let start = text.line_to_char(start_line); let end = text.line_to_char((end_line + 1).min(text.len_lines())); start == range.from() && end == range.to() }) } enum YankAction { Yank, NoYank, } fn delete_selection_impl(cx: &mut Context, op: Operation, yank: YankAction) { let (view, doc) = current!(cx.editor); let selection = doc.selection(view.id); let only_whole_lines = selection_is_linewise(selection, doc.text()); if cx.register != Some('_') && matches!(yank, YankAction::Yank) { // yank the selection let text = doc.text().slice(..); let values: Vec = selection.fragments(text).map(Cow::into_owned).collect(); let reg_name = cx .register .unwrap_or_else(|| cx.editor.config.load().default_yank_register); if let Err(err) = cx.editor.registers.write(reg_name, values) { cx.editor.set_error(err.to_string()); return; } } // delete the selection let transaction = Transaction::delete_by_selection(doc.text(), selection, |range| (range.from(), range.to())); doc.apply(&transaction, view.id); match op { Operation::Delete => { // exit select mode, if currently in select mode exit_select_mode(cx); } Operation::Change => { if only_whole_lines { open(cx, Open::Above, CommentContinuation::Disabled); } else { enter_insert_mode(cx); } } } } #[inline] fn delete_by_selection_insert_mode( cx: &mut Context, mut f: impl FnMut(RopeSlice, &Range) -> Deletion, direction: Direction, ) { let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let mut selection = SmallVec::new(); let mut insert_newline = false; let text_len = text.len_chars(); let mut transaction = Transaction::delete_by_selection(doc.text(), doc.selection(view.id), |range| { let (start, end) = f(text, range); if direction == Direction::Forward { let mut range = *range; if range.head > range.anchor { insert_newline |= end == text_len; // move the cursor to the right so that the selection // doesn't shrink when deleting forward (so the text appears to // move to 
left) // += 1 is enough here as the range is normalized to grapheme boundaries // later anyway range.head += 1; } selection.push(range); } (start, end) }); // in case we delete the last character and the cursor would be moved to the EOF char // insert a newline, just like when entering append mode if insert_newline { transaction = transaction.insert_at_eof(doc.line_ending.as_str().into()); } if direction == Direction::Forward { doc.set_selection( view.id, Selection::new(selection, doc.selection(view.id).primary_index()), ); } doc.apply(&transaction, view.id); } fn delete_selection(cx: &mut Context) { delete_selection_impl(cx, Operation::Delete, YankAction::Yank); } fn delete_selection_noyank(cx: &mut Context) { delete_selection_impl(cx, Operation::Delete, YankAction::NoYank); } fn change_selection(cx: &mut Context) { delete_selection_impl(cx, Operation::Change, YankAction::Yank); } fn change_selection_noyank(cx: &mut Context) { delete_selection_impl(cx, Operation::Change, YankAction::NoYank); } fn collapse_selection(cx: &mut Context) { let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let selection = doc.selection(view.id).clone().transform(|range| { let pos = range.cursor(text); Range::new(pos, pos) }); doc.set_selection(view.id, selection); } fn flip_selections(cx: &mut Context) { let (view, doc) = current!(cx.editor); let selection = doc .selection(view.id) .clone() .transform(|range| range.flip()); doc.set_selection(view.id, selection); } fn ensure_selections_forward(cx: &mut Context) { let (view, doc) = current!(cx.editor); let selection = doc .selection(view.id) .clone() .transform(|r| r.with_direction(Direction::Forward)); doc.set_selection(view.id, selection); } fn enter_insert_mode(cx: &mut Context) { cx.editor.mode = Mode::Insert; } // inserts at the start of each selection fn insert_mode(cx: &mut Context) { enter_insert_mode(cx); let (view, doc) = current!(cx.editor); log::trace!( "entering insert mode with sel: {:?}, text: {:?}", 
doc.selection(view.id), doc.text().to_string() ); let selection = doc .selection(view.id) .clone() .transform(|range| Range::new(range.to(), range.from())); doc.set_selection(view.id, selection); } // inserts at the end of each selection fn append_mode(cx: &mut Context) { enter_insert_mode(cx); let (view, doc) = current!(cx.editor); doc.restore_cursor = true; let text = doc.text().slice(..); // Make sure there's room at the end of the document if the last // selection butts up against it. let end = text.len_chars(); let last_range = doc .selection(view.id) .iter() .last() .expect("selection should always have at least one range"); if !last_range.is_empty() && last_range.to() == end { let transaction = Transaction::change( doc.text(), [(end, end, Some(doc.line_ending.as_str().into()))].into_iter(), ); doc.apply(&transaction, view.id); } let selection = doc.selection(view.id).clone().transform(|range| { Range::new( range.from(), graphemes::next_grapheme_boundary(doc.text().slice(..), range.to()), ) }); doc.set_selection(view.id, selection); } fn file_picker(cx: &mut Context) { let root = find_workspace().0; if !root.exists() { cx.editor.set_error("Workspace directory does not exist"); return; } let picker = ui::file_picker(cx.editor, root); cx.push_layer(Box::new(overlaid(picker))); } fn file_picker_in_current_buffer_directory(cx: &mut Context) { let doc_dir = doc!(cx.editor) .path() .and_then(|path| path.parent().map(|path| path.to_path_buf())); let path = match doc_dir { Some(path) => path, None => { cx.editor.set_error("current buffer has no path or parent"); return; } }; let picker = ui::file_picker(cx.editor, path); cx.push_layer(Box::new(overlaid(picker))); } fn file_picker_in_current_directory(cx: &mut Context) { let cwd = helix_stdx::env::current_working_dir(); if !cwd.exists() { cx.editor .set_error("Current working directory does not exist"); return; } let picker = ui::file_picker(cx.editor, cwd); cx.push_layer(Box::new(overlaid(picker))); } fn 
file_explorer(cx: &mut Context) { let root = find_workspace().0; if !root.exists() { cx.editor.set_error("Workspace directory does not exist"); return; } if let Ok(picker) = ui::file_explorer(root, cx.editor) { cx.push_layer(Box::new(overlaid(picker))); } } fn file_explorer_in_current_buffer_directory(cx: &mut Context) { let doc_dir = doc!(cx.editor) .path() .and_then(|path| path.parent().map(|path| path.to_path_buf())); let path = match doc_dir { Some(path) => path, None => { let cwd = helix_stdx::env::current_working_dir(); if !cwd.exists() { cx.editor.set_error( "Current buffer has no parent and current working directory does not exist", ); return; } cx.editor.set_error( "Current buffer has no parent, opening file explorer in current working directory", ); cwd } }; if let Ok(picker) = ui::file_explorer(path, cx.editor) { cx.push_layer(Box::new(overlaid(picker))); } } fn file_explorer_in_current_directory(cx: &mut Context) { let cwd = helix_stdx::env::current_working_dir(); if !cwd.exists() { cx.editor .set_error("Current working directory does not exist"); return; } if let Ok(picker) = ui::file_explorer(cwd, cx.editor) { cx.push_layer(Box::new(overlaid(picker))); } } fn buffer_picker(cx: &mut Context) { let current = view!(cx.editor).doc; struct BufferMeta { id: DocumentId, path: Option, is_modified: bool, is_current: bool, focused_at: std::time::Instant, } let new_meta = |doc: &Document| BufferMeta { id: doc.id(), path: doc.path().cloned(), is_modified: doc.is_modified(), is_current: doc.id() == current, focused_at: doc.focused_at, }; let mut items = cx .editor .documents .values() .map(new_meta) .collect::>(); // mru items.sort_unstable_by_key(|item| std::cmp::Reverse(item.focused_at)); let columns = [ PickerColumn::new("id", |meta: &BufferMeta, _| meta.id.to_string().into()), PickerColumn::new("flags", |meta: &BufferMeta, _| { let mut flags = String::new(); if meta.is_modified { flags.push('+'); } if meta.is_current { flags.push('*'); } flags.into() }), 
PickerColumn::new("path", |meta: &BufferMeta, _| { let path = meta .path .as_deref() .map(helix_stdx::path::get_relative_path); path.as_deref() .and_then(Path::to_str) .unwrap_or(SCRATCH_BUFFER_NAME) .to_string() .into() }), ]; let picker = Picker::new(columns, 2, items, (), |cx, meta, action| { cx.editor.switch(meta.id, action); }) .with_preview(|editor, meta| { let doc = &editor.documents.get(&meta.id)?; let lines = doc.selections().values().next().map(|selection| { let cursor_line = selection.primary().cursor_line(doc.text().slice(..)); (cursor_line, cursor_line) }); Some((meta.id.into(), lines)) }); cx.push_layer(Box::new(overlaid(picker))); } fn jumplist_picker(cx: &mut Context) { struct JumpMeta { id: DocumentId, path: Option, selection: Selection, text: String, is_current: bool, } for (view, _) in cx.editor.tree.views_mut() { for doc_id in view.jumps.iter().map(|e| e.0).collect::>().iter() { let doc = doc_mut!(cx.editor, doc_id); view.sync_changes(doc); } } let new_meta = |view: &View, doc_id: DocumentId, selection: Selection| { let doc = &cx.editor.documents.get(&doc_id); let text = doc.map_or("".into(), |d| { selection .fragments(d.text().slice(..)) .map(Cow::into_owned) .collect::>() .join(" ") }); JumpMeta { id: doc_id, path: doc.and_then(|d| d.path().cloned()), selection, text, is_current: view.doc == doc_id, } }; let columns = [ ui::PickerColumn::new("id", |item: &JumpMeta, _| item.id.to_string().into()), ui::PickerColumn::new("path", |item: &JumpMeta, _| { let path = item .path .as_deref() .map(helix_stdx::path::get_relative_path); path.as_deref() .and_then(Path::to_str) .unwrap_or(SCRATCH_BUFFER_NAME) .to_string() .into() }), ui::PickerColumn::new("flags", |item: &JumpMeta, _| { let mut flags = Vec::new(); if item.is_current { flags.push("*"); } if flags.is_empty() { "".into() } else { format!(" ({})", flags.join("")).into() } }), ui::PickerColumn::new("contents", |item: &JumpMeta, _| item.text.as_str().into()), ]; let picker = Picker::new( columns, 
1, // path cx.editor.tree.views().flat_map(|(view, _)| { view.jumps .iter() .rev() .map(|(doc_id, selection)| new_meta(view, *doc_id, selection.clone())) }), (), |cx, meta, action| { cx.editor.switch(meta.id, action); let config = cx.editor.config(); let (view, doc) = (view_mut!(cx.editor), doc_mut!(cx.editor, &meta.id)); doc.set_selection(view.id, meta.selection.clone()); if action.align_view(view, doc.id()) { view.ensure_cursor_in_view_center(doc, config.scrolloff); } }, ) .with_preview(|editor, meta| { let doc = &editor.documents.get(&meta.id)?; let line = meta.selection.primary().cursor_line(doc.text().slice(..)); Some((meta.id.into(), Some((line, line)))) }); cx.push_layer(Box::new(overlaid(picker))); } fn changed_file_picker(cx: &mut Context) { pub struct FileChangeData { cwd: PathBuf, style_untracked: Style, style_modified: Style, style_conflict: Style, style_deleted: Style, style_renamed: Style, } let cwd = helix_stdx::env::current_working_dir(); if !cwd.exists() { cx.editor .set_error("Current working directory does not exist"); return; } let added = cx.editor.theme.get("diff.plus"); let modified = cx.editor.theme.get("diff.delta"); let conflict = cx.editor.theme.get("diff.delta.conflict"); let deleted = cx.editor.theme.get("diff.minus"); let renamed = cx.editor.theme.get("diff.delta.moved"); let columns = [ PickerColumn::new("change", |change: &FileChange, data: &FileChangeData| { match change { FileChange::Untracked { .. } => Span::styled("+ untracked", data.style_untracked), FileChange::Modified { .. } => Span::styled("~ modified", data.style_modified), FileChange::Conflict { .. } => Span::styled("x conflict", data.style_conflict), FileChange::Deleted { .. } => Span::styled("- deleted", data.style_deleted), FileChange::Renamed { .. 
} => Span::styled("> renamed", data.style_renamed), } .into() }), PickerColumn::new("path", |change: &FileChange, data: &FileChangeData| { let display_path = |path: &PathBuf| { path.strip_prefix(&data.cwd) .unwrap_or(path) .display() .to_string() }; match change { FileChange::Untracked { path } => display_path(path), FileChange::Modified { path } => display_path(path), FileChange::Conflict { path } => display_path(path), FileChange::Deleted { path } => display_path(path), FileChange::Renamed { from_path, to_path } => { format!("{} -> {}", display_path(from_path), display_path(to_path)) } } .into() }), ]; let picker = Picker::new( columns, 1, // path [], FileChangeData { cwd: cwd.clone(), style_untracked: added, style_modified: modified, style_conflict: conflict, style_deleted: deleted, style_renamed: renamed, }, |cx, meta: &FileChange, action| { let path_to_open = meta.path(); if let Err(e) = cx.editor.open(path_to_open, action) { let err = if let Some(err) = e.source() { format!("{}", err) } else { format!("unable to open \"{}\"", path_to_open.display()) }; cx.editor.set_error(err); } }, ) .with_preview(|_editor, meta| Some((meta.path().into(), None))); let injector = picker.injector(); cx.editor .diff_providers .clone() .for_each_changed_file(cwd, move |change| match change { Ok(change) => injector.push(change).is_ok(), Err(err) => { status::report_blocking(err); true } }); cx.push_layer(Box::new(overlaid(picker))); } pub fn command_palette(cx: &mut Context) { let register = cx.register; let count = cx.count; cx.callback.push(Box::new( move |compositor: &mut Compositor, cx: &mut compositor::Context| { let keymap = compositor.find::().unwrap().keymaps.map() [&cx.editor.mode] .reverse_map(); let commands = MappableCommand::STATIC_COMMAND_LIST.iter().cloned().chain( typed::TYPABLE_COMMAND_LIST .iter() .map(|cmd| MappableCommand::Typable { name: cmd.name.to_owned(), args: String::new(), doc: cmd.doc.to_owned(), }), ); let columns = [ ui::PickerColumn::new("name", 
|item, _| match item { MappableCommand::Typable { name, .. } => format!(":{name}").into(), MappableCommand::Static { name, .. } => (*name).into(), MappableCommand::Macro { .. } => { unreachable!("macros aren't included in the command palette") } }), ui::PickerColumn::new( "bindings", |item: &MappableCommand, keymap: &crate::keymap::ReverseKeymap| { keymap .get(item.name()) .map(|bindings| { bindings.iter().fold(String::new(), |mut acc, bind| { if !acc.is_empty() { acc.push(' '); } for key in bind { acc.push_str(&key.key_sequence_format()); } acc }) }) .unwrap_or_default() .into() }, ), ui::PickerColumn::new("doc", |item: &MappableCommand, _| item.doc().into()), ]; let picker = Picker::new(columns, 0, commands, keymap, move |cx, command, _action| { let mut ctx = Context { register, count, editor: cx.editor, callback: Vec::new(), on_next_key_callback: None, jobs: cx.jobs, }; let focus = view!(ctx.editor).id; command.execute(&mut ctx); if ctx.editor.tree.contains(focus) { let config = ctx.editor.config(); let mode = ctx.editor.mode(); let view = view_mut!(ctx.editor, focus); let doc = doc_mut!(ctx.editor, &view.doc); view.ensure_cursor_in_view(doc, config.scrolloff); if mode != Mode::Insert { doc.append_changes_to_history(view); } } }); compositor.push(Box::new(overlaid(picker))); }, )); } fn last_picker(cx: &mut Context) { // TODO: last picker does not seem to work well with buffer_picker cx.callback.push(Box::new(|compositor, cx| { if let Some(picker) = compositor.last_picker.take() { compositor.push(picker); } else { cx.editor.set_error("no last picker") } })); } /// Fallback position to use for [`insert_with_indent`]. enum IndentFallbackPos { LineStart, LineEnd, } // `I` inserts at the first nonwhitespace character of each line with a selection. // If the line is empty, automatically indent. fn insert_at_line_start(cx: &mut Context) { insert_with_indent(cx, IndentFallbackPos::LineStart); } // `A` inserts at the end of each line with a selection. 
// If the line is empty, automatically indent. fn insert_at_line_end(cx: &mut Context) { insert_with_indent(cx, IndentFallbackPos::LineEnd); } // Enter insert mode and auto-indent the current line if it is empty. // If the line is not empty, move the cursor to the specified fallback position. fn insert_with_indent(cx: &mut Context, cursor_fallback: IndentFallbackPos) { enter_insert_mode(cx); let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let contents = doc.text(); let selection = doc.selection(view.id); let language_config = doc.language_config(); let syntax = doc.syntax(); let tab_width = doc.tab_width(); let mut ranges = SmallVec::with_capacity(selection.len()); let mut offs = 0; let mut transaction = Transaction::change_by_selection(contents, selection, |range| { let cursor_line = range.cursor_line(text); let cursor_line_start = text.line_to_char(cursor_line); if line_end_char_index(&text, cursor_line) == cursor_line_start { // line is empty => auto indent let line_end_index = cursor_line_start; let indent = indent::indent_for_newline( language_config, syntax, &doc.config.load().indent_heuristic, &doc.indent_style, tab_width, text, cursor_line, line_end_index, cursor_line, ); // calculate new selection ranges let pos = offs + cursor_line_start; let indent_width = indent.chars().count(); ranges.push(Range::point(pos + indent_width)); offs += indent_width; (line_end_index, line_end_index, Some(indent.into())) } else { // move cursor to the fallback position let pos = match cursor_fallback { IndentFallbackPos::LineStart => text .line(cursor_line) .first_non_whitespace_char() .map(|ws_offset| ws_offset + cursor_line_start) .unwrap_or(cursor_line_start), IndentFallbackPos::LineEnd => line_end_char_index(&text, cursor_line), }; ranges.push(range.put_cursor(text, pos + offs, cx.editor.mode == Mode::Select)); (cursor_line_start, cursor_line_start, None) } }); transaction = transaction.with_selection(Selection::new(ranges, 
selection.primary_index())); doc.apply(&transaction, view.id); } // Creates an LspCallback that waits for formatting changes to be computed. When they're done, // it applies them, but only if the doc hasn't changed. // // TODO: provide some way to cancel this, probably as part of a more general job cancellation // scheme async fn make_format_callback( doc_id: DocumentId, doc_version: i32, view_id: ViewId, format: impl Future> + Send + 'static, write: Option<(Option, bool)>, ) -> anyhow::Result { let format = format.await; let call: job::Callback = Callback::Editor(Box::new(move |editor| { if !editor.documents.contains_key(&doc_id) || !editor.tree.contains(view_id) { return; } let scrolloff = editor.config().scrolloff; let doc = doc_mut!(editor, &doc_id); let view = view_mut!(editor, view_id); match format { Ok(format) => { if doc.version() == doc_version { doc.apply(&format, view.id); doc.append_changes_to_history(view); doc.detect_indent_and_line_ending(); view.ensure_cursor_in_view(doc, scrolloff); } else { log::info!("discarded formatting changes because the document changed"); } } Err(err) => { if write.is_none() { editor.set_error(err.to_string()); return; } log::info!("failed to format '{}': {err}", doc.display_name()); } } if let Some((path, force)) = write { let id = doc.id(); if let Err(err) = editor.save(id, path, force) { editor.set_error(format!("Error saving: {}", err)); } } })); Ok(call) } #[derive(PartialEq, Eq)] pub enum Open { Below, Above, } #[derive(PartialEq)] pub enum CommentContinuation { Enabled, Disabled, } fn open(cx: &mut Context, open: Open, comment_continuation: CommentContinuation) { let count = cx.count(); enter_insert_mode(cx); let config = cx.editor.config(); let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let contents = doc.text(); let selection = doc.selection(view.id); let mut offs = 0; let mut ranges = SmallVec::with_capacity(selection.len()); let continue_comment_tokens = if comment_continuation == 
CommentContinuation::Enabled && config.continue_comments { doc.language_config() .and_then(|config| config.comment_tokens.as_ref()) } else { None }; let mut transaction = Transaction::change_by_selection(contents, selection, |range| { // the line number, where the cursor is currently let curr_line_num = text.char_to_line(match open { Open::Below => graphemes::prev_grapheme_boundary(text, range.to()), Open::Above => range.from(), }); // the next line number, where the cursor will be, after finishing the transaction let next_new_line_num = match open { Open::Below => curr_line_num + 1, Open::Above => curr_line_num, }; let above_next_new_line_num = next_new_line_num.saturating_sub(1); let continue_comment_token = continue_comment_tokens .and_then(|tokens| comment::get_comment_token(text, tokens, curr_line_num)); // Index to insert newlines after, as well as the char width // to use to compensate for those inserted newlines. let (above_next_line_end_index, above_next_line_end_width) = if next_new_line_num == 0 { (0, 0) } else { ( line_end_char_index(&text, above_next_new_line_num), doc.line_ending.len_chars(), ) }; let line = text.line(curr_line_num); let indent = match line.first_non_whitespace_char() { Some(pos) if continue_comment_token.is_some() => line.slice(..pos).to_string(), _ => indent::indent_for_newline( doc.language_config(), doc.syntax(), &config.indent_heuristic, &doc.indent_style, doc.tab_width(), text, above_next_new_line_num, above_next_line_end_index, curr_line_num, ), }; let indent_len = indent.len(); let mut text = String::with_capacity(1 + indent_len); if open == Open::Above && next_new_line_num == 0 { text.push_str(&indent); if let Some(token) = continue_comment_token { text.push_str(token); text.push(' '); } text.push_str(doc.line_ending.as_str()); } else { text.push_str(doc.line_ending.as_str()); text.push_str(&indent); if let Some(token) = continue_comment_token { text.push_str(token); text.push(' '); } } let text = text.repeat(count); // 
calculate new selection ranges let pos = offs + above_next_line_end_index + above_next_line_end_width; let comment_len = continue_comment_token .map(|token| token.len() + 1) // `+ 1` for the extra space added .unwrap_or_default(); for i in 0..count { // pos -> beginning of reference line, // + (i * (1+indent_len + comment_len)) -> beginning of i'th line from pos (possibly including comment token) // + indent_len + comment_len -> -> indent for i'th line ranges.push(Range::point( pos + (i * (1 + indent_len + comment_len)) + indent_len + comment_len, )); } // update the offset for the next range offs += text.chars().count(); ( above_next_line_end_index, above_next_line_end_index, Some(text.into()), ) }); transaction = transaction.with_selection(Selection::new(ranges, selection.primary_index())); doc.apply(&transaction, view.id); } // o inserts a new line after each line with a selection fn open_below(cx: &mut Context) { open(cx, Open::Below, CommentContinuation::Enabled) } // O inserts a new line before each line with a selection fn open_above(cx: &mut Context) { open(cx, Open::Above, CommentContinuation::Enabled) } fn normal_mode(cx: &mut Context) { cx.editor.enter_normal_mode(); } // Store a jump on the jumplist. fn push_jump(view: &mut View, doc: &Document) { let jump = (doc.id(), doc.selection(view.id).clone()); view.jumps.push(jump); } fn goto_line(cx: &mut Context) { goto_line_impl(cx, Movement::Move); } fn goto_line_impl(cx: &mut Context, movement: Movement) { if cx.count.is_some() { let (view, doc) = current!(cx.editor); push_jump(view, doc); goto_line_without_jumplist(cx.editor, cx.count, movement); } } fn goto_line_without_jumplist( editor: &mut Editor, count: Option, movement: Movement, ) { if let Some(count) = count { let (view, doc) = current!(editor); let text = doc.text().slice(..); let max_line = if text.line(text.len_lines() - 1).len_chars() == 0 { // If the last line is blank, don't jump to it. 
text.len_lines().saturating_sub(2) } else { text.len_lines() - 1 }; let line_idx = std::cmp::min(count.get() - 1, max_line); let pos = text.line_to_char(line_idx); let selection = doc .selection(view.id) .clone() .transform(|range| range.put_cursor(text, pos, movement == Movement::Extend)); doc.set_selection(view.id, selection); } } fn goto_last_line(cx: &mut Context) { goto_last_line_impl(cx, Movement::Move) } fn extend_to_last_line(cx: &mut Context) { goto_last_line_impl(cx, Movement::Extend) } fn goto_last_line_impl(cx: &mut Context, movement: Movement) { let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let line_idx = if text.line(text.len_lines() - 1).len_chars() == 0 { // If the last line is blank, don't jump to it. text.len_lines().saturating_sub(2) } else { text.len_lines() - 1 }; let pos = text.line_to_char(line_idx); let selection = doc .selection(view.id) .clone() .transform(|range| range.put_cursor(text, pos, movement == Movement::Extend)); push_jump(view, doc); doc.set_selection(view.id, selection); } fn goto_column(cx: &mut Context) { goto_column_impl(cx, Movement::Move); } fn extend_to_column(cx: &mut Context) { goto_column_impl(cx, Movement::Extend); } fn goto_column_impl(cx: &mut Context, movement: Movement) { let count = cx.count(); let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let selection = doc.selection(view.id).clone().transform(|range| { let line = range.cursor_line(text); let line_start = text.line_to_char(line); let line_end = line_end_char_index(&text, line); let pos = graphemes::nth_next_grapheme_boundary(text, line_start, count - 1).min(line_end); range.put_cursor(text, pos, movement == Movement::Extend) }); doc.set_selection(view.id, selection); } fn goto_last_accessed_file(cx: &mut Context) { let view = view_mut!(cx.editor); if let Some(alt) = view.docs_access_history.pop() { cx.editor.switch(alt, Action::Replace); } else { cx.editor.set_error("no last accessed buffer") } } fn 
goto_last_modification(cx: &mut Context) { let (view, doc) = current!(cx.editor); let pos = doc.history.get_mut().last_edit_pos(); let text = doc.text().slice(..); if let Some(pos) = pos { let selection = doc .selection(view.id) .clone() .transform(|range| range.put_cursor(text, pos, cx.editor.mode == Mode::Select)); doc.set_selection(view.id, selection); } } fn goto_last_modified_file(cx: &mut Context) { let view = view!(cx.editor); let alternate_file = view .last_modified_docs .into_iter() .flatten() .find(|&id| id != view.doc); if let Some(alt) = alternate_file { cx.editor.switch(alt, Action::Replace); } else { cx.editor.set_error("no last modified buffer") } } fn select_mode(cx: &mut Context) { let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); // Make sure end-of-document selections are also 1-width. // (With the exception of being in an empty document, of course.) let selection = doc.selection(view.id).clone().transform(|range| { if range.is_empty() && range.head == text.len_chars() { Range::new( graphemes::prev_grapheme_boundary(text, range.anchor), range.head, ) } else { range } }); doc.set_selection(view.id, selection); cx.editor.mode = Mode::Select; } fn exit_select_mode(cx: &mut Context) { if cx.editor.mode == Mode::Select { cx.editor.mode = Mode::Normal; } } fn goto_first_diag(cx: &mut Context) { let (view, doc) = current!(cx.editor); let selection = match doc.diagnostics().first() { Some(diag) => Selection::single(diag.range.start, diag.range.end), None => return, }; doc.set_selection(view.id, selection); view.diagnostics_handler .immediately_show_diagnostic(doc, view.id); } fn goto_last_diag(cx: &mut Context) { let (view, doc) = current!(cx.editor); let selection = match doc.diagnostics().last() { Some(diag) => Selection::single(diag.range.start, diag.range.end), None => return, }; doc.set_selection(view.id, selection); view.diagnostics_handler .immediately_show_diagnostic(doc, view.id); } fn goto_next_diag(cx: &mut Context) { let 
motion = move |editor: &mut Editor| { let (view, doc) = current!(editor); let cursor_pos = doc .selection(view.id) .primary() .cursor(doc.text().slice(..)); let diag = doc .diagnostics() .iter() .find(|diag| diag.range.start > cursor_pos); let selection = match diag { Some(diag) => Selection::single(diag.range.start, diag.range.end), None => return, }; doc.set_selection(view.id, selection); view.diagnostics_handler .immediately_show_diagnostic(doc, view.id); }; cx.editor.apply_motion(motion); } fn goto_prev_diag(cx: &mut Context) { let motion = move |editor: &mut Editor| { let (view, doc) = current!(editor); let cursor_pos = doc .selection(view.id) .primary() .cursor(doc.text().slice(..)); let diag = doc .diagnostics() .iter() .rev() .find(|diag| diag.range.start < cursor_pos); let selection = match diag { // NOTE: the selection is reversed because we're jumping to the // previous diagnostic. Some(diag) => Selection::single(diag.range.end, diag.range.start), None => return, }; doc.set_selection(view.id, selection); view.diagnostics_handler .immediately_show_diagnostic(doc, view.id); }; cx.editor.apply_motion(motion) } fn goto_first_change(cx: &mut Context) { goto_first_change_impl(cx, false); } fn goto_last_change(cx: &mut Context) { goto_first_change_impl(cx, true); } fn goto_first_change_impl(cx: &mut Context, reverse: bool) { let editor = &mut cx.editor; let (view, doc) = current!(editor); if let Some(handle) = doc.diff_handle() { let hunk = { let diff = handle.load(); let idx = if reverse { diff.len().saturating_sub(1) } else { 0 }; diff.nth_hunk(idx) }; if hunk != Hunk::NONE { let range = hunk_range(hunk, doc.text().slice(..)); doc.set_selection(view.id, Selection::single(range.anchor, range.head)); } } } fn goto_next_change(cx: &mut Context) { goto_next_change_impl(cx, Direction::Forward) } fn goto_prev_change(cx: &mut Context) { goto_next_change_impl(cx, Direction::Backward) } fn goto_next_change_impl(cx: &mut Context, direction: Direction) { let count = 
cx.count() as u32 - 1; let motion = move |editor: &mut Editor| { let (view, doc) = current!(editor); let doc_text = doc.text().slice(..); let diff_handle = if let Some(diff_handle) = doc.diff_handle() { diff_handle } else { editor.set_status("Diff is not available in current buffer"); return; }; let selection = doc.selection(view.id).clone().transform(|range| { let cursor_line = range.cursor_line(doc_text) as u32; let diff = diff_handle.load(); let hunk_idx = match direction { Direction::Forward => diff .next_hunk(cursor_line) .map(|idx| (idx + count).min(diff.len() - 1)), Direction::Backward => diff .prev_hunk(cursor_line) .map(|idx| idx.saturating_sub(count)), }; let Some(hunk_idx) = hunk_idx else { return range; }; let hunk = diff.nth_hunk(hunk_idx); let new_range = hunk_range(hunk, doc_text); if editor.mode == Mode::Select { let head = if new_range.head < range.anchor { new_range.anchor } else { new_range.head }; Range::new(range.anchor, head) } else { new_range.with_direction(direction) } }); doc.set_selection(view.id, selection) }; cx.editor.apply_motion(motion); } /// Returns the [Range] for a [Hunk] in the given text. /// Additions and modifications cover the added and modified ranges. /// Deletions are represented as the point at the start of the deletion hunk. fn hunk_range(hunk: Hunk, text: RopeSlice) -> Range { let anchor = text.line_to_char(hunk.after.start as usize); let head = if hunk.after.is_empty() { anchor + 1 } else { text.line_to_char(hunk.after.end as usize) }; Range::new(anchor, head) } pub mod insert { use crate::events::PostInsertChar; use super::*; pub type Hook = fn(&Rope, &Selection, char) -> Option; /// Exclude the cursor in range. 
fn exclude_cursor(text: RopeSlice, range: Range, cursor: Range) -> Range { if range.to() == cursor.to() && text.len_chars() != cursor.to() { Range::new( range.from(), graphemes::prev_grapheme_boundary(text, cursor.to()), ) } else { range } } // The default insert hook: simply insert the character #[allow(clippy::unnecessary_wraps)] // need to use Option<> because of the Hook signature fn insert(doc: &Rope, selection: &Selection, ch: char) -> Option { let cursors = selection.clone().cursors(doc.slice(..)); let mut t = Tendril::new(); t.push(ch); let transaction = Transaction::insert(doc, &cursors, t); Some(transaction) } use helix_core::auto_pairs; use helix_view::editor::SmartTabConfig; pub fn insert_char(cx: &mut Context, c: char) { let (view, doc) = current_ref!(cx.editor); let text = doc.text(); let selection = doc.selection(view.id); let auto_pairs = doc.auto_pairs(cx.editor); let transaction = auto_pairs .as_ref() .and_then(|ap| auto_pairs::hook(text, selection, c, ap)) .or_else(|| insert(text, selection, c)); let (view, doc) = current!(cx.editor); if let Some(t) = transaction { doc.apply(&t, view.id); } helix_event::dispatch(PostInsertChar { c, cx }); } pub fn smart_tab(cx: &mut Context) { let (view, doc) = current_ref!(cx.editor); let view_id = view.id; if matches!( cx.editor.config().smart_tab, Some(SmartTabConfig { enable: true, .. 
}) ) { let cursors_after_whitespace = doc.selection(view_id).ranges().iter().all(|range| { let cursor = range.cursor(doc.text().slice(..)); let current_line_num = doc.text().char_to_line(cursor); let current_line_start = doc.text().line_to_char(current_line_num); let left = doc.text().slice(current_line_start..cursor); left.chars().all(|c| c.is_whitespace()) }); if !cursors_after_whitespace { if doc.active_snippet.is_some() { goto_next_tabstop(cx); } else { move_parent_node_end(cx); } return; } } insert_tab(cx); } pub fn insert_tab(cx: &mut Context) { let (view, doc) = current!(cx.editor); // TODO: round out to nearest indentation level (for example a line with 3 spaces should // indent by one to reach 4 spaces). let indent = Tendril::from(doc.indent_style.as_str()); let transaction = Transaction::insert( doc.text(), &doc.selection(view.id).clone().cursors(doc.text().slice(..)), indent, ); doc.apply(&transaction, view.id); } pub fn insert_newline(cx: &mut Context) { let config = cx.editor.config(); let (view, doc) = current_ref!(cx.editor); let text = doc.text().slice(..); let line_ending = doc.line_ending.as_str(); let contents = doc.text(); let selection = doc.selection(view.id); let mut ranges = SmallVec::with_capacity(selection.len()); // TODO: this is annoying, but we need to do it to properly calculate pos after edits let mut global_offs = 0; let mut new_text = String::new(); let continue_comment_tokens = if config.continue_comments { doc.language_config() .and_then(|config| config.comment_tokens.as_ref()) } else { None }; let mut transaction = Transaction::change_by_selection(contents, selection, |range| { // Tracks the number of trailing whitespace characters deleted by this selection. 
let mut chars_deleted = 0; let pos = range.cursor(text); let prev = if pos == 0 { ' ' } else { contents.char(pos - 1) }; let curr = contents.get_char(pos).unwrap_or(' '); let current_line = text.char_to_line(pos); let line_start = text.line_to_char(current_line); let continue_comment_token = continue_comment_tokens .and_then(|tokens| comment::get_comment_token(text, tokens, current_line)); let (from, to, local_offs) = if let Some(idx) = text.slice(line_start..pos).last_non_whitespace_char() { let first_trailing_whitespace_char = (line_start + idx + 1).min(pos); let line = text.line(current_line); let indent = match line.first_non_whitespace_char() { Some(pos) if continue_comment_token.is_some() => line.slice(..pos).to_string(), _ => indent::indent_for_newline( doc.language_config(), doc.syntax(), &config.indent_heuristic, &doc.indent_style, doc.tab_width(), text, current_line, pos, current_line, ), }; // If we are between pairs (such as brackets), we want to // insert an additional line which is indented one level // more and place the cursor there let on_auto_pair = doc .auto_pairs(cx.editor) .and_then(|pairs| pairs.get(prev)) .is_some_and(|pair| pair.open == prev && pair.close == curr); let local_offs = if let Some(token) = continue_comment_token { new_text.reserve_exact(line_ending.len() + indent.len() + token.len() + 1); new_text.push_str(line_ending); new_text.push_str(&indent); new_text.push_str(token); new_text.push(' '); new_text.chars().count() } else if on_auto_pair { // line where the cursor will be let inner_indent = indent.clone() + doc.indent_style.as_str(); new_text .reserve_exact(line_ending.len() * 2 + indent.len() + inner_indent.len()); new_text.push_str(line_ending); new_text.push_str(&inner_indent); // line where the matching pair will be let local_offs = new_text.chars().count(); new_text.push_str(line_ending); new_text.push_str(&indent); local_offs } else { new_text.reserve_exact(line_ending.len() + indent.len()); 
new_text.push_str(line_ending); new_text.push_str(&indent); new_text.chars().count() }; // Note that `first_trailing_whitespace_char` is at least `pos` so this unsigned // subtraction cannot underflow. chars_deleted = pos - first_trailing_whitespace_char; ( first_trailing_whitespace_char, pos, local_offs as isize - chars_deleted as isize, ) } else { // If the current line is all whitespace, insert a line ending at the beginning of // the current line. This makes the current line empty and the new line contain the // indentation of the old line. new_text.push_str(line_ending); (line_start, line_start, new_text.chars().count() as isize) }; let new_range = if range.cursor(text) > range.anchor { // when appending, extend the range by local_offs Range::new( (range.anchor as isize + global_offs) as usize, (range.head as isize + local_offs + global_offs) as usize, ) } else { // when inserting, slide the range by local_offs Range::new( (range.anchor as isize + local_offs + global_offs) as usize, (range.head as isize + local_offs + global_offs) as usize, ) }; // TODO: range replace or extend // range.replace(|range| range.is_empty(), head); -> fn extend if cond true, new head pos // can be used with cx.mode to do replace or extend on most changes ranges.push(new_range); global_offs += new_text.chars().count() as isize - chars_deleted as isize; let tendril = Tendril::from(&new_text); new_text.clear(); (from, to, Some(tendril)) }); transaction = transaction.with_selection(Selection::new(ranges, selection.primary_index())); let (view, doc) = current!(cx.editor); doc.apply(&transaction, view.id); } pub fn delete_char_backward(cx: &mut Context) { let count = cx.count(); let (view, doc) = current_ref!(cx.editor); let text = doc.text().slice(..); let tab_width = doc.tab_width(); let indent_width = doc.indent_width(); let auto_pairs = doc.auto_pairs(cx.editor); let transaction = Transaction::delete_by_selection(doc.text(), doc.selection(view.id), |range| { let pos = 
range.cursor(text);
if pos == 0 {
    return (pos, pos);
}
let line_start_pos = text.line_to_char(range.cursor_line(text));
// consider to delete by indent level if all characters before `pos` are indent units.
let fragment = Cow::from(text.slice(line_start_pos..pos));
if !fragment.is_empty() && fragment.chars().all(|ch| ch == ' ' || ch == '\t') {
    if text.get_char(pos.saturating_sub(1)) == Some('\t') {
        // fast path, delete one char
        (graphemes::nth_prev_grapheme_boundary(text, pos, 1), pos)
    } else {
        // Visual width of the leading whitespace, used to snap the deletion
        // back to the previous indentation stop.
        let width: usize = fragment
            .chars()
            .map(|ch| {
                if ch == '\t' {
                    tab_width
                } else {
                    // it can be none if it still meet control characters other than '\t'
                    // here just set the width to 1 (or some value better?).
                    ch.width().unwrap_or(1)
                }
            })
            .sum();
        let mut drop = width % indent_width; // round down to nearest unit
        if drop == 0 {
            drop = indent_width
        }; // if it's already at a unit, consume a whole unit
        let mut chars = fragment.chars().rev();
        let mut start = pos;
        for _ in 0..drop {
            // delete up to `drop` spaces
            match chars.next() {
                Some(' ') => start -= 1,
                _ => break,
            }
        }
        (start, pos) // delete!
    }
} else {
    match (
        text.get_char(pos.saturating_sub(1)),
        text.get_char(pos),
        auto_pairs,
    ) {
        (Some(_x), Some(_y), Some(ap))
            if range.is_single_grapheme(text)
                && ap.get(_x).is_some()
                && ap.get(_x).unwrap().open == _x
                && ap.get(_x).unwrap().close == _y =>
        // delete both autopaired characters
        {
            (
                graphemes::nth_prev_grapheme_boundary(text, pos, count),
                graphemes::nth_next_grapheme_boundary(text, pos, count),
            )
        }
        _ =>
        // delete 1 char
        {
            (graphemes::nth_prev_grapheme_boundary(text, pos, count), pos)
        }
    }
}
});
let (view, doc) = current!(cx.editor);
doc.apply(&transaction, view.id);
}

/// Delete `count` graphemes after each cursor (insert-mode delete).
pub fn delete_char_forward(cx: &mut Context) {
    let count = cx.count();
    delete_by_selection_insert_mode(
        cx,
        |text, range| {
            let pos = range.cursor(text);
            (pos, graphemes::nth_next_grapheme_boundary(text, pos, count))
        },
        Direction::Forward,
    )
}

/// Delete from each cursor back to the start of the previous word (insert mode).
pub fn delete_word_backward(cx: &mut Context) {
    let count = cx.count();
    delete_by_selection_insert_mode(
        cx,
        |text, range| {
            let anchor = movement::move_prev_word_start(text, *range, count).from();
            let next = Range::new(anchor, range.cursor(text));
            let range = exclude_cursor(text, next, *range);
            (range.from(), range.to())
        },
        Direction::Backward,
    );
}

/// Delete from each cursor forward to the end of the next word (insert mode).
pub fn delete_word_forward(cx: &mut Context) {
    let count = cx.count();
    delete_by_selection_insert_mode(
        cx,
        |text, range| {
            let head = movement::move_next_word_end(text, *range, count).to();
            (range.cursor(text), head)
        },
        Direction::Forward,
    );
}
}

// Undo / Redo

/// Undo up to `count` changes, reporting when the oldest change is reached.
fn undo(cx: &mut Context) {
    let count = cx.count();
    let (view, doc) = current!(cx.editor);
    for _ in 0..count {
        if !doc.undo(view) {
            cx.editor.set_status("Already at oldest change");
            break;
        }
    }
}

/// Redo up to `count` changes, reporting when the newest change is reached.
fn redo(cx: &mut Context) {
    let count = cx.count();
    let (view, doc) = current!(cx.editor);
    for _ in 0..count {
        if !doc.redo(view) {
            cx.editor.set_status("Already at newest change");
            break;
        }
    }
}

/// Step `count` entries back through history.
fn earlier(cx: &mut Context) {
    let count = cx.count();
    let (view, doc) = current!(cx.editor);
    for _ in 0..count {
        // rather than doing in batch we do this step-wise so we can stop and
        // report halfway through
        if !doc.earlier(view, UndoKind::Steps(1)) {
            cx.editor.set_status("Already at oldest change");
            break;
        }
    }
}

/// Step `count` entries forward through history.
fn later(cx: &mut Context) {
    let count = cx.count();
    let (view, doc) = current!(cx.editor);
    for _ in 0..count {
        // rather than doing in batch we do this step-wise so we can stop and
        // report halfway through
        if !doc.later(view, UndoKind::Steps(1)) {
            cx.editor.set_status("Already at newest change");
            break;
        }
    }
}

/// Record an undo checkpoint at the current document state.
fn commit_undo_checkpoint(cx: &mut Context) {
    let (view, doc) = current!(cx.editor);
    doc.append_changes_to_history(view);
}

// Yank / Paste

/// Yank all selections into the selected (or default yank) register.
fn yank(cx: &mut Context) {
    yank_impl(
        cx.editor,
        cx.register
            .unwrap_or(cx.editor.config().default_yank_register),
    );
    exit_select_mode(cx);
}

fn yank_to_clipboard(cx: &mut Context) {
    yank_impl(cx.editor, '+');
    exit_select_mode(cx);
}

fn yank_to_primary_clipboard(cx: &mut Context) {
    yank_impl(cx.editor, '*');
    exit_select_mode(cx);
}

/// Write every selection fragment into `register` and report the count.
fn yank_impl(editor: &mut Editor, register: char) {
    let (view, doc) = current!(editor);
    let text = doc.text().slice(..);

    // NOTE(review): the element type was erased in transit; `Cow::into_owned`
    // on the `Cow<str>` fragments yields `String`, so this is `Vec<String>`.
    let values: Vec<String> = doc
        .selection(view.id)
        .fragments(text)
        .map(Cow::into_owned)
        .collect();
    let selections = values.len();

    match editor.registers.write(register, values) {
        Ok(_) => editor.set_status(format!(
            "yanked {selections} selection{} to register {register}",
            if selections == 1 { "" } else { "s" }
        )),
        Err(err) => editor.set_error(err.to_string()),
    }
}

/// Join all selection fragments with `separator` and yank the result.
fn yank_joined_impl(editor: &mut Editor, separator: &str, register: char) {
    let (view, doc) = current!(editor);
    let text = doc.text().slice(..);

    let selection = doc.selection(view.id);
    let selections = selection.len();
    let joined = selection
        .fragments(text)
        .fold(String::new(), |mut acc, fragment| {
            if !acc.is_empty() {
                acc.push_str(separator);
            }
            acc.push_str(&fragment);
            acc
        });

    match editor.registers.write(register, vec![joined]) {
        Ok(_) => editor.set_status(format!(
            "joined and yanked {selections} selection{} to register {register}",
            if selections == 1 { "" } else { "s" }
        )),
        Err(err) => editor.set_error(err.to_string()),
    }
}

fn
yank_joined(cx: &mut Context) { let separator = doc!(cx.editor).line_ending.as_str(); yank_joined_impl( cx.editor, separator, cx.register .unwrap_or(cx.editor.config().default_yank_register), ); exit_select_mode(cx); } fn yank_joined_to_clipboard(cx: &mut Context) { let line_ending = doc!(cx.editor).line_ending; yank_joined_impl(cx.editor, line_ending.as_str(), '+'); exit_select_mode(cx); } fn yank_joined_to_primary_clipboard(cx: &mut Context) { let line_ending = doc!(cx.editor).line_ending; yank_joined_impl(cx.editor, line_ending.as_str(), '*'); exit_select_mode(cx); } fn yank_primary_selection_impl(editor: &mut Editor, register: char) { let (view, doc) = current!(editor); let text = doc.text().slice(..); let selection = doc.selection(view.id).primary().fragment(text).to_string(); match editor.registers.write(register, vec![selection]) { Ok(_) => editor.set_status(format!("yanked primary selection to register {register}",)), Err(err) => editor.set_error(err.to_string()), } } fn yank_main_selection_to_clipboard(cx: &mut Context) { yank_primary_selection_impl(cx.editor, '+'); exit_select_mode(cx); } fn yank_main_selection_to_primary_clipboard(cx: &mut Context) { yank_primary_selection_impl(cx.editor, '*'); exit_select_mode(cx); } #[derive(Copy, Clone)] enum Paste { Before, After, Cursor, } static LINE_ENDING_REGEX: Lazy = Lazy::new(|| Regex::new(r"\r\n|\r|\n").unwrap()); fn paste_impl( values: &[String], doc: &mut Document, view: &mut View, action: Paste, count: usize, mode: Mode, ) { if values.is_empty() { return; } if mode == Mode::Insert { doc.append_changes_to_history(view); } // if any of values ends with a line ending, it's linewise paste let linewise = values .iter() .any(|value| get_line_ending_of_str(value).is_some()); let map_value = |value| { let value = LINE_ENDING_REGEX.replace_all(value, doc.line_ending.as_str()); let mut out = Tendril::from(value.as_ref()); for _ in 1..count { out.push_str(&value); } out }; let repeat = std::iter::repeat( // `values` 
is asserted to have at least one entry above. map_value(values.last().unwrap()), ); let mut values = values.iter().map(|value| map_value(value)).chain(repeat); let text = doc.text(); let selection = doc.selection(view.id); let mut offset = 0; let mut ranges = SmallVec::with_capacity(selection.len()); let mut transaction = Transaction::change_by_selection(text, selection, |range| { let pos = match (action, linewise) { // paste linewise before (Paste::Before, true) => text.line_to_char(text.char_to_line(range.from())), // paste linewise after (Paste::After, true) => { let line = range.line_range(text.slice(..)).1; text.line_to_char((line + 1).min(text.len_lines())) } // paste insert (Paste::Before, false) => range.from(), // paste append (Paste::After, false) => range.to(), // paste at cursor (Paste::Cursor, _) => range.cursor(text.slice(..)), }; let value = values.next(); let value_len = value .as_ref() .map(|content| content.chars().count()) .unwrap_or_default(); let anchor = offset + pos; let new_range = Range::new(anchor, anchor + value_len).with_direction(range.direction()); ranges.push(new_range); offset += value_len; (pos, pos, value) }); if mode == Mode::Normal { transaction = transaction.with_selection(Selection::new(ranges, selection.primary_index())); } doc.apply(&transaction, view.id); doc.append_changes_to_history(view); } pub(crate) fn paste_bracketed_value(cx: &mut Context, contents: String) { let count = cx.count(); let paste = match cx.editor.mode { Mode::Insert | Mode::Select => Paste::Cursor, Mode::Normal => Paste::Before, }; let (view, doc) = current!(cx.editor); paste_impl(&[contents], doc, view, paste, count, cx.editor.mode); exit_select_mode(cx); } fn paste_clipboard_after(cx: &mut Context) { paste(cx.editor, '+', Paste::After, cx.count()); exit_select_mode(cx); } fn paste_clipboard_before(cx: &mut Context) { paste(cx.editor, '+', Paste::Before, cx.count()); exit_select_mode(cx); } fn paste_primary_clipboard_after(cx: &mut Context) { 
paste(cx.editor, '*', Paste::After, cx.count()); exit_select_mode(cx); } fn paste_primary_clipboard_before(cx: &mut Context) { paste(cx.editor, '*', Paste::Before, cx.count()); exit_select_mode(cx); } fn replace_with_yanked(cx: &mut Context) { replace_with_yanked_impl( cx.editor, cx.register .unwrap_or(cx.editor.config().default_yank_register), cx.count(), ); exit_select_mode(cx); } fn replace_with_yanked_impl(editor: &mut Editor, register: char, count: usize) { let Some(values) = editor .registers .read(register, editor) .filter(|values| values.len() > 0) else { return; }; let scrolloff = editor.config().scrolloff; let (view, doc) = current_ref!(editor); let map_value = |value: &Cow| { let value = LINE_ENDING_REGEX.replace_all(value, doc.line_ending.as_str()); let mut out = Tendril::from(value.as_ref()); for _ in 1..count { out.push_str(&value); } out }; let mut values_rev = values.rev().peekable(); // `values` is asserted to have at least one entry above. let last = values_rev.peek().unwrap(); let repeat = std::iter::repeat(map_value(last)); let mut values = values_rev .rev() .map(|value| map_value(&value)) .chain(repeat); let selection = doc.selection(view.id); let transaction = Transaction::change_by_selection(doc.text(), selection, |range| { if !range.is_empty() { (range.from(), range.to(), Some(values.next().unwrap())) } else { (range.from(), range.to(), None) } }); drop(values); let (view, doc) = current!(editor); doc.apply(&transaction, view.id); doc.append_changes_to_history(view); view.ensure_cursor_in_view(doc, scrolloff); } fn replace_selections_with_clipboard(cx: &mut Context) { replace_with_yanked_impl(cx.editor, '+', cx.count()); exit_select_mode(cx); } fn replace_selections_with_primary_clipboard(cx: &mut Context) { replace_with_yanked_impl(cx.editor, '*', cx.count()); exit_select_mode(cx); } fn paste(editor: &mut Editor, register: char, pos: Paste, count: usize) { let Some(values) = editor.registers.read(register, editor) else { return; }; let 
values: Vec<_> = values.map(|value| value.to_string()).collect();
let (view, doc) = current!(editor);
paste_impl(&values, doc, view, pos, count, editor.mode);
}

/// Paste after the selection from the chosen (or default yank) register.
fn paste_after(cx: &mut Context) {
    paste(
        cx.editor,
        cx.register
            .unwrap_or(cx.editor.config().default_yank_register),
        Paste::After,
        cx.count(),
    );
    exit_select_mode(cx);
}

/// Paste before the selection from the chosen (or default yank) register.
fn paste_before(cx: &mut Context) {
    paste(
        cx.editor,
        cx.register
            .unwrap_or(cx.editor.config().default_yank_register),
        Paste::Before,
        cx.count(),
    );
    exit_select_mode(cx);
}

/// Collect the sorted, deduplicated line numbers covered by all selections.
fn get_lines(doc: &Document, view_id: ViewId) -> Vec<usize> {
    // NOTE(review): the return type parameter was erased in transit; the
    // values pushed below come from `Range::line_range`, i.e. `usize` indices.
    let mut lines = Vec::new();

    // Get all line numbers
    for range in doc.selection(view_id) {
        let (start, end) = range.line_range(doc.text().slice(..));

        for line in start..=end {
            lines.push(line)
        }
    }
    lines.sort_unstable(); // sorting by usize so _unstable is preferred
    lines.dedup();
    lines
}

/// Indent every selected non-blank line by `count` indent levels.
fn indent(cx: &mut Context) {
    let count = cx.count();
    let (view, doc) = current!(cx.editor);
    let lines = get_lines(doc, view.id);

    // Indent by one level
    let indent = Tendril::from(doc.indent_style.as_str().repeat(count));

    let transaction = Transaction::change(
        doc.text(),
        lines.into_iter().filter_map(|line| {
            let is_blank = doc.text().line(line).chunks().all(|s| s.trim().is_empty());
            if is_blank {
                return None;
            }
            let pos = doc.text().line_to_char(line);
            Some((pos, pos, Some(indent.clone())))
        }),
    );
    doc.apply(&transaction, view.id);
    exit_select_mode(cx);
}

/// Unindent every selected line by up to `count` indent levels.
fn unindent(cx: &mut Context) {
    let count = cx.count();
    let (view, doc) = current!(cx.editor);
    let lines = get_lines(doc, view.id);
    let mut changes = Vec::with_capacity(lines.len());
    let tab_width = doc.tab_width();
    let indent_width = count * doc.indent_width();

    for line_idx in lines {
        let line = doc.text().line(line_idx);
        let mut width = 0;
        let mut pos = 0;

        for ch in line.chars() {
            match ch {
                ' ' => width += 1,
                '\t' => width = (width / tab_width + 1) * tab_width,
                _ => break,
            }

            pos += 1;

            if width >= indent_width {
                break;
            }
        }

        // now delete from start to first non-blank
        if pos > 0 {
            let start = doc.text().line_to_char(line_idx);
            changes.push((start, start + pos, None))
        }
    }

    let transaction = Transaction::change(doc.text(), changes.into_iter());

    doc.apply(&transaction, view.id);
    exit_select_mode(cx);
}

/// Format the (single) selection via the language server's range formatting.
fn format_selections(cx: &mut Context) {
    use helix_lsp::{lsp, util::range_to_lsp_range};

    let (view, doc) = current!(cx.editor);
    let view_id = view.id;

    // via lsp if available
    // TODO: else via tree-sitter indentation calculations

    if doc.selection(view_id).len() != 1 {
        cx.editor
            .set_error("format_selections only supports a single selection for now");
        return;
    }

    // TODO extra LanguageServerFeature::FormatSelections?
    // maybe such that LanguageServerFeature::Format contains it as well
    let Some(language_server) = doc
        .language_servers_with_feature(LanguageServerFeature::Format)
        .find(|ls| {
            matches!(
                ls.capabilities().document_range_formatting_provider,
                Some(lsp::OneOf::Left(true) | lsp::OneOf::Right(_))
            )
        })
    else {
        cx.editor
            .set_error("No configured language server supports range formatting");
        return;
    };

    let offset_encoding = language_server.offset_encoding();
    // NOTE(review): the element type was erased in transit; `range_to_lsp_range`
    // returns `lsp::Range` values.
    let ranges: Vec<lsp::Range> = doc
        .selection(view_id)
        .iter()
        .map(|range| range_to_lsp_range(doc.text(), *range, offset_encoding))
        .collect();

    // TODO: handle fails
    // TODO: concurrent map over all ranges

    let range = ranges[0];

    let future = language_server
        .text_document_range_formatting(
            doc.identifier(),
            range,
            lsp::FormattingOptions {
                tab_size: doc.tab_width() as u32,
                insert_spaces: matches!(doc.indent_style, IndentStyle::Spaces(_)),
                ..Default::default()
            },
            None,
        )
        .unwrap();

    let edits = tokio::task::block_in_place(|| helix_lsp::block_on(future))
        .ok()
        .flatten()
        .unwrap_or_default();

    let transaction =
        helix_lsp::util::generate_transaction_from_edits(doc.text(), edits, offset_encoding);

    doc.apply(&transaction, view_id);
}

/// Join the lines covered by each selection into one line, collapsing
/// repeated comment tokens.
fn join_selections_impl(cx: &mut Context, select_space: bool) {
    use movement::skip_while;
    let (view, doc) = current!(cx.editor);
    let text = doc.text();
    let slice = text.slice(..);
let comment_tokens = doc
    .language_config()
    .and_then(|config| config.comment_tokens.as_deref())
    .unwrap_or(&[]);
// Sort by length to handle Rust's /// vs //
let mut comment_tokens: Vec<&str> = comment_tokens.iter().map(|x| x.as_str()).collect();
comment_tokens.sort_unstable_by_key(|x| std::cmp::Reverse(x.len()));

let mut changes = Vec::new();

for selection in doc.selection(view.id) {
    let (start, mut end) = selection.line_range(slice);
    if start == end {
        end = (end + 1).min(text.len_lines() - 1);
    }
    let lines = start..end;
    changes.reserve(lines.len());

    let first_line_idx = slice.line_to_char(start);
    let first_line_idx = skip_while(slice, first_line_idx, |ch| matches!(ch, ' ' | '\t'))
        .unwrap_or(first_line_idx);
    let first_line = slice.slice(first_line_idx..);
    let mut current_comment_token = comment_tokens
        .iter()
        .find(|token| first_line.starts_with(token));

    for line in lines {
        let start = line_end_char_index(&slice, line);
        let mut end = text.line_to_char(line + 1);
        end = skip_while(slice, end, |ch| matches!(ch, ' ' | '\t')).unwrap_or(end);
        let slice_from_end = slice.slice(end..);
        if let Some(token) = comment_tokens
            .iter()
            .find(|token| slice_from_end.starts_with(token))
        {
            if Some(token) == current_comment_token {
                end += token.chars().count();
                end = skip_while(slice, end, |ch| matches!(ch, ' ' | '\t')).unwrap_or(end);
            } else {
                // update current token, but don't delete this one.
                current_comment_token = Some(token);
            }
        }

        let separator = if end == line_end_char_index(&slice, line + 1) {
            // the joining line contains only space-characters => don't include a whitespace when joining
            None
        } else {
            Some(Tendril::from(" "))
        };
        changes.push((start, end, separator));
    }
}

// nothing to do, bail out early to avoid crashes later
if changes.is_empty() {
    return;
}

changes.sort_unstable_by_key(|(from, _to, _text)| *from);
changes.dedup();

// select inserted spaces
let transaction = if select_space {
    let mut offset: usize = 0;
    let ranges: SmallVec<_> = changes
        .iter()
        .filter_map(|change| {
            if change.2.is_some() {
                let range = Range::point(change.0 - offset);
                offset += change.1 - change.0 - 1; // -1 adjusts for the replacement of the range by a space
                Some(range)
            } else {
                offset += change.1 - change.0;
                None
            }
        })
        .collect();
    let t = Transaction::change(text, changes.into_iter());
    if ranges.is_empty() {
        t
    } else {
        let selection = Selection::new(ranges, 0);
        t.with_selection(selection)
    }
} else {
    Transaction::change(text, changes.into_iter())
};

doc.apply(&transaction, view.id);
}

/// Prompt for a regex and keep (or remove) the selections matching it.
fn keep_or_remove_selections_impl(cx: &mut Context, remove: bool) {
    // keep or remove selections matching regex
    let reg = cx.register.unwrap_or('/');
    ui::regex_prompt(
        cx,
        if remove { "remove:" } else { "keep:" }.into(),
        Some(reg),
        ui::completers::none,
        move |cx, regex, event| {
            let (view, doc) = current!(cx.editor);
            if !matches!(event, PromptEvent::Update | PromptEvent::Validate) {
                return;
            }
            let text = doc.text().slice(..);

            // NOTE(review): `&regex` was mangled to `®ex` in transit; the
            // closure's `regex` argument is borrowed here.
            if let Some(selection) =
                selection::keep_or_remove_matches(text, doc.selection(view.id), &regex, remove)
            {
                doc.set_selection(view.id, selection);
            } else {
                cx.editor.set_error("no selections remaining");
            }
        },
    )
}

fn join_selections(cx: &mut Context) {
    join_selections_impl(cx, false)
}

fn join_selections_space(cx: &mut Context) {
    join_selections_impl(cx, true)
}

fn keep_selections(cx: &mut Context) {
    keep_or_remove_selections_impl(cx, false)
}

fn remove_selections(cx:
&mut Context) {
    keep_or_remove_selections_impl(cx, true)
}

/// Collapse the selection to just the primary range.
fn keep_primary_selection(cx: &mut Context) {
    let (view, doc) = current!(cx.editor);
    // TODO: handle count
    let range = doc.selection(view.id).primary();
    doc.set_selection(view.id, Selection::single(range.anchor, range.head));
}

/// Drop the primary range, keeping the others (errors if it is the only one).
fn remove_primary_selection(cx: &mut Context) {
    let (view, doc) = current!(cx.editor);
    // TODO: handle count
    let selection = doc.selection(view.id);
    if selection.len() == 1 {
        cx.editor.set_error("no selections remaining");
        return;
    }
    let index = selection.primary_index();
    let selection = selection.clone().remove(index);

    doc.set_selection(view.id, selection);
}

/// Manually trigger completion at the primary cursor.
pub fn completion(cx: &mut Context) {
    let (view, doc) = current!(cx.editor);
    let range = doc.selection(view.id).primary();
    let text = doc.text().slice(..);
    let cursor = range.cursor(text);

    cx.editor
        .handlers
        .trigger_completions(cursor, doc.id(), view.id);
}

// comments

/// Builds the comment-toggling transaction from the document's comment tokens.
type CommentTransactionFn = fn(
    line_token: Option<&str>,
    block_tokens: Option<&[BlockCommentToken]>,
    doc: &Rope,
    selection: &Selection,
) -> Transaction;

/// Look up the document's line/block comment tokens, build a transaction via
/// `comment_transaction`, and apply it.
fn toggle_comments_impl(cx: &mut Context, comment_transaction: CommentTransactionFn) {
    let (view, doc) = current!(cx.editor);
    let line_token: Option<&str> = doc
        .language_config()
        .and_then(|lc| lc.comment_tokens.as_ref())
        .and_then(|tc| tc.first())
        .map(|tc| tc.as_str());
    let block_tokens: Option<&[BlockCommentToken]> = doc
        .language_config()
        .and_then(|lc| lc.block_comment_tokens.as_ref())
        .map(|tc| &tc[..]);

    let transaction =
        comment_transaction(line_token, block_tokens, doc.text(), doc.selection(view.id));

    doc.apply(&transaction, view.id);
    exit_select_mode(cx);
}

/// commenting behavior:
/// 1. only line comment tokens -> line comment
/// 2. each line block commented -> uncomment all lines
/// 3. whole selection block commented -> uncomment selection
/// 4. all lines not commented and block tokens -> comment uncommented lines
/// 5. no comment tokens and not block commented -> line comment
fn toggle_comments(cx: &mut Context) {
    toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| {
        let text = doc.slice(..);

        // only have line comment tokens
        if line_token.is_some() && block_tokens.is_none() {
            return comment::toggle_line_comments(doc, selection, line_token);
        }

        let split_lines = comment::split_lines_of_selection(text, selection);

        let default_block_tokens = &[BlockCommentToken::default()];
        let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens);

        let (line_commented, line_comment_changes) =
            comment::find_block_comments(block_comment_tokens, text, &split_lines);

        // block commented by line would also be block commented so check this first
        if line_commented {
            return comment::create_block_comment_transaction(
                doc,
                &split_lines,
                line_commented,
                line_comment_changes,
            )
            .0;
        }

        let (block_commented, comment_changes) =
            comment::find_block_comments(block_comment_tokens, text, selection);

        // check if selection has block comments
        if block_commented {
            return comment::create_block_comment_transaction(
                doc,
                selection,
                block_commented,
                comment_changes,
            )
            .0;
        }

        // not commented and only have block comment tokens
        if line_token.is_none() && block_tokens.is_some() {
            return comment::create_block_comment_transaction(
                doc,
                &split_lines,
                line_commented,
                line_comment_changes,
            )
            .0;
        }

        // not block commented at all and don't have any tokens
        comment::toggle_line_comments(doc, selection, line_token)
    })
}

/// Toggle line comments, falling back to block comments when only those exist.
fn toggle_line_comments(cx: &mut Context) {
    toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| {
        if line_token.is_none() && block_tokens.is_some() {
            let default_block_tokens = &[BlockCommentToken::default()];
            let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens);
            comment::toggle_block_comments(
                doc,
                &comment::split_lines_of_selection(doc.slice(..), selection),
                block_comment_tokens,
            )
        } else {
            comment::toggle_line_comments(doc, selection, line_token)
        }
    });
}

/// Toggle block comments, falling back to line comments when only those exist.
fn toggle_block_comments(cx: &mut Context) {
    toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| {
        if line_token.is_some() && block_tokens.is_none() {
            comment::toggle_line_comments(doc, selection, line_token)
        } else {
            let default_block_tokens = &[BlockCommentToken::default()];
            let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens);
            comment::toggle_block_comments(doc, selection, block_comment_tokens)
        }
    });
}

/// Cycle which range is primary, moving `count` steps in `direction`.
fn rotate_selections(cx: &mut Context, direction: Direction) {
    let count = cx.count();
    let (view, doc) = current!(cx.editor);
    let mut selection = doc.selection(view.id).clone();
    let index = selection.primary_index();
    let len = selection.len();
    selection.set_primary_index(match direction {
        Direction::Forward => (index + count) % len,
        Direction::Backward => (index + (len.saturating_sub(count) % len)) % len,
    });
    doc.set_selection(view.id, selection);
}

fn rotate_selections_forward(cx: &mut Context) {
    rotate_selections(cx, Direction::Forward)
}

fn rotate_selections_backward(cx: &mut Context) {
    rotate_selections(cx, Direction::Backward)
}

/// How `reorder_selection_contents` rearranges the selected fragments.
enum ReorderStrategy {
    RotateForward,
    RotateBackward,
    Reverse,
}

/// Rotate or reverse the *contents* of the selections, in groups of `count`
/// fragments (or all at once when no count is given).
fn reorder_selection_contents(cx: &mut Context, strategy: ReorderStrategy) {
    let count = cx.count;
    let (view, doc) = current!(cx.editor);
    let text = doc.text().slice(..);
    let selection = doc.selection(view.id);

    let mut fragments: Vec<_> = selection
        .slices(text)
        .map(|fragment| fragment.chunks().collect())
        .collect();

    let group = count
        .map(|count| count.get())
        .unwrap_or(fragments.len()) // default to rotating everything as one group
        .min(fragments.len());

    for chunk in fragments.chunks_mut(group) {
        // TODO: also modify main index
        match strategy {
            ReorderStrategy::RotateForward => chunk.rotate_right(1),
            ReorderStrategy::RotateBackward => chunk.rotate_left(1),
            ReorderStrategy::Reverse => chunk.reverse(),
        };
    }

    let transaction = Transaction::change(
        doc.text(),
        selection
            .ranges()
            .iter()
            .zip(fragments)
            .map(|(range, fragment)|
(range.from(), range.to(), Some(fragment))),
    );

doc.apply(&transaction, view.id);
}

fn rotate_selection_contents_forward(cx: &mut Context) {
    reorder_selection_contents(cx, ReorderStrategy::RotateForward)
}

fn rotate_selection_contents_backward(cx: &mut Context) {
    reorder_selection_contents(cx, ReorderStrategy::RotateBackward)
}

fn reverse_selection_contents(cx: &mut Context) {
    reorder_selection_contents(cx, ReorderStrategy::Reverse)
}

// tree sitter node selection

/// Expand the selection to the enclosing syntax node, remembering the old
/// selection so `shrink_selection` can restore it.
fn expand_selection(cx: &mut Context) {
    let motion = |editor: &mut Editor| {
        let (view, doc) = current!(editor);

        if let Some(syntax) = doc.syntax() {
            let text = doc.text().slice(..);
            let current_selection = doc.selection(view.id);
            let selection = object::expand_selection(syntax, text, current_selection.clone());

            // check if selection is different from the last one
            if *current_selection != selection {
                // save current selection so it can be restored using shrink_selection
                view.object_selections.push(current_selection.clone());

                doc.set_selection(view.id, selection);
            }
        }
    };
    cx.editor.apply_motion(motion);
}

/// Shrink back to a previously-expanded selection, or to the first child node.
fn shrink_selection(cx: &mut Context) {
    let motion = |editor: &mut Editor| {
        let (view, doc) = current!(editor);
        let current_selection = doc.selection(view.id);
        // try to restore previous selection
        if let Some(prev_selection) = view.object_selections.pop() {
            if current_selection.contains(&prev_selection) {
                doc.set_selection(view.id, prev_selection);
                return;
            } else {
                // clear existing selection as they can't be shrunk to anyway
                view.object_selections.clear();
            }
        }
        // if not previous selection, shrink to first child
        if let Some(syntax) = doc.syntax() {
            let text = doc.text().slice(..);
            let selection = object::shrink_selection(syntax, text, current_selection.clone());
            doc.set_selection(view.id, selection);
        }
    };
    cx.editor.apply_motion(motion);
}

/// Apply `sibling_fn` (next/previous sibling node) to the current selection.
// NOTE(review): the generic parameter list was erased in transit; `F` must be
// declared on the function for the `where` clause below to resolve.
fn select_sibling_impl<F>(cx: &mut Context, sibling_fn: F)
where
    F: Fn(&helix_core::Syntax, RopeSlice, Selection) -> Selection + 'static,
{
    let motion = move |editor: &mut Editor| {
        let (view, doc) = current!(editor);

        if let Some(syntax) = doc.syntax() {
            let text = doc.text().slice(..);
            let current_selection = doc.selection(view.id);
            let selection = sibling_fn(syntax, text, current_selection.clone());
            doc.set_selection(view.id, selection);
        }
    };
    cx.editor.apply_motion(motion);
}

fn select_next_sibling(cx: &mut Context) {
    select_sibling_impl(cx, object::select_next_sibling)
}

fn select_prev_sibling(cx: &mut Context) {
    select_sibling_impl(cx, object::select_prev_sibling)
}

/// Move or extend the selection to the start/end of the parent syntax node.
fn move_node_bound_impl(cx: &mut Context, dir: Direction, movement: Movement) {
    let motion = move |editor: &mut Editor| {
        let (view, doc) = current!(editor);

        if let Some(syntax) = doc.syntax() {
            let text = doc.text().slice(..);
            let current_selection = doc.selection(view.id);
            let selection = movement::move_parent_node_end(
                syntax,
                text,
                current_selection.clone(),
                dir,
                movement,
            );
            doc.set_selection(view.id, selection);
        }
    };
    cx.editor.apply_motion(motion);
}

pub fn move_parent_node_end(cx: &mut Context) {
    move_node_bound_impl(cx, Direction::Forward, Movement::Move)
}

pub fn move_parent_node_start(cx: &mut Context) {
    move_node_bound_impl(cx, Direction::Backward, Movement::Move)
}

pub fn extend_parent_node_end(cx: &mut Context) {
    move_node_bound_impl(cx, Direction::Forward, Movement::Extend)
}

pub fn extend_parent_node_start(cx: &mut Context) {
    move_node_bound_impl(cx, Direction::Backward, Movement::Extend)
}

/// Apply `select_fn` (all siblings / all children) to the current selection.
// NOTE(review): restored the erased `<F>` generic parameter list here as well.
fn select_all_impl<F>(editor: &mut Editor, select_fn: F)
where
    F: Fn(&Syntax, RopeSlice, Selection) -> Selection,
{
    let (view, doc) = current!(editor);

    if let Some(syntax) = doc.syntax() {
        let text = doc.text().slice(..);
        let current_selection = doc.selection(view.id);
        let selection = select_fn(syntax, text, current_selection.clone());
        doc.set_selection(view.id, selection);
    }
}

fn select_all_siblings(cx: &mut Context) {
    let motion = |editor: &mut Editor| {
        select_all_impl(editor, object::select_all_siblings);
    };

    cx.editor.apply_motion(motion);
}

fn select_all_children(cx: &mut Context) {
    let motion = |editor: &mut Editor| {
        select_all_impl(editor, object::select_all_children);
    };

    cx.editor.apply_motion(motion);
}

/// Jump the cursor to the bracket matching the one under it.
fn match_brackets(cx: &mut Context) {
    let (view, doc) = current!(cx.editor);
    let is_select = cx.editor.mode == Mode::Select;
    let text = doc.text();
    let text_slice = text.slice(..);

    let selection = doc.selection(view.id).clone().transform(|range| {
        let pos = range.cursor(text_slice);
        if let Some(matched_pos) = doc.syntax().map_or_else(
            || match_brackets::find_matching_bracket_plaintext(text.slice(..), pos),
            |syntax| match_brackets::find_matching_bracket_fuzzy(syntax, text.slice(..), pos),
        ) {
            range.put_cursor(text_slice, matched_pos, is_select)
        } else {
            range
        }
    });

    doc.set_selection(view.id, selection);
}

//

/// Jump forward in the view's jumplist, switching documents when needed.
fn jump_forward(cx: &mut Context) {
    let count = cx.count();
    let config = cx.editor.config();
    let view = view_mut!(cx.editor);
    let doc_id = view.doc;

    if let Some((id, selection)) = view.jumps.forward(count) {
        view.doc = *id;
        let selection = selection.clone();
        let (view, doc) = current!(cx.editor); // refetch doc

        if doc.id() != doc_id {
            view.add_to_history(doc_id);
        }

        doc.set_selection(view.id, selection);
        // Document we switch to might not have been opened in the view before
        doc.ensure_view_init(view.id);
        view.ensure_cursor_in_view_center(doc, config.scrolloff);
    };
}

/// Jump backward in the view's jumplist, switching documents when needed.
fn jump_backward(cx: &mut Context) {
    let count = cx.count();
    let config = cx.editor.config();
    let (view, doc) = current!(cx.editor);
    let doc_id = doc.id();

    if let Some((id, selection)) = view.jumps.backward(view.id, doc, count) {
        view.doc = *id;
        let selection = selection.clone();
        let (view, doc) = current!(cx.editor); // refetch doc

        if doc.id() != doc_id {
            view.add_to_history(doc_id);
        }

        doc.set_selection(view.id, selection);
        // Document we switch to might not have been opened in the view before
        doc.ensure_view_init(view.id);
        view.ensure_cursor_in_view_center(doc, config.scrolloff);
    };
}

/// Push the current selection onto the jumplist.
fn save_selection(cx: &mut Context) {
    let (view,
doc) = current!(cx.editor);
push_jump(view, doc);
cx.editor.set_status("Selection saved to jumplist");
}

fn rotate_view(cx: &mut Context) {
    cx.editor.focus_next()
}

fn rotate_view_reverse(cx: &mut Context) {
    cx.editor.focus_prev()
}

fn jump_view_right(cx: &mut Context) {
    cx.editor.focus_direction(tree::Direction::Right)
}

fn jump_view_left(cx: &mut Context) {
    cx.editor.focus_direction(tree::Direction::Left)
}

fn jump_view_up(cx: &mut Context) {
    cx.editor.focus_direction(tree::Direction::Up)
}

fn jump_view_down(cx: &mut Context) {
    cx.editor.focus_direction(tree::Direction::Down)
}

fn swap_view_right(cx: &mut Context) {
    cx.editor.swap_split_in_direction(tree::Direction::Right)
}

fn swap_view_left(cx: &mut Context) {
    cx.editor.swap_split_in_direction(tree::Direction::Left)
}

fn swap_view_up(cx: &mut Context) {
    cx.editor.swap_split_in_direction(tree::Direction::Up)
}

fn swap_view_down(cx: &mut Context) {
    cx.editor.swap_split_in_direction(tree::Direction::Down)
}

fn transpose_view(cx: &mut Context) {
    cx.editor.transpose_view()
}

/// Open a new split in the given direction specified by the action.
///
/// Maintain the current view (both the cursor's position and view in document).
fn split(editor: &mut Editor, action: Action) {
    let (view, doc) = current!(editor);
    let id = doc.id();
    let selection = doc.selection(view.id).clone();
    let offset = doc.view_offset(view.id);

    editor.switch(id, action);

    // match the selection in the previous view
    let (view, doc) = current!(editor);
    doc.set_selection(view.id, selection);
    // match the view scroll offset (switch doesn't handle this fully
    // since the selection is only matched after the split)
    doc.set_view_offset(view.id, offset);
}

fn hsplit(cx: &mut Context) {
    split(cx.editor, Action::HorizontalSplit);
}

fn hsplit_new(cx: &mut Context) {
    cx.editor.new_file(Action::HorizontalSplit);
}

fn vsplit(cx: &mut Context) {
    split(cx.editor, Action::VerticalSplit);
}

fn vsplit_new(cx: &mut Context) {
    cx.editor.new_file(Action::VerticalSplit);
}

/// Close the focused split (checking for unsaved buffers when it is the last one).
fn wclose(cx: &mut Context) {
    if cx.editor.tree.views().count() == 1 {
        if let Err(err) = typed::buffers_remaining_impl(cx.editor) {
            cx.editor.set_error(err.to_string());
            return;
        }
    }
    let view_id = view!(cx.editor).id;
    // close current split
    cx.editor.close(view_id);
}

/// Close every split except the focused one.
fn wonly(cx: &mut Context) {
    // NOTE(review): the turbofish's type argument was erased in transit; the
    // (ViewId, bool) pairs are collected into a Vec so the tree is no longer
    // borrowed while views are closed below.
    let views = cx
        .editor
        .tree
        .views()
        .map(|(v, focus)| (v.id, focus))
        .collect::<Vec<_>>();
    for (view_id, focus) in views {
        if !focus {
            cx.editor.close(view_id);
        }
    }
}

/// Prompt for a register and make it the selected register.
fn select_register(cx: &mut Context) {
    cx.editor.autoinfo = Some(Info::from_registers(
        "Select register",
        &cx.editor.registers,
    ));
    cx.on_next_key(move |cx, event| {
        cx.editor.autoinfo = None;
        if let Some(ch) = event.char() {
            cx.editor.selected_register = Some(ch);
        }
    })
}

/// Prompt for a register and paste its contents at the cursor.
fn insert_register(cx: &mut Context) {
    cx.editor.autoinfo = Some(Info::from_registers(
        "Insert register",
        &cx.editor.registers,
    ));
    cx.on_next_key(move |cx, event| {
        cx.editor.autoinfo = None;
        if let Some(ch) = event.char() {
            cx.register = Some(ch);
            paste(
                cx.editor,
                cx.register
                    .unwrap_or(cx.editor.config().default_yank_register),
                Paste::Cursor,
                cx.count(),
            );
        }
    })
}

/// Prompt for a source and a destination register, copying values between them.
fn copy_between_registers(cx: &mut Context) {
    cx.editor.autoinfo = Some(Info::from_registers(
        "Copy from register",
        &cx.editor.registers,
    ));
    cx.on_next_key(move |cx, event| {
        cx.editor.autoinfo = None;

        let Some(source) = event.char() else {
            return;
        };

        let Some(values) = cx.editor.registers.read(source, cx.editor) else {
            cx.editor.set_error(format!("register {source} is empty"));
            return;
        };
        let values: Vec<_> = values.map(|value| value.to_string()).collect();

        cx.editor.autoinfo = Some(Info::from_registers(
            "Copy into register",
            &cx.editor.registers,
        ));
        cx.on_next_key(move |cx, event| {
            cx.editor.autoinfo = None;

            let Some(dest) = event.char() else {
                return;
            };

            let n_values = values.len();
            match cx.editor.registers.write(dest, values) {
                Ok(_) => cx.editor.set_status(format!(
                    "yanked {n_values} value{} from register {source} to {dest}",
                    if n_values == 1 { "" } else { "s" }
                )),
                Err(err) => cx.editor.set_error(err.to_string()),
            }
        });
    });
}

fn align_view_top(cx: &mut Context) {
    let (view, doc) = current!(cx.editor);
    align_view(doc, view, Align::Top);
}

fn align_view_center(cx: &mut Context) {
    let (view, doc) = current!(cx.editor);
    align_view(doc, view, Align::Center);
}

fn align_view_bottom(cx: &mut Context) {
    let (view, doc) = current!(cx.editor);
    align_view(doc, view, Align::Bottom);
}

/// Horizontally center the cursor's column in the view (no-op under softwrap).
fn align_view_middle(cx: &mut Context) {
    let (view, doc) = current!(cx.editor);
    let inner_width = view.inner_width(doc);
    let text_fmt = doc.text_format(inner_width, None);
    // there is no horizontal position when softwrap is enabled
    if text_fmt.soft_wrap {
        return;
    }
    let doc_text = doc.text().slice(..);
    let pos = doc.selection(view.id).primary().cursor(doc_text);
    let pos = visual_offset_from_block(
        doc_text,
        doc.view_offset(view.id).anchor,
        pos,
        &text_fmt,
        &view.text_annotations(doc, None),
    )
    .0;

    let mut offset = doc.view_offset(view.id);
    offset.horizontal_offset = pos
        .col
        .saturating_sub((view.inner_area(doc).width as usize) / 2);
    doc.set_view_offset(view.id, offset);
}

fn scroll_up(cx: &mut Context) {
    scroll(cx, cx.count(), Direction::Backward,
false); } fn scroll_down(cx: &mut Context) { scroll(cx, cx.count(), Direction::Forward, false); } fn goto_ts_object_impl(cx: &mut Context, object: &'static str, direction: Direction) { let count = cx.count(); let motion = move |editor: &mut Editor| { let (view, doc) = current!(editor); if let Some((lang_config, syntax)) = doc.language_config().zip(doc.syntax()) { let text = doc.text().slice(..); let root = syntax.tree().root_node(); let selection = doc.selection(view.id).clone().transform(|range| { let new_range = movement::goto_treesitter_object( text, range, object, direction, root, lang_config, count, ); if editor.mode == Mode::Select { let head = if new_range.head < range.anchor { new_range.anchor } else { new_range.head }; Range::new(range.anchor, head) } else { new_range.with_direction(direction) } }); doc.set_selection(view.id, selection); } else { editor.set_status("Syntax-tree is not available in current buffer"); } }; cx.editor.apply_motion(motion); } fn goto_next_function(cx: &mut Context) { goto_ts_object_impl(cx, "function", Direction::Forward) } fn goto_prev_function(cx: &mut Context) { goto_ts_object_impl(cx, "function", Direction::Backward) } fn goto_next_class(cx: &mut Context) { goto_ts_object_impl(cx, "class", Direction::Forward) } fn goto_prev_class(cx: &mut Context) { goto_ts_object_impl(cx, "class", Direction::Backward) } fn goto_next_parameter(cx: &mut Context) { goto_ts_object_impl(cx, "parameter", Direction::Forward) } fn goto_prev_parameter(cx: &mut Context) { goto_ts_object_impl(cx, "parameter", Direction::Backward) } fn goto_next_comment(cx: &mut Context) { goto_ts_object_impl(cx, "comment", Direction::Forward) } fn goto_prev_comment(cx: &mut Context) { goto_ts_object_impl(cx, "comment", Direction::Backward) } fn goto_next_test(cx: &mut Context) { goto_ts_object_impl(cx, "test", Direction::Forward) } fn goto_prev_test(cx: &mut Context) { goto_ts_object_impl(cx, "test", Direction::Backward) } fn goto_next_entry(cx: &mut Context) { 
goto_ts_object_impl(cx, "entry", Direction::Forward) } fn goto_prev_entry(cx: &mut Context) { goto_ts_object_impl(cx, "entry", Direction::Backward) } fn select_textobject_around(cx: &mut Context) { select_textobject(cx, textobject::TextObject::Around); } fn select_textobject_inner(cx: &mut Context) { select_textobject(cx, textobject::TextObject::Inside); } fn select_textobject(cx: &mut Context, objtype: textobject::TextObject) { let count = cx.count(); cx.on_next_key(move |cx, event| { cx.editor.autoinfo = None; if let Some(ch) = event.char() { let textobject = move |editor: &mut Editor| { let (view, doc) = current!(editor); let text = doc.text().slice(..); let textobject_treesitter = |obj_name: &str, range: Range| -> Range { let (lang_config, syntax) = match doc.language_config().zip(doc.syntax()) { Some(t) => t, None => return range, }; textobject::textobject_treesitter( text, range, objtype, obj_name, syntax.tree().root_node(), lang_config, count, ) }; if ch == 'g' && doc.diff_handle().is_none() { editor.set_status("Diff is not available in current buffer"); return; } let textobject_change = |range: Range| -> Range { let diff_handle = doc.diff_handle().unwrap(); let diff = diff_handle.load(); let line = range.cursor_line(text); let hunk_idx = if let Some(hunk_idx) = diff.hunk_at(line as u32, false) { hunk_idx } else { return range; }; let hunk = diff.nth_hunk(hunk_idx).after; let start = text.line_to_char(hunk.start as usize); let end = text.line_to_char(hunk.end as usize); Range::new(start, end).with_direction(range.direction()) }; let selection = doc.selection(view.id).clone().transform(|range| { match ch { 'w' => textobject::textobject_word(text, range, objtype, count, false), 'W' => textobject::textobject_word(text, range, objtype, count, true), 't' => textobject_treesitter("class", range), 'f' => textobject_treesitter("function", range), 'a' => textobject_treesitter("parameter", range), 'c' => textobject_treesitter("comment", range), 'T' => 
textobject_treesitter("test", range), 'e' => textobject_treesitter("entry", range), 'p' => textobject::textobject_paragraph(text, range, objtype, count), 'm' => textobject::textobject_pair_surround_closest( doc.syntax(), text, range, objtype, count, ), 'g' => textobject_change(range), // TODO: cancel new ranges if inconsistent surround matches across lines ch if !ch.is_ascii_alphanumeric() => textobject::textobject_pair_surround( doc.syntax(), text, range, objtype, ch, count, ), _ => range, } }); doc.set_selection(view.id, selection); }; cx.editor.apply_motion(textobject); } }); let title = match objtype { textobject::TextObject::Inside => "Match inside", textobject::TextObject::Around => "Match around", _ => return, }; let help_text = [ ("w", "Word"), ("W", "WORD"), ("p", "Paragraph"), ("t", "Type definition (tree-sitter)"), ("f", "Function (tree-sitter)"), ("a", "Argument/parameter (tree-sitter)"), ("c", "Comment (tree-sitter)"), ("T", "Test (tree-sitter)"), ("e", "Data structure entry (tree-sitter)"), ("m", "Closest surrounding pair (tree-sitter)"), ("g", "Change"), (" ", "... or any character acting as a pair"), ]; cx.editor.autoinfo = Some(Info::new(title, &help_text)); } static SURROUND_HELP_TEXT: [(&str, &str); 6] = [ ("m", "Nearest matching pair"), ("( or )", "Parentheses"), ("{ or }", "Curly braces"), ("< or >", "Angled brackets"), ("[ or ]", "Square brackets"), (" ", "... or any character"), ]; fn surround_add(cx: &mut Context) { cx.on_next_key(move |cx, event| { cx.editor.autoinfo = None; let (view, doc) = current!(cx.editor); // surround_len is the number of new characters being added. 
let (open, close, surround_len) = match event.char() { Some(ch) => { let (o, c) = match_brackets::get_pair(ch); let mut open = Tendril::new(); open.push(o); let mut close = Tendril::new(); close.push(c); (open, close, 2) } None if event.code == KeyCode::Enter => ( doc.line_ending.as_str().into(), doc.line_ending.as_str().into(), 2 * doc.line_ending.len_chars(), ), None => return, }; let selection = doc.selection(view.id); let mut changes = Vec::with_capacity(selection.len() * 2); let mut ranges = SmallVec::with_capacity(selection.len()); let mut offs = 0; for range in selection.iter() { changes.push((range.from(), range.from(), Some(open.clone()))); changes.push((range.to(), range.to(), Some(close.clone()))); ranges.push( Range::new(offs + range.from(), offs + range.to() + surround_len) .with_direction(range.direction()), ); offs += surround_len; } let transaction = Transaction::change(doc.text(), changes.into_iter()) .with_selection(Selection::new(ranges, selection.primary_index())); doc.apply(&transaction, view.id); exit_select_mode(cx); }); cx.editor.autoinfo = Some(Info::new( "Surround selections with", &SURROUND_HELP_TEXT[1..], )); } fn surround_replace(cx: &mut Context) { let count = cx.count(); cx.on_next_key(move |cx, event| { cx.editor.autoinfo = None; let surround_ch = match event.char() { Some('m') => None, // m selects the closest surround pair Some(ch) => Some(ch), None => return, }; let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let selection = doc.selection(view.id); let change_pos = match surround::get_surround_pos(doc.syntax(), text, selection, surround_ch, count) { Ok(c) => c, Err(err) => { cx.editor.set_error(err.to_string()); return; } }; let selection = selection.clone(); let ranges: SmallVec<[Range; 1]> = change_pos.iter().map(|&p| Range::point(p)).collect(); doc.set_selection( view.id, Selection::new(ranges, selection.primary_index() * 2), ); cx.on_next_key(move |cx, event| { cx.editor.autoinfo = None; let (view, doc) 
= current!(cx.editor); let to = match event.char() { Some(to) => to, None => return doc.set_selection(view.id, selection), }; let (open, close) = match_brackets::get_pair(to); // the changeset has to be sorted to allow nested surrounds let mut sorted_pos: Vec<(usize, char)> = Vec::new(); for p in change_pos.chunks(2) { sorted_pos.push((p[0], open)); sorted_pos.push((p[1], close)); } sorted_pos.sort_unstable(); let transaction = Transaction::change( doc.text(), sorted_pos.iter().map(|&pos| { let mut t = Tendril::new(); t.push(pos.1); (pos.0, pos.0 + 1, Some(t)) }), ); doc.set_selection(view.id, selection); doc.apply(&transaction, view.id); exit_select_mode(cx); }); cx.editor.autoinfo = Some(Info::new( "Replace with a pair of", &SURROUND_HELP_TEXT[1..], )); }); cx.editor.autoinfo = Some(Info::new( "Replace surrounding pair of", &SURROUND_HELP_TEXT, )); } fn surround_delete(cx: &mut Context) { let count = cx.count(); cx.on_next_key(move |cx, event| { cx.editor.autoinfo = None; let surround_ch = match event.char() { Some('m') => None, // m selects the closest surround pair Some(ch) => Some(ch), None => return, }; let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let selection = doc.selection(view.id); let mut change_pos = match surround::get_surround_pos(doc.syntax(), text, selection, surround_ch, count) { Ok(c) => c, Err(err) => { cx.editor.set_error(err.to_string()); return; } }; change_pos.sort_unstable(); // the changeset has to be sorted to allow nested surrounds let transaction = Transaction::change(doc.text(), change_pos.into_iter().map(|p| (p, p + 1, None))); doc.apply(&transaction, view.id); exit_select_mode(cx); }); cx.editor.autoinfo = Some(Info::new("Delete surrounding pair of", &SURROUND_HELP_TEXT)); } #[derive(Eq, PartialEq)] enum ShellBehavior { Replace, Ignore, Insert, Append, } fn shell_pipe(cx: &mut Context) { shell_prompt(cx, "pipe:".into(), ShellBehavior::Replace); } fn shell_pipe_to(cx: &mut Context) { shell_prompt(cx, 
"pipe-to:".into(), ShellBehavior::Ignore); } fn shell_insert_output(cx: &mut Context) { shell_prompt(cx, "insert-output:".into(), ShellBehavior::Insert); } fn shell_append_output(cx: &mut Context) { shell_prompt(cx, "append-output:".into(), ShellBehavior::Append); } fn shell_keep_pipe(cx: &mut Context) { ui::prompt( cx, "keep-pipe:".into(), Some('|'), ui::completers::none, move |cx, input: &str, event: PromptEvent| { let shell = &cx.editor.config().shell; if event != PromptEvent::Validate { return; } if input.is_empty() { return; } let (view, doc) = current!(cx.editor); let selection = doc.selection(view.id); let mut ranges = SmallVec::with_capacity(selection.len()); let old_index = selection.primary_index(); let mut index: Option = None; let text = doc.text().slice(..); for (i, range) in selection.ranges().iter().enumerate() { let fragment = range.slice(text); if let Err(err) = shell_impl(shell, input, Some(fragment.into())) { log::debug!("Shell command failed: {}", err); } else { ranges.push(*range); if i >= old_index && index.is_none() { index = Some(ranges.len() - 1); } } } if ranges.is_empty() { cx.editor.set_error("No selections remaining"); return; } let index = index.unwrap_or_else(|| ranges.len() - 1); doc.set_selection(view.id, Selection::new(ranges, index)); }, ); } fn shell_impl(shell: &[String], cmd: &str, input: Option) -> anyhow::Result { tokio::task::block_in_place(|| helix_lsp::block_on(shell_impl_async(shell, cmd, input))) } async fn shell_impl_async( shell: &[String], cmd: &str, input: Option, ) -> anyhow::Result { use std::process::Stdio; use tokio::process::Command; ensure!(!shell.is_empty(), "No shell set"); let mut process = Command::new(&shell[0]); process .args(&shell[1..]) .arg(cmd) .stdout(Stdio::piped()) .stderr(Stdio::piped()); if input.is_some() || cfg!(windows) { process.stdin(Stdio::piped()); } else { process.stdin(Stdio::null()); } let mut process = match process.spawn() { Ok(process) => process, Err(e) => { log::error!("Failed to 
start shell: {}", e); return Err(e.into()); } }; let output = if let Some(mut stdin) = process.stdin.take() { let input_task = tokio::spawn(async move { if let Some(input) = input { helix_view::document::to_writer(&mut stdin, (encoding::UTF_8, false), &input) .await?; } anyhow::Ok(()) }); let (output, _) = tokio::join! { process.wait_with_output(), input_task, }; output? } else { // Process has no stdin, so we just take the output process.wait_with_output().await? }; let output = if !output.status.success() { if output.stderr.is_empty() { match output.status.code() { Some(exit_code) => bail!("Shell command failed: status {}", exit_code), None => bail!("Shell command failed"), } } String::from_utf8_lossy(&output.stderr) // Prioritize `stderr` output over `stdout` } else if !output.stderr.is_empty() { let stderr = String::from_utf8_lossy(&output.stderr); log::debug!("Command printed to stderr: {stderr}"); stderr } else { String::from_utf8_lossy(&output.stdout) }; Ok(Tendril::from(output)) } fn shell(cx: &mut compositor::Context, cmd: &str, behavior: &ShellBehavior) { let pipe = match behavior { ShellBehavior::Replace | ShellBehavior::Ignore => true, ShellBehavior::Insert | ShellBehavior::Append => false, }; let config = cx.editor.config(); let shell = &config.shell; let (view, doc) = current!(cx.editor); let selection = doc.selection(view.id); let mut changes = Vec::with_capacity(selection.len()); let mut ranges = SmallVec::with_capacity(selection.len()); let text = doc.text().slice(..); let mut shell_output: Option = None; let mut offset = 0isize; for range in selection.ranges() { let output = if let Some(output) = shell_output.as_ref() { output.clone() } else { let input = range.slice(text); match shell_impl(shell, cmd, pipe.then(|| input.into())) { Ok(mut output) => { if !input.ends_with("\n") && output.ends_with('\n') { output.pop(); if output.ends_with('\r') { output.pop(); } } if !pipe { shell_output = Some(output.clone()); } output } Err(err) => { 
cx.editor.set_error(err.to_string()); return; } } }; let output_len = output.chars().count(); let (from, to, deleted_len) = match behavior { ShellBehavior::Replace => (range.from(), range.to(), range.len()), ShellBehavior::Insert => (range.from(), range.from(), 0), ShellBehavior::Append => (range.to(), range.to(), 0), _ => (range.from(), range.from(), 0), }; // These `usize`s cannot underflow because selection ranges cannot overlap. let anchor = to .checked_add_signed(offset) .expect("Selection ranges cannot overlap") .checked_sub(deleted_len) .expect("Selection ranges cannot overlap"); let new_range = Range::new(anchor, anchor + output_len).with_direction(range.direction()); ranges.push(new_range); offset = offset .checked_add_unsigned(output_len) .expect("Selection ranges cannot overlap") .checked_sub_unsigned(deleted_len) .expect("Selection ranges cannot overlap"); changes.push((from, to, Some(output))); } if behavior != &ShellBehavior::Ignore { let transaction = Transaction::change(doc.text(), changes.into_iter()) .with_selection(Selection::new(ranges, selection.primary_index())); doc.apply(&transaction, view.id); doc.append_changes_to_history(view); } // after replace cursor may be out of bounds, do this to // make sure cursor is in view and update scroll as well view.ensure_cursor_in_view(doc, config.scrolloff); } fn shell_prompt(cx: &mut Context, prompt: Cow<'static, str>, behavior: ShellBehavior) { ui::prompt( cx, prompt, Some('|'), ui::completers::shell, move |cx, input: &str, event: PromptEvent| { if event != PromptEvent::Validate { return; } if input.is_empty() { return; } shell(cx, input, &behavior); }, ); } fn suspend(_cx: &mut Context) { #[cfg(not(windows))] { _cx.block_try_flush_writes().ok(); signal_hook::low_level::raise(signal_hook::consts::signal::SIGTSTP).unwrap(); } } fn add_newline_above(cx: &mut Context) { add_newline_impl(cx, Open::Above); } fn add_newline_below(cx: &mut Context) { add_newline_impl(cx, Open::Below) } fn add_newline_impl(cx: 
&mut Context, open: Open) { let count = cx.count(); let (view, doc) = current!(cx.editor); let selection = doc.selection(view.id); let text = doc.text(); let slice = text.slice(..); let changes = selection.into_iter().map(|range| { let (start, end) = range.line_range(slice); let line = match open { Open::Above => start, Open::Below => end + 1, }; let pos = text.line_to_char(line); ( pos, pos, Some(doc.line_ending.as_str().repeat(count).into()), ) }); let transaction = Transaction::change(text, changes); doc.apply(&transaction, view.id); } enum IncrementDirection { Increase, Decrease, } /// Increment objects within selections by count. fn increment(cx: &mut Context) { increment_impl(cx, IncrementDirection::Increase); } /// Decrement objects within selections by count. fn decrement(cx: &mut Context) { increment_impl(cx, IncrementDirection::Decrease); } /// Increment objects within selections by `amount`. /// A negative `amount` will decrement objects within selections. fn increment_impl(cx: &mut Context, increment_direction: IncrementDirection) { let sign = match increment_direction { IncrementDirection::Increase => 1, IncrementDirection::Decrease => -1, }; let mut amount = sign * cx.count() as i64; // If the register is `#` then increase or decrease the `amount` by 1 per element let increase_by = if cx.register == Some('#') { sign } else { 0 }; let (view, doc) = current!(cx.editor); let selection = doc.selection(view.id); let text = doc.text().slice(..); let mut new_selection_ranges = SmallVec::new(); let mut cumulative_length_diff: i128 = 0; let mut changes = vec![]; for range in selection { let selected_text: Cow = range.fragment(text); let new_from = ((range.from() as i128) + cumulative_length_diff) as usize; let incremented = [increment::integer, increment::date_time] .iter() .find_map(|incrementor| incrementor(selected_text.as_ref(), amount)); amount += increase_by; match incremented { None => { let new_range = Range::new( new_from, (range.to() as i128 + 
cumulative_length_diff) as usize, ); new_selection_ranges.push(new_range); } Some(new_text) => { let new_range = Range::new(new_from, new_from + new_text.len()); cumulative_length_diff += new_text.len() as i128 - selected_text.len() as i128; new_selection_ranges.push(new_range); changes.push((range.from(), range.to(), Some(new_text.into()))); } } } if !changes.is_empty() { let new_selection = Selection::new(new_selection_ranges, selection.primary_index()); let transaction = Transaction::change(doc.text(), changes.into_iter()); let transaction = transaction.with_selection(new_selection); doc.apply(&transaction, view.id); exit_select_mode(cx); } } fn goto_next_tabstop(cx: &mut Context) { goto_next_tabstop_impl(cx, Direction::Forward) } fn goto_prev_tabstop(cx: &mut Context) { goto_next_tabstop_impl(cx, Direction::Backward) } fn goto_next_tabstop_impl(cx: &mut Context, direction: Direction) { let (view, doc) = current!(cx.editor); let view_id = view.id; let Some(mut snippet) = doc.active_snippet.take() else { cx.editor.set_error("no snippet is currently active"); return; }; let tabstop = match direction { Direction::Forward => Some(snippet.next_tabstop(doc.selection(view_id))), Direction::Backward => snippet .prev_tabstop(doc.selection(view_id)) .map(|selection| (selection, false)), }; let Some((selection, last_tabstop)) = tabstop else { return; }; doc.set_selection(view_id, selection); if !last_tabstop { doc.active_snippet = Some(snippet) } if cx.editor.mode() == Mode::Insert { cx.on_next_key_fallback(|cx, key| { if let Some(c) = key.char() { let (view, doc) = current!(cx.editor); if let Some(snippet) = &doc.active_snippet { doc.apply(&snippet.delete_placeholder(doc.text()), view.id); } insert_char(cx, c); } }) } } fn record_macro(cx: &mut Context) { if let Some((reg, mut keys)) = cx.editor.macro_recording.take() { // Remove the keypress which ends the recording keys.pop(); let s = keys .into_iter() .map(|key| { let s = key.to_string(); if s.chars().count() == 1 { s 
} else { format!("<{}>", s) } }) .collect::(); match cx.editor.registers.write(reg, vec![s]) { Ok(_) => cx .editor .set_status(format!("Recorded to register [{}]", reg)), Err(err) => cx.editor.set_error(err.to_string()), } } else { let reg = cx.register.take().unwrap_or('@'); cx.editor.macro_recording = Some((reg, Vec::new())); cx.editor .set_status(format!("Recording to register [{}]", reg)); } } fn replay_macro(cx: &mut Context) { let reg = cx.register.unwrap_or('@'); if cx.editor.macro_replaying.contains(®) { cx.editor.set_error(format!( "Cannot replay from register [{}] because already replaying from same register", reg )); return; } let keys: Vec = if let Some(keys) = cx .editor .registers .read(reg, cx.editor) .filter(|values| values.len() == 1) .map(|mut values| values.next().unwrap()) { match helix_view::input::parse_macro(&keys) { Ok(keys) => keys, Err(err) => { cx.editor.set_error(format!("Invalid macro: {}", err)); return; } } } else { cx.editor.set_error(format!("Register [{}] empty", reg)); return; }; // Once the macro has been fully validated, it's marked as being under replay // to ensure we don't fall into infinite recursion. cx.editor.macro_replaying.push(reg); let count = cx.count(); cx.callback.push(Box::new(move |compositor, cx| { for _ in 0..count { for &key in keys.iter() { compositor.handle_event(&compositor::Event::Key(key), cx); } } // The macro under replay is cleared at the end of the callback, not in the // macro replay context, or it will not correctly protect the user from // replaying recursively. 
cx.editor.macro_replaying.pop(); })); } fn goto_word(cx: &mut Context) { jump_to_word(cx, Movement::Move) } fn extend_to_word(cx: &mut Context) { jump_to_word(cx, Movement::Extend) } fn jump_to_label(cx: &mut Context, labels: Vec, behaviour: Movement) { let doc = doc!(cx.editor); let alphabet = &cx.editor.config().jump_label_alphabet; if labels.is_empty() { return; } let alphabet_char = |i| { let mut res = Tendril::new(); res.push(alphabet[i]); res }; // Add label for each jump candidate to the View as virtual text. let text = doc.text().slice(..); let mut overlays: Vec<_> = labels .iter() .enumerate() .flat_map(|(i, range)| { [ Overlay::new(range.from(), alphabet_char(i / alphabet.len())), Overlay::new( graphemes::next_grapheme_boundary(text, range.from()), alphabet_char(i % alphabet.len()), ), ] }) .collect(); overlays.sort_unstable_by_key(|overlay| overlay.char_idx); let (view, doc) = current!(cx.editor); doc.set_jump_labels(view.id, overlays); // Accept two characters matching a visible label. Jump to the candidate // for that label if it exists. let primary_selection = doc.selection(view.id).primary(); let view = view.id; let doc = doc.id(); cx.on_next_key(move |cx, event| { let alphabet = &cx.editor.config().jump_label_alphabet; let Some(i) = event .char() .filter(|_| event.modifiers.is_empty()) .and_then(|ch| alphabet.iter().position(|&it| it == ch)) else { doc_mut!(cx.editor, &doc).remove_jump_labels(view); return; }; let outer = i * alphabet.len(); // Bail if the given character cannot be a jump label. 
if outer > labels.len() { doc_mut!(cx.editor, &doc).remove_jump_labels(view); return; } cx.on_next_key(move |cx, event| { doc_mut!(cx.editor, &doc).remove_jump_labels(view); let alphabet = &cx.editor.config().jump_label_alphabet; let Some(inner) = event .char() .filter(|_| event.modifiers.is_empty()) .and_then(|ch| alphabet.iter().position(|&it| it == ch)) else { return; }; if let Some(mut range) = labels.get(outer + inner).copied() { range = if behaviour == Movement::Extend { let anchor = if range.anchor < range.head { let from = primary_selection.from(); if range.anchor < from { range.anchor } else { from } } else { let to = primary_selection.to(); if range.anchor > to { range.anchor } else { to } }; Range::new(anchor, range.head) } else { range.with_direction(Direction::Forward) }; doc_mut!(cx.editor, &doc).set_selection(view, range.into()); } }); }); } fn jump_to_word(cx: &mut Context, behaviour: Movement) { // Calculate the jump candidates: ranges for any visible words with two or // more characters. let alphabet = &cx.editor.config().jump_label_alphabet; let jump_label_limit = alphabet.len() * alphabet.len(); let mut words = Vec::with_capacity(jump_label_limit); let (view, doc) = current_ref!(cx.editor); let text = doc.text().slice(..); // This is not necessarily exact if there is virtual text like soft wrap. // It's ok though because the extra jump labels will not be rendered. 
let start = text.line_to_char(text.char_to_line(doc.view_offset(view.id).anchor)); let end = text.line_to_char(view.estimate_last_doc_line(doc) + 1); let primary_selection = doc.selection(view.id).primary(); let cursor = primary_selection.cursor(text); let mut cursor_fwd = Range::point(cursor); let mut cursor_rev = Range::point(cursor); if text.get_char(cursor).is_some_and(|c| !c.is_whitespace()) { let cursor_word_end = movement::move_next_word_end(text, cursor_fwd, 1); // single grapheme words need a special case if cursor_word_end.anchor == cursor { cursor_fwd = cursor_word_end; } let cursor_word_start = movement::move_prev_word_start(text, cursor_rev, 1); if cursor_word_start.anchor == next_grapheme_boundary(text, cursor) { cursor_rev = cursor_word_start; } } 'outer: loop { let mut changed = false; while cursor_fwd.head < end { cursor_fwd = movement::move_next_word_end(text, cursor_fwd, 1); // The cursor is on a word that is atleast two graphemes long and // madeup of word characters. The latter condition is needed because // move_next_word_end simply treats a sequence of characters from // the same char class as a word so `=<` would also count as a word. let add_label = text .slice(..cursor_fwd.head) .graphemes_rev() .take(2) .take_while(|g| g.chars().all(char_is_word)) .count() == 2; if !add_label { continue; } changed = true; // skip any leading whitespace cursor_fwd.anchor += text .chars_at(cursor_fwd.anchor) .take_while(|&c| !char_is_word(c)) .count(); words.push(cursor_fwd); if words.len() == jump_label_limit { break 'outer; } break; } while cursor_rev.head > start { cursor_rev = movement::move_prev_word_start(text, cursor_rev, 1); // The cursor is on a word that is atleast two graphemes long and // madeup of word characters. The latter condition is needed because // move_prev_word_start simply treats a sequence of characters from // the same char class as a word so `=<` would also count as a word. let add_label = text .slice(cursor_rev.head..) 
.graphemes() .take(2) .take_while(|g| g.chars().all(char_is_word)) .count() == 2; if !add_label { continue; } changed = true; cursor_rev.anchor -= text .chars_at(cursor_rev.anchor) .reversed() .take_while(|&c| !char_is_word(c)) .count(); words.push(cursor_rev); if words.len() == jump_label_limit { break 'outer; } break; } if !changed { break; } } jump_to_label(cx, words, behaviour) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/commands/000077500000000000000000000000001500614314100242535ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/commands/dap.rs000066400000000000000000000623661500614314100254020ustar00rootroot00000000000000use super::{Context, Editor}; use crate::{ compositor::{self, Compositor}, job::{Callback, Jobs}, ui::{self, overlay::overlaid, Picker, Popup, Prompt, PromptEvent, Text}, }; use dap::{StackFrame, Thread, ThreadStates}; use helix_core::syntax::{DebugArgumentValue, DebugConfigCompletion, DebugTemplate}; use helix_dap::{self as dap, Client}; use helix_lsp::block_on; use helix_view::editor::Breakpoint; use serde_json::{to_value, Value}; use tokio_stream::wrappers::UnboundedReceiverStream; use tui::text::Spans; use std::collections::HashMap; use std::future::Future; use std::path::PathBuf; use anyhow::{anyhow, bail}; use helix_view::handlers::dap::{breakpoints_changed, jump_to_stack_frame, select_thread_id}; fn thread_picker( cx: &mut Context, callback_fn: impl Fn(&mut Editor, &dap::Thread) + Send + 'static, ) { let debugger = debugger!(cx.editor); let future = debugger.threads(); dap_callback( cx.jobs, future, move |editor, compositor, response: dap::requests::ThreadsResponse| { let threads = response.threads; if threads.len() == 1 { callback_fn(editor, &threads[0]); return; } let debugger = debugger!(editor); let thread_states = debugger.thread_states.clone(); let columns = [ ui::PickerColumn::new("name", |item: &Thread, _| item.name.as_str().into()), ui::PickerColumn::new("state", 
|item: &Thread, thread_states: &ThreadStates| { thread_states .get(&item.id) .map(|state| state.as_str()) .unwrap_or("unknown") .into() }), ]; let picker = Picker::new( columns, 0, threads, thread_states, move |cx, thread, _action| callback_fn(cx.editor, thread), ) .with_preview(move |editor, thread| { let frames = editor.debugger.as_ref()?.stack_frames.get(&thread.id)?; let frame = frames.first()?; let path = frame.source.as_ref()?.path.as_ref()?.as_path(); let pos = Some(( frame.line.saturating_sub(1), frame.end_line.unwrap_or(frame.line).saturating_sub(1), )); Some((path.into(), pos)) }); compositor.push(Box::new(picker)); }, ); } fn get_breakpoint_at_current_line(editor: &mut Editor) -> Option<(usize, Breakpoint)> { let (view, doc) = current!(editor); let text = doc.text().slice(..); let line = doc.selection(view.id).primary().cursor_line(text); let path = doc.path()?; editor.breakpoints.get(path).and_then(|breakpoints| { let i = breakpoints.iter().position(|b| b.line == line); i.map(|i| (i, breakpoints[i].clone())) }) } // -- DAP fn dap_callback( jobs: &mut Jobs, call: impl Future> + 'static + Send, callback: F, ) where T: for<'de> serde::Deserialize<'de> + Send + 'static, F: FnOnce(&mut Editor, &mut Compositor, T) + Send + 'static, { let callback = Box::pin(async move { let json = call.await?; let response = serde_json::from_value(json)?; let call: Callback = Callback::EditorCompositor(Box::new( move |editor: &mut Editor, compositor: &mut Compositor| { callback(editor, compositor, response) }, )); Ok(call) }); jobs.callback(callback); } pub fn dap_start_impl( cx: &mut compositor::Context, name: Option<&str>, socket: Option, params: Option>>, ) -> Result<(), anyhow::Error> { let doc = doc!(cx.editor); let config = doc .language_config() .and_then(|config| config.debugger.as_ref()) .ok_or_else(|| anyhow!("No debug adapter available for language"))?; let result = match socket { Some(socket) => block_on(Client::tcp(socket, 0)), None => block_on(Client::process( 
&config.transport, &config.command, config.args.iter().map(|arg| arg.as_str()).collect(), config.port_arg.as_deref(), 0, )), }; let (mut debugger, events) = match result { Ok(r) => r, Err(e) => bail!("Failed to start debug session: {}", e), }; let request = debugger.initialize(config.name.clone()); if let Err(e) = block_on(request) { bail!("Failed to initialize debug adapter: {}", e); } debugger.quirks = config.quirks.clone(); // TODO: avoid refetching all of this... pass a config in let template = match name { Some(name) => config.templates.iter().find(|t| t.name == name), None => config.templates.first(), } .ok_or_else(|| anyhow!("No debug config with given name"))?; let mut args: HashMap<&str, Value> = HashMap::new(); for (k, t) in &template.args { let mut value = t.clone(); if let Some(ref params) = params { for (i, x) in params.iter().enumerate() { let mut param = x.to_string(); if let Some(DebugConfigCompletion::Advanced(cfg)) = template.completion.get(i) { if matches!(cfg.completion.as_deref(), Some("filename" | "directory")) { param = std::fs::canonicalize(x.as_ref()) .ok() .and_then(|pb| pb.into_os_string().into_string().ok()) .unwrap_or_else(|| x.to_string()); } } // For param #0 replace {0} in args let pattern = format!("{{{}}}", i); value = match value { // TODO: just use toml::Value -> json::Value DebugArgumentValue::String(v) => { DebugArgumentValue::String(v.replace(&pattern, ¶m)) } DebugArgumentValue::Array(arr) => DebugArgumentValue::Array( arr.iter().map(|v| v.replace(&pattern, ¶m)).collect(), ), DebugArgumentValue::Boolean(_) => value, }; } } match value { DebugArgumentValue::String(string) => { if let Ok(integer) = string.parse::() { args.insert(k, to_value(integer).unwrap()); } else { args.insert(k, to_value(string).unwrap()); } } DebugArgumentValue::Array(arr) => { args.insert(k, to_value(arr).unwrap()); } DebugArgumentValue::Boolean(bool) => { args.insert(k, to_value(bool).unwrap()); } } } args.insert("cwd", 
to_value(helix_stdx::env::current_working_dir())?); let args = to_value(args).unwrap(); let callback = |_editor: &mut Editor, _compositor: &mut Compositor, _response: Value| { // if let Err(e) = result { // editor.set_error(format!("Failed {} target: {}", template.request, e)); // } }; match &template.request[..] { "launch" => { let call = debugger.launch(args); dap_callback(cx.jobs, call, callback); } "attach" => { let call = debugger.attach(args); dap_callback(cx.jobs, call, callback); } request => bail!("Unsupported request '{}'", request), }; // TODO: either await "initialized" or buffer commands until event is received cx.editor.debugger = Some(debugger); let stream = UnboundedReceiverStream::new(events); cx.editor.debugger_events.push(stream); Ok(()) } pub fn dap_launch(cx: &mut Context) { if cx.editor.debugger.is_some() { cx.editor.set_error("Debugger is already running"); return; } let doc = doc!(cx.editor); let config = match doc .language_config() .and_then(|config| config.debugger.as_ref()) { Some(c) => c, None => { cx.editor .set_error("No debug adapter available for language"); return; } }; let templates = config.templates.clone(); let columns = [ui::PickerColumn::new( "template", |item: &DebugTemplate, _| item.name.as_str().into(), )]; cx.push_layer(Box::new(overlaid(Picker::new( columns, 0, templates, (), |cx, template, _action| { if template.completion.is_empty() { if let Err(err) = dap_start_impl(cx, Some(&template.name), None, None) { cx.editor.set_error(err.to_string()); } } else { let completions = template.completion.clone(); let name = template.name.clone(); let callback = Box::pin(async move { let call: Callback = Callback::EditorCompositor(Box::new(move |_editor, compositor| { let prompt = debug_parameter_prompt(completions, name, Vec::new()); compositor.push(Box::new(prompt)); })); Ok(call) }); cx.jobs.callback(callback); } }, )))); } pub fn dap_restart(cx: &mut Context) { let debugger = match &cx.editor.debugger { Some(debugger) => 
debugger, None => { cx.editor.set_error("Debugger is not running"); return; } }; if !debugger .capabilities() .supports_restart_request .unwrap_or(false) { cx.editor .set_error("Debugger does not support session restarts"); return; } if debugger.starting_request_args().is_none() { cx.editor .set_error("No arguments found with which to restart the sessions"); return; } dap_callback( cx.jobs, debugger.restart(), |editor, _compositor, _resp: ()| editor.set_status("Debugging session restarted"), ); } fn debug_parameter_prompt( completions: Vec, config_name: String, mut params: Vec, ) -> Prompt { let completion = completions.get(params.len()).unwrap(); let field_type = if let DebugConfigCompletion::Advanced(cfg) = completion { cfg.completion.as_deref().unwrap_or("") } else { "" }; let name = match completion { DebugConfigCompletion::Advanced(cfg) => cfg.name.as_deref().unwrap_or(field_type), DebugConfigCompletion::Named(name) => name.as_str(), }; let default_val = match completion { DebugConfigCompletion::Advanced(cfg) => cfg.default.as_deref().unwrap_or(""), _ => "", } .to_owned(); let completer = match field_type { "filename" => |editor: &Editor, input: &str| { ui::completers::filename_with_git_ignore(editor, input, false) }, "directory" => |editor: &Editor, input: &str| { ui::completers::directory_with_git_ignore(editor, input, false) }, _ => ui::completers::none, }; Prompt::new( format!("{}: ", name).into(), None, completer, move |cx, input: &str, event: PromptEvent| { if event != PromptEvent::Validate { return; } let mut value = input.to_owned(); if value.is_empty() { value = default_val.clone(); } params.push(value); if params.len() < completions.len() { let completions = completions.clone(); let config_name = config_name.clone(); let params = params.clone(); let callback = Box::pin(async move { let call: Callback = Callback::EditorCompositor(Box::new(move |_editor, compositor| { let prompt = debug_parameter_prompt(completions, config_name, params); 
compositor.push(Box::new(prompt)); })); Ok(call) }); cx.jobs.callback(callback); } else if let Err(err) = dap_start_impl( cx, Some(&config_name), None, Some(params.iter().map(|x| x.into()).collect()), ) { cx.editor.set_error(err.to_string()); } }, ) } pub fn dap_toggle_breakpoint(cx: &mut Context) { let (view, doc) = current!(cx.editor); let path = match doc.path() { Some(path) => path.clone(), None => { cx.editor .set_error("Can't set breakpoint: document has no path"); return; } }; let text = doc.text().slice(..); let line = doc.selection(view.id).primary().cursor_line(text); dap_toggle_breakpoint_impl(cx, path, line); } pub fn dap_toggle_breakpoint_impl(cx: &mut Context, path: PathBuf, line: usize) { // TODO: need to map breakpoints over edits and update them? // we shouldn't really allow editing while debug is running though let breakpoints = cx.editor.breakpoints.entry(path.clone()).or_default(); // TODO: always keep breakpoints sorted and use binary search to determine insertion point if let Some(pos) = breakpoints .iter() .position(|breakpoint| breakpoint.line == line) { breakpoints.remove(pos); } else { breakpoints.push(Breakpoint { line, ..Default::default() }); } let debugger = debugger!(cx.editor); if let Err(e) = breakpoints_changed(debugger, path, breakpoints) { cx.editor .set_error(format!("Failed to set breakpoints: {}", e)); } } pub fn dap_continue(cx: &mut Context) { let debugger = debugger!(cx.editor); if let Some(thread_id) = debugger.thread_id { let request = debugger.continue_thread(thread_id); dap_callback( cx.jobs, request, |editor, _compositor, _response: dap::requests::ContinueResponse| { debugger!(editor).resume_application(); }, ); } else { cx.editor .set_error("Currently active thread is not stopped. 
Switch the thread."); } } pub fn dap_pause(cx: &mut Context) { thread_picker(cx, |editor, thread| { let debugger = debugger!(editor); let request = debugger.pause(thread.id); // NOTE: we don't need to set active thread id here because DAP will emit a "stopped" event if let Err(e) = block_on(request) { editor.set_error(format!("Failed to pause: {}", e)); } }) } pub fn dap_step_in(cx: &mut Context) { let debugger = debugger!(cx.editor); if let Some(thread_id) = debugger.thread_id { let request = debugger.step_in(thread_id); dap_callback(cx.jobs, request, |editor, _compositor, _response: ()| { debugger!(editor).resume_application(); }); } else { cx.editor .set_error("Currently active thread is not stopped. Switch the thread."); } } pub fn dap_step_out(cx: &mut Context) { let debugger = debugger!(cx.editor); if let Some(thread_id) = debugger.thread_id { let request = debugger.step_out(thread_id); dap_callback(cx.jobs, request, |editor, _compositor, _response: ()| { debugger!(editor).resume_application(); }); } else { cx.editor .set_error("Currently active thread is not stopped. Switch the thread."); } } pub fn dap_next(cx: &mut Context) { let debugger = debugger!(cx.editor); if let Some(thread_id) = debugger.thread_id { let request = debugger.next(thread_id); dap_callback(cx.jobs, request, |editor, _compositor, _response: ()| { debugger!(editor).resume_application(); }); } else { cx.editor .set_error("Currently active thread is not stopped. 
Switch the thread."); } } pub fn dap_variables(cx: &mut Context) { let debugger = debugger!(cx.editor); if debugger.thread_id.is_none() { cx.editor .set_status("Cannot access variables while target is running."); return; } let (frame, thread_id) = match (debugger.active_frame, debugger.thread_id) { (Some(frame), Some(thread_id)) => (frame, thread_id), _ => { cx.editor .set_status("Cannot find current stack frame to access variables."); return; } }; let thread_frame = match debugger.stack_frames.get(&thread_id) { Some(thread_frame) => thread_frame, None => { cx.editor .set_error(format!("Failed to get stack frame for thread: {thread_id}")); return; } }; let stack_frame = match thread_frame.get(frame) { Some(stack_frame) => stack_frame, None => { cx.editor.set_error(format!( "Failed to get stack frame for thread {thread_id} and frame {frame}." )); return; } }; let frame_id = stack_frame.id; let scopes = match block_on(debugger.scopes(frame_id)) { Ok(s) => s, Err(e) => { cx.editor.set_error(format!("Failed to get scopes: {}", e)); return; } }; // TODO: allow expanding variables into sub-fields let mut variables = Vec::new(); let theme = &cx.editor.theme; let scope_style = theme.get("ui.linenr.selected"); let type_style = theme.get("ui.text"); let text_style = theme.get("ui.text.focus"); for scope in scopes.iter() { // use helix_view::graphics::Style; use tui::text::Span; let response = block_on(debugger.variables(scope.variables_reference)); variables.push(Spans::from(Span::styled( format!("▸ {}", scope.name), scope_style, ))); if let Ok(vars) = response { variables.reserve(vars.len()); for var in vars { let mut spans = Vec::with_capacity(5); spans.push(Span::styled(var.name.to_owned(), text_style)); if let Some(ty) = var.ty { spans.push(Span::raw(": ")); spans.push(Span::styled(ty.to_owned(), type_style)); } spans.push(Span::raw(" = ")); spans.push(Span::styled(var.value.to_owned(), text_style)); variables.push(Spans::from(spans)); } } } let contents = 
Text::from(tui::text::Text::from(variables)); let popup = Popup::new("dap-variables", contents); cx.replace_or_push_layer("dap-variables", popup); } pub fn dap_terminate(cx: &mut Context) { let debugger = debugger!(cx.editor); let request = debugger.disconnect(None); dap_callback(cx.jobs, request, |editor, _compositor, _response: ()| { // editor.set_error(format!("Failed to disconnect: {}", e)); editor.debugger = None; }); } pub fn dap_enable_exceptions(cx: &mut Context) { let debugger = debugger!(cx.editor); let filters = match &debugger.capabilities().exception_breakpoint_filters { Some(filters) => filters.iter().map(|f| f.filter.clone()).collect(), None => return, }; let request = debugger.set_exception_breakpoints(filters); dap_callback( cx.jobs, request, |_editor, _compositor, _response: dap::requests::SetExceptionBreakpointsResponse| { // editor.set_error(format!("Failed to set up exception breakpoints: {}", e)); }, ) } pub fn dap_disable_exceptions(cx: &mut Context) { let debugger = debugger!(cx.editor); let request = debugger.set_exception_breakpoints(Vec::new()); dap_callback( cx.jobs, request, |_editor, _compositor, _response: dap::requests::SetExceptionBreakpointsResponse| { // editor.set_error(format!("Failed to set up exception breakpoints: {}", e)); }, ) } // TODO: both edit condition and edit log need to be stable: we might get new breakpoints from the debugger which can change offsets pub fn dap_edit_condition(cx: &mut Context) { if let Some((pos, breakpoint)) = get_breakpoint_at_current_line(cx.editor) { let path = match doc!(cx.editor).path() { Some(path) => path.clone(), None => return, }; let callback = Box::pin(async move { let call: Callback = Callback::EditorCompositor(Box::new(move |editor, compositor| { let mut prompt = Prompt::new( "condition:".into(), None, ui::completers::none, move |cx, input: &str, event: PromptEvent| { if event != PromptEvent::Validate { return; } let breakpoints = &mut cx.editor.breakpoints.get_mut(&path).unwrap(); 
breakpoints[pos].condition = match input { "" => None, input => Some(input.to_owned()), }; let debugger = debugger!(cx.editor); if let Err(e) = breakpoints_changed(debugger, path.clone(), breakpoints) { cx.editor .set_error(format!("Failed to set breakpoints: {}", e)); } }, ); if let Some(condition) = breakpoint.condition { prompt.insert_str(&condition, editor) } compositor.push(Box::new(prompt)); })); Ok(call) }); cx.jobs.callback(callback); } } pub fn dap_edit_log(cx: &mut Context) { if let Some((pos, breakpoint)) = get_breakpoint_at_current_line(cx.editor) { let path = match doc!(cx.editor).path() { Some(path) => path.clone(), None => return, }; let callback = Box::pin(async move { let call: Callback = Callback::EditorCompositor(Box::new(move |editor, compositor| { let mut prompt = Prompt::new( "log-message:".into(), None, ui::completers::none, move |cx, input: &str, event: PromptEvent| { if event != PromptEvent::Validate { return; } let breakpoints = &mut cx.editor.breakpoints.get_mut(&path).unwrap(); breakpoints[pos].log_message = match input { "" => None, input => Some(input.to_owned()), }; let debugger = debugger!(cx.editor); if let Err(e) = breakpoints_changed(debugger, path.clone(), breakpoints) { cx.editor .set_error(format!("Failed to set breakpoints: {}", e)); } }, ); if let Some(log_message) = breakpoint.log_message { prompt.insert_str(&log_message, editor); } compositor.push(Box::new(prompt)); })); Ok(call) }); cx.jobs.callback(callback); } } pub fn dap_switch_thread(cx: &mut Context) { thread_picker(cx, |editor, thread| { block_on(select_thread_id(editor, thread.id, true)); }) } pub fn dap_switch_stack_frame(cx: &mut Context) { let debugger = debugger!(cx.editor); let thread_id = match debugger.thread_id { Some(thread_id) => thread_id, None => { cx.editor.set_error("No thread is currently active"); return; } }; let frames = debugger.stack_frames[&thread_id].clone(); let columns = [ui::PickerColumn::new("frame", |item: &StackFrame, _| { 
item.name.as_str().into() // TODO: include thread_states in the label })]; let picker = Picker::new(columns, 0, frames, (), move |cx, frame, _action| { let debugger = debugger!(cx.editor); // TODO: this should be simpler to find let pos = debugger.stack_frames[&thread_id] .iter() .position(|f| f.id == frame.id); debugger.active_frame = pos; let frame = debugger.stack_frames[&thread_id] .get(pos.unwrap_or(0)) .cloned(); if let Some(frame) = &frame { jump_to_stack_frame(cx.editor, frame); } }) .with_preview(move |_editor, frame| { frame .source .as_ref() .and_then(|source| source.path.as_ref()) .map(|path| { ( path.as_path().into(), Some(( frame.line.saturating_sub(1), frame.end_line.unwrap_or(frame.line).saturating_sub(1), )), ) }) }); cx.push_layer(Box::new(picker)) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/commands/lsp.rs000066400000000000000000001471541500614314100254330ustar00rootroot00000000000000use futures_util::{stream::FuturesOrdered, FutureExt}; use helix_lsp::{ block_on, lsp::{ self, CodeAction, CodeActionOrCommand, CodeActionTriggerKind, DiagnosticSeverity, NumberOrString, }, util::{diagnostic_to_lsp_diagnostic, lsp_range_to_range, range_to_lsp_range}, Client, LanguageServerId, OffsetEncoding, }; use tokio_stream::StreamExt; use tui::{text::Span, widgets::Row}; use super::{align_view, push_jump, Align, Context, Editor}; use helix_core::{ diagnostic::DiagnosticProvider, syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection, Uri, }; use helix_stdx::path; use helix_view::{ document::{DocumentInlayHints, DocumentInlayHintsId}, editor::Action, handlers::lsp::SignatureHelpInvoked, theme::Style, Document, View, }; use crate::{ compositor::{self, Compositor}, job::Callback, ui::{self, overlay::overlaid, FileLocation, Picker, Popup, PromptEvent}, }; use std::{cmp::Ordering, collections::HashSet, fmt::Display, future::Future, path::Path}; /// Gets the first language server that is attached to a document which supports a 
specific feature. /// If there is no configured language server that supports the feature, this displays a status message. /// Using this macro in a context where the editor automatically queries the LSP /// (instead of when the user explicitly does so via a keybind like `gd`) /// will spam the "No configured language server supports \" status message confusingly. #[macro_export] macro_rules! language_server_with_feature { ($editor:expr, $doc:expr, $feature:expr) => {{ let language_server = $doc.language_servers_with_feature($feature).next(); match language_server { Some(language_server) => language_server, None => { $editor.set_status(format!( "No configured language server supports {}", $feature )); return; } } }}; } /// A wrapper around `lsp::Location` that swaps out the LSP URI for `helix_core::Uri` and adds /// the server's offset encoding. #[derive(Debug, Clone, PartialEq, Eq)] struct Location { uri: Uri, range: lsp::Range, offset_encoding: OffsetEncoding, } fn lsp_location_to_location( location: lsp::Location, offset_encoding: OffsetEncoding, ) -> Option { let uri = match location.uri.try_into() { Ok(uri) => uri, Err(err) => { log::warn!("discarding invalid or unsupported URI: {err}"); return None; } }; Some(Location { uri, range: location.range, offset_encoding, }) } struct SymbolInformationItem { location: Location, symbol: lsp::SymbolInformation, } struct DiagnosticStyles { hint: Style, info: Style, warning: Style, error: Style, } struct PickerDiagnostic { location: Location, diag: lsp::Diagnostic, } fn location_to_file_location(location: &Location) -> Option { let path = location.uri.as_path()?; let line = Some(( location.range.start.line as usize, location.range.end.line as usize, )); Some((path.into(), line)) } fn jump_to_location(editor: &mut Editor, location: &Location, action: Action) { let (view, doc) = current!(editor); push_jump(view, doc); let Some(path) = location.uri.as_path() else { let err = format!("unable to convert URI to filepath: {:?}", 
location.uri); editor.set_error(err); return; }; jump_to_position( editor, path, location.range, location.offset_encoding, action, ); } fn jump_to_position( editor: &mut Editor, path: &Path, range: lsp::Range, offset_encoding: OffsetEncoding, action: Action, ) { let doc = match editor.open(path, action) { Ok(id) => doc_mut!(editor, &id), Err(err) => { let err = format!("failed to open path: {:?}: {:?}", path, err); editor.set_error(err); return; } }; let view = view_mut!(editor); // TODO: convert inside server let new_range = if let Some(new_range) = lsp_range_to_range(doc.text(), range, offset_encoding) { new_range } else { log::warn!("lsp position out of bounds - {:?}", range); return; }; // we flip the range so that the cursor sits on the start of the symbol // (for example start of the function). doc.set_selection(view.id, Selection::single(new_range.head, new_range.anchor)); if action.align_view(view, doc.id()) { align_view(doc, view, Align::Center); } } fn display_symbol_kind(kind: lsp::SymbolKind) -> &'static str { match kind { lsp::SymbolKind::FILE => "file", lsp::SymbolKind::MODULE => "module", lsp::SymbolKind::NAMESPACE => "namespace", lsp::SymbolKind::PACKAGE => "package", lsp::SymbolKind::CLASS => "class", lsp::SymbolKind::METHOD => "method", lsp::SymbolKind::PROPERTY => "property", lsp::SymbolKind::FIELD => "field", lsp::SymbolKind::CONSTRUCTOR => "construct", lsp::SymbolKind::ENUM => "enum", lsp::SymbolKind::INTERFACE => "interface", lsp::SymbolKind::FUNCTION => "function", lsp::SymbolKind::VARIABLE => "variable", lsp::SymbolKind::CONSTANT => "constant", lsp::SymbolKind::STRING => "string", lsp::SymbolKind::NUMBER => "number", lsp::SymbolKind::BOOLEAN => "boolean", lsp::SymbolKind::ARRAY => "array", lsp::SymbolKind::OBJECT => "object", lsp::SymbolKind::KEY => "key", lsp::SymbolKind::NULL => "null", lsp::SymbolKind::ENUM_MEMBER => "enummem", lsp::SymbolKind::STRUCT => "struct", lsp::SymbolKind::EVENT => "event", lsp::SymbolKind::OPERATOR => "operator", 
lsp::SymbolKind::TYPE_PARAMETER => "typeparam", _ => { log::warn!("Unknown symbol kind: {:?}", kind); "" } } } #[derive(Copy, Clone, PartialEq)] enum DiagnosticsFormat { ShowSourcePath, HideSourcePath, } type DiagnosticsPicker = Picker; fn diag_picker( cx: &Context, diagnostics: impl IntoIterator)>, format: DiagnosticsFormat, ) -> DiagnosticsPicker { // TODO: drop current_path comparison and instead use workspace: bool flag? // flatten the map to a vec of (url, diag) pairs let mut flat_diag = Vec::new(); for (uri, diags) in diagnostics { flat_diag.reserve(diags.len()); for (diag, provider) in diags { if let Some(ls) = provider .language_server_id() .and_then(|id| cx.editor.language_server_by_id(id)) { flat_diag.push(PickerDiagnostic { location: Location { uri: uri.clone(), range: diag.range, offset_encoding: ls.offset_encoding(), }, diag, }); } } } let styles = DiagnosticStyles { hint: cx.editor.theme.get("hint"), info: cx.editor.theme.get("info"), warning: cx.editor.theme.get("warning"), error: cx.editor.theme.get("error"), }; let mut columns = vec![ ui::PickerColumn::new( "severity", |item: &PickerDiagnostic, styles: &DiagnosticStyles| { match item.diag.severity { Some(DiagnosticSeverity::HINT) => Span::styled("HINT", styles.hint), Some(DiagnosticSeverity::INFORMATION) => Span::styled("INFO", styles.info), Some(DiagnosticSeverity::WARNING) => Span::styled("WARN", styles.warning), Some(DiagnosticSeverity::ERROR) => Span::styled("ERROR", styles.error), _ => Span::raw(""), } .into() }, ), ui::PickerColumn::new("code", |item: &PickerDiagnostic, _| { match item.diag.code.as_ref() { Some(NumberOrString::Number(n)) => n.to_string().into(), Some(NumberOrString::String(s)) => s.as_str().into(), None => "".into(), } }), ui::PickerColumn::new("message", |item: &PickerDiagnostic, _| { item.diag.message.as_str().into() }), ]; let mut primary_column = 2; // message if format == DiagnosticsFormat::ShowSourcePath { columns.insert( // between message code and message 2, 
ui::PickerColumn::new("path", |item: &PickerDiagnostic, _| { if let Some(path) = item.location.uri.as_path() { path::get_truncated_path(path) .to_string_lossy() .to_string() .into() } else { Default::default() } }), ); primary_column += 1; } Picker::new( columns, primary_column, flat_diag, styles, move |cx, diag, action| { jump_to_location(cx.editor, &diag.location, action); let (view, doc) = current!(cx.editor); view.diagnostics_handler .immediately_show_diagnostic(doc, view.id); }, ) .with_preview(move |_editor, diag| location_to_file_location(&diag.location)) .truncate_start(false) } pub fn symbol_picker(cx: &mut Context) { fn nested_to_flat( list: &mut Vec, file: &lsp::TextDocumentIdentifier, uri: &Uri, symbol: lsp::DocumentSymbol, offset_encoding: OffsetEncoding, ) { #[allow(deprecated)] list.push(SymbolInformationItem { symbol: lsp::SymbolInformation { name: symbol.name, kind: symbol.kind, tags: symbol.tags, deprecated: symbol.deprecated, location: lsp::Location::new(file.uri.clone(), symbol.selection_range), container_name: None, }, location: Location { uri: uri.clone(), range: symbol.selection_range, offset_encoding, }, }); for child in symbol.children.into_iter().flatten() { nested_to_flat(list, file, uri, child, offset_encoding); } } let doc = doc!(cx.editor); let mut seen_language_servers = HashSet::new(); let mut futures: FuturesOrdered<_> = doc .language_servers_with_feature(LanguageServerFeature::DocumentSymbols) .filter(|ls| seen_language_servers.insert(ls.id())) .map(|language_server| { let request = language_server.document_symbols(doc.identifier()).unwrap(); let offset_encoding = language_server.offset_encoding(); let doc_id = doc.identifier(); let doc_uri = doc .uri() .expect("docs with active language servers must be backed by paths"); async move { let symbols = match request.await? 
{ Some(symbols) => symbols, None => return anyhow::Ok(vec![]), }; // lsp has two ways to represent symbols (flat/nested) // convert the nested variant to flat, so that we have a homogeneous list let symbols = match symbols { lsp::DocumentSymbolResponse::Flat(symbols) => symbols .into_iter() .map(|symbol| SymbolInformationItem { location: Location { uri: doc_uri.clone(), range: symbol.location.range, offset_encoding, }, symbol, }) .collect(), lsp::DocumentSymbolResponse::Nested(symbols) => { let mut flat_symbols = Vec::new(); for symbol in symbols { nested_to_flat( &mut flat_symbols, &doc_id, &doc_uri, symbol, offset_encoding, ) } flat_symbols } }; Ok(symbols) } }) .collect(); if futures.is_empty() { cx.editor .set_error("No configured language server supports document symbols"); return; } cx.jobs.callback(async move { let mut symbols = Vec::new(); while let Some(response) = futures.next().await { match response { Ok(mut items) => symbols.append(&mut items), Err(err) => log::error!("Error requesting document symbols: {err}"), } } let call = move |_editor: &mut Editor, compositor: &mut Compositor| { let columns = [ ui::PickerColumn::new("kind", |item: &SymbolInformationItem, _| { display_symbol_kind(item.symbol.kind).into() }), // Some symbols in the document symbol picker may have a URI that isn't // the current file. It should be rare though, so we concatenate that // URI in with the symbol name in this picker. 
ui::PickerColumn::new("name", |item: &SymbolInformationItem, _| { item.symbol.name.as_str().into() }), ui::PickerColumn::new("container", |item: &SymbolInformationItem, _| { item.symbol .container_name .as_deref() .unwrap_or_default() .into() }), ]; let picker = Picker::new( columns, 1, // name column symbols, (), move |cx, item, action| { jump_to_location(cx.editor, &item.location, action); }, ) .with_preview(move |_editor, item| location_to_file_location(&item.location)) .truncate_start(false); compositor.push(Box::new(overlaid(picker))) }; Ok(Callback::EditorCompositor(Box::new(call))) }); } pub fn workspace_symbol_picker(cx: &mut Context) { use crate::ui::picker::Injector; let doc = doc!(cx.editor); if doc .language_servers_with_feature(LanguageServerFeature::WorkspaceSymbols) .count() == 0 { cx.editor .set_error("No configured language server supports workspace symbols"); return; } let get_symbols = |pattern: &str, editor: &mut Editor, _data, injector: &Injector<_, _>| { let doc = doc!(editor); let mut seen_language_servers = HashSet::new(); let mut futures: FuturesOrdered<_> = doc .language_servers_with_feature(LanguageServerFeature::WorkspaceSymbols) .filter(|ls| seen_language_servers.insert(ls.id())) .map(|language_server| { let request = language_server .workspace_symbols(pattern.to_string()) .unwrap(); let offset_encoding = language_server.offset_encoding(); async move { let symbols = request .await? 
.and_then(|resp| match resp { lsp::WorkspaceSymbolResponse::Flat(symbols) => Some(symbols), lsp::WorkspaceSymbolResponse::Nested(_) => None, }) .unwrap_or_default(); let response: Vec<_> = symbols .into_iter() .filter_map(|symbol| { let uri = match Uri::try_from(&symbol.location.uri) { Ok(uri) => uri, Err(err) => { log::warn!("discarding symbol with invalid URI: {err}"); return None; } }; Some(SymbolInformationItem { location: Location { uri, range: symbol.location.range, offset_encoding, }, symbol, }) }) .collect(); anyhow::Ok(response) } }) .collect(); if futures.is_empty() { editor.set_error("No configured language server supports workspace symbols"); } let injector = injector.clone(); async move { while let Some(response) = futures.next().await { match response { Ok(items) => { for item in items { injector.push(item)?; } } Err(err) => log::error!("Error requesting workspace symbols: {err}"), } } Ok(()) } .boxed() }; let columns = [ ui::PickerColumn::new("kind", |item: &SymbolInformationItem, _| { display_symbol_kind(item.symbol.kind).into() }), ui::PickerColumn::new("name", |item: &SymbolInformationItem, _| { item.symbol.name.as_str().into() }) .without_filtering(), ui::PickerColumn::new("container", |item: &SymbolInformationItem, _| { item.symbol .container_name .as_deref() .unwrap_or_default() .into() }), ui::PickerColumn::new("path", |item: &SymbolInformationItem, _| { if let Some(path) = item.location.uri.as_path() { path::get_relative_path(path) .to_string_lossy() .to_string() .into() } else { item.symbol.location.uri.to_string().into() } }), ]; let picker = Picker::new( columns, 1, // name column [], (), move |cx, item, action| { jump_to_location(cx.editor, &item.location, action); }, ) .with_preview(|_editor, item| location_to_file_location(&item.location)) .with_dynamic_query(get_symbols, None) .truncate_start(false); cx.push_layer(Box::new(overlaid(picker))); } pub fn diagnostics_picker(cx: &mut Context) { let doc = doc!(cx.editor); if let Some(uri) = 
doc.uri() { let diagnostics = cx.editor.diagnostics.get(&uri).cloned().unwrap_or_default(); let picker = diag_picker(cx, [(uri, diagnostics)], DiagnosticsFormat::HideSourcePath); cx.push_layer(Box::new(overlaid(picker))); } } pub fn workspace_diagnostics_picker(cx: &mut Context) { // TODO not yet filtered by LanguageServerFeature, need to do something similar as Document::shown_diagnostics here for all open documents let diagnostics = cx.editor.diagnostics.clone(); let picker = diag_picker(cx, diagnostics, DiagnosticsFormat::ShowSourcePath); cx.push_layer(Box::new(overlaid(picker))); } struct CodeActionOrCommandItem { lsp_item: lsp::CodeActionOrCommand, language_server_id: LanguageServerId, } impl ui::menu::Item for CodeActionOrCommandItem { type Data = (); fn format(&self, _data: &Self::Data) -> Row { match &self.lsp_item { lsp::CodeActionOrCommand::CodeAction(action) => action.title.as_str().into(), lsp::CodeActionOrCommand::Command(command) => command.title.as_str().into(), } } } /// Determines the category of the `CodeAction` using the `CodeAction::kind` field. /// Returns a number that represent these categories. /// Categories with a lower number should be displayed first. /// /// /// While the `kind` field is defined as open ended in the LSP spec (any value may be used) /// in practice a closed set of common values (mostly suggested in the LSP spec) are used. /// VSCode displays each of these categories separately (separated by a heading in the codeactions picker) /// to make them easier to navigate. Helix does not display these headings to the user. /// However it does sort code actions by their categories to achieve the same order as the VScode picker, /// just without the headings. 
/// /// The order used here is modeled after the [vscode sourcecode](https://github.com/microsoft/vscode/blob/eaec601dd69aeb4abb63b9601a6f44308c8d8c6e/src/vs/editor/contrib/codeAction/browser/codeActionWidget.ts>) fn action_category(action: &CodeActionOrCommand) -> u32 { if let CodeActionOrCommand::CodeAction(CodeAction { kind: Some(kind), .. }) = action { let mut components = kind.as_str().split('.'); match components.next() { Some("quickfix") => 0, Some("refactor") => match components.next() { Some("extract") => 1, Some("inline") => 2, Some("rewrite") => 3, Some("move") => 4, Some("surround") => 5, _ => 7, }, Some("source") => 6, _ => 7, } } else { 7 } } fn action_preferred(action: &CodeActionOrCommand) -> bool { matches!( action, CodeActionOrCommand::CodeAction(CodeAction { is_preferred: Some(true), .. }) ) } fn action_fixes_diagnostics(action: &CodeActionOrCommand) -> bool { matches!( action, CodeActionOrCommand::CodeAction(CodeAction { diagnostics: Some(diagnostics), .. }) if !diagnostics.is_empty() ) } pub fn code_action(cx: &mut Context) { let (view, doc) = current!(cx.editor); let selection_range = doc.selection(view.id).primary(); let mut seen_language_servers = HashSet::new(); let mut futures: FuturesOrdered<_> = doc .language_servers_with_feature(LanguageServerFeature::CodeAction) .filter(|ls| seen_language_servers.insert(ls.id())) // TODO this should probably already been filtered in something like "language_servers_with_feature" .filter_map(|language_server| { let offset_encoding = language_server.offset_encoding(); let language_server_id = language_server.id(); let range = range_to_lsp_range(doc.text(), selection_range, offset_encoding); // Filter and convert overlapping diagnostics let code_action_context = lsp::CodeActionContext { diagnostics: doc .diagnostics() .iter() .filter(|&diag| { selection_range .overlaps(&helix_core::Range::new(diag.range.start, diag.range.end)) }) .map(|diag| diagnostic_to_lsp_diagnostic(doc.text(), diag, offset_encoding)) 
.collect(), only: None, trigger_kind: Some(CodeActionTriggerKind::INVOKED), }; let code_action_request = language_server.code_actions(doc.identifier(), range, code_action_context)?; Some((code_action_request, language_server_id)) }) .map(|(request, ls_id)| async move { let Some(mut actions) = request.await? else { return anyhow::Ok(Vec::new()); }; // remove disabled code actions actions.retain(|action| { matches!( action, CodeActionOrCommand::Command(_) | CodeActionOrCommand::CodeAction(CodeAction { disabled: None, .. }) ) }); // Sort codeactions into a useful order. This behaviour is only partially described in the LSP spec. // Many details are modeled after vscode because language servers are usually tested against it. // VScode sorts the codeaction two times: // // First the codeactions that fix some diagnostics are moved to the front. // If both codeactions fix some diagnostics (or both fix none) the codeaction // that is marked with `is_preferred` is shown first. The codeactions are then shown in separate // submenus that only contain a certain category (see `action_category`) of actions. // // Below this done in in a single sorting step actions.sort_by(|action1, action2| { // sort actions by category let order = action_category(action1).cmp(&action_category(action2)); if order != Ordering::Equal { return order; } // within the categories sort by relevancy. 
// Modeled after the `codeActionsComparator` function in vscode: // https://github.com/microsoft/vscode/blob/eaec601dd69aeb4abb63b9601a6f44308c8d8c6e/src/vs/editor/contrib/codeAction/browser/codeAction.ts // if one code action fixes a diagnostic but the other one doesn't show it first let order = action_fixes_diagnostics(action1) .cmp(&action_fixes_diagnostics(action2)) .reverse(); if order != Ordering::Equal { return order; } // if one of the codeactions is marked as preferred show it first // otherwise keep the original LSP sorting action_preferred(action1) .cmp(&action_preferred(action2)) .reverse() }); Ok(actions .into_iter() .map(|lsp_item| CodeActionOrCommandItem { lsp_item, language_server_id: ls_id, }) .collect()) }) .collect(); if futures.is_empty() { cx.editor .set_error("No configured language server supports code actions"); return; } cx.jobs.callback(async move { let mut actions = Vec::new(); while let Some(output) = futures.next().await { match output { Ok(mut lsp_items) => actions.append(&mut lsp_items), Err(err) => log::error!("while gathering code actions: {err}"), } } let call = move |editor: &mut Editor, compositor: &mut Compositor| { if actions.is_empty() { editor.set_error("No code actions available"); return; } let mut picker = ui::Menu::new(actions, (), move |editor, action, event| { if event != PromptEvent::Validate { return; } // always present here let action = action.unwrap(); let Some(language_server) = editor.language_server_by_id(action.language_server_id) else { editor.set_error("Language Server disappeared"); return; }; let offset_encoding = language_server.offset_encoding(); match &action.lsp_item { lsp::CodeActionOrCommand::Command(command) => { log::debug!("code action command: {:?}", command); editor.execute_lsp_command(command.clone(), action.language_server_id); } lsp::CodeActionOrCommand::CodeAction(code_action) => { log::debug!("code action: {:?}", code_action); // we support lsp "codeAction/resolve" for `edit` and `command` 
fields let mut resolved_code_action = None; if code_action.edit.is_none() || code_action.command.is_none() { if let Some(future) = language_server.resolve_code_action(code_action) { if let Ok(code_action) = helix_lsp::block_on(future) { resolved_code_action = Some(code_action); } } } let resolved_code_action = resolved_code_action.as_ref().unwrap_or(code_action); if let Some(ref workspace_edit) = resolved_code_action.edit { let _ = editor.apply_workspace_edit(offset_encoding, workspace_edit); } // if code action provides both edit and command first the edit // should be applied and then the command if let Some(command) = &code_action.command { editor.execute_lsp_command(command.clone(), action.language_server_id); } } } }); picker.move_down(); // pre-select the first item let popup = Popup::new("code-action", picker).with_scrollbar(false); compositor.replace_or_push("code-action", popup); }; Ok(Callback::EditorCompositor(Box::new(call))) }); } #[derive(Debug)] pub struct ApplyEditError { pub kind: ApplyEditErrorKind, pub failed_change_idx: usize, } #[derive(Debug)] pub enum ApplyEditErrorKind { DocumentChanged, FileNotFound, UnknownURISchema, IoError(std::io::Error), // TODO: check edits before applying and propagate failure // InvalidEdit, } impl Display for ApplyEditErrorKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { ApplyEditErrorKind::DocumentChanged => f.write_str("document has changed"), ApplyEditErrorKind::FileNotFound => f.write_str("file not found"), ApplyEditErrorKind::UnknownURISchema => f.write_str("URI schema not supported"), ApplyEditErrorKind::IoError(err) => f.write_str(&format!("{err}")), } } } /// Precondition: `locations` should be non-empty. 
fn goto_impl(editor: &mut Editor, compositor: &mut Compositor, locations: Vec) { let cwdir = helix_stdx::env::current_working_dir(); match locations.as_slice() { [location] => { jump_to_location(editor, location, Action::Replace); } [] => unreachable!("`locations` should be non-empty for `goto_impl`"), _locations => { let columns = [ui::PickerColumn::new( "location", |item: &Location, cwdir: &std::path::PathBuf| { let path = if let Some(path) = item.uri.as_path() { path.strip_prefix(cwdir).unwrap_or(path).to_string_lossy() } else { item.uri.to_string().into() }; format!("{path}:{}", item.range.start.line + 1).into() }, )]; let picker = Picker::new(columns, 0, locations, cwdir, |cx, location, action| { jump_to_location(cx.editor, location, action) }) .with_preview(|_editor, location| location_to_file_location(location)); compositor.push(Box::new(overlaid(picker))); } } } fn goto_single_impl(cx: &mut Context, feature: LanguageServerFeature, request_provider: P) where P: Fn(&Client, lsp::Position, lsp::TextDocumentIdentifier) -> Option, F: Future>> + 'static + Send, { let (view, doc) = current_ref!(cx.editor); let mut futures: FuturesOrdered<_> = doc .language_servers_with_feature(feature) .map(|language_server| { let offset_encoding = language_server.offset_encoding(); let pos = doc.position(view.id, offset_encoding); let future = request_provider(language_server, pos, doc.identifier()).unwrap(); async move { anyhow::Ok((future.await?, offset_encoding)) } }) .collect(); cx.jobs.callback(async move { let mut locations = Vec::new(); while let Some(response) = futures.next().await { match response { Ok((response, offset_encoding)) => match response { Some(lsp::GotoDefinitionResponse::Scalar(lsp_location)) => { locations.extend(lsp_location_to_location(lsp_location, offset_encoding)); } Some(lsp::GotoDefinitionResponse::Array(lsp_locations)) => { locations.extend(lsp_locations.into_iter().flat_map(|location| { lsp_location_to_location(location, offset_encoding) })); } 
Some(lsp::GotoDefinitionResponse::Link(lsp_locations)) => { locations.extend( lsp_locations .into_iter() .map(|location_link| { lsp::Location::new( location_link.target_uri, location_link.target_range, ) }) .flat_map(|location| { lsp_location_to_location(location, offset_encoding) }), ); } None => (), }, Err(err) => log::error!("Error requesting locations: {err}"), } } let call = move |editor: &mut Editor, compositor: &mut Compositor| { if locations.is_empty() { editor.set_error("No definition found."); } else { goto_impl(editor, compositor, locations); } }; Ok(Callback::EditorCompositor(Box::new(call))) }); } pub fn goto_declaration(cx: &mut Context) { goto_single_impl( cx, LanguageServerFeature::GotoDeclaration, |ls, pos, doc_id| ls.goto_declaration(doc_id, pos, None), ); } pub fn goto_definition(cx: &mut Context) { goto_single_impl( cx, LanguageServerFeature::GotoDefinition, |ls, pos, doc_id| ls.goto_definition(doc_id, pos, None), ); } pub fn goto_type_definition(cx: &mut Context) { goto_single_impl( cx, LanguageServerFeature::GotoTypeDefinition, |ls, pos, doc_id| ls.goto_type_definition(doc_id, pos, None), ); } pub fn goto_implementation(cx: &mut Context) { goto_single_impl( cx, LanguageServerFeature::GotoImplementation, |ls, pos, doc_id| ls.goto_implementation(doc_id, pos, None), ); } pub fn goto_reference(cx: &mut Context) { let config = cx.editor.config(); let (view, doc) = current_ref!(cx.editor); let mut futures: FuturesOrdered<_> = doc .language_servers_with_feature(LanguageServerFeature::GotoReference) .map(|language_server| { let offset_encoding = language_server.offset_encoding(); let pos = doc.position(view.id, offset_encoding); let future = language_server .goto_reference( doc.identifier(), pos, config.lsp.goto_reference_include_declaration, None, ) .unwrap(); async move { anyhow::Ok((future.await?, offset_encoding)) } }) .collect(); cx.jobs.callback(async move { let mut locations = Vec::new(); while let Some(response) = futures.next().await { match 
response { Ok((lsp_locations, offset_encoding)) => locations.extend( lsp_locations .into_iter() .flatten() .flat_map(|location| lsp_location_to_location(location, offset_encoding)), ), Err(err) => log::error!("Error requesting references: {err}"), } } let call = move |editor: &mut Editor, compositor: &mut Compositor| { if locations.is_empty() { editor.set_error("No references found."); } else { goto_impl(editor, compositor, locations); } }; Ok(Callback::EditorCompositor(Box::new(call))) }); } pub fn signature_help(cx: &mut Context) { cx.editor .handlers .trigger_signature_help(SignatureHelpInvoked::Manual, cx.editor) } pub fn hover(cx: &mut Context) { use ui::lsp::hover::Hover; let (view, doc) = current!(cx.editor); if doc .language_servers_with_feature(LanguageServerFeature::Hover) .count() == 0 { cx.editor .set_error("No configured language server supports hover"); return; } let mut seen_language_servers = HashSet::new(); let mut futures: FuturesOrdered<_> = doc .language_servers_with_feature(LanguageServerFeature::Hover) .filter(|ls| seen_language_servers.insert(ls.id())) .map(|language_server| { let server_name = language_server.name().to_string(); // TODO: factor out a doc.position_identifier() that returns lsp::TextDocumentPositionIdentifier let pos = doc.position(view.id, language_server.offset_encoding()); let request = language_server .text_document_hover(doc.identifier(), pos, None) .unwrap(); async move { anyhow::Ok((server_name, request.await?)) } }) .collect(); cx.jobs.callback(async move { let mut hovers: Vec<(String, lsp::Hover)> = Vec::new(); while let Some(response) = futures.next().await { match response { Ok((server_name, Some(hover))) => hovers.push((server_name, hover)), Ok(_) => (), Err(err) => log::error!("Error requesting hover: {err}"), } } let call = move |editor: &mut Editor, compositor: &mut Compositor| { if hovers.is_empty() { editor.set_status("No hover results available."); return; } // create new popup let contents = 
Hover::new(hovers, editor.syn_loader.clone()); let popup = Popup::new(Hover::ID, contents).auto_close(true); compositor.replace_or_push(Hover::ID, popup); }; Ok(Callback::EditorCompositor(Box::new(call))) }); } pub fn rename_symbol(cx: &mut Context) { fn get_prefill_from_word_boundary(editor: &Editor) -> String { let (view, doc) = current_ref!(editor); let text = doc.text().slice(..); let primary_selection = doc.selection(view.id).primary(); if primary_selection.len() > 1 { primary_selection } else { use helix_core::textobject::{textobject_word, TextObject}; textobject_word(text, primary_selection, TextObject::Inside, 1, false) } .fragment(text) .into() } fn get_prefill_from_lsp_response( editor: &Editor, offset_encoding: OffsetEncoding, response: Option, ) -> Result { match response { Some(lsp::PrepareRenameResponse::Range(range)) => { let text = doc!(editor).text(); Ok(lsp_range_to_range(text, range, offset_encoding) .ok_or("lsp sent invalid selection range for rename")? .fragment(text.slice(..)) .into()) } Some(lsp::PrepareRenameResponse::RangeWithPlaceholder { placeholder, .. }) => { Ok(placeholder) } Some(lsp::PrepareRenameResponse::DefaultBehavior { .. 
}) => { Ok(get_prefill_from_word_boundary(editor)) } None => Err("lsp did not respond to prepare rename request"), } } fn create_rename_prompt( editor: &Editor, prefill: String, history_register: Option, language_server_id: Option, ) -> Box { let prompt = ui::Prompt::new( "rename-to:".into(), history_register, ui::completers::none, move |cx: &mut compositor::Context, input: &str, event: PromptEvent| { if event != PromptEvent::Validate { return; } let (view, doc) = current!(cx.editor); let Some(language_server) = doc .language_servers_with_feature(LanguageServerFeature::RenameSymbol) .find(|ls| language_server_id.map_or(true, |id| id == ls.id())) else { cx.editor .set_error("No configured language server supports symbol renaming"); return; }; let offset_encoding = language_server.offset_encoding(); let pos = doc.position(view.id, offset_encoding); let future = language_server .rename_symbol(doc.identifier(), pos, input.to_string()) .unwrap(); match block_on(future) { Ok(edits) => { let _ = cx .editor .apply_workspace_edit(offset_encoding, &edits.unwrap_or_default()); } Err(err) => cx.editor.set_error(err.to_string()), } }, ) .with_line(prefill, editor); Box::new(prompt) } let (view, doc) = current_ref!(cx.editor); let history_register = cx.register; if doc .language_servers_with_feature(LanguageServerFeature::RenameSymbol) .next() .is_none() { cx.editor .set_error("No configured language server supports symbol renaming"); return; } let language_server_with_prepare_rename_support = doc .language_servers_with_feature(LanguageServerFeature::RenameSymbol) .find(|ls| { matches!( ls.capabilities().rename_provider, Some(lsp::OneOf::Right(lsp::RenameOptions { prepare_provider: Some(true), .. 
})) ) }); if let Some(language_server) = language_server_with_prepare_rename_support { let ls_id = language_server.id(); let offset_encoding = language_server.offset_encoding(); let pos = doc.position(view.id, offset_encoding); let future = language_server .prepare_rename(doc.identifier(), pos) .unwrap(); cx.callback( future, move |editor, compositor, response: Option| { let prefill = match get_prefill_from_lsp_response(editor, offset_encoding, response) { Ok(p) => p, Err(e) => { editor.set_error(e); return; } }; let prompt = create_rename_prompt(editor, prefill, history_register, Some(ls_id)); compositor.push(prompt); }, ); } else { let prefill = get_prefill_from_word_boundary(cx.editor); let prompt = create_rename_prompt(cx.editor, prefill, history_register, None); cx.push_layer(prompt); } } pub fn select_references_to_symbol_under_cursor(cx: &mut Context) { let (view, doc) = current!(cx.editor); let language_server = language_server_with_feature!(cx.editor, doc, LanguageServerFeature::DocumentHighlight); let offset_encoding = language_server.offset_encoding(); let pos = doc.position(view.id, offset_encoding); let future = language_server .text_document_document_highlight(doc.identifier(), pos, None) .unwrap(); cx.callback( future, move |editor, _compositor, response: Option>| { let document_highlights = match response { Some(highlights) if !highlights.is_empty() => highlights, _ => return, }; let (view, doc) = current!(editor); let text = doc.text(); let pos = doc.selection(view.id).primary().cursor(text.slice(..)); // We must find the range that contains our primary cursor to prevent our primary cursor to move let mut primary_index = 0; let ranges = document_highlights .iter() .filter_map(|highlight| lsp_range_to_range(text, highlight.range, offset_encoding)) .enumerate() .map(|(i, range)| { if range.contains(pos) { primary_index = i; } range }) .collect(); let selection = Selection::new(ranges, primary_index); doc.set_selection(view.id, selection); }, ); } pub 
fn compute_inlay_hints_for_all_views(editor: &mut Editor, jobs: &mut crate::job::Jobs) { if !editor.config().lsp.display_inlay_hints { return; } for (view, _) in editor.tree.views() { let doc = match editor.documents.get(&view.doc) { Some(doc) => doc, None => continue, }; if let Some(callback) = compute_inlay_hints_for_view(view, doc) { jobs.callback(callback); } } } fn compute_inlay_hints_for_view( view: &View, doc: &Document, ) -> Option>>>> { let view_id = view.id; let doc_id = view.doc; let language_server = doc .language_servers_with_feature(LanguageServerFeature::InlayHints) .next()?; let doc_text = doc.text(); let len_lines = doc_text.len_lines(); // Compute ~3 times the current view height of inlay hints, that way some scrolling // will not show half the view with hints and half without while still being faster // than computing all the hints for the full file (which could be dozens of time // longer than the view is). let view_height = view.inner_height(); let first_visible_line = doc_text.char_to_line(doc.view_offset(view_id).anchor.min(doc_text.len_chars())); let first_line = first_visible_line.saturating_sub(view_height); let last_line = first_visible_line .saturating_add(view_height.saturating_mul(2)) .min(len_lines); let new_doc_inlay_hints_id = DocumentInlayHintsId { first_line, last_line, }; // Don't recompute the annotations in case nothing has changed about the view if !doc.inlay_hints_oudated && doc .inlay_hints(view_id) .is_some_and(|dih| dih.id == new_doc_inlay_hints_id) { return None; } let doc_slice = doc_text.slice(..); let first_char_in_range = doc_slice.line_to_char(first_line); let last_char_in_range = doc_slice.line_to_char(last_line); let range = helix_lsp::util::range_to_lsp_range( doc_text, helix_core::Range::new(first_char_in_range, last_char_in_range), language_server.offset_encoding(), ); let offset_encoding = language_server.offset_encoding(); let callback = super::make_job_callback( 
language_server.text_document_range_inlay_hints(doc.identifier(), range, None)?, move |editor, _compositor, response: Option>| { // The config was modified or the window was closed while the request was in flight if !editor.config().lsp.display_inlay_hints || editor.tree.try_get(view_id).is_none() { return; } // Add annotations to relevant document, not the current one (it may have changed in between) let doc = match editor.documents.get_mut(&doc_id) { Some(doc) => doc, None => return, }; // If we have neither hints nor an LSP, empty the inlay hints since they're now oudated let mut hints = match response { Some(hints) if !hints.is_empty() => hints, _ => { doc.set_inlay_hints( view_id, DocumentInlayHints::empty_with_id(new_doc_inlay_hints_id), ); doc.inlay_hints_oudated = false; return; } }; // Most language servers will already send them sorted but ensure this is the case to // avoid errors on our end. hints.sort_by_key(|inlay_hint| inlay_hint.position); let mut padding_before_inlay_hints = Vec::new(); let mut type_inlay_hints = Vec::new(); let mut parameter_inlay_hints = Vec::new(); let mut other_inlay_hints = Vec::new(); let mut padding_after_inlay_hints = Vec::new(); let doc_text = doc.text(); for hint in hints { let char_idx = match helix_lsp::util::lsp_pos_to_pos(doc_text, hint.position, offset_encoding) { Some(pos) => pos, // Skip inlay hints that have no "real" position None => continue, }; let label = match hint.label { lsp::InlayHintLabel::String(s) => s, lsp::InlayHintLabel::LabelParts(parts) => parts .into_iter() .map(|p| p.value) .collect::>() .join(""), }; let inlay_hints_vec = match hint.kind { Some(lsp::InlayHintKind::TYPE) => &mut type_inlay_hints, Some(lsp::InlayHintKind::PARAMETER) => &mut parameter_inlay_hints, // We can't warn on unknown kind here since LSPs are free to set it or not, for // example Rust Analyzer does not: every kind will be `None`. 
_ => &mut other_inlay_hints, }; if let Some(true) = hint.padding_left { padding_before_inlay_hints.push(InlineAnnotation::new(char_idx, " ")); } inlay_hints_vec.push(InlineAnnotation::new(char_idx, label)); if let Some(true) = hint.padding_right { padding_after_inlay_hints.push(InlineAnnotation::new(char_idx, " ")); } } doc.set_inlay_hints( view_id, DocumentInlayHints { id: new_doc_inlay_hints_id, type_inlay_hints, parameter_inlay_hints, other_inlay_hints, padding_before_inlay_hints, padding_after_inlay_hints, }, ); doc.inlay_hints_oudated = false; }, ); Some(callback) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/commands/typed.rs000066400000000000000000003564121500614314100257610ustar00rootroot00000000000000use std::fmt::Write; use std::io::BufReader; use std::ops::{self, Deref}; use crate::job::Job; use super::*; use helix_core::command_line::{Args, Flag, Signature, Token, TokenKind}; use helix_core::fuzzy::fuzzy_match; use helix_core::indent::MAX_INDENT; use helix_core::line_ending; use helix_stdx::path::home_dir; use helix_view::document::{read_to_string, DEFAULT_LANGUAGE_NAME}; use helix_view::editor::{CloseError, ConfigEvent}; use helix_view::expansion; use serde_json::Value; use ui::completers::{self, Completer}; #[derive(Clone)] pub struct TypableCommand { pub name: &'static str, pub aliases: &'static [&'static str], pub doc: &'static str, // params, flags, helper, completer pub fun: fn(&mut compositor::Context, Args, PromptEvent) -> anyhow::Result<()>, /// What completion methods, if any, does this command have? pub completer: CommandCompleter, pub signature: Signature, } impl TypableCommand { fn completer_for_argument_number(&self, n: usize) -> &Completer { match self.completer.positional_args.get(n) { Some(completer) => completer, _ => &self.completer.var_args, } } } #[derive(Clone)] pub struct CommandCompleter { // Arguments with specific completion methods based on their position. 
// (continuation of the `CommandCompleter` struct — its header is above this chunk)
    positional_args: &'static [Completer],

    // All remaining arguments will use this completion method, if set.
    var_args: Completer,
}

impl CommandCompleter {
    /// No completion for any argument.
    const fn none() -> Self {
        Self {
            positional_args: &[],
            var_args: completers::none,
        }
    }

    /// Positional completers for the leading arguments; no completion afterwards.
    const fn positional(completers: &'static [Completer]) -> Self {
        Self {
            positional_args: completers,
            var_args: completers::none,
        }
    }

    /// One completer applied to every argument.
    const fn all(completer: Completer) -> Self {
        Self {
            positional_args: &[],
            var_args: completer,
        }
    }
}

/// `:quit` — close the current view, refusing if this is the last view and
/// unsaved buffers remain.
fn quit(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> {
    log::debug!("quitting...");

    if event != PromptEvent::Validate {
        return Ok(());
    }

    // last view and we have unsaved changes
    if cx.editor.tree.views().count() == 1 {
        buffers_remaining_impl(cx.editor)?
    }

    cx.block_try_flush_writes()?;
    cx.editor.close(view!(cx.editor).id);

    Ok(())
}

/// `:quit!` — close the current view without checking for unsaved buffers.
fn force_quit(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    cx.block_try_flush_writes()?;
    cx.editor.close(view!(cx.editor).id);

    Ok(())
}

/// `:open` — open each argument as a file (with optional `file:row:col` position),
/// or a file picker when the argument is a directory.
fn open(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }
    for arg in args {
        let (path, pos) = crate::args::parse_file(&arg);
        let path = helix_stdx::path::expand_tilde(path);
        // If the path is a directory, open a file picker on that directory and update the status
        // message
        if let Ok(true) = std::fs::canonicalize(&path).map(|p| p.is_dir()) {
            let callback = async move {
                let call: job::Callback = job::Callback::EditorCompositor(Box::new(
                    move |editor: &mut Editor, compositor: &mut Compositor| {
                        let picker = ui::file_picker(editor, path.into_owned());
                        compositor.push(Box::new(overlaid(picker)));
                    },
                ));
                Ok(call)
            };
            cx.jobs.callback(callback);
        } else {
            // Otherwise, just open the file
            let _ = cx.editor.open(&path, Action::Replace)?;
            let (view, doc) = current!(cx.editor);
            let pos = Selection::point(pos_at_coords(doc.text().slice(..), pos, true));
            doc.set_selection(view.id, pos);
            // does not affect opening a buffer without pos
            align_view(doc, view, Align::Center);
        }
    }
    Ok(())
}

/// Close the given documents, reporting (and focusing one of) any that are
/// modified and therefore refused to close.
fn buffer_close_by_ids_impl(
    cx: &mut compositor::Context,
    doc_ids: &[DocumentId],
    force: bool,
) -> anyhow::Result<()> {
    cx.block_try_flush_writes()?;

    let (modified_ids, modified_names): (Vec<_>, Vec<_>) = doc_ids
        .iter()
        .filter_map(|&doc_id| {
            if let Err(CloseError::BufferModified(name)) = cx.editor.close_document(doc_id, force) {
                Some((doc_id, name))
            } else {
                None
            }
        })
        .unzip();

    if let Some(first) = modified_ids.first() {
        let current = doc!(cx.editor);
        // If the current document is unmodified, and there are modified
        // documents, switch focus to the first modified doc.
        if !modified_ids.contains(&current.id()) {
            cx.editor.switch(*first, Action::Replace);
        }
        bail!(
            "{} unsaved buffer{} remaining: {:?}",
            modified_names.len(),
            if modified_names.len() == 1 { "" } else { "s" },
            modified_names,
        );
    }
    Ok(())
}

/// Resolve command arguments to document ids; no arguments means the current
/// document. Unknown paths are reported via `set_error` but do not abort.
fn buffer_gather_paths_impl(editor: &mut Editor, args: Args) -> Vec<DocumentId> {
    // No arguments implies current document
    if args.is_empty() {
        let doc_id = view!(editor).doc;
        return vec![doc_id];
    }

    let mut nonexistent_buffers = vec![];
    let mut document_ids = vec![];
    for arg in args {
        let doc_id = editor.documents().find_map(|doc| {
            let arg_path = Some(Path::new(arg.as_ref()));
            if doc.path().map(|p| p.as_path()) == arg_path || doc.relative_path() == arg_path {
                Some(doc.id())
            } else {
                None
            }
        });

        match doc_id {
            Some(doc_id) => document_ids.push(doc_id),
            None => nonexistent_buffers.push(format!("'{}'", arg)),
        }
    }

    if !nonexistent_buffers.is_empty() {
        editor.set_error(format!(
            "cannot close non-existent buffers: {}",
            nonexistent_buffers.join(", ")
        ));
    }

    document_ids
}

/// `:buffer-close` — close the named (or current) buffers, keeping modified ones.
fn buffer_close(
    cx: &mut compositor::Context,
    args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let document_ids = buffer_gather_paths_impl(cx.editor, args);
    buffer_close_by_ids_impl(cx, &document_ids, false)
}

/// `:buffer-close!` — close the named (or current) buffers, discarding changes.
fn force_buffer_close(
    cx: &mut compositor::Context,
    args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let document_ids = buffer_gather_paths_impl(cx.editor, args);
    buffer_close_by_ids_impl(cx, &document_ids, true)
}

/// All document ids except the currently focused one.
fn buffer_gather_others_impl(editor: &mut Editor) -> Vec<DocumentId> {
    let current_document = &doc!(editor).id();
    editor
        .documents()
        .map(|doc| doc.id())
        .filter(|doc_id| doc_id != current_document)
        .collect()
}

/// `:buffer-close-others` — close all buffers except the current one.
fn buffer_close_others(
    cx: &mut compositor::Context,
    _args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let document_ids = buffer_gather_others_impl(cx.editor);
    buffer_close_by_ids_impl(cx, &document_ids, false)
}

/// `:buffer-close-others!` — as above, discarding changes.
fn force_buffer_close_others(
    cx: &mut compositor::Context,
    _args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let document_ids = buffer_gather_others_impl(cx.editor);
    buffer_close_by_ids_impl(cx, &document_ids, true)
}

/// Every open document id.
fn buffer_gather_all_impl(editor: &mut Editor) -> Vec<DocumentId> {
    editor.documents().map(|doc| doc.id()).collect()
}

/// `:buffer-close-all` — close all buffers, keeping modified ones.
fn buffer_close_all(
    cx: &mut compositor::Context,
    _args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let document_ids = buffer_gather_all_impl(cx.editor);
    buffer_close_by_ids_impl(cx, &document_ids, false)
}

/// `:buffer-close-all!` — close all buffers, discarding changes.
fn force_buffer_close_all(
    cx: &mut compositor::Context,
    _args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let document_ids = buffer_gather_all_impl(cx.editor);
    buffer_close_by_ids_impl(cx, &document_ids, true)
}

/// `:buffer-next` — focus the next buffer.
fn buffer_next(
    cx: &mut compositor::Context,
    _args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    goto_buffer(cx.editor, Direction::Forward, 1);
    Ok(())
}

/// `:buffer-previous` — focus the previous buffer.
fn buffer_previous(
    cx: &mut compositor::Context,
    _args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    goto_buffer(cx.editor, Direction::Backward, 1);
    Ok(())
}

/// Shared body of the `:write` family: apply configured trim/newline fixups,
/// checkpoint history, then either format-and-save asynchronously (when
/// auto-format applies) or save directly.
fn write_impl(cx: &mut compositor::Context, path: Option<&str>, force: bool) -> anyhow::Result<()> {
    let config = cx.editor.config();
    let jobs = &mut cx.jobs;
    let (view, doc) = current!(cx.editor);

    if doc.trim_trailing_whitespace() {
        trim_trailing_whitespace(doc, view.id);
    }
    if config.trim_final_newlines {
        trim_final_newlines(doc, view.id);
    }
    if doc.insert_final_newline() {
        insert_final_newline(doc, view.id);
    }

    // Save an undo checkpoint for any outstanding changes.
    doc.append_changes_to_history(view);

    let (view, doc) = current_ref!(cx.editor);
    let fmt = if config.auto_format {
        doc.auto_format(cx.editor).map(|fmt| {
            let callback = make_format_callback(
                doc.id(),
                doc.version(),
                view.id,
                fmt,
                Some((path.map(Into::into), force)),
            );

            jobs.add(Job::with_callback(callback).wait_before_exiting());
        })
    } else {
        None
    };

    if fmt.is_none() {
        let id = doc.id();
        cx.editor.save(id, path, force)?;
    }

    Ok(())
}

/// Trim all whitespace preceding line-endings in a document.
fn trim_trailing_whitespace(doc: &mut Document, view_id: ViewId) { let text = doc.text(); let mut pos = 0; let transaction = Transaction::delete( text, text.lines().filter_map(|line| { let line_end_len_chars = line_ending::get_line_ending(&line) .map(|le| le.len_chars()) .unwrap_or_default(); // Char after the last non-whitespace character or the beginning of the line if the // line is all whitespace: let first_trailing_whitespace = pos + line.last_non_whitespace_char().map_or(0, |idx| idx + 1); pos += line.len_chars(); // Char before the line ending character(s), or the final char in the text if there // is no line-ending on this line: let line_end = pos - line_end_len_chars; if first_trailing_whitespace != line_end { Some((first_trailing_whitespace, line_end)) } else { None } }), ); doc.apply(&transaction, view_id); } /// Trim any extra line-endings after the final line-ending. fn trim_final_newlines(doc: &mut Document, view_id: ViewId) { let rope = doc.text(); let mut text = rope.slice(..); let mut total_char_len = 0; let mut final_char_len = 0; while let Some(line_ending) = line_ending::get_line_ending(&text) { total_char_len += line_ending.len_chars(); final_char_len = line_ending.len_chars(); text = text.slice(..text.len_chars() - line_ending.len_chars()); } let chars_to_delete = total_char_len - final_char_len; if chars_to_delete != 0 { let transaction = Transaction::delete( rope, [(rope.len_chars() - chars_to_delete, rope.len_chars())].into_iter(), ); doc.apply(&transaction, view_id); } } /// Ensure that the document is terminated with a line ending. 
fn insert_final_newline(doc: &mut Document, view_id: ViewId) { let text = doc.text(); if text.len_chars() > 0 && line_ending::get_line_ending(&text.slice(..)).is_none() { let eof = Selection::point(text.len_chars()); let insert = Transaction::insert(text, &eof, doc.line_ending.as_str().into()); doc.apply(&insert, view_id); } } fn write(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } write_impl(cx, args.first(), false) } fn force_write(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } write_impl(cx, args.first(), true) } fn write_buffer_close( cx: &mut compositor::Context, args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } write_impl(cx, args.first(), false)?; let document_ids = buffer_gather_paths_impl(cx.editor, args); buffer_close_by_ids_impl(cx, &document_ids, false) } fn force_write_buffer_close( cx: &mut compositor::Context, args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } write_impl(cx, args.first(), true)?; let document_ids = buffer_gather_paths_impl(cx.editor, args); buffer_close_by_ids_impl(cx, &document_ids, false) } fn new_file(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } cx.editor.new_file(Action::Replace); Ok(()) } fn format(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let (view, doc) = current_ref!(cx.editor); let format = doc.format(cx.editor).context( "A formatter isn't available, and no language server provides formatting capabilities", )?; let callback = make_format_callback(doc.id(), doc.version(), view.id, format, None); cx.jobs.callback(callback); Ok(()) } fn 
set_indent_style( cx: &mut compositor::Context, args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } use IndentStyle::*; // If no argument, report current indent style. if args.is_empty() { let style = doc!(cx.editor).indent_style; cx.editor.set_status(match style { Tabs => "tabs".to_owned(), Spaces(1) => "1 space".to_owned(), Spaces(n) => format!("{} spaces", n), }); return Ok(()); } // Attempt to parse argument as an indent style. let style = match args.first() { Some(arg) if "tabs".starts_with(&arg.to_lowercase()) => Some(Tabs), Some("0") => Some(Tabs), Some(arg) => arg .parse::() .ok() .filter(|n| (1..=MAX_INDENT).contains(n)) .map(Spaces), _ => None, }; let style = style.context("invalid indent style")?; let doc = doc_mut!(cx.editor); doc.indent_style = style; Ok(()) } /// Sets or reports the current document's line ending setting. fn set_line_ending( cx: &mut compositor::Context, args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } use LineEnding::*; // If no argument, report current line ending setting. if args.is_empty() { let line_ending = doc!(cx.editor).line_ending; cx.editor.set_status(match line_ending { Crlf => "crlf", LF => "line feed", #[cfg(feature = "unicode-lines")] FF => "form feed", #[cfg(feature = "unicode-lines")] CR => "carriage return", #[cfg(feature = "unicode-lines")] Nel => "next line", // These should never be a document's default line ending. #[cfg(feature = "unicode-lines")] VT | LS | PS => "error", }); return Ok(()); } let arg = args .first() .context("argument missing")? .to_ascii_lowercase(); // Attempt to parse argument as a line ending. 
let line_ending = match arg { arg if arg.starts_with("crlf") => Crlf, arg if arg.starts_with("lf") => LF, #[cfg(feature = "unicode-lines")] arg if arg.starts_with("cr") => CR, #[cfg(feature = "unicode-lines")] arg if arg.starts_with("ff") => FF, #[cfg(feature = "unicode-lines")] arg if arg.starts_with("nel") => Nel, _ => bail!("invalid line ending"), }; let (view, doc) = current!(cx.editor); doc.line_ending = line_ending; let mut pos = 0; let transaction = Transaction::change( doc.text(), doc.text().lines().filter_map(|line| { pos += line.len_chars(); match helix_core::line_ending::get_line_ending(&line) { Some(ending) if ending != line_ending => { let start = pos - ending.len_chars(); let end = pos; Some((start, end, Some(line_ending.as_str().into()))) } _ => None, } }), ); doc.apply(&transaction, view.id); doc.append_changes_to_history(view); Ok(()) } fn earlier(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let uk = args.join(" ").parse::().map_err(|s| anyhow!(s))?; let (view, doc) = current!(cx.editor); let success = doc.earlier(view, uk); if !success { cx.editor.set_status("Already at oldest change"); } Ok(()) } fn later(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let uk = args.join(" ").parse::().map_err(|s| anyhow!(s))?; let (view, doc) = current!(cx.editor); let success = doc.later(view, uk); if !success { cx.editor.set_status("Already at newest change"); } Ok(()) } fn write_quit(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } write_impl(cx, args.first(), false)?; cx.block_try_flush_writes()?; quit(cx, Args::default(), event) } fn force_write_quit( cx: &mut compositor::Context, args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); 
} write_impl(cx, args.first(), true)?; cx.block_try_flush_writes()?; force_quit(cx, Args::default(), event) } /// Results in an error if there are modified buffers remaining and sets editor /// error, otherwise returns `Ok(())`. If the current document is unmodified, /// and there are modified documents, switches focus to one of them. pub(super) fn buffers_remaining_impl(editor: &mut Editor) -> anyhow::Result<()> { let modified_ids: Vec<_> = editor .documents() .filter(|doc| doc.is_modified()) .map(|doc| doc.id()) .collect(); if let Some(first) = modified_ids.first() { let current = doc!(editor); // If the current document is unmodified, and there are modified // documents, switch focus to the first modified doc. if !modified_ids.contains(¤t.id()) { editor.switch(*first, Action::Replace); } let modified_names: Vec<_> = modified_ids .iter() .map(|doc_id| doc!(editor, doc_id).display_name()) .collect(); bail!( "{} unsaved buffer{} remaining: {:?}", modified_names.len(), if modified_names.len() == 1 { "" } else { "s" }, modified_names, ); } Ok(()) } #[derive(Debug, Clone, Copy)] pub struct WriteAllOptions { pub force: bool, pub write_scratch: bool, pub auto_format: bool, } pub fn write_all_impl( cx: &mut compositor::Context, options: WriteAllOptions, ) -> anyhow::Result<()> { let mut errors: Vec<&'static str> = Vec::new(); let config = cx.editor.config(); let jobs = &mut cx.jobs; let saves: Vec<_> = cx .editor .documents .keys() .cloned() .collect::>() .into_iter() .filter_map(|id| { let doc = doc!(cx.editor, &id); if !doc.is_modified() { return None; } if doc.path().is_none() { if options.write_scratch { errors.push("cannot write a buffer without a filename"); } return None; } // Look for a view to apply the formatting change to. 
let target_view = cx.editor.get_synced_view_id(doc.id()); Some((id, target_view)) }) .collect(); for (doc_id, target_view) in saves { let doc = doc_mut!(cx.editor, &doc_id); let view = view_mut!(cx.editor, target_view); if doc.trim_trailing_whitespace() { trim_trailing_whitespace(doc, target_view); } if config.trim_final_newlines { trim_final_newlines(doc, target_view); } if doc.insert_final_newline() { insert_final_newline(doc, target_view); } // Save an undo checkpoint for any outstanding changes. doc.append_changes_to_history(view); let fmt = if options.auto_format && config.auto_format { let doc = doc!(cx.editor, &doc_id); doc.auto_format(cx.editor).map(|fmt| { let callback = make_format_callback( doc_id, doc.version(), target_view, fmt, Some((None, options.force)), ); jobs.add(Job::with_callback(callback).wait_before_exiting()); }) } else { None }; if fmt.is_none() { cx.editor.save::(doc_id, None, options.force)?; } } if !errors.is_empty() && !options.force { bail!("{:?}", errors); } Ok(()) } fn write_all(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } write_all_impl( cx, WriteAllOptions { force: false, write_scratch: true, auto_format: true, }, ) } fn force_write_all( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } write_all_impl( cx, WriteAllOptions { force: true, write_scratch: true, auto_format: true, }, ) } fn write_all_quit( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } write_all_impl( cx, WriteAllOptions { force: false, write_scratch: true, auto_format: true, }, )?; quit_all_impl(cx, false) } fn force_write_all_quit( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let _ = 
write_all_impl( cx, WriteAllOptions { force: true, write_scratch: true, auto_format: true, }, ); quit_all_impl(cx, true) } fn quit_all_impl(cx: &mut compositor::Context, force: bool) -> anyhow::Result<()> { cx.block_try_flush_writes()?; if !force { buffers_remaining_impl(cx.editor)?; } // close all views let views: Vec<_> = cx.editor.tree.views().map(|(view, _)| view.id).collect(); for view_id in views { cx.editor.close(view_id); } Ok(()) } fn quit_all(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } quit_all_impl(cx, false) } fn force_quit_all( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } quit_all_impl(cx, true) } fn cquit(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let exit_code = args .first() .and_then(|code| code.parse::().ok()) .unwrap_or(1); cx.editor.exit_code = exit_code; quit_all_impl(cx, false) } fn force_cquit(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let exit_code = args .first() .and_then(|code| code.parse::().ok()) .unwrap_or(1); cx.editor.exit_code = exit_code; quit_all_impl(cx, true) } fn theme(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> { let true_color = cx.editor.config.load().true_color || crate::true_color(); match event { PromptEvent::Abort => { cx.editor.unset_theme_preview(); } PromptEvent::Update => { if args.is_empty() { // Ensures that a preview theme gets cleaned up if the user backspaces until the prompt is empty. 
cx.editor.unset_theme_preview(); } else if let Some(theme_name) = args.first() { if let Ok(theme) = cx.editor.theme_loader.load(theme_name) { if !(true_color || theme.is_16_color()) { bail!("Unsupported theme: theme requires true color support"); } cx.editor.set_theme_preview(theme); }; }; } PromptEvent::Validate => { if let Some(theme_name) = args.first() { let theme = cx .editor .theme_loader .load(theme_name) .map_err(|err| anyhow::anyhow!("Could not load theme: {}", err))?; if !(true_color || theme.is_16_color()) { bail!("Unsupported theme: theme requires true color support"); } cx.editor.set_theme(theme); } else { let name = cx.editor.theme.name().to_string(); cx.editor.set_status(name); } } }; Ok(()) } fn yank_main_selection_to_clipboard( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } yank_primary_selection_impl(cx.editor, '+'); Ok(()) } fn yank_joined(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let doc = doc!(cx.editor); let default_sep = Cow::Borrowed(doc.line_ending.as_str()); let separator = args.first().unwrap_or(&default_sep); let register = cx .editor .selected_register .unwrap_or(cx.editor.config().default_yank_register); yank_joined_impl(cx.editor, separator, register); Ok(()) } fn yank_joined_to_clipboard( cx: &mut compositor::Context, args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let doc = doc!(cx.editor); let default_sep = Cow::Borrowed(doc.line_ending.as_str()); let separator = args.first().unwrap_or(&default_sep); yank_joined_impl(cx.editor, separator, '+'); Ok(()) } fn yank_main_selection_to_primary_clipboard( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } yank_primary_selection_impl(cx.editor, '*'); 
Ok(()) } fn yank_joined_to_primary_clipboard( cx: &mut compositor::Context, args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let doc = doc!(cx.editor); let default_sep = Cow::Borrowed(doc.line_ending.as_str()); let separator = args.first().unwrap_or(&default_sep); yank_joined_impl(cx.editor, separator, '*'); Ok(()) } fn paste_clipboard_after( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } paste(cx.editor, '+', Paste::After, 1); Ok(()) } fn paste_clipboard_before( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } paste(cx.editor, '+', Paste::Before, 1); Ok(()) } fn paste_primary_clipboard_after( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } paste(cx.editor, '*', Paste::After, 1); Ok(()) } fn paste_primary_clipboard_before( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } paste(cx.editor, '*', Paste::Before, 1); Ok(()) } fn replace_selections_with_clipboard( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } replace_with_yanked_impl(cx.editor, '+', 1); Ok(()) } fn replace_selections_with_primary_clipboard( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } replace_with_yanked_impl(cx.editor, '*', 1); Ok(()) } fn show_clipboard_provider( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } cx.editor .set_status(cx.editor.registers.clipboard_provider_name()); Ok(()) } fn 
change_current_directory( cx: &mut compositor::Context, args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let dir = match args.first().map(AsRef::as_ref) { Some("-") => cx .editor .get_last_cwd() .map(|path| Cow::Owned(path.to_path_buf())) .ok_or_else(|| anyhow!("No previous working directory"))?, Some(path) => helix_stdx::path::expand_tilde(Path::new(path)), None => Cow::Owned(home_dir()?), }; cx.editor.set_cwd(&dir).map_err(|err| { anyhow!( "Could not change working directory to '{}': {err}", dir.display() ) })?; cx.editor.set_status(format!( "Current working directory is now {}", helix_stdx::env::current_working_dir().display() )); Ok(()) } fn show_current_directory( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let cwd = helix_stdx::env::current_working_dir(); let message = format!("Current working directory is {}", cwd.display()); if cwd.exists() { cx.editor.set_status(message); } else { cx.editor.set_error(format!("{} (deleted)", message)); } Ok(()) } /// Sets the [`Document`]'s encoding.. fn set_encoding( cx: &mut compositor::Context, args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let doc = doc_mut!(cx.editor); if let Some(label) = args.first() { doc.set_encoding(label) } else { let encoding = doc.encoding().name().to_owned(); cx.editor.set_status(encoding); Ok(()) } } /// Shows info about the character under the primary cursor. 
fn get_character_info( cx: &mut compositor::Context, _args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let (view, doc) = current_ref!(cx.editor); let text = doc.text().slice(..); let grapheme_start = doc.selection(view.id).primary().cursor(text); let grapheme_end = graphemes::next_grapheme_boundary(text, grapheme_start); if grapheme_start == grapheme_end { return Ok(()); } let grapheme = text.slice(grapheme_start..grapheme_end).to_string(); let encoding = doc.encoding(); let printable = grapheme.chars().fold(String::new(), |mut s, c| { match c { '\0' => s.push_str("\\0"), '\t' => s.push_str("\\t"), '\n' => s.push_str("\\n"), '\r' => s.push_str("\\r"), _ => s.push(c), } s }); // Convert to Unicode codepoints if in UTF-8 let unicode = if encoding == encoding::UTF_8 { let mut unicode = " (".to_owned(); for (i, char) in grapheme.chars().enumerate() { if i != 0 { unicode.push(' '); } unicode.push_str("U+"); let codepoint: u32 = if char.is_ascii() { char.into() } else { // Not ascii means it will be multi-byte, so strip out the extra // bits that encode the length & mark continuation bytes let s = String::from(char); let bytes = s.as_bytes(); // First byte starts with 2-4 ones then a zero, so strip those off let first = bytes[0]; let codepoint = first & (0xFF >> (first.leading_ones() + 1)); let mut codepoint = u32::from(codepoint); // Following bytes start with 10 for byte in bytes.iter().skip(1) { codepoint <<= 6; codepoint += u32::from(*byte) & 0x3F; } codepoint }; write!(unicode, "{codepoint:0>4x}").unwrap(); } unicode.push(')'); unicode } else { String::new() }; // Give the decimal value for ascii characters let dec = if encoding.is_ascii_compatible() && grapheme.len() == 1 { format!(" Dec {}", grapheme.as_bytes()[0]) } else { String::new() }; let hex = { let mut encoder = encoding.new_encoder(); let max_encoded_len = encoder .max_buffer_length_from_utf8_without_replacement(grapheme.len()) .unwrap(); let 
mut bytes = Vec::with_capacity(max_encoded_len); let mut current_byte = 0; let mut hex = String::new(); for (i, char) in grapheme.chars().enumerate() { if i != 0 { hex.push_str(" +"); } let (result, _input_bytes_read) = encoder.encode_from_utf8_to_vec_without_replacement( &char.to_string(), &mut bytes, true, ); if let encoding::EncoderResult::Unmappable(char) = result { bail!("{char:?} cannot be mapped to {}", encoding.name()); } for byte in &bytes[current_byte..] { write!(hex, " {byte:0>2x}").unwrap(); } current_byte = bytes.len(); } hex }; cx.editor .set_status(format!("\"{printable}\"{unicode}{dec} Hex{hex}")); Ok(()) } /// Reload the [`Document`] from its source file. fn reload(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let scrolloff = cx.editor.config().scrolloff; let (view, doc) = current!(cx.editor); doc.reload(view, &cx.editor.diff_providers).map(|_| { view.ensure_cursor_in_view(doc, scrolloff); })?; if let Some(path) = doc.path() { cx.editor .language_servers .file_event_handler .file_changed(path.clone()); } Ok(()) } fn reload_all(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let scrolloff = cx.editor.config().scrolloff; let view_id = view!(cx.editor).id; let docs_view_ids: Vec<(DocumentId, Vec)> = cx .editor .documents_mut() .map(|doc| { let mut view_ids: Vec<_> = doc.selections().keys().cloned().collect(); if view_ids.is_empty() { doc.ensure_view_init(view_id); view_ids.push(view_id); }; (doc.id(), view_ids) }) .collect(); for (doc_id, view_ids) in docs_view_ids { let doc = doc_mut!(cx.editor, &doc_id); // Every doc is guaranteed to have at least 1 view at this point. let view = view_mut!(cx.editor, view_ids[0]); // Ensure that the view is synced with the document's history. 
view.sync_changes(doc); if let Err(error) = doc.reload(view, &cx.editor.diff_providers) { cx.editor.set_error(format!("{}", error)); continue; } if let Some(path) = doc.path() { cx.editor .language_servers .file_event_handler .file_changed(path.clone()); } for view_id in view_ids { let view = view_mut!(cx.editor, view_id); if view.doc.eq(&doc_id) { view.ensure_cursor_in_view(doc, scrolloff); } } } Ok(()) } /// Update the [`Document`] if it has been modified. fn update(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let (_view, doc) = current!(cx.editor); if doc.is_modified() { write(cx, args, event) } else { Ok(()) } } fn lsp_workspace_command( cx: &mut compositor::Context, args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let doc = doc!(cx.editor); let ls_id_commands = doc .language_servers_with_feature(LanguageServerFeature::WorkspaceCommand) .flat_map(|ls| { ls.capabilities() .execute_command_provider .iter() .flat_map(|options| options.commands.iter()) .map(|command| (ls.id(), command)) }); if args.is_empty() { let commands = ls_id_commands .map(|(ls_id, command)| { ( ls_id, helix_lsp::lsp::Command { title: command.clone(), command: command.clone(), arguments: None, }, ) }) .collect::>(); let callback = async move { let call: job::Callback = Callback::EditorCompositor(Box::new( move |_editor: &mut Editor, compositor: &mut Compositor| { let columns = [ui::PickerColumn::new( "title", |(_ls_id, command): &(_, helix_lsp::lsp::Command), _| { command.title.as_str().into() }, )]; let picker = ui::Picker::new( columns, 0, commands, (), move |cx, (ls_id, command), _action| { cx.editor.execute_lsp_command(command.clone(), *ls_id); }, ); compositor.push(Box::new(overlaid(picker))) }, )); Ok(call) }; cx.jobs.callback(callback); } else { let command = args[0].to_string(); let matches: Vec<_> = ls_id_commands 
.filter(|(_ls_id, c)| *c == &command) .collect(); match matches.as_slice() { [(ls_id, _command)] => { let arguments = args .get(1) .map(|rest| { serde_json::Deserializer::from_str(rest) .into_iter() .collect::, _>>() .map_err(|err| anyhow!("failed to parse arguments: {err}")) }) .transpose()? .filter(|args| !args.is_empty()); cx.editor.execute_lsp_command( helix_lsp::lsp::Command { title: command.clone(), arguments, command, }, *ls_id, ); } [] => { cx.editor.set_status(format!( "`{command}` is not supported for any language server" )); } _ => { cx.editor.set_status(format!( "`{command}` supported by multiple language servers" )); } } } Ok(()) } fn lsp_restart(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let editor_config = cx.editor.config.load(); let doc = doc!(cx.editor); let config = doc .language_config() .context("LSP not defined for the current document")?; let language_servers: Vec<_> = config .language_servers .iter() .map(|ls| ls.name.as_str()) .collect(); let language_servers = if args.is_empty() { language_servers } else { let (valid, invalid): (Vec<_>, Vec<_>) = args .iter() .map(|arg| arg.as_ref()) .partition(|name| language_servers.contains(name)); if !invalid.is_empty() { let s = if invalid.len() == 1 { "" } else { "s" }; bail!("Unknown language server{s}: {}", invalid.join(", ")); } valid }; let mut errors = Vec::new(); for server in language_servers.iter() { match cx .editor .language_servers .restart_server( server, config, doc.path(), &editor_config.workspace_lsp_roots, editor_config.lsp.snippets, ) .transpose() { // Ignore the executable-not-found error unless the server was explicitly requested // in the arguments. Err(helix_lsp::Error::ExecutableNotFound(_)) if !args.iter().any(|arg| arg == server) => {} Err(err) => errors.push(err.to_string()), _ => (), } } // This collect is needed because refresh_language_server would need to re-borrow editor. 
// NOTE(review): continuation of `lsp_restart` (refresh documents attached to
// the restarted servers), then `:lsp-stop` and the beginning of
// `:tree-sitter-scopes`. `Vec =` below has lost its generic argument in
// extraction — presumably `Vec<DocumentId>`; confirm against the repository.
    let document_ids_to_refresh: Vec = cx
        .editor
        .documents()
        .filter_map(|doc| match doc.language_config() {
            Some(config)
                if config.language_servers.iter().any(|ls| {
                    language_servers
                        .iter()
                        .any(|restarted_ls| restarted_ls == &ls.name)
                }) =>
            {
                Some(doc.id())
            }
            _ => None,
        })
        .collect();

    for document_id in document_ids_to_refresh {
        cx.editor.refresh_language_servers(document_id);
    }

    if errors.is_empty() {
        Ok(())
    } else {
        Err(anyhow::anyhow!(
            "Error restarting language servers: {}",
            errors.join(", ")
        ))
    }
}

// `:lsp-stop` — stop the named (or all) language servers attached to the
// current document and discard their per-document state.
fn lsp_stop(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let doc = doc!(cx.editor);
    let language_servers: Vec<_> = doc
        .language_servers()
        .map(|ls| ls.name().to_string())
        .collect();
    let language_servers = if args.is_empty() {
        language_servers
    } else {
        let (valid, invalid): (Vec<_>, Vec<_>) = args
            .iter()
            .map(|arg| arg.to_string())
            .partition(|name| language_servers.contains(name));
        if !invalid.is_empty() {
            let s = if invalid.len() == 1 { "" } else { "s" };
            bail!("Unknown language server{s}: {}", invalid.join(", "));
        }
        valid
    };

    for ls_name in &language_servers {
        cx.editor.language_servers.stop(ls_name);

        for doc in cx.editor.documents_mut() {
            if let Some(client) = doc.remove_language_server_by_name(ls_name) {
                // Diagnostics and inlay hints from the stopped server are stale.
                doc.clear_diagnostics_for_language_server(client.id());
                doc.reset_all_inlay_hints();
                doc.inlay_hints_oudated = true;
            }
        }
    }

    Ok(())
}

// `:tree-sitter-scopes` — show the tree-sitter scopes at the primary cursor
// in a popup.
fn tree_sitter_scopes(
    cx: &mut compositor::Context,
    _args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let (view, doc) = current!(cx.editor);
    let text = doc.text().slice(..);

    let pos = doc.selection(view.id).primary().cursor(text);
    let scopes = indent::get_scopes(doc.syntax(), text, pos);

    // NOTE(review): the closing fence below is four backticks — confirm intentional.
    let contents = format!("```json\n{:?}\n````", scopes);

    let callback = async move {
        let call: job::Callback = Callback::EditorCompositor(Box::new(
            move |editor: &mut Editor, compositor: &mut
// NOTE(review): continuation of `tree_sitter_scopes` (popup display), then
// `:tree-sitter-highlight-name`, which reports the theme scope used to
// highlight the syntax node under the primary cursor.
// `-> Option {` below has lost its generic argument in extraction —
// presumably `Option<Highlight>`; confirm against the repository.
Compositor| {
                let contents = ui::Markdown::new(contents, editor.syn_loader.clone());
                let popup = Popup::new("hover", contents).auto_close(true);
                compositor.replace_or_push("hover", popup);
            },
        ));
        Ok(call)
    };
    cx.jobs.callback(callback);

    Ok(())
}

fn tree_sitter_highlight_name(
    cx: &mut compositor::Context,
    _args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    // Walk the highlight events for the viewport and return the innermost
    // highlight that is still open when the cursor's node is emitted.
    fn find_highlight_at_cursor(
        cx: &mut compositor::Context<'_>,
    ) -> Option {
        use helix_core::syntax::HighlightEvent;

        let (view, doc) = current!(cx.editor);
        let syntax = doc.syntax()?;
        let text = doc.text().slice(..);
        let cursor = doc.selection(view.id).primary().cursor(text);
        let byte = text.char_to_byte(cursor);
        let node = syntax.descendant_for_byte_range(byte, byte)?;
        // Query the same range as the one used in syntax highlighting.
        let range = {
            // Calculate viewport byte ranges:
            let row = text.char_to_line(doc.view_offset(view.id).anchor.min(text.len_chars()));
            // Saturating subs to make it inclusive zero indexing.
            let last_line = text.len_lines().saturating_sub(1);
            let height = view.inner_area(doc).height;
            let last_visible_line = (row + height as usize).saturating_sub(1).min(last_line);
            let start = text.line_to_byte(row.min(last_line));
            let end = text.line_to_byte(last_visible_line + 1);

            start..end
        };

        let mut highlight = None;

        for event in syntax.highlight_iter(text, Some(range), None) {
            match event.unwrap() {
                HighlightEvent::Source { start, end }
                    if start == node.start_byte() && end == node.end_byte() =>
                {
                    return highlight;
                }
                HighlightEvent::HighlightStart(hl) => {
                    highlight = Some(hl);
                }
                _ => (),
            }
        }

        None
    }

    if event != PromptEvent::Validate {
        return Ok(());
    }

    let Some(highlight) = find_highlight_at_cursor(cx) else {
        return Ok(());
    };

    let content = cx.editor.theme.scope(highlight.0).to_string();

    let callback = async move {
        let call: job::Callback = Callback::EditorCompositor(Box::new(
            move |editor: &mut Editor, compositor: &mut Compositor| {
                let content = ui::Markdown::new(content,
// NOTE(review): tail of `tree_sitter_highlight_name`'s popup, then the split
// commands (`:vsplit`, `:hsplit`, `:vsplit-new`, `:hsplit-new`), `:debug-eval`,
// and the start of `:debug-start`.
editor.syn_loader.clone());
                let popup = Popup::new("hover", content).auto_close(true);
                compositor.replace_or_push("hover", popup);
            },
        ));
        Ok(call)
    };
    cx.jobs.callback(callback);

    Ok(())
}

// `:vsplit` — open file(s) in vertical splits, or split the current view
// when no path is given.
fn vsplit(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    if args.is_empty() {
        split(cx.editor, Action::VerticalSplit);
    } else {
        for arg in args {
            cx.editor
                .open(&PathBuf::from(arg.as_ref()), Action::VerticalSplit)?;
        }
    }

    Ok(())
}

// `:hsplit` — like `:vsplit` but horizontal.
fn hsplit(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    if args.is_empty() {
        split(cx.editor, Action::HorizontalSplit);
    } else {
        for arg in args {
            cx.editor
                .open(&PathBuf::from(arg.as_ref()), Action::HorizontalSplit)?;
        }
    }

    Ok(())
}

// `:vsplit-new` — open a fresh scratch buffer in a vertical split.
fn vsplit_new(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    cx.editor.new_file(Action::VerticalSplit);

    Ok(())
}

// `:hsplit-new` — open a fresh scratch buffer in a horizontal split.
fn hsplit_new(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    cx.editor.new_file(Action::HorizontalSplit);

    Ok(())
}

// `:debug-eval` — evaluate an expression in the active debugger stack frame
// and show the result in the statusline.
fn debug_eval(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    if let Some(debugger) = cx.editor.debugger.as_mut() {
        let (frame, thread_id) = match (debugger.active_frame, debugger.thread_id) {
            (Some(frame), Some(thread_id)) => (frame, thread_id),
            _ => {
                bail!("Cannot find current stack frame to access variables")
            }
        };

        // TODO: support no frame_id
        let frame_id = debugger.stack_frames[&thread_id][frame].id;
        let response = helix_lsp::block_on(debugger.eval(args.join(" "), Some(frame_id)))?;
        cx.editor.set_status(response.result);
    }
    Ok(())
}

fn debug_start(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event !=
// NOTE(review): remainder of `:debug-start`, then `:debug-remote`, `:tutor`,
// and the goto-line-number preview helpers.
// `args[0].parse::()?` below has lost its generic argument in extraction —
// presumably `parse::<usize>()`; confirm against the repository.
PromptEvent::Validate {
        return Ok(());
    }
    let mut args: Vec<_> = args.into_iter().collect();
    // First positional (if any) is the debug configuration name; the rest are
    // forwarded as template parameters.
    let name = match args.len() {
        0 => None,
        _ => Some(args.remove(0)),
    };
    dap_start_impl(cx, name.as_deref(), None, Some(args))
}

// `:debug-remote` — connect to a debug adapter at an optional address.
fn debug_remote(
    cx: &mut compositor::Context,
    args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let mut args: Vec<_> = args.into_iter().collect();
    let address = match args.len() {
        0 => None,
        _ => Some(args.remove(0).parse()?),
    };
    let name = match args.len() {
        0 => None,
        _ => Some(args.remove(0)),
    };
    dap_start_impl(cx, name.as_deref(), address, Some(args))
}

// `:tutor` — open the bundled tutorial as a pathless buffer.
fn tutor(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let path = helix_loader::runtime_file(Path::new("tutor"));
    cx.editor.open(&path, Action::Replace)?;
    // Unset path to prevent accidentally saving to the original tutor file.
    doc_mut!(cx.editor).set_path(None);
    Ok(())
}

// Restore the selection that was saved before a goto-line-number preview began.
fn abort_goto_line_number_preview(cx: &mut compositor::Context) {
    if let Some(last_selection) = cx.editor.last_selection.take() {
        let scrolloff = cx.editor.config().scrolloff;

        let (view, doc) = current!(cx.editor);
        doc.set_selection(view.id, last_selection);
        view.ensure_cursor_in_view(doc, scrolloff);
    }
}

// Live-preview the target line while the user is still typing; the
// pre-preview selection is stashed in `editor.last_selection` so it can be
// restored on abort.
fn update_goto_line_number_preview(cx: &mut compositor::Context, args: Args) -> anyhow::Result<()> {
    cx.editor.last_selection.get_or_insert_with(|| {
        let (view, doc) = current!(cx.editor);
        doc.selection(view.id).clone()
    });

    let scrolloff = cx.editor.config().scrolloff;
    let line = args[0].parse::()?;
    goto_line_without_jumplist(
        cx.editor,
        NonZeroUsize::new(line),
        if cx.editor.mode == Mode::Select {
            Movement::Extend
        } else {
            Movement::Move
        },
    );

    let (view, doc) = current!(cx.editor);
    view.ensure_cursor_in_view(doc, scrolloff);

    Ok(())
}

pub(super) fn goto_line_number(
    cx: &mut compositor::Context,
    args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    match event {
// NOTE(review): the goto-line-number prompt-event dispatch, then `:get-option`.
        PromptEvent::Abort => abort_goto_line_number_preview(cx),
        PromptEvent::Validate => {
            // If we are invoked directly via a keybinding, Validate is
            // sent without any prior Update events. Ensure the cursor
            // is moved to the appropriate location.
            update_goto_line_number_preview(cx, args)?;

            let last_selection = cx
                .editor
                .last_selection
                .take()
                .expect("update_goto_line_number_preview should always set last_selection");

            let (view, doc) = current!(cx.editor);
            view.jumps.push((doc.id(), last_selection));
        }

        // When a user hits backspace and there are no numbers left,
        // we can bring them back to their original selection. If they
        // begin typing numbers again, we'll start a new preview session.
        PromptEvent::Update if args.is_empty() => abort_goto_line_number_preview(cx),
        PromptEvent::Update => update_goto_line_number_preview(cx, args)?,
    }

    Ok(())
}

// Fetch the current value of a config option and output as status.
fn get_option(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let key = &args[0].to_lowercase();
    let key_error = || anyhow::anyhow!("Unknown key `{}`", key);

    // Serialize the live config to JSON and navigate to the requested key
    // with a JSON pointer (dots in the key become `/`).
    let config = serde_json::json!(cx.editor.config().deref());
    let pointer = format!("/{}", key.replace('.', "/"));
    let value = config.pointer(&pointer).ok_or_else(key_error)?;

    cx.editor.set_status(value.to_string());
    Ok(())
}

/// Change config at runtime. Access nested values by dot syntax, for
/// example to disable smart case search, use `:set search.smart-case false`.
fn set_option(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let (key, arg) = (&args[0].to_lowercase(), args[1].trim()); let key_error = || anyhow::anyhow!("Unknown key `{}`", key); let field_error = |_| anyhow::anyhow!("Could not parse field `{}`", arg); let mut config = serde_json::json!(&cx.editor.config().deref()); let pointer = format!("/{}", key.replace('.', "/")); let value = config.pointer_mut(&pointer).ok_or_else(key_error)?; *value = if value.is_string() { // JSON strings require quotes, so we can't .parse() directly Value::String(arg.to_string()) } else { arg.parse().map_err(field_error)? }; let config = serde_json::from_value(config).map_err(field_error)?; cx.editor .config_events .0 .send(ConfigEvent::Update(config))?; Ok(()) } /// Toggle boolean config option at runtime. Access nested values by dot /// syntax, for example to toggle smart case search, use `:toggle search.smart- /// case`. fn toggle_option( cx: &mut compositor::Context, args: Args, event: PromptEvent, ) -> anyhow::Result<()> { if event != PromptEvent::Validate { return Ok(()); } let key = &args[0].to_lowercase(); let key_error = || anyhow::anyhow!("Unknown key `{}`", key); let mut config = serde_json::json!(&cx.editor.config().deref()); let pointer = format!("/{}", key.replace('.', "/")); let value = config.pointer_mut(&pointer).ok_or_else(key_error)?; *value = match value { Value::Bool(ref value) => { ensure!( args.len() == 1, "Bad arguments. For boolean configurations use: `:toggle {key}`" ); Value::Bool(!value) } Value::String(ref value) => { ensure!( args.len() == 2, "Bad arguments. For string configurations use: `:toggle {key} val1 val2 ...`", ); // For string values, parse the input according to normal command line rules. 
let values: Vec<_> = command_line::Tokenizer::new(&args[1], true) .map(|res| res.map(|token| token.content)) .collect::>() .map_err(|err| anyhow!("failed to parse values: {err}"))?; Value::String( values .iter() .skip_while(|e| *e != value) .nth(1) .map(AsRef::as_ref) .unwrap_or_else(|| &values[0]) .to_string(), ) } Value::Null => bail!("Configuration {key} cannot be toggled"), Value::Number(_) | Value::Array(_) | Value::Object(_) => { ensure!( args.len() == 2, "Bad arguments. For {kind} configurations use: `:toggle {key} val1 val2 ...`", kind = match value { Value::Number(_) => "number", Value::Array(_) => "array", Value::Object(_) => "object", _ => unreachable!(), } ); // For numbers, arrays and objects, parse each argument with // `serde_json::StreamDeserializer`. let values: Vec = serde_json::Deserializer::from_str(&args[1]) .into_iter() .collect::>() .map_err(|err| anyhow!("failed to parse value: {err}"))?; if let Some(wrongly_typed_value) = values .iter() .find(|v| std::mem::discriminant(*v) != std::mem::discriminant(&*value)) { bail!("value '{wrongly_typed_value}' has a different type than '{value}'"); } values .iter() .skip_while(|e| *e != value) .nth(1) .unwrap_or(&values[0]) .clone() } }; let status = format!("'{key}' is now set to {value}"); let config = serde_json::from_value(config) .map_err(|err| anyhow::anyhow!("Failed to parse config: {err}"))?; cx.editor .config_events .0 .send(ConfigEvent::Update(config))?; cx.editor.set_status(status); Ok(()) } /// Change the language of the current buffer at runtime. 
// NOTE(review): `:lang`, `:sort` and its helper, and the head of `:reflow`.
// `num.parse::()` at the end has lost its generic argument in extraction —
// presumably `parse::<usize>()`; confirm against the repository.
fn language(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    if args.is_empty() {
        // No argument: just report the buffer's current language.
        let doc = doc!(cx.editor);
        let language = &doc.language_name().unwrap_or(DEFAULT_LANGUAGE_NAME);
        cx.editor.set_status(language.to_string());
        return Ok(());
    }

    let doc = doc_mut!(cx.editor);

    if &args[0] == DEFAULT_LANGUAGE_NAME {
        doc.set_language(None, None)
    } else {
        doc.set_language_by_language_id(&args[0], cx.editor.syn_loader.clone())?;
    }
    doc.detect_indent_and_line_ending();

    let id = doc.id();
    cx.editor.refresh_language_servers(id);
    // Re-fetch diagnostics from the (possibly different) language servers
    // attached after the language change.
    let doc = doc_mut!(cx.editor);
    let diagnostics =
        Editor::doc_diagnostics(&cx.editor.language_servers, &cx.editor.diagnostics, doc);
    doc.replace_diagnostics(diagnostics, &[], None);
    Ok(())
}

fn sort(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    sort_impl(cx, args.has_flag("reverse"))
}

// Sort the selected fragments lexicographically in place
// (descending when `reverse` is set).
fn sort_impl(cx: &mut compositor::Context, reverse: bool) -> anyhow::Result<()> {
    let scrolloff = cx.editor.config().scrolloff;
    let (view, doc) = current!(cx.editor);
    let text = doc.text().slice(..);

    let selection = doc.selection(view.id);

    if selection.len() == 1 {
        bail!("Sorting requires multiple selections. Hint: split selection first");
    }

    let mut fragments: Vec<_> = selection
        .slices(text)
        .map(|fragment| fragment.chunks().collect())
        .collect();

    fragments.sort_by(match reverse {
        true => |a: &Tendril, b: &Tendril| b.cmp(a),
        false => |a: &Tendril, b: &Tendril| a.cmp(b),
    });

    let transaction = Transaction::change(
        doc.text(),
        selection
            .into_iter()
            .zip(fragments)
            .map(|(s, fragment)| (s.from(), s.to(), Some(fragment))),
    );

    doc.apply(&transaction, view.id);
    doc.append_changes_to_history(view);
    view.ensure_cursor_in_view(doc, scrolloff);

    Ok(())
}

fn reflow(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let scrolloff = cx.editor.config().scrolloff;
    let (view, doc) = current!(cx.editor);

    // Find the text_width by checking the following sources in order:
    // - The passed argument in `args`
    // - The configured text-width for this language in languages.toml
    // - The configured text-width in the config.toml
    let text_width: usize = args
        .first()
        .map(|num| num.parse::())
        .transpose()?
// NOTE(review): tail of `:reflow`, then `:tree-sitter-subtree`,
// `:config-open`, `:config-open-workspace`, and a dangling `fn` keyword that
// the next source line completes (`open_log`).
        .unwrap_or_else(|| doc.text_width());

    let rope = doc.text();

    let selection = doc.selection(view.id);
    // Hard-wrap each selected fragment independently at `text_width`.
    let transaction = Transaction::change_by_selection(rope, selection, |range| {
        let fragment = range.fragment(rope.slice(..));
        let reflowed_text = helix_core::wrap::reflow_hard_wrap(&fragment, text_width);

        (range.from(), range.to(), Some(reflowed_text))
    });

    doc.apply(&transaction, view.id);
    doc.append_changes_to_history(view);
    view.ensure_cursor_in_view(doc, scrolloff);

    Ok(())
}

// `:tree-sitter-subtree` — pretty-print the syntax subtree covering the
// primary selection in a popup.
fn tree_sitter_subtree(
    cx: &mut compositor::Context,
    _args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let (view, doc) = current!(cx.editor);

    if let Some(syntax) = doc.syntax() {
        let primary_selection = doc.selection(view.id).primary();
        let text = doc.text();
        let from = text.char_to_byte(primary_selection.from());
        let to = text.char_to_byte(primary_selection.to());
        if let Some(selected_node) = syntax.descendant_for_byte_range(from, to) {
            let mut contents = String::from("```tsq\n");
            helix_core::syntax::pretty_print_tree(&mut contents, selected_node)?;
            contents.push_str("\n```");

            let callback = async move {
                let call: job::Callback = Callback::EditorCompositor(Box::new(
                    move |editor: &mut Editor, compositor: &mut Compositor| {
                        let contents = ui::Markdown::new(contents, editor.syn_loader.clone());
                        let popup = Popup::new("hover", contents).auto_close(true);
                        compositor.replace_or_push("hover", popup);
                    },
                ));
                Ok(call)
            };

            cx.jobs.callback(callback);
        }
    }

    Ok(())
}

// `:config-open` — edit the user configuration file.
fn open_config(
    cx: &mut compositor::Context,
    _args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    cx.editor
        .open(&helix_loader::config_file(), Action::Replace)?;
    Ok(())
}

// `:config-open-workspace` — edit the workspace-local configuration file.
fn open_workspace_config(
    cx: &mut compositor::Context,
    _args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    cx.editor
        .open(&helix_loader::workspace_config_file(), Action::Replace)?;
    Ok(())
}

fn
// NOTE(review): completes the `fn` begun on the previous source line
// (`open_log`), then `:config-reload`, the shell append/insert/pipe family,
// and the start of `:run-shell-command`.
open_log(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    cx.editor.open(&helix_loader::log_file(), Action::Replace)?;
    Ok(())
}

// `:config-reload` — ask the config watcher to re-read configuration files.
fn refresh_config(
    cx: &mut compositor::Context,
    _args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    cx.editor.config_events.0.send(ConfigEvent::Refresh)?;
    Ok(())
}

// `:append-output` — run a shell command and append its output after each selection.
fn append_output(
    cx: &mut compositor::Context,
    args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    shell(cx, &args.join(" "), &ShellBehavior::Append);
    Ok(())
}

// `:insert-output` — run a shell command and insert its output before each selection.
fn insert_output(
    cx: &mut compositor::Context,
    args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    shell(cx, &args.join(" "), &ShellBehavior::Insert);
    Ok(())
}

fn pipe_to(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    pipe_impl(cx, args, event, &ShellBehavior::Ignore)
}

fn pipe(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    pipe_impl(cx, args, event, &ShellBehavior::Replace)
}

// Shared body for `:pipe` / `:pipe-to`; `behavior` decides what happens to
// the command's output.
fn pipe_impl(
    cx: &mut compositor::Context,
    args: Args,
    event: PromptEvent,
    behavior: &ShellBehavior,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    shell(cx, &args.join(" "), behavior);
    Ok(())
}

// `:run-shell-command` — run a command asynchronously and show its output
// (if any) in a popup near the cursor.
fn run_shell_command(
    cx: &mut compositor::Context,
    args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let shell = cx.editor.config().shell.clone();
    let args = args.join(" ");

    let callback = async move {
        let output = shell_impl_async(&shell, &args, None).await?;
        let call: job::Callback = Callback::EditorCompositor(Box::new(
            move |editor: &mut Editor, compositor: &mut Compositor| {
                if !output.is_empty() {
                    let contents = ui::Markdown::new(
                        format!("```sh\n{}\n```", output.trim_end()),
                        editor.syn_loader.clone(),
                    );
                    let popup =
// NOTE(review): tail of `:run-shell-command` (popup + status), then
// `:reset-diff-change` and the head of `:clear-register`.
Popup::new("shell", contents).position(Some(
                        helix_core::Position::new(editor.cursor().0.unwrap_or_default().row, 2),
                    ));
                    compositor.replace_or_push("shell", popup);
                }
                editor.set_status("Command run");
            },
        ));
        Ok(call)
    };
    cx.jobs.callback(callback);

    Ok(())
}

// `:reset-diff-change` — revert every diff hunk intersecting the selection
// back to the diff base.
fn reset_diff_change(
    cx: &mut compositor::Context,
    _args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let editor = &mut cx.editor;
    let scrolloff = editor.config().scrolloff;

    let (view, doc) = current!(editor);
    let Some(handle) = doc.diff_handle() else {
        bail!("Diff is not available in the current buffer")
    };

    let diff = handle.load();
    let doc_text = doc.text().slice(..);
    let diff_base = diff.diff_base();
    let mut changes = 0;

    // Replace each intersecting hunk with the corresponding span from the
    // diff base, counting how many hunks are reverted.
    let transaction = Transaction::change(
        doc.text(),
        diff.hunks_intersecting_line_ranges(doc.selection(view.id).line_ranges(doc_text))
            .map(|hunk| {
                changes += 1;
                let start = diff_base.line_to_char(hunk.before.start as usize);
                let end = diff_base.line_to_char(hunk.before.end as usize);
                let text: Tendril = diff_base.slice(start..end).chunks().collect();
                (
                    doc_text.line_to_char(hunk.after.start as usize),
                    doc_text.line_to_char(hunk.after.end as usize),
                    (!text.is_empty()).then_some(text),
                )
            }),
    );
    if changes == 0 {
        bail!("There are no changes under any selection");
    }

    drop(diff); // make borrow check happy
    doc.apply(&transaction, view.id);
    doc.append_changes_to_history(view);
    view.ensure_cursor_in_view(doc, scrolloff);
    cx.editor.set_status(format!(
        "Reset {changes} change{}",
        if changes == 1 { "" } else { "s" }
    ));
    Ok(())
}

// `:clear-register` — clear one register, or all registers when no argument
// is given.
fn clear_register(
    cx: &mut compositor::Context,
    args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    if args.is_empty() {
        cx.editor.registers.clear();
        cx.editor.set_status("All registers cleared");
        return Ok(());
    }

    ensure!(
        args[0].chars().count() == 1,
        format!("Invalid register {}", &args[0])
    );
    let register = args[0].chars().next().unwrap_or_default();
    if
// NOTE(review): completes the `if` begun on the previous source line
// (`:clear-register`), then `:redraw`, `:move`, `:yank-diagnostic`, and the
// head of `:read`.
cx.editor.registers.remove(register) {
        cx.editor
            .set_status(format!("Register {} cleared", register));
    } else {
        cx.editor
            .set_error(format!("Register {} not found", register));
    }
    Ok(())
}

// `:redraw` — force a full redraw of the compositor on the next render.
fn redraw(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let callback = Box::pin(async move {
        let call: job::Callback =
            job::Callback::EditorCompositor(Box::new(|_editor, compositor| {
                compositor.need_full_redraw();
            }));

        Ok(call)
    });

    cx.jobs.callback(callback);

    Ok(())
}

// `:move` — move/rename the file backing the current buffer.
fn move_buffer(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let doc = doc!(cx.editor);
    let old_path = doc
        .path()
        .context("Scratch buffer cannot be moved. Use :write instead")?
        .clone();
    let new_path = args.first().unwrap().to_string();
    if let Err(err) = cx.editor.move_path(&old_path, new_path.as_ref()) {
        bail!("Could not move file: {err}");
    }
    Ok(())
}

// `:yank-diagnostic` — yank the messages of diagnostics under the primary
// selection into a register (defaults to the system clipboard register `+`).
fn yank_diagnostic(
    cx: &mut compositor::Context,
    args: Args,
    event: PromptEvent,
) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let reg = match args.first() {
        Some(s) => {
            ensure!(s.chars().count() == 1, format!("Invalid register {s}"));
            s.chars().next().unwrap()
        }
        None => '+',
    };

    let (view, doc) = current_ref!(cx.editor);
    let primary = doc.selection(view.id).primary();

    // Look only for diagnostics that intersect with the primary selection
    let diag: Vec<_> = doc
        .diagnostics()
        .iter()
        .filter(|d| primary.overlaps(&helix_core::Range::new(d.range.start, d.range.end)))
        .map(|d| d.message.clone())
        .collect();
    let n = diag.len();
    if n == 0 {
        bail!("No diagnostics under primary selection");
    }

    cx.editor.registers.write(reg, diag)?;
    cx.editor.set_status(format!(
        "Yanked {n} diagnostic{} to register {reg}",
        if n == 1 { "" } else { "s" }
    ));
    Ok(())
}

fn read(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event !=
// NOTE(review): remainder of `:read` (insert a file's contents at each
// selection), then `:echo`, the `noop` handler, and the comments introducing
// the shell command signature constants on the next source line.
PromptEvent::Validate {
        return Ok(());
    }
    let scrolloff = cx.editor.config().scrolloff;
    let (view, doc) = current!(cx.editor);

    let filename = args.first().unwrap();
    let path = helix_stdx::path::expand_tilde(PathBuf::from(filename.to_string()));
    ensure!(
        path.exists() && path.is_file(),
        "path is not a file: {:?}",
        path
    );
    let file = std::fs::File::open(path).map_err(|err| anyhow!("error opening file: {}", err))?;
    let mut reader = BufReader::new(file);
    // Decode using the document's encoding so the inserted text matches.
    let (contents, _, _) = read_to_string(&mut reader, Some(doc.encoding()))
        .map_err(|err| anyhow!("error reading file: {}", err))?;
    let contents = Tendril::from(contents);
    let selection = doc.selection(view.id);
    let transaction = Transaction::insert(doc.text(), selection, contents);
    doc.apply(&transaction, view.id);
    doc.append_changes_to_history(view);
    view.ensure_cursor_in_view(doc, scrolloff);

    Ok(())
}

// `:echo` — join all arguments with single spaces and show them in the
// statusline.
fn echo(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
    if event != PromptEvent::Validate {
        return Ok(());
    }

    let output = args.into_iter().fold(String::new(), |mut acc, arg| {
        if !acc.is_empty() {
            acc.push(' ');
        }
        acc.push_str(&arg);
        acc
    });
    cx.editor.set_status(output);

    Ok(())
}

// Placeholder handler for commands that intentionally do nothing.
fn noop(_cx: &mut compositor::Context, _args: Args, _event: PromptEvent) -> anyhow::Result<()> {
    Ok(())
}

// TODO: SHELL_SIGNATURE should specify var args for arguments, so that just completers::filename can be used,
// but Signature does not yet allow for var args.

/// This command handles all of its input as-is with no quoting or flags.
// NOTE(review): shell-command signature/completer constants and the head of
// `TYPABLE_COMMAND_LIST`; the array literal continues past the end of this
// chunk, so only the leading entries are visible here.
const SHELL_SIGNATURE: Signature = Signature {
    positionals: (1, Some(2)),
    raw_after: Some(1),
    ..Signature::DEFAULT
};

const SHELL_COMPLETER: CommandCompleter = CommandCompleter::positional(&[
    // Command name
    completers::program,
    // Shell argument(s)
    completers::repeating_filenames,
]);

pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
    TypableCommand {
        name: "quit", aliases: &["q"], doc: "Close the current view.",
        fun: quit, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "quit!", aliases: &["q!"],
        doc: "Force close the current view, ignoring unsaved changes.",
        fun: force_quit, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "open", aliases: &["o", "edit", "e"],
        doc: "Open a file from disk into the current view.",
        fun: open, completer: CommandCompleter::all(completers::filename),
        signature: Signature { positionals: (1, None), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "buffer-close", aliases: &["bc", "bclose"],
        doc: "Close the current buffer.",
        fun: buffer_close, completer: CommandCompleter::all(completers::buffer),
        signature: Signature { positionals: (0, None), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "buffer-close!", aliases: &["bc!", "bclose!"],
        doc: "Close the current buffer forcefully, ignoring unsaved changes.",
        fun: force_buffer_close, completer: CommandCompleter::all(completers::buffer),
        signature: Signature { positionals: (0, None), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "buffer-close-others", aliases: &["bco", "bcloseother"],
        doc: "Close all buffers but the currently focused one.",
        fun: buffer_close_others, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "buffer-close-others!", aliases: &["bco!", "bcloseother!"],
        doc: "Force close all buffers but the currently focused one.",
        fun: force_buffer_close_others, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "buffer-close-all", aliases: &["bca", "bcloseall"],
        doc: "Close all buffers without quitting.",
        fun: buffer_close_all, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "buffer-close-all!", aliases: &["bca!", "bcloseall!"],
        doc: "Force close all buffers ignoring unsaved changes without quitting.",
        fun: force_buffer_close_all, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "buffer-next", aliases: &["bn", "bnext"],
        doc: "Goto next buffer.",
        fun: buffer_next, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "buffer-previous", aliases: &["bp", "bprev"],
        doc: "Goto previous buffer.",
        fun: buffer_previous, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "write", aliases: &["w"],
        doc: "Write changes to disk. Accepts an optional path (:write some/path.txt)",
        fun: write, completer: CommandCompleter::positional(&[completers::filename]),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "write!", aliases: &["w!"],
        doc: "Force write changes to disk creating necessary subdirectories. Accepts an optional path (:write! some/path.txt)",
        fun: force_write, completer: CommandCompleter::positional(&[completers::filename]),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "write-buffer-close", aliases: &["wbc"],
        doc: "Write changes to disk and closes the buffer. Accepts an optional path (:write-buffer-close some/path.txt)",
        fun: write_buffer_close, completer: CommandCompleter::positional(&[completers::filename]),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "write-buffer-close!", aliases: &["wbc!"],
        doc: "Force write changes to disk creating necessary subdirectories and closes the buffer. Accepts an optional path (:write-buffer-close! some/path.txt)",
        fun: force_write_buffer_close, completer: CommandCompleter::positional(&[completers::filename]),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "new", aliases: &["n"],
        doc: "Create a new scratch buffer.",
        fun: new_file, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "format", aliases: &["fmt"],
        doc: "Format the file using an external formatter or language server.",
        fun: format, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "indent-style", aliases: &[],
        doc: "Set the indentation style for editing. ('t' for tabs or 1-16 for number of spaces.)",
        fun: set_indent_style, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "line-ending", aliases: &[],
        #[cfg(not(feature = "unicode-lines"))]
        doc: "Set the document's default line ending. Options: crlf, lf.",
        #[cfg(feature = "unicode-lines")]
        doc: "Set the document's default line ending. Options: crlf, lf, cr, ff, nel.",
        fun: set_line_ending, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "earlier", aliases: &["ear"],
        doc: "Jump back to an earlier point in edit history. Accepts a number of steps or a time span.",
        fun: earlier, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "later", aliases: &["lat"],
        doc: "Jump to a later point in edit history. Accepts a number of steps or a time span.",
        fun: later, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "write-quit", aliases: &["wq", "x"],
        doc: "Write changes to disk and close the current view. Accepts an optional path (:wq some/path.txt)",
        fun: write_quit, completer: CommandCompleter::positional(&[completers::filename]),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "write-quit!", aliases: &["wq!", "x!"],
        doc: "Write changes to disk and close the current view forcefully. Accepts an optional path (:wq! some/path.txt)",
        fun: force_write_quit, completer: CommandCompleter::positional(&[completers::filename]),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "write-all", aliases: &["wa"],
        doc: "Write changes from all buffers to disk.",
        fun: write_all, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "write-all!", aliases: &["wa!"],
        doc: "Forcefully write changes from all buffers to disk creating necessary subdirectories.",
        fun: force_write_all, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "write-quit-all", aliases: &["wqa", "xa"],
        doc: "Write changes from all buffers to disk and close all views.",
        fun: write_all_quit, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "write-quit-all!", aliases: &["wqa!", "xa!"],
        doc: "Write changes from all buffers to disk and close all views forcefully (ignoring unsaved changes).",
        fun: force_write_all_quit, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "quit-all", aliases: &["qa"],
        doc: "Close all views.",
        fun: quit_all, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "quit-all!", aliases: &["qa!"],
        doc: "Force close all views ignoring unsaved changes.",
        fun: force_quit_all, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "cquit", aliases: &["cq"],
        doc: "Quit with exit code (default 1). Accepts an optional integer exit code (:cq 2).",
        fun: cquit, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "cquit!", aliases: &["cq!"],
        doc: "Force quit with exit code (default 1) ignoring unsaved changes. Accepts an optional integer exit code (:cq! 2).",
        fun: force_cquit, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "theme", aliases: &[],
        doc: "Change the editor theme (show current theme if no name specified).",
        fun: theme, completer: CommandCompleter::positional(&[completers::theme]),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "yank-join", aliases: &[],
        doc: "Yank joined selections. A separator can be provided as first argument. Default value is newline.",
        fun: yank_joined, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "clipboard-yank", aliases: &[],
        doc: "Yank main selection into system clipboard.",
        fun: yank_main_selection_to_clipboard, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "clipboard-yank-join", aliases: &[],
        doc: "Yank joined selections into system clipboard. A separator can be provided as first argument. Default value is newline.", // FIXME: current UI can't display long doc.
        fun: yank_joined_to_clipboard, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "primary-clipboard-yank", aliases: &[],
        doc: "Yank main selection into system primary clipboard.",
        fun: yank_main_selection_to_primary_clipboard, completer: CommandCompleter::none(),
        signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT },
    },
    TypableCommand {
        name: "primary-clipboard-yank-join", aliases: &[],
        doc: "Yank joined selections into system primary clipboard. A separator can be provided as first argument. Default value is newline.", // FIXME: current UI can't display long doc.
fun: yank_joined_to_primary_clipboard, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT }, }, TypableCommand { name: "clipboard-paste-after", aliases: &[], doc: "Paste system clipboard after selections.", fun: paste_clipboard_after, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "clipboard-paste-before", aliases: &[], doc: "Paste system clipboard before selections.", fun: paste_clipboard_before, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "clipboard-paste-replace", aliases: &[], doc: "Replace selections with content of system clipboard.", fun: replace_selections_with_clipboard, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "primary-clipboard-paste-after", aliases: &[], doc: "Paste primary clipboard after selections.", fun: paste_primary_clipboard_after, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "primary-clipboard-paste-before", aliases: &[], doc: "Paste primary clipboard before selections.", fun: paste_primary_clipboard_before, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "primary-clipboard-paste-replace", aliases: &[], doc: "Replace selections with content of system primary clipboard.", fun: replace_selections_with_primary_clipboard, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "show-clipboard-provider", aliases: &[], doc: "Show clipboard provider name in status bar.", fun: show_clipboard_provider, completer: CommandCompleter::none(), signature: Signature { 
positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "change-current-directory", aliases: &["cd"], doc: "Change the current working directory.", fun: change_current_directory, completer: CommandCompleter::positional(&[completers::directory]), signature: Signature { positionals: (1, Some(1)), ..Signature::DEFAULT }, }, TypableCommand { name: "show-directory", aliases: &["pwd"], doc: "Show the current working directory.", fun: show_current_directory, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "encoding", aliases: &[], doc: "Set encoding. Based on `https://encoding.spec.whatwg.org`.", fun: set_encoding, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT }, }, TypableCommand { name: "character-info", aliases: &["char"], doc: "Get info about the character under the primary cursor.", fun: get_character_info, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "reload", aliases: &["rl"], doc: "Discard changes and reload from the source file.", fun: reload, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "reload-all", aliases: &["rla"], doc: "Discard changes and reload all documents from the source files.", fun: reload_all, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "update", aliases: &["u"], doc: "Write changes only if the file has been modified.", fun: update, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "lsp-workspace-command", aliases: &[], doc: "Open workspace command picker", fun: lsp_workspace_command, completer: 
CommandCompleter::positional(&[completers::lsp_workspace_command]), signature: Signature { positionals: (0, None), raw_after: Some(1), ..Signature::DEFAULT }, }, TypableCommand { name: "lsp-restart", aliases: &[], doc: "Restarts the given language servers, or all language servers that are used by the current file if no arguments are supplied", fun: lsp_restart, completer: CommandCompleter::all(completers::configured_language_servers), signature: Signature { positionals: (0, None), ..Signature::DEFAULT }, }, TypableCommand { name: "lsp-stop", aliases: &[], doc: "Stops the given language servers, or all language servers that are used by the current file if no arguments are supplied", fun: lsp_stop, completer: CommandCompleter::all(completers::active_language_servers), signature: Signature { positionals: (0, None), ..Signature::DEFAULT }, }, TypableCommand { name: "tree-sitter-scopes", aliases: &[], doc: "Display tree sitter scopes, primarily for theming and development.", fun: tree_sitter_scopes, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "tree-sitter-highlight-name", aliases: &[], doc: "Display name of tree-sitter highlight scope under the cursor.", fun: tree_sitter_highlight_name, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "debug-start", aliases: &["dbg"], doc: "Start a debug session from a given template with given parameters.", fun: debug_start, completer: CommandCompleter::none(), signature: Signature { positionals: (0, None), ..Signature::DEFAULT }, }, TypableCommand { name: "debug-remote", aliases: &["dbg-tcp"], doc: "Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters.", fun: debug_remote, completer: CommandCompleter::none(), signature: Signature { positionals: (0, None), ..Signature::DEFAULT }, }, 
TypableCommand { name: "debug-eval", aliases: &[], doc: "Evaluate expression in current debug context.", fun: debug_eval, completer: CommandCompleter::none(), signature: Signature { positionals: (1, Some(1)), ..Signature::DEFAULT }, }, TypableCommand { name: "vsplit", aliases: &["vs"], doc: "Open the file in a vertical split.", fun: vsplit, completer: CommandCompleter::all(completers::filename), signature: Signature { positionals: (0, None), ..Signature::DEFAULT }, }, TypableCommand { name: "vsplit-new", aliases: &["vnew"], doc: "Open a scratch buffer in a vertical split.", fun: vsplit_new, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "hsplit", aliases: &["hs", "sp"], doc: "Open the file in a horizontal split.", fun: hsplit, completer: CommandCompleter::all(completers::filename), signature: Signature { positionals: (0, None), ..Signature::DEFAULT }, }, TypableCommand { name: "hsplit-new", aliases: &["hnew"], doc: "Open a scratch buffer in a horizontal split.", fun: hsplit_new, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "tutor", aliases: &[], doc: "Open the tutorial.", fun: tutor, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "goto", aliases: &["g"], doc: "Goto line number.", fun: goto_line_number, completer: CommandCompleter::none(), signature: Signature { positionals: (1, Some(1)), ..Signature::DEFAULT }, }, TypableCommand { name: "set-language", aliases: &["lang"], doc: "Set the language of current buffer (show current language if no value specified).", fun: language, completer: CommandCompleter::positional(&[completers::language]), signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT }, }, TypableCommand { name: "set-option", aliases: &["set"], doc: "Set a config option at 
runtime.\nFor example to disable smart case search, use `:set search.smart-case false`.", fun: set_option, // TODO: Add support for completion of the options value(s), when appropriate. completer: CommandCompleter::positional(&[completers::setting]), signature: Signature { positionals: (2, Some(2)), raw_after: Some(1), ..Signature::DEFAULT }, }, TypableCommand { name: "toggle-option", aliases: &["toggle"], doc: "Toggle a config option at runtime.\nFor example to toggle smart case search, use `:toggle search.smart-case`.", fun: toggle_option, completer: CommandCompleter::positional(&[completers::setting]), signature: Signature { positionals: (1, None), raw_after: Some(1), ..Signature::DEFAULT }, }, TypableCommand { name: "get-option", aliases: &["get"], doc: "Get the current value of a config option.", fun: get_option, completer: CommandCompleter::positional(&[completers::setting]), signature: Signature { positionals: (1, Some(1)), ..Signature::DEFAULT }, }, TypableCommand { name: "sort", aliases: &[], doc: "Sort ranges in selection.", fun: sort, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), flags: &[ Flag { name: "reverse", alias: Some('r'), doc: "sort ranges in reverse order", ..Flag::DEFAULT }, ], ..Signature::DEFAULT }, }, TypableCommand { name: "reflow", aliases: &[], doc: "Hard-wrap the current selection of lines to a given width.", fun: reflow, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT }, }, TypableCommand { name: "tree-sitter-subtree", aliases: &["ts-subtree"], doc: "Display the smallest tree-sitter subtree that spans the primary selection, primarily for debugging queries.", fun: tree_sitter_subtree, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "config-reload", aliases: &[], doc: "Refresh user config.", fun: refresh_config, completer: CommandCompleter::none(), 
signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "config-open", aliases: &[], doc: "Open the user config.toml file.", fun: open_config, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "config-open-workspace", aliases: &[], doc: "Open the workspace config.toml file.", fun: open_workspace_config, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "log-open", aliases: &[], doc: "Open the helix log file.", fun: open_log, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "insert-output", aliases: &[], doc: "Run shell command, inserting output before each selection.", fun: insert_output, completer: SHELL_COMPLETER, signature: SHELL_SIGNATURE, }, TypableCommand { name: "append-output", aliases: &[], doc: "Run shell command, appending output after each selection.", fun: append_output, completer: SHELL_COMPLETER, signature: SHELL_SIGNATURE, }, TypableCommand { name: "pipe", aliases: &["|"], doc: "Pipe each selection to the shell command.", fun: pipe, completer: SHELL_COMPLETER, signature: SHELL_SIGNATURE, }, TypableCommand { name: "pipe-to", aliases: &[], doc: "Pipe each selection to the shell command, ignoring output.", fun: pipe_to, completer: SHELL_COMPLETER, signature: SHELL_SIGNATURE, }, TypableCommand { name: "run-shell-command", aliases: &["sh", "!"], doc: "Run a shell command", fun: run_shell_command, completer: SHELL_COMPLETER, signature: SHELL_SIGNATURE, }, TypableCommand { name: "reset-diff-change", aliases: &["diffget", "diffg"], doc: "Reset the diff change at the cursor position.", fun: reset_diff_change, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: 
"clear-register", aliases: &[], doc: "Clear given register. If no argument is provided, clear all registers.", fun: clear_register, completer: CommandCompleter::all(completers::register), signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT }, }, TypableCommand { name: "redraw", aliases: &[], doc: "Clear and re-render the whole UI", fun: redraw, completer: CommandCompleter::none(), signature: Signature { positionals: (0, Some(0)), ..Signature::DEFAULT }, }, TypableCommand { name: "move", aliases: &["mv"], doc: "Move the current buffer and its corresponding file to a different path", fun: move_buffer, completer: CommandCompleter::positional(&[completers::filename]), signature: Signature { positionals: (1, Some(1)), ..Signature::DEFAULT }, }, TypableCommand { name: "yank-diagnostic", aliases: &[], doc: "Yank diagnostic(s) under primary cursor to register, or clipboard by default", fun: yank_diagnostic, completer: CommandCompleter::all(completers::register), signature: Signature { positionals: (0, Some(1)), ..Signature::DEFAULT }, }, TypableCommand { name: "read", aliases: &["r"], doc: "Load a file into buffer", fun: read, completer: CommandCompleter::positional(&[completers::filename]), signature: Signature { positionals: (1, Some(1)), ..Signature::DEFAULT }, }, TypableCommand { name: "echo", aliases: &[], doc: "Prints the given arguments to the statusline.", fun: echo, completer: CommandCompleter::none(), signature: Signature { positionals: (1, None), ..Signature::DEFAULT }, }, TypableCommand { name: "noop", aliases: &[], doc: "Does nothing.", fun: noop, completer: CommandCompleter::none(), signature: Signature { positionals: (0, None), ..Signature::DEFAULT }, }, ]; pub static TYPABLE_COMMAND_MAP: Lazy> = Lazy::new(|| { TYPABLE_COMMAND_LIST .iter() .flat_map(|cmd| { std::iter::once((cmd.name, cmd)) .chain(cmd.aliases.iter().map(move |&alias| (alias, cmd))) }) .collect() }); fn execute_command_line( cx: &mut compositor::Context, input: &str, event: 
PromptEvent, ) -> anyhow::Result<()> { let (command, rest, _) = command_line::split(input); if command.is_empty() { return Ok(()); } // If command is numeric, interpret as line number and go there. if command.parse::().is_ok() && rest.trim().is_empty() { let cmd = TYPABLE_COMMAND_MAP.get("goto").unwrap(); return execute_command(cx, cmd, command, event); } match typed::TYPABLE_COMMAND_MAP.get(command) { Some(cmd) => execute_command(cx, cmd, rest, event), None if event == PromptEvent::Validate => Err(anyhow!("no such command: '{command}'")), None => Ok(()), } } pub(super) fn execute_command( cx: &mut compositor::Context, cmd: &TypableCommand, args: &str, event: PromptEvent, ) -> anyhow::Result<()> { let args = if event == PromptEvent::Validate { Args::parse(args, cmd.signature, true, |token| { expansion::expand(cx.editor, token).map_err(|err| err.into()) }) .map_err(|err| anyhow!("'{}': {err}", cmd.name))? } else { Args::parse(args, cmd.signature, false, |token| Ok(token.content)) .expect("arg parsing cannot fail when validation is turned off") }; (cmd.fun)(cx, args, event).map_err(|err| anyhow!("'{}': {err}", cmd.name)) } #[allow(clippy::unnecessary_unwrap)] pub(super) fn command_mode(cx: &mut Context) { let mut prompt = Prompt::new( ":".into(), Some(':'), complete_command_line, move |cx: &mut compositor::Context, input: &str, event: PromptEvent| { if let Err(err) = execute_command_line(cx, input, event) { cx.editor.set_error(err.to_string()); } }, ); prompt.doc_fn = Box::new(command_line_doc); // Calculate initial completion prompt.recalculate_completion(cx.editor); cx.push_layer(Box::new(prompt)); } fn command_line_doc(input: &str) -> Option> { let (command, _, _) = command_line::split(input); let command = TYPABLE_COMMAND_MAP.get(command)?; if command.aliases.is_empty() && command.signature.flags.is_empty() { return Some(Cow::Borrowed(command.doc)); } let mut doc = command.doc.to_string(); if !command.aliases.is_empty() { write!(doc, "\nAliases: {}", 
command.aliases.join(", ")).unwrap(); } if !command.signature.flags.is_empty() { const ARG_PLACEHOLDER: &str = " "; fn flag_len(flag: &Flag) -> usize { let name_len = flag.name.len(); let alias_len = if let Some(alias) = flag.alias { "/-".len() + alias.len_utf8() } else { 0 }; let arg_len = if flag.completions.is_some() { ARG_PLACEHOLDER.len() } else { 0 }; name_len + alias_len + arg_len } doc.push_str("\nFlags:"); let max_flag_len = command.signature.flags.iter().map(flag_len).max().unwrap(); for flag in command.signature.flags { let mut buf = [0u8; 4]; let this_flag_len = flag_len(flag); write!( doc, "\n --{flag_text}{spacer:spacing$} {doc}", doc = flag.doc, // `fmt::Arguments` does not respect width controls so we must place the spacers // explicitly: spacer = "", spacing = max_flag_len - this_flag_len, flag_text = format_args!( "{}{}{}{}", flag.name, // Ideally this would be written as a `format_args!` too but the borrow // checker is not yet smart enough. if flag.alias.is_some() { "/-" } else { "" }, if let Some(alias) = flag.alias { alias.encode_utf8(&mut buf) } else { "" }, if flag.completions.is_some() { ARG_PLACEHOLDER } else { "" } ), ) .unwrap(); } } Some(Cow::Owned(doc)) } fn complete_command_line(editor: &Editor, input: &str) -> Vec { let (command, rest, complete_command) = command_line::split(input); if complete_command { fuzzy_match( input, TYPABLE_COMMAND_LIST.iter().map(|command| command.name), false, ) .into_iter() .map(|(name, _)| (0.., name.into())) .collect() } else { TYPABLE_COMMAND_MAP .get(command) .map_or_else(Vec::new, |cmd| { let args_offset = command.len() + 1; complete_command_args(editor, cmd, rest, args_offset) }) } } fn complete_command_args( editor: &Editor, command: &TypableCommand, input: &str, offset: usize, ) -> Vec { use command_line::{CompletionState, ExpansionKind, Tokenizer}; // TODO: completion should depend on the location of the cursor instead of the end of the // string. 
This refactor is left for the future but the below completion code should respect // the cursor position if it becomes a parameter. let cursor = input.len(); let prefix = &input[..cursor]; let mut tokenizer = Tokenizer::new(prefix, false); let mut args = Args::new(command.signature, false); let mut final_token = None; let mut is_last_token = true; while let Some(token) = args .read_token(&mut tokenizer) .expect("arg parsing cannot fail when validation is turned off") { final_token = Some(token.clone()); args.push(token.content) .expect("arg parsing cannot fail when validation is turned off"); if tokenizer.pos() >= cursor { is_last_token = false; } } // Use a fake final token when the input is not terminated with a token. This simulates an // empty argument, causing completion on an empty value whenever you type space/tab. For // example if you say `":open README.md "` (with that trailing space) you should see the // files in the current dir - completing `""` rather than completions for `"README.md"` or // `"README.md "`. let token = if is_last_token { let token = Token::empty_at(prefix.len()); args.push(token.content.clone()).unwrap(); token } else { final_token.unwrap() }; // Don't complete on closed tokens, for example after writing a closing double quote. if token.is_terminated { return Vec::new(); } match token.kind { TokenKind::Unquoted | TokenKind::Quoted(_) => { match args.completion_state() { CompletionState::Positional => { // If the completion state is positional there must be at least one positional // in `args`. 
let n = args .len() .checked_sub(1) .expect("completion state to be positional"); let completer = command.completer_for_argument_number(n); completer(editor, &token.content) .into_iter() .map(|(range, span)| quote_completion(&token, range, span, offset)) .collect() } CompletionState::Flag(_) => fuzzy_match( token.content.trim_start_matches('-'), command.signature.flags.iter().map(|flag| flag.name), false, ) .into_iter() .map(|(name, _)| ((offset + token.content_start).., format!("--{name}").into())) .collect(), CompletionState::FlagArgument(flag) => fuzzy_match( &token.content, flag.completions .expect("flags in FlagArgument always have completions"), false, ) .into_iter() .map(|(value, _)| ((offset + token.content_start).., (*value).into())) .collect(), } } TokenKind::Expand | TokenKind::Expansion(ExpansionKind::Shell) => { // See the comment about the checked sub expect above. let arg_completer = matches!(args.completion_state(), CompletionState::Positional) .then(|| { let n = args .len() .checked_sub(1) .expect("completion state to be positional"); command.completer_for_argument_number(n) }); complete_expand(editor, &token, arg_completer, offset + token.content_start) } TokenKind::Expansion(ExpansionKind::Variable) => { complete_variable_expansion(&token.content, offset + token.content_start) } TokenKind::Expansion(ExpansionKind::Unicode) => Vec::new(), TokenKind::ExpansionKind => { complete_expansion_kind(&token.content, offset + token.content_start) } } } /// Replace the content and optionally update the range of a positional's completion to account /// for quoting. /// /// This is used to handle completions of file or directory names for example. When completing a /// file with a space, tab or percent character in the name, the space should be escaped by /// quoting the entire token. If the token being completed is already quoted, any quotes within /// the completion text should be escaped by doubling them. 
fn quote_completion<'a>( token: &Token, range: ops::RangeFrom, mut span: Span<'a>, offset: usize, ) -> (ops::RangeFrom, Span<'a>) { fn replace<'a>(text: Cow<'a, str>, from: char, to: &str) -> Cow<'a, str> { if text.contains(from) { Cow::Owned(text.replace(from, to)) } else { text } } match token.kind { TokenKind::Unquoted if span.content.contains([' ', '\t', '%']) => { span.content = Cow::Owned(format!( "'{}{}'", // Escape any inner single quotes by doubling them. replace(token.content.as_ref().into(), '\'', "''"), replace(span.content, '\'', "''") )); // Ignore `range.start` here since we're replacing the entire token. ((offset + token.content_start).., span) } TokenKind::Quoted(quote) => { span.content = replace(span.content, quote.char(), quote.escape()); ((range.start + offset + token.content_start).., span) } TokenKind::Expand => { // NOTE: `token.content_start` is already accounted for in `offset` for `Expand` // tokens. span.content = replace(span.content, '"', "\"\""); ((range.start + offset).., span) } _ => ((range.start + offset + token.content_start).., span), } } fn complete_expand( editor: &Editor, token: &Token, completer: Option<&Completer>, offset: usize, ) -> Vec { use command_line::{ExpansionKind, Tokenizer}; let mut start = 0; // If the expand token contains expansions, complete those. while let Some(idx) = token.content[start..].find('%') { let idx = start + idx; if token.content.as_bytes().get(idx + '%'.len_utf8()).copied() == Some(b'%') { // Two percents together are skipped. start = idx + ('%'.len_utf8() * 2); } else { let mut tokenizer = Tokenizer::new(&token.content[idx..], false); let token = tokenizer .parse_percent_token() .map(|token| token.expect("arg parser cannot fail when validation is disabled")); start = idx + tokenizer.pos(); // Like closing quote characters in `complete_command_args` above, don't provide // completions if the token is already terminated. 
This also skips expansions // which have already been fully written, for example // `"%{cursor_line}:%{cursor_col` should complete `cursor_column` instead of // `cursor_line`. let Some(token) = token.filter(|t| !t.is_terminated) else { continue; }; let local_offset = offset + idx + token.content_start; match token.kind { TokenKind::Expansion(ExpansionKind::Variable) => { return complete_variable_expansion(&token.content, local_offset); } TokenKind::Expansion(ExpansionKind::Shell) => { return complete_expand(editor, &token, None, local_offset); } TokenKind::ExpansionKind => { return complete_expansion_kind(&token.content, local_offset); } _ => continue, } } } match completer { // If no expansions were found and an argument is being completed, Some(completer) if start == 0 => completer(editor, &token.content) .into_iter() .map(|(range, span)| quote_completion(token, range, span, offset)) .collect(), _ => Vec::new(), } } fn complete_variable_expansion(content: &str, offset: usize) -> Vec { use expansion::Variable; fuzzy_match( content, Variable::VARIANTS.iter().map(Variable::as_str), false, ) .into_iter() .map(|(name, _)| (offset.., (*name).into())) .collect() } fn complete_expansion_kind(content: &str, offset: usize) -> Vec { use command_line::ExpansionKind; fuzzy_match( content, // Skip `ExpansionKind::Variable` since its kind string is empty. ExpansionKind::VARIANTS .iter() .skip(1) .map(ExpansionKind::as_str), false, ) .into_iter() .map(|(name, _)| (offset.., (*name).into())) .collect() } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/compositor.rs000066400000000000000000000214061500614314100252210ustar00rootroot00000000000000// Each component declares its own size constraints and gets fitted based on its parent. // Q: how does this work with popups? 
// cursive does compositor.screen_mut().add_layer_at(pos::absolute(x, y), ) use helix_core::Position; use helix_view::graphics::{CursorKind, Rect}; use tui::buffer::Buffer as Surface; pub type Callback = Box; pub type SyncCallback = Box; // Cursive-inspired pub enum EventResult { Ignored(Option), Consumed(Option), } use crate::job::Jobs; use crate::ui::picker; use helix_view::Editor; pub use helix_view::input::Event; pub struct Context<'a> { pub editor: &'a mut Editor, pub scroll: Option, pub jobs: &'a mut Jobs, } impl Context<'_> { /// Waits on all pending jobs, and then tries to flush all pending write /// operations for all documents. pub fn block_try_flush_writes(&mut self) -> anyhow::Result<()> { tokio::task::block_in_place(|| helix_lsp::block_on(self.jobs.finish(self.editor, None)))?; tokio::task::block_in_place(|| helix_lsp::block_on(self.editor.flush_writes()))?; Ok(()) } } pub trait Component: Any + AnyComponent { /// Process input events, return true if handled. fn handle_event(&mut self, _event: &Event, _ctx: &mut Context) -> EventResult { EventResult::Ignored(None) } // , args: () /// Should redraw? Useful for saving redraw cycles if we know component didn't change. fn should_update(&self) -> bool { true } /// Render the component onto the provided surface. fn render(&mut self, area: Rect, frame: &mut Surface, ctx: &mut Context); /// Get cursor position and cursor kind. fn cursor(&self, _area: Rect, _ctx: &Editor) -> (Option, CursorKind) { (None, CursorKind::Hidden) } /// May be used by the parent component to compute the child area. /// viewport is the maximum allowed area, and the child should stay within those bounds. /// /// The returned size might be larger than the viewport if the child is too big to fit. /// In this case the parent can use the values to calculate scroll. 
fn required_size(&mut self, _viewport: (u16, u16)) -> Option<(u16, u16)> { None } fn type_name(&self) -> &'static str { std::any::type_name::() } fn id(&self) -> Option<&'static str> { None } } pub struct Compositor { layers: Vec>, area: Rect, pub(crate) last_picker: Option>, pub(crate) full_redraw: bool, } impl Compositor { pub fn new(area: Rect) -> Self { Self { layers: Vec::new(), area, last_picker: None, full_redraw: false, } } pub fn size(&self) -> Rect { self.area } pub fn resize(&mut self, area: Rect) { self.area = area; } /// Add a layer to be rendered in front of all existing layers. pub fn push(&mut self, mut layer: Box) { // immediately clear last_picker field to avoid excessive memory // consumption for picker with many items if layer.id() == Some(picker::ID) { self.last_picker = None; } let size = self.size(); // trigger required_size on init layer.required_size((size.width, size.height)); self.layers.push(layer); } /// Replace a component that has the given `id` with the new layer and if /// no component is found, push the layer normally. pub fn replace_or_push(&mut self, id: &'static str, layer: T) { if let Some(component) = self.find_id(id) { *component = layer; } else { self.push(Box::new(layer)) } } pub fn pop(&mut self) -> Option> { self.layers.pop() } pub fn remove(&mut self, id: &'static str) -> Option> { let idx = self .layers .iter() .position(|layer| layer.id() == Some(id))?; Some(self.layers.remove(idx)) } pub fn handle_event(&mut self, event: &Event, cx: &mut Context) -> bool { // If it is a key event, a macro is being recorded, and a macro isn't being replayed, // push the key event to the recording. 
if let (Event::Key(key), Some((_, keys))) = (event, &mut cx.editor.macro_recording) { if cx.editor.macro_replaying.is_empty() { keys.push(*key); } } let mut callbacks = Vec::new(); let mut consumed = false; // propagate events through the layers until we either find a layer that consumes it or we // run out of layers (event bubbling), starting at the front layer and then moving to the // background. for layer in self.layers.iter_mut().rev() { match layer.handle_event(event, cx) { EventResult::Consumed(Some(callback)) => { callbacks.push(callback); consumed = true; break; } EventResult::Consumed(None) => { consumed = true; break; } EventResult::Ignored(Some(callback)) => { callbacks.push(callback); } EventResult::Ignored(None) => {} }; } for callback in callbacks { callback(self, cx) } consumed } pub fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) { for layer in &mut self.layers { layer.render(area, surface, cx); } } pub fn cursor(&self, area: Rect, editor: &Editor) -> (Option, CursorKind) { for layer in self.layers.iter().rev() { if let (Some(pos), kind) = layer.cursor(area, editor) { return (Some(pos), kind); } } (None, CursorKind::Hidden) } pub fn has_component(&self, type_name: &str) -> bool { self.layers .iter() .any(|component| component.type_name() == type_name) } pub fn find(&mut self) -> Option<&mut T> { let type_name = std::any::type_name::(); self.layers .iter_mut() .find(|component| component.type_name() == type_name) .and_then(|component| component.as_any_mut().downcast_mut()) } pub fn find_id(&mut self, id: &'static str) -> Option<&mut T> { self.layers .iter_mut() .find(|component| component.id() == Some(id)) .and_then(|component| component.as_any_mut().downcast_mut()) } pub fn need_full_redraw(&mut self) { self.full_redraw = true; } } // View casting, taken straight from Cursive use std::any::Any; /// A view that can be downcasted to its concrete type. /// /// This trait is automatically implemented for any `T: Component`. 
pub trait AnyComponent { /// Downcast self to a `Any`. fn as_any(&self) -> &dyn Any; /// Downcast self to a mutable `Any`. fn as_any_mut(&mut self) -> &mut dyn Any; /// Returns a boxed any from a boxed self. /// /// Can be used before `Box::downcast()`. /// /// # Examples /// /// ```rust /// use helix_term::{ui::Text, compositor::Component}; /// let boxed: Box = Box::new(Text::new("text".to_string())); /// let text: Box = boxed.as_boxed_any().downcast().unwrap(); /// ``` fn as_boxed_any(self: Box) -> Box; } impl AnyComponent for T { /// Downcast self to a `Any`. fn as_any(&self) -> &dyn Any { self } /// Downcast self to a mutable `Any`. fn as_any_mut(&mut self) -> &mut dyn Any { self } fn as_boxed_any(self: Box) -> Box { self } } impl dyn AnyComponent { /// Attempts to downcast `self` to a concrete type. pub fn downcast_ref(&self) -> Option<&T> { self.as_any().downcast_ref() } /// Attempts to downcast `self` to a concrete type. pub fn downcast_mut(&mut self) -> Option<&mut T> { self.as_any_mut().downcast_mut() } /// Attempts to downcast `Box` to a concrete type. pub fn downcast(self: Box) -> Result, Box> { // Do the check here + unwrap, so the error // value is `Self` and not `dyn Any`. if self.as_any().is::() { Ok(self.as_boxed_any().downcast().unwrap()) } else { Err(self) } } /// Checks if this view is of type `T`. 
pub fn is(&mut self) -> bool { self.as_any().is::() } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/config.rs000066400000000000000000000133721500614314100242730ustar00rootroot00000000000000use crate::keymap; use crate::keymap::{merge_keys, KeyTrie}; use helix_loader::merge_toml_values; use helix_view::document::Mode; use serde::Deserialize; use std::collections::HashMap; use std::fmt::Display; use std::fs; use std::io::Error as IOError; use toml::de::Error as TomlError; #[derive(Debug, Clone, PartialEq)] pub struct Config { pub theme: Option, pub keys: HashMap, pub editor: helix_view::editor::Config, } #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] pub struct ConfigRaw { pub theme: Option, pub keys: Option>, pub editor: Option, } impl Default for Config { fn default() -> Config { Config { theme: None, keys: keymap::default(), editor: helix_view::editor::Config::default(), } } } #[derive(Debug)] pub enum ConfigLoadError { BadConfig(TomlError), Error(IOError), } impl Default for ConfigLoadError { fn default() -> Self { ConfigLoadError::Error(IOError::new(std::io::ErrorKind::NotFound, "place holder")) } } impl Display for ConfigLoadError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { ConfigLoadError::BadConfig(err) => err.fmt(f), ConfigLoadError::Error(err) => err.fmt(f), } } } impl Config { pub fn load( global: Result, local: Result, ) -> Result { let global_config: Result = global.and_then(|file| toml::from_str(&file).map_err(ConfigLoadError::BadConfig)); let local_config: Result = local.and_then(|file| toml::from_str(&file).map_err(ConfigLoadError::BadConfig)); let res = match (global_config, local_config) { (Ok(global), Ok(local)) => { let mut keys = keymap::default(); if let Some(global_keys) = global.keys { merge_keys(&mut keys, global_keys) } if let Some(local_keys) = local.keys { merge_keys(&mut keys, local_keys) } let editor = match (global.editor, local.editor) { (None, 
None) => helix_view::editor::Config::default(), (None, Some(val)) | (Some(val), None) => { val.try_into().map_err(ConfigLoadError::BadConfig)? } (Some(global), Some(local)) => merge_toml_values(global, local, 3) .try_into() .map_err(ConfigLoadError::BadConfig)?, }; Config { theme: local.theme.or(global.theme), keys, editor, } } // if any configs are invalid return that first (_, Err(ConfigLoadError::BadConfig(err))) | (Err(ConfigLoadError::BadConfig(err)), _) => { return Err(ConfigLoadError::BadConfig(err)) } (Ok(config), Err(_)) | (Err(_), Ok(config)) => { let mut keys = keymap::default(); if let Some(keymap) = config.keys { merge_keys(&mut keys, keymap); } Config { theme: config.theme, keys, editor: config.editor.map_or_else( || Ok(helix_view::editor::Config::default()), |val| val.try_into().map_err(ConfigLoadError::BadConfig), )?, } } // these are just two io errors return the one for the global config (Err(err), Err(_)) => return Err(err), }; Ok(res) } pub fn load_default() -> Result { let global_config = fs::read_to_string(helix_loader::config_file()).map_err(ConfigLoadError::Error); let local_config = fs::read_to_string(helix_loader::workspace_config_file()) .map_err(ConfigLoadError::Error); Config::load(global_config, local_config) } } #[cfg(test)] mod tests { use super::*; impl Config { fn load_test(config: &str) -> Config { Config::load(Ok(config.to_owned()), Err(ConfigLoadError::default())).unwrap() } } #[test] fn parsing_keymaps_config_file() { use crate::keymap; use helix_core::hashmap; use helix_view::document::Mode; let sample_keymaps = r#" [keys.insert] y = "move_line_down" S-C-a = "delete_selection" [keys.normal] A-F12 = "move_next_word_end" "#; let mut keys = keymap::default(); merge_keys( &mut keys, hashmap! 
{ Mode::Insert => keymap!({ "Insert mode" "y" => move_line_down, "S-C-a" => delete_selection, }), Mode::Normal => keymap!({ "Normal mode" "A-F12" => move_next_word_end, }), }, ); assert_eq!( Config::load_test(sample_keymaps), Config { keys, ..Default::default() } ); } #[test] fn keys_resolve_to_correct_defaults() { // From serde default let default_keys = Config::load_test("").keys; assert_eq!(default_keys, keymap::default()); // From the Default trait let default_keys = Config::default().keys; assert_eq!(default_keys, keymap::default()); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/events.rs000066400000000000000000000021251500614314100243240ustar00rootroot00000000000000use helix_event::{events, register_event}; use helix_view::document::Mode; use helix_view::events::{ DiagnosticsDidChange, DocumentDidChange, DocumentDidClose, DocumentDidOpen, DocumentFocusLost, LanguageServerExited, LanguageServerInitialized, SelectionDidChange, }; use crate::commands; use crate::keymap::MappableCommand; events! 
{ OnModeSwitch<'a, 'cx> { old_mode: Mode, new_mode: Mode, cx: &'a mut commands::Context<'cx> } PostInsertChar<'a, 'cx> { c: char, cx: &'a mut commands::Context<'cx> } PostCommand<'a, 'cx> { command: & 'a MappableCommand, cx: &'a mut commands::Context<'cx> } } pub fn register() { register_event::(); register_event::(); register_event::(); register_event::(); register_event::(); register_event::(); register_event::(); register_event::(); register_event::(); register_event::(); register_event::(); } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/handlers.rs000066400000000000000000000024141500614314100246210ustar00rootroot00000000000000use std::sync::Arc; use arc_swap::ArcSwap; use helix_event::AsyncHook; use crate::config::Config; use crate::events; use crate::handlers::auto_save::AutoSaveHandler; use crate::handlers::signature_help::SignatureHelpHandler; pub use helix_view::handlers::Handlers; use self::document_colors::DocumentColorsHandler; mod auto_save; pub mod completion; mod diagnostics; mod document_colors; mod signature_help; mod snippet; pub fn setup(config: Arc>) -> Handlers { events::register(); let event_tx = completion::CompletionHandler::new(config).spawn(); let signature_hints = SignatureHelpHandler::new().spawn(); let auto_save = AutoSaveHandler::new().spawn(); let document_colors = DocumentColorsHandler::default().spawn(); let handlers = Handlers { completions: helix_view::handlers::completion::CompletionHandler::new(event_tx), signature_hints, auto_save, document_colors, }; helix_view::handlers::register_hooks(&handlers); completion::register_hooks(&handlers); signature_help::register_hooks(&handlers); auto_save::register_hooks(&handlers); diagnostics::register_hooks(&handlers); snippet::register_hooks(&handlers); document_colors::register_hooks(&handlers); handlers } 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/handlers/000077500000000000000000000000001500614314100242525ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/handlers/auto_save.rs000066400000000000000000000065401500614314100266130ustar00rootroot00000000000000use std::{ sync::{ atomic::{self, AtomicBool}, Arc, }, time::Duration, }; use anyhow::Ok; use arc_swap::access::Access; use helix_event::{register_hook, send_blocking}; use helix_view::{ document::Mode, events::DocumentDidChange, handlers::{AutoSaveEvent, Handlers}, Editor, }; use tokio::time::Instant; use crate::{ commands, compositor, events::OnModeSwitch, job::{self, Jobs}, }; #[derive(Debug)] pub(super) struct AutoSaveHandler { save_pending: Arc, } impl AutoSaveHandler { pub fn new() -> AutoSaveHandler { AutoSaveHandler { save_pending: Default::default(), } } } impl helix_event::AsyncHook for AutoSaveHandler { type Event = AutoSaveEvent; fn handle_event( &mut self, event: Self::Event, existing_debounce: Option, ) -> Option { match event { Self::Event::DocumentChanged { save_after } => { Some(Instant::now() + Duration::from_millis(save_after)) } Self::Event::LeftInsertMode => { if existing_debounce.is_some() { // If the change happened more recently than the debounce, let the // debounce run down before saving. existing_debounce } else { // Otherwise if there is a save pending, save immediately. if self.save_pending.load(atomic::Ordering::Relaxed) { self.finish_debounce(); } None } } } } fn finish_debounce(&mut self) { let save_pending = self.save_pending.clone(); job::dispatch_blocking(move |editor, _| { if editor.mode() == Mode::Insert { // Avoid saving while in insert mode since this mixes up // the modification indicator and prevents future saves. 
save_pending.store(true, atomic::Ordering::Relaxed); } else { request_auto_save(editor); save_pending.store(false, atomic::Ordering::Relaxed); } }) } } fn request_auto_save(editor: &mut Editor) { let context = &mut compositor::Context { editor, scroll: Some(0), jobs: &mut Jobs::new(), }; let options = commands::WriteAllOptions { force: false, write_scratch: false, auto_format: false, }; if let Err(e) = commands::typed::write_all_impl(context, options) { context.editor.set_error(format!("{}", e)); } } pub(super) fn register_hooks(handlers: &Handlers) { let tx = handlers.auto_save.clone(); register_hook!(move |event: &mut DocumentDidChange<'_>| { let config = event.doc.config.load(); if config.auto_save.after_delay.enable { send_blocking( &tx, AutoSaveEvent::DocumentChanged { save_after: config.auto_save.after_delay.timeout, }, ); } Ok(()) }); let tx = handlers.auto_save.clone(); register_hook!(move |event: &mut OnModeSwitch<'_, '_>| { if event.old_mode == Mode::Insert { send_blocking(&tx, AutoSaveEvent::LeftInsertMode) } Ok(()) }); } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/handlers/completion.rs000066400000000000000000000224241500614314100267750ustar00rootroot00000000000000use std::collections::HashMap; use helix_core::chars::char_is_word; use helix_core::completion::CompletionProvider; use helix_core::syntax::LanguageServerFeature; use helix_event::{register_hook, TaskHandle}; use helix_lsp::lsp; use helix_stdx::rope::RopeSliceExt; use helix_view::document::Mode; use helix_view::handlers::completion::{CompletionEvent, ResponseContext}; use helix_view::Editor; use tokio::task::JoinSet; use crate::commands; use crate::compositor::Compositor; use crate::events::{OnModeSwitch, PostCommand, PostInsertChar}; use crate::handlers::completion::request::{request_incomplete_completion_list, Trigger}; use crate::job::dispatch; use crate::keymap::MappableCommand; use crate::ui::lsp::signature_help::SignatureHelp; use crate::ui::{self, Popup}; use 
super::Handlers; pub use item::{CompletionItem, CompletionItems, CompletionResponse, LspCompletionItem}; pub use request::CompletionHandler; pub use resolve::ResolveHandler; mod item; mod path; mod request; mod resolve; async fn handle_response( requests: &mut JoinSet, is_incomplete: bool, ) -> Option { loop { let response = requests.join_next().await?.unwrap(); if !is_incomplete && !response.context.is_incomplete && response.items.is_empty() { continue; } return Some(response); } } async fn replace_completions( handle: TaskHandle, mut requests: JoinSet, is_incomplete: bool, ) { while let Some(mut response) = handle_response(&mut requests, is_incomplete).await { let handle = handle.clone(); dispatch(move |editor, compositor| { let editor_view = compositor.find::().unwrap(); let Some(completion) = &mut editor_view.completion else { return; }; if handle.is_canceled() { log::info!("dropping outdated completion response"); return; } completion.replace_provider_completions(&mut response, is_incomplete); if completion.is_empty() { editor_view.clear_completion(editor); // clearing completions might mean we want to immediately re-request them (usually // this occurs if typing a trigger char) trigger_auto_completion(editor, false); } else { editor .handlers .completions .active_completions .insert(response.provider, response.context); } }) .await; } } fn show_completion( editor: &mut Editor, compositor: &mut Compositor, items: Vec, context: HashMap, trigger: Trigger, ) { let (view, doc) = current_ref!(editor); // check if the completion request is stale. // // Completions are completed asynchronously and therefore the user could //switch document/view or leave insert mode. 
In all of thoise cases the // completion should be discarded if editor.mode != Mode::Insert || view.id != trigger.view || doc.id() != trigger.doc { return; } let size = compositor.size(); let ui = compositor.find::().unwrap(); if ui.completion.is_some() { return; } editor.handlers.completions.active_completions = context; let completion_area = ui.set_completion(editor, items, trigger.pos, size); let signature_help_area = compositor .find_id::>(SignatureHelp::ID) .map(|signature_help| signature_help.area(size, editor)); // Delete the signature help popup if they intersect. if matches!((completion_area, signature_help_area),(Some(a), Some(b)) if a.intersects(b)) { compositor.remove(SignatureHelp::ID); } } pub fn trigger_auto_completion(editor: &Editor, trigger_char_only: bool) { let config = editor.config.load(); if !config.auto_completion { return; } let (view, doc): (&helix_view::View, &helix_view::Document) = current_ref!(editor); let mut text = doc.text().slice(..); let cursor = doc.selection(view.id).primary().cursor(text); text = doc.text().slice(..cursor); let is_trigger_char = doc .language_servers_with_feature(LanguageServerFeature::Completion) .any(|ls| { matches!(&ls.capabilities().completion_provider, Some(lsp::CompletionOptions { trigger_characters: Some(triggers), .. 
}) if triggers.iter().any(|trigger| text.ends_with(trigger))) }); let cursor_char = text .get_bytes_at(text.len_bytes()) .and_then(|t| t.reversed().next()); #[cfg(windows)] let is_path_completion_trigger = matches!(cursor_char, Some(b'/' | b'\\')); #[cfg(not(windows))] let is_path_completion_trigger = matches!(cursor_char, Some(b'/')); let handler = &editor.handlers.completions; if is_trigger_char || (is_path_completion_trigger && doc.path_completion_enabled()) { handler.event(CompletionEvent::TriggerChar { cursor, doc: doc.id(), view: view.id, }); return; } let is_auto_trigger = !trigger_char_only && doc .text() .chars_at(cursor) .reversed() .take(config.completion_trigger_len as usize) .all(char_is_word); if is_auto_trigger { handler.event(CompletionEvent::AutoTrigger { cursor, doc: doc.id(), view: view.id, }); } } fn update_completion_filter(cx: &mut commands::Context, c: Option) { cx.callback.push(Box::new(move |compositor, cx| { let editor_view = compositor.find::().unwrap(); if let Some(completion) = &mut editor_view.completion { completion.update_filter(c); if completion.is_empty() || c.is_some_and(|c| !char_is_word(c)) { editor_view.clear_completion(cx.editor); // clearing completions might mean we want to immediately rerequest them (usually // this occurs if typing a trigger char) if c.is_some() { trigger_auto_completion(cx.editor, false); } } else { let handle = cx.editor.handlers.completions.request_controller.restart(); request_incomplete_completion_list(cx.editor, handle) } } })) } fn clear_completions(cx: &mut commands::Context) { cx.callback.push(Box::new(|compositor, cx| { let editor_view = compositor.find::().unwrap(); editor_view.clear_completion(cx.editor); })) } fn completion_post_command_hook( PostCommand { command, cx }: &mut PostCommand<'_, '_>, ) -> anyhow::Result<()> { if cx.editor.mode == Mode::Insert { if cx.editor.last_completion.is_some() { match command { MappableCommand::Static { name: "delete_word_forward" | "delete_char_forward" | 
"completion", .. } => (), MappableCommand::Static { name: "delete_char_backward", .. } => update_completion_filter(cx, None), _ => clear_completions(cx), } } else { let event = match command { MappableCommand::Static { name: "delete_char_backward" | "delete_word_forward" | "delete_char_forward", .. } => { let (view, doc) = current!(cx.editor); let primary_cursor = doc .selection(view.id) .primary() .cursor(doc.text().slice(..)); CompletionEvent::DeleteText { cursor: primary_cursor, } } // hacks: some commands are handeled elsewhere and we don't want to // cancel in that case MappableCommand::Static { name: "completion" | "insert_mode" | "append_mode", .. } => return Ok(()), _ => CompletionEvent::Cancel, }; cx.editor.handlers.completions.event(event); } } Ok(()) } pub(super) fn register_hooks(_handlers: &Handlers) { register_hook!(move |event: &mut PostCommand<'_, '_>| completion_post_command_hook(event)); register_hook!(move |event: &mut OnModeSwitch<'_, '_>| { if event.old_mode == Mode::Insert { event .cx .editor .handlers .completions .event(CompletionEvent::Cancel); clear_completions(event.cx); } else if event.new_mode == Mode::Insert { trigger_auto_completion(event.cx.editor, false) } Ok(()) }); register_hook!(move |event: &mut PostInsertChar<'_, '_>| { if event.cx.editor.last_completion.is_some() { update_completion_filter(event.cx, Some(event.c)) } else { trigger_auto_completion(event.cx.editor, false); } Ok(()) }); } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/handlers/completion/000077500000000000000000000000001500614314100264235ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/handlers/completion/item.rs000066400000000000000000000070521500614314100277330ustar00rootroot00000000000000use std::mem; use helix_core::completion::CompletionProvider; use helix_lsp::{lsp, LanguageServerId}; use helix_view::handlers::completion::ResponseContext; pub struct CompletionResponse { pub items: CompletionItems, pub 
provider: CompletionProvider, pub context: ResponseContext, } pub enum CompletionItems { Lsp(Vec), Other(Vec), } impl CompletionItems { pub fn is_empty(&self) -> bool { match self { CompletionItems::Lsp(items) => items.is_empty(), CompletionItems::Other(items) => items.is_empty(), } } } impl CompletionResponse { pub fn take_items(&mut self, dst: &mut Vec) { match &mut self.items { CompletionItems::Lsp(items) => dst.extend(items.drain(..).map(|item| { CompletionItem::Lsp(LspCompletionItem { item, provider: match self.provider { CompletionProvider::Lsp(provider) => provider, _ => unreachable!(), }, resolved: false, provider_priority: self.context.priority, }) })), CompletionItems::Other(items) if dst.is_empty() => mem::swap(dst, items), CompletionItems::Other(items) => dst.append(items), } } } #[derive(Debug, PartialEq, Clone)] pub struct LspCompletionItem { pub item: lsp::CompletionItem, pub provider: LanguageServerId, pub resolved: bool, // TODO: we should not be filtering and sorting incomplete completion list // according to the spec but vscode does that anyway and most servers ( // including rust-analyzer) rely on that.. so we can't do that without // breaking completions. 
pub provider_priority: i8, } impl LspCompletionItem { #[inline] pub fn filter_text(&self) -> &str { self.item .filter_text .as_ref() .unwrap_or(&self.item.label) .as_str() } } #[derive(Debug, PartialEq, Clone)] pub enum CompletionItem { Lsp(LspCompletionItem), Other(helix_core::CompletionItem), } impl CompletionItem { #[inline] pub fn filter_text(&self) -> &str { match self { CompletionItem::Lsp(item) => item.filter_text(), CompletionItem::Other(item) => &item.label, } } } impl PartialEq for LspCompletionItem { fn eq(&self, other: &CompletionItem) -> bool { match other { CompletionItem::Lsp(other) => self == other, _ => false, } } } impl PartialEq for helix_core::CompletionItem { fn eq(&self, other: &CompletionItem) -> bool { match other { CompletionItem::Other(other) => self == other, _ => false, } } } impl CompletionItem { pub fn provider_priority(&self) -> i8 { match self { CompletionItem::Lsp(item) => item.provider_priority, // sorting path completions after LSP for now CompletionItem::Other(_) => 1, } } pub fn provider(&self) -> CompletionProvider { match self { CompletionItem::Lsp(item) => CompletionProvider::Lsp(item.provider), CompletionItem::Other(item) => item.provider, } } pub fn preselect(&self) -> bool { match self { CompletionItem::Lsp(LspCompletionItem { item, .. 
}) => item.preselect.unwrap_or(false), CompletionItem::Other(_) => false, } } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/handlers/completion/path.rs000066400000000000000000000153721500614314100277350ustar00rootroot00000000000000use std::{ borrow::Cow, fs, path::{Path, PathBuf}, str::FromStr as _, sync::Arc, }; use helix_core::{self as core, completion::CompletionProvider, Selection, Transaction}; use helix_event::TaskHandle; use helix_stdx::path::{self, canonicalize, fold_home_dir, get_path_suffix}; use helix_view::{document::SavePoint, handlers::completion::ResponseContext, Document}; use url::Url; use crate::handlers::completion::{item::CompletionResponse, CompletionItem, CompletionItems}; pub(crate) fn path_completion( selection: Selection, doc: &Document, handle: TaskHandle, savepoint: Arc, ) -> Option CompletionResponse> { if !doc.path_completion_enabled() { return None; } let text = doc.text().clone(); let cursor = selection.primary().cursor(text.slice(..)); let cur_line = text.char_to_line(cursor); let start = text.line_to_char(cur_line).max(cursor.saturating_sub(1000)); let line_until_cursor = text.slice(start..cursor); let (dir_path, typed_file_name) = get_path_suffix(line_until_cursor, false).and_then(|matched_path| { let matched_path = Cow::from(matched_path); let path: Cow<_> = if matched_path.starts_with("file://") { Url::from_str(&matched_path) .ok() .and_then(|url| url.to_file_path().ok())? 
.into() } else { Path::new(&*matched_path).into() }; let path = path::expand(&path); let parent_dir = doc.path().and_then(|dp| dp.parent()); let path = match parent_dir { Some(parent_dir) if path.is_relative() => parent_dir.join(&path), _ => path.into_owned(), }; #[cfg(windows)] let ends_with_slash = matches!(matched_path.as_bytes().last(), Some(b'/' | b'\\')); #[cfg(not(windows))] let ends_with_slash = matches!(matched_path.as_bytes().last(), Some(b'/')); if ends_with_slash { Some((PathBuf::from(path.as_path()), None)) } else { path.parent().map(|parent_path| { ( PathBuf::from(parent_path), path.file_name().and_then(|f| f.to_str().map(String::from)), ) }) } })?; if handle.is_canceled() { return None; } // TODO: handle properly in the future const PRIORITY: i8 = 1; let future = move || { let Ok(read_dir) = std::fs::read_dir(&dir_path) else { return CompletionResponse { items: CompletionItems::Other(Vec::new()), provider: CompletionProvider::Path, context: ResponseContext { is_incomplete: false, priority: PRIORITY, savepoint, }, }; }; let edit_diff = typed_file_name .as_ref() .map(|s| s.chars().count()) .unwrap_or_default(); let res: Vec<_> = read_dir .filter_map(Result::ok) .filter_map(|dir_entry| { dir_entry .metadata() .ok() .and_then(|md| Some((dir_entry.file_name().into_string().ok()?, md))) }) .map_while(|(file_name, md)| { if handle.is_canceled() { return None; } let kind = path_kind(&md); let documentation = path_documentation(&md, &dir_path.join(&file_name), kind); let transaction = Transaction::change_by_selection(&text, &selection, |range| { let cursor = range.cursor(text.slice(..)); (cursor - edit_diff, cursor, Some((&file_name).into())) }); Some(CompletionItem::Other(core::CompletionItem { kind: Cow::Borrowed(kind), label: file_name.into(), transaction, documentation: Some(documentation), provider: CompletionProvider::Path, })) }) .collect(); CompletionResponse { items: CompletionItems::Other(res), provider: CompletionProvider::Path, context: 
ResponseContext { is_incomplete: false, priority: PRIORITY, savepoint, }, } }; Some(future) } #[cfg(unix)] fn path_documentation(md: &fs::Metadata, full_path: &Path, kind: &str) -> String { let full_path = fold_home_dir(canonicalize(full_path)); let full_path_name = full_path.to_string_lossy(); use std::os::unix::prelude::PermissionsExt; let mode = md.permissions().mode(); let perms = [ (libc::S_IRUSR, 'r'), (libc::S_IWUSR, 'w'), (libc::S_IXUSR, 'x'), (libc::S_IRGRP, 'r'), (libc::S_IWGRP, 'w'), (libc::S_IXGRP, 'x'), (libc::S_IROTH, 'r'), (libc::S_IWOTH, 'w'), (libc::S_IXOTH, 'x'), ] .into_iter() .fold(String::with_capacity(9), |mut acc, (p, s)| { // This cast is necessary on some platforms such as macos as `mode_t` is u16 there #[allow(clippy::unnecessary_cast)] acc.push(if mode & (p as u32) > 0 { s } else { '-' }); acc }); // TODO it would be great to be able to individually color the documentation, // but this will likely require a custom doc implementation (i.e. not `lsp::Documentation`) // and/or different rendering in completion.rs format!( "type: `{kind}`\n\ permissions: `[{perms}]`\n\ full path: `{full_path_name}`", ) } #[cfg(not(unix))] fn path_documentation(_md: &fs::Metadata, full_path: &Path, kind: &str) -> String { let full_path = fold_home_dir(canonicalize(full_path)); let full_path_name = full_path.to_string_lossy(); format!("type: `{kind}`\nfull path: `{full_path_name}`",) } #[cfg(unix)] fn path_kind(md: &fs::Metadata) -> &'static str { if md.is_symlink() { "link" } else if md.is_dir() { "folder" } else { use std::os::unix::fs::FileTypeExt; if md.file_type().is_block_device() { "block" } else if md.file_type().is_socket() { "socket" } else if md.file_type().is_char_device() { "char_device" } else if md.file_type().is_fifo() { "fifo" } else { "file" } } } #[cfg(not(unix))] fn path_kind(md: &fs::Metadata) -> &'static str { if md.is_symlink() { "link" } else if md.is_dir() { "folder" } else { "file" } } 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/handlers/completion/request.rs000066400000000000000000000320561500614314100304670ustar00rootroot00000000000000use std::collections::{HashMap, HashSet}; use std::sync::Arc; use std::time::Duration; use arc_swap::ArcSwap; use futures_util::Future; use helix_core::completion::CompletionProvider; use helix_core::syntax::LanguageServerFeature; use helix_event::{cancelable_future, TaskController, TaskHandle}; use helix_lsp::lsp; use helix_lsp::lsp::{CompletionContext, CompletionTriggerKind}; use helix_lsp::util::pos_to_lsp_pos; use helix_stdx::rope::RopeSliceExt; use helix_view::document::{Mode, SavePoint}; use helix_view::handlers::completion::{CompletionEvent, ResponseContext}; use helix_view::{Document, DocumentId, Editor, ViewId}; use tokio::task::JoinSet; use tokio::time::{timeout_at, Instant}; use crate::compositor::Compositor; use crate::config::Config; use crate::handlers::completion::item::CompletionResponse; use crate::handlers::completion::path::path_completion; use crate::handlers::completion::{ handle_response, replace_completions, show_completion, CompletionItems, }; use crate::job::{dispatch, dispatch_blocking}; use crate::ui; use crate::ui::editor::InsertEvent; #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub(super) enum TriggerKind { Auto, TriggerChar, Manual, } #[derive(Debug, Clone, Copy)] pub(super) struct Trigger { pub(super) pos: usize, pub(super) view: ViewId, pub(super) doc: DocumentId, pub(super) kind: TriggerKind, } #[derive(Debug)] pub struct CompletionHandler { /// The currently active trigger which will cause a completion request after the timeout. 
trigger: Option, in_flight: Option, task_controller: TaskController, config: Arc>, } impl CompletionHandler { pub fn new(config: Arc>) -> CompletionHandler { Self { config, task_controller: TaskController::new(), trigger: None, in_flight: None, } } } impl helix_event::AsyncHook for CompletionHandler { type Event = CompletionEvent; fn handle_event( &mut self, event: Self::Event, _old_timeout: Option, ) -> Option { if self.in_flight.is_some() && !self.task_controller.is_running() { self.in_flight = None; } match event { CompletionEvent::AutoTrigger { cursor: trigger_pos, doc, view, } => { // Technically it shouldn't be possible to switch views/documents in insert mode // but people may create weird keymaps/use the mouse so let's be extra careful. if self .trigger .or(self.in_flight) .map_or(true, |trigger| trigger.doc != doc || trigger.view != view) { self.trigger = Some(Trigger { pos: trigger_pos, view, doc, kind: TriggerKind::Auto, }); } } CompletionEvent::TriggerChar { cursor, doc, view } => { // immediately request completions and drop all auto completion requests self.task_controller.cancel(); self.trigger = Some(Trigger { pos: cursor, view, doc, kind: TriggerKind::TriggerChar, }); } CompletionEvent::ManualTrigger { cursor, doc, view } => { // immediately request completions and drop all auto completion requests self.trigger = Some(Trigger { pos: cursor, view, doc, kind: TriggerKind::Manual, }); // stop debouncing immediately and request the completion self.finish_debounce(); return None; } CompletionEvent::Cancel => { self.trigger = None; self.task_controller.cancel(); } CompletionEvent::DeleteText { cursor } => { // if we deleted the original trigger, abort the completion if matches!(self.trigger.or(self.in_flight), Some(Trigger{ pos, .. 
}) if cursor < pos) { self.trigger = None; self.task_controller.cancel(); } } } self.trigger.map(|trigger| { // if the current request was closed forget about it // otherwise immediately restart the completion request let timeout = if trigger.kind == TriggerKind::Auto { self.config.load().editor.completion_timeout } else { // we want almost instant completions for trigger chars // and restarting completion requests. The small timeout here mainly // serves to better handle cases where the completion handler // may fall behind (so multiple events in the channel) and macros Duration::from_millis(5) }; Instant::now() + timeout }) } fn finish_debounce(&mut self) { let trigger = self.trigger.take().expect("debounce always has a trigger"); self.in_flight = Some(trigger); let handle = self.task_controller.restart(); dispatch_blocking(move |editor, compositor| { request_completions(trigger, handle, editor, compositor) }); } } fn request_completions( mut trigger: Trigger, handle: TaskHandle, editor: &mut Editor, compositor: &mut Compositor, ) { let (view, doc) = current_ref!(editor); if compositor .find::() .unwrap() .completion .is_some() || editor.mode != Mode::Insert { return; } let text = doc.text(); let cursor = doc.selection(view.id).primary().cursor(text.slice(..)); if trigger.view != view.id || trigger.doc != doc.id() || cursor < trigger.pos { return; } // This looks odd... Why are we not using the trigger position from the `trigger` here? Won't // that mean that the trigger char doesn't get send to the language server if we type fast // enough? Yes that is true but it's not actually a problem. The language server will resolve // the completion to the identifier anyway (in fact sending the later position is necessary to // get the right results from language servers that provide incomplete completion list). We // rely on the trigger offset and primary cursor matching for multi-cursor completions so this // is definitely necessary from our side too. 
trigger.pos = cursor; let doc = doc_mut!(editor, &doc.id()); let savepoint = doc.savepoint(view); let text = doc.text(); let trigger_text = text.slice(..cursor); let mut seen_language_servers = HashSet::new(); let language_servers: Vec<_> = doc .language_servers_with_feature(LanguageServerFeature::Completion) .filter(|ls| seen_language_servers.insert(ls.id())) .collect(); let mut requests = JoinSet::new(); for (priority, ls) in language_servers.iter().enumerate() { let context = if trigger.kind == TriggerKind::Manual { lsp::CompletionContext { trigger_kind: lsp::CompletionTriggerKind::INVOKED, trigger_character: None, } } else { let trigger_char = ls.capabilities() .completion_provider .as_ref() .and_then(|provider| { provider .trigger_characters .as_deref()? .iter() .find(|&trigger| trigger_text.ends_with(trigger)) }); if trigger_char.is_some() { lsp::CompletionContext { trigger_kind: lsp::CompletionTriggerKind::TRIGGER_CHARACTER, trigger_character: trigger_char.cloned(), } } else { lsp::CompletionContext { trigger_kind: lsp::CompletionTriggerKind::INVOKED, trigger_character: None, } } }; requests.spawn(request_completions_from_language_server( ls, doc, view.id, context, -(priority as i8), savepoint.clone(), )); } if let Some(path_completion_request) = path_completion( doc.selection(view.id).clone(), doc, handle.clone(), savepoint, ) { requests.spawn_blocking(path_completion_request); } let ui = compositor.find::().unwrap(); ui.last_insert.1.push(InsertEvent::RequestCompletion); let handle_ = handle.clone(); let request_completions = async move { let mut context = HashMap::new(); let Some(mut response) = handle_response(&mut requests, false).await else { return; }; let mut items: Vec<_> = Vec::new(); response.take_items(&mut items); context.insert(response.provider, response.context); let deadline = Instant::now() + Duration::from_millis(100); loop { let Some(mut response) = timeout_at(deadline, handle_response(&mut requests, false)) .await .ok() .flatten() else { 
break; }; response.take_items(&mut items); context.insert(response.provider, response.context); } dispatch(move |editor, compositor| { show_completion(editor, compositor, items, context, trigger) }) .await; if !requests.is_empty() { replace_completions(handle_, requests, false).await; } }; tokio::spawn(cancelable_future(request_completions, handle)); } fn request_completions_from_language_server( ls: &helix_lsp::Client, doc: &Document, view: ViewId, context: lsp::CompletionContext, priority: i8, savepoint: Arc, ) -> impl Future { let provider = ls.id(); let offset_encoding = ls.offset_encoding(); let text = doc.text(); let cursor = doc.selection(view).primary().cursor(text.slice(..)); let pos = pos_to_lsp_pos(text, cursor, offset_encoding); let doc_id = doc.identifier(); // it's important that this is before the async block (and that this is not an async function) // to ensure the request is dispatched right away before any new edit notifications let completion_response = ls.completion(doc_id, pos, None, context).unwrap(); async move { let response: Option = completion_response .await .inspect_err(|err| log::error!("completion request failed: {err}")) .ok() .flatten(); let (mut items, is_incomplete) = match response { Some(lsp::CompletionResponse::Array(items)) => (items, false), Some(lsp::CompletionResponse::List(lsp::CompletionList { is_incomplete, items, })) => (items, is_incomplete), None => (Vec::new(), false), }; items.sort_by(|item1, item2| { let sort_text1 = item1.sort_text.as_deref().unwrap_or(&item1.label); let sort_text2 = item2.sort_text.as_deref().unwrap_or(&item2.label); sort_text1.cmp(sort_text2) }); CompletionResponse { items: CompletionItems::Lsp(items), context: ResponseContext { is_incomplete, priority, savepoint, }, provider: CompletionProvider::Lsp(provider), } } } pub fn request_incomplete_completion_list(editor: &mut Editor, handle: TaskHandle) { let handler = &mut editor.handlers.completions; let mut requests = JoinSet::new(); let mut 
savepoint = None; for (&provider, context) in &handler.active_completions { if !context.is_incomplete { continue; } let CompletionProvider::Lsp(ls_id) = provider else { log::error!("non-lsp incomplete completion lists"); continue; }; let Some(ls) = editor.language_servers.get_by_id(ls_id) else { continue; }; let (view, doc) = current!(editor); let savepoint = savepoint.get_or_insert_with(|| doc.savepoint(view)).clone(); let request = request_completions_from_language_server( ls, doc, view.id, CompletionContext { trigger_kind: CompletionTriggerKind::TRIGGER_FOR_INCOMPLETE_COMPLETIONS, trigger_character: None, }, context.priority, savepoint, ); requests.spawn(request); } if !requests.is_empty() { tokio::spawn(replace_completions(handle, requests, true)); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/handlers/completion/resolve.rs000066400000000000000000000135611500614314100304560ustar00rootroot00000000000000use std::sync::Arc; use helix_lsp::lsp; use tokio::sync::mpsc::Sender; use tokio::time::{Duration, Instant}; use helix_event::{send_blocking, AsyncHook, TaskController, TaskHandle}; use helix_view::Editor; use super::LspCompletionItem; use crate::handlers::completion::CompletionItem; use crate::job; /// A hook for resolving incomplete completion items. /// /// From the [LSP spec](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_completion): /// /// > If computing full completion items is expensive, servers can additionally provide a /// > handler for the completion item resolve request. ... /// > A typical use case is for example: the `textDocument/completion` request doesn't fill /// > in the `documentation` property for returned completion items since it is expensive /// > to compute. When the item is selected in the user interface then a /// > 'completionItem/resolve' request is sent with the selected completion item as a parameter. 
/// > The returned completion item should have the documentation property filled in. pub struct ResolveHandler { last_request: Option>, resolver: Sender, } impl ResolveHandler { pub fn new() -> ResolveHandler { ResolveHandler { last_request: None, resolver: ResolveTimeout::default().spawn(), } } pub fn ensure_item_resolved(&mut self, editor: &mut Editor, item: &mut LspCompletionItem) { if item.resolved { return; } // We consider an item to be fully resolved if it has non-empty, none-`None` details, // docs and additional text-edits. Ideally we could use `is_some` instead of this // check but some language servers send values like `Some([])` for additional text // edits although the items need to be resolved. This is probably a consequence of // how `null` works in the JavaScript world. let is_resolved = item .item .documentation .as_ref() .is_some_and(|docs| match docs { lsp::Documentation::String(text) => !text.is_empty(), lsp::Documentation::MarkupContent(markup) => !markup.value.is_empty(), }) && item .item .detail .as_ref() .is_some_and(|detail| !detail.is_empty()) && item .item .additional_text_edits .as_ref() .is_some_and(|edits| !edits.is_empty()); if is_resolved { item.resolved = true; return; } if self.last_request.as_deref().is_some_and(|it| it == item) { return; } let Some(ls) = editor.language_servers.get_by_id(item.provider).cloned() else { item.resolved = true; return; }; if matches!( ls.capabilities().completion_provider, Some(lsp::CompletionOptions { resolve_provider: Some(true), .. 
}) ) { let item = Arc::new(item.clone()); self.last_request = Some(item.clone()); send_blocking(&self.resolver, ResolveRequest { item, ls }) } else { item.resolved = true; } } } struct ResolveRequest { item: Arc, ls: Arc, } #[derive(Default)] struct ResolveTimeout { next_request: Option, in_flight: Option>, task_controller: TaskController, } impl AsyncHook for ResolveTimeout { type Event = ResolveRequest; fn handle_event( &mut self, request: Self::Event, timeout: Option, ) -> Option { if self .next_request .as_ref() .is_some_and(|old_request| old_request.item == request.item) { timeout } else if self .in_flight .as_ref() .is_some_and(|old_request| old_request.item == request.item.item) { self.next_request = None; None } else { self.next_request = Some(request); Some(Instant::now() + Duration::from_millis(150)) } } fn finish_debounce(&mut self) { let Some(request) = self.next_request.take() else { return; }; let token = self.task_controller.restart(); self.in_flight = Some(request.item.clone()); tokio::spawn(request.execute(token)); } } impl ResolveRequest { async fn execute(self, cancel: TaskHandle) { let future = self.ls.resolve_completion_item(&self.item.item); let Some(resolved_item) = helix_event::cancelable_future(future, cancel).await else { return; }; job::dispatch(move |_, compositor| { if let Some(completion) = &mut compositor .find::() .unwrap() .completion { let resolved_item = CompletionItem::Lsp(match resolved_item { Ok(item) => LspCompletionItem { item, resolved: true, ..*self.item }, Err(err) => { log::error!("completion resolve request failed: {err}"); // set item to resolved so we don't request it again // we could also remove it but that oculd be odd ui let mut item = (*self.item).clone(); item.resolved = true; item } }); completion.replace_item(&*self.item, resolved_item); }; }) .await } } 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/handlers/diagnostics.rs000066400000000000000000000015231500614314100271300ustar00rootroot00000000000000use helix_event::{register_hook, send_blocking}; use helix_view::document::Mode; use helix_view::events::DiagnosticsDidChange; use helix_view::handlers::diagnostics::DiagnosticEvent; use helix_view::handlers::Handlers; use crate::events::OnModeSwitch; pub(super) fn register_hooks(_handlers: &Handlers) { register_hook!(move |event: &mut DiagnosticsDidChange<'_>| { if event.editor.mode != Mode::Insert { for (view, _) in event.editor.tree.views_mut() { send_blocking(&view.diagnostics_handler.events, DiagnosticEvent::Refresh) } } Ok(()) }); register_hook!(move |event: &mut OnModeSwitch<'_, '_>| { for (view, _) in event.cx.editor.tree.views_mut() { view.diagnostics_handler.active = event.new_mode != Mode::Insert; } Ok(()) }); } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/handlers/document_colors.rs000066400000000000000000000152431500614314100300240ustar00rootroot00000000000000use std::{collections::HashSet, time::Duration}; use futures_util::{stream::FuturesOrdered, StreamExt}; use helix_core::{syntax::LanguageServerFeature, text_annotations::InlineAnnotation}; use helix_event::{cancelable_future, register_hook}; use helix_lsp::lsp; use helix_view::{ document::DocumentColorSwatches, events::{DocumentDidChange, DocumentDidOpen, LanguageServerExited, LanguageServerInitialized}, handlers::{lsp::DocumentColorsEvent, Handlers}, DocumentId, Editor, Theme, }; use tokio::time::Instant; use crate::job; #[derive(Default)] pub(super) struct DocumentColorsHandler { docs: HashSet, } const DOCUMENT_CHANGE_DEBOUNCE: Duration = Duration::from_millis(250); impl helix_event::AsyncHook for DocumentColorsHandler { type Event = DocumentColorsEvent; fn handle_event(&mut self, event: Self::Event, _timeout: Option) -> Option { let DocumentColorsEvent(doc_id) = event; self.docs.insert(doc_id); Some(Instant::now() + 
DOCUMENT_CHANGE_DEBOUNCE) } fn finish_debounce(&mut self) { let docs = std::mem::take(&mut self.docs); job::dispatch_blocking(move |editor, _compositor| { for doc in docs { request_document_colors(editor, doc); } }); } } fn request_document_colors(editor: &mut Editor, doc_id: DocumentId) { if !editor.config().lsp.display_color_swatches { return; } let Some(doc) = editor.document_mut(doc_id) else { return; }; let cancel = doc.color_swatch_controller.restart(); let mut seen_language_servers = HashSet::new(); let mut futures: FuturesOrdered<_> = doc .language_servers_with_feature(LanguageServerFeature::DocumentColors) .filter(|ls| seen_language_servers.insert(ls.id())) .map(|language_server| { let text = doc.text().clone(); let offset_encoding = language_server.offset_encoding(); let future = language_server .text_document_document_color(doc.identifier(), None) .unwrap(); async move { let colors: Vec<_> = future .await? .into_iter() .filter_map(|color_info| { let pos = helix_lsp::util::lsp_pos_to_pos( &text, color_info.range.start, offset_encoding, )?; Some((pos, color_info.color)) }) .collect(); anyhow::Ok(colors) } }) .collect(); tokio::spawn(async move { let mut all_colors = Vec::new(); loop { match cancelable_future(futures.next(), &cancel).await { Some(Some(Ok(items))) => all_colors.extend(items), Some(Some(Err(err))) => log::error!("document color request failed: {err}"), Some(None) => break, // The request was cancelled. 
None => return, } } job::dispatch(move |editor, _| attach_document_colors(editor, doc_id, all_colors)).await; }); } fn attach_document_colors( editor: &mut Editor, doc_id: DocumentId, mut doc_colors: Vec<(usize, lsp::Color)>, ) { if !editor.config().lsp.display_color_swatches { return; } let Some(doc) = editor.documents.get_mut(&doc_id) else { return; }; if doc_colors.is_empty() { doc.color_swatches.take(); return; } doc_colors.sort_by_key(|(pos, _)| *pos); let mut color_swatches = Vec::with_capacity(doc_colors.len()); let mut color_swatches_padding = Vec::with_capacity(doc_colors.len()); let mut colors = Vec::with_capacity(doc_colors.len()); for (pos, color) in doc_colors { color_swatches_padding.push(InlineAnnotation::new(pos, " ")); color_swatches.push(InlineAnnotation::new(pos, "■")); colors.push(Theme::rgb_highlight( (color.red * 255.) as u8, (color.green * 255.) as u8, (color.blue * 255.) as u8, )); } doc.color_swatches = Some(DocumentColorSwatches { color_swatches, colors, color_swatches_padding, }); } pub(super) fn register_hooks(handlers: &Handlers) { register_hook!(move |event: &mut DocumentDidOpen<'_>| { // when a document is initially opened, request colors for it request_document_colors(event.editor, event.doc); Ok(()) }); let tx = handlers.document_colors.clone(); register_hook!(move |event: &mut DocumentDidChange<'_>| { // Update the color swatch' positions, helping ensure they are displayed in the // proper place. 
let apply_color_swatch_changes = |annotations: &mut Vec| { event.changes.update_positions( annotations .iter_mut() .map(|annotation| (&mut annotation.char_idx, helix_core::Assoc::After)), ); }; if let Some(DocumentColorSwatches { color_swatches, colors: _colors, color_swatches_padding, }) = &mut event.doc.color_swatches { apply_color_swatch_changes(color_swatches); apply_color_swatch_changes(color_swatches_padding); } // Avoid re-requesting document colors if the change is a ghost transaction (completion) // because the language server will not know about the updates to the document and will // give out-of-date locations. if !event.ghost_transaction { // Cancel the ongoing request, if present. event.doc.color_swatch_controller.cancel(); helix_event::send_blocking(&tx, DocumentColorsEvent(event.doc.id())); } Ok(()) }); register_hook!(move |event: &mut LanguageServerInitialized<'_>| { let doc_ids: Vec<_> = event.editor.documents().map(|doc| doc.id()).collect(); for doc_id in doc_ids { request_document_colors(event.editor, doc_id); } Ok(()) }); register_hook!(move |event: &mut LanguageServerExited<'_>| { // Clear and re-request all color swatches when a server exits. 
for doc in event.editor.documents_mut() { if doc.supports_language_server(event.server_id) { doc.color_swatches.take(); } } let doc_ids: Vec<_> = event.editor.documents().map(|doc| doc.id()).collect(); for doc_id in doc_ids { request_document_colors(event.editor, doc_id); } Ok(()) }); } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/handlers/signature_help.rs000066400000000000000000000302131500614314100276300ustar00rootroot00000000000000use std::sync::Arc; use std::time::Duration; use helix_core::syntax::LanguageServerFeature; use helix_event::{cancelable_future, register_hook, send_blocking, TaskController, TaskHandle}; use helix_lsp::lsp::{self, SignatureInformation}; use helix_stdx::rope::RopeSliceExt; use helix_view::document::Mode; use helix_view::events::{DocumentDidChange, SelectionDidChange}; use helix_view::handlers::lsp::{SignatureHelpEvent, SignatureHelpInvoked}; use helix_view::Editor; use tokio::sync::mpsc::Sender; use tokio::time::Instant; use crate::commands::Open; use crate::compositor::Compositor; use crate::events::{OnModeSwitch, PostInsertChar}; use crate::handlers::Handlers; use crate::ui::lsp::signature_help::{Signature, SignatureHelp}; use crate::ui::Popup; use crate::{job, ui}; #[derive(Debug, PartialEq, Eq)] enum State { Open, Closed, Pending, } /// debounce timeout in ms, value taken from VSCode /// TODO: make this configurable? 
const TIMEOUT: u64 = 120; #[derive(Debug)] pub(super) struct SignatureHelpHandler { trigger: Option, state: State, task_controller: TaskController, } impl SignatureHelpHandler { pub fn new() -> SignatureHelpHandler { SignatureHelpHandler { trigger: None, state: State::Closed, task_controller: TaskController::new(), } } } impl helix_event::AsyncHook for SignatureHelpHandler { type Event = SignatureHelpEvent; fn handle_event( &mut self, event: Self::Event, timeout: Option, ) -> Option { match event { SignatureHelpEvent::Invoked => { self.trigger = Some(SignatureHelpInvoked::Manual); self.state = State::Closed; self.finish_debounce(); return None; } SignatureHelpEvent::Trigger => {} SignatureHelpEvent::ReTrigger => { // don't retrigger if we aren't open/pending yet if matches!(self.state, State::Closed) { return timeout; } } SignatureHelpEvent::Cancel => { self.state = State::Closed; return None; } SignatureHelpEvent::RequestComplete { open } => { // don't cancel rerequest that was already triggered if self.state == State::Pending && self.task_controller.is_running() { return timeout; } self.state = if open { State::Open } else { State::Closed }; self.task_controller.cancel(); return timeout; } } if self.trigger.is_none() { self.trigger = Some(SignatureHelpInvoked::Automatic) } Some(Instant::now() + Duration::from_millis(TIMEOUT)) } fn finish_debounce(&mut self) { let invocation = self.trigger.take().unwrap(); self.state = State::Pending; let handle = self.task_controller.restart(); job::dispatch_blocking(move |editor, _| request_signature_help(editor, invocation, handle)) } } pub fn request_signature_help( editor: &mut Editor, invoked: SignatureHelpInvoked, cancel: TaskHandle, ) { let (view, doc) = current!(editor); // TODO merge multiple language server signature help into one instead of just taking the first language server that supports it let future = doc .language_servers_with_feature(LanguageServerFeature::SignatureHelp) .find_map(|language_server| { let pos = 
doc.position(view.id, language_server.offset_encoding()); language_server.text_document_signature_help(doc.identifier(), pos, None) }); let Some(future) = future else { // Do not show the message if signature help was invoked // automatically on backspace, trigger characters, etc. if invoked == SignatureHelpInvoked::Manual { editor.set_error("No configured language server supports signature-help"); } return; }; tokio::spawn(async move { match cancelable_future(future, cancel).await { Some(Ok(res)) => { job::dispatch(move |editor, compositor| { show_signature_help(editor, compositor, invoked, res) }) .await } Some(Err(err)) => log::error!("signature help request failed: {err}"), None => (), } }); } fn active_param_range( signature: &SignatureInformation, response_active_parameter: Option, ) -> Option<(usize, usize)> { let param_idx = signature .active_parameter .or(response_active_parameter) .unwrap_or(0) as usize; let param = signature.parameters.as_ref()?.get(param_idx)?; match ¶m.label { lsp::ParameterLabel::Simple(string) => { let start = signature.label.find(string.as_str())?; Some((start, start + string.len())) } lsp::ParameterLabel::LabelOffsets([start, end]) => { // LS sends offsets based on utf-16 based string representation // but highlighting in helix is done using byte offset. 
use helix_core::str_utils::char_to_byte_idx; let from = char_to_byte_idx(&signature.label, *start as usize); let to = char_to_byte_idx(&signature.label, *end as usize); Some((from, to)) } } } pub fn show_signature_help( editor: &mut Editor, compositor: &mut Compositor, invoked: SignatureHelpInvoked, response: Option, ) { let config = &editor.config(); if !(config.lsp.auto_signature_help || SignatureHelp::visible_popup(compositor).is_some() || invoked == SignatureHelpInvoked::Manual) { return; } // If the signature help invocation is automatic, don't show it outside of Insert Mode: // it very probably means the server was a little slow to respond and the user has // already moved on to something else, making a signature help popup will just be an // annoyance, see https://github.com/helix-editor/helix/issues/3112 // For the most part this should not be needed as the request gets canceled automatically now // but it's technically possible for the mode change to just preempt this callback so better safe than sorry if invoked == SignatureHelpInvoked::Automatic && editor.mode != Mode::Insert { return; } let response = match response { // According to the spec the response should be None if there // are no signatures, but some servers don't follow this. 
Some(s) if !s.signatures.is_empty() => s, _ => { send_blocking( &editor.handlers.signature_hints, SignatureHelpEvent::RequestComplete { open: false }, ); compositor.remove(SignatureHelp::ID); return; } }; send_blocking( &editor.handlers.signature_hints, SignatureHelpEvent::RequestComplete { open: true }, ); let doc = doc!(editor); let language = doc.language_name().unwrap_or(""); if response.signatures.is_empty() { return; } let signatures: Vec = response .signatures .into_iter() .map(|s| { let active_param_range = active_param_range(&s, response.active_parameter); let signature_doc = if config.lsp.display_signature_help_docs { s.documentation.map(|doc| match doc { lsp::Documentation::String(s) => s, lsp::Documentation::MarkupContent(markup) => markup.value, }) } else { None }; Signature { signature: s.label, signature_doc, active_param_range, } }) .collect(); let old_popup = compositor.find_id::>(SignatureHelp::ID); let lsp_signature = response.active_signature.map(|s| s as usize); // take the new suggested lsp signature if changed // otherwise take the old signature if possible // otherwise the last one (in case there is less signatures than before) let active_signature = old_popup .as_ref() .map(|popup| { let old_lsp_sig = popup.contents().lsp_signature(); let old_sig = popup .contents() .active_signature() .min(signatures.len() - 1); if old_lsp_sig != lsp_signature { lsp_signature.unwrap_or(old_sig) } else { old_sig } }) .unwrap_or(lsp_signature.unwrap_or_default()); let contents = SignatureHelp::new( language.to_string(), Arc::clone(&editor.syn_loader), active_signature, lsp_signature, signatures, ); let mut popup = Popup::new(SignatureHelp::ID, contents) .position(old_popup.and_then(|p| p.get_position())) .position_bias(Open::Above) .ignore_escape_key(true); // Don't create a popup if it intersects the auto-complete menu. 
let size = compositor.size(); if compositor .find::() .unwrap() .completion .as_mut() .map(|completion| completion.area(size, editor)) .filter(|area| area.intersects(popup.area(size, editor))) .is_some() { return; } compositor.replace_or_push(SignatureHelp::ID, popup); } fn signature_help_post_insert_char_hook( tx: &Sender, PostInsertChar { cx, .. }: &mut PostInsertChar<'_, '_>, ) -> anyhow::Result<()> { if !cx.editor.config().lsp.auto_signature_help { return Ok(()); } let (view, doc) = current!(cx.editor); // TODO support multiple language servers (not just the first that is found), likely by merging UI somehow let Some(language_server) = doc .language_servers_with_feature(LanguageServerFeature::SignatureHelp) .next() else { return Ok(()); }; let capabilities = language_server.capabilities(); if let lsp::ServerCapabilities { signature_help_provider: Some(lsp::SignatureHelpOptions { trigger_characters: Some(triggers), // TODO: retrigger_characters .. }), .. } = capabilities { let mut text = doc.text().slice(..); let cursor = doc.selection(view.id).primary().cursor(text); text = text.slice(..cursor); if triggers.iter().any(|trigger| text.ends_with(trigger)) { send_blocking(tx, SignatureHelpEvent::Trigger) } } Ok(()) } pub(super) fn register_hooks(handlers: &Handlers) { let tx = handlers.signature_hints.clone(); register_hook!(move |event: &mut OnModeSwitch<'_, '_>| { match (event.old_mode, event.new_mode) { (Mode::Insert, _) => { send_blocking(&tx, SignatureHelpEvent::Cancel); event.cx.callback.push(Box::new(|compositor, _| { compositor.remove(SignatureHelp::ID); })); } (_, Mode::Insert) => { if event.cx.editor.config().lsp.auto_signature_help { send_blocking(&tx, SignatureHelpEvent::Trigger); } } _ => (), } Ok(()) }); let tx = handlers.signature_hints.clone(); register_hook!( move |event: &mut PostInsertChar<'_, '_>| signature_help_post_insert_char_hook(&tx, event) ); let tx = handlers.signature_hints.clone(); register_hook!(move |event: &mut DocumentDidChange<'_>| 
{ if event.doc.config.load().lsp.auto_signature_help && !event.ghost_transaction { send_blocking(&tx, SignatureHelpEvent::ReTrigger); } Ok(()) }); let tx = handlers.signature_hints.clone(); register_hook!(move |event: &mut SelectionDidChange<'_>| { if event.doc.config.load().lsp.auto_signature_help { send_blocking(&tx, SignatureHelpEvent::ReTrigger); } Ok(()) }); } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/handlers/snippet.rs000066400000000000000000000017071500614314100263070ustar00rootroot00000000000000use helix_event::register_hook; use helix_view::events::{DocumentDidChange, DocumentFocusLost, SelectionDidChange}; use helix_view::handlers::Handlers; pub(super) fn register_hooks(_handlers: &Handlers) { register_hook!(move |event: &mut SelectionDidChange<'_>| { if let Some(snippet) = &event.doc.active_snippet { if !snippet.is_valid(event.doc.selection(event.view)) { event.doc.active_snippet = None; } } Ok(()) }); register_hook!(move |event: &mut DocumentDidChange<'_>| { if let Some(snippet) = &mut event.doc.active_snippet { let invalid = snippet.map(event.changes); if invalid { event.doc.active_snippet = None; } } Ok(()) }); register_hook!(move |event: &mut DocumentFocusLost<'_>| { let editor = &mut event.editor; doc_mut!(editor).active_snippet = None; Ok(()) }); } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/health.rs000066400000000000000000000302171500614314100242700ustar00rootroot00000000000000use crate::config::{Config, ConfigLoadError}; use crossterm::{ style::{Color, StyledContent, Stylize}, tty::IsTty, }; use helix_core::config::{default_lang_config, user_lang_config}; use helix_loader::grammar::load_runtime_file; use std::io::Write; #[derive(Copy, Clone)] pub enum TsFeature { Highlight, TextObject, AutoIndent, } impl TsFeature { pub fn all() -> &'static [Self] { &[Self::Highlight, Self::TextObject, Self::AutoIndent] } pub fn runtime_filename(&self) -> &'static str { match *self { Self::Highlight => "highlights.scm", 
Self::TextObject => "textobjects.scm", Self::AutoIndent => "indents.scm", } } pub fn long_title(&self) -> &'static str { match *self { Self::Highlight => "Syntax Highlighting", Self::TextObject => "Treesitter Textobjects", Self::AutoIndent => "Auto Indent", } } pub fn short_title(&self) -> &'static str { match *self { Self::Highlight => "Highlight", Self::TextObject => "Textobject", Self::AutoIndent => "Indent", } } } /// Display general diagnostics. pub fn general() -> std::io::Result<()> { let stdout = std::io::stdout(); let mut stdout = stdout.lock(); let config_file = helix_loader::config_file(); let lang_file = helix_loader::lang_config_file(); let log_file = helix_loader::log_file(); let rt_dirs = helix_loader::runtime_dirs(); if config_file.exists() { writeln!(stdout, "Config file: {}", config_file.display())?; } else { writeln!(stdout, "Config file: default")?; } if lang_file.exists() { writeln!(stdout, "Language file: {}", lang_file.display())?; } else { writeln!(stdout, "Language file: default")?; } writeln!(stdout, "Log file: {}", log_file.display())?; writeln!( stdout, "Runtime directories: {}", rt_dirs .iter() .map(|d| d.to_string_lossy()) .collect::>() .join(";") )?; for rt_dir in rt_dirs.iter() { if let Ok(path) = std::fs::read_link(rt_dir) { let msg = format!( "Runtime directory {} is symlinked to: {}", rt_dir.display(), path.display() ); writeln!(stdout, "{}", msg.yellow())?; } if !rt_dir.exists() { let msg = format!("Runtime directory does not exist: {}", rt_dir.display()); writeln!(stdout, "{}", msg.yellow())?; } else if rt_dir.read_dir().ok().map(|it| it.count()) == Some(0) { let msg = format!("Runtime directory is empty: {}", rt_dir.display()); writeln!(stdout, "{}", msg.yellow())?; } } Ok(()) } pub fn clipboard() -> std::io::Result<()> { let stdout = std::io::stdout(); let mut stdout = stdout.lock(); let config = match Config::load_default() { Ok(config) => config, Err(ConfigLoadError::Error(err)) if err.kind() == std::io::ErrorKind::NotFound 
=> { Config::default() } Err(err) => { writeln!(stdout, "{}", "Configuration file malformed".red())?; writeln!(stdout, "{}", err)?; return Ok(()); } }; match config.editor.clipboard_provider.name().as_ref() { "none" => { writeln!( stdout, "{}", "System clipboard provider: Not installed".red() )?; writeln!( stdout, " {}", "For troubleshooting system clipboard issues, refer".red() )?; writeln!(stdout, " {}", "https://github.com/helix-editor/helix/wiki/Troubleshooting#copypaste-fromto-system-clipboard-not-working" .red().underlined())?; } name => writeln!(stdout, "System clipboard provider: {}", name)?, } Ok(()) } pub fn languages_all() -> std::io::Result<()> { let stdout = std::io::stdout(); let mut stdout = stdout.lock(); let mut syn_loader_conf = match user_lang_config() { Ok(conf) => conf, Err(err) => { let stderr = std::io::stderr(); let mut stderr = stderr.lock(); writeln!( stderr, "{}: {}", "Error parsing user language config".red(), err )?; writeln!(stderr, "{}", "Using default language config".yellow())?; default_lang_config() } }; let mut headings = vec!["Language", "Language servers", "Debug adapter", "Formatter"]; for feat in TsFeature::all() { headings.push(feat.short_title()) } let terminal_cols = crossterm::terminal::size().map(|(c, _)| c).unwrap_or(80); let column_width = terminal_cols as usize / headings.len(); let is_terminal = std::io::stdout().is_tty(); let fit = |s: &str| -> StyledContent { format!( "{:column_width$}", s.get(..column_width - 2) .map(|s| format!("{}…", s)) .unwrap_or_else(|| s.to_string()) ) .stylize() }; let color = |s: StyledContent, c: Color| if is_terminal { s.with(c) } else { s }; let bold = |s: StyledContent| if is_terminal { s.bold() } else { s }; for heading in headings { write!(stdout, "{}", bold(fit(heading)))?; } writeln!(stdout)?; syn_loader_conf .language .sort_unstable_by_key(|l| l.language_id.clone()); let check_binary = |cmd: Option<&str>| match cmd { Some(cmd) => match helix_stdx::env::which(cmd) { Ok(_) => 
color(fit(&format!("✓ {}", cmd)), Color::Green), Err(_) => color(fit(&format!("✘ {}", cmd)), Color::Red), }, None => color(fit("None"), Color::Yellow), }; for lang in &syn_loader_conf.language { write!(stdout, "{}", fit(&lang.language_id))?; let mut cmds = lang.language_servers.iter().filter_map(|ls| { syn_loader_conf .language_server .get(&ls.name) .map(|config| config.command.as_str()) }); write!(stdout, "{}", check_binary(cmds.next()))?; let dap = lang.debugger.as_ref().map(|dap| dap.command.as_str()); write!(stdout, "{}", check_binary(dap))?; let formatter = lang .formatter .as_ref() .map(|formatter| formatter.command.as_str()); write!(stdout, "{}", check_binary(formatter))?; for ts_feat in TsFeature::all() { match load_runtime_file(&lang.language_id, ts_feat.runtime_filename()).is_ok() { true => write!(stdout, "{}", color(fit("✓"), Color::Green))?, false => write!(stdout, "{}", color(fit("✘"), Color::Red))?, } } writeln!(stdout)?; for cmd in cmds { write!(stdout, "{}", fit(""))?; writeln!(stdout, "{}", check_binary(Some(cmd)))?; } } Ok(()) } /// Display diagnostics pertaining to a particular language (LSP, /// highlight queries, etc). 
pub fn language(lang_str: String) -> std::io::Result<()> { let stdout = std::io::stdout(); let mut stdout = stdout.lock(); let syn_loader_conf = match user_lang_config() { Ok(conf) => conf, Err(err) => { let stderr = std::io::stderr(); let mut stderr = stderr.lock(); writeln!( stderr, "{}: {}", "Error parsing user language config".red(), err )?; writeln!(stderr, "{}", "Using default language config".yellow())?; default_lang_config() } }; let lang = match syn_loader_conf .language .iter() .find(|l| l.language_id == lang_str) { Some(l) => l, None => { let msg = format!("Language '{}' not found", lang_str); writeln!(stdout, "{}", msg.red())?; let suggestions: Vec<&str> = syn_loader_conf .language .iter() .filter(|l| l.language_id.starts_with(lang_str.chars().next().unwrap())) .map(|l| l.language_id.as_str()) .collect(); if !suggestions.is_empty() { let suggestions = suggestions.join(", "); writeln!( stdout, "Did you mean one of these: {} ?", suggestions.yellow() )?; } return Ok(()); } }; probe_protocols( "language server", lang.language_servers .iter() .filter_map(|ls| syn_loader_conf.language_server.get(&ls.name)) .map(|config| config.command.as_str()), )?; probe_protocol( "debug adapter", lang.debugger.as_ref().map(|dap| dap.command.to_string()), )?; probe_protocol( "formatter", lang.formatter .as_ref() .map(|formatter| formatter.command.to_string()), )?; probe_parser(lang.grammar.as_ref().unwrap_or(&lang.language_id))?; for ts_feat in TsFeature::all() { probe_treesitter_feature(&lang_str, *ts_feat)? } Ok(()) } fn probe_parser(grammar_name: &str) -> std::io::Result<()> { let stdout = std::io::stdout(); let mut stdout = stdout.lock(); write!(stdout, "Tree-sitter parser: ")?; match helix_loader::grammar::get_language(grammar_name) { Ok(_) => writeln!(stdout, "{}", "✓".green()), Err(_) => writeln!(stdout, "{}", "None".yellow()), } } /// Display diagnostics about multiple LSPs and DAPs. 
fn probe_protocols<'a, I: Iterator + 'a>( protocol_name: &str, server_cmds: I, ) -> std::io::Result<()> { let stdout = std::io::stdout(); let mut stdout = stdout.lock(); let mut server_cmds = server_cmds.peekable(); write!(stdout, "Configured {}s:", protocol_name)?; if server_cmds.peek().is_none() { writeln!(stdout, "{}", " None".yellow())?; return Ok(()); } writeln!(stdout)?; for cmd in server_cmds { let (path, icon) = match helix_stdx::env::which(cmd) { Ok(path) => (path.display().to_string().green(), "✓".green()), Err(_) => (format!("'{}' not found in $PATH", cmd).red(), "✘".red()), }; writeln!(stdout, " {} {}: {}", icon, cmd, path)?; } Ok(()) } /// Display diagnostics about LSP and DAP. fn probe_protocol(protocol_name: &str, server_cmd: Option) -> std::io::Result<()> { let stdout = std::io::stdout(); let mut stdout = stdout.lock(); let cmd_name = match server_cmd { Some(ref cmd) => cmd.as_str().green(), None => "None".yellow(), }; writeln!(stdout, "Configured {}: {}", protocol_name, cmd_name)?; if let Some(cmd) = server_cmd { let path = match helix_stdx::env::which(&cmd) { Ok(path) => path.display().to_string().green(), Err(_) => format!("'{}' not found in $PATH", cmd).red(), }; writeln!(stdout, "Binary for {}: {}", protocol_name, path)?; } Ok(()) } /// Display diagnostics about a feature that requires tree-sitter /// query files (highlights, textobjects, etc). 
fn probe_treesitter_feature(lang: &str, feature: TsFeature) -> std::io::Result<()> { let stdout = std::io::stdout(); let mut stdout = stdout.lock(); let found = match load_runtime_file(lang, feature.runtime_filename()).is_ok() { true => "✓".green(), false => "✘".red(), }; writeln!(stdout, "{} queries: {}", feature.short_title(), found)?; Ok(()) } pub fn print_health(health_arg: Option) -> std::io::Result<()> { match health_arg.as_deref() { Some("languages") => languages_all()?, Some("clipboard") => clipboard()?, None | Some("all") => { general()?; clipboard()?; writeln!(std::io::stdout().lock())?; languages_all()?; } Some(lang) => language(lang.to_string())?, } Ok(()) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/job.rs000066400000000000000000000121121500614314100235670ustar00rootroot00000000000000use helix_event::status::StatusMessage; use helix_event::{runtime_local, send_blocking}; use helix_view::Editor; use once_cell::sync::OnceCell; use crate::compositor::Compositor; use futures_util::future::{BoxFuture, Future, FutureExt}; use futures_util::stream::{FuturesUnordered, StreamExt}; use tokio::sync::mpsc::{channel, Receiver, Sender}; pub type EditorCompositorCallback = Box; pub type EditorCallback = Box; runtime_local! 
{ static JOB_QUEUE: OnceCell> = OnceCell::new(); } pub async fn dispatch_callback(job: Callback) { let _ = JOB_QUEUE.wait().send(job).await; } pub async fn dispatch(job: impl FnOnce(&mut Editor, &mut Compositor) + Send + 'static) { let _ = JOB_QUEUE .wait() .send(Callback::EditorCompositor(Box::new(job))) .await; } pub fn dispatch_blocking(job: impl FnOnce(&mut Editor, &mut Compositor) + Send + 'static) { let jobs = JOB_QUEUE.wait(); send_blocking(jobs, Callback::EditorCompositor(Box::new(job))) } pub enum Callback { EditorCompositor(EditorCompositorCallback), Editor(EditorCallback), } pub type JobFuture = BoxFuture<'static, anyhow::Result>>; pub struct Job { pub future: BoxFuture<'static, anyhow::Result>>, /// Do we need to wait for this job to finish before exiting? pub wait: bool, } pub struct Jobs { /// jobs that need to complete before we exit. pub wait_futures: FuturesUnordered, pub callbacks: Receiver, pub status_messages: Receiver, } impl Job { pub fn new> + Send + 'static>(f: F) -> Self { Self { future: f.map(|r| r.map(|()| None)).boxed(), wait: false, } } pub fn with_callback> + Send + 'static>( f: F, ) -> Self { Self { future: f.map(|r| r.map(Some)).boxed(), wait: false, } } pub fn wait_before_exiting(mut self) -> Self { self.wait = true; self } } impl Jobs { #[allow(clippy::new_without_default)] pub fn new() -> Self { let (tx, rx) = channel(1024); let _ = JOB_QUEUE.set(tx); let status_messages = helix_event::status::setup(); Self { wait_futures: FuturesUnordered::new(), callbacks: rx, status_messages, } } pub fn spawn> + Send + 'static>(&mut self, f: F) { self.add(Job::new(f)); } pub fn callback> + Send + 'static>( &mut self, f: F, ) { self.add(Job::with_callback(f)); } pub fn handle_callback( &self, editor: &mut Editor, compositor: &mut Compositor, call: anyhow::Result>, ) { match call { Ok(None) => {} Ok(Some(call)) => match call { Callback::EditorCompositor(call) => call(editor, compositor), Callback::Editor(call) => call(editor), }, Err(e) => { 
editor.set_error(format!("Async job failed: {}", e)); } } } pub fn add(&self, j: Job) { if j.wait { self.wait_futures.push(j.future); } else { tokio::spawn(async move { match j.future.await { Ok(Some(cb)) => dispatch_callback(cb).await, Ok(None) => (), Err(err) => helix_event::status::report(err).await, } }); } } /// Blocks until all the jobs that need to be waited on are done. pub async fn finish( &mut self, editor: &mut Editor, mut compositor: Option<&mut Compositor>, ) -> anyhow::Result<()> { log::debug!("waiting on jobs..."); let mut wait_futures = std::mem::take(&mut self.wait_futures); while let (Some(job), tail) = wait_futures.into_future().await { match job { Ok(callback) => { wait_futures = tail; if let Some(callback) = callback { // clippy doesn't realize this is an error without the derefs #[allow(clippy::needless_option_as_deref)] match callback { Callback::EditorCompositor(call) if compositor.is_some() => { call(editor, compositor.as_deref_mut().unwrap()) } Callback::Editor(call) => call(editor), // skip callbacks for which we don't have the necessary references _ => (), } } } Err(e) => { self.wait_futures = tail; return Err(e); } } } Ok(()) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/keymap.rs000066400000000000000000000456431500614314100243220ustar00rootroot00000000000000pub mod default; pub mod macros; pub use crate::commands::MappableCommand; use arc_swap::{ access::{DynAccess, DynGuard}, ArcSwap, }; use helix_view::{document::Mode, info::Info, input::KeyEvent}; use serde::Deserialize; use std::{ borrow::Cow, collections::{BTreeSet, HashMap}, ops::{Deref, DerefMut}, sync::Arc, }; pub use default::default; use macros::key; #[derive(Debug, Clone, Default)] pub struct KeyTrieNode { /// A label for keys coming under this node, like "Goto mode" name: String, map: HashMap, order: Vec, pub is_sticky: bool, } impl<'de> Deserialize<'de> for KeyTrieNode { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { let 
map = HashMap::::deserialize(deserializer)?; let order = map.keys().copied().collect::>(); // NOTE: map.keys() has arbitrary order Ok(Self { map, order, ..Default::default() }) } } impl KeyTrieNode { pub fn new(name: &str, map: HashMap, order: Vec) -> Self { Self { name: name.to_string(), map, order, is_sticky: false, } } /// Merge another Node in. Leaves and subnodes from the other node replace /// corresponding keyevent in self, except when both other and self have /// subnodes for same key. In that case the merge is recursive. pub fn merge(&mut self, mut other: Self) { for (key, trie) in std::mem::take(&mut other.map) { if let Some(KeyTrie::Node(node)) = self.map.get_mut(&key) { if let KeyTrie::Node(other_node) = trie { node.merge(other_node); continue; } } self.map.insert(key, trie); } for &key in self.map.keys() { if !self.order.contains(&key) { self.order.push(key); } } } pub fn infobox(&self) -> Info { let mut body: Vec<(BTreeSet, &str)> = Vec::with_capacity(self.len()); for (&key, trie) in self.iter() { let desc = match trie { KeyTrie::MappableCommand(cmd) => { if cmd.name() == "no_op" { continue; } cmd.doc() } KeyTrie::Node(n) => &n.name, KeyTrie::Sequence(_) => "[Multiple commands]", }; match body.iter().position(|(_, d)| d == &desc) { Some(pos) => { body[pos].0.insert(key); } None => body.push((BTreeSet::from([key]), desc)), } } body.sort_unstable_by_key(|(keys, _)| { self.order .iter() .position(|&k| k == *keys.iter().next().unwrap()) .unwrap() }); let body: Vec<_> = body .into_iter() .map(|(events, desc)| { let events = events.iter().map(ToString::to_string).collect::>(); (events.join(", "), desc) }) .collect(); Info::new(self.name.clone(), &body) } } impl PartialEq for KeyTrieNode { fn eq(&self, other: &Self) -> bool { self.map == other.map } } impl Deref for KeyTrieNode { type Target = HashMap; fn deref(&self) -> &Self::Target { &self.map } } impl DerefMut for KeyTrieNode { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.map } } 
#[derive(Debug, Clone, PartialEq)] pub enum KeyTrie { MappableCommand(MappableCommand), Sequence(Vec), Node(KeyTrieNode), } impl<'de> Deserialize<'de> for KeyTrie { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { deserializer.deserialize_any(KeyTrieVisitor) } } struct KeyTrieVisitor; impl<'de> serde::de::Visitor<'de> for KeyTrieVisitor { type Value = KeyTrie; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { write!(formatter, "a command, list of commands, or sub-keymap") } fn visit_str(self, command: &str) -> Result where E: serde::de::Error, { command .parse::() .map(KeyTrie::MappableCommand) .map_err(E::custom) } fn visit_seq(self, mut seq: S) -> Result where S: serde::de::SeqAccess<'de>, { let mut commands = Vec::new(); while let Some(command) = seq.next_element::()? { commands.push( command .parse::() .map_err(serde::de::Error::custom)?, ) } // Prevent macro keybindings from being used in command sequences. // This is meant to be a temporary restriction pending a larger // refactor of how command sequences are executed. if commands .iter() .any(|cmd| matches!(cmd, MappableCommand::Macro { .. })) { return Err(serde::de::Error::custom( "macro keybindings may not be used in command sequences", )); } Ok(KeyTrie::Sequence(commands)) } fn visit_map(self, mut map: M) -> Result where M: serde::de::MapAccess<'de>, { let mut mapping = HashMap::new(); let mut order = Vec::new(); while let Some((key, value)) = map.next_entry::()? { mapping.insert(key, value); order.push(key); } Ok(KeyTrie::Node(KeyTrieNode::new("", mapping, order))) } } impl KeyTrie { pub fn reverse_map(&self) -> ReverseKeymap { // recursively visit all nodes in keymap fn map_node(cmd_map: &mut ReverseKeymap, node: &KeyTrie, keys: &mut Vec) { match node { KeyTrie::MappableCommand(MappableCommand::Macro { .. 
}) => {} KeyTrie::MappableCommand(cmd) => { let name = cmd.name(); if name != "no_op" { cmd_map.entry(name.into()).or_default().push(keys.clone()) } } KeyTrie::Node(next) => { for (key, trie) in &next.map { keys.push(*key); map_node(cmd_map, trie, keys); keys.pop(); } } KeyTrie::Sequence(_) => {} }; } let mut res = HashMap::new(); map_node(&mut res, self, &mut Vec::new()); res } pub fn node(&self) -> Option<&KeyTrieNode> { match *self { KeyTrie::Node(ref node) => Some(node), KeyTrie::MappableCommand(_) | KeyTrie::Sequence(_) => None, } } pub fn node_mut(&mut self) -> Option<&mut KeyTrieNode> { match *self { KeyTrie::Node(ref mut node) => Some(node), KeyTrie::MappableCommand(_) | KeyTrie::Sequence(_) => None, } } /// Merge another KeyTrie in, assuming that this KeyTrie and the other /// are both Nodes. Panics otherwise. pub fn merge_nodes(&mut self, mut other: Self) { let node = std::mem::take(other.node_mut().unwrap()); self.node_mut().unwrap().merge(node); } pub fn search(&self, keys: &[KeyEvent]) -> Option<&KeyTrie> { let mut trie = self; for key in keys { trie = match trie { KeyTrie::Node(map) => map.get(key), // leaf encountered while keys left to process KeyTrie::MappableCommand(_) | KeyTrie::Sequence(_) => None, }? } Some(trie) } } #[derive(Debug, Clone, PartialEq)] pub enum KeymapResult { /// Needs more keys to execute a command. Contains valid keys for next keystroke. Pending(KeyTrieNode), Matched(MappableCommand), /// Matched a sequence of commands to execute. MatchedSequence(Vec), /// Key was not found in the root keymap NotFound, /// Key is invalid in combination with previous keys. Contains keys leading upto /// and including current (invalid) key. Cancelled(Vec), } /// A map of command names to keybinds that will execute the command. pub type ReverseKeymap = HashMap>>; pub struct Keymaps { pub map: Box>>, /// Stores pending keys waiting for the next key. This is relative to a /// sticky node if one is in use. 
state: Vec, /// Stores the sticky node if one is activated. pub sticky: Option, } impl Keymaps { pub fn new(map: Box>>) -> Self { Self { map, state: Vec::new(), sticky: None, } } pub fn map(&self) -> DynGuard> { self.map.load() } /// Returns list of keys waiting to be disambiguated in current mode. pub fn pending(&self) -> &[KeyEvent] { &self.state } pub fn sticky(&self) -> Option<&KeyTrieNode> { self.sticky.as_ref() } pub fn contains_key(&self, mode: Mode, key: KeyEvent) -> bool { let keymaps = &*self.map(); let keymap = &keymaps[&mode]; keymap .search(self.pending()) .and_then(KeyTrie::node) .is_some_and(|node| node.contains_key(&key)) } /// Lookup `key` in the keymap to try and find a command to execute. Escape /// key cancels pending keystrokes. If there are no pending keystrokes but a /// sticky node is in use, it will be cleared. pub fn get(&mut self, mode: Mode, key: KeyEvent) -> KeymapResult { // TODO: remove the sticky part and look up manually let keymaps = &*self.map(); let keymap = &keymaps[&mode]; if key!(Esc) == key { if !self.state.is_empty() { // Note that Esc is not included here return KeymapResult::Cancelled(self.state.drain(..).collect()); } self.sticky = None; } let first = self.state.first().unwrap_or(&key); let trie_node = match self.sticky { Some(ref trie) => Cow::Owned(KeyTrie::Node(trie.clone())), None => Cow::Borrowed(keymap), }; let trie = match trie_node.search(&[*first]) { Some(KeyTrie::MappableCommand(ref cmd)) => { return KeymapResult::Matched(cmd.clone()); } Some(KeyTrie::Sequence(ref cmds)) => { return KeymapResult::MatchedSequence(cmds.clone()); } None => return KeymapResult::NotFound, Some(t) => t, }; self.state.push(key); match trie.search(&self.state[1..]) { Some(KeyTrie::Node(map)) => { if map.is_sticky { self.state.clear(); self.sticky = Some(map.clone()); } KeymapResult::Pending(map.clone()) } Some(KeyTrie::MappableCommand(cmd)) => { self.state.clear(); KeymapResult::Matched(cmd.clone()) } Some(KeyTrie::Sequence(cmds)) => { 
self.state.clear(); KeymapResult::MatchedSequence(cmds.clone()) } None => KeymapResult::Cancelled(self.state.drain(..).collect()), } } } impl Default for Keymaps { fn default() -> Self { Self::new(Box::new(ArcSwap::new(Arc::new(default())))) } } /// Merge default config keys with user overwritten keys for custom user config. pub fn merge_keys(dst: &mut HashMap, mut delta: HashMap) { for (mode, keys) in dst { keys.merge_nodes( delta .remove(mode) .unwrap_or_else(|| KeyTrie::Node(KeyTrieNode::default())), ) } } #[cfg(test)] mod tests { use super::macros::keymap; use super::*; use arc_swap::access::Constant; use helix_core::hashmap; #[test] #[should_panic] fn duplicate_keys_should_panic() { keymap!({ "Normal mode" "i" => normal_mode, "i" => goto_definition, }); } #[test] fn check_duplicate_keys_in_default_keymap() { // will panic on duplicate keys, assumes that `Keymaps` uses keymap! macro Keymaps::default(); } #[test] fn merge_partial_keys() { let keymap = hashmap! { Mode::Normal => keymap!({ "Normal mode" "i" => normal_mode, "无" => insert_mode, "z" => jump_backward, "g" => { "Merge into goto mode" "$" => goto_line_end, "g" => delete_char_forward, }, }) }; let mut merged_keyamp = default(); merge_keys(&mut merged_keyamp, keymap.clone()); assert_ne!(keymap, merged_keyamp); let mut keymap = Keymaps::new(Box::new(Constant(merged_keyamp.clone()))); assert_eq!( keymap.get(Mode::Normal, key!('i')), KeymapResult::Matched(MappableCommand::normal_mode), "Leaf should replace leaf" ); assert_eq!( keymap.get(Mode::Normal, key!('无')), KeymapResult::Matched(MappableCommand::insert_mode), "New leaf should be present in merged keymap" ); // Assumes that z is a node in the default keymap assert_eq!( keymap.get(Mode::Normal, key!('z')), KeymapResult::Matched(MappableCommand::jump_backward), "Leaf should replace node" ); let keymap = merged_keyamp.get_mut(&Mode::Normal).unwrap(); // Assumes that `g` is a node in default keymap assert_eq!( keymap.search(&[key!('g'), 
key!('$')]).unwrap(), &KeyTrie::MappableCommand(MappableCommand::goto_line_end), "Leaf should be present in merged subnode" ); // Assumes that `gg` is in default keymap assert_eq!( keymap.search(&[key!('g'), key!('g')]).unwrap(), &KeyTrie::MappableCommand(MappableCommand::delete_char_forward), "Leaf should replace old leaf in merged subnode" ); // Assumes that `ge` is in default keymap assert_eq!( keymap.search(&[key!('g'), key!('e')]).unwrap(), &KeyTrie::MappableCommand(MappableCommand::goto_last_line), "Old leaves in subnode should be present in merged node" ); assert!( merged_keyamp .get(&Mode::Normal) .and_then(|key_trie| key_trie.node()) .unwrap() .len() > 1 ); assert!( merged_keyamp .get(&Mode::Insert) .and_then(|key_trie| key_trie.node()) .unwrap() .len() > 0 ); } #[test] fn order_should_be_set() { let keymap = hashmap! { Mode::Normal => keymap!({ "Normal mode" "space" => { "" "s" => { "" "v" => vsplit, "c" => hsplit, }, }, }) }; let mut merged_keyamp = default(); merge_keys(&mut merged_keyamp, keymap.clone()); assert_ne!(keymap, merged_keyamp); let keymap = merged_keyamp.get_mut(&Mode::Normal).unwrap(); // Make sure mapping works assert_eq!( keymap.search(&[key!(' '), key!('s'), key!('v')]).unwrap(), &KeyTrie::MappableCommand(MappableCommand::vsplit), "Leaf should be present in merged subnode" ); // Make sure an order was set during merge let node = keymap.search(&[crate::key!(' ')]).unwrap(); assert!(!node.node().unwrap().order.as_slice().is_empty()) } #[test] fn aliased_modes_are_same_in_default_keymap() { let keymaps = Keymaps::default().map(); let root = keymaps.get(&Mode::Normal).unwrap(); assert_eq!( root.search(&[key!(' '), key!('w')]).unwrap(), root.search(&["C-w".parse::().unwrap()]).unwrap(), "Mismatch for window mode on `Space-w` and `Ctrl-w`" ); assert_eq!( root.search(&[key!('z')]).unwrap(), root.search(&[key!('Z')]).unwrap(), "Mismatch for view mode on `z` and `Z`" ); } #[test] fn reverse_map() { let normal_mode = keymap!({ "Normal mode" "i" 
=> insert_mode, "g" => { "Goto" "g" => goto_file_start, "e" => goto_file_end, }, "j" | "k" => move_line_down, }); let keymap = normal_mode; let mut reverse_map = keymap.reverse_map(); // sort keybindings in order to have consistent tests // HashMaps can be compared but we can still get different ordering of bindings // for commands that have multiple bindings assigned for v in reverse_map.values_mut() { v.sort() } assert_eq!( reverse_map, HashMap::from([ ("insert_mode".to_string(), vec![vec![key!('i')]]), ( "goto_file_start".to_string(), vec![vec![key!('g'), key!('g')]] ), ( "goto_file_end".to_string(), vec![vec![key!('g'), key!('e')]] ), ( "move_line_down".to_string(), vec![vec![key!('j')], vec![key!('k')]] ), ]), "Mismatch" ) } #[test] fn escaped_keymap() { use crate::commands::MappableCommand; use helix_view::input::{KeyCode, KeyEvent, KeyModifiers}; let keys = r#" "+" = [ "select_all", ":pipe sed -E 's/\\s+$//g'", ] "#; let key = KeyEvent { code: KeyCode::Char('+'), modifiers: KeyModifiers::NONE, }; let expectation = KeyTrie::Node(KeyTrieNode::new( "", hashmap! 
{ key => KeyTrie::Sequence(vec!{ MappableCommand::select_all, MappableCommand::Typable { name: "pipe".to_string(), args: "sed -E 's/\\s+$//g'".to_string(), doc: "".to_string(), }, }) }, vec![key], )); assert_eq!(toml::from_str(keys), Ok(expectation)); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/keymap/000077500000000000000000000000001500614314100237405ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/keymap/default.rs000066400000000000000000000320051500614314100257320ustar00rootroot00000000000000use std::collections::HashMap; use super::macros::keymap; use super::{KeyTrie, Mode}; use helix_core::hashmap; pub fn default() -> HashMap { let normal = keymap!({ "Normal mode" "h" | "left" => move_char_left, "j" | "down" => move_visual_line_down, "k" | "up" => move_visual_line_up, "l" | "right" => move_char_right, "t" => find_till_char, "f" => find_next_char, "T" => till_prev_char, "F" => find_prev_char, "r" => replace, "R" => replace_with_yanked, "A-." => repeat_last_motion, "~" => switch_case, "`" => switch_to_lowercase, "A-`" => switch_to_uppercase, "home" => goto_line_start, "end" => goto_line_end, "w" => move_next_word_start, "b" => move_prev_word_start, "e" => move_next_word_end, "W" => move_next_long_word_start, "B" => move_prev_long_word_start, "E" => move_next_long_word_end, "v" => select_mode, "G" => goto_line, "g" => { "Goto" "g" => goto_file_start, "|" => goto_column, "e" => goto_last_line, "f" => goto_file, "h" => goto_line_start, "l" => goto_line_end, "s" => goto_first_nonwhitespace, "d" => goto_definition, "D" => goto_declaration, "y" => goto_type_definition, "r" => goto_reference, "i" => goto_implementation, "t" => goto_window_top, "c" => goto_window_center, "b" => goto_window_bottom, "a" => goto_last_accessed_file, "m" => goto_last_modified_file, "n" => goto_next_buffer, "p" => goto_previous_buffer, "k" => move_line_up, "j" => move_line_down, "." 
=> goto_last_modification, "w" => goto_word, }, ":" => command_mode, "i" => insert_mode, "I" => insert_at_line_start, "a" => append_mode, "A" => insert_at_line_end, "o" => open_below, "O" => open_above, "d" => delete_selection, "A-d" => delete_selection_noyank, "c" => change_selection, "A-c" => change_selection_noyank, "C" => copy_selection_on_next_line, "A-C" => copy_selection_on_prev_line, "s" => select_regex, "A-s" => split_selection_on_newline, "A-minus" => merge_selections, "A-_" => merge_consecutive_selections, "S" => split_selection, ";" => collapse_selection, "A-;" => flip_selections, "A-o" | "A-up" => expand_selection, "A-i" | "A-down" => shrink_selection, "A-I" | "A-S-down" => select_all_children, "A-p" | "A-left" => select_prev_sibling, "A-n" | "A-right" => select_next_sibling, "A-e" => move_parent_node_end, "A-b" => move_parent_node_start, "A-a" => select_all_siblings, "%" => select_all, "x" => extend_line_below, "X" => extend_to_line_bounds, "A-x" => shrink_to_line_bounds, "m" => { "Match" "m" => match_brackets, "s" => surround_add, "r" => surround_replace, "d" => surround_delete, "a" => select_textobject_around, "i" => select_textobject_inner, }, "[" => { "Left bracket" "d" => goto_prev_diag, "D" => goto_first_diag, "g" => goto_prev_change, "G" => goto_first_change, "f" => goto_prev_function, "t" => goto_prev_class, "a" => goto_prev_parameter, "c" => goto_prev_comment, "e" => goto_prev_entry, "T" => goto_prev_test, "p" => goto_prev_paragraph, "space" => add_newline_above, }, "]" => { "Right bracket" "d" => goto_next_diag, "D" => goto_last_diag, "g" => goto_next_change, "G" => goto_last_change, "f" => goto_next_function, "t" => goto_next_class, "a" => goto_next_parameter, "c" => goto_next_comment, "e" => goto_next_entry, "T" => goto_next_test, "p" => goto_next_paragraph, "space" => add_newline_below, }, "/" => search, "?" 
=> rsearch, "n" => search_next, "N" => search_prev, "*" => search_selection_detect_word_boundaries, "A-*" => search_selection, "u" => undo, "U" => redo, "A-u" => earlier, "A-U" => later, "y" => yank, // yank_all "p" => paste_after, // paste_all "P" => paste_before, "Q" => record_macro, "q" => replay_macro, ">" => indent, "<" => unindent, "=" => format_selections, "J" => join_selections, "A-J" => join_selections_space, "K" => keep_selections, "A-K" => remove_selections, "," => keep_primary_selection, "A-," => remove_primary_selection, // "q" => record_macro, // "Q" => replay_macro, "&" => align_selections, "_" => trim_selections, "(" => rotate_selections_backward, ")" => rotate_selections_forward, "A-(" => rotate_selection_contents_backward, "A-)" => rotate_selection_contents_forward, "A-:" => ensure_selections_forward, "esc" => normal_mode, "C-b" | "pageup" => page_up, "C-f" | "pagedown" => page_down, "C-u" => page_cursor_half_up, "C-d" => page_cursor_half_down, "C-w" => { "Window" "C-w" | "w" => rotate_view, "C-s" | "s" => hsplit, "C-v" | "v" => vsplit, "C-t" | "t" => transpose_view, "f" => goto_file_hsplit, "F" => goto_file_vsplit, "C-q" | "q" => wclose, "C-o" | "o" => wonly, "C-h" | "h" | "left" => jump_view_left, "C-j" | "j" | "down" => jump_view_down, "C-k" | "k" | "up" => jump_view_up, "C-l" | "l" | "right" => jump_view_right, "L" => swap_view_right, "K" => swap_view_up, "H" => swap_view_left, "J" => swap_view_down, "n" => { "New split scratch buffer" "C-s" | "s" => hsplit_new, "C-v" | "v" => vsplit_new, }, }, // move under c "C-c" => toggle_comments, // z family for save/restore/combine from/to sels from register "C-i" | "tab" => jump_forward, // tab == "C-o" => jump_backward, "C-s" => save_selection, "space" => { "Space" "f" => file_picker, "F" => file_picker_in_current_directory, "e" => file_explorer, "E" => file_explorer_in_current_buffer_directory, "b" => buffer_picker, "j" => jumplist_picker, "s" => symbol_picker, "S" => workspace_symbol_picker, "d" => 
diagnostics_picker, "D" => workspace_diagnostics_picker, "g" => changed_file_picker, "a" => code_action, "'" => last_picker, "G" => { "Debug (experimental)" sticky=true "l" => dap_launch, "r" => dap_restart, "b" => dap_toggle_breakpoint, "c" => dap_continue, "h" => dap_pause, "i" => dap_step_in, "o" => dap_step_out, "n" => dap_next, "v" => dap_variables, "t" => dap_terminate, "C-c" => dap_edit_condition, "C-l" => dap_edit_log, "s" => { "Switch" "t" => dap_switch_thread, "f" => dap_switch_stack_frame, // sl, sb }, "e" => dap_enable_exceptions, "E" => dap_disable_exceptions, }, "w" => { "Window" "C-w" | "w" => rotate_view, "C-s" | "s" => hsplit, "C-v" | "v" => vsplit, "C-t" | "t" => transpose_view, "f" => goto_file_hsplit, "F" => goto_file_vsplit, "C-q" | "q" => wclose, "C-o" | "o" => wonly, "C-h" | "h" | "left" => jump_view_left, "C-j" | "j" | "down" => jump_view_down, "C-k" | "k" | "up" => jump_view_up, "C-l" | "l" | "right" => jump_view_right, "H" => swap_view_left, "J" => swap_view_down, "K" => swap_view_up, "L" => swap_view_right, "n" => { "New split scratch buffer" "C-s" | "s" => hsplit_new, "C-v" | "v" => vsplit_new, }, }, "y" => yank_to_clipboard, "Y" => yank_main_selection_to_clipboard, "p" => paste_clipboard_after, "P" => paste_clipboard_before, "R" => replace_selections_with_clipboard, "/" => global_search, "k" => hover, "r" => rename_symbol, "h" => select_references_to_symbol_under_cursor, "c" => toggle_comments, "C" => toggle_block_comments, "A-c" => toggle_line_comments, "?" => command_palette, }, "z" => { "View" "z" | "c" => align_view_center, "t" => align_view_top, "b" => align_view_bottom, "m" => align_view_middle, "k" | "up" => scroll_up, "j" | "down" => scroll_down, "C-b" | "pageup" => page_up, "C-f" | "pagedown" => page_down, "C-u" | "backspace" => page_cursor_half_up, "C-d" | "space" => page_cursor_half_down, "/" => search, "?" 
=> rsearch, "n" => search_next, "N" => search_prev, }, "Z" => { "View" sticky=true "z" | "c" => align_view_center, "t" => align_view_top, "b" => align_view_bottom, "m" => align_view_middle, "k" | "up" => scroll_up, "j" | "down" => scroll_down, "C-b" | "pageup" => page_up, "C-f" | "pagedown" => page_down, "C-u" | "backspace" => page_cursor_half_up, "C-d" | "space" => page_cursor_half_down, "/" => search, "?" => rsearch, "n" => search_next, "N" => search_prev, }, "\"" => select_register, "|" => shell_pipe, "A-|" => shell_pipe_to, "!" => shell_insert_output, "A-!" => shell_append_output, "$" => shell_keep_pipe, "C-z" => suspend, "C-a" => increment, "C-x" => decrement, }); let mut select = normal.clone(); select.merge_nodes(keymap!({ "Select mode" "h" | "left" => extend_char_left, "j" | "down" => extend_visual_line_down, "k" | "up" => extend_visual_line_up, "l" | "right" => extend_char_right, "w" => extend_next_word_start, "b" => extend_prev_word_start, "e" => extend_next_word_end, "W" => extend_next_long_word_start, "B" => extend_prev_long_word_start, "E" => extend_next_long_word_end, "A-e" => extend_parent_node_end, "A-b" => extend_parent_node_start, "n" => extend_search_next, "N" => extend_search_prev, "t" => extend_till_char, "f" => extend_next_char, "T" => extend_till_prev_char, "F" => extend_prev_char, "home" => extend_to_line_start, "end" => extend_to_line_end, "esc" => exit_select_mode, "v" => normal_mode, "g" => { "Goto" "g" => extend_to_file_start, "|" => extend_to_column, "e" => extend_to_last_line, "k" => extend_line_up, "j" => extend_line_down, "w" => extend_to_word, }, })); let insert = keymap!({ "Insert mode" "esc" => normal_mode, "C-s" => commit_undo_checkpoint, "C-x" => completion, "C-r" => insert_register, "C-w" | "A-backspace" => delete_word_backward, "A-d" | "A-del" => delete_word_forward, "C-u" => kill_to_line_start, "C-k" => kill_to_line_end, "C-h" | "backspace" | "S-backspace" => delete_char_backward, "C-d" | "del" => delete_char_forward, "C-j" | 
"ret" => insert_newline, "tab" => smart_tab, "S-tab" => insert_tab, "up" => move_visual_line_up, "down" => move_visual_line_down, "left" => move_char_left, "right" => move_char_right, "pageup" => page_up, "pagedown" => page_down, "home" => goto_line_start, "end" => goto_line_end_newline, }); hashmap!( Mode::Normal => normal, Mode::Select => select, Mode::Insert => insert, ) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/keymap/macros.rs000066400000000000000000000073011500614314100255730ustar00rootroot00000000000000#[macro_export] macro_rules! key { ($key:ident) => { ::helix_view::input::KeyEvent { code: ::helix_view::keyboard::KeyCode::$key, modifiers: ::helix_view::keyboard::KeyModifiers::NONE, } }; ($($ch:tt)*) => { ::helix_view::input::KeyEvent { code: ::helix_view::keyboard::KeyCode::Char($($ch)*), modifiers: ::helix_view::keyboard::KeyModifiers::NONE, } }; } #[macro_export] macro_rules! shift { ($key:ident) => { ::helix_view::input::KeyEvent { code: ::helix_view::keyboard::KeyCode::$key, modifiers: ::helix_view::keyboard::KeyModifiers::SHIFT, } }; ($($ch:tt)*) => { ::helix_view::input::KeyEvent { code: ::helix_view::keyboard::KeyCode::Char($($ch)*), modifiers: ::helix_view::keyboard::KeyModifiers::SHIFT, } }; } #[macro_export] macro_rules! ctrl { ($key:ident) => { ::helix_view::input::KeyEvent { code: ::helix_view::keyboard::KeyCode::$key, modifiers: ::helix_view::keyboard::KeyModifiers::CONTROL, } }; ($($ch:tt)*) => { ::helix_view::input::KeyEvent { code: ::helix_view::keyboard::KeyCode::Char($($ch)*), modifiers: ::helix_view::keyboard::KeyModifiers::CONTROL, } }; } #[macro_export] macro_rules! 
alt { ($key:ident) => { ::helix_view::input::KeyEvent { code: ::helix_view::keyboard::KeyCode::$key, modifiers: ::helix_view::keyboard::KeyModifiers::ALT, } }; ($($ch:tt)*) => { ::helix_view::input::KeyEvent { code: ::helix_view::keyboard::KeyCode::Char($($ch)*), modifiers: ::helix_view::keyboard::KeyModifiers::ALT, } }; } /// Macro for defining a `KeyTrie`. Example: /// /// ``` /// # use helix_core::hashmap; /// # use helix_term::keymap; /// let normal_mode = keymap!({ "Normal mode" /// "i" => insert_mode, /// "g" => { "Goto" /// "g" => goto_file_start, /// "e" => goto_file_end, /// }, /// "j" | "down" => move_line_down, /// }); /// let keymap = normal_mode; /// ``` #[macro_export] macro_rules! keymap { (@trie $cmd:ident) => { $crate::keymap::KeyTrie::MappableCommand($crate::commands::MappableCommand::$cmd) }; (@trie { $label:literal $(sticky=$sticky:literal)? $($($key:literal)|+ => $value:tt,)+ } ) => { keymap!({ $label $(sticky=$sticky)? $($($key)|+ => $value,)+ }) }; (@trie [$($cmd:ident),* $(,)?]) => { $crate::keymap::KeyTrie::Sequence(vec![$($crate::commands::MappableCommand::$cmd),*]) }; ( { $label:literal $(sticky=$sticky:literal)? $($($key:literal)|+ => $value:tt,)+ } ) => { // modified from the hashmap! macro { let _cap = hashmap!(@count $($($key),+),*); let mut _map = ::std::collections::HashMap::with_capacity(_cap); let mut _order = ::std::vec::Vec::with_capacity(_cap); $( $( let _key = $key.parse::<::helix_view::input::KeyEvent>().unwrap(); let _duplicate = _map.insert( _key, keymap!(@trie $value) ); assert!(_duplicate.is_none(), "Duplicate key found: {:?}", _duplicate.unwrap()); _order.push(_key); )+ )* let mut _node = $crate::keymap::KeyTrieNode::new($label, _map, _order); $( _node.is_sticky = $sticky; )? 
$crate::keymap::KeyTrie::Node(_node) } }; } pub use alt; pub use ctrl; pub use key; pub use keymap; pub use shift; helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/lib.rs000066400000000000000000000043771500614314100236010ustar00rootroot00000000000000#[macro_use] extern crate helix_view; pub mod application; pub mod args; pub mod commands; pub mod compositor; pub mod config; pub mod events; pub mod health; pub mod job; pub mod keymap; pub mod ui; use std::path::Path; use futures_util::Future; mod handlers; use ignore::DirEntry; use url::Url; #[cfg(windows)] fn true_color() -> bool { true } #[cfg(not(windows))] fn true_color() -> bool { if matches!( std::env::var("COLORTERM").map(|v| matches!(v.as_str(), "truecolor" | "24bit")), Ok(true) ) { return true; } match termini::TermInfo::from_env() { Ok(t) => { t.extended_cap("RGB").is_some() || t.extended_cap("Tc").is_some() || (t.extended_cap("setrgbf").is_some() && t.extended_cap("setrgbb").is_some()) } Err(_) => false, } } /// Function used for filtering dir entries in the various file pickers. fn filter_picker_entry(entry: &DirEntry, root: &Path, dedup_symlinks: bool) -> bool { // We always want to ignore popular VCS directories, otherwise if // `ignore` is turned off, we end up with a lot of noise // in our picker. if matches!( entry.file_name().to_str(), Some(".git" | ".pijul" | ".jj" | ".hg" | ".svn") ) { return false; } // We also ignore symlinks that point inside the current directory // if `dedup_links` is enabled. if dedup_symlinks && entry.path_is_symlink() { return entry .path() .canonicalize() .ok() .is_some_and(|path| !path.starts_with(root)); } true } /// Opens URL in external program. 
fn open_external_url_callback( url: Url, ) -> impl Future> + Send + 'static { let commands = open::commands(url.as_str()); async { for cmd in commands { let mut command = tokio::process::Command::new(cmd.get_program()); command.args(cmd.get_args()); if command.output().await.is_ok() { return Ok(job::Callback::Editor(Box::new(|_| {}))); } } Ok(job::Callback::Editor(Box::new(move |editor| { editor.set_error("Opening URL in external program failed") }))) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/main.rs000066400000000000000000000132561500614314100237530ustar00rootroot00000000000000use anyhow::{Context, Error, Result}; use crossterm::event::EventStream; use helix_loader::VERSION_AND_GIT_HASH; use helix_term::application::Application; use helix_term::args::Args; use helix_term::config::{Config, ConfigLoadError}; fn setup_logging(verbosity: u64) -> Result<()> { let mut base_config = fern::Dispatch::new(); base_config = match verbosity { 0 => base_config.level(log::LevelFilter::Warn), 1 => base_config.level(log::LevelFilter::Info), 2 => base_config.level(log::LevelFilter::Debug), _3_or_more => base_config.level(log::LevelFilter::Trace), }; // Separate file config so we can include year, month and day in file logs let file_config = fern::Dispatch::new() .format(|out, message, record| { out.finish(format_args!( "{} {} [{}] {}", chrono::Local::now().format("%Y-%m-%dT%H:%M:%S%.3f"), record.target(), record.level(), message )) }) .chain(fern::log_file(helix_loader::log_file())?); base_config.chain(file_config).apply()?; Ok(()) } fn main() -> Result<()> { let exit_code = main_impl()?; std::process::exit(exit_code); } #[tokio::main] async fn main_impl() -> Result { let args = Args::parse_args().context("could not parse arguments")?; helix_loader::initialize_config_file(args.config_file.clone()); helix_loader::initialize_log_file(args.log_file.clone()); // Help has a higher priority and should be handled separately. 
if args.display_help { print!( "\ {} {} {} {} USAGE: hx [FLAGS] [files]... ARGS: ... Sets the input file to use, position can also be specified via file[:row[:col]] FLAGS: -h, --help Prints help information --tutor Loads the tutorial --health [CATEGORY] Checks for potential errors in editor setup CATEGORY can be a language or one of 'clipboard', 'languages' or 'all'. 'all' is the default if not specified. -g, --grammar {{fetch|build}} Fetches or builds tree-sitter grammars listed in languages.toml -c, --config Specifies a file to use for configuration -v Increases logging verbosity each use for up to 3 times --log Specifies a file to use for logging (default file: {}) -V, --version Prints version information --vsplit Splits all given files vertically into different windows --hsplit Splits all given files horizontally into different windows -w, --working-dir Specify an initial working directory +N Open the first given file at line number N ", env!("CARGO_PKG_NAME"), VERSION_AND_GIT_HASH, env!("CARGO_PKG_AUTHORS"), env!("CARGO_PKG_DESCRIPTION"), helix_loader::default_log_file().display(), ); std::process::exit(0); } if args.display_version { println!("helix {}", VERSION_AND_GIT_HASH); std::process::exit(0); } if args.health { if let Err(err) = helix_term::health::print_health(args.health_arg) { // Piping to for example `head -10` requires special handling: // https://stackoverflow.com/a/65760807/7115678 if err.kind() != std::io::ErrorKind::BrokenPipe { return Err(err.into()); } } std::process::exit(0); } if args.fetch_grammars { helix_loader::grammar::fetch_grammars()?; return Ok(0); } if args.build_grammars { helix_loader::grammar::build_grammars(None)?; return Ok(0); } setup_logging(args.verbosity).context("failed to initialize logging")?; // NOTE: Set the working directory early so the correct configuration is loaded. Be aware that // Application::new() depends on this logic so it must be updated if this changes. 
if let Some(path) = &args.working_directory { helix_stdx::env::set_current_working_dir(path)?; } else if let Some((path, _)) = args.files.first().filter(|p| p.0.is_dir()) { // If the first file is a directory, it will be the working directory unless -w was specified helix_stdx::env::set_current_working_dir(path)?; } let config = match Config::load_default() { Ok(config) => config, Err(ConfigLoadError::Error(err)) if err.kind() == std::io::ErrorKind::NotFound => { Config::default() } Err(ConfigLoadError::Error(err)) => return Err(Error::new(err)), Err(ConfigLoadError::BadConfig(err)) => { eprintln!("Bad config: {}", err); eprintln!("Press to continue with default config"); use std::io::Read; let _ = std::io::stdin().read(&mut []); Config::default() } }; let lang_loader = helix_core::config::user_lang_loader().unwrap_or_else(|err| { eprintln!("{}", err); eprintln!("Press to continue with default language config"); use std::io::Read; // This waits for an enter press. let _ = std::io::stdin().read(&mut []); helix_core::config::default_lang_loader() }); // TODO: use the thread local executor to spawn the application task separately from the work pool let mut app = Application::new(args, config, lang_loader).context("unable to start Helix")?; let exit_code = app.run(&mut EventStream::new()).await?; Ok(exit_code) } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/ui/000077500000000000000000000000001500614314100230675ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/ui/completion.rs000066400000000000000000000657061500614314100256240ustar00rootroot00000000000000use crate::ui::{menu, Markdown, Menu, Popup, PromptEvent}; use crate::{ compositor::{Component, Context, Event, EventResult}, handlers::completion::{ trigger_auto_completion, CompletionItem, CompletionResponse, LspCompletionItem, ResolveHandler, }, }; use helix_core::snippets::{ActiveSnippet, RenderedSnippet, Snippet}; use helix_core::{self as core, chars, 
fuzzy::MATCHER, Change, Transaction}; use helix_lsp::{lsp, util, OffsetEncoding}; use helix_view::{ editor::CompleteAction, handlers::lsp::SignatureHelpInvoked, theme::{Color, Modifier, Style}, ViewId, }; use helix_view::{graphics::Rect, Document, Editor}; use nucleo::{ pattern::{Atom, AtomKind, CaseMatching, Normalization}, Config, Utf32Str, }; use tui::text::Spans; use tui::{buffer::Buffer as Surface, text::Span}; use std::cmp::Reverse; impl menu::Item for CompletionItem { type Data = Style; fn format(&self, dir_style: &Self::Data) -> menu::Row { let deprecated = match self { CompletionItem::Lsp(LspCompletionItem { item, .. }) => { item.deprecated.unwrap_or_default() || item .tags .as_ref() .is_some_and(|tags| tags.contains(&lsp::CompletionItemTag::DEPRECATED)) } CompletionItem::Other(_) => false, }; let label = match self { CompletionItem::Lsp(LspCompletionItem { item, .. }) => item.label.as_str(), CompletionItem::Other(core::CompletionItem { label, .. }) => label, }; let kind = match self { CompletionItem::Lsp(LspCompletionItem { item, .. 
}) => match item.kind { Some(lsp::CompletionItemKind::TEXT) => "text".into(), Some(lsp::CompletionItemKind::METHOD) => "method".into(), Some(lsp::CompletionItemKind::FUNCTION) => "function".into(), Some(lsp::CompletionItemKind::CONSTRUCTOR) => "constructor".into(), Some(lsp::CompletionItemKind::FIELD) => "field".into(), Some(lsp::CompletionItemKind::VARIABLE) => "variable".into(), Some(lsp::CompletionItemKind::CLASS) => "class".into(), Some(lsp::CompletionItemKind::INTERFACE) => "interface".into(), Some(lsp::CompletionItemKind::MODULE) => "module".into(), Some(lsp::CompletionItemKind::PROPERTY) => "property".into(), Some(lsp::CompletionItemKind::UNIT) => "unit".into(), Some(lsp::CompletionItemKind::VALUE) => "value".into(), Some(lsp::CompletionItemKind::ENUM) => "enum".into(), Some(lsp::CompletionItemKind::KEYWORD) => "keyword".into(), Some(lsp::CompletionItemKind::SNIPPET) => "snippet".into(), Some(lsp::CompletionItemKind::COLOR) => item .documentation .as_ref() .and_then(|docs| { let text = match docs { lsp::Documentation::String(text) => text, lsp::Documentation::MarkupContent(lsp::MarkupContent { value, .. }) => value, }; // Language servers which send Color completion items tend to include a 6 // digit hex code at the end for the color. The extra 1 digit is for the '#' text.get(text.len().checked_sub(7)?..) 
}) .and_then(Color::from_hex) .map_or("color".into(), |color| { Spans::from(vec![ Span::raw("color "), Span::styled("■", Style::default().fg(color)), ]) }), Some(lsp::CompletionItemKind::FILE) => "file".into(), Some(lsp::CompletionItemKind::REFERENCE) => "reference".into(), Some(lsp::CompletionItemKind::FOLDER) => "folder".into(), Some(lsp::CompletionItemKind::ENUM_MEMBER) => "enum_member".into(), Some(lsp::CompletionItemKind::CONSTANT) => "constant".into(), Some(lsp::CompletionItemKind::STRUCT) => "struct".into(), Some(lsp::CompletionItemKind::EVENT) => "event".into(), Some(lsp::CompletionItemKind::OPERATOR) => "operator".into(), Some(lsp::CompletionItemKind::TYPE_PARAMETER) => "type_param".into(), Some(kind) => { log::error!("Received unknown completion item kind: {:?}", kind); "".into() } None => "".into(), }, CompletionItem::Other(core::CompletionItem { kind, .. }) => kind.as_ref().into(), }; let label = Span::styled( label, if deprecated { Style::default().add_modifier(Modifier::CROSSED_OUT) } else if kind.0[0].content == "folder" { *dir_style } else { Style::default() }, ); menu::Row::new([menu::Cell::from(label), menu::Cell::from(kind)]) } } /// Wraps a Menu. pub struct Completion { popup: Popup>, #[allow(dead_code)] trigger_offset: usize, filter: String, // TODO: move to helix-view/central handler struct in the future resolve_handler: ResolveHandler, } impl Completion { pub const ID: &'static str = "completion"; pub fn new(editor: &Editor, items: Vec, trigger_offset: usize) -> Self { let preview_completion_insert = editor.config().preview_completion_insert; let replace_mode = editor.config().completion_replace; let dir_style = editor.theme.get("ui.text.directory"); // Then create the menu let menu = Menu::new(items, dir_style, move |editor: &mut Editor, item, event| { let (view, doc) = current!(editor); macro_rules! 
language_server { ($item:expr) => { match editor .language_servers .get_by_id($item.provider) { Some(ls) => ls, None => { editor.set_error("completions are outdated"); // TODO close the completion menu somehow, // currently there is no trivial way to access the EditorView to close the completion menu return; } } }; } match event { PromptEvent::Abort => {} PromptEvent::Update if preview_completion_insert => { // Update creates "ghost" transactions which are not sent to the // lsp server to avoid messing up re-requesting completions. Once a // completion has been selected (with tab, c-n or c-p) it's always accepted whenever anything // is typed. The only way to avoid that is to explicitly abort the completion // with c-c. This will remove the "ghost" transaction. // // The ghost transaction is modeled with a transaction that is not sent to the LS. // (apply_temporary) and a savepoint. It's extremely important this savepoint is restored // (also without sending the transaction to the LS) *before any further transaction is applied*. // Otherwise incremental sync breaks (since the state of the LS doesn't match the state the transaction // is applied to). if matches!(editor.last_completion, Some(CompleteAction::Triggered)) { editor.last_completion = Some(CompleteAction::Selected { savepoint: doc.savepoint(view), }) } let item = item.unwrap(); let context = &editor.handlers.completions.active_completions[&item.provider()]; // if more text was entered, remove it doc.restore(view, &context.savepoint, false); // always present here match item { CompletionItem::Lsp(item) => { let (transaction, _) = lsp_item_to_transaction( doc, view.id, &item.item, language_server!(item).offset_encoding(), trigger_offset, replace_mode, ); doc.apply_temporary(&transaction, view.id) } CompletionItem::Other(core::CompletionItem { transaction, .. 
}) => { doc.apply_temporary(transaction, view.id) } }; } PromptEvent::Update => {} PromptEvent::Validate => { if let Some(CompleteAction::Selected { savepoint }) = editor.last_completion.take() { doc.restore(view, &savepoint, false); } let item = item.unwrap(); let context = &editor.handlers.completions.active_completions[&item.provider()]; // if more text was entered, remove it doc.restore(view, &context.savepoint, true); // save an undo checkpoint before the completion doc.append_changes_to_history(view); // item always present here let (transaction, additional_edits, snippet) = match item.clone() { CompletionItem::Lsp(mut item) => { let language_server = language_server!(item); // resolve item if not yet resolved if !item.resolved { if let Some(resolved_item) = Self::resolve_completion_item( language_server, item.item.clone(), ) { item.item = resolved_item; } }; let encoding = language_server.offset_encoding(); let (transaction, snippet) = lsp_item_to_transaction( doc, view.id, &item.item, encoding, trigger_offset, replace_mode, ); let add_edits = item.item.additional_text_edits; ( transaction, add_edits.map(|edits| (edits, encoding)), snippet, ) } CompletionItem::Other(core::CompletionItem { transaction, .. }) => { (transaction, None, None) } }; doc.apply(&transaction, view.id); let placeholder = snippet.is_some(); if let Some(snippet) = snippet { doc.active_snippet = match doc.active_snippet.take() { Some(active) => active.insert_subsnippet(snippet), None => ActiveSnippet::new(snippet), }; } editor.last_completion = Some(CompleteAction::Applied { trigger_offset, changes: completion_changes(&transaction, trigger_offset), placeholder, }); // TODO: add additional _edits to completion_changes? 
if let Some((additional_edits, offset_encoding)) = additional_edits { if !additional_edits.is_empty() { let transaction = util::generate_transaction_from_edits( doc.text(), additional_edits, offset_encoding, // TODO: should probably transcode in Client ); doc.apply(&transaction, view.id); } } // we could have just inserted a trigger char (like a `crate::` completion for rust // so we want to retrigger immediately when accepting a completion. trigger_auto_completion(editor, true); } }; // In case the popup was deleted because of an intersection w/ the auto-complete menu. if event != PromptEvent::Update { editor .handlers .trigger_signature_help(SignatureHelpInvoked::Automatic, editor); } }); let popup = Popup::new(Self::ID, menu) .with_scrollbar(false) .ignore_escape_key(true); let (view, doc) = current_ref!(editor); let text = doc.text().slice(..); let cursor = doc.selection(view.id).primary().cursor(text); let offset = text .chars_at(cursor) .reversed() .take_while(|ch| chars::char_is_word(*ch)) .count(); let start_offset = cursor.saturating_sub(offset); let fragment = doc.text().slice(start_offset..cursor); let mut completion = Self { popup, trigger_offset, // TODO: expand nucleo api to allow moving straight to a Utf32String here // and avoid allocation during matching filter: String::from(fragment), resolve_handler: ResolveHandler::new(), }; // need to recompute immediately in case start_offset != trigger_offset completion.score(false); completion } fn score(&mut self, incremental: bool) { let pattern = &self.filter; let mut matcher = MATCHER.lock(); matcher.config = Config::DEFAULT; // slight preference towards prefix matches matcher.config.prefer_prefix = true; let pattern = Atom::new( pattern, CaseMatching::Ignore, Normalization::Smart, AtomKind::Fuzzy, false, ); let mut buf = Vec::new(); let (matches, options) = self.popup.contents_mut().update_options(); if incremental { matches.retain_mut(|(index, score)| { let option = &options[*index as usize]; let text 
= option.filter_text(); let new_score = pattern.score(Utf32Str::new(text, &mut buf), &mut matcher); match new_score { Some(new_score) => { *score = new_score as u32 / 2; true } None => false, } }) } else { matches.clear(); matches.extend(options.iter().enumerate().filter_map(|(i, option)| { let text = option.filter_text(); pattern .score(Utf32Str::new(text, &mut buf), &mut matcher) .map(|score| (i as u32, score as u32 / 3)) })); } // Nucleo is meant as an FZF-like fuzzy matcher and only hides matches that are truly // impossible - as in the sequence of characters just doesn't appear. That doesn't work // well for completions with multiple language servers where all completions of the next // server are below the current one (so you would get good suggestions from the second // server below those of the first). Setting a reasonable cutoff below which to move bad // completions out of the way helps with that. // // The score computation is a heuristic derived from Nucleo internal constants that may // move upstream in the future. I want to test this out here to settle on a good number. let min_score = (7 + pattern.needle_text().len() as u32 * 14) / 3; matches.sort_unstable_by_key(|&(i, score)| { let option = &options[i as usize]; ( score <= min_score, Reverse(option.preselect()), option.provider_priority(), Reverse(score), i, ) }); } /// Synchronously resolve the given completion item. This is used when /// accepting a completion. fn resolve_completion_item( language_server: &helix_lsp::Client, completion_item: lsp::CompletionItem, ) -> Option { if !matches!( language_server.capabilities().completion_provider, Some(lsp::CompletionOptions { resolve_provider: Some(true), .. 
}) ) { return None; } let future = language_server.resolve_completion_item(&completion_item); let response = helix_lsp::block_on(future); match response { Ok(item) => Some(item), Err(err) => { log::error!("Failed to resolve completion item: {}", err); None } } } /// Appends (`c: Some(c)`) or removes (`c: None`) a character to/from the filter /// this should be called whenever the user types or deletes a character in insert mode. pub fn update_filter(&mut self, c: Option) { // recompute menu based on matches let menu = self.popup.contents_mut(); match c { Some(c) => self.filter.push(c), None => { self.filter.pop(); if self.filter.is_empty() { menu.clear(); return; } } } self.score(c.is_some()); self.popup.contents_mut().reset_cursor(); } pub fn replace_provider_completions( &mut self, response: &mut CompletionResponse, is_incomplete: bool, ) { let menu = self.popup.contents_mut(); let (_, options) = menu.update_options(); if is_incomplete { options.retain(|item| item.provider() != response.provider) } response.take_items(options); self.score(false); let menu = self.popup.contents_mut(); menu.ensure_cursor_in_bounds(); } pub fn is_empty(&self) -> bool { self.popup.contents().is_empty() } pub fn replace_item( &mut self, old_item: &impl PartialEq, new_item: CompletionItem, ) { self.popup.contents_mut().replace_option(old_item, new_item); } pub fn area(&mut self, viewport: Rect, editor: &Editor) -> Rect { self.popup.area(viewport, editor) } } impl Component for Completion { fn handle_event(&mut self, event: &Event, cx: &mut Context) -> EventResult { self.popup.handle_event(event, cx) } fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> { self.popup.required_size(viewport) } fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) { self.popup.render(area, surface, cx); // if we have a selection, render a markdown popup on top/below with info let option = match self.popup.contents_mut().selection_mut() { Some(option) => option, None 
=> return, }; if let CompletionItem::Lsp(option) = option { self.resolve_handler.ensure_item_resolved(cx.editor, option); } // need to render: // option.detail // --- // option.documentation let Some(coords) = cx.editor.cursor().0 else { return; }; let cursor_pos = coords.row as u16; let doc = doc!(cx.editor); let language = doc.language_name().unwrap_or(""); let markdowned = |lang: &str, detail: Option<&str>, doc: Option<&str>| { let md = match (detail, doc) { (Some(detail), Some(doc)) => format!("```{lang}\n{detail}\n```\n{doc}"), (Some(detail), None) => format!("```{lang}\n{detail}\n```"), (None, Some(doc)) => doc.to_string(), (None, None) => String::new(), }; Markdown::new(md, cx.editor.syn_loader.clone()) }; let mut markdown_doc = match option { CompletionItem::Lsp(option) => match &option.item.documentation { Some(lsp::Documentation::String(contents)) | Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { kind: lsp::MarkupKind::PlainText, value: contents, })) => { // TODO: convert to wrapped text markdowned(language, option.item.detail.as_deref(), Some(contents)) } Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { kind: lsp::MarkupKind::Markdown, value: contents, })) => { // TODO: set language based on doc scope markdowned(language, option.item.detail.as_deref(), Some(contents)) } None if option.item.detail.is_some() => { // TODO: set language based on doc scope markdowned(language, option.item.detail.as_deref(), None) } None => return, }, CompletionItem::Other(option) => { let Some(doc) = option.documentation.as_deref() else { return; }; markdowned(language, None, Some(doc)) } }; let popup_area = self.popup.area(area, cx.editor); let doc_width_available = area.width.saturating_sub(popup_area.right()); let doc_area = if doc_width_available > 30 { let mut doc_width = doc_width_available; let mut doc_height = area.height.saturating_sub(popup_area.top()); let x = popup_area.right(); let y = popup_area.top(); if let Some((rel_width, rel_height)) 
= markdown_doc.required_size((doc_width, doc_height)) { doc_width = rel_width.min(doc_width); doc_height = rel_height.min(doc_height); } Rect::new(x, y, doc_width, doc_height) } else { // Documentation should not cover the cursor or the completion popup // Completion popup could be above or below the current line let avail_height_above = cursor_pos.min(popup_area.top()).saturating_sub(1); let avail_height_below = area .height .saturating_sub(cursor_pos.max(popup_area.bottom()) + 1 /* padding */); let (y, avail_height) = if avail_height_below >= avail_height_above { ( area.height.saturating_sub(avail_height_below), avail_height_below, ) } else { (0, avail_height_above) }; if avail_height <= 1 { return; } Rect::new(0, y, area.width, avail_height.min(15)) }; // clear area let background = cx.editor.theme.get("ui.popup"); surface.clear_with(doc_area, background); if cx.editor.popup_border() { use tui::widgets::{Block, Widget}; Widget::render(Block::bordered(), doc_area, surface); } markdown_doc.render(doc_area, surface, cx); } } fn lsp_item_to_transaction( doc: &Document, view_id: ViewId, item: &lsp::CompletionItem, offset_encoding: OffsetEncoding, trigger_offset: usize, replace_mode: bool, ) -> (Transaction, Option) { let selection = doc.selection(view_id); let text = doc.text().slice(..); let primary_cursor = selection.primary().cursor(text); let (edit_offset, new_text) = if let Some(edit) = &item.text_edit { let edit = match edit { lsp::CompletionTextEdit::Edit(edit) => edit.clone(), lsp::CompletionTextEdit::InsertAndReplace(item) => { let range = if replace_mode { item.replace } else { item.insert }; lsp::TextEdit::new(range, item.new_text.clone()) } }; let Some(range) = util::lsp_range_to_range(doc.text(), edit.range, offset_encoding) else { return (Transaction::new(doc.text()), None); }; let start_offset = range.anchor as i128 - primary_cursor as i128; let end_offset = range.head as i128 - primary_cursor as i128; (Some((start_offset, end_offset)), edit.new_text) 
} else { let new_text = item .insert_text .clone() .unwrap_or_else(|| item.label.clone()); // check that we are still at the correct savepoint // we can still generate a transaction regardless but if the // document changed (and not just the selection) then we will // likely delete the wrong text (same if we applied an edit sent by the LS) debug_assert!(primary_cursor == trigger_offset); (None, new_text) }; if matches!(item.kind, Some(lsp::CompletionItemKind::SNIPPET)) || matches!( item.insert_text_format, Some(lsp::InsertTextFormat::SNIPPET) ) { let Ok(snippet) = Snippet::parse(&new_text) else { log::error!("Failed to parse snippet: {new_text:?}",); return (Transaction::new(doc.text()), None); }; let (transaction, snippet) = util::generate_transaction_from_snippet( doc.text(), selection, edit_offset, replace_mode, snippet, &mut doc.snippet_ctx(), ); (transaction, Some(snippet)) } else { let transaction = util::generate_transaction_from_completion_edit( doc.text(), selection, edit_offset, replace_mode, new_text, ); (transaction, None) } } fn completion_changes(transaction: &Transaction, trigger_offset: usize) -> Vec { transaction .changes_iter() .filter(|(start, end, _)| (*start..=*end).contains(&trigger_offset)) .collect() } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/ui/document.rs000066400000000000000000000447611500614314100252670ustar00rootroot00000000000000use std::cmp::min; use helix_core::doc_formatter::{DocumentFormatter, GraphemeSource, TextFormat}; use helix_core::graphemes::Grapheme; use helix_core::str_utils::char_to_byte_idx; use helix_core::syntax::Highlight; use helix_core::syntax::HighlightEvent; use helix_core::text_annotations::TextAnnotations; use helix_core::{visual_offset_from_block, Position, RopeSlice}; use helix_stdx::rope::RopeSliceExt; use helix_view::editor::{WhitespaceConfig, WhitespaceRenderValue}; use helix_view::graphics::Rect; use helix_view::theme::Style; use helix_view::view::ViewPosition; use 
helix_view::{Document, Theme}; use tui::buffer::Buffer as Surface; use crate::ui::text_decorations::DecorationManager; #[derive(Debug, PartialEq, Eq, Clone, Copy)] enum StyleIterKind { /// base highlights (usually emitted by TS), byte indices (potentially not codepoint aligned) BaseHighlights, /// overlay highlights (emitted by custom code from selections), char indices Overlay, } /// A wrapper around a HighlightIterator /// that merges the layered highlights to create the final text style /// and yields the active text style and the char_idx where the active /// style will have to be recomputed. /// /// TODO(ropey2): hopefully one day helix and ropey will operate entirely /// on byte ranges and we can remove this struct StyleIter<'a, H: Iterator> { text_style: Style, active_highlights: Vec, highlight_iter: H, kind: StyleIterKind, text: RopeSlice<'a>, theme: &'a Theme, } impl> Iterator for StyleIter<'_, H> { type Item = (Style, usize); fn next(&mut self) -> Option<(Style, usize)> { while let Some(event) = self.highlight_iter.next() { match event { HighlightEvent::HighlightStart(highlights) => { self.active_highlights.push(highlights) } HighlightEvent::HighlightEnd => { self.active_highlights.pop(); } HighlightEvent::Source { mut end, .. } => { let style = self .active_highlights .iter() .fold(self.text_style, |acc, span| { acc.patch(self.theme.highlight(span.0)) }); if self.kind == StyleIterKind::BaseHighlights { // Move the end byte index to the nearest character boundary (rounding up) // and convert it to a character index. 
end = self.text.byte_to_char(self.text.ceil_char_boundary(end)); } return Some((style, end)); } } } None } } #[derive(Debug, PartialEq, Eq, Copy, Clone)] pub struct LinePos { /// Indicates whether the given visual line /// is the first visual line of the given document line pub first_visual_line: bool, /// The line index of the document line that contains the given visual line pub doc_line: usize, /// Vertical offset from the top of the inner view area pub visual_line: u16, } #[allow(clippy::too_many_arguments)] pub fn render_document( surface: &mut Surface, viewport: Rect, doc: &Document, offset: ViewPosition, doc_annotations: &TextAnnotations, syntax_highlight_iter: impl Iterator, overlay_highlight_iter: impl Iterator, theme: &Theme, decorations: DecorationManager, ) { let mut renderer = TextRenderer::new( surface, doc, theme, Position::new(offset.vertical_offset, offset.horizontal_offset), viewport, ); render_text( &mut renderer, doc.text().slice(..), offset.anchor, &doc.text_format(viewport.width, Some(theme)), doc_annotations, syntax_highlight_iter, overlay_highlight_iter, theme, decorations, ) } #[allow(clippy::too_many_arguments)] pub fn render_text( renderer: &mut TextRenderer, text: RopeSlice<'_>, anchor: usize, text_fmt: &TextFormat, text_annotations: &TextAnnotations, syntax_highlight_iter: impl Iterator, overlay_highlight_iter: impl Iterator, theme: &Theme, mut decorations: DecorationManager, ) { let row_off = visual_offset_from_block(text, anchor, anchor, text_fmt, text_annotations) .0 .row; let mut formatter = DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, text_annotations, anchor); let mut syntax_styles = StyleIter { text_style: renderer.text_style, active_highlights: Vec::with_capacity(64), highlight_iter: syntax_highlight_iter, kind: StyleIterKind::BaseHighlights, theme, text, }; let mut overlay_styles = StyleIter { text_style: Style::default(), active_highlights: Vec::with_capacity(64), highlight_iter: overlay_highlight_iter, kind: 
StyleIterKind::Overlay, theme, text, }; let mut last_line_pos = LinePos { first_visual_line: false, doc_line: usize::MAX, visual_line: u16::MAX, }; let mut last_line_end = 0; let mut is_in_indent_area = true; let mut last_line_indent_level = 0; let mut syntax_style_span = syntax_styles .next() .unwrap_or_else(|| (Style::default(), usize::MAX)); let mut overlay_style_span = overlay_styles .next() .unwrap_or_else(|| (Style::default(), usize::MAX)); let mut reached_view_top = false; loop { let Some(mut grapheme) = formatter.next() else { break; }; // skip any graphemes on visual lines before the block start if grapheme.visual_pos.row < row_off { continue; } grapheme.visual_pos.row -= row_off; if !reached_view_top { decorations.prepare_for_rendering(grapheme.char_idx); reached_view_top = true; } // if the end of the viewport is reached stop rendering if grapheme.visual_pos.row as u16 >= renderer.viewport.height + renderer.offset.row as u16 { break; } // apply decorations before rendering a new line if grapheme.visual_pos.row as u16 != last_line_pos.visual_line { // we initiate doc_line with usize::MAX because no file // can reach that size (memory allocations are limited to isize::MAX) // initially there is no "previous" line (so doc_line is set to usize::MAX) // in that case we don't need to draw indent guides/virtual text if last_line_pos.doc_line != usize::MAX { // draw indent guides for the last line renderer.draw_indent_guides(last_line_indent_level, last_line_pos.visual_line); is_in_indent_area = true; decorations.render_virtual_lines(renderer, last_line_pos, last_line_end) } last_line_pos = LinePos { first_visual_line: grapheme.line_idx != last_line_pos.doc_line, doc_line: grapheme.line_idx, visual_line: grapheme.visual_pos.row as u16, }; decorations.decorate_line(renderer, last_line_pos); } // acquire the correct grapheme style while grapheme.char_idx >= syntax_style_span.1 { syntax_style_span = syntax_styles .next() .unwrap_or((Style::default(), usize::MAX)); 
} while grapheme.char_idx >= overlay_style_span.1 { overlay_style_span = overlay_styles .next() .unwrap_or((Style::default(), usize::MAX)); } let grapheme_style = if let GraphemeSource::VirtualText { highlight } = grapheme.source { let mut style = renderer.text_style; if let Some(highlight) = highlight { style = style.patch(theme.highlight(highlight.0)); } GraphemeStyle { syntax_style: style, overlay_style: Style::default(), } } else { GraphemeStyle { syntax_style: syntax_style_span.0, overlay_style: overlay_style_span.0, } }; decorations.decorate_grapheme(renderer, &grapheme); let virt = grapheme.is_virtual(); let grapheme_width = renderer.draw_grapheme( grapheme.raw, grapheme_style, virt, &mut last_line_indent_level, &mut is_in_indent_area, grapheme.visual_pos, ); last_line_end = grapheme.visual_pos.col + grapheme_width; } renderer.draw_indent_guides(last_line_indent_level, last_line_pos.visual_line); decorations.render_virtual_lines(renderer, last_line_pos, last_line_end) } #[derive(Debug)] pub struct TextRenderer<'a> { surface: &'a mut Surface, pub text_style: Style, pub whitespace_style: Style, pub indent_guide_char: String, pub indent_guide_style: Style, pub newline: String, pub nbsp: String, pub nnbsp: String, pub space: String, pub tab: String, pub virtual_tab: String, pub indent_width: u16, pub starting_indent: usize, pub draw_indent_guides: bool, pub viewport: Rect, pub offset: Position, } pub struct GraphemeStyle { syntax_style: Style, overlay_style: Style, } impl<'a> TextRenderer<'a> { pub fn new( surface: &'a mut Surface, doc: &Document, theme: &Theme, offset: Position, viewport: Rect, ) -> TextRenderer<'a> { let editor_config = doc.config.load(); let WhitespaceConfig { render: ws_render, characters: ws_chars, } = &editor_config.whitespace; let tab_width = doc.tab_width(); let tab = if ws_render.tab() == WhitespaceRenderValue::All { std::iter::once(ws_chars.tab) .chain(std::iter::repeat(ws_chars.tabpad).take(tab_width - 1)) .collect() } else { " 
".repeat(tab_width) }; let virtual_tab = " ".repeat(tab_width); let newline = if ws_render.newline() == WhitespaceRenderValue::All { ws_chars.newline.into() } else { " ".to_owned() }; let space = if ws_render.space() == WhitespaceRenderValue::All { ws_chars.space.into() } else { " ".to_owned() }; let nbsp = if ws_render.nbsp() == WhitespaceRenderValue::All { ws_chars.nbsp.into() } else { " ".to_owned() }; let nnbsp = if ws_render.nnbsp() == WhitespaceRenderValue::All { ws_chars.nnbsp.into() } else { " ".to_owned() }; let text_style = theme.get("ui.text"); let indent_width = doc.indent_style.indent_width(tab_width) as u16; TextRenderer { surface, indent_guide_char: editor_config.indent_guides.character.into(), newline, nbsp, nnbsp, space, tab, virtual_tab, whitespace_style: theme.get("ui.virtual.whitespace"), indent_width, starting_indent: offset.col / indent_width as usize + (offset.col % indent_width as usize != 0) as usize + editor_config.indent_guides.skip_levels as usize, indent_guide_style: text_style.patch( theme .try_get("ui.virtual.indent-guide") .unwrap_or_else(|| theme.get("ui.virtual.whitespace")), ), text_style, draw_indent_guides: editor_config.indent_guides.render, viewport, offset, } } /// Draws a single `grapheme` at the current render position with a specified `style`. pub fn draw_decoration_grapheme( &mut self, grapheme: Grapheme, mut style: Style, mut row: u16, col: u16, ) -> bool { if (row as usize) < self.offset.row || row >= self.viewport.height || col >= self.viewport.width { return false; } row -= self.offset.row as u16; // TODO is it correct to apply the whitspace style to all unicode white spaces? 
if grapheme.is_whitespace() { style = style.patch(self.whitespace_style); } let grapheme = match grapheme { Grapheme::Tab { width } => { let grapheme_tab_width = char_to_byte_idx(&self.virtual_tab, width); &self.virtual_tab[..grapheme_tab_width] } Grapheme::Other { ref g } if g == "\u{00A0}" => " ", Grapheme::Other { ref g } => g, Grapheme::Newline => " ", }; self.surface.set_string( self.viewport.x + col, self.viewport.y + row, grapheme, style, ); true } /// Draws a single `grapheme` at the current render position with a specified `style`. pub fn draw_grapheme( &mut self, grapheme: Grapheme, grapheme_style: GraphemeStyle, is_virtual: bool, last_indent_level: &mut usize, is_in_indent_area: &mut bool, mut position: Position, ) -> usize { if position.row < self.offset.row { return 0; } position.row -= self.offset.row; let cut_off_start = self.offset.col.saturating_sub(position.col); let is_whitespace = grapheme.is_whitespace(); // TODO is it correct to apply the whitespace style to all unicode white spaces? let mut style = grapheme_style.syntax_style; if is_whitespace { style = style.patch(self.whitespace_style); } style = style.patch(grapheme_style.overlay_style); let width = grapheme.width(); let space = if is_virtual { " " } else { &self.space }; let nbsp = if is_virtual { " " } else { &self.nbsp }; let nnbsp = if is_virtual { " " } else { &self.nnbsp }; let tab = if is_virtual { &self.virtual_tab } else { &self.tab }; let grapheme = match grapheme { Grapheme::Tab { width } => { let grapheme_tab_width = char_to_byte_idx(tab, width); &tab[..grapheme_tab_width] } // TODO special rendering for other whitespaces? 
Grapheme::Other { ref g } if g == " " => space, Grapheme::Other { ref g } if g == "\u{00A0}" => nbsp, Grapheme::Other { ref g } if g == "\u{202F}" => nnbsp, Grapheme::Other { ref g } => g, Grapheme::Newline => &self.newline, }; let in_bounds = self.column_in_bounds(position.col, width); if in_bounds { self.surface.set_string( self.viewport.x + (position.col - self.offset.col) as u16, self.viewport.y + position.row as u16, grapheme, style, ); } else if cut_off_start != 0 && cut_off_start < width { // partially on screen let rect = Rect::new( self.viewport.x, self.viewport.y + position.row as u16, (width - cut_off_start) as u16, 1, ); self.surface.set_style(rect, style); } if *is_in_indent_area && !is_whitespace { *last_indent_level = position.col; *is_in_indent_area = false; } width } pub fn column_in_bounds(&self, colum: usize, width: usize) -> bool { self.offset.col <= colum && colum + width <= self.offset.col + self.viewport.width as usize } /// Overlay indentation guides ontop of a rendered line /// The indentation level is computed in `draw_lines`. /// Therefore this function must always be called afterwards. 
pub fn draw_indent_guides(&mut self, indent_level: usize, mut row: u16) { if !self.draw_indent_guides || self.offset.row > row as usize { return; } row -= self.offset.row as u16; // Don't draw indent guides outside of view let end_indent = min( indent_level, // Add indent_width - 1 to round up, since the first visible // indent might be a bit after offset.col self.offset.col + self.viewport.width as usize + (self.indent_width as usize - 1), ) / self.indent_width as usize; for i in self.starting_indent..end_indent { let x = (self.viewport.x as usize + (i * self.indent_width as usize) - self.offset.col) as u16; let y = self.viewport.y + row; debug_assert!(self.surface.in_bounds(x, y)); self.surface .set_string(x, y, &self.indent_guide_char, self.indent_guide_style); } } pub fn set_string(&mut self, x: u16, y: u16, string: impl AsRef, style: Style) { if (y as usize) < self.offset.row { return; } self.surface .set_string(x, y + self.viewport.y, string, style) } pub fn set_stringn( &mut self, x: u16, y: u16, string: impl AsRef, width: usize, style: Style, ) { if (y as usize) < self.offset.row { return; } self.surface .set_stringn(x, y + self.viewport.y, string, width, style); } /// Sets the style of an area **within the text viewport* this accounts /// both for the renderers vertical offset and its viewport pub fn set_style(&mut self, mut area: Rect, style: Style) { area = area.clip_top(self.offset.row as u16); area.y += self.viewport.y; self.surface.set_style(area, style); } #[allow(clippy::too_many_arguments)] pub fn set_string_truncated( &mut self, x: u16, y: u16, string: &str, width: usize, style: impl Fn(usize) -> Style, // Map a grapheme's string offset to a style ellipsis: bool, truncate_start: bool, ) -> (u16, u16) { if (y as usize) < self.offset.row { return (x, y); } self.surface.set_string_truncated( x, y + self.viewport.y, string, width, style, ellipsis, truncate_start, ) } } 
helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/ui/editor.rs000066400000000000000000001766431500614314100247440ustar00rootroot00000000000000use crate::{ commands::{self, OnKeyCallback, OnKeyCallbackKind}, compositor::{Component, Context, Event, EventResult}, events::{OnModeSwitch, PostCommand}, handlers::completion::CompletionItem, key, keymap::{KeymapResult, Keymaps}, ui::{ document::{render_document, LinePos, TextRenderer}, statusline, text_decorations::{self, Decoration, DecorationManager, InlineDiagnostics}, Completion, ProgressSpinners, }, }; use helix_core::{ diagnostic::NumberOrString, graphemes::{next_grapheme_boundary, prev_grapheme_boundary}, movement::Direction, syntax::{self, HighlightEvent}, text_annotations::TextAnnotations, unicode::width::UnicodeWidthStr, visual_offset_from_block, Change, Position, Range, Selection, Transaction, }; use helix_view::{ annotations::diagnostics::DiagnosticFilter, document::{Mode, SCRATCH_BUFFER_NAME}, editor::{CompleteAction, CursorShapeConfig}, graphics::{Color, CursorKind, Modifier, Rect, Style}, input::{KeyEvent, MouseButton, MouseEvent, MouseEventKind}, keyboard::{KeyCode, KeyModifiers}, Document, Editor, Theme, View, }; use std::{mem::take, num::NonZeroUsize, path::PathBuf, rc::Rc}; use tui::{buffer::Buffer as Surface, text::Span}; pub struct EditorView { pub keymaps: Keymaps, on_next_key: Option<(OnKeyCallback, OnKeyCallbackKind)>, pseudo_pending: Vec, pub(crate) last_insert: (commands::MappableCommand, Vec), pub(crate) completion: Option, spinners: ProgressSpinners, /// Tracks if the terminal window is focused by reaction to terminal focus events terminal_focused: bool, } #[derive(Debug, Clone)] pub enum InsertEvent { Key(KeyEvent), CompletionApply { trigger_offset: usize, changes: Vec, }, TriggerCompletion, RequestCompletion, } impl EditorView { pub fn new(keymaps: Keymaps) -> Self { Self { keymaps, on_next_key: None, pseudo_pending: Vec::new(), last_insert: (commands::MappableCommand::normal_mode, 
Vec::new()), completion: None, spinners: ProgressSpinners::default(), terminal_focused: true, } } pub fn spinners_mut(&mut self) -> &mut ProgressSpinners { &mut self.spinners } pub fn render_view( &self, editor: &Editor, doc: &Document, view: &View, viewport: Rect, surface: &mut Surface, is_focused: bool, ) { let inner = view.inner_area(doc); let area = view.area; let theme = &editor.theme; let config = editor.config(); let view_offset = doc.view_offset(view.id); let text_annotations = view.text_annotations(doc, Some(theme)); let mut decorations = DecorationManager::default(); if is_focused && config.cursorline { decorations.add_decoration(Self::cursorline(doc, view, theme)); } if is_focused && config.cursorcolumn { Self::highlight_cursorcolumn(doc, view, surface, theme, inner, &text_annotations); } // Set DAP highlights, if needed. if let Some(frame) = editor.current_stack_frame() { let dap_line = frame.line.saturating_sub(1); let style = theme.get("ui.highlight.frameline"); let line_decoration = move |renderer: &mut TextRenderer, pos: LinePos| { if pos.doc_line != dap_line { return; } renderer.set_style(Rect::new(inner.x, pos.visual_line, inner.width, 1), style); }; decorations.add_decoration(line_decoration); } let syntax_highlights = Self::doc_syntax_highlights(doc, view_offset.anchor, inner.height, theme); let mut overlay_highlights = Self::empty_highlight_iter(doc, view_offset.anchor, inner.height); let overlay_syntax_highlights = Self::overlay_syntax_highlights( doc, view_offset.anchor, inner.height, &text_annotations, ); if !overlay_syntax_highlights.is_empty() { overlay_highlights = Box::new(syntax::merge(overlay_highlights, overlay_syntax_highlights)); } for diagnostic in Self::doc_diagnostics_highlights(doc, theme) { // Most of the `diagnostic` Vecs are empty most of the time. Skipping // a merge for any empty Vec saves a significant amount of work. 
if diagnostic.is_empty() { continue; } overlay_highlights = Box::new(syntax::merge(overlay_highlights, diagnostic)); } if is_focused { if let Some(tabstops) = Self::tabstop_highlights(doc, theme) { overlay_highlights = Box::new(syntax::merge(overlay_highlights, tabstops)); } let highlights = syntax::merge( overlay_highlights, Self::doc_selection_highlights( editor.mode(), doc, view, theme, &config.cursor_shape, self.terminal_focused, ), ); let focused_view_elements = Self::highlight_focused_view_elements(view, doc, theme); if focused_view_elements.is_empty() { overlay_highlights = Box::new(highlights) } else { overlay_highlights = Box::new(syntax::merge(highlights, focused_view_elements)) } } let gutter_overflow = view.gutter_offset(doc) == 0; if !gutter_overflow { Self::render_gutter( editor, doc, view, view.area, theme, is_focused & self.terminal_focused, &mut decorations, ); } Self::render_rulers(editor, doc, view, inner, surface, theme); let primary_cursor = doc .selection(view.id) .primary() .cursor(doc.text().slice(..)); if is_focused { decorations.add_decoration(text_decorations::Cursor { cache: &editor.cursor_cache, primary_cursor, }); } let width = view.inner_width(doc); let config = doc.config.load(); let enable_cursor_line = view .diagnostics_handler .show_cursorline_diagnostics(doc, view.id); let inline_diagnostic_config = config.inline_diagnostics.prepare(width, enable_cursor_line); decorations.add_decoration(InlineDiagnostics::new( doc, theme, primary_cursor, inline_diagnostic_config, config.end_of_line_diagnostics, )); render_document( surface, inner, doc, view_offset, &text_annotations, syntax_highlights, overlay_highlights, theme, decorations, ); // if we're not at the edge of the screen, draw a right border if viewport.right() != view.area.right() { let x = area.right(); let border_style = theme.get("ui.window"); for y in area.top()..area.bottom() { surface[(x, y)] .set_symbol(tui::symbols::line::VERTICAL) //.set_symbol(" ") 
.set_style(border_style); } } if config.inline_diagnostics.disabled() && config.end_of_line_diagnostics == DiagnosticFilter::Disable { Self::render_diagnostics(doc, view, inner, surface, theme); } let statusline_area = view .area .clip_top(view.area.height.saturating_sub(1)) .clip_bottom(1); // -1 from bottom to remove commandline let mut context = statusline::RenderContext::new(editor, doc, view, is_focused, &self.spinners); statusline::render(&mut context, statusline_area, surface); } pub fn render_rulers( editor: &Editor, doc: &Document, view: &View, viewport: Rect, surface: &mut Surface, theme: &Theme, ) { let editor_rulers = &editor.config().rulers; let ruler_theme = theme .try_get("ui.virtual.ruler") .unwrap_or_else(|| Style::default().bg(Color::Red)); let rulers = doc .language_config() .and_then(|config| config.rulers.as_ref()) .unwrap_or(editor_rulers); let view_offset = doc.view_offset(view.id); rulers .iter() // View might be horizontally scrolled, convert from absolute distance // from the 1st column to relative distance from left of viewport .filter_map(|ruler| ruler.checked_sub(1 + view_offset.horizontal_offset as u16)) .filter(|ruler| ruler < &viewport.width) .map(|ruler| viewport.clip_left(ruler).with_width(1)) .for_each(|area| surface.set_style(area, ruler_theme)) } fn viewport_byte_range( text: helix_core::RopeSlice, row: usize, height: u16, ) -> std::ops::Range { // Calculate viewport byte ranges: // Saturating subs to make it inclusive zero indexing. 
let last_line = text.len_lines().saturating_sub(1); let last_visible_line = (row + height as usize).saturating_sub(1).min(last_line); let start = text.line_to_byte(row.min(last_line)); let end = text.line_to_byte(last_visible_line + 1); start..end } pub fn empty_highlight_iter( doc: &Document, anchor: usize, height: u16, ) -> Box> { let text = doc.text().slice(..); let row = text.char_to_line(anchor.min(text.len_chars())); // Calculate viewport byte ranges: // Saturating subs to make it inclusive zero indexing. let range = Self::viewport_byte_range(text, row, height); Box::new( [HighlightEvent::Source { start: text.byte_to_char(range.start), end: text.byte_to_char(range.end), }] .into_iter(), ) } /// Get syntax highlights for a document in a view represented by the first line /// and column (`offset`) and the last line. This is done instead of using a view /// directly to enable rendering syntax highlighted docs anywhere (eg. picker preview) pub fn doc_syntax_highlights<'doc>( doc: &'doc Document, anchor: usize, height: u16, _theme: &Theme, ) -> Box + 'doc> { let text = doc.text().slice(..); let row = text.char_to_line(anchor.min(text.len_chars())); let range = Self::viewport_byte_range(text, row, height); match doc.syntax() { Some(syntax) => { let iter = syntax // TODO: range doesn't actually restrict source, just highlight range .highlight_iter(text.slice(..), Some(range), None) .map(|event| event.unwrap()); Box::new(iter) } None => Box::new( [HighlightEvent::Source { start: range.start, end: range.end, }] .into_iter(), ), } } pub fn overlay_syntax_highlights( doc: &Document, anchor: usize, height: u16, text_annotations: &TextAnnotations, ) -> Vec<(usize, std::ops::Range)> { let text = doc.text().slice(..); let row = text.char_to_line(anchor.min(text.len_chars())); let mut range = Self::viewport_byte_range(text, row, height); range = text.byte_to_char(range.start)..text.byte_to_char(range.end); text_annotations.collect_overlay_highlights(range) } /// Get 
highlight spans for document diagnostics pub fn doc_diagnostics_highlights( doc: &Document, theme: &Theme, ) -> [Vec<(usize, std::ops::Range)>; 7] { use helix_core::diagnostic::{DiagnosticTag, Range, Severity}; let get_scope_of = |scope| { theme .find_scope_index_exact(scope) // get one of the themes below as fallback values .or_else(|| theme.find_scope_index_exact("diagnostic")) .or_else(|| theme.find_scope_index_exact("ui.cursor")) .or_else(|| theme.find_scope_index_exact("ui.selection")) .expect( "at least one of the following scopes must be defined in the theme: `diagnostic`, `ui.cursor`, or `ui.selection`", ) }; // basically just queries the theme color defined in the config let hint = get_scope_of("diagnostic.hint"); let info = get_scope_of("diagnostic.info"); let warning = get_scope_of("diagnostic.warning"); let error = get_scope_of("diagnostic.error"); let r#default = get_scope_of("diagnostic"); // this is a bit redundant but should be fine // Diagnostic tags let unnecessary = theme.find_scope_index_exact("diagnostic.unnecessary"); let deprecated = theme.find_scope_index_exact("diagnostic.deprecated"); let mut default_vec: Vec<(usize, std::ops::Range)> = Vec::new(); let mut info_vec = Vec::new(); let mut hint_vec = Vec::new(); let mut warning_vec = Vec::new(); let mut error_vec = Vec::new(); let mut unnecessary_vec = Vec::new(); let mut deprecated_vec = Vec::new(); let push_diagnostic = |vec: &mut Vec<(usize, std::ops::Range)>, scope, range: Range| { // If any diagnostic overlaps ranges with the prior diagnostic, // merge the two together. Otherwise push a new span. match vec.last_mut() { Some((_, existing_range)) if range.start <= existing_range.end => { // This branch merges overlapping diagnostics, assuming that the current // diagnostic starts on range.start or later. If this assertion fails, // we will discard some part of `diagnostic`. This implies that // `doc.diagnostics()` is not sorted by `diagnostic.range`. 
debug_assert!(existing_range.start <= range.start); existing_range.end = range.end.max(existing_range.end) } _ => vec.push((scope, range.start..range.end)), } }; for diagnostic in doc.diagnostics() { // Separate diagnostics into different Vecs by severity. let (vec, scope) = match diagnostic.severity { Some(Severity::Info) => (&mut info_vec, info), Some(Severity::Hint) => (&mut hint_vec, hint), Some(Severity::Warning) => (&mut warning_vec, warning), Some(Severity::Error) => (&mut error_vec, error), _ => (&mut default_vec, r#default), }; // If the diagnostic has tags and a non-warning/error severity, skip rendering // the diagnostic as info/hint/default and only render it as unnecessary/deprecated // instead. For warning/error diagnostics, render both the severity highlight and // the tag highlight. if diagnostic.tags.is_empty() || matches!( diagnostic.severity, Some(Severity::Warning | Severity::Error) ) { push_diagnostic(vec, scope, diagnostic.range); } for tag in &diagnostic.tags { match tag { DiagnosticTag::Unnecessary => { if let Some(scope) = unnecessary { push_diagnostic(&mut unnecessary_vec, scope, diagnostic.range) } } DiagnosticTag::Deprecated => { if let Some(scope) = deprecated { push_diagnostic(&mut deprecated_vec, scope, diagnostic.range) } } } } } [ default_vec, unnecessary_vec, deprecated_vec, info_vec, hint_vec, warning_vec, error_vec, ] } /// Get highlight spans for selections in a document view. 
pub fn doc_selection_highlights( mode: Mode, doc: &Document, view: &View, theme: &Theme, cursor_shape_config: &CursorShapeConfig, is_terminal_focused: bool, ) -> Vec<(usize, std::ops::Range)> { let text = doc.text().slice(..); let selection = doc.selection(view.id); let primary_idx = selection.primary_index(); let cursorkind = cursor_shape_config.from_mode(mode); let cursor_is_block = cursorkind == CursorKind::Block; let selection_scope = theme .find_scope_index_exact("ui.selection") .expect("could not find `ui.selection` scope in the theme!"); let primary_selection_scope = theme .find_scope_index_exact("ui.selection.primary") .unwrap_or(selection_scope); let base_cursor_scope = theme .find_scope_index_exact("ui.cursor") .unwrap_or(selection_scope); let base_primary_cursor_scope = theme .find_scope_index("ui.cursor.primary") .unwrap_or(base_cursor_scope); let cursor_scope = match mode { Mode::Insert => theme.find_scope_index_exact("ui.cursor.insert"), Mode::Select => theme.find_scope_index_exact("ui.cursor.select"), Mode::Normal => theme.find_scope_index_exact("ui.cursor.normal"), } .unwrap_or(base_cursor_scope); let primary_cursor_scope = match mode { Mode::Insert => theme.find_scope_index_exact("ui.cursor.primary.insert"), Mode::Select => theme.find_scope_index_exact("ui.cursor.primary.select"), Mode::Normal => theme.find_scope_index_exact("ui.cursor.primary.normal"), } .unwrap_or(base_primary_cursor_scope); let mut spans: Vec<(usize, std::ops::Range)> = Vec::new(); for (i, range) in selection.iter().enumerate() { let selection_is_primary = i == primary_idx; let (cursor_scope, selection_scope) = if selection_is_primary { (primary_cursor_scope, primary_selection_scope) } else { (cursor_scope, selection_scope) }; // Special-case: cursor at end of the rope. 
if range.head == range.anchor && range.head == text.len_chars() { if !selection_is_primary || (cursor_is_block && is_terminal_focused) { // Bar and underline cursors are drawn by the terminal // BUG: If the editor area loses focus while having a bar or // underline cursor (eg. when a regex prompt has focus) then // the primary cursor will be invisible. This doesn't happen // with block cursors since we manually draw *all* cursors. spans.push((cursor_scope, range.head..range.head + 1)); } continue; } let range = range.min_width_1(text); if range.head > range.anchor { // Standard case. let cursor_start = prev_grapheme_boundary(text, range.head); // non block cursors look like they exclude the cursor let selection_end = if selection_is_primary && !cursor_is_block && mode != Mode::Insert { range.head } else { cursor_start }; spans.push((selection_scope, range.anchor..selection_end)); // add block cursors // skip primary cursor if terminal is unfocused - crossterm cursor is used in that case if !selection_is_primary || (cursor_is_block && is_terminal_focused) { spans.push((cursor_scope, cursor_start..range.head)); } } else { // Reverse case. 
let cursor_end = next_grapheme_boundary(text, range.head); // add block cursors // skip primary cursor if terminal is unfocused - crossterm cursor is used in that case if !selection_is_primary || (cursor_is_block && is_terminal_focused) { spans.push((cursor_scope, range.head..cursor_end)); } // non block cursors look like they exclude the cursor let selection_start = if selection_is_primary && !cursor_is_block && !(mode == Mode::Insert && cursor_end == range.anchor) { range.head } else { cursor_end }; spans.push((selection_scope, selection_start..range.anchor)); } } spans } /// Render brace match, etc (meant for the focused view only) pub fn highlight_focused_view_elements( view: &View, doc: &Document, theme: &Theme, ) -> Vec<(usize, std::ops::Range)> { // Highlight matching braces if let Some(syntax) = doc.syntax() { let text = doc.text().slice(..); use helix_core::match_brackets; let pos = doc.selection(view.id).primary().cursor(text); if let Some(pos) = match_brackets::find_matching_bracket(syntax, doc.text().slice(..), pos) { // ensure col is on screen if let Some(highlight) = theme.find_scope_index_exact("ui.cursor.match") { return vec![(highlight, pos..pos + 1)]; } } } Vec::new() } pub fn tabstop_highlights( doc: &Document, theme: &Theme, ) -> Option)>> { let snippet = doc.active_snippet.as_ref()?; let highlight = theme.find_scope_index_exact("tabstop")?; let mut highlights = Vec::new(); for tabstop in snippet.tabstops() { highlights.extend( tabstop .ranges .iter() .map(|range| (highlight, range.start..range.end)), ); } (!highlights.is_empty()).then_some(highlights) } /// Render bufferline at the top pub fn render_bufferline(editor: &Editor, viewport: Rect, surface: &mut Surface) { let scratch = PathBuf::from(SCRATCH_BUFFER_NAME); // default filename to use for scratch buffer surface.clear_with( viewport, editor .theme .try_get("ui.bufferline.background") .unwrap_or_else(|| editor.theme.get("ui.statusline")), ); let bufferline_active = editor .theme 
.try_get("ui.bufferline.active") .unwrap_or_else(|| editor.theme.get("ui.statusline.active")); let bufferline_inactive = editor .theme .try_get("ui.bufferline") .unwrap_or_else(|| editor.theme.get("ui.statusline.inactive")); let mut x = viewport.x; let current_doc = view!(editor).doc; for doc in editor.documents() { let fname = doc .path() .unwrap_or(&scratch) .file_name() .unwrap_or_default() .to_str() .unwrap_or_default(); let style = if current_doc == doc.id() { bufferline_active } else { bufferline_inactive }; let text = format!(" {}{} ", fname, if doc.is_modified() { "[+]" } else { "" }); let used_width = viewport.x.saturating_sub(x); let rem_width = surface.area.width.saturating_sub(used_width); x = surface .set_stringn(x, viewport.y, text, rem_width as usize, style) .0; if x >= surface.area.right() { break; } } } pub fn render_gutter<'d>( editor: &'d Editor, doc: &'d Document, view: &View, viewport: Rect, theme: &Theme, is_focused: bool, decoration_manager: &mut DecorationManager<'d>, ) { let text = doc.text().slice(..); let cursors: Rc<[_]> = doc .selection(view.id) .iter() .map(|range| range.cursor_line(text)) .collect(); let mut offset = 0; let gutter_style = theme.get("ui.gutter"); let gutter_selected_style = theme.get("ui.gutter.selected"); let gutter_style_virtual = theme.get("ui.gutter.virtual"); let gutter_selected_style_virtual = theme.get("ui.gutter.selected.virtual"); for gutter_type in view.gutters() { let mut gutter = gutter_type.style(editor, doc, view, theme, is_focused); let width = gutter_type.width(view, doc); // avoid lots of small allocations by reusing a text buffer for each line let mut text = String::with_capacity(width); let cursors = cursors.clone(); let gutter_decoration = move |renderer: &mut TextRenderer, pos: LinePos| { // TODO handle softwrap in gutters let selected = cursors.contains(&pos.doc_line); let x = viewport.x + offset; let y = pos.visual_line; let gutter_style = match (selected, pos.first_visual_line) { (false, true) 
=> gutter_style, (true, true) => gutter_selected_style, (false, false) => gutter_style_virtual, (true, false) => gutter_selected_style_virtual, }; if let Some(style) = gutter(pos.doc_line, selected, pos.first_visual_line, &mut text) { renderer.set_stringn(x, y, &text, width, gutter_style.patch(style)); } else { renderer.set_style( Rect { x, y, width: width as u16, height: 1, }, gutter_style, ); } text.clear(); }; decoration_manager.add_decoration(gutter_decoration); offset += width as u16; } } pub fn render_diagnostics( doc: &Document, view: &View, viewport: Rect, surface: &mut Surface, theme: &Theme, ) { use helix_core::diagnostic::Severity; use tui::{ layout::Alignment, text::Text, widgets::{Paragraph, Widget, Wrap}, }; let cursor = doc .selection(view.id) .primary() .cursor(doc.text().slice(..)); let diagnostics = doc.diagnostics().iter().filter(|diagnostic| { diagnostic.range.start <= cursor && diagnostic.range.end >= cursor }); let warning = theme.get("warning"); let error = theme.get("error"); let info = theme.get("info"); let hint = theme.get("hint"); let mut lines = Vec::new(); let background_style = theme.get("ui.background"); for diagnostic in diagnostics { let style = Style::reset() .patch(background_style) .patch(match diagnostic.severity { Some(Severity::Error) => error, Some(Severity::Warning) | None => warning, Some(Severity::Info) => info, Some(Severity::Hint) => hint, }); let text = Text::styled(&diagnostic.message, style); lines.extend(text.lines); let code = diagnostic.code.as_ref().map(|x| match x { NumberOrString::Number(n) => format!("({n})"), NumberOrString::String(s) => format!("({s})"), }); if let Some(code) = code { let span = Span::styled(code, style); lines.push(span.into()); } } let text = Text::from(lines); let paragraph = Paragraph::new(&text) .alignment(Alignment::Right) .wrap(Wrap { trim: true }); let width = 100.min(viewport.width); let height = 15.min(viewport.height); paragraph.render( Rect::new(viewport.right() - width, 
viewport.y + 1, width, height), surface, ); } /// Apply the highlighting on the lines where a cursor is active pub fn cursorline(doc: &Document, view: &View, theme: &Theme) -> impl Decoration { let text = doc.text().slice(..); // TODO only highlight the visual line that contains the cursor instead of the full visual line let primary_line = doc.selection(view.id).primary().cursor_line(text); // The secondary_lines do contain the primary_line, it doesn't matter // as the else-if clause in the loop later won't test for the // secondary_lines if primary_line == line. // It's used inside a loop so the collect isn't needless: // https://github.com/rust-lang/rust-clippy/issues/6164 #[allow(clippy::needless_collect)] let secondary_lines: Vec<_> = doc .selection(view.id) .iter() .map(|range| range.cursor_line(text)) .collect(); let primary_style = theme.get("ui.cursorline.primary"); let secondary_style = theme.get("ui.cursorline.secondary"); let viewport = view.area; move |renderer: &mut TextRenderer, pos: LinePos| { let area = Rect::new(viewport.x, pos.visual_line, viewport.width, 1); if primary_line == pos.doc_line { renderer.set_style(area, primary_style); } else if secondary_lines.binary_search(&pos.doc_line).is_ok() { renderer.set_style(area, secondary_style); } } } /// Apply the highlighting on the columns where a cursor is active pub fn highlight_cursorcolumn( doc: &Document, view: &View, surface: &mut Surface, theme: &Theme, viewport: Rect, text_annotations: &TextAnnotations, ) { let text = doc.text().slice(..); // Manual fallback behaviour: // ui.cursorcolumn.{p/s} -> ui.cursorcolumn -> ui.cursorline.{p/s} let primary_style = theme .try_get_exact("ui.cursorcolumn.primary") .or_else(|| theme.try_get_exact("ui.cursorcolumn")) .unwrap_or_else(|| theme.get("ui.cursorline.primary")); let secondary_style = theme .try_get_exact("ui.cursorcolumn.secondary") .or_else(|| theme.try_get_exact("ui.cursorcolumn")) .unwrap_or_else(|| theme.get("ui.cursorline.secondary")); let 
inner_area = view.inner_area(doc); let selection = doc.selection(view.id); let view_offset = doc.view_offset(view.id); let primary = selection.primary(); let text_format = doc.text_format(viewport.width, None); for range in selection.iter() { let is_primary = primary == *range; let cursor = range.cursor(text); let Position { col, .. } = visual_offset_from_block(text, cursor, cursor, &text_format, text_annotations).0; // if the cursor is horizontally in the view if col >= view_offset.horizontal_offset && inner_area.width > (col - view_offset.horizontal_offset) as u16 { let area = Rect::new( inner_area.x + (col - view_offset.horizontal_offset) as u16, view.area.y, 1, view.area.height, ); if is_primary { surface.set_style(area, primary_style) } else { surface.set_style(area, secondary_style) } } } } /// Handle events by looking them up in `self.keymaps`. Returns None /// if event was handled (a command was executed or a subkeymap was /// activated). Only KeymapResult::{NotFound, Cancelled} is returned /// otherwise. fn handle_keymap_event( &mut self, mode: Mode, cxt: &mut commands::Context, event: KeyEvent, ) -> Option { let mut last_mode = mode; self.pseudo_pending.extend(self.keymaps.pending()); let key_result = self.keymaps.get(mode, event); cxt.editor.autoinfo = self.keymaps.sticky().map(|node| node.infobox()); let mut execute_command = |command: &commands::MappableCommand| { command.execute(cxt); helix_event::dispatch(PostCommand { command, cx: cxt }); let current_mode = cxt.editor.mode(); if current_mode != last_mode { helix_event::dispatch(OnModeSwitch { old_mode: last_mode, new_mode: current_mode, cx: cxt, }); // HAXX: if we just entered insert mode from normal, clear key buf // and record the command that got us into this mode. if current_mode == Mode::Insert { // how we entered insert mode is important, and we should track that so // we can repeat the side effect. 
self.last_insert.0 = command.clone(); self.last_insert.1.clear(); } } last_mode = current_mode; }; match &key_result { KeymapResult::Matched(command) => { execute_command(command); } KeymapResult::Pending(node) => cxt.editor.autoinfo = Some(node.infobox()), KeymapResult::MatchedSequence(commands) => { for command in commands { execute_command(command); } } KeymapResult::NotFound | KeymapResult::Cancelled(_) => return Some(key_result), } None } fn insert_mode(&mut self, cx: &mut commands::Context, event: KeyEvent) { if let Some(keyresult) = self.handle_keymap_event(Mode::Insert, cx, event) { match keyresult { KeymapResult::NotFound => { if !self.on_next_key(OnKeyCallbackKind::Fallback, cx, event) { if let Some(ch) = event.char() { commands::insert::insert_char(cx, ch) } } } KeymapResult::Cancelled(pending) => { for ev in pending { match ev.char() { Some(ch) => commands::insert::insert_char(cx, ch), None => { if let KeymapResult::Matched(command) = self.keymaps.get(Mode::Insert, ev) { command.execute(cx); } } } } } _ => unreachable!(), } } } fn command_mode(&mut self, mode: Mode, cxt: &mut commands::Context, event: KeyEvent) { match (event, cxt.editor.count) { // If the count is already started and the input is a number, always continue the count. (key!(i @ '0'..='9'), Some(count)) => { let i = i.to_digit(10).unwrap() as usize; let count = count.get() * 10 + i; if count > 100_000_000 { return; } cxt.editor.count = NonZeroUsize::new(count); } // A non-zero digit will start the count if that number isn't used by a keymap. 
(key!(i @ '1'..='9'), None) if !self.keymaps.contains_key(mode, event) => { let i = i.to_digit(10).unwrap() as usize; cxt.editor.count = NonZeroUsize::new(i); } // special handling for repeat operator (key!('.'), _) if self.keymaps.pending().is_empty() => { for _ in 0..cxt.editor.count.map_or(1, NonZeroUsize::into) { // first execute whatever put us into insert mode self.last_insert.0.execute(cxt); let mut last_savepoint = None; let mut last_request_savepoint = None; // then replay the inputs for key in self.last_insert.1.clone() { match key { InsertEvent::Key(key) => self.insert_mode(cxt, key), InsertEvent::CompletionApply { trigger_offset, changes, } => { let (view, doc) = current!(cxt.editor); if let Some(last_savepoint) = last_savepoint.as_deref() { doc.restore(view, last_savepoint, true); } let text = doc.text().slice(..); let cursor = doc.selection(view.id).primary().cursor(text); let shift_position = |pos: usize| -> usize { (pos + cursor).saturating_sub(trigger_offset) }; let tx = Transaction::change( doc.text(), changes.iter().cloned().map(|(start, end, t)| { (shift_position(start), shift_position(end), t) }), ); doc.apply(&tx, view.id); } InsertEvent::TriggerCompletion => { last_savepoint = take(&mut last_request_savepoint); } InsertEvent::RequestCompletion => { let (view, doc) = current!(cxt.editor); last_request_savepoint = Some(doc.savepoint(view)); } } } } cxt.editor.count = None; } _ => { // set the count cxt.count = cxt.editor.count; // TODO: edge case: 0j -> reset to 1 // if this fails, count was Some(0) // debug_assert!(cxt.count != 0); // set the register cxt.register = cxt.editor.selected_register.take(); let res = self.handle_keymap_event(mode, cxt, event); if matches!(&res, Some(KeymapResult::NotFound)) { self.on_next_key(OnKeyCallbackKind::Fallback, cxt, event); } if self.keymaps.pending().is_empty() { cxt.editor.count = None } else { cxt.editor.selected_register = cxt.register.take(); } } } } #[allow(clippy::too_many_arguments)] pub fn 
set_completion( &mut self, editor: &mut Editor, items: Vec, trigger_offset: usize, size: Rect, ) -> Option { let mut completion = Completion::new(editor, items, trigger_offset); if completion.is_empty() { // skip if we got no completion results return None; } let area = completion.area(size, editor); editor.last_completion = Some(CompleteAction::Triggered); self.last_insert.1.push(InsertEvent::TriggerCompletion); // TODO : propagate required size on resize to completion too self.completion = Some(completion); Some(area) } pub fn clear_completion(&mut self, editor: &mut Editor) -> Option { self.completion = None; let mut on_next_key: Option = None; editor.handlers.completions.request_controller.restart(); editor.handlers.completions.active_completions.clear(); if let Some(last_completion) = editor.last_completion.take() { match last_completion { CompleteAction::Triggered => (), CompleteAction::Applied { trigger_offset, changes, placeholder, } => { self.last_insert.1.push(InsertEvent::CompletionApply { trigger_offset, changes, }); on_next_key = placeholder.then_some(Box::new(|cx, key| { if let Some(c) = key.char() { let (view, doc) = current!(cx.editor); if let Some(snippet) = &doc.active_snippet { doc.apply(&snippet.delete_placeholder(doc.text()), view.id); } commands::insert::insert_char(cx, c); } })) } CompleteAction::Selected { savepoint } => { let (view, doc) = current!(editor); doc.restore(view, &savepoint, false); } } } on_next_key } pub fn handle_idle_timeout(&mut self, cx: &mut commands::Context) -> EventResult { commands::compute_inlay_hints_for_all_views(cx.editor, cx.jobs); EventResult::Ignored(None) } } impl EditorView { /// must be called whenever the editor processed input that /// is not a `KeyEvent`. In these cases any pending keys/on next /// key callbacks must be canceled. 
fn handle_non_key_input(&mut self, cxt: &mut commands::Context) { cxt.editor.status_msg = None; cxt.editor.reset_idle_timer(); // HACKS: create a fake key event that will never trigger any actual map // and therefore simply acts as "dismiss" let null_key_event = KeyEvent { code: KeyCode::Null, modifiers: KeyModifiers::empty(), }; // dismiss any pending keys if let Some((on_next_key, _)) = self.on_next_key.take() { on_next_key(cxt, null_key_event); } self.handle_keymap_event(cxt.editor.mode, cxt, null_key_event); self.pseudo_pending.clear(); } fn handle_mouse_event( &mut self, event: &MouseEvent, cxt: &mut commands::Context, ) -> EventResult { if event.kind != MouseEventKind::Moved { self.handle_non_key_input(cxt) } let config = cxt.editor.config(); let MouseEvent { kind, row, column, modifiers, .. } = *event; let pos_and_view = |editor: &Editor, row, column, ignore_virtual_text| { editor.tree.views().find_map(|(view, _focus)| { view.pos_at_screen_coords( &editor.documents[&view.doc], row, column, ignore_virtual_text, ) .map(|pos| (pos, view.id)) }) }; let gutter_coords_and_view = |editor: &Editor, row, column| { editor.tree.views().find_map(|(view, _focus)| { view.gutter_coords_at_screen_coords(row, column) .map(|coords| (coords, view.id)) }) }; match kind { MouseEventKind::Down(MouseButton::Left) => { let editor = &mut cxt.editor; if let Some((pos, view_id)) = pos_and_view(editor, row, column, true) { let prev_view_id = view!(editor).id; let doc = doc_mut!(editor, &view!(editor, view_id).doc); if modifiers == KeyModifiers::ALT { let selection = doc.selection(view_id).clone(); doc.set_selection(view_id, selection.push(Range::point(pos))); } else if editor.mode == Mode::Select { // Discards non-primary selections for consistent UX with normal mode let primary = doc.selection(view_id).primary().put_cursor( doc.text().slice(..), pos, true, ); editor.mouse_down_range = Some(primary); doc.set_selection(view_id, Selection::single(primary.anchor, primary.head)); } else { 
doc.set_selection(view_id, Selection::point(pos)); } if view_id != prev_view_id { self.clear_completion(editor); } editor.focus(view_id); editor.ensure_cursor_in_view(view_id); return EventResult::Consumed(None); } if let Some((coords, view_id)) = gutter_coords_and_view(editor, row, column) { editor.focus(view_id); let (view, doc) = current!(cxt.editor); let path = match doc.path() { Some(path) => path.clone(), None => return EventResult::Ignored(None), }; if let Some(char_idx) = view.pos_at_visual_coords(doc, coords.row as u16, coords.col as u16, true) { let line = doc.text().char_to_line(char_idx); commands::dap_toggle_breakpoint_impl(cxt, path, line); return EventResult::Consumed(None); } } EventResult::Ignored(None) } MouseEventKind::Drag(MouseButton::Left) => { let (view, doc) = current!(cxt.editor); let pos = match view.pos_at_screen_coords(doc, row, column, true) { Some(pos) => pos, None => return EventResult::Ignored(None), }; let mut selection = doc.selection(view.id).clone(); let primary = selection.primary_mut(); *primary = primary.put_cursor(doc.text().slice(..), pos, true); doc.set_selection(view.id, selection); let view_id = view.id; cxt.editor.ensure_cursor_in_view(view_id); EventResult::Consumed(None) } MouseEventKind::ScrollUp | MouseEventKind::ScrollDown => { let current_view = cxt.editor.tree.focus; let direction = match event.kind { MouseEventKind::ScrollUp => Direction::Backward, MouseEventKind::ScrollDown => Direction::Forward, _ => unreachable!(), }; match pos_and_view(cxt.editor, row, column, false) { Some((_, view_id)) => cxt.editor.tree.focus = view_id, None => return EventResult::Ignored(None), } let offset = config.scroll_lines.unsigned_abs(); commands::scroll(cxt, offset, direction, false); cxt.editor.tree.focus = current_view; cxt.editor.ensure_cursor_in_view(current_view); EventResult::Consumed(None) } MouseEventKind::Up(MouseButton::Left) => { if !config.middle_click_paste { return EventResult::Ignored(None); } let (view, doc) = 
current!(cxt.editor); let should_yank = match cxt.editor.mouse_down_range.take() { Some(down_range) => doc.selection(view.id).primary() != down_range, None => { // This should not happen under normal cases. We fall back to the original // behavior of yanking on non-single-char selections. doc.selection(view.id) .primary() .slice(doc.text().slice(..)) .len_chars() > 1 } }; if should_yank { commands::MappableCommand::yank_main_selection_to_primary_clipboard .execute(cxt); EventResult::Consumed(None) } else { EventResult::Ignored(None) } } MouseEventKind::Up(MouseButton::Right) => { if let Some((pos, view_id)) = gutter_coords_and_view(cxt.editor, row, column) { cxt.editor.focus(view_id); if let Some((pos, _)) = pos_and_view(cxt.editor, row, column, true) { doc_mut!(cxt.editor).set_selection(view_id, Selection::point(pos)); } else { let (view, doc) = current!(cxt.editor); if let Some(pos) = view.pos_at_visual_coords(doc, pos.row as u16, 0, true) { doc.set_selection(view_id, Selection::point(pos)); match modifiers { KeyModifiers::ALT => { commands::MappableCommand::dap_edit_log.execute(cxt) } _ => commands::MappableCommand::dap_edit_condition.execute(cxt), }; } } cxt.editor.ensure_cursor_in_view(view_id); return EventResult::Consumed(None); } EventResult::Ignored(None) } MouseEventKind::Up(MouseButton::Middle) => { let editor = &mut cxt.editor; if !config.middle_click_paste { return EventResult::Ignored(None); } if modifiers == KeyModifiers::ALT { commands::MappableCommand::replace_selections_with_primary_clipboard .execute(cxt); return EventResult::Consumed(None); } if let Some((pos, view_id)) = pos_and_view(editor, row, column, true) { let doc = doc_mut!(editor, &view!(editor, view_id).doc); doc.set_selection(view_id, Selection::point(pos)); cxt.editor.focus(view_id); commands::MappableCommand::paste_primary_clipboard_before.execute(cxt); return EventResult::Consumed(None); } EventResult::Ignored(None) } _ => EventResult::Ignored(None), } } fn on_next_key( &mut self, 
kind: OnKeyCallbackKind, ctx: &mut commands::Context, event: KeyEvent, ) -> bool { if let Some((on_next_key, kind_)) = self.on_next_key.take() { if kind == kind_ { on_next_key(ctx, event); true } else { self.on_next_key = Some((on_next_key, kind_)); false } } else { false } } } impl Component for EditorView { fn handle_event( &mut self, event: &Event, context: &mut crate::compositor::Context, ) -> EventResult { let mut cx = commands::Context { editor: context.editor, count: None, register: None, callback: Vec::new(), on_next_key_callback: None, jobs: context.jobs, }; match event { Event::Paste(contents) => { self.handle_non_key_input(&mut cx); cx.count = cx.editor.count; commands::paste_bracketed_value(&mut cx, contents.clone()); cx.editor.count = None; let config = cx.editor.config(); let mode = cx.editor.mode(); let (view, doc) = current!(cx.editor); view.ensure_cursor_in_view(doc, config.scrolloff); // Store a history state if not in insert mode. Otherwise wait till we exit insert // to include any edits to the paste in the history state. if mode != Mode::Insert { doc.append_changes_to_history(view); } EventResult::Consumed(None) } Event::Resize(_width, _height) => { // Ignore this event, we handle resizing just before rendering to screen. 
// Handling it here but not re-rendering will cause flashing EventResult::Consumed(None) } Event::Key(mut key) => { cx.editor.reset_idle_timer(); canonicalize_key(&mut key); // clear status cx.editor.status_msg = None; let mode = cx.editor.mode(); if !self.on_next_key(OnKeyCallbackKind::PseudoPending, &mut cx, key) { match mode { Mode::Insert => { // let completion swallow the event if necessary let mut consumed = false; if let Some(completion) = &mut self.completion { let res = { // use a fake context here let mut cx = Context { editor: cx.editor, jobs: cx.jobs, scroll: None, }; if let EventResult::Consumed(callback) = completion.handle_event(event, &mut cx) { consumed = true; Some(callback) } else if let EventResult::Consumed(callback) = completion.handle_event(&Event::Key(key!(Enter)), &mut cx) { Some(callback) } else { None } }; if let Some(callback) = res { if callback.is_some() { // assume close_fn if let Some(cb) = self.clear_completion(cx.editor) { if consumed { cx.on_next_key_callback = Some((cb, OnKeyCallbackKind::Fallback)) } else { self.on_next_key = Some((cb, OnKeyCallbackKind::Fallback)); } } } } } // if completion didn't take the event, we pass it onto commands if !consumed { self.insert_mode(&mut cx, key); // record last_insert key self.last_insert.1.push(InsertEvent::Key(key)); } } mode => self.command_mode(mode, &mut cx, key), } } self.on_next_key = cx.on_next_key_callback.take(); match self.on_next_key { Some((_, OnKeyCallbackKind::PseudoPending)) => self.pseudo_pending.push(key), _ => self.pseudo_pending.clear(), } // appease borrowck let callbacks = take(&mut cx.callback); // if the command consumed the last view, skip the render. // on the next loop cycle the Application will then terminate. 
if cx.editor.should_close() { return EventResult::Ignored(None); } let config = cx.editor.config(); let mode = cx.editor.mode(); let (view, doc) = current!(cx.editor); view.ensure_cursor_in_view(doc, config.scrolloff); // Store a history state if not in insert mode. This also takes care of // committing changes when leaving insert mode. if mode != Mode::Insert { doc.append_changes_to_history(view); } let callback = if callbacks.is_empty() { None } else { let callback: crate::compositor::Callback = Box::new(move |compositor, cx| { for callback in callbacks { callback(compositor, cx) } }); Some(callback) }; EventResult::Consumed(callback) } Event::Mouse(event) => self.handle_mouse_event(event, &mut cx), Event::IdleTimeout => self.handle_idle_timeout(&mut cx), Event::FocusGained => { self.terminal_focused = true; EventResult::Consumed(None) } Event::FocusLost => { if context.editor.config().auto_save.focus_lost { let options = commands::WriteAllOptions { force: false, write_scratch: false, auto_format: false, }; if let Err(e) = commands::typed::write_all_impl(context, options) { context.editor.set_error(format!("{}", e)); } } self.terminal_focused = false; EventResult::Consumed(None) } } } fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) { // clear with background color surface.set_style(area, cx.editor.theme.get("ui.background")); let config = cx.editor.config(); // check if bufferline should be rendered use helix_view::editor::BufferLine; let use_bufferline = match config.bufferline { BufferLine::Always => true, BufferLine::Multiple if cx.editor.documents.len() > 1 => true, _ => false, }; // -1 for commandline and -1 for bufferline let mut editor_area = area.clip_bottom(1); if use_bufferline { editor_area = editor_area.clip_top(1); } // if the terminal size suddenly changed, we need to trigger a resize cx.editor.resize(editor_area); if use_bufferline { Self::render_bufferline(cx.editor, area.with_height(1), surface); } for (view, is_focused) 
in cx.editor.tree.views() { let doc = cx.editor.document(view.doc).unwrap(); self.render_view(cx.editor, doc, view, area, surface, is_focused); } if config.auto_info { if let Some(mut info) = cx.editor.autoinfo.take() { info.render(area, surface, cx); cx.editor.autoinfo = Some(info) } } let key_width = 15u16; // for showing pending keys let mut status_msg_width = 0; // render status msg if let Some((status_msg, severity)) = &cx.editor.status_msg { status_msg_width = status_msg.width(); use helix_view::editor::Severity; let style = if *severity == Severity::Error { cx.editor.theme.get("error") } else { cx.editor.theme.get("ui.text") }; surface.set_string( area.x, area.y + area.height.saturating_sub(1), status_msg, style, ); } if area.width.saturating_sub(status_msg_width as u16) > key_width { let mut disp = String::new(); if let Some(count) = cx.editor.count { disp.push_str(&count.to_string()) } for key in self.keymaps.pending() { disp.push_str(&key.key_sequence_format()); } for key in &self.pseudo_pending { disp.push_str(&key.key_sequence_format()); } let style = cx.editor.theme.get("ui.text"); let macro_width = if cx.editor.macro_recording.is_some() { 3 } else { 0 }; surface.set_string( area.x + area.width.saturating_sub(key_width + macro_width), area.y + area.height.saturating_sub(1), disp.get(disp.len().saturating_sub(key_width as usize)..) 
.unwrap_or(&disp), style, ); if let Some((reg, _)) = cx.editor.macro_recording { let disp = format!("[{}]", reg); let style = style .fg(helix_view::graphics::Color::Yellow) .add_modifier(Modifier::BOLD); surface.set_string( area.x + area.width.saturating_sub(3), area.y + area.height.saturating_sub(1), &disp, style, ); } } if let Some(completion) = self.completion.as_mut() { completion.render(area, surface, cx); } } fn cursor(&self, _area: Rect, editor: &Editor) -> (Option, CursorKind) { match editor.cursor() { // all block cursors are drawn manually (pos, CursorKind::Block) => { if self.terminal_focused { (pos, CursorKind::Hidden) } else { // use crossterm cursor when terminal loses focus (pos, CursorKind::Underline) } } cursor => cursor, } } } fn canonicalize_key(key: &mut KeyEvent) { if let KeyEvent { code: KeyCode::Char(_), modifiers: _, } = key { key.modifiers.remove(KeyModifiers::SHIFT) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/ui/info.rs000066400000000000000000000026701500614314100243750ustar00rootroot00000000000000use crate::compositor::{Component, Context}; use helix_view::graphics::{Margin, Rect}; use helix_view::info::Info; use tui::buffer::Buffer as Surface; use tui::text::Text; use tui::widgets::{Block, Paragraph, Widget}; impl Component for Info { fn render(&mut self, viewport: Rect, surface: &mut Surface, cx: &mut Context) { let text_style = cx.editor.theme.get("ui.text.info"); let popup_style = cx.editor.theme.get("ui.popup.info"); // Calculate the area of the terminal to modify. Because we want to // render at the bottom right, we use the viewport's width and height // which evaluate to the most bottom right coordinate. 
let width = self.width + 2 + 2; // +2 for border, +2 for margin let height = self.height + 2; // +2 for border let area = viewport.intersection(Rect::new( viewport.width.saturating_sub(width), viewport.height.saturating_sub(height + 2), // +2 for statusline width, height, )); surface.clear_with(area, popup_style); let block = Block::bordered() .title(self.title.as_ref()) .border_style(popup_style); let margin = Margin::horizontal(1); let inner = block.inner(area).inner(margin); block.render(area, surface); Paragraph::new(&Text::from(self.text.as_str())) .style(text_style) .render(inner, surface); } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/ui/lsp.rs000066400000000000000000000000471500614314100242340ustar00rootroot00000000000000pub mod hover; pub mod signature_help; helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/ui/lsp/000077500000000000000000000000001500614314100236655ustar00rootroot00000000000000helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/ui/lsp/hover.rs000066400000000000000000000134271500614314100253650ustar00rootroot00000000000000use std::sync::Arc; use arc_swap::ArcSwap; use helix_core::syntax; use helix_lsp::lsp; use helix_view::graphics::{Margin, Rect, Style}; use helix_view::input::Event; use tui::buffer::Buffer; use tui::widgets::{BorderType, Paragraph, Widget, Wrap}; use crate::compositor::{Component, Context, EventResult}; use crate::alt; use crate::ui::Markdown; pub struct Hover { active_index: usize, contents: Vec<(Option, Markdown)>, } impl Hover { pub const ID: &'static str = "hover"; pub fn new( hovers: Vec<(String, lsp::Hover)>, config_loader: Arc>, ) -> Self { let n_hovers = hovers.len(); let contents = hovers .into_iter() .enumerate() .map(|(idx, (server_name, hover))| { let header = (n_hovers > 1).then(|| { Markdown::new( format!("**[{}/{}] {}**", idx + 1, n_hovers, server_name), config_loader.clone(), ) }); let body = Markdown::new( hover_contents_to_string(hover.contents), 
config_loader.clone(), ); (header, body) }) .collect(); Self { active_index: usize::default(), contents, } } fn has_header(&self) -> bool { self.contents.len() > 1 } fn content(&self) -> &(Option, Markdown) { &self.contents[self.active_index] } fn set_index(&mut self, index: usize) { assert!((0..self.contents.len()).contains(&index)); self.active_index = index; } } const PADDING_HORIZONTAL: u16 = 2; const PADDING_TOP: u16 = 1; const PADDING_BOTTOM: u16 = 1; const HEADER_HEIGHT: u16 = 1; const SEPARATOR_HEIGHT: u16 = 1; impl Component for Hover { fn render(&mut self, area: Rect, surface: &mut Buffer, cx: &mut Context) { let margin = Margin::all(1); let area = area.inner(margin); let (header, contents) = self.content(); // show header and border only when more than one results if let Some(header) = header { // header LSP Name let header = header.parse(Some(&cx.editor.theme)); let header = Paragraph::new(&header); header.render(area.with_height(HEADER_HEIGHT), surface); // border let sep_style = Style::default(); let borders = BorderType::line_symbols(BorderType::Plain); for x in area.left()..area.right() { if let Some(cell) = surface.get_mut(x, area.top() + HEADER_HEIGHT) { cell.set_symbol(borders.horizontal).set_style(sep_style); } } } // hover content let contents = contents.parse(Some(&cx.editor.theme)); let contents_area = area.clip_top(if self.has_header() { HEADER_HEIGHT + SEPARATOR_HEIGHT } else { 0 }); let contents_para = Paragraph::new(&contents) .wrap(Wrap { trim: false }) .scroll((cx.scroll.unwrap_or_default() as u16, 0)); contents_para.render(contents_area, surface); } fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> { let max_text_width = viewport.0.saturating_sub(PADDING_HORIZONTAL).clamp(10, 120); let (header, contents) = self.content(); let header_width = header .as_ref() .map(|header| { let header = header.parse(None); let (width, _height) = crate::ui::text::required_size(&header, max_text_width); width }) .unwrap_or_default(); 
let contents = contents.parse(None); let (content_width, content_height) = crate::ui::text::required_size(&contents, max_text_width); let width = PADDING_HORIZONTAL + header_width.max(content_width); let height = if self.has_header() { PADDING_TOP + HEADER_HEIGHT + SEPARATOR_HEIGHT + content_height + PADDING_BOTTOM } else { PADDING_TOP + content_height + PADDING_BOTTOM }; Some((width, height)) } fn handle_event(&mut self, event: &Event, _ctx: &mut Context) -> EventResult { let Event::Key(event) = event else { return EventResult::Ignored(None); }; match event { alt!('p') => { let index = self .active_index .checked_sub(1) .unwrap_or(self.contents.len() - 1); self.set_index(index); EventResult::Consumed(None) } alt!('n') => { self.set_index((self.active_index + 1) % self.contents.len()); EventResult::Consumed(None) } _ => EventResult::Ignored(None), } } } fn hover_contents_to_string(contents: lsp::HoverContents) -> String { fn marked_string_to_markdown(contents: lsp::MarkedString) -> String { match contents { lsp::MarkedString::String(contents) => contents, lsp::MarkedString::LanguageString(string) => { if string.language == "markdown" { string.value } else { format!("```{}\n{}\n```", string.language, string.value) } } } } match contents { lsp::HoverContents::Scalar(contents) => marked_string_to_markdown(contents), lsp::HoverContents::Array(contents) => contents .into_iter() .map(marked_string_to_markdown) .collect::>() .join("\n\n"), lsp::HoverContents::Markup(contents) => contents.value, } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/ui/lsp/signature_help.rs000066400000000000000000000152111500614314100272440ustar00rootroot00000000000000use std::sync::Arc; use arc_swap::ArcSwap; use helix_core::syntax; use helix_view::graphics::{Margin, Rect, Style}; use helix_view::input::Event; use tui::buffer::Buffer; use tui::layout::Alignment; use tui::text::Text; use tui::widgets::{BorderType, Paragraph, Widget, Wrap}; use crate::compositor::{Component, 
Compositor, Context, EventResult}; use crate::alt; use crate::ui::Markdown; use crate::ui::Popup; pub struct Signature { pub signature: String, pub signature_doc: Option, /// Part of signature text pub active_param_range: Option<(usize, usize)>, } pub struct SignatureHelp { language: String, config_loader: Arc>, active_signature: usize, lsp_signature: Option, signatures: Vec, } impl SignatureHelp { pub const ID: &'static str = "signature-help"; pub fn new( language: String, config_loader: Arc>, active_signature: usize, lsp_signature: Option, signatures: Vec, ) -> Self { Self { language, config_loader, active_signature, lsp_signature, signatures, } } pub fn active_signature(&self) -> usize { self.active_signature } pub fn lsp_signature(&self) -> Option { self.lsp_signature } pub fn visible_popup(compositor: &mut Compositor) -> Option<&mut Popup> { compositor.find_id::>(Self::ID) } fn signature_index(&self) -> String { format!("({}/{})", self.active_signature + 1, self.signatures.len()) } } impl Component for SignatureHelp { fn handle_event(&mut self, event: &Event, _cx: &mut Context) -> EventResult { let Event::Key(event) = event else { return EventResult::Ignored(None); }; if self.signatures.len() <= 1 { return EventResult::Ignored(None); } match event { alt!('p') => { self.active_signature = self .active_signature .checked_sub(1) .unwrap_or(self.signatures.len() - 1); EventResult::Consumed(None) } alt!('n') => { self.active_signature = (self.active_signature + 1) % self.signatures.len(); EventResult::Consumed(None) } _ => EventResult::Ignored(None), } } fn render(&mut self, area: Rect, surface: &mut Buffer, cx: &mut Context) { let margin = Margin::horizontal(1); let signature = self .signatures .get(self.active_signature) .unwrap_or_else(|| &self.signatures[0]); let active_param_span = signature.active_param_range.map(|(start, end)| { vec![( cx.editor .theme .find_scope_index_exact("ui.selection") .unwrap(), start..end, )] }); let signature = self .signatures 
.get(self.active_signature) .unwrap_or_else(|| &self.signatures[0]); let sig_text = crate::ui::markdown::highlighted_code_block( signature.signature.as_str(), &self.language, Some(&cx.editor.theme), Arc::clone(&self.config_loader), active_param_span, ); if self.signatures.len() > 1 { let signature_index = self.signature_index(); let text = Text::from(signature_index); let paragraph = Paragraph::new(&text).alignment(Alignment::Right); paragraph.render(area.clip_top(1).with_height(1).clip_right(1), surface); } let (_, sig_text_height) = crate::ui::text::required_size(&sig_text, area.width); let sig_text_area = area.clip_top(1).with_height(sig_text_height); let sig_text_area = sig_text_area.inner(margin).intersection(surface.area); let sig_text_para = Paragraph::new(&sig_text).wrap(Wrap { trim: false }); sig_text_para.render(sig_text_area, surface); if signature.signature_doc.is_none() { return; } let sep_style = Style::default(); let borders = BorderType::line_symbols(BorderType::Plain); for x in sig_text_area.left()..sig_text_area.right() { if let Some(cell) = surface.get_mut(x, sig_text_area.bottom()) { cell.set_symbol(borders.horizontal).set_style(sep_style); } } let sig_doc = match &signature.signature_doc { None => return, Some(doc) => Markdown::new(doc.clone(), Arc::clone(&self.config_loader)), }; let sig_doc = sig_doc.parse(Some(&cx.editor.theme)); let sig_doc_area = area .clip_top(sig_text_area.height + 2) .clip_bottom(u16::from(cx.editor.popup_border())); let sig_doc_para = Paragraph::new(&sig_doc) .wrap(Wrap { trim: false }) .scroll((cx.scroll.unwrap_or_default() as u16, 0)); sig_doc_para.render(sig_doc_area.inner(margin), surface); } fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> { const PADDING: u16 = 2; const SEPARATOR_HEIGHT: u16 = 1; let signature = self .signatures .get(self.active_signature) .unwrap_or_else(|| &self.signatures[0]); let max_text_width = viewport.0.saturating_sub(PADDING).clamp(10, 120); let signature_text = 
crate::ui::markdown::highlighted_code_block( signature.signature.as_str(), &self.language, None, Arc::clone(&self.config_loader), None, ); let (sig_width, sig_height) = crate::ui::text::required_size(&signature_text, max_text_width); let (width, height) = match signature.signature_doc { Some(ref doc) => { let doc_md = Markdown::new(doc.clone(), Arc::clone(&self.config_loader)); let doc_text = doc_md.parse(None); let (doc_width, doc_height) = crate::ui::text::required_size(&doc_text, max_text_width); ( sig_width.max(doc_width), sig_height + SEPARATOR_HEIGHT + doc_height, ) } None => (sig_width, sig_height), }; let sig_index_width = if self.signatures.len() > 1 { self.signature_index().len() + 1 } else { 0 }; Some((width + PADDING + sig_index_width as u16, height + PADDING)) } } helix-cbac4273836f5837cec641ab21f365c79b102a4b/helix-term/src/ui/markdown.rs000066400000000000000000000331521500614314100252630ustar00rootroot00000000000000use crate::compositor::{Component, Context}; use arc_swap::ArcSwap; use tui::{ buffer::Buffer as Surface, text::{Span, Spans, Text}, }; use std::sync::Arc; use pulldown_cmark::{CodeBlockKind, Event, HeadingLevel, Options, Parser, Tag, TagEnd}; use helix_core::{ syntax::{self, HighlightEvent, InjectionLanguageMarker, Syntax}, RopeSlice, }; use helix_view::{ graphics::{Margin, Rect, Style}, theme::Modifier, Theme, }; fn styled_multiline_text<'a>(text: &str, style: Style) -> Text<'a> { let spans: Vec<_> = text .lines() .map(|line| Span::styled(line.to_string(), style)) .map(Spans::from) .collect(); Text::from(spans) } pub fn highlighted_code_block<'a>( text: &str, language: &str, theme: Option<&Theme>, config_loader: Arc>, additional_highlight_spans: Option)>>, ) -> Text<'a> { let mut spans = Vec::new(); let mut lines = Vec::new(); let get_theme = |key: &str| -> Style { theme.map(|t| t.get(key)).unwrap_or_default() }; let text_style = get_theme(Markdown::TEXT_STYLE); let code_style = get_theme(Markdown::BLOCK_STYLE); let theme = match theme { 
Some(t) => t, None => return styled_multiline_text(text, code_style), }; let ropeslice = RopeSlice::from(text); let syntax = config_loader .load() .language_configuration_for_injection_string(&InjectionLanguageMarker::Name( language.into(), )) .and_then(|config| config.highlight_config(theme.scopes())) .and_then(|config| Syntax::new(ropeslice, config, Arc::clone(&config_loader))); let syntax = match syntax { Some(s) => s, None => return styled_multiline_text(text, code_style), }; let highlight_iter = syntax .highlight_iter(ropeslice, None, None) .map(|e| e.unwrap()); let highlight_iter: Box> = if let Some(spans) = additional_highlight_spans { Box::new(helix_core::syntax::merge(highlight_iter, spans)) } else { Box::new(highlight_iter) }; let mut highlights = Vec::new(); for event in highlight_iter { match event { HighlightEvent::HighlightStart(span) => { highlights.push(span); } HighlightEvent::HighlightEnd => { highlights.pop(); } HighlightEvent::Source { start, end } => { let style = highlights .iter() .fold(text_style, |acc, span| acc.patch(theme.highlight(span.0))); let mut slice = &text[start..end]; // TODO: do we need to handle all unicode line endings // here, or is just '\n' okay? 
while let Some(end) = slice.find('\n') { // emit span up to newline let text = &slice[..end]; let text = text.replace('\t', " "); // replace tabs let span = Span::styled(text, style); spans.push(span); // truncate slice to after newline slice = &slice[end + 1..]; // make a new line let spans = std::mem::take(&mut spans); lines.push(Spans::from(spans)); } // if there's anything left, emit it too if !slice.is_empty() { let span = Span::styled(slice.replace('\t', " "), style); spans.push(span); } } } } if !spans.is_empty() { let spans = std::mem::take(&mut spans); lines.push(Spans::from(spans)); } Text::from(lines) } pub struct Markdown { contents: String, config_loader: Arc>, } // TODO: pre-render and self reference via Pin // better yet, just use Tendril + subtendril for references impl Markdown { const TEXT_STYLE: &'static str = "ui.text"; const BLOCK_STYLE: &'static str = "markup.raw.inline"; const RULE_STYLE: &'static str = "punctuation.special"; const UNNUMBERED_LIST_STYLE: &'static str = "markup.list.unnumbered"; const NUMBERED_LIST_STYLE: &'static str = "markup.list.numbered"; const HEADING_STYLES: [&'static str; 6] = [ "markup.heading.1", "markup.heading.2", "markup.heading.3", "markup.heading.4", "markup.heading.5", "markup.heading.6", ]; const INDENT: &'static str = " "; pub fn new(contents: String, config_loader: Arc>) -> Self { Self { contents, config_loader, } } pub fn parse(&self, theme: Option<&Theme>) -> tui::text::Text<'_> { fn push_line<'a>(spans: &mut Vec>, lines: &mut Vec>) { let spans = std::mem::take(spans); if !spans.is_empty() { lines.push(Spans::from(spans)); } } let mut options = Options::empty(); options.insert(Options::ENABLE_STRIKETHROUGH); let parser = Parser::new_ext(&self.contents, options); // TODO: if possible, render links as terminal hyperlinks: https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda let mut tags = Vec::new(); let mut spans = Vec::new(); let mut lines = Vec::new(); let mut list_stack = Vec::new(); let 
get_indent = |level: usize| { if level < 1 { String::new() } else { Self::INDENT.repeat(level - 1) } }; let get_theme = |key: &str| -> Style { theme.map(|t| t.get(key)).unwrap_or_default() }; let text_style = get_theme(Self::TEXT_STYLE); let code_style = get_theme(Self::BLOCK_STYLE); let numbered_list_style = get_theme(Self::NUMBERED_LIST_STYLE); let unnumbered_list_style = get_theme(Self::UNNUMBERED_LIST_STYLE); let rule_style = get_theme(Self::RULE_STYLE); let heading_styles: Vec