Try to fix nightly

hustcer 2024-04-27 10:25:20 +08:00
commit 1433e9bed3
1189 changed files with 60879 additions and 23759 deletions


@ -1,11 +0,0 @@
---
name: standard library bug or feature report
about: Used to submit issues related to the nu standard library
title: ''
labels: ['needs-triage', 'std-library']
assignees: ''
---
**Describe the bug or feature**
A clear and concise description of what the bug is.


@ -11,6 +11,10 @@ updates:
directory: "/" directory: "/"
schedule: schedule:
interval: "weekly" interval: "weekly"
# We release on Tuesdays, and open dependabot PRs will rebase after the
# version bump and thus consume unnecessary workers during release,
# so let's open new ones on Wednesday
day: "wednesday"
ignore: ignore:
- dependency-name: "*" - dependency-name: "*"
update-types: ["version-update:semver-patch"] update-types: ["version-update:semver-patch"]
@ -18,3 +22,4 @@ updates:
directory: "/" directory: "/"
schedule: schedule:
interval: "weekly" interval: "weekly"
day: "wednesday"


@ -19,7 +19,7 @@ jobs:
# Prevent sudden announcement of a new advisory from failing ci: # Prevent sudden announcement of a new advisory from failing ci:
continue-on-error: true continue-on-error: true
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4.1.3
- uses: rustsec/audit-check@v1.4.1 - uses: rustsec/audit-check@v1.4.1
with: with:
token: ${{ secrets.GITHUB_TOKEN }} token: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/check-msrv.nu (new file, 12 lines)

@ -0,0 +1,12 @@
let toolchain_spec = open rust-toolchain.toml | get toolchain.channel
let msrv_spec = open Cargo.toml | get package.rust-version
# This check is conservative in the sense that we use `rust-toolchain.toml`'s
# override to ensure that this is the upper-bound for the minimum supported
# rust version
if $toolchain_spec != $msrv_spec {
print -e "Mismatching rust compiler versions specified in `Cargo.toml` and `rust-toolchain.toml`"
print -e $"Cargo.toml: ($msrv_spec)"
print -e $"rust-toolchain.toml: ($toolchain_spec)"
exit 1
}
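The ci workflow later in this commit runs this script as a dedicated step, so the same check can be reproduced locally before pushing (assuming `nu` is on PATH and the working directory is the repository root):

    # compare package.rust-version in Cargo.toml with toolchain.channel in rust-toolchain.toml
    nu .github/workflows/check-msrv.nu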


@ -10,7 +10,11 @@ env:
NUSHELL_CARGO_PROFILE: ci NUSHELL_CARGO_PROFILE: ci
NU_LOG_LEVEL: DEBUG NU_LOG_LEVEL: DEBUG
# If changing these settings, also change toolkit.nu # If changing these settings, also change toolkit.nu
CLIPPY_OPTIONS: "-D warnings -D clippy::unwrap_used" CLIPPY_OPTIONS: "-D warnings -D clippy::unwrap_used -D clippy::unchecked_duration_subtraction"
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
cancel-in-progress: true
jobs: jobs:
fmt-clippy: fmt-clippy:
@ -20,25 +24,27 @@ jobs:
# Pinning to Ubuntu 20.04 because building on newer Ubuntu versions causes linux-gnu # Pinning to Ubuntu 20.04 because building on newer Ubuntu versions causes linux-gnu
# builds to link against a too-new-for-many-Linux-installs glibc version. Consider # builds to link against a too-new-for-many-Linux-installs glibc version. Consider
# revisiting this when 20.04 is closer to EOL (April 2025) # revisiting this when 20.04 is closer to EOL (April 2025)
platform: [windows-latest, macos-latest, ubuntu-20.04] #
feature: [default, dataframe, extra] # Using macOS 13 runner because 14 is based on the M1 and has half as much RAM (7 GB,
# instead of 14 GB) which is too little for us right now. Revisit when `dfr` commands are
# removed and we're only building the `polars` plugin instead
platform: [windows-latest, macos-13, ubuntu-20.04]
feature: [default, dataframe]
include: include:
- feature: default - feature: default
flags: "" flags: ""
- feature: dataframe - feature: dataframe
flags: "--features=dataframe" flags: "--features=dataframe"
- feature: extra
flags: "--features=extra"
exclude: exclude:
- platform: windows-latest - platform: windows-latest
feature: dataframe feature: dataframe
- platform: macos-latest - platform: macos-13
feature: dataframe feature: dataframe
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4.1.3
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
@ -56,12 +62,15 @@ jobs:
- name: Clippy of tests - name: Clippy of tests
run: cargo clippy --tests --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- -D warnings run: cargo clippy --tests --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- -D warnings
- name: Clippy of benchmarks
run: cargo clippy --benches --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- -D warnings
tests: tests:
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
platform: [windows-latest, macos-latest, ubuntu-20.04] platform: [windows-latest, macos-latest, ubuntu-20.04]
feature: [default, dataframe, extra] feature: [default, dataframe]
include: include:
# linux CI cannot handle clipboard feature # linux CI cannot handle clipboard feature
- default-flags: "" - default-flags: ""
@ -71,22 +80,16 @@ jobs:
flags: "" flags: ""
- feature: dataframe - feature: dataframe
flags: "--features=dataframe" flags: "--features=dataframe"
- feature: extra
flags: "--features=extra"
exclude: exclude:
- platform: windows-latest - platform: windows-latest
feature: dataframe feature: dataframe
- platform: macos-latest - platform: macos-latest
feature: dataframe feature: dataframe
- platform: windows-latest
feature: extra
- platform: macos-latest
feature: extra
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4.1.3
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
@ -118,7 +121,7 @@ jobs:
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4.1.3
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
@ -131,6 +134,9 @@ jobs:
- name: Standard library tests - name: Standard library tests
run: nu -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std' run: nu -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std'
- name: Ensure that Cargo.toml MSRV and rust-toolchain.toml use the same version
run: nu .github/workflows/check-msrv.nu
- name: Setup Python - name: Setup Python
uses: actions/setup-python@v5 uses: actions/setup-python@v5
with: with:
@ -159,12 +165,16 @@ jobs:
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
platform: [windows-latest, macos-latest, ubuntu-20.04] # Using macOS 13 runner because 14 is based on the M1 and has half as much RAM (7 GB,
# instead of 14 GB) which is too little for us right now.
#
# Failure occurring with clippy for rust 1.77.2
platform: [windows-latest, macos-13, ubuntu-20.04]
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4.1.3
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
@ -172,7 +182,7 @@ jobs:
rustflags: "" rustflags: ""
- name: Clippy - name: Clippy
run: cargo clippy --package nu_plugin_* ${{ matrix.flags }} -- $CLIPPY_OPTIONS run: cargo clippy --package nu_plugin_* -- $CLIPPY_OPTIONS
- name: Tests - name: Tests
run: cargo test --profile ci --package nu_plugin_* run: cargo test --profile ci --package nu_plugin_*
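Per the `CLIPPY_OPTIONS` comment above, the same lint settings are mirrored in `toolkit.nu`. A rough local equivalent of the strict lint using the flags from this commit (the exact target selection is an assumption; CI's own steps are shown above) would be:

    cargo clippy --workspace --exclude "nu_plugin_*" -- -D warnings -D clippy::unwrap_used -D clippy::unchecked_duration_subtraction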


@ -27,7 +27,7 @@ jobs:
# if: github.repository == 'nushell/nightly' # if: github.repository == 'nushell/nightly'
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4.1.3
if: github.repository == 'nushell/nightly' if: github.repository == 'nushell/nightly'
with: with:
ref: main ref: main
@ -36,10 +36,10 @@ jobs:
token: ${{ secrets.WORKFLOW_TOKEN }} token: ${{ secrets.WORKFLOW_TOKEN }}
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.9 uses: hustcer/setup-nu@v3.10
if: github.repository == 'nushell/nightly' if: github.repository == 'nushell/nightly'
with: with:
version: 0.90.1 version: 0.92.2
# Synchronize the main branch of nightly repo with the main branch of Nushell official repo # Synchronize the main branch of nightly repo with the main branch of Nushell official repo
- name: Prepare for Nightly Release - name: Prepare for Nightly Release
@ -123,7 +123,7 @@ jobs:
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4.1.3
with: with:
ref: main ref: main
fetch-depth: 0 fetch-depth: 0
@ -139,9 +139,9 @@ jobs:
rustflags: '' rustflags: ''
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.9 uses: hustcer/setup-nu@v3.10
with: with:
version: 0.90.1 version: 0.92.2
- name: Release Nu Binary - name: Release Nu Binary
id: nu id: nu
@ -174,7 +174,7 @@ jobs:
# REF: https://github.com/marketplace/actions/gh-release # REF: https://github.com/marketplace/actions/gh-release
# Create a release only in nushell/nightly repo # Create a release only in nushell/nightly repo
- name: Publish Archive - name: Publish Archive
uses: softprops/action-gh-release@v0.1.15 uses: softprops/action-gh-release@v2.0.4
if: ${{ startsWith(github.repository, 'nushell/nightly') }} if: ${{ startsWith(github.repository, 'nushell/nightly') }}
with: with:
prerelease: true prerelease: true
@ -202,40 +202,40 @@ jobs:
include: include:
- target: aarch64-apple-darwin - target: aarch64-apple-darwin
os: macos-latest os: macos-latest
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: x86_64-apple-darwin - target: x86_64-apple-darwin
os: macos-latest os: macos-latest
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: x86_64-pc-windows-msvc - target: x86_64-pc-windows-msvc
extra: 'bin' extra: 'bin'
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: x86_64-pc-windows-msvc - target: x86_64-pc-windows-msvc
extra: msi extra: msi
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: aarch64-pc-windows-msvc - target: aarch64-pc-windows-msvc
extra: 'bin' extra: 'bin'
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: aarch64-pc-windows-msvc - target: aarch64-pc-windows-msvc
extra: msi extra: msi
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: x86_64-unknown-linux-gnu - target: x86_64-unknown-linux-gnu
os: ubuntu-latest os: ubuntu-20.04
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: x86_64-unknown-linux-musl - target: x86_64-unknown-linux-musl
os: ubuntu-latest os: ubuntu-20.04
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: aarch64-unknown-linux-gnu - target: aarch64-unknown-linux-gnu
os: ubuntu-latest os: ubuntu-20.04
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4.1.3
with: with:
ref: main ref: main
fetch-depth: 0 fetch-depth: 0
@ -251,9 +251,9 @@ jobs:
rustflags: '' rustflags: ''
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.9 uses: hustcer/setup-nu@v3.10
with: with:
version: 0.90.1 version: 0.92.2
- name: Release Nu Binary - name: Release Nu Binary
id: nu id: nu
@ -286,7 +286,7 @@ jobs:
# REF: https://github.com/marketplace/actions/gh-release # REF: https://github.com/marketplace/actions/gh-release
# Create a release only in nushell/nightly repo # Create a release only in nushell/nightly repo
- name: Publish Archive - name: Publish Archive
uses: softprops/action-gh-release@v0.1.15 uses: softprops/action-gh-release@v2.0.4
if: ${{ startsWith(github.repository, 'nushell/nightly') }} if: ${{ startsWith(github.repository, 'nushell/nightly') }}
with: with:
draft: false draft: false
@ -310,14 +310,14 @@ jobs:
- name: Waiting for Release - name: Waiting for Release
run: sleep 1800 run: sleep 1800
- uses: actions/checkout@v4 - uses: actions/checkout@v4.1.3
with: with:
ref: main ref: main
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.9 uses: hustcer/setup-nu@v3.10
with: with:
version: 0.90.1 version: 0.92.2
# Keep only the last few releases # Keep only the last few releases
- name: Delete Older Releases - name: Delete Older Releases


@ -128,16 +128,16 @@ let executable = $'target/($target)/release/($bin)*($suffix)'
print $'Current executable file: ($executable)' print $'Current executable file: ($executable)'
cd $src; mkdir $dist; cd $src; mkdir $dist;
rm -rf $'target/($target)/release/*.d' $'target/($target)/release/nu_pretty_hex*' rm -rf ...(glob $'target/($target)/release/*.d') ...(glob $'target/($target)/release/nu_pretty_hex*')
print $'(char nl)All executable files:'; hr-line print $'(char nl)All executable files:'; hr-line
# We have to use `print` here to make sure the command output is displayed # We have to use `print` here to make sure the command output is displayed
print (ls -f $executable); sleep 1sec print (ls -f ($executable | into glob)); sleep 1sec
print $'(char nl)Copying release files...'; hr-line print $'(char nl)Copying release files...'; hr-line
"To use Nu plugins, use the register command to tell Nu where to find the plugin. For example: "To use Nu plugins, use the register command to tell Nu where to find the plugin. For example:
> register ./nu_plugin_query" | save $'($dist)/README.txt' -f > register ./nu_plugin_query" | save $'($dist)/README.txt' -f
[LICENSE $executable] | each {|it| cp -rv $it $dist } | flatten [LICENSE ...(glob $executable)] | each {|it| cp -rv $it $dist } | flatten
print $'(char nl)Check binary release version detail:'; hr-line print $'(char nl)Check binary release version detail:'; hr-line
let ver = if $os == 'windows-latest' { let ver = if $os == 'windows-latest' {
@ -160,9 +160,9 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
let archive = $'($dist)/($dest).tar.gz' let archive = $'($dist)/($dest).tar.gz'
mkdir $dest mkdir $dest
$files | each {|it| mv $it $dest } | ignore $files | each {|it| cp -v $it $dest }
print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls $dest print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls $dest | print
tar -czf $archive $dest tar -czf $archive $dest
print $'archive: ---> ($archive)'; ls $archive print $'archive: ---> ($archive)'; ls $archive
@ -181,10 +181,11 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
if (get-env _EXTRA_) == 'msi' { if (get-env _EXTRA_) == 'msi' {
let wixRelease = $'($src)/target/wix/($releaseStem).msi' let wixRelease = $'($src)/target/wix/($releaseStem).msi'
print $'(char nl)Start creating Windows msi package...' print $'(char nl)Start creating Windows msi package with the following contents...'
cd $src; hr-line cd $src; hr-line
# Wix needs the binaries to be stored in target/release/ # Wix needs the binaries to be stored in target/release/
cp -r $'($dist)/*' target/release/ cp -r ($'($dist)/*' | into glob) target/release/
ls target/release/* | print
cargo install cargo-wix --version 0.3.4 cargo install cargo-wix --version 0.3.4
cargo wix --no-build --nocapture --package nu --output $wixRelease cargo wix --no-build --nocapture --package nu --output $wixRelease
# Workaround for https://github.com/softprops/action-gh-release/issues/280 # Workaround for https://github.com/softprops/action-gh-release/issues/280
@ -194,9 +195,9 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
} else { } else {
print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls | print
let archive = $'($dist)/($releaseStem).zip' let archive = $'($dist)/($releaseStem).zip'
7z a $archive * 7z a $archive ...(glob *)
let pkg = (ls -f $archive | get name) let pkg = (ls -f $archive | get name)
if not ($pkg | is-empty) { if not ($pkg | is-empty) {
# Workaround for https://github.com/softprops/action-gh-release/issues/280 # Workaround for https://github.com/softprops/action-gh-release/issues/280
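The `glob` and `into glob` changes above track Nushell's stricter handling of glob patterns in recent releases: interpolated strings such as `$'($dist)/*'` are no longer expanded implicitly, so the script expands patterns explicitly and spreads the resulting list into the command call. A minimal sketch of the two patterns used here (paths are illustrative):

    # expand a pattern to a list of paths and spread it into the call
    rm -rf ...(glob 'target/release/*.d')
    # or convert a string to a glob value where expansion is still wanted
    ls ($'($dist)/*' | into glob)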


@ -18,6 +18,7 @@ jobs:
name: Std name: Std
strategy: strategy:
fail-fast: false
matrix: matrix:
target: target:
- aarch64-apple-darwin - aarch64-apple-darwin
@ -72,22 +73,23 @@ jobs:
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4.1.3
- name: Update Rust Toolchain Target - name: Update Rust Toolchain Target
run: | run: |
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
# WARN: Keep the rustflags to prevent the winget submission error: `CAQuietExec: Error 0xc0000135` # WARN: Keep the rustflags to prevent the winget submission error: `CAQuietExec: Error 0xc0000135`
with: with:
cache: false
rustflags: '' rustflags: ''
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.9 uses: hustcer/setup-nu@v3.10
with: with:
version: 0.90.1 version: 0.92.2
- name: Release Nu Binary - name: Release Nu Binary
id: nu id: nu
@ -102,7 +104,7 @@ jobs:
# REF: https://github.com/marketplace/actions/gh-release # REF: https://github.com/marketplace/actions/gh-release
- name: Publish Archive - name: Publish Archive
uses: softprops/action-gh-release@v0.1.15 uses: softprops/action-gh-release@v2.0.4
if: ${{ startsWith(github.ref, 'refs/tags/') }} if: ${{ startsWith(github.ref, 'refs/tags/') }}
with: with:
draft: true draft: true
@ -128,55 +130,56 @@ jobs:
include: include:
- target: aarch64-apple-darwin - target: aarch64-apple-darwin
os: macos-latest os: macos-latest
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: x86_64-apple-darwin - target: x86_64-apple-darwin
os: macos-latest os: macos-latest
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: x86_64-pc-windows-msvc - target: x86_64-pc-windows-msvc
extra: 'bin' extra: 'bin'
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: x86_64-pc-windows-msvc - target: x86_64-pc-windows-msvc
extra: msi extra: msi
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: aarch64-pc-windows-msvc - target: aarch64-pc-windows-msvc
extra: 'bin' extra: 'bin'
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: aarch64-pc-windows-msvc - target: aarch64-pc-windows-msvc
extra: msi extra: msi
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: x86_64-unknown-linux-gnu - target: x86_64-unknown-linux-gnu
os: ubuntu-latest os: ubuntu-20.04
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: x86_64-unknown-linux-musl - target: x86_64-unknown-linux-musl
os: ubuntu-latest os: ubuntu-20.04
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
- target: aarch64-unknown-linux-gnu - target: aarch64-unknown-linux-gnu
os: ubuntu-latest os: ubuntu-20.04
target_rustflags: '--features=dataframe,extra' target_rustflags: '--features=dataframe'
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4.1.3
- name: Update Rust Toolchain Target - name: Update Rust Toolchain Target
run: | run: |
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
# WARN: Keep the rustflags to prevent the winget submission error: `CAQuietExec: Error 0xc0000135` # WARN: Keep the rustflags to prevent the winget submission error: `CAQuietExec: Error 0xc0000135`
with: with:
cache: false
rustflags: '' rustflags: ''
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.9 uses: hustcer/setup-nu@v3.10
with: with:
version: 0.90.1 version: 0.92.2
- name: Release Nu Binary - name: Release Nu Binary
id: nu id: nu
@ -191,7 +194,7 @@ jobs:
# REF: https://github.com/marketplace/actions/gh-release # REF: https://github.com/marketplace/actions/gh-release
- name: Publish Archive - name: Publish Archive
uses: softprops/action-gh-release@v0.1.15 uses: softprops/action-gh-release@v2.0.4
if: ${{ startsWith(github.ref, 'refs/tags/') }} if: ${{ startsWith(github.ref, 'refs/tags/') }}
with: with:
draft: true draft: true


@ -7,7 +7,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout Actions Repository - name: Checkout Actions Repository
uses: actions/checkout@v4 uses: actions/checkout@v4.1.3
- name: Check spelling - name: Check spelling
uses: crate-ci/typos@v1.18.2 uses: crate-ci/typos@v1.20.10


@ -16,8 +16,8 @@ Welcome to Nushell and thank you for considering contributing!
More resources can be found in the nascent [developer documentation](devdocs/README.md) in this repo. More resources can be found in the nascent [developer documentation](devdocs/README.md) in this repo.
- [Developer FAQ](FAQ.md) - [Developer FAQ](devdocs/FAQ.md)
- [Platform support policy](PLATFORM_SUPPORT.md) - [Platform support policy](devdocs/PLATFORM_SUPPORT.md)
- [Our Rust style](devdocs/rust_style.md) - [Our Rust style](devdocs/rust_style.md)
## Proposing design changes ## Proposing design changes

Cargo.lock (generated, 2005 changed lines): diff suppressed because it is too large


@ -10,8 +10,8 @@ homepage = "https://www.nushell.sh"
license = "MIT" license = "MIT"
name = "nu" name = "nu"
repository = "https://github.com/nushell/nushell" repository = "https://github.com/nushell/nushell"
rust-version = "1.74.1" rust-version = "1.77.2"
version = "0.90.2" version = "0.92.3"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -31,6 +31,7 @@ members = [
"crates/nu-cmd-base", "crates/nu-cmd-base",
"crates/nu-cmd-extra", "crates/nu-cmd-extra",
"crates/nu-cmd-lang", "crates/nu-cmd-lang",
"crates/nu-cmd-plugin",
"crates/nu-cmd-dataframe", "crates/nu-cmd-dataframe",
"crates/nu-command", "crates/nu-command",
"crates/nu-color-config", "crates/nu-color-config",
@ -40,47 +41,163 @@ members = [
"crates/nu-pretty-hex", "crates/nu-pretty-hex",
"crates/nu-protocol", "crates/nu-protocol",
"crates/nu-plugin", "crates/nu-plugin",
"crates/nu-plugin-test-support",
"crates/nu_plugin_inc", "crates/nu_plugin_inc",
"crates/nu_plugin_gstat", "crates/nu_plugin_gstat",
"crates/nu_plugin_example", "crates/nu_plugin_example",
"crates/nu_plugin_stream_example",
"crates/nu_plugin_query", "crates/nu_plugin_query",
"crates/nu_plugin_custom_values", "crates/nu_plugin_custom_values",
"crates/nu_plugin_formats", "crates/nu_plugin_formats",
"crates/nu_plugin_polars",
"crates/nu_plugin_stress_internals",
"crates/nu-std", "crates/nu-std",
"crates/nu-table", "crates/nu-table",
"crates/nu-term-grid", "crates/nu-term-grid",
"crates/nu-test-support", "crates/nu-test-support",
"crates/nu-utils", "crates/nu-utils",
"crates/nuon",
] ]
[workspace.dependencies]
alphanumeric-sort = "1.5"
ansi-str = "0.8"
base64 = "0.22"
bracoxide = "0.1.2"
brotli = "5.0"
byteorder = "1.5"
bytesize = "1.3"
calamine = "0.24.0"
chardetng = "0.1.17"
chrono = { default-features = false, version = "0.4" }
chrono-humanize = "0.2.3"
chrono-tz = "0.8"
crossbeam-channel = "0.5.8"
crossterm = "0.27"
csv = "1.3"
ctrlc = "3.4"
dialoguer = { default-features = false, version = "0.11" }
digest = { default-features = false, version = "0.10" }
dirs-next = "2.0"
dtparse = "2.0"
encoding_rs = "0.8"
fancy-regex = "0.13"
filesize = "0.2"
filetime = "0.2"
fs_extra = "1.3"
fuzzy-matcher = "0.3"
hamcrest2 = "0.3"
heck = "0.5.0"
human-date-parser = "0.1.1"
indexmap = "2.2"
indicatif = "0.17"
is_executable = "1.0"
itertools = "0.12"
libc = "0.2"
libproc = "0.14"
log = "0.4"
lru = "0.12"
lscolors = { version = "0.17", default-features = false }
lsp-server = "0.7.5"
lsp-types = "0.95.0"
mach2 = "0.4"
md5 = { version = "0.10", package = "md-5" }
miette = "7.2"
mime = "0.3"
mime_guess = "2.0"
mockito = { version = "1.4", default-features = false }
native-tls = "0.2"
nix = { version = "0.28", default-features = false }
notify-debouncer-full = { version = "0.3", default-features = false }
nu-ansi-term = "0.50.0"
num-format = "0.4"
num-traits = "0.2"
omnipath = "0.1"
once_cell = "1.18"
open = "5.1"
os_pipe = "1.1"
pathdiff = "0.2"
percent-encoding = "2"
pretty_assertions = "1.4"
print-positions = "0.6"
procfs = "0.16.0"
pwd = "1.3"
quick-xml = "0.31.0"
quickcheck = "1.0"
quickcheck_macros = "1.0"
rand = "0.8"
ratatui = "0.26"
rayon = "1.10"
reedline = "0.31.0"
regex = "1.9.5"
rmp = "0.8"
rmp-serde = "1.2"
ropey = "1.6.1"
roxmltree = "0.19"
rstest = { version = "0.18", default-features = false }
rusqlite = "0.31"
rust-embed = "8.3.0"
same-file = "1.0"
serde = { version = "1.0", default-features = false }
serde_json = "1.0"
serde_urlencoded = "0.7.1"
serde_yaml = "0.9"
sha2 = "0.10"
strip-ansi-escapes = "0.2.0"
sysinfo = "0.30"
tabled = { version = "0.14.0", default-features = false }
tempfile = "3.10"
terminal_size = "0.3"
titlecase = "2.0"
toml = "0.8"
trash = "3.3"
umask = "2.1"
unicode-segmentation = "1.11"
unicode-width = "0.1"
ureq = { version = "2.9", default-features = false }
url = "2.2"
uu_cp = "0.0.25"
uu_mkdir = "0.0.25"
uu_mktemp = "0.0.25"
uu_mv = "0.0.25"
uu_whoami = "0.0.25"
uu_uname = "0.0.25"
uucore = "0.0.25"
uuid = "1.8.0"
v_htmlescape = "0.15.0"
wax = "0.6"
which = "6.0.0"
windows = "0.54"
winreg = "0.52"
[dependencies] [dependencies]
nu-cli = { path = "./crates/nu-cli", version = "0.90.2" } nu-cli = { path = "./crates/nu-cli", version = "0.92.3" }
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.90.2" } nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.92.3" }
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.90.2" } nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.92.3" }
nu-cmd-dataframe = { path = "./crates/nu-cmd-dataframe", version = "0.90.2", features = [ nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.92.3", optional = true }
nu-cmd-dataframe = { path = "./crates/nu-cmd-dataframe", version = "0.92.3", features = [
"dataframe", "dataframe",
], optional = true } ], optional = true }
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.90.2", optional = true } nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.92.3" }
nu-command = { path = "./crates/nu-command", version = "0.90.2" } nu-command = { path = "./crates/nu-command", version = "0.92.3" }
nu-engine = { path = "./crates/nu-engine", version = "0.90.2" } nu-engine = { path = "./crates/nu-engine", version = "0.92.3" }
nu-explore = { path = "./crates/nu-explore", version = "0.90.2" } nu-explore = { path = "./crates/nu-explore", version = "0.92.3" }
nu-lsp = { path = "./crates/nu-lsp/", version = "0.90.2" } nu-lsp = { path = "./crates/nu-lsp/", version = "0.92.3" }
nu-parser = { path = "./crates/nu-parser", version = "0.90.2" } nu-parser = { path = "./crates/nu-parser", version = "0.92.3" }
nu-path = { path = "./crates/nu-path", version = "0.90.2" } nu-path = { path = "./crates/nu-path", version = "0.92.3" }
nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.90.2" } nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.92.3" }
nu-protocol = { path = "./crates/nu-protocol", version = "0.90.2" } nu-protocol = { path = "./crates/nu-protocol", version = "0.92.3" }
nu-std = { path = "./crates/nu-std", version = "0.90.2" } nu-std = { path = "./crates/nu-std", version = "0.92.3" }
nu-utils = { path = "./crates/nu-utils", version = "0.90.2" } nu-system = { path = "./crates/nu-system", version = "0.92.3" }
nu-utils = { path = "./crates/nu-utils", version = "0.92.3" }
reedline = { version = "0.29.0", features = ["bashisms", "sqlite"] } reedline = { workspace = true, features = ["bashisms", "sqlite"] }
crossterm = "0.27" crossterm = { workspace = true }
ctrlc = "3.4" ctrlc = { workspace = true }
log = "0.4" log = { workspace = true }
miette = { version = "7.1", features = ["fancy-no-backtrace", "fancy"] } miette = { workspace = true, features = ["fancy-no-backtrace", "fancy"] }
mimalloc = { version = "0.1.37", default-features = false, optional = true } mimalloc = { version = "0.1.37", default-features = false, optional = true }
serde_json = "1.0" serde_json = { workspace = true }
simplelog = "0.12" simplelog = "0.12"
time = "0.3" time = "0.3"
@ -92,7 +209,7 @@ openssl = { version = "0.10", features = ["vendored"], optional = true }
winresource = "0.1" winresource = "0.1"
[target.'cfg(target_family = "unix")'.dependencies] [target.'cfg(target_family = "unix")'.dependencies]
nix = { version = "0.27", default-features = false, features = [ nix = { workspace = true, default-features = false, features = [
"signal", "signal",
"process", "process",
"fs", "fs",
@ -100,17 +217,19 @@ nix = { version = "0.27", default-features = false, features = [
] } ] }
[dev-dependencies] [dev-dependencies]
nu-test-support = { path = "./crates/nu-test-support", version = "0.90.2" } nu-test-support = { path = "./crates/nu-test-support", version = "0.92.3" }
assert_cmd = "2.0" assert_cmd = "2.0"
criterion = "0.5" dirs-next = { workspace = true }
pretty_assertions = "1.4" divan = "0.1.14"
rstest = { version = "0.18", default-features = false } pretty_assertions = { workspace = true }
serial_test = "3.0" rstest = { workspace = true, default-features = false }
tempfile = "3.10" serial_test = "3.1"
tempfile = { workspace = true }
[features] [features]
plugin = [ plugin = [
"nu-plugin", "nu-plugin",
"nu-cmd-plugin",
"nu-cli/plugin", "nu-cli/plugin",
"nu-parser/plugin", "nu-parser/plugin",
"nu-command/plugin", "nu-command/plugin",
@ -128,7 +247,6 @@ default-no-clipboard = [
"mimalloc", "mimalloc",
] ]
stable = ["default"] stable = ["default"]
wasi = ["nu-cmd-lang/wasi"]
# NOTE: individual features are also passed to `nu-cmd-lang` that uses them to generate the feature matrix in the `version` command # NOTE: individual features are also passed to `nu-cmd-lang` that uses them to generate the feature matrix in the `version` command
# Enable to statically link OpenSSL (perl is required, to build OpenSSL https://docs.rs/openssl/latest/openssl/); # Enable to statically link OpenSSL (perl is required, to build OpenSSL https://docs.rs/openssl/latest/openssl/);
@ -136,15 +254,16 @@ wasi = ["nu-cmd-lang/wasi"]
static-link-openssl = ["dep:openssl", "nu-cmd-lang/static-link-openssl"] static-link-openssl = ["dep:openssl", "nu-cmd-lang/static-link-openssl"]
mimalloc = ["nu-cmd-lang/mimalloc", "dep:mimalloc"] mimalloc = ["nu-cmd-lang/mimalloc", "dep:mimalloc"]
system-clipboard = ["reedline/system_clipboard"] system-clipboard = [
"reedline/system_clipboard",
"nu-cli/system-clipboard",
"nu-cmd-lang/system-clipboard",
]
# Stable (Default) # Stable (Default)
which-support = ["nu-command/which-support", "nu-cmd-lang/which-support"] which-support = ["nu-command/which-support", "nu-cmd-lang/which-support"]
trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"] trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"]
# Extra feature for nushell
extra = ["dep:nu-cmd-extra", "nu-cmd-lang/extra"]
# Dataframe feature for nushell # Dataframe feature for nushell
dataframe = ["dep:nu-cmd-dataframe", "nu-cmd-lang/dataframe"] dataframe = ["dep:nu-cmd-dataframe", "nu-cmd-lang/dataframe"]
@ -182,7 +301,6 @@ bench = false
reedline = { git = "https://github.com/nushell/reedline", branch = "main" } reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
# nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"} # nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"}
# Criterion benchmarking setup
# Run all benchmarks with `cargo bench` # Run all benchmarks with `cargo bench`
# Run individual benchmarks like `cargo bench -- <regex>` e.g. `cargo bench -- parse` # Run individual benchmarks like `cargo bench -- <regex>` e.g. `cargo bench -- parse`
[[bench]] [[bench]]


@ -228,7 +228,7 @@ Please submit an issue or PR to be added to this list.
See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed! See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed!
<a href="https://github.com/nushell/nushell/graphs/contributors"> <a href="https://github.com/nushell/nushell/graphs/contributors">
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=600" /> <img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=750" />
</a> </a>
## License ## License


@ -1,6 +1,6 @@
# Criterion benchmarks # Divan benchmarks
These are benchmarks using [Criterion](https://github.com/bheisler/criterion.rs), a microbenchmarking tool for Rust. These are benchmarks using [Divan](https://github.com/nvzqz/divan), a microbenchmarking tool for Rust.
Run all benchmarks with `cargo bench` Run all benchmarks with `cargo bench`
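Divan keeps the same harness-level filtering workflow noted in the workspace `Cargo.toml` of this commit, so individual benchmarks can still be selected by a name filter:

    cargo bench                # run every registered benchmark
    cargo bench -- parse       # only benchmarks whose names match 'parse'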


@ -1,13 +1,20 @@
use criterion::{criterion_group, criterion_main, BatchSize, Criterion}; use nu_cli::{eval_source, evaluate_commands};
use nu_cli::eval_source;
use nu_parser::parse; use nu_parser::parse;
use nu_plugin::{Encoder, EncodingType, PluginCallResponse, PluginOutput}; use nu_plugin::{Encoder, EncodingType, PluginCallResponse, PluginOutput};
use nu_protocol::{ use nu_protocol::{
engine::EngineState, eval_const::create_nu_constant, PipelineData, Span, Value, NU_VARIABLE_ID, engine::{EngineState, Stack},
eval_const::create_nu_constant,
PipelineData, Span, Spanned, Value, NU_VARIABLE_ID,
}; };
use nu_std::load_standard_library;
use nu_utils::{get_default_config, get_default_env}; use nu_utils::{get_default_config, get_default_env};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
fn main() {
// Run registered benchmarks.
divan::main();
}
fn load_bench_commands() -> EngineState { fn load_bench_commands() -> EngineState {
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context()) nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
} }
@ -31,41 +38,7 @@ fn get_home_path(engine_state: &EngineState) -> PathBuf {
.unwrap_or_default() .unwrap_or_default()
} }
// FIXME: All benchmarks live in this 1 file to speed up build times when benchmarking. fn setup_engine() -> EngineState {
// When the *_benchmarks functions were in different files, `cargo bench` would build
// an executable for every single one - incredibly slowly. Would be nice to figure out
// a way to split things up again.
fn parser_benchmarks(c: &mut Criterion) {
let mut engine_state = load_bench_commands();
let home_path = get_home_path(&engine_state);
// parsing config.nu breaks without PWD set, so set a valid path
engine_state.add_env_var(
"PWD".into(),
Value::string(home_path.to_string_lossy(), Span::test_data()),
);
let default_env = get_default_env().as_bytes();
c.bench_function("parse_default_env_file", |b| {
b.iter_batched(
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|mut working_set| parse(&mut working_set, None, default_env, false),
BatchSize::SmallInput,
)
});
let default_config = get_default_config().as_bytes();
c.bench_function("parse_default_config_file", |b| {
b.iter_batched(
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|mut working_set| parse(&mut working_set, None, default_config, false),
BatchSize::SmallInput,
)
});
}
fn eval_benchmarks(c: &mut Criterion) {
let mut engine_state = load_bench_commands(); let mut engine_state = load_bench_commands();
let home_path = get_home_path(&engine_state); let home_path = get_home_path(&engine_state);
@ -79,33 +52,319 @@ fn eval_benchmarks(c: &mut Criterion) {
.expect("Failed to create nushell constant."); .expect("Failed to create nushell constant.");
engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const); engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const);
c.bench_function("eval default_env.nu", |b| { engine_state
b.iter(|| { }
let mut stack = nu_protocol::engine::Stack::new();
eval_source(
&mut engine_state,
&mut stack,
get_default_env().as_bytes(),
"default_env.nu",
PipelineData::empty(),
false,
)
})
});
c.bench_function("eval default_config.nu", |b| { fn bench_command(bencher: divan::Bencher, scaled_command: String) {
b.iter(|| { bench_command_with_custom_stack_and_engine(
let mut stack = nu_protocol::engine::Stack::new(); bencher,
scaled_command,
Stack::new(),
setup_engine(),
)
}
fn bench_command_with_custom_stack_and_engine(
bencher: divan::Bencher,
scaled_command: String,
stack: nu_protocol::engine::Stack,
mut engine: EngineState,
) {
load_standard_library(&mut engine).unwrap();
let commands = Spanned {
span: Span::unknown(),
item: scaled_command,
};
bencher
.with_inputs(|| engine.clone())
.bench_values(|mut engine| {
evaluate_commands(
&commands,
&mut engine,
&mut stack.clone(),
PipelineData::empty(),
None,
false,
)
.unwrap();
})
}
fn setup_stack_and_engine_from_command(command: &str) -> (Stack, EngineState) {
let mut engine = setup_engine();
let commands = Spanned {
span: Span::unknown(),
item: command.to_string(),
};
let mut stack = Stack::new();
evaluate_commands(
&commands,
&mut engine,
&mut stack,
PipelineData::empty(),
None,
false,
)
.unwrap();
(stack, engine)
}
// FIXME: All benchmarks live in this 1 file to speed up build times when benchmarking.
// When the *_benchmarks functions were in different files, `cargo bench` would build
// an executable for every single one - incredibly slowly. Would be nice to figure out
// a way to split things up again.
#[divan::bench]
fn load_standard_lib(bencher: divan::Bencher) {
let engine = setup_engine();
bencher
.with_inputs(|| engine.clone())
.bench_values(|mut engine| {
load_standard_library(&mut engine).unwrap();
})
}
#[divan::bench_group]
mod record {
use super::*;
fn create_flat_record_string(n: i32) -> String {
let mut s = String::from("let record = {");
for i in 0..n {
s.push_str(&format!("col_{}: {}", i, i));
if i < n - 1 {
s.push_str(", ");
}
}
s.push('}');
s
}
fn create_nested_record_string(depth: i32) -> String {
let mut s = String::from("let record = {");
for _ in 0..depth {
s.push_str("col: {");
}
s.push_str("col_final: 0");
for _ in 0..depth {
s.push('}');
}
s.push('}');
s
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn create(bencher: divan::Bencher, n: i32) {
bench_command(bencher, create_flat_record_string(n));
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn flat_access(bencher: divan::Bencher, n: i32) {
let (stack, engine) = setup_stack_and_engine_from_command(&create_flat_record_string(n));
bench_command_with_custom_stack_and_engine(
bencher,
"$record.col_0 | ignore".to_string(),
stack,
engine,
);
}
#[divan::bench(args = [1, 2, 4, 8, 16, 32, 64, 128])]
fn nest_access(bencher: divan::Bencher, depth: i32) {
let (stack, engine) =
setup_stack_and_engine_from_command(&create_nested_record_string(depth));
let nested_access = ".col".repeat(depth as usize);
bench_command_with_custom_stack_and_engine(
bencher,
format!("$record{} | ignore", nested_access),
stack,
engine,
);
}
}
#[divan::bench_group]
mod table {
use super::*;
fn create_example_table_nrows(n: i32) -> String {
let mut s = String::from("let table = [[foo bar baz]; ");
for i in 0..n {
s.push_str(&format!("[0, 1, {i}]"));
if i < n - 1 {
s.push_str(", ");
}
}
s.push(']');
s
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn create(bencher: divan::Bencher, n: i32) {
bench_command(bencher, create_example_table_nrows(n));
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn get(bencher: divan::Bencher, n: i32) {
let (stack, engine) = setup_stack_and_engine_from_command(&create_example_table_nrows(n));
bench_command_with_custom_stack_and_engine(
bencher,
"$table | get bar | math sum | ignore".to_string(),
stack,
engine,
);
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn select(bencher: divan::Bencher, n: i32) {
let (stack, engine) = setup_stack_and_engine_from_command(&create_example_table_nrows(n));
bench_command_with_custom_stack_and_engine(
bencher,
"$table | select foo baz | ignore".to_string(),
stack,
engine,
);
}
}
#[divan::bench_group]
mod eval_commands {
use super::*;
#[divan::bench(args = [100, 1_000, 10_000])]
fn interleave(bencher: divan::Bencher, n: i32) {
bench_command(
bencher,
format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
)
}
#[divan::bench(args = [100, 1_000, 10_000])]
fn interleave_with_ctrlc(bencher: divan::Bencher, n: i32) {
let mut engine = setup_engine();
engine.ctrlc = Some(std::sync::Arc::new(std::sync::atomic::AtomicBool::new(
false,
)));
load_standard_library(&mut engine).unwrap();
let commands = Spanned {
span: Span::unknown(),
item: format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
};
bencher
.with_inputs(|| engine.clone())
.bench_values(|mut engine| {
evaluate_commands(
&commands,
&mut engine,
&mut nu_protocol::engine::Stack::new(),
PipelineData::empty(),
None,
false,
)
.unwrap();
})
}
#[divan::bench(args = [1, 5, 10, 100, 1_000])]
fn for_range(bencher: divan::Bencher, n: i32) {
bench_command(bencher, format!("(for $x in (1..{}) {{ sleep 50ns }})", n))
}
#[divan::bench(args = [1, 5, 10, 100, 1_000])]
fn each(bencher: divan::Bencher, n: i32) {
bench_command(
bencher,
format!("(1..{}) | each {{|_| sleep 50ns }} | ignore", n),
)
}
#[divan::bench(args = [1, 5, 10, 100, 1_000])]
fn par_each_1t(bencher: divan::Bencher, n: i32) {
bench_command(
bencher,
format!("(1..{}) | par-each -t 1 {{|_| sleep 50ns }} | ignore", n),
)
}
#[divan::bench(args = [1, 5, 10, 100, 1_000])]
fn par_each_2t(bencher: divan::Bencher, n: i32) {
bench_command(
bencher,
format!("(1..{}) | par-each -t 2 {{|_| sleep 50ns }} | ignore", n),
)
}
}
#[divan::bench_group()]
mod parser_benchmarks {
use super::*;
#[divan::bench()]
fn parse_default_config_file(bencher: divan::Bencher) {
let engine_state = setup_engine();
let default_env = get_default_config().as_bytes();
bencher
.with_inputs(|| nu_protocol::engine::StateWorkingSet::new(&engine_state))
.bench_refs(|working_set| parse(working_set, None, default_env, false))
}
#[divan::bench()]
fn parse_default_env_file(bencher: divan::Bencher) {
let engine_state = setup_engine();
let default_env = get_default_env().as_bytes();
bencher
.with_inputs(|| nu_protocol::engine::StateWorkingSet::new(&engine_state))
.bench_refs(|working_set| parse(working_set, None, default_env, false))
}
}
#[divan::bench_group()]
mod eval_benchmarks {
use super::*;
#[divan::bench()]
fn eval_default_env(bencher: divan::Bencher) {
let default_env = get_default_env().as_bytes();
let fname = "default_env.nu";
bencher
.with_inputs(|| (setup_engine(), nu_protocol::engine::Stack::new()))
.bench_values(|(mut engine_state, mut stack)| {
eval_source( eval_source(
&mut engine_state, &mut engine_state,
&mut stack, &mut stack,
get_default_config().as_bytes(), default_env,
"default_config.nu", fname,
PipelineData::empty(), PipelineData::empty(),
false, false,
) )
}) })
}); }
#[divan::bench()]
fn eval_default_config(bencher: divan::Bencher) {
let default_env = get_default_config().as_bytes();
let fname = "default_config.nu";
bencher
.with_inputs(|| (setup_engine(), nu_protocol::engine::Stack::new()))
.bench_values(|(mut engine_state, mut stack)| {
eval_source(
&mut engine_state,
&mut stack,
default_env,
fname,
PipelineData::empty(),
false,
)
})
}
} }
// generate a new table data with `row_cnt` rows, `col_cnt` columns. // generate a new table data with `row_cnt` rows, `col_cnt` columns.
@ -119,54 +378,76 @@ fn encoding_test_data(row_cnt: usize, col_cnt: usize) -> Value {
Value::list(vec![record; row_cnt], Span::test_data()) Value::list(vec![record; row_cnt], Span::test_data())
} }
fn encoding_benchmarks(c: &mut Criterion) { #[divan::bench_group()]
let mut group = c.benchmark_group("Encoding"); mod encoding_benchmarks {
let test_cnt_pairs = [(100, 5), (10000, 15)]; use super::*;
for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
for fmt in ["json", "msgpack"] { #[divan::bench(args = [(100, 5), (10000, 15)])]
group.bench_function(&format!("{fmt} encode {row_cnt} * {col_cnt}"), |b| { fn json_encode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
let mut res = vec![];
let test_data = PluginOutput::CallResponse( let test_data = PluginOutput::CallResponse(
0, 0,
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)), PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
); );
let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap(); let encoder = EncodingType::try_from_bytes(b"json").unwrap();
b.iter(|| encoder.encode(&test_data, &mut res)) bencher
}); .with_inputs(Vec::new)
.bench_values(|mut res| encoder.encode(&test_data, &mut res))
} }
#[divan::bench(args = [(100, 5), (10000, 15)])]
fn msgpack_encode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
let test_data = PluginOutput::CallResponse(
0,
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
);
let encoder = EncodingType::try_from_bytes(b"msgpack").unwrap();
bencher
.with_inputs(Vec::new)
.bench_values(|mut res| encoder.encode(&test_data, &mut res))
} }
group.finish();
} }
fn decoding_benchmarks(c: &mut Criterion) { #[divan::bench_group()]
let mut group = c.benchmark_group("Decoding"); mod decoding_benchmarks {
let test_cnt_pairs = [(100, 5), (10000, 15)]; use super::*;
for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
for fmt in ["json", "msgpack"] { #[divan::bench(args = [(100, 5), (10000, 15)])]
group.bench_function(&format!("{fmt} decode for {row_cnt} * {col_cnt}"), |b| { fn json_decode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
let mut res = vec![];
let test_data = PluginOutput::CallResponse( let test_data = PluginOutput::CallResponse(
0, 0,
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)), PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
); );
let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap(); let encoder = EncodingType::try_from_bytes(b"json").unwrap();
let mut res = vec![];
encoder.encode(&test_data, &mut res).unwrap(); encoder.encode(&test_data, &mut res).unwrap();
let mut binary_data = std::io::Cursor::new(res); bencher
b.iter(|| -> Result<Option<PluginOutput>, _> { .with_inputs(|| {
let mut binary_data = std::io::Cursor::new(res.clone());
binary_data.set_position(0); binary_data.set_position(0);
binary_data
})
.bench_values(|mut binary_data| -> Result<Option<PluginOutput>, _> {
encoder.decode(&mut binary_data) encoder.decode(&mut binary_data)
}) })
});
} }
}
group.finish();
}
criterion_group!( #[divan::bench(args = [(100, 5), (10000, 15)])]
benches, fn msgpack_decode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
parser_benchmarks, let test_data = PluginOutput::CallResponse(
eval_benchmarks, 0,
encoding_benchmarks, PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
decoding_benchmarks );
); let encoder = EncodingType::try_from_bytes(b"msgpack").unwrap();
criterion_main!(benches); let mut res = vec![];
encoder.encode(&test_data, &mut res).unwrap();
bencher
.with_inputs(|| {
let mut binary_data = std::io::Cursor::new(res.clone());
binary_data.set_position(0);
binary_data
})
.bench_values(|mut binary_data| -> Result<Option<PluginOutput>, _> {
encoder.decode(&mut binary_data)
})
}
}


@ -5,43 +5,45 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli"
edition = "2021" edition = "2021"
license = "MIT" license = "MIT"
name = "nu-cli" name = "nu-cli"
version = "0.90.2" version = "0.92.3"
[lib] [lib]
bench = false bench = false
[dev-dependencies] [dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.90.2" } nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.92.3" }
nu-command = { path = "../nu-command", version = "0.90.2" } nu-command = { path = "../nu-command", version = "0.92.3" }
nu-test-support = { path = "../nu-test-support", version = "0.90.2" } nu-test-support = { path = "../nu-test-support", version = "0.92.3" }
rstest = { version = "0.18.1", default-features = false } rstest = { workspace = true, default-features = false }
[dependencies] [dependencies]
nu-cmd-base = { path = "../nu-cmd-base", version = "0.90.2" } nu-cmd-base = { path = "../nu-cmd-base", version = "0.92.3" }
nu-engine = { path = "../nu-engine", version = "0.90.2" } nu-engine = { path = "../nu-engine", version = "0.92.3" }
nu-path = { path = "../nu-path", version = "0.90.2" } nu-path = { path = "../nu-path", version = "0.92.3" }
nu-parser = { path = "../nu-parser", version = "0.90.2" } nu-parser = { path = "../nu-parser", version = "0.92.3" }
nu-protocol = { path = "../nu-protocol", version = "0.90.2" } nu-plugin = { path = "../nu-plugin", version = "0.92.3", optional = true }
nu-utils = { path = "../nu-utils", version = "0.90.2" } nu-protocol = { path = "../nu-protocol", version = "0.92.3" }
nu-color-config = { path = "../nu-color-config", version = "0.90.2" } nu-utils = { path = "../nu-utils", version = "0.92.3" }
nu-ansi-term = "0.50.0" nu-color-config = { path = "../nu-color-config", version = "0.92.3" }
reedline = { version = "0.29.0", features = ["bashisms", "sqlite"] } nu-ansi-term = { workspace = true }
reedline = { workspace = true, features = ["bashisms", "sqlite"] }
chrono = { default-features = false, features = ["std"], version = "0.4" } chrono = { default-features = false, features = ["std"], workspace = true }
crossterm = "0.27" crossterm = { workspace = true }
fancy-regex = "0.13" fancy-regex = { workspace = true }
fuzzy-matcher = "0.3" fuzzy-matcher = { workspace = true }
is_executable = "1.0" is_executable = { workspace = true }
log = "0.4" log = { workspace = true }
miette = { version = "7.1", features = ["fancy-no-backtrace"] } miette = { workspace = true, features = ["fancy-no-backtrace"] }
lscolors = { version = "0.17", default-features = false, features = ["nu-ansi-term"] } lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
once_cell = "1.18" once_cell = { workspace = true }
percent-encoding = "2" percent-encoding = { workspace = true }
pathdiff = "0.2" pathdiff = { workspace = true }
sysinfo = "0.30" sysinfo = { workspace = true }
unicode-segmentation = "1.11" unicode-segmentation = { workspace = true }
uuid = { version = "1.6.0", features = ["v4"] } uuid = { workspace = true, features = ["v4"] }
which = "6.0.0" which = { workspace = true }
[features] [features]
plugin = [] plugin = ["nu-plugin"]
system-clipboard = ["reedline/system_clipboard"]


@ -1,10 +1,4 @@
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Type, Value,
};
use unicode_segmentation::UnicodeSegmentation;
#[derive(Clone)] #[derive(Clone)]
pub struct Commandline; pub struct Commandline;
@ -16,45 +10,12 @@ impl Command for Commandline {
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature::build("commandline") Signature::build("commandline")
.input_output_types(vec![ .input_output_types(vec![(Type::Nothing, Type::String)])
(Type::Nothing, Type::Nothing),
(Type::String, Type::String),
])
.switch(
"cursor",
"Set or get the current cursor position",
Some('c'),
)
.switch(
"cursor-end",
"Set the current cursor position to the end of the buffer",
Some('e'),
)
.switch(
"append",
"appends the string to the end of the buffer",
Some('a'),
)
.switch(
"insert",
"inserts the string into the buffer at the cursor position",
Some('i'),
)
.switch(
"replace",
"replaces the current contents of the buffer (default)",
Some('r'),
)
.optional(
"cmd",
SyntaxShape::String,
"the string to perform the operation with",
)
.category(Category::Core) .category(Category::Core)
} }
fn usage(&self) -> &str { fn usage(&self) -> &str {
"View or modify the current command line input buffer." "View the current command line input buffer."
} }
fn search_terms(&self) -> Vec<&str> { fn search_terms(&self) -> Vec<&str> {
@ -64,126 +25,11 @@ impl Command for Commandline {
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, _stack: &mut Stack,
call: &Call, call: &Call,
_input: PipelineData, _input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
if let Some(cmd) = call.opt::<Value>(engine_state, stack, 0)? { let repl = engine_state.repl_state.lock().expect("repl state mutex");
let span = cmd.span(); Ok(Value::string(repl.buffer.clone(), call.head).into_pipeline_data())
let cmd = cmd.coerce_into_string()?;
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
if call.has_flag(engine_state, stack, "cursor")? {
nu_protocol::report_error_new(
engine_state,
&ShellError::GenericError {
error: "`--cursor (-c)` is deprecated".into(),
msg: "Setting the current cursor position by `--cursor (-c)` is deprecated"
.into(),
span: Some(call.arguments_span()),
help: Some("Use `commandline set-cursor`".into()),
inner: vec![],
},
);
match cmd.parse::<i64>() {
Ok(n) => {
repl.cursor_pos = if n <= 0 {
0usize
} else {
repl.buffer
.grapheme_indices(true)
.map(|(i, _c)| i)
.nth(n as usize)
.unwrap_or(repl.buffer.len())
}
}
Err(_) => {
return Err(ShellError::CantConvert {
to_type: "int".to_string(),
from_type: "string".to_string(),
span,
help: Some(format!(r#"string "{cmd}" does not represent a valid int"#)),
})
}
}
} else if call.has_flag(engine_state, stack, "append")? {
nu_protocol::report_error_new(
engine_state,
&ShellError::GenericError {
error: "`--append (-a)` is deprecated".into(),
msg: "Appending the string to the end of the buffer by `--append (-a)` is deprecated".into(),
span: Some(call.arguments_span()),
help: Some("Use `commandline edit --append (-a)`".into()),
inner: vec![],
},
);
repl.buffer.push_str(&cmd);
} else if call.has_flag(engine_state, stack, "insert")? {
nu_protocol::report_error_new(
engine_state,
&ShellError::GenericError {
error: "`--insert (-i)` is deprecated".into(),
msg: "Inserts the string into the buffer at the cursor position by `--insert (-i)` is deprecated".into(),
span: Some(call.arguments_span()),
help: Some("Use `commandline edit --insert (-i)`".into()),
inner: vec![],
},
);
let cursor_pos = repl.cursor_pos;
repl.buffer.insert_str(cursor_pos, &cmd);
repl.cursor_pos += cmd.len();
} else {
nu_protocol::report_error_new(
engine_state,
&ShellError::GenericError {
error: "`--replace (-r)` is deprecated".into(),
msg: "Replaceing the current contents of the buffer by `--replace (-p)` or positional argument is deprecated".into(),
span: Some(call.arguments_span()),
help: Some("Use `commandline edit --replace (-r)`".into()),
inner: vec![],
},
);
repl.buffer = cmd;
repl.cursor_pos = repl.buffer.len();
}
Ok(Value::nothing(call.head).into_pipeline_data())
} else {
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
if call.has_flag(engine_state, stack, "cursor-end")? {
nu_protocol::report_error_new(
engine_state,
&ShellError::GenericError {
error: "`--cursor-end (-e)` is deprecated".into(),
msg: "Setting the current cursor position to the end of the buffer by `--cursor-end (-e)` is deprecated".into(),
span: Some(call.arguments_span()),
help: Some("Use `commandline set-cursor --end (-e)`".into()),
inner: vec![],
},
);
repl.cursor_pos = repl.buffer.len();
Ok(Value::nothing(call.head).into_pipeline_data())
} else if call.has_flag(engine_state, stack, "cursor")? {
nu_protocol::report_error_new(
engine_state,
&ShellError::GenericError {
error: "`--cursor (-c)` is deprecated".into(),
msg: "Getting the current cursor position by `--cursor (-c)` is deprecated"
.into(),
span: Some(call.arguments_span()),
help: Some("Use `commandline get-cursor`".into()),
inner: vec![],
},
);
let char_pos = repl
.buffer
.grapheme_indices(true)
.chain(std::iter::once((repl.buffer.len(), "")))
.position(|(i, _c)| i == repl.cursor_pos)
.expect("Cursor position isn't on a grapheme boundary");
Ok(Value::string(char_pos.to_string(), call.head).into_pipeline_data())
} else {
Ok(Value::string(repl.buffer.to_string(), call.head).into_pipeline_data())
}
}
} }
} }
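The replacement body above reduces bare `commandline` to returning the REPL buffer; the byte-offset-to-grapheme translation from the deleted `--cursor` branch now lives in the `commandline get-cursor` subcommand. A minimal sketch of that translation, not taken from the diff (the helper name is made up; it assumes only the unicode_segmentation crate already used in this file):

use unicode_segmentation::UnicodeSegmentation;

// Convert the byte offset stored in the REPL state into the grapheme index
// reported to the user; the end-of-buffer position is a valid cursor spot too.
fn byte_pos_to_grapheme_index(buffer: &str, byte_pos: usize) -> Option<usize> {
    buffer
        .grapheme_indices(true)
        .chain(std::iter::once((buffer.len(), "")))
        .position(|(i, _)| i == byte_pos)
}

// byte_pos_to_grapheme_index("héllo", 3) == Some(2): "é" takes two bytes,
// so byte 3 starts the third grapheme.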

View File

@ -1,9 +1,4 @@
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct SubCommand; pub struct SubCommand;

View File

@ -1,8 +1,4 @@
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
};
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
#[derive(Clone)] #[derive(Clone)]

View File

@ -1,9 +1,5 @@
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Type, Value,
};
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
#[derive(Clone)] #[derive(Clone)]

View File

@ -1,6 +1,5 @@
use nu_protocol::engine::{EngineState, StateWorkingSet};
use crate::commands::*; use crate::commands::*;
use nu_protocol::engine::{EngineState, StateWorkingSet};
pub fn add_cli_context(mut engine_state: EngineState) -> EngineState { pub fn add_cli_context(mut engine_state: EngineState) -> EngineState {
let delta = { let delta = {

View File

@ -1,10 +1,5 @@
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::ast::Call; use nu_protocol::HistoryFileFormat;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{
record, Category, Example, HistoryFileFormat, IntoInterruptiblePipelineData, PipelineData,
ShellError, Signature, Span, Type, Value,
};
use reedline::{ use reedline::{
FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery, FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery,
SqliteBackedHistory, SqliteBackedHistory,

View File

@ -1,8 +1,4 @@
use nu_protocol::ast::Call; use nu_engine::command_prelude::*;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct HistorySession; pub struct HistorySession;

View File

@ -1,9 +1,4 @@
use nu_engine::get_full_help; use nu_engine::{command_prelude::*, get_full_help};
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct Keybindings; pub struct Keybindings;

View File

@ -1,8 +1,4 @@
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
};
use reedline::get_reedline_default_keybindings; use reedline::get_reedline_default_keybindings;
#[derive(Clone)] #[derive(Clone)]
@ -16,7 +12,7 @@ impl Command for KeybindingsDefault {
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature::build(self.name()) Signature::build(self.name())
.category(Category::Platform) .category(Category::Platform)
.input_output_types(vec![(Type::Nothing, Type::Table(vec![]))]) .input_output_types(vec![(Type::Nothing, Type::table())])
} }
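The `Type::Table(vec![])` to `Type::table()` change here (and in the next file) only swaps the spelling for an unconstrained table type. A minimal sketch, assuming the `Type::table()` constructor shown in the diff:

use nu_protocol::Type;

// Declares "takes nothing, produces a table with unspecified columns",
// matching the updated signature above.
fn table_io_types() -> Vec<(Type, Type)> {
    vec![(Type::Nothing, Type::table())]
}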
fn usage(&self) -> &str { fn usage(&self) -> &str {

View File

@ -1,9 +1,4 @@
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Type,
Value,
};
use reedline::{ use reedline::{
get_reedline_edit_commands, get_reedline_keybinding_modifiers, get_reedline_keycodes, get_reedline_edit_commands, get_reedline_keybinding_modifiers, get_reedline_keycodes,
get_reedline_prompt_edit_modes, get_reedline_reedline_events, get_reedline_prompt_edit_modes, get_reedline_reedline_events,
@ -19,7 +14,7 @@ impl Command for KeybindingsList {
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature::build(self.name()) Signature::build(self.name())
.input_output_types(vec![(Type::Nothing, Type::Table(vec![]))]) .input_output_types(vec![(Type::Nothing, Type::table())])
.switch("modifiers", "list of modifiers", Some('m')) .switch("modifiers", "list of modifiers", Some('m'))
.switch("keycodes", "list of keycodes", Some('k')) .switch("keycodes", "list of keycodes", Some('k'))
.switch("modes", "list of edit modes", Some('o')) .switch("modes", "list of edit modes", Some('o'))

View File

@ -1,12 +1,7 @@
use crossterm::execute; use crossterm::{
use crossterm::QueueableCommand; event::Event, event::KeyCode, event::KeyEvent, execute, terminal, QueueableCommand,
use crossterm::{event::Event, event::KeyCode, event::KeyEvent, terminal};
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{
record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Type,
Value,
}; };
use nu_engine::command_prelude::*;
use std::io::{stdout, Write}; use std::io::{stdout, Write};
#[derive(Clone)] #[derive(Clone)]

View File

@ -13,13 +13,13 @@ pub trait Completer {
offset: usize, offset: usize,
pos: usize, pos: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<Suggestion>; ) -> Vec<SemanticSuggestion>;
fn get_sort_by(&self) -> SortBy { fn get_sort_by(&self) -> SortBy {
SortBy::Ascending SortBy::Ascending
} }
fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> { fn sort(&self, items: Vec<SemanticSuggestion>, prefix: Vec<u8>) -> Vec<SemanticSuggestion> {
let prefix_str = String::from_utf8_lossy(&prefix).to_string(); let prefix_str = String::from_utf8_lossy(&prefix).to_string();
let mut filtered_items = items; let mut filtered_items = items;
@ -27,13 +27,13 @@ pub trait Completer {
match self.get_sort_by() { match self.get_sort_by() {
SortBy::LevenshteinDistance => { SortBy::LevenshteinDistance => {
filtered_items.sort_by(|a, b| { filtered_items.sort_by(|a, b| {
let a_distance = levenshtein_distance(&prefix_str, &a.value); let a_distance = levenshtein_distance(&prefix_str, &a.suggestion.value);
let b_distance = levenshtein_distance(&prefix_str, &b.value); let b_distance = levenshtein_distance(&prefix_str, &b.suggestion.value);
a_distance.cmp(&b_distance) a_distance.cmp(&b_distance)
}); });
} }
SortBy::Ascending => { SortBy::Ascending => {
filtered_items.sort_by(|a, b| a.value.cmp(&b.value)); filtered_items.sort_by(|a, b| a.suggestion.value.cmp(&b.suggestion.value));
} }
SortBy::None => {} SortBy::None => {}
}; };
@ -41,3 +41,25 @@ pub trait Completer {
filtered_items filtered_items
} }
} }
#[derive(Debug, Default, PartialEq)]
pub struct SemanticSuggestion {
pub suggestion: Suggestion,
pub kind: Option<SuggestionKind>,
}
// TODO: think about name: maybe suggestion context?
#[derive(Clone, Debug, PartialEq)]
pub enum SuggestionKind {
Command(nu_protocol::engine::CommandType),
Type(nu_protocol::Type),
}
impl From<Suggestion> for SemanticSuggestion {
fn from(suggestion: Suggestion) -> Self {
Self {
suggestion,
..Default::default()
}
}
}
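As an illustration of the new wrapper (a sketch, not taken from the diff; the helper function is hypothetical): a plain reedline `Suggestion` can be lifted into a `SemanticSuggestion` through the `From` impl above, and a completer that knows more about the value can attach `kind` afterwards.

fn to_semantic(value: String, span: reedline::Span) -> SemanticSuggestion {
    let plain = Suggestion {
        value,
        description: None,
        style: None,
        extra: None,
        span,
        append_whitespace: false,
    };
    // From<Suggestion> leaves `kind` as None via ..Default::default()
    let mut semantic: SemanticSuggestion = plain.into();
    // A completer with more context can fill in the semantic kind later:
    semantic.kind = Some(SuggestionKind::Type(nu_protocol::Type::String));
    semantic
}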

View File

@ -1,12 +1,17 @@
use crate::completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy}; use crate::{
completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy},
SuggestionKind,
};
use nu_parser::FlatShape; use nu_parser::FlatShape;
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, StateWorkingSet}, engine::{CachedFile, EngineState, StateWorkingSet},
Span, Span,
}; };
use reedline::Suggestion; use reedline::Suggestion;
use std::sync::Arc; use std::sync::Arc;
use super::SemanticSuggestion;
pub struct CommandCompletion { pub struct CommandCompletion {
engine_state: Arc<EngineState>, engine_state: Arc<EngineState>,
flattened: Vec<(Span, FlatShape)>, flattened: Vec<(Span, FlatShape)>,
@ -83,7 +88,7 @@ impl CommandCompletion {
offset: usize, offset: usize,
find_externals: bool, find_externals: bool,
match_algorithm: MatchAlgorithm, match_algorithm: MatchAlgorithm,
) -> Vec<Suggestion> { ) -> Vec<SemanticSuggestion> {
let partial = working_set.get_span_contents(span); let partial = working_set.get_span_contents(span);
let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial); let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial);
@ -91,13 +96,16 @@ impl CommandCompletion {
let mut results = working_set let mut results = working_set
.find_commands_by_predicate(filter_predicate, true) .find_commands_by_predicate(filter_predicate, true)
.into_iter() .into_iter()
.map(move |x| Suggestion { .map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&x.0).to_string(), value: String::from_utf8_lossy(&x.0).to_string(),
description: x.1, description: x.1,
style: None, style: None,
extra: None, extra: None,
span: reedline::Span::new(span.start - offset, span.end - offset), span: reedline::Span::new(span.start - offset, span.end - offset),
append_whitespace: true, append_whitespace: true,
},
kind: Some(SuggestionKind::Command(x.2)),
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
@ -108,27 +116,34 @@ impl CommandCompletion {
let results_external = self let results_external = self
.external_command_completion(&partial, match_algorithm) .external_command_completion(&partial, match_algorithm)
.into_iter() .into_iter()
.map(move |x| Suggestion { .map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: x, value: x,
description: None, description: None,
style: None, style: None,
extra: None, extra: None,
span: reedline::Span::new(span.start - offset, span.end - offset), span: reedline::Span::new(span.start - offset, span.end - offset),
append_whitespace: true, append_whitespace: true,
},
// TODO: is there a way to create a test?
kind: None,
}); });
let results_strings: Vec<String> = let results_strings: Vec<String> =
results.clone().into_iter().map(|x| x.value).collect(); results.iter().map(|x| x.suggestion.value.clone()).collect();
for external in results_external { for external in results_external {
if results_strings.contains(&external.value) { if results_strings.contains(&external.suggestion.value) {
results.push(Suggestion { results.push(SemanticSuggestion {
value: format!("^{}", external.value), suggestion: Suggestion {
value: format!("^{}", external.suggestion.value),
description: None, description: None,
style: None, style: None,
extra: None, extra: None,
span: external.span, span: external.suggestion.span,
append_whitespace: true, append_whitespace: true,
},
kind: external.kind,
}) })
} else { } else {
results.push(external) results.push(external)
@ -151,7 +166,7 @@ impl Completer for CommandCompletion {
offset: usize, offset: usize,
pos: usize, pos: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<Suggestion> { ) -> Vec<SemanticSuggestion> {
let last = self let last = self
.flattened .flattened
.iter() .iter()
@ -229,8 +244,9 @@ pub fn find_non_whitespace_index(contents: &[u8], start: usize) -> usize {
} }
} }
pub fn is_passthrough_command(working_set_file_contents: &[(Vec<u8>, usize, usize)]) -> bool { pub fn is_passthrough_command(working_set_file_contents: &[CachedFile]) -> bool {
for (contents, _, _) in working_set_file_contents { for cached_file in working_set_file_contents {
let contents = &cached_file.content;
let last_pipe_pos_rev = contents.iter().rev().position(|x| x == &b'|'); let last_pipe_pos_rev = contents.iter().rev().position(|x| x == &b'|');
let last_pipe_pos = last_pipe_pos_rev.map(|x| contents.len() - x).unwrap_or(0); let last_pipe_pos = last_pipe_pos_rev.map(|x| contents.len() - x).unwrap_or(0);
@ -295,7 +311,7 @@ mod command_completions_tests {
let input = ele.0.as_bytes(); let input = ele.0.as_bytes();
let mut engine_state = EngineState::new(); let mut engine_state = EngineState::new();
engine_state.add_file("test.nu".into(), vec![]); engine_state.add_file("test.nu".into(), Arc::new([]));
let delta = { let delta = {
let mut working_set = StateWorkingSet::new(&engine_state); let mut working_set = StateWorkingSet::new(&engine_state);

View File

@ -4,15 +4,16 @@ use crate::completions::{
}; };
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style}; use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
use nu_engine::eval_block; use nu_engine::eval_block;
use nu_parser::{flatten_expression, parse, FlatShape}; use nu_parser::{flatten_pipeline_element, parse, FlatShape};
use nu_protocol::{ use nu_protocol::{
ast::PipelineElement, debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet}, engine::{Closure, EngineState, Stack, StateWorkingSet},
BlockId, PipelineData, Span, Value, PipelineData, Span, Value,
}; };
use reedline::{Completer as ReedlineCompleter, Suggestion}; use reedline::{Completer as ReedlineCompleter, Suggestion};
use std::str; use std::{str, sync::Arc};
use std::sync::Arc;
use super::base::{SemanticSuggestion, SuggestionKind};
#[derive(Clone)] #[derive(Clone)]
pub struct NuCompleter { pub struct NuCompleter {
@ -24,10 +25,14 @@ impl NuCompleter {
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self { pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
Self { Self {
engine_state, engine_state,
stack, stack: stack.reset_out_dest().capture(),
} }
} }
pub fn fetch_completions_at(&mut self, line: &str, pos: usize) -> Vec<SemanticSuggestion> {
self.completion_helper(line, pos)
}
// Process the completion for a given completer // Process the completion for a given completer
fn process_completion<T: Completer>( fn process_completion<T: Completer>(
&self, &self,
@ -37,7 +42,7 @@ impl NuCompleter {
new_span: Span, new_span: Span,
offset: usize, offset: usize,
pos: usize, pos: usize,
) -> Vec<Suggestion> { ) -> Vec<SemanticSuggestion> {
let config = self.engine_state.get_config(); let config = self.engine_state.get_config();
let options = CompletionOptions { let options = CompletionOptions {
@ -58,14 +63,15 @@ impl NuCompleter {
fn external_completion( fn external_completion(
&self, &self,
block_id: BlockId, closure: &Closure,
spans: &[String], spans: &[String],
offset: usize, offset: usize,
span: Span, span: Span,
) -> Option<Vec<Suggestion>> { ) -> Option<Vec<SemanticSuggestion>> {
let stack = self.stack.clone(); let block = self.engine_state.get_block(closure.block_id);
let block = self.engine_state.get_block(block_id); let mut callee_stack = self
let mut callee_stack = stack.gather_captures(&self.engine_state, &block.captures); .stack
.captures_to_stack_preserve_out_dest(closure.captures.clone());
// Line // Line
if let Some(pos_arg) = block.signature.required_positional.first() { if let Some(pos_arg) = block.signature.required_positional.first() {
@ -83,13 +89,11 @@ impl NuCompleter {
} }
} }
let result = eval_block( let result = eval_block::<WithoutDebug>(
&self.engine_state, &self.engine_state,
&mut callee_stack, &mut callee_stack,
block, block,
PipelineData::empty(), PipelineData::empty(),
true,
true,
); );
match result { match result {
@ -108,7 +112,7 @@ impl NuCompleter {
None None
} }
fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<Suggestion> { fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<SemanticSuggestion> {
let mut working_set = StateWorkingSet::new(&self.engine_state); let mut working_set = StateWorkingSet::new(&self.engine_state);
let offset = working_set.next_span_start(); let offset = working_set.next_span_start();
// TODO: Callers should be trimming the line themselves // TODO: Callers should be trimming the line themselves
@ -125,28 +129,15 @@ impl NuCompleter {
let output = parse(&mut working_set, Some("completer"), line.as_bytes(), false); let output = parse(&mut working_set, Some("completer"), line.as_bytes(), false);
for pipeline in output.pipelines.into_iter() { for pipeline in &output.pipelines {
for pipeline_element in pipeline.elements { for pipeline_element in &pipeline.elements {
match pipeline_element { let flattened = flatten_pipeline_element(&working_set, pipeline_element);
PipelineElement::Expression(_, expr)
| PipelineElement::ErrPipedExpression(_, expr)
| PipelineElement::OutErrPipedExpression(_, expr)
| PipelineElement::Redirection(_, _, expr, _)
| PipelineElement::And(_, expr)
| PipelineElement::Or(_, expr)
| PipelineElement::SameTargetRedirection { cmd: (_, expr), .. }
| PipelineElement::SeparateRedirection {
out: (_, expr, _), ..
} => {
let flattened: Vec<_> = flatten_expression(&working_set, &expr);
let mut spans: Vec<String> = vec![]; let mut spans: Vec<String> = vec![];
for (flat_idx, flat) in flattened.iter().enumerate() { for (flat_idx, flat) in flattened.iter().enumerate() {
let is_passthrough_command = spans let is_passthrough_command = spans
.first() .first()
.filter(|content| { .filter(|content| content.as_str() == "sudo" || content.as_str() == "doas")
content.as_str() == "sudo" || content.as_str() == "doas"
})
.is_some(); .is_some();
// Read the current span to string // Read the current span to string
let current_span = working_set.get_span_contents(flat.0).to_vec(); let current_span = working_set.get_span_contents(flat.0).to_vec();
@ -203,7 +194,7 @@ impl NuCompleter {
// Flags completion // Flags completion
if prefix.starts_with(b"-") { if prefix.starts_with(b"-") {
// Try to complete flag internally // Try to complete flag internally
let mut completer = FlagCompletion::new(expr.clone()); let mut completer = FlagCompletion::new(pipeline_element.expr.clone());
let result = self.process_completion( let result = self.process_completion(
&mut completer, &mut completer,
&working_set, &working_set,
@ -219,13 +210,10 @@ impl NuCompleter {
// We got no results for internal completion // We got no results for internal completion
// now we can check if external completer is set and use it // now we can check if external completer is set and use it
if let Some(block_id) = config.external_completer { if let Some(closure) = config.external_completer.as_ref() {
if let Some(external_result) = self.external_completion( if let Some(external_result) =
block_id, self.external_completion(closure, &spans, fake_offset, new_span)
&spans, {
fake_offset,
new_span,
) {
return external_result; return external_result;
} }
} }
@ -233,8 +221,7 @@ impl NuCompleter {
// specially check if it is currently empty - always complete commands // specially check if it is currently empty - always complete commands
if (is_passthrough_command && flat_idx == 1) if (is_passthrough_command && flat_idx == 1)
|| (flat_idx == 0 || (flat_idx == 0 && working_set.get_span_contents(new_span).is_empty())
&& working_set.get_span_contents(new_span).is_empty())
{ {
let mut completer = CommandCompletion::new( let mut completer = CommandCompletion::new(
self.engine_state.clone(), self.engine_state.clone(),
@ -370,9 +357,9 @@ impl NuCompleter {
} }
// Try to complete using an external completer (if set) // Try to complete using an external completer (if set)
if let Some(block_id) = config.external_completer { if let Some(closure) = config.external_completer.as_ref() {
if let Some(external_result) = self.external_completion( if let Some(external_result) = self.external_completion(
block_id, closure,
&spans, &spans,
fake_offset, fake_offset,
new_span, new_span,
@ -404,8 +391,6 @@ impl NuCompleter {
} }
} }
} }
}
}
vec![] vec![]
} }
@ -414,6 +399,9 @@ impl NuCompleter {
impl ReedlineCompleter for NuCompleter { impl ReedlineCompleter for NuCompleter {
fn complete(&mut self, line: &str, pos: usize) -> Vec<Suggestion> { fn complete(&mut self, line: &str, pos: usize) -> Vec<Suggestion> {
self.completion_helper(line, pos) self.completion_helper(line, pos)
.into_iter()
.map(|s| s.suggestion)
.collect()
} }
} }
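A sketch of the difference between the two entry points above (the caller is hypothetical, e.g. an external integration): `fetch_completions_at` keeps the semantic kind, while the reedline `Completer` impl flattens results back to plain `Suggestion`s.

fn show_kinds(completer: &mut NuCompleter, line: &str, pos: usize) {
    for s in completer.fetch_completions_at(line, pos) {
        // s.suggestion is the plain reedline Suggestion; s.kind is the new metadata
        println!("{:<24} {:?}", s.suggestion.value, s.kind);
    }
}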
@ -471,11 +459,12 @@ pub fn map_value_completions<'a>(
list: impl Iterator<Item = &'a Value>, list: impl Iterator<Item = &'a Value>,
span: Span, span: Span,
offset: usize, offset: usize,
) -> Vec<Suggestion> { ) -> Vec<SemanticSuggestion> {
list.filter_map(move |x| { list.filter_map(move |x| {
// Match for string values // Match for string values
if let Ok(s) = x.coerce_string() { if let Ok(s) = x.coerce_string() {
return Some(Suggestion { return Some(SemanticSuggestion {
suggestion: Suggestion {
value: s, value: s,
description: None, description: None,
style: None, style: None,
@ -485,6 +474,8 @@ pub fn map_value_completions<'a>(
end: span.end - offset, end: span.end - offset,
}, },
append_whitespace: false, append_whitespace: false,
},
kind: Some(SuggestionKind::Type(x.get_type())),
}); });
} }
@ -533,7 +524,10 @@ pub fn map_value_completions<'a>(
} }
}); });
return Some(suggestion); return Some(SemanticSuggestion {
suggestion,
kind: Some(SuggestionKind::Type(x.get_type())),
});
} }
None None
@ -585,13 +579,13 @@ mod completer_tests {
// Test whether the result begins with the expected value // Test whether the result begins with the expected value
result result
.iter() .iter()
.for_each(|x| assert!(x.value.starts_with(begins_with))); .for_each(|x| assert!(x.suggestion.value.starts_with(begins_with)));
// Test whether the result contains all the expected values // Test whether the result contains all the expected values
assert_eq!( assert_eq!(
result result
.iter() .iter()
.map(|x| expected_values.contains(&x.value.as_str())) .map(|x| expected_values.contains(&x.suggestion.value.as_str()))
.filter(|x| *x) .filter(|x| *x)
.count(), .count(),
expected_values.len(), expected_values.len(),

View File

@ -2,11 +2,15 @@ use crate::completions::{matches, CompletionOptions};
use nu_ansi_term::Style; use nu_ansi_term::Style;
use nu_engine::env_to_string; use nu_engine::env_to_string;
use nu_path::home_dir; use nu_path::home_dir;
use nu_protocol::engine::{EngineState, Stack}; use nu_protocol::{
use nu_protocol::{engine::StateWorkingSet, Span}; engine::{EngineState, Stack, StateWorkingSet},
Span,
};
use nu_utils::get_ls_colors; use nu_utils::get_ls_colors;
use std::ffi::OsStr; use std::{
use std::path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP}; ffi::OsStr,
path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP},
};
fn complete_rec( fn complete_rec(
partial: &[String], partial: &[String],

View File

@ -1,8 +1,7 @@
use std::fmt::Display;
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher}; use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use nu_parser::trim_quotes_str; use nu_parser::trim_quotes_str;
use nu_protocol::CompletionAlgorithm; use nu_protocol::CompletionAlgorithm;
use std::fmt::Display;
#[derive(Copy, Clone)] #[derive(Copy, Clone)]
pub enum SortBy { pub enum SortBy {
@ -96,7 +95,6 @@ impl std::error::Error for InvalidMatchAlgorithm {}
pub struct CompletionOptions { pub struct CompletionOptions {
pub case_sensitive: bool, pub case_sensitive: bool,
pub positional: bool, pub positional: bool,
pub sort_by: SortBy,
pub match_algorithm: MatchAlgorithm, pub match_algorithm: MatchAlgorithm,
} }
@ -105,7 +103,6 @@ impl Default for CompletionOptions {
Self { Self {
case_sensitive: true, case_sensitive: true,
positional: true, positional: true,
sort_by: SortBy::Ascending,
match_algorithm: MatchAlgorithm::Prefix, match_algorithm: MatchAlgorithm::Prefix,
} }
} }

View File

@ -1,16 +1,16 @@
use crate::completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy}; use crate::completions::{
completer::map_value_completions, Completer, CompletionOptions, MatchAlgorithm,
SemanticSuggestion, SortBy,
};
use nu_engine::eval_call; use nu_engine::eval_call;
use nu_protocol::{ use nu_protocol::{
ast::{Argument, Call, Expr, Expression}, ast::{Argument, Call, Expr, Expression},
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
PipelineData, Span, Type, Value, PipelineData, Span, Type, Value,
}; };
use nu_utils::IgnoreCaseExt; use nu_utils::IgnoreCaseExt;
use reedline::Suggestion; use std::{collections::HashMap, sync::Arc};
use std::collections::HashMap;
use std::sync::Arc;
use super::completer::map_value_completions;
pub struct CustomCompletion { pub struct CustomCompletion {
engine_state: Arc<EngineState>, engine_state: Arc<EngineState>,
@ -24,7 +24,7 @@ impl CustomCompletion {
pub fn new(engine_state: Arc<EngineState>, stack: Stack, decl_id: usize, line: String) -> Self { pub fn new(engine_state: Arc<EngineState>, stack: Stack, decl_id: usize, line: String) -> Self {
Self { Self {
engine_state, engine_state,
stack, stack: stack.reset_out_dest().capture(),
decl_id, decl_id,
line, line,
sort_by: SortBy::None, sort_by: SortBy::None,
@ -41,12 +41,12 @@ impl Completer for CustomCompletion {
offset: usize, offset: usize,
pos: usize, pos: usize,
completion_options: &CompletionOptions, completion_options: &CompletionOptions,
) -> Vec<Suggestion> { ) -> Vec<SemanticSuggestion> {
// Line position // Line position
let line_pos = pos - offset; let line_pos = pos - offset;
// Call custom declaration // Call custom declaration
let result = eval_call( let result = eval_call::<WithoutDebug>(
&self.engine_state, &self.engine_state,
&mut self.stack, &mut self.stack,
&Call { &Call {
@ -66,8 +66,6 @@ impl Completer for CustomCompletion {
custom_completion: None, custom_completion: None,
}), }),
], ],
redirect_stdout: true,
redirect_stderr: true,
parser_info: HashMap::new(), parser_info: HashMap::new(),
}, },
PipelineData::empty(), PipelineData::empty(),
@ -110,11 +108,6 @@ impl Completer for CustomCompletion {
.get("positional") .get("positional")
.and_then(|val| val.as_bool().ok()) .and_then(|val| val.as_bool().ok())
.unwrap_or(true), .unwrap_or(true),
sort_by: if should_sort {
SortBy::Ascending
} else {
SortBy::None
},
match_algorithm: match options.get("completion_algorithm") { match_algorithm: match options.get("completion_algorithm") {
Some(option) => option Some(option) => option
.coerce_string() .coerce_string()
@ -146,15 +139,22 @@ impl Completer for CustomCompletion {
} }
} }
fn filter(prefix: &[u8], items: Vec<Suggestion>, options: &CompletionOptions) -> Vec<Suggestion> { fn filter(
prefix: &[u8],
items: Vec<SemanticSuggestion>,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
items items
.into_iter() .into_iter()
.filter(|it| match options.match_algorithm { .filter(|it| match options.match_algorithm {
MatchAlgorithm::Prefix => match (options.case_sensitive, options.positional) { MatchAlgorithm::Prefix => match (options.case_sensitive, options.positional) {
(true, true) => it.value.as_bytes().starts_with(prefix), (true, true) => it.suggestion.value.as_bytes().starts_with(prefix),
(true, false) => it.value.contains(std::str::from_utf8(prefix).unwrap_or("")), (true, false) => it
.suggestion
.value
.contains(std::str::from_utf8(prefix).unwrap_or("")),
(false, positional) => { (false, positional) => {
let value = it.value.to_folded_case(); let value = it.suggestion.value.to_folded_case();
let prefix = std::str::from_utf8(prefix).unwrap_or("").to_folded_case(); let prefix = std::str::from_utf8(prefix).unwrap_or("").to_folded_case();
if positional { if positional {
value.starts_with(&prefix) value.starts_with(&prefix)
@ -165,7 +165,7 @@ fn filter(prefix: &[u8], items: Vec<Suggestion>, options: &CompletionOptions) ->
}, },
MatchAlgorithm::Fuzzy => options MatchAlgorithm::Fuzzy => options
.match_algorithm .match_algorithm
.matches_u8(it.value.as_bytes(), prefix), .matches_u8(it.suggestion.value.as_bytes(), prefix),
}) })
.collect() .collect()
} }
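For context on the `match_algorithm` branch in `filter` above, a small sketch of the two strategies (the `use` path assumes the re-export from the completions module; fuzzy matching is backed by SkimMatcherV2):

use crate::completions::MatchAlgorithm;

fn demo_matching() {
    // Prefix: the candidate must start with the typed bytes.
    assert!(MatchAlgorithm::Prefix.matches_u8(b"ls -la", b"ls"));
    assert!(!MatchAlgorithm::Prefix.matches_u8(b"cargo build", b"cb"));
    // Fuzzy: a subsequence match is enough, so "cb" hits "cargo build".
    assert!(MatchAlgorithm::Fuzzy.matches_u8(b"cargo build", b"cb"));
}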

View File

@ -8,8 +8,12 @@ use nu_protocol::{
levenshtein_distance, Span, levenshtein_distance, Span,
}; };
use reedline::Suggestion; use reedline::Suggestion;
use std::path::{Path, MAIN_SEPARATOR as SEP}; use std::{
use std::sync::Arc; path::{Path, MAIN_SEPARATOR as SEP},
sync::Arc,
};
use super::SemanticSuggestion;
#[derive(Clone)] #[derive(Clone)]
pub struct DirectoryCompletion { pub struct DirectoryCompletion {
@ -35,7 +39,7 @@ impl Completer for DirectoryCompletion {
offset: usize, offset: usize,
_: usize, _: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<Suggestion> { ) -> Vec<SemanticSuggestion> {
let AdjustView { prefix, span, .. } = adjust_if_intermediate(&prefix, working_set, span); let AdjustView { prefix, span, .. } = adjust_if_intermediate(&prefix, working_set, span);
// Filter only the folders // Filter only the folders
@ -48,7 +52,8 @@ impl Completer for DirectoryCompletion {
&self.stack, &self.stack,
) )
.into_iter() .into_iter()
.map(move |x| Suggestion { .map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: x.1, value: x.1,
description: None, description: None,
style: x.2, style: x.2,
@ -58,6 +63,9 @@ impl Completer for DirectoryCompletion {
end: x.0.end - offset, end: x.0.end - offset,
}, },
append_whitespace: false, append_whitespace: false,
},
// TODO????
kind: None,
}) })
.collect(); .collect();
@ -65,7 +73,7 @@ impl Completer for DirectoryCompletion {
} }
// Sort results prioritizing the non hidden folders // Sort results prioritizing the non hidden folders
fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> { fn sort(&self, items: Vec<SemanticSuggestion>, prefix: Vec<u8>) -> Vec<SemanticSuggestion> {
let prefix_str = String::from_utf8_lossy(&prefix).to_string(); let prefix_str = String::from_utf8_lossy(&prefix).to_string();
// Sort items // Sort items
@ -75,15 +83,16 @@ impl Completer for DirectoryCompletion {
SortBy::Ascending => { SortBy::Ascending => {
sorted_items.sort_by(|a, b| { sorted_items.sort_by(|a, b| {
// Ignore trailing slashes in folder names when sorting // Ignore trailing slashes in folder names when sorting
a.value a.suggestion
.value
.trim_end_matches(SEP) .trim_end_matches(SEP)
.cmp(b.value.trim_end_matches(SEP)) .cmp(b.suggestion.value.trim_end_matches(SEP))
}); });
} }
SortBy::LevenshteinDistance => { SortBy::LevenshteinDistance => {
sorted_items.sort_by(|a, b| { sorted_items.sort_by(|a, b| {
let a_distance = levenshtein_distance(&prefix_str, &a.value); let a_distance = levenshtein_distance(&prefix_str, &a.suggestion.value);
let b_distance = levenshtein_distance(&prefix_str, &b.value); let b_distance = levenshtein_distance(&prefix_str, &b.suggestion.value);
a_distance.cmp(&b_distance) a_distance.cmp(&b_distance)
}); });
} }
@ -91,11 +100,11 @@ impl Completer for DirectoryCompletion {
} }
// Separate the results between hidden and non hidden // Separate the results between hidden and non hidden
let mut hidden: Vec<Suggestion> = vec![]; let mut hidden: Vec<SemanticSuggestion> = vec![];
let mut non_hidden: Vec<Suggestion> = vec![]; let mut non_hidden: Vec<SemanticSuggestion> = vec![];
for item in sorted_items.into_iter() { for item in sorted_items.into_iter() {
let item_path = Path::new(&item.value); let item_path = Path::new(&item.suggestion.value);
if let Some(value) = item_path.file_name() { if let Some(value) = item_path.file_name() {
if let Some(value) = value.to_str() { if let Some(value) = value.to_str() {

View File

@ -9,6 +9,8 @@ use std::{
sync::Arc, sync::Arc,
}; };
use super::SemanticSuggestion;
#[derive(Clone)] #[derive(Clone)]
pub struct DotNuCompletion { pub struct DotNuCompletion {
engine_state: Arc<EngineState>, engine_state: Arc<EngineState>,
@ -33,7 +35,7 @@ impl Completer for DotNuCompletion {
offset: usize, offset: usize,
_: usize, _: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<Suggestion> { ) -> Vec<SemanticSuggestion> {
let prefix_str = String::from_utf8_lossy(&prefix).replace('`', ""); let prefix_str = String::from_utf8_lossy(&prefix).replace('`', "");
let mut search_dirs: Vec<String> = vec![]; let mut search_dirs: Vec<String> = vec![];
@ -93,7 +95,7 @@ impl Completer for DotNuCompletion {
// Fetch the files filtering the ones that ends with .nu // Fetch the files filtering the ones that ends with .nu
// and transform them into suggestions // and transform them into suggestions
let output: Vec<Suggestion> = search_dirs let output: Vec<SemanticSuggestion> = search_dirs
.into_iter() .into_iter()
.flat_map(|search_dir| { .flat_map(|search_dir| {
let completions = file_path_completion( let completions = file_path_completion(
@ -119,7 +121,8 @@ impl Completer for DotNuCompletion {
} }
} }
}) })
.map(move |x| Suggestion { .map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: x.1, value: x.1,
description: None, description: None,
style: x.2, style: x.2,
@ -129,6 +132,9 @@ impl Completer for DotNuCompletion {
end: x.0.end - offset, end: x.0.end - offset,
}, },
append_whitespace: true, append_whitespace: true,
},
// TODO????
kind: None,
}) })
}) })
.collect(); .collect();

View File

@ -9,8 +9,12 @@ use nu_protocol::{
}; };
use nu_utils::IgnoreCaseExt; use nu_utils::IgnoreCaseExt;
use reedline::Suggestion; use reedline::Suggestion;
use std::path::{Path, MAIN_SEPARATOR as SEP}; use std::{
use std::sync::Arc; path::{Path, MAIN_SEPARATOR as SEP},
sync::Arc,
};
use super::SemanticSuggestion;
#[derive(Clone)] #[derive(Clone)]
pub struct FileCompletion { pub struct FileCompletion {
@ -36,7 +40,7 @@ impl Completer for FileCompletion {
offset: usize, offset: usize,
_: usize, _: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<Suggestion> { ) -> Vec<SemanticSuggestion> {
let AdjustView { let AdjustView {
prefix, prefix,
span, span,
@ -53,7 +57,8 @@ impl Completer for FileCompletion {
&self.stack, &self.stack,
) )
.into_iter() .into_iter()
.map(move |x| Suggestion { .map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: x.1, value: x.1,
description: None, description: None,
style: x.2, style: x.2,
@ -63,6 +68,9 @@ impl Completer for FileCompletion {
end: x.0.end - offset, end: x.0.end - offset,
}, },
append_whitespace: false, append_whitespace: false,
},
// TODO????
kind: None,
}) })
.collect(); .collect();
@ -70,7 +78,7 @@ impl Completer for FileCompletion {
} }
// Sort results prioritizing the non hidden folders // Sort results prioritizing the non hidden folders
fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> { fn sort(&self, items: Vec<SemanticSuggestion>, prefix: Vec<u8>) -> Vec<SemanticSuggestion> {
let prefix_str = String::from_utf8_lossy(&prefix).to_string(); let prefix_str = String::from_utf8_lossy(&prefix).to_string();
// Sort items // Sort items
@ -80,15 +88,16 @@ impl Completer for FileCompletion {
SortBy::Ascending => { SortBy::Ascending => {
sorted_items.sort_by(|a, b| { sorted_items.sort_by(|a, b| {
// Ignore trailing slashes in folder names when sorting // Ignore trailing slashes in folder names when sorting
a.value a.suggestion
.value
.trim_end_matches(SEP) .trim_end_matches(SEP)
.cmp(b.value.trim_end_matches(SEP)) .cmp(b.suggestion.value.trim_end_matches(SEP))
}); });
} }
SortBy::LevenshteinDistance => { SortBy::LevenshteinDistance => {
sorted_items.sort_by(|a, b| { sorted_items.sort_by(|a, b| {
let a_distance = levenshtein_distance(&prefix_str, &a.value); let a_distance = levenshtein_distance(&prefix_str, &a.suggestion.value);
let b_distance = levenshtein_distance(&prefix_str, &b.value); let b_distance = levenshtein_distance(&prefix_str, &b.suggestion.value);
a_distance.cmp(&b_distance) a_distance.cmp(&b_distance)
}); });
} }
@ -96,11 +105,11 @@ impl Completer for FileCompletion {
} }
// Separate the results between hidden and non hidden // Separate the results between hidden and non hidden
let mut hidden: Vec<Suggestion> = vec![]; let mut hidden: Vec<SemanticSuggestion> = vec![];
let mut non_hidden: Vec<Suggestion> = vec![]; let mut non_hidden: Vec<SemanticSuggestion> = vec![];
for item in sorted_items.into_iter() { for item in sorted_items.into_iter() {
let item_path = Path::new(&item.value); let item_path = Path::new(&item.suggestion.value);
if let Some(value) = item_path.file_name() { if let Some(value) = item_path.file_name() {
if let Some(value) = value.to_str() { if let Some(value) = value.to_str() {

View File

@ -4,9 +4,10 @@ use nu_protocol::{
engine::StateWorkingSet, engine::StateWorkingSet,
Span, Span,
}; };
use reedline::Suggestion; use reedline::Suggestion;
use super::SemanticSuggestion;
#[derive(Clone)] #[derive(Clone)]
pub struct FlagCompletion { pub struct FlagCompletion {
expression: Expression, expression: Expression,
@ -27,7 +28,7 @@ impl Completer for FlagCompletion {
offset: usize, offset: usize,
_: usize, _: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<Suggestion> { ) -> Vec<SemanticSuggestion> {
// Check if it's a flag // Check if it's a flag
if let Expr::Call(call) = &self.expression.expr { if let Expr::Call(call) = &self.expression.expr {
let decl = working_set.get_decl(call.decl_id); let decl = working_set.get_decl(call.decl_id);
@ -43,7 +44,8 @@ impl Completer for FlagCompletion {
named.insert(0, b'-'); named.insert(0, b'-');
if options.match_algorithm.matches_u8(&named, &prefix) { if options.match_algorithm.matches_u8(&named, &prefix) {
output.push(Suggestion { output.push(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&named).to_string(), value: String::from_utf8_lossy(&named).to_string(),
description: Some(flag_desc.to_string()), description: Some(flag_desc.to_string()),
style: None, style: None,
@ -53,6 +55,9 @@ impl Completer for FlagCompletion {
end: span.end - offset, end: span.end - offset,
}, },
append_whitespace: true, append_whitespace: true,
},
// TODO????
kind: None,
}); });
} }
} }
@ -66,7 +71,8 @@ impl Completer for FlagCompletion {
named.insert(0, b'-'); named.insert(0, b'-');
if options.match_algorithm.matches_u8(&named, &prefix) { if options.match_algorithm.matches_u8(&named, &prefix) {
output.push(Suggestion { output.push(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&named).to_string(), value: String::from_utf8_lossy(&named).to_string(),
description: Some(flag_desc.to_string()), description: Some(flag_desc.to_string()),
style: None, style: None,
@ -76,6 +82,9 @@ impl Completer for FlagCompletion {
end: span.end - offset, end: span.end - offset,
}, },
append_whitespace: true, append_whitespace: true,
},
// TODO????
kind: None,
}); });
} }
} }

View File

@ -10,7 +10,7 @@ mod file_completions;
mod flag_completions; mod flag_completions;
mod variable_completions; mod variable_completions;
pub use base::Completer; pub use base::{Completer, SemanticSuggestion, SuggestionKind};
pub use command_completions::CommandCompletion; pub use command_completions::CommandCompletion;
pub use completer::NuCompleter; pub use completer::NuCompleter;
pub use completion_options::{CompletionOptions, MatchAlgorithm, SortBy}; pub use completion_options::{CompletionOptions, MatchAlgorithm, SortBy};

View File

@ -1,15 +1,13 @@
use crate::completions::{Completer, CompletionOptions}; use crate::completions::{
Completer, CompletionOptions, MatchAlgorithm, SemanticSuggestion, SuggestionKind,
};
use nu_engine::{column::get_columns, eval_variable}; use nu_engine::{column::get_columns, eval_variable};
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
Span, Value, Span, Value,
}; };
use reedline::Suggestion; use reedline::Suggestion;
use std::str; use std::{str, sync::Arc};
use std::sync::Arc;
use super::MatchAlgorithm;
#[derive(Clone)] #[derive(Clone)]
pub struct VariableCompletion { pub struct VariableCompletion {
@ -41,7 +39,7 @@ impl Completer for VariableCompletion {
offset: usize, offset: usize,
_: usize, _: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<Suggestion> { ) -> Vec<SemanticSuggestion> {
let mut output = vec![]; let mut output = vec![];
let builtins = ["$nu", "$in", "$env"]; let builtins = ["$nu", "$in", "$env"];
let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or(""); let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or("");
@ -70,12 +68,10 @@ impl Completer for VariableCompletion {
self.var_context.1.clone().into_iter().skip(1).collect(); self.var_context.1.clone().into_iter().skip(1).collect();
if let Some(val) = env_vars.get(&target_var_str) { if let Some(val) = env_vars.get(&target_var_str) {
for suggestion in for suggestion in nested_suggestions(val, &nested_levels, current_span) {
nested_suggestions(val.clone(), nested_levels, current_span)
{
if options.match_algorithm.matches_u8_insensitive( if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive, options.case_sensitive,
suggestion.value.as_bytes(), suggestion.suggestion.value.as_bytes(),
&prefix, &prefix,
) { ) {
output.push(suggestion); output.push(suggestion);
@ -92,13 +88,16 @@ impl Completer for VariableCompletion {
env_var.0.as_bytes(), env_var.0.as_bytes(),
&prefix, &prefix,
) { ) {
output.push(Suggestion { output.push(SemanticSuggestion {
suggestion: Suggestion {
value: env_var.0, value: env_var.0,
description: None, description: None,
style: None, style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
kind: Some(SuggestionKind::Type(env_var.1.get_type())),
}); });
} }
} }
@ -116,12 +115,11 @@ impl Completer for VariableCompletion {
nu_protocol::NU_VARIABLE_ID, nu_protocol::NU_VARIABLE_ID,
nu_protocol::Span::new(current_span.start, current_span.end), nu_protocol::Span::new(current_span.start, current_span.end),
) { ) {
for suggestion in for suggestion in nested_suggestions(&nuval, &self.var_context.1, current_span)
nested_suggestions(nuval, self.var_context.1.clone(), current_span)
{ {
if options.match_algorithm.matches_u8_insensitive( if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive, options.case_sensitive,
suggestion.value.as_bytes(), suggestion.suggestion.value.as_bytes(),
&prefix, &prefix,
) { ) {
output.push(suggestion); output.push(suggestion);
@ -139,12 +137,11 @@ impl Completer for VariableCompletion {
// If the value exists and it's of type Record // If the value exists and it's of type Record
if let Ok(value) = var { if let Ok(value) = var {
for suggestion in for suggestion in nested_suggestions(&value, &self.var_context.1, current_span)
nested_suggestions(value, self.var_context.1.clone(), current_span)
{ {
if options.match_algorithm.matches_u8_insensitive( if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive, options.case_sensitive,
suggestion.value.as_bytes(), suggestion.suggestion.value.as_bytes(),
&prefix, &prefix,
) { ) {
output.push(suggestion); output.push(suggestion);
@ -163,13 +160,17 @@ impl Completer for VariableCompletion {
builtin.as_bytes(), builtin.as_bytes(),
&prefix, &prefix,
) { ) {
output.push(Suggestion { output.push(SemanticSuggestion {
suggestion: Suggestion {
value: builtin.to_string(), value: builtin.to_string(),
description: None, description: None,
style: None, style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
// TODO is there a way to get the VarId to get the type???
kind: None,
}); });
} }
} }
@ -186,13 +187,18 @@ impl Completer for VariableCompletion {
v.0, v.0,
&prefix, &prefix,
) { ) {
output.push(Suggestion { output.push(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(v.0).to_string(), value: String::from_utf8_lossy(v.0).to_string(),
description: None, description: None,
style: None, style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
kind: Some(SuggestionKind::Type(
working_set.get_variable(*v.1).ty.clone(),
)),
}); });
} }
} }
@ -208,13 +214,18 @@ impl Completer for VariableCompletion {
v.0, v.0,
&prefix, &prefix,
) { ) {
output.push(Suggestion { output.push(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(v.0).to_string(), value: String::from_utf8_lossy(v.0).to_string(),
description: None, description: None,
style: None, style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
kind: Some(SuggestionKind::Type(
working_set.get_variable(*v.1).ty.clone(),
)),
}); });
} }
} }
@ -229,24 +240,28 @@ impl Completer for VariableCompletion {
// Find recursively the values for sublevels // Find recursively the values for sublevels
// if no sublevels are set it returns the current value // if no sublevels are set it returns the current value
fn nested_suggestions( fn nested_suggestions(
val: Value, val: &Value,
sublevels: Vec<Vec<u8>>, sublevels: &[Vec<u8>],
current_span: reedline::Span, current_span: reedline::Span,
) -> Vec<Suggestion> { ) -> Vec<SemanticSuggestion> {
let mut output: Vec<Suggestion> = vec![]; let mut output: Vec<SemanticSuggestion> = vec![];
let value = recursive_value(val, sublevels); let value = recursive_value(val, sublevels).unwrap_or_else(Value::nothing);
let kind = SuggestionKind::Type(value.get_type());
match value { match value {
Value::Record { val, .. } => { Value::Record { val, .. } => {
// Add all the columns as completion // Add all the columns as completion
for (col, _) in val.into_iter() { for col in val.columns() {
output.push(Suggestion { output.push(SemanticSuggestion {
value: col, suggestion: Suggestion {
value: col.clone(),
description: None, description: None,
style: None, style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
kind: Some(kind.clone()),
}); });
} }
@ -255,13 +270,16 @@ fn nested_suggestions(
Value::LazyRecord { val, .. } => { Value::LazyRecord { val, .. } => {
// Add all the columns as completion // Add all the columns as completion
for column_name in val.column_names() { for column_name in val.column_names() {
output.push(Suggestion { output.push(SemanticSuggestion {
suggestion: Suggestion {
value: column_name.to_string(), value: column_name.to_string(),
description: None, description: None,
style: None, style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
kind: Some(kind.clone()),
}); });
} }
@ -269,13 +287,16 @@ fn nested_suggestions(
} }
Value::List { vals, .. } => { Value::List { vals, .. } => {
for column_name in get_columns(vals.as_slice()) { for column_name in get_columns(vals.as_slice()) {
output.push(Suggestion { output.push(SemanticSuggestion {
suggestion: Suggestion {
value: column_name, value: column_name,
description: None, description: None,
style: None, style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
kind: Some(kind.clone()),
}); });
} }
@ -286,56 +307,47 @@ fn nested_suggestions(
} }
// Extracts the recursive value (e.g: $var.a.b.c) // Extracts the recursive value (e.g: $var.a.b.c)
fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value { fn recursive_value(val: &Value, sublevels: &[Vec<u8>]) -> Result<Value, Span> {
// Go to next sublevel // Go to next sublevel
if let Some(next_sublevel) = sublevels.clone().into_iter().next() { if let Some((sublevel, next_sublevels)) = sublevels.split_first() {
let span = val.span(); let span = val.span();
match val { match val {
Value::Record { val, .. } => { Value::Record { val, .. } => {
for item in val { if let Some((_, value)) = val.iter().find(|(key, _)| key.as_bytes() == sublevel) {
// Check if index matches with sublevel
if item.0.as_bytes().to_vec() == next_sublevel {
// If it matches, try to fetch the next level recursively // If it matches, try to fetch the next level recursively
return recursive_value(item.1, sublevels.into_iter().skip(1).collect()); recursive_value(value, next_sublevels)
} } else {
}
// Current sublevel value not found // Current sublevel value not found
return Value::nothing(span); Err(span)
}
} }
Value::LazyRecord { val, .. } => { Value::LazyRecord { val, .. } => {
for col in val.column_names() { for col in val.column_names() {
if col.as_bytes().to_vec() == next_sublevel { if col.as_bytes() == *sublevel {
return recursive_value( let val = val.get_column_value(col).map_err(|_| span)?;
val.get_column_value(col).unwrap_or_default(), return recursive_value(&val, next_sublevels);
sublevels.into_iter().skip(1).collect(),
);
} }
} }
// Current sublevel value not found // Current sublevel value not found
return Value::nothing(span); Err(span)
} }
Value::List { vals, .. } => { Value::List { vals, .. } => {
for col in get_columns(vals.as_slice()) { for col in get_columns(vals.as_slice()) {
if col.as_bytes().to_vec() == next_sublevel { if col.as_bytes() == *sublevel {
return recursive_value( let val = val.get_data_by_key(&col).ok_or(span)?;
Value::list(vals, span) return recursive_value(&val, next_sublevels);
.get_data_by_key(&col)
.unwrap_or_default(),
sublevels.into_iter().skip(1).collect(),
);
} }
} }
// Current sublevel value not found // Current sublevel value not found
return Value::nothing(span); Err(span)
} }
_ => return val, _ => Ok(val.clone()),
} }
} else {
Ok(val.clone())
} }
val
} }
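A sketch of how the refactored `recursive_value` resolves a `$var.a.b` style path, returning the span of the failing level on a miss (the record built here is only an example, using nu_protocol's `record!` macro):

use nu_protocol::{record, Span, Value};

fn demo_recursive_value() {
    let span = Span::unknown();
    let val = Value::record(
        record! {
            "config" => Value::record(
                record! { "history" => Value::int(100, span) },
                span,
            ),
        },
        span,
    );
    let sublevels: Vec<Vec<u8>> = vec![b"config".to_vec(), b"history".to_vec()];
    match recursive_value(&val, &sublevels) {
        Ok(v) => println!("resolved to a {}", v.get_type()), // "int"
        Err(missing_span) => eprintln!("no such column at {missing_span:?}"),
    }
}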
impl MatchAlgorithm { impl MatchAlgorithm {

View File

@ -1,17 +1,20 @@
use crate::util::eval_source; use crate::util::eval_source;
#[cfg(feature = "plugin")] #[cfg(feature = "plugin")]
use nu_path::canonicalize_with; use nu_path::canonicalize_with;
use nu_protocol::engine::{EngineState, Stack, StateWorkingSet}; use nu_protocol::{
use nu_protocol::report_error; engine::{EngineState, Stack, StateWorkingSet},
use nu_protocol::{HistoryFileFormat, PipelineData}; report_error, HistoryFileFormat, PipelineData,
};
#[cfg(feature = "plugin")] #[cfg(feature = "plugin")]
use nu_protocol::{ParseError, Spanned}; use nu_protocol::{ParseError, PluginRegistryFile, Spanned};
#[cfg(feature = "plugin")] #[cfg(feature = "plugin")]
use nu_utils::utils::perf; use nu_utils::utils::perf;
use std::path::PathBuf; use std::path::PathBuf;
#[cfg(feature = "plugin")] #[cfg(feature = "plugin")]
const PLUGIN_FILE: &str = "plugin.nu"; const PLUGIN_FILE: &str = "plugin.msgpackz";
#[cfg(feature = "plugin")]
const OLD_PLUGIN_FILE: &str = "plugin.nu";
const HISTORY_FILE_TXT: &str = "history.txt"; const HISTORY_FILE_TXT: &str = "history.txt";
const HISTORY_FILE_SQLITE: &str = "history.sqlite3"; const HISTORY_FILE_SQLITE: &str = "history.sqlite3";
@ -19,40 +22,150 @@ const HISTORY_FILE_SQLITE: &str = "history.sqlite3";
#[cfg(feature = "plugin")] #[cfg(feature = "plugin")]
pub fn read_plugin_file( pub fn read_plugin_file(
engine_state: &mut EngineState, engine_state: &mut EngineState,
stack: &mut Stack,
plugin_file: Option<Spanned<String>>, plugin_file: Option<Spanned<String>>,
storage_path: &str, storage_path: &str,
) { ) {
let start_time = std::time::Instant::now(); use std::path::Path;
let mut plug_path = String::new();
// Reading signatures from signature file
// The plugin.nu file stores the parsed signature collected from each registered plugin
add_plugin_file(engine_state, plugin_file, storage_path);
let plugin_path = engine_state.plugin_signatures.clone(); use nu_protocol::{report_error_new, ShellError};
if let Some(plugin_path) = plugin_path {
let plugin_filename = plugin_path.to_string_lossy(); let span = plugin_file.as_ref().map(|s| s.span);
plug_path = plugin_filename.to_string();
if let Ok(contents) = std::fs::read(&plugin_path) { // Check and warn + abort if this is a .nu plugin file
eval_source( if plugin_file
.as_ref()
.and_then(|p| Path::new(&p.item).extension())
.is_some_and(|ext| ext == "nu")
{
report_error_new(
engine_state, engine_state,
stack, &ShellError::GenericError {
&contents, error: "Wrong plugin file format".into(),
&plugin_filename, msg: ".nu plugin files are no longer supported".into(),
PipelineData::empty(), span,
false, help: Some("please recreate this file in the new .msgpackz format".into()),
inner: vec![],
},
); );
} return;
} }
let mut start_time = std::time::Instant::now();
// Reading signatures from plugin registry file
// The plugin.msgpackz file stores the parsed signature collected from each registered plugin
add_plugin_file(engine_state, plugin_file.clone(), storage_path);
perf( perf(
&format!("read_plugin_file {}", &plug_path), "add plugin file to engine_state",
start_time, start_time,
file!(), file!(),
line!(), line!(),
column!(), column!(),
engine_state.get_config().use_ansi_coloring, engine_state.get_config().use_ansi_coloring,
); );
start_time = std::time::Instant::now();
let plugin_path = engine_state.plugin_path.clone();
if let Some(plugin_path) = plugin_path {
// Open the plugin file
let mut file = match std::fs::File::open(&plugin_path) {
Ok(file) => file,
Err(err) => {
if err.kind() == std::io::ErrorKind::NotFound {
log::warn!("Plugin file not found: {}", plugin_path.display());
// Try migration of an old plugin file if this wasn't a custom plugin file
if plugin_file.is_none() && migrate_old_plugin_file(engine_state, storage_path)
{
let Ok(file) = std::fs::File::open(&plugin_path) else {
log::warn!("Failed to load newly migrated plugin file");
return;
};
file
} else {
return;
}
} else {
report_error_new(
engine_state,
&ShellError::GenericError {
error: format!(
"Error while opening plugin registry file: {}",
plugin_path.display()
),
msg: "plugin path defined here".into(),
span,
help: None,
inner: vec![err.into()],
},
);
return;
}
}
};
// Abort if the file is empty.
if file.metadata().is_ok_and(|m| m.len() == 0) {
log::warn!(
"Not reading plugin file because it's empty: {}",
plugin_path.display()
);
return;
}
// Read the contents of the plugin file
let contents = match PluginRegistryFile::read_from(&mut file, span) {
Ok(contents) => contents,
Err(err) => {
log::warn!("Failed to read plugin registry file: {err:?}");
report_error_new(
engine_state,
&ShellError::GenericError {
error: format!(
"Error while reading plugin registry file: {}",
plugin_path.display()
),
msg: "plugin path defined here".into(),
span,
help: Some(
"you might try deleting the file and registering all of your \
plugins again"
.into(),
),
inner: vec![],
},
);
return;
}
};
perf(
&format!("read plugin file {}", plugin_path.display()),
start_time,
file!(),
line!(),
column!(),
engine_state.get_config().use_ansi_coloring,
);
start_time = std::time::Instant::now();
let mut working_set = StateWorkingSet::new(engine_state);
nu_plugin::load_plugin_file(&mut working_set, &contents, span);
if let Err(err) = engine_state.merge_delta(working_set.render()) {
report_error_new(engine_state, &err);
return;
}
perf(
&format!("load plugin file {}", plugin_path.display()),
start_time,
file!(),
line!(),
column!(),
engine_state.get_config().use_ansi_coloring,
);
}
} }
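A trimmed sketch of loading the new-format registry directly, mirroring the `PluginRegistryFile::read_from` call above (the standalone function and its error handling are illustrative only):

use nu_protocol::PluginRegistryFile;
use std::{fs::File, path::Path};

fn load_registry(path: &Path) -> Result<PluginRegistryFile, Box<dyn std::error::Error>> {
    let mut file = File::open(path)?;
    // None: no source span to attach parse errors to outside a parse context.
    let contents = PluginRegistryFile::read_from(&mut file, None)?;
    Ok(contents)
}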
#[cfg(feature = "plugin")] #[cfg(feature = "plugin")]
@ -61,21 +174,38 @@ pub fn add_plugin_file(
plugin_file: Option<Spanned<String>>, plugin_file: Option<Spanned<String>>,
storage_path: &str, storage_path: &str,
) { ) {
if let Some(plugin_file) = plugin_file { use std::path::Path;
let working_set = StateWorkingSet::new(engine_state); let working_set = StateWorkingSet::new(engine_state);
let cwd = working_set.get_cwd(); let cwd = working_set.get_cwd();
if let Ok(path) = canonicalize_with(&plugin_file.item, cwd) { if let Some(plugin_file) = plugin_file {
engine_state.plugin_signatures = Some(path) let path = Path::new(&plugin_file.item);
let path_dir = path.parent().unwrap_or(path);
// Just try to canonicalize the directory of the plugin file first.
if let Ok(path_dir) = canonicalize_with(path_dir, &cwd) {
// Try to canonicalize the actual filename, but it's ok if that fails. The file doesn't
// have to exist.
let path = path_dir.join(path.file_name().unwrap_or(path.as_os_str()));
let path = canonicalize_with(&path, &cwd).unwrap_or(path);
engine_state.plugin_path = Some(path)
} else { } else {
let e = ParseError::FileNotFound(plugin_file.item, plugin_file.span); // It's an error if the directory for the plugin file doesn't exist.
report_error(&working_set, &e); report_error(
&working_set,
&ParseError::FileNotFound(
path_dir.to_string_lossy().into_owned(),
plugin_file.span,
),
);
} }
} else if let Some(mut plugin_path) = nu_path::config_dir() { } else if let Some(mut plugin_path) = nu_path::config_dir() {
// Path to store plugin signatures // Path to store plugin signatures
plugin_path.push(storage_path); plugin_path.push(storage_path);
let mut plugin_path = canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path);
plugin_path.push(PLUGIN_FILE); plugin_path.push(PLUGIN_FILE);
engine_state.plugin_signatures = Some(plugin_path.clone()); let plugin_path = canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path);
engine_state.plugin_path = Some(plugin_path);
} }
} }
@ -88,6 +218,10 @@ pub fn eval_config_contents(
    let config_filename = config_path.to_string_lossy();

    if let Ok(contents) = std::fs::read(&config_path) {
+        // Set the current active file to the config file.
+        let prev_file = engine_state.file.take();
+        engine_state.file = Some(config_path.clone());
+
        eval_source(
            engine_state,
            stack,
@ -97,6 +231,9 @@ pub fn eval_config_contents(
            false,
        );

+        // Restore the current active file.
+        engine_state.file = prev_file;
+
        // Merge the environment in case env vars changed in the config
        match nu_engine::env::current_dir(engine_state, stack) {
            Ok(cwd) => {
@ -124,3 +261,129 @@ pub(crate) fn get_history_path(storage_path: &str, mode: HistoryFileFormat) -> O
        history_path
    })
}
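The `engine_state.file` bookkeeping above follows a simple save/restore pattern: take the previous value out with `Option::take`, install the config file as the active one, and put the old value back after evaluation. A minimal sketch of that pattern with a stand-in state type (the field name and type here are illustrative, not nushell's actual definitions):

use std::path::PathBuf;

// Stand-in for the part of EngineState this sketch cares about.
struct State {
    file: Option<PathBuf>,
}

fn eval_with_active_file(state: &mut State, config_path: PathBuf) {
    // Save the previously active file and mark the config file as active.
    let prev_file = state.file.take();
    state.file = Some(config_path);

    // ... evaluate the config source here ...

    // Restore whatever was active before, even if it was `None`.
    state.file = prev_file;
}

fn main() {
    let mut state = State { file: None };
    eval_with_active_file(&mut state, PathBuf::from("config.nu"));
    assert!(state.file.is_none());
}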
#[cfg(feature = "plugin")]
pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -> bool {
use nu_protocol::{
report_error_new, PluginExample, PluginIdentity, PluginRegistryItem,
PluginRegistryItemData, PluginSignature, ShellError,
};
use std::collections::BTreeMap;
let start_time = std::time::Instant::now();
let cwd = engine_state.current_work_dir();
let Some(config_dir) = nu_path::config_dir().and_then(|mut dir| {
dir.push(storage_path);
nu_path::canonicalize_with(dir, &cwd).ok()
}) else {
return false;
};
let Ok(old_plugin_file_path) = nu_path::canonicalize_with(OLD_PLUGIN_FILE, &config_dir) else {
return false;
};
let old_contents = match std::fs::read(&old_plugin_file_path) {
Ok(old_contents) => old_contents,
Err(err) => {
report_error_new(
engine_state,
&ShellError::GenericError {
error: "Can't read old plugin file to migrate".into(),
msg: "".into(),
span: None,
help: Some(err.to_string()),
inner: vec![],
},
);
return false;
}
};
// Make a copy of the engine state, because we'll read the newly generated file
let mut engine_state = engine_state.clone();
let mut stack = Stack::new();
if !eval_source(
&mut engine_state,
&mut stack,
&old_contents,
&old_plugin_file_path.to_string_lossy(),
PipelineData::Empty,
false,
) {
return false;
}
// Now that the plugin commands are loaded, we just have to generate the file
let mut contents = PluginRegistryFile::new();
let mut groups = BTreeMap::<PluginIdentity, Vec<PluginSignature>>::new();
for decl in engine_state.plugin_decls() {
if let Some(identity) = decl.plugin_identity() {
groups
.entry(identity.clone())
.or_default()
.push(PluginSignature {
sig: decl.signature(),
examples: decl
.examples()
.into_iter()
.map(PluginExample::from)
.collect(),
})
}
}
for (identity, commands) in groups {
contents.upsert_plugin(PluginRegistryItem {
name: identity.name().to_owned(),
filename: identity.filename().to_owned(),
shell: identity.shell().map(|p| p.to_owned()),
data: PluginRegistryItemData::Valid { commands },
});
}
// Write the new file
let new_plugin_file_path = config_dir.join(PLUGIN_FILE);
if let Err(err) = std::fs::File::create(&new_plugin_file_path)
.map_err(|e| e.into())
.and_then(|file| contents.write_to(file, None))
{
report_error_new(
&engine_state,
&ShellError::GenericError {
error: "Failed to save migrated plugin file".into(),
msg: "".into(),
span: None,
help: Some("ensure `$nu.plugin-path` is writable".into()),
inner: vec![err],
},
);
return false;
}
if engine_state.is_interactive {
eprintln!(
"Your old plugin.nu file has been migrated to the new format: {}",
new_plugin_file_path.display()
);
eprintln!(
"The plugin.nu file has not been removed. If `plugin list` looks okay, \
you may do so manually."
);
}
perf(
"migrate old plugin file",
start_time,
file!(),
line!(),
column!(),
engine_state.get_config().use_ansi_coloring,
);
true
}
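The migration above groups every plugin-backed command under its owning plugin before writing the registry file, using `BTreeMap::entry().or_default()`. A minimal sketch of that grouping step with simplified stand-in types (the real code uses `PluginIdentity` and `PluginSignature` from `nu_protocol`; the names below are placeholders):

use std::collections::BTreeMap;

// Placeholder types standing in for PluginIdentity / PluginSignature.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
struct Identity {
    name: String,
}

struct Signature {
    command: String,
}

fn group_by_plugin(decls: Vec<(Identity, Signature)>) -> BTreeMap<Identity, Vec<Signature>> {
    let mut groups = BTreeMap::<Identity, Vec<Signature>>::new();
    for (identity, sig) in decls {
        // `entry().or_default()` creates the Vec the first time a plugin is seen.
        groups.entry(identity).or_default().push(sig);
    }
    groups
}

fn main() {
    let decls = vec![
        (Identity { name: "inc".into() }, Signature { command: "inc".into() }),
        (Identity { name: "gstat".into() }, Signature { command: "gstat".into() }),
        (Identity { name: "inc".into() }, Signature { command: "inc --major".into() }),
    ];
    let groups = group_by_plugin(decls);
    assert_eq!(groups.len(), 2);
    assert_eq!(groups[&Identity { name: "inc".into() }].len(), 2);
}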

View File

@ -2,11 +2,10 @@ use log::info;
use miette::Result;
use nu_engine::{convert_env_values, eval_block};
use nu_parser::parse;
-use nu_protocol::engine::Stack;
-use nu_protocol::report_error;
use nu_protocol::{
-    engine::{EngineState, StateWorkingSet},
-    PipelineData, Spanned, Value,
+    debugger::WithoutDebug,
+    engine::{EngineState, Stack, StateWorkingSet},
+    report_error, PipelineData, Spanned, Value,
};

/// Run a command (or commands) given to us by the user
@ -16,6 +15,7 @@ pub fn evaluate_commands(
    stack: &mut Stack,
    input: PipelineData,
    table_mode: Option<Value>,
+    no_newline: bool,
) -> Result<Option<i64>> {
    // Translate environment variables from Strings to Values
    if let Some(e) = convert_env_values(engine_state, stack) {
@ -55,13 +55,19 @@ pub fn evaluate_commands(
    }

    // Run the block
-    let exit_code = match eval_block(engine_state, stack, &block, input, false, false) {
+    let exit_code = match eval_block::<WithoutDebug>(engine_state, stack, &block, input) {
        Ok(pipeline_data) => {
            let mut config = engine_state.get_config().clone();
            if let Some(t_mode) = table_mode {
                config.table_mode = t_mode.coerce_str()?.parse().unwrap_or_default();
            }
-            crate::eval_file::print_table_or_error(engine_state, stack, pipeline_data, &mut config)
+            crate::eval_file::print_table_or_error(
+                engine_state,
+                stack,
+                pipeline_data,
+                &mut config,
+                no_newline,
+            )
        }
        Err(err) => {
            let working_set = StateWorkingSet::new(engine_state);
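The `table_mode` override above relies on `str::parse` plus `unwrap_or_default()`, so an unrecognized mode silently falls back to the default rather than failing the whole command. A minimal sketch of that parse-with-fallback idea using a stand-in enum (nushell's real table mode type has more variants and lives elsewhere):

use std::str::FromStr;

// Stand-in for the table mode setting; the real type has many more variants.
#[derive(Debug, Default, PartialEq)]
enum TableMode {
    #[default]
    Rounded,
    Basic,
    Light,
}

impl FromStr for TableMode {
    type Err = ();

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "rounded" => Ok(TableMode::Rounded),
            "basic" => Ok(TableMode::Basic),
            "light" => Ok(TableMode::Light),
            _ => Err(()),
        }
    }
}

fn main() {
    // A known mode parses normally...
    assert_eq!("basic".parse::<TableMode>().unwrap_or_default(), TableMode::Basic);
    // ...and an unknown one falls back to the default instead of erroring.
    assert_eq!("no-such-mode".parse::<TableMode>().unwrap_or_default(), TableMode::Rounded);
}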

View File

@ -1,20 +1,20 @@
use crate::util::eval_source; use crate::util::eval_source;
use log::info; use log::{info, trace};
use log::trace;
use miette::{IntoDiagnostic, Result}; use miette::{IntoDiagnostic, Result};
use nu_engine::eval_block; use nu_engine::{convert_env_values, current_dir, eval_block};
use nu_engine::{convert_env_values, current_dir};
use nu_parser::parse; use nu_parser::parse;
use nu_path::canonicalize_with; use nu_path::canonicalize_with;
use nu_protocol::report_error;
use nu_protocol::{ use nu_protocol::{
ast::Call, debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
Config, PipelineData, ShellError, Span, Value, report_error, Config, PipelineData, ShellError, Span, Value,
}; };
use nu_utils::stdout_write_all_and_flush; use std::{io::Write, sync::Arc};
/// Main function used when a file path is found as argument for nu /// Entry point for evaluating a file.
///
/// If the file contains a main command, it is invoked with `args` and the pipeline data from `input`;
/// otherwise, the pipeline data is forwarded to the first command in the file, and `args` are ignored.
pub fn evaluate_file( pub fn evaluate_file(
path: String, path: String,
args: &[String], args: &[String],
@ -22,7 +22,7 @@ pub fn evaluate_file(
stack: &mut Stack, stack: &mut Stack,
input: PipelineData, input: PipelineData,
) -> Result<()> { ) -> Result<()> {
// Translate environment variables from Strings to Values // Convert environment variables from Strings to Values and store them in the engine state.
if let Some(e) = convert_env_values(engine_state, stack) { if let Some(e) = convert_env_values(engine_state, stack) {
let working_set = StateWorkingSet::new(engine_state); let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &e); report_error(&working_set, &e);
@ -75,8 +75,7 @@ pub fn evaluate_file(
); );
std::process::exit(1); std::process::exit(1);
}); });
engine_state.file = Some(file_path.clone());
engine_state.start_in_file(Some(file_path_str));
let parent = file_path.parent().unwrap_or_else(|| { let parent = file_path.parent().unwrap_or_else(|| {
let working_set = StateWorkingSet::new(engine_state); let working_set = StateWorkingSet::new(engine_state);
@ -105,18 +104,20 @@ pub fn evaluate_file(
let source_filename = file_path let source_filename = file_path
.file_name() .file_name()
.expect("internal error: script missing filename"); .expect("internal error: missing filename");
let mut working_set = StateWorkingSet::new(engine_state); let mut working_set = StateWorkingSet::new(engine_state);
trace!("parsing file: {}", file_path_str); trace!("parsing file: {}", file_path_str);
let block = parse(&mut working_set, Some(file_path_str), &file, false); let block = parse(&mut working_set, Some(file_path_str), &file, false);
// If any parse errors were found, report the first error and exit.
if let Some(err) = working_set.parse_errors.first() { if let Some(err) = working_set.parse_errors.first() {
report_error(&working_set, err); report_error(&working_set, err);
std::process::exit(1); std::process::exit(1);
} }
for block in &mut working_set.delta.blocks { // Look for blocks whose name starts with "main" and replace it with the filename.
for block in working_set.delta.blocks.iter_mut().map(Arc::make_mut) {
if block.signature.name == "main" { if block.signature.name == "main" {
block.signature.name = source_filename.to_string_lossy().to_string(); block.signature.name = source_filename.to_string_lossy().to_string();
} else if block.signature.name.starts_with("main ") { } else if block.signature.name.starts_with("main ") {
@ -125,25 +126,21 @@ pub fn evaluate_file(
} }
} }
let _ = engine_state.merge_delta(working_set.delta); // Merge the changes into the engine state.
engine_state
.merge_delta(working_set.delta)
.expect("merging delta into engine_state should succeed");
// Check if the file contains a main command.
if engine_state.find_decl(b"main", &[]).is_some() { if engine_state.find_decl(b"main", &[]).is_some() {
let args = format!("main {}", args.join(" ")); // Evaluate the file, but don't run main yet.
let pipeline_data =
let pipeline_data = eval_block( eval_block::<WithoutDebug>(engine_state, stack, &block, PipelineData::empty());
engine_state,
stack,
&block,
PipelineData::empty(),
false,
false,
);
let pipeline_data = match pipeline_data { let pipeline_data = match pipeline_data {
Err(ShellError::Return { .. }) => { Err(ShellError::Return { .. }) => {
// allows early exists before `main` is run. // Allow early return before main is run.
return Ok(()); return Ok(());
} }
x => x, x => x,
} }
.unwrap_or_else(|e| { .unwrap_or_else(|e| {
@ -152,12 +149,12 @@ pub fn evaluate_file(
std::process::exit(1); std::process::exit(1);
}); });
// Print the pipeline output of the file.
// The pipeline output of a file is the pipeline output of its last command.
let result = pipeline_data.print(engine_state, stack, true, false); let result = pipeline_data.print(engine_state, stack, true, false);
match result { match result {
Err(err) => { Err(err) => {
let working_set = StateWorkingSet::new(engine_state); let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &err); report_error(&working_set, &err);
std::process::exit(1); std::process::exit(1);
} }
@ -168,6 +165,9 @@ pub fn evaluate_file(
} }
} }
// Invoke the main command with arguments.
// Arguments with whitespaces are quoted, thus can be safely concatenated by whitespace.
let args = format!("main {}", args.join(" "));
if !eval_source( if !eval_source(
engine_state, engine_state,
stack, stack,
@ -192,6 +192,7 @@ pub(crate) fn print_table_or_error(
stack: &mut Stack, stack: &mut Stack,
mut pipeline_data: PipelineData, mut pipeline_data: PipelineData,
config: &mut Config, config: &mut Config,
no_newline: bool,
) -> Option<i64> { ) -> Option<i64> {
let exit_code = match &mut pipeline_data { let exit_code = match &mut pipeline_data {
PipelineData::ExternalStream { exit_code, .. } => exit_code.take(), PipelineData::ExternalStream { exit_code, .. } => exit_code.take(),
@ -207,30 +208,8 @@ pub(crate) fn print_table_or_error(
std::process::exit(1); std::process::exit(1);
} }
if let Some(decl_id) = engine_state.find_decl("table".as_bytes(), &[]) { // We don't need to do anything special to print a table because print() handles it
let command = engine_state.get_decl(decl_id); print_or_exit(pipeline_data, engine_state, stack, no_newline);
if command.get_block_id().is_some() {
print_or_exit(pipeline_data, engine_state, config);
} else {
// The final call on table command, it's ok to set redirect_output to false.
let mut call = Call::new(Span::new(0, 0));
call.redirect_stdout = false;
let table = command.run(engine_state, stack, &call, pipeline_data);
match table {
Ok(table) => {
print_or_exit(table, engine_state, config);
}
Err(error) => {
let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &error);
std::process::exit(1);
}
}
}
} else {
print_or_exit(pipeline_data, engine_state, config);
}
// Make sure everything has finished // Make sure everything has finished
if let Some(exit_code) = exit_code { if let Some(exit_code) = exit_code {
@ -246,17 +225,21 @@ pub(crate) fn print_table_or_error(
} }
} }
fn print_or_exit(pipeline_data: PipelineData, engine_state: &mut EngineState, config: &Config) { fn print_or_exit(
for item in pipeline_data { pipeline_data: PipelineData,
if let Value::Error { error, .. } = item { engine_state: &EngineState,
stack: &mut Stack,
no_newline: bool,
) {
let result = pipeline_data.print(engine_state, stack, no_newline, false);
let _ = std::io::stdout().flush();
let _ = std::io::stderr().flush();
if let Err(error) = result {
let working_set = StateWorkingSet::new(engine_state); let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &error);
report_error(&working_set, &*error); let _ = std::io::stderr().flush();
std::process::exit(1); std::process::exit(1);
} }
let out = item.to_expanded_string("\n", config) + "\n";
let _ = stdout_write_all_and_flush(out).map_err(|err| eprintln!("{err}"));
}
} }
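The block-renaming loop in `evaluate_file` above uses `Arc::make_mut` to get a mutable handle on each parsed block, cloning the block only if it is actually shared (copy-on-write). A minimal sketch of that standard-library mechanism with a stand-in block type:

use std::sync::Arc;

#[derive(Clone)]
struct Block {
    name: String,
}

fn main() {
    let mut blocks = vec![Arc::new(Block { name: "main".into() })];

    // Keep a second handle to the first block so it is genuinely shared.
    let shared = blocks[0].clone();

    for block in blocks.iter_mut().map(Arc::make_mut) {
        // `Arc::make_mut` clones the inner value here because `shared` still
        // points at the original; an unshared Arc would be mutated in place.
        if block.name == "main" {
            block.name = "script.nu".into();
        }
    }

    assert_eq!(blocks[0].name, "script.nu");
    assert_eq!(shared.name, "main"); // the old handle is untouched
}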

View File

@ -15,7 +15,7 @@ mod util;
mod validation;

pub use commands::add_cli_context;
-pub use completions::{FileCompletion, NuCompleter};
+pub use completions::{FileCompletion, NuCompleter, SemanticSuggestion, SuggestionKind};
pub use config_files::eval_config_contents;
pub use eval_cmds::evaluate_commands;
pub use eval_file::evaluate_file;
@ -32,4 +32,6 @@ pub use validation::NuValidator;
#[cfg(feature = "plugin")]
pub use config_files::add_plugin_file;
#[cfg(feature = "plugin")]
+pub use config_files::migrate_old_plugin_file;
+#[cfg(feature = "plugin")]
pub use config_files::read_plugin_file;

View File

@ -2,8 +2,7 @@ use nu_engine::documentation::get_flags_section;
use nu_protocol::{engine::EngineState, levenshtein_distance};
use nu_utils::IgnoreCaseExt;
use reedline::{Completer, Suggestion};
-use std::fmt::Write;
-use std::sync::Arc;
+use std::{fmt::Write, sync::Arc};

pub struct NuHelpCompleter(Arc<EngineState>);

View File

@ -1,5 +1,6 @@
use nu_engine::eval_block;
use nu_protocol::{
+    debugger::WithoutDebug,
    engine::{EngineState, Stack},
    IntoPipelineData, Span, Value,
};
@ -27,7 +28,7 @@ impl NuMenuCompleter {
        Self {
            block_id,
            span,
-            stack,
+            stack: stack.reset_out_dest().capture(),
            engine_state,
            only_buffer_difference,
        }
@ -55,14 +56,8 @@ impl Completer for NuMenuCompleter {
        }

        let input = Value::nothing(self.span).into_pipeline_data();
-        let res = eval_block(
-            &self.engine_state,
-            &mut self.stack,
-            block,
-            input,
-            false,
-            false,
-        );
+
+        let res = eval_block::<WithoutDebug>(&self.engine_state, &mut self.stack, block, input);

        if let Ok(values) = res {
            let values = values.into_value(self.span);

View File

@ -1,7 +1,5 @@
-use nu_protocol::ast::Call;
-use nu_protocol::engine::{Command, EngineState, Stack};
-use nu_protocol::{Category, Example, PipelineData, ShellError, Signature, Type, Value};
-use reedline::Highlighter;
+use nu_engine::command_prelude::*;
+use reedline::{Highlighter, StyledText};

#[derive(Clone)]
pub struct NuHighlight;
@ -64,3 +62,16 @@ impl Command for NuHighlight {
        }]
    }
}
/// A highlighter that does nothing
///
/// Used to remove highlighting from a reedline instance
/// (letting NuHighlighter structs be dropped)
#[derive(Default)]
pub struct NoOpHighlighter {}
impl Highlighter for NoOpHighlighter {
fn highlight(&self, _line: &str, _cursor: usize) -> reedline::StyledText {
StyledText::new()
}
}
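Later in the REPL loop, a highlighter like this is swapped in after input has been read so that the previous highlighter, which captures an `Arc` of the stack, can be dropped. A minimal sketch of the same idea against plain reedline, assuming only the `reedline` crate (the stack-capturing `NuHighlighter` details are omitted, and the struct name below is a placeholder):

use reedline::{Highlighter, Reedline, StyledText};

/// A highlighter that produces no styling at all.
#[derive(Default)]
struct PlainHighlighter;

impl Highlighter for PlainHighlighter {
    fn highlight(&self, _line: &str, _cursor: usize) -> StyledText {
        StyledText::new()
    }
}

fn main() {
    // Installing the no-op highlighter replaces (and drops) whatever
    // highlighter was set before, releasing anything it captured.
    let _line_editor = Reedline::create().with_highlighter(Box::<PlainHighlighter>::default());
}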

View File

@ -1,10 +1,4 @@
-use nu_engine::CallExt;
-use nu_protocol::ast::Call;
-use nu_protocol::engine::{Command, EngineState, Stack};
-use nu_protocol::{
-    Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Type,
-    Value,
-};
+use nu_engine::command_prelude::*;

#[derive(Clone)]
pub struct Print;

View File

@ -1,13 +1,11 @@
use crate::prompt_update::{POST_PROMPT_MARKER, PRE_PROMPT_MARKER};
#[cfg(windows)]
use nu_utils::enable_vt_processing;
-use reedline::DefaultPrompt;
-use {
-    reedline::{
-        Prompt, PromptEditMode, PromptHistorySearch, PromptHistorySearchStatus, PromptViMode,
-    },
-    std::borrow::Cow,
+use reedline::{
+    DefaultPrompt, Prompt, PromptEditMode, PromptHistorySearch, PromptHistorySearchStatus,
+    PromptViMode,
};
+use std::borrow::Cow;

/// Nushell prompt definition
#[derive(Clone)]

View File

@ -1,10 +1,9 @@
use crate::NushellPrompt; use crate::NushellPrompt;
use log::trace; use log::trace;
use nu_engine::eval_subexpression; use nu_engine::ClosureEvalOnce;
use nu_protocol::report_error;
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
Config, PipelineData, Value, report_error, Config, PipelineData, Value,
}; };
use reedline::Prompt; use reedline::Prompt;
@ -39,11 +38,9 @@ fn get_prompt_string(
.get_env_var(engine_state, prompt) .get_env_var(engine_state, prompt)
.and_then(|v| match v { .and_then(|v| match v {
Value::Closure { val, .. } => { Value::Closure { val, .. } => {
let block = engine_state.get_block(val.block_id); let result = ClosureEvalOnce::new(engine_state, stack, val)
let mut stack = stack.captures_to_stack(val.captures); .run_with_input(PipelineData::Empty);
// Use eval_subexpression to force a redirection of output, so we can use everything in prompt
let ret_val =
eval_subexpression(engine_state, &mut stack, block, PipelineData::empty());
trace!( trace!(
"get_prompt_string (block) {}:{}:{}", "get_prompt_string (block) {}:{}:{}",
file!(), file!(),
@ -51,25 +48,7 @@ fn get_prompt_string(
column!() column!()
); );
ret_val result
.map_err(|err| {
let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &err);
})
.ok()
}
Value::Block { val: block_id, .. } => {
let block = engine_state.get_block(block_id);
// Use eval_subexpression to force a redirection of output, so we can use everything in prompt
let ret_val = eval_subexpression(engine_state, stack, block, PipelineData::empty());
trace!(
"get_prompt_string (block) {}:{}:{}",
file!(),
line!(),
column!()
);
ret_val
.map_err(|err| { .map_err(|err| {
let working_set = StateWorkingSet::new(engine_state); let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &err); report_error(&working_set, &err);
@ -99,12 +78,10 @@ fn get_prompt_string(
pub(crate) fn update_prompt( pub(crate) fn update_prompt(
config: &Config, config: &Config,
engine_state: &EngineState, engine_state: &EngineState,
stack: &Stack, stack: &mut Stack,
nu_prompt: &mut NushellPrompt, nu_prompt: &mut NushellPrompt,
) { ) {
let mut stack = stack.clone(); let left_prompt_string = get_prompt_string(PROMPT_COMMAND, config, engine_state, stack);
let left_prompt_string = get_prompt_string(PROMPT_COMMAND, config, engine_state, &mut stack);
// Now that we have the prompt string lets ansify it. // Now that we have the prompt string lets ansify it.
// <133 A><prompt><133 B><command><133 C><command output> // <133 A><prompt><133 B><command><133 C><command output>
@ -120,20 +97,18 @@ pub(crate) fn update_prompt(
left_prompt_string left_prompt_string
}; };
let right_prompt_string = let right_prompt_string = get_prompt_string(PROMPT_COMMAND_RIGHT, config, engine_state, stack);
get_prompt_string(PROMPT_COMMAND_RIGHT, config, engine_state, &mut stack);
let prompt_indicator_string = let prompt_indicator_string = get_prompt_string(PROMPT_INDICATOR, config, engine_state, stack);
get_prompt_string(PROMPT_INDICATOR, config, engine_state, &mut stack);
let prompt_multiline_string = let prompt_multiline_string =
get_prompt_string(PROMPT_MULTILINE_INDICATOR, config, engine_state, &mut stack); get_prompt_string(PROMPT_MULTILINE_INDICATOR, config, engine_state, stack);
let prompt_vi_insert_string = let prompt_vi_insert_string =
get_prompt_string(PROMPT_INDICATOR_VI_INSERT, config, engine_state, &mut stack); get_prompt_string(PROMPT_INDICATOR_VI_INSERT, config, engine_state, stack);
let prompt_vi_normal_string = let prompt_vi_normal_string =
get_prompt_string(PROMPT_INDICATOR_VI_NORMAL, config, engine_state, &mut stack); get_prompt_string(PROMPT_INDICATOR_VI_NORMAL, config, engine_state, stack);
// apply the other indicators // apply the other indicators
nu_prompt.update_all_prompt_strings( nu_prompt.update_all_prompt_strings(

View File

@ -1,10 +1,12 @@
use crate::{menus::NuMenuCompleter, NuHelpCompleter}; use crate::{menus::NuMenuCompleter, NuHelpCompleter};
use crossterm::event::{KeyCode, KeyModifiers}; use crossterm::event::{KeyCode, KeyModifiers};
use log::trace;
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style}; use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
use nu_engine::eval_block; use nu_engine::eval_block;
use nu_parser::parse; use nu_parser::parse;
use nu_protocol::{ use nu_protocol::{
create_menus, create_menus,
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
extract_value, Config, EditBindings, ParsedKeybinding, ParsedMenu, PipelineData, Record, extract_value, Config, EditBindings, ParsedKeybinding, ParsedMenu, PipelineData, Record,
ShellError, Span, Value, ShellError, Span, Value,
@ -77,6 +79,7 @@ pub(crate) fn add_menus(
stack: &Stack, stack: &Stack,
config: &Config, config: &Config,
) -> Result<Reedline, ShellError> { ) -> Result<Reedline, ShellError> {
trace!("add_menus: config: {:#?}", &config);
line_editor = line_editor.clear_menus(); line_editor = line_editor.clear_menus();
for menu in &config.menus { for menu in &config.menus {
@ -108,9 +111,9 @@ pub(crate) fn add_menus(
(output, working_set.render()) (output, working_set.render())
}; };
let mut temp_stack = Stack::new(); let mut temp_stack = Stack::new().capture();
let input = PipelineData::Empty; let input = PipelineData::Empty;
let res = eval_block(&engine_state, &mut temp_stack, &block, input, false, false)?; let res = eval_block::<WithoutDebug>(&engine_state, &mut temp_stack, &block, input)?;
if let PipelineData::Value(value, None) = res { if let PipelineData::Value(value, None) = res {
for menu in create_menus(&value)? { for menu in create_menus(&value)? {
@ -1275,7 +1278,14 @@ fn edit_from_record(
} }
"complete" => EditCommand::Complete, "complete" => EditCommand::Complete,
"cutselection" => EditCommand::CutSelection, "cutselection" => EditCommand::CutSelection,
#[cfg(feature = "system-clipboard")]
"cutselectionsystem" => EditCommand::CutSelectionSystem,
"copyselection" => EditCommand::CopySelection, "copyselection" => EditCommand::CopySelection,
#[cfg(feature = "system-clipboard")]
"copyselectionsystem" => EditCommand::CopySelectionSystem,
"paste" => EditCommand::Paste,
#[cfg(feature = "system-clipboard")]
"pastesystem" => EditCommand::PasteSystem,
"selectall" => EditCommand::SelectAll, "selectall" => EditCommand::SelectAll,
e => { e => {
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue {
@ -1303,9 +1313,8 @@ fn extract_char(value: &Value, config: &Config) -> Result<char, ShellError> {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use nu_protocol::record;
use super::*; use super::*;
use nu_protocol::record;
#[test] #[test]
fn test_send_event() { fn test_send_event() {
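The `system-clipboard` arms earlier in this file's diff show the pattern of compiling extra keybinding names only when a Cargo feature is enabled, so unknown names still fall through to the error arm on builds without the feature. A minimal sketch of that cfg-gated mapping with a stand-in command enum (the feature name mirrors the one in the diff, but the enum and error type are illustrative):

// Stand-in for reedline's EditCommand; only a few variants for illustration.
#[derive(Debug, PartialEq)]
enum Edit {
    Complete,
    Paste,
    #[cfg(feature = "system-clipboard")]
    PasteSystem,
}

fn edit_from_name(name: &str) -> Result<Edit, String> {
    match name {
        "complete" => Ok(Edit::Complete),
        "paste" => Ok(Edit::Paste),
        // This arm only exists when the crate is built with the feature.
        #[cfg(feature = "system-clipboard")]
        "pastesystem" => Ok(Edit::PasteSystem),
        other => Err(format!("unsupported edit command: {other}")),
    }
}

fn main() {
    assert_eq!(edit_from_name("paste"), Ok(Edit::Paste));
    // Without `--features system-clipboard`, "pastesystem" is simply unknown.
    println!("{:?}", edit_from_name("pastesystem"));
}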

View File

@ -1,5 +1,6 @@
use crate::{ use crate::{
completions::NuCompleter, completions::NuCompleter,
nu_highlight::NoOpHighlighter,
prompt_update, prompt_update,
reedline_config::{add_menus, create_keybindings, KeybindingsMode}, reedline_config::{add_menus, create_keybindings, KeybindingsMode},
util::eval_source, util::eval_source,
@ -8,8 +9,10 @@ use crate::{
use crossterm::cursor::SetCursorStyle; use crossterm::cursor::SetCursorStyle;
use log::{error, trace, warn}; use log::{error, trace, warn};
use miette::{ErrReport, IntoDiagnostic, Result}; use miette::{ErrReport, IntoDiagnostic, Result};
use nu_cmd_base::util::get_guaranteed_cwd; use nu_cmd_base::{
use nu_cmd_base::{hook::eval_hook, util::get_editor}; hook::eval_hook,
util::{get_editor, get_guaranteed_cwd},
};
use nu_color_config::StyleComputer; use nu_color_config::StyleComputer;
use nu_engine::{convert_env_values, env_to_strings}; use nu_engine::{convert_env_values, env_to_strings};
use nu_parser::{lex, parse, trim_quotes_str}; use nu_parser::{lex, parse, trim_quotes_str};
@ -20,19 +23,21 @@ use nu_protocol::{
report_error_new, HistoryConfig, HistoryFileFormat, PipelineData, ShellError, Span, Spanned, report_error_new, HistoryConfig, HistoryFileFormat, PipelineData, ShellError, Span, Spanned,
Value, NU_VARIABLE_ID, Value, NU_VARIABLE_ID,
}; };
use nu_utils::utils::perf; use nu_utils::{
filesystem::{have_permission, PermissionResult},
utils::perf,
};
use reedline::{ use reedline::{
CursorConfig, CwdAwareHinter, EditCommand, Emacs, FileBackedHistory, HistorySessionId, CursorConfig, CwdAwareHinter, DefaultCompleter, EditCommand, Emacs, FileBackedHistory,
Reedline, SqliteBackedHistory, Vi, HistorySessionId, Reedline, SqliteBackedHistory, Vi,
}; };
use std::{ use std::{
collections::HashMap, collections::HashMap,
env::temp_dir, env::temp_dir,
io::{self, IsTerminal, Write}, io::{self, IsTerminal, Write},
panic::{catch_unwind, AssertUnwindSafe}, panic::{catch_unwind, AssertUnwindSafe},
path::Path, path::{Path, PathBuf},
path::PathBuf, sync::{atomic::Ordering, Arc},
sync::atomic::Ordering,
time::{Duration, Instant}, time::{Duration, Instant},
}; };
use sysinfo::System; use sysinfo::System;
@ -47,17 +52,21 @@ const PRE_EXECUTE_MARKER: &str = "\x1b]133;C\x1b\\";
// const CMD_FINISHED_MARKER: &str = "\x1b]133;D;{}\x1b\\"; // const CMD_FINISHED_MARKER: &str = "\x1b]133;D;{}\x1b\\";
const RESET_APPLICATION_MODE: &str = "\x1b[?1l"; const RESET_APPLICATION_MODE: &str = "\x1b[?1l";
///
/// The main REPL loop, including spinning up the prompt itself. /// The main REPL loop, including spinning up the prompt itself.
///
pub fn evaluate_repl( pub fn evaluate_repl(
engine_state: &mut EngineState, engine_state: &mut EngineState,
stack: &mut Stack, stack: Stack,
nushell_path: &str, nushell_path: &str,
prerun_command: Option<Spanned<String>>, prerun_command: Option<Spanned<String>>,
load_std_lib: Option<Spanned<String>>, load_std_lib: Option<Spanned<String>>,
entire_start_time: Instant, entire_start_time: Instant,
) -> Result<()> { ) -> Result<()> {
// throughout this code, we hold this stack uniquely.
// During the main REPL loop, we hand ownership of this value to an Arc,
// so that it may be read by various reedline plugins. During this, we
// can't modify the stack, but at the end of the loop we take back ownership
// from the Arc. This lets us avoid copying stack variables needlessly
let mut unique_stack = stack;
let config = engine_state.get_config(); let config = engine_state.get_config();
let use_color = config.use_ansi_coloring; let use_color = config.use_ansi_coloring;
@ -65,11 +74,12 @@ pub fn evaluate_repl(
let mut entry_num = 0; let mut entry_num = 0;
let nu_prompt = NushellPrompt::new(config.shell_integration); let shell_integration = config.shell_integration;
let nu_prompt = NushellPrompt::new(shell_integration);
let start_time = std::time::Instant::now(); let start_time = std::time::Instant::now();
// Translate environment variables from Strings to Values // Translate environment variables from Strings to Values
if let Some(e) = convert_env_values(engine_state, stack) { if let Some(e) = convert_env_values(engine_state, &unique_stack) {
report_error_new(engine_state, &e); report_error_new(engine_state, &e);
} }
perf( perf(
@ -82,12 +92,12 @@ pub fn evaluate_repl(
); );
// seed env vars // seed env vars
stack.add_env_var( unique_stack.add_env_var(
"CMD_DURATION_MS".into(), "CMD_DURATION_MS".into(),
Value::string("0823", Span::unknown()), Value::string("0823", Span::unknown()),
); );
stack.add_env_var("LAST_EXIT_CODE".into(), Value::int(0, Span::unknown())); unique_stack.add_env_var("LAST_EXIT_CODE".into(), Value::int(0, Span::unknown()));
let mut line_editor = get_line_editor(engine_state, nushell_path, use_color)?; let mut line_editor = get_line_editor(engine_state, nushell_path, use_color)?;
let temp_file = temp_dir().join(format!("{}.nu", uuid::Uuid::new_v4())); let temp_file = temp_dir().join(format!("{}.nu", uuid::Uuid::new_v4()));
@ -95,13 +105,19 @@ pub fn evaluate_repl(
if let Some(s) = prerun_command { if let Some(s) = prerun_command {
eval_source( eval_source(
engine_state, engine_state,
stack, &mut unique_stack,
s.item.as_bytes(), s.item.as_bytes(),
&format!("entry #{entry_num}"), &format!("entry #{entry_num}"),
PipelineData::empty(), PipelineData::empty(),
false, false,
); );
engine_state.merge_env(stack, get_guaranteed_cwd(engine_state, stack))?; let cwd = get_guaranteed_cwd(engine_state, &unique_stack);
engine_state.merge_env(&mut unique_stack, cwd)?;
}
let hostname = System::host_name();
if shell_integration {
shell_integration_osc_7_633_2(hostname.as_deref(), engine_state, &mut unique_stack);
} }
engine_state.set_startup_time(entire_start_time.elapsed().as_nanos() as i64); engine_state.set_startup_time(entire_start_time.elapsed().as_nanos() as i64);
@ -113,7 +129,7 @@ pub fn evaluate_repl(
if load_std_lib.is_none() && engine_state.get_config().show_banner { if load_std_lib.is_none() && engine_state.get_config().show_banner {
eval_source( eval_source(
engine_state, engine_state,
stack, &mut unique_stack,
r#"use std banner; banner"#.as_bytes(), r#"use std banner; banner"#.as_bytes(),
"show_banner", "show_banner",
PipelineData::empty(), PipelineData::empty(),
@ -125,25 +141,28 @@ pub fn evaluate_repl(
// Setup initial engine_state and stack state // Setup initial engine_state and stack state
let mut previous_engine_state = engine_state.clone(); let mut previous_engine_state = engine_state.clone();
let mut previous_stack = stack.clone(); let mut previous_stack_arc = Arc::new(unique_stack);
loop { loop {
// clone these values so that they can be moved by AssertUnwindSafe // clone these values so that they can be moved by AssertUnwindSafe
// If there is a panic within this iteration the last engine_state and stack // If there is a panic within this iteration the last engine_state and stack
// will be used // will be used
let mut current_engine_state = previous_engine_state.clone(); let mut current_engine_state = previous_engine_state.clone();
let mut current_stack = previous_stack.clone(); // for the stack, we are going to hold to create a child stack instead,
// avoiding an expensive copy
let current_stack = Stack::with_parent(previous_stack_arc.clone());
let temp_file_cloned = temp_file.clone(); let temp_file_cloned = temp_file.clone();
let mut nu_prompt_cloned = nu_prompt.clone(); let mut nu_prompt_cloned = nu_prompt.clone();
match catch_unwind(AssertUnwindSafe(move || { let iteration_panic_state = catch_unwind(AssertUnwindSafe(|| {
let (continue_loop, line_editor) = loop_iteration(LoopContext { let (continue_loop, current_stack, line_editor) = loop_iteration(LoopContext {
engine_state: &mut current_engine_state, engine_state: &mut current_engine_state,
stack: &mut current_stack, stack: current_stack,
line_editor, line_editor,
nu_prompt: &mut nu_prompt_cloned, nu_prompt: &mut nu_prompt_cloned,
temp_file: &temp_file_cloned, temp_file: &temp_file_cloned,
use_color, use_color,
entry_num: &mut entry_num, entry_num: &mut entry_num,
hostname: hostname.as_deref(),
}); });
// pass the most recent version of the line_editor back // pass the most recent version of the line_editor back
@ -153,11 +172,14 @@ pub fn evaluate_repl(
current_stack, current_stack,
line_editor, line_editor,
) )
})) { }));
match iteration_panic_state {
Ok((continue_loop, es, s, le)) => { Ok((continue_loop, es, s, le)) => {
// setup state for the next iteration of the repl loop // setup state for the next iteration of the repl loop
previous_engine_state = es; previous_engine_state = es;
previous_stack = s; // we apply the changes from the updated stack back onto our previous stack
previous_stack_arc =
Arc::new(Stack::with_changes_from_child(previous_stack_arc, s));
line_editor = le; line_editor = le;
if !continue_loop { if !continue_loop {
break; break;
@ -211,38 +233,40 @@ fn get_line_editor(
struct LoopContext<'a> { struct LoopContext<'a> {
engine_state: &'a mut EngineState, engine_state: &'a mut EngineState,
stack: &'a mut Stack, stack: Stack,
line_editor: Reedline, line_editor: Reedline,
nu_prompt: &'a mut NushellPrompt, nu_prompt: &'a mut NushellPrompt,
temp_file: &'a Path, temp_file: &'a Path,
use_color: bool, use_color: bool,
entry_num: &'a mut usize, entry_num: &'a mut usize,
hostname: Option<&'a str>,
} }
/// Perform one iteration of the REPL loop /// Perform one iteration of the REPL loop
/// Result is bool: continue loop, current reedline /// Result is bool: continue loop, current reedline
#[inline] #[inline]
fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) { fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
use nu_cmd_base::hook; use nu_cmd_base::hook;
use reedline::Signal; use reedline::Signal;
let loop_start_time = std::time::Instant::now(); let loop_start_time = std::time::Instant::now();
let LoopContext { let LoopContext {
engine_state, engine_state,
stack, mut stack,
line_editor, line_editor,
nu_prompt, nu_prompt,
temp_file, temp_file,
use_color, use_color,
entry_num, entry_num,
hostname,
} = ctx; } = ctx;
let cwd = get_guaranteed_cwd(engine_state, stack); let cwd = get_guaranteed_cwd(engine_state, &stack);
let mut start_time = std::time::Instant::now(); let mut start_time = std::time::Instant::now();
// Before doing anything, merge the environment from the previous REPL iteration into the // Before doing anything, merge the environment from the previous REPL iteration into the
// permanent state. // permanent state.
if let Err(err) = engine_state.merge_env(stack, cwd) { if let Err(err) = engine_state.merge_env(&mut stack, cwd) {
report_error_new(engine_state, &err); report_error_new(engine_state, &err);
} }
perf( perf(
@ -255,7 +279,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
); );
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
//Reset the ctrl-c handler // Reset the ctrl-c handler
if let Some(ctrlc) = &mut engine_state.ctrlc { if let Some(ctrlc) = &mut engine_state.ctrlc {
ctrlc.store(false, Ordering::SeqCst); ctrlc.store(false, Ordering::SeqCst);
} }
@ -269,10 +293,42 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
); );
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
// Right before we start our prompt and take input from the user,
// fire the "pre_prompt" hook
if let Some(hook) = engine_state.get_config().hooks.pre_prompt.clone() {
if let Err(err) = eval_hook(engine_state, &mut stack, None, vec![], &hook, "pre_prompt") {
report_error_new(engine_state, &err);
}
}
perf(
"pre-prompt hook",
start_time,
file!(),
line!(),
column!(),
use_color,
);
start_time = std::time::Instant::now();
// Next, check all the environment variables they ask for
// fire the "env_change" hook
let env_change = engine_state.get_config().hooks.env_change.clone();
if let Err(error) = hook::eval_env_change_hook(env_change, engine_state, &mut stack) {
report_error_new(engine_state, &error)
}
perf(
"env-change hook",
start_time,
file!(),
line!(),
column!(),
use_color,
);
let engine_reference = Arc::new(engine_state.clone());
let config = engine_state.get_config(); let config = engine_state.get_config();
let engine_reference = std::sync::Arc::new(engine_state.clone()); start_time = std::time::Instant::now();
// Find the configured cursor shapes for each mode // Find the configured cursor shapes for each mode
let cursor_config = CursorConfig { let cursor_config = CursorConfig {
vi_insert: map_nucursorshape_to_cursorshape(config.cursor_shape_vi_insert), vi_insert: map_nucursorshape_to_cursorshape(config.cursor_shape_vi_insert),
@ -289,6 +345,10 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
); );
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
// at this line we have cloned the state for the completer and the transient prompt
// until we drop those, we cannot use the stack in the REPL loop itself
// See STACK-REFERENCE to see where we have taken a reference
let stack_arc = Arc::new(stack);
let mut line_editor = line_editor let mut line_editor = line_editor
.use_kitty_keyboard_enhancement(config.use_kitty_protocol) .use_kitty_keyboard_enhancement(config.use_kitty_protocol)
@ -297,7 +357,8 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
.use_bracketed_paste(cfg!(not(target_os = "windows")) && config.bracketed_paste) .use_bracketed_paste(cfg!(not(target_os = "windows")) && config.bracketed_paste)
.with_highlighter(Box::new(NuHighlighter { .with_highlighter(Box::new(NuHighlighter {
engine_state: engine_reference.clone(), engine_state: engine_reference.clone(),
stack: std::sync::Arc::new(stack.clone()), // STACK-REFERENCE 1
stack: stack_arc.clone(),
config: config.clone(), config: config.clone(),
})) }))
.with_validator(Box::new(NuValidator { .with_validator(Box::new(NuValidator {
@ -305,12 +366,14 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
})) }))
.with_completer(Box::new(NuCompleter::new( .with_completer(Box::new(NuCompleter::new(
engine_reference.clone(), engine_reference.clone(),
stack.clone(), // STACK-REFERENCE 2
Stack::with_parent(stack_arc.clone()),
))) )))
.with_quick_completions(config.quick_completions) .with_quick_completions(config.quick_completions)
.with_partial_completions(config.partial_completions) .with_partial_completions(config.partial_completions)
.with_ansi_colors(config.use_ansi_coloring) .with_ansi_colors(config.use_ansi_coloring)
.with_cursor_config(cursor_config); .with_cursor_config(cursor_config);
perf( perf(
"reedline builder", "reedline builder",
start_time, start_time,
@ -320,7 +383,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
use_color, use_color,
); );
let style_computer = StyleComputer::from_config(engine_state, stack); let style_computer = StyleComputer::from_config(engine_state, &stack_arc);
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
line_editor = if config.use_ansi_coloring { line_editor = if config.use_ansi_coloring {
@ -332,6 +395,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
} else { } else {
line_editor.disable_hints() line_editor.disable_hints()
}; };
perf( perf(
"reedline coloring/style_computer", "reedline coloring/style_computer",
start_time, start_time,
@ -342,12 +406,15 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
); );
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
line_editor = add_menus(line_editor, engine_reference, stack, config).unwrap_or_else(|e| { trace!("adding menus");
line_editor =
add_menus(line_editor, engine_reference, &stack_arc, config).unwrap_or_else(|e| {
report_error_new(engine_state, &e); report_error_new(engine_state, &e);
Reedline::create() Reedline::create()
}); });
perf( perf(
"reedline menus", "reedline adding menus",
start_time, start_time,
file!(), file!(),
line!(), line!(),
@ -356,11 +423,11 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
); );
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
let buffer_editor = get_editor(engine_state, stack, Span::unknown()); let buffer_editor = get_editor(engine_state, &stack_arc, Span::unknown());
line_editor = if let Ok((cmd, args)) = buffer_editor { line_editor = if let Ok((cmd, args)) = buffer_editor {
let mut command = std::process::Command::new(cmd); let mut command = std::process::Command::new(cmd);
let envs = env_to_strings(engine_state, stack).unwrap_or_else(|e| { let envs = env_to_strings(engine_state, &stack_arc).unwrap_or_else(|e| {
warn!("Couldn't convert environment variable values to strings: {e}"); warn!("Couldn't convert environment variable values to strings: {e}");
HashMap::default() HashMap::default()
}); });
@ -369,6 +436,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
} else { } else {
line_editor line_editor
}; };
perf( perf(
"reedline buffer_editor", "reedline buffer_editor",
start_time, start_time,
@ -385,6 +453,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
warn!("Failed to sync history: {}", e); warn!("Failed to sync history: {}", e);
} }
} }
perf( perf(
"sync_history", "sync_history",
start_time, start_time,
@ -398,6 +467,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
// Changing the line editor based on the found keybindings // Changing the line editor based on the found keybindings
line_editor = setup_keybindings(engine_state, line_editor); line_editor = setup_keybindings(engine_state, line_editor);
perf( perf(
"keybindings", "keybindings",
start_time, start_time,
@ -407,46 +477,21 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
use_color, use_color,
); );
start_time = std::time::Instant::now();
// Right before we start our prompt and take input from the user,
// fire the "pre_prompt" hook
if let Some(hook) = config.hooks.pre_prompt.clone() {
if let Err(err) = eval_hook(engine_state, stack, None, vec![], &hook, "pre_prompt") {
report_error_new(engine_state, &err);
}
}
perf(
"pre-prompt hook",
start_time,
file!(),
line!(),
column!(),
use_color,
);
start_time = std::time::Instant::now();
// Next, check all the environment variables they ask for
// fire the "env_change" hook
let config = engine_state.get_config();
if let Err(error) =
hook::eval_env_change_hook(config.hooks.env_change.clone(), engine_state, stack)
{
report_error_new(engine_state, &error)
}
perf(
"env-change hook",
start_time,
file!(),
line!(),
column!(),
use_color,
);
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
let config = &engine_state.get_config().clone(); let config = &engine_state.get_config().clone();
prompt_update::update_prompt(config, engine_state, stack, nu_prompt); prompt_update::update_prompt(
let transient_prompt = config,
prompt_update::make_transient_prompt(config, engine_state, stack, nu_prompt); engine_state,
&mut Stack::with_parent(stack_arc.clone()),
nu_prompt,
);
let transient_prompt = prompt_update::make_transient_prompt(
config,
engine_state,
&mut Stack::with_parent(stack_arc.clone()),
nu_prompt,
);
perf( perf(
"update_prompt", "update_prompt",
start_time, start_time,
@ -461,20 +506,41 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
line_editor = line_editor.with_transient_prompt(transient_prompt); line_editor = line_editor.with_transient_prompt(transient_prompt);
let input = line_editor.read_line(nu_prompt); let input = line_editor.read_line(nu_prompt);
// we got our inputs, we can now drop our stack references
// This lists all of the stack references that we have cleaned up
line_editor = line_editor
// CLEAR STACK-REFERENCE 1
.with_highlighter(Box::<NoOpHighlighter>::default())
// CLEAR STACK-REFERENCE 2
.with_completer(Box::<DefaultCompleter>::default());
let shell_integration = config.shell_integration; let shell_integration = config.shell_integration;
let mut stack = Stack::unwrap_unique(stack_arc);
perf(
"line_editor setup",
start_time,
file!(),
line!(),
column!(),
use_color,
);
let line_editor_input_time = std::time::Instant::now();
match input { match input {
Ok(Signal::Success(s)) => { Ok(Signal::Success(s)) => {
let hostname = System::host_name();
let history_supports_meta = matches!( let history_supports_meta = matches!(
engine_state.history_config().map(|h| h.file_format), engine_state.history_config().map(|h| h.file_format),
Some(HistoryFileFormat::Sqlite) Some(HistoryFileFormat::Sqlite)
); );
if history_supports_meta { if history_supports_meta {
prepare_history_metadata(&s, &hostname, engine_state, &mut line_editor); prepare_history_metadata(&s, hostname, engine_state, &mut line_editor);
} }
// For pre_exec_hook
start_time = Instant::now();
// Right before we start running the code the user gave us, fire the `pre_execution` // Right before we start running the code the user gave us, fire the `pre_execution`
// hook // hook
if let Some(hook) = config.hooks.pre_execution.clone() { if let Some(hook) = config.hooks.pre_execution.clone() {
@ -483,47 +549,102 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
repl.buffer = s.to_string(); repl.buffer = s.to_string();
drop(repl); drop(repl);
if let Err(err) = if let Err(err) = eval_hook(
eval_hook(engine_state, stack, None, vec![], &hook, "pre_execution") engine_state,
{ &mut stack,
None,
vec![],
&hook,
"pre_execution",
) {
report_error_new(engine_state, &err); report_error_new(engine_state, &err);
} }
} }
perf(
"pre_execution_hook",
start_time,
file!(),
line!(),
column!(),
use_color,
);
let mut repl = engine_state.repl_state.lock().expect("repl state mutex"); let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
repl.cursor_pos = line_editor.current_insertion_point(); repl.cursor_pos = line_editor.current_insertion_point();
repl.buffer = line_editor.current_buffer_contents().to_string(); repl.buffer = line_editor.current_buffer_contents().to_string();
drop(repl); drop(repl);
if shell_integration { if shell_integration {
start_time = Instant::now();
run_ansi_sequence(PRE_EXECUTE_MARKER); run_ansi_sequence(PRE_EXECUTE_MARKER);
perf(
"pre_execute_marker (133;C) ansi escape sequence",
start_time,
file!(),
line!(),
column!(),
use_color,
);
} }
// Actual command execution logic starts from here // Actual command execution logic starts from here
let start_time = Instant::now(); let cmd_execution_start_time = Instant::now();
match parse_operation(s.clone(), engine_state, stack) { match parse_operation(s.clone(), engine_state, &stack) {
Ok(operation) => match operation { Ok(operation) => match operation {
ReplOperation::AutoCd { cwd, target, span } => { ReplOperation::AutoCd { cwd, target, span } => {
do_auto_cd(target, cwd, stack, engine_state, span); do_auto_cd(target, cwd, &mut stack, engine_state, span);
if shell_integration {
start_time = Instant::now();
run_ansi_sequence(&get_command_finished_marker(&stack, engine_state));
perf(
"post_execute_marker (133;D) ansi escape sequences",
start_time,
file!(),
line!(),
column!(),
use_color,
);
}
} }
ReplOperation::RunCommand(cmd) => { ReplOperation::RunCommand(cmd) => {
line_editor = do_run_cmd( line_editor = do_run_cmd(
&cmd, &cmd,
stack, &mut stack,
engine_state, engine_state,
line_editor, line_editor,
shell_integration, shell_integration,
*entry_num, *entry_num,
) use_color,
);
if shell_integration {
start_time = Instant::now();
run_ansi_sequence(&get_command_finished_marker(&stack, engine_state));
perf(
"post_execute_marker (133;D) ansi escape sequences",
start_time,
file!(),
line!(),
column!(),
use_color,
);
}
} }
// as the name implies, we do nothing in this case // as the name implies, we do nothing in this case
ReplOperation::DoNothing => {} ReplOperation::DoNothing => {}
}, },
Err(ref e) => error!("Error parsing operation: {e}"), Err(ref e) => error!("Error parsing operation: {e}"),
} }
let cmd_duration = cmd_execution_start_time.elapsed();
let cmd_duration = start_time.elapsed();
stack.add_env_var( stack.add_env_var(
"CMD_DURATION_MS".into(), "CMD_DURATION_MS".into(),
@ -535,7 +656,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
&s, &s,
engine_state, engine_state,
cmd_duration, cmd_duration,
stack, &mut stack,
&mut line_editor, &mut line_editor,
) { ) {
warn!("Could not fill in result related history metadata: {e}"); warn!("Could not fill in result related history metadata: {e}");
@ -543,7 +664,18 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
} }
if shell_integration { if shell_integration {
do_shell_integration_finalize_command(hostname, engine_state, stack); start_time = Instant::now();
shell_integration_osc_7_633_2(hostname, engine_state, &mut stack);
perf(
"shell_integration_finalize ansi escape sequences",
start_time,
file!(),
line!(),
column!(),
use_color,
);
} }
flush_engine_state_repl_buffer(engine_state, &mut line_editor); flush_engine_state_repl_buffer(engine_state, &mut line_editor);
@ -551,16 +683,38 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
Ok(Signal::CtrlC) => { Ok(Signal::CtrlC) => {
// `Reedline` clears the line content. New prompt is shown // `Reedline` clears the line content. New prompt is shown
if shell_integration { if shell_integration {
run_ansi_sequence(&get_command_finished_marker(stack, engine_state)); start_time = Instant::now();
run_ansi_sequence(&get_command_finished_marker(&stack, engine_state));
perf(
"command_finished_marker ansi escape sequence",
start_time,
file!(),
line!(),
column!(),
use_color,
);
} }
} }
Ok(Signal::CtrlD) => { Ok(Signal::CtrlD) => {
// When exiting clear to a new line // When exiting clear to a new line
if shell_integration { if shell_integration {
run_ansi_sequence(&get_command_finished_marker(stack, engine_state)); start_time = Instant::now();
run_ansi_sequence(&get_command_finished_marker(&stack, engine_state));
perf(
"command_finished_marker ansi escape sequence",
start_time,
file!(),
line!(),
column!(),
use_color,
);
} }
println!(); println!();
return (false, line_editor); return (false, stack, line_editor);
} }
Err(err) => { Err(err) => {
let message = err.to_string(); let message = err.to_string();
@ -572,13 +726,24 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
// Alternatively only allow that expected failures let the REPL loop // Alternatively only allow that expected failures let the REPL loop
} }
if shell_integration { if shell_integration {
run_ansi_sequence(&get_command_finished_marker(stack, engine_state)); start_time = Instant::now();
run_ansi_sequence(&get_command_finished_marker(&stack, engine_state));
perf(
"command_finished_marker ansi escape sequence",
start_time,
file!(),
line!(),
column!(),
use_color,
);
} }
} }
} }
perf( perf(
"processing line editor input", "processing line editor input",
start_time, line_editor_input_time,
file!(), file!(),
line!(), line!(),
column!(), column!(),
@ -586,7 +751,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
); );
perf( perf(
"finished repl loop", "time between prompts in line editor loop",
loop_start_time, loop_start_time,
file!(), file!(),
line!(), line!(),
@ -594,7 +759,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
use_color, use_color,
); );
(true, line_editor) (true, stack, line_editor)
} }
/// ///
@ -602,7 +767,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Reedline) {
/// ///
fn prepare_history_metadata( fn prepare_history_metadata(
s: &str, s: &str,
hostname: &Option<String>, hostname: Option<&str>,
engine_state: &EngineState, engine_state: &EngineState,
line_editor: &mut Reedline, line_editor: &mut Reedline,
) { ) {
@ -610,7 +775,7 @@ fn prepare_history_metadata(
let result = line_editor let result = line_editor
.update_last_command_context(&|mut c| { .update_last_command_context(&|mut c| {
c.start_timestamp = Some(chrono::Utc::now()); c.start_timestamp = Some(chrono::Utc::now());
c.hostname = hostname.clone(); c.hostname = hostname.map(str::to_string);
c.cwd = Some(StateWorkingSet::new(engine_state).get_cwd()); c.cwd = Some(StateWorkingSet::new(engine_state).get_cwd());
c c
@ -683,7 +848,7 @@ fn parse_operation(
orig = trim_quotes_str(&orig).to_string() orig = trim_quotes_str(&orig).to_string()
} }
let path = nu_path::expand_path_with(&orig, &cwd); let path = nu_path::expand_path_with(&orig, &cwd, true);
if looks_like_path(&orig) && path.is_dir() && tokens.0.len() == 1 { if looks_like_path(&orig) && path.is_dir() && tokens.0.len() == 1 {
Ok(ReplOperation::AutoCd { Ok(ReplOperation::AutoCd {
cwd, cwd,
@ -722,6 +887,16 @@ fn do_auto_cd(
path.to_string_lossy().to_string() path.to_string_lossy().to_string()
}; };
if let PermissionResult::PermissionDenied(reason) = have_permission(path.clone()) {
report_error_new(
engine_state,
&ShellError::IOError {
msg: format!("Cannot change directory to {path}: {reason}"),
},
);
return;
}
stack.add_env_var("OLDPWD".into(), Value::string(cwd.clone(), Span::unknown())); stack.add_env_var("OLDPWD".into(), Value::string(cwd.clone(), Span::unknown()));
//FIXME: this only changes the current scope, but instead this environment variable //FIXME: this only changes the current scope, but instead this environment variable
@ -757,6 +932,7 @@ fn do_auto_cd(
"NUSHELL_LAST_SHELL".into(), "NUSHELL_LAST_SHELL".into(),
Value::int(last_shell as i64, span), Value::int(last_shell as i64, span),
); );
stack.add_env_var("LAST_EXIT_CODE".into(), Value::int(0, Span::unknown()));
} }
/// ///
@ -772,6 +948,7 @@ fn do_run_cmd(
line_editor: Reedline, line_editor: Reedline,
shell_integration: bool, shell_integration: bool,
entry_num: usize, entry_num: usize,
use_color: bool,
) -> Reedline { ) -> Reedline {
trace!("eval source: {}", s); trace!("eval source: {}", s);
@ -797,6 +974,7 @@ fn do_run_cmd(
} }
if shell_integration { if shell_integration {
let start_time = Instant::now();
if let Some(cwd) = stack.get_env_var(engine_state, "PWD") { if let Some(cwd) = stack.get_env_var(engine_state, "PWD") {
match cwd.coerce_into_string() { match cwd.coerce_into_string() {
Ok(path) => { Ok(path) => {
@ -819,6 +997,15 @@ fn do_run_cmd(
} }
} }
} }
perf(
"set title with command ansi escape sequence",
start_time,
file!(),
line!(),
column!(),
use_color,
);
} }
eval_source( eval_source(
@ -835,14 +1022,14 @@ fn do_run_cmd(
/// ///
/// Output some things and set environment variables so shells with the right integration /// Output some things and set environment variables so shells with the right integration
/// can have more information about what is going on (after we have run a command) /// can have more information about what is going on (both on startup and after we have
/// run a command)
/// ///
fn do_shell_integration_finalize_command( fn shell_integration_osc_7_633_2(
hostname: Option<String>, hostname: Option<&str>,
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, stack: &mut Stack,
) { ) {
run_ansi_sequence(&get_command_finished_marker(stack, engine_state));
if let Some(cwd) = stack.get_env_var(engine_state, "PWD") { if let Some(cwd) = stack.get_env_var(engine_state, "PWD") {
match cwd.coerce_into_string() { match cwd.coerce_into_string() {
Ok(path) => { Ok(path) => {
@ -859,7 +1046,7 @@ fn do_shell_integration_finalize_command(
run_ansi_sequence(&format!( run_ansi_sequence(&format!(
"\x1b]7;file://{}{}{}\x1b\\", "\x1b]7;file://{}{}{}\x1b\\",
percent_encoding::utf8_percent_encode( percent_encoding::utf8_percent_encode(
&hostname.unwrap_or_else(|| "localhost".to_string()), hostname.unwrap_or("localhost"),
percent_encoding::CONTROLS percent_encoding::CONTROLS
), ),
if path.starts_with('/') { "" } else { "/" }, if path.starts_with('/') { "" } else { "/" },
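A recurring theme in the REPL changes above is handing the stack to an `Arc` so read-only consumers (highlighter, completer, prompt) can share it during one prompt, then recovering unique ownership once those readers are dropped. A minimal sketch of that hand-off with a stand-in stack type and `Arc::try_unwrap` (nushell uses its own `Stack::with_parent` / `Stack::with_changes_from_child` / `Stack::unwrap_unique` helpers instead):

use std::sync::Arc;

#[derive(Debug)]
struct Stack {
    env: Vec<(String, String)>,
}

fn main() {
    let stack = Stack { env: vec![("PWD".into(), "/tmp".into())] };

    // Share the stack read-only with the line-editor components for one prompt.
    let shared = Arc::new(stack);
    let reader_a = shared.clone();
    let reader_b = shared.clone();
    println!("readers see {} env vars", reader_a.env.len());

    // Drop the readers once input has been collected...
    drop(reader_a);
    drop(reader_b);

    // ...then take unique ownership back without cloning the data.
    let mut stack = Arc::try_unwrap(shared).expect("no other references remain");
    stack.env.push(("LAST_EXIT_CODE".into(), "0".into()));
    println!("{stack:?}");
}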
View File
@ -3,9 +3,11 @@ use nu_ansi_term::Style;
use nu_color_config::{get_matching_brackets_style, get_shape_color}; use nu_color_config::{get_matching_brackets_style, get_shape_color};
use nu_engine::env; use nu_engine::env;
use nu_parser::{flatten_block, parse, FlatShape}; use nu_parser::{flatten_block, parse, FlatShape};
use nu_protocol::ast::{Argument, Block, Expr, Expression, PipelineElement, RecordItem}; use nu_protocol::{
use nu_protocol::engine::{EngineState, Stack, StateWorkingSet}; ast::{Argument, Block, Expr, Expression, PipelineRedirection, RecordItem},
use nu_protocol::{Config, Span}; engine::{EngineState, Stack, StateWorkingSet},
Config, Span,
};
use reedline::{Highlighter, StyledText}; use reedline::{Highlighter, StyledText};
use std::sync::Arc; use std::sync::Arc;
@ -262,26 +264,38 @@ fn find_matching_block_end_in_block(
) -> Option<usize> { ) -> Option<usize> {
for p in &block.pipelines { for p in &block.pipelines {
for e in &p.elements { for e in &p.elements {
match e { if e.expr.span.contains(global_cursor_offset) {
PipelineElement::Expression(_, e)
| PipelineElement::ErrPipedExpression(_, e)
| PipelineElement::OutErrPipedExpression(_, e)
| PipelineElement::Redirection(_, _, e, _)
| PipelineElement::And(_, e)
| PipelineElement::Or(_, e)
| PipelineElement::SameTargetRedirection { cmd: (_, e), .. }
| PipelineElement::SeparateRedirection { out: (_, e, _), .. } => {
if e.span.contains(global_cursor_offset) {
if let Some(pos) = find_matching_block_end_in_expr( if let Some(pos) = find_matching_block_end_in_expr(
line, line,
working_set, working_set,
e, &e.expr,
global_span_offset, global_span_offset,
global_cursor_offset, global_cursor_offset,
) { ) {
return Some(pos); return Some(pos);
} }
} }
if let Some(redirection) = e.redirection.as_ref() {
match redirection {
PipelineRedirection::Single { target, .. }
| PipelineRedirection::Separate { out: target, .. }
| PipelineRedirection::Separate { err: target, .. }
if target.span().contains(global_cursor_offset) =>
{
if let Some(pos) = target.expr().and_then(|expr| {
find_matching_block_end_in_expr(
line,
working_set,
expr,
global_span_offset,
global_cursor_offset,
)
}) {
return Some(pos);
}
}
_ => {}
} }
} }
} }
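The rewritten redirection arm above binds the same name (target) from several or-pattern alternatives and filters them with a single match guard. A toy, self-contained illustration of that match shape; the Redirection enum below is invented and only the pattern style mirrors the diff:

// Invented enum; only the or-pattern-plus-guard shape matches the real code.
enum Redirection {
    Single { target: u32 },
    Separate { out: u32, err: u32 },
}

fn target_at(r: &Redirection, cursor: u32) -> Option<u32> {
    match r {
        Redirection::Single { target }
        | Redirection::Separate { out: target, .. }
        | Redirection::Separate { err: target, .. }
            if *target == cursor =>
        {
            Some(*target)
        }
        _ => None,
    }
}

fn main() {
    println!("{:?}", target_at(&Redirection::Single { target: 1 }, 1));
    println!("{:?}", target_at(&Redirection::Separate { out: 3, err: 7 }, 3));
}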
@ -346,9 +360,8 @@ fn find_matching_block_end_in_expr(
Expr::MatchBlock(_) => None, Expr::MatchBlock(_) => None,
Expr::Nothing => None, Expr::Nothing => None,
Expr::Garbage => None, Expr::Garbage => None,
Expr::Spread(_) => None,
Expr::Table(hdr, rows) => { Expr::Table(table) => {
if expr_last == global_cursor_offset { if expr_last == global_cursor_offset {
// cursor is at table end // cursor is at table end
Some(expr_first) Some(expr_first)
@ -357,11 +370,11 @@ fn find_matching_block_end_in_expr(
Some(expr_last) Some(expr_last)
} else { } else {
// cursor is inside table // cursor is inside table
for inner_expr in hdr { for inner_expr in table.columns.as_ref() {
find_in_expr_or_continue!(inner_expr); find_in_expr_or_continue!(inner_expr);
} }
for row in rows { for row in table.rows.as_ref() {
for inner_expr in row { for inner_expr in row.as_ref() {
find_in_expr_or_continue!(inner_expr); find_in_expr_or_continue!(inner_expr);
} }
} }
@ -454,7 +467,7 @@ fn find_matching_block_end_in_expr(
None None
} }
Expr::List(inner_expr) => { Expr::List(list) => {
if expr_last == global_cursor_offset { if expr_last == global_cursor_offset {
// cursor is at list end // cursor is at list end
Some(expr_first) Some(expr_first)
@ -463,8 +476,9 @@ fn find_matching_block_end_in_expr(
Some(expr_last) Some(expr_last)
} else { } else {
// cursor is inside list // cursor is inside list
for inner_expr in inner_expr { for item in list {
find_in_expr_or_continue!(inner_expr); let expr = item.expr();
find_in_expr_or_continue!(expr);
} }
None None
} }
View File
@ -1,12 +1,11 @@
use nu_cmd_base::hook::eval_hook; use nu_cmd_base::hook::eval_hook;
use nu_engine::{eval_block, eval_block_with_early_return}; use nu_engine::{eval_block, eval_block_with_early_return};
use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents}; use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents};
use nu_protocol::engine::StateWorkingSet;
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, Stack}, debugger::WithoutDebug,
print_if_stream, PipelineData, ShellError, Span, Value, engine::{EngineState, Stack, StateWorkingSet},
print_if_stream, report_error, report_error_new, PipelineData, ShellError, Span, Value,
}; };
use nu_protocol::{report_error, report_error_new};
#[cfg(windows)] #[cfg(windows)]
use nu_utils::enable_vt_processing; use nu_utils::enable_vt_processing;
use nu_utils::utils::perf; use nu_utils::utils::perf;
@ -93,8 +92,8 @@ fn gather_env_vars(
let span_offset = engine_state.next_span_start(); let span_offset = engine_state.next_span_start();
engine_state.add_file( engine_state.add_file(
"Host Environment Variables".to_string(), "Host Environment Variables".into(),
fake_env_file.as_bytes().to_vec(), fake_env_file.as_bytes().into(),
); );
let (tokens, _) = lex(fake_env_file.as_bytes(), span_offset, &[], &[], true); let (tokens, _) = lex(fake_env_file.as_bytes(), span_offset, &[], &[], true);
@ -240,9 +239,9 @@ pub fn eval_source(
} }
let b = if allow_return { let b = if allow_return {
eval_block_with_early_return(engine_state, stack, &block, input, false, false) eval_block_with_early_return::<WithoutDebug>(engine_state, stack, &block, input)
} else { } else {
eval_block(engine_state, stack, &block, input, false, false) eval_block::<WithoutDebug>(engine_state, stack, &block, input)
}; };
match b { match b {
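eval_block and eval_block_with_early_return are now selected with a turbofish over a debugger marker type instead of trailing boolean flags. A minimal sketch of that zero-sized-marker pattern; the trait and function bodies below are invented for illustration and are not nushell's actual API:

// Invented stand-ins for the WithoutDebug/WithDebug markers used in the diff.
trait DebugMode {
    const ENABLED: bool;
}

struct WithoutDebug;
struct WithDebug;

impl DebugMode for WithoutDebug {
    const ENABLED: bool = false;
}
impl DebugMode for WithDebug {
    const ENABLED: bool = true;
}

// The mode is chosen at compile time by the caller; no runtime flag is passed.
fn eval_block<D: DebugMode>(source: &str) -> String {
    if D::ENABLED {
        eprintln!("entering block: {source}");
    }
    format!("evaluated: {source}")
}

fn main() {
    println!("{}", eval_block::<WithoutDebug>("ls | length"));
    println!("{}", eval_block::<WithDebug>("open Cargo.toml"));
}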
View File
@ -1,12 +1,12 @@
pub mod support; pub mod support;
use std::path::PathBuf;
use nu_cli::NuCompleter; use nu_cli::NuCompleter;
use nu_engine::eval_block;
use nu_parser::parse; use nu_parser::parse;
use nu_protocol::engine::StateWorkingSet; use nu_protocol::{debugger::WithoutDebug, engine::StateWorkingSet, PipelineData};
use reedline::{Completer, Suggestion}; use reedline::{Completer, Suggestion};
use rstest::{fixture, rstest}; use rstest::{fixture, rstest};
use std::path::PathBuf;
use support::{ use support::{
completions_helpers::{new_partial_engine, new_quote_engine}, completions_helpers::{new_partial_engine, new_quote_engine},
file, folder, match_suggestions, new_engine, file, folder, match_suggestions, new_engine,
@ -178,7 +178,7 @@ fn dotnu_completions() {
#[ignore] #[ignore]
fn external_completer_trailing_space() { fn external_completer_trailing_space() {
// https://github.com/nushell/nushell/issues/6378 // https://github.com/nushell/nushell/issues/6378
let block = "let external_completer = {|spans| $spans}"; let block = "{|spans| $spans}";
let input = "gh alias ".to_string(); let input = "gh alias ".to_string();
let suggestions = run_external_completion(block, &input); let suggestions = run_external_completion(block, &input);
@ -848,12 +848,14 @@ fn alias_of_another_alias() {
match_suggestions(expected_paths, suggestions) match_suggestions(expected_paths, suggestions)
} }
fn run_external_completion(block: &str, input: &str) -> Vec<Suggestion> { fn run_external_completion(completer: &str, input: &str) -> Vec<Suggestion> {
let completer = format!("$env.config.completions.external.completer = {completer}");
// Create a new engine // Create a new engine
let (dir, _, mut engine_state, mut stack) = new_engine(); let (dir, _, mut engine_state, mut stack) = new_engine();
let (_, delta) = { let (block, delta) = {
let mut working_set = StateWorkingSet::new(&engine_state); let mut working_set = StateWorkingSet::new(&engine_state);
let block = parse(&mut working_set, None, block.as_bytes(), false); let block = parse(&mut working_set, None, completer.as_bytes(), false);
assert!(working_set.parse_errors.is_empty()); assert!(working_set.parse_errors.is_empty());
(block, working_set.render()) (block, working_set.render())
@ -861,16 +863,13 @@ fn run_external_completion(block: &str, input: &str) -> Vec<Suggestion> {
assert!(engine_state.merge_delta(delta).is_ok()); assert!(engine_state.merge_delta(delta).is_ok());
assert!(
eval_block::<WithoutDebug>(&engine_state, &mut stack, &block, PipelineData::Empty).is_ok()
);
// Merge environment into the permanent state // Merge environment into the permanent state
assert!(engine_state.merge_env(&mut stack, &dir).is_ok()); assert!(engine_state.merge_env(&mut stack, &dir).is_ok());
let latest_block_id = engine_state.num_blocks() - 1;
// Change config adding the external completer
let mut config = engine_state.get_config().clone();
config.external_completer = Some(latest_block_id);
engine_state.set_config(config);
// Instantiate a new completer // Instantiate a new completer
let mut completer = NuCompleter::new(std::sync::Arc::new(engine_state), stack); let mut completer = NuCompleter::new(std::sync::Arc::new(engine_state), stack);
View File
@ -1,14 +1,15 @@
use std::path::PathBuf;
use nu_engine::eval_block; use nu_engine::eval_block;
use nu_parser::parse; use nu_parser::parse;
use nu_protocol::{ use nu_protocol::{
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
eval_const::create_nu_constant, eval_const::create_nu_constant,
PipelineData, ShellError, Span, Value, NU_VARIABLE_ID, PipelineData, ShellError, Span, Value, NU_VARIABLE_ID,
}; };
use nu_test_support::fs; use nu_test_support::fs;
use reedline::Suggestion; use reedline::Suggestion;
use std::path::PathBuf;
const SEP: char = std::path::MAIN_SEPARATOR; const SEP: char = std::path::MAIN_SEPARATOR;
fn create_default_context() -> EngineState { fn create_default_context() -> EngineState {
@ -194,13 +195,11 @@ pub fn merge_input(
engine_state.merge_delta(delta)?; engine_state.merge_delta(delta)?;
assert!(eval_block( assert!(eval_block::<WithoutDebug>(
engine_state, engine_state,
stack, stack,
&block, &block,
PipelineData::Value(Value::nothing(Span::unknown(),), None), PipelineData::Value(Value::nothing(Span::unknown()), None),
false,
false
) )
.is_ok()); .is_ok());
View File
@ -5,17 +5,17 @@ edition = "2021"
license = "MIT" license = "MIT"
name = "nu-cmd-base" name = "nu-cmd-base"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base" repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
version = "0.90.2" version = "0.92.3"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
nu-engine = { path = "../nu-engine", version = "0.90.2" } nu-engine = { path = "../nu-engine", version = "0.92.3" }
nu-parser = { path = "../nu-parser", version = "0.90.2" } nu-parser = { path = "../nu-parser", version = "0.92.3" }
nu-path = { path = "../nu-path", version = "0.90.2" } nu-path = { path = "../nu-path", version = "0.92.3" }
nu-protocol = { path = "../nu-protocol", version = "0.90.2" } nu-protocol = { path = "../nu-protocol", version = "0.92.3" }
indexmap = "2.2" indexmap = { workspace = true }
miette = "7.1.0" miette = { workspace = true }
[dev-dependencies] [dev-dependencies]
View File
@ -2,9 +2,13 @@ use crate::util::get_guaranteed_cwd;
use miette::Result; use miette::Result;
use nu_engine::{eval_block, eval_block_with_early_return}; use nu_engine::{eval_block, eval_block_with_early_return};
use nu_parser::parse; use nu_parser::parse;
use nu_protocol::cli_error::{report_error, report_error_new}; use nu_protocol::{
use nu_protocol::engine::{EngineState, Stack, StateWorkingSet}; cli_error::{report_error, report_error_new},
use nu_protocol::{BlockId, PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId}; debugger::WithoutDebug,
engine::{Closure, EngineState, Stack, StateWorkingSet},
PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId,
};
use std::sync::Arc;
pub fn eval_env_change_hook( pub fn eval_env_change_hook(
env_change_hook: Option<Value>, env_change_hook: Option<Value>,
@ -14,7 +18,7 @@ pub fn eval_env_change_hook(
if let Some(hook) = env_change_hook { if let Some(hook) = env_change_hook {
match hook { match hook {
Value::Record { val, .. } => { Value::Record { val, .. } => {
for (env_name, hook_value) in &val { for (env_name, hook_value) in &*val {
let before = engine_state let before = engine_state
.previous_env_vars .previous_env_vars
.get(env_name) .get(env_name)
@ -35,8 +39,7 @@ pub fn eval_env_change_hook(
"env_change", "env_change",
)?; )?;
engine_state Arc::make_mut(&mut engine_state.previous_env_vars)
.previous_env_vars
.insert(env_name.to_string(), after); .insert(env_name.to_string(), after);
} }
} }
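Arc::make_mut above gives the env_change hook clone-on-write access to previous_env_vars: if another owner still holds the map, the inner value is cloned before mutation. A small standalone demonstration of that std API:

use std::{collections::HashMap, sync::Arc};

fn main() {
    let mut prev_env: Arc<HashMap<String, String>> = Arc::new(HashMap::new());
    // A second owner, as a cloned EngineState might hold in the real code.
    let snapshot = Arc::clone(&prev_env);

    // Because `snapshot` also owns the map, make_mut clones the inner HashMap
    // before returning a mutable reference, leaving the snapshot untouched.
    Arc::make_mut(&mut prev_env).insert("PWD".into(), "/tmp".into());

    assert!(snapshot.get("PWD").is_none());
    assert_eq!(prev_env.get("PWD").map(String::as_str), Some("/tmp"));
}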
@ -115,7 +118,7 @@ pub fn eval_hook(
}) })
.collect(); .collect();
match eval_block(engine_state, stack, &block, input, false, false) { match eval_block::<WithoutDebug>(engine_state, stack, &block, input) {
Ok(pipeline_data) => { Ok(pipeline_data) => {
output = pipeline_data; output = pipeline_data;
} }
@ -150,11 +153,11 @@ pub fn eval_hook(
// If it returns true (the default if a condition block is not specified), the hook should be run. // If it returns true (the default if a condition block is not specified), the hook should be run.
let do_run_hook = if let Some(condition) = val.get("condition") { let do_run_hook = if let Some(condition) = val.get("condition") {
let other_span = condition.span(); let other_span = condition.span();
if let Ok(block_id) = condition.coerce_block() { if let Ok(closure) = condition.as_closure() {
match run_hook_block( match run_hook(
engine_state, engine_state,
stack, stack,
block_id, closure,
None, None,
arguments.clone(), arguments.clone(),
other_span, other_span,
@ -243,7 +246,7 @@ pub fn eval_hook(
}) })
.collect(); .collect();
match eval_block(engine_state, stack, &block, input, false, false) { match eval_block::<WithoutDebug>(engine_state, stack, &block, input) {
Ok(pipeline_data) => { Ok(pipeline_data) => {
output = pipeline_data; output = pipeline_data;
} }
@ -256,25 +259,8 @@ pub fn eval_hook(
stack.remove_var(*var_id); stack.remove_var(*var_id);
} }
} }
Value::Block { val: block_id, .. } => {
run_hook_block(
engine_state,
stack,
*block_id,
input,
arguments,
source_span,
)?;
}
Value::Closure { val, .. } => { Value::Closure { val, .. } => {
run_hook_block( run_hook(engine_state, stack, val, input, arguments, source_span)?;
engine_state,
stack,
val.block_id,
input,
arguments,
source_span,
)?;
} }
other => { other => {
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue {
@ -286,11 +272,8 @@ pub fn eval_hook(
} }
} }
} }
Value::Block { val: block_id, .. } => {
output = run_hook_block(engine_state, stack, *block_id, input, arguments, span)?;
}
Value::Closure { val, .. } => { Value::Closure { val, .. } => {
output = run_hook_block(engine_state, stack, val.block_id, input, arguments, span)?; output = run_hook(engine_state, stack, val, input, arguments, span)?;
} }
other => { other => {
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue {
@ -307,19 +290,21 @@ pub fn eval_hook(
Ok(output) Ok(output)
} }
fn run_hook_block( fn run_hook(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, stack: &mut Stack,
block_id: BlockId, closure: &Closure,
optional_input: Option<PipelineData>, optional_input: Option<PipelineData>,
arguments: Vec<(String, Value)>, arguments: Vec<(String, Value)>,
span: Span, span: Span,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let block = engine_state.get_block(block_id); let block = engine_state.get_block(closure.block_id);
let input = optional_input.unwrap_or_else(PipelineData::empty); let input = optional_input.unwrap_or_else(PipelineData::empty);
let mut callee_stack = stack.gather_captures(engine_state, &block.captures); let mut callee_stack = stack
.captures_to_stack_preserve_out_dest(closure.captures.clone())
.reset_pipes();
for (idx, PositionalArg { var_id, .. }) in for (idx, PositionalArg { var_id, .. }) in
block.signature.required_positional.iter().enumerate() block.signature.required_positional.iter().enumerate()
@ -336,8 +321,12 @@ fn run_hook_block(
} }
} }
let pipeline_data = let pipeline_data = eval_block_with_early_return::<WithoutDebug>(
eval_block_with_early_return(engine_state, &mut callee_stack, block, input, false, false)?; engine_state,
&mut callee_stack,
block,
input,
)?;
if let PipelineData::Value(Value::Error { error, .. }, _) = pipeline_data { if let PipelineData::Value(Value::Error { error, .. }, _) = pipeline_data {
return Err(*error); return Err(*error);
View File
@ -1,7 +1,5 @@
use nu_protocol::ast::CellPath; use nu_protocol::{ast::CellPath, PipelineData, ShellError, Span, Value};
use nu_protocol::{PipelineData, ShellError, Span, Value}; use std::sync::{atomic::AtomicBool, Arc};
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
pub trait CmdArgument { pub trait CmdArgument {
fn take_cell_paths(&mut self) -> Option<Vec<CellPath>>; fn take_cell_paths(&mut self) -> Option<Vec<CellPath>>;
View File
@ -1,10 +1,8 @@
use nu_protocol::report_error;
use nu_protocol::{ use nu_protocol::{
ast::RangeInclusion,
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
Range, ShellError, Span, Value, report_error, Range, ShellError, Span, Value,
}; };
use std::path::PathBuf; use std::{ops::Bound, path::PathBuf};
pub fn get_init_cwd() -> PathBuf { pub fn get_init_cwd() -> PathBuf {
std::env::current_dir().unwrap_or_else(|_| { std::env::current_dir().unwrap_or_else(|_| {
@ -25,35 +23,21 @@ pub fn get_guaranteed_cwd(engine_state: &EngineState, stack: &Stack) -> PathBuf
type MakeRangeError = fn(&str, Span) -> ShellError; type MakeRangeError = fn(&str, Span) -> ShellError;
pub fn process_range(range: &Range) -> Result<(isize, isize), MakeRangeError> { pub fn process_range(range: &Range) -> Result<(isize, isize), MakeRangeError> {
let start = match &range.from { match range {
Value::Int { val, .. } => isize::try_from(*val).unwrap_or_default(), Range::IntRange(range) => {
Value::Nothing { .. } => 0, let start = range.start().try_into().unwrap_or(0);
_ => { let end = match range.end() {
return Err(|msg, span| ShellError::TypeMismatch { Bound::Included(v) => v as isize,
err_message: msg.to_string(), Bound::Excluded(v) => (v - 1) as isize,
span, Bound::Unbounded => isize::MAX,
})
}
}; };
let end = match &range.to {
Value::Int { val, .. } => {
if matches!(range.inclusion, RangeInclusion::Inclusive) {
isize::try_from(*val).unwrap_or(isize::max_value())
} else {
isize::try_from(*val).unwrap_or(isize::max_value()) - 1
}
}
Value::Nothing { .. } => isize::max_value(),
_ => {
return Err(|msg, span| ShellError::TypeMismatch {
err_message: msg.to_string(),
span,
})
}
};
Ok((start, end)) Ok((start, end))
}
Range::FloatRange(_) => Err(|msg, span| ShellError::TypeMismatch {
err_message: msg.to_string(),
span,
}),
}
} }
const HELP_MSG: &str = "Nushell's config file can be found with the command: $nu.config-path. \ const HELP_MSG: &str = "Nushell's config file can be found with the command: $nu.config-path. \
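The new process_range arm converts the end of an integer range through std::ops::Bound: inclusive ends are kept, exclusive ends are shifted down by one, and an unbounded end saturates to isize::MAX. A small sketch of just that conversion, with the numeric types simplified relative to the real code:

use std::ops::Bound;

// Map a range end bound to the inclusive isize endpoint used downstream.
fn end_to_isize(end: Bound<i64>) -> isize {
    match end {
        Bound::Included(v) => v as isize,
        Bound::Excluded(v) => (v - 1) as isize,
        Bound::Unbounded => isize::MAX,
    }
}

fn main() {
    assert_eq!(end_to_isize(Bound::Included(10)), 10);
    assert_eq!(end_to_isize(Bound::Excluded(10)), 9);
    assert_eq!(end_to_isize(Bound::Unbounded), isize::MAX);
}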
@ -99,7 +83,7 @@ fn get_editor_commandline(
pub fn get_editor( pub fn get_editor(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, stack: &Stack,
span: Span, span: Span,
) -> Result<(String, Vec<String>), ShellError> { ) -> Result<(String, Vec<String>), ShellError> {
let config = engine_state.get_config(); let config = engine_state.get_config();
View File
@ -5,7 +5,7 @@ edition = "2021"
license = "MIT" license = "MIT"
name = "nu-cmd-dataframe" name = "nu-cmd-dataframe"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-dataframe" repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-dataframe"
version = "0.90.2" version = "0.92.3"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -13,23 +13,24 @@ version = "0.90.2"
bench = false bench = false
[dependencies] [dependencies]
nu-engine = { path = "../nu-engine", version = "0.90.2" } nu-engine = { path = "../nu-engine", version = "0.92.3" }
nu-parser = { path = "../nu-parser", version = "0.90.2" } nu-parser = { path = "../nu-parser", version = "0.92.3" }
nu-protocol = { path = "../nu-protocol", version = "0.90.2" } nu-protocol = { path = "../nu-protocol", version = "0.92.3" }
# Potential dependencies for extras # Potential dependencies for extras
chrono = { version = "0.4", features = ["std", "unstable-locales"], default-features = false } chrono = { workspace = true, features = ["std", "unstable-locales"], default-features = false }
chrono-tz = "0.8" chrono-tz = { workspace = true }
fancy-regex = "0.13" fancy-regex = { workspace = true }
indexmap = { version = "2.2" } indexmap = { workspace = true }
num = { version = "0.4", optional = true } num = { version = "0.4", optional = true }
serde = { version = "1.0", features = ["derive"] } serde = { workspace = true, features = ["derive"] }
sqlparser = { version = "0.43", optional = true } # keep sqlparser at 0.39.0 until we can update polars
polars-io = { version = "0.37", features = ["avro"], optional = true } sqlparser = { version = "0.45", optional = true }
polars-arrow = { version = "0.37", optional = true } polars-io = { version = "0.39", features = ["avro"], optional = true }
polars-ops = { version = "0.37", optional = true } polars-arrow = { version = "0.39", optional = true }
polars-plan = { version = "0.37", features = ["regex"], optional = true } polars-ops = { version = "0.39", optional = true }
polars-utils = { version = "0.37", optional = true } polars-plan = { version = "0.39", features = ["regex"], optional = true }
polars-utils = { version = "0.39", optional = true }
[dependencies.polars] [dependencies.polars]
features = [ features = [
@ -39,7 +40,6 @@ features = [
"cross_join", "cross_join",
"csv", "csv",
"cum_agg", "cum_agg",
"default",
"dtype-categorical", "dtype-categorical",
"dtype-datetime", "dtype-datetime",
"dtype-struct", "dtype-struct",
@ -60,14 +60,16 @@ features = [
"serde", "serde",
"serde-lazy", "serde-lazy",
"strings", "strings",
"temporal",
"to_dummies", "to_dummies",
] ]
default-features = false
optional = true optional = true
version = "0.37" version = "0.39"
[features] [features]
dataframe = ["num", "polars", "polars-io", "polars-arrow", "polars-ops", "polars-plan", "polars-utils", "sqlparser"] dataframe = ["num", "polars", "polars-io", "polars-arrow", "polars-ops", "polars-plan", "polars-utils", "sqlparser"]
default = [] default = []
[dev-dependencies] [dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.90.2" } nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.92.3" }
View File
@ -1,11 +1,5 @@
use nu_engine::CallExt; use crate::dataframe::values::{Axis, Column, NuDataFrame};
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use super::super::values::{Axis, Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct AppendDF; pub struct AppendDF;
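This and the following dataframe commands collapse their long nu_protocol import lists into a single use nu_engine::command_prelude::*. A toy sketch of the prelude idiom itself; the module below is invented, and the real prelude re-exports items such as Call, Command, EngineState, Signature and Value:

// Invented prelude module illustrating the re-export pattern.
mod command_prelude {
    pub use std::collections::HashMap;
    pub use std::fmt::Debug;
}

use command_prelude::*;

#[derive(Debug)]
struct AppendDF {
    args: HashMap<String, String>,
}

fn main() {
    let cmd = AppendDF { args: HashMap::new() };
    println!("{cmd:?}");
}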
View File
@ -1,12 +1,6 @@
use crate::dataframe::values::{str_to_dtype, NuExpression, NuLazyFrame}; use crate::dataframe::values::{str_to_dtype, NuDataFrame, NuExpression, NuLazyFrame};
use nu_engine::command_prelude::*;
use super::super::values::NuDataFrame;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
record, Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::prelude::*; use polars::prelude::*;
#[derive(Clone)] #[derive(Clone)]
View File
@ -1,9 +1,5 @@
use super::super::values::NuDataFrame; use crate::dataframe::values::NuDataFrame;
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct ColumnsDF; pub struct ColumnsDF;
View File
@ -1,12 +1,5 @@
use nu_engine::CallExt; use crate::dataframe::values::{utils::convert_columns, Column, NuDataFrame};
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use super::super::values::utils::convert_columns;
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct DropDF; pub struct DropDF;
View File
@ -1,13 +1,7 @@
use nu_engine::CallExt; use crate::dataframe::values::{utils::convert_columns_string, Column, NuDataFrame};
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::prelude::UniqueKeepStrategy;
use super::super::values::utils::convert_columns_string; use polars::prelude::UniqueKeepStrategy;
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct DropDuplicates; pub struct DropDuplicates;
View File
@ -1,12 +1,5 @@
use nu_engine::CallExt; use crate::dataframe::values::{utils::convert_columns_string, Column, NuDataFrame};
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use super::super::values::utils::convert_columns_string;
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct DropNulls; pub struct DropNulls;
View File
@ -1,9 +1,5 @@
use super::super::values::{Column, NuDataFrame}; use crate::dataframe::values::{Column, NuDataFrame};
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct DataTypes; pub struct DataTypes;
View File
@ -1,10 +1,6 @@
use super::super::values::NuDataFrame; use crate::dataframe::values::NuDataFrame;
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Type,
};
use polars::{prelude::*, series::Series}; use polars::{prelude::*, series::Series};
#[derive(Clone)] #[derive(Clone)]
View File
@ -1,15 +1,8 @@
use nu_engine::CallExt; use crate::dataframe::values::{Column, NuDataFrame, NuExpression, NuLazyFrame};
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::prelude::LazyFrame; use polars::prelude::LazyFrame;
use crate::dataframe::values::{NuExpression, NuLazyFrame};
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct FilterWith; pub struct FilterWith;
View File
@ -1,10 +1,5 @@
use super::super::values::{Column, NuDataFrame, NuExpression}; use crate::dataframe::values::{Column, NuDataFrame, NuExpression};
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct FirstDF; pub struct FirstDF;
View File
@ -1,13 +1,5 @@
use nu_engine::CallExt; use crate::dataframe::values::{utils::convert_columns_string, Column, NuDataFrame};
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use crate::dataframe::values::utils::convert_columns_string;
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct GetDF; pub struct GetDF;
View File
@ -1,10 +1,5 @@
use super::super::values::{utils::DEFAULT_ROWS, Column, NuDataFrame, NuExpression}; use crate::dataframe::values::{utils::DEFAULT_ROWS, Column, NuDataFrame, NuExpression};
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct LastDF; pub struct LastDF;
View File
@ -1,10 +1,5 @@
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Value,
};
use crate::dataframe::values::NuDataFrame; use crate::dataframe::values::NuDataFrame;
use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct ListDF; pub struct ListDF;
View File
@ -1,14 +1,5 @@
use nu_engine::CallExt; use crate::dataframe::values::{utils::convert_columns_string, Column, NuDataFrame};
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Spanned, SyntaxShape, Type,
Value,
};
use crate::dataframe::values::utils::convert_columns_string;
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct MeltDF; pub struct MeltDF;
View File
@ -1,21 +1,12 @@
use crate::dataframe::values::NuSchema; use crate::dataframe::values::{NuDataFrame, NuLazyFrame, NuSchema};
use nu_engine::command_prelude::*;
use super::super::values::{NuDataFrame, NuLazyFrame};
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
};
use std::{fs::File, io::BufReader, path::PathBuf};
use polars::prelude::{ use polars::prelude::{
CsvEncoding, CsvReader, IpcReader, JsonFormat, JsonReader, LazyCsvReader, LazyFileListReader, CsvEncoding, CsvReader, IpcReader, JsonFormat, JsonReader, LazyCsvReader, LazyFileListReader,
LazyFrame, ParallelStrategy, ParquetReader, ScanArgsIpc, ScanArgsParquet, SerReader, LazyFrame, ParallelStrategy, ParquetReader, ScanArgsIpc, ScanArgsParquet, SerReader,
}; };
use polars_io::{avro::AvroReader, HiveOptions};
use polars_io::avro::AvroReader; use std::{fs::File, io::BufReader, path::PathBuf};
#[derive(Clone)] #[derive(Clone)]
pub struct OpenDataFrame; pub struct OpenDataFrame;
@ -130,7 +121,9 @@ fn command(
"jsonl" => from_jsonl(engine_state, stack, call), "jsonl" => from_jsonl(engine_state, stack, call),
"avro" => from_avro(engine_state, stack, call), "avro" => from_avro(engine_state, stack, call),
_ => Err(ShellError::FileNotFoundCustom { _ => Err(ShellError::FileNotFoundCustom {
msg: format!("{msg}. Supported values: csv, tsv, parquet, ipc, arrow, json"), msg: format!(
"{msg}. Supported values: csv, tsv, parquet, ipc, arrow, json, jsonl, avro"
),
span: blamed, span: blamed,
}), }),
}, },
@ -158,7 +151,7 @@ fn from_parquet(
low_memory: false, low_memory: false,
cloud_options: None, cloud_options: None,
use_statistics: false, use_statistics: false,
hive_partitioning: false, hive_options: HiveOptions::default(),
}; };
let df: NuLazyFrame = LazyFrame::scan_parquet(file, args) let df: NuLazyFrame = LazyFrame::scan_parquet(file, args)
@ -253,7 +246,8 @@ fn from_ipc(
cache: true, cache: true,
rechunk: false, rechunk: false,
row_index: None, row_index: None,
memmap: true, memory_map: true,
cloud_options: None,
}; };
let df: NuLazyFrame = LazyFrame::scan_ipc(file, args) let df: NuLazyFrame = LazyFrame::scan_ipc(file, args)
View File
@ -1,12 +1,8 @@
use super::super::values::NuDataFrame; use crate::dataframe::{
use crate::dataframe::values::Column; eager::SQLContext,
use crate::dataframe::{eager::SQLContext, values::NuLazyFrame}; values::{Column, NuDataFrame, NuLazyFrame},
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
}; };
use nu_engine::command_prelude::*;
// attribution: // attribution:
// sql_context.rs, and sql_expr.rs were copied from polars-sql. thank you. // sql_context.rs, and sql_expr.rs were copied from polars-sql. thank you.
@ -91,7 +87,7 @@ fn command(
let lazy = NuLazyFrame::new(false, df_sql); let lazy = NuLazyFrame::new(false, df_sql);
let eager = lazy.collect(call.head)?; let eager = lazy.collect(call.head)?;
let value = Value::custom_value(Box::new(eager), call.head); let value = Value::custom(Box::new(eager), call.head);
Ok(PipelineData::Value(value, None)) Ok(PipelineData::Value(value, None))
} }
View File
@ -1,13 +1,8 @@
use nu_engine::CallExt; use crate::dataframe::{
use nu_protocol::{ utils::extract_strings,
ast::Call, values::{Column, NuDataFrame, NuLazyFrame},
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
}; };
use nu_engine::command_prelude::*;
use crate::dataframe::{utils::extract_strings, values::NuLazyFrame};
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct RenameDF; pub struct RenameDF;
View File
@ -1,13 +1,7 @@
use nu_engine::CallExt; use crate::dataframe::values::NuDataFrame;
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type,
};
use polars::prelude::NamedFrom;
use polars::series::Series;
use super::super::values::NuDataFrame; use polars::{prelude::NamedFrom, series::Series};
#[derive(Clone)] #[derive(Clone)]
pub struct SampleDF; pub struct SampleDF;
View File
@ -1,10 +1,5 @@
use super::super::values::NuDataFrame; use crate::dataframe::values::NuDataFrame;
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
record, Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct SchemaDF; pub struct SchemaDF;
View File
@ -1,12 +1,5 @@
use nu_protocol::{ use crate::dataframe::values::{Column, NuDataFrame};
ast::Call, use nu_engine::command_prelude::*;
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
};
use crate::dataframe::values::Column;
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct ShapeDF; pub struct ShapeDF;
View File
@ -1,13 +1,5 @@
use nu_engine::CallExt; use crate::dataframe::values::{Column, NuDataFrame};
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use crate::dataframe::values::Column;
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct SliceDF; pub struct SliceDF;
View File
@ -1,11 +1,6 @@
use super::super::values::{Column, NuDataFrame}; use crate::dataframe::values::{Column, NuDataFrame};
use nu_engine::command_prelude::*;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::{ use polars::{
chunked_array::ChunkedArray, chunked_array::ChunkedArray,
prelude::{ prelude::{
View File
@ -1,15 +1,8 @@
use nu_engine::CallExt; use crate::dataframe::values::{Column, NuDataFrame};
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::prelude::DataType; use polars::prelude::DataType;
use crate::dataframe::values::Column;
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct TakeDF; pub struct TakeDF;
View File
@ -1,14 +1,8 @@
use std::{fs::File, path::PathBuf}; use crate::dataframe::values::NuDataFrame;
use nu_engine::command_prelude::*;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
};
use polars::prelude::{IpcWriter, SerWriter}; use polars::prelude::{IpcWriter, SerWriter};
use std::{fs::File, path::PathBuf};
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct ToArrow; pub struct ToArrow;
View File
@ -1,15 +1,11 @@
use std::{fs::File, path::PathBuf}; use crate::dataframe::values::NuDataFrame;
use nu_engine::command_prelude::*;
use nu_engine::CallExt; use polars_io::{
use nu_protocol::{ avro::{AvroCompression, AvroWriter},
ast::Call, SerWriter,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
}; };
use polars_io::avro::{AvroCompression, AvroWriter}; use std::{fs::File, path::PathBuf};
use polars_io::SerWriter;
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct ToAvro; pub struct ToAvro;
View File
@ -1,14 +1,8 @@
use std::{fs::File, path::PathBuf}; use crate::dataframe::values::NuDataFrame;
use nu_engine::command_prelude::*;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
};
use polars::prelude::{CsvWriter, SerWriter}; use polars::prelude::{CsvWriter, SerWriter};
use std::{fs::File, path::PathBuf};
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct ToCSV; pub struct ToCSV;
View File
@ -1,13 +1,6 @@
use crate::dataframe::values::NuSchema; use crate::dataframe::values::{Column, NuDataFrame, NuSchema};
use nu_engine::command_prelude::*;
use super::super::values::{Column, NuDataFrame};
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::prelude::*; use polars::prelude::*;
#[derive(Clone)] #[derive(Clone)]
View File
@ -1,14 +1,8 @@
use std::{fs::File, io::BufWriter, path::PathBuf}; use crate::dataframe::values::NuDataFrame;
use nu_engine::command_prelude::*;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
};
use polars::prelude::{JsonWriter, SerWriter}; use polars::prelude::{JsonWriter, SerWriter};
use std::{fs::File, io::BufWriter, path::PathBuf};
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct ToJsonLines; pub struct ToJsonLines;
View File
@ -1,13 +1,5 @@
use nu_engine::CallExt; use crate::dataframe::values::{NuDataFrame, NuExpression};
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
record, Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use crate::dataframe::values::NuExpression;
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct ToNu; pub struct ToNu;
@ -32,7 +24,7 @@ impl Command for ToNu {
.switch("tail", "shows tail rows", Some('t')) .switch("tail", "shows tail rows", Some('t'))
.input_output_types(vec![ .input_output_types(vec![
(Type::Custom("expression".into()), Type::Any), (Type::Custom("expression".into()), Type::Any),
(Type::Custom("dataframe".into()), Type::Table(vec![])), (Type::Custom("dataframe".into()), Type::table()),
]) ])
//.input_output_type(Type::Any, Type::Any) //.input_output_type(Type::Any, Type::Any)
.category(Category::Custom("dataframe".into())) .category(Category::Custom("dataframe".into()))
View File
@ -1,14 +1,8 @@
use std::{fs::File, path::PathBuf}; use crate::dataframe::values::NuDataFrame;
use nu_engine::command_prelude::*;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
};
use polars::prelude::ParquetWriter; use polars::prelude::ParquetWriter;
use std::{fs::File, path::PathBuf};
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct ToParquet; pub struct ToParquet;
View File
@ -1,11 +1,5 @@
use super::super::values::{Column, NuDataFrame}; use crate::dataframe::values::{Column, NuDataFrame, NuExpression, NuLazyFrame};
use crate::dataframe::values::{NuExpression, NuLazyFrame}; use nu_engine::command_prelude::*;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct WithColumn; pub struct WithColumn;
View File
@ -1,11 +1,5 @@
use super::super::values::NuExpression; use crate::dataframe::values::NuExpression;
use nu_engine::command_prelude::*;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
record, Category, Example, PipelineData, ShellError, Signature, SyntaxShape, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct ExprAlias; pub struct ExprAlias;
View File
@ -1,10 +1,6 @@
use crate::dataframe::values::{Column, NuDataFrame, NuExpression}; use crate::dataframe::values::{Column, NuDataFrame, NuExpression};
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::prelude::arg_where; use polars::prelude::arg_where;
#[derive(Clone)] #[derive(Clone)]
View File
@ -1,10 +1,6 @@
use crate::dataframe::values::NuExpression; use crate::dataframe::values::NuExpression;
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
record, Category, Example, PipelineData, ShellError, Signature, SyntaxShape, Type, Value,
};
use polars::prelude::col; use polars::prelude::col;
#[derive(Clone)] #[derive(Clone)]
View File
@ -1,10 +1,6 @@
use crate::dataframe::values::{Column, NuDataFrame, NuExpression}; use crate::dataframe::values::{Column, NuDataFrame, NuExpression};
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::prelude::concat_str; use polars::prelude::concat_str;
#[derive(Clone)] #[derive(Clone)]
View File
@ -1,14 +1,7 @@
use super::super::values::NuExpression; use crate::dataframe::values::{Column, NuDataFrame, NuExpression};
use crate::dataframe::values::{Column, NuDataFrame};
use chrono::{DateTime, FixedOffset}; use chrono::{DateTime, FixedOffset};
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Spanned, SyntaxShape, Type,
Value,
};
use polars::{ use polars::{
datatypes::{DataType, TimeUnit}, datatypes::{DataType, TimeUnit},
prelude::NamedFrom, prelude::NamedFrom,
View File
@ -2,11 +2,7 @@
/// All of these expressions have an identical body and only require /// All of these expressions have an identical body and only require
/// to have a change in the name, description and expression function /// to have a change in the name, description and expression function
use crate::dataframe::values::{Column, NuDataFrame, NuExpression, NuLazyFrame}; use crate::dataframe::values::{Column, NuDataFrame, NuExpression, NuLazyFrame};
use nu_protocol::{ use nu_engine::command_prelude::*;
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
};
// The structs defined in this file are structs that form part of other commands // The structs defined in this file are structs that form part of other commands
// since they share a similar name // since they share a similar name
View File
@ -1,10 +1,6 @@
use crate::dataframe::values::{Column, NuDataFrame, NuExpression}; use crate::dataframe::values::{Column, NuDataFrame, NuExpression};
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::prelude::{lit, DataType}; use polars::prelude::{lit, DataType};
#[derive(Clone)] #[derive(Clone)]
View File
@ -1,10 +1,5 @@
use crate::dataframe::values::NuExpression; use crate::dataframe::values::NuExpression;
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
record, Category, Example, PipelineData, ShellError, Signature, SyntaxShape, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct ExprLit; pub struct ExprLit;
View File
@ -1,10 +1,5 @@
use crate::dataframe::values::{Column, NuDataFrame, NuExpression, NuWhen}; use crate::dataframe::values::{Column, NuDataFrame, NuExpression, NuWhen};
use nu_engine::CallExt; use nu_engine::command_prelude::*;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct ExprOtherwise; pub struct ExprOtherwise;
Some files were not shown because too many files have changed in this diff.