Merge branch 'main' into plugin-version-reporting

Commit 1899a4f058 by Devyn Cairns, 2024-06-21 03:39:34 -07:00
320 changed files with 12,385 additions and 6,937 deletions


@ -18,6 +18,14 @@ updates:
ignore: ignore:
- dependency-name: "*" - dependency-name: "*"
update-types: ["version-update:semver-patch"] update-types: ["version-update:semver-patch"]
groups:
# Only update polars as a whole as there are many subcrates that need to
# be updated at once. We explicitly depend on some of them, so batch their
# updates to not take up dependabot PR slots with dysfunctional PRs
polars:
patterns:
- "polars"
- "polars-*"
- package-ecosystem: "github-actions" - package-ecosystem: "github-actions"
directory: "/" directory: "/"
schedule: schedule:


@ -19,7 +19,7 @@ jobs:
# Prevent sudden announcement of a new advisory from failing ci: # Prevent sudden announcement of a new advisory from failing ci:
continue-on-error: true continue-on-error: true
steps: steps:
- uses: actions/checkout@v4.1.5 - uses: actions/checkout@v4.1.7
- uses: rustsec/audit-check@v1.4.1 - uses: rustsec/audit-check@v1.4.1
with: with:
token: ${{ secrets.GITHUB_TOKEN }} token: ${{ secrets.GITHUB_TOKEN }}


@ -33,10 +33,10 @@ jobs:
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v4.1.5 - uses: actions/checkout@v4.1.7
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
- name: cargo fmt - name: cargo fmt
run: cargo fmt --all -- --check run: cargo fmt --all -- --check
@ -66,10 +66,10 @@ jobs:
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v4.1.5 - uses: actions/checkout@v4.1.7
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
- name: Tests - name: Tests
run: cargo test --workspace --profile ci --exclude nu_plugin_* ${{ matrix.default-flags }} run: cargo test --workspace --profile ci --exclude nu_plugin_* ${{ matrix.default-flags }}
@ -95,10 +95,10 @@ jobs:
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v4.1.5 - uses: actions/checkout@v4.1.7
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
- name: Install Nushell - name: Install Nushell
run: cargo install --path . --locked --no-default-features run: cargo install --path . --locked --no-default-features
@ -146,10 +146,10 @@ jobs:
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v4.1.5 - uses: actions/checkout@v4.1.7
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
- name: Clippy - name: Clippy
run: cargo clippy --package nu_plugin_* -- $CLIPPY_OPTIONS run: cargo clippy --package nu_plugin_* -- $CLIPPY_OPTIONS


@ -27,7 +27,7 @@ jobs:
# if: github.repository == 'nushell/nightly' # if: github.repository == 'nushell/nightly'
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4.1.5 uses: actions/checkout@v4.1.7
if: github.repository == 'nushell/nightly' if: github.repository == 'nushell/nightly'
with: with:
ref: main ref: main
@ -36,7 +36,7 @@ jobs:
token: ${{ secrets.WORKFLOW_TOKEN }} token: ${{ secrets.WORKFLOW_TOKEN }}
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.10 uses: hustcer/setup-nu@v3.11
if: github.repository == 'nushell/nightly' if: github.repository == 'nushell/nightly'
with: with:
version: 0.93.0 version: 0.93.0
@ -112,7 +112,7 @@ jobs:
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
steps: steps:
- uses: actions/checkout@v4.1.5 - uses: actions/checkout@v4.1.7
with: with:
ref: main ref: main
fetch-depth: 0 fetch-depth: 0
@ -122,13 +122,13 @@ jobs:
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135` # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
with: with:
rustflags: '' rustflags: ''
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.10 uses: hustcer/setup-nu@v3.11
with: with:
version: 0.93.0 version: 0.93.0
@ -181,12 +181,12 @@ jobs:
- name: Waiting for Release - name: Waiting for Release
run: sleep 1800 run: sleep 1800
- uses: actions/checkout@v4.1.5 - uses: actions/checkout@v4.1.7
with: with:
ref: main ref: main
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.10 uses: hustcer/setup-nu@v3.11
with: with:
version: 0.93.0 version: 0.93.0


@ -62,21 +62,21 @@ jobs:
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
steps: steps:
- uses: actions/checkout@v4.1.5 - uses: actions/checkout@v4.1.7
- name: Update Rust Toolchain Target - name: Update Rust Toolchain Target
run: | run: |
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain - name: Setup Rust toolchain
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135` # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
with: with:
cache: false cache: false
rustflags: '' rustflags: ''
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.10 uses: hustcer/setup-nu@v3.11
with: with:
version: 0.93.0 version: 0.93.0


@ -7,7 +7,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout Actions Repository - name: Checkout Actions Repository
uses: actions/checkout@v4.1.5 uses: actions/checkout@v4.1.7
- name: Check spelling - name: Check spelling
uses: crate-ci/typos@v1.21.0 uses: crate-ci/typos@v1.22.7

CITATION.cff (new file, 26 lines)

@ -0,0 +1,26 @@
cff-version: 1.2.0
title: 'Nushell'
message: >-
If you use this software and wish to cite it,
you can use the metadata from this file.
type: software
authors:
- name: "The Nushell Project Team"
identifiers:
- type: url
value: 'https://github.com/nushell/nushell'
description: Repository
repository-code: 'https://github.com/nushell/nushell'
url: 'https://www.nushell.sh/'
abstract: >-
The goal of the Nushell project is to take the Unix
philosophy of shells, where pipes connect simple commands
together, and bring it to the modern style of development.
Thus, rather than being either a shell, or a programming
language, Nushell connects both by bringing a rich
programming language and a full-featured shell together
into one package.
keywords:
- nushell
- shell
license: MIT

Cargo.lock (generated) — 351 lines changed

@ -377,7 +377,7 @@ dependencies = [
"bitflags 2.5.0", "bitflags 2.5.0",
"cexpr", "cexpr",
"clang-sys", "clang-sys",
"itertools 0.11.0", "itertools 0.12.1",
"lazy_static", "lazy_static",
"lazycell", "lazycell",
"proc-macro2", "proc-macro2",
@ -478,17 +478,6 @@ version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ada7f35ca622a86a4d6c27be2633fc6c243ecc834859628fcce0681d8e76e1c8" checksum = "ada7f35ca622a86a4d6c27be2633fc6c243ecc834859628fcce0681d8e76e1c8"
[[package]]
name = "brotli"
version = "3.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d640d25bc63c50fb1f0b545ffd80207d2e10a4c965530809b40ba3386825c391"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
"brotli-decompressor 2.5.1",
]
[[package]] [[package]]
name = "brotli" name = "brotli"
version = "5.0.0" version = "5.0.0"
@ -497,17 +486,7 @@ checksum = "19483b140a7ac7174d34b5a581b406c64f84da5409d3e09cf4fff604f9270e67"
dependencies = [ dependencies = [
"alloc-no-stdlib", "alloc-no-stdlib",
"alloc-stdlib", "alloc-stdlib",
"brotli-decompressor 4.0.0", "brotli-decompressor",
]
[[package]]
name = "brotli-decompressor"
version = "2.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e2e4afe60d7dd600fdd3de8d0f08c2b7ec039712e3b6137ff98b7004e82de4f"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
] ]
[[package]] [[package]]
@ -871,7 +850,7 @@ checksum = "b34115915337defe99b2aff5c2ce6771e5fbc4079f4b506301f5cf394c8452f7"
dependencies = [ dependencies = [
"crossterm", "crossterm",
"strum", "strum",
"strum_macros 0.26.2", "strum_macros",
"unicode-width", "unicode-width",
] ]
@ -941,6 +920,15 @@ dependencies = [
"unicode-xid", "unicode-xid",
] ]
[[package]]
name = "convert_case"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
dependencies = [
"unicode-segmentation",
]
[[package]] [[package]]
name = "core-foundation" name = "core-foundation"
version = "0.9.4" version = "0.9.4"
@ -1239,6 +1227,12 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
[[package]]
name = "doctest-file"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aac81fa3e28d21450aa4d2ac065992ba96a1d7303efbce51a95f4fd175b67562"
[[package]] [[package]]
name = "downcast-rs" name = "downcast-rs"
version = "1.2.1" version = "1.2.1"
@ -1295,6 +1289,9 @@ name = "either"
version = "1.11.0" version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2" checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2"
dependencies = [
"serde",
]
[[package]] [[package]]
name = "eml-parser" name = "eml-parser"
@ -1696,9 +1693,9 @@ checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253"
[[package]] [[package]]
name = "git2" name = "git2"
version = "0.18.3" version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "232e6a7bfe35766bf715e55a88b39a700596c0ccfd88cd3680b4cdb40d66ef70" checksum = "b903b73e45dc0c6c596f2d37eccece7c1c8bb6e4407b001096387c63d0d93724"
dependencies = [ dependencies = [
"bitflags 2.5.0", "bitflags 2.5.0",
"libc", "libc",
@ -1794,6 +1791,7 @@ dependencies = [
"ahash 0.8.11", "ahash 0.8.11",
"allocator-api2", "allocator-api2",
"rayon", "rayon",
"serde",
] ]
[[package]] [[package]]
@ -2034,10 +2032,11 @@ dependencies = [
[[package]] [[package]]
name = "interprocess" name = "interprocess"
version = "2.1.0" version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b4d0250d41da118226e55b3d50ca3f0d9e0a0f6829b92f543ac0054aeea1572" checksum = "67bafc2f5dbdad79a6d925649758d5472647b416028099f0b829d1b67fdd47d3"
dependencies = [ dependencies = [
"doctest-file",
"libc", "libc",
"recvmsg", "recvmsg",
"widestring", "widestring",
@ -2296,9 +2295,9 @@ dependencies = [
[[package]] [[package]]
name = "libgit2-sys" name = "libgit2-sys"
version = "0.16.2+1.7.2" version = "0.17.0+1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee4126d8b4ee5c9d9ea891dd875cfdc1e9d0950437179104b183d7d8a74d24e8" checksum = "10472326a8a6477c3c20a64547b0059e4b0d086869eee31e6d7da728a8eb7224"
dependencies = [ dependencies = [
"cc", "cc",
"libc", "libc",
@ -2326,9 +2325,9 @@ checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
[[package]] [[package]]
name = "libmimalloc-sys" name = "libmimalloc-sys"
version = "0.1.37" version = "0.1.38"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81eb4061c0582dedea1cbc7aff2240300dd6982e0239d1c99e65c1dbf4a30ba7" checksum = "0e7bb23d733dfcc8af652a78b7bf232f0e967710d044732185e561e47c0336b6"
dependencies = [ dependencies = [
"cc", "cc",
"libc", "libc",
@ -2582,9 +2581,9 @@ dependencies = [
[[package]] [[package]]
name = "mimalloc" name = "mimalloc"
version = "0.1.41" version = "0.1.42"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f41a2280ded0da56c8cf898babb86e8f10651a34adcfff190ae9a1159c6908d" checksum = "e9186d86b79b52f4a77af65604b51225e8db1d6ee7e3f41aec1e40829c71a176"
dependencies = [ dependencies = [
"libmimalloc-sys", "libmimalloc-sys",
] ]
@ -2770,7 +2769,7 @@ dependencies = [
[[package]] [[package]]
name = "nu" name = "nu"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"assert_cmd", "assert_cmd",
"crossterm", "crossterm",
@ -2823,7 +2822,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-cli" name = "nu-cli"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"chrono", "chrono",
"crossterm", "crossterm",
@ -2846,7 +2845,6 @@ dependencies = [
"nu-test-support", "nu-test-support",
"nu-utils", "nu-utils",
"once_cell", "once_cell",
"pathdiff",
"percent-encoding", "percent-encoding",
"reedline", "reedline",
"rstest", "rstest",
@ -2859,7 +2857,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-cmd-base" name = "nu-cmd-base"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"indexmap", "indexmap",
"miette", "miette",
@ -2871,7 +2869,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-cmd-extra" name = "nu-cmd-extra"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"fancy-regex", "fancy-regex",
"heck 0.5.0", "heck 0.5.0",
@ -2896,7 +2894,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-cmd-lang" name = "nu-cmd-lang"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"itertools 0.12.1", "itertools 0.12.1",
"nu-engine", "nu-engine",
@ -2908,7 +2906,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-cmd-plugin" name = "nu-cmd-plugin"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"itertools 0.12.1", "itertools 0.12.1",
"nu-engine", "nu-engine",
@ -2919,7 +2917,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-color-config" name = "nu-color-config"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"nu-ansi-term", "nu-ansi-term",
"nu-engine", "nu-engine",
@ -2931,12 +2929,12 @@ dependencies = [
[[package]] [[package]]
name = "nu-command" name = "nu-command"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"alphanumeric-sort", "alphanumeric-sort",
"base64 0.22.1", "base64 0.22.1",
"bracoxide", "bracoxide",
"brotli 5.0.0", "brotli",
"byteorder", "byteorder",
"bytesize", "bytesize",
"calamine", "calamine",
@ -3013,6 +3011,7 @@ dependencies = [
"sha2", "sha2",
"sysinfo", "sysinfo",
"tabled", "tabled",
"tempfile",
"terminal_size", "terminal_size",
"titlecase", "titlecase",
"toml 0.8.12", "toml 0.8.12",
@ -3028,7 +3027,7 @@ dependencies = [
"uu_mv", "uu_mv",
"uu_uname", "uu_uname",
"uu_whoami", "uu_whoami",
"uucore 0.0.25", "uucore",
"uuid", "uuid",
"v_htmlescape", "v_htmlescape",
"wax", "wax",
@ -3037,9 +3036,20 @@ dependencies = [
"winreg", "winreg",
] ]
[[package]]
name = "nu-derive-value"
version = "0.94.3"
dependencies = [
"convert_case",
"proc-macro-error",
"proc-macro2",
"quote",
"syn 2.0.60",
]
[[package]] [[package]]
name = "nu-engine" name = "nu-engine"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"nu-glob", "nu-glob",
"nu-path", "nu-path",
@ -3049,7 +3059,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-explore" name = "nu-explore"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"ansi-str", "ansi-str",
"anyhow", "anyhow",
@ -3074,14 +3084,14 @@ dependencies = [
[[package]] [[package]]
name = "nu-glob" name = "nu-glob"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"doc-comment", "doc-comment",
] ]
[[package]] [[package]]
name = "nu-json" name = "nu-json"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"linked-hash-map", "linked-hash-map",
"num-traits", "num-traits",
@ -3091,7 +3101,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-lsp" name = "nu-lsp"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"assert-json-diff", "assert-json-diff",
"crossbeam-channel", "crossbeam-channel",
@ -3112,7 +3122,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-parser" name = "nu-parser"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"bytesize", "bytesize",
"chrono", "chrono",
@ -3128,7 +3138,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-path" name = "nu-path"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"dirs-next", "dirs-next",
"omnipath", "omnipath",
@ -3137,7 +3147,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-plugin" name = "nu-plugin"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"log", "log",
"nix", "nix",
@ -3152,7 +3162,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-plugin-core" name = "nu-plugin-core"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"interprocess", "interprocess",
"log", "log",
@ -3166,7 +3176,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-plugin-engine" name = "nu-plugin-engine"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"log", "log",
"nu-engine", "nu-engine",
@ -3181,7 +3191,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-plugin-protocol" name = "nu-plugin-protocol"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"bincode", "bincode",
"nu-protocol", "nu-protocol",
@ -3193,7 +3203,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-plugin-test-support" name = "nu-plugin-test-support"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"nu-ansi-term", "nu-ansi-term",
"nu-cmd-lang", "nu-cmd-lang",
@ -3211,7 +3221,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-pretty-hex" name = "nu-pretty-hex"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"heapless", "heapless",
"nu-ansi-term", "nu-ansi-term",
@ -3220,17 +3230,19 @@ dependencies = [
[[package]] [[package]]
name = "nu-protocol" name = "nu-protocol"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"brotli 5.0.0", "brotli",
"byte-unit", "byte-unit",
"chrono", "chrono",
"chrono-humanize", "chrono-humanize",
"convert_case",
"fancy-regex", "fancy-regex",
"indexmap", "indexmap",
"lru", "lru",
"miette", "miette",
"nix", "nix",
"nu-derive-value",
"nu-path", "nu-path",
"nu-system", "nu-system",
"nu-test-support", "nu-test-support",
@ -3243,7 +3255,7 @@ dependencies = [
"serde", "serde",
"serde_json", "serde_json",
"strum", "strum",
"strum_macros 0.26.2", "strum_macros",
"tempfile", "tempfile",
"thiserror", "thiserror",
"typetag", "typetag",
@ -3251,7 +3263,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-std" name = "nu-std"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"log", "log",
"miette", "miette",
@ -3262,9 +3274,10 @@ dependencies = [
[[package]] [[package]]
name = "nu-system" name = "nu-system"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"chrono", "chrono",
"itertools 0.12.1",
"libc", "libc",
"libproc", "libproc",
"log", "log",
@ -3279,7 +3292,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-table" name = "nu-table"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"fancy-regex", "fancy-regex",
"nu-ansi-term", "nu-ansi-term",
@ -3293,7 +3306,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-term-grid" name = "nu-term-grid"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"nu-utils", "nu-utils",
"unicode-width", "unicode-width",
@ -3301,7 +3314,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-test-support" name = "nu-test-support"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"nu-glob", "nu-glob",
"nu-path", "nu-path",
@ -3313,7 +3326,7 @@ dependencies = [
[[package]] [[package]]
name = "nu-utils" name = "nu-utils"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"crossterm_winapi", "crossterm_winapi",
"log", "log",
@ -3339,7 +3352,7 @@ dependencies = [
[[package]] [[package]]
name = "nu_plugin_example" name = "nu_plugin_example"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"nu-cmd-lang", "nu-cmd-lang",
"nu-plugin", "nu-plugin",
@ -3349,7 +3362,7 @@ dependencies = [
[[package]] [[package]]
name = "nu_plugin_formats" name = "nu_plugin_formats"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"eml-parser", "eml-parser",
"ical", "ical",
@ -3362,7 +3375,7 @@ dependencies = [
[[package]] [[package]]
name = "nu_plugin_gstat" name = "nu_plugin_gstat"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"git2", "git2",
"nu-plugin", "nu-plugin",
@ -3371,7 +3384,7 @@ dependencies = [
[[package]] [[package]]
name = "nu_plugin_inc" name = "nu_plugin_inc"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"nu-plugin", "nu-plugin",
"nu-protocol", "nu-protocol",
@ -3380,12 +3393,13 @@ dependencies = [
[[package]] [[package]]
name = "nu_plugin_polars" name = "nu_plugin_polars"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"chrono", "chrono",
"chrono-tz 0.9.0", "chrono-tz 0.9.0",
"fancy-regex", "fancy-regex",
"indexmap", "indexmap",
"mimalloc",
"nu-cmd-lang", "nu-cmd-lang",
"nu-command", "nu-command",
"nu-engine", "nu-engine",
@ -3398,12 +3412,11 @@ dependencies = [
"polars", "polars",
"polars-arrow", "polars-arrow",
"polars-io", "polars-io",
"polars-lazy",
"polars-ops", "polars-ops",
"polars-plan", "polars-plan",
"polars-utils", "polars-utils",
"serde", "serde",
"sqlparser 0.45.0", "sqlparser 0.47.0",
"tempfile", "tempfile",
"typetag", "typetag",
"uuid", "uuid",
@ -3411,7 +3424,7 @@ dependencies = [
[[package]] [[package]]
name = "nu_plugin_query" name = "nu_plugin_query"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"gjson", "gjson",
"nu-plugin", "nu-plugin",
@ -3423,7 +3436,7 @@ dependencies = [
[[package]] [[package]]
name = "nu_plugin_stress_internals" name = "nu_plugin_stress_internals"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"interprocess", "interprocess",
"serde", "serde",
@ -3549,7 +3562,7 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
[[package]] [[package]]
name = "nuon" name = "nuon"
version = "0.93.1" version = "0.94.3"
dependencies = [ dependencies = [
"chrono", "chrono",
"fancy-regex", "fancy-regex",
@ -3730,9 +3743,9 @@ dependencies = [
[[package]] [[package]]
name = "os_pipe" name = "os_pipe"
version = "1.1.5" version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57119c3b893986491ec9aa85056780d3a0f3cf4da7cc09dd3650dbd6c6738fb9" checksum = "29d73ba8daf8fac13b0501d1abeddcfe21ba7401ada61a819144b6c2a4f32209"
dependencies = [ dependencies = [
"libc", "libc",
"windows-sys 0.52.0", "windows-sys 0.52.0",
@ -4013,9 +4026,9 @@ dependencies = [
[[package]] [[package]]
name = "polars" name = "polars"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ea21b858b16b9c0e17a12db2800d11aa5b4bd182be6b3022eb537bbfc1f2db5" checksum = "e148396dca5496566880fa19374f3f789a29db94e3eb458afac1497b4bac5442"
dependencies = [ dependencies = [
"getrandom", "getrandom",
"polars-arrow", "polars-arrow",
@ -4033,9 +4046,9 @@ dependencies = [
[[package]] [[package]]
name = "polars-arrow" name = "polars-arrow"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "725b09f2b5ef31279b66e27bbab63c58d49d8f6696b66b1f46c7eaab95e80f75" checksum = "1cb5e11cd0752ae022fa6ca3afa50a14b0301b7ce53c0135828fbb0f4fa8303e"
dependencies = [ dependencies = [
"ahash 0.8.11", "ahash 0.8.11",
"atoi", "atoi",
@ -4081,9 +4094,9 @@ dependencies = [
[[package]] [[package]]
name = "polars-compute" name = "polars-compute"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a796945b14b14fbb79b91ef0406e6fddca2be636e889f81ea5d6ee7d36efb4fe" checksum = "89fc4578f826234cdecb782952aa9c479dc49373f81694a7b439c70b6f609ba0"
dependencies = [ dependencies = [
"bytemuck", "bytemuck",
"either", "either",
@ -4097,9 +4110,9 @@ dependencies = [
[[package]] [[package]]
name = "polars-core" name = "polars-core"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "465f70d3e96b6d0b1a43c358ba451286b8c8bd56696feff020d65702aa33e35c" checksum = "e490c6bace1366a558feea33d1846f749a8ca90bd72a6748752bc65bb4710b2a"
dependencies = [ dependencies = [
"ahash 0.8.11", "ahash 0.8.11",
"bitflags 2.5.0", "bitflags 2.5.0",
@ -4131,9 +4144,9 @@ dependencies = [
[[package]] [[package]]
name = "polars-error" name = "polars-error"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5224d5d05e6b8a6f78b75951ae1b5f82c8ab1979e11ffaf5fd41941e3d5b0757" checksum = "08888f58e61599b00f5ea0c2ccdc796b54b9859559cc0d4582733509451fa01a"
dependencies = [ dependencies = [
"avro-schema", "avro-schema",
"polars-arrow-format", "polars-arrow-format",
@ -4143,10 +4156,30 @@ dependencies = [
] ]
[[package]] [[package]]
name = "polars-io" name = "polars-expr"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2c8589e418cbe4a48228d64b2a8a40284a82ec3c98817c0c2bcc0267701338b" checksum = "4173591920fe56ad55af025f92eb0d08421ca85705c326a640c43856094e3484"
dependencies = [
"ahash 0.8.11",
"bitflags 2.5.0",
"once_cell",
"polars-arrow",
"polars-core",
"polars-io",
"polars-ops",
"polars-plan",
"polars-time",
"polars-utils",
"rayon",
"smartstring",
]
[[package]]
name = "polars-io"
version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5842896aea46d975b425d63f156f412aed3cfde4c257b64fb1f43ceea288074e"
dependencies = [ dependencies = [
"ahash 0.8.11", "ahash 0.8.11",
"async-trait", "async-trait",
@ -4185,9 +4218,9 @@ dependencies = [
[[package]] [[package]]
name = "polars-json" name = "polars-json"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81224492a649a12b668480c0cf219d703f432509765d2717e72fe32ad16fc701" checksum = "160cbad0145b93ac6a88639aadfa6f7d7c769d05a8674f9b7e895b398cae9901"
dependencies = [ dependencies = [
"ahash 0.8.11", "ahash 0.8.11",
"chrono", "chrono",
@ -4206,9 +4239,9 @@ dependencies = [
[[package]] [[package]]
name = "polars-lazy" name = "polars-lazy"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89b2632b1af668e2058d5f8f916d8fbde3cac63d03ae29a705f598e41dcfeb7f" checksum = "e805ea2ebbc6b7749b0afb31b7fc5d32b42b57ba29b984549d43d3a16114c4a5"
dependencies = [ dependencies = [
"ahash 0.8.11", "ahash 0.8.11",
"bitflags 2.5.0", "bitflags 2.5.0",
@ -4216,6 +4249,7 @@ dependencies = [
"once_cell", "once_cell",
"polars-arrow", "polars-arrow",
"polars-core", "polars-core",
"polars-expr",
"polars-io", "polars-io",
"polars-json", "polars-json",
"polars-ops", "polars-ops",
@ -4230,13 +4264,13 @@ dependencies = [
[[package]] [[package]]
name = "polars-ops" name = "polars-ops"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "efdbdb4d9a92109bc2e0ce8e17af5ae8ab643bb5b7ee9d1d74f0aeffd1fbc95f" checksum = "7b0aed7e169c81b98457641cf82b251f52239a668916c2e683abd1f38df00d58"
dependencies = [ dependencies = [
"ahash 0.8.11", "ahash 0.8.11",
"argminmax", "argminmax",
"base64 0.21.7", "base64 0.22.1",
"bytemuck", "bytemuck",
"chrono", "chrono",
"chrono-tz 0.8.6", "chrono-tz 0.8.6",
@ -4266,14 +4300,14 @@ dependencies = [
[[package]] [[package]]
name = "polars-parquet" name = "polars-parquet"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b421d2196f786fdfe162db614c8485f8308fe41575d4de634a39bbe460d1eb6a" checksum = "c70670a9e51cac66d0e77fd20b5cc957dbcf9f2660d410633862bb72f846d5b8"
dependencies = [ dependencies = [
"ahash 0.8.11", "ahash 0.8.11",
"async-stream", "async-stream",
"base64 0.21.7", "base64 0.22.1",
"brotli 3.5.0", "brotli",
"ethnum", "ethnum",
"flate2", "flate2",
"futures", "futures",
@ -4292,9 +4326,9 @@ dependencies = [
[[package]] [[package]]
name = "polars-pipe" name = "polars-pipe"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48700f1d5bd56a15451e581f465c09541492750360f18637b196f995470a015c" checksum = "0a40ae1b3c74ee07e2d1f7cbf56c5d6e15969e45d9b6f0903bd2acaf783ba436"
dependencies = [ dependencies = [
"crossbeam-channel", "crossbeam-channel",
"crossbeam-queue", "crossbeam-queue",
@ -4304,6 +4338,7 @@ dependencies = [
"polars-arrow", "polars-arrow",
"polars-compute", "polars-compute",
"polars-core", "polars-core",
"polars-expr",
"polars-io", "polars-io",
"polars-ops", "polars-ops",
"polars-plan", "polars-plan",
@ -4317,13 +4352,14 @@ dependencies = [
[[package]] [[package]]
name = "polars-plan" name = "polars-plan"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fb8e2302e20c44defd5be8cad9c96e75face63c3a5f609aced8c4ec3b3ac97d" checksum = "8daa3541ae7e9af311a4389bc2b21f83349c34c723cc67fa524cdefdaa172d90"
dependencies = [ dependencies = [
"ahash 0.8.11", "ahash 0.8.11",
"bytemuck", "bytemuck",
"chrono-tz 0.8.6", "chrono-tz 0.8.6",
"either",
"hashbrown 0.14.5", "hashbrown 0.14.5",
"once_cell", "once_cell",
"percent-encoding", "percent-encoding",
@ -4340,15 +4376,15 @@ dependencies = [
"regex", "regex",
"serde", "serde",
"smartstring", "smartstring",
"strum_macros 0.25.3", "strum_macros",
"version_check", "version_check",
] ]
[[package]] [[package]]
name = "polars-row" name = "polars-row"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a515bdc68c2ae3702e3de70d89601f3b71ca8137e282a226dddb53ee4bacfa2e" checksum = "deb285f2f3a65b00dd06bef16bb9f712dbb5478f941dab5cf74f9f016d382e40"
dependencies = [ dependencies = [
"bytemuck", "bytemuck",
"polars-arrow", "polars-arrow",
@ -4358,11 +4394,12 @@ dependencies = [
[[package]] [[package]]
name = "polars-sql" name = "polars-sql"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b4bb7cc1c04c3023d1953b2f1dec50515e8fd8169a5a2bf4967b3b082232db7" checksum = "a724f699d194cb02c25124d3832f7d4d77f387f1a89ee42f6b9e88ec561d4ad9"
dependencies = [ dependencies = [
"hex", "hex",
"once_cell",
"polars-arrow", "polars-arrow",
"polars-core", "polars-core",
"polars-error", "polars-error",
@ -4376,11 +4413,12 @@ dependencies = [
[[package]] [[package]]
name = "polars-time" name = "polars-time"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "efc18e3ad92eec55db89d88f16c22d436559ba7030cf76f86f6ed7a754b673f1" checksum = "87ebec238d8b6200d9f0c3ce411c8441e950bd5a7df7806b8172d06c1d5a4b97"
dependencies = [ dependencies = [
"atoi", "atoi",
"bytemuck",
"chrono", "chrono",
"chrono-tz 0.8.6", "chrono-tz 0.8.6",
"now", "now",
@ -4397,9 +4435,9 @@ dependencies = [
[[package]] [[package]]
name = "polars-utils" name = "polars-utils"
version = "0.39.2" version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c760b6c698cfe2fbbbd93d6cfb408db14ececfe1d92445dae2229ce1b5b21ae8" checksum = "34e1a907c63abf71e5f21467e2e4ff748896c28196746f631c6c25512ec6102c"
dependencies = [ dependencies = [
"ahash 0.8.11", "ahash 0.8.11",
"bytemuck", "bytemuck",
@ -4819,7 +4857,8 @@ dependencies = [
[[package]] [[package]]
name = "reedline" name = "reedline"
version = "0.32.0" version = "0.32.0"
source = "git+https://github.com/nushell/reedline?branch=main#a580ea56d4e5a889468b2969d2a1534379504ab6" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abf59e4c97b5049ba96b052cdb652368305a2eddcbce9bf1c16f9d003139eeea"
dependencies = [ dependencies = [
"arboard", "arboard",
"chrono", "chrono",
@ -4832,7 +4871,7 @@ dependencies = [
"serde_json", "serde_json",
"strip-ansi-escapes", "strip-ansi-escapes",
"strum", "strum",
"strum_macros 0.26.2", "strum_macros",
"thiserror", "thiserror",
"unicode-segmentation", "unicode-segmentation",
"unicode-width", "unicode-width",
@ -5402,9 +5441,9 @@ dependencies = [
[[package]] [[package]]
name = "shadow-rs" name = "shadow-rs"
version = "0.27.1" version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7960cbd6ba74691bb15e7ebf97f7136bd02d1115f5695a58c1f31d5645750128" checksum = "1d75516bdaee8f640543ad1f6e292448c23ce57143f812c3736ab4b0874383df"
dependencies = [ dependencies = [
"const_format", "const_format",
"is_debug", "is_debug",
@ -5560,9 +5599,9 @@ dependencies = [
[[package]] [[package]]
name = "sqlparser" name = "sqlparser"
version = "0.45.0" version = "0.47.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7bbffee862a796d67959a89859d6b1046bb5016d63e23835ad0da182777bbe0" checksum = "295e9930cd7a97e58ca2a070541a3ca502b17f5d1fa7157376d0fabd85324f25"
dependencies = [ dependencies = [
"log", "log",
] ]
@ -5676,20 +5715,7 @@ version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29" checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29"
dependencies = [ dependencies = [
"strum_macros 0.26.2", "strum_macros",
]
[[package]]
name = "strum_macros"
version = "0.25.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0"
dependencies = [
"heck 0.4.1",
"proc-macro2",
"quote",
"rustversion",
"syn 2.0.60",
] ]
[[package]] [[package]]
@ -6066,6 +6092,7 @@ version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9dd1545e8208b4a5af1aa9bbd0b4cf7e9ea08fabc5d0a5c67fcaafa17433aa3" checksum = "e9dd1545e8208b4a5af1aa9bbd0b4cf7e9ea08fabc5d0a5c67fcaafa17433aa3"
dependencies = [ dependencies = [
"indexmap",
"serde", "serde",
"serde_spanned", "serde_spanned",
"toml_datetime", "toml_datetime",
@ -6354,93 +6381,77 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]] [[package]]
name = "uu_cp" name = "uu_cp"
version = "0.0.25" version = "0.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcbe045dc92209114afdfd366bd18f7b95dbf999f3eaa85ad6dca910b0be3d56" checksum = "c31fc5c95f7668999e129464a29e9080f69ba01ccf7a0ae43ff2cfdb15baa340"
dependencies = [ dependencies = [
"clap", "clap",
"filetime", "filetime",
"indicatif", "indicatif",
"libc", "libc",
"quick-error 2.0.1", "quick-error 2.0.1",
"uucore 0.0.26", "uucore",
"walkdir", "walkdir",
"xattr", "xattr",
] ]
[[package]] [[package]]
name = "uu_mkdir" name = "uu_mkdir"
version = "0.0.25" version = "0.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "040aa4584036b2f65e05387b0ea9ac468afce1db325743ce5f350689fd9ce4ae" checksum = "496d95e0e3121e4d424ba62019eb84a6f1102213ca8ca16c0a2f8c652c7236c3"
dependencies = [ dependencies = [
"clap", "clap",
"uucore 0.0.26", "uucore",
] ]
[[package]] [[package]]
name = "uu_mktemp" name = "uu_mktemp"
version = "0.0.25" version = "0.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f240a99c36d768153874d198c43605a45c86996b576262689a0f18248cc3bc57" checksum = "a28a0d9744bdc28ceaf13f70b959bacded91aedfd008402d72fa1e3224158653"
dependencies = [ dependencies = [
"clap", "clap",
"rand", "rand",
"tempfile", "tempfile",
"uucore 0.0.26", "uucore",
] ]
[[package]] [[package]]
name = "uu_mv" name = "uu_mv"
version = "0.0.25" version = "0.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c99fd7c75e6e85553c92537314be3d9a64b4927051aa1608513feea2f933022" checksum = "53680908b01c5ac3cc0ee8a376de3e51a36dde2c5a5227a115a3d0977cc4539b"
dependencies = [ dependencies = [
"clap", "clap",
"fs_extra", "fs_extra",
"indicatif", "indicatif",
"uucore 0.0.26", "uucore",
] ]
[[package]] [[package]]
name = "uu_uname" name = "uu_uname"
version = "0.0.25" version = "0.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5951832d73199636bde6c0d61cf960932b3c4450142c290375bc10c7abed6db5" checksum = "a7f4125fb4f286313bca8f222abaefe39db54d65179ea788c91ebd3162345f4e"
dependencies = [ dependencies = [
"clap", "clap",
"platform-info", "platform-info",
"uucore 0.0.26", "uucore",
] ]
[[package]] [[package]]
name = "uu_whoami" name = "uu_whoami"
version = "0.0.25" version = "0.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3b44166eb6335aeac42744ea368cc4c32d3f2287a4ff765a5ce44d927ab8bb4" checksum = "7f7b313901a15cfde2d88f434fcd077903d690f73cc36d1cec20f47906960aec"
dependencies = [ dependencies = [
"clap", "clap",
"libc", "libc",
"uucore 0.0.26", "uucore",
"windows-sys 0.48.0", "windows-sys 0.48.0",
] ]
[[package]]
name = "uucore"
version = "0.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23994a722acb43dbc56877e271c9723f167ae42c4c089f909b2d7dd106c3a9b4"
dependencies = [
"clap",
"glob",
"libc",
"nix",
"once_cell",
"os_display",
"uucore_procs",
"wild",
]
[[package]] [[package]]
name = "uucore" name = "uucore"
version = "0.0.26" version = "0.0.26"


@ -11,7 +11,7 @@ license = "MIT"
name = "nu" name = "nu"
repository = "https://github.com/nushell/nushell" repository = "https://github.com/nushell/nushell"
rust-version = "1.77.2" rust-version = "1.77.2"
version = "0.93.1" version = "0.94.3"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -39,6 +39,7 @@ members = [
"crates/nu-lsp", "crates/nu-lsp",
"crates/nu-pretty-hex", "crates/nu-pretty-hex",
"crates/nu-protocol", "crates/nu-protocol",
"crates/nu-derive-value",
"crates/nu-plugin", "crates/nu-plugin",
"crates/nu-plugin-core", "crates/nu-plugin-core",
"crates/nu-plugin-engine", "crates/nu-plugin-engine",
@ -74,6 +75,7 @@ chardetng = "0.1.17"
chrono = { default-features = false, version = "0.4.34" } chrono = { default-features = false, version = "0.4.34" }
chrono-humanize = "0.2.3" chrono-humanize = "0.2.3"
chrono-tz = "0.8" chrono-tz = "0.8"
convert_case = "0.6"
crossbeam-channel = "0.5.8" crossbeam-channel = "0.5.8"
crossterm = "0.27" crossterm = "0.27"
csv = "1.3" csv = "1.3"
@ -93,7 +95,7 @@ heck = "0.5.0"
human-date-parser = "0.1.1" human-date-parser = "0.1.1"
indexmap = "2.2" indexmap = "2.2"
indicatif = "0.17" indicatif = "0.17"
interprocess = "2.1.0" interprocess = "2.2.0"
is_executable = "1.0" is_executable = "1.0"
itertools = "0.12" itertools = "0.12"
libc = "0.2" libc = "0.2"
@ -118,16 +120,19 @@ num-traits = "0.2"
omnipath = "0.1" omnipath = "0.1"
once_cell = "1.18" once_cell = "1.18"
open = "5.1" open = "5.1"
os_pipe = { version = "1.1", features = ["io_safety"] } os_pipe = { version = "1.2", features = ["io_safety"] }
pathdiff = "0.2" pathdiff = "0.2"
percent-encoding = "2" percent-encoding = "2"
pretty_assertions = "1.4" pretty_assertions = "1.4"
print-positions = "0.6" print-positions = "0.6"
proc-macro-error = { version = "1.0", default-features = false }
proc-macro2 = "1.0"
procfs = "0.16.0" procfs = "0.16.0"
pwd = "1.3" pwd = "1.3"
quick-xml = "0.31.0" quick-xml = "0.31.0"
quickcheck = "1.0" quickcheck = "1.0"
quickcheck_macros = "1.0" quickcheck_macros = "1.0"
quote = "1.0"
rand = "0.8" rand = "0.8"
ratatui = "0.26" ratatui = "0.26"
rayon = "1.10" rayon = "1.10"
@ -147,6 +152,7 @@ serde_urlencoded = "0.7.1"
serde_yaml = "0.9" serde_yaml = "0.9"
sha2 = "0.10" sha2 = "0.10"
strip-ansi-escapes = "0.2.0" strip-ansi-escapes = "0.2.0"
syn = "2.0"
sysinfo = "0.30" sysinfo = "0.30"
tabled = { version = "0.14.0", default-features = false } tabled = { version = "0.14.0", default-features = false }
tempfile = "3.10" tempfile = "3.10"
@ -159,13 +165,13 @@ unicode-segmentation = "1.11"
unicode-width = "0.1" unicode-width = "0.1"
ureq = { version = "2.9", default-features = false } ureq = { version = "2.9", default-features = false }
url = "2.2" url = "2.2"
uu_cp = "0.0.25" uu_cp = "0.0.26"
uu_mkdir = "0.0.25" uu_mkdir = "0.0.26"
uu_mktemp = "0.0.25" uu_mktemp = "0.0.26"
uu_mv = "0.0.25" uu_mv = "0.0.26"
uu_whoami = "0.0.25" uu_whoami = "0.0.26"
uu_uname = "0.0.25" uu_uname = "0.0.26"
uucore = "0.0.25" uucore = "0.0.26"
uuid = "1.8.0" uuid = "1.8.0"
v_htmlescape = "0.15.0" v_htmlescape = "0.15.0"
wax = "0.6" wax = "0.6"
@ -174,30 +180,31 @@ windows = "0.54"
winreg = "0.52" winreg = "0.52"
[dependencies] [dependencies]
nu-cli = { path = "./crates/nu-cli", version = "0.93.1" } nu-cli = { path = "./crates/nu-cli", version = "0.94.3" }
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.93.1" } nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.94.3" }
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.93.1" } nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.94.3" }
nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.93.1", optional = true } nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.94.3", optional = true }
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.93.1" } nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.94.3" }
nu-command = { path = "./crates/nu-command", version = "0.93.1" } nu-command = { path = "./crates/nu-command", version = "0.94.3" }
nu-engine = { path = "./crates/nu-engine", version = "0.93.1" } nu-engine = { path = "./crates/nu-engine", version = "0.94.3" }
nu-explore = { path = "./crates/nu-explore", version = "0.93.1" } nu-explore = { path = "./crates/nu-explore", version = "0.94.3" }
nu-lsp = { path = "./crates/nu-lsp/", version = "0.93.1" } nu-lsp = { path = "./crates/nu-lsp/", version = "0.94.3" }
nu-parser = { path = "./crates/nu-parser", version = "0.93.1" } nu-parser = { path = "./crates/nu-parser", version = "0.94.3" }
nu-path = { path = "./crates/nu-path", version = "0.93.1" } nu-path = { path = "./crates/nu-path", version = "0.94.3" }
nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.93.1" } nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.94.3" }
nu-protocol = { path = "./crates/nu-protocol", version = "0.93.1" } nu-protocol = { path = "./crates/nu-protocol", version = "0.94.3" }
nu-std = { path = "./crates/nu-std", version = "0.93.1" } nu-std = { path = "./crates/nu-std", version = "0.94.3" }
nu-system = { path = "./crates/nu-system", version = "0.93.1" } nu-system = { path = "./crates/nu-system", version = "0.94.3" }
nu-utils = { path = "./crates/nu-utils", version = "0.93.1" } nu-utils = { path = "./crates/nu-utils", version = "0.94.3" }
reedline = { workspace = true, features = ["bashisms", "sqlite"] } reedline = { workspace = true, features = ["bashisms", "sqlite"] }
crossterm = { workspace = true } crossterm = { workspace = true }
ctrlc = { workspace = true } ctrlc = { workspace = true }
dirs-next = { workspace = true }
log = { workspace = true } log = { workspace = true }
miette = { workspace = true, features = ["fancy-no-backtrace", "fancy"] } miette = { workspace = true, features = ["fancy-no-backtrace", "fancy"] }
mimalloc = { version = "0.1.37", default-features = false, optional = true } mimalloc = { version = "0.1.42", default-features = false, optional = true }
serde_json = { workspace = true } serde_json = { workspace = true }
simplelog = "0.12" simplelog = "0.12"
time = "0.3" time = "0.3"
@ -218,9 +225,9 @@ nix = { workspace = true, default-features = false, features = [
] } ] }
[dev-dependencies] [dev-dependencies]
nu-test-support = { path = "./crates/nu-test-support", version = "0.93.1" } nu-test-support = { path = "./crates/nu-test-support", version = "0.94.3" }
nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.93.1" } nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.94.3" }
nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.93.1" } nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.94.3" }
assert_cmd = "2.0" assert_cmd = "2.0"
dirs-next = { workspace = true } dirs-next = { workspace = true }
tango-bench = "0.5" tango-bench = "0.5"
@ -244,7 +251,6 @@ default = ["default-no-clipboard", "system-clipboard"]
# See https://github.com/nushell/nushell/pull/11535 # See https://github.com/nushell/nushell/pull/11535
default-no-clipboard = [ default-no-clipboard = [
"plugin", "plugin",
"which-support",
"trash-support", "trash-support",
"sqlite", "sqlite",
"mimalloc", "mimalloc",
@ -264,7 +270,6 @@ system-clipboard = [
] ]
# Stable (Default) # Stable (Default)
which-support = ["nu-command/which-support", "nu-cmd-lang/which-support"]
trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"] trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"]
# SQLite commands for nushell # SQLite commands for nushell
@ -298,7 +303,7 @@ bench = false
# To use a development version of a dependency please use a global override here # To use a development version of a dependency please use a global override here
# changing versions in each sub-crate of the workspace is tedious # changing versions in each sub-crate of the workspace is tedious
[patch.crates-io] [patch.crates-io]
reedline = { git = "https://github.com/nushell/reedline", branch = "main" } # reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
# nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"} # nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"}
# Run all benchmarks with `cargo bench` # Run all benchmarks with `cargo bench`


@ -52,7 +52,7 @@ To use `Nu` in GitHub Action, check [setup-nu](https://github.com/marketplace/ac
Detailed installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/installation.html). Nu is available via many package managers: Detailed installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/installation.html). Nu is available via many package managers:
[![Packaging status](https://repology.org/badge/vertical-allrepos/nushell.svg)](https://repology.org/project/nushell/versions) [![Packaging status](https://repology.org/badge/vertical-allrepos/nushell.svg?columns=3)](https://repology.org/project/nushell/versions)
For details about which platforms the Nushell team actively supports, see [our platform support policy](devdocs/PLATFORM_SUPPORT.md). For details about which platforms the Nushell team actively supports, see [our platform support policy](devdocs/PLATFORM_SUPPORT.md).
@ -222,6 +222,7 @@ Please submit an issue or PR to be added to this list.
- [clap](https://github.com/clap-rs/clap/tree/master/clap_complete_nushell) - [clap](https://github.com/clap-rs/clap/tree/master/clap_complete_nushell)
- [Dorothy](http://github.com/bevry/dorothy) - [Dorothy](http://github.com/bevry/dorothy)
- [Direnv](https://github.com/direnv/direnv/blob/master/docs/hook.md#nushell) - [Direnv](https://github.com/direnv/direnv/blob/master/docs/hook.md#nushell)
- [x-cmd](https://x-cmd.com/mod/nu)
## Contributing ## Contributing


@ -47,8 +47,7 @@ fn setup_stack_and_engine_from_command(command: &str) -> (Stack, EngineState) {
&mut engine, &mut engine,
&mut stack, &mut stack,
PipelineData::empty(), PipelineData::empty(),
None, Default::default(),
false,
) )
.unwrap(); .unwrap();
@ -90,8 +89,7 @@ fn bench_command(
&mut engine, &mut engine,
&mut stack, &mut stack,
PipelineData::empty(), PipelineData::empty(),
None, Default::default(),
false,
) )
.unwrap(), .unwrap(),
); );


@ -5,27 +5,27 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli"
edition = "2021" edition = "2021"
license = "MIT" license = "MIT"
name = "nu-cli" name = "nu-cli"
version = "0.93.1" version = "0.94.3"
[lib] [lib]
bench = false bench = false
[dev-dependencies] [dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.93.1" } nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.94.3" }
nu-command = { path = "../nu-command", version = "0.93.1" } nu-command = { path = "../nu-command", version = "0.94.3" }
nu-test-support = { path = "../nu-test-support", version = "0.93.1" } nu-test-support = { path = "../nu-test-support", version = "0.94.3" }
rstest = { workspace = true, default-features = false } rstest = { workspace = true, default-features = false }
tempfile = { workspace = true } tempfile = { workspace = true }
[dependencies] [dependencies]
nu-cmd-base = { path = "../nu-cmd-base", version = "0.93.1" } nu-cmd-base = { path = "../nu-cmd-base", version = "0.94.3" }
nu-engine = { path = "../nu-engine", version = "0.93.1" } nu-engine = { path = "../nu-engine", version = "0.94.3" }
nu-path = { path = "../nu-path", version = "0.93.1" } nu-path = { path = "../nu-path", version = "0.94.3" }
nu-parser = { path = "../nu-parser", version = "0.93.1" } nu-parser = { path = "../nu-parser", version = "0.94.3" }
nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.93.1", optional = true } nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.94.3", optional = true }
nu-protocol = { path = "../nu-protocol", version = "0.93.1" } nu-protocol = { path = "../nu-protocol", version = "0.94.3" }
nu-utils = { path = "../nu-utils", version = "0.93.1" } nu-utils = { path = "../nu-utils", version = "0.94.3" }
nu-color-config = { path = "../nu-color-config", version = "0.93.1" } nu-color-config = { path = "../nu-color-config", version = "0.94.3" }
nu-ansi-term = { workspace = true } nu-ansi-term = { workspace = true }
reedline = { workspace = true, features = ["bashisms", "sqlite"] } reedline = { workspace = true, features = ["bashisms", "sqlite"] }
@ -39,7 +39,6 @@ miette = { workspace = true, features = ["fancy-no-backtrace"] }
lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] } lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
once_cell = { workspace = true } once_cell = { workspace = true }
percent-encoding = { workspace = true } percent-encoding = { workspace = true }
pathdiff = { workspace = true }
sysinfo = { workspace = true } sysinfo = { workspace = true }
unicode-segmentation = { workspace = true } unicode-segmentation = { workspace = true }
uuid = { workspace = true, features = ["v4"] } uuid = { workspace = true, features = ["v4"] }


@ -1,7 +1,7 @@
use crate::completions::{matches, CompletionOptions}; use crate::completions::{matches, CompletionOptions};
use nu_ansi_term::Style; use nu_ansi_term::Style;
use nu_engine::env_to_string; use nu_engine::env_to_string;
use nu_path::home_dir; use nu_path::{expand_to_real_path, home_dir};
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
Span, Span,
@ -185,9 +185,14 @@ pub fn complete_item(
.map(|p| { .map(|p| {
let path = original_cwd.apply(p); let path = original_cwd.apply(p);
let style = ls_colors.as_ref().map(|lsc| { let style = ls_colors.as_ref().map(|lsc| {
lsc.style_for_path_with_metadata(&path, std::fs::symlink_metadata(&path).ok().as_ref()) lsc.style_for_path_with_metadata(
.map(lscolors::Style::to_nu_ansi_term_style) &path,
.unwrap_or_default() std::fs::symlink_metadata(expand_to_real_path(&path))
.ok()
.as_ref(),
)
.map(lscolors::Style::to_nu_ansi_term_style)
.unwrap_or_default()
}); });
(span, escape_path(path, want_directory), style) (span, escape_path(path, want_directory), style)
}) })


@ -52,18 +52,16 @@ impl Completer for CustomCompletion {
decl_id: self.decl_id, decl_id: self.decl_id,
head: span, head: span,
arguments: vec![ arguments: vec![
Argument::Positional(Expression { Argument::Positional(Expression::new_unknown(
span: Span::unknown(), Expr::String(self.line.clone()),
ty: Type::String, Span::unknown(),
expr: Expr::String(self.line.clone()), Type::String,
custom_completion: None, )),
}), Argument::Positional(Expression::new_unknown(
Argument::Positional(Expression { Expr::Int(line_pos as i64),
span: Span::unknown(), Span::unknown(),
ty: Type::Int, Type::Int,
expr: Expr::Int(line_pos as i64), )),
custom_completion: None,
}),
], ],
parser_info: HashMap::new(), parser_info: HashMap::new(),
}, },
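
For reference, the hunk above replaces hand-assembled Expression struct literals (which spelled out custom_completion: None) with the Expression::new_unknown constructor, passing the expression, its span, and its type in that order. A minimal sketch of the call pattern, with module paths assumed from the nu-protocol crate rather than taken from this commit:

    // Sketch only: shows the argument order used in the hunk above (expr, span, type).
    use nu_protocol::ast::{Expr, Expression};
    use nu_protocol::{Span, Type};

    fn positional_string(line: &str) -> Expression {
        // Previously written as `Expression { expr, span, ty, custom_completion: None }`.
        Expression::new_unknown(Expr::String(line.to_string()), Span::unknown(), Type::String)
    }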


@ -8,15 +8,45 @@ use nu_protocol::{
}; };
use std::sync::Arc; use std::sync::Arc;
#[derive(Default)]
pub struct EvaluateCommandsOpts {
pub table_mode: Option<Value>,
pub error_style: Option<Value>,
pub no_newline: bool,
}
/// Run a command (or commands) given to us by the user /// Run a command (or commands) given to us by the user
pub fn evaluate_commands( pub fn evaluate_commands(
commands: &Spanned<String>, commands: &Spanned<String>,
engine_state: &mut EngineState, engine_state: &mut EngineState,
stack: &mut Stack, stack: &mut Stack,
input: PipelineData, input: PipelineData,
table_mode: Option<Value>, opts: EvaluateCommandsOpts,
no_newline: bool,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let EvaluateCommandsOpts {
table_mode,
error_style,
no_newline,
} = opts;
// Handle the configured error style early
if let Some(e_style) = error_style {
match e_style.coerce_str()?.parse() {
Ok(e_style) => {
Arc::make_mut(&mut engine_state.config).error_style = e_style;
}
Err(err) => {
return Err(ShellError::GenericError {
error: "Invalid value for `--error-style`".into(),
msg: err.into(),
span: Some(e_style.span()),
help: None,
inner: vec![],
});
}
}
}
// Translate environment variables from Strings to Values // Translate environment variables from Strings to Values
convert_env_values(engine_state, stack)?; convert_env_values(engine_state, stack)?;
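
A hypothetical call site for the new options struct; `commands`, `engine_state`, `stack`, and `input` are assumed to be in scope as in the `nu` binary's `-c`/`--commands` path, and the `"plain"` value is just one of the accepted error styles:

```rust
// Hypothetical caller sketch (not code from this commit): deriving Default on
// EvaluateCommandsOpts lets callers set only the fields they care about.
let opts = EvaluateCommandsOpts {
    error_style: Some(Value::string("plain", Span::unknown())),
    ..Default::default()
};
evaluate_commands(&commands, &mut engine_state, &mut stack, input, opts)?;
```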

View File

@ -17,7 +17,7 @@ mod validation;
pub use commands::add_cli_context; pub use commands::add_cli_context;
pub use completions::{FileCompletion, NuCompleter, SemanticSuggestion, SuggestionKind}; pub use completions::{FileCompletion, NuCompleter, SemanticSuggestion, SuggestionKind};
pub use config_files::eval_config_contents; pub use config_files::eval_config_contents;
pub use eval_cmds::evaluate_commands; pub use eval_cmds::{evaluate_commands, EvaluateCommandsOpts};
pub use eval_file::evaluate_file; pub use eval_file::evaluate_file;
pub use menus::NuHelpCompleter; pub use menus::NuHelpCompleter;
pub use nu_cmd_base::util::get_init_cwd; pub use nu_cmd_base::util::get_init_cwd;

View File

@ -1,6 +1,5 @@
use crate::{menus::NuMenuCompleter, NuHelpCompleter}; use crate::{menus::NuMenuCompleter, NuHelpCompleter};
use crossterm::event::{KeyCode, KeyModifiers}; use crossterm::event::{KeyCode, KeyModifiers};
use log::trace;
use nu_ansi_term::Style; use nu_ansi_term::Style;
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style}; use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
use nu_engine::eval_block; use nu_engine::eval_block;
@ -76,15 +75,15 @@ const DEFAULT_HELP_MENU: &str = r#"
// Adds all menus to line editor // Adds all menus to line editor
pub(crate) fn add_menus( pub(crate) fn add_menus(
mut line_editor: Reedline, mut line_editor: Reedline,
engine_state: Arc<EngineState>, engine_state_ref: Arc<EngineState>,
stack: &Stack, stack: &Stack,
config: &Config, config: &Config,
) -> Result<Reedline, ShellError> { ) -> Result<Reedline, ShellError> {
trace!("add_menus: config: {:#?}", &config); //log::trace!("add_menus: config: {:#?}", &config);
line_editor = line_editor.clear_menus(); line_editor = line_editor.clear_menus();
for menu in &config.menus { for menu in &config.menus {
line_editor = add_menu(line_editor, menu, engine_state.clone(), stack, config)? line_editor = add_menu(line_editor, menu, engine_state_ref.clone(), stack, config)?
} }
// Checking if the default menus have been added from the config file // Checking if the default menus have been added from the config file
@ -94,13 +93,16 @@ pub(crate) fn add_menus(
("help_menu", DEFAULT_HELP_MENU), ("help_menu", DEFAULT_HELP_MENU),
]; ];
let mut engine_state = (*engine_state_ref).clone();
let mut menu_eval_results = vec![];
for (name, definition) in default_menus { for (name, definition) in default_menus {
if !config if !config
.menus .menus
.iter() .iter()
.any(|menu| menu.name.to_expanded_string("", config) == name) .any(|menu| menu.name.to_expanded_string("", config) == name)
{ {
let (block, _) = { let (block, delta) = {
let mut working_set = StateWorkingSet::new(&engine_state); let mut working_set = StateWorkingSet::new(&engine_state);
let output = parse( let output = parse(
&mut working_set, &mut working_set,
@ -112,15 +114,31 @@ pub(crate) fn add_menus(
(output, working_set.render()) (output, working_set.render())
}; };
engine_state.merge_delta(delta)?;
let mut temp_stack = Stack::new().capture(); let mut temp_stack = Stack::new().capture();
let input = PipelineData::Empty; let input = PipelineData::Empty;
let res = eval_block::<WithoutDebug>(&engine_state, &mut temp_stack, &block, input)?; menu_eval_results.push(eval_block::<WithoutDebug>(
&engine_state,
&mut temp_stack,
&block,
input,
)?);
}
}
if let PipelineData::Value(value, None) = res { let new_engine_state_ref = Arc::new(engine_state);
for menu in create_menus(&value)? {
line_editor = for res in menu_eval_results.into_iter() {
add_menu(line_editor, &menu, engine_state.clone(), stack, config)?; if let PipelineData::Value(value, None) = res {
} for menu in create_menus(&value)? {
line_editor = add_menu(
line_editor,
&menu,
new_engine_state_ref.clone(),
stack,
config,
)?;
} }
} }
} }

View File

@ -1110,9 +1110,9 @@ fn run_shell_integration_osc9_9(engine_state: &EngineState, stack: &mut Stack, u
let start_time = Instant::now(); let start_time = Instant::now();
// Otherwise, communicate the path as OSC 9;9 from ConEmu (often used for spawning new tabs in the same dir) // Otherwise, communicate the path as OSC 9;9 from ConEmu (often used for spawning new tabs in the same dir)
// This is helpful in Windows Terminal with Duplicate Tab
run_ansi_sequence(&format!( run_ansi_sequence(&format!(
"\x1b]9;9;{}{}\x1b\\", "\x1b]9;9;{}\x1b\\",
if path.starts_with('/') { "" } else { "/" },
percent_encoding::utf8_percent_encode(&path, percent_encoding::CONTROLS) percent_encoding::utf8_percent_encode(&path, percent_encoding::CONTROLS)
)); ));
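
A minimal standalone sketch of the sequence emitted above (not the nushell helper itself), assuming the `percent-encoding` crate: OSC 9;9 reports the current directory to ConEmu / Windows Terminal, and the path is percent-encoded so control characters cannot break the escape sequence.

```rust
// Build an OSC 9;9 "current directory" escape for a given path.
fn osc_9_9(path: &str) -> String {
    format!(
        "\x1b]9;9;{}\x1b\\",
        percent_encoding::utf8_percent_encode(path, percent_encoding::CONTROLS)
    )
}

fn main() {
    // Debug-print so the ESC bytes are visible.
    println!("{:?}", osc_9_9(r"C:\Users\me"));
}
```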

View File

@ -138,6 +138,7 @@ impl Highlighter for NuHighlighter {
FlatShape::Filepath => add_colored_token(&shape.1, next_token), FlatShape::Filepath => add_colored_token(&shape.1, next_token),
FlatShape::Directory => add_colored_token(&shape.1, next_token), FlatShape::Directory => add_colored_token(&shape.1, next_token),
FlatShape::GlobInterpolation => add_colored_token(&shape.1, next_token),
FlatShape::GlobPattern => add_colored_token(&shape.1, next_token), FlatShape::GlobPattern => add_colored_token(&shape.1, next_token),
FlatShape::Variable(_) | FlatShape::VarDecl(_) => { FlatShape::Variable(_) | FlatShape::VarDecl(_) => {
add_colored_token(&shape.1, next_token) add_colored_token(&shape.1, next_token)
@ -452,15 +453,17 @@ fn find_matching_block_end_in_expr(
} }
} }
Expr::StringInterpolation(exprs) => exprs.iter().find_map(|expr| { Expr::StringInterpolation(exprs) | Expr::GlobInterpolation(exprs, _) => {
find_matching_block_end_in_expr( exprs.iter().find_map(|expr| {
line, find_matching_block_end_in_expr(
working_set, line,
expr, working_set,
global_span_offset, expr,
global_cursor_offset, global_span_offset,
) global_cursor_offset,
}), )
})
}
Expr::List(list) => { Expr::List(list) => {
if expr_last == global_cursor_offset { if expr_last == global_cursor_offset {

View File

@ -292,6 +292,8 @@ fn partial_completions() {
// Create the expected values // Create the expected values
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
file(dir.join("partial_a").join("have_ext.exe")),
file(dir.join("partial_a").join("have_ext.txt")),
file(dir.join("partial_a").join("hello")), file(dir.join("partial_a").join("hello")),
file(dir.join("partial_a").join("hola")), file(dir.join("partial_a").join("hola")),
file(dir.join("partial_b").join("hello_b")), file(dir.join("partial_b").join("hello_b")),
@ -310,6 +312,8 @@ fn partial_completions() {
// Create the expected values // Create the expected values
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
file(dir.join("partial_a").join("anotherfile")), file(dir.join("partial_a").join("anotherfile")),
file(dir.join("partial_a").join("have_ext.exe")),
file(dir.join("partial_a").join("have_ext.txt")),
file(dir.join("partial_a").join("hello")), file(dir.join("partial_a").join("hello")),
file(dir.join("partial_a").join("hola")), file(dir.join("partial_a").join("hola")),
file(dir.join("partial_b").join("hello_b")), file(dir.join("partial_b").join("hello_b")),
@ -360,6 +364,34 @@ fn partial_completions() {
// Match the results // Match the results
match_suggestions(expected_paths, suggestions); match_suggestions(expected_paths, suggestions);
// Test completion for all files under directories whose names begin with "pa"
let file_str = file(dir.join("partial_a").join("have"));
let target_file = format!("rm {file_str}");
let suggestions = completer.complete(&target_file, target_file.len());
// Create the expected values
let expected_paths: Vec<String> = vec![
file(dir.join("partial_a").join("have_ext.exe")),
file(dir.join("partial_a").join("have_ext.txt")),
];
// Match the results
match_suggestions(expected_paths, suggestions);
// Test completion for all files under directories whose names begin with "pa"
let file_str = file(dir.join("partial_a").join("have_ext."));
let file_dir = format!("rm {file_str}");
let suggestions = completer.complete(&file_dir, file_dir.len());
// Create the expected values
let expected_paths: Vec<String> = vec![
file(dir.join("partial_a").join("have_ext.exe")),
file(dir.join("partial_a").join("have_ext.txt")),
];
// Match the results
match_suggestions(expected_paths, suggestions);
} }
#[test] #[test]
@ -394,6 +426,13 @@ fn command_ls_with_filecompletion() {
".hidden_folder/".to_string(), ".hidden_folder/".to_string(),
]; ];
match_suggestions(expected_paths, suggestions);
let target_dir = "ls custom_completion.";
let suggestions = completer.complete(target_dir, target_dir.len());
let expected_paths: Vec<String> = vec!["custom_completion.nu".to_string()];
match_suggestions(expected_paths, suggestions) match_suggestions(expected_paths, suggestions)
} }
#[test] #[test]
@ -428,6 +467,13 @@ fn command_open_with_filecompletion() {
".hidden_folder/".to_string(), ".hidden_folder/".to_string(),
]; ];
match_suggestions(expected_paths, suggestions);
let target_dir = "open custom_completion.";
let suggestions = completer.complete(target_dir, target_dir.len());
let expected_paths: Vec<String> = vec!["custom_completion.nu".to_string()];
match_suggestions(expected_paths, suggestions) match_suggestions(expected_paths, suggestions)
} }
@ -717,11 +763,13 @@ fn variables_completions() {
// Test completions for $nu // Test completions for $nu
let suggestions = completer.complete("$nu.", 4); let suggestions = completer.complete("$nu.", 4);
assert_eq!(15, suggestions.len()); assert_eq!(18, suggestions.len());
let expected: Vec<String> = vec![ let expected: Vec<String> = vec![
"cache-dir".into(),
"config-path".into(), "config-path".into(),
"current-exe".into(), "current-exe".into(),
"data-dir".into(),
"default-config-dir".into(), "default-config-dir".into(),
"env-path".into(), "env-path".into(),
"history-enabled".into(), "history-enabled".into(),
@ -735,6 +783,7 @@ fn variables_completions() {
"plugin-path".into(), "plugin-path".into(),
"startup-time".into(), "startup-time".into(),
"temp-path".into(), "temp-path".into(),
"vendor-autoload-dir".into(),
]; ];
// Match results // Match results

View File

@ -5,15 +5,15 @@ edition = "2021"
license = "MIT" license = "MIT"
name = "nu-cmd-base" name = "nu-cmd-base"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base" repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
version = "0.93.1" version = "0.94.3"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
nu-engine = { path = "../nu-engine", version = "0.93.1" } nu-engine = { path = "../nu-engine", version = "0.94.3" }
nu-parser = { path = "../nu-parser", version = "0.93.1" } nu-parser = { path = "../nu-parser", version = "0.94.3" }
nu-path = { path = "../nu-path", version = "0.93.1" } nu-path = { path = "../nu-path", version = "0.94.3" }
nu-protocol = { path = "../nu-protocol", version = "0.93.1" } nu-protocol = { path = "../nu-protocol", version = "0.94.3" }
indexmap = { workspace = true } indexmap = { workspace = true }
miette = { workspace = true } miette = { workspace = true }

View File

@ -194,7 +194,7 @@ pub fn eval_hook(
let Some(follow) = val.get("code") else { let Some(follow) = val.get("code") else {
return Err(ShellError::CantFindColumn { return Err(ShellError::CantFindColumn {
col_name: "code".into(), col_name: "code".into(),
span, span: Some(span),
src_span: span, src_span: span,
}); });
}; };

View File

@ -20,6 +20,7 @@ pub fn get_guaranteed_cwd(engine_state: &EngineState, stack: &Stack) -> PathBuf
type MakeRangeError = fn(&str, Span) -> ShellError; type MakeRangeError = fn(&str, Span) -> ShellError;
/// Returns an inclusive pair of boundaries for the given `range`.
pub fn process_range(range: &Range) -> Result<(isize, isize), MakeRangeError> { pub fn process_range(range: &Range) -> Result<(isize, isize), MakeRangeError> {
match range { match range {
Range::IntRange(range) => { Range::IntRange(range) => {

View File

@ -5,7 +5,7 @@ edition = "2021"
license = "MIT" license = "MIT"
name = "nu-cmd-extra" name = "nu-cmd-extra"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-extra" repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-extra"
version = "0.93.1" version = "0.94.3"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -13,13 +13,13 @@ version = "0.93.1"
bench = false bench = false
[dependencies] [dependencies]
nu-cmd-base = { path = "../nu-cmd-base", version = "0.93.1" } nu-cmd-base = { path = "../nu-cmd-base", version = "0.94.3" }
nu-engine = { path = "../nu-engine", version = "0.93.1" } nu-engine = { path = "../nu-engine", version = "0.94.3" }
nu-json = { version = "0.93.1", path = "../nu-json" } nu-json = { version = "0.94.3", path = "../nu-json" }
nu-parser = { path = "../nu-parser", version = "0.93.1" } nu-parser = { path = "../nu-parser", version = "0.94.3" }
nu-pretty-hex = { version = "0.93.1", path = "../nu-pretty-hex" } nu-pretty-hex = { version = "0.94.3", path = "../nu-pretty-hex" }
nu-protocol = { path = "../nu-protocol", version = "0.93.1" } nu-protocol = { path = "../nu-protocol", version = "0.94.3" }
nu-utils = { path = "../nu-utils", version = "0.93.1" } nu-utils = { path = "../nu-utils", version = "0.94.3" }
# Potential dependencies for extras # Potential dependencies for extras
heck = { workspace = true } heck = { workspace = true }
@ -32,11 +32,7 @@ serde_urlencoded = { workspace = true }
v_htmlescape = { workspace = true } v_htmlescape = { workspace = true }
itertools = { workspace = true } itertools = { workspace = true }
[features]
extra = ["default"]
default = []
[dev-dependencies] [dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.93.1" } nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.94.3" }
nu-command = { path = "../nu-command", version = "0.93.1" } nu-command = { path = "../nu-command", version = "0.94.3" }
nu-test-support = { path = "../nu-test-support", version = "0.93.1" } nu-test-support = { path = "../nu-test-support", version = "0.94.3" }

View File

@ -1,5 +1,4 @@
use nu_engine::{command_prelude::*, get_eval_expression}; use nu_engine::command_prelude::*;
use nu_parser::parse_expression;
use nu_protocol::{ast::PathMember, engine::StateWorkingSet, ListStream}; use nu_protocol::{ast::PathMember, engine::StateWorkingSet, ListStream};
#[derive(Clone)] #[derive(Clone)]
@ -57,14 +56,7 @@ impl Command for FormatPattern {
string_span.start + 1, string_span.start + 1,
)?; )?;
format( format(input_val, &ops, engine_state, call.head)
input_val,
&ops,
engine_state,
&mut working_set,
stack,
call.head,
)
} }
} }
} }
@ -100,8 +92,6 @@ enum FormatOperation {
FixedText(String), FixedText(String),
// raw input is something like {column1.column2} // raw input is something like {column1.column2}
ValueFromColumn(String, Span), ValueFromColumn(String, Span),
// raw input is something like {$it.column1.column2} or {$var}.
ValueNeedEval(String, Span),
} }
/// Given a pattern that is fed into the Format command, we can process it and subdivide it /// Given a pattern that is fed into the Format command, we can process it and subdivide it
@ -110,7 +100,6 @@ enum FormatOperation {
/// there without any further processing. /// there without any further processing.
/// FormatOperation::ValueFromColumn contains the name of a column whose values will be /// FormatOperation::ValueFromColumn contains the name of a column whose values will be
/// formatted according to the input pattern. /// formatted according to the input pattern.
/// FormatOperation::ValueNeedEval contains expression which need to eval, it has the following form:
/// "$it.column1.column2" or "$variable" /// "$it.column1.column2" or "$variable"
fn extract_formatting_operations( fn extract_formatting_operations(
input: String, input: String,
@ -161,10 +150,17 @@ fn extract_formatting_operations(
if !column_name.is_empty() { if !column_name.is_empty() {
if column_need_eval { if column_need_eval {
output.push(FormatOperation::ValueNeedEval( return Err(ShellError::GenericError {
column_name.clone(), error: "Removed functionality".into(),
Span::new(span_start + column_span_start, span_start + column_span_end), msg: "The ability to use variables ($it) in `format pattern` has been removed."
)); .into(),
span: Some(error_span),
help: Some(
"You can use other formatting options, such as string interpolation."
.into(),
),
inner: vec![],
});
} else { } else {
output.push(FormatOperation::ValueFromColumn( output.push(FormatOperation::ValueFromColumn(
column_name.clone(), column_name.clone(),
@ -185,8 +181,6 @@ fn format(
input_data: Value, input_data: Value,
format_operations: &[FormatOperation], format_operations: &[FormatOperation],
engine_state: &EngineState, engine_state: &EngineState,
working_set: &mut StateWorkingSet,
stack: &mut Stack,
head_span: Span, head_span: Span,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let data_as_value = input_data; let data_as_value = input_data;
@ -194,13 +188,7 @@ fn format(
// We can only handle a Record or a List of Records // We can only handle a Record or a List of Records
match data_as_value { match data_as_value {
Value::Record { .. } => { Value::Record { .. } => {
match format_record( match format_record(format_operations, &data_as_value, engine_state) {
format_operations,
&data_as_value,
engine_state,
working_set,
stack,
) {
Ok(value) => Ok(PipelineData::Value(Value::string(value, head_span), None)), Ok(value) => Ok(PipelineData::Value(Value::string(value, head_span), None)),
Err(value) => Err(value), Err(value) => Err(value),
} }
@ -211,13 +199,7 @@ fn format(
for val in vals.iter() { for val in vals.iter() {
match val { match val {
Value::Record { .. } => { Value::Record { .. } => {
match format_record( match format_record(format_operations, val, engine_state) {
format_operations,
val,
engine_state,
working_set,
stack,
) {
Ok(value) => { Ok(value) => {
list.push(Value::string(value, head_span)); list.push(Value::string(value, head_span));
} }
@ -256,12 +238,9 @@ fn format_record(
format_operations: &[FormatOperation], format_operations: &[FormatOperation],
data_as_value: &Value, data_as_value: &Value,
engine_state: &EngineState, engine_state: &EngineState,
working_set: &mut StateWorkingSet,
stack: &mut Stack,
) -> Result<String, ShellError> { ) -> Result<String, ShellError> {
let config = engine_state.get_config(); let config = engine_state.get_config();
let mut output = String::new(); let mut output = String::new();
let eval_expression = get_eval_expression(engine_state);
for op in format_operations { for op in format_operations {
match op { match op {
@ -283,23 +262,6 @@ fn format_record(
Err(se) => return Err(se), Err(se) => return Err(se),
} }
} }
FormatOperation::ValueNeedEval(_col_name, span) => {
let exp = parse_expression(working_set, &[*span]);
match working_set.parse_errors.first() {
None => {
let parsed_result = eval_expression(engine_state, stack, &exp);
if let Ok(val) = parsed_result {
output.push_str(&val.to_abbreviated_string(config))
}
}
Some(err) => {
return Err(ShellError::TypeMismatch {
err_message: format!("expression is invalid, detail message: {err:?}"),
span: *span,
})
}
}
}
} }
} }
Ok(output) Ok(output)

View File

@ -6,26 +6,25 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-lang"
edition = "2021" edition = "2021"
license = "MIT" license = "MIT"
name = "nu-cmd-lang" name = "nu-cmd-lang"
version = "0.93.1" version = "0.94.3"
[lib] [lib]
bench = false bench = false
[dependencies] [dependencies]
nu-engine = { path = "../nu-engine", version = "0.93.1" } nu-engine = { path = "../nu-engine", version = "0.94.3" }
nu-parser = { path = "../nu-parser", version = "0.93.1" } nu-parser = { path = "../nu-parser", version = "0.94.3" }
nu-protocol = { path = "../nu-protocol", version = "0.93.1" } nu-protocol = { path = "../nu-protocol", version = "0.94.3" }
nu-utils = { path = "../nu-utils", version = "0.93.1" } nu-utils = { path = "../nu-utils", version = "0.94.3" }
itertools = { workspace = true } itertools = { workspace = true }
shadow-rs = { version = "0.27", default-features = false } shadow-rs = { version = "0.28", default-features = false }
[build-dependencies] [build-dependencies]
shadow-rs = { version = "0.27", default-features = false } shadow-rs = { version = "0.28", default-features = false }
[features] [features]
mimalloc = [] mimalloc = []
which-support = []
trash-support = [] trash-support = []
sqlite = [] sqlite = []
static-link-openssl = [] static-link-openssl = []

View File

@ -60,10 +60,15 @@ impl Command for Def {
example: r#"def --env foo [] { $env.BAR = "BAZ" }; foo; $env.BAR"#, example: r#"def --env foo [] { $env.BAR = "BAZ" }; foo; $env.BAR"#,
result: Some(Value::test_string("BAZ")), result: Some(Value::test_string("BAZ")),
}, },
Example {
description: "cd affects the environment, so '--env' is required to change directory from within a command",
example: r#"def --env gohome [] { cd ~ }; gohome; $env.PWD == ('~' | path expand)"#,
result: Some(Value::test_string("true")),
},
Example { Example {
description: "Define a custom wrapper for an external command", description: "Define a custom wrapper for an external command",
example: r#"def --wrapped my-echo [...rest] { echo $rest }; my-echo spam"#, example: r#"def --wrapped my-echo [...rest] { ^echo ...$rest }; my-echo -e 'spam\tspam'"#,
result: Some(Value::test_list(vec![Value::test_string("spam")])), result: Some(Value::test_string("spam\tspam")),
}, },
] ]
} }

View File

@ -229,14 +229,24 @@ impl Command for Do {
result: None, result: None,
}, },
Example { Example {
description: "Run the closure, with a positional parameter", description: "Run the closure with a positional, type-checked parameter",
example: r#"do {|x| 100 + $x } 77"#, example: r#"do {|x:int| 100 + $x } 77"#,
result: Some(Value::test_int(177)), result: Some(Value::test_int(177)),
}, },
Example { Example {
description: "Run the closure, with input", description: "Run the closure with pipeline input",
example: r#"77 | do {|x| 100 + $in }"#, example: r#"77 | do { 100 + $in }"#,
result: None, // TODO: returns 177 result: Some(Value::test_int(177)),
},
Example {
description: "Run the closure with a default parameter value",
example: r#"77 | do {|x=100| $x + $in }"#,
result: Some(Value::test_int(177)),
},
Example {
description: "Run the closure with two positional parameters",
example: r#"do {|x,y| $x + $y } 77 100"#,
result: Some(Value::test_int(177)),
}, },
Example { Example {
description: "Run the closure and keep changes to the environment", description: "Run the closure and keep changes to the environment",
@ -298,9 +308,7 @@ fn bind_args_to(
if let Some(rest_positional) = &signature.rest_positional { if let Some(rest_positional) = &signature.rest_positional {
let mut rest_items = vec![]; let mut rest_items = vec![];
for result in for result in val_iter {
val_iter.skip(signature.required_positional.len() + signature.optional_positional.len())
{
rest_items.push(result); rest_items.push(result);
} }

View File

@ -1,5 +1,6 @@
use nu_engine::{command_prelude::*, get_eval_block, get_eval_expression}; use nu_engine::{command_prelude::*, get_eval_block, get_eval_expression};
use nu_protocol::engine::CommandType; use nu_protocol::engine::CommandType;
use nu_protocol::ParseWarning;
#[derive(Clone)] #[derive(Clone)]
pub struct For; pub struct For;
@ -30,7 +31,7 @@ impl Command for For {
.required("block", SyntaxShape::Block, "The block to run.") .required("block", SyntaxShape::Block, "The block to run.")
.switch( .switch(
"numbered", "numbered",
"return a numbered item ($it.index and $it.item)", "DEPRECATED: return a numbered item ($it.index and $it.item)",
Some('n'), Some('n'),
) )
.creates_scope() .creates_scope()
@ -78,6 +79,20 @@ impl Command for For {
let value = eval_expression(engine_state, stack, keyword_expr)?; let value = eval_expression(engine_state, stack, keyword_expr)?;
let numbered = call.has_flag(engine_state, stack, "numbered")?; let numbered = call.has_flag(engine_state, stack, "numbered")?;
if numbered {
nu_protocol::report_error_new(
engine_state,
&ParseWarning::DeprecatedWarning {
old_command: "--numbered/-n".into(),
new_suggestion: "use `enumerate`".into(),
span: call
.get_named_arg("numbered")
.expect("`get_named_arg` found `--numbered` but still failed")
.span,
url: "See `help for` examples".into(),
},
);
}
let ctrlc = engine_state.ctrlc.clone(); let ctrlc = engine_state.ctrlc.clone();
let engine_state = engine_state.clone(); let engine_state = engine_state.clone();
@ -198,8 +213,7 @@ impl Command for For {
}, },
Example { Example {
description: "Number each item and print a message", description: "Number each item and print a message",
example: example: r#"for $it in (['bob' 'fred'] | enumerate) { print $"($it.index) is ($it.item)" }"#,
"for $it in ['bob' 'fred'] --numbered { print $\"($it.index) is ($it.item)\" }",
result: None, result: None,
}, },
] ]

View File

@ -122,6 +122,10 @@ impl Command for If {
} }
} }
fn search_terms(&self) -> Vec<&str> {
vec!["else", "conditional"]
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {

View File

@ -10,7 +10,7 @@ impl Command for Try {
} }
fn usage(&self) -> &str { fn usage(&self) -> &str {
"Try to run a block, if it fails optionally run a catch block." "Try to run a block, if it fails optionally run a catch closure."
} }
fn signature(&self) -> nu_protocol::Signature { fn signature(&self) -> nu_protocol::Signature {
@ -18,7 +18,7 @@ impl Command for Try {
.input_output_types(vec![(Type::Any, Type::Any)]) .input_output_types(vec![(Type::Any, Type::Any)])
.required("try_block", SyntaxShape::Block, "Block to run.") .required("try_block", SyntaxShape::Block, "Block to run.")
.optional( .optional(
"catch_block", "catch_closure",
SyntaxShape::Keyword( SyntaxShape::Keyword(
b"catch".to_vec(), b"catch".to_vec(),
Box::new(SyntaxShape::OneOf(vec![ Box::new(SyntaxShape::OneOf(vec![
@ -26,7 +26,7 @@ impl Command for Try {
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])), SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
])), ])),
), ),
"Block to run if try block fails.", "Closure to run if try block fails.",
) )
.category(Category::Core) .category(Category::Core)
} }
@ -85,9 +85,14 @@ impl Command for Try {
}, },
Example { Example {
description: "Try to run a missing command", description: "Try to run a missing command",
example: "try { asdfasdf } catch { 'missing' } ", example: "try { asdfasdf } catch { 'missing' }",
result: Some(Value::test_string("missing")), result: Some(Value::test_string("missing")),
}, },
Example {
description: "Try to run a missing command and report the message",
example: "try { asdfasdf } catch { |err| $err.msg }",
result: None,
},
] ]
} }
} }

View File

@ -167,11 +167,6 @@ fn features_enabled() -> Vec<String> {
// NOTE: There should be another way to know features on. // NOTE: There should be another way to know features on.
#[cfg(feature = "which-support")]
{
names.push("which".to_string());
}
#[cfg(feature = "trash-support")] #[cfg(feature = "trash-support")]
{ {
names.push("trash".to_string()); names.push("trash".to_string());

View File

@ -5,15 +5,15 @@ edition = "2021"
license = "MIT" license = "MIT"
name = "nu-cmd-plugin" name = "nu-cmd-plugin"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-plugin" repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-plugin"
version = "0.93.1" version = "0.94.3"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
nu-engine = { path = "../nu-engine", version = "0.93.1" } nu-engine = { path = "../nu-engine", version = "0.94.3" }
nu-path = { path = "../nu-path", version = "0.93.1" } nu-path = { path = "../nu-path", version = "0.94.3" }
nu-protocol = { path = "../nu-protocol", version = "0.93.1", features = ["plugin"] } nu-protocol = { path = "../nu-protocol", version = "0.94.3", features = ["plugin"] }
nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.93.1" } nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.94.3" }
itertools = { workspace = true } itertools = { workspace = true }

View File

@ -5,18 +5,18 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-color-confi
edition = "2021" edition = "2021"
license = "MIT" license = "MIT"
name = "nu-color-config" name = "nu-color-config"
version = "0.93.1" version = "0.94.3"
[lib] [lib]
bench = false bench = false
[dependencies] [dependencies]
nu-protocol = { path = "../nu-protocol", version = "0.93.1" } nu-protocol = { path = "../nu-protocol", version = "0.94.3" }
nu-engine = { path = "../nu-engine", version = "0.93.1" } nu-engine = { path = "../nu-engine", version = "0.94.3" }
nu-json = { path = "../nu-json", version = "0.93.1" } nu-json = { path = "../nu-json", version = "0.94.3" }
nu-ansi-term = { workspace = true } nu-ansi-term = { workspace = true }
serde = { workspace = true, features = ["derive"] } serde = { workspace = true, features = ["derive"] }
[dev-dependencies] [dev-dependencies]
nu-test-support = { path = "../nu-test-support", version = "0.93.1" } nu-test-support = { path = "../nu-test-support", version = "0.94.3" }

View File

@ -20,6 +20,7 @@ pub fn default_shape_color(shape: &str) -> Style {
"shape_flag" => Style::new().fg(Color::Blue).bold(), "shape_flag" => Style::new().fg(Color::Blue).bold(),
"shape_float" => Style::new().fg(Color::Purple).bold(), "shape_float" => Style::new().fg(Color::Purple).bold(),
"shape_garbage" => Style::new().fg(Color::White).on(Color::Red).bold(), "shape_garbage" => Style::new().fg(Color::White).on(Color::Red).bold(),
"shape_glob_interpolation" => Style::new().fg(Color::Cyan).bold(),
"shape_globpattern" => Style::new().fg(Color::Cyan).bold(), "shape_globpattern" => Style::new().fg(Color::Cyan).bold(),
"shape_int" => Style::new().fg(Color::Purple).bold(), "shape_int" => Style::new().fg(Color::Purple).bold(),
"shape_internalcall" => Style::new().fg(Color::Cyan).bold(), "shape_internalcall" => Style::new().fg(Color::Cyan).bold(),

View File

@ -5,7 +5,7 @@ edition = "2021"
license = "MIT" license = "MIT"
name = "nu-command" name = "nu-command"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-command" repository = "https://github.com/nushell/nushell/tree/main/crates/nu-command"
version = "0.93.1" version = "0.94.3"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -13,21 +13,21 @@ version = "0.93.1"
bench = false bench = false
[dependencies] [dependencies]
nu-cmd-base = { path = "../nu-cmd-base", version = "0.93.1" } nu-cmd-base = { path = "../nu-cmd-base", version = "0.94.3" }
nu-color-config = { path = "../nu-color-config", version = "0.93.1" } nu-color-config = { path = "../nu-color-config", version = "0.94.3" }
nu-engine = { path = "../nu-engine", version = "0.93.1" } nu-engine = { path = "../nu-engine", version = "0.94.3" }
nu-glob = { path = "../nu-glob", version = "0.93.1" } nu-glob = { path = "../nu-glob", version = "0.94.3" }
nu-json = { path = "../nu-json", version = "0.93.1" } nu-json = { path = "../nu-json", version = "0.94.3" }
nu-parser = { path = "../nu-parser", version = "0.93.1" } nu-parser = { path = "../nu-parser", version = "0.94.3" }
nu-path = { path = "../nu-path", version = "0.93.1" } nu-path = { path = "../nu-path", version = "0.94.3" }
nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.93.1" } nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.94.3" }
nu-protocol = { path = "../nu-protocol", version = "0.93.1" } nu-protocol = { path = "../nu-protocol", version = "0.94.3" }
nu-system = { path = "../nu-system", version = "0.93.1" } nu-system = { path = "../nu-system", version = "0.94.3" }
nu-table = { path = "../nu-table", version = "0.93.1" } nu-table = { path = "../nu-table", version = "0.94.3" }
nu-term-grid = { path = "../nu-term-grid", version = "0.93.1" } nu-term-grid = { path = "../nu-term-grid", version = "0.94.3" }
nu-utils = { path = "../nu-utils", version = "0.93.1" } nu-utils = { path = "../nu-utils", version = "0.94.3" }
nu-ansi-term = { workspace = true } nu-ansi-term = { workspace = true }
nuon = { path = "../nuon", version = "0.93.1" } nuon = { path = "../nuon", version = "0.94.3" }
alphanumeric-sort = { workspace = true } alphanumeric-sort = { workspace = true }
base64 = { workspace = true } base64 = { workspace = true }
@ -86,7 +86,7 @@ sysinfo = { workspace = true }
tabled = { workspace = true, features = ["color"], default-features = false } tabled = { workspace = true, features = ["color"], default-features = false }
terminal_size = { workspace = true } terminal_size = { workspace = true }
titlecase = { workspace = true } titlecase = { workspace = true }
toml = { workspace = true } toml = { workspace = true, features = ["preserve_order"]}
unicode-segmentation = { workspace = true } unicode-segmentation = { workspace = true }
ureq = { workspace = true, default-features = false, features = ["charset", "gzip", "json", "native-tls"] } ureq = { workspace = true, default-features = false, features = ["charset", "gzip", "json", "native-tls"] }
url = { workspace = true } url = { workspace = true }
@ -99,7 +99,7 @@ uu_whoami = { workspace = true }
uuid = { workspace = true, features = ["v4"] } uuid = { workspace = true, features = ["v4"] }
v_htmlescape = { workspace = true } v_htmlescape = { workspace = true }
wax = { workspace = true } wax = { workspace = true }
which = { workspace = true, optional = true } which = { workspace = true }
unicode-width = { workspace = true } unicode-width = { workspace = true }
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
@ -134,11 +134,10 @@ workspace = true
plugin = ["nu-parser/plugin"] plugin = ["nu-parser/plugin"]
sqlite = ["rusqlite"] sqlite = ["rusqlite"]
trash-support = ["trash"] trash-support = ["trash"]
which-support = ["which"]
[dev-dependencies] [dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.93.1" } nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.94.3" }
nu-test-support = { path = "../nu-test-support", version = "0.93.1" } nu-test-support = { path = "../nu-test-support", version = "0.94.3" }
dirs-next = { workspace = true } dirs-next = { workspace = true }
mockito = { workspace = true, default-features = false } mockito = { workspace = true, default-features = false }
@ -146,3 +145,4 @@ quickcheck = { workspace = true }
quickcheck_macros = { workspace = true } quickcheck_macros = { workspace = true }
rstest = { workspace = true, default-features = false } rstest = { workspace = true, default-features = false }
pretty_assertions = { workspace = true } pretty_assertions = { workspace = true }
tempfile = { workspace = true }

View File

@ -128,48 +128,24 @@ fn action(input: &Value, args: &Arguments, head: Span) -> Value {
let range = &args.indexes; let range = &args.indexes;
match input { match input {
Value::Binary { val, .. } => { Value::Binary { val, .. } => {
use std::cmp::{self, Ordering};
let len = val.len() as isize; let len = val.len() as isize;
let start = if range.0 < 0 { range.0 + len } else { range.0 }; let start = if range.0 < 0 { range.0 + len } else { range.0 };
let end = if range.1 < 0 { range.1 + len } else { range.1 };
let end = if range.1 < 0 { if start > end {
cmp::max(range.1 + len, 0)
} else {
range.1
};
if start < len && end >= 0 {
match start.cmp(&end) {
Ordering::Equal => Value::binary(vec![], head),
Ordering::Greater => Value::error(
ShellError::TypeMismatch {
err_message: "End must be greater than or equal to Start".to_string(),
span: head,
},
head,
),
Ordering::Less => Value::binary(
if end == isize::MAX {
val.iter()
.skip(start as usize)
.copied()
.collect::<Vec<u8>>()
} else {
val.iter()
.skip(start as usize)
.take((end - start) as usize)
.copied()
.collect()
},
head,
),
}
} else {
Value::binary(vec![], head) Value::binary(vec![], head)
} else {
let val_iter = val.iter().skip(start as usize);
Value::binary(
if end == isize::MAX {
val_iter.copied().collect::<Vec<u8>>()
} else {
val_iter.take((end - start + 1) as usize).copied().collect()
},
head,
)
} }
} }
Value::Error { .. } => input.clone(), Value::Error { .. } => input.clone(),
other => Value::error( other => Value::error(
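
The rewritten arm above switches `bytes at` to an inclusive end index, with negative offsets counted from the end and `isize::MAX` standing for an unbounded range. A standalone sketch of the same arithmetic (inputs are assumed to stay within the buffer after adjustment, as in the hunk):

```rust
// Inclusive, negative-index-aware slicing of a byte buffer.
fn slice_inclusive(val: &[u8], mut start: isize, mut end: isize) -> Vec<u8> {
    let len = val.len() as isize;
    if start < 0 {
        start += len;
    }
    if end < 0 {
        end += len;
    }
    if start > end {
        Vec::new()
    } else {
        let iter = val.iter().skip(start as usize);
        if end == isize::MAX {
            iter.copied().collect()
        } else {
            iter.take((end - start + 1) as usize).copied().collect()
        }
    }
}

fn main() {
    assert_eq!(slice_inclusive(&[1, 2, 3, 4, 5], 1, 3), vec![2, 3, 4]);
    assert_eq!(slice_inclusive(&[1, 2, 3, 4, 5], -2, isize::MAX), vec![4, 5]);
}
```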

View File

@ -194,7 +194,7 @@ fn run_histogram(
if inputs.is_empty() { if inputs.is_empty() {
return Err(ShellError::CantFindColumn { return Err(ShellError::CantFindColumn {
col_name: col_name.clone(), col_name: col_name.clone(),
span: head_span, span: Some(head_span),
src_span: list_span, src_span: list_span,
}); });
} }

View File

@ -154,7 +154,7 @@ fn record_to_path_member(
let Some(value) = record.get("value") else { let Some(value) = record.get("value") else {
return Err(ShellError::CantFindColumn { return Err(ShellError::CantFindColumn {
col_name: "value".into(), col_name: "value".into(),
span: val_span, span: Some(val_span),
src_span: span, src_span: span,
}); });
}; };

View File

@ -122,7 +122,7 @@ pub fn action(input: &Value, _args: &CellPathOnlyArgs, span: Span) -> Value {
Value::Filesize { .. } => input.clone(), Value::Filesize { .. } => input.clone(),
Value::Int { val, .. } => Value::filesize(*val, value_span), Value::Int { val, .. } => Value::filesize(*val, value_span),
Value::Float { val, .. } => Value::filesize(*val as i64, value_span), Value::Float { val, .. } => Value::filesize(*val as i64, value_span),
Value::String { val, .. } => match int_from_string(val, value_span) { Value::String { val, .. } => match i64_from_string(val, value_span) {
Ok(val) => Value::filesize(val, value_span), Ok(val) => Value::filesize(val, value_span),
Err(error) => Value::error(error, value_span), Err(error) => Value::error(error, value_span),
}, },
@ -138,7 +138,8 @@ pub fn action(input: &Value, _args: &CellPathOnlyArgs, span: Span) -> Value {
), ),
} }
} }
fn int_from_string(a_string: &str, span: Span) -> Result<i64, ShellError> {
fn i64_from_string(a_string: &str, span: Span) -> Result<i64, ShellError> {
// Get the Locale so we know what the thousands separator is // Get the Locale so we know what the thousands separator is
let locale = get_system_locale(); let locale = get_system_locale();
@ -148,29 +149,46 @@ fn int_from_string(a_string: &str, span: Span) -> Result<i64, ShellError> {
let clean_string = no_comma_string.trim(); let clean_string = no_comma_string.trim();
// Handle negative file size // Handle negative file size
if let Some(stripped_string) = clean_string.strip_prefix('-') { if let Some(stripped_negative_string) = clean_string.strip_prefix('-') {
match stripped_string.parse::<bytesize::ByteSize>() { match stripped_negative_string.parse::<bytesize::ByteSize>() {
Ok(n) => Ok(-(n.as_u64() as i64)), Ok(n) => i64_from_byte_size(n, true, span),
Err(_) => Err(ShellError::CantConvert { Err(_) => Err(string_convert_error(span)),
to_type: "int".into(), }
from_type: "string".into(), } else if let Some(stripped_positive_string) = clean_string.strip_prefix('+') {
span, match stripped_positive_string.parse::<bytesize::ByteSize>() {
help: None, Ok(n) if stripped_positive_string.starts_with(|c: char| c.is_ascii_digit()) => {
}), i64_from_byte_size(n, false, span)
}
_ => Err(string_convert_error(span)),
} }
} else { } else {
match clean_string.parse::<bytesize::ByteSize>() { match clean_string.parse::<bytesize::ByteSize>() {
Ok(n) => Ok(n.0 as i64), Ok(n) => i64_from_byte_size(n, false, span),
Err(_) => Err(ShellError::CantConvert { Err(_) => Err(string_convert_error(span)),
to_type: "int".into(),
from_type: "string".into(),
span,
help: None,
}),
} }
} }
} }
fn i64_from_byte_size(
byte_size: bytesize::ByteSize,
is_negative: bool,
span: Span,
) -> Result<i64, ShellError> {
match i64::try_from(byte_size.as_u64()) {
Ok(n) => Ok(if is_negative { -n } else { n }),
Err(_) => Err(string_convert_error(span)),
}
}
fn string_convert_error(span: Span) -> ShellError {
ShellError::CantConvert {
to_type: "filesize".into(),
from_type: "string".into(),
span,
help: None,
}
}
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
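
A rough standalone sketch of the sign-aware parsing introduced above, assuming the `bytesize` crate. The real command also strips locale thousands separators and rejects forms like `+ 1kb`; that is omitted here.

```rust
// Parse a human-readable size string ("10MB", "-2KB") into a signed byte count,
// rejecting values that do not fit in i64.
fn parse_filesize(s: &str) -> Option<i64> {
    let s = s.trim();
    let (body, negative) = match s.strip_prefix('-') {
        Some(rest) => (rest, true),
        None => (s.strip_prefix('+').unwrap_or(s), false),
    };
    let bytes: bytesize::ByteSize = body.parse().ok()?;
    let n = i64::try_from(bytes.as_u64()).ok()?;
    Some(if negative { -n } else { n })
}

fn main() {
    println!("{:?}", parse_filesize("10MB"));
    println!("{:?}", parse_filesize("-2KB"));
}
```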

View File

@ -160,7 +160,7 @@ fn string_helper(
// Just set the type - that should be good enough. There is no guarantee that the data // Just set the type - that should be good enough. There is no guarantee that the data
// within a string stream is actually valid UTF-8. But refuse to do it if it was already set // within a string stream is actually valid UTF-8. But refuse to do it if it was already set
// to binary // to binary
if stream.type_() != ByteStreamType::Binary { if stream.type_().is_string_coercible() {
Ok(PipelineData::ByteStream( Ok(PipelineData::ByteStream(
stream.with_type(ByteStreamType::String), stream.with_type(ByteStreamType::String),
metadata, metadata,

View File

@ -28,6 +28,7 @@ impl Command for DebugProfile {
Some('v'), Some('v'),
) )
.switch("expr", "Collect expression types", Some('x')) .switch("expr", "Collect expression types", Some('x'))
.switch("lines", "Collect line numbers", Some('l'))
.named( .named(
"max-depth", "max-depth",
SyntaxShape::Int, SyntaxShape::Int,
@ -90,6 +91,7 @@ confusing the id/parent_id hierarchy. The --expr flag is helpful for investigati
let collect_expanded_source = call.has_flag(engine_state, stack, "expanded-source")?; let collect_expanded_source = call.has_flag(engine_state, stack, "expanded-source")?;
let collect_values = call.has_flag(engine_state, stack, "values")?; let collect_values = call.has_flag(engine_state, stack, "values")?;
let collect_exprs = call.has_flag(engine_state, stack, "expr")?; let collect_exprs = call.has_flag(engine_state, stack, "expr")?;
let collect_lines = call.has_flag(engine_state, stack, "lines")?;
let max_depth = call let max_depth = call
.get_flag(engine_state, stack, "max-depth")? .get_flag(engine_state, stack, "max-depth")?
.unwrap_or(2); .unwrap_or(2);
@ -101,6 +103,7 @@ confusing the id/parent_id hierarchy. The --expr flag is helpful for investigati
collect_expanded_source, collect_expanded_source,
collect_values, collect_values,
collect_exprs, collect_exprs,
collect_lines,
call.span(), call.span(),
); );
@ -118,14 +121,11 @@ confusing the id/parent_id hierarchy. The --expr flag is helpful for investigati
let result = ClosureEvalOnce::new(engine_state, stack, closure).run_with_input(input); let result = ClosureEvalOnce::new(engine_state, stack, closure).run_with_input(input);
// TODO: See eval_source() // Return potential errors
match result { let pipeline_data = result?;
Ok(pipeline_data) => {
let _ = pipeline_data.into_value(call.span()); // Collect the output
// pipeline_data.print(engine_state, caller_stack, true, false) let _ = pipeline_data.into_value(call.span());
}
Err(_e) => (), // TODO: Report error
}
Ok(engine_state Ok(engine_state
.deactivate_debugger() .deactivate_debugger()

View File

@ -127,7 +127,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
SysTemp, SysTemp,
SysUsers, SysUsers,
UName, UName,
Which,
}; };
// Help // Help
@ -164,14 +164,14 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
#[cfg(any( #[cfg(any(
target_os = "android", target_os = "android",
target_os = "linux", target_os = "linux",
target_os = "freebsd",
target_os = "netbsd",
target_os = "openbsd",
target_os = "macos", target_os = "macos",
target_os = "windows" target_os = "windows"
))] ))]
bind_command! { Ps }; bind_command! { Ps };
#[cfg(feature = "which-support")]
bind_command! { Which };
// Strings // Strings
bind_command! { bind_command! {
Char, Char,

View File

@ -1,6 +1,7 @@
use super::utils::gen_command;
use nu_cmd_base::util::get_editor; use nu_cmd_base::util::get_editor;
use nu_engine::{command_prelude::*, env_to_strings}; use nu_engine::{command_prelude::*, env_to_strings};
use nu_protocol::{process::ChildProcess, ByteStream};
use nu_system::ForegroundChild;
#[derive(Clone)] #[derive(Clone)]
pub struct ConfigEnv; pub struct ConfigEnv;
@ -47,7 +48,7 @@ impl Command for ConfigEnv {
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, _input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
// `--default` flag handling // `--default` flag handling
if call.has_flag(engine_state, stack, "default")? { if call.has_flag(engine_state, stack, "default")? {
@ -55,27 +56,59 @@ impl Command for ConfigEnv {
return Ok(Value::string(nu_utils::get_default_env(), head).into_pipeline_data()); return Ok(Value::string(nu_utils::get_default_env(), head).into_pipeline_data());
} }
let env_vars_str = env_to_strings(engine_state, stack)?; // Find the editor executable.
let nu_config = match engine_state.get_config_path("env-path") { let (editor_name, editor_args) = get_editor(engine_state, stack, call.head)?;
Some(path) => path, let paths = nu_engine::env::path_str(engine_state, stack, call.head)?;
None => { let cwd = engine_state.cwd(Some(stack))?;
return Err(ShellError::GenericError { let editor_executable =
error: "Could not find $nu.env-path".into(), crate::which(&editor_name, &paths, &cwd).ok_or(ShellError::ExternalCommand {
msg: "Could not find $nu.env-path".into(), label: format!("`{editor_name}` not found"),
span: None, help: "Failed to find the editor executable".into(),
help: None, span: call.head,
inner: vec![], })?;
});
} let Some(env_path) = engine_state.get_config_path("env-path") else {
return Err(ShellError::GenericError {
error: "Could not find $nu.env-path".into(),
msg: "Could not find $nu.env-path".into(),
span: None,
help: None,
inner: vec![],
});
}; };
let env_path = env_path.to_string_lossy().to_string();
let (item, config_args) = get_editor(engine_state, stack, call.head)?; // Create the command.
let mut command = std::process::Command::new(editor_executable);
gen_command(call.head, nu_config, item, config_args, env_vars_str).run_with_input( // Configure PWD.
engine_state, command.current_dir(cwd);
stack,
input, // Configure environment variables.
true, let envs = env_to_strings(engine_state, stack)?;
) command.env_clear();
command.envs(envs);
// Configure args.
command.arg(env_path);
command.args(editor_args);
// Spawn the child process. On Unix, also put the child process to
// foreground if we're in an interactive session.
#[cfg(windows)]
let child = ForegroundChild::spawn(command)?;
#[cfg(unix)]
let child = ForegroundChild::spawn(
command,
engine_state.is_interactive,
&engine_state.pipeline_externals_state,
)?;
// Wrap the output into a `PipelineData::ByteStream`.
let child = ChildProcess::new(child, None, false, call.head)?;
Ok(PipelineData::ByteStream(
ByteStream::child(child, call.head),
None,
))
} }
} }

View File

@ -1,6 +1,7 @@
use super::utils::gen_command;
use nu_cmd_base::util::get_editor; use nu_cmd_base::util::get_editor;
use nu_engine::{command_prelude::*, env_to_strings}; use nu_engine::{command_prelude::*, env_to_strings};
use nu_protocol::{process::ChildProcess, ByteStream};
use nu_system::ForegroundChild;
#[derive(Clone)] #[derive(Clone)]
pub struct ConfigNu; pub struct ConfigNu;
@ -51,7 +52,7 @@ impl Command for ConfigNu {
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, _input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
// `--default` flag handling // `--default` flag handling
if call.has_flag(engine_state, stack, "default")? { if call.has_flag(engine_state, stack, "default")? {
@ -59,27 +60,59 @@ impl Command for ConfigNu {
return Ok(Value::string(nu_utils::get_default_config(), head).into_pipeline_data()); return Ok(Value::string(nu_utils::get_default_config(), head).into_pipeline_data());
} }
let env_vars_str = env_to_strings(engine_state, stack)?; // Find the editor executable.
let nu_config = match engine_state.get_config_path("config-path") { let (editor_name, editor_args) = get_editor(engine_state, stack, call.head)?;
Some(path) => path, let paths = nu_engine::env::path_str(engine_state, stack, call.head)?;
None => { let cwd = engine_state.cwd(Some(stack))?;
return Err(ShellError::GenericError { let editor_executable =
error: "Could not find $nu.config-path".into(), crate::which(&editor_name, &paths, &cwd).ok_or(ShellError::ExternalCommand {
msg: "Could not find $nu.config-path".into(), label: format!("`{editor_name}` not found"),
span: None, help: "Failed to find the editor executable".into(),
help: None, span: call.head,
inner: vec![], })?;
});
} let Some(config_path) = engine_state.get_config_path("config-path") else {
return Err(ShellError::GenericError {
error: "Could not find $nu.config-path".into(),
msg: "Could not find $nu.config-path".into(),
span: None,
help: None,
inner: vec![],
});
}; };
let config_path = config_path.to_string_lossy().to_string();
let (item, config_args) = get_editor(engine_state, stack, call.head)?; // Create the command.
let mut command = std::process::Command::new(editor_executable);
gen_command(call.head, nu_config, item, config_args, env_vars_str).run_with_input( // Configure PWD.
engine_state, command.current_dir(cwd);
stack,
input, // Configure environment variables.
true, let envs = env_to_strings(engine_state, stack)?;
) command.env_clear();
command.envs(envs);
// Configure args.
command.arg(config_path);
command.args(editor_args);
// Spawn the child process. On Unix, also put the child process to
// foreground if we're in an interactive session.
#[cfg(windows)]
let child = ForegroundChild::spawn(command)?;
#[cfg(unix)]
let child = ForegroundChild::spawn(
command,
engine_state.is_interactive,
&engine_state.pipeline_externals_state,
)?;
// Wrap the output into a `PipelineData::ByteStream`.
let child = ChildProcess::new(child, None, false, call.head)?;
Ok(PipelineData::ByteStream(
ByteStream::child(child, call.head),
None,
))
} }
} }
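
A simplified, std-only sketch of how the rewritten `config env` / `config nu` assemble the editor invocation. The real code above resolves the binary with the crate's `which` helper and wraps the child in `nu_system::ForegroundChild` plus a `ByteStream`; here we just spawn and wait.

```rust
use std::collections::HashMap;
use std::path::Path;
use std::process::Command;

fn open_in_editor(
    editor: &str,
    editor_args: &[String],
    config_file: &Path,
    cwd: &Path,
    envs: &HashMap<String, String>,
) -> std::io::Result<std::process::ExitStatus> {
    let mut command = Command::new(editor);
    command.current_dir(cwd); // run relative to the shell's PWD
    command.env_clear(); // drop the parent environment...
    command.envs(envs); // ...and apply the shell's own env vars instead
    command.arg(config_file); // the config file is the first argument
    command.args(editor_args); // followed by any configured editor flags
    command.status()
}
```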

View File

@ -2,7 +2,6 @@ mod config_;
mod config_env; mod config_env;
mod config_nu; mod config_nu;
mod config_reset; mod config_reset;
mod utils;
pub use config_::ConfigMeta; pub use config_::ConfigMeta;
pub use config_env::ConfigEnv; pub use config_env::ConfigEnv;
pub use config_nu::ConfigNu; pub use config_nu::ConfigNu;

View File

@ -1,36 +0,0 @@
use crate::ExternalCommand;
use nu_protocol::{OutDest, Span, Spanned};
use std::{collections::HashMap, path::Path};
pub(crate) fn gen_command(
span: Span,
config_path: &Path,
item: String,
config_args: Vec<String>,
env_vars_str: HashMap<String, String>,
) -> ExternalCommand {
let name = Spanned { item, span };
let mut args = vec![Spanned {
item: config_path.to_string_lossy().to_string(),
span: Span::unknown(),
}];
let number_of_args = config_args.len() + 1;
for arg in config_args {
args.push(Spanned {
item: arg,
span: Span::unknown(),
})
}
ExternalCommand {
name,
args,
arg_keep_raw: vec![false; number_of_args],
out: OutDest::Inherit,
err: OutDest::Inherit,
env_vars: env_vars_str,
}
}

View File

@ -1,6 +1,5 @@
use nu_engine::{command_prelude::*, eval_block}; use nu_engine::{command_prelude::*, eval_block};
use nu_protocol::{debugger::WithoutDebug, engine::Closure}; use nu_protocol::{debugger::WithoutDebug, engine::Closure};
use std::collections::HashMap;
#[derive(Clone)] #[derive(Clone)]
pub struct WithEnv; pub struct WithEnv;
@ -58,78 +57,14 @@ fn with_env(
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let variable: Value = call.req(engine_state, stack, 0)?; let env: Record = call.req(engine_state, stack, 0)?;
let capture_block: Closure = call.req(engine_state, stack, 1)?; let capture_block: Closure = call.req(engine_state, stack, 1)?;
let block = engine_state.get_block(capture_block.block_id); let block = engine_state.get_block(capture_block.block_id);
let mut stack = stack.captures_to_stack_preserve_out_dest(capture_block.captures); let mut stack = stack.captures_to_stack_preserve_out_dest(capture_block.captures);
let mut env: HashMap<String, Value> = HashMap::new();
match &variable {
Value::List { vals: table, .. } => {
nu_protocol::report_error_new(
engine_state,
&ShellError::GenericError {
error: "Deprecated argument type".into(),
msg: "providing the variables to `with-env` as a list or single row table has been deprecated".into(),
span: Some(variable.span()),
help: Some("use the record form instead".into()),
inner: vec![],
},
);
if table.len() == 1 {
// single row([[X W]; [Y Z]])
match &table[0] {
Value::Record { val, .. } => {
for (k, v) in &**val {
env.insert(k.to_string(), v.clone());
}
}
x => {
return Err(ShellError::CantConvert {
to_type: "record".into(),
from_type: x.get_type().to_string(),
span: x.span(),
help: None,
});
}
}
} else {
// primitive values([X Y W Z])
for row in table.chunks(2) {
if row.len() == 2 {
env.insert(row[0].coerce_string()?, row[1].clone());
}
if row.len() == 1 {
return Err(ShellError::IncorrectValue {
msg: format!("Missing value for $env.{}", row[0].coerce_string()?),
val_span: row[0].span(),
call_span: call.head,
});
}
}
}
}
// when get object by `open x.json` or `from json`
Value::Record { val, .. } => {
for (k, v) in &**val {
env.insert(k.clone(), v.clone());
}
}
x => {
return Err(ShellError::CantConvert {
to_type: "record".into(),
from_type: x.get_type().to_string(),
span: x.span(),
help: None,
});
}
};
// TODO: factor list of prohibited env vars into common place // TODO: factor list of prohibited env vars into common place
for prohibited in ["PWD", "FILE_PWD", "CURRENT_FILE"] { for prohibited in ["PWD", "FILE_PWD", "CURRENT_FILE"] {
if env.contains_key(prohibited) { if env.contains(prohibited) {
return Err(ShellError::AutomaticEnvVarSetManually { return Err(ShellError::AutomaticEnvVarSetManually {
envvar_name: prohibited.into(), envvar_name: prohibited.into(),
span: call.head, span: call.head,

View File

@ -135,6 +135,11 @@ impl Command for Cd {
example: r#"cd -"#, example: r#"cd -"#,
result: None, result: None,
}, },
Example {
description: "Changing directory with a custom command requires 'def --env'",
example: r#"def --env gohome [] { cd ~ }"#,
result: None,
},
] ]
} }
} }

View File

@ -1,10 +1,9 @@
use filetime::FileTime; use filetime::FileTime;
#[allow(deprecated)] use nu_engine::command_prelude::*;
use nu_engine::{command_prelude::*, current_dir};
use nu_path::expand_path_with; use nu_path::expand_path_with;
use nu_protocol::NuGlob; use nu_protocol::NuGlob;
use std::{fs::OpenOptions, path::Path, time::SystemTime}; use std::{fs::OpenOptions, time::SystemTime};
use super::util::get_rest_for_glob_pattern; use super::util::get_rest_for_glob_pattern;
@ -36,12 +35,12 @@ impl Command for Touch {
) )
.switch( .switch(
"modified", "modified",
"change the modification time of the file or directory. If no timestamp, date or reference file/directory is given, the current time is used", "change the modification time of the file or directory. If no reference file/directory is given, the current time is used",
Some('m'), Some('m'),
) )
.switch( .switch(
"access", "access",
"change the access time of the file or directory. If no timestamp, date or reference file/directory is given, the current time is used", "change the access time of the file or directory. If no reference file/directory is given, the current time is used",
Some('a'), Some('a'),
) )
.switch( .switch(
@ -69,6 +68,8 @@ impl Command for Touch {
let no_create: bool = call.has_flag(engine_state, stack, "no-create")?; let no_create: bool = call.has_flag(engine_state, stack, "no-create")?;
let files: Vec<Spanned<NuGlob>> = get_rest_for_glob_pattern(engine_state, stack, call, 0)?; let files: Vec<Spanned<NuGlob>> = get_rest_for_glob_pattern(engine_state, stack, call, 0)?;
let cwd = engine_state.cwd(Some(stack))?;
if files.is_empty() { if files.is_empty() {
return Err(ShellError::MissingParameter { return Err(ShellError::MissingParameter {
param_name: "requires file paths".to_string(), param_name: "requires file paths".to_string(),
@ -86,7 +87,7 @@ impl Command for Touch {
} }
if let Some(reference) = reference { if let Some(reference) = reference {
let reference_path = Path::new(&reference.item); let reference_path = nu_path::expand_path_with(reference.item, &cwd, true);
if !reference_path.exists() { if !reference_path.exists() {
return Err(ShellError::FileNotFoundCustom { return Err(ShellError::FileNotFoundCustom {
msg: "Reference path not found".into(), msg: "Reference path not found".into(),
@ -114,9 +115,6 @@ impl Command for Touch {
})?; })?;
} }
#[allow(deprecated)]
let cwd = current_dir(engine_state, stack)?;
for glob in files { for glob in files {
let path = expand_path_with(glob.item.as_ref(), &cwd, glob.item.is_expand()); let path = expand_path_with(glob.item.as_ref(), &cwd, glob.item.is_expand());
@ -191,11 +189,6 @@ impl Command for Touch {
example: r#"touch -m -r fixture.json d e"#, example: r#"touch -m -r fixture.json d e"#,
result: None, result: None,
}, },
Example {
description: r#"Changes the last accessed time of "fixture.json" to a date"#,
example: r#"touch -a -d "August 24, 2019; 12:30:30" fixture.json"#,
result: None,
},
] ]
} }
} }
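
The touch hunks above resolve the reference path against the engine's cwd before checking that it exists, then reuse that file's timestamps. A rough standalone sketch of the overall idea behind `touch -r`, using std plus the filetime crate the file already imports; the helper name and paths are placeholders, not the command's actual code:

    use filetime::FileTime;
    use std::{fs, io, path::Path};

    fn touch_from_reference(cwd: &Path, reference: &str, target: &str) -> io::Result<()> {
        // Resolve both paths against an explicit cwd instead of the process cwd.
        let reference = cwd.join(reference);
        let target = cwd.join(target);

        let metadata = fs::metadata(&reference)?;
        let atime = FileTime::from_last_access_time(&metadata);
        let mtime = FileTime::from_last_modification_time(&metadata);

        // Copy the reference file's access and modification times onto the target.
        filetime::set_file_times(&target, atime, mtime)
    }

    fn main() -> io::Result<()> {
        touch_from_reference(Path::new("."), "fixture.json", "d")
    }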

View File

@ -1,4 +1,5 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use std::io::Read;
#[derive(Clone)] #[derive(Clone)]
pub struct First; pub struct First;
@ -171,10 +172,9 @@ fn first_helper(
} }
} }
PipelineData::ByteStream(stream, metadata) => { PipelineData::ByteStream(stream, metadata) => {
if stream.type_() == ByteStreamType::Binary { if stream.type_().is_binary_coercible() {
let span = stream.span(); let span = stream.span();
if let Some(mut reader) = stream.reader() { if let Some(mut reader) = stream.reader() {
use std::io::Read;
if return_single_element { if return_single_element {
// Take a single byte // Take a single byte
let mut byte = [0u8]; let mut byte = [0u8];

View File

@ -1,6 +1,5 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use std::{collections::VecDeque, io::Read};
use std::collections::VecDeque;
#[derive(Clone)] #[derive(Clone)]
pub struct Last; pub struct Last;
@ -161,10 +160,9 @@ impl Command for Last {
} }
} }
PipelineData::ByteStream(stream, ..) => { PipelineData::ByteStream(stream, ..) => {
if stream.type_() == ByteStreamType::Binary { if stream.type_().is_binary_coercible() {
let span = stream.span(); let span = stream.span();
if let Some(mut reader) = stream.reader() { if let Some(mut reader) = stream.reader() {
use std::io::Read;
// Have to be a bit tricky here, but just consume into a VecDeque that we // Have to be a bit tricky here, but just consume into a VecDeque that we
// shrink to fit each time // shrink to fit each time
const TAKE: u64 = 8192; const TAKE: u64 = 8192;
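
The `last` hunk consumes the byte stream into a VecDeque that is trimmed down to the trailing bytes after each chunk. A simplified std-only sketch of that ring-buffer idea (it skips the shrink_to_fit step and the chunk size is arbitrary):

    use std::collections::VecDeque;
    use std::io::{self, Read};

    /// Keep only the final `n` bytes of a reader without buffering the whole stream.
    fn last_n_bytes(mut reader: impl Read, n: usize) -> io::Result<Vec<u8>> {
        const TAKE: usize = 8192;
        let mut ring: VecDeque<u8> = VecDeque::with_capacity(n + TAKE);
        let mut chunk = vec![0u8; TAKE];
        loop {
            let read = reader.read(&mut chunk)?;
            if read == 0 {
                break;
            }
            ring.extend(chunk[..read].iter().copied());
            // Drop everything but the trailing `n` bytes before reading more.
            while ring.len() > n {
                ring.pop_front();
            }
        }
        Ok(ring.into_iter().collect())
    }

    fn main() -> io::Result<()> {
        let data = io::Cursor::new(b"0123456789".to_vec());
        assert_eq!(last_n_bytes(data, 3)?, b"789".to_vec());
        Ok(())
    }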

View File

@ -1,4 +1,5 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use std::io::{self, Read};
#[derive(Clone)] #[derive(Clone)]
pub struct Skip; pub struct Skip;
@ -94,12 +95,11 @@ impl Command for Skip {
let input_span = input.span().unwrap_or(call.head); let input_span = input.span().unwrap_or(call.head);
match input { match input {
PipelineData::ByteStream(stream, metadata) => { PipelineData::ByteStream(stream, metadata) => {
if stream.type_() == ByteStreamType::Binary { if stream.type_().is_binary_coercible() {
let span = stream.span(); let span = stream.span();
if let Some(mut reader) = stream.reader() { if let Some(mut reader) = stream.reader() {
use std::io::Read;
// Copy the number of skipped bytes into the sink before proceeding // Copy the number of skipped bytes into the sink before proceeding
std::io::copy(&mut (&mut reader).take(n as u64), &mut std::io::sink()) io::copy(&mut (&mut reader).take(n as u64), &mut io::sink())
.err_span(span)?; .err_span(span)?;
Ok(PipelineData::ByteStream( Ok(PipelineData::ByteStream(
ByteStream::read(reader, call.head, None, ByteStreamType::Binary), ByteStream::read(reader, call.head, None, ByteStreamType::Binary),
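
The `skip` hunk discards the first n bytes by copying a `take` adapter into `io::sink`, then hands the same reader on downstream. The same trick in isolation, std only:

    use std::io::{self, Read};

    fn main() -> io::Result<()> {
        let mut reader = io::Cursor::new(b"0123456789".to_vec());
        let skip = 4u64;

        // Discard the first `skip` bytes without collecting them anywhere.
        io::copy(&mut (&mut reader).take(skip), &mut io::sink())?;

        // The same reader then continues from byte 4.
        let mut rest = String::new();
        reader.read_to_string(&mut rest)?;
        assert_eq!(rest, "456789");
        Ok(())
    }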

View File

@ -130,7 +130,7 @@ pub fn split(
Some(group_key) => Ok(group_key.coerce_string()?), Some(group_key) => Ok(group_key.coerce_string()?),
None => Err(ShellError::CantFindColumn { None => Err(ShellError::CantFindColumn {
col_name: column_name.item.to_string(), col_name: column_name.item.to_string(),
span: column_name.span, span: Some(column_name.span),
src_span: row.span(), src_span: row.span(),
}), }),
} }

View File

@ -1,4 +1,5 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use std::io::Read;
#[derive(Clone)] #[derive(Clone)]
pub struct Take; pub struct Take;
@ -79,9 +80,8 @@ impl Command for Take {
metadata, metadata,
)), )),
PipelineData::ByteStream(stream, metadata) => { PipelineData::ByteStream(stream, metadata) => {
if stream.type_() == ByteStreamType::Binary { if stream.type_().is_binary_coercible() {
if let Some(reader) = stream.reader() { if let Some(reader) = stream.reader() {
use std::io::Read;
// Just take 'rows' bytes off the stream, mimicking the binary behavior // Just take 'rows' bytes off the stream, mimicking the binary behavior
Ok(PipelineData::ByteStream( Ok(PipelineData::ByteStream(
ByteStream::read( ByteStream::read(

View File

@ -123,7 +123,7 @@ fn validate(vec: &[Value], columns: &[String], span: Span) -> Result<(), ShellEr
if let Some(nonexistent) = nonexistent_column(columns, record.columns()) { if let Some(nonexistent) = nonexistent_column(columns, record.columns()) {
return Err(ShellError::CantFindColumn { return Err(ShellError::CantFindColumn {
col_name: nonexistent, col_name: nonexistent,
span, span: Some(span),
src_span: val_span, src_span: val_span,
}); });
} }

View File

@ -1,7 +1,14 @@
use csv::{ReaderBuilder, Trim}; use csv::{ReaderBuilder, Trim};
use nu_protocol::{IntoPipelineData, PipelineData, ShellError, Span, Value}; use nu_protocol::{ByteStream, ListStream, PipelineData, ShellError, Span, Value};
fn from_delimited_string_to_value( fn from_csv_error(err: csv::Error, span: Span) -> ShellError {
ShellError::DelimiterError {
msg: err.to_string(),
span,
}
}
fn from_delimited_stream(
DelimitedReaderConfig { DelimitedReaderConfig {
separator, separator,
comment, comment,
@ -12,9 +19,15 @@ fn from_delimited_string_to_value(
no_infer, no_infer,
trim, trim,
}: DelimitedReaderConfig, }: DelimitedReaderConfig,
s: String, input: ByteStream,
span: Span, span: Span,
) -> Result<Value, csv::Error> { ) -> Result<ListStream, ShellError> {
let input_reader = if let Some(stream) = input.reader() {
stream
} else {
return Ok(ListStream::new(std::iter::empty(), span, None));
};
let mut reader = ReaderBuilder::new() let mut reader = ReaderBuilder::new()
.has_headers(!noheaders) .has_headers(!noheaders)
.flexible(flexible) .flexible(flexible)
@ -23,19 +36,29 @@ fn from_delimited_string_to_value(
.quote(quote as u8) .quote(quote as u8)
.escape(escape.map(|c| c as u8)) .escape(escape.map(|c| c as u8))
.trim(trim) .trim(trim)
.from_reader(s.as_bytes()); .from_reader(input_reader);
let headers = if noheaders { let headers = if noheaders {
(1..=reader.headers()?.len()) (1..=reader
.headers()
.map_err(|err| from_csv_error(err, span))?
.len())
.map(|i| format!("column{i}")) .map(|i| format!("column{i}"))
.collect::<Vec<String>>() .collect::<Vec<String>>()
} else { } else {
reader.headers()?.iter().map(String::from).collect() reader
.headers()
.map_err(|err| from_csv_error(err, span))?
.iter()
.map(String::from)
.collect()
}; };
let mut rows = vec![]; let iter = reader.into_records().map(move |row| {
for row in reader.records() { let row = match row {
let row = row?; Ok(row) => row,
Err(err) => return Value::error(from_csv_error(err, span), span),
};
let columns = headers.iter().cloned(); let columns = headers.iter().cloned();
let values = row let values = row
.into_iter() .into_iter()
@ -57,10 +80,10 @@ fn from_delimited_string_to_value(
// //
// Otherwise, if there are less values than headers, // Otherwise, if there are less values than headers,
// then `Value::nothing(span)` is used to fill the remaining columns. // then `Value::nothing(span)` is used to fill the remaining columns.
rows.push(Value::record(columns.zip(values).collect(), span)); Value::record(columns.zip(values).collect(), span)
} });
Ok(Value::list(rows, span)) Ok(ListStream::new(iter, span, None))
} }
pub(super) struct DelimitedReaderConfig { pub(super) struct DelimitedReaderConfig {
@ -79,14 +102,27 @@ pub(super) fn from_delimited_data(
input: PipelineData, input: PipelineData,
name: Span, name: Span,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let (concat_string, _span, metadata) = input.collect_string_strict(name)?; match input {
PipelineData::Empty => Ok(PipelineData::Empty),
Ok(from_delimited_string_to_value(config, concat_string, name) PipelineData::Value(value, metadata) => {
.map_err(|x| ShellError::DelimiterError { let string = value.into_string()?;
msg: x.to_string(), let byte_stream = ByteStream::read_string(string, name, None);
span: name, Ok(PipelineData::ListStream(
})? from_delimited_stream(config, byte_stream, name)?,
.into_pipeline_data_with_metadata(metadata)) metadata,
))
}
PipelineData::ListStream(list_stream, _) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "string".into(),
wrong_type: "list".into(),
dst_span: name,
src_span: list_stream.span(),
}),
PipelineData::ByteStream(byte_stream, metadata) => Ok(PipelineData::ListStream(
from_delimited_stream(config, byte_stream, name)?,
metadata,
)),
}
} }
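
from_delimited_stream builds the csv reader directly on the byte stream's reader and walks into_records lazily instead of collecting the input into one String first. A cut-down sketch of that pattern against the csv crate alone; the sample input and delimiter are arbitrary:

    use std::io::Cursor;

    fn main() -> Result<(), csv::Error> {
        // Any `Read` works here; the command feeds in the byte stream's reader.
        let input = Cursor::new("foo;bar\n1;2\n3;4\n");

        let mut reader = csv::ReaderBuilder::new()
            .has_headers(true)
            .delimiter(b';')
            .from_reader(input);

        let headers: Vec<String> = reader.headers()?.iter().map(String::from).collect();

        // `into_records` yields rows lazily, so output can start before input ends.
        for row in reader.into_records() {
            let row = row?;
            for (column, value) in headers.iter().zip(row.iter()) {
                println!("{column} = {value}");
            }
        }
        Ok(())
    }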
pub fn trim_from_str(trim: Option<Value>) -> Result<Trim, ShellError> { pub fn trim_from_str(trim: Option<Value>) -> Result<Trim, ShellError> {

View File

@ -1,4 +1,10 @@
use std::{
io::{BufRead, Cursor},
sync::{atomic::AtomicBool, Arc},
};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::ListStream;
#[derive(Clone)] #[derive(Clone)]
pub struct FromJson; pub struct FromJson;
@ -45,6 +51,15 @@ impl Command for FromJson {
"b" => Value::test_int(2), "b" => Value::test_int(2),
})), })),
}, },
Example {
example: r#"'{ "a": 1 }
{ "b": 2 }' | from json --objects"#,
description: "Parse a stream of line-delimited JSON values",
result: Some(Value::test_list(vec![
Value::test_record(record! {"a" => Value::test_int(1)}),
Value::test_record(record! {"b" => Value::test_int(2)}),
])),
},
] ]
} }
@ -56,49 +71,80 @@ impl Command for FromJson {
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let span = call.head; let span = call.head;
let (string_input, span, metadata) = input.collect_string_strict(span)?;
if string_input.is_empty() {
return Ok(Value::nothing(span).into_pipeline_data());
}
let strict = call.has_flag(engine_state, stack, "strict")?; let strict = call.has_flag(engine_state, stack, "strict")?;
// TODO: turn this into a structured underline of the nu_json error // TODO: turn this into a structured underline of the nu_json error
if call.has_flag(engine_state, stack, "objects")? { if call.has_flag(engine_state, stack, "objects")? {
let lines = string_input.lines().filter(|line| !line.trim().is_empty()); // Return a stream of JSON values, one for each non-empty line
match input {
let converted_lines: Vec<_> = if strict { PipelineData::Value(Value::String { val, .. }, metadata) => {
lines Ok(PipelineData::ListStream(
.map(|line| { read_json_lines(Cursor::new(val), span, strict, engine_state.ctrlc.clone()),
convert_string_to_value_strict(line, span) metadata,
.unwrap_or_else(|err| Value::error(err, span)) ))
}) }
.collect() PipelineData::ByteStream(stream, metadata)
} else { if stream.type_() != ByteStreamType::Binary =>
lines {
.map(|line| { if let Some(reader) = stream.reader() {
convert_string_to_value(line, span) Ok(PipelineData::ListStream(
.unwrap_or_else(|err| Value::error(err, span)) read_json_lines(reader, span, strict, None),
}) metadata,
.collect() ))
}; } else {
Ok(PipelineData::Empty)
Ok(converted_lines.into_pipeline_data_with_metadata( }
span, }
engine_state.ctrlc.clone(), _ => Err(ShellError::OnlySupportsThisInputType {
metadata, exp_input_type: "string".into(),
)) wrong_type: input.get_type().to_string(),
} else if strict { dst_span: call.head,
Ok(convert_string_to_value_strict(&string_input, span)? src_span: input.span().unwrap_or(call.head),
.into_pipeline_data_with_metadata(metadata)) }),
}
} else { } else {
Ok(convert_string_to_value(&string_input, span)? // Return a single JSON value
.into_pipeline_data_with_metadata(metadata)) let (string_input, span, metadata) = input.collect_string_strict(span)?;
if string_input.is_empty() {
return Ok(Value::nothing(span).into_pipeline_data());
}
if strict {
Ok(convert_string_to_value_strict(&string_input, span)?
.into_pipeline_data_with_metadata(metadata))
} else {
Ok(convert_string_to_value(&string_input, span)?
.into_pipeline_data_with_metadata(metadata))
}
} }
} }
} }
/// Create a stream of values from a reader that produces line-delimited JSON
fn read_json_lines(
input: impl BufRead + Send + 'static,
span: Span,
strict: bool,
interrupt: Option<Arc<AtomicBool>>,
) -> ListStream {
let iter = input
.lines()
.filter(|line| line.as_ref().is_ok_and(|line| !line.trim().is_empty()) || line.is_err())
.map(move |line| {
let line = line.err_span(span)?;
if strict {
convert_string_to_value_strict(&line, span)
} else {
convert_string_to_value(&line, span)
}
})
.map(move |result| result.unwrap_or_else(|err| Value::error(err, span)));
ListStream::new(iter, span, interrupt)
}
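
read_json_lines filters out blank lines, parses each remaining line on its own, and turns per-line failures into error values rather than aborting the whole stream. A standalone sketch of the same shape; serde_json stands in for nu_json purely for illustration:

    use std::io::{BufRead, Cursor};

    fn main() {
        let input = Cursor::new("{ \"a\": 1 }\n\n{ \"b\": 2 }");

        // Blank lines are skipped; every other line is parsed independently, and
        // a parse failure becomes a per-line error instead of ending the stream.
        let values = input
            .lines()
            .filter(|line| line.as_ref().map_or(true, |l| !l.trim().is_empty()))
            .map(|line| {
                let line = line.expect("read error");
                serde_json::from_str::<serde_json::Value>(&line)
            });

        for value in values {
            match value {
                Ok(v) => println!("parsed: {v}"),
                Err(e) => println!("error: {e}"),
            }
        }
    }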
fn convert_nujson_to_value(value: nu_json::Value, span: Span) -> Value { fn convert_nujson_to_value(value: nu_json::Value, span: Span) -> Value {
match value { match value {
nu_json::Value::Array(array) => Value::list( nu_json::Value::Array(array) => Value::list(

View File

@ -1,3 +1,5 @@
use std::sync::Arc;
use crate::formats::to::delimited::to_delimited_data; use crate::formats::to::delimited::to_delimited_data;
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::Config; use nu_protocol::Config;
@ -27,26 +29,37 @@ impl Command for ToCsv {
"do not output the columns names as the first row", "do not output the columns names as the first row",
Some('n'), Some('n'),
) )
.named(
"columns",
SyntaxShape::List(SyntaxShape::String.into()),
"the names (in order) of the columns to use",
None,
)
.category(Category::Formats) .category(Category::Formats)
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {
description: "Outputs an CSV string representing the contents of this table", description: "Outputs a CSV string representing the contents of this table",
example: "[[foo bar]; [1 2]] | to csv", example: "[[foo bar]; [1 2]] | to csv",
result: Some(Value::test_string("foo,bar\n1,2\n")), result: Some(Value::test_string("foo,bar\n1,2\n")),
}, },
Example { Example {
description: "Outputs an CSV string representing the contents of this table", description: "Outputs a CSV string representing the contents of this table",
example: "[[foo bar]; [1 2]] | to csv --separator ';' ", example: "[[foo bar]; [1 2]] | to csv --separator ';' ",
result: Some(Value::test_string("foo;bar\n1;2\n")), result: Some(Value::test_string("foo;bar\n1;2\n")),
}, },
Example { Example {
description: "Outputs an CSV string representing the contents of this record", description: "Outputs a CSV string representing the contents of this record",
example: "{a: 1 b: 2} | to csv", example: "{a: 1 b: 2} | to csv",
result: Some(Value::test_string("a,b\n1,2\n")), result: Some(Value::test_string("a,b\n1,2\n")),
}, },
Example {
description: "Outputs a CSV stream with column names pre-determined",
example: "[[foo bar baz]; [1 2 3]] | to csv --columns [baz foo]",
result: Some(Value::test_string("baz,foo\n3,1\n")),
},
] ]
} }
@ -64,8 +77,9 @@ impl Command for ToCsv {
let head = call.head; let head = call.head;
let noheaders = call.has_flag(engine_state, stack, "noheaders")?; let noheaders = call.has_flag(engine_state, stack, "noheaders")?;
let separator: Option<Spanned<String>> = call.get_flag(engine_state, stack, "separator")?; let separator: Option<Spanned<String>> = call.get_flag(engine_state, stack, "separator")?;
let config = engine_state.get_config(); let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
to_csv(input, noheaders, separator, head, config) let config = engine_state.config.clone();
to_csv(input, noheaders, separator, columns, head, config)
} }
} }
@ -73,13 +87,14 @@ fn to_csv(
input: PipelineData, input: PipelineData,
noheaders: bool, noheaders: bool,
separator: Option<Spanned<String>>, separator: Option<Spanned<String>>,
columns: Option<Vec<String>>,
head: Span, head: Span,
config: &Config, config: Arc<Config>,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let sep = match separator { let sep = match separator {
Some(Spanned { item: s, span, .. }) => { Some(Spanned { item: s, span, .. }) => {
if s == r"\t" { if s == r"\t" {
'\t' Spanned { item: '\t', span }
} else { } else {
let vec_s: Vec<char> = s.chars().collect(); let vec_s: Vec<char> = s.chars().collect();
if vec_s.len() != 1 { if vec_s.len() != 1 {
@ -89,13 +104,19 @@ fn to_csv(
span, span,
}); });
}; };
vec_s[0] Spanned {
item: vec_s[0],
span: head,
}
} }
} }
_ => ',', _ => Spanned {
item: ',',
span: head,
},
}; };
to_delimited_data(noheaders, sep, "CSV", input, head, config) to_delimited_data(noheaders, sep, columns, "CSV", input, head, config)
} }
#[cfg(test)] #[cfg(test)]

View File

@ -1,113 +1,31 @@
use csv::{Writer, WriterBuilder}; use csv::WriterBuilder;
use nu_cmd_base::formats::to::delimited::merge_descriptors; use nu_cmd_base::formats::to::delimited::merge_descriptors;
use nu_protocol::{Config, IntoPipelineData, PipelineData, Record, ShellError, Span, Value}; use nu_protocol::{
use std::{collections::VecDeque, error::Error}; ByteStream, ByteStreamType, Config, PipelineData, ShellError, Span, Spanned, Value,
};
use std::{iter, sync::Arc};
fn from_value_to_delimited_string( fn make_csv_error(error: csv::Error, format_name: &str, head: Span) -> ShellError {
value: &Value, if let csv::ErrorKind::Io(error) = error.kind() {
separator: char, ShellError::IOErrorSpanned {
config: &Config, msg: error.to_string(),
head: Span, span: head,
) -> Result<String, ShellError> { }
let span = value.span(); } else {
match value { ShellError::GenericError {
Value::Record { val, .. } => record_to_delimited(val, span, separator, config, head), error: format!("Failed to generate {format_name} data"),
Value::List { vals, .. } => table_to_delimited(vals, span, separator, config, head), msg: error.to_string(),
// Propagate errors by explicitly matching them before the final case. span: Some(head),
Value::Error { error, .. } => Err(*error.clone()), help: None,
v => Err(make_unsupported_input_error(v, head, v.span())), inner: vec![],
}
}
fn record_to_delimited(
record: &Record,
span: Span,
separator: char,
config: &Config,
head: Span,
) -> Result<String, ShellError> {
let mut wtr = WriterBuilder::new()
.delimiter(separator as u8)
.from_writer(vec![]);
let mut fields: VecDeque<String> = VecDeque::new();
let mut values: VecDeque<String> = VecDeque::new();
for (k, v) in record {
fields.push_back(k.clone());
values.push_back(to_string_tagged_value(v, config, head, span)?);
}
wtr.write_record(fields).expect("can not write.");
wtr.write_record(values).expect("can not write.");
writer_to_string(wtr).map_err(|_| make_conversion_error("record", span))
}
fn table_to_delimited(
vals: &[Value],
span: Span,
separator: char,
config: &Config,
head: Span,
) -> Result<String, ShellError> {
if let Some(val) = find_non_record(vals) {
return Err(make_unsupported_input_error(val, head, span));
}
let mut wtr = WriterBuilder::new()
.delimiter(separator as u8)
.from_writer(vec![]);
let merged_descriptors = merge_descriptors(vals);
if merged_descriptors.is_empty() {
let vals = vals
.iter()
.map(|ele| {
to_string_tagged_value(ele, config, head, span).unwrap_or_else(|_| String::new())
})
.collect::<Vec<_>>();
wtr.write_record(vals).expect("can not write");
} else {
wtr.write_record(merged_descriptors.iter().map(|item| &item[..]))
.expect("can not write.");
for l in vals {
// should always be true because of `find_non_record` above
if let Value::Record { val: l, .. } = l {
let mut row = vec![];
for desc in &merged_descriptors {
row.push(match l.get(desc) {
Some(s) => to_string_tagged_value(s, config, head, span)?,
None => String::new(),
});
}
wtr.write_record(&row).expect("can not write");
}
} }
}
writer_to_string(wtr).map_err(|_| make_conversion_error("table", span))
}
fn writer_to_string(writer: Writer<Vec<u8>>) -> Result<String, Box<dyn Error>> {
Ok(String::from_utf8(writer.into_inner()?)?)
}
fn make_conversion_error(type_from: &str, span: Span) -> ShellError {
ShellError::CantConvert {
to_type: type_from.to_string(),
from_type: "string".to_string(),
span,
help: None,
} }
} }
fn to_string_tagged_value( fn to_string_tagged_value(
v: &Value, v: &Value,
config: &Config, config: &Config,
span: Span, format_name: &'static str,
head: Span,
) -> Result<String, ShellError> { ) -> Result<String, ShellError> {
match &v { match &v {
Value::String { .. } Value::String { .. }
@ -123,50 +41,124 @@ fn to_string_tagged_value(
Value::Nothing { .. } => Ok(String::new()), Value::Nothing { .. } => Ok(String::new()),
// Propagate existing errors // Propagate existing errors
Value::Error { error, .. } => Err(*error.clone()), Value::Error { error, .. } => Err(*error.clone()),
_ => Err(make_unsupported_input_error(v, head, span)), _ => Err(make_cant_convert_error(v, format_name)),
} }
} }
fn make_unsupported_input_error(value: &Value, head: Span, span: Span) -> ShellError { fn make_unsupported_input_error(
r#type: impl std::fmt::Display,
head: Span,
span: Span,
) -> ShellError {
ShellError::UnsupportedInput { ShellError::UnsupportedInput {
msg: "Unexpected type".to_string(), msg: "expected table or record".to_string(),
input: format!("input type: {:?}", value.get_type()), input: format!("input type: {}", r#type),
msg_span: head, msg_span: head,
input_span: span, input_span: span,
} }
} }
pub fn find_non_record(values: &[Value]) -> Option<&Value> { fn make_cant_convert_error(value: &Value, format_name: &'static str) -> ShellError {
values ShellError::CantConvert {
.iter() to_type: "string".into(),
.find(|val| !matches!(val, Value::Record { .. })) from_type: value.get_type().to_string(),
span: value.span(),
help: Some(format!(
"only simple values are supported for {format_name} output"
)),
}
} }
pub fn to_delimited_data( pub fn to_delimited_data(
noheaders: bool, noheaders: bool,
sep: char, separator: Spanned<char>,
columns: Option<Vec<String>>,
format_name: &'static str, format_name: &'static str,
input: PipelineData, input: PipelineData,
span: Span, head: Span,
config: &Config, config: Arc<Config>,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(span)?; let mut input = input;
let output = match from_value_to_delimited_string(&value, sep, config, span) { let span = input.span().unwrap_or(head);
Ok(mut x) => { let metadata = input.metadata();
if noheaders {
if let Some(second_line) = x.find('\n') { let separator = u8::try_from(separator.item).map_err(|_| ShellError::IncorrectValue {
let start = second_line + 1; msg: "separator must be an ASCII character".into(),
x.replace_range(0..start, ""); val_span: separator.span,
} call_span: head,
} })?;
Ok(x)
// Check to ensure the input is likely one of our supported types first. We can't check a stream
// without consuming it though
match input {
PipelineData::Value(Value::List { .. } | Value::Record { .. }, _) => (),
PipelineData::Value(Value::Error { error, .. }, _) => return Err(*error),
PipelineData::Value(other, _) => {
return Err(make_unsupported_input_error(other.get_type(), head, span))
} }
Err(_) => Err(ShellError::CantConvert { PipelineData::ByteStream(..) => {
to_type: format_name.into(), return Err(make_unsupported_input_error("byte stream", head, span))
from_type: value.get_type().to_string(), }
span: value.span(), PipelineData::ListStream(..) => (),
help: None, PipelineData::Empty => (),
}), }
}?;
Ok(Value::string(output, span).into_pipeline_data()) // Determine the columns we'll use. This is necessary even if we don't write the header row,
// because we need to write consistent columns.
let columns = match columns {
Some(columns) => columns,
None => {
// The columns were not provided. We need to detect them, and in order to do so, we have
// to convert the input into a value first, so that we can find all of them
let value = input.into_value(span)?;
let columns = match &value {
Value::List { vals, .. } => merge_descriptors(vals),
Value::Record { val, .. } => val.columns().cloned().collect(),
_ => return Err(make_unsupported_input_error(value.get_type(), head, span)),
};
input = PipelineData::Value(value, metadata.clone());
columns
}
};
// Generate a byte stream of all of the values in the pipeline iterator, with a non-strict
// iterator so we can still accept plain records.
let mut iter = input.into_iter();
// If we're configured to generate a header, we generate it first, then set this false
let mut is_header = !noheaders;
let stream = ByteStream::from_fn(head, None, ByteStreamType::String, move |buffer| {
let mut wtr = WriterBuilder::new()
.delimiter(separator)
.from_writer(buffer);
if is_header {
// Unless we are configured not to write a header, we write the header row now, once,
// before everything else.
wtr.write_record(&columns)
.map_err(|err| make_csv_error(err, format_name, head))?;
is_header = false;
Ok(true)
} else if let Some(row) = iter.next() {
// Write each column of a normal row, in order
let record = row.into_record()?;
for column in &columns {
let field = record
.get(column)
.map(|v| to_string_tagged_value(v, &config, format_name))
.unwrap_or(Ok(String::new()))?;
wtr.write_field(field)
.map_err(|err| make_csv_error(err, format_name, head))?;
}
// End the row
wtr.write_record(iter::empty::<String>())
.map_err(|err| make_csv_error(err, format_name, head))?;
Ok(true)
} else {
Ok(false)
}
});
Ok(PipelineData::ByteStream(stream, metadata))
} }
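
to_delimited_data now emits CSV as a pull-based byte stream: a closure that writes the header once, then one row per call, into whatever buffer it is handed. A self-contained sketch of that generator shape using only the csv crate; the columns and rows are made up, and this demo reuses a single buffer so the whole output accumulates in it:

    use csv::WriterBuilder;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        let columns = vec!["baz".to_string(), "foo".to_string()];
        let mut rows = vec![vec!["3", "1"], vec!["6", "4"]].into_iter();
        let mut is_header = true;

        // Each call fills `buffer` with the next chunk of CSV and reports whether
        // there is more to come, the same shape as the ByteStream::from_fn closure.
        let mut next_chunk = move |buffer: &mut Vec<u8>| -> Result<bool, Box<dyn std::error::Error>> {
            let mut wtr = WriterBuilder::new().delimiter(b',').from_writer(buffer);
            let more = if is_header {
                wtr.write_record(&columns)?;
                is_header = false;
                true
            } else if let Some(row) = rows.next() {
                wtr.write_record(&row)?;
                true
            } else {
                false
            };
            wtr.flush()?;
            Ok(more)
        };

        let mut buffer = Vec::new();
        while next_chunk(&mut buffer)? {}
        // Prints: baz,foo / 3,1 / 6,4
        print!("{}", String::from_utf8(buffer)?);
        Ok(())
    }

In the command itself this closure is handed to ByteStream::from_fn, so rows are only rendered as the consumer pulls them.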

View File

@ -5,7 +5,7 @@ use nu_engine::command_prelude::*;
use super::msgpack::write_value; use super::msgpack::write_value;
const BUFFER_SIZE: usize = 65536; const BUFFER_SIZE: usize = 65536;
const DEFAULT_QUALITY: u32 = 1; const DEFAULT_QUALITY: u32 = 3; // 1 can be very bad
const DEFAULT_WINDOW_SIZE: u32 = 20; const DEFAULT_WINDOW_SIZE: u32 = 20;
#[derive(Clone)] #[derive(Clone)]
@ -22,7 +22,7 @@ impl Command for ToMsgpackz {
.named( .named(
"quality", "quality",
SyntaxShape::Int, SyntaxShape::Int,
"Quality of brotli compression (default 1)", "Quality of brotli compression (default 3)",
Some('q'), Some('q'),
) )
.named( .named(

View File

@ -1,4 +1,4 @@
use chrono::SecondsFormat; use chrono::{DateTime, Datelike, FixedOffset, Timelike};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::ast::PathMember; use nu_protocol::ast::PathMember;
@ -24,7 +24,7 @@ impl Command for ToToml {
vec![Example { vec![Example {
description: "Outputs an TOML string representing the contents of this record", description: "Outputs an TOML string representing the contents of this record",
example: r#"{foo: 1 bar: 'qwe'} | to toml"#, example: r#"{foo: 1 bar: 'qwe'} | to toml"#,
result: Some(Value::test_string("bar = \"qwe\"\nfoo = 1\n")), result: Some(Value::test_string("foo = 1\nbar = \"qwe\"\n")),
}] }]
} }
@ -49,9 +49,7 @@ fn helper(engine_state: &EngineState, v: &Value) -> Result<toml::Value, ShellErr
Value::Int { val, .. } => toml::Value::Integer(*val), Value::Int { val, .. } => toml::Value::Integer(*val),
Value::Filesize { val, .. } => toml::Value::Integer(*val), Value::Filesize { val, .. } => toml::Value::Integer(*val),
Value::Duration { val, .. } => toml::Value::String(val.to_string()), Value::Duration { val, .. } => toml::Value::String(val.to_string()),
Value::Date { val, .. } => { Value::Date { val, .. } => toml::Value::Datetime(to_toml_datetime(val)),
toml::Value::String(val.to_rfc3339_opts(SecondsFormat::AutoSi, false))
}
Value::Range { .. } => toml::Value::String("<Range>".to_string()), Value::Range { .. } => toml::Value::String("<Range>".to_string()),
Value::Float { val, .. } => toml::Value::Float(*val), Value::Float { val, .. } => toml::Value::Float(*val),
Value::String { val, .. } | Value::Glob { val, .. } => toml::Value::String(val.clone()), Value::String { val, .. } | Value::Glob { val, .. } => toml::Value::String(val.clone()),
@ -103,7 +101,7 @@ fn toml_into_pipeline_data(
value_type: Type, value_type: Type,
span: Span, span: Span,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
match toml::to_string(&toml_value) { match toml::to_string_pretty(&toml_value) {
Ok(serde_toml_string) => Ok(Value::string(serde_toml_string, span).into_pipeline_data()), Ok(serde_toml_string) => Ok(Value::string(serde_toml_string, span).into_pipeline_data()),
_ => Ok(Value::error( _ => Ok(Value::error(
ShellError::CantConvert { ShellError::CantConvert {
@ -157,6 +155,43 @@ fn to_toml(
} }
} }
/// Convert chrono datetime into a toml::Value datetime. The latter uses its
/// own ad-hoc datetime types, which makes this somewhat convoluted.
fn to_toml_datetime(datetime: &DateTime<FixedOffset>) -> toml::value::Datetime {
let date = toml::value::Date {
// TODO: figure out what to do with BC dates, because the toml
// crate doesn't support them. Same for large years, which
// don't fit in u16.
year: datetime.year_ce().1 as u16,
// Panic: this is safe, because chrono guarantees that the month
// value will be between 1 and 12 and the day will be between 1
// and 31
month: datetime.month() as u8,
day: datetime.day() as u8,
};
let time = toml::value::Time {
// Panic: same as before, chrono guarantees that the following three
// methods return values that fit comfortably in u8
hour: datetime.hour() as u8,
minute: datetime.minute() as u8,
second: datetime.second() as u8,
nanosecond: datetime.nanosecond(),
};
let offset = toml::value::Offset::Custom {
// Panic: a timezone offset expressed in minutes fits into i16 (that's
// over 500 hours)
minutes: (-datetime.timezone().utc_minus_local() / 60) as i16,
};
toml::value::Datetime {
date: Some(date),
time: Some(time),
offset: Some(offset),
}
}
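
to_toml_datetime maps a chrono DateTime<FixedOffset> onto the toml crate's own Datetime/Date/Time/Offset types, so dates serialize as TOML datetimes rather than strings. A small sketch of what such a value looks like once serialized; the field values and key name are arbitrary:

    use toml::value::{Date, Datetime, Offset, Table, Time};

    fn main() {
        let dt = Datetime {
            date: Some(Date { year: 1980, month: 10, day: 12 }),
            time: Some(Time { hour: 10, minute: 12, second: 44, nanosecond: 0 }),
            offset: Some(Offset::Custom { minutes: 120 }),
        };

        let mut table = Table::new();
        table.insert("datetime1".into(), toml::Value::Datetime(dt));

        // Expected to print something like: datetime1 = 1980-10-12T10:12:44+02:00
        println!("{}", toml::to_string_pretty(&toml::Value::Table(table)).unwrap());
    }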
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@ -181,7 +216,20 @@ mod tests {
Span::test_data(), Span::test_data(),
); );
let reference_date = toml::Value::String(String::from("1980-10-12T10:12:44+02:00")); let reference_date = toml::Value::Datetime(toml::value::Datetime {
date: Some(toml::value::Date {
year: 1980,
month: 10,
day: 12,
}),
time: Some(toml::value::Time {
hour: 10,
minute: 12,
second: 44,
nanosecond: 0,
}),
offset: Some(toml::value::Offset::Custom { minutes: 120 }),
});
let result = helper(&engine_state, &test_date); let result = helper(&engine_state, &test_date);

View File

@ -1,3 +1,5 @@
use std::sync::Arc;
use crate::formats::to::delimited::to_delimited_data; use crate::formats::to::delimited::to_delimited_data;
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::Config; use nu_protocol::Config;
@ -21,6 +23,12 @@ impl Command for ToTsv {
"do not output the column names as the first row", "do not output the column names as the first row",
Some('n'), Some('n'),
) )
.named(
"columns",
SyntaxShape::List(SyntaxShape::String.into()),
"the names (in order) of the columns to use",
None,
)
.category(Category::Formats) .category(Category::Formats)
} }
@ -31,15 +39,20 @@ impl Command for ToTsv {
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {
description: "Outputs an TSV string representing the contents of this table", description: "Outputs a TSV string representing the contents of this table",
example: "[[foo bar]; [1 2]] | to tsv", example: "[[foo bar]; [1 2]] | to tsv",
result: Some(Value::test_string("foo\tbar\n1\t2\n")), result: Some(Value::test_string("foo\tbar\n1\t2\n")),
}, },
Example { Example {
description: "Outputs an TSV string representing the contents of this record", description: "Outputs a TSV string representing the contents of this record",
example: "{a: 1 b: 2} | to tsv", example: "{a: 1 b: 2} | to tsv",
result: Some(Value::test_string("a\tb\n1\t2\n")), result: Some(Value::test_string("a\tb\n1\t2\n")),
}, },
Example {
description: "Outputs a TSV stream with column names pre-determined",
example: "[[foo bar baz]; [1 2 3]] | to tsv --columns [baz foo]",
result: Some(Value::test_string("baz\tfoo\n3\t1\n")),
},
] ]
} }
@ -52,18 +65,24 @@ impl Command for ToTsv {
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let head = call.head; let head = call.head;
let noheaders = call.has_flag(engine_state, stack, "noheaders")?; let noheaders = call.has_flag(engine_state, stack, "noheaders")?;
let config = engine_state.get_config(); let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
to_tsv(input, noheaders, head, config) let config = engine_state.config.clone();
to_tsv(input, noheaders, columns, head, config)
} }
} }
fn to_tsv( fn to_tsv(
input: PipelineData, input: PipelineData,
noheaders: bool, noheaders: bool,
columns: Option<Vec<String>>,
head: Span, head: Span,
config: &Config, config: Arc<Config>,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
to_delimited_data(noheaders, '\t', "TSV", input, head, config) let sep = Spanned {
item: '\t',
span: head,
};
to_delimited_data(noheaders, sep, columns, "TSV", input, head, config)
} }
#[cfg(test)] #[cfg(test)]

View File

@ -122,7 +122,7 @@ fn build_help_commands(engine_state: &EngineState, span: Span) -> Vec<Value> {
let usage = sig.usage; let usage = sig.usage;
let search_terms = sig.search_terms; let search_terms = sig.search_terms;
let command_type = format!("{:?}", decl.command_type()).to_ascii_lowercase(); let command_type = decl.command_type().to_string();
// Build table of parameters // Build table of parameters
let param_table = { let param_table = {

View File

@ -1,10 +1,14 @@
use super::PathSubcommandArguments; use super::PathSubcommandArguments;
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_path::expand_tilde;
use nu_protocol::engine::StateWorkingSet; use nu_protocol::engine::StateWorkingSet;
use std::path::Path; use std::{
io,
path::{Path, PathBuf},
};
struct Arguments; struct Arguments {
pwd: PathBuf,
}
impl PathSubcommandArguments for Arguments {} impl PathSubcommandArguments for Arguments {}
@ -35,7 +39,7 @@ impl Command for SubCommand {
fn extra_usage(&self) -> &str { fn extra_usage(&self) -> &str {
r#"This checks the file system to confirm the path's object type. r#"This checks the file system to confirm the path's object type.
If nothing is found, an empty string will be returned."# If the path does not exist, null will be returned."#
} }
fn is_const(&self) -> bool { fn is_const(&self) -> bool {
@ -45,19 +49,21 @@ If nothing is found, an empty string will be returned."#
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
_stack: &mut Stack, stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let head = call.head; let head = call.head;
let args = Arguments; let args = Arguments {
pwd: engine_state.cwd(Some(stack))?,
};
// This doesn't match explicit nulls // This doesn't match explicit nulls
if matches!(input, PipelineData::Empty) { if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head }); return Err(ShellError::PipelineEmpty { dst_span: head });
} }
input.map( input.map(
move |value| super::operate(&r#type, &args, value, head), move |value| super::operate(&path_type, &args, value, head),
engine_state.ctrlc.clone(), engine_state.ctrlc.clone(),
) )
} }
@ -69,14 +75,16 @@ If nothing is found, an empty string will be returned."#
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let head = call.head; let head = call.head;
let args = Arguments; let args = Arguments {
pwd: working_set.permanent().cwd(None)?,
};
// This doesn't match explicit nulls // This doesn't match explicit nulls
if matches!(input, PipelineData::Empty) { if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head }); return Err(ShellError::PipelineEmpty { dst_span: head });
} }
input.map( input.map(
move |value| super::operate(&r#type, &args, value, head), move |value| super::operate(&path_type, &args, value, head),
working_set.permanent().ctrlc.clone(), working_set.permanent().ctrlc.clone(),
) )
} }
@ -97,21 +105,13 @@ If nothing is found, an empty string will be returned."#
} }
} }
fn r#type(path: &Path, span: Span, _: &Arguments) -> Value { fn path_type(path: &Path, span: Span, args: &Arguments) -> Value {
let meta = if path.starts_with("~") { let path = nu_path::expand_path_with(path, &args.pwd, true);
let p = expand_tilde(path); match path.symlink_metadata() {
std::fs::symlink_metadata(p) Ok(metadata) => Value::string(get_file_type(&metadata), span),
} else { Err(err) if err.kind() == io::ErrorKind::NotFound => Value::nothing(span),
std::fs::symlink_metadata(path) Err(err) => Value::error(err.into_spanned(span).into(), span),
}; }
Value::string(
match &meta {
Ok(data) => get_file_type(data),
Err(_) => "",
},
span,
)
} }
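
path_type now expands the argument against the provided pwd, asks for symlink_metadata, and maps a NotFound error to null instead of an empty string. The std-only core of that lookup, with a simplified file-type mapping and placeholder paths:

    use std::{fs, io, path::{Path, PathBuf}};

    fn path_type(path: &Path, pwd: &Path) -> io::Result<Option<&'static str>> {
        // Resolve the argument against an explicit pwd, like expand_path_with does.
        let full: PathBuf = pwd.join(path);
        match fs::symlink_metadata(&full) {
            Ok(metadata) => {
                let ft = metadata.file_type();
                let name = if ft.is_dir() {
                    "dir"
                } else if ft.is_symlink() {
                    "symlink"
                } else {
                    "file"
                };
                Ok(Some(name))
            }
            // Missing paths become None (the command returns null), not an error.
            Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(None),
            Err(err) => Err(err),
        }
    }

    fn main() -> io::Result<()> {
        println!("{:?}", path_type(Path::new("Cargo.toml"), Path::new("."))?);
        Ok(())
    }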
fn get_file_type(md: &std::fs::Metadata) -> &str { fn get_file_type(md: &std::fs::Metadata) -> &str {

View File

@ -81,7 +81,7 @@ pub fn sort(
if let Some(nonexistent) = nonexistent_column(&sort_columns, record.columns()) { if let Some(nonexistent) = nonexistent_column(&sort_columns, record.columns()) {
return Err(ShellError::CantFindColumn { return Err(ShellError::CantFindColumn {
col_name: nonexistent, col_name: nonexistent,
span, span: Some(span),
src_span: val_span, src_span: val_span,
}); });
} }

View File

@ -12,14 +12,17 @@ impl Command for StorInsert {
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature::build("stor insert") Signature::build("stor insert")
.input_output_types(vec![(Type::Nothing, Type::table())]) .input_output_types(vec![
(Type::Nothing, Type::table()),
(Type::record(), Type::table()),
])
.required_named( .required_named(
"table-name", "table-name",
SyntaxShape::String, SyntaxShape::String,
"name of the table you want to insert into", "name of the table you want to insert into",
Some('t'), Some('t'),
) )
.required_named( .named(
"data-record", "data-record",
SyntaxShape::Record(vec![]), SyntaxShape::Record(vec![]),
"a record of column names and column values to insert into the specified table", "a record of column names and column values to insert into the specified table",
@ -39,10 +42,16 @@ impl Command for StorInsert {
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![Example { vec![Example {
description: "Insert data the in-memory sqlite database using a data-record of column-name and column-value pairs", description: "Insert data the in-memory sqlite database using a data-record of column-name and column-value pairs",
example: "stor insert --table-name nudb --data-record {bool1: true, int1: 5, float1: 1.1, str1: fdncred, datetime1: 2023-04-17}", example: "stor insert --table-name nudb --data-record {bool1: true, int1: 5, float1: 1.1, str1: fdncred, datetime1: 2023-04-17}",
result: None, result: None,
}] },
Example {
description: "Insert data through pipeline input as a record of column-name and column-value pairs",
example: "{bool1: true, int1: 5, float1: 1.1, str1: fdncred, datetime1: 2023-04-17} | stor insert --table-name nudb",
result: None,
},
]
} }
fn run( fn run(
@ -50,25 +59,79 @@ impl Command for StorInsert {
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, stack: &mut Stack,
call: &Call, call: &Call,
_input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let span = call.head; let span = call.head;
let table_name: Option<String> = call.get_flag(engine_state, stack, "table-name")?; let table_name: Option<String> = call.get_flag(engine_state, stack, "table-name")?;
let columns: Option<Record> = call.get_flag(engine_state, stack, "data-record")?; let data_record: Option<Record> = call.get_flag(engine_state, stack, "data-record")?;
// let config = engine_state.get_config(); // let config = engine_state.get_config();
let db = Box::new(SQLiteDatabase::new(std::path::Path::new(MEMORY_DB), None)); let db = Box::new(SQLiteDatabase::new(std::path::Path::new(MEMORY_DB), None));
// Check if the record is being passed as input or using the data record parameter
let columns = handle(span, data_record, input)?;
process(table_name, span, &db, columns)?; process(table_name, span, &db, columns)?;
Ok(Value::custom(db, span).into_pipeline_data()) Ok(Value::custom(db, span).into_pipeline_data())
} }
} }
fn handle(
span: Span,
data_record: Option<Record>,
input: PipelineData,
) -> Result<Record, ShellError> {
match input {
PipelineData::Empty => data_record.ok_or_else(|| ShellError::MissingParameter {
param_name: "requires a record".into(),
span,
}),
PipelineData::Value(value, ..) => {
// Since input is being used, check if the data record parameter is used too
if data_record.is_some() {
return Err(ShellError::GenericError {
error: "Pipeline and Flag both being used".into(),
msg: "Use either pipeline input or '--data-record' parameter".into(),
span: Some(span),
help: None,
inner: vec![],
});
}
match value {
Value::Record { val, .. } => Ok(val.into_owned()),
val => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record".into(),
wrong_type: val.get_type().to_string(),
dst_span: Span::unknown(),
src_span: val.span(),
}),
}
}
_ => {
if data_record.is_some() {
return Err(ShellError::GenericError {
error: "Pipeline and Flag both being used".into(),
msg: "Use either pipeline input or '--data-record' parameter".into(),
span: Some(span),
help: None,
inner: vec![],
});
}
Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record".into(),
wrong_type: "".into(),
dst_span: span,
src_span: span,
})
}
}
}
fn process( fn process(
table_name: Option<String>, table_name: Option<String>,
span: Span, span: Span,
db: &SQLiteDatabase, db: &SQLiteDatabase,
columns: Option<Record>, record: Record,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
if table_name.is_none() { if table_name.is_none() {
return Err(ShellError::MissingParameter { return Err(ShellError::MissingParameter {
@ -77,54 +140,45 @@ fn process(
}); });
} }
let new_table_name = table_name.unwrap_or("table".into()); let new_table_name = table_name.unwrap_or("table".into());
if let Ok(conn) = db.open_connection() { if let Ok(conn) = db.open_connection() {
match columns { let mut create_stmt = format!("INSERT INTO {} ( ", new_table_name);
Some(record) => { let cols = record.columns();
let mut create_stmt = format!("INSERT INTO {} ( ", new_table_name); cols.for_each(|col| {
let cols = record.columns(); create_stmt.push_str(&format!("{}, ", col));
cols.for_each(|col| { });
create_stmt.push_str(&format!("{}, ", col)); if create_stmt.ends_with(", ") {
}); create_stmt.pop();
if create_stmt.ends_with(", ") { create_stmt.pop();
create_stmt.pop(); }
create_stmt.pop();
}
// Values are set as placeholders. // Values are set as placeholders.
create_stmt.push_str(") VALUES ( "); create_stmt.push_str(") VALUES ( ");
for (index, _) in record.columns().enumerate() { for (index, _) in record.columns().enumerate() {
create_stmt.push_str(&format!("?{}, ", index + 1)); create_stmt.push_str(&format!("?{}, ", index + 1));
} }
if create_stmt.ends_with(", ") { if create_stmt.ends_with(", ") {
create_stmt.pop(); create_stmt.pop();
create_stmt.pop(); create_stmt.pop();
} }
create_stmt.push(')'); create_stmt.push(')');
// dbg!(&create_stmt); // dbg!(&create_stmt);
// Get the params from the passed values // Get the params from the passed values
let params = values_to_sql(record.values().cloned())?; let params = values_to_sql(record.values().cloned())?;
conn.execute(&create_stmt, params_from_iter(params)) conn.execute(&create_stmt, params_from_iter(params))
.map_err(|err| ShellError::GenericError { .map_err(|err| ShellError::GenericError {
error: "Failed to open SQLite connection in memory from insert".into(), error: "Failed to open SQLite connection in memory from insert".into(),
msg: err.to_string(), msg: err.to_string(),
span: Some(Span::test_data()), span: Some(Span::test_data()),
help: None, help: None,
inner: vec![], inner: vec![],
})?; })?;
} };
None => {
return Err(ShellError::MissingParameter {
param_name: "requires at least one column".into(),
span,
});
}
};
}
// dbg!(db.clone()); // dbg!(db.clone());
Ok(()) Ok(())
} }
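
process builds the INSERT statement from the record's column names plus one positional ?N placeholder per value, then binds the values with params_from_iter. A minimal rusqlite-only sketch of that statement construction; the table, columns, and values are invented:

    use rusqlite::{params_from_iter, Connection};

    fn main() -> rusqlite::Result<()> {
        let conn = Connection::open_in_memory()?;
        conn.execute("CREATE TABLE nudb (int1 INTEGER, str1 TEXT)", [])?;

        // One "?N" placeholder per column, in the same order as the record's values.
        let columns = ["int1", "str1"];
        let placeholders: Vec<String> = (1..=columns.len()).map(|i| format!("?{i}")).collect();
        let insert_stmt = format!(
            "INSERT INTO nudb ( {} ) VALUES ( {} )",
            columns.join(", "),
            placeholders.join(", ")
        );

        // SQLite's dynamic typing accepts plain strings here; the command converts
        // each Value with values_to_sql before binding.
        let values = vec!["5".to_string(), "fdncred".to_string()];
        conn.execute(&insert_stmt, params_from_iter(values))?;
        Ok(())
    }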
@ -176,7 +230,7 @@ mod test {
), ),
); );
let result = process(table_name, span, &db, Some(columns)); let result = process(table_name, span, &db, columns);
assert!(result.is_ok()); assert!(result.is_ok());
} }
@ -201,7 +255,7 @@ mod test {
Value::test_string("String With Spaces".to_string()), Value::test_string("String With Spaces".to_string()),
); );
let result = process(table_name, span, &db, Some(columns)); let result = process(table_name, span, &db, columns);
assert!(result.is_ok()); assert!(result.is_ok());
} }
@ -226,7 +280,7 @@ mod test {
Value::test_string("ThisIsALongString".to_string()), Value::test_string("ThisIsALongString".to_string()),
); );
let result = process(table_name, span, &db, Some(columns)); let result = process(table_name, span, &db, columns);
// SQLite uses dynamic typing, making any length acceptable for a varchar column // SQLite uses dynamic typing, making any length acceptable for a varchar column
assert!(result.is_ok()); assert!(result.is_ok());
} }
@ -251,7 +305,7 @@ mod test {
Value::test_string("ThisIsTheWrongType".to_string()), Value::test_string("ThisIsTheWrongType".to_string()),
); );
let result = process(table_name, span, &db, Some(columns)); let result = process(table_name, span, &db, columns);
// SQLite uses dynamic typing, making any type acceptable for a column // SQLite uses dynamic typing, making any type acceptable for a column
assert!(result.is_ok()); assert!(result.is_ok());
} }
@ -276,7 +330,7 @@ mod test {
Value::test_string("ThisIsALongString".to_string()), Value::test_string("ThisIsALongString".to_string()),
); );
let result = process(table_name, span, &db, Some(columns)); let result = process(table_name, span, &db, columns);
assert!(result.is_err()); assert!(result.is_err());
} }
@ -293,7 +347,7 @@ mod test {
Value::test_string("ThisIsALongString".to_string()), Value::test_string("ThisIsALongString".to_string()),
); );
let result = process(table_name, span, &db, Some(columns)); let result = process(table_name, span, &db, columns);
assert!(result.is_err()); assert!(result.is_err());
} }

View File

@ -11,14 +11,17 @@ impl Command for StorUpdate {
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature::build("stor update") Signature::build("stor update")
.input_output_types(vec![(Type::Nothing, Type::table())]) .input_output_types(vec![
(Type::Nothing, Type::table()),
(Type::record(), Type::table()),
])
.required_named( .required_named(
"table-name", "table-name",
SyntaxShape::String, SyntaxShape::String,
"name of the table you want to insert into", "name of the table you want to insert into",
Some('t'), Some('t'),
) )
.required_named( .named(
"update-record", "update-record",
SyntaxShape::Record(vec![]), SyntaxShape::Record(vec![]),
"a record of column names and column values to update in the specified table", "a record of column names and column values to update in the specified table",
@ -54,6 +57,11 @@ impl Command for StorUpdate {
example: "stor update --table-name nudb --update-record {str1: nushell datetime1: 2020-04-17} --where-clause \"bool1 = 1\"", example: "stor update --table-name nudb --update-record {str1: nushell datetime1: 2020-04-17} --where-clause \"bool1 = 1\"",
result: None, result: None,
}, },
Example {
description: "Update the in-memory sqlite database through pipeline input",
example: "{str1: nushell datetime1: 2020-04-17} | stor update --table-name nudb",
result: None,
},
] ]
} }
@ -62,91 +70,147 @@ impl Command for StorUpdate {
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, stack: &mut Stack,
call: &Call, call: &Call,
_input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let span = call.head; let span = call.head;
let table_name: Option<String> = call.get_flag(engine_state, stack, "table-name")?; let table_name: Option<String> = call.get_flag(engine_state, stack, "table-name")?;
let columns: Option<Record> = call.get_flag(engine_state, stack, "update-record")?; let update_record: Option<Record> = call.get_flag(engine_state, stack, "update-record")?;
let where_clause_opt: Option<Spanned<String>> = let where_clause_opt: Option<Spanned<String>> =
call.get_flag(engine_state, stack, "where-clause")?; call.get_flag(engine_state, stack, "where-clause")?;
// Open the in-mem database // Open the in-mem database
let db = Box::new(SQLiteDatabase::new(std::path::Path::new(MEMORY_DB), None)); let db = Box::new(SQLiteDatabase::new(std::path::Path::new(MEMORY_DB), None));
if table_name.is_none() { // Check if the record is being passed as input or using the update record parameter
return Err(ShellError::MissingParameter { let columns = handle(span, update_record, input)?;
param_name: "requires at table name".into(),
span,
});
}
let new_table_name = table_name.unwrap_or("table".into());
if let Ok(conn) = db.open_connection() {
match columns {
Some(record) => {
let mut update_stmt = format!("UPDATE {} ", new_table_name);
update_stmt.push_str("SET "); process(table_name, span, &db, columns, where_clause_opt)?;
let vals = record.iter();
vals.for_each(|(key, val)| match val {
Value::Int { val, .. } => {
update_stmt.push_str(&format!("{} = {}, ", key, val));
}
Value::Float { val, .. } => {
update_stmt.push_str(&format!("{} = {}, ", key, val));
}
Value::String { val, .. } => {
update_stmt.push_str(&format!("{} = '{}', ", key, val));
}
Value::Date { val, .. } => {
update_stmt.push_str(&format!("{} = '{}', ", key, val));
}
Value::Bool { val, .. } => {
update_stmt.push_str(&format!("{} = {}, ", key, val));
}
_ => {
// return Err(ShellError::UnsupportedInput {
// msg: format!("{} is not a valid datepart, expected one of year, month, day, hour, minute, second, millisecond, microsecond, nanosecond", part.item),
// input: "value originates from here".to_string(),
// msg_span: span,
// input_span: val.span(),
// });
}
});
if update_stmt.ends_with(", ") {
update_stmt.pop();
update_stmt.pop();
}
// Yup, this is a bit janky, but I'm not sure a better way to do this without having
// --and and --or flags as well as supporting ==, !=, <>, is null, is not null, etc.
// and other sql syntax. So, for now, just type a sql where clause as a string.
if let Some(where_clause) = where_clause_opt {
update_stmt.push_str(&format!(" WHERE {}", where_clause.item));
}
// dbg!(&update_stmt);
conn.execute(&update_stmt, [])
.map_err(|err| ShellError::GenericError {
error: "Failed to open SQLite connection in memory from update".into(),
msg: err.to_string(),
span: Some(Span::test_data()),
help: None,
inner: vec![],
})?;
}
None => {
return Err(ShellError::MissingParameter {
param_name: "requires at least one column".into(),
span: call.head,
});
}
};
}
// dbg!(db.clone());
Ok(Value::custom(db, span).into_pipeline_data()) Ok(Value::custom(db, span).into_pipeline_data())
} }
} }
fn handle(
span: Span,
update_record: Option<Record>,
input: PipelineData,
) -> Result<Record, ShellError> {
match input {
PipelineData::Empty => update_record.ok_or_else(|| ShellError::MissingParameter {
param_name: "requires a record".into(),
span,
}),
PipelineData::Value(value, ..) => {
// Since input is being used, check if the data record parameter is used too
if update_record.is_some() {
return Err(ShellError::GenericError {
error: "Pipeline and Flag both being used".into(),
msg: "Use either pipeline input or '--update-record' parameter".into(),
span: Some(span),
help: None,
inner: vec![],
});
}
match value {
Value::Record { val, .. } => Ok(val.into_owned()),
val => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record".into(),
wrong_type: val.get_type().to_string(),
dst_span: Span::unknown(),
src_span: val.span(),
}),
}
}
_ => {
if update_record.is_some() {
return Err(ShellError::GenericError {
error: "Pipeline and Flag both being used".into(),
msg: "Use either pipeline input or '--update-record' parameter".into(),
span: Some(span),
help: None,
inner: vec![],
});
}
Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record".into(),
wrong_type: "".into(),
dst_span: span,
src_span: span,
})
}
}
}
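
Both stor insert and stor update now share this precedence: a record may arrive through the pipeline or the flag, an empty pipeline falls back to the flag, and supplying both is rejected. A stripped-down sketch of that decision, with plain strings standing in for Record values and ShellError:

    // Hypothetical stand-ins for the command's pipeline input and flag value.
    enum Input {
        Empty,
        Value(String),
    }

    fn handle(flag: Option<String>, input: Input) -> Result<String, String> {
        match input {
            // No pipeline input: the flag must supply the record.
            Input::Empty => flag.ok_or_else(|| "requires a record".to_string()),
            Input::Value(value) => {
                // Refuse ambiguous calls that set both the flag and pipeline input.
                if flag.is_some() {
                    return Err("use either pipeline input or the flag, not both".into());
                }
                Ok(value)
            }
        }
    }

    fn main() {
        assert!(handle(Some("rec".into()), Input::Empty).is_ok());
        assert!(handle(None, Input::Value("rec".into())).is_ok());
        assert!(handle(Some("rec".into()), Input::Value("rec".into())).is_err());
        assert!(handle(None, Input::Empty).is_err());
    }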
fn process(
table_name: Option<String>,
span: Span,
db: &SQLiteDatabase,
record: Record,
where_clause_opt: Option<Spanned<String>>,
) -> Result<(), ShellError> {
if table_name.is_none() {
return Err(ShellError::MissingParameter {
param_name: "requires at table name".into(),
span,
});
}
let new_table_name = table_name.unwrap_or("table".into());
if let Ok(conn) = db.open_connection() {
let mut update_stmt = format!("UPDATE {} ", new_table_name);
update_stmt.push_str("SET ");
let vals = record.iter();
vals.for_each(|(key, val)| match val {
Value::Int { val, .. } => {
update_stmt.push_str(&format!("{} = {}, ", key, val));
}
Value::Float { val, .. } => {
update_stmt.push_str(&format!("{} = {}, ", key, val));
}
Value::String { val, .. } => {
update_stmt.push_str(&format!("{} = '{}', ", key, val));
}
Value::Date { val, .. } => {
update_stmt.push_str(&format!("{} = '{}', ", key, val));
}
Value::Bool { val, .. } => {
update_stmt.push_str(&format!("{} = {}, ", key, val));
}
_ => {
// return Err(ShellError::UnsupportedInput {
// msg: format!("{} is not a valid datepart, expected one of year, month, day, hour, minute, second, millisecond, microsecond, nanosecond", part.item),
// input: "value originates from here".to_string(),
// msg_span: span,
// input_span: val.span(),
// });
}
});
if update_stmt.ends_with(", ") {
update_stmt.pop();
update_stmt.pop();
}
// Yup, this is a bit janky, but I'm not sure a better way to do this without having
// --and and --or flags as well as supporting ==, !=, <>, is null, is not null, etc.
// and other sql syntax. So, for now, just type a sql where clause as a string.
if let Some(where_clause) = where_clause_opt {
update_stmt.push_str(&format!(" WHERE {}", where_clause.item));
}
// dbg!(&update_stmt);
conn.execute(&update_stmt, [])
.map_err(|err| ShellError::GenericError {
error: "Failed to open SQLite connection in memory from update".into(),
msg: err.to_string(),
span: Some(Span::test_data()),
help: None,
inner: vec![],
})?;
}
// dbg!(db.clone());
Ok(())
}
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;

View File

@ -1,6 +1,6 @@
use indexmap::{indexmap, IndexMap}; use indexmap::{indexmap, IndexMap};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::engine::StateWorkingSet;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use std::sync::{atomic::AtomicBool, Arc}; use std::sync::{atomic::AtomicBool, Arc};

View File

@ -45,20 +45,6 @@ impl Command for DetectColumns {
vec!["split", "tabular"] vec!["split", "tabular"]
} }
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
if call.has_flag(engine_state, stack, "guess")? {
guess_width(engine_state, stack, call, input)
} else {
detect_columns(engine_state, stack, call, input)
}
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {
@ -109,33 +95,87 @@ none 8150224 4 8150220 1% /mnt/c' | detect columns --gue
}, },
] ]
} }
fn is_const(&self) -> bool {
true
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let num_rows_to_skip: Option<usize> = call.get_flag(engine_state, stack, "skip")?;
let noheader = call.has_flag(engine_state, stack, "no-headers")?;
let range: Option<Range> = call.get_flag(engine_state, stack, "combine-columns")?;
let args = Arguments {
noheader,
num_rows_to_skip,
range,
};
if call.has_flag(engine_state, stack, "guess")? {
guess_width(engine_state, call, input, args)
} else {
detect_columns(engine_state, call, input, args)
}
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let num_rows_to_skip: Option<usize> = call.get_flag_const(working_set, "skip")?;
let noheader = call.has_flag_const(working_set, "no-headers")?;
let range: Option<Range> = call.get_flag_const(working_set, "combine-columns")?;
let args = Arguments {
noheader,
num_rows_to_skip,
range,
};
if call.has_flag_const(working_set, "guess")? {
guess_width(working_set.permanent(), call, input, args)
} else {
detect_columns(working_set.permanent(), call, input, args)
}
}
}
struct Arguments {
num_rows_to_skip: Option<usize>,
noheader: bool,
range: Option<Range>,
} }
fn guess_width( fn guess_width(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
args: Arguments,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
use super::guess_width::GuessWidth; use super::guess_width::GuessWidth;
let input_span = input.span().unwrap_or(call.head); let input_span = input.span().unwrap_or(call.head);
let mut input = input.collect_string("", engine_state.get_config())?; let mut input = input.collect_string("", engine_state.get_config())?;
let num_rows_to_skip: Option<usize> = call.get_flag(engine_state, stack, "skip")?; if let Some(rows) = args.num_rows_to_skip {
if let Some(rows) = num_rows_to_skip {
input = input.lines().skip(rows).map(|x| x.to_string()).join("\n"); input = input.lines().skip(rows).map(|x| x.to_string()).join("\n");
} }
let mut guess_width = GuessWidth::new_reader(Box::new(Cursor::new(input))); let mut guess_width = GuessWidth::new_reader(Box::new(Cursor::new(input)));
let noheader = call.has_flag(engine_state, stack, "no-headers")?;
let result = guess_width.read_all(); let result = guess_width.read_all();
if result.is_empty() { if result.is_empty() {
return Ok(Value::nothing(input_span).into_pipeline_data()); return Ok(Value::nothing(input_span).into_pipeline_data());
} }
let range: Option<Range> = call.get_flag(engine_state, stack, "combine-columns")?; if !args.noheader {
if !noheader {
let columns = result[0].clone(); let columns = result[0].clone();
Ok(result Ok(result
.into_iter() .into_iter()
@ -152,7 +192,7 @@ fn guess_width(
let record = let record =
Record::from_raw_cols_vals(columns.clone(), values, input_span, input_span); Record::from_raw_cols_vals(columns.clone(), values, input_span, input_span);
match record { match record {
Ok(r) => match &range { Ok(r) => match &args.range {
Some(range) => merge_record(r, range, input_span), Some(range) => merge_record(r, range, input_span),
None => Value::record(r, input_span), None => Value::record(r, input_span),
}, },
@ -177,7 +217,7 @@ fn guess_width(
let record = let record =
Record::from_raw_cols_vals(columns.clone(), values, input_span, input_span); Record::from_raw_cols_vals(columns.clone(), values, input_span, input_span);
match record { match record {
Ok(r) => match &range { Ok(r) => match &args.range {
Some(range) => merge_record(r, range, input_span), Some(range) => merge_record(r, range, input_span),
None => Value::record(r, input_span), None => Value::record(r, input_span),
}, },
@ -190,21 +230,18 @@ fn guess_width(
fn detect_columns( fn detect_columns(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
args: Arguments,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let name_span = call.head; let name_span = call.head;
let num_rows_to_skip: Option<usize> = call.get_flag(engine_state, stack, "skip")?;
let noheader = call.has_flag(engine_state, stack, "no-headers")?;
let range: Option<Range> = call.get_flag(engine_state, stack, "combine-columns")?;
let ctrlc = engine_state.ctrlc.clone(); let ctrlc = engine_state.ctrlc.clone();
let config = engine_state.get_config(); let config = engine_state.get_config();
let input = input.collect_string("", config)?; let input = input.collect_string("", config)?;
let input: Vec<_> = input let input: Vec<_> = input
.lines() .lines()
.skip(num_rows_to_skip.unwrap_or_default()) .skip(args.num_rows_to_skip.unwrap_or_default())
.map(|x| x.to_string()) .map(|x| x.to_string())
.collect(); .collect();
@ -214,13 +251,14 @@ fn detect_columns(
if let Some(orig_headers) = headers { if let Some(orig_headers) = headers {
let mut headers = find_columns(&orig_headers); let mut headers = find_columns(&orig_headers);
if noheader { if args.noheader {
for header in headers.iter_mut().enumerate() { for header in headers.iter_mut().enumerate() {
header.1.item = format!("column{}", header.0); header.1.item = format!("column{}", header.0);
} }
} }
Ok(noheader Ok(args
.noheader
.then_some(orig_headers) .then_some(orig_headers)
.into_iter() .into_iter()
.chain(input) .chain(input)
@ -273,7 +311,7 @@ fn detect_columns(
} }
} }
match &range { match &args.range {
Some(range) => merge_record(record, range, name_span), Some(range) => merge_record(record, range, name_span),
None => Value::record(record, name_span), None => Value::record(record, name_span),
} }
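The detect columns diff above shows the shape repeated throughout this commit: the command gains is_const() and run_const(), and flag parsing is hoisted out of run() into an Arguments struct so one helper can serve both entry points. A dependency-free sketch of that shape, using toy types rather than the real EngineState/Stack/StateWorkingSet:

// Toy stand-ins for the engine types used by the real commands.
struct Stack {
    guess: bool,
}
struct StateWorkingSet {
    guess: bool,
}

struct Arguments {
    guess: bool,
}

// Shared worker: it only needs the already-extracted Arguments, never the mutable Stack,
// which is what makes a const entry point possible.
fn detect_or_guess(args: &Arguments, input: &str) -> usize {
    if args.guess {
        input.split_whitespace().count()
    } else {
        input.lines().count()
    }
}

// run(): flags come from the mutable runtime stack.
fn run(stack: &mut Stack, input: &str) -> usize {
    let args = Arguments { guess: stack.guess };
    detect_or_guess(&args, input)
}

// run_const(): the same flags come from the parse-time working set.
fn run_const(working_set: &StateWorkingSet, input: &str) -> usize {
    let args = Arguments { guess: working_set.guess };
    detect_or_guess(&args, input)
}

fn main() {
    let text = "a b c\nd e f";
    assert_eq!(run(&mut Stack { guess: false }, text), 2);
    assert_eq!(run_const(&StateWorkingSet { guess: true }, text), 6);
}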


@ -7,10 +7,9 @@ use base64::{
Engine, Engine,
}; };
use nu_cmd_base::input_handler::{operate as general_operate, CmdArgument}; use nu_cmd_base::input_handler::{operate as general_operate, CmdArgument};
use nu_engine::CallExt;
use nu_protocol::{ use nu_protocol::{
ast::{Call, CellPath}, ast::{Call, CellPath},
engine::{EngineState, Stack}, engine::EngineState,
PipelineData, ShellError, Span, Spanned, Value, PipelineData, ShellError, Span, Spanned, Value,
}; };
@ -42,22 +41,24 @@ impl CmdArgument for Arguments {
} }
} }
pub(super) struct Base64CommandArguments {
pub(super) character_set: Option<Spanned<String>>,
pub(super) action_type: ActionType,
pub(super) binary: bool,
}
pub fn operate( pub fn operate(
action_type: ActionType,
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
cell_paths: Vec<CellPath>,
args: Base64CommandArguments,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let head = call.head; let head = call.head;
let character_set: Option<Spanned<String>> =
call.get_flag(engine_state, stack, "character-set")?;
let binary = call.has_flag(engine_state, stack, "binary")?;
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths); let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
// Default the character set to standard if the argument is not specified. // Default the character set to standard if the argument is not specified.
let character_set = match character_set { let character_set = match args.character_set {
Some(inner_tag) => inner_tag, Some(inner_tag) => inner_tag,
None => Spanned { None => Spanned {
item: "standard".to_string(), item: "standard".to_string(),
@ -68,9 +69,9 @@ pub fn operate(
let args = Arguments { let args = Arguments {
encoding_config: Base64Config { encoding_config: Base64Config {
character_set, character_set,
action_type, action_type: args.action_type,
}, },
binary, binary: args.binary,
cell_paths, cell_paths,
}; };


@ -46,6 +46,10 @@ documentation link at https://docs.rs/encoding_rs/latest/encoding_rs/#statics"#
] ]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -53,49 +57,67 @@ documentation link at https://docs.rs/encoding_rs/latest/encoding_rs/#statics"#
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let head = call.head;
let encoding: Option<Spanned<String>> = call.opt(engine_state, stack, 0)?; let encoding: Option<Spanned<String>> = call.opt(engine_state, stack, 0)?;
run(call, input, encoding)
}
match input { fn run_const(
PipelineData::ByteStream(stream, ..) => { &self,
let span = stream.span(); working_set: &StateWorkingSet,
let bytes = stream.into_bytes()?; call: &Call,
match encoding { input: PipelineData,
) -> Result<PipelineData, ShellError> {
let encoding: Option<Spanned<String>> = call.opt_const(working_set, 0)?;
run(call, input, encoding)
}
}
fn run(
call: &Call,
input: PipelineData,
encoding: Option<Spanned<String>>,
) -> Result<PipelineData, ShellError> {
let head = call.head;
match input {
PipelineData::ByteStream(stream, ..) => {
let span = stream.span();
let bytes = stream.into_bytes()?;
match encoding {
Some(encoding_name) => super::encoding::decode(head, encoding_name, &bytes),
None => super::encoding::detect_encoding_name(head, span, &bytes)
.map(|encoding| encoding.decode(&bytes).0.into_owned())
.map(|s| Value::string(s, head)),
}
.map(|val| val.into_pipeline_data())
}
PipelineData::Value(v, ..) => {
let input_span = v.span();
match v {
Value::Binary { val: bytes, .. } => match encoding {
Some(encoding_name) => super::encoding::decode(head, encoding_name, &bytes), Some(encoding_name) => super::encoding::decode(head, encoding_name, &bytes),
None => super::encoding::detect_encoding_name(head, span, &bytes) None => super::encoding::detect_encoding_name(head, input_span, &bytes)
.map(|encoding| encoding.decode(&bytes).0.into_owned()) .map(|encoding| encoding.decode(&bytes).0.into_owned())
.map(|s| Value::string(s, head)), .map(|s| Value::string(s, head)),
} }
.map(|val| val.into_pipeline_data()) .map(|val| val.into_pipeline_data()),
Value::Error { error, .. } => Err(*error),
_ => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "binary".into(),
wrong_type: v.get_type().to_string(),
dst_span: head,
src_span: v.span(),
}),
} }
PipelineData::Value(v, ..) => {
let input_span = v.span();
match v {
Value::Binary { val: bytes, .. } => match encoding {
Some(encoding_name) => super::encoding::decode(head, encoding_name, &bytes),
None => super::encoding::detect_encoding_name(head, input_span, &bytes)
.map(|encoding| encoding.decode(&bytes).0.into_owned())
.map(|s| Value::string(s, head)),
}
.map(|val| val.into_pipeline_data()),
Value::Error { error, .. } => Err(*error),
_ => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "binary".into(),
wrong_type: v.get_type().to_string(),
dst_span: head,
src_span: v.span(),
}),
}
}
// This should be more precise, but due to difficulties in getting spans
// from PipelineData::ListData, this is as it is.
_ => Err(ShellError::UnsupportedInput {
msg: "non-binary input".into(),
input: "value originates from here".into(),
msg_span: head,
input_span: input.span().unwrap_or(head),
}),
} }
// This should be more precise, but due to difficulties in getting spans
// from PipelineData::ListData, this is as it is.
_ => Err(ShellError::UnsupportedInput {
msg: "non-binary input".into(),
input: "value originates from here".into(),
msg_span: head,
input_span: input.span().unwrap_or(head),
}),
} }
} }


@ -1,4 +1,4 @@
use super::base64::{operate, ActionType, CHARACTER_SET_DESC}; use super::base64::{operate, ActionType, Base64CommandArguments, CHARACTER_SET_DESC};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
@ -66,6 +66,10 @@ impl Command for DecodeBase64 {
] ]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -73,7 +77,34 @@ impl Command for DecodeBase64 {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
operate(ActionType::Decode, engine_state, stack, call, input) let character_set: Option<Spanned<String>> =
call.get_flag(engine_state, stack, "character-set")?;
let binary = call.has_flag(engine_state, stack, "binary")?;
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let args = Base64CommandArguments {
action_type: ActionType::Decode,
binary,
character_set,
};
operate(engine_state, call, input, cell_paths, args)
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let character_set: Option<Spanned<String>> =
call.get_flag_const(working_set, "character-set")?;
let binary = call.has_flag_const(working_set, "binary")?;
let cell_paths: Vec<CellPath> = call.rest_const(working_set, 0)?;
let args = Base64CommandArguments {
action_type: ActionType::Decode,
binary,
character_set,
};
operate(working_set.permanent(), call, input, cell_paths, args)
} }
} }


@ -69,6 +69,10 @@ documentation link at https://docs.rs/encoding_rs/latest/encoding_rs/#statics"#
] ]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -76,42 +80,62 @@ documentation link at https://docs.rs/encoding_rs/latest/encoding_rs/#statics"#
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let head = call.head;
let encoding: Spanned<String> = call.req(engine_state, stack, 0)?; let encoding: Spanned<String> = call.req(engine_state, stack, 0)?;
let ignore_errors = call.has_flag(engine_state, stack, "ignore-errors")?; let ignore_errors = call.has_flag(engine_state, stack, "ignore-errors")?;
run(call, input, encoding, ignore_errors)
}
match input { fn run_const(
PipelineData::ByteStream(stream, ..) => { &self,
let span = stream.span(); working_set: &StateWorkingSet,
let s = stream.into_string()?; call: &Call,
super::encoding::encode(head, encoding, &s, span, ignore_errors) input: PipelineData,
.map(|val| val.into_pipeline_data()) ) -> Result<PipelineData, ShellError> {
} let encoding: Spanned<String> = call.req_const(working_set, 0)?;
PipelineData::Value(v, ..) => { let ignore_errors = call.has_flag_const(working_set, "ignore-errors")?;
let span = v.span(); run(call, input, encoding, ignore_errors)
match v { }
Value::String { val: s, .. } => { }
super::encoding::encode(head, encoding, &s, span, ignore_errors)
.map(|val| val.into_pipeline_data()) fn run(
} call: &Call,
Value::Error { error, .. } => Err(*error), input: PipelineData,
_ => Err(ShellError::OnlySupportsThisInputType { encoding: Spanned<String>,
exp_input_type: "string".into(), ignore_errors: bool,
wrong_type: v.get_type().to_string(), ) -> Result<PipelineData, ShellError> {
dst_span: head, let head = call.head;
src_span: v.span(),
}), match input {
} PipelineData::ByteStream(stream, ..) => {
} let span = stream.span();
// This should be more precise, but due to difficulties in getting spans let s = stream.into_string()?;
// from PipelineData::ListStream, this is as it is. super::encoding::encode(head, encoding, &s, span, ignore_errors)
_ => Err(ShellError::UnsupportedInput { .map(|val| val.into_pipeline_data())
msg: "non-string input".into(),
input: "value originates from here".into(),
msg_span: head,
input_span: input.span().unwrap_or(head),
}),
} }
PipelineData::Value(v, ..) => {
let span = v.span();
match v {
Value::String { val: s, .. } => {
super::encoding::encode(head, encoding, &s, span, ignore_errors)
.map(|val| val.into_pipeline_data())
}
Value::Error { error, .. } => Err(*error),
_ => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "string".into(),
wrong_type: v.get_type().to_string(),
dst_span: head,
src_span: v.span(),
}),
}
}
// This should be more precise, but due to difficulties in getting spans
// from PipelineData::ListStream, this is as it is.
_ => Err(ShellError::UnsupportedInput {
msg: "non-string input".into(),
input: "value originates from here".into(),
msg_span: head,
input_span: input.span().unwrap_or(head),
}),
} }
} }


@ -1,4 +1,4 @@
use super::base64::{operate, ActionType, CHARACTER_SET_DESC}; use super::base64::{operate, ActionType, Base64CommandArguments, CHARACTER_SET_DESC};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
@ -70,6 +70,10 @@ impl Command for EncodeBase64 {
] ]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -77,7 +81,34 @@ impl Command for EncodeBase64 {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
operate(ActionType::Encode, engine_state, stack, call, input) let character_set: Option<Spanned<String>> =
call.get_flag(engine_state, stack, "character-set")?;
let binary = call.has_flag(engine_state, stack, "binary")?;
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let args = Base64CommandArguments {
action_type: ActionType::Encode,
binary,
character_set,
};
operate(engine_state, call, input, cell_paths, args)
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let character_set: Option<Spanned<String>> =
call.get_flag_const(working_set, "character-set")?;
let binary = call.has_flag_const(working_set, "binary")?;
let cell_paths: Vec<CellPath> = call.rest_const(working_set, 0)?;
let args = Base64CommandArguments {
action_type: ActionType::Encode,
binary,
character_set,
};
operate(working_set.permanent(), call, input, cell_paths, args)
} }
} }


@ -27,7 +27,7 @@ impl Command for FormatDate {
SyntaxShape::String, SyntaxShape::String,
"The desired format date.", "The desired format date.",
) )
.category(Category::Date) .category(Category::Strings)
} }
fn usage(&self) -> &str { fn usage(&self) -> &str {
@ -38,36 +38,6 @@ impl Command for FormatDate {
vec!["fmt", "strftime"] vec!["fmt", "strftime"]
} }
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
if call.has_flag(engine_state, stack, "list")? {
return Ok(PipelineData::Value(
generate_strftime_list(head, false),
None,
));
}
let format = call.opt::<Spanned<String>>(engine_state, stack, 0)?;
// This doesn't match explicit nulls
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(
move |value| match &format {
Some(format) => format_helper(value, format.item.as_str(), format.span, head),
None => format_helper_rfc2822(value, head),
},
engine_state.ctrlc.clone(),
)
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {
@ -104,6 +74,61 @@ impl Command for FormatDate {
}, },
] ]
} }
fn is_const(&self) -> bool {
true
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let list = call.has_flag(engine_state, stack, "list")?;
let format = call.opt::<Spanned<String>>(engine_state, stack, 0)?;
run(engine_state, call, input, list, format)
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let list = call.has_flag_const(working_set, "list")?;
let format = call.opt_const::<Spanned<String>>(working_set, 0)?;
run(working_set.permanent(), call, input, list, format)
}
}
fn run(
engine_state: &EngineState,
call: &Call,
input: PipelineData,
list: bool,
format: Option<Spanned<String>>,
) -> Result<PipelineData, ShellError> {
let head = call.head;
if list {
return Ok(PipelineData::Value(
generate_strftime_list(head, false),
None,
));
}
// This doesn't match explicit nulls
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(
move |value| match &format {
Some(format) => format_helper(value, format.item.as_str(), format.span, head),
None => format_helper_rfc2822(value, head),
},
engine_state.ctrlc.clone(),
)
} }
fn format_from<Tz: TimeZone>(date_time: DateTime<Tz>, formatter: &str, span: Span) -> Value fn format_from<Tz: TimeZone>(date_time: DateTime<Tz>, formatter: &str, span: Span) -> Value


@ -53,6 +53,10 @@ impl Command for FormatDuration {
vec!["convert", "display", "pattern", "human readable"] vec!["convert", "display", "pattern", "human readable"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -81,6 +85,33 @@ impl Command for FormatDuration {
) )
} }
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let format_value = call
.req_const::<Value>(working_set, 0)?
.coerce_into_string()?
.to_ascii_lowercase();
let cell_paths: Vec<CellPath> = call.rest_const(working_set, 1)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let float_precision = working_set.permanent().config.float_precision as usize;
let arg = Arguments {
format_value,
float_precision,
cell_paths,
};
operate(
format_value_impl,
arg,
input,
call.head,
working_set.permanent().ctrlc.clone(),
)
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {


@ -1,6 +1,6 @@
use nu_cmd_base::input_handler::{operate, CmdArgument}; use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::format_filesize; use nu_protocol::{engine::StateWorkingSet, format_filesize};
struct Arguments { struct Arguments {
format_value: String, format_value: String,
@ -50,6 +50,10 @@ impl Command for FormatFilesize {
vec!["convert", "display", "pattern", "human readable"] vec!["convert", "display", "pattern", "human readable"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -76,6 +80,31 @@ impl Command for FormatFilesize {
) )
} }
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let format_value = call
.req_const::<Value>(working_set, 0)?
.coerce_into_string()?
.to_ascii_lowercase();
let cell_paths: Vec<CellPath> = call.rest_const(working_set, 1)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let arg = Arguments {
format_value,
cell_paths,
};
operate(
format_value_impl,
arg,
input,
call.head,
working_set.permanent().ctrlc.clone(),
)
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {


@ -1,6 +1,6 @@
use fancy_regex::{Captures, Regex}; use fancy_regex::{Captures, Regex};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::ListStream; use nu_protocol::{engine::StateWorkingSet, ListStream};
use std::{ use std::{
collections::VecDeque, collections::VecDeque,
sync::{atomic::AtomicBool, Arc}, sync::{atomic::AtomicBool, Arc},
@ -99,6 +99,10 @@ impl Command for Parse {
] ]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -106,19 +110,31 @@ impl Command for Parse {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
operate(engine_state, stack, call, input) let pattern: Spanned<String> = call.req(engine_state, stack, 0)?;
let regex: bool = call.has_flag(engine_state, stack, "regex")?;
operate(engine_state, pattern, regex, call, input)
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let pattern: Spanned<String> = call.req_const(working_set, 0)?;
let regex: bool = call.has_flag_const(working_set, "regex")?;
operate(working_set.permanent(), pattern, regex, call, input)
} }
} }
fn operate( fn operate(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, pattern: Spanned<String>,
regex: bool,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let head = call.head; let head = call.head;
let pattern: Spanned<String> = call.req(engine_state, stack, 0)?;
let regex: bool = call.has_flag(engine_state, stack, "regex")?;
let pattern_item = pattern.item; let pattern_item = pattern.item;
let pattern_span = pattern.span; let pattern_span = pattern.span;


@ -1,5 +1,6 @@
use crate::grapheme_flags; use crate::{grapheme_flags, grapheme_flags_const};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
#[derive(Clone)] #[derive(Clone)]
@ -88,6 +89,10 @@ impl Command for SubCommand {
] ]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -95,19 +100,28 @@ impl Command for SubCommand {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
split_chars(engine_state, stack, call, input) let graphemes = grapheme_flags(engine_state, stack, call)?;
split_chars(engine_state, call, input, graphemes)
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let graphemes = grapheme_flags_const(working_set, call)?;
split_chars(working_set.permanent(), call, input, graphemes)
} }
} }
fn split_chars( fn split_chars(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
graphemes: bool,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let span = call.head; let span = call.head;
let graphemes = grapheme_flags(engine_state, stack, call)?;
input.map( input.map(
move |x| split_chars_helper(&x, span, graphemes), move |x| split_chars_helper(&x, span, graphemes),
engine_state.ctrlc.clone(), engine_state.ctrlc.clone(),


@ -43,16 +43,6 @@ impl Command for SubCommand {
vec!["separate", "divide", "regex"] vec!["separate", "divide", "regex"]
} }
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
split_column(engine_state, stack, call, input)
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {
@ -103,35 +93,83 @@ impl Command for SubCommand {
}, },
] ]
} }
fn is_const(&self) -> bool {
true
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let separator: Spanned<String> = call.req(engine_state, stack, 0)?;
let rest: Vec<Spanned<String>> = call.rest(engine_state, stack, 1)?;
let collapse_empty = call.has_flag(engine_state, stack, "collapse-empty")?;
let has_regex = call.has_flag(engine_state, stack, "regex")?;
let args = Arguments {
separator,
rest,
collapse_empty,
has_regex,
};
split_column(engine_state, call, input, args)
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let separator: Spanned<String> = call.req_const(working_set, 0)?;
let rest: Vec<Spanned<String>> = call.rest_const(working_set, 1)?;
let collapse_empty = call.has_flag_const(working_set, "collapse-empty")?;
let has_regex = call.has_flag_const(working_set, "regex")?;
let args = Arguments {
separator,
rest,
collapse_empty,
has_regex,
};
split_column(working_set.permanent(), call, input, args)
}
}
struct Arguments {
separator: Spanned<String>,
rest: Vec<Spanned<String>>,
collapse_empty: bool,
has_regex: bool,
} }
fn split_column( fn split_column(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
args: Arguments,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let name_span = call.head; let name_span = call.head;
let separator: Spanned<String> = call.req(engine_state, stack, 0)?; let regex = if args.has_regex {
let rest: Vec<Spanned<String>> = call.rest(engine_state, stack, 1)?; Regex::new(&args.separator.item)
let collapse_empty = call.has_flag(engine_state, stack, "collapse-empty")?;
let regex = if call.has_flag(engine_state, stack, "regex")? {
Regex::new(&separator.item)
} else { } else {
let escaped = regex::escape(&separator.item); let escaped = regex::escape(&args.separator.item);
Regex::new(&escaped) Regex::new(&escaped)
} }
.map_err(|e| ShellError::GenericError { .map_err(|e| ShellError::GenericError {
error: "Error with regular expression".into(), error: "Error with regular expression".into(),
msg: e.to_string(), msg: e.to_string(),
span: Some(separator.span), span: Some(args.separator.span),
help: None, help: None,
inner: vec![], inner: vec![],
})?; })?;
input.flat_map( input.flat_map(
move |x| split_column_helper(&x, &regex, &rest, collapse_empty, name_span), move |x| split_column_helper(&x, &regex, &args.rest, args.collapse_empty, name_span),
engine_state.ctrlc.clone(), engine_state.ctrlc.clone(),
) )
} }
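Both split column and split row build their pattern the same way: escape the separator unless --regex was passed. A small sketch of that choice, using the regex crate here for brevity (the command itself pairs fancy_regex with regex::escape):

// Requires the `regex` crate as a dependency.
use regex::Regex;

// Treat the separator literally unless the --regex flag was given.
fn build_separator(separator: &str, has_regex: bool) -> Result<Regex, regex::Error> {
    if has_regex {
        Regex::new(separator)
    } else {
        Regex::new(&regex::escape(separator))
    }
}

fn main() -> Result<(), regex::Error> {
    let literal = build_separator("a.b", false)?; // "." matches only a literal dot
    let pattern = build_separator(r"\s+", true)?; // metacharacters stay live
    assert_eq!(literal.split("xa.by").collect::<Vec<_>>(), ["x", "y"]);
    assert_eq!(pattern.split("1  2\t3").collect::<Vec<_>>(), ["1", "2", "3"]);
    Ok(())
}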


@ -36,16 +36,6 @@ impl Command for SubCommand {
vec!["separate", "divide", "regex"] vec!["separate", "divide", "regex"]
} }
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
split_list(engine_state, stack, call, input)
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {
@ -145,6 +135,33 @@ impl Command for SubCommand {
}, },
] ]
} }
fn is_const(&self) -> bool {
true
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let has_regex = call.has_flag(engine_state, stack, "regex")?;
let separator: Value = call.req(engine_state, stack, 0)?;
split_list(engine_state, call, input, has_regex, separator)
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let has_regex = call.has_flag_const(working_set, "regex")?;
let separator: Value = call.req_const(working_set, 0)?;
split_list(working_set.permanent(), call, input, has_regex, separator)
}
} }
enum Matcher { enum Matcher {
@ -188,15 +205,15 @@ impl Matcher {
fn split_list( fn split_list(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
has_regex: bool,
separator: Value,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let separator: Value = call.req(engine_state, stack, 0)?;
let mut temp_list = Vec::new(); let mut temp_list = Vec::new();
let mut returned_list = Vec::new(); let mut returned_list = Vec::new();
let matcher = Matcher::new(call.has_flag(engine_state, stack, "regex")?, separator)?; let matcher = Matcher::new(has_regex, separator)?;
for val in input { for val in input {
if nu_utils::ctrl_c::was_pressed(&engine_state.ctrlc) { if nu_utils::ctrl_c::was_pressed(&engine_state.ctrlc) {
break; break;


@ -43,16 +43,6 @@ impl Command for SubCommand {
vec!["separate", "divide", "regex"] vec!["separate", "divide", "regex"]
} }
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
split_row(engine_state, stack, call, input)
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {
@ -109,32 +99,77 @@ impl Command for SubCommand {
}, },
] ]
} }
fn is_const(&self) -> bool {
true
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let separator: Spanned<String> = call.req(engine_state, stack, 0)?;
let max_split: Option<usize> = call.get_flag(engine_state, stack, "number")?;
let has_regex = call.has_flag(engine_state, stack, "regex")?;
let args = Arguments {
separator,
max_split,
has_regex,
};
split_row(engine_state, call, input, args)
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let separator: Spanned<String> = call.req_const(working_set, 0)?;
let max_split: Option<usize> = call.get_flag_const(working_set, "number")?;
let has_regex = call.has_flag_const(working_set, "regex")?;
let args = Arguments {
separator,
max_split,
has_regex,
};
split_row(working_set.permanent(), call, input, args)
}
}
struct Arguments {
has_regex: bool,
separator: Spanned<String>,
max_split: Option<usize>,
} }
fn split_row( fn split_row(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
args: Arguments,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let name_span = call.head; let name_span = call.head;
let separator: Spanned<String> = call.req(engine_state, stack, 0)?; let regex = if args.has_regex {
let regex = if call.has_flag(engine_state, stack, "regex")? { Regex::new(&args.separator.item)
Regex::new(&separator.item)
} else { } else {
let escaped = regex::escape(&separator.item); let escaped = regex::escape(&args.separator.item);
Regex::new(&escaped) Regex::new(&escaped)
} }
.map_err(|e| ShellError::GenericError { .map_err(|e| ShellError::GenericError {
error: "Error with regular expression".into(), error: "Error with regular expression".into(),
msg: e.to_string(), msg: e.to_string(),
span: Some(separator.span), span: Some(args.separator.span),
help: None, help: None,
inner: vec![], inner: vec![],
})?; })?;
let max_split: Option<usize> = call.get_flag(engine_state, stack, "number")?;
input.flat_map( input.flat_map(
move |x| split_row_helper(&x, &regex, max_split, name_span), move |x| split_row_helper(&x, &regex, args.max_split, name_span),
engine_state.ctrlc.clone(), engine_state.ctrlc.clone(),
) )
} }


@ -1,4 +1,4 @@
use crate::grapheme_flags; use crate::{grapheme_flags, grapheme_flags_const};
use fancy_regex::Regex; use fancy_regex::Regex;
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
@ -96,6 +96,10 @@ impl Command for SubCommand {
] ]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -103,40 +107,76 @@ impl Command for SubCommand {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
split_words(engine_state, stack, call, input) let word_length: Option<usize> = call.get_flag(engine_state, stack, "min-word-length")?;
let has_grapheme = call.has_flag(engine_state, stack, "grapheme-clusters")?;
let has_utf8 = call.has_flag(engine_state, stack, "utf-8-bytes")?;
let graphemes = grapheme_flags(engine_state, stack, call)?;
let args = Arguments {
word_length,
has_grapheme,
has_utf8,
graphemes,
};
split_words(engine_state, call, input, args)
} }
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let word_length: Option<usize> = call.get_flag_const(working_set, "min-word-length")?;
let has_grapheme = call.has_flag_const(working_set, "grapheme-clusters")?;
let has_utf8 = call.has_flag_const(working_set, "utf-8-bytes")?;
let graphemes = grapheme_flags_const(working_set, call)?;
let args = Arguments {
word_length,
has_grapheme,
has_utf8,
graphemes,
};
split_words(working_set.permanent(), call, input, args)
}
}
struct Arguments {
word_length: Option<usize>,
has_grapheme: bool,
has_utf8: bool,
graphemes: bool,
} }
fn split_words( fn split_words(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
args: Arguments,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let span = call.head; let span = call.head;
// let ignore_hyphenated = call.has_flag(engine_state, stack, "ignore-hyphenated")?; // let ignore_hyphenated = call.has_flag(engine_state, stack, "ignore-hyphenated")?;
// let ignore_apostrophes = call.has_flag(engine_state, stack, "ignore-apostrophes")?; // let ignore_apostrophes = call.has_flag(engine_state, stack, "ignore-apostrophes")?;
// let ignore_punctuation = call.has_flag(engine_state, stack, "ignore-punctuation")?; // let ignore_punctuation = call.has_flag(engine_state, stack, "ignore-punctuation")?;
let word_length: Option<usize> = call.get_flag(engine_state, stack, "min-word-length")?;
if word_length.is_none() { if args.word_length.is_none() {
if call.has_flag(engine_state, stack, "grapheme-clusters")? { if args.has_grapheme {
return Err(ShellError::IncompatibleParametersSingle { return Err(ShellError::IncompatibleParametersSingle {
msg: "--grapheme-clusters (-g) requires --min-word-length (-l)".to_string(), msg: "--grapheme-clusters (-g) requires --min-word-length (-l)".to_string(),
span, span,
}); });
} }
if call.has_flag(engine_state, stack, "utf-8-bytes")? { if args.has_utf8 {
return Err(ShellError::IncompatibleParametersSingle { return Err(ShellError::IncompatibleParametersSingle {
msg: "--utf-8-bytes (-b) requires --min-word-length (-l)".to_string(), msg: "--utf-8-bytes (-b) requires --min-word-length (-l)".to_string(),
span, span,
}); });
} }
} }
let graphemes = grapheme_flags(engine_state, stack, call)?;
input.map( input.map(
move |x| split_words_helper(&x, word_length, span, graphemes), move |x| split_words_helper(&x, args.word_length, span, args.graphemes),
engine_state.ctrlc.clone(), engine_state.ctrlc.clone(),
) )
} }


@ -36,6 +36,10 @@ impl Command for SubCommand {
vec!["convert", "style", "caps", "upper"] vec!["convert", "style", "caps", "upper"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -43,7 +47,18 @@ impl Command for SubCommand {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
operate(engine_state, stack, call, input) let column_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
operate(engine_state, call, input, column_paths)
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let column_paths: Vec<CellPath> = call.rest_const(working_set, 0)?;
operate(working_set.permanent(), call, input, column_paths)
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
@ -72,12 +87,11 @@ impl Command for SubCommand {
fn operate( fn operate(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
column_paths: Vec<CellPath>,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let head = call.head; let head = call.head;
let column_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
input.map( input.map(
move |v| { move |v| {
if column_paths.is_empty() { if column_paths.is_empty() {


@ -36,6 +36,10 @@ impl Command for SubCommand {
vec!["lower case", "lowercase"] vec!["lower case", "lowercase"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -43,7 +47,18 @@ impl Command for SubCommand {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
operate(engine_state, stack, call, input) let column_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
operate(engine_state, call, input, column_paths)
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let column_paths: Vec<CellPath> = call.rest_const(working_set, 0)?;
operate(working_set.permanent(), call, input, column_paths)
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
@ -80,12 +95,11 @@ impl Command for SubCommand {
fn operate( fn operate(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
column_paths: Vec<CellPath>,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let head = call.head; let head = call.head;
let column_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
input.map( input.map(
move |v| { move |v| {
if column_paths.is_empty() { if column_paths.is_empty() {


@ -36,6 +36,10 @@ impl Command for SubCommand {
vec!["uppercase", "upper case"] vec!["uppercase", "upper case"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -43,7 +47,18 @@ impl Command for SubCommand {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
operate(engine_state, stack, call, input) let column_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
operate(engine_state, call, input, column_paths)
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let column_paths: Vec<CellPath> = call.rest_const(working_set, 0)?;
operate(working_set.permanent(), call, input, column_paths)
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
@ -57,12 +72,11 @@ impl Command for SubCommand {
fn operate( fn operate(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
column_paths: Vec<CellPath>,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let head = call.head; let head = call.head;
let column_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
input.map( input.map(
move |v| { move |v| {
if column_paths.is_empty() { if column_paths.is_empty() {


@ -10,7 +10,6 @@ struct Arguments {
substring: String, substring: String,
cell_paths: Option<Vec<CellPath>>, cell_paths: Option<Vec<CellPath>>,
case_insensitive: bool, case_insensitive: bool,
not_contain: bool,
} }
impl CmdArgument for Arguments { impl CmdArgument for Arguments {
@ -40,7 +39,6 @@ impl Command for SubCommand {
"For a data structure input, check strings at the given cell paths, and replace with result.", "For a data structure input, check strings at the given cell paths, and replace with result.",
) )
.switch("ignore-case", "search is case insensitive", Some('i')) .switch("ignore-case", "search is case insensitive", Some('i'))
.switch("not", "DEPRECATED OPTION: does not contain", Some('n'))
.category(Category::Strings) .category(Category::Strings)
} }
@ -52,6 +50,10 @@ impl Command for SubCommand {
vec!["substring", "match", "find", "search"] vec!["substring", "match", "find", "search"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -59,9 +61,25 @@ impl Command for SubCommand {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
if call.has_flag(engine_state, stack, "not")? { let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 1)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let args = Arguments {
substring: call.req::<String>(engine_state, stack, 0)?,
cell_paths,
case_insensitive: call.has_flag(engine_state, stack, "ignore-case")?,
};
operate(action, args, input, call.head, engine_state.ctrlc.clone())
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
if call.has_flag_const(working_set, "not")? {
nu_protocol::report_error_new( nu_protocol::report_error_new(
engine_state, working_set.permanent(),
&ShellError::GenericError { &ShellError::GenericError {
error: "Deprecated option".into(), error: "Deprecated option".into(),
msg: "`str contains --not {string}` is deprecated and will be removed in 0.95." msg: "`str contains --not {string}` is deprecated and will be removed in 0.95."
@ -73,15 +91,20 @@ impl Command for SubCommand {
); );
} }
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 1)?; let cell_paths: Vec<CellPath> = call.rest_const(working_set, 1)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths); let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let args = Arguments { let args = Arguments {
substring: call.req::<String>(engine_state, stack, 0)?, substring: call.req_const::<String>(working_set, 0)?,
cell_paths, cell_paths,
case_insensitive: call.has_flag(engine_state, stack, "ignore-case")?, case_insensitive: call.has_flag_const(working_set, "ignore-case")?,
not_contain: call.has_flag(engine_state, stack, "not")?,
}; };
operate(action, args, input, call.head, engine_state.ctrlc.clone()) operate(
action,
args,
input,
call.head,
working_set.permanent().ctrlc.clone(),
)
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
@ -142,7 +165,6 @@ fn action(
input: &Value, input: &Value,
Arguments { Arguments {
case_insensitive, case_insensitive,
not_contain,
substring, substring,
.. ..
}: &Arguments, }: &Arguments,
@ -150,23 +172,11 @@ fn action(
) -> Value { ) -> Value {
match input { match input {
Value::String { val, .. } => Value::bool( Value::String { val, .. } => Value::bool(
match case_insensitive { if *case_insensitive {
true => { val.to_folded_case()
if *not_contain { .contains(substring.to_folded_case().as_str())
!val.to_folded_case() } else {
.contains(substring.to_folded_case().as_str()) val.contains(substring)
} else {
val.to_folded_case()
.contains(substring.to_folded_case().as_str())
}
}
false => {
if *not_contain {
!val.contains(substring)
} else {
val.contains(substring)
}
}
}, },
head, head,
), ),
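With the deprecated --not branch gone, the check above reduces to a plain case-folded containment test. A self-contained sketch, using to_lowercase as a rough stand-in for nu-utils' to_folded_case:

// `contains` here is a hypothetical helper mirroring the simplified action above.
fn contains(haystack: &str, needle: &str, case_insensitive: bool) -> bool {
    if case_insensitive {
        haystack.to_lowercase().contains(&needle.to_lowercase())
    } else {
        haystack.contains(needle)
    }
}

fn main() {
    assert!(contains("Nushell", "nu", true));
    assert!(!contains("Nushell", "nu", false));
    assert!(contains("Nushell", "Nu", false));
}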


@ -1,6 +1,6 @@
use nu_cmd_base::input_handler::{operate, CmdArgument}; use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::levenshtein_distance; use nu_protocol::{engine::StateWorkingSet, levenshtein_distance};
#[derive(Clone)] #[derive(Clone)]
pub struct SubCommand; pub struct SubCommand;
@ -49,6 +49,10 @@ impl Command for SubCommand {
vec!["edit", "levenshtein"] vec!["edit", "levenshtein"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -66,6 +70,28 @@ impl Command for SubCommand {
operate(action, args, input, call.head, engine_state.ctrlc.clone()) operate(action, args, input, call.head, engine_state.ctrlc.clone())
} }
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let compare_string: String = call.req_const(working_set, 0)?;
let cell_paths: Vec<CellPath> = call.rest_const(working_set, 1)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let args = Arguments {
compare_string,
cell_paths,
};
operate(
action,
args,
input,
call.head,
working_set.permanent().ctrlc.clone(),
)
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![Example { vec![Example {
description: "get the edit distance between two strings", description: "get the edit distance between two strings",


@ -50,6 +50,10 @@ impl Command for SubCommand {
vec!["suffix", "match", "find", "search"] vec!["suffix", "match", "find", "search"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -67,6 +71,28 @@ impl Command for SubCommand {
operate(action, args, input, call.head, engine_state.ctrlc.clone()) operate(action, args, input, call.head, engine_state.ctrlc.clone())
} }
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest_const(working_set, 1)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let args = Arguments {
substring: call.req_const::<String>(working_set, 0)?,
cell_paths,
case_insensitive: call.has_flag_const(working_set, "ignore-case")?,
};
operate(
action,
args,
input,
call.head,
working_set.permanent().ctrlc.clone(),
)
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {


@ -179,6 +179,10 @@ impl Command for SubCommand {
] ]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -186,32 +190,51 @@ impl Command for SubCommand {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let span = call.head;
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: span });
}
let is_path = call.has_flag(engine_state, stack, "path")?; let is_path = call.has_flag(engine_state, stack, "path")?;
input.map( run(call, input, is_path, engine_state)
move |v| {
let value_span = v.span();
match v.coerce_into_string() {
Ok(s) => {
let contents = if is_path { s.replace('\\', "\\\\") } else { s };
str_expand(&contents, span, value_span)
}
Err(_) => Value::error(
ShellError::PipelineMismatch {
exp_input_type: "string".into(),
dst_span: span,
src_span: value_span,
},
span,
),
}
},
engine_state.ctrlc.clone(),
)
} }
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let is_path = call.has_flag_const(working_set, "path")?;
run(call, input, is_path, working_set.permanent())
}
}
fn run(
call: &Call,
input: PipelineData,
is_path: bool,
engine_state: &EngineState,
) -> Result<PipelineData, ShellError> {
let span = call.head;
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: span });
}
input.map(
move |v| {
let value_span = v.span();
match v.coerce_into_string() {
Ok(s) => {
let contents = if is_path { s.replace('\\', "\\\\") } else { s };
str_expand(&contents, span, value_span)
}
Err(_) => Value::error(
ShellError::PipelineMismatch {
exp_input_type: "string".into(),
dst_span: span,
src_span: value_span,
},
span,
),
}
},
engine_state.ctrlc.clone(),
)
} }
fn str_expand(contents: &str, span: Span, value_span: Span) -> Value { fn str_expand(contents: &str, span: Span, value_span: Span) -> Value {


@ -1,10 +1,10 @@
use crate::grapheme_flags; use crate::{grapheme_flags, grapheme_flags_const};
use nu_cmd_base::{ use nu_cmd_base::{
input_handler::{operate, CmdArgument}, input_handler::{operate, CmdArgument},
util, util,
}; };
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::Range; use nu_protocol::{engine::StateWorkingSet, Range};
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
struct Arguments { struct Arguments {
@ -72,6 +72,10 @@ impl Command for SubCommand {
vec!["match", "find", "search"] vec!["match", "find", "search"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -92,6 +96,31 @@ impl Command for SubCommand {
operate(action, args, input, call.head, engine_state.ctrlc.clone()) operate(action, args, input, call.head, engine_state.ctrlc.clone())
} }
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let substring: Spanned<String> = call.req_const(working_set, 0)?;
let cell_paths: Vec<CellPath> = call.rest_const(working_set, 1)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let args = Arguments {
substring: substring.item,
range: call.get_flag_const(working_set, "range")?,
end: call.has_flag_const(working_set, "end")?,
cell_paths,
graphemes: grapheme_flags_const(working_set, call)?,
};
operate(
action,
args,
input,
call.head,
working_set.permanent().ctrlc.clone(),
)
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {
@ -405,7 +434,7 @@ mod tests {
let range = Range::new( let range = Range::new(
Value::int(0, Span::test_data()), Value::int(0, Span::test_data()),
Value::int(1, Span::test_data()), Value::int(1, Span::test_data()),
Value::int(3, Span::test_data()), Value::int(2, Span::test_data()),
RangeInclusion::Inclusive, RangeInclusion::Inclusive,
Span::test_data(), Span::test_data(),
) )


@ -1,4 +1,5 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use std::io::Write; use std::io::Write;
#[derive(Clone)] #[derive(Clone)]
@ -32,6 +33,10 @@ impl Command for StrJoin {
vec!["collect", "concatenate"] vec!["collect", "concatenate"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -40,41 +45,17 @@ impl Command for StrJoin {
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let separator: Option<String> = call.opt(engine_state, stack, 0)?; let separator: Option<String> = call.opt(engine_state, stack, 0)?;
run(engine_state, call, input, separator)
}
let config = engine_state.config.clone(); fn run_const(
&self,
let span = call.head; working_set: &StateWorkingSet,
call: &Call,
let metadata = input.metadata(); input: PipelineData,
let mut iter = input.into_iter(); ) -> Result<PipelineData, ShellError> {
let mut first = true; let separator: Option<String> = call.opt_const(working_set, 0)?;
run(working_set.permanent(), call, input, separator)
let output = ByteStream::from_fn(span, None, ByteStreamType::String, move |buffer| {
// Write each input to the buffer
if let Some(value) = iter.next() {
// Write the separator if this is not the first
if first {
first = false;
} else if let Some(separator) = &separator {
write!(buffer, "{}", separator)?;
}
match value {
Value::Error { error, .. } => {
return Err(*error);
}
// Hmm, not sure what we actually want.
// `to_expanded_string` formats dates as human readable which feels funny.
Value::Date { val, .. } => write!(buffer, "{val:?}")?,
value => write!(buffer, "{}", value.to_expanded_string("\n", &config))?,
}
Ok(true)
} else {
Ok(false)
}
});
Ok(PipelineData::ByteStream(output, metadata))
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
@ -93,6 +74,48 @@ impl Command for StrJoin {
} }
} }
fn run(
engine_state: &EngineState,
call: &Call,
input: PipelineData,
separator: Option<String>,
) -> Result<PipelineData, ShellError> {
let config = engine_state.config.clone();
let span = call.head;
let metadata = input.metadata();
let mut iter = input.into_iter();
let mut first = true;
let output = ByteStream::from_fn(span, None, ByteStreamType::String, move |buffer| {
// Write each input to the buffer
if let Some(value) = iter.next() {
// Write the separator if this is not the first
if first {
first = false;
} else if let Some(separator) = &separator {
write!(buffer, "{}", separator)?;
}
match value {
Value::Error { error, .. } => {
return Err(*error);
}
// Hmm, not sure what we actually want.
// `to_expanded_string` formats dates as human readable which feels funny.
Value::Date { val, .. } => write!(buffer, "{val:?}")?,
value => write!(buffer, "{}", value.to_expanded_string("\n", &config))?,
}
Ok(true)
} else {
Ok(false)
}
});
Ok(PipelineData::ByteStream(output, metadata))
}
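The run() helper above writes each value into a ByteStream, emitting the separator only between elements. The same first-element bookkeeping, reduced to a self-contained sketch over plain strings (join_with and its inputs are made up for illustration):

use std::fmt::Write;

fn join_with(values: &[&str], separator: Option<&str>) -> String {
    let mut buffer = String::new();
    let mut first = true;
    for value in values {
        // Write the separator before every element except the first, as run() does above.
        if first {
            first = false;
        } else if let Some(sep) = separator {
            let _ = write!(buffer, "{sep}");
        }
        let _ = write!(buffer, "{value}");
    }
    buffer
}

fn main() {
    assert_eq!(join_with(&["nu", "shell"], Some("-")), "nu-shell");
    assert_eq!(join_with(&["a", "b", "c"], None), "abc");
    println!("{}", join_with(&["str", "join"], Some(" ")));
}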
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;


@ -1,7 +1,7 @@
use crate::{grapheme_flags, grapheme_flags_const}; use crate::{grapheme_flags, grapheme_flags_const};
use nu_cmd_base::input_handler::{operate, CmdArgument}; use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::engine::StateWorkingSet;
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
struct Arguments { struct Arguments {


@ -73,6 +73,10 @@ impl Command for SubCommand {
vec!["search", "shift", "switch", "regex"] vec!["search", "shift", "switch", "regex"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -101,6 +105,39 @@ impl Command for SubCommand {
operate(action, args, input, call.head, engine_state.ctrlc.clone()) operate(action, args, input, call.head, engine_state.ctrlc.clone())
} }
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let find: Spanned<String> = call.req_const(working_set, 0)?;
let replace: Spanned<String> = call.req_const(working_set, 1)?;
let cell_paths: Vec<CellPath> = call.rest_const(working_set, 2)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let literal_replace = call.has_flag_const(working_set, "no-expand")?;
let no_regex = !call.has_flag_const(working_set, "regex")?
&& !call.has_flag_const(working_set, "multiline")?;
let multiline = call.has_flag_const(working_set, "multiline")?;
let args = Arguments {
all: call.has_flag_const(working_set, "all")?,
find,
replace,
cell_paths,
literal_replace,
no_regex,
multiline,
};
operate(
action,
args,
input,
call.head,
working_set.permanent().ctrlc.clone(),
)
}
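The same recipe repeats in the str reverse, str starts-with, str stats, str substring, and str trim sections below: report `is_const()` as true, then implement `run_const` as a copy of `run` with every stack-based accessor swapped for its const counterpart (`req` to `req_const`, `rest` to `rest_const`, `opt` to `opt_const`, `get_flag` to `get_flag_const`, `has_flag` to `has_flag_const`) and with `working_set.permanent()` standing in wherever an &EngineState is needed. A reduced sketch of that shape, shown here only to name the pattern; the `action`, `operate`, and `CellPathOnlyArgs` items belong to whichever subcommand hosts the method, and the str reverse implementation below is exactly this minimal form:

    fn run_const(
        &self,
        working_set: &StateWorkingSet,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        // Parse-time accessors: no Stack is available during const evaluation.
        let cell_paths: Vec<CellPath> = call.rest_const(working_set, 0)?;
        let args = CellPathOnlyArgs::from(cell_paths);
        // `working_set.permanent()` exposes the underlying &EngineState, so the
        // same operate/action path used by `run` is reused unchanged.
        operate(
            action,
            args,
            input,
            call.head,
            working_set.permanent().ctrlc.clone(),
        )
    }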
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {


@ -37,6 +37,10 @@ impl Command for SubCommand {
vec!["convert", "inverse", "flip"] vec!["convert", "inverse", "flip"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -49,6 +53,23 @@ impl Command for SubCommand {
operate(action, args, input, call.head, engine_state.ctrlc.clone()) operate(action, args, input, call.head, engine_state.ctrlc.clone())
} }
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest_const(working_set, 0)?;
let args = CellPathOnlyArgs::from(cell_paths);
operate(
action,
args,
input,
call.head,
working_set.permanent().ctrlc.clone(),
)
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {


@ -51,6 +51,10 @@ impl Command for SubCommand {
vec!["prefix", "match", "find", "search"] vec!["prefix", "match", "find", "search"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -69,6 +73,29 @@ impl Command for SubCommand {
operate(action, args, input, call.head, engine_state.ctrlc.clone()) operate(action, args, input, call.head, engine_state.ctrlc.clone())
} }
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let substring: Spanned<String> = call.req_const(working_set, 0)?;
let cell_paths: Vec<CellPath> = call.rest_const(working_set, 1)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let args = Arguments {
substring: substring.item,
cell_paths,
case_insensitive: call.has_flag_const(working_set, "ignore-case")?,
};
operate(
action,
args,
input,
call.head,
working_set.permanent().ctrlc.clone(),
)
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {


@ -1,5 +1,6 @@
use fancy_regex::Regex; use fancy_regex::Regex;
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::{fmt, str}; use std::{fmt, str};
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
@ -29,6 +30,10 @@ impl Command for SubCommand {
vec!["count", "word", "character", "unicode", "wc"] vec!["count", "word", "character", "unicode", "wc"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -39,6 +44,15 @@ impl Command for SubCommand {
stats(engine_state, call, input) stats(engine_state, call, input)
} }
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
stats(working_set.permanent(), call, input)
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {


@ -1,11 +1,10 @@
use crate::grapheme_flags; use crate::{grapheme_flags, grapheme_flags_const};
use nu_cmd_base::{ use nu_cmd_base::{
input_handler::{operate, CmdArgument}, input_handler::{operate, CmdArgument},
util, util,
}; };
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::Range; use nu_protocol::{engine::StateWorkingSet, Range};
use std::cmp::Ordering;
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
#[derive(Clone)] #[derive(Clone)]
@ -70,13 +69,17 @@ impl Command for SubCommand {
} }
fn usage(&self) -> &str { fn usage(&self) -> &str {
"Get part of a string. Note that the start is included but the end is excluded, and that the first character of a string is index 0." "Get part of a string. Note that the first character of a string is index 0."
} }
fn search_terms(&self) -> Vec<&str> { fn search_terms(&self) -> Vec<&str> {
vec!["slice"] vec!["slice"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -103,19 +106,55 @@ impl Command for SubCommand {
operate(action, args, input, call.head, engine_state.ctrlc.clone()) operate(action, args, input, call.head, engine_state.ctrlc.clone())
} }
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let range: Range = call.req_const(working_set, 0)?;
let indexes = match util::process_range(&range) {
Ok(idxs) => idxs.into(),
Err(processing_error) => {
return Err(processing_error("could not perform substring", call.head))
}
};
let cell_paths: Vec<CellPath> = call.rest_const(working_set, 1)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let args = Arguments {
indexes,
cell_paths,
graphemes: grapheme_flags_const(working_set, call)?,
};
operate(
action,
args,
input,
call.head,
working_set.permanent().ctrlc.clone(),
)
}
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {
description: description:
"Get a substring \"nushell\" from the text \"good nushell\" using a range", "Get a substring \"nushell\" from the text \"good nushell\" using a range",
example: " 'good nushell' | str substring 5..12", example: " 'good nushell' | str substring 5..11",
result: Some(Value::test_string("nushell")), result: Some(Value::test_string("nushell")),
}, },
Example { Example {
description: "Count indexes and split using grapheme clusters", description: "Count indexes and split using grapheme clusters",
example: " '🇯🇵ほげ ふが ぴよ' | str substring --grapheme-clusters 4..6", example: " '🇯🇵ほげ ふが ぴよ' | str substring --grapheme-clusters 4..5",
result: Some(Value::test_string("ふが")), result: Some(Value::test_string("ふが")),
}, },
Example {
description: "sub string by negative index",
example: " 'good nushell' | str substring 5..-2",
result: Some(Value::test_string("nushel")),
},
] ]
} }
} }
@ -132,56 +171,46 @@ fn action(input: &Value, args: &Arguments, head: Span) -> Value {
options.0 options.0
}; };
let end: isize = if options.1 < 0 { let end: isize = if options.1 < 0 {
std::cmp::max(len + options.1, 0) options.1 + len
} else { } else {
options.1 options.1
}; };
if start < len && end >= 0 { if start > end {
match start.cmp(&end) { Value::string("", head)
Ordering::Equal => Value::string("", head), } else {
Ordering::Greater => Value::error( Value::string(
ShellError::TypeMismatch { {
err_message: "End must be greater than or equal to Start".to_string(), if end == isize::MAX {
span: head, if args.graphemes {
},
head,
),
Ordering::Less => Value::string(
{
if end == isize::MAX {
if args.graphemes {
s.graphemes(true)
.skip(start as usize)
.collect::<Vec<&str>>()
.join("")
} else {
String::from_utf8_lossy(
&s.bytes().skip(start as usize).collect::<Vec<_>>(),
)
.to_string()
}
} else if args.graphemes {
s.graphemes(true) s.graphemes(true)
.skip(start as usize) .skip(start as usize)
.take((end - start) as usize)
.collect::<Vec<&str>>() .collect::<Vec<&str>>()
.join("") .join("")
} else { } else {
String::from_utf8_lossy( String::from_utf8_lossy(
&s.bytes() &s.bytes().skip(start as usize).collect::<Vec<_>>(),
.skip(start as usize)
.take((end - start) as usize)
.collect::<Vec<_>>(),
) )
.to_string() .to_string()
} }
}, } else if args.graphemes {
head, s.graphemes(true)
), .skip(start as usize)
} .take((end - start + 1) as usize)
} else { .collect::<Vec<&str>>()
Value::string("", head) .join("")
} else {
String::from_utf8_lossy(
&s.bytes()
.skip(start as usize)
.take((end - start + 1) as usize)
.collect::<Vec<_>>(),
)
.to_string()
}
},
head,
)
} }
} }
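The rewritten arithmetic above treats the end index as inclusive, matching Nushell range semantics; that is why the examples change from 5..12 to 5..11 and 4..6 to 4..5, and why the test expectations further down shift by one. A negative end now counts back from the end of the string, with -1 reaching the last character, while an unbounded end is still special-cased as isize::MAX. A standalone sketch of the new behavior on plain char input, independent of the byte/grapheme handling above (`substring` here is a hypothetical helper, not the command's code):

    // Hypothetical helper mirroring the inclusive-end rules on chars.
    fn substring(s: &str, start: isize, end: isize) -> String {
        let len = s.chars().count() as isize;
        let start = if start < 0 { (len + start).max(0) } else { start };
        // A negative end counts back from the end of the string.
        let end = if end < 0 { end + len } else { end };
        if start > end {
            String::new()
        } else {
            s.chars()
                .skip(start as usize)
                // The end index is inclusive, hence the `+ 1`.
                .take((end - start + 1) as usize)
                .collect()
        }
    }

    fn main() {
        assert_eq!(substring("andres", 0, 0), "a"); // was "" with the old exclusive end
        assert_eq!(substring("andres", 0, -1), "andres"); // -1 reaches the last character
        assert_eq!(substring("good nushell", 5, 11), "nushell");
        assert_eq!(substring("good nushell", 5, -2), "nushel");
    }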
// Propagate errors by explicitly matching them before the final case. // Propagate errors by explicitly matching them before the final case.
@ -208,6 +237,7 @@ mod tests {
test_examples(SubCommand {}) test_examples(SubCommand {})
} }
#[derive(Debug)]
struct Expectation<'a> { struct Expectation<'a> {
options: (isize, isize), options: (isize, isize),
expected: &'a str, expected: &'a str,
@ -231,18 +261,19 @@ mod tests {
let word = Value::test_string("andres"); let word = Value::test_string("andres");
let cases = vec![ let cases = vec![
expectation("a", (0, 1)), expectation("a", (0, 0)),
expectation("an", (0, 2)), expectation("an", (0, 1)),
expectation("and", (0, 3)), expectation("and", (0, 2)),
expectation("andr", (0, 4)), expectation("andr", (0, 3)),
expectation("andre", (0, 5)), expectation("andre", (0, 4)),
expectation("andres", (0, 5)),
expectation("andres", (0, 6)), expectation("andres", (0, 6)),
expectation("", (0, -6)), expectation("a", (0, -6)),
expectation("a", (0, -5)), expectation("an", (0, -5)),
expectation("an", (0, -4)), expectation("and", (0, -4)),
expectation("and", (0, -3)), expectation("andr", (0, -3)),
expectation("andr", (0, -2)), expectation("andre", (0, -2)),
expectation("andre", (0, -1)), expectation("andres", (0, -1)),
// str substring [ -4 , _ ] // str substring [ -4 , _ ]
// str substring -4 , // str substring -4 ,
expectation("dres", (-4, isize::MAX)), expectation("dres", (-4, isize::MAX)),
@ -257,6 +288,7 @@ mod tests {
]; ];
for expectation in &cases { for expectation in &cases {
println!("{:?}", expectation);
let expected = expectation.expected; let expected = expectation.expected;
let actual = action( let actual = action(
&word, &word,


@ -71,6 +71,10 @@ impl Command for SubCommand {
vec!["whitespace", "strip", "lstrip", "rstrip"] vec!["whitespace", "strip", "lstrip", "rstrip"]
} }
fn is_const(&self) -> bool {
true
}
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
@ -79,44 +83,37 @@ impl Command for SubCommand {
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let character = call.get_flag::<Spanned<String>>(engine_state, stack, "char")?; let character = call.get_flag::<Spanned<String>>(engine_state, stack, "char")?;
let to_trim = match character.as_ref() {
Some(v) => {
if v.item.chars().count() > 1 {
return Err(ShellError::GenericError {
error: "Trim only works with single character".into(),
msg: "needs single character".into(),
span: Some(v.span),
help: None,
inner: vec![],
});
}
v.item.chars().next()
}
None => None,
};
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?; let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let mode = match cell_paths {
None => ActionMode::Global,
Some(_) => ActionMode::Local,
};
let left = call.has_flag(engine_state, stack, "left")?; let left = call.has_flag(engine_state, stack, "left")?;
let right = call.has_flag(engine_state, stack, "right")?; let right = call.has_flag(engine_state, stack, "right")?;
let trim_side = match (left, right) { run(
(true, true) => TrimSide::Both, character,
(true, false) => TrimSide::Left,
(false, true) => TrimSide::Right,
(false, false) => TrimSide::Both,
};
let args = Arguments {
to_trim,
trim_side,
cell_paths, cell_paths,
mode, (left, right),
}; call,
operate(action, args, input, call.head, engine_state.ctrlc.clone()) input,
engine_state,
)
}
fn run_const(
&self,
working_set: &StateWorkingSet,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let character = call.get_flag_const::<Spanned<String>>(working_set, "char")?;
let cell_paths: Vec<CellPath> = call.rest_const(working_set, 0)?;
let left = call.has_flag_const(working_set, "left")?;
let right = call.has_flag_const(working_set, "right")?;
run(
character,
cell_paths,
(left, right),
call,
input,
working_set.permanent(),
)
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
@ -150,6 +147,52 @@ impl Command for SubCommand {
} }
} }
fn run(
character: Option<Spanned<String>>,
cell_paths: Vec<CellPath>,
(left, right): (bool, bool),
call: &Call,
input: PipelineData,
engine_state: &EngineState,
) -> Result<PipelineData, ShellError> {
let to_trim = match character.as_ref() {
Some(v) => {
if v.item.chars().count() > 1 {
return Err(ShellError::GenericError {
error: "Trim only works with single character".into(),
msg: "needs single character".into(),
span: Some(v.span),
help: None,
inner: vec![],
});
}
v.item.chars().next()
}
None => None,
};
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let mode = match cell_paths {
None => ActionMode::Global,
Some(_) => ActionMode::Local,
};
let trim_side = match (left, right) {
(true, true) => TrimSide::Both,
(true, false) => TrimSide::Left,
(false, true) => TrimSide::Right,
(false, false) => TrimSide::Both,
};
let args = Arguments {
to_trim,
trim_side,
cell_paths,
mode,
};
operate(action, args, input, call.head, engine_state.ctrlc.clone())
}
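One behavioural detail worth spelling out from the helper above: passing neither --left nor --right trims both sides, and passing both flags does the same. A standalone stand-in for that mapping, shown only to make the truth table explicit (the real TrimSide lives in this file; the derives here exist purely for the asserts):

    #[derive(Debug, PartialEq)]
    enum TrimSide {
        Left,
        Right,
        Both,
    }

    fn trim_side(left: bool, right: bool) -> TrimSide {
        match (left, right) {
            (true, true) => TrimSide::Both,
            (true, false) => TrimSide::Left,
            (false, true) => TrimSide::Right,
            (false, false) => TrimSide::Both,
        }
    }

    fn main() {
        // No flag and both flags collapse to trimming both sides.
        assert_eq!(trim_side(false, false), TrimSide::Both);
        assert_eq!(trim_side(true, true), TrimSide::Both);
        assert_eq!(trim_side(true, false), TrimSide::Left);
        assert_eq!(trim_side(false, true), TrimSide::Right);
    }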
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
pub enum ActionMode { pub enum ActionMode {
Local, Local,
