Merge branch 'main' into encode

Commit: 3da060bcf9
.github/workflows/nightly-build.yml (vendored): 2 changed lines

@@ -161,7 +161,7 @@ jobs:
       # REF: https://github.com/marketplace/actions/gh-release
       # Create a release only in nushell/nightly repo
       - name: Publish Archive
-        uses: softprops/action-gh-release@v2.0.6
+        uses: softprops/action-gh-release@v2.0.8
        if: ${{ startsWith(github.repository, 'nushell/nightly') }}
        with:
          prerelease: true

.github/workflows/release-pkg.nu (vendored): 6 changed lines

@@ -161,8 +161,12 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
     let releaseStem = $'($bin)-($version)-($target)'

     print $'(char nl)Download less related stuffs...'; hr-line
+    # todo: less-v661 is out but is released as a zip file. maybe we should switch to that and extract it?
     aria2c https://github.com/jftuga/less-Windows/releases/download/less-v608/less.exe -o less.exe
-    aria2c https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE -o LICENSE-for-less.txt
+    # the below was renamed because it was failing to download for darren. it should work but it wasn't
+    # todo: maybe we should get rid of this aria2c dependency and just use http get?
+    #aria2c https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE -o LICENSE-for-less.txt
+    aria2c https://github.com/jftuga/less-Windows/blob/master/LICENSE -o LICENSE-for-less.txt

     # Create Windows msi release package
     if (get-env _EXTRA_) == 'msi' {

.github/workflows/release.yml (vendored): 2 changed lines

@@ -91,7 +91,7 @@ jobs:

       # REF: https://github.com/marketplace/actions/gh-release
       - name: Publish Archive
-        uses: softprops/action-gh-release@v2.0.6
+        uses: softprops/action-gh-release@v2.0.8
        if: ${{ startsWith(github.ref, 'refs/tags/') }}
        with:
          draft: true

.github/workflows/typos.yml (vendored): 2 changed lines

@@ -10,4 +10,4 @@ jobs:
        uses: actions/checkout@v4.1.7

      - name: Check spelling
-       uses: crate-ci/typos@v1.23.2
+       uses: crate-ci/typos@v1.23.5

Cargo.lock (generated): 101 changed lines

Every nushell workspace crate is bumped from version 0.95.1 to 0.96.2: nu, nu-cli, nu-cmd-base, nu-cmd-extra, nu-cmd-lang, nu-cmd-plugin, nu-color-config, nu-command, nu-derive-value, nu-engine, nu-explore, nu-glob, nu-json, nu-lsp, nu-parser, nu-path, nu-plugin, nu-plugin-core, nu-plugin-engine, nu-plugin-protocol, nu-plugin-test-support, nu-pretty-hex, nu-protocol, nu-std, nu-system, nu-table, nu-term-grid, nu-test-support, nu-utils, nu_plugin_example, nu_plugin_formats, nu_plugin_gstat, nu_plugin_inc, nu_plugin_polars, nu_plugin_query, nu_plugin_stress_internals, and nuon. nu-engine additionally gains a "terminal_size" dependency.

External dependencies are updated (their registry checksums change accordingly):
- nu-ansi-term 0.50.0 -> 0.50.1 (its windows-sys dependency moves from 0.48.0 to 0.52.0)
- openssl 0.10.64 -> 0.10.66
- openssl-sys 0.9.102 -> 0.9.103
- reedline 0.32.0 -> 0.33.0 (the git pin on nushell/reedline branch main moves from commit 480059a3f52cf919341cda88e8c544edd846bc73 to 919292e40fd417e3da882692021961b444150c59)
- shadow-rs 0.29.0 -> 0.30.0
- similar 2.5.0 -> 2.6.0

Cargo.toml: 46 changed lines

- rust-version is raised from "1.77.2" to "1.78.0" and the package version from "0.95.1" to "0.96.2".
- Workspace dependency pins are bumped: nu-ansi-term "0.50.0" -> "0.50.1" and reedline "0.32.0" -> "0.33.0".
- In [dependencies], every local path dependency (nu-cli, nu-cmd-base, nu-cmd-lang, nu-cmd-plugin, nu-cmd-extra, nu-command, nu-engine, nu-explore, nu-lsp, nu-parser, nu-path, nu-plugin-engine, nu-protocol, nu-std, nu-system, nu-utils) moves from version "0.95.1" to "0.96.2"; the surrounding entries (reedline and crossterm from the workspace) are unchanged.
- In [dev-dependencies], nu-test-support, nu-plugin-protocol, and nu-plugin-core likewise move from "0.95.1" to "0.96.2"; assert_cmd, dirs, and tango-bench are unchanged.

SECURITY.md (new file): 29 lines

@@ -0,0 +1,29 @@
+# Security Policy
+
+As a shell and programming language Nushell provides you with great powers and the potential to do dangerous things to your computer and data. Whenever there is a risk that a malicious actor can abuse a bug or a violation of documented behavior/assumptions in Nushell to harm you this is a *security* risk.
+We want to fix those issues without exposing our users to unnecessary risk. Thus we want to explain our security policy.
+Additional issues may be part of *safety* where the behavior of Nushell as designed and implemented can cause unintended harm or a bug causes damage without the involvement of a third party.
+
+## Supported Versions
+
+As Nushell is still under very active pre-stable development, the only version the core team prioritizes for security and safety fixes is the [most recent version as published on GitHub](https://github.com/nushell/nushell/releases/latest).
+Only if you provide a strong reasoning and the necessary resources, will we consider blessing a backported fix with an official patch release for a previous version.
+
+## Reporting a Vulnerability
+
+If you suspect that a bug or behavior of Nushell can affect security or may be potentially exploitable, please report the issue to us in private.
+Either reach out to the core team on [our Discord server](https://discord.gg/NtAbbGn) to arrange a private channel or use the [GitHub vulnerability reporting form](https://github.com/nushell/nushell/security/advisories/new).
+Please try to answer the following questions:
+- How can we reach you for further questions?
+- What is the bug? Which system of Nushell may be affected?
+- Do you have proof-of-concept for a potential exploit or have you observed an exploit in the wild?
+- What is your assessment of the severity based on what could be impacted should the bug be exploited?
+- Are additional people aware of the issue or deserve credit for identifying the issue?
+
+We will try to get back to you within a week with:
+- acknowledging the receipt of the report
+- an initial plan of how we want to address this including the primary points of contact for further communication
+- our preliminary assessment of how severe we judge the issue
+- a proposal for how we can coordinate responsible disclosure (e.g. how we ship the bugfix, if we need to coordinate with distribution maintainers, when you can release a blog post if you want to etc.)
+
+For purely *safety* related issues where the impact is severe by direct user action instead of malicious input or third parties, feel free to open a regular issue. If we deem that there may be an additional *security* risk on a *safety* issue we may continue discussions in a restricted forum.

nu-cli crate manifest, hunk @@ -5,27 +5,27 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli":

- The crate version moves from "0.95.1" to "0.96.2".
- Every local path dependency is bumped to "0.96.2": nu-cmd-lang, nu-command, and nu-test-support under [dev-dependencies]; nu-cmd-base, nu-engine, nu-path, nu-parser, nu-plugin-engine (optional), nu-protocol, nu-utils, and nu-color-config under [dependencies].
- Unchanged context: edition = "2021", license = "MIT", name = "nu-cli", [lib] bench = false, and the workspace dependencies rstest, tempfile, nu-ansi-term, and reedline (with features ["bashisms", "sqlite"]).

@@ -67,7 +67,7 @@ impl Command for History {
         } else {
             let history_reader: Option<Box<dyn ReedlineHistory>> = match history.file_format {
                 HistoryFileFormat::Sqlite => {
-                    SqliteBackedHistory::with_file(history_path.clone(), None, None)
+                    SqliteBackedHistory::with_file(history_path.clone().into(), None, None)
                         .map(|inner| {
                             let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
                             boxed

@@ -77,7 +77,7 @@ impl Command for History {

                 HistoryFileFormat::PlainText => FileBackedHistory::with_file(
                     history.max_size as usize,
-                    history_path.clone(),
+                    history_path.clone().into(),
                 )
                 .map(|inner| {
                     let boxed: Box<dyn ReedlineHistory> = Box::new(inner);

@@ -156,58 +156,34 @@ fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span)
     //2. Create a record of either short or long columns and values

     let item_id_value = Value::int(
-        match entry.id {
-            Some(id) => {
-                let ids = id.to_string();
-                match ids.parse::<i64>() {
-                    Ok(i) => i,
-                    _ => 0i64,
-                }
-            }
-            None => 0i64,
-        },
+        entry
+            .id
+            .and_then(|id| id.to_string().parse::<i64>().ok())
+            .unwrap_or_default(),
         head,
     );
     let start_timestamp_value = Value::string(
-        match entry.start_timestamp {
-            Some(time) => time.to_string(),
-            None => "".into(),
-        },
+        entry
+            .start_timestamp
+            .map(|time| time.to_string())
+            .unwrap_or_default(),
         head,
     );
     let command_value = Value::string(entry.command_line, head);
     let session_id_value = Value::int(
-        match entry.session_id {
-            Some(sid) => {
-                let sids = sid.to_string();
-                match sids.parse::<i64>() {
-                    Ok(i) => i,
-                    _ => 0i64,
-                }
-            }
-            None => 0i64,
-        },
-        head,
-    );
-    let hostname_value = Value::string(
-        match entry.hostname {
-            Some(host) => host,
-            None => "".into(),
-        },
-        head,
-    );
-    let cwd_value = Value::string(
-        match entry.cwd {
-            Some(cwd) => cwd,
-            None => "".into(),
-        },
+        entry
+            .session_id
+            .and_then(|id| id.to_string().parse::<i64>().ok())
+            .unwrap_or_default(),
         head,
     );
+    let hostname_value = Value::string(entry.hostname.unwrap_or_default(), head);
+    let cwd_value = Value::string(entry.cwd.unwrap_or_default(), head);
     let duration_value = Value::duration(
-        match entry.duration {
-            Some(d) => d.as_nanos().try_into().unwrap_or(0),
-            None => 0,
-        },
+        entry
+            .duration
+            .and_then(|d| d.as_nanos().try_into().ok())
+            .unwrap_or(0),
         head,
     );
     let exit_status_value = Value::int(entry.exit_status.unwrap_or(0), head);

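The hunks above replace nested `match` expressions over optional history fields with `Option` combinator chains. A minimal, self-contained sketch of the same pattern follows; it uses plain Rust and a made-up `id_to_i64` helper rather than nushell's `HistoryItem` or `Value` types.

```rust
// Hypothetical helper mirroring the refactor: stringify the id, try to parse it
// as i64, and fall back to the default (0) on any failure, with no nested match.
fn id_to_i64(id: Option<u64>) -> i64 {
    id.and_then(|id| id.to_string().parse::<i64>().ok())
        .unwrap_or_default()
}

fn main() {
    assert_eq!(id_to_i64(Some(42)), 42);
    assert_eq!(id_to_i64(None), 0); // i64::default()
}
```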
@@ -61,10 +61,12 @@ impl Command for KeybindingsList {
             .map(|option| call.has_flag(engine_state, stack, option))
             .collect::<Result<Vec<_>, ShellError>>()?;

+        let no_option_specified = presence.iter().all(|present| !*present);
+
         let records = all_options
             .iter()
             .zip(presence)
-            .filter(|(_, present)| *present)
+            .filter(|(_, present)| no_option_specified || *present)
             .flat_map(|(option, _)| get_records(option, call.head))
             .collect();

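The change above makes the keybindings listing show every category when the command is run with no flags at all. A standalone sketch of that filtering rule follows; the option names are illustrative and no nushell types are used.

```rust
// Sketch of the new rule: with no flags given, keep every category;
// with flags given, keep only the selected ones.
fn select<'a>(options: &[&'a str], presence: &[bool]) -> Vec<&'a str> {
    let no_option_specified = presence.iter().all(|present| !*present);
    options
        .iter()
        .copied()
        .zip(presence.iter().copied())
        .filter(|(_, present)| no_option_specified || *present)
        .map(|(option, _)| option)
        .collect()
}

fn main() {
    let opts = ["modifiers", "keycodes", "modes"];
    assert_eq!(select(&opts, &[false, false, false]), opts.to_vec()); // no flags: everything
    assert_eq!(select(&opts, &[false, true, false]), vec!["keycodes"]); // one flag: one group
}
```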
@@ -99,10 +99,9 @@ impl CommandCompletion {
                 suggestion: Suggestion {
                     value: String::from_utf8_lossy(&x.0).to_string(),
                     description: x.1,
-                    style: None,
-                    extra: None,
                     span: reedline::Span::new(span.start - offset, span.end - offset),
                     append_whitespace: true,
+                    ..Suggestion::default()
                 },
                 kind: Some(SuggestionKind::Command(x.2)),
             })

@@ -118,11 +117,9 @@ impl CommandCompletion {
             .map(move |x| SemanticSuggestion {
                 suggestion: Suggestion {
                     value: x,
-                    description: None,
-                    style: None,
-                    extra: None,
                     span: reedline::Span::new(span.start - offset, span.end - offset),
                     append_whitespace: true,
+                    ..Suggestion::default()
                 },
                 // TODO: is there a way to create a test?
                 kind: None,

@@ -136,11 +133,9 @@ impl CommandCompletion {
             results.push(SemanticSuggestion {
                 suggestion: Suggestion {
                     value: format!("^{}", external.suggestion.value),
-                    description: None,
-                    style: None,
-                    extra: None,
                     span: external.suggestion.span,
                     append_whitespace: true,
+                    ..Suggestion::default()
                 },
                 kind: external.kind,
             })

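These hunks (and the similar ones in the other completers below) lean on Rust's struct-update syntax to fill the remaining `Suggestion` fields from `Default` instead of spelling out every `None`/`false`. A self-contained sketch with a stand-in struct follows; it is not the real reedline `Suggestion` type.

```rust
// Illustration of the `..Default::default()` pattern used throughout these hunks.
#[derive(Debug, Default, PartialEq)]
struct Suggestion {
    value: String,
    description: Option<String>,
    style: Option<String>,
    extra: Option<Vec<String>>,
    append_whitespace: bool,
}

fn main() {
    // Only the interesting fields are written out; the rest come from Default.
    let s = Suggestion {
        value: "ls".into(),
        append_whitespace: true,
        ..Suggestion::default()
    };
    assert_eq!(s.description, None);
    assert_eq!(s.style, None);
    println!("{s:?}");
}
```

A practical upside of this style is that adding a new optional field to the struct no longer forces an edit at every construction site.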
@@ -443,14 +443,11 @@ pub fn map_value_completions<'a>(
             return Some(SemanticSuggestion {
                 suggestion: Suggestion {
                     value: s,
-                    description: None,
-                    style: None,
-                    extra: None,
                     span: reedline::Span {
                         start: span.start - offset,
                         end: span.end - offset,
                     },
-                    append_whitespace: false,
+                    ..Suggestion::default()
                 },
                 kind: Some(SuggestionKind::Type(x.get_type())),
             });

@@ -460,14 +457,11 @@ pub fn map_value_completions<'a>(
         if let Ok(record) = x.as_record() {
             let mut suggestion = Suggestion {
                 value: String::from(""), // Initialize with empty string
-                description: None,
-                style: None,
-                extra: None,
                 span: reedline::Span {
                     start: span.start - offset,
                     end: span.end - offset,
                 },
-                append_whitespace: false,
+                ..Suggestion::default()
             };

             // Iterate the cols looking for `value` and `description`

@@ -10,11 +10,9 @@ use nu_protocol::{
     levenshtein_distance, Span,
 };
 use nu_utils::get_ls_colors;
-use std::path::{
-    is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR,
-};
+use std::path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP};

-use super::SortBy;
+use super::{MatchAlgorithm, SortBy};

 #[derive(Clone, Default)]
 pub struct PathBuiltFromString {

@@ -22,12 +20,21 @@ pub struct PathBuiltFromString {
     isdir: bool,
 }

-fn complete_rec(
+/// Recursively goes through paths that match a given `partial`.
+/// built: State struct for a valid matching path built so far.
+///
+/// `isdir`: whether the current partial path has a trailing slash.
+/// Parsing a path string into a pathbuf loses that bit of information.
+///
+/// want_directory: Whether we want only directories as completion matches.
+/// Some commands like `cd` can only be run on directories whereas others
+/// like `ls` can be run on regular files as well.
+pub fn complete_rec(
     partial: &[&str],
     built: &PathBuiltFromString,
     cwd: &Path,
     options: &CompletionOptions,
-    dir: bool,
+    want_directory: bool,
     isdir: bool,
 ) -> Vec<PathBuiltFromString> {
     let mut completions = vec![];

@@ -37,7 +44,7 @@ fn complete_rec(
             let mut built = built.clone();
             built.parts.push(base.to_string());
             built.isdir = true;
-            return complete_rec(rest, &built, cwd, options, dir, isdir);
+            return complete_rec(rest, &built, cwd, options, want_directory, isdir);
         }
     }

@@ -58,7 +65,7 @@ fn complete_rec(
         built.parts.push(entry_name.clone());
         built.isdir = entry_isdir;

-        if !dir || entry_isdir {
+        if !want_directory || entry_isdir {
             entries.push((entry_name, built));
         }
     }

@@ -70,12 +77,29 @@ fn complete_rec(
         match partial.split_first() {
             Some((base, rest)) => {
                 if matches(base, &entry_name, options) {
+                    // We use `isdir` to confirm that the current component has
+                    // at least one next component or a slash.
+                    // Serves as confirmation to ignore longer completions for
+                    // components in between.
                     if !rest.is_empty() || isdir {
-                        completions.extend(complete_rec(rest, &built, cwd, options, dir, isdir));
+                        completions.extend(complete_rec(
+                            rest,
+                            &built,
+                            cwd,
+                            options,
+                            want_directory,
+                            isdir,
+                        ));
                     } else {
                         completions.push(built);
                     }
                 }
+                if entry_name.eq(base)
+                    && matches!(options.match_algorithm, MatchAlgorithm::Prefix)
+                    && isdir
+                {
+                    break;
+                }
             }
             None => {
                 completions.push(built);

@@ -93,16 +117,16 @@ enum OriginalCwd {
 }

 impl OriginalCwd {
-    fn apply(&self, mut p: PathBuiltFromString) -> String {
+    fn apply(&self, mut p: PathBuiltFromString, path_separator: char) -> String {
         match self {
             Self::None => {}
             Self::Home => p.parts.insert(0, "~".to_string()),
             Self::Prefix(s) => p.parts.insert(0, s.clone()),
         };

-        let mut ret = p.parts.join(MAIN_SEPARATOR_STR);
+        let mut ret = p.parts.join(&path_separator.to_string());
         if p.isdir {
-            ret.push(SEP);
+            ret.push(path_separator);
         }
         ret
     }

@@ -133,6 +157,14 @@ pub fn complete_item(
 ) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
     let partial = surround_remove(partial);
     let isdir = partial.ends_with(is_separator);
+
+    #[cfg(unix)]
+    let path_separator = SEP;
+    #[cfg(windows)]
+    let path_separator = partial
+        .chars()
+        .rfind(|c: &char| is_separator(*c))
+        .unwrap_or(SEP);
     let cwd_pathbuf = Path::new(cwd).to_path_buf();
     let ls_colors = (engine_state.config.use_ls_colors_completions
         && engine_state.config.use_ansi_coloring)

@@ -170,7 +202,7 @@ pub fn complete_item(
         }
         Some(Component::Normal(home)) if home.to_string_lossy() == "~" => {
             components.next();
-            cwd = home_dir().unwrap_or(cwd_pathbuf);
+            cwd = home_dir().map(Into::into).unwrap_or(cwd_pathbuf);
             prefix_len = 1;
             original_cwd = OriginalCwd::Home;
         }

@@ -195,7 +227,7 @@ pub fn complete_item(
     )
     .into_iter()
     .map(|p| {
-        let path = original_cwd.apply(p);
+        let path = original_cwd.apply(p, path_separator);
         let style = ls_colors.as_ref().map(|lsc| {
             lsc.style_for_path_with_metadata(
                 &path,

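One detail worth calling out from the `complete_item` hunks above: the separator used to render completions is now chosen per platform, and on Windows it reuses whichever separator the user already typed. The sketch below shows only that selection step in isolation; it assumes nothing beyond the standard library, and the surrounding nushell completion code is omitted.

```rust
use std::path::MAIN_SEPARATOR as SEP;

// Pick the separator to use when joining completion path components:
// on Windows, reuse the last separator the user typed (either `\` or `/`);
// on other platforms, use the platform default.
fn completion_separator(partial: &str) -> char {
    #[cfg(unix)]
    let sep = SEP;
    #[cfg(windows)]
    let sep = partial
        .chars()
        .rfind(|c: &char| std::path::is_separator(*c))
        .unwrap_or(SEP);
    let _ = partial; // `partial` is only consulted on Windows
    sep
}

fn main() {
    println!("separator for \"foo/bar\": {}", completion_separator("foo/bar"));
}
```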
@@ -48,14 +48,12 @@ impl Completer for DirectoryCompletion {
             .map(move |x| SemanticSuggestion {
                 suggestion: Suggestion {
                     value: x.1,
-                    description: None,
                     style: x.2,
-                    extra: None,
                     span: reedline::Span {
                         start: x.0.start - offset,
                         end: x.0.end - offset,
                     },
-                    append_whitespace: false,
+                    ..Suggestion::default()
                 },
                 // TODO????
                 kind: None,

@@ -116,14 +116,13 @@ impl Completer for DotNuCompletion {
             .map(move |x| SemanticSuggestion {
                 suggestion: Suggestion {
                     value: x.1,
-                    description: None,
                     style: x.2,
-                    extra: None,
                     span: reedline::Span {
                         start: x.0.start - offset,
                         end: x.0.end - offset,
                     },
                     append_whitespace: true,
+                    ..Suggestion::default()
                 },
                 // TODO????
                 kind: None,

@@ -53,14 +53,12 @@ impl Completer for FileCompletion {
             .map(move |x| SemanticSuggestion {
                 suggestion: Suggestion {
                     value: x.1,
-                    description: None,
                     style: x.2,
-                    extra: None,
                     span: reedline::Span {
                         start: x.0.start - offset,
                         end: x.0.end - offset,
                     },
-                    append_whitespace: false,
+                    ..Suggestion::default()
                 },
                 // TODO????
                 kind: None,

@@ -51,13 +51,12 @@ impl Completer for FlagCompletion {
                 suggestion: Suggestion {
                     value: String::from_utf8_lossy(&named).to_string(),
                     description: Some(flag_desc.to_string()),
-                    style: None,
-                    extra: None,
                     span: reedline::Span {
                         start: span.start - offset,
                         end: span.end - offset,
                     },
                     append_whitespace: true,
+                    ..Suggestion::default()
                 },
                 // TODO????
                 kind: None,

@@ -78,13 +77,12 @@ impl Completer for FlagCompletion {
                 suggestion: Suggestion {
                     value: String::from_utf8_lossy(&named).to_string(),
                     description: Some(flag_desc.to_string()),
-                    style: None,
-                    extra: None,
                     span: reedline::Span {
                         start: span.start - offset,
                         end: span.end - offset,
                     },
                     append_whitespace: true,
+                    ..Suggestion::default()
                 },
                 // TODO????
                 kind: None,

@@ -85,11 +85,8 @@ impl Completer for VariableCompletion {
                 output.push(SemanticSuggestion {
                     suggestion: Suggestion {
                         value: env_var.0,
-                        description: None,
-                        style: None,
-                        extra: None,
                         span: current_span,
-                        append_whitespace: false,
+                        ..Suggestion::default()
                     },
                     kind: Some(SuggestionKind::Type(env_var.1.get_type())),
                 });

@@ -157,11 +154,8 @@ impl Completer for VariableCompletion {
             output.push(SemanticSuggestion {
                 suggestion: Suggestion {
                     value: builtin.to_string(),
-                    description: None,
-                    style: None,
-                    extra: None,
                     span: current_span,
-                    append_whitespace: false,
+                    ..Suggestion::default()
                 },
                 // TODO is there a way to get the VarId to get the type???
                 kind: None,

@@ -184,11 +178,8 @@ impl Completer for VariableCompletion {
                 output.push(SemanticSuggestion {
                     suggestion: Suggestion {
                         value: String::from_utf8_lossy(v.0).to_string(),
-                        description: None,
-                        style: None,
-                        extra: None,
                         span: current_span,
-                        append_whitespace: false,
+                        ..Suggestion::default()
                     },
                     kind: Some(SuggestionKind::Type(
                         working_set.get_variable(*v.1).ty.clone(),

@@ -215,11 +206,8 @@ impl Completer for VariableCompletion {
                 output.push(SemanticSuggestion {
                     suggestion: Suggestion {
                         value: String::from_utf8_lossy(v.0).to_string(),
-                        description: None,
-                        style: None,
-                        extra: None,
                         span: current_span,
-                        append_whitespace: false,
+                        ..Suggestion::default()
                     },
                     kind: Some(SuggestionKind::Type(
                         working_set.get_variable(*v.1).ty.clone(),

@@ -255,11 +243,8 @@ fn nested_suggestions(
             output.push(SemanticSuggestion {
                 suggestion: Suggestion {
                     value: col.clone(),
-                    description: None,
-                    style: None,
-                    extra: None,
                     span: current_span,
-                    append_whitespace: false,
+                    ..Suggestion::default()
                 },
                 kind: Some(kind.clone()),
             });

@@ -272,11 +257,8 @@ fn nested_suggestions(
             output.push(SemanticSuggestion {
                 suggestion: Suggestion {
                     value: column_name,
-                    description: None,
-                    style: None,
-                    extra: None,
                     span: current_span,
-                    append_whitespace: false,
+                    ..Suggestion::default()
                 },
                 kind: Some(kind.clone()),
             });

@@ -192,7 +192,8 @@ pub fn add_plugin_file(
     } else if let Some(mut plugin_path) = nu_path::config_dir() {
         // Path to store plugins signatures
         plugin_path.push(storage_path);
-        let mut plugin_path = canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path);
+        let mut plugin_path =
+            canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path.into());
         plugin_path.push(PLUGIN_FILE);
         let plugin_path = canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path);
         engine_state.plugin_path = Some(plugin_path);

@@ -247,7 +248,7 @@ pub(crate) fn get_history_path(storage_path: &str, mode: HistoryFileFormat) -> O
             HistoryFileFormat::PlainText => HISTORY_FILE_TXT,
             HistoryFileFormat::Sqlite => HISTORY_FILE_SQLITE,
         });
-        history_path
+        history_path.into()
     })
 }

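The `add_plugin_file` hunk keeps the existing canonicalize-or-fall-back behaviour while converting between path types, which is why only an `.into()` is added. For reference, a plain standard-library sketch of the fallback idea, without nushell's `canonicalize_with` helper or its own path types:

```rust
use std::path::PathBuf;

// Keep the canonical form when the path can be resolved; otherwise keep the
// path as given (for example when the directory does not exist yet).
fn canonical_or_original(path: PathBuf) -> PathBuf {
    path.canonicalize().unwrap_or(path)
}

fn main() {
    println!("{}", canonical_or_original(PathBuf::from("./does-not-exist")).display());
}
```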
@@ -110,13 +110,12 @@ impl NuHelpCompleter {
                 Suggestion {
                     value: decl.name().into(),
                     description: Some(long_desc),
-                    style: None,
                     extra: Some(extra),
                     span: reedline::Span {
                         start: pos - line.len(),
                         end: pos,
                     },
-                    append_whitespace: false,
+                    ..Suggestion::default()
                 }
             })
             .collect()

@@ -142,10 +142,9 @@ fn convert_to_suggestions(
             vec![Suggestion {
                 value: text,
                 description,
-                style: None,
                 extra,
                 span,
-                append_whitespace: false,
+                ..Suggestion::default()
             }]
         }
         Value::List { vals, .. } => vals

@@ -154,9 +153,6 @@ fn convert_to_suggestions(
             .collect(),
         _ => vec![Suggestion {
             value: format!("Not a record: {value:?}"),
-            description: None,
-            style: None,
-            extra: None,
             span: reedline::Span {
                 start: if only_buffer_difference {
                     pos - line.len()

@@ -169,7 +165,7 @@ fn convert_to_suggestions(
                     line.len()
                 },
             },
-            append_whitespace: false,
+            ..Suggestion::default()
         }],
     }
 }

@ -193,6 +193,29 @@ fn get_style(record: &Record, name: &str, span: Span) -> Option<Style> {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn set_menu_style<M: MenuBuilder>(mut menu: M, style: &Value) -> M {
|
||||||
|
let span = style.span();
|
||||||
|
let Value::Record { val, .. } = &style else {
|
||||||
|
return menu;
|
||||||
|
};
|
||||||
|
if let Some(style) = get_style(val, "text", span) {
|
||||||
|
menu = menu.with_text_style(style);
|
||||||
|
}
|
||||||
|
if let Some(style) = get_style(val, "selected_text", span) {
|
||||||
|
menu = menu.with_selected_text_style(style);
|
||||||
|
}
|
||||||
|
if let Some(style) = get_style(val, "description_text", span) {
|
||||||
|
menu = menu.with_description_text_style(style);
|
||||||
|
}
|
||||||
|
if let Some(style) = get_style(val, "match_text", span) {
|
||||||
|
menu = menu.with_match_text_style(style);
|
||||||
|
}
|
||||||
|
if let Some(style) = get_style(val, "selected_match_text", span) {
|
||||||
|
menu = menu.with_selected_match_text_style(style);
|
||||||
|
}
|
||||||
|
menu
|
||||||
|
}
|
||||||
|
|
||||||
// Adds a columnar menu to the editor engine
|
// Adds a columnar menu to the editor engine
|
||||||
pub(crate) fn add_columnar_menu(
|
pub(crate) fn add_columnar_menu(
|
||||||
line_editor: Reedline,
|
line_editor: Reedline,
|
||||||
|
@ -231,24 +254,7 @@ pub(crate) fn add_columnar_menu(
|
||||||
 };
 }

-let span = menu.style.span();
-if let Value::Record { val, .. } = &menu.style {
-if let Some(style) = get_style(val, "text", span) {
-columnar_menu = columnar_menu.with_text_style(style);
-}
-if let Some(style) = get_style(val, "selected_text", span) {
-columnar_menu = columnar_menu.with_selected_text_style(style);
-}
-if let Some(style) = get_style(val, "description_text", span) {
-columnar_menu = columnar_menu.with_description_text_style(style);
-}
-if let Some(style) = get_style(val, "match_text", span) {
-columnar_menu = columnar_menu.with_match_text_style(style);
-}
-if let Some(style) = get_style(val, "selected_match_text", span) {
-columnar_menu = columnar_menu.with_selected_match_text_style(style);
-}
-}
+columnar_menu = set_menu_style(columnar_menu, &menu.style);

 let marker = menu.marker.to_expanded_string("", config);
 columnar_menu = columnar_menu.with_marker(&marker);
@@ -304,18 +310,7 @@ pub(crate) fn add_list_menu(
 };
 }

-let span = menu.style.span();
-if let Value::Record { val, .. } = &menu.style {
-if let Some(style) = get_style(val, "text", span) {
-list_menu = list_menu.with_text_style(style);
-}
-if let Some(style) = get_style(val, "selected_text", span) {
-list_menu = list_menu.with_selected_text_style(style);
-}
-if let Some(style) = get_style(val, "description_text", span) {
-list_menu = list_menu.with_description_text_style(style);
-}
-}
+list_menu = set_menu_style(list_menu, &menu.style);

 let marker = menu.marker.to_expanded_string("", &config);
 list_menu = list_menu.with_marker(&marker);
@@ -496,24 +491,7 @@ pub(crate) fn add_ide_menu(
 };
 }

-let span = menu.style.span();
-if let Value::Record { val, .. } = &menu.style {
-if let Some(style) = get_style(val, "text", span) {
-ide_menu = ide_menu.with_text_style(style);
-}
-if let Some(style) = get_style(val, "selected_text", span) {
-ide_menu = ide_menu.with_selected_text_style(style);
-}
-if let Some(style) = get_style(val, "description_text", span) {
-ide_menu = ide_menu.with_description_text_style(style);
-}
-if let Some(style) = get_style(val, "match_text", span) {
-ide_menu = ide_menu.with_match_text_style(style);
-}
-if let Some(style) = get_style(val, "selected_match_text", span) {
-ide_menu = ide_menu.with_selected_match_text_style(style);
-}
-}
+ide_menu = set_menu_style(ide_menu, &menu.style);

 let marker = menu.marker.to_expanded_string("", &config);
 ide_menu = ide_menu.with_marker(&marker);
@@ -601,18 +579,7 @@ pub(crate) fn add_description_menu(
 };
 }

-let span = menu.style.span();
-if let Value::Record { val, .. } = &menu.style {
-if let Some(style) = get_style(val, "text", span) {
-description_menu = description_menu.with_text_style(style);
-}
-if let Some(style) = get_style(val, "selected_text", span) {
-description_menu = description_menu.with_selected_text_style(style);
-}
-if let Some(style) = get_style(val, "description_text", span) {
-description_menu = description_menu.with_description_text_style(style);
-}
-}
+description_menu = set_menu_style(description_menu, &menu.style);

 let marker = menu.marker.to_expanded_string("", &config);
 description_menu = description_menu.with_marker(&marker);
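The four hunks above replace five copies of the same styling block with one generic set_menu_style helper. A minimal, self-contained sketch of that shape, using a hypothetical Styled trait and toy menu types rather than reedline's MenuBuilder or nu_protocol::Value:

use std::collections::HashMap;

// Hypothetical stand-in for the builder trait: any menu that can take a text style.
trait Styled: Sized {
    fn with_text_style(self, style: String) -> Self;
}

struct Columnar { text: Option<String> }
struct List { text: Option<String> }

impl Styled for Columnar {
    fn with_text_style(mut self, style: String) -> Self {
        self.text = Some(style);
        self
    }
}

impl Styled for List {
    fn with_text_style(mut self, style: String) -> Self {
        self.text = Some(style);
        self
    }
}

// One generic helper serves every menu type, so each call site shrinks to a single line.
fn apply_style<M: Styled>(mut menu: M, record: &HashMap<String, String>) -> M {
    if let Some(style) = record.get("text") {
        menu = menu.with_text_style(style.clone());
    }
    menu
}

fn main() {
    let styles = HashMap::from([("text".to_string(), "green".to_string())]);
    let columnar = apply_style(Columnar { text: None }, &styles);
    let list = apply_style(List { text: None }, &styles);
    println!("{:?} {:?}", columnar.text, list.text);
}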
@@ -1337,20 +1337,26 @@ fn are_session_ids_in_sync() {
 #[cfg(test)]
 mod test_auto_cd {
 use super::{do_auto_cd, parse_operation, ReplOperation};
+use nu_path::AbsolutePath;
 use nu_protocol::engine::{EngineState, Stack};
-use std::path::Path;
 use tempfile::tempdir;

 /// Create a symlink. Works on both Unix and Windows.
 #[cfg(any(unix, windows))]
-fn symlink(original: impl AsRef<Path>, link: impl AsRef<Path>) -> std::io::Result<()> {
+fn symlink(
+original: impl AsRef<AbsolutePath>,
+link: impl AsRef<AbsolutePath>,
+) -> std::io::Result<()> {
+let original = original.as_ref();
+let link = link.as_ref();
+
 #[cfg(unix)]
 {
 std::os::unix::fs::symlink(original, link)
 }
 #[cfg(windows)]
 {
-if original.as_ref().is_dir() {
+if original.is_dir() {
 std::os::windows::fs::symlink_dir(original, link)
 } else {
 std::os::windows::fs::symlink_file(original, link)
@@ -1362,11 +1368,11 @@ mod test_auto_cd {
 /// `before`, and after `input` is parsed and evaluated, PWD should be
 /// changed to `after`.
 #[track_caller]
-fn check(before: impl AsRef<Path>, input: &str, after: impl AsRef<Path>) {
+fn check(before: impl AsRef<AbsolutePath>, input: &str, after: impl AsRef<AbsolutePath>) {
 // Setup EngineState and Stack.
 let mut engine_state = EngineState::new();
 let mut stack = Stack::new();
-stack.set_cwd(before).unwrap();
+stack.set_cwd(before.as_ref()).unwrap();

 // Parse the input. It must be an auto-cd operation.
 let op = parse_operation(input.to_string(), &engine_state, &stack).unwrap();
@@ -1382,54 +1388,66 @@ mod test_auto_cd {
 // don't have to be byte-wise equal (on Windows, the 8.3 filename
 // conversion messes things up),
 let updated_cwd = std::fs::canonicalize(updated_cwd).unwrap();
-let after = std::fs::canonicalize(after).unwrap();
+let after = std::fs::canonicalize(after.as_ref()).unwrap();
 assert_eq!(updated_cwd, after);
 }

 #[test]
 fn auto_cd_root() {
 let tempdir = tempdir().unwrap();
-let root = if cfg!(windows) { r"C:\" } else { "/" };
-check(&tempdir, root, root);
+let tempdir = AbsolutePath::try_new(tempdir.path()).unwrap();
+
+let input = if cfg!(windows) { r"C:\" } else { "/" };
+let root = AbsolutePath::try_new(input).unwrap();
+check(tempdir, input, root);
 }

 #[test]
 fn auto_cd_tilde() {
 let tempdir = tempdir().unwrap();
+let tempdir = AbsolutePath::try_new(tempdir.path()).unwrap();
+
 let home = nu_path::home_dir().unwrap();
-check(&tempdir, "~", home);
+check(tempdir, "~", home);
 }

 #[test]
 fn auto_cd_dot() {
 let tempdir = tempdir().unwrap();
-check(&tempdir, ".", &tempdir);
+let tempdir = AbsolutePath::try_new(tempdir.path()).unwrap();
+
+check(tempdir, ".", tempdir);
 }

 #[test]
 fn auto_cd_double_dot() {
 let tempdir = tempdir().unwrap();
-let dir = tempdir.path().join("foo");
+let tempdir = AbsolutePath::try_new(tempdir.path()).unwrap();
+
+let dir = tempdir.join("foo");
 std::fs::create_dir_all(&dir).unwrap();
-check(dir, "..", &tempdir);
+check(dir, "..", tempdir);
 }

 #[test]
 fn auto_cd_triple_dot() {
 let tempdir = tempdir().unwrap();
-let dir = tempdir.path().join("foo").join("bar");
+let tempdir = AbsolutePath::try_new(tempdir.path()).unwrap();
+
+let dir = tempdir.join("foo").join("bar");
 std::fs::create_dir_all(&dir).unwrap();
-check(dir, "...", &tempdir);
+check(dir, "...", tempdir);
 }

 #[test]
 fn auto_cd_relative() {
 let tempdir = tempdir().unwrap();
-let foo = tempdir.path().join("foo");
-let bar = tempdir.path().join("bar");
+let tempdir = AbsolutePath::try_new(tempdir.path()).unwrap();
+
+let foo = tempdir.join("foo");
+let bar = tempdir.join("bar");
 std::fs::create_dir_all(&foo).unwrap();
 std::fs::create_dir_all(&bar).unwrap();

 let input = if cfg!(windows) { r"..\bar" } else { "../bar" };
 check(foo, input, bar);
 }
@@ -1437,32 +1455,35 @@ mod test_auto_cd {
 #[test]
 fn auto_cd_trailing_slash() {
 let tempdir = tempdir().unwrap();
-let dir = tempdir.path().join("foo");
-std::fs::create_dir_all(&dir).unwrap();
+let tempdir = AbsolutePath::try_new(tempdir.path()).unwrap();
+
+let dir = tempdir.join("foo");
+std::fs::create_dir_all(&dir).unwrap();
 let input = if cfg!(windows) { r"foo\" } else { "foo/" };
-check(&tempdir, input, dir);
+check(tempdir, input, dir);
 }

 #[test]
 fn auto_cd_symlink() {
 let tempdir = tempdir().unwrap();
-let dir = tempdir.path().join("foo");
-std::fs::create_dir_all(&dir).unwrap();
-let link = tempdir.path().join("link");
-symlink(&dir, &link).unwrap();
+let tempdir = AbsolutePath::try_new(tempdir.path()).unwrap();
+
+let dir = tempdir.join("foo");
+std::fs::create_dir_all(&dir).unwrap();
+let link = tempdir.join("link");
+symlink(&dir, &link).unwrap();
 let input = if cfg!(windows) { r".\link" } else { "./link" };
-check(&tempdir, input, link);
+check(tempdir, input, link);
 }

 #[test]
 #[should_panic(expected = "was not parsed into an auto-cd operation")]
 fn auto_cd_nonexistent_directory() {
 let tempdir = tempdir().unwrap();
-let dir = tempdir.path().join("foo");
+let tempdir = AbsolutePath::try_new(tempdir.path()).unwrap();
+
+let dir = tempdir.join("foo");
 let input = if cfg!(windows) { r"foo\" } else { "foo/" };
-check(&tempdir, input, dir);
+check(tempdir, input, dir);
 }
 }
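The reworked auto-cd tests compare directories only after canonicalizing both sides, so symlinks and Windows 8.3 short names do not cause spurious failures. A rough std-only sketch of that comparison idiom (plain std::path::Path instead of nu_path::AbsolutePath, which is an assumption here):

use std::path::Path;

// Canonicalize both sides before comparing, mirroring `check` above: a symlinked
// temp dir and its target then compare as the same directory.
fn assert_same_dir(updated_cwd: impl AsRef<Path>, expected: impl AsRef<Path>) {
    let updated = std::fs::canonicalize(updated_cwd).expect("updated cwd should exist");
    let expected = std::fs::canonicalize(expected).expect("expected dir should exist");
    assert_eq!(updated, expected);
}

fn main() {
    let tmp = std::env::temp_dir();
    assert_same_dir(&tmp, &tmp);
}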
@@ -321,16 +321,10 @@ mod test {

 let env = engine_state.render_env_vars();

-assert!(
-matches!(env.get(&"FOO".to_string()), Some(&Value::String { val, .. }) if val == "foo")
-);
-assert!(
-matches!(env.get(&"SYMBOLS".to_string()), Some(&Value::String { val, .. }) if val == symbols)
-);
-assert!(
-matches!(env.get(&symbols.to_string()), Some(&Value::String { val, .. }) if val == "symbols")
-);
-assert!(env.get(&"PWD".to_string()).is_some());
+assert!(matches!(env.get("FOO"), Some(&Value::String { val, .. }) if val == "foo"));
+assert!(matches!(env.get("SYMBOLS"), Some(&Value::String { val, .. }) if val == symbols));
+assert!(matches!(env.get(symbols), Some(&Value::String { val, .. }) if val == "symbols"));
+assert!(env.contains_key("PWD"));
 assert_eq!(env.len(), 4);
 }
 }
7 crates/nu-cli/tests/commands/keybindings_list.rs Normal file
@@ -0,0 +1,7 @@
+use nu_test_support::nu;
+
+#[test]
+fn not_empty() {
+let result = nu!("keybindings list | is-not-empty");
+assert_eq!(result.out, "true");
+}
@@ -1 +1,2 @@
+mod keybindings_list;
 mod nu_highlight;
@@ -32,7 +32,6 @@ fn completer() -> NuCompleter {
 fn completer_strings() -> NuCompleter {
 // Create a new engine
 let (dir, _, mut engine, mut stack) = new_engine();

 // Add record value as example
 let record = r#"def animals [] { ["cat", "dog", "eel" ] }
 def my-command [animal: string@animals] { print $animal }"#;
@@ -123,28 +122,28 @@ fn variables_double_dash_argument_with_flagcompletion(mut completer: NuCompleter
 let suggestions = completer.complete("tst --", 6);
 let expected: Vec<String> = vec!["--help".into(), "--mod".into()];
 // dbg!(&expected, &suggestions);
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn variables_single_dash_argument_with_flagcompletion(mut completer: NuCompleter) {
 let suggestions = completer.complete("tst -", 5);
 let expected: Vec<String> = vec!["--help".into(), "--mod".into(), "-h".into(), "-s".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn variables_command_with_commandcompletion(mut completer_strings: NuCompleter) {
 let suggestions = completer_strings.complete("my-c ", 4);
 let expected: Vec<String> = vec!["my-command".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn variables_subcommands_with_customcompletion(mut completer_strings: NuCompleter) {
 let suggestions = completer_strings.complete("my-command ", 11);
 let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
@@ -153,7 +152,7 @@ fn variables_customcompletion_subcommands_with_customcompletion_2(
 ) {
 let suggestions = completer_strings.complete("my-command ", 11);
 let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[test]
@@ -182,19 +181,19 @@ fn dotnu_completions() {
 let completion_str = "source-env ".to_string();
 let suggestions = completer.complete(&completion_str, completion_str.len());

-match_suggestions(expected.clone(), suggestions);
+match_suggestions(&expected, &suggestions);

 // Test use completion
 let completion_str = "use ".to_string();
 let suggestions = completer.complete(&completion_str, completion_str.len());

-match_suggestions(expected.clone(), suggestions);
+match_suggestions(&expected, &suggestions);

 // Test overlay use completion
 let completion_str = "overlay use ".to_string();
 let suggestions = completer.complete(&completion_str, completion_str.len());

-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[test]
@@ -258,8 +257,22 @@ fn file_completions() {
 folder(dir.join(".hidden_folder")),
 ];

+#[cfg(windows)]
+{
+let separator = '/';
+let target_dir = format!("cp {dir_str}{separator}");
+let slash_suggestions = completer.complete(&target_dir, target_dir.len());
+
+let expected_slash_paths: Vec<String> = expected_paths
+.iter()
+.map(|s| s.replace('\\', "/"))
+.collect();
+
+match_suggestions(&expected_slash_paths, &slash_suggestions);
+}
+
 // Match the results
-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);

 // Test completions for a file
 let target_dir = format!("cp {}", folder(dir.join("another")));
@@ -269,17 +282,91 @@ fn file_completions() {
 let expected_paths: Vec<String> = vec![file(dir.join("another").join("newfile"))];

 // Match the results
-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);

 // Test completions for hidden files
-let target_dir = format!("ls {}/.", folder(dir.join(".hidden_folder")));
+let target_dir = format!("ls {}{MAIN_SEPARATOR}.", folder(dir.join(".hidden_folder")));
 let suggestions = completer.complete(&target_dir, target_dir.len());

 let expected_paths: Vec<String> =
 vec![file(dir.join(".hidden_folder").join(".hidden_subfile"))];

+#[cfg(windows)]
+{
+let target_dir = format!("ls {}/.", folder(dir.join(".hidden_folder")));
+let slash_suggestions = completer.complete(&target_dir, target_dir.len());
+
+let expected_slash: Vec<String> = expected_paths
+.iter()
+.map(|s| s.replace('\\', "/"))
+.collect();
+
+match_suggestions(&expected_slash, &slash_suggestions);
+}
+
 // Match the results
-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);
+}
+
+#[cfg(windows)]
+#[test]
+fn file_completions_with_mixed_separators() {
+// Create a new engine
+let (dir, dir_str, engine, stack) = new_dotnu_engine();
+
+// Instantiate a new completer
+let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
+
+// Create Expected values
+let expected_paths: Vec<String> = vec![
+file(dir.join("lib-dir1").join("bar.nu")),
+file(dir.join("lib-dir1").join("baz.nu")),
+file(dir.join("lib-dir1").join("xyzzy.nu")),
+];
+let expecetd_slash_paths: Vec<String> = expected_paths
+.iter()
+.map(|s| s.replace(MAIN_SEPARATOR, "/"))
+.collect();
+
+let target_dir = format!("ls {dir_str}/lib-dir1/");
+let suggestions = completer.complete(&target_dir, target_dir.len());
+
+match_suggestions(&expecetd_slash_paths, &suggestions);
+
+let target_dir = format!("cp {dir_str}\\lib-dir1/");
+let suggestions = completer.complete(&target_dir, target_dir.len());
+
+match_suggestions(&expecetd_slash_paths, &suggestions);
+
+let target_dir = format!("ls {dir_str}/lib-dir1\\/");
+let suggestions = completer.complete(&target_dir, target_dir.len());
+
+match_suggestions(&expecetd_slash_paths, &suggestions);
+
+let target_dir = format!("ls {dir_str}\\lib-dir1\\/");
+let suggestions = completer.complete(&target_dir, target_dir.len());
+
+match_suggestions(&expecetd_slash_paths, &suggestions);
+
+let target_dir = format!("ls {dir_str}\\lib-dir1\\");
+let suggestions = completer.complete(&target_dir, target_dir.len());
+
+match_suggestions(&expected_paths, &suggestions);
+
+let target_dir = format!("ls {dir_str}/lib-dir1\\");
+let suggestions = completer.complete(&target_dir, target_dir.len());
+
+match_suggestions(&expected_paths, &suggestions);
+
+let target_dir = format!("ls {dir_str}/lib-dir1/\\");
+let suggestions = completer.complete(&target_dir, target_dir.len());
+
+match_suggestions(&expected_paths, &suggestions);
+
+let target_dir = format!("ls {dir_str}\\lib-dir1/\\");
+let suggestions = completer.complete(&target_dir, target_dir.len());
+
+match_suggestions(&expected_paths, &suggestions);
 }

 #[test]
@@ -303,7 +390,7 @@ fn partial_completions() {
 ];

 // Match the results
-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);

 // Test completions for the files whose name begin with "h"
 // and are present under directories whose names begin with "pa"
@@ -324,7 +411,7 @@ fn partial_completions() {
 ];

 // Match the results
-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);

 // Test completion for all files under directories whose names begin with "pa"
 let dir_str = folder(dir.join("pa"));
@@ -345,7 +432,7 @@ fn partial_completions() {
 ];

 // Match the results
-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);

 // Test completion for a single file
 let dir_str = file(dir.join("fi").join("so"));
@@ -356,7 +443,7 @@ fn partial_completions() {
 let expected_paths: Vec<String> = vec![file(dir.join("final_partial").join("somefile"))];

 // Match the results
-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);

 // Test completion where there is a sneaky `..` in the path
 let dir_str = file(dir.join("par").join("..").join("fi").join("so"));
@@ -392,7 +479,7 @@ fn partial_completions() {
 ];

 // Match the results
-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);

 // Test completion for all files under directories whose names begin with "pa"
 let file_str = file(dir.join("partial-a").join("have"));
@@ -406,7 +493,7 @@ fn partial_completions() {
 ];

 // Match the results
-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);

 // Test completion for all files under directories whose names begin with "pa"
 let file_str = file(dir.join("partial-a").join("have_ext."));
@@ -420,7 +507,7 @@ fn partial_completions() {
 ];

 // Match the results
-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);
 }

 #[test]
@@ -455,15 +542,16 @@ fn command_ls_with_filecompletion() {
 ".hidden_folder/".to_string(),
 ];

-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);

 let target_dir = "ls custom_completion.";
 let suggestions = completer.complete(target_dir, target_dir.len());

 let expected_paths: Vec<String> = vec!["custom_completion.nu".to_string()];

-match_suggestions(expected_paths, suggestions)
+match_suggestions(&expected_paths, &suggestions);
 }

 #[test]
 fn command_open_with_filecompletion() {
 let (_, _, engine, stack) = new_engine();
@@ -496,14 +584,14 @@ fn command_open_with_filecompletion() {
 ".hidden_folder/".to_string(),
 ];

-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);

 let target_dir = "open custom_completion.";
 let suggestions = completer.complete(target_dir, target_dir.len());

 let expected_paths: Vec<String> = vec!["custom_completion.nu".to_string()];

-match_suggestions(expected_paths, suggestions)
+match_suggestions(&expected_paths, &suggestions);
 }

 #[test]
@@ -538,7 +626,7 @@ fn command_rm_with_globcompletion() {
 ".hidden_folder/".to_string(),
 ];

-match_suggestions(expected_paths, suggestions)
+match_suggestions(&expected_paths, &suggestions)
 }

 #[test]
@@ -573,7 +661,7 @@ fn command_cp_with_globcompletion() {
 ".hidden_folder/".to_string(),
 ];

-match_suggestions(expected_paths, suggestions)
+match_suggestions(&expected_paths, &suggestions)
 }

 #[test]
@@ -608,7 +696,7 @@ fn command_save_with_filecompletion() {
 ".hidden_folder/".to_string(),
 ];

-match_suggestions(expected_paths, suggestions)
+match_suggestions(&expected_paths, &suggestions)
 }

 #[test]
@@ -643,7 +731,7 @@ fn command_touch_with_filecompletion() {
 ".hidden_folder/".to_string(),
 ];

-match_suggestions(expected_paths, suggestions)
+match_suggestions(&expected_paths, &suggestions)
 }

 #[test]
@@ -678,7 +766,7 @@ fn command_watch_with_filecompletion() {
 ".hidden_folder/".to_string(),
 ];

-match_suggestions(expected_paths, suggestions)
+match_suggestions(&expected_paths, &suggestions)
 }

 #[rstest]
@@ -686,19 +774,19 @@ fn subcommand_completions(mut subcommand_completer: NuCompleter) {
 let prefix = "foo br";
 let suggestions = subcommand_completer.complete(prefix, prefix.len());
 match_suggestions(
-vec!["foo bar".to_string(), "foo aabrr".to_string()],
-suggestions,
+&vec!["foo bar".to_string(), "foo aabrr".to_string()],
+&suggestions,
 );

 let prefix = "foo b";
 let suggestions = subcommand_completer.complete(prefix, prefix.len());
 match_suggestions(
-vec![
+&vec![
 "foo bar".to_string(),
 "foo abaz".to_string(),
 "foo aabrr".to_string(),
 ],
-suggestions,
+&suggestions,
 );
 }

@@ -724,7 +812,7 @@ fn file_completion_quoted() {
 format!("`{}`", folder("test dir")),
 ];

-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);

 let dir: PathBuf = "test dir".into();
 let target_dir = format!("open '{}'", folder(dir.clone()));
@@ -735,7 +823,7 @@ fn file_completion_quoted() {
 format!("`{}`", file(dir.join("single quote"))),
 ];

-match_suggestions(expected_paths, suggestions)
+match_suggestions(&expected_paths, &suggestions)
 }

 #[test]
@@ -770,7 +858,7 @@ fn flag_completions() {
 ];

 // Match results
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[test]
@@ -794,8 +882,21 @@ fn folder_with_directorycompletions() {
 folder(dir.join(".hidden_folder")),
 ];

+#[cfg(windows)]
+{
+let target_dir = format!("cd {dir_str}/");
+let slash_suggestions = completer.complete(&target_dir, target_dir.len());
+
+let expected_slash_paths: Vec<String> = expected_paths
+.iter()
+.map(|s| s.replace('\\', "/"))
+.collect();
+
+match_suggestions(&expected_slash_paths, &slash_suggestions);
+}
+
 // Match the results
-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);
 }

 #[test]
@@ -837,7 +938,7 @@ fn variables_completions() {
 ];

 // Match results
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);

 // Test completions for $nu.h (filter)
 let suggestions = completer.complete("$nu.h", 5);
@@ -851,7 +952,7 @@ fn variables_completions() {
 ];

 // Match results
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);

 // Test completions for $nu.os-info
 let suggestions = completer.complete("$nu.os-info.", 12);
@@ -863,7 +964,7 @@ fn variables_completions() {
 "name".into(),
 ];
 // Match results
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);

 // Test completions for custom var
 let suggestions = completer.complete("$actor.", 7);
@@ -873,7 +974,7 @@ fn variables_completions() {
 let expected: Vec<String> = vec!["age".into(), "name".into()];

 // Match results
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);

 // Test completions for custom var (filtering)
 let suggestions = completer.complete("$actor.n", 8);
@@ -883,7 +984,7 @@ fn variables_completions() {
 let expected: Vec<String> = vec!["name".into()];

 // Match results
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);

 // Test completions for $env
 let suggestions = completer.complete("$env.", 5);
@@ -896,7 +997,7 @@ fn variables_completions() {
 let expected: Vec<String> = vec!["PATH".into(), "PWD".into(), "TEST".into()];

 // Match results
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);

 // Test completions for $env
 let suggestions = completer.complete("$env.T", 6);
@@ -906,12 +1007,12 @@ fn variables_completions() {
 let expected: Vec<String> = vec!["TEST".into()];

 // Match results
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);

 let suggestions = completer.complete("$", 1);
 let expected: Vec<String> = vec!["$actor".into(), "$env".into(), "$in".into(), "$nu".into()];

-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[test]
@@ -930,7 +1031,7 @@ fn alias_of_command_and_flags() {
 #[cfg(not(windows))]
 let expected_paths: Vec<String> = vec!["test_a/".to_string(), "test_b/".to_string()];

-match_suggestions(expected_paths, suggestions)
+match_suggestions(&expected_paths, &suggestions)
 }

 #[test]
@@ -949,7 +1050,7 @@ fn alias_of_basic_command() {
 #[cfg(not(windows))]
 let expected_paths: Vec<String> = vec!["test_a/".to_string(), "test_b/".to_string()];

-match_suggestions(expected_paths, suggestions)
+match_suggestions(&expected_paths, &suggestions)
 }

 #[test]
@@ -971,7 +1072,7 @@ fn alias_of_another_alias() {
 #[cfg(not(windows))]
 let expected_paths: Vec<String> = vec!["test_a/".to_string(), "test_b/".to_string()];

-match_suggestions(expected_paths, suggestions)
+match_suggestions(&expected_paths, &suggestions)
 }

 fn run_external_completion(completer: &str, input: &str) -> Vec<Suggestion> {
@@ -1034,35 +1135,35 @@ fn unknown_command_completion() {
 ".hidden_folder/".to_string(),
 ];

-match_suggestions(expected_paths, suggestions)
+match_suggestions(&expected_paths, &suggestions)
 }

 #[rstest]
 fn flagcompletion_triggers_after_cursor(mut completer: NuCompleter) {
 let suggestions = completer.complete("tst -h", 5);
 let expected: Vec<String> = vec!["--help".into(), "--mod".into(), "-h".into(), "-s".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn customcompletion_triggers_after_cursor(mut completer_strings: NuCompleter) {
 let suggestions = completer_strings.complete("my-command c", 11);
 let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn customcompletion_triggers_after_cursor_piped(mut completer_strings: NuCompleter) {
 let suggestions = completer_strings.complete("my-command c | ls", 11);
 let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn flagcompletion_triggers_after_cursor_piped(mut completer: NuCompleter) {
 let suggestions = completer.complete("tst -h | ls", 5);
 let expected: Vec<String> = vec!["--help".into(), "--mod".into(), "-h".into(), "-s".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[test]
@@ -1096,77 +1197,77 @@ fn filecompletions_triggers_after_cursor() {
 ".hidden_folder/".to_string(),
 ];

-match_suggestions(expected_paths, suggestions);
+match_suggestions(&expected_paths, &suggestions);
 }

 #[rstest]
 fn extern_custom_completion_positional(mut extern_completer: NuCompleter) {
 let suggestions = extern_completer.complete("spam ", 5);
 let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn extern_custom_completion_long_flag_1(mut extern_completer: NuCompleter) {
 let suggestions = extern_completer.complete("spam --foo=", 11);
 let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn extern_custom_completion_long_flag_2(mut extern_completer: NuCompleter) {
 let suggestions = extern_completer.complete("spam --foo ", 11);
 let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn extern_custom_completion_long_flag_short(mut extern_completer: NuCompleter) {
 let suggestions = extern_completer.complete("spam -f ", 8);
 let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn extern_custom_completion_short_flag(mut extern_completer: NuCompleter) {
 let suggestions = extern_completer.complete("spam -b ", 8);
 let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn extern_complete_flags(mut extern_completer: NuCompleter) {
 let suggestions = extern_completer.complete("spam -", 6);
 let expected: Vec<String> = vec!["--foo".into(), "-b".into(), "-f".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn custom_completer_triggers_cursor_before_word(mut custom_completer: NuCompleter) {
 let suggestions = custom_completer.complete("cmd foo bar", 8);
 let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn custom_completer_triggers_cursor_on_word_left_boundary(mut custom_completer: NuCompleter) {
 let suggestions = custom_completer.complete("cmd foo bar", 8);
 let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn custom_completer_triggers_cursor_next_to_word(mut custom_completer: NuCompleter) {
 let suggestions = custom_completer.complete("cmd foo bar", 11);
 let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "bar".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[rstest]
 fn custom_completer_triggers_cursor_after_word(mut custom_completer: NuCompleter) {
 let suggestions = custom_completer.complete("cmd foo bar ", 12);
 let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "bar".into(), "".into()];
-match_suggestions(expected, suggestions);
+match_suggestions(&expected, &suggestions);
 }

 #[ignore = "was reverted, still needs fixing"]
@@ -186,7 +186,7 @@ pub fn new_partial_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
 }

 // match a list of suggestions with the expected values
-pub fn match_suggestions(expected: Vec<String>, suggestions: Vec<Suggestion>) {
+pub fn match_suggestions(expected: &Vec<String>, suggestions: &Vec<Suggestion>) {
 let expected_len = expected.len();
 let suggestions_len = suggestions.len();
 if expected_len != suggestions_len {
@@ -196,13 +196,13 @@ pub fn match_suggestions(expected: Vec<String>, suggestions: Vec<Suggestion>) {
 Expected: {expected:#?}\n"
 )
 }
-assert_eq!(
-expected,
-suggestions
-.into_iter()
-.map(|it| it.value)
-.collect::<Vec<_>>()
-);
+let suggestoins_str = suggestions
+.iter()
+.map(|it| it.value.clone())
+.collect::<Vec<_>>();
+
+assert_eq!(expected, &suggestoins_str);
 }

 // append the separator to the converted path
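With match_suggestions now taking references, a test can assert against the same expected vector several times without cloning or rebuilding it. A simplified stand-in (plain string slices instead of the real Vec<Suggestion> type) to show the call pattern:

// Borrowing lets callers keep ownership and reuse `expected` across assertions.
fn match_values(expected: &[String], actual: &[String]) {
    assert_eq!(expected, actual);
}

fn main() {
    let expected = vec!["--help".to_string(), "--mod".to_string()];
    let first = vec!["--help".to_string(), "--mod".to_string()];
    let second = vec!["--help".to_string(), "--mod".to_string()];
    match_values(&expected, &first);
    match_values(&expected, &second); // still usable: nothing was moved
}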
@@ -5,15 +5,15 @@ edition = "2021"
 license = "MIT"
 name = "nu-cmd-base"
 repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
-version = "0.95.1"
+version = "0.96.2"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-nu-engine = { path = "../nu-engine", version = "0.95.1" }
-nu-parser = { path = "../nu-parser", version = "0.95.1" }
-nu-path = { path = "../nu-path", version = "0.95.1" }
-nu-protocol = { path = "../nu-protocol", version = "0.95.1" }
+nu-engine = { path = "../nu-engine", version = "0.96.2" }
+nu-parser = { path = "../nu-parser", version = "0.96.2" }
+nu-path = { path = "../nu-path", version = "0.96.2" }
+nu-protocol = { path = "../nu-protocol", version = "0.96.2" }

 indexmap = { workspace = true }
 miette = { workspace = true }
@@ -3,21 +3,26 @@ use nu_protocol::{
 engine::{EngineState, Stack},
 Range, ShellError, Span, Value,
 };
-use std::{ops::Bound, path::PathBuf};
+use std::ops::Bound;

-pub fn get_init_cwd() -> PathBuf {
-std::env::current_dir().unwrap_or_else(|_| {
-std::env::var("PWD")
-.map(Into::into)
-.unwrap_or_else(|_| nu_path::home_dir().unwrap_or_default())
-})
+pub fn get_init_cwd() -> AbsolutePathBuf {
+std::env::current_dir()
+.ok()
+.and_then(|path| AbsolutePathBuf::try_from(path).ok())
+.or_else(|| {
+std::env::var("PWD")
+.ok()
+.and_then(|path| AbsolutePathBuf::try_from(path).ok())
+})
+.or_else(nu_path::home_dir)
+.expect("Failed to get current working directory")
 }

-pub fn get_guaranteed_cwd(engine_state: &EngineState, stack: &Stack) -> PathBuf {
+pub fn get_guaranteed_cwd(engine_state: &EngineState, stack: &Stack) -> AbsolutePathBuf {
 engine_state
 .cwd(Some(stack))
-.map(AbsolutePathBuf::into_std_path_buf)
-.unwrap_or(crate::util::get_init_cwd())
+.ok()
+.unwrap_or_else(get_init_cwd)
 }

 type MakeRangeError = fn(&str, Span) -> ShellError;
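The new get_init_cwd spells out its fallback order with Option combinators: the process working directory, then $PWD, then the home directory. A std-only sketch of the same chain, returning PathBuf instead of AbsolutePathBuf and using $HOME as a stand-in for nu_path::home_dir() — both assumptions:

use std::path::PathBuf;

// Fallback order mirrored from the hunk above: current_dir() -> $PWD -> home.
fn init_cwd() -> PathBuf {
    std::env::current_dir()
        .ok()
        .or_else(|| std::env::var("PWD").ok().map(PathBuf::from))
        .or_else(|| std::env::var("HOME").ok().map(PathBuf::from)) // stand-in for nu_path::home_dir()
        .expect("failed to determine an initial working directory")
}

fn main() {
    println!("initial cwd: {}", init_cwd().display());
}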
@@ -5,7 +5,7 @@ edition = "2021"
 license = "MIT"
 name = "nu-cmd-extra"
 repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-extra"
-version = "0.95.1"
+version = "0.96.2"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -13,13 +13,13 @@ version = "0.95.1"
 bench = false

 [dependencies]
-nu-cmd-base = { path = "../nu-cmd-base", version = "0.95.1" }
-nu-engine = { path = "../nu-engine", version = "0.95.1" }
-nu-json = { version = "0.95.1", path = "../nu-json" }
-nu-parser = { path = "../nu-parser", version = "0.95.1" }
-nu-pretty-hex = { version = "0.95.1", path = "../nu-pretty-hex" }
-nu-protocol = { path = "../nu-protocol", version = "0.95.1" }
-nu-utils = { path = "../nu-utils", version = "0.95.1" }
+nu-cmd-base = { path = "../nu-cmd-base", version = "0.96.2" }
+nu-engine = { path = "../nu-engine", version = "0.96.2" }
+nu-json = { version = "0.96.2", path = "../nu-json" }
+nu-parser = { path = "../nu-parser", version = "0.96.2" }
+nu-pretty-hex = { version = "0.96.2", path = "../nu-pretty-hex" }
+nu-protocol = { path = "../nu-protocol", version = "0.96.2" }
+nu-utils = { path = "../nu-utils", version = "0.96.2" }

 # Potential dependencies for extras
 heck = { workspace = true }
@@ -33,6 +33,6 @@ v_htmlescape = { workspace = true }
 itertools = { workspace = true }

 [dev-dependencies]
-nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.95.1" }
-nu-command = { path = "../nu-command", version = "0.95.1" }
-nu-test-support = { path = "../nu-test-support", version = "0.95.1" }
+nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.96.2" }
+nu-command = { path = "../nu-command", version = "0.96.2" }
+nu-test-support = { path = "../nu-test-support", version = "0.96.2" }
@@ -6,22 +6,22 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-lang"
 edition = "2021"
 license = "MIT"
 name = "nu-cmd-lang"
-version = "0.95.1"
+version = "0.96.2"

 [lib]
 bench = false

 [dependencies]
-nu-engine = { path = "../nu-engine", version = "0.95.1" }
-nu-parser = { path = "../nu-parser", version = "0.95.1" }
-nu-protocol = { path = "../nu-protocol", version = "0.95.1" }
-nu-utils = { path = "../nu-utils", version = "0.95.1" }
+nu-engine = { path = "../nu-engine", version = "0.96.2" }
+nu-parser = { path = "../nu-parser", version = "0.96.2" }
+nu-protocol = { path = "../nu-protocol", version = "0.96.2" }
+nu-utils = { path = "../nu-utils", version = "0.96.2" }

 itertools = { workspace = true }
-shadow-rs = { version = "0.29", default-features = false }
+shadow-rs = { version = "0.30", default-features = false }

 [build-dependencies]
-shadow-rs = { version = "0.29", default-features = false }
+shadow-rs = { version = "0.30", default-features = false }

 [features]
 mimalloc = []
@@ -72,7 +72,7 @@ pub fn check_example_input_and_output_types_match_command_signature(
 witnessed_type_transformations
 }
 
-fn eval_pipeline_without_terminal_expression(
+pub fn eval_pipeline_without_terminal_expression(
 src: &str,
 cwd: &std::path::Path,
 engine_state: &mut Box<EngineState>,

@@ -5,15 +5,15 @@ edition = "2021"
 license = "MIT"
 name = "nu-cmd-plugin"
 repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-plugin"
-version = "0.95.1"
+version = "0.96.2"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-nu-engine = { path = "../nu-engine", version = "0.95.1" }
-nu-path = { path = "../nu-path", version = "0.95.1" }
-nu-protocol = { path = "../nu-protocol", version = "0.95.1", features = ["plugin"] }
-nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.95.1" }
+nu-engine = { path = "../nu-engine", version = "0.96.2" }
+nu-path = { path = "../nu-path", version = "0.96.2" }
+nu-protocol = { path = "../nu-protocol", version = "0.96.2", features = ["plugin"] }
+nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.96.2" }
 
 itertools = { workspace = true }
 

@@ -5,18 +5,18 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-color-confi
 edition = "2021"
 license = "MIT"
 name = "nu-color-config"
-version = "0.95.1"
+version = "0.96.2"
 
 [lib]
 bench = false
 
 [dependencies]
-nu-protocol = { path = "../nu-protocol", version = "0.95.1" }
-nu-engine = { path = "../nu-engine", version = "0.95.1" }
-nu-json = { path = "../nu-json", version = "0.95.1" }
+nu-protocol = { path = "../nu-protocol", version = "0.96.2" }
+nu-engine = { path = "../nu-engine", version = "0.96.2" }
+nu-json = { path = "../nu-json", version = "0.96.2" }
 nu-ansi-term = { workspace = true }
 
 serde = { workspace = true, features = ["derive"] }
 
 [dev-dependencies]
-nu-test-support = { path = "../nu-test-support", version = "0.95.1" }
+nu-test-support = { path = "../nu-test-support", version = "0.96.2" }

@@ -5,7 +5,7 @@ edition = "2021"
 license = "MIT"
 name = "nu-command"
 repository = "https://github.com/nushell/nushell/tree/main/crates/nu-command"
-version = "0.95.1"
+version = "0.96.2"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
@@ -13,21 +13,21 @@ version = "0.95.1"
 bench = false
 
 [dependencies]
-nu-cmd-base = { path = "../nu-cmd-base", version = "0.95.1" }
-nu-color-config = { path = "../nu-color-config", version = "0.95.1" }
-nu-engine = { path = "../nu-engine", version = "0.95.1" }
-nu-glob = { path = "../nu-glob", version = "0.95.1" }
-nu-json = { path = "../nu-json", version = "0.95.1" }
-nu-parser = { path = "../nu-parser", version = "0.95.1" }
-nu-path = { path = "../nu-path", version = "0.95.1" }
-nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.95.1" }
-nu-protocol = { path = "../nu-protocol", version = "0.95.1" }
-nu-system = { path = "../nu-system", version = "0.95.1" }
-nu-table = { path = "../nu-table", version = "0.95.1" }
-nu-term-grid = { path = "../nu-term-grid", version = "0.95.1" }
-nu-utils = { path = "../nu-utils", version = "0.95.1" }
+nu-cmd-base = { path = "../nu-cmd-base", version = "0.96.2" }
+nu-color-config = { path = "../nu-color-config", version = "0.96.2" }
+nu-engine = { path = "../nu-engine", version = "0.96.2" }
+nu-glob = { path = "../nu-glob", version = "0.96.2" }
+nu-json = { path = "../nu-json", version = "0.96.2" }
+nu-parser = { path = "../nu-parser", version = "0.96.2" }
+nu-path = { path = "../nu-path", version = "0.96.2" }
+nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.96.2" }
+nu-protocol = { path = "../nu-protocol", version = "0.96.2" }
+nu-system = { path = "../nu-system", version = "0.96.2" }
+nu-table = { path = "../nu-table", version = "0.96.2" }
+nu-term-grid = { path = "../nu-term-grid", version = "0.96.2" }
+nu-utils = { path = "../nu-utils", version = "0.96.2" }
 nu-ansi-term = { workspace = true }
-nuon = { path = "../nuon", version = "0.95.1" }
+nuon = { path = "../nuon", version = "0.96.2" }
 
 alphanumeric-sort = { workspace = true }
 base64 = { workspace = true }
@@ -138,8 +138,8 @@ sqlite = ["rusqlite"]
 trash-support = ["trash"]
 
 [dev-dependencies]
-nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.95.1" }
-nu-test-support = { path = "../nu-test-support", version = "0.95.1" }
+nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.96.2" }
+nu-test-support = { path = "../nu-test-support", version = "0.96.2" }
 
 dirs = { workspace = true }
 mockito = { workspace = true, default-features = false }

@@ -177,11 +177,9 @@ fn run_histogram(
 match v {
 // parse record, and fill valid value to actual input.
 Value::Record { val, .. } => {
-for (c, v) in val.iter() {
-if c == col_name {
-if let Ok(v) = HashableValue::from_value(v.clone(), head_span) {
-inputs.push(v);
-}
-}
+if let Some(v) = val.get(col_name) {
+if let Ok(v) = HashableValue::from_value(v.clone(), head_span) {
+inputs.push(v);
+}
 }
 }
 }

@@ -150,13 +150,9 @@ fn fill(
 FillAlignment::Left
 };
 
-let width = if let Some(arg) = width_arg { arg } else { 1 };
+let width = width_arg.unwrap_or(1);
 
-let character = if let Some(arg) = character_arg {
-arg
-} else {
-" ".to_string()
-};
+let character = character_arg.unwrap_or_else(|| " ".to_string());
 
 let arg = Arguments {
 width,

@@ -424,11 +424,7 @@ pub fn value_to_sql(value: Value) -> Result<Box<dyn rusqlite::ToSql>, ShellError
 Value::Filesize { val, .. } => Box::new(val),
 Value::Duration { val, .. } => Box::new(val),
 Value::Date { val, .. } => Box::new(val),
-Value::String { val, .. } => {
-// don't store ansi escape sequences in the database
-// escape single quotes
-Box::new(nu_utils::strip_ansi_unlikely(&val).into_owned())
-}
+Value::String { val, .. } => Box::new(val),
 Value::Binary { val, .. } => Box::new(val),
 Value::Nothing { .. } => Box::new(rusqlite::types::Null),
 val => {

@@ -1,6 +1,5 @@
 use nu_cmd_base::util::get_init_cwd;
 use nu_engine::command_prelude::*;
-use nu_path::AbsolutePathBuf;
 use nu_utils::filesystem::{have_permission, PermissionResult};
 
 #[derive(Clone)]
@@ -46,8 +45,8 @@ impl Command for Cd {
 // user can use `cd` to recover PWD to a good state.
 let cwd = engine_state
 .cwd(Some(stack))
-.map(AbsolutePathBuf::into_std_path_buf)
-.unwrap_or(get_init_cwd());
+.ok()
+.unwrap_or_else(get_init_cwd);
 
 let path_val = {
 if let Some(path) = path_val {
@@ -66,7 +65,7 @@ impl Command for Cd {
 if let Some(oldpwd) = stack.get_env_var(engine_state, "OLDPWD") {
 oldpwd.to_path()?
 } else {
-cwd
+cwd.into()
 }
 } else {
 // Trim whitespace from the end of path.
@@ -135,7 +134,7 @@ impl Command for Cd {
 result: None,
 },
 Example {
-description: "Change to the previous working directory ($OLDPWD)",
+description: r#"Change to the previous working directory (same as "cd $env.OLDPWD")"#,
 example: r#"cd -"#,
 result: None,
 },
@@ -144,6 +143,16 @@ impl Command for Cd {
 example: r#"def --env gohome [] { cd ~ }"#,
 result: None,
 },
+Example {
+description: "Move two directories up in the tree (the parent directory's parent). Additional dots can be added for additional levels.",
+example: r#"cd ..."#,
+result: None,
+},
+Example {
+description: "The cd command itself is often optional. Simply entering a path to a directory will cd to it.",
+example: r#"/home"#,
+result: None,
+},
 ]
 }
 }

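Editor's note: the two examples added to `cd` above rely on nushell's multi-dot and bare-path shorthands. A short, hedged sketch of how they read in practice (the paths are illustrative only):

cd ...      # up two levels: the parent directory's parent
cd ....     # each additional dot climbs one more level
/home       # a bare directory path also changes into it; the `cd` itself is optional
cd -        # back to the previous working directory ($env.OLDPWD)
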
@@ -135,12 +135,9 @@ fn rm(
 let home: Option<String> = nu_path::home_dir().map(|path| {
 {
 if path.exists() {
-match nu_path::canonicalize_with(&path, &currentdir_path) {
-Ok(canon_path) => canon_path,
-Err(_) => path,
-}
+nu_path::canonicalize_with(&path, &currentdir_path).unwrap_or(path.into())
 } else {
-path
+path.into()
 }
 }
 .to_string_lossy()

@@ -121,9 +121,11 @@ impl Command for Save {
 } else {
 match stderr {
 ChildPipe::Pipe(mut pipe) => {
-io::copy(&mut pipe, &mut io::sink())
+io::copy(&mut pipe, &mut io::stderr())
+}
+ChildPipe::Tee(mut tee) => {
+io::copy(&mut tee, &mut io::stderr())
 }
-ChildPipe::Tee(mut tee) => io::copy(&mut tee, &mut io::sink()),
 }
 .err_span(span)?;
 }

@@ -86,17 +86,22 @@ impl Command for UCp {
 },
 Example {
 description: "Copy only if source file is newer than target file",
-example: "cp -u a b",
+example: "cp -u myfile newfile",
 result: None,
 },
 Example {
 description: "Copy file preserving mode and timestamps attributes",
-example: "cp --preserve [ mode timestamps ] a b",
+example: "cp --preserve [ mode timestamps ] myfile newfile",
 result: None,
 },
 Example {
 description: "Copy file erasing all attributes",
-example: "cp --preserve [] a b",
+example: "cp --preserve [] myfile newfile",
+result: None,
+},
+Example {
+description: "Copy file to a directory three levels above its current location",
+example: "cp myfile ....",
 result: None,
 },
 ]
@@ -235,7 +240,7 @@ impl Command for UCp {
 for (sources, need_expand_tilde) in sources.iter_mut() {
 for src in sources.iter_mut() {
 if !src.is_absolute() {
-*src = nu_path::expand_path_with(&src, &cwd, *need_expand_tilde);
+*src = nu_path::expand_path_with(&*src, &cwd, *need_expand_tilde);
 }
 }
 }

@@ -30,11 +30,21 @@ impl Command for UMv {
 example: "mv test.txt my/subdirectory",
 result: None,
 },
+Example {
+description: "Move only if source file is newer than target file",
+example: "mv -u new/test.txt old/",
+result: None,
+},
 Example {
 description: "Move many files into a directory",
 example: "mv *.txt my/subdirectory",
 result: None,
 },
+Example {
+description: r#"Move a file into the "my" directory two levels up in the directory tree"#,
+example: "mv test.txt .../my/",
+result: None,
+},
 ]
 }
 
@@ -49,6 +59,11 @@ impl Command for UMv {
 .switch("verbose", "explain what is being done.", Some('v'))
 .switch("progress", "display a progress bar", Some('p'))
 .switch("interactive", "prompt before overwriting", Some('i'))
+.switch(
+"update",
+"move and overwrite only when the SOURCE file is newer than the destination file or when the destination file is missing",
+Some('u')
+)
 .switch("no-clobber", "do not overwrite an existing file", Some('n'))
 .rest(
 "paths",
@@ -77,6 +92,11 @@ impl Command for UMv {
 } else {
 uu_mv::OverwriteMode::Force
 };
+let update = if call.has_flag(engine_state, stack, "update")? {
+UpdateMode::ReplaceIfOlder
+} else {
+UpdateMode::ReplaceAll
+};
 
 #[allow(deprecated)]
 let cwd = current_dir(engine_state, stack)?;
@@ -141,7 +161,7 @@ impl Command for UMv {
 for (files, need_expand_tilde) in files.iter_mut() {
 for src in files.iter_mut() {
 if !src.is_absolute() {
-*src = nu_path::expand_path_with(&src, &cwd, *need_expand_tilde);
+*src = nu_path::expand_path_with(&*src, &cwd, *need_expand_tilde);
 }
 }
 }
@@ -164,7 +184,7 @@ impl Command for UMv {
 verbose,
 suffix: String::from("~"),
 backup: BackupMode::NoBackup,
-update: UpdateMode::ReplaceAll,
+update,
 target_dir: None,
 no_target_dir: false,
 strip_slashes: false,

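The new `--update`/`-u` switch and the extra examples above mirror the existing `cp -u` behaviour. A small usage sketch based on the examples added in this hunk (file names are illustrative):

mv -u new/test.txt old/    # overwrite only when the source is newer than the destination, or the destination is missing
mv test.txt .../my/        # multi-dot shorthand in the destination: two levels up, then into my/
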
@@ -27,7 +27,7 @@ impl Command for Default {
 }
 
 fn usage(&self) -> &str {
-"Sets a default row's column if missing."
+"Sets a default value if a row's column is missing or null."
 }
 
 fn run(
@@ -66,6 +66,20 @@ impl Command for Default {
 Span::test_data(),
 )),
 },
+Example {
+description: r#"Replace the missing value in the "a" column of a list"#,
+example: "[{a:1 b:2} {b:1}] | default 'N/A' a",
+result: Some(Value::test_list(vec![
+Value::test_record(record! {
+"a" => Value::test_int(1),
+"b" => Value::test_int(2),
+}),
+Value::test_record(record! {
+"a" => Value::test_string("N/A"),
+"b" => Value::test_int(1),
+}),
+])),
+},
 ]
 }
 }
@@ -88,19 +102,13 @@ fn default(
 val: ref mut record,
 ..
 } => {
-let mut found = false;
-for (col, val) in record.to_mut().iter_mut() {
-if *col == column.item {
-found = true;
-if matches!(val, Value::Nothing { .. }) {
-*val = value.clone();
-}
+let record = record.to_mut();
+if let Some(val) = record.get_mut(&column.item) {
+if matches!(val, Value::Nothing { .. }) {
+*val = value.clone();
 }
-}
-if !found {
-record.to_mut().push(column.item.clone(), value.clone());
+} else {
+record.push(column.item.clone(), value.clone());
 }
 
 item

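As the updated usage string says, `default` now treats an explicit null in an existing column the same as a missing column. A rough sketch of both cases, following the example added above:

[{a:1 b:2} {b:1}] | default 'N/A' a        # fills the column where it is absent
[{a:null b:2} {a:3 b:1}] | default 0 a     # also replaces an explicit null in an existing column
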
@@ -120,8 +120,7 @@ impl Command for Reduce {
 engine_state.signals().check(head)?;
 acc = closure
 .add_arg(value)
-.add_arg(acc)
-.run_with_input(PipelineData::Empty)?
+.run_with_value(acc)?
 .into_value(head)?;
 }
 

@@ -1,7 +1,7 @@
 use std::io::{BufRead, Cursor};
 
 use nu_engine::command_prelude::*;
-use nu_protocol::{ListStream, PipelineMetadata, Signals};
+use nu_protocol::{ListStream, Signals};
 
 #[derive(Clone)]
 pub struct FromJson;
@@ -83,7 +83,7 @@ impl Command for FromJson {
 strict,
 engine_state.signals().clone(),
 ),
-update_metadata(metadata),
+metadata,
 ))
 }
 PipelineData::ByteStream(stream, metadata)
@@ -92,7 +92,7 @@ impl Command for FromJson {
 if let Some(reader) = stream.reader() {
 Ok(PipelineData::ListStream(
 read_json_lines(reader, span, strict, Signals::empty()),
-update_metadata(metadata),
+metadata,
 ))
 } else {
 Ok(PipelineData::Empty)
@@ -115,10 +115,10 @@ impl Command for FromJson {
 
 if strict {
 Ok(convert_string_to_value_strict(&string_input, span)?
-.into_pipeline_data_with_metadata(update_metadata(metadata)))
+.into_pipeline_data_with_metadata(metadata))
 } else {
 Ok(convert_string_to_value(&string_input, span)?
-.into_pipeline_data_with_metadata(update_metadata(metadata)))
+.into_pipeline_data_with_metadata(metadata))
 }
 }
 }
@@ -265,14 +265,6 @@ fn convert_string_to_value_strict(string_input: &str, span: Span) -> Result<Valu
 }
 }
 
-fn update_metadata(metadata: Option<PipelineMetadata>) -> Option<PipelineMetadata> {
-metadata
-.map(|md| md.with_content_type(Some("application/json".into())))
-.or_else(|| {
-Some(PipelineMetadata::default().with_content_type(Some("application/json".into())))
-})
-}
-
 #[cfg(test)]
 mod test {
 use super::*;

@@ -4,6 +4,8 @@ use crate::formats::to::delimited::to_delimited_data;
 use nu_engine::command_prelude::*;
 use nu_protocol::Config;
 
+use super::delimited::ToDelimitedDataArgs;
+
 #[derive(Clone)]
 pub struct ToCsv;
 
@@ -116,17 +118,62 @@ fn to_csv(
 },
 };
 
-to_delimited_data(noheaders, sep, columns, "CSV", input, head, config)
+to_delimited_data(
+ToDelimitedDataArgs {
+noheaders,
+separator: sep,
+columns,
+format_name: "CSV",
+input,
+head,
+content_type: Some(mime::TEXT_CSV.to_string()),
+},
+config,
+)
 }
 
 #[cfg(test)]
 mod test {
+use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+
+use crate::Metadata;
+
 use super::*;
 
 #[test]
 fn test_examples() {
 use crate::test_examples;
 
 test_examples(ToCsv {})
 }
+
+#[test]
+fn test_content_type_metadata() {
+let mut engine_state = Box::new(EngineState::new());
+let delta = {
+// Base functions that are needed for testing
+// Try to keep this working set small to keep tests running as fast as possible
+let mut working_set = StateWorkingSet::new(&engine_state);
+
+working_set.add_decl(Box::new(ToCsv {}));
+working_set.add_decl(Box::new(Metadata {}));
+
+working_set.render()
+};
+
+engine_state
+.merge_delta(delta)
+.expect("Error merging delta");
+
+let cmd = "{a: 1 b: 2} | to csv | metadata | get content_type";
+let result = eval_pipeline_without_terminal_expression(
+cmd,
+std::env::temp_dir().as_ref(),
+&mut engine_state,
+);
+assert_eq!(
+Value::test_record(record!("content_type" => Value::test_string("text/csv"))),
+result.expect("There should be a result")
+);
+}
 }

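This and the following `to ...` converters now attach a content type to the pipeline metadata, which is what the repeated test_content_type_metadata tests assert. An interactive equivalent of the checks, with the value each test expects shown as a trailing comment:

{a: 1 b: 2} | to csv | metadata | get content_type     # text/csv
{a: 1 b: 2} | to json | metadata | get content_type    # application/json
{a: 1 b: 2} | to nuon | metadata | get content_type    # application/x-nuon
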
@@ -69,18 +69,36 @@ fn make_cant_convert_error(value: &Value, format_name: &'static str) -> ShellEr
 }
 }
 
+pub struct ToDelimitedDataArgs {
+pub noheaders: bool,
+pub separator: Spanned<char>,
+pub columns: Option<Vec<String>>,
+pub format_name: &'static str,
+pub input: PipelineData,
+pub head: Span,
+pub content_type: Option<String>,
+}
+
 pub fn to_delimited_data(
-noheaders: bool,
-separator: Spanned<char>,
-columns: Option<Vec<String>>,
-format_name: &'static str,
-input: PipelineData,
-head: Span,
+ToDelimitedDataArgs {
+noheaders,
+separator,
+columns,
+format_name,
+input,
+head,
+content_type,
+}: ToDelimitedDataArgs,
 config: Arc<Config>,
 ) -> Result<PipelineData, ShellError> {
 let mut input = input;
 let span = input.span().unwrap_or(head);
-let metadata = input.metadata();
+let metadata = Some(
+input
+.metadata()
+.unwrap_or_default()
+.with_content_type(content_type),
+);
 
 let separator = u8::try_from(separator.item).map_err(|_| ShellError::IncorrectValue {
 msg: "separator must be an ASCII character".into(),

@@ -64,7 +64,7 @@ impl Command for ToJson {
 let res = Value::string(serde_json_string, span);
 let metadata = PipelineMetadata {
 data_source: nu_protocol::DataSource::None,
-content_type: Some("application/json".to_string()),
+content_type: Some(mime::APPLICATION_JSON.to_string()),
 };
 Ok(PipelineData::Value(res, Some(metadata)))
 }
@@ -159,6 +159,10 @@ fn json_list(input: &[Value]) -> Result<Vec<nu_json::Value>, ShellError> {
 
 #[cfg(test)]
 mod test {
+use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+
+use crate::Metadata;
+
 use super::*;
 
 #[test]
@@ -167,4 +171,34 @@ mod test {
 
 test_examples(ToJson {})
 }
+
+#[test]
+fn test_content_type_metadata() {
+let mut engine_state = Box::new(EngineState::new());
+let delta = {
+// Base functions that are needed for testing
+// Try to keep this working set small to keep tests running as fast as possible
+let mut working_set = StateWorkingSet::new(&engine_state);
+
+working_set.add_decl(Box::new(ToJson {}));
+working_set.add_decl(Box::new(Metadata {}));
+
+working_set.render()
+};
+
+engine_state
+.merge_delta(delta)
+.expect("Error merging delta");
+
+let cmd = "{a: 1 b: 2} | to json | metadata | get content_type";
+let result = eval_pipeline_without_terminal_expression(
+cmd,
+std::env::temp_dir().as_ref(),
+&mut engine_state,
+);
+assert_eq!(
+Value::test_record(record!("content_type" => Value::test_string("application/json"))),
+result.expect("There should be a result")
+);
+}
 }

@@ -82,6 +82,12 @@ fn to_md(
 config: &Config,
 head: Span,
 ) -> Result<PipelineData, ShellError> {
+// text/markdown became a valid mimetype with rfc7763
+let metadata = input
+.metadata()
+.unwrap_or_default()
+.with_content_type(Some("text/markdown".into()));
+
 let (grouped_input, single_list) = group_by(input, head, config);
 if per_element || single_list {
 return Ok(Value::string(
@@ -95,9 +101,10 @@ fn to_md(
 .join(""),
 head,
 )
-.into_pipeline_data());
+.into_pipeline_data_with_metadata(Some(metadata)));
 }
-Ok(Value::string(table(grouped_input, pretty, config), head).into_pipeline_data())
+Ok(Value::string(table(grouped_input, pretty, config), head)
+.into_pipeline_data_with_metadata(Some(metadata)))
 }
 
 fn fragment(input: Value, pretty: bool, config: &Config) -> String {
@@ -328,7 +335,10 @@ fn get_padded_string(text: String, desired_length: usize, padding_character: cha
 
 #[cfg(test)]
 mod tests {
+use crate::Metadata;
+
 use super::*;
+use nu_cmd_lang::eval_pipeline_without_terminal_expression;
 use nu_protocol::{record, Config, IntoPipelineData, Value};
 
 fn one(string: &str) -> String {
@@ -453,4 +463,35 @@ mod tests {
 "#)
 );
 }
+
+#[test]
+fn test_content_type_metadata() {
+let mut engine_state = Box::new(EngineState::new());
+let state_delta = {
+// Base functions that are needed for testing
+// Try to keep this working set small to keep tests running as fast as possible
+let mut working_set = StateWorkingSet::new(&engine_state);
+
+working_set.add_decl(Box::new(ToMd {}));
+working_set.add_decl(Box::new(Metadata {}));
+
+working_set.render()
+};
+let delta = state_delta;
+
+engine_state
+.merge_delta(delta)
+.expect("Error merging delta");
+
+let cmd = "{a: 1 b: 2} | to md | metadata | get content_type";
+let result = eval_pipeline_without_terminal_expression(
+cmd,
+std::env::temp_dir().as_ref(),
+&mut engine_state,
+);
+assert_eq!(
+Value::test_record(record!("content_type" => Value::test_string("text/markdown"))),
+result.expect("There should be a result")
+);
+}
 }

@@ -74,13 +74,18 @@ MessagePack: https://msgpack.org/
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
+let metadata = input
+.metadata()
+.unwrap_or_default()
+.with_content_type(Some("application/x-msgpack".into()));
+
 let value_span = input.span().unwrap_or(call.head);
 let value = input.into_value(value_span)?;
 let mut out = vec![];
 
 write_value(&mut out, &value, 0)?;
 
-Ok(Value::binary(out, call.head).into_pipeline_data())
+Ok(Value::binary(out, call.head).into_pipeline_data_with_metadata(Some(metadata)))
 }
 }
@@ -268,6 +273,10 @@ where
 
 #[cfg(test)]
 mod test {
+use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+
+use crate::Metadata;
+
 use super::*;
 
 #[test]
@@ -276,4 +285,36 @@ mod test {
 
 test_examples(ToMsgpack {})
 }
+
+#[test]
+fn test_content_type_metadata() {
+let mut engine_state = Box::new(EngineState::new());
+let delta = {
+// Base functions that are needed for testing
+// Try to keep this working set small to keep tests running as fast as possible
+let mut working_set = StateWorkingSet::new(&engine_state);
+
+working_set.add_decl(Box::new(ToMsgpack {}));
+working_set.add_decl(Box::new(Metadata {}));
+
+working_set.render()
+};
+
+engine_state
+.merge_delta(delta)
+.expect("Error merging delta");
+
+let cmd = "{a: 1 b: 2} | to msgpack | metadata | get content_type";
+let result = eval_pipeline_without_terminal_expression(
+cmd,
+std::env::temp_dir().as_ref(),
+&mut engine_state,
+);
+assert_eq!(
+Value::test_record(
+record!("content_type" => Value::test_string("application/x-msgpack"))
+),
+result.expect("There should be a result")
+);
+}
 }

@@ -42,6 +42,11 @@ impl Command for ToNuon {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
+let metadata = input
+.metadata()
+.unwrap_or_default()
+.with_content_type(Some("application/x-nuon".into()));
+
 let style = if call.has_flag(engine_state, stack, "raw")? {
 nuon::ToStyle::Raw
 } else if let Some(t) = call.get_flag(engine_state, stack, "tabs")? {
@@ -56,9 +61,8 @@ impl Command for ToNuon {
 let value = input.into_value(span)?;
 
 match nuon::to_nuon(&value, style, Some(span)) {
-Ok(serde_nuon_string) => {
-Ok(Value::string(serde_nuon_string, span).into_pipeline_data())
-}
+Ok(serde_nuon_string) => Ok(Value::string(serde_nuon_string, span)
+.into_pipeline_data_with_metadata(Some(metadata))),
 _ => Ok(Value::error(
 ShellError::CantConvert {
 to_type: "NUON".into(),
@@ -68,7 +72,7 @@ impl Command for ToNuon {
 },
 span,
 )
-.into_pipeline_data()),
+.into_pipeline_data_with_metadata(Some(metadata))),
 }
 }
 
@@ -100,10 +104,45 @@ impl Command for ToNuon {
 
 #[cfg(test)]
 mod test {
+use super::*;
+use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+
+use crate::Metadata;
+
 #[test]
 fn test_examples() {
 use super::ToNuon;
 use crate::test_examples;
 test_examples(ToNuon {})
 }
+
+#[test]
+fn test_content_type_metadata() {
+let mut engine_state = Box::new(EngineState::new());
+let delta = {
+// Base functions that are needed for testing
+// Try to keep this working set small to keep tests running as fast as possible
+let mut working_set = StateWorkingSet::new(&engine_state);
+
+working_set.add_decl(Box::new(ToNuon {}));
+working_set.add_decl(Box::new(Metadata {}));
+
+working_set.render()
+};
+
+engine_state
+.merge_delta(delta)
+.expect("Error merging delta");
+
+let cmd = "{a: 1 b: 2} | to nuon | metadata | get content_type";
+let result = eval_pipeline_without_terminal_expression(
+cmd,
+std::env::temp_dir().as_ref(),
+&mut engine_state,
+);
+assert_eq!(
+Value::test_record(record!("content_type" => Value::test_string("application/x-nuon"))),
+result.expect("There should be a result")
+);
+}
 }

@@ -134,14 +134,18 @@ fn local_into_string(value: Value, separator: &str, config: &Config) -> String {
 
 fn update_metadata(metadata: Option<PipelineMetadata>) -> Option<PipelineMetadata> {
 metadata
-.map(|md| md.with_content_type(Some("text/plain".to_string())))
+.map(|md| md.with_content_type(Some(mime::TEXT_PLAIN.to_string())))
 .or_else(|| {
-Some(PipelineMetadata::default().with_content_type(Some("text/plain".to_string())))
+Some(PipelineMetadata::default().with_content_type(Some(mime::TEXT_PLAIN.to_string())))
 })
 }
 
 #[cfg(test)]
 mod test {
+use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+
+use crate::Metadata;
+
 use super::*;
 
 #[test]
@@ -150,4 +154,34 @@ mod test {
 
 test_examples(ToText {})
 }
+
+#[test]
+fn test_content_type_metadata() {
+let mut engine_state = Box::new(EngineState::new());
+let delta = {
+// Base functions that are needed for testing
+// Try to keep this working set small to keep tests running as fast as possible
+let mut working_set = StateWorkingSet::new(&engine_state);
+
+working_set.add_decl(Box::new(ToText {}));
+working_set.add_decl(Box::new(Metadata {}));
+
+working_set.render()
+};
+
+engine_state
+.merge_delta(delta)
+.expect("Error merging delta");
+
+let cmd = "{a: 1 b: 2} | to text | metadata | get content_type";
+let result = eval_pipeline_without_terminal_expression(
+cmd,
+std::env::temp_dir().as_ref(),
+&mut engine_state,
+);
+assert_eq!(
+Value::test_record(record!("content_type" => Value::test_string("text/plain"))),
+result.expect("There should be a result")
+);
+}
 }

@@ -4,6 +4,8 @@ use crate::formats::to::delimited::to_delimited_data;
 use nu_engine::command_prelude::*;
 use nu_protocol::Config;
 
+use super::delimited::ToDelimitedDataArgs;
+
 #[derive(Clone)]
 pub struct ToTsv;
 
@@ -82,11 +84,26 @@ fn to_tsv(
 item: '\t',
 span: head,
 };
-to_delimited_data(noheaders, sep, columns, "TSV", input, head, config)
+to_delimited_data(
+ToDelimitedDataArgs {
+noheaders,
+separator: sep,
+columns,
+format_name: "TSV",
+input,
+head,
+content_type: Some(mime::TEXT_TAB_SEPARATED_VALUES.to_string()),
+},
+config,
+)
 }
 
 #[cfg(test)]
 mod test {
+use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+
+use crate::Metadata;
+
 use super::*;
 
 #[test]
@@ -95,4 +112,36 @@ mod test {
 
 test_examples(ToTsv {})
 }
+
+#[test]
+fn test_content_type_metadata() {
+let mut engine_state = Box::new(EngineState::new());
+let delta = {
+// Base functions that are needed for testing
+// Try to keep this working set small to keep tests running as fast as possible
+let mut working_set = StateWorkingSet::new(&engine_state);
+
+working_set.add_decl(Box::new(ToTsv {}));
+working_set.add_decl(Box::new(Metadata {}));
+
+working_set.render()
+};
+
+engine_state
+.merge_delta(delta)
+.expect("Error merging delta");
+
+let cmd = "{a: 1 b: 2} | to tsv | metadata | get content_type";
+let result = eval_pipeline_without_terminal_expression(
+cmd,
+std::env::temp_dir().as_ref(),
+&mut engine_state,
+);
+assert_eq!(
+Value::test_record(
+record!("content_type" => Value::test_string("text/tab-separated-values"))
+),
+result.expect("There should be a result")
+);
+}
 }

@@ -132,6 +132,10 @@ impl Job {
 }
 
 fn run(mut self, input: PipelineData, head: Span) -> Result<PipelineData, ShellError> {
+let metadata = input
+.metadata()
+.unwrap_or_default()
+.with_content_type(Some("application/xml".into()));
 let value = input.into_value(head)?;
 
 self.write_xml_entry(value, true).and_then(|_| {
@@ -141,7 +145,7 @@ impl Job {
 } else {
 return Err(ShellError::NonUtf8 { span: head });
 };
-Ok(Value::string(s, head).into_pipeline_data())
+Ok(Value::string(s, head).into_pipeline_data_with_metadata(Some(metadata)))
 })
 }
 
@@ -508,6 +512,10 @@ impl Job {
 
 #[cfg(test)]
 mod test {
+use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+
+use crate::Metadata;
+
 use super::*;
 
 #[test]
@@ -516,4 +524,34 @@ mod test {
 
 test_examples(ToXml {})
 }
+
+#[test]
+fn test_content_type_metadata() {
+let mut engine_state = Box::new(EngineState::new());
+let delta = {
+// Base functions that are needed for testing
+// Try to keep this working set small to keep tests running as fast as possible
+let mut working_set = StateWorkingSet::new(&engine_state);
+
+working_set.add_decl(Box::new(ToXml {}));
+working_set.add_decl(Box::new(Metadata {}));
+
+working_set.render()
+};
+
+engine_state
+.merge_delta(delta)
+.expect("Error merging delta");
+
+let cmd = "{tag: note attributes: {} content : [{tag: remember attributes: {} content : [{tag: null attributes: null content : Event}]}]} | to xml | metadata | get content_type";
+let result = eval_pipeline_without_terminal_expression(
+cmd,
+std::env::temp_dir().as_ref(),
+&mut engine_state,
+);
+assert_eq!(
+Value::test_record(record!("content_type" => Value::test_string("application/xml"))),
+result.expect("There should be a result")
+);
+}
 }

@@ -95,11 +95,18 @@ pub fn value_to_yaml_value(v: &Value) -> Result<serde_yaml::Value, ShellError> {
 }
 
 fn to_yaml(input: PipelineData, head: Span) -> Result<PipelineData, ShellError> {
+let metadata = input
+.metadata()
+.unwrap_or_default()
+.with_content_type(Some("application/yaml".into()));
 let value = input.into_value(head)?;
 
 let yaml_value = value_to_yaml_value(&value)?;
 match serde_yaml::to_string(&yaml_value) {
-Ok(serde_yaml_string) => Ok(Value::string(serde_yaml_string, head).into_pipeline_data()),
+Ok(serde_yaml_string) => {
+Ok(Value::string(serde_yaml_string, head)
+.into_pipeline_data_with_metadata(Some(metadata)))
+}
 _ => Ok(Value::error(
 ShellError::CantConvert {
 to_type: "YAML".into(),
@@ -109,12 +116,16 @@ fn to_yaml(input: PipelineData, head: Span) -> Result<PipelineData, ShellError>
 },
 head,
 )
-.into_pipeline_data()),
+.into_pipeline_data_with_metadata(Some(metadata))),
 }
 }
 
 #[cfg(test)]
 mod test {
+use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+
+use crate::Metadata;
+
 use super::*;
 
 #[test]
@@ -123,4 +134,34 @@ mod test {
 
 test_examples(ToYaml {})
 }
+
+#[test]
+fn test_content_type_metadata() {
+let mut engine_state = Box::new(EngineState::new());
+let delta = {
+// Base functions that are needed for testing
+// Try to keep this working set small to keep tests running as fast as possible
+let mut working_set = StateWorkingSet::new(&engine_state);
+
+working_set.add_decl(Box::new(ToYaml {}));
+working_set.add_decl(Box::new(Metadata {}));
+
+working_set.render()
+};
+
+engine_state
+.merge_delta(delta)
+.expect("Error merging delta");
+
+let cmd = "{a: 1 b: 2} | to yaml | metadata | get content_type";
+let result = eval_pipeline_without_terminal_expression(
+cmd,
+std::env::temp_dir().as_ref(),
+&mut engine_state,
+);
+assert_eq!(
+Value::test_record(record!("content_type" => Value::test_string("application/yaml"))),
+result.expect("There should be a result")
+);
+}
 }

@@ -29,6 +29,10 @@ impl Command for SubCommand {
 vec!["square", "root"]
 }
 
+fn is_const(&self) -> bool {
+true
+}
+
 fn run(
 &self,
 engine_state: &EngineState,
@@ -44,6 +48,23 @@ impl Command for SubCommand {
 input.map(move |value| operate(value, head), engine_state.signals())
 }
 
+fn run_const(
+&self,
+working_set: &StateWorkingSet,
+call: &Call,
+input: PipelineData,
+) -> Result<PipelineData, ShellError> {
+let head = call.head;
+// This doesn't match explicit nulls
+if matches!(input, PipelineData::Empty) {
+return Err(ShellError::PipelineEmpty { dst_span: head });
+}
+input.map(
+move |value| operate(value, head),
+working_set.permanent().signals(),
+)
+}
+
 fn examples(&self) -> Vec<Example> {
 vec![Example {
 description: "Compute the square root of each number in a list",

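With `is_const` and `run_const` added above, `math sqrt` can now be evaluated at parse time. A minimal sketch of what that enables, assuming the usual const-evaluation rules apply:

const ROOTS = ([4 9 16] | math sqrt)    # allowed now that the command is const-capable; computed during parsing
$ROOTS                                  # the precomputed square roots
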
@@ -141,17 +141,17 @@ pub fn request_add_authorization_header(
 let login = match (user, password) {
 (Some(user), Some(password)) => {
 let mut enc_str = String::new();
-base64_engine.encode_string(&format!("{user}:{password}"), &mut enc_str);
+base64_engine.encode_string(format!("{user}:{password}"), &mut enc_str);
 Some(enc_str)
 }
 (Some(user), _) => {
 let mut enc_str = String::new();
-base64_engine.encode_string(&format!("{user}:"), &mut enc_str);
+base64_engine.encode_string(format!("{user}:"), &mut enc_str);
 Some(enc_str)
 }
 (_, Some(password)) => {
 let mut enc_str = String::new();
-base64_engine.encode_string(&format!(":{password}"), &mut enc_str);
+base64_engine.encode_string(format!(":{password}"), &mut enc_str);
 Some(enc_str)
 }
 _ => None,

@@ -19,12 +19,17 @@ impl Command for SubCommand {
 Signature::build("random chars")
 .input_output_types(vec![(Type::Nothing, Type::String)])
 .allow_variants_without_examples(true)
-.named("length", SyntaxShape::Int, "Number of chars", Some('l'))
+.named(
+"length",
+SyntaxShape::Int,
+"Number of chars (default 25)",
+Some('l'),
+)
 .category(Category::Random)
 }
 
 fn usage(&self) -> &str {
-"Generate random chars."
+"Generate random chars uniformly distributed over ASCII letters and numbers: a-z, A-Z and 0-9."
 }
 
 fn search_terms(&self) -> Vec<&str> {
@@ -44,7 +49,7 @@ impl Command for SubCommand {
 fn examples(&self) -> Vec<Example> {
 vec![
 Example {
-description: "Generate random chars",
+description: "Generate a string with 25 random chars",
 example: "random chars",
 result: None,
 },

@@ -15,7 +15,11 @@ impl Command for SubCommand {
 Signature::build("random int")
 .input_output_types(vec![(Type::Nothing, Type::Int)])
 .allow_variants_without_examples(true)
-.optional("range", SyntaxShape::Range, "Range of values.")
+.optional(
+"range",
+SyntaxShape::Range,
+"Range of potential values, inclusive of both start and end values.",
+)
 .category(Category::Random)
 }
 
@@ -40,12 +44,12 @@ impl Command for SubCommand {
 fn examples(&self) -> Vec<Example> {
 vec![
 Example {
-description: "Generate an unconstrained random integer",
+description: "Generate a non-negative random integer",
 example: "random int",
 result: None,
 },
 Example {
-description: "Generate a random integer less than or equal to 500",
+description: "Generate a random integer between 0 (inclusive) and 500 (inclusive)",
 example: "random int ..500",
 result: None,
 },
@@ -55,8 +59,8 @@ impl Command for SubCommand {
 result: None,
 },
 Example {
-description: "Generate a random integer between 1 and 10",
-example: "random int 1..10",
+description: "Generate a random integer between -10 (inclusive) and 10 (inclusive)",
+example: "random int (-10)..10",
 result: None,
 },
 ]

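The rewritten descriptions make the range semantics explicit: both endpoints are inclusive, and an open start defaults to 0. The updated examples, for reference:

random int                 # a non-negative random integer
random int ..500           # between 0 and 500, inclusive of both ends
random int (-10)..10       # between -10 and 10, inclusive of both ends
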
@@ -187,7 +187,7 @@ fn split_words_helper(v: &Value, word_length: Option<usize>, span: Span, graphem
 // [^[:alpha:]\'] = do not match any uppercase or lowercase letters or apostrophes
 // [^\p{L}\'] = do not match any unicode uppercase or lowercase letters or apostrophes
 // Let's go with the unicode one in hopes that it works on more than just ascii characters
-let regex_replace = Regex::new(r"[^\p{L}\']").expect("regular expression error");
+let regex_replace = Regex::new(r"[^\p{L}\p{N}\']").expect("regular expression error");
 let v_span = v.span();
 
 match v {
@@ -422,4 +422,9 @@ mod test {
 
 test_examples(SubCommand {})
 }
+#[test]
+fn mixed_letter_number() {
+let actual = nu!(r#"echo "a1 b2 c3" | split words | str join ','"#);
+assert_eq!(actual.out, "a1,b2,c3");
+}
 }

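Adding `\p{N}` to the replacement regex keeps digits inside words instead of stripping them away; the new mixed_letter_number test pins this down. The same check can be run by hand:

"a1 b2 c3" | split words | str join ','    # now yields a1,b2,c3 rather than dropping the digits
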
@@ -1,7 +1,7 @@
+use nu_path::Path;
 use nu_test_support::fs::Stub::EmptyFile;
 use nu_test_support::nu;
 use nu_test_support::playground::Playground;
-use std::path::PathBuf;
 
 #[test]
 fn cd_works_with_in_var() {
@@ -22,7 +22,7 @@ fn filesystem_change_from_current_directory_using_relative_path() {
 Playground::setup("cd_test_1", |dirs, _| {
 let actual = nu!( cwd: dirs.root(), "cd cd_test_1; $env.PWD");
 
-assert_eq!(PathBuf::from(actual.out), *dirs.test());
+assert_eq!(Path::new(&actual.out), dirs.test());
 })
 }
 
@@ -32,7 +32,7 @@ fn filesystem_change_from_current_directory_using_relative_path_with_trailing_sl
 // Intentionally not using correct path sep because this should work on Windows
 let actual = nu!( cwd: dirs.root(), "cd cd_test_1_slash/; $env.PWD");
 
-assert_eq!(PathBuf::from(actual.out), *dirs.test());
+assert_eq!(Path::new(&actual.out), *dirs.test());
 })
 }
 
@@ -48,7 +48,7 @@ fn filesystem_change_from_current_directory_using_absolute_path() {
 dirs.formats().display()
 );
 
-assert_eq!(PathBuf::from(actual.out), dirs.formats());
+assert_eq!(Path::new(&actual.out), dirs.formats());
 })
 }
 
@@ -65,7 +65,7 @@ fn filesystem_change_from_current_directory_using_absolute_path_with_trailing_sl
 std::path::MAIN_SEPARATOR_STR,
 );
 
-assert_eq!(PathBuf::from(actual.out), dirs.formats());
+assert_eq!(Path::new(&actual.out), dirs.formats());
 })
 }
 
@@ -84,7 +84,7 @@ fn filesystem_switch_back_to_previous_working_directory() {
 dirs.test().display()
 );
 
-assert_eq!(PathBuf::from(actual.out), dirs.test().join("odin"));
+assert_eq!(Path::new(&actual.out), dirs.test().join("odin"));
 })
 }
 
@@ -101,10 +101,7 @@ fn filesystem_change_from_current_directory_using_relative_path_and_dash() {
 "
 );
 
-assert_eq!(
-PathBuf::from(actual.out),
-dirs.test().join("odin").join("-")
-);
+assert_eq!(Path::new(&actual.out), dirs.test().join("odin").join("-"));
 })
 }
 
@@ -119,7 +116,7 @@ fn filesystem_change_current_directory_to_parent_directory() {
 "
 );
 
-assert_eq!(PathBuf::from(actual.out), *dirs.root());
+assert_eq!(Path::new(&actual.out), *dirs.root());
 })
 }
 
@@ -136,7 +133,7 @@ fn filesystem_change_current_directory_to_two_parents_up_using_multiple_dots() {
 "
 );
 
-assert_eq!(PathBuf::from(actual.out), *dirs.test());
+assert_eq!(Path::new(&actual.out), *dirs.test());
 })
 }
 
@@ -151,7 +148,7 @@ fn filesystem_change_to_home_directory() {
 "
 );
 
-assert_eq!(Some(PathBuf::from(actual.out)), dirs::home_dir());
+assert_eq!(Path::new(&actual.out), dirs::home_dir().unwrap());
 })
 }
 
@@ -169,7 +166,7 @@ fn filesystem_change_to_a_directory_containing_spaces() {
 );
 
 assert_eq!(
-PathBuf::from(actual.out),
+Path::new(&actual.out),
 dirs.test().join("robalino turner katz")
 );
 })
@@ -234,7 +231,7 @@ fn filesystem_change_directory_to_symlink_relative() {
 $env.PWD
 "
 );
-assert_eq!(PathBuf::from(actual.out), dirs.test().join("foo_link"));
+assert_eq!(Path::new(&actual.out), dirs.test().join("foo_link"));
 
 let actual = nu!(
 cwd: dirs.test().join("boo"),
@@ -243,7 +240,7 @@ fn filesystem_change_directory_to_symlink_relative() {
 $env.PWD
"
|
"
|
||||||
);
|
);
|
||||||
assert_eq!(PathBuf::from(actual.out), dirs.test().join("foo"));
|
assert_eq!(Path::new(&actual.out), dirs.test().join("foo"));
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -95,13 +95,13 @@ fn capture_error_with_both_stdout_stderr_messages_not_hang_nushell() {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn combined_pipe_redirection() {
|
fn combined_pipe_redirection() {
|
||||||
let actual = nu!("$env.FOO = hello; $env.BAR = world; nu --testbin echo_env_mixed out-err FOO BAR o+e>| complete | get stdout");
|
let actual = nu!("$env.FOO = 'hello'; $env.BAR = 'world'; nu --testbin echo_env_mixed out-err FOO BAR o+e>| complete | get stdout");
|
||||||
assert_eq!(actual.out, "helloworld");
|
assert_eq!(actual.out, "helloworld");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn err_pipe_redirection() {
|
fn err_pipe_redirection() {
|
||||||
let actual =
|
let actual =
|
||||||
nu!("$env.FOO = hello; nu --testbin echo_env_stderr FOO e>| complete | get stdout");
|
nu!("$env.FOO = 'hello'; nu --testbin echo_env_stderr FOO e>| complete | get stdout");
|
||||||
assert_eq!(actual.out, "hello");
|
assert_eq!(actual.out, "hello");
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
use std::{io::Write, path::PathBuf};
|
|
||||||
|
|
||||||
use chrono::{DateTime, FixedOffset};
|
use chrono::{DateTime, FixedOffset};
|
||||||
|
use nu_path::AbsolutePathBuf;
|
||||||
use nu_protocol::{ast::PathMember, record, Span, Value};
|
use nu_protocol::{ast::PathMember, record, Span, Value};
|
||||||
use nu_test_support::{
|
use nu_test_support::{
|
||||||
fs::{line_ending, Stub},
|
fs::{line_ending, Stub},
|
||||||
|
@ -13,6 +12,7 @@ use rand::{
|
||||||
rngs::StdRng,
|
rngs::StdRng,
|
||||||
Rng, SeedableRng,
|
Rng, SeedableRng,
|
||||||
};
|
};
|
||||||
|
use std::io::Write;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn into_sqlite_schema() {
|
fn into_sqlite_schema() {
|
||||||
|
@ -453,7 +453,7 @@ impl Distribution<TestRow> for Standard {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_sqlite_db(dirs: &Dirs, nu_table: &str) -> PathBuf {
|
fn make_sqlite_db(dirs: &Dirs, nu_table: &str) -> AbsolutePathBuf {
|
||||||
let testdir = dirs.test();
|
let testdir = dirs.test();
|
||||||
let testdb_path =
|
let testdb_path =
|
||||||
testdir.join(testdir.file_name().unwrap().to_str().unwrap().to_owned() + ".db");
|
testdir.join(testdir.file_name().unwrap().to_str().unwrap().to_owned() + ".db");
|
||||||
|
@ -465,7 +465,7 @@ fn make_sqlite_db(dirs: &Dirs, nu_table: &str) -> PathBuf {
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(nucmd.status.success());
|
assert!(nucmd.status.success());
|
||||||
testdb_path.into()
|
testdb_path
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert_test_rows(dirs: &Dirs, nu_table: &str, sql_query: Option<&str>, expected: Vec<TestRow>) {
|
fn insert_test_rows(dirs: &Dirs, nu_table: &str, sql_query: Option<&str>, expected: Vec<TestRow>) {
|
||||||
|
|
|
@ -23,6 +23,13 @@ fn let_takes_pipeline() {
|
||||||
assert_eq!(actual.out, "11");
|
assert_eq!(actual.out, "11");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn let_takes_pipeline_with_declared_type() {
|
||||||
|
let actual = nu!(r#"let x: list<string> = [] | append "hello world"; print $x.0"#);
|
||||||
|
|
||||||
|
assert_eq!(actual.out, "hello world");
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn let_pipeline_allows_in() {
|
fn let_pipeline_allows_in() {
|
||||||
let actual =
|
let actual =
|
||||||
|
@ -38,6 +45,13 @@ fn mut_takes_pipeline() {
|
||||||
assert_eq!(actual.out, "11");
|
assert_eq!(actual.out, "11");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn mut_takes_pipeline_with_declared_type() {
|
||||||
|
let actual = nu!(r#"mut x: list<string> = [] | append "hello world"; print $x.0"#);
|
||||||
|
|
||||||
|
assert_eq!(actual.out, "hello world");
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn mut_pipeline_allows_in() {
|
fn mut_pipeline_allows_in() {
|
||||||
let actual =
|
let actual =
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
|
use nu_path::AbsolutePath;
|
||||||
use nu_test_support::nu;
|
use nu_test_support::nu;
|
||||||
use nu_test_support::playground::Playground;
|
use nu_test_support::playground::Playground;
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn creates_temp_file() {
|
fn creates_temp_file() {
|
||||||
|
@ -9,7 +9,7 @@ fn creates_temp_file() {
|
||||||
cwd: dirs.test(),
|
cwd: dirs.test(),
|
||||||
"mktemp"
|
"mktemp"
|
||||||
);
|
);
|
||||||
let loc = PathBuf::from(output.out.clone());
|
let loc = AbsolutePath::try_new(&output.out).unwrap();
|
||||||
println!("{:?}", loc);
|
println!("{:?}", loc);
|
||||||
assert!(loc.exists());
|
assert!(loc.exists());
|
||||||
})
|
})
|
||||||
|
@ -22,7 +22,7 @@ fn creates_temp_file_with_suffix() {
|
||||||
cwd: dirs.test(),
|
cwd: dirs.test(),
|
||||||
"mktemp --suffix .txt tempfileXXX"
|
"mktemp --suffix .txt tempfileXXX"
|
||||||
);
|
);
|
||||||
let loc = PathBuf::from(output.out.clone());
|
let loc = AbsolutePath::try_new(&output.out).unwrap();
|
||||||
assert!(loc.exists());
|
assert!(loc.exists());
|
||||||
assert!(loc.is_file());
|
assert!(loc.is_file());
|
||||||
assert!(output.out.ends_with(".txt"));
|
assert!(output.out.ends_with(".txt"));
|
||||||
|
@ -37,8 +37,7 @@ fn creates_temp_directory() {
|
||||||
cwd: dirs.test(),
|
cwd: dirs.test(),
|
||||||
"mktemp -d"
|
"mktemp -d"
|
||||||
);
|
);
|
||||||
|
let loc = AbsolutePath::try_new(&output.out).unwrap();
|
||||||
let loc = PathBuf::from(output.out);
|
|
||||||
assert!(loc.exists());
|
assert!(loc.exists());
|
||||||
assert!(loc.is_dir());
|
assert!(loc.is_dir());
|
||||||
})
|
})
|
||||||
|
|
|
@ -2,7 +2,6 @@ use nu_test_support::fs::{files_exist_at, Stub::EmptyFile, Stub::FileWithContent
|
||||||
use nu_test_support::nu;
|
use nu_test_support::nu;
|
||||||
use nu_test_support::playground::Playground;
|
use nu_test_support::playground::Playground;
|
||||||
use rstest::rstest;
|
use rstest::rstest;
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn moves_a_file() {
|
fn moves_a_file() {
|
||||||
|
@ -96,7 +95,7 @@ fn moves_the_directory_inside_directory_if_path_to_move_is_existing_directory()
|
||||||
|
|
||||||
assert!(!original_dir.exists());
|
assert!(!original_dir.exists());
|
||||||
assert!(expected.exists());
|
assert!(expected.exists());
|
||||||
assert!(files_exist_at(vec!["jttxt"], expected))
|
assert!(files_exist_at(&["jttxt"], expected))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
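Most `files_exist_at` call sites in this commit drop the `vec![...]`/`Path::new(...)` wrapping in favor of plain `&["jttxt"]`-style slices. A sketch of the signature style that lets both spellings compile (the real helper lives in nu-test-support; its exact signature and body are assumptions here):

```rust
use std::path::Path;

// Hypothetical stand-in: any slice of `AsRef<Path>` items is accepted, so
// `&["jttxt"]` and `&[Path::new("jttxt")]` both type-check at the call site.
fn files_exist_at(files: &[impl AsRef<Path>], path: impl AsRef<Path>) -> bool {
    files.iter().all(|f| path.as_ref().join(f).exists())
}

fn main() {
    // Both spellings compile; whether they hold depends on the working directory.
    let _ = files_exist_at(&["Cargo.toml"], ".");
    let _ = files_exist_at(&[Path::new("Cargo.toml")], ".");
}
```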
@ -125,7 +124,7 @@ fn moves_using_path_with_wildcard() {
|
||||||
nu!(cwd: work_dir, "mv ../originals/*.ini ../expected");
|
nu!(cwd: work_dir, "mv ../originals/*.ini ../expected");
|
||||||
|
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(
|
||||||
vec!["yehuda.ini", "jt.ini", "sample.ini", "andres.ini",],
|
&["yehuda.ini", "jt.ini", "sample.ini", "andres.ini",],
|
||||||
expected
|
expected
|
||||||
));
|
));
|
||||||
})
|
})
|
||||||
|
@ -152,7 +151,7 @@ fn moves_using_a_glob() {
|
||||||
|
|
||||||
assert!(meal_dir.exists());
|
assert!(meal_dir.exists());
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(
|
||||||
vec!["arepa.txt", "empanada.txt", "taquiza.txt",],
|
&["arepa.txt", "empanada.txt", "taquiza.txt",],
|
||||||
expected
|
expected
|
||||||
));
|
));
|
||||||
})
|
})
|
||||||
|
@ -184,7 +183,7 @@ fn moves_a_directory_with_files() {
|
||||||
assert!(!original_dir.exists());
|
assert!(!original_dir.exists());
|
||||||
assert!(expected_dir.exists());
|
assert!(expected_dir.exists());
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(
|
||||||
vec![
|
&[
|
||||||
"car/car1.txt",
|
"car/car1.txt",
|
||||||
"car/car2.txt",
|
"car/car2.txt",
|
||||||
"bicycle/bicycle1.txt",
|
"bicycle/bicycle1.txt",
|
||||||
|
@ -322,7 +321,7 @@ fn move_files_using_glob_two_parents_up_using_multiple_dots() {
|
||||||
"#
|
"#
|
||||||
);
|
);
|
||||||
|
|
||||||
let files = vec![
|
let files = &[
|
||||||
"yehuda.yaml",
|
"yehuda.yaml",
|
||||||
"jtjson",
|
"jtjson",
|
||||||
"andres.xml",
|
"andres.xml",
|
||||||
|
@ -333,7 +332,7 @@ fn move_files_using_glob_two_parents_up_using_multiple_dots() {
|
||||||
let original_dir = dirs.test().join("foo/bar");
|
let original_dir = dirs.test().join("foo/bar");
|
||||||
let destination_dir = dirs.test();
|
let destination_dir = dirs.test();
|
||||||
|
|
||||||
assert!(files_exist_at(files.clone(), destination_dir));
|
assert!(files_exist_at(files, destination_dir));
|
||||||
assert!(!files_exist_at(files, original_dir))
|
assert!(!files_exist_at(files, original_dir))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -440,10 +439,7 @@ fn mv_change_case_of_directory() {
|
||||||
);
|
);
|
||||||
|
|
||||||
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
|
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(&["somefile.txt"], dirs.test().join(new_dir)));
|
||||||
vec!["somefile.txt",],
|
|
||||||
dirs.test().join(new_dir)
|
|
||||||
));
|
|
||||||
|
|
||||||
#[cfg(not(any(target_os = "linux", target_os = "freebsd")))]
|
#[cfg(not(any(target_os = "linux", target_os = "freebsd")))]
|
||||||
_actual.err.contains("to a subdirectory of itself");
|
_actual.err.contains("to a subdirectory of itself");
|
||||||
|
@ -647,10 +643,10 @@ fn test_cp_inside_glob_metachars_dir() {
|
||||||
|
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(!files_exist_at(
|
assert!(!files_exist_at(
|
||||||
vec!["test_file.txt"],
|
&["test_file.txt"],
|
||||||
dirs.test().join(sub_dir)
|
dirs.test().join(sub_dir)
|
||||||
));
|
));
|
||||||
assert!(files_exist_at(vec!["test_file.txt"], dirs.test()));
|
assert!(files_exist_at(&["test_file.txt"], dirs.test()));
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -667,19 +663,13 @@ fn mv_with_tilde() {
|
||||||
// mv file
|
// mv file
|
||||||
let actual = nu!(cwd: dirs.test(), "mv '~tilde/f1.txt' ./");
|
let actual = nu!(cwd: dirs.test(), "mv '~tilde/f1.txt' ./");
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(!files_exist_at(
|
assert!(!files_exist_at(&["f1.txt"], dirs.test().join("~tilde")));
|
||||||
vec![Path::new("f1.txt")],
|
assert!(files_exist_at(&["f1.txt"], dirs.test()));
|
||||||
dirs.test().join("~tilde")
|
|
||||||
));
|
|
||||||
assert!(files_exist_at(vec![Path::new("f1.txt")], dirs.test()));
|
|
||||||
|
|
||||||
// pass variable
|
// pass variable
|
||||||
let actual = nu!(cwd: dirs.test(), "let f = '~tilde/f2.txt'; mv $f ./");
|
let actual = nu!(cwd: dirs.test(), "let f = '~tilde/f2.txt'; mv $f ./");
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(!files_exist_at(
|
assert!(!files_exist_at(&["f2.txt"], dirs.test().join("~tilde")));
|
||||||
vec![Path::new("f2.txt")],
|
assert!(files_exist_at(&["f1.txt"], dirs.test()));
|
||||||
dirs.test().join("~tilde")
|
|
||||||
));
|
|
||||||
assert!(files_exist_at(vec![Path::new("f1.txt")], dirs.test()));
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,9 +1,8 @@
|
||||||
|
use nu_path::Path;
|
||||||
use nu_test_support::fs::Stub::EmptyFile;
|
use nu_test_support::fs::Stub::EmptyFile;
|
||||||
use nu_test_support::playground::Playground;
|
use nu_test_support::playground::Playground;
|
||||||
use nu_test_support::{nu, pipeline};
|
use nu_test_support::{nu, pipeline};
|
||||||
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn expands_path_with_dot() {
|
fn expands_path_with_dot() {
|
||||||
Playground::setup("path_expand_1", |dirs, sandbox| {
|
Playground::setup("path_expand_1", |dirs, sandbox| {
|
||||||
|
@ -18,7 +17,7 @@ fn expands_path_with_dot() {
|
||||||
));
|
));
|
||||||
|
|
||||||
let expected = dirs.test.join("menu").join("spam.txt");
|
let expected = dirs.test.join("menu").join("spam.txt");
|
||||||
assert_eq!(PathBuf::from(actual.out), expected);
|
assert_eq!(Path::new(&actual.out), expected);
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -38,7 +37,7 @@ fn expands_path_without_follow_symlink() {
|
||||||
));
|
));
|
||||||
|
|
||||||
let expected = dirs.test.join("menu").join("spam_link.ln");
|
let expected = dirs.test.join("menu").join("spam_link.ln");
|
||||||
assert_eq!(PathBuf::from(actual.out), expected);
|
assert_eq!(Path::new(&actual.out), expected);
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -56,7 +55,7 @@ fn expands_path_with_double_dot() {
|
||||||
));
|
));
|
||||||
|
|
||||||
let expected = dirs.test.join("menu").join("spam.txt");
|
let expected = dirs.test.join("menu").join("spam.txt");
|
||||||
assert_eq!(PathBuf::from(actual.out), expected);
|
assert_eq!(Path::new(&actual.out), expected);
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -74,7 +73,7 @@ fn const_path_expand() {
|
||||||
));
|
));
|
||||||
|
|
||||||
let expected = dirs.test.join("menu").join("spam.txt");
|
let expected = dirs.test.join("menu").join("spam.txt");
|
||||||
assert_eq!(PathBuf::from(actual.out), expected);
|
assert_eq!(Path::new(&actual.out), expected);
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -92,7 +91,7 @@ mod windows {
|
||||||
"#
|
"#
|
||||||
));
|
));
|
||||||
|
|
||||||
assert!(!PathBuf::from(actual.out).starts_with("~"));
|
assert!(!Path::new(&actual.out).starts_with("~"));
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -106,7 +105,7 @@ mod windows {
|
||||||
"#
|
"#
|
||||||
));
|
));
|
||||||
|
|
||||||
assert!(!PathBuf::from(actual.out).starts_with("~"));
|
assert!(!Path::new(&actual.out).starts_with("~"));
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -131,7 +130,7 @@ mod windows {
|
||||||
));
|
));
|
||||||
|
|
||||||
let expected = dirs.test.join("menu").join("spam_link.ln");
|
let expected = dirs.test.join("menu").join("spam_link.ln");
|
||||||
assert_eq!(PathBuf::from(actual.out), expected);
|
assert_eq!(Path::new(&actual.out), expected);
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,5 +3,4 @@ mod chars;
|
||||||
mod dice;
|
mod dice;
|
||||||
mod float;
|
mod float;
|
||||||
mod int;
|
mod int;
|
||||||
#[cfg(feature = "uuid_crate")]
|
|
||||||
mod uuid;
|
mod uuid;
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
use nu_test_support::nu;
|
use nu_test_support::nu;
|
||||||
use uuid_crate::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn generates_valid_uuid4() {
|
fn generates_valid_uuid4() {
|
||||||
|
|
|
@ -1,10 +1,11 @@
|
||||||
|
#[cfg(not(windows))]
|
||||||
use nu_path::AbsolutePath;
|
use nu_path::AbsolutePath;
|
||||||
use nu_test_support::fs::{files_exist_at, Stub::EmptyFile};
|
use nu_test_support::fs::{files_exist_at, Stub::EmptyFile};
|
||||||
use nu_test_support::nu;
|
use nu_test_support::nu;
|
||||||
use nu_test_support::playground::Playground;
|
use nu_test_support::playground::Playground;
|
||||||
use rstest::rstest;
|
use rstest::rstest;
|
||||||
|
#[cfg(not(windows))]
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn removes_a_file() {
|
fn removes_a_file() {
|
||||||
|
@ -48,7 +49,7 @@ fn removes_files_with_wildcard() {
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(!files_exist_at(
|
assert!(!files_exist_at(
|
||||||
vec![
|
&[
|
||||||
"src/parser/parse/token_tree.rs",
|
"src/parser/parse/token_tree.rs",
|
||||||
"src/parser/hir/baseline_parse.rs",
|
"src/parser/hir/baseline_parse.rs",
|
||||||
"src/parser/hir/baseline_parse_tokens.rs"
|
"src/parser/hir/baseline_parse_tokens.rs"
|
||||||
|
@ -89,7 +90,7 @@ fn removes_deeply_nested_directories_with_wildcard_and_recursive_flag() {
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(!files_exist_at(
|
assert!(!files_exist_at(
|
||||||
vec!["src/parser/parse", "src/parser/hir"],
|
&["src/parser/parse", "src/parser/hir"],
|
||||||
dirs.test()
|
dirs.test()
|
||||||
));
|
));
|
||||||
})
|
})
|
||||||
|
@ -144,7 +145,7 @@ fn errors_if_attempting_to_delete_home() {
|
||||||
Playground::setup("rm_test_8", |dirs, _| {
|
Playground::setup("rm_test_8", |dirs, _| {
|
||||||
let actual = nu!(
|
let actual = nu!(
|
||||||
cwd: dirs.root(),
|
cwd: dirs.root(),
|
||||||
"$env.HOME = myhome ; rm -rf ~"
|
"$env.HOME = 'myhome' ; rm -rf ~"
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(actual.err.contains("please use -I or -i"));
|
assert!(actual.err.contains("please use -I or -i"));
|
||||||
|
@ -275,7 +276,7 @@ fn remove_files_from_two_parents_up_using_multiple_dots_and_glob() {
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(!files_exist_at(
|
assert!(!files_exist_at(
|
||||||
vec!["yehuda.txt", "jttxt", "kevin.txt"],
|
&["yehuda.txt", "jttxt", "kevin.txt"],
|
||||||
dirs.test()
|
dirs.test()
|
||||||
));
|
));
|
||||||
})
|
})
|
||||||
|
@ -303,8 +304,8 @@ fn rm_wildcard_keeps_dotfiles() {
|
||||||
r#"rm *"#
|
r#"rm *"#
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(!files_exist_at(vec!["foo"], dirs.test()));
|
assert!(!files_exist_at(&["foo"], dirs.test()));
|
||||||
assert!(files_exist_at(vec![".bar"], dirs.test()));
|
assert!(files_exist_at(&[".bar"], dirs.test()));
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -318,8 +319,8 @@ fn rm_wildcard_leading_dot_deletes_dotfiles() {
|
||||||
"rm .*"
|
"rm .*"
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(files_exist_at(vec!["foo"], dirs.test()));
|
assert!(files_exist_at(&["foo"], dirs.test()));
|
||||||
assert!(!files_exist_at(vec![".bar"], dirs.test()));
|
assert!(!files_exist_at(&[".bar"], dirs.test()));
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -405,16 +406,19 @@ fn removes_file_after_cd() {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(not(windows))]
|
||||||
struct Cleanup<'a> {
|
struct Cleanup<'a> {
|
||||||
dir_to_clean: &'a AbsolutePath,
|
dir_to_clean: &'a AbsolutePath,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(not(windows))]
|
||||||
fn set_dir_read_only(directory: &AbsolutePath, read_only: bool) {
|
fn set_dir_read_only(directory: &AbsolutePath, read_only: bool) {
|
||||||
let mut permissions = fs::metadata(directory).unwrap().permissions();
|
let mut permissions = fs::metadata(directory).unwrap().permissions();
|
||||||
permissions.set_readonly(read_only);
|
permissions.set_readonly(read_only);
|
||||||
fs::set_permissions(directory, permissions).expect("failed to set directory permissions");
|
fs::set_permissions(directory, permissions).expect("failed to set directory permissions");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(not(windows))]
|
||||||
impl<'a> Drop for Cleanup<'a> {
|
impl<'a> Drop for Cleanup<'a> {
|
||||||
/// Restores write permissions to the given directory so that the Playground can be successfully
|
/// Restores write permissions to the given directory so that the Playground can be successfully
|
||||||
/// cleaned up.
|
/// cleaned up.
|
||||||
|
@ -448,7 +452,7 @@ fn rm_prints_filenames_on_error() {
|
||||||
// This rm is expected to fail, and stderr output indicating so is also expected.
|
// This rm is expected to fail, and stderr output indicating so is also expected.
|
||||||
let actual = nu!(cwd: test_dir, "rm test*.txt");
|
let actual = nu!(cwd: test_dir, "rm test*.txt");
|
||||||
|
|
||||||
assert!(files_exist_at(file_names.clone(), test_dir));
|
assert!(files_exist_at(&file_names, test_dir));
|
||||||
for file_name in file_names {
|
for file_name in file_names {
|
||||||
let path = test_dir.join(file_name);
|
let path = test_dir.join(file_name);
|
||||||
let substr = format!("Could not delete {}", path.to_string_lossy());
|
let substr = format!("Could not delete {}", path.to_string_lossy());
|
||||||
|
@ -477,7 +481,7 @@ fn rm_files_inside_glob_metachars_dir() {
|
||||||
|
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(!files_exist_at(
|
assert!(!files_exist_at(
|
||||||
vec!["test_file.txt"],
|
&["test_file.txt"],
|
||||||
dirs.test().join(sub_dir)
|
dirs.test().join(sub_dir)
|
||||||
));
|
));
|
||||||
});
|
});
|
||||||
|
@ -551,22 +555,16 @@ fn rm_with_tilde() {
|
||||||
|
|
||||||
let actual = nu!(cwd: dirs.test(), "rm '~tilde/f1.txt'");
|
let actual = nu!(cwd: dirs.test(), "rm '~tilde/f1.txt'");
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(!files_exist_at(
|
assert!(!files_exist_at(&["f1.txt"], dirs.test().join("~tilde")));
|
||||||
vec![Path::new("f1.txt")],
|
|
||||||
dirs.test().join("~tilde")
|
|
||||||
));
|
|
||||||
|
|
||||||
// pass variable
|
// pass variable
|
||||||
let actual = nu!(cwd: dirs.test(), "let f = '~tilde/f2.txt'; rm $f");
|
let actual = nu!(cwd: dirs.test(), "let f = '~tilde/f2.txt'; rm $f");
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(!files_exist_at(
|
assert!(!files_exist_at(&["f2.txt"], dirs.test().join("~tilde")));
|
||||||
vec![Path::new("f2.txt")],
|
|
||||||
dirs.test().join("~tilde")
|
|
||||||
));
|
|
||||||
|
|
||||||
// remove directory
|
// remove directory
|
||||||
let actual = nu!(cwd: dirs.test(), "let f = '~tilde'; rm -r $f");
|
let actual = nu!(cwd: dirs.test(), "let f = '~tilde'; rm -r $f");
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(!files_exist_at(vec![Path::new("~tilde")], dirs.test()));
|
assert!(!files_exist_at(&["~tilde"], dirs.test()));
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
|
@ -463,3 +463,65 @@ fn save_same_file_with_collect_and_filter() {
|
||||||
assert_eq!("helloworld", actual.out);
|
assert_eq!("helloworld", actual.out);
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn save_from_child_process_dont_sink_stderr() {
|
||||||
|
Playground::setup("save_test_22", |dirs, sandbox| {
|
||||||
|
sandbox.with_files(&[
|
||||||
|
Stub::FileWithContent("log.txt", "Old"),
|
||||||
|
Stub::FileWithContent("err.txt", "Old Err"),
|
||||||
|
]);
|
||||||
|
|
||||||
|
let expected_file = dirs.test().join("log.txt");
|
||||||
|
let expected_stderr_file = dirs.test().join("err.txt");
|
||||||
|
|
||||||
|
let actual = nu!(
|
||||||
|
cwd: dirs.root(),
|
||||||
|
r#"
|
||||||
|
$env.FOO = " New";
|
||||||
|
$env.BAZ = " New Err";
|
||||||
|
do -i {nu -n -c 'nu --testbin echo_env FOO; nu --testbin echo_env_stderr BAZ'} | save -a -r save_test_22/log.txt"#,
|
||||||
|
);
|
||||||
|
assert_eq!(actual.err.trim_end(), " New Err");
|
||||||
|
|
||||||
|
let actual = file_contents(expected_file);
|
||||||
|
assert_eq!(actual.trim_end(), "Old New");
|
||||||
|
|
||||||
|
let actual = file_contents(expected_stderr_file);
|
||||||
|
assert_eq!(actual.trim_end(), "Old Err");
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parent_redirection_doesnt_affect_save() {
|
||||||
|
Playground::setup("save_test_23", |dirs, sandbox| {
|
||||||
|
sandbox.with_files(&[
|
||||||
|
Stub::FileWithContent("log.txt", "Old"),
|
||||||
|
Stub::FileWithContent("err.txt", "Old Err"),
|
||||||
|
]);
|
||||||
|
|
||||||
|
let expected_file = dirs.test().join("log.txt");
|
||||||
|
let expected_stderr_file = dirs.test().join("err.txt");
|
||||||
|
|
||||||
|
let actual = nu!(
|
||||||
|
cwd: dirs.root(),
|
||||||
|
r#"
|
||||||
|
$env.FOO = " New";
|
||||||
|
$env.BAZ = " New Err";
|
||||||
|
def tttt [] {
|
||||||
|
do -i {nu -n -c 'nu --testbin echo_env FOO; nu --testbin echo_env_stderr BAZ'} | save -a -r save_test_23/log.txt
|
||||||
|
};
|
||||||
|
tttt e> ("save_test_23" | path join empty_file)"#
|
||||||
|
);
|
||||||
|
assert_eq!(actual.err.trim_end(), " New Err");
|
||||||
|
|
||||||
|
let actual = file_contents(expected_file);
|
||||||
|
assert_eq!(actual.trim_end(), "Old New");
|
||||||
|
|
||||||
|
let actual = file_contents(expected_stderr_file);
|
||||||
|
assert_eq!(actual.trim_end(), "Old Err");
|
||||||
|
|
||||||
|
let actual = file_contents(dirs.test().join("empty_file"));
|
||||||
|
assert_eq!(actual.trim_end(), "");
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
|
@ -2567,7 +2567,7 @@ fn theme_cmd(theme: &str, footer: bool, then: &str) -> String {
|
||||||
with_footer = "$env.config.footer_mode = \"always\"".to_string();
|
with_footer = "$env.config.footer_mode = \"always\"".to_string();
|
||||||
}
|
}
|
||||||
|
|
||||||
format!("$env.config.table.mode = {theme}; $env.config.table.header_on_separator = true; {with_footer}; {then}")
|
format!("$env.config.table.mode = \"{theme}\"; $env.config.table.header_on_separator = true; {with_footer}; {then}")
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|
|
@ -2,7 +2,6 @@ use chrono::{DateTime, Local};
|
||||||
use nu_test_support::fs::{files_exist_at, Stub};
|
use nu_test_support::fs::{files_exist_at, Stub};
|
||||||
use nu_test_support::nu;
|
use nu_test_support::nu;
|
||||||
use nu_test_support::playground::Playground;
|
use nu_test_support::playground::Playground;
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
// Use 1 instead of 0 because 0 has a special meaning in Windows
|
// Use 1 instead of 0 because 0 has a special meaning in Windows
|
||||||
const TIME_ONE: filetime::FileTime = filetime::FileTime::from_unix_time(1, 0);
|
const TIME_ONE: filetime::FileTime = filetime::FileTime::from_unix_time(1, 0);
|
||||||
|
@ -494,12 +493,12 @@ fn create_a_file_with_tilde() {
|
||||||
Playground::setup("touch with tilde", |dirs, _| {
|
Playground::setup("touch with tilde", |dirs, _| {
|
||||||
let actual = nu!(cwd: dirs.test(), "touch '~tilde'");
|
let actual = nu!(cwd: dirs.test(), "touch '~tilde'");
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(files_exist_at(vec![Path::new("~tilde")], dirs.test()));
|
assert!(files_exist_at(&["~tilde"], dirs.test()));
|
||||||
|
|
||||||
// pass variable
|
// pass variable
|
||||||
let actual = nu!(cwd: dirs.test(), "let f = '~tilde2'; touch $f");
|
let actual = nu!(cwd: dirs.test(), "let f = '~tilde2'; touch $f");
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(files_exist_at(vec![Path::new("~tilde2")], dirs.test()));
|
assert!(files_exist_at(&["~tilde2"], dirs.test()));
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -7,7 +7,6 @@ use nu_test_support::nu;
|
||||||
use nu_test_support::playground::Playground;
|
use nu_test_support::playground::Playground;
|
||||||
|
|
||||||
use rstest::rstest;
|
use rstest::rstest;
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
#[cfg(not(target_os = "windows"))]
|
#[cfg(not(target_os = "windows"))]
|
||||||
const PATH_SEPARATOR: &str = "/";
|
const PATH_SEPARATOR: &str = "/";
|
||||||
|
@ -131,11 +130,7 @@ fn copies_the_directory_inside_directory_if_path_to_copy_is_directory_and_with_r
|
||||||
|
|
||||||
assert!(expected_dir.exists());
|
assert!(expected_dir.exists());
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(
|
||||||
vec![
|
&["yehuda.txt", "jttxt", "andres.txt"],
|
||||||
Path::new("yehuda.txt"),
|
|
||||||
Path::new("jttxt"),
|
|
||||||
Path::new("andres.txt")
|
|
||||||
],
|
|
||||||
&expected_dir
|
&expected_dir
|
||||||
));
|
));
|
||||||
})
|
})
|
||||||
|
@ -181,15 +176,15 @@ fn deep_copies_with_recursive_flag_impl(progress: bool) {
|
||||||
|
|
||||||
assert!(expected_dir.exists());
|
assert!(expected_dir.exists());
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(
|
||||||
vec![Path::new("errors.txt"), Path::new("multishells.txt")],
|
&["errors.txt", "multishells.txt"],
|
||||||
jts_expected_copied_dir
|
jts_expected_copied_dir
|
||||||
));
|
));
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(
|
||||||
vec![Path::new("coverage.txt"), Path::new("commands.txt")],
|
&["coverage.txt", "commands.txt"],
|
||||||
andres_expected_copied_dir
|
andres_expected_copied_dir
|
||||||
));
|
));
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(
|
||||||
vec![Path::new("defer-evaluation.txt")],
|
&["defer-evaluation.txt"],
|
||||||
yehudas_expected_copied_dir
|
yehudas_expected_copied_dir
|
||||||
));
|
));
|
||||||
})
|
})
|
||||||
|
@ -220,13 +215,13 @@ fn copies_using_path_with_wildcard_impl(progress: bool) {
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(
|
||||||
vec![
|
&[
|
||||||
Path::new("caco3_plastics.csv"),
|
"caco3_plastics.csv",
|
||||||
Path::new("cargo_sample.toml"),
|
"cargo_sample.toml",
|
||||||
Path::new("jt.xml"),
|
"jt.xml",
|
||||||
Path::new("sample.ini"),
|
"sample.ini",
|
||||||
Path::new("sgml_description.json"),
|
"sgml_description.json",
|
||||||
Path::new("utf16.ini"),
|
"utf16.ini",
|
||||||
],
|
],
|
||||||
dirs.test()
|
dirs.test()
|
||||||
));
|
));
|
||||||
|
@ -265,13 +260,13 @@ fn copies_using_a_glob_impl(progress: bool) {
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(
|
||||||
vec![
|
&[
|
||||||
Path::new("caco3_plastics.csv"),
|
"caco3_plastics.csv",
|
||||||
Path::new("cargo_sample.toml"),
|
"cargo_sample.toml",
|
||||||
Path::new("jt.xml"),
|
"jt.xml",
|
||||||
Path::new("sample.ini"),
|
"sample.ini",
|
||||||
Path::new("sgml_description.json"),
|
"sgml_description.json",
|
||||||
Path::new("utf16.ini"),
|
"utf16.ini",
|
||||||
],
|
],
|
||||||
dirs.test()
|
dirs.test()
|
||||||
));
|
));
|
||||||
|
@ -341,7 +336,7 @@ fn copy_files_using_glob_two_parents_up_using_multiple_dots_imp(progress: bool)
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(
|
||||||
vec![
|
&[
|
||||||
"yehuda.yaml",
|
"yehuda.yaml",
|
||||||
"jtjson",
|
"jtjson",
|
||||||
"andres.xml",
|
"andres.xml",
|
||||||
|
@ -377,7 +372,7 @@ fn copy_file_and_dir_from_two_parents_up_using_multiple_dots_to_current_dir_recu
|
||||||
|
|
||||||
let expected = dirs.test().join("foo/bar");
|
let expected = dirs.test().join("foo/bar");
|
||||||
|
|
||||||
assert!(files_exist_at(vec!["hello_there", "hello_again"], expected));
|
assert!(files_exist_at(&["hello_there", "hello_again"], expected));
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -428,7 +423,7 @@ fn copy_dir_contains_symlink_ignored_impl(progress: bool) {
|
||||||
|
|
||||||
// check hello_there exists inside `tmp_dir_2`, and `dangle_symlink` doesn't exist inside `tmp_dir_2`.
|
// check hello_there exists inside `tmp_dir_2`, and `dangle_symlink` doesn't exist inside `tmp_dir_2`.
|
||||||
let expected = sandbox.cwd().join("tmp_dir_2");
|
let expected = sandbox.cwd().join("tmp_dir_2");
|
||||||
assert!(files_exist_at(vec!["hello_there"], expected));
|
assert!(files_exist_at(&["hello_there"], expected));
|
||||||
// GNU cp will copy the broken symlink, so following their behavior
|
// GNU cp will copy the broken symlink, so following their behavior
|
||||||
// thus commenting out below
|
// thus commenting out below
|
||||||
// let path = expected.join("dangle_symlink");
|
// let path = expected.join("dangle_symlink");
|
||||||
|
@ -461,7 +456,7 @@ fn copy_dir_contains_symlink_impl(progress: bool) {
|
||||||
|
|
||||||
// check hello_there exists inside `tmp_dir_2`, and `dangle_symlink` also exists inside `tmp_dir_2`.
|
// check hello_there exists inside `tmp_dir_2`, and `dangle_symlink` also exists inside `tmp_dir_2`.
|
||||||
let expected = sandbox.cwd().join("tmp_dir_2");
|
let expected = sandbox.cwd().join("tmp_dir_2");
|
||||||
assert!(files_exist_at(vec!["hello_there"], expected.clone()));
|
assert!(files_exist_at(&["hello_there"], expected.clone()));
|
||||||
let path = expected.join("dangle_symlink");
|
let path = expected.join("dangle_symlink");
|
||||||
assert!(path.is_symlink());
|
assert!(path.is_symlink());
|
||||||
});
|
});
|
||||||
|
@ -1151,10 +1146,10 @@ fn test_cp_inside_glob_metachars_dir() {
|
||||||
|
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(
|
||||||
vec!["test_file.txt"],
|
&["test_file.txt"],
|
||||||
dirs.test().join(sub_dir)
|
dirs.test().join(sub_dir)
|
||||||
));
|
));
|
||||||
assert!(files_exist_at(vec!["test_file.txt"], dirs.test()));
|
assert!(files_exist_at(&["test_file.txt"], dirs.test()));
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1167,10 +1162,7 @@ fn test_cp_to_customized_home_directory() {
|
||||||
let actual = nu!(cwd: dirs.test(), "mkdir test; cp test_file.txt ~/test/");
|
let actual = nu!(cwd: dirs.test(), "mkdir test; cp test_file.txt ~/test/");
|
||||||
|
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(&["test_file.txt"], dirs.test().join("test")));
|
||||||
vec!["test_file.txt"],
|
|
||||||
dirs.test().join("test")
|
|
||||||
));
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1193,20 +1185,14 @@ fn cp_with_tilde() {
|
||||||
// cp file
|
// cp file
|
||||||
let actual = nu!(cwd: dirs.test(), "cp '~tilde/f1.txt' ./");
|
let actual = nu!(cwd: dirs.test(), "cp '~tilde/f1.txt' ./");
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(&["f1.txt"], dirs.test().join("~tilde")));
|
||||||
vec![Path::new("f1.txt")],
|
assert!(files_exist_at(&["f1.txt"], dirs.test()));
|
||||||
dirs.test().join("~tilde")
|
|
||||||
));
|
|
||||||
assert!(files_exist_at(vec![Path::new("f1.txt")], dirs.test()));
|
|
||||||
|
|
||||||
// pass variable
|
// pass variable
|
||||||
let actual = nu!(cwd: dirs.test(), "let f = '~tilde/f2.txt'; cp $f ./");
|
let actual = nu!(cwd: dirs.test(), "let f = '~tilde/f2.txt'; cp $f ./");
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(&["f2.txt"], dirs.test().join("~tilde")));
|
||||||
vec![Path::new("f2.txt")],
|
assert!(files_exist_at(&["f1.txt"], dirs.test()));
|
||||||
dirs.test().join("~tilde")
|
|
||||||
));
|
|
||||||
assert!(files_exist_at(vec![Path::new("f1.txt")], dirs.test()));
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
use nu_test_support::fs::files_exist_at;
|
use nu_test_support::fs::files_exist_at;
|
||||||
use nu_test_support::playground::Playground;
|
use nu_test_support::playground::Playground;
|
||||||
use nu_test_support::{nu, pipeline};
|
use nu_test_support::{nu, pipeline};
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn creates_directory() {
|
fn creates_directory() {
|
||||||
|
@ -25,10 +24,7 @@ fn accepts_and_creates_directories() {
|
||||||
"mkdir dir_1 dir_2 dir_3"
|
"mkdir dir_1 dir_2 dir_3"
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(&["dir_1", "dir_2", "dir_3"], dirs.test()));
|
||||||
vec![Path::new("dir_1"), Path::new("dir_2"), Path::new("dir_3")],
|
|
||||||
dirs.test()
|
|
||||||
));
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -70,10 +66,7 @@ fn print_created_paths() {
|
||||||
pipeline("mkdir -v dir_1 dir_2 dir_3")
|
pipeline("mkdir -v dir_1 dir_2 dir_3")
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(files_exist_at(
|
assert!(files_exist_at(&["dir_1", "dir_2", "dir_3"], dirs.test()));
|
||||||
vec![Path::new("dir_1"), Path::new("dir_2"), Path::new("dir_3")],
|
|
||||||
dirs.test()
|
|
||||||
));
|
|
||||||
|
|
||||||
assert!(actual.out.contains("dir_1"));
|
assert!(actual.out.contains("dir_1"));
|
||||||
assert!(actual.out.contains("dir_2"));
|
assert!(actual.out.contains("dir_2"));
|
||||||
|
@ -165,11 +158,11 @@ fn mkdir_with_tilde() {
|
||||||
Playground::setup("mkdir with tilde", |dirs, _| {
|
Playground::setup("mkdir with tilde", |dirs, _| {
|
||||||
let actual = nu!(cwd: dirs.test(), "mkdir '~tilde'");
|
let actual = nu!(cwd: dirs.test(), "mkdir '~tilde'");
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(files_exist_at(vec![Path::new("~tilde")], dirs.test()));
|
assert!(files_exist_at(&["~tilde"], dirs.test()));
|
||||||
|
|
||||||
// pass variable
|
// pass variable
|
||||||
let actual = nu!(cwd: dirs.test(), "let f = '~tilde2'; mkdir $f");
|
let actual = nu!(cwd: dirs.test(), "let f = '~tilde2'; mkdir $f");
|
||||||
assert!(actual.err.is_empty());
|
assert!(actual.err.is_empty());
|
||||||
assert!(files_exist_at(vec![Path::new("~tilde2")], dirs.test()));
|
assert!(files_exist_at(&["~tilde2"], dirs.test()));
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,7 +5,7 @@ edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
name = "nu-derive-value"
|
name = "nu-derive-value"
|
||||||
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-derive-value"
|
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-derive-value"
|
||||||
version = "0.95.1"
|
version = "0.96.2"
|
||||||
|
|
||||||
[lib]
|
[lib]
|
||||||
proc-macro = true
|
proc-macro = true
|
||||||
|
|
|
@ -5,17 +5,18 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-engine"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
name = "nu-engine"
|
name = "nu-engine"
|
||||||
version = "0.95.1"
|
version = "0.96.2"
|
||||||
|
|
||||||
[lib]
|
[lib]
|
||||||
bench = false
|
bench = false
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
nu-protocol = { path = "../nu-protocol", features = ["plugin"], version = "0.95.1" }
|
nu-protocol = { path = "../nu-protocol", features = ["plugin"], version = "0.96.2" }
|
||||||
nu-path = { path = "../nu-path", version = "0.95.1" }
|
nu-path = { path = "../nu-path", version = "0.96.2" }
|
||||||
nu-glob = { path = "../nu-glob", version = "0.95.1" }
|
nu-glob = { path = "../nu-glob", version = "0.96.2" }
|
||||||
nu-utils = { path = "../nu-utils", version = "0.95.1" }
|
nu-utils = { path = "../nu-utils", version = "0.96.2" }
|
||||||
log = { workspace = true }
|
log = { workspace = true }
|
||||||
|
terminal_size = { workspace = true }
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
plugin = []
|
plugin = []
|
||||||
|
|
|
@ -423,7 +423,7 @@ impl BlockBuilder {
|
||||||
self.push(Instruction::Jump { index: label_id.0 }.into_spanned(span))
|
self.push(Instruction::Jump { index: label_id.0 }.into_spanned(span))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The index that the next instruction [`.push()`]ed will have.
|
/// The index that the next instruction [`.push()`](Self::push)ed will have.
|
||||||
pub(crate) fn here(&self) -> usize {
|
pub(crate) fn here(&self) -> usize {
|
||||||
self.instructions.len()
|
self.instructions.len()
|
||||||
}
|
}
|
||||||
|
|
|
@ -444,7 +444,15 @@ pub(crate) fn compile_expression(
|
||||||
working_set,
|
working_set,
|
||||||
builder,
|
builder,
|
||||||
&full_cell_path.head,
|
&full_cell_path.head,
|
||||||
RedirectModes::capture_out(expr.span),
|
// Only capture the output if there is a tail. This was a bit of a headscratcher
|
||||||
|
// as the parser emits a FullCellPath with no tail for subexpressions in
|
||||||
|
// general, which shouldn't be captured any differently than they otherwise
|
||||||
|
// would be.
|
||||||
|
if !full_cell_path.tail.is_empty() {
|
||||||
|
RedirectModes::capture_out(expr.span)
|
||||||
|
} else {
|
||||||
|
redirect_modes
|
||||||
|
},
|
||||||
in_reg,
|
in_reg,
|
||||||
out_reg,
|
out_reg,
|
||||||
)?;
|
)?;
|
||||||
|
|
|
@ -7,6 +7,7 @@ use nu_protocol::{
|
||||||
Spanned, SyntaxShape, Type, Value,
|
Spanned, SyntaxShape, Type, Value,
|
||||||
};
|
};
|
||||||
use std::{collections::HashMap, fmt::Write};
|
use std::{collections::HashMap, fmt::Write};
|
||||||
|
use terminal_size::{Height, Width};
|
||||||
|
|
||||||
pub fn get_full_help(
|
pub fn get_full_help(
|
||||||
command: &dyn Command,
|
command: &dyn Command,
|
||||||
|
@ -234,6 +235,14 @@ fn get_documentation(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn get_term_width() -> usize {
|
||||||
|
if let Some((Width(w), Height(_))) = terminal_size::terminal_size() {
|
||||||
|
w as usize
|
||||||
|
} else {
|
||||||
|
80
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if !is_parser_keyword && !sig.input_output_types.is_empty() {
|
if !is_parser_keyword && !sig.input_output_types.is_empty() {
|
||||||
if let Some(decl_id) = engine_state.find_decl(b"table", &[]) {
|
if let Some(decl_id) = engine_state.find_decl(b"table", &[]) {
|
||||||
// FIXME: we may want to make this the span of the help command in the future
|
// FIXME: we may want to make this the span of the help command in the future
|
||||||
|
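The new `get_term_width` helper above falls back to 80 columns when `terminal_size` reports no attached terminal (for example when help output is piped), and the call built below passes that width minus 2 to `table` to leave room for padding. A standalone sketch of the same pattern:

```rust
use terminal_size::{terminal_size, Height, Width};

fn get_term_width() -> usize {
    if let Some((Width(w), Height(_))) = terminal_size() {
        w as usize
    } else {
        80 // no terminal attached: assume a conventional width
    }
}

fn main() {
    // Mirrors the `--width` named argument constructed above: width minus 2 for padding.
    let table_width = get_term_width().saturating_sub(2);
    println!("help tables rendered at {table_width} columns");
}
```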
@ -256,7 +265,18 @@ fn get_documentation(
|
||||||
&Call {
|
&Call {
|
||||||
decl_id,
|
decl_id,
|
||||||
head: span,
|
head: span,
|
||||||
arguments: vec![],
|
arguments: vec![Argument::Named((
|
||||||
|
Spanned {
|
||||||
|
item: "width".to_string(),
|
||||||
|
span: Span::unknown(),
|
||||||
|
},
|
||||||
|
None,
|
||||||
|
Some(Expression::new_unknown(
|
||||||
|
Expr::Int(get_term_width() as i64 - 2), // padding, see below
|
||||||
|
Span::unknown(),
|
||||||
|
Type::Int,
|
||||||
|
)),
|
||||||
|
))],
|
||||||
parser_info: HashMap::new(),
|
parser_info: HashMap::new(),
|
||||||
},
|
},
|
||||||
PipelineData::Value(Value::list(vals, span), None),
|
PipelineData::Value(Value::list(vals, span), None),
|
||||||
|
@ -334,6 +354,19 @@ fn get_documentation(
|
||||||
None,
|
None,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
table_call.add_named((
|
||||||
|
Spanned {
|
||||||
|
item: "width".to_string(),
|
||||||
|
span: Span::unknown(),
|
||||||
|
},
|
||||||
|
None,
|
||||||
|
Some(Expression::new_unknown(
|
||||||
|
Expr::Int(get_term_width() as i64 - 2),
|
||||||
|
Span::unknown(),
|
||||||
|
Type::Int,
|
||||||
|
)),
|
||||||
|
));
|
||||||
|
|
||||||
let table = engine_state
|
let table = engine_state
|
||||||
.find_decl("table".as_bytes(), &[])
|
.find_decl("table".as_bytes(), &[])
|
||||||
.and_then(|decl_id| {
|
.and_then(|decl_id| {
|
||||||
|
|
|
@ -198,7 +198,7 @@ pub fn redirect_env(engine_state: &EngineState, caller_stack: &mut Stack, callee
|
||||||
}
|
}
|
||||||
|
|
||||||
// set config to callee config, to capture any updates to that
|
// set config to callee config, to capture any updates to that
|
||||||
caller_stack.config = callee_stack.config.clone();
|
caller_stack.config.clone_from(&callee_stack.config);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn eval_external(
|
fn eval_external(
|
||||||
|
|
|
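Switching from `caller_stack.config = callee_stack.config.clone()` to `clone_from` keeps the behaviour identical while letting the destination reuse its existing allocation where the `Clone` impl supports it, instead of building a fresh value and dropping the old one. A minimal illustration with `String` (nushell's config type is not needed to see the idea):

```rust
fn main() {
    let mut dst = String::with_capacity(64);
    dst.push_str("old config");
    let src = String::from("new config");

    // Equivalent to `dst = src.clone()`, but `String::clone_from` can reuse
    // `dst`'s existing buffer instead of allocating a new one.
    dst.clone_from(&src);
    assert_eq!(dst, "new config");
}
```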
@ -5,21 +5,21 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-explore"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
name = "nu-explore"
|
name = "nu-explore"
|
||||||
version = "0.95.1"
|
version = "0.96.2"
|
||||||
|
|
||||||
[lib]
|
[lib]
|
||||||
bench = false
|
bench = false
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
nu-protocol = { path = "../nu-protocol", version = "0.95.1" }
|
nu-protocol = { path = "../nu-protocol", version = "0.96.2" }
|
||||||
nu-parser = { path = "../nu-parser", version = "0.95.1" }
|
nu-parser = { path = "../nu-parser", version = "0.96.2" }
|
||||||
nu-color-config = { path = "../nu-color-config", version = "0.95.1" }
|
nu-color-config = { path = "../nu-color-config", version = "0.96.2" }
|
||||||
nu-engine = { path = "../nu-engine", version = "0.95.1" }
|
nu-engine = { path = "../nu-engine", version = "0.96.2" }
|
||||||
nu-table = { path = "../nu-table", version = "0.95.1" }
|
nu-table = { path = "../nu-table", version = "0.96.2" }
|
||||||
nu-json = { path = "../nu-json", version = "0.95.1" }
|
nu-json = { path = "../nu-json", version = "0.96.2" }
|
||||||
nu-utils = { path = "../nu-utils", version = "0.95.1" }
|
nu-utils = { path = "../nu-utils", version = "0.96.2" }
|
||||||
nu-ansi-term = { workspace = true }
|
nu-ansi-term = { workspace = true }
|
||||||
nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.95.1" }
|
nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.96.2" }
|
||||||
|
|
||||||
anyhow = { workspace = true }
|
anyhow = { workspace = true }
|
||||||
log = { workspace = true }
|
log = { workspace = true }
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
[package]
|
[package]
|
||||||
name = "nu-glob"
|
name = "nu-glob"
|
||||||
version = "0.95.1"
|
version = "0.96.2"
|
||||||
authors = ["The Nushell Project Developers", "The Rust Project Developers"]
|
authors = ["The Nushell Project Developers", "The Rust Project Developers"]
|
||||||
license = "MIT/Apache-2.0"
|
license = "MIT/Apache-2.0"
|
||||||
description = """
|
description = """
|
||||||
|
|
|
@ -8,7 +8,7 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-json"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
name = "nu-json"
|
name = "nu-json"
|
||||||
version = "0.95.1"
|
version = "0.96.2"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
@ -26,7 +26,7 @@ serde = { workspace = true }
|
||||||
serde_json = { workspace = true }
|
serde_json = { workspace = true }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
nu-test-support = { path = "../nu-test-support", version = "0.95.1" }
|
nu-test-support = { path = "../nu-test-support", version = "0.96.2" }
|
||||||
nu-path = { path = "../nu-path", version = "0.95.1" }
|
nu-path = { path = "../nu-path", version = "0.96.2" }
|
||||||
serde_json = "1.0"
|
serde_json = "1.0"
|
||||||
fancy-regex = "0.13.0"
|
fancy-regex = "0.13.0"
|
|
@ -3,14 +3,14 @@ authors = ["The Nushell Project Developers"]
|
||||||
description = "Nushell's integrated LSP server"
|
description = "Nushell's integrated LSP server"
|
||||||
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-lsp"
|
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-lsp"
|
||||||
name = "nu-lsp"
|
name = "nu-lsp"
|
||||||
version = "0.95.1"
|
version = "0.96.2"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
nu-cli = { path = "../nu-cli", version = "0.95.1" }
|
nu-cli = { path = "../nu-cli", version = "0.96.2" }
|
||||||
nu-parser = { path = "../nu-parser", version = "0.95.1" }
|
nu-parser = { path = "../nu-parser", version = "0.96.2" }
|
||||||
nu-protocol = { path = "../nu-protocol", version = "0.95.1" }
|
nu-protocol = { path = "../nu-protocol", version = "0.96.2" }
|
||||||
|
|
||||||
reedline = { workspace = true }
|
reedline = { workspace = true }
|
||||||
|
|
||||||
|
@ -23,8 +23,8 @@ serde = { workspace = true }
|
||||||
serde_json = { workspace = true }
|
serde_json = { workspace = true }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.95.1" }
|
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.96.2" }
|
||||||
nu-command = { path = "../nu-command", version = "0.95.1" }
|
nu-command = { path = "../nu-command", version = "0.96.2" }
|
||||||
nu-test-support = { path = "../nu-test-support", version = "0.95.1" }
|
nu-test-support = { path = "../nu-test-support", version = "0.96.2" }
|
||||||
|
|
||||||
assert-json-diff = "2.0"
|
assert-json-diff = "2.0"
|
|
@ -5,17 +5,17 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-parser"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
name = "nu-parser"
|
name = "nu-parser"
|
||||||
version = "0.95.1"
|
version = "0.96.2"
|
||||||
exclude = ["/fuzz"]
|
exclude = ["/fuzz"]
|
||||||
|
|
||||||
[lib]
|
[lib]
|
||||||
bench = false
|
bench = false
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
nu-engine = { path = "../nu-engine", version = "0.95.1" }
|
nu-engine = { path = "../nu-engine", version = "0.96.2" }
|
||||||
nu-path = { path = "../nu-path", version = "0.95.1" }
|
nu-path = { path = "../nu-path", version = "0.96.2" }
|
||||||
nu-plugin-engine = { path = "../nu-plugin-engine", optional = true, version = "0.95.1" }
|
nu-plugin-engine = { path = "../nu-plugin-engine", optional = true, version = "0.96.2" }
|
||||||
nu-protocol = { path = "../nu-protocol", version = "0.95.1" }
|
nu-protocol = { path = "../nu-protocol", version = "0.96.2" }
|
||||||
|
|
||||||
bytesize = { workspace = true }
|
bytesize = { workspace = true }
|
||||||
chrono = { default-features = false, features = ['std'], workspace = true }
|
chrono = { default-features = false, features = ['std'], workspace = true }
|
||||||
|
|
|
@ -6,6 +6,7 @@ pub enum TokenContents {
|
||||||
Comment,
|
Comment,
|
||||||
Pipe,
|
Pipe,
|
||||||
PipePipe,
|
PipePipe,
|
||||||
|
AssignmentOperator,
|
||||||
ErrGreaterPipe,
|
ErrGreaterPipe,
|
||||||
OutErrGreaterPipe,
|
OutErrGreaterPipe,
|
||||||
Semicolon,
|
Semicolon,
|
||||||
|
@ -69,6 +70,12 @@ fn is_item_terminator(
|
||||||
|| special_tokens.contains(&c))
|
|| special_tokens.contains(&c))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Assignment operators have special handling distinct from math expressions, as they cause the
|
||||||
|
/// rest of the pipeline to be consumed.
|
||||||
|
pub fn is_assignment_operator(bytes: &[u8]) -> bool {
|
||||||
|
matches!(bytes, b"=" | b"+=" | b"++=" | b"-=" | b"*=" | b"/=")
|
||||||
|
}
|
||||||
|
|
||||||
// A special token is one that is a byte that stands alone as its own token. For example
|
// A special token is one that is a byte that stands alone as its own token. For example
|
||||||
// when parsing a signature you may want to have `:` be able to separate tokens and also
|
// when parsing a signature you may want to have `:` be able to separate tokens and also
|
||||||
// to be handled as its own token to notify you you're about to parse a type in the example
|
// to be handled as its own token to notify you you're about to parse a type in the example
|
||||||
|
@ -297,6 +304,10 @@ pub fn lex_item(
|
||||||
|
|
||||||
let mut err = None;
|
let mut err = None;
|
||||||
let output = match &input[(span.start - span_offset)..(span.end - span_offset)] {
|
let output = match &input[(span.start - span_offset)..(span.end - span_offset)] {
|
||||||
|
bytes if is_assignment_operator(bytes) => Token {
|
||||||
|
contents: TokenContents::AssignmentOperator,
|
||||||
|
span,
|
||||||
|
},
|
||||||
b"out>" | b"o>" => Token {
|
b"out>" | b"o>" => Token {
|
||||||
contents: TokenContents::OutGreaterThan,
|
contents: TokenContents::OutGreaterThan,
|
||||||
span,
|
span,
|
||||||
|
|
|
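The lexer now emits a dedicated `AssignmentOperator` token via the predicate above. A quick check of what it accepts and rejects (the function body is copied from the hunk; the surrounding Token/TokenContents machinery is nushell's and not reproduced here):

```rust
fn is_assignment_operator(bytes: &[u8]) -> bool {
    matches!(bytes, b"=" | b"+=" | b"++=" | b"-=" | b"*=" | b"/=")
}

fn main() {
    // `$x = 1`, `$x += 1`, append-assign `++=` and friends all lex as AssignmentOperator...
    assert!(is_assignment_operator(b"="));
    assert!(is_assignment_operator(b"++="));
    // ...while comparison and arrow-like tokens stay ordinary items.
    assert!(!is_assignment_operator(b"=="));
    assert!(!is_assignment_operator(b"=>"));
}
```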
@ -196,10 +196,43 @@ pub fn lite_parse(tokens: &[Token]) -> (LiteBlock, Option<ParseError>) {
|
||||||
let mut last_token = TokenContents::Eol;
|
let mut last_token = TokenContents::Eol;
|
||||||
let mut file_redirection = None;
|
let mut file_redirection = None;
|
||||||
let mut curr_comment: Option<Vec<Span>> = None;
|
let mut curr_comment: Option<Vec<Span>> = None;
|
||||||
|
let mut is_assignment = false;
|
||||||
let mut error = None;
|
let mut error = None;
|
||||||
|
|
||||||
for (idx, token) in tokens.iter().enumerate() {
|
for (idx, token) in tokens.iter().enumerate() {
|
||||||
if let Some((source, append, span)) = file_redirection.take() {
|
if is_assignment {
|
||||||
|
match &token.contents {
|
||||||
|
// Consume until semicolon or terminating EOL. Assignments absorb pipelines and
|
||||||
|
// redirections.
|
||||||
|
TokenContents::Eol => {
|
||||||
|
// Handle `[Command] [Pipe] ([Comment] | [Eol])+ [Command]`
|
||||||
|
//
|
||||||
|
// `[Eol]` branch checks if previous token is `[Pipe]` to construct pipeline
|
||||||
|
// and so `[Comment] | [Eol]` should be ignored to make it work
|
||||||
|
let actual_token = last_non_comment_token(tokens, idx);
|
||||||
|
if actual_token != Some(TokenContents::Pipe) {
|
||||||
|
is_assignment = false;
|
||||||
|
pipeline.push(&mut command);
|
||||||
|
block.push(&mut pipeline);
|
||||||
|
}
|
||||||
|
|
||||||
|
if last_token == TokenContents::Eol {
|
||||||
|
// Clear out the comment as we're entering a new comment
|
||||||
|
curr_comment = None;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
TokenContents::Semicolon => {
|
||||||
|
is_assignment = false;
|
||||||
|
pipeline.push(&mut command);
|
||||||
|
block.push(&mut pipeline);
|
||||||
|
}
|
||||||
|
TokenContents::Comment => {
|
||||||
|
command.comments.push(token.span);
|
||||||
|
curr_comment = None;
|
||||||
|
}
|
||||||
|
_ => command.push(token.span),
|
||||||
|
}
|
||||||
|
} else if let Some((source, append, span)) = file_redirection.take() {
|
||||||
match &token.contents {
|
match &token.contents {
|
||||||
TokenContents::PipePipe => {
|
TokenContents::PipePipe => {
|
||||||
error = error.or(Some(ParseError::ShellOrOr(token.span)));
|
error = error.or(Some(ParseError::ShellOrOr(token.span)));
|
||||||
|
@ -218,6 +251,11 @@ pub fn lite_parse(tokens: &[Token]) -> (LiteBlock, Option<ParseError>) {
|
||||||
command.push(token.span)
|
command.push(token.span)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
TokenContents::AssignmentOperator => {
|
||||||
|
error = error.or(Some(ParseError::Expected("redirection target", token.span)));
|
||||||
|
command.push(span);
|
||||||
|
command.push(token.span);
|
||||||
|
}
|
||||||
TokenContents::OutGreaterThan
|
TokenContents::OutGreaterThan
|
||||||
| TokenContents::OutGreaterGreaterThan
|
| TokenContents::OutGreaterGreaterThan
|
||||||
| TokenContents::ErrGreaterThan
|
| TokenContents::ErrGreaterThan
|
||||||
|
@@ -280,6 +318,15 @@ pub fn lite_parse(tokens: &[Token]) -> (LiteBlock, Option<ParseError>) {
                     }
                     command.push(token.span);
                 }
+                TokenContents::AssignmentOperator => {
+                    // When in assignment mode, we'll just consume pipes or redirections as part of
+                    // the command.
+                    is_assignment = true;
+                    if let Some(curr_comment) = curr_comment.take() {
+                        command.comments = curr_comment;
+                    }
+                    command.push(token.span);
+                }
                 TokenContents::OutGreaterThan => {
                     error = error.or(command.check_accepts_redirection(token.span));
                     file_redirection = Some((RedirectionSource::Stdout, false, token.span));

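Note: taken together, the three lite-parser hunks above make an assignment "swallow" everything up to the next `;` or terminating end of line, including pipes and redirections. A minimal, self-contained sketch of that grouping idea, with simplified stand-in types rather than the real nu-parser ones (the real code also special-cases comments and pipe-then-newline continuations):

```rust
// Simplified stand-ins for nu-parser's TokenContents / LiteCommand.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok {
    Item,
    Assign,
    Pipe,
    Semicolon,
    Eol,
}

// Group tokens into commands; once an Assign is seen, pipes no longer split the command.
fn group_commands(tokens: &[Tok]) -> Vec<Vec<Tok>> {
    let mut commands = Vec::new();
    let mut current = Vec::new();
    let mut in_assignment = false;
    for &tok in tokens {
        match tok {
            Tok::Assign => {
                in_assignment = true;
                current.push(tok);
            }
            Tok::Pipe if in_assignment => current.push(tok), // absorbed by the assignment
            Tok::Pipe | Tok::Semicolon | Tok::Eol => {
                in_assignment = false;
                if !current.is_empty() {
                    commands.push(std::mem::take(&mut current));
                }
            }
            Tok::Item => current.push(tok),
        }
    }
    if !current.is_empty() {
        commands.push(current);
    }
    commands
}

fn main() {
    // `$x = a | b` stays one command, while `a | b` without an assignment splits in two.
    let with_assign = [Tok::Item, Tok::Assign, Tok::Item, Tok::Pipe, Tok::Item];
    let without = [Tok::Item, Tok::Pipe, Tok::Item];
    assert_eq!(group_commands(&with_assign).len(), 1);
    assert_eq!(group_commands(&without).len(), 2);
}
```
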
@@ -35,8 +35,8 @@ use crate::{
     lite_parser::{lite_parse, LiteCommand},
     parser::{
         check_call, garbage, garbage_pipeline, parse, parse_call, parse_expression,
-        parse_full_signature, parse_import_pattern, parse_internal_call, parse_multispan_value,
-        parse_string, parse_value, parse_var_with_opt_type, trim_quotes, ParsedInternalCall,
+        parse_full_signature, parse_import_pattern, parse_internal_call, parse_string, parse_value,
+        parse_var_with_opt_type, trim_quotes, ParsedInternalCall,
     },
     unescape_unquote_string, Token, TokenContents,
 };

@@ -169,11 +169,7 @@ pub fn parse_def_predecl(working_set: &mut StateWorkingSet, spans: &[Span]) {
 
     // Now, pos should point at the next span after the def-like call.
     // Skip all potential flags, like --env, --wrapped or --help:
-    while pos < spans.len()
-        && working_set
-            .get_span_contents(spans[pos])
-            .starts_with(&[b'-'])
-    {
+    while pos < spans.len() && working_set.get_span_contents(spans[pos]).starts_with(b"-") {
         pos += 1;
     }
 

@@ -202,12 +198,8 @@ pub fn parse_def_predecl(working_set: &mut StateWorkingSet, spans: &[Span]) {
     let mut signature_pos = None;
 
     while pos < spans.len() {
-        if working_set
-            .get_span_contents(spans[pos])
-            .starts_with(&[b'['])
-            || working_set
-                .get_span_contents(spans[pos])
-                .starts_with(&[b'('])
+        if working_set.get_span_contents(spans[pos]).starts_with(b"[")
+            || working_set.get_span_contents(spans[pos]).starts_with(b"(")
         {
             signature_pos = Some(pos);
             break;

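Note: the `starts_with(&[b'-'])` to `starts_with(b"-")` style changes in the two hunks above (and in `parse_def` below) are behavior-preserving: a byte-string literal is just a reference to a byte array, so both spellings compare the same bytes. A tiny self-contained check:

```rust
fn main() {
    // b"-" is a &[u8; 1] holding the same byte as [b'-'].
    assert_eq!(b"-", &[b'-']);
    // get_span_contents returns &[u8]; slice::starts_with accepts either spelling.
    let contents: &[u8] = b"--wrapped";
    assert!(contents.starts_with(b"-"));
    assert!(contents.starts_with(&[b'-']));
}
```
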
@@ -308,21 +300,21 @@ pub fn parse_for(working_set: &mut StateWorkingSet, lite_command: &LiteCommand)
     }
 
     // Let's get our block and make sure it has the right signature
-    if let Some(arg) = call.positional_nth(2) {
-        match arg {
-            Expression {
-                expr: Expr::Block(block_id),
-                ..
-            }
-            | Expression {
-                expr: Expr::RowCondition(block_id),
-                ..
-            } => {
-                let block = working_set.get_block_mut(*block_id);
+    if let Some(
+        Expression {
+            expr: Expr::Block(block_id),
+            ..
+        }
+        | Expression {
+            expr: Expr::RowCondition(block_id),
+            ..
+        },
+    ) = call.positional_nth(2)
+    {
+        {
+            let block = working_set.get_block_mut(*block_id);
 
             block.signature = Box::new(sig);
-            }
-            _ => {}
         }
     }
 

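Note: the `parse_for` refactor above relies on or-patterns being allowed directly inside `if let`, provided every alternative binds the same names (here `block_id`), which removes the need for the inner `match` with its empty `_ => {}` arm. A standalone illustration of the same shape:

```rust
enum Expr {
    Block(usize),
    RowCondition(usize),
    #[allow(dead_code)]
    Other,
}

fn main() {
    let arg = Some(Expr::RowCondition(7));
    // Both alternatives bind `block_id`, so no separate match is needed.
    if let Some(Expr::Block(block_id) | Expr::RowCondition(block_id)) = arg {
        println!("block id: {block_id}");
    }
}
```
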
@@ -424,7 +416,7 @@ pub fn parse_def(
     let mut decl_name_span = None;
 
     for span in rest_spans {
-        if !working_set.get_span_contents(*span).starts_with(&[b'-']) {
+        if !working_set.get_span_contents(*span).starts_with(b"-") {
             decl_name_span = Some(*span);
             break;
         }

@@ -554,7 +546,7 @@ pub fn parse_def(
     for arg_name in &signature.optional_positional {
         verify_not_reserved_variable_name(working_set, &arg_name.name, sig.span);
     }
-    for arg_name in &signature.rest_positional {
+    if let Some(arg_name) = &signature.rest_positional {
         verify_not_reserved_variable_name(working_set, &arg_name.name, sig.span);
     }
     for flag_name in &signature.get_names() {

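Note: `rest_positional` is an `Option`, and `Option` implements `IntoIterator`, which is why the old `for` loop compiled at all; the `if let` form here (and in `discover_captures_in_closure` further down) just states the at-most-one case directly:

```rust
fn main() {
    let rest_positional: Option<&str> = Some("rest");

    // Option implements IntoIterator, so this runs the body zero or one time.
    for arg_name in &rest_positional {
        println!("via for loop: {arg_name}");
    }

    // Equivalent, but makes the "at most one" intent explicit.
    if let Some(arg_name) = &rest_positional {
        println!("via if let: {arg_name}");
    }
}
```
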
@@ -3171,9 +3163,6 @@ pub fn parse_const(working_set: &mut StateWorkingSet, spans: &[Span]) -> Pipelin
     // }
 
     if let Some(decl_id) = working_set.find_decl(b"const") {
-        let cmd = working_set.get_decl(decl_id);
-        let call_signature = cmd.signature().call_signature();
-
         if spans.len() >= 4 {
             // This is a bit of by-hand parsing to get around the issue where we want to parse in the reverse order
             // so that the var-id created by the variable isn't visible in the expression that init it

@@ -3181,18 +3170,29 @@ pub fn parse_const(working_set: &mut StateWorkingSet, spans: &[Span]) -> Pipelin
                 let item = working_set.get_span_contents(*span.1);
                 // const x = 'f', = at least start from index 2
                 if item == b"=" && spans.len() > (span.0 + 1) && span.0 > 1 {
-                    let mut idx = span.0;
+                    // Parse the rvalue as a subexpression
+                    let rvalue_span = Span::concat(&spans[(span.0 + 1)..]);
 
-                    let rvalue = parse_multispan_value(
-                        working_set,
-                        spans,
-                        &mut idx,
-                        &SyntaxShape::Keyword(b"=".to_vec(), Box::new(SyntaxShape::MathExpression)),
+                    let (rvalue_tokens, rvalue_error) = lex(
+                        working_set.get_span_contents(rvalue_span),
+                        rvalue_span.start,
+                        &[],
+                        &[],
+                        false,
+                    );
+                    working_set.parse_errors.extend(rvalue_error);
+
+                    trace!("parsing: const right-hand side subexpression");
+                    let rvalue_block =
+                        parse_block(working_set, &rvalue_tokens, rvalue_span, false, true);
+                    let rvalue_ty = rvalue_block.output_type();
+                    let rvalue_block_id = working_set.add_block(Arc::new(rvalue_block));
+                    let rvalue = Expression::new(
+                        working_set,
+                        Expr::Subexpression(rvalue_block_id),
+                        rvalue_span,
+                        rvalue_ty,
                     );
-                    if idx < (spans.len() - 1) {
-                        working_set
-                            .error(ParseError::ExtraPositional(call_signature, spans[idx + 1]));
-                    }
 
                     let mut idx = 0;
 

@@ -1,5 +1,5 @@
 use crate::{
-    lex::{lex, lex_signature},
+    lex::{is_assignment_operator, lex, lex_signature},
     lite_parser::{lite_parse, LiteCommand, LitePipeline, LiteRedirection, LiteRedirectionTarget},
     parse_keywords::*,
     parse_patterns::parse_pattern,

@@ -1458,7 +1458,8 @@ fn parse_binary_with_base(
             | TokenContents::ErrGreaterThan
             | TokenContents::ErrGreaterGreaterThan
             | TokenContents::OutErrGreaterThan
-            | TokenContents::OutErrGreaterGreaterThan => {
+            | TokenContents::OutErrGreaterGreaterThan
+            | TokenContents::AssignmentOperator => {
                 working_set.error(ParseError::Expected("binary", span));
                 return garbage(working_set, span);
             }

@@ -3409,7 +3410,7 @@ pub fn parse_signature_helper(working_set: &mut StateWorkingSet, span: Span) ->
     for token in &output {
         match token {
             Token {
-                contents: crate::TokenContents::Item,
+                contents: crate::TokenContents::Item | crate::TokenContents::AssignmentOperator,
                 span,
             } => {
                 let span = *span;

@@ -4829,7 +4830,7 @@ pub fn parse_value(
         }
 }
 
-pub fn parse_operator(working_set: &mut StateWorkingSet, span: Span) -> Expression {
+pub fn parse_assignment_operator(working_set: &mut StateWorkingSet, span: Span) -> Expression {
     let contents = working_set.get_span_contents(span);
 
     let operator = match contents {

@@ -4839,6 +4840,95 @@ pub fn parse_operator(working_set: &mut StateWorkingSet, span: Span) -> Expressi
         b"-=" => Operator::Assignment(Assignment::MinusAssign),
         b"*=" => Operator::Assignment(Assignment::MultiplyAssign),
         b"/=" => Operator::Assignment(Assignment::DivideAssign),
+        _ => {
+            working_set.error(ParseError::Expected("assignment operator", span));
+            return garbage(working_set, span);
+        }
+    };
+
+    Expression::new(working_set, Expr::Operator(operator), span, Type::Any)
+}
+
+pub fn parse_assignment_expression(
+    working_set: &mut StateWorkingSet,
+    spans: &[Span],
+) -> Expression {
+    trace!("parsing: assignment expression");
+    let expr_span = Span::concat(spans);
+
+    // Assignment always has the most precedence, and its right-hand side can be a pipeline
+    let Some(op_index) = spans
+        .iter()
+        .position(|span| is_assignment_operator(working_set.get_span_contents(*span)))
+    else {
+        working_set.error(ParseError::Expected("assignment expression", expr_span));
+        return garbage(working_set, expr_span);
+    };
+
+    let lhs_spans = &spans[0..op_index];
+    let op_span = spans[op_index];
+    let rhs_spans = &spans[(op_index + 1)..];
+
+    if lhs_spans.is_empty() {
+        working_set.error(ParseError::Expected(
+            "left hand side of assignment",
+            op_span,
+        ));
+        return garbage(working_set, expr_span);
+    }
+
+    if rhs_spans.is_empty() {
+        working_set.error(ParseError::Expected(
+            "right hand side of assignment",
+            op_span,
+        ));
+        return garbage(working_set, expr_span);
+    }
+
+    // Parse the lhs and operator as usual for a math expression
+    let mut lhs = parse_expression(working_set, lhs_spans);
+    let mut operator = parse_assignment_operator(working_set, op_span);
+
+    // Re-parse the right-hand side as a subexpression
+    let rhs_span = Span::concat(rhs_spans);
+
+    let (rhs_tokens, rhs_error) = lex(
+        working_set.get_span_contents(rhs_span),
+        rhs_span.start,
+        &[],
+        &[],
+        true,
+    );
+    working_set.parse_errors.extend(rhs_error);
+
+    trace!("parsing: assignment right-hand side subexpression");
+    let rhs_block = parse_block(working_set, &rhs_tokens, rhs_span, false, true);
+    let rhs_ty = rhs_block.output_type();
+    let rhs_block_id = working_set.add_block(Arc::new(rhs_block));
+    let mut rhs = Expression::new(
+        working_set,
+        Expr::Subexpression(rhs_block_id),
+        rhs_span,
+        rhs_ty,
+    );
+
+    let (result_ty, err) = math_result_type(working_set, &mut lhs, &mut operator, &mut rhs);
+    if let Some(err) = err {
+        working_set.parse_errors.push(err);
+    }
+
+    Expression::new(
+        working_set,
+        Expr::BinaryOp(Box::new(lhs), Box::new(operator), Box::new(rhs)),
+        expr_span,
+        result_ty,
+    )
+}
+
+pub fn parse_operator(working_set: &mut StateWorkingSet, span: Span) -> Expression {
+    let contents = working_set.get_span_contents(span);
+
+    let operator = match contents {
         b"==" => Operator::Comparison(Comparison::Equal),
         b"!=" => Operator::Comparison(Comparison::NotEqual),
         b"<" => Operator::Comparison(Comparison::LessThan),

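Note: the core of `parse_assignment_expression` is positional: find the first assignment operator, treat everything before it as the lvalue, and fold everything after it into a single right-hand-side subexpression, so pipes and redirections stay inside the assignment. A minimal sketch of just that splitting step, over plain string tokens instead of spans (the operator set here is an assumption for illustration; the real code uses `is_assignment_operator` over `Span` contents):

```rust
// Sketch only: plain &str tokens stand in for nu-parser Spans.
fn split_assignment<'a>(
    tokens: &'a [&'a str],
) -> Option<(&'a [&'a str], &'a str, &'a [&'a str])> {
    // Assumed operator set for illustration purposes.
    const OPS: [&str; 6] = ["=", "+=", "-=", "*=", "/=", "++="];
    let idx = tokens.iter().position(|t| OPS.contains(t))?;
    Some((&tokens[..idx], tokens[idx], &tokens[idx + 1..]))
}

fn main() {
    let tokens = ["$x", "=", "ls", "|", "length"];
    let (lhs, op, rhs) = split_assignment(&tokens).unwrap();
    assert_eq!(lhs, ["$x"]);
    assert_eq!(op, "=");
    // The whole pipeline ends up on the right-hand side, to be parsed as a subexpression.
    assert_eq!(rhs, ["ls", "|", "length"]);
}
```
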
@@ -4954,6 +5044,10 @@ pub fn parse_operator(working_set: &mut StateWorkingSet, span: Span) -> Expressi
             ));
             return garbage(working_set, span);
         }
+        op if is_assignment_operator(op) => {
+            working_set.error(ParseError::Expected("a non-assignment operator", span));
+            return garbage(working_set, span);
+        }
         _ => {
             working_set.error(ParseError::Expected("operator", span));
             return garbage(working_set, span);

@@ -5258,7 +5352,12 @@ pub fn parse_expression(working_set: &mut StateWorkingSet, spans: &[Span]) -> Ex
         return garbage(working_set, Span::concat(spans));
     }
 
-    let output = if is_math_expression_like(working_set, spans[pos]) {
+    let output = if spans[pos..]
+        .iter()
+        .any(|span| is_assignment_operator(working_set.get_span_contents(*span)))
+    {
+        parse_assignment_expression(working_set, &spans[pos..])
+    } else if is_math_expression_like(working_set, spans[pos]) {
         parse_math_expression(working_set, &spans[pos..], None)
     } else {
         let bytes = working_set.get_span_contents(spans[pos]).to_vec();

@@ -5690,69 +5789,24 @@ pub(crate) fn redirecting_builtin_error(
 }
 
 pub fn parse_pipeline(working_set: &mut StateWorkingSet, pipeline: &LitePipeline) -> Pipeline {
-    let first_command = pipeline.commands.first();
-    let first_command_name = first_command
-        .and_then(|command| command.parts.first())
-        .map(|span| working_set.get_span_contents(*span));
-
     if pipeline.commands.len() > 1 {
-        // Special case: allow "let" or "mut" to consume the whole pipeline, if this is a pipeline
-        // with multiple commands
-        if matches!(first_command_name, Some(b"let" | b"mut")) {
-            // Merge the pipeline into one command
-            let first_command = first_command.expect("must be Some");
+        // Parse a normal multi command pipeline
+        let elements: Vec<_> = pipeline
+            .commands
+            .iter()
+            .enumerate()
+            .map(|(index, element)| {
+                let element = parse_pipeline_element(working_set, element);
+                // Handle $in for pipeline elements beyond the first one
+                if index > 0 && element.has_in_variable(working_set) {
+                    wrap_element_with_collect(working_set, element.clone())
+                } else {
+                    element
+                }
+            })
+            .collect();
 
-            let remainder_span = first_command
-                .parts_including_redirection()
-                .skip(3)
-                .chain(
-                    pipeline.commands[1..]
-                        .iter()
-                        .flat_map(|command| command.parts_including_redirection()),
-                )
-                .reduce(Span::append);
-
-            let parts = first_command
-                .parts
-                .iter()
-                .take(3) // the let/mut start itself
-                .copied()
-                .chain(remainder_span) // everything else
-                .collect();
-
-            let comments = pipeline
-                .commands
-                .iter()
-                .flat_map(|command| command.comments.iter())
-                .copied()
-                .collect();
-
-            let new_command = LiteCommand {
-                pipe: None,
-                comments,
-                parts,
-                redirection: None,
-            };
-            parse_builtin_commands(working_set, &new_command)
-        } else {
-            // Parse a normal multi command pipeline
-            let elements: Vec<_> = pipeline
-                .commands
-                .iter()
-                .enumerate()
-                .map(|(index, element)| {
-                    let element = parse_pipeline_element(working_set, element);
-                    // Handle $in for pipeline elements beyond the first one
-                    if index > 0 && element.has_in_variable(working_set) {
-                        wrap_element_with_collect(working_set, element.clone())
-                    } else {
-                        element
-                    }
-                })
-                .collect();
-
-            Pipeline { elements }
-        }
+        Pipeline { elements }
     } else {
         // If there's only one command in the pipeline, this could be a builtin command
         parse_builtin_commands(working_set, &pipeline.commands[0])

@@ -5872,7 +5926,7 @@ pub fn discover_captures_in_closure(
             seen.push(var_id);
         }
     }
-    for positional in &block.signature.rest_positional {
+    if let Some(positional) = &block.signature.rest_positional {
         if let Some(var_id) = positional.var_id {
             seen.push(var_id);
         }

@@ -1177,9 +1177,9 @@ fn test_nothing_comparison_eq() {
 #[rstest]
 #[case(b"let a = 1 err> /dev/null")]
 #[case(b"let a = 1 out> /dev/null")]
+#[case(b"let a = 1 out+err> /dev/null")]
 #[case(b"mut a = 1 err> /dev/null")]
 #[case(b"mut a = 1 out> /dev/null")]
-#[case(b"let a = 1 out+err> /dev/null")]
 #[case(b"mut a = 1 out+err> /dev/null")]
 fn test_redirection_with_letmut(#[case] phase: &[u8]) {
     let engine_state = EngineState::new();

@@ -5,7 +5,7 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-path"
 edition = "2021"
 license = "MIT"
 name = "nu-path"
-version = "0.95.1"
+version = "0.96.2"
 exclude = ["/fuzz"]
 
 [lib]

@@ -1,4 +1,5 @@
-use super::helpers;
+#[cfg(windows)]
+use omnipath::WinPathExt;
 use std::path::{Component, Path, PathBuf};
 
 /// Normalize the path, expanding occurrences of n-dots.

@@ -63,7 +64,18 @@ pub fn expand_dots(path: impl AsRef<Path>) -> PathBuf {
         }
     }
 
-    helpers::simiplified(&result)
+    simiplified(&result)
+}
+
+#[cfg(windows)]
+fn simiplified(path: &std::path::Path) -> PathBuf {
+    path.to_winuser_path()
+        .unwrap_or_else(|_| path.to_path_buf())
+}
+
+#[cfg(not(windows))]
+fn simiplified(path: &std::path::Path) -> PathBuf {
+    path.to_path_buf()
 }
 
 #[cfg(test)]

@@ -1,8 +1,9 @@
+#[cfg(windows)]
+use omnipath::WinPathExt;
 use std::io;
 use std::path::{Path, PathBuf};
 
 use super::dots::{expand_dots, expand_ndots};
-use super::helpers;
 use super::tilde::expand_tilde;
 
 // Join a path relative to another path. Paths starting with tilde are considered as absolute.

@@ -30,8 +31,17 @@ where
 fn canonicalize(path: impl AsRef<Path>) -> io::Result<PathBuf> {
     let path = expand_tilde(path);
     let path = expand_ndots(path);
+    canonicalize_path(&path)
+}
 
-    helpers::canonicalize(&path)
+#[cfg(windows)]
+fn canonicalize_path(path: &std::path::Path) -> std::io::Result<std::path::PathBuf> {
+    path.canonicalize()?.to_winuser_path()
+}
+
+#[cfg(not(windows))]
+fn canonicalize_path(path: &std::path::Path) -> std::io::Result<std::path::PathBuf> {
+    path.canonicalize()
 }
 
 /// Resolve all symbolic links and all components (tilde, ., .., ...+) and return the path in its

@@ -1,59 +1,32 @@
-#[cfg(windows)]
-use omnipath::WinPathExt;
-use std::path::PathBuf;
+use crate::AbsolutePathBuf;
 
-pub fn home_dir() -> Option<PathBuf> {
-    dirs::home_dir()
+pub fn home_dir() -> Option<AbsolutePathBuf> {
+    dirs::home_dir().and_then(|home| AbsolutePathBuf::try_from(home).ok())
 }
 
 /// Return the data directory for the current platform or XDG_DATA_HOME if specified.
-pub fn data_dir() -> Option<PathBuf> {
-    match std::env::var("XDG_DATA_HOME").map(PathBuf::from) {
-        Ok(xdg_data) if xdg_data.is_absolute() => Some(canonicalize(&xdg_data).unwrap_or(xdg_data)),
-        _ => get_canonicalized_path(dirs::data_dir()),
-    }
+pub fn data_dir() -> Option<AbsolutePathBuf> {
+    std::env::var("XDG_DATA_HOME")
+        .ok()
+        .and_then(|path| AbsolutePathBuf::try_from(path).ok())
+        .or_else(|| dirs::data_dir().and_then(|path| AbsolutePathBuf::try_from(path).ok()))
+        .map(|path| path.canonicalize().map(Into::into).unwrap_or(path))
 }
 
 /// Return the cache directory for the current platform or XDG_CACHE_HOME if specified.
-pub fn cache_dir() -> Option<PathBuf> {
-    match std::env::var("XDG_CACHE_HOME").map(PathBuf::from) {
-        Ok(xdg_cache) if xdg_cache.is_absolute() => {
-            Some(canonicalize(&xdg_cache).unwrap_or(xdg_cache))
-        }
-        _ => get_canonicalized_path(dirs::cache_dir()),
-    }
+pub fn cache_dir() -> Option<AbsolutePathBuf> {
+    std::env::var("XDG_CACHE_HOME")
+        .ok()
+        .and_then(|path| AbsolutePathBuf::try_from(path).ok())
+        .or_else(|| dirs::cache_dir().and_then(|path| AbsolutePathBuf::try_from(path).ok()))
+        .map(|path| path.canonicalize().map(Into::into).unwrap_or(path))
 }
 
 /// Return the config directory for the current platform or XDG_CONFIG_HOME if specified.
-pub fn config_dir() -> Option<PathBuf> {
-    match std::env::var("XDG_CONFIG_HOME").map(PathBuf::from) {
-        Ok(xdg_config) if xdg_config.is_absolute() => {
-            Some(canonicalize(&xdg_config).unwrap_or(xdg_config))
-        }
-        _ => get_canonicalized_path(dirs::config_dir()),
-    }
-}
-
-pub fn get_canonicalized_path(path: Option<PathBuf>) -> Option<PathBuf> {
-    let path = path?;
-    Some(canonicalize(&path).unwrap_or(path))
-}
-
-#[cfg(windows)]
-pub fn canonicalize(path: &std::path::Path) -> std::io::Result<std::path::PathBuf> {
-    path.canonicalize()?.to_winuser_path()
-}
-#[cfg(not(windows))]
-pub fn canonicalize(path: &std::path::Path) -> std::io::Result<std::path::PathBuf> {
-    path.canonicalize()
-}
-
-#[cfg(windows)]
-pub fn simiplified(path: &std::path::Path) -> PathBuf {
-    path.to_winuser_path()
-        .unwrap_or_else(|_| path.to_path_buf())
-}
-#[cfg(not(windows))]
-pub fn simiplified(path: &std::path::Path) -> PathBuf {
-    path.to_path_buf()
+pub fn config_dir() -> Option<AbsolutePathBuf> {
+    std::env::var("XDG_CONFIG_HOME")
+        .ok()
+        .and_then(|path| AbsolutePathBuf::try_from(path).ok())
+        .or_else(|| dirs::config_dir().and_then(|path| AbsolutePathBuf::try_from(path).ok()))
+        .map(|path| path.canonicalize().map(Into::into).unwrap_or(path))
 }

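Note: all three directory helpers above now share one shape: take the `$XDG_*` variable if it can be turned into an `AbsolutePathBuf`, otherwise fall back to the `dirs` crate, then canonicalize on a best-effort basis. The same lookup order with plain `std` types, as an illustration only (no `AbsolutePathBuf`; the `dirs` fallback is only mentioned in the comment):

```rust
use std::path::PathBuf;

// Illustration of the lookup order used above, with PathBuf instead of nu-path's
// AbsolutePathBuf. `default` would be e.g. dirs::config_dir() in the real code.
fn dir_from_env_or(env_key: &str, default: Option<PathBuf>) -> Option<PathBuf> {
    std::env::var(env_key)
        .ok()
        .map(PathBuf::from)
        .filter(|path| path.is_absolute()) // AbsolutePathBuf::try_from plays this role
        .or(default)
        // Best-effort canonicalization: keep the original path if it fails.
        .map(|path| path.canonicalize().unwrap_or(path))
}

fn main() {
    let fallback = std::env::var("HOME").ok().map(PathBuf::from);
    let config = dir_from_env_or("XDG_CONFIG_HOME", fallback);
    println!("config dir candidate: {config:?}");
}
```
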
@@ -11,7 +11,7 @@ mod trailing_slash;
 
 pub use components::components;
 pub use expansions::{canonicalize_with, expand_path_with, expand_to_real_path, locate_in_dirs};
-pub use helpers::{cache_dir, config_dir, data_dir, get_canonicalized_path, home_dir};
+pub use helpers::{cache_dir, config_dir, data_dir, home_dir};
 pub use path::*;
 pub use tilde::expand_tilde;
 pub use trailing_slash::{has_trailing_slash, strip_trailing_slash};

@@ -660,10 +660,10 @@ impl Path {
     /// the current directory.
     ///
     /// * On Unix, a path is absolute if it starts with the root,
    ///   so [`is_absolute`](Path::is_absolute) and [`has_root`](Path::has_root) are equivalent.
     ///
     /// * On Windows, a path is absolute if it has a prefix and starts with the root:
    ///   `c:\windows` is absolute, while `c:temp` and `\temp` are not.
     ///
     /// # Examples
     ///

@@ -121,7 +121,7 @@ fn expand_tilde_with_another_user_home(path: &Path) -> PathBuf {
     return match path.to_str() {
         Some(file_path) => {
             let mut file = file_path.to_string();
-            match file_path.find(|c| c == '/' || c == '\\') {
+            match file_path.find(['/', '\\']) {
                 None => {
                     file.remove(0);
                     user_home_dir(&file)

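Note: the tilde-expansion tweak above works because `str::find` accepts an array of `char`s as a pattern, so the closure can be dropped without changing which index is found:

```rust
fn main() {
    // Both calls locate the first path separator, '/' or '\'.
    assert_eq!("user/sub".find(|c| c == '/' || c == '\\'), Some(4));
    assert_eq!("user/sub".find(['/', '\\']), Some(4));
    assert_eq!(r"user\sub".find(['/', '\\']), Some(4));
}
```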