Merge remote-tracking branch 'origin/main' into plugin-ctrlc
This commit is contained in:
commit
24f3863f0b
12
.github/workflows/nightly-build.yml
vendored
12
.github/workflows/nightly-build.yml
vendored
|
@ -36,10 +36,10 @@ jobs:
|
|||
token: ${{ secrets.WORKFLOW_TOKEN }}
|
||||
|
||||
- name: Setup Nushell
|
||||
uses: hustcer/setup-nu@v3.11
|
||||
uses: hustcer/setup-nu@v3.12
|
||||
if: github.repository == 'nushell/nightly'
|
||||
with:
|
||||
version: 0.93.0
|
||||
version: 0.95.0
|
||||
|
||||
# Synchronize the main branch of nightly repo with the main branch of Nushell official repo
|
||||
- name: Prepare for Nightly Release
|
||||
|
@ -128,9 +128,9 @@ jobs:
|
|||
rustflags: ''
|
||||
|
||||
- name: Setup Nushell
|
||||
uses: hustcer/setup-nu@v3.11
|
||||
uses: hustcer/setup-nu@v3.12
|
||||
with:
|
||||
version: 0.93.0
|
||||
version: 0.95.0
|
||||
|
||||
- name: Release Nu Binary
|
||||
id: nu
|
||||
|
@ -186,9 +186,9 @@ jobs:
|
|||
ref: main
|
||||
|
||||
- name: Setup Nushell
|
||||
uses: hustcer/setup-nu@v3.11
|
||||
uses: hustcer/setup-nu@v3.12
|
||||
with:
|
||||
version: 0.93.0
|
||||
version: 0.95.0
|
||||
|
||||
# Keep the last a few releases
|
||||
- name: Delete Older Releases
|
||||
|
|
4
.github/workflows/release.yml
vendored
4
.github/workflows/release.yml
vendored
|
@ -76,9 +76,9 @@ jobs:
|
|||
rustflags: ''
|
||||
|
||||
- name: Setup Nushell
|
||||
uses: hustcer/setup-nu@v3.11
|
||||
uses: hustcer/setup-nu@v3.12
|
||||
with:
|
||||
version: 0.93.0
|
||||
version: 0.95.0
|
||||
|
||||
- name: Release Nu Binary
|
||||
id: nu
|
||||
|
|
277
Cargo.lock
generated
277
Cargo.lock
generated
|
@ -1117,6 +1117,36 @@ dependencies = [
|
|||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "curl"
|
||||
version = "0.4.46"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e2161dd6eba090ff1594084e95fd67aeccf04382ffea77999ea94ed42ec67b6"
|
||||
dependencies = [
|
||||
"curl-sys",
|
||||
"libc",
|
||||
"openssl-probe",
|
||||
"openssl-sys",
|
||||
"schannel",
|
||||
"socket2",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "curl-sys"
|
||||
version = "0.4.73+curl-8.8.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "450ab250ecf17227c39afb9a2dd9261dc0035cb80f2612472fc0c4aac2dcb84d"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
"libz-sys",
|
||||
"openssl-sys",
|
||||
"pkg-config",
|
||||
"vcpkg",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deranged"
|
||||
version = "0.3.11"
|
||||
|
@ -1148,6 +1178,12 @@ dependencies = [
|
|||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deunicode"
|
||||
version = "1.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "339544cc9e2c4dc3fc7149fd630c5f22263a4fdf18a98afd0075784968b5cf00"
|
||||
|
||||
[[package]]
|
||||
name = "dialoguer"
|
||||
version = "0.11.0"
|
||||
|
@ -1330,6 +1366,16 @@ dependencies = [
|
|||
"syn 2.0.60",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "env_filter"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a009aa4810eb158359dda09d0c87378e4bbb89b5a801f016885a4707ba24f7ea"
|
||||
dependencies = [
|
||||
"log",
|
||||
"regex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "env_logger"
|
||||
version = "0.8.4"
|
||||
|
@ -1340,6 +1386,19 @@ dependencies = [
|
|||
"regex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "env_logger"
|
||||
version = "0.11.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "38b35839ba51819680ba087cd351788c9a3c476841207e0b8cee0b04722343b9"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
"env_filter",
|
||||
"humantime",
|
||||
"log",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "equivalent"
|
||||
version = "1.0.1"
|
||||
|
@ -1854,12 +1913,26 @@ checksum = "bea68cab48b8459f17cf1c944c67ddc572d272d9f2b274140f223ecb1da4a3b7"
|
|||
dependencies = [
|
||||
"log",
|
||||
"mac",
|
||||
"markup5ever",
|
||||
"markup5ever 0.11.0",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "html5ever"
|
||||
version = "0.27.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c13771afe0e6e846f1e67d038d4cb29998a6779f93c809212e4e9c32efd244d4"
|
||||
dependencies = [
|
||||
"log",
|
||||
"mac",
|
||||
"markup5ever 0.12.1",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.60",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "http"
|
||||
version = "0.2.12"
|
||||
|
@ -1906,6 +1979,12 @@ dependencies = [
|
|||
"thiserror",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "humantime"
|
||||
version = "2.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
|
||||
|
||||
[[package]]
|
||||
name = "hyper"
|
||||
version = "0.14.28"
|
||||
|
@ -1995,12 +2074,6 @@ dependencies = [
|
|||
"unicode-width",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "indoc"
|
||||
version = "2.0.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5"
|
||||
|
||||
[[package]]
|
||||
name = "inotify"
|
||||
version = "0.9.6"
|
||||
|
@ -2523,6 +2596,32 @@ dependencies = [
|
|||
"tendril",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "markup5ever"
|
||||
version = "0.12.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "16ce3abbeba692c8b8441d036ef91aea6df8da2c6b6e21c7e14d3c18e526be45"
|
||||
dependencies = [
|
||||
"log",
|
||||
"phf 0.11.2",
|
||||
"phf_codegen 0.11.2",
|
||||
"string_cache",
|
||||
"string_cache_codegen",
|
||||
"tendril",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "markup5ever_rcdom"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "edaa21ab3701bfee5099ade5f7e1f84553fd19228cf332f13cd6e964bf59be18"
|
||||
dependencies = [
|
||||
"html5ever 0.27.0",
|
||||
"markup5ever 0.12.1",
|
||||
"tendril",
|
||||
"xml5ever",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "md-5"
|
||||
version = "0.10.6"
|
||||
|
@ -2944,6 +3043,7 @@ dependencies = [
|
|||
"chrono-tz 0.8.6",
|
||||
"crossterm",
|
||||
"csv",
|
||||
"deunicode",
|
||||
"dialoguer",
|
||||
"digest",
|
||||
"dirs-next",
|
||||
|
@ -3397,8 +3497,10 @@ version = "0.95.1"
|
|||
dependencies = [
|
||||
"chrono",
|
||||
"chrono-tz 0.9.0",
|
||||
"env_logger 0.11.3",
|
||||
"fancy-regex",
|
||||
"indexmap",
|
||||
"log",
|
||||
"mimalloc",
|
||||
"nu-cmd-lang",
|
||||
"nu-command",
|
||||
|
@ -3408,6 +3510,7 @@ dependencies = [
|
|||
"nu-plugin",
|
||||
"nu-plugin-test-support",
|
||||
"nu-protocol",
|
||||
"nu-utils",
|
||||
"num",
|
||||
"polars",
|
||||
"polars-arrow",
|
||||
|
@ -3416,7 +3519,7 @@ dependencies = [
|
|||
"polars-plan",
|
||||
"polars-utils",
|
||||
"serde",
|
||||
"sqlparser 0.47.0",
|
||||
"sqlparser",
|
||||
"tempfile",
|
||||
"typetag",
|
||||
"uuid",
|
||||
|
@ -3430,8 +3533,11 @@ dependencies = [
|
|||
"nu-plugin",
|
||||
"nu-protocol",
|
||||
"scraper",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sxd-document",
|
||||
"sxd-xpath",
|
||||
"webpage",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4026,9 +4132,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e148396dca5496566880fa19374f3f789a29db94e3eb458afac1497b4bac5442"
|
||||
checksum = "ce49e10a756f68eb99c102c6b2a0cbc0c583a0fa7263536ad0913d94be878d2d"
|
||||
dependencies = [
|
||||
"getrandom",
|
||||
"polars-arrow",
|
||||
|
@ -4046,9 +4152,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-arrow"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1cb5e11cd0752ae022fa6ca3afa50a14b0301b7ce53c0135828fbb0f4fa8303e"
|
||||
checksum = "b436f83f62e864f0d91871e26528f2c5552c7cf07c8d77547f1b8e3fde22bd27"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"atoi",
|
||||
|
@ -4094,9 +4200,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-compute"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "89fc4578f826234cdecb782952aa9c479dc49373f81694a7b439c70b6f609ba0"
|
||||
checksum = "f6758f834f07e622a2f859bebb542b2b7f8879b8704dbb2b2bbab460ddcdca4b"
|
||||
dependencies = [
|
||||
"bytemuck",
|
||||
"either",
|
||||
|
@ -4110,9 +4216,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-core"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e490c6bace1366a558feea33d1846f749a8ca90bd72a6748752bc65bb4710b2a"
|
||||
checksum = "7ed262e9bdda15a12a9bfcfc9200bec5253335633dbd86cf5b94fda0194244b3"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"bitflags 2.5.0",
|
||||
|
@ -4144,9 +4250,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-error"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "08888f58e61599b00f5ea0c2ccdc796b54b9859559cc0d4582733509451fa01a"
|
||||
checksum = "53e1707a17475ba5e74c349154b415e3148a1a275e395965427971b5e53ad621"
|
||||
dependencies = [
|
||||
"avro-schema",
|
||||
"polars-arrow-format",
|
||||
|
@ -4157,9 +4263,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-expr"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4173591920fe56ad55af025f92eb0d08421ca85705c326a640c43856094e3484"
|
||||
checksum = "31a9688d5842e7a7fbad88e67a174778794a91d97d3bba1b3c09dd1656fee3b2"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"bitflags 2.5.0",
|
||||
|
@ -4177,9 +4283,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-io"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5842896aea46d975b425d63f156f412aed3cfde4c257b64fb1f43ceea288074e"
|
||||
checksum = "18798dacd94fb9263f65f63f0feab0908675422646d6f7fc37043b85ff6dca35"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"async-trait",
|
||||
|
@ -4218,9 +4324,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-json"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "160cbad0145b93ac6a88639aadfa6f7d7c769d05a8674f9b7e895b398cae9901"
|
||||
checksum = "044ea319f667efbf8007c4c38171c2956e0e7f9b078eb66e31e82f80d1e14b51"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"chrono",
|
||||
|
@ -4239,19 +4345,21 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-lazy"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e805ea2ebbc6b7749b0afb31b7fc5d32b42b57ba29b984549d43d3a16114c4a5"
|
||||
checksum = "74a11994c2211f2e99d9ac31776fd7c2c0607d5fe62d5b5db9e396f7d663f3d5"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"bitflags 2.5.0",
|
||||
"glob",
|
||||
"memchr",
|
||||
"once_cell",
|
||||
"polars-arrow",
|
||||
"polars-core",
|
||||
"polars-expr",
|
||||
"polars-io",
|
||||
"polars-json",
|
||||
"polars-mem-engine",
|
||||
"polars-ops",
|
||||
"polars-pipe",
|
||||
"polars-plan",
|
||||
|
@ -4263,10 +4371,29 @@ dependencies = [
|
|||
]
|
||||
|
||||
[[package]]
|
||||
name = "polars-ops"
|
||||
version = "0.40.0"
|
||||
name = "polars-mem-engine"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7b0aed7e169c81b98457641cf82b251f52239a668916c2e683abd1f38df00d58"
|
||||
checksum = "5acd5fde6fadaddfcae3227ec5b64121007928f8e68870c80653438e20c1c587"
|
||||
dependencies = [
|
||||
"polars-arrow",
|
||||
"polars-core",
|
||||
"polars-error",
|
||||
"polars-expr",
|
||||
"polars-io",
|
||||
"polars-json",
|
||||
"polars-ops",
|
||||
"polars-plan",
|
||||
"polars-time",
|
||||
"polars-utils",
|
||||
"rayon",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "polars-ops"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f4170c59e974727941edfb722f6d430ed623be9e7f30581ee00832c907f1b9fd"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"argminmax",
|
||||
|
@ -4300,9 +4427,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-parquet"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c70670a9e51cac66d0e77fd20b5cc957dbcf9f2660d410633862bb72f846d5b8"
|
||||
checksum = "c684638c36c60c691d707d414249fe8af4a19a35a39d418464b140fe23732e5d"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"async-stream",
|
||||
|
@ -4315,9 +4442,11 @@ dependencies = [
|
|||
"num-traits",
|
||||
"parquet-format-safe",
|
||||
"polars-arrow",
|
||||
"polars-compute",
|
||||
"polars-error",
|
||||
"polars-utils",
|
||||
"seq-macro",
|
||||
"serde",
|
||||
"simdutf8",
|
||||
"snap",
|
||||
"streaming-decompression",
|
||||
|
@ -4326,9 +4455,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-pipe"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0a40ae1b3c74ee07e2d1f7cbf56c5d6e15969e45d9b6f0903bd2acaf783ba436"
|
||||
checksum = "832af9fbebc4c074d95fb19e1ef9e1bf37c343641238c2476febff296a7028ea"
|
||||
dependencies = [
|
||||
"crossbeam-channel",
|
||||
"crossbeam-queue",
|
||||
|
@ -4352,9 +4481,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-plan"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8daa3541ae7e9af311a4389bc2b21f83349c34c723cc67fa524cdefdaa172d90"
|
||||
checksum = "801390ea815c05c9cf8337f3148090c9c10c9595a839fa0706b77cc2405b4466"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"bytemuck",
|
||||
|
@ -4382,9 +4511,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-row"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "deb285f2f3a65b00dd06bef16bb9f712dbb5478f941dab5cf74f9f016d382e40"
|
||||
checksum = "dee955e91b605fc91db4d0a8ea02609d3a09ff79256d905214a2a6f758cd6f7b"
|
||||
dependencies = [
|
||||
"bytemuck",
|
||||
"polars-arrow",
|
||||
|
@ -4394,9 +4523,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-sql"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a724f699d194cb02c25124d3832f7d4d77f387f1a89ee42f6b9e88ec561d4ad9"
|
||||
checksum = "d89c00a4b399501d5bd478e8e8022b9391047fe8570324ecba20c4e4833c0e87"
|
||||
dependencies = [
|
||||
"hex",
|
||||
"once_cell",
|
||||
|
@ -4404,18 +4533,20 @@ dependencies = [
|
|||
"polars-core",
|
||||
"polars-error",
|
||||
"polars-lazy",
|
||||
"polars-ops",
|
||||
"polars-plan",
|
||||
"polars-time",
|
||||
"rand",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sqlparser 0.39.0",
|
||||
"sqlparser",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "polars-time"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "87ebec238d8b6200d9f0c3ce411c8441e950bd5a7df7806b8172d06c1d5a4b97"
|
||||
checksum = "9689b3aff99d64befe300495528bdc44c36d2656c3a8b242a790d4f43df027fc"
|
||||
dependencies = [
|
||||
"atoi",
|
||||
"bytemuck",
|
||||
|
@ -4435,9 +4566,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "polars-utils"
|
||||
version = "0.40.0"
|
||||
version = "0.41.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "34e1a907c63abf71e5f21467e2e4ff748896c28196746f631c6c25512ec6102c"
|
||||
checksum = "12081e346983a91e26f395597e1d53dea1b4ecd694653aee1cc402d2fae01f04"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"bytemuck",
|
||||
|
@ -4673,7 +4804,7 @@ version = "1.0.3"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6"
|
||||
dependencies = [
|
||||
"env_logger",
|
||||
"env_logger 0.8.4",
|
||||
"log",
|
||||
"rand",
|
||||
]
|
||||
|
@ -4752,21 +4883,21 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "ratatui"
|
||||
version = "0.26.2"
|
||||
version = "0.26.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a564a852040e82671dc50a37d88f3aa83bbc690dfc6844cfe7a2591620206a80"
|
||||
checksum = "f44c9e68fd46eda15c646fbb85e1040b657a58cdc8c98db1d97a55930d991eef"
|
||||
dependencies = [
|
||||
"bitflags 2.5.0",
|
||||
"cassowary",
|
||||
"compact_str",
|
||||
"crossterm",
|
||||
"indoc",
|
||||
"itertools 0.12.1",
|
||||
"lru",
|
||||
"paste",
|
||||
"stability",
|
||||
"strum",
|
||||
"unicode-segmentation",
|
||||
"unicode-truncate",
|
||||
"unicode-width",
|
||||
]
|
||||
|
||||
|
@ -5225,7 +5356,7 @@ dependencies = [
|
|||
"ahash 0.8.11",
|
||||
"cssparser",
|
||||
"ego-tree",
|
||||
"html5ever",
|
||||
"html5ever 0.26.0",
|
||||
"once_cell",
|
||||
"selectors",
|
||||
"tendril",
|
||||
|
@ -5441,9 +5572,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "shadow-rs"
|
||||
version = "0.28.0"
|
||||
version = "0.29.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1d75516bdaee8f640543ad1f6e292448c23ce57143f812c3736ab4b0874383df"
|
||||
checksum = "0a600f795d0894cda22235b44eea4b85c2a35b405f65523645ac8e35b306817a"
|
||||
dependencies = [
|
||||
"const_format",
|
||||
"is_debug",
|
||||
|
@ -5588,15 +5719,6 @@ dependencies = [
|
|||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlparser"
|
||||
version = "0.39.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "743b4dc2cbde11890ccb254a8fc9d537fa41b36da00de2a1c5e9848c9bc42bd7"
|
||||
dependencies = [
|
||||
"log",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlparser"
|
||||
version = "0.47.0"
|
||||
|
@ -6314,6 +6436,16 @@ version = "1.11.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-truncate"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5a5fbabedabe362c618c714dbefda9927b5afc8e2a8102f47f081089a9019226"
|
||||
dependencies = [
|
||||
"itertools 0.12.1",
|
||||
"unicode-width",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-width"
|
||||
version = "0.1.12"
|
||||
|
@ -6738,6 +6870,20 @@ dependencies = [
|
|||
"pkg-config",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "webpage"
|
||||
version = "2.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "70862efc041d46e6bbaa82bb9c34ae0596d090e86cbd14bd9e93b36ee6802eac"
|
||||
dependencies = [
|
||||
"curl",
|
||||
"html5ever 0.27.0",
|
||||
"markup5ever_rcdom",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "which"
|
||||
version = "6.0.1"
|
||||
|
@ -7150,6 +7296,17 @@ dependencies = [
|
|||
"rustix",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xml5ever"
|
||||
version = "0.18.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9bbb26405d8e919bc1547a5aa9abc95cbfa438f04844f5fdd9dc7596b748bf69"
|
||||
dependencies = [
|
||||
"log",
|
||||
"mac",
|
||||
"markup5ever 0.12.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xxhash-rust"
|
||||
version = "0.8.10"
|
||||
|
|
|
@ -80,6 +80,7 @@ crossbeam-channel = "0.5.8"
|
|||
crossterm = "0.27"
|
||||
csv = "1.3"
|
||||
ctrlc = "3.4"
|
||||
deunicode = "1.6.0"
|
||||
dialoguer = { default-features = false, version = "0.11" }
|
||||
digest = { default-features = false, version = "0.10" }
|
||||
dirs-next = "2.0"
|
||||
|
|
|
@ -8,7 +8,7 @@ use nu_protocol::{
|
|||
report_error_new, HistoryFileFormat, PipelineData,
|
||||
};
|
||||
#[cfg(feature = "plugin")]
|
||||
use nu_utils::utils::perf;
|
||||
use nu_utils::perf;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[cfg(feature = "plugin")]
|
||||
|
@ -53,13 +53,10 @@ pub fn read_plugin_file(
|
|||
// Reading signatures from plugin registry file
|
||||
// The plugin.msgpackz file stores the parsed signature collected from each registered plugin
|
||||
add_plugin_file(engine_state, plugin_file.clone(), storage_path);
|
||||
perf(
|
||||
perf!(
|
||||
"add plugin file to engine_state",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
engine_state.get_config().use_ansi_coloring,
|
||||
engine_state.get_config().use_ansi_coloring
|
||||
);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
|
@ -137,13 +134,10 @@ pub fn read_plugin_file(
|
|||
}
|
||||
};
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
&format!("read plugin file {}", plugin_path.display()),
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
engine_state.get_config().use_ansi_coloring,
|
||||
engine_state.get_config().use_ansi_coloring
|
||||
);
|
||||
start_time = std::time::Instant::now();
|
||||
|
||||
|
@ -156,13 +150,10 @@ pub fn read_plugin_file(
|
|||
return;
|
||||
}
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
&format!("load plugin file {}", plugin_path.display()),
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
engine_state.get_config().use_ansi_coloring,
|
||||
engine_state.get_config().use_ansi_coloring
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -381,13 +372,10 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -
|
|||
);
|
||||
}
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
"migrate old plugin file",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
engine_state.get_config().use_ansi_coloring,
|
||||
engine_state.get_config().use_ansi_coloring
|
||||
);
|
||||
true
|
||||
}
|
||||
|
|
|
@ -31,7 +31,7 @@ use nu_protocol::{
|
|||
};
|
||||
use nu_utils::{
|
||||
filesystem::{have_permission, PermissionResult},
|
||||
utils::perf,
|
||||
perf,
|
||||
};
|
||||
use reedline::{
|
||||
CursorConfig, CwdAwareHinter, DefaultCompleter, EditCommand, Emacs, FileBackedHistory,
|
||||
|
@ -89,14 +89,7 @@ pub fn evaluate_repl(
|
|||
if let Err(e) = convert_env_values(engine_state, &unique_stack) {
|
||||
report_error_new(engine_state, &e);
|
||||
}
|
||||
perf(
|
||||
"translate env vars",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("translate env vars", start_time, use_color);
|
||||
|
||||
// seed env vars
|
||||
unique_stack.add_env_var(
|
||||
|
@ -225,28 +218,14 @@ fn get_line_editor(
|
|||
|
||||
// Now that reedline is created, get the history session id and store it in engine_state
|
||||
store_history_id_in_engine(engine_state, &line_editor);
|
||||
perf(
|
||||
"setup reedline",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("setup reedline", start_time, use_color);
|
||||
|
||||
if let Some(history) = engine_state.history_config() {
|
||||
start_time = std::time::Instant::now();
|
||||
|
||||
line_editor = setup_history(nushell_path, engine_state, line_editor, history)?;
|
||||
|
||||
perf(
|
||||
"setup history",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("setup history", start_time, use_color);
|
||||
}
|
||||
Ok(line_editor)
|
||||
}
|
||||
|
@ -289,28 +268,14 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
if let Err(err) = engine_state.merge_env(&mut stack, cwd) {
|
||||
report_error_new(engine_state, &err);
|
||||
}
|
||||
perf(
|
||||
"merge env",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("merge env", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
// Reset the ctrl-c handler
|
||||
if let Some(ctrlc) = &mut engine_state.ctrlc {
|
||||
ctrlc.store(false, Ordering::SeqCst);
|
||||
}
|
||||
perf(
|
||||
"reset ctrlc",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("reset ctrlc", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
// Right before we start our prompt and take input from the user,
|
||||
|
@ -320,14 +285,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
report_error_new(engine_state, &err);
|
||||
}
|
||||
}
|
||||
perf(
|
||||
"pre-prompt hook",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("pre-prompt hook", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
// Next, check all the environment variables they ask for
|
||||
|
@ -336,14 +294,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
if let Err(error) = hook::eval_env_change_hook(env_change, engine_state, &mut stack) {
|
||||
report_error_new(engine_state, &error)
|
||||
}
|
||||
perf(
|
||||
"env-change hook",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("env-change hook", start_time, use_color);
|
||||
|
||||
let engine_reference = Arc::new(engine_state.clone());
|
||||
let config = engine_state.get_config();
|
||||
|
@ -355,14 +306,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
vi_normal: map_nucursorshape_to_cursorshape(config.cursor_shape_vi_normal),
|
||||
emacs: map_nucursorshape_to_cursorshape(config.cursor_shape_emacs),
|
||||
};
|
||||
perf(
|
||||
"get config/cursor config",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("get config/cursor config", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
// at this line we have cloned the state for the completer and the transient prompt
|
||||
|
@ -394,14 +338,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
.with_ansi_colors(config.use_ansi_coloring)
|
||||
.with_cursor_config(cursor_config);
|
||||
|
||||
perf(
|
||||
"reedline builder",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("reedline builder", start_time, use_color);
|
||||
|
||||
let style_computer = StyleComputer::from_config(engine_state, &stack_arc);
|
||||
|
||||
|
@ -416,14 +353,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
line_editor.disable_hints()
|
||||
};
|
||||
|
||||
perf(
|
||||
"reedline coloring/style_computer",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("reedline coloring/style_computer", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
trace!("adding menus");
|
||||
|
@ -433,14 +363,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
Reedline::create()
|
||||
});
|
||||
|
||||
perf(
|
||||
"reedline adding menus",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("reedline adding menus", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
let buffer_editor = get_editor(engine_state, &stack_arc, Span::unknown());
|
||||
|
@ -457,14 +380,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
line_editor
|
||||
};
|
||||
|
||||
perf(
|
||||
"reedline buffer_editor",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("reedline buffer_editor", start_time, use_color);
|
||||
|
||||
if let Some(history) = engine_state.history_config() {
|
||||
start_time = std::time::Instant::now();
|
||||
|
@ -474,28 +390,14 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
}
|
||||
}
|
||||
|
||||
perf(
|
||||
"sync_history",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("sync_history", start_time, use_color);
|
||||
}
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
// Changing the line editor based on the found keybindings
|
||||
line_editor = setup_keybindings(engine_state, line_editor);
|
||||
|
||||
perf(
|
||||
"keybindings",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("keybindings", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
let config = &engine_state.get_config().clone();
|
||||
|
@ -512,14 +414,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
nu_prompt,
|
||||
);
|
||||
|
||||
perf(
|
||||
"update_prompt",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("update_prompt", start_time, use_color);
|
||||
|
||||
*entry_num += 1;
|
||||
|
||||
|
@ -546,14 +441,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
// so we should avoid it or making stack cheaper to clone.
|
||||
let mut stack = Arc::unwrap_or_clone(stack_arc);
|
||||
|
||||
perf(
|
||||
"line_editor setup",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("line_editor setup", start_time, use_color);
|
||||
|
||||
let line_editor_input_time = std::time::Instant::now();
|
||||
match input {
|
||||
|
@ -590,14 +478,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
}
|
||||
}
|
||||
|
||||
perf(
|
||||
"pre_execution_hook",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("pre_execution_hook", start_time, use_color);
|
||||
|
||||
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
|
||||
repl.cursor_pos = line_editor.current_insertion_point();
|
||||
|
@ -612,26 +493,20 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
|
||||
run_ansi_sequence(VSCODE_PRE_EXECUTION_MARKER);
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
"pre_execute_marker (633;C) ansi escape sequence",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
use_color
|
||||
);
|
||||
} else if shell_integration_osc133 {
|
||||
start_time = Instant::now();
|
||||
|
||||
run_ansi_sequence(PRE_EXECUTION_MARKER);
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
"pre_execute_marker (133;C) ansi escape sequence",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
use_color
|
||||
);
|
||||
}
|
||||
} else if shell_integration_osc133 {
|
||||
|
@ -639,13 +514,10 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
|
||||
run_ansi_sequence(PRE_EXECUTION_MARKER);
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
"pre_execute_marker (133;C) ansi escape sequence",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
use_color
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -769,22 +641,16 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||
);
|
||||
}
|
||||
}
|
||||
perf(
|
||||
perf!(
|
||||
"processing line editor input",
|
||||
line_editor_input_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
use_color
|
||||
);
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
"time between prompts in line editor loop",
|
||||
loop_start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
use_color
|
||||
);
|
||||
|
||||
(true, stack, line_editor)
|
||||
|
@ -1061,14 +927,7 @@ fn run_shell_integration_osc2(
|
|||
// ESC]2;stringBEL -- Set window title to string
|
||||
run_ansi_sequence(&format!("\x1b]2;{title}\x07"));
|
||||
|
||||
perf(
|
||||
"set title with command osc2",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("set title with command osc2", start_time, use_color);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1093,13 +952,10 @@ fn run_shell_integration_osc7(
|
|||
percent_encoding::utf8_percent_encode(&path, percent_encoding::CONTROLS)
|
||||
));
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
"communicate path to terminal with osc7",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
use_color
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -1116,13 +972,10 @@ fn run_shell_integration_osc9_9(engine_state: &EngineState, stack: &mut Stack, u
|
|||
percent_encoding::utf8_percent_encode(&path, percent_encoding::CONTROLS)
|
||||
));
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
"communicate path to terminal with osc9;9",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
use_color
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -1142,13 +995,10 @@ fn run_shell_integration_osc633(engine_state: &EngineState, stack: &mut Stack, u
|
|||
VSCODE_CWD_PROPERTY_MARKER_PREFIX, path, VSCODE_CWD_PROPERTY_MARKER_SUFFIX
|
||||
));
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
"communicate path to terminal with osc633;P",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
use_color
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -1371,13 +1221,10 @@ fn run_finaliziation_ansi_sequence(
|
|||
shell_integration_osc133,
|
||||
));
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
"post_execute_marker (633;D) ansi escape sequences",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
use_color
|
||||
);
|
||||
} else if shell_integration_osc133 {
|
||||
let start_time = Instant::now();
|
||||
|
@ -1389,13 +1236,10 @@ fn run_finaliziation_ansi_sequence(
|
|||
shell_integration_osc133,
|
||||
));
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
"post_execute_marker (133;D) ansi escape sequences",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
use_color
|
||||
);
|
||||
}
|
||||
} else if shell_integration_osc133 {
|
||||
|
@ -1408,13 +1252,10 @@ fn run_finaliziation_ansi_sequence(
|
|||
shell_integration_osc133,
|
||||
));
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
"post_execute_marker (133;D) ansi escape sequences",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
use_color
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,7 +8,7 @@ use nu_protocol::{
|
|||
};
|
||||
#[cfg(windows)]
|
||||
use nu_utils::enable_vt_processing;
|
||||
use nu_utils::utils::perf;
|
||||
use nu_utils::perf;
|
||||
use std::path::Path;
|
||||
|
||||
// This will collect environment variables from std::env and adds them to a stack.
|
||||
|
@ -228,13 +228,10 @@ pub fn eval_source(
|
|||
let _ = enable_vt_processing();
|
||||
}
|
||||
|
||||
perf(
|
||||
perf!(
|
||||
&format!("eval_source {}", &fname),
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
engine_state.get_config().use_ansi_coloring,
|
||||
engine_state.get_config().use_ansi_coloring
|
||||
);
|
||||
|
||||
exit_code
|
||||
|
|
|
@ -368,6 +368,7 @@ fn theme_demo(span: Span) -> PipelineData {
|
|||
.collect();
|
||||
Value::list(result, span).into_pipeline_data_with_metadata(PipelineMetadata {
|
||||
data_source: DataSource::HtmlThemes,
|
||||
content_type: None,
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
@ -18,10 +18,10 @@ nu-protocol = { path = "../nu-protocol", version = "0.95.1" }
|
|||
nu-utils = { path = "../nu-utils", version = "0.95.1" }
|
||||
|
||||
itertools = { workspace = true }
|
||||
shadow-rs = { version = "0.28", default-features = false }
|
||||
shadow-rs = { version = "0.29", default-features = false }
|
||||
|
||||
[build-dependencies]
|
||||
shadow-rs = { version = "0.28", default-features = false }
|
||||
shadow-rs = { version = "0.29", default-features = false }
|
||||
|
||||
[features]
|
||||
mimalloc = []
|
||||
|
|
|
@ -50,6 +50,7 @@ is particularly large, this can cause high memory usage."#
|
|||
// check where some input came from.
|
||||
Some(PipelineMetadata {
|
||||
data_source: DataSource::FilePath(_),
|
||||
content_type: None,
|
||||
}) => None,
|
||||
other => other,
|
||||
};
|
||||
|
|
|
@ -23,11 +23,7 @@ impl Command for Do {
|
|||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("do")
|
||||
.required(
|
||||
"closure",
|
||||
SyntaxShape::OneOf(vec![SyntaxShape::Closure(None), SyntaxShape::Any]),
|
||||
"The closure to run.",
|
||||
)
|
||||
.required("closure", SyntaxShape::Closure(None), "The closure to run.")
|
||||
.input_output_types(vec![(Type::Any, Type::Any)])
|
||||
.switch(
|
||||
"ignore-errors",
|
||||
|
|
|
@ -31,11 +31,20 @@ pub(crate) fn modify_plugin_file(
|
|||
})?
|
||||
};
|
||||
|
||||
let file_span = custom_path.as_ref().map(|p| p.span).unwrap_or(span);
|
||||
|
||||
// Try to read the plugin file if it exists
|
||||
let mut contents = if fs::metadata(&plugin_registry_file_path).is_ok_and(|m| m.len() > 0) {
|
||||
PluginRegistryFile::read_from(
|
||||
File::open(&plugin_registry_file_path).err_span(span)?,
|
||||
Some(span),
|
||||
File::open(&plugin_registry_file_path).map_err(|err| ShellError::IOErrorSpanned {
|
||||
msg: format!(
|
||||
"failed to read `{}`: {}",
|
||||
plugin_registry_file_path.display(),
|
||||
err
|
||||
),
|
||||
span: file_span,
|
||||
})?,
|
||||
Some(file_span),
|
||||
)?
|
||||
} else {
|
||||
PluginRegistryFile::default()
|
||||
|
@ -46,7 +55,14 @@ pub(crate) fn modify_plugin_file(
|
|||
|
||||
// Save the modified file on success
|
||||
contents.write_to(
|
||||
File::create(&plugin_registry_file_path).err_span(span)?,
|
||||
File::create(&plugin_registry_file_path).map_err(|err| ShellError::IOErrorSpanned {
|
||||
msg: format!(
|
||||
"failed to create `{}`: {}",
|
||||
plugin_registry_file_path.display(),
|
||||
err
|
||||
),
|
||||
span: file_span,
|
||||
})?,
|
||||
Some(span),
|
||||
)?;
|
||||
|
||||
|
|
|
@ -42,6 +42,7 @@ chrono-humanize = { workspace = true }
|
|||
chrono-tz = { workspace = true }
|
||||
crossterm = { workspace = true }
|
||||
csv = { workspace = true }
|
||||
deunicode = { workspace = true }
|
||||
dialoguer = { workspace = true, default-features = false, features = ["fuzzy-select"] }
|
||||
digest = { workspace = true, default-features = false }
|
||||
dtparse = { workspace = true }
|
||||
|
|
|
@ -80,16 +80,23 @@ impl Command for Metadata {
|
|||
match x {
|
||||
PipelineMetadata {
|
||||
data_source: DataSource::Ls,
|
||||
..
|
||||
} => record.push("source", Value::string("ls", head)),
|
||||
PipelineMetadata {
|
||||
data_source: DataSource::HtmlThemes,
|
||||
..
|
||||
} => record.push("source", Value::string("into html --list", head)),
|
||||
PipelineMetadata {
|
||||
data_source: DataSource::FilePath(path),
|
||||
..
|
||||
} => record.push(
|
||||
"source",
|
||||
Value::string(path.to_string_lossy().to_string(), head),
|
||||
),
|
||||
_ => {}
|
||||
}
|
||||
if let Some(ref content_type) = x.content_type {
|
||||
record.push("content_type", Value::string(content_type, head));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -133,16 +140,23 @@ fn build_metadata_record(arg: &Value, metadata: Option<&PipelineMetadata>, head:
|
|||
match x {
|
||||
PipelineMetadata {
|
||||
data_source: DataSource::Ls,
|
||||
..
|
||||
} => record.push("source", Value::string("ls", head)),
|
||||
PipelineMetadata {
|
||||
data_source: DataSource::HtmlThemes,
|
||||
..
|
||||
} => record.push("source", Value::string("into html --list", head)),
|
||||
PipelineMetadata {
|
||||
data_source: DataSource::FilePath(path),
|
||||
..
|
||||
} => record.push(
|
||||
"source",
|
||||
Value::string(path.to_string_lossy().to_string(), head),
|
||||
),
|
||||
_ => {}
|
||||
}
|
||||
if let Some(ref content_type) = x.content_type {
|
||||
record.push("content_type", Value::string(content_type, head));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -46,6 +46,7 @@ impl Command for MetadataSet {
|
|||
(Some(path), false) => {
|
||||
let metadata = PipelineMetadata {
|
||||
data_source: DataSource::FilePath(path.into()),
|
||||
content_type: None,
|
||||
};
|
||||
Ok(input.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
|
@ -56,6 +57,7 @@ impl Command for MetadataSet {
|
|||
(None, true) => {
|
||||
let metadata = PipelineMetadata {
|
||||
data_source: DataSource::Ls,
|
||||
content_type: None,
|
||||
};
|
||||
Ok(input.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
|
|
|
@ -189,6 +189,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
|
|||
Str,
|
||||
StrCapitalize,
|
||||
StrContains,
|
||||
StrDeunicode,
|
||||
StrDistance,
|
||||
StrDowncase,
|
||||
StrEndswith,
|
||||
|
|
|
@ -122,6 +122,7 @@ impl Command for Ls {
|
|||
ctrl_c,
|
||||
PipelineMetadata {
|
||||
data_source: DataSource::Ls,
|
||||
content_type: None,
|
||||
},
|
||||
)),
|
||||
Some(pattern) => {
|
||||
|
@ -145,6 +146,7 @@ impl Command for Ls {
|
|||
ctrl_c,
|
||||
PipelineMetadata {
|
||||
data_source: DataSource::Ls,
|
||||
content_type: None,
|
||||
},
|
||||
))
|
||||
}
|
||||
|
@ -175,20 +177,32 @@ impl Command for Ls {
|
|||
},
|
||||
Example {
|
||||
description: "List files and directories whose name do not contain 'bar'",
|
||||
example: "ls -s | where name !~ bar",
|
||||
example: "ls | where name !~ bar",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "List all dirs in your home directory",
|
||||
description: "List the full path of all dirs in your home directory",
|
||||
example: "ls -a ~ | where type == dir",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"List all dirs in your home directory which have not been modified in 7 days",
|
||||
"List only the names (not paths) of all dirs in your home directory which have not been modified in 7 days",
|
||||
example: "ls -as ~ | where type == dir and modified < ((date now) - 7day)",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Recursively list all files and subdirectories under the current directory using a glob pattern",
|
||||
example: "ls -a **/*",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Recursively list *.rs and *.toml files using the glob command",
|
||||
example: "ls ...(glob **/*.{rs,toml})",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "List given paths and show directories themselves",
|
||||
example: "['/path/to/directory' '/path/to/file'] | each {|| ls -D $in } | flatten",
|
||||
|
|
|
@ -147,6 +147,7 @@ impl Command for Open {
|
|||
ByteStream::file(file, call_span, ctrlc.clone()),
|
||||
Some(PipelineMetadata {
|
||||
data_source: DataSource::FilePath(path.to_path_buf()),
|
||||
content_type: None,
|
||||
}),
|
||||
);
|
||||
|
||||
|
|
|
@ -69,9 +69,9 @@ impl Command for Find {
|
|||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Search and highlight text for a term in a string",
|
||||
example: r#"'Cargo.toml' | find toml"#,
|
||||
result: Some(Value::test_string("\u{1b}[37mCargo.\u{1b}[0m\u{1b}[41;37mtoml\u{1b}[0m\u{1b}[37m\u{1b}[0m".to_owned())),
|
||||
description: "Search and highlight text for a term in a string. Note that regular search is case insensitive",
|
||||
example: r#"'Cargo.toml' | find cargo"#,
|
||||
result: Some(Value::test_string("\u{1b}[37m\u{1b}[0m\u{1b}[41;37mCargo\u{1b}[0m\u{1b}[37m.toml\u{1b}[0m".to_owned())),
|
||||
},
|
||||
Example {
|
||||
description: "Search a number or a file size in a list of numbers",
|
||||
|
@ -457,9 +457,10 @@ fn find_with_rest_and_highlight(
|
|||
|
||||
let mut output: Vec<Value> = vec![];
|
||||
for line in lines {
|
||||
let line = line?.to_lowercase();
|
||||
let line = line?;
|
||||
let lower_val = line.to_lowercase();
|
||||
for term in &terms {
|
||||
if line.contains(term) {
|
||||
if lower_val.contains(term) {
|
||||
output.push(Value::string(
|
||||
highlight_search_string(
|
||||
&line,
|
||||
|
|
|
@ -39,7 +39,7 @@ fn from_delimited_stream(
|
|||
.from_reader(input_reader);
|
||||
|
||||
let headers = if noheaders {
|
||||
(1..=reader
|
||||
(0..reader
|
||||
.headers()
|
||||
.map_err(|err| from_csv_error(err, span))?
|
||||
.len())
|
||||
|
|
|
@ -4,7 +4,7 @@ use std::{
|
|||
};
|
||||
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::ListStream;
|
||||
use nu_protocol::{ListStream, PipelineMetadata};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct FromJson;
|
||||
|
@ -81,7 +81,7 @@ impl Command for FromJson {
|
|||
PipelineData::Value(Value::String { val, .. }, metadata) => {
|
||||
Ok(PipelineData::ListStream(
|
||||
read_json_lines(Cursor::new(val), span, strict, engine_state.ctrlc.clone()),
|
||||
metadata,
|
||||
update_metadata(metadata),
|
||||
))
|
||||
}
|
||||
PipelineData::ByteStream(stream, metadata)
|
||||
|
@ -90,7 +90,7 @@ impl Command for FromJson {
|
|||
if let Some(reader) = stream.reader() {
|
||||
Ok(PipelineData::ListStream(
|
||||
read_json_lines(reader, span, strict, None),
|
||||
metadata,
|
||||
update_metadata(metadata),
|
||||
))
|
||||
} else {
|
||||
Ok(PipelineData::Empty)
|
||||
|
@ -113,10 +113,10 @@ impl Command for FromJson {
|
|||
|
||||
if strict {
|
||||
Ok(convert_string_to_value_strict(&string_input, span)?
|
||||
.into_pipeline_data_with_metadata(metadata))
|
||||
.into_pipeline_data_with_metadata(update_metadata(metadata)))
|
||||
} else {
|
||||
Ok(convert_string_to_value(&string_input, span)?
|
||||
.into_pipeline_data_with_metadata(metadata))
|
||||
.into_pipeline_data_with_metadata(update_metadata(metadata)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -263,6 +263,14 @@ fn convert_string_to_value_strict(string_input: &str, span: Span) -> Result<Valu
|
|||
}
|
||||
}
|
||||
|
||||
fn update_metadata(metadata: Option<PipelineMetadata>) -> Option<PipelineMetadata> {
|
||||
metadata
|
||||
.map(|md| md.with_content_type(Some("application/json".into())))
|
||||
.or_else(|| {
|
||||
Some(PipelineMetadata::default().with_content_type(Some("application/json".into())))
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
|
|
@ -52,12 +52,12 @@ impl Command for FromSsv {
|
|||
Value::test_list(
|
||||
vec![
|
||||
Value::test_record(record! {
|
||||
"column1" => Value::test_string("FOO"),
|
||||
"column2" => Value::test_string("BAR"),
|
||||
"column0" => Value::test_string("FOO"),
|
||||
"column1" => Value::test_string("BAR"),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column1" => Value::test_string("1"),
|
||||
"column2" => Value::test_string("2"),
|
||||
"column0" => Value::test_string("1"),
|
||||
"column1" => Value::test_string("2"),
|
||||
}),
|
||||
],
|
||||
)
|
||||
|
@ -170,7 +170,7 @@ fn parse_aligned_columns<'a>(
|
|||
let headers: Vec<(String, usize)> = indices
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, position)| (format!("column{}", i + 1), *position))
|
||||
.map(|(i, position)| (format!("column{}", i), *position))
|
||||
.collect();
|
||||
|
||||
construct(ls.iter().map(|s| s.to_owned()), headers)
|
||||
|
@ -215,7 +215,7 @@ fn parse_separated_columns<'a>(
|
|||
let parse_without_headers = |ls: Vec<&str>| {
|
||||
let num_columns = ls.iter().map(|r| r.len()).max().unwrap_or(0);
|
||||
|
||||
let headers = (1..=num_columns)
|
||||
let headers = (0..=num_columns)
|
||||
.map(|i| format!("column{i}"))
|
||||
.collect::<Vec<String>>();
|
||||
collect(headers, ls.into_iter(), separator)
|
||||
|
@ -370,9 +370,9 @@ mod tests {
|
|||
assert_eq!(
|
||||
result,
|
||||
vec![
|
||||
vec![owned("column1", "a"), owned("column2", "b")],
|
||||
vec![owned("column1", "1"), owned("column2", "2")],
|
||||
vec![owned("column1", "3"), owned("column2", "4")]
|
||||
vec![owned("column0", "a"), owned("column1", "b")],
|
||||
vec![owned("column0", "1"), owned("column1", "2")],
|
||||
vec![owned("column0", "3"), owned("column1", "4")]
|
||||
]
|
||||
);
|
||||
}
|
||||
|
@ -484,25 +484,25 @@ mod tests {
|
|||
result,
|
||||
vec![
|
||||
vec![
|
||||
owned("column1", "a multi-word value"),
|
||||
owned("column2", "b"),
|
||||
owned("column3", ""),
|
||||
owned("column4", "d"),
|
||||
owned("column5", "")
|
||||
],
|
||||
vec![
|
||||
owned("column1", "1"),
|
||||
owned("column0", "a multi-word value"),
|
||||
owned("column1", "b"),
|
||||
owned("column2", ""),
|
||||
owned("column3", "3-3"),
|
||||
owned("column4", "4"),
|
||||
owned("column5", "")
|
||||
owned("column3", "d"),
|
||||
owned("column4", "")
|
||||
],
|
||||
vec![
|
||||
owned("column0", "1"),
|
||||
owned("column1", ""),
|
||||
owned("column2", "3-3"),
|
||||
owned("column3", "4"),
|
||||
owned("column4", "")
|
||||
],
|
||||
vec![
|
||||
owned("column0", ""),
|
||||
owned("column1", ""),
|
||||
owned("column2", ""),
|
||||
owned("column3", ""),
|
||||
owned("column4", ""),
|
||||
owned("column5", "last")
|
||||
owned("column4", "last")
|
||||
],
|
||||
]
|
||||
);
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::ast::PathMember;
|
||||
use nu_protocol::{ast::PathMember, PipelineMetadata};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ToJson;
|
||||
|
@ -61,7 +61,12 @@ impl Command for ToJson {
|
|||
|
||||
match json_result {
|
||||
Ok(serde_json_string) => {
|
||||
Ok(Value::string(serde_json_string, span).into_pipeline_data())
|
||||
let res = Value::string(serde_json_string, span);
|
||||
let metadata = PipelineMetadata {
|
||||
data_source: nu_protocol::DataSource::None,
|
||||
content_type: Some("application/json".to_string()),
|
||||
};
|
||||
Ok(PipelineData::Value(res, Some(metadata)))
|
||||
}
|
||||
_ => Ok(Value::error(
|
||||
ShellError::CantConvert {
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
use chrono_humanize::HumanTime;
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::{format_duration, format_filesize_from_conf, ByteStream, Config};
|
||||
use nu_protocol::{
|
||||
format_duration, format_filesize_from_conf, ByteStream, Config, PipelineMetadata,
|
||||
};
|
||||
|
||||
const LINE_ENDING: &str = if cfg!(target_os = "windows") {
|
||||
"\r\n"
|
||||
|
@ -37,10 +39,14 @@ impl Command for ToText {
|
|||
let input = input.try_expand_range()?;
|
||||
|
||||
match input {
|
||||
PipelineData::Empty => Ok(Value::string(String::new(), span).into_pipeline_data()),
|
||||
PipelineData::Empty => Ok(Value::string(String::new(), span)
|
||||
.into_pipeline_data_with_metadata(update_metadata(None))),
|
||||
PipelineData::Value(value, ..) => {
|
||||
let str = local_into_string(value, LINE_ENDING, engine_state.get_config());
|
||||
Ok(Value::string(str, span).into_pipeline_data())
|
||||
Ok(
|
||||
Value::string(str, span)
|
||||
.into_pipeline_data_with_metadata(update_metadata(None)),
|
||||
)
|
||||
}
|
||||
PipelineData::ListStream(stream, meta) => {
|
||||
let span = stream.span();
|
||||
|
@ -57,10 +63,12 @@ impl Command for ToText {
|
|||
engine_state.ctrlc.clone(),
|
||||
ByteStreamType::String,
|
||||
),
|
||||
meta,
|
||||
update_metadata(meta),
|
||||
))
|
||||
}
|
||||
PipelineData::ByteStream(stream, meta) => Ok(PipelineData::ByteStream(stream, meta)),
|
||||
PipelineData::ByteStream(stream, meta) => {
|
||||
Ok(PipelineData::ByteStream(stream, update_metadata(meta)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -124,6 +132,14 @@ fn local_into_string(value: Value, separator: &str, config: &Config) -> String {
|
|||
}
|
||||
}
|
||||
|
||||
fn update_metadata(metadata: Option<PipelineMetadata>) -> Option<PipelineMetadata> {
|
||||
metadata
|
||||
.map(|md| md.with_content_type(Some("text/plain".to_string())))
|
||||
.or_else(|| {
|
||||
Some(PipelineMetadata::default().with_content_type(Some("text/plain".to_string())))
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
|
|
@ -180,41 +180,64 @@ impl From<ShellError> for ShellErrorOrRequestError {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum HttpBody {
|
||||
Value(Value),
|
||||
ByteStream(ByteStream),
|
||||
None,
|
||||
}
|
||||
|
||||
// remove once all commands have been migrated
|
||||
pub fn send_request(
|
||||
request: Request,
|
||||
body: Option<Value>,
|
||||
http_body: HttpBody,
|
||||
content_type: Option<String>,
|
||||
ctrl_c: Option<Arc<AtomicBool>>,
|
||||
) -> Result<Response, ShellErrorOrRequestError> {
|
||||
let request_url = request.url().to_string();
|
||||
if body.is_none() {
|
||||
return send_cancellable_request(&request_url, Box::new(|| request.call()), ctrl_c);
|
||||
}
|
||||
let body = body.expect("Should never be none.");
|
||||
|
||||
let body_type = match content_type {
|
||||
Some(it) if it == "application/json" => BodyType::Json,
|
||||
Some(it) if it == "application/x-www-form-urlencoded" => BodyType::Form,
|
||||
_ => BodyType::Unknown,
|
||||
match http_body {
|
||||
HttpBody::None => {
|
||||
send_cancellable_request(&request_url, Box::new(|| request.call()), ctrl_c)
|
||||
}
|
||||
HttpBody::ByteStream(byte_stream) => {
|
||||
let req = if let Some(content_type) = content_type {
|
||||
request.set("Content-Type", &content_type)
|
||||
} else {
|
||||
request
|
||||
};
|
||||
|
||||
send_cancellable_request_bytes(&request_url, req, byte_stream, ctrl_c)
|
||||
}
|
||||
HttpBody::Value(body) => {
|
||||
let (body_type, req) = match content_type {
|
||||
Some(it) if it == "application/json" => (BodyType::Json, request),
|
||||
Some(it) if it == "application/x-www-form-urlencoded" => (BodyType::Form, request),
|
||||
Some(it) => {
|
||||
let r = request.clone().set("Content-Type", &it);
|
||||
(BodyType::Unknown, r)
|
||||
}
|
||||
_ => (BodyType::Unknown, request),
|
||||
};
|
||||
|
||||
match body {
|
||||
Value::Binary { val, .. } => send_cancellable_request(
|
||||
&request_url,
|
||||
Box::new(move || request.send_bytes(&val)),
|
||||
Box::new(move || req.send_bytes(&val)),
|
||||
ctrl_c,
|
||||
),
|
||||
Value::String { .. } if body_type == BodyType::Json => {
|
||||
let data = value_to_json_value(&body)?;
|
||||
send_cancellable_request(&request_url, Box::new(|| request.send_json(data)), ctrl_c)
|
||||
send_cancellable_request(&request_url, Box::new(|| req.send_json(data)), ctrl_c)
|
||||
}
|
||||
Value::String { val, .. } => send_cancellable_request(
|
||||
&request_url,
|
||||
Box::new(move || request.send_string(&val)),
|
||||
Box::new(move || req.send_string(&val)),
|
||||
ctrl_c,
|
||||
),
|
||||
Value::Record { .. } if body_type == BodyType::Json => {
|
||||
let data = value_to_json_value(&body)?;
|
||||
send_cancellable_request(&request_url, Box::new(|| request.send_json(data)), ctrl_c)
|
||||
send_cancellable_request(&request_url, Box::new(|| req.send_json(data)), ctrl_c)
|
||||
}
|
||||
Value::Record { val, .. } if body_type == BodyType::Form => {
|
||||
let mut data: Vec<(String, String)> = Vec::with_capacity(val.len());
|
||||
|
@ -229,7 +252,7 @@ pub fn send_request(
|
|||
.iter()
|
||||
.map(|(a, b)| (a.as_str(), b.as_str()))
|
||||
.collect::<Vec<(&str, &str)>>();
|
||||
request.send_form(&data)
|
||||
req.send_form(&data)
|
||||
};
|
||||
send_cancellable_request(&request_url, Box::new(request_fn), ctrl_c)
|
||||
}
|
||||
|
@ -251,18 +274,20 @@ pub fn send_request(
|
|||
.iter()
|
||||
.map(|(a, b)| (a.as_str(), b.as_str()))
|
||||
.collect::<Vec<(&str, &str)>>();
|
||||
request.send_form(&data)
|
||||
req.send_form(&data)
|
||||
};
|
||||
send_cancellable_request(&request_url, Box::new(request_fn), ctrl_c)
|
||||
}
|
||||
Value::List { .. } if body_type == BodyType::Json => {
|
||||
let data = value_to_json_value(&body)?;
|
||||
send_cancellable_request(&request_url, Box::new(|| request.send_json(data)), ctrl_c)
|
||||
send_cancellable_request(&request_url, Box::new(|| req.send_json(data)), ctrl_c)
|
||||
}
|
||||
_ => Err(ShellErrorOrRequestError::ShellError(ShellError::IOError {
|
||||
msg: "unsupported body input".into(),
|
||||
})),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Helper method used to make blocking HTTP request calls cancellable with ctrl+c
|
||||
|
@ -305,6 +330,61 @@ fn send_cancellable_request(
|
|||
}
|
||||
}
|
||||
|
||||
// Helper method used to make blocking HTTP request calls cancellable with ctrl+c
// ureq functions can block for a long time (default 30s?) while attempting to make an HTTP connection
fn send_cancellable_request_bytes(
    request_url: &str,
    request: Request,
    byte_stream: ByteStream,
    ctrl_c: Option<Arc<AtomicBool>>,
) -> Result<Response, ShellErrorOrRequestError> {
    let (tx, rx) = mpsc::channel::<Result<Response, ShellErrorOrRequestError>>();
    let request_url_string = request_url.to_string();

    // Make the blocking request on a background thread...
    std::thread::Builder::new()
        .name("HTTP requester".to_string())
        .spawn(move || {
            let ret = byte_stream
                .reader()
                .ok_or_else(|| {
                    ShellErrorOrRequestError::ShellError(ShellError::GenericError {
                        error: "Could not read byte stream".to_string(),
                        msg: "".into(),
                        span: None,
                        help: None,
                        inner: vec![],
                    })
                })
                .and_then(|reader| {
                    request.send(reader).map_err(|e| {
                        ShellErrorOrRequestError::RequestError(request_url_string, Box::new(e))
                    })
                });

            // may fail if the user has cancelled the operation
            let _ = tx.send(ret);
        })
        .map_err(ShellError::from)?;

    // ...and poll the channel for responses
    loop {
        if nu_utils::ctrl_c::was_pressed(&ctrl_c) {
            // Return early and give up on the background thread. The connection will either time out or be disconnected
            return Err(ShellErrorOrRequestError::ShellError(
                ShellError::InterruptedByUser { span: None },
            ));
        }

        // 100ms wait time chosen arbitrarily
        match rx.recv_timeout(Duration::from_millis(100)) {
            Ok(result) => return result,
            Err(RecvTimeoutError::Timeout) => continue,
            Err(RecvTimeoutError::Disconnected) => panic!("http response channel disconnected"),
        }
    }
}
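For readers unfamiliar with this cancel-by-polling approach, here is a minimal, self-contained sketch using only the standard library. The `run_cancellable` helper and its names are illustrative, not part of the Nushell codebase; the shape (background thread, channel, 100ms poll against a cancellation flag) mirrors the function above.

```rust
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::{self, RecvTimeoutError};
use std::sync::Arc;
use std::time::Duration;

// Run a blocking job on a background thread and poll a channel for its
// result while checking a cancellation flag.
fn run_cancellable<T: Send + 'static>(
    job: impl FnOnce() -> T + Send + 'static,
    cancel: Arc<AtomicBool>,
) -> Option<T> {
    let (tx, rx) = mpsc::channel();
    std::thread::spawn(move || {
        // may fail if the receiver has already given up
        let _ = tx.send(job());
    });
    loop {
        if cancel.load(Ordering::Relaxed) {
            // give up on the background thread; it will finish on its own
            return None;
        }
        match rx.recv_timeout(Duration::from_millis(100)) {
            Ok(result) => return Some(result),
            Err(RecvTimeoutError::Timeout) => continue,
            Err(RecvTimeoutError::Disconnected) => return None,
        }
    }
}
```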
|
||||
|
||||
pub fn request_set_timeout(
|
||||
timeout: Option<Value>,
|
||||
mut request: Request,
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use crate::network::http::client::{
|
||||
check_response_redirection, http_client, http_parse_redirect_mode, http_parse_url,
|
||||
request_add_authorization_header, request_add_custom_headers, request_handle_response,
|
||||
request_set_timeout, send_request, RequestFlags,
|
||||
request_set_timeout, send_request, HttpBody, RequestFlags,
|
||||
};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
|
@ -15,7 +15,7 @@ impl Command for SubCommand {
|
|||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("http delete")
|
||||
.input_output_types(vec![(Type::Nothing, Type::Any)])
|
||||
.input_output_types(vec![(Type::Any, Type::Any)])
|
||||
.allow_variants_without_examples(true)
|
||||
.required(
|
||||
"URL",
|
||||
|
@ -132,6 +132,11 @@ impl Command for SubCommand {
|
|||
"http delete --content-type application/json --data { field: value } https://www.example.com",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Perform an HTTP delete with JSON content from a pipeline to example.com",
|
||||
example: "open foo.json | http delete https://www.example.com",
|
||||
result: None,
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
@ -139,7 +144,7 @@ impl Command for SubCommand {
|
|||
struct Arguments {
|
||||
url: Value,
|
||||
headers: Option<Value>,
|
||||
data: Option<Value>,
|
||||
data: HttpBody,
|
||||
content_type: Option<String>,
|
||||
raw: bool,
|
||||
insecure: bool,
|
||||
|
@ -155,13 +160,27 @@ fn run_delete(
|
|||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let (data, maybe_metadata) = call
|
||||
.get_flag::<Value>(engine_state, stack, "data")?
|
||||
.map(|v| (HttpBody::Value(v), None))
|
||||
.unwrap_or_else(|| match input {
|
||||
PipelineData::Value(v, metadata) => (HttpBody::Value(v), metadata),
|
||||
PipelineData::ByteStream(byte_stream, metadata) => {
|
||||
(HttpBody::ByteStream(byte_stream), metadata)
|
||||
}
|
||||
_ => (HttpBody::None, None),
|
||||
});
|
||||
let content_type = call
|
||||
.get_flag(engine_state, stack, "content-type")?
|
||||
.or_else(|| maybe_metadata.and_then(|m| m.content_type));
|
||||
|
||||
let args = Arguments {
|
||||
url: call.req(engine_state, stack, 0)?,
|
||||
headers: call.get_flag(engine_state, stack, "headers")?,
|
||||
data: call.get_flag(engine_state, stack, "data")?,
|
||||
content_type: call.get_flag(engine_state, stack, "content-type")?,
|
||||
data,
|
||||
content_type,
|
||||
raw: call.has_flag(engine_state, stack, "raw")?,
|
||||
insecure: call.has_flag(engine_state, stack, "insecure")?,
|
||||
user: call.get_flag(engine_state, stack, "user")?,
|
||||
|
|
|
@ -5,6 +5,8 @@ use crate::network::http::client::{
|
|||
};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
use super::client::HttpBody;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
|
||||
|
@ -180,7 +182,7 @@ fn helper(
|
|||
request = request_add_authorization_header(args.user, args.password, request);
|
||||
request = request_add_custom_headers(args.headers, request)?;
|
||||
|
||||
let response = send_request(request.clone(), None, None, ctrl_c);
|
||||
let response = send_request(request.clone(), HttpBody::None, None, ctrl_c);
|
||||
|
||||
let request_flags = RequestFlags {
|
||||
raw: args.raw,
|
||||
|
|
|
@ -7,6 +7,8 @@ use nu_engine::command_prelude::*;
|
|||
|
||||
use std::sync::{atomic::AtomicBool, Arc};
|
||||
|
||||
use super::client::HttpBody;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
|
||||
|
@ -156,7 +158,7 @@ fn helper(
|
|||
request = request_add_authorization_header(args.user, args.password, request);
|
||||
request = request_add_custom_headers(args.headers, request)?;
|
||||
|
||||
let response = send_request(request, None, None, ctrlc);
|
||||
let response = send_request(request, HttpBody::None, None, ctrlc);
|
||||
check_response_redirection(redirect_mode, span, &response)?;
|
||||
request_handle_response_headers(span, response)
|
||||
}
|
||||
|
|
|
@ -4,6 +4,8 @@ use crate::network::http::client::{
|
|||
};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
use super::client::HttpBody;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
|
||||
|
@ -159,7 +161,7 @@ fn helper(
|
|||
request = request_add_authorization_header(args.user, args.password, request);
|
||||
request = request_add_custom_headers(args.headers, request)?;
|
||||
|
||||
let response = send_request(request.clone(), None, None, ctrl_c);
|
||||
let response = send_request(request.clone(), HttpBody::None, None, ctrl_c);
|
||||
|
||||
// The response to `http options` is always shown in the headers, so we set `full` to true.
// `raw` is useless too because the OPTIONS method doesn't return a body, so we set it to true here as well.
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use crate::network::http::client::{
|
||||
check_response_redirection, http_client, http_parse_redirect_mode, http_parse_url,
|
||||
request_add_authorization_header, request_add_custom_headers, request_handle_response,
|
||||
request_set_timeout, send_request, RequestFlags,
|
||||
request_set_timeout, send_request, HttpBody, RequestFlags,
|
||||
};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
|
@ -15,10 +15,10 @@ impl Command for SubCommand {
|
|||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("http patch")
|
||||
.input_output_types(vec![(Type::Nothing, Type::Any)])
|
||||
.input_output_types(vec![(Type::Any, Type::Any)])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("URL", SyntaxShape::String, "The URL to post to.")
|
||||
.required("data", SyntaxShape::Any, "The contents of the post body.")
|
||||
.optional("data", SyntaxShape::Any, "The contents of the post body.")
|
||||
.named(
|
||||
"user",
|
||||
SyntaxShape::Any,
|
||||
|
@ -124,6 +124,11 @@ impl Command for SubCommand {
|
|||
example: "http patch --content-type application/json https://www.example.com { field: value }",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Patch JSON content from a pipeline to example.com",
|
||||
example: "open foo.json | http patch https://www.example.com",
|
||||
result: None,
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
@ -131,7 +136,7 @@ impl Command for SubCommand {
|
|||
struct Arguments {
|
||||
url: Value,
|
||||
headers: Option<Value>,
|
||||
data: Value,
|
||||
data: HttpBody,
|
||||
content_type: Option<String>,
|
||||
raw: bool,
|
||||
insecure: bool,
|
||||
|
@ -147,13 +152,37 @@ fn run_patch(
|
|||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let (data, maybe_metadata) = call
|
||||
.opt::<Value>(engine_state, stack, 1)?
|
||||
.map(|v| (HttpBody::Value(v), None))
|
||||
.unwrap_or_else(|| match input {
|
||||
PipelineData::Value(v, metadata) => (HttpBody::Value(v), metadata),
|
||||
PipelineData::ByteStream(byte_stream, metadata) => {
|
||||
(HttpBody::ByteStream(byte_stream), metadata)
|
||||
}
|
||||
_ => (HttpBody::None, None),
|
||||
});
|
||||
let content_type = call
|
||||
.get_flag(engine_state, stack, "content-type")?
|
||||
.or_else(|| maybe_metadata.and_then(|m| m.content_type));
|
||||
|
||||
if let HttpBody::None = data {
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Data must be provided either through pipeline or positional argument".into(),
|
||||
msg: "".into(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
let args = Arguments {
|
||||
url: call.req(engine_state, stack, 0)?,
|
||||
headers: call.get_flag(engine_state, stack, "headers")?,
|
||||
data: call.req(engine_state, stack, 1)?,
|
||||
content_type: call.get_flag(engine_state, stack, "content-type")?,
|
||||
data,
|
||||
content_type,
|
||||
raw: call.has_flag(engine_state, stack, "raw")?,
|
||||
insecure: call.has_flag(engine_state, stack, "insecure")?,
|
||||
user: call.get_flag(engine_state, stack, "user")?,
|
||||
|
@ -187,7 +216,7 @@ fn helper(
|
|||
request = request_add_authorization_header(args.user, args.password, request);
|
||||
request = request_add_custom_headers(args.headers, request)?;
|
||||
|
||||
let response = send_request(request.clone(), Some(args.data), args.content_type, ctrl_c);
|
||||
let response = send_request(request.clone(), args.data, args.content_type, ctrl_c);
|
||||
|
||||
let request_flags = RequestFlags {
|
||||
raw: args.raw,
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use crate::network::http::client::{
|
||||
check_response_redirection, http_client, http_parse_redirect_mode, http_parse_url,
|
||||
request_add_authorization_header, request_add_custom_headers, request_handle_response,
|
||||
request_set_timeout, send_request, RequestFlags,
|
||||
request_set_timeout, send_request, HttpBody, RequestFlags,
|
||||
};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
|
@ -15,10 +15,10 @@ impl Command for SubCommand {
|
|||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("http post")
|
||||
.input_output_types(vec![(Type::Nothing, Type::Any)])
|
||||
.input_output_types(vec![(Type::Any, Type::Any)])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("URL", SyntaxShape::String, "The URL to post to.")
|
||||
.required("data", SyntaxShape::Any, "The contents of the post body.")
|
||||
.optional("data", SyntaxShape::Any, "The contents of the post body. Required unless part of a pipeline.")
|
||||
.named(
|
||||
"user",
|
||||
SyntaxShape::Any,
|
||||
|
@ -122,6 +122,11 @@ impl Command for SubCommand {
|
|||
example: "http post --content-type application/json https://www.example.com { field: value }",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Post JSON content from a pipeline to example.com",
|
||||
example: "open foo.json | http post https://www.example.com",
|
||||
result: None,
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
@ -129,7 +134,7 @@ impl Command for SubCommand {
|
|||
struct Arguments {
|
||||
url: Value,
|
||||
headers: Option<Value>,
|
||||
data: Value,
|
||||
data: HttpBody,
|
||||
content_type: Option<String>,
|
||||
raw: bool,
|
||||
insecure: bool,
|
||||
|
@ -145,13 +150,37 @@ fn run_post(
|
|||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let (data, maybe_metadata) = call
|
||||
.opt::<Value>(engine_state, stack, 1)?
|
||||
.map(|v| (HttpBody::Value(v), None))
|
||||
.unwrap_or_else(|| match input {
|
||||
PipelineData::Value(v, metadata) => (HttpBody::Value(v), metadata),
|
||||
PipelineData::ByteStream(byte_stream, metadata) => {
|
||||
(HttpBody::ByteStream(byte_stream), metadata)
|
||||
}
|
||||
_ => (HttpBody::None, None),
|
||||
});
|
||||
let content_type = call
|
||||
.get_flag(engine_state, stack, "content-type")?
|
||||
.or_else(|| maybe_metadata.and_then(|m| m.content_type));
|
||||
|
||||
if let HttpBody::None = data {
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Data must be provided either through pipeline or positional argument".into(),
|
||||
msg: "".into(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
let args = Arguments {
|
||||
url: call.req(engine_state, stack, 0)?,
|
||||
headers: call.get_flag(engine_state, stack, "headers")?,
|
||||
data: call.req(engine_state, stack, 1)?,
|
||||
content_type: call.get_flag(engine_state, stack, "content-type")?,
|
||||
data,
|
||||
content_type,
|
||||
raw: call.has_flag(engine_state, stack, "raw")?,
|
||||
insecure: call.has_flag(engine_state, stack, "insecure")?,
|
||||
user: call.get_flag(engine_state, stack, "user")?,
|
||||
|
@ -185,7 +214,7 @@ fn helper(
|
|||
request = request_add_authorization_header(args.user, args.password, request);
|
||||
request = request_add_custom_headers(args.headers, request)?;
|
||||
|
||||
let response = send_request(request.clone(), Some(args.data), args.content_type, ctrl_c);
|
||||
let response = send_request(request.clone(), args.data, args.content_type, ctrl_c);
|
||||
|
||||
let request_flags = RequestFlags {
|
||||
raw: args.raw,
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use crate::network::http::client::{
|
||||
check_response_redirection, http_client, http_parse_redirect_mode, http_parse_url,
|
||||
request_add_authorization_header, request_add_custom_headers, request_handle_response,
|
||||
request_set_timeout, send_request, RequestFlags,
|
||||
request_set_timeout, send_request, HttpBody, RequestFlags,
|
||||
};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
|
@ -15,10 +15,10 @@ impl Command for SubCommand {
|
|||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("http put")
|
||||
.input_output_types(vec![(Type::Nothing, Type::Any)])
|
||||
.input_output_types(vec![(Type::Any, Type::Any)])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("URL", SyntaxShape::String, "The URL to post to.")
|
||||
.required("data", SyntaxShape::Any, "The contents of the post body.")
|
||||
.optional("data", SyntaxShape::Any, "The contents of the post body. Required unless part of a pipeline.")
|
||||
.named(
|
||||
"user",
|
||||
SyntaxShape::Any,
|
||||
|
@ -122,6 +122,11 @@ impl Command for SubCommand {
|
|||
example: "http put --content-type application/json https://www.example.com { field: value }",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Put JSON content from a pipeline to example.com",
|
||||
example: "open foo.json | http put https://www.example.com",
|
||||
result: None,
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
@ -129,7 +134,7 @@ impl Command for SubCommand {
|
|||
struct Arguments {
|
||||
url: Value,
|
||||
headers: Option<Value>,
|
||||
data: Value,
|
||||
data: HttpBody,
|
||||
content_type: Option<String>,
|
||||
raw: bool,
|
||||
insecure: bool,
|
||||
|
@ -145,13 +150,38 @@ fn run_put(
|
|||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let (data, maybe_metadata) = call
|
||||
.opt::<Value>(engine_state, stack, 1)?
|
||||
.map(|v| (HttpBody::Value(v), None))
|
||||
.unwrap_or_else(|| match input {
|
||||
PipelineData::Value(v, metadata) => (HttpBody::Value(v), metadata),
|
||||
PipelineData::ByteStream(byte_stream, metadata) => {
|
||||
(HttpBody::ByteStream(byte_stream), metadata)
|
||||
}
|
||||
_ => (HttpBody::None, None),
|
||||
});
|
||||
|
||||
if let HttpBody::None = data {
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Data must be provided either through pipeline or positional argument".into(),
|
||||
msg: "".into(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
let content_type = call
|
||||
.get_flag(engine_state, stack, "content-type")?
|
||||
.or_else(|| maybe_metadata.and_then(|m| m.content_type));
|
||||
|
||||
let args = Arguments {
|
||||
url: call.req(engine_state, stack, 0)?,
|
||||
headers: call.get_flag(engine_state, stack, "headers")?,
|
||||
data: call.req(engine_state, stack, 1)?,
|
||||
content_type: call.get_flag(engine_state, stack, "content-type")?,
|
||||
data,
|
||||
content_type,
|
||||
raw: call.has_flag(engine_state, stack, "raw")?,
|
||||
insecure: call.has_flag(engine_state, stack, "insecure")?,
|
||||
user: call.get_flag(engine_state, stack, "user")?,
|
||||
|
@ -185,7 +215,7 @@ fn helper(
|
|||
request = request_add_authorization_header(args.user, args.password, request);
|
||||
request = request_add_custom_headers(args.headers, request)?;
|
||||
|
||||
let response = send_request(request.clone(), Some(args.data), args.content_type, ctrl_c);
|
||||
let response = send_request(request.clone(), args.data, args.content_type, ctrl_c);
|
||||
|
||||
let request_flags = RequestFlags {
|
||||
raw: args.raw,
|
||||
|
|
|
@ -19,6 +19,9 @@ static CHAR_MAP: Lazy<IndexMap<&'static str, String>> = Lazy::new(|| {
|
|||
// These are some regular characters that either can't be used or
|
||||
// it's just easier to use them like this.
|
||||
|
||||
"nul" => '\x00'.to_string(), // nul character, 0x00
|
||||
"null_byte" => '\x00'.to_string(), // nul character, 0x00
|
||||
"zero_byte" => '\x00'.to_string(), // nul character, 0x00
|
||||
// This is the "normal" characters section
|
||||
"newline" => '\n'.to_string(),
|
||||
"enter" => '\n'.to_string(),
|
||||
|
|
|
@ -72,8 +72,10 @@ impl GuessWidth {
|
|||
|
||||
let mut rows = Vec::new();
|
||||
while let Ok(columns) = self.read() {
|
||||
if !columns.is_empty() {
|
||||
rows.push(columns);
|
||||
}
|
||||
}
|
||||
rows
|
||||
}
|
||||
|
||||
|
@ -175,34 +177,47 @@ fn separator_position(lr: &[char], p: usize, pos: &[usize], n: usize) -> usize {
|
|||
|
||||
fn split(line: &str, pos: &[usize], trim_space: bool) -> Vec<String> {
|
||||
let mut n = 0;
|
||||
let mut start = 0;
|
||||
let mut start_char = 0;
|
||||
let mut columns = Vec::with_capacity(pos.len() + 1);
|
||||
let lr: Vec<char> = line.chars().collect();
|
||||
let (line_char_boundaries, line_chars): (Vec<usize>, Vec<char>) = line.char_indices().unzip();
|
||||
let mut w = 0;
|
||||
|
||||
for p in 0..lr.len() {
|
||||
if line_chars.is_empty() || line_chars.iter().all(|&c| c.is_whitespace()) {
|
||||
// current line is completely empty, or only filled with whitespace
|
||||
return Vec::new();
|
||||
} else if !pos.is_empty()
|
||||
&& line_chars.iter().all(|&c| !c.is_whitespace())
|
||||
&& pos[0] < UnicodeWidthStr::width(line)
|
||||
{
|
||||
// we have more than 1 column in the input, but the current line has no whitespace,
|
||||
// and it is longer than the first detected column separation position
|
||||
// this indicates some kind of decoration line. let's skip it
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
for p in 0..line_char_boundaries.len() {
|
||||
if pos.is_empty() || n > pos.len() - 1 {
|
||||
start = p;
|
||||
start_char = p;
|
||||
break;
|
||||
}
|
||||
|
||||
if pos[n] <= w {
|
||||
let end = separator_position(&lr, p, pos, n);
|
||||
if start > end {
|
||||
let end_char = separator_position(&line_chars, p, pos, n);
|
||||
if start_char > end_char {
|
||||
break;
|
||||
}
|
||||
let col = &line[start..end];
|
||||
let col = &line[line_char_boundaries[start_char]..line_char_boundaries[end_char]];
|
||||
let col = if trim_space { col.trim() } else { col };
|
||||
columns.push(col.to_string());
|
||||
n += 1;
|
||||
start = end;
|
||||
start_char = end_char;
|
||||
}
|
||||
|
||||
w += UnicodeWidthStr::width(lr[p].to_string().as_str());
|
||||
w += UnicodeWidthStr::width(line_chars[p].to_string().as_str());
|
||||
}
|
||||
|
||||
// add last part.
|
||||
let col = &line[start..];
|
||||
let col = &line[line_char_boundaries[start_char]..];
|
||||
let col = if trim_space { col.trim() } else { col };
|
||||
columns.push(col.to_string());
|
||||
columns
|
||||
|
@ -423,6 +438,162 @@ D: 104792064 17042676 87749388 17% /d";
|
|||
assert_eq!(got, want);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_guess_width_multibyte() {
|
||||
let input = "A… B\nC… D";
|
||||
let r = Box::new(std::io::BufReader::new(input.as_bytes())) as Box<dyn std::io::Read>;
|
||||
let reader = std::io::BufReader::new(r);
|
||||
|
||||
let mut guess_width = GuessWidth {
|
||||
reader,
|
||||
pos: Vec::new(),
|
||||
pre_lines: Vec::new(),
|
||||
pre_count: 0,
|
||||
limit_split: 0,
|
||||
};
|
||||
|
||||
let want = vec![vec!["A…", "B"], vec!["C…", "D"]];
|
||||
let got = guess_width.read_all();
|
||||
assert_eq!(got, want);
|
||||
}
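The multibyte fix above hinges on the difference between character counts and byte offsets: Rust string slicing requires byte indices, which is why `split()` now records `char_indices()` boundaries instead of slicing by character positions. A small illustrative test (not part of the patch):

```rust
#[test]
fn char_boundaries_vs_char_counts() {
    // Byte offsets and char counts diverge for multibyte input.
    let line = "A…B";
    let (boundaries, chars): (Vec<usize>, Vec<char>) = line.char_indices().unzip();
    assert_eq!(chars.len(), 3); // three characters...
    assert_eq!(line.len(), 5); // ...but five bytes ("…" is three bytes in UTF-8)
    assert_eq!(&line[boundaries[1]..], "…B"); // slice at a byte boundary, not a char count
}
```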
|
||||
|
||||
#[test]
|
||||
fn test_guess_width_combining_diacritical_marks() {
|
||||
let input = "Name Surname
|
||||
Ștefan Țincu ";
|
||||
|
||||
let r = Box::new(std::io::BufReader::new(input.as_bytes())) as Box<dyn std::io::Read>;
|
||||
let reader = std::io::BufReader::new(r);
|
||||
|
||||
let mut guess_width = GuessWidth {
|
||||
reader,
|
||||
pos: Vec::new(),
|
||||
pre_lines: Vec::new(),
|
||||
pre_count: 0,
|
||||
limit_split: 0,
|
||||
};
|
||||
|
||||
let want = vec![vec!["Name", "Surname"], vec!["Ștefan", "Țincu"]];
|
||||
let got = guess_width.read_all();
|
||||
assert_eq!(got, want);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_guess_width_single_column() {
|
||||
let input = "A
|
||||
|
||||
B
|
||||
|
||||
C";
|
||||
|
||||
let r = Box::new(std::io::BufReader::new(input.as_bytes())) as Box<dyn std::io::Read>;
|
||||
let reader = std::io::BufReader::new(r);
|
||||
|
||||
let mut guess_width = GuessWidth {
|
||||
reader,
|
||||
pos: Vec::new(),
|
||||
pre_lines: Vec::new(),
|
||||
pre_count: 0,
|
||||
limit_split: 0,
|
||||
};
|
||||
|
||||
let want = vec![vec!["A"], vec!["B"], vec!["C"]];
|
||||
let got = guess_width.read_all();
|
||||
assert_eq!(got, want);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_guess_width_row_without_whitespace() {
|
||||
let input = "A B C D
|
||||
-------
|
||||
E F G H";
|
||||
|
||||
let r = Box::new(std::io::BufReader::new(input.as_bytes())) as Box<dyn std::io::Read>;
|
||||
let reader = std::io::BufReader::new(r);
|
||||
|
||||
let mut guess_width = GuessWidth {
|
||||
reader,
|
||||
pos: Vec::new(),
|
||||
pre_lines: Vec::new(),
|
||||
pre_count: 0,
|
||||
limit_split: 0,
|
||||
};
|
||||
|
||||
let want = vec![vec!["A", "B", "C", "D"], vec!["E", "F", "G", "H"]];
|
||||
let got = guess_width.read_all();
|
||||
assert_eq!(got, want);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_guess_width_row_with_single_column() {
|
||||
let input = "A B C D
|
||||
E
|
||||
F G H I";
|
||||
|
||||
let r = Box::new(std::io::BufReader::new(input.as_bytes())) as Box<dyn std::io::Read>;
|
||||
let reader = std::io::BufReader::new(r);
|
||||
|
||||
let mut guess_width = GuessWidth {
|
||||
reader,
|
||||
pos: Vec::new(),
|
||||
pre_lines: Vec::new(),
|
||||
pre_count: 0,
|
||||
limit_split: 0,
|
||||
};
|
||||
|
||||
let want = vec![
|
||||
vec!["A", "B", "C", "D"],
|
||||
vec!["E"],
|
||||
vec!["F", "G", "H", "I"],
|
||||
];
|
||||
let got = guess_width.read_all();
|
||||
assert_eq!(got, want);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_guess_width_empty_row() {
|
||||
let input = "A B C D
|
||||
|
||||
E F G H";
|
||||
|
||||
let r = Box::new(std::io::BufReader::new(input.as_bytes())) as Box<dyn std::io::Read>;
|
||||
let reader = std::io::BufReader::new(r);
|
||||
|
||||
let mut guess_width = GuessWidth {
|
||||
reader,
|
||||
pos: Vec::new(),
|
||||
pre_lines: Vec::new(),
|
||||
pre_count: 0,
|
||||
limit_split: 0,
|
||||
};
|
||||
|
||||
let want = vec![vec!["A", "B", "C", "D"], vec!["E", "F", "G", "H"]];
|
||||
let got = guess_width.read_all();
|
||||
assert_eq!(got, want);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_guess_width_row_with_only_whitespace() {
|
||||
let input = "A B C D
|
||||
|
||||
E F G H";
|
||||
|
||||
let r = Box::new(std::io::BufReader::new(input.as_bytes())) as Box<dyn std::io::Read>;
|
||||
let reader = std::io::BufReader::new(r);
|
||||
|
||||
let mut guess_width = GuessWidth {
|
||||
reader,
|
||||
pos: Vec::new(),
|
||||
pre_lines: Vec::new(),
|
||||
pre_count: 0,
|
||||
limit_split: 0,
|
||||
};
|
||||
|
||||
let want = vec![vec!["A", "B", "C", "D"], vec!["E", "F", "G", "H"]];
|
||||
let got = guess_width.read_all();
|
||||
assert_eq!(got, want);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_to_table() {
|
||||
let lines = vec![
|
||||
|
|
98
crates/nu-command/src/strings/str_/deunicode.rs
Normal file
|
@ -0,0 +1,98 @@
|
|||
use deunicode::deunicode;
|
||||
use nu_cmd_base::input_handler::{operate, CellPathOnlyArgs};
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::engine::StateWorkingSet;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
|
||||
impl Command for SubCommand {
|
||||
fn name(&self) -> &str {
|
||||
"str deunicode"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("str deunicode")
|
||||
.input_output_types(vec![(Type::String, Type::String)])
|
||||
.category(Category::Strings)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Convert Unicode string to pure ASCII."
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["convert", "ascii"]
|
||||
}
|
||||
|
||||
fn is_const(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
|
||||
let args = CellPathOnlyArgs::from(cell_paths);
|
||||
|
||||
operate(action, args, input, call.head, engine_state.ctrlc.clone())
|
||||
}
|
||||
|
||||
fn run_const(
|
||||
&self,
|
||||
working_set: &StateWorkingSet,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let cell_paths: Vec<CellPath> = call.rest_const(working_set, 0)?;
|
||||
let args = CellPathOnlyArgs::from(cell_paths);
|
||||
|
||||
operate(
|
||||
action,
|
||||
args,
|
||||
input,
|
||||
call.head,
|
||||
working_set.permanent().ctrlc.clone(),
|
||||
)
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "deunicode a string",
|
||||
example: "'A…B' | str deunicode",
|
||||
result: Some(Value::test_string("A...B")),
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
fn action(input: &Value, _args: &CellPathOnlyArgs, head: Span) -> Value {
|
||||
match input {
|
||||
Value::String { val, .. } => Value::string(deunicode(val), head),
|
||||
Value::Error { .. } => input.clone(),
|
||||
_ => Value::error(
|
||||
ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "string".into(),
|
||||
wrong_type: input.get_type().to_string(),
|
||||
dst_span: head,
|
||||
src_span: input.span(),
|
||||
},
|
||||
head,
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_examples() {
|
||||
use crate::test_examples;
|
||||
|
||||
test_examples(SubCommand {})
|
||||
}
|
||||
}
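As a quick illustration of what the new command delegates to, the `deunicode` crate exposes a single top-level function that transliterates Unicode text to ASCII. A sketch (the expected output matches the command's own example above):

```rust
use deunicode::deunicode;

#[test]
fn deunicode_crate_basics() {
    // The command's action() is essentially a thin wrapper around this call.
    assert_eq!(deunicode("A…B"), "A...B");
}
```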
|
|
@@ -1,5 +1,6 @@
mod case;
mod contains;
mod deunicode;
mod distance;
mod ends_with;
mod expand;
@@ -15,6 +16,7 @@ mod trim;
pub use case::*;
pub use contains::SubCommand as StrContains;
pub use deunicode::SubCommand as StrDeunicode;
pub use distance::SubCommand as StrDistance;
pub use ends_with::SubCommand as StrEndswith;
pub use expand::SubCommand as StrExpand;
|
|
|
@ -605,6 +605,7 @@ fn handle_row_stream(
|
|||
// First, `ls` sources:
|
||||
Some(PipelineMetadata {
|
||||
data_source: DataSource::Ls,
|
||||
..
|
||||
}) => {
|
||||
let config = get_config(input.engine_state, input.stack);
|
||||
let ls_colors_env_str = match input.stack.get_env_var(input.engine_state, "LS_COLORS") {
|
||||
|
@ -636,6 +637,7 @@ fn handle_row_stream(
|
|||
// Next, `to html -l` sources:
|
||||
Some(PipelineMetadata {
|
||||
data_source: DataSource::HtmlThemes,
|
||||
..
|
||||
}) => {
|
||||
stream.map(|mut value| {
|
||||
if let Value::Record { val: record, .. } = &mut value {
|
||||
|
|
|
@ -17,6 +17,16 @@ fn find_with_list_search_with_char() {
|
|||
assert_eq!(actual.out, "[\"\\u001b[37m\\u001b[0m\\u001b[41;37ml\\u001b[0m\\u001b[37marry\\u001b[0m\",\"\\u001b[37mcur\\u001b[0m\\u001b[41;37ml\\u001b[0m\\u001b[37my\\u001b[0m\"]");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn find_with_bytestream_search_with_char() {
|
||||
let actual =
|
||||
nu!("\"ABC\" | save foo.txt; let res = open foo.txt | find abc; rm foo.txt; $res | get 0");
|
||||
assert_eq!(
|
||||
actual.out,
|
||||
"\u{1b}[37m\u{1b}[0m\u{1b}[41;37mABC\u{1b}[0m\u{1b}[37m\u{1b}[0m"
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn find_with_list_search_with_number() {
|
||||
let actual = nu!("[1 2 3 4 5] | find 3 | get 0");
|
||||
|
|
|
@ -20,6 +20,25 @@ fn http_delete_is_success() {
|
|||
assert!(actual.out.is_empty())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn http_delete_is_success_pipeline() {
|
||||
let mut server = Server::new();
|
||||
|
||||
let _mock = server.mock("DELETE", "/").create();
|
||||
|
||||
let actual = nu!(pipeline(
|
||||
format!(
|
||||
r#"
|
||||
"foo" | http delete {url}
|
||||
"#,
|
||||
url = server.url()
|
||||
)
|
||||
.as_str()
|
||||
));
|
||||
|
||||
assert!(actual.out.is_empty())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn http_delete_failed_due_to_server_error() {
|
||||
let mut server = Server::new();
|
||||
|
|
|
@ -20,6 +20,25 @@ fn http_patch_is_success() {
|
|||
assert!(actual.out.is_empty())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn http_patch_is_success_pipeline() {
|
||||
let mut server = Server::new();
|
||||
|
||||
let _mock = server.mock("PATCH", "/").match_body("foo").create();
|
||||
|
||||
let actual = nu!(pipeline(
|
||||
format!(
|
||||
r#"
|
||||
"foo" | http patch {url}
|
||||
"#,
|
||||
url = server.url()
|
||||
)
|
||||
.as_str()
|
||||
));
|
||||
|
||||
assert!(actual.out.is_empty())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn http_patch_failed_due_to_server_error() {
|
||||
let mut server = Server::new();
|
||||
|
@ -55,7 +74,9 @@ fn http_patch_failed_due_to_missing_body() {
|
|||
.as_str()
|
||||
));
|
||||
|
||||
assert!(actual.err.contains("Usage: http patch"))
|
||||
assert!(actual
|
||||
.err
|
||||
.contains("Data must be provided either through pipeline or positional argument"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -19,6 +19,24 @@ fn http_post_is_success() {
|
|||
|
||||
assert!(actual.out.is_empty())
|
||||
}
|
||||
#[test]
|
||||
fn http_post_is_success_pipeline() {
|
||||
let mut server = Server::new();
|
||||
|
||||
let _mock = server.mock("POST", "/").match_body("foo").create();
|
||||
|
||||
let actual = nu!(pipeline(
|
||||
format!(
|
||||
r#"
|
||||
"foo" | http post {url}
|
||||
"#,
|
||||
url = server.url()
|
||||
)
|
||||
.as_str()
|
||||
));
|
||||
|
||||
assert!(actual.out.is_empty())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn http_post_failed_due_to_server_error() {
|
||||
|
@ -55,7 +73,9 @@ fn http_post_failed_due_to_missing_body() {
|
|||
.as_str()
|
||||
));
|
||||
|
||||
assert!(actual.err.contains("Usage: http post"))
|
||||
assert!(actual
|
||||
.err
|
||||
.contains("Data must be provided either through pipeline or positional argument"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -20,6 +20,25 @@ fn http_put_is_success() {
|
|||
assert!(actual.out.is_empty())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn http_put_is_success_pipeline() {
|
||||
let mut server = Server::new();
|
||||
|
||||
let _mock = server.mock("PUT", "/").match_body("foo").create();
|
||||
|
||||
let actual = nu!(pipeline(
|
||||
format!(
|
||||
r#"
|
||||
"foo" | http put {url}
|
||||
"#,
|
||||
url = server.url()
|
||||
)
|
||||
.as_str()
|
||||
));
|
||||
|
||||
assert!(actual.out.is_empty())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn http_put_failed_due_to_server_error() {
|
||||
let mut server = Server::new();
|
||||
|
@ -55,7 +74,9 @@ fn http_put_failed_due_to_missing_body() {
|
|||
.as_str()
|
||||
));
|
||||
|
||||
assert!(actual.err.contains("Usage: http put"))
|
||||
assert!(actual
|
||||
.err
|
||||
.contains("Data must be provided either through pipeline or positional argument"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -284,7 +284,7 @@ fn from_csv_text_skipping_headers_to_table() {
|
|||
r#"
|
||||
open los_tres_amigos.txt
|
||||
| from csv --noheaders
|
||||
| get column3
|
||||
| get column2
|
||||
| length
|
||||
"#
|
||||
));
|
||||
|
|
|
@ -74,7 +74,7 @@ fn from_ssv_text_treating_first_line_as_data_with_flag() {
|
|||
open oc_get_svc.txt
|
||||
| from ssv --noheaders -a
|
||||
| first
|
||||
| get column1
|
||||
| get column0
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -84,7 +84,7 @@ fn from_ssv_text_treating_first_line_as_data_with_flag() {
|
|||
open oc_get_svc.txt
|
||||
| from ssv --noheaders
|
||||
| first
|
||||
| get column1
|
||||
| get column0
|
||||
|
||||
"#
|
||||
));
|
||||
|
|
|
@ -207,7 +207,7 @@ fn from_tsv_text_skipping_headers_to_table() {
|
|||
r#"
|
||||
open los_tres_amigos.txt
|
||||
| from tsv --noheaders
|
||||
| get column3
|
||||
| get column2
|
||||
| length
|
||||
"#
|
||||
));
|
||||
|
|
|
@ -2828,6 +2828,36 @@ pub fn parse_string(working_set: &mut StateWorkingSet, span: Span) -> Expression
|
|||
if bytes[0] != b'\'' && bytes[0] != b'"' && bytes[0] != b'`' && bytes.contains(&b'(') {
|
||||
return parse_string_interpolation(working_set, span);
|
||||
}
|
||||
// Check for unbalanced quotes:
|
||||
{
|
||||
if bytes.starts_with(b"\"")
|
||||
&& (bytes.iter().filter(|ch| **ch == b'"').count() > 1 && !bytes.ends_with(b"\""))
|
||||
{
|
||||
let close_delimiter_index = bytes
|
||||
.iter()
|
||||
.skip(1)
|
||||
.position(|ch| *ch == b'"')
|
||||
.expect("Already check input bytes contains at least two double quotes");
|
||||
// needs `+2` rather than `+1`, because we skipped 1 byte to find close_delimiter_index above.
|
||||
let span = Span::new(span.start + close_delimiter_index + 2, span.end);
|
||||
working_set.error(ParseError::ExtraTokensAfterClosingDelimiter(span));
|
||||
return garbage(working_set, span);
|
||||
}
|
||||
|
||||
if bytes.starts_with(b"\'")
|
||||
&& (bytes.iter().filter(|ch| **ch == b'\'').count() > 1 && !bytes.ends_with(b"\'"))
|
||||
{
|
||||
let close_delimiter_index = bytes
|
||||
.iter()
|
||||
.skip(1)
|
||||
.position(|ch| *ch == b'\'')
|
||||
.expect("Already check input bytes contains at least two double quotes");
|
||||
// needs `+2` rather than `+1`, because we skipped 1 byte to find close_delimiter_index above.
|
||||
let span = Span::new(span.start + close_delimiter_index + 2, span.end);
|
||||
working_set.error(ParseError::ExtraTokensAfterClosingDelimiter(span));
|
||||
return garbage(working_set, span);
|
||||
}
|
||||
}
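To make the trigger condition concrete, here is a stand-alone sketch of the same check (illustrative only; the variable names are not taken from the parser):

```rust
#[test]
fn unbalanced_quote_condition() {
    // A token like "foo"bar starts with a quote, contains a second quote,
    // but does not end with one -- that is what the new error reports.
    let bytes: &[u8] = br#""foo"bar"#;
    let unbalanced = bytes.starts_with(b"\"")
        && bytes.iter().filter(|ch| **ch == b'"').count() > 1
        && !bytes.ends_with(b"\"");
    assert!(unbalanced);
}
```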
|
||||
|
||||
let (s, err) = unescape_unquote_string(bytes, span);
|
||||
if let Some(err) = err {
|
||||
|
|
|
@ -18,6 +18,7 @@ use std::{path::Path, sync::Arc};
|
|||
fn test_metadata() -> PipelineMetadata {
|
||||
PipelineMetadata {
|
||||
data_source: DataSource::FilePath("/test/path".into()),
|
||||
content_type: None,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -258,7 +259,7 @@ fn read_pipeline_data_prepared_properly() -> Result<(), ShellError> {
|
|||
});
|
||||
match manager.read_pipeline_data(header, None)? {
|
||||
PipelineData::ListStream(_, meta) => match meta {
|
||||
Some(PipelineMetadata { data_source }) => match data_source {
|
||||
Some(PipelineMetadata { data_source, .. }) => match data_source {
|
||||
DataSource::FilePath(path) => {
|
||||
assert_eq!(Path::new("/test/path"), path);
|
||||
Ok(())
|
||||
|
|
|
@ -17,6 +17,13 @@ pub enum ParseError {
|
|||
#[diagnostic(code(nu::parser::extra_tokens), help("Try removing them."))]
|
||||
ExtraTokens(#[label = "extra tokens"] Span),
|
||||
|
||||
#[error("Invalid characters after closing delimiter")]
|
||||
#[diagnostic(
|
||||
code(nu::parser::extra_token_after_closing_delimiter),
|
||||
help("Try removing them.")
|
||||
)]
|
||||
ExtraTokensAfterClosingDelimiter(#[label = "invalid characters"] Span),
|
||||
|
||||
#[error("Extra positional argument.")]
|
||||
#[diagnostic(code(nu::parser::extra_positional), help("Usage: {0}"))]
|
||||
ExtraPositional(String, #[label = "extra positional argument"] Span),
|
||||
|
@ -577,6 +584,7 @@ impl ParseError {
|
|||
ParseError::LabeledErrorWithHelp { span: s, .. } => *s,
|
||||
ParseError::RedirectingBuiltinCommand(_, s, _) => *s,
|
||||
ParseError::UnexpectedSpreadArg(_, s) => *s,
|
||||
ParseError::ExtraTokensAfterClosingDelimiter(s) => *s,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1,18 +1,37 @@
use std::path::PathBuf;

/// Metadata that is valid for the whole [`PipelineData`](crate::PipelineData)
#[derive(Debug, Clone)]
#[derive(Debug, Default, Clone)]
pub struct PipelineMetadata {
    pub data_source: DataSource,
    pub content_type: Option<String>,
}

impl PipelineMetadata {
    pub fn with_data_source(self, data_source: DataSource) -> Self {
        Self {
            data_source,
            ..self
        }
    }

    pub fn with_content_type(self, content_type: Option<String>) -> Self {
        Self {
            content_type,
            ..self
        }
    }
}

/// Describes where the particular [`PipelineMetadata`] originates.
///
/// This can either be a particular family of commands (useful so downstream commands can adjust
/// the presentation e.g. `Ls`) or the opened file to protect against overwrite-attempts properly.
#[derive(Debug, Clone)]
#[derive(Debug, Default, Clone)]
pub enum DataSource {
    Ls,
    HtmlThemes,
    FilePath(PathBuf),
    #[default]
    None,
}
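A short sketch of how the new `Default` derive and builder-style methods are meant to be combined at a call site (hypothetical usage, not taken from the patch; the path and content type are made up):

```rust
// Attach a data source and content type to otherwise-default metadata,
// e.g. when a command knows it is emitting JSON read from a file.
let metadata = PipelineMetadata::default()
    .with_data_source(DataSource::FilePath("/tmp/example.json".into()))
    .with_content_type(Some("application/json".into()));
```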
|
||||
|
|
|
@ -204,7 +204,7 @@ More design guidelines:
|
|||
### Useful Commands
|
||||
- Run all unit tests for the standard library:
|
||||
```nushell
|
||||
cargo run -- -c 'use std testing; testing run-tests --path crates/nu-std'
|
||||
cargo run -- -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std'
|
||||
```
|
||||
> **Note**
|
||||
> This uses the debug version of the Nu interpreter from the same repo, which is
|
||||
|
@ -216,7 +216,7 @@ More design guidelines:
|
|||
- Run all tests for a specific test module, e.g,
|
||||
`crates/nu-std/tests/test_foo.nu`
|
||||
```nushell
|
||||
cargo run -- -c 'use std testing; testing run-tests --path crates/nu-std --module test_foo'
|
||||
cargo run -- -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std --module test_foo'
|
||||
```
|
||||
- Run a custom command with additional logging (assuming you have instrumented
|
||||
the command with `log <level>`, as we recommend.)
|
||||
|
|
|
@ -66,7 +66,7 @@ export def --env "path add" [
|
|||
"record" => { $p | get --ignore-errors $nu.os-info.name },
|
||||
}
|
||||
|
||||
$p | path expand
|
||||
$p | path expand --no-symlink
|
||||
}
|
||||
|
||||
if null in $paths or ($paths | is-empty) {
|
||||
|
@ -80,7 +80,6 @@ export def --env "path add" [
|
|||
$env
|
||||
| get $path_name
|
||||
| split row (char esep)
|
||||
| path expand
|
||||
| if $append { append $paths } else { prepend $paths }
|
||||
)}
|
||||
|
||||
|
|
|
@ -38,8 +38,8 @@ def assert_error [] {
|
|||
assert error $failing_code
|
||||
|
||||
let good_code = {|| }
|
||||
let assert_error_raised = (try { do assert $good_code; false } catch { true })
|
||||
assert $assert_error_raised "The assert error should raise an error if there is no error in the executed code."
|
||||
let assert_error_raised = (try { assert error $good_code; false } catch { true })
|
||||
assert $assert_error_raised "The assert error should be false if there is no error in the executed code."
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -38,12 +38,38 @@ def path_add [] {
|
|||
std path add $target_paths
|
||||
assert equal (get_path) ([($target_paths | get $nu.os-info.name)] | path expand)
|
||||
|
||||
load-env {$path_name: [$"/foo(char esep)/bar"]}
|
||||
load-env {$path_name: [$"(["/foo", "/bar"] | path expand | str join (char esep))"]}
|
||||
std path add "~/foo"
|
||||
assert equal (get_path) (["~/foo", "/foo", "/bar"] | path expand)
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
def path_add_expand [] {
|
||||
use std assert
|
||||
|
||||
# random paths to avoid collision, especially if left dangling on failure
|
||||
let real_dir = $nu.temp-path | path join $"real-dir-(random chars)"
|
||||
let link_dir = $nu.temp-path | path join $"link-dir-(random chars)"
|
||||
mkdir $real_dir
|
||||
let path_name = if $nu.os-info.family == 'windows' {
|
||||
mklink /D $link_dir $real_dir
|
||||
"Path"
|
||||
} else {
|
||||
ln -s $real_dir $link_dir | ignore
|
||||
"PATH"
|
||||
}
|
||||
|
||||
with-env {$path_name: []} {
|
||||
def get_path [] { $env | get $path_name }
|
||||
|
||||
std path add $link_dir
|
||||
assert equal (get_path) ([$link_dir])
|
||||
}
|
||||
|
||||
rm $real_dir $link_dir
|
||||
}
|
||||
|
||||
#[test]
|
||||
def banner [] {
|
||||
std assert ((std banner | lines | length) == 15)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
use log::info;
|
||||
use lscolors::LsColors;
|
||||
use std::io::{Result, Write};
|
||||
|
||||
|
@ -393,31 +392,27 @@ pub fn get_ls_colors(lscolors_env_string: Option<String>) -> LsColors {
|
|||
}
|
||||
|
||||
// Log some performance metrics (green text with yellow timings)
|
||||
pub fn perf(
|
||||
msg: &str,
|
||||
dur: std::time::Instant,
|
||||
file: &str,
|
||||
line: u32,
|
||||
column: u32,
|
||||
use_color: bool,
|
||||
) {
|
||||
if use_color {
|
||||
info!(
|
||||
#[macro_export]
|
||||
macro_rules! perf {
|
||||
($msg:expr, $dur:expr, $use_color:expr) => {
|
||||
if $use_color {
|
||||
log::info!(
|
||||
"perf: {}:{}:{} \x1b[32m{}\x1b[0m took \x1b[33m{:?}\x1b[0m",
|
||||
file,
|
||||
line,
|
||||
column,
|
||||
msg,
|
||||
dur.elapsed(),
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
$msg,
|
||||
$dur.elapsed(),
|
||||
);
|
||||
} else {
|
||||
info!(
|
||||
log::info!(
|
||||
"perf: {}:{}:{} {} took {:?}",
|
||||
file,
|
||||
line,
|
||||
column,
|
||||
msg,
|
||||
dur.elapsed(),
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
$msg,
|
||||
$dur.elapsed(),
|
||||
);
|
||||
}
|
||||
};
|
||||
}
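Call sites of the old `perf()` function migrate to the macro roughly like this (a sketch; `start_time` and `use_color` are placeholders for whatever the caller already has, and the import path follows the `use nu_utils::perf;` seen later in this commit):

```rust
use std::time::Instant;

let start_time = Instant::now();
// ... do the work being measured ...
let use_color = true; // placeholder; callers pass their engine's color setting
perf!("Lazy json lines dataframe open", start_time, use_color);
```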
|
||||
|
|
|
@ -1,10 +1,37 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use nu_plugin::{EngineInterface, EvaluatedCall, SimplePluginCommand};
|
||||
use nu_protocol::{Category, LabeledError, Signature, Type, Value};
|
||||
use nu_protocol::{Category, FromValue, LabeledError, Signature, Spanned, Type, Value};
|
||||
|
||||
use crate::ExamplePlugin;
|
||||
|
||||
pub struct Config;
|
||||
|
||||
/// Example config struct.
|
||||
///
|
||||
/// Using the `FromValue` derive macro, structs can be easily loaded from [`Value`]s,
|
||||
/// similar to serde's `Deserialize` macro.
|
||||
/// This is handy for plugin configs or piped data.
|
||||
/// All fields must implement [`FromValue`].
|
||||
/// For [`Option`] fields, they can be omitted in the config.
|
||||
///
|
||||
/// This example shows that nested and spanned data work too, so you can describe nested
|
||||
/// structures and get spans of values wrapped in [`Spanned`].
|
||||
/// Since this config uses only `Option`s, no field is required in the config.
|
||||
#[allow(dead_code)]
|
||||
#[derive(Debug, FromValue)]
|
||||
struct PluginConfig {
|
||||
path: Option<Spanned<PathBuf>>,
|
||||
nested: Option<SubConfig>,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(Debug, FromValue)]
|
||||
struct SubConfig {
|
||||
bool: bool,
|
||||
string: String,
|
||||
}
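To illustrate what the derive gives you, here is a hedged sketch of loading this struct from a hand-built `Value`. The `record!` macro and `Value::test_*` helpers come from `nu_protocol`; the field values themselves are made up:

```rust
use nu_protocol::{record, FromValue, Value};

// Build a record shaped like `$env.config.plugins.example` and load it.
let value = Value::test_record(record! {
    "path" => Value::test_string("/tmp/example"),
    "nested" => Value::test_record(record! {
        "bool" => Value::test_bool(true),
        "string" => Value::test_string("hello"),
    }),
});
let config = PluginConfig::from_value(value).expect("config should deserialize");
```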
|
||||
|
||||
impl SimplePluginCommand for Config {
|
||||
type Plugin = ExamplePlugin;
|
||||
|
||||
|
@ -39,7 +66,11 @@ impl SimplePluginCommand for Config {
|
|||
) -> Result<Value, LabeledError> {
|
||||
let config = engine.get_plugin_config()?;
|
||||
match config {
|
||||
Some(config) => Ok(config.clone()),
|
||||
Some(value) => {
|
||||
let config = PluginConfig::from_value(value.clone())?;
|
||||
println!("got config {config:?}");
|
||||
Ok(value)
|
||||
}
|
||||
None => Err(LabeledError::new("No config sent").with_label(
|
||||
"configuration for this plugin was not found in `$env.config.plugins.example`",
|
||||
call.head,
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use nu_plugin::{EngineInterface, EvaluatedCall, SimplePluginCommand};
|
||||
use nu_protocol::{record, Category, LabeledError, Signature, SyntaxShape, Value};
|
||||
use nu_protocol::{Category, IntoValue, LabeledError, Signature, SyntaxShape, Value};
|
||||
|
||||
use crate::ExamplePlugin;
|
||||
|
||||
|
@ -38,14 +38,22 @@ impl SimplePluginCommand for Two {
|
|||
) -> Result<Value, LabeledError> {
|
||||
plugin.print_values(2, call, input)?;
|
||||
|
||||
// Use the IntoValue derive macro and trait to easily build structured output data.
|
||||
#[derive(IntoValue)]
|
||||
struct Output {
|
||||
one: i64,
|
||||
two: i64,
|
||||
three: i64,
|
||||
}
|
||||
|
||||
let vals = (0..10i64)
|
||||
.map(|i| {
|
||||
let record = record! {
|
||||
"one" => Value::int(i, call.head),
|
||||
"two" => Value::int(2 * i, call.head),
|
||||
"three" => Value::int(3 * i, call.head),
|
||||
};
|
||||
Value::record(record, call.head)
|
||||
Output {
|
||||
one: i,
|
||||
two: 2 * i,
|
||||
three: 3 * i,
|
||||
}
|
||||
.into_value(call.head)
|
||||
})
|
||||
.collect();
|
||||
|
||||
|
|
|
@ -20,6 +20,7 @@ bench = false
|
|||
nu-protocol = { path = "../nu-protocol", version = "0.95.1" }
|
||||
nu-plugin = { path = "../nu-plugin", version = "0.95.1" }
|
||||
nu-path = { path = "../nu-path", version = "0.95.1" }
|
||||
nu-utils = { path = "../nu-utils", version = "0.95.1" }
|
||||
|
||||
# Potential dependencies for extras
|
||||
chrono = { workspace = true, features = ["std", "unstable-locales"], default-features = false }
|
||||
|
@ -30,12 +31,14 @@ mimalloc = { version = "0.1.42" }
|
|||
num = {version = "0.4"}
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
sqlparser = { version = "0.47"}
|
||||
polars-io = { version = "0.40", features = ["avro"]}
|
||||
polars-arrow = { version = "0.40"}
|
||||
polars-ops = { version = "0.40"}
|
||||
polars-plan = { version = "0.40", features = ["regex"]}
|
||||
polars-utils = { version = "0.40"}
|
||||
polars-io = { version = "0.41", features = ["avro"]}
|
||||
polars-arrow = { version = "0.41"}
|
||||
polars-ops = { version = "0.41"}
|
||||
polars-plan = { version = "0.41", features = ["regex"]}
|
||||
polars-utils = { version = "0.41"}
|
||||
typetag = "0.2"
|
||||
env_logger = "0.11.3"
|
||||
log.workspace = true
|
||||
uuid = { version = "1.9", features = ["v4", "serde"] }
|
||||
|
||||
[dependencies.polars]
|
||||
|
@ -70,7 +73,7 @@ features = [
|
|||
"to_dummies",
|
||||
]
|
||||
optional = false
|
||||
version = "0.40"
|
||||
version = "0.41"
|
||||
|
||||
[dev-dependencies]
|
||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.95.1" }
|
||||
|
|
23
crates/nu_plugin_polars/src/cache/mod.rs
vendored
|
@ -13,7 +13,9 @@ use nu_plugin::{EngineInterface, PluginCommand};
|
|||
use nu_protocol::{LabeledError, ShellError, Span};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{plugin_debug, values::PolarsPluginObject, EngineWrapper, PolarsPlugin};
|
||||
use crate::{values::PolarsPluginObject, EngineWrapper, PolarsPlugin};
|
||||
|
||||
use log::debug;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CacheValue {
|
||||
|
@ -60,22 +62,16 @@ impl Cache {
|
|||
|
||||
let removed = if force || reference_count.unwrap_or_default() < 1 {
|
||||
let removed = lock.remove(key);
|
||||
plugin_debug!(
|
||||
engine,
|
||||
"PolarsPlugin: removing {key} from cache: {removed:?}"
|
||||
);
|
||||
debug!("PolarsPlugin: removing {key} from cache: {removed:?}");
|
||||
removed
|
||||
} else {
|
||||
plugin_debug!(
|
||||
engine,
|
||||
"PolarsPlugin: decrementing reference count for {key}"
|
||||
);
|
||||
debug!("PolarsPlugin: decrementing reference count for {key}");
|
||||
None
|
||||
};
|
||||
|
||||
// Once there are no more entries in the cache
|
||||
// we can turn plugin gc back on
|
||||
plugin_debug!(engine, "PolarsPlugin: Cache is empty enabling GC");
|
||||
debug!("PolarsPlugin: Cache is empty enabling GC");
|
||||
engine.set_gc_disabled(false).map_err(LabeledError::from)?;
|
||||
drop(lock);
|
||||
Ok(removed)
|
||||
|
@ -91,14 +87,11 @@ impl Cache {
|
|||
span: Span,
|
||||
) -> Result<Option<CacheValue>, ShellError> {
|
||||
let mut lock = self.lock()?;
|
||||
plugin_debug!(
|
||||
engine,
|
||||
"PolarsPlugin: Inserting {uuid} into cache: {value:?}"
|
||||
);
|
||||
debug!("PolarsPlugin: Inserting {uuid} into cache: {value:?}");
|
||||
// turn off plugin gc the first time an entry is added to the cache
|
||||
// as we don't want the plugin to be garbage collected if there
|
||||
// is any live data
|
||||
plugin_debug!(engine, "PolarsPlugin: Cache has values disabling GC");
|
||||
debug!("PolarsPlugin: Cache has values disabling GC");
|
||||
engine.set_gc_disabled(true).map_err(LabeledError::from)?;
|
||||
let cache_value = CacheValue {
|
||||
uuid,
|
||||
|
|
|
@ -1,253 +0,0 @@
|
|||
use nu_plugin::{EngineInterface, EvaluatedCall, PluginCommand};
|
||||
use nu_protocol::{
|
||||
Category, Example, LabeledError, PipelineData, ShellError, Signature, Span, Spanned,
|
||||
SyntaxShape, Type, Value,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
dataframe::values::utils::convert_columns_string, values::CustomValueSupport, PolarsPlugin,
|
||||
};
|
||||
|
||||
use super::super::values::{Column, NuDataFrame};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct MeltDF;
|
||||
|
||||
impl PluginCommand for MeltDF {
|
||||
type Plugin = PolarsPlugin;
|
||||
|
||||
fn name(&self) -> &str {
|
||||
"polars melt"
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Unpivot a DataFrame from wide to long format."
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.required_named(
|
||||
"columns",
|
||||
SyntaxShape::Table(vec![]),
|
||||
"column names for melting",
|
||||
Some('c'),
|
||||
)
|
||||
.required_named(
|
||||
"values",
|
||||
SyntaxShape::Table(vec![]),
|
||||
"column names used as value columns",
|
||||
Some('v'),
|
||||
)
|
||||
.named(
|
||||
"variable-name",
|
||||
SyntaxShape::String,
|
||||
"optional name for variable column",
|
||||
Some('r'),
|
||||
)
|
||||
.named(
|
||||
"value-name",
|
||||
SyntaxShape::String,
|
||||
"optional name for value column",
|
||||
Some('l'),
|
||||
)
|
||||
.input_output_type(
|
||||
Type::Custom("dataframe".into()),
|
||||
Type::Custom("dataframe".into()),
|
||||
)
|
||||
.category(Category::Custom("dataframe".into()))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "melt dataframe",
|
||||
example:
|
||||
"[[a b c d]; [x 1 4 a] [y 2 5 b] [z 3 6 c]] | polars into-df | polars melt -c [b c] -v [a d]",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![
|
||||
Value::test_int(1),
|
||||
Value::test_int(2),
|
||||
Value::test_int(3),
|
||||
Value::test_int(1),
|
||||
Value::test_int(2),
|
||||
Value::test_int(3),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"c".to_string(),
|
||||
vec![
|
||||
Value::test_int(4),
|
||||
Value::test_int(5),
|
||||
Value::test_int(6),
|
||||
Value::test_int(4),
|
||||
Value::test_int(5),
|
||||
Value::test_int(6),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"variable".to_string(),
|
||||
vec![
|
||||
Value::test_string("a"),
|
||||
Value::test_string("a"),
|
||||
Value::test_string("a"),
|
||||
Value::test_string("d"),
|
||||
Value::test_string("d"),
|
||||
Value::test_string("d"),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"value".to_string(),
|
||||
vec![
|
||||
Value::test_string("x"),
|
||||
Value::test_string("y"),
|
||||
Value::test_string("z"),
|
||||
Value::test_string("a"),
|
||||
Value::test_string("b"),
|
||||
Value::test_string("c"),
|
||||
],
|
||||
),
|
||||
], None)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
}]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
plugin: &Self::Plugin,
|
||||
engine: &EngineInterface,
|
||||
call: &EvaluatedCall,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, LabeledError> {
|
||||
command(plugin, engine, call, input).map_err(LabeledError::from)
|
||||
}
|
||||
}
|
||||
|
||||
fn command(
|
||||
plugin: &PolarsPlugin,
|
||||
engine: &EngineInterface,
|
||||
call: &EvaluatedCall,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let id_col: Vec<Value> = call.get_flag("columns")?.expect("required value");
|
||||
let val_col: Vec<Value> = call.get_flag("values")?.expect("required value");
|
||||
|
||||
let value_name: Option<Spanned<String>> = call.get_flag("value-name")?;
|
||||
let variable_name: Option<Spanned<String>> = call.get_flag("variable-name")?;
|
||||
|
||||
let (id_col_string, id_col_span) = convert_columns_string(id_col, call.head)?;
|
||||
let (val_col_string, val_col_span) = convert_columns_string(val_col, call.head)?;
|
||||
|
||||
let df = NuDataFrame::try_from_pipeline_coerce(plugin, input, call.head)?;
|
||||
|
||||
check_column_datatypes(df.as_ref(), &id_col_string, id_col_span)?;
|
||||
check_column_datatypes(df.as_ref(), &val_col_string, val_col_span)?;
|
||||
|
||||
let mut res = df
|
||||
.as_ref()
|
||||
.melt(&id_col_string, &val_col_string)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error calculating melt".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
if let Some(name) = &variable_name {
|
||||
res.rename("variable", &name.item)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error renaming column".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(name.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
}
|
||||
|
||||
if let Some(name) = &value_name {
|
||||
res.rename("value", &name.item)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error renaming column".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(name.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
}
|
||||
|
||||
let res = NuDataFrame::new(false, res);
|
||||
res.to_pipeline_data(plugin, engine, call.head)
|
||||
}
|
||||
|
||||
fn check_column_datatypes<T: AsRef<str>>(
|
||||
df: &polars::prelude::DataFrame,
|
||||
cols: &[T],
|
||||
col_span: Span,
|
||||
) -> Result<(), ShellError> {
|
||||
if cols.is_empty() {
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Merge error".into(),
|
||||
msg: "empty column list".into(),
|
||||
span: Some(col_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
// Checking if they are same type
|
||||
if cols.len() > 1 {
|
||||
for w in cols.windows(2) {
|
||||
let l_series = df
|
||||
.column(w[0].as_ref())
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error selecting columns".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(col_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
let r_series = df
|
||||
.column(w[1].as_ref())
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error selecting columns".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(col_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
if l_series.dtype() != r_series.dtype() {
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Merge error".into(),
|
||||
msg: "found different column types in list".into(),
|
||||
span: Some(col_span),
|
||||
help: Some(format!(
|
||||
"datatypes {} and {} are incompatible",
|
||||
l_series.dtype(),
|
||||
r_series.dtype()
|
||||
)),
|
||||
inner: vec![],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::test::test_polars_plugin_command;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_examples() -> Result<(), ShellError> {
|
||||
test_polars_plugin_command(&MeltDF)
|
||||
}
|
||||
}
|
|
@ -9,7 +9,6 @@ mod filter_with;
|
|||
mod first;
|
||||
mod get;
|
||||
mod last;
|
||||
mod melt;
|
||||
mod open;
|
||||
mod query_df;
|
||||
mod rename;
|
||||
|
@ -28,6 +27,7 @@ mod to_df;
|
|||
mod to_json_lines;
|
||||
mod to_nu;
|
||||
mod to_parquet;
|
||||
mod unpivot;
|
||||
mod with_column;
|
||||
|
||||
use crate::PolarsPlugin;
|
||||
|
@ -44,7 +44,6 @@ pub use filter_with::FilterWith;
|
|||
pub use first::FirstDF;
|
||||
pub use get::GetDF;
|
||||
pub use last::LastDF;
|
||||
pub use melt::MeltDF;
|
||||
use nu_plugin::PluginCommand;
|
||||
pub use query_df::QueryDf;
|
||||
pub use rename::RenameDF;
|
||||
|
@ -62,6 +61,7 @@ pub use to_df::ToDataFrame;
|
|||
pub use to_json_lines::ToJsonLines;
|
||||
pub use to_nu::ToNu;
|
||||
pub use to_parquet::ToParquet;
|
||||
pub use unpivot::UnpivotDF;
|
||||
pub use with_column::WithColumn;
|
||||
|
||||
pub(crate) fn eager_commands() -> Vec<Box<dyn PluginCommand<Plugin = PolarsPlugin>>> {
|
||||
|
@ -76,7 +76,7 @@ pub(crate) fn eager_commands() -> Vec<Box<dyn PluginCommand<Plugin = PolarsPlugi
|
|||
Box::new(FilterWith),
|
||||
Box::new(GetDF),
|
||||
Box::new(OpenDataFrame),
|
||||
Box::new(MeltDF),
|
||||
Box::new(UnpivotDF),
|
||||
Box::new(Summary),
|
||||
Box::new(FirstDF),
|
||||
Box::new(LastDF),
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
use crate::{
|
||||
dataframe::values::NuSchema,
|
||||
perf,
|
||||
values::{CustomValueSupport, NuLazyFrame},
|
||||
PolarsPlugin,
|
||||
EngineWrapper, PolarsPlugin,
|
||||
};
|
||||
use nu_path::expand_path_with;
|
||||
use nu_utils::perf;
|
||||
|
||||
use super::super::values::NuDataFrame;
|
||||
use nu_plugin::PluginCommand;
|
||||
|
@ -16,6 +16,7 @@ use nu_protocol::{
|
|||
use std::{
|
||||
fs::File,
|
||||
io::BufReader,
|
||||
num::NonZeroUsize,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
|
@ -376,9 +377,13 @@ fn from_jsonl(
|
|||
file_path: &Path,
|
||||
file_span: Span,
|
||||
) -> Result<Value, ShellError> {
|
||||
let infer_schema: usize = call
|
||||
let infer_schema: NonZeroUsize = call
|
||||
.get_flag("infer-schema")?
|
||||
.unwrap_or(DEFAULT_INFER_SCHEMA);
|
||||
.and_then(NonZeroUsize::new)
|
||||
.unwrap_or(
|
||||
NonZeroUsize::new(DEFAULT_INFER_SCHEMA)
|
||||
.expect("The default infer-schema should be non zero"),
|
||||
);
|
||||
let maybe_schema = call
|
||||
.get_flag("schema")?
|
||||
.map(|schema| NuSchema::try_from(&schema))
|
||||
|
@ -399,13 +404,10 @@ fn from_jsonl(
|
|||
inner: vec![],
|
||||
})?;
|
||||
|
||||
perf(
|
||||
engine,
|
||||
perf!(
|
||||
"Lazy json lines dataframe open",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
engine.use_color()
|
||||
);
|
||||
|
||||
let df = NuLazyFrame::new(false, df);
|
||||
|
@ -441,13 +443,10 @@ fn from_jsonl(
|
|||
})?
|
||||
.into();
|
||||
|
||||
perf(
|
||||
engine,
|
||||
perf!(
|
||||
"Eager json lines dataframe open",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
engine.use_color()
|
||||
);
|
||||
|
||||
df.cache_and_to_value(plugin, engine, call.head)
|
||||
|
@ -524,14 +523,7 @@ fn from_csv(
|
|||
})?
|
||||
.into();
|
||||
|
||||
perf(
|
||||
engine,
|
||||
"Lazy CSV dataframe open",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
);
|
||||
perf!("Lazy CSV dataframe open", start_time, engine.use_color());
|
||||
|
||||
df.cache_and_to_value(plugin, engine, call.head)
|
||||
} else {
|
||||
|
@ -541,7 +533,7 @@ fn from_csv(
|
|||
.with_infer_schema_length(Some(infer_schema))
|
||||
.with_skip_rows(skip_rows.unwrap_or_default())
|
||||
.with_schema(maybe_schema.map(|s| s.into()))
|
||||
.with_columns(columns.map(Arc::new))
|
||||
.with_columns(columns.map(|v| Arc::from(v.into_boxed_slice())))
|
||||
.map_parse_options(|options| {
|
||||
options
|
||||
.with_separator(
|
||||
|
@ -569,14 +561,7 @@ fn from_csv(
|
|||
inner: vec![],
|
||||
})?;
|
||||
|
||||
perf(
|
||||
engine,
|
||||
"Eager CSV dataframe open",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
);
|
||||
perf!("Eager CSV dataframe open", start_time, engine.use_color());
|
||||
|
||||
let df = NuDataFrame::new(false, df);
|
||||
df.cache_and_to_value(plugin, engine, call.head)
|
||||
|
|
|
@ -70,7 +70,7 @@ fn command(
|
|||
let value: Value = schema.into();
|
||||
Ok(PipelineData::Value(value, None))
|
||||
}
|
||||
PolarsPluginObject::NuLazyFrame(lazy) => {
|
||||
PolarsPluginObject::NuLazyFrame(mut lazy) => {
|
||||
let schema = lazy.schema()?;
|
||||
let value: Value = schema.into();
|
||||
Ok(PipelineData::Value(value, None))
|
||||
|
|
358
crates/nu_plugin_polars/src/dataframe/eager/unpivot.rs
Normal file
|
@ -0,0 +1,358 @@
|
|||
use nu_plugin::{EngineInterface, EvaluatedCall, PluginCommand};
|
||||
use nu_protocol::{
|
||||
Category, Example, LabeledError, PipelineData, ShellError, Signature, Span, Spanned,
|
||||
SyntaxShape, Type, Value,
|
||||
};
|
||||
use polars::frame::explode::UnpivotArgs;
|
||||
|
||||
use crate::{
|
||||
dataframe::values::utils::convert_columns_string,
|
||||
values::{CustomValueSupport, NuLazyFrame, PolarsPluginObject},
|
||||
PolarsPlugin,
|
||||
};
|
||||
|
||||
use super::super::values::{Column, NuDataFrame};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct UnpivotDF;
|
||||
|
||||
impl PluginCommand for UnpivotDF {
|
||||
type Plugin = PolarsPlugin;
|
||||
|
||||
fn name(&self) -> &str {
|
||||
"polars unpivot"
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Unpivot a DataFrame from wide to long format."
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.required_named(
|
||||
"columns",
|
||||
SyntaxShape::Table(vec![]),
|
||||
"column names for unpivoting",
|
||||
Some('c'),
|
||||
)
|
||||
.required_named(
|
||||
"values",
|
||||
SyntaxShape::Table(vec![]),
|
||||
"column names used as value columns",
|
||||
Some('v'),
|
||||
)
|
||||
.named(
|
||||
"variable-name",
|
||||
SyntaxShape::String,
|
||||
"optional name for variable column",
|
||||
Some('r'),
|
||||
)
|
||||
.named(
|
||||
"value-name",
|
||||
SyntaxShape::String,
|
||||
"optional name for value column",
|
||||
Some('l'),
|
||||
)
|
||||
.input_output_type(
|
||||
Type::Custom("dataframe".into()),
|
||||
Type::Custom("dataframe".into()),
|
||||
)
|
||||
.switch(
|
||||
"streamable",
|
||||
"Whether or not to use the polars streaming engine. Only valid for lazy dataframes",
|
||||
Some('s'),
|
||||
)
|
||||
.category(Category::Custom("dataframe".into()))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "unpivot on an eager dataframe",
|
||||
example:
|
||||
"[[a b c d]; [x 1 4 a] [y 2 5 b] [z 3 6 c]] | polars into-df | polars unpivot -c [b c] -v [a d]",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![
|
||||
Value::test_int(1),
|
||||
Value::test_int(2),
|
||||
Value::test_int(3),
|
||||
Value::test_int(1),
|
||||
Value::test_int(2),
|
||||
Value::test_int(3),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"c".to_string(),
|
||||
vec![
|
||||
Value::test_int(4),
|
||||
Value::test_int(5),
|
||||
Value::test_int(6),
|
||||
Value::test_int(4),
|
||||
Value::test_int(5),
|
||||
Value::test_int(6),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"variable".to_string(),
|
||||
vec![
|
||||
Value::test_string("a"),
|
||||
Value::test_string("a"),
|
||||
Value::test_string("a"),
|
||||
Value::test_string("d"),
|
||||
Value::test_string("d"),
|
||||
Value::test_string("d"),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"value".to_string(),
|
||||
vec![
|
||||
Value::test_string("x"),
|
||||
Value::test_string("y"),
|
||||
Value::test_string("z"),
|
||||
Value::test_string("a"),
|
||||
Value::test_string("b"),
|
||||
Value::test_string("c"),
|
||||
],
|
||||
),
|
||||
], None)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
},
|
||||
Example {
|
||||
description: "unpivot on a lazy dataframe",
|
||||
example:
|
||||
"[[a b c d]; [x 1 4 a] [y 2 5 b] [z 3 6 c]] | polars into-lazy | polars unpivot -c [b c] -v [a d] | polars collect",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![
|
||||
Value::test_int(1),
|
||||
Value::test_int(2),
|
||||
Value::test_int(3),
|
||||
Value::test_int(1),
|
||||
Value::test_int(2),
|
||||
Value::test_int(3),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"c".to_string(),
|
||||
vec![
|
||||
Value::test_int(4),
|
||||
Value::test_int(5),
|
||||
Value::test_int(6),
|
||||
Value::test_int(4),
|
||||
Value::test_int(5),
|
||||
Value::test_int(6),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"variable".to_string(),
|
||||
vec![
|
||||
Value::test_string("a"),
|
||||
Value::test_string("a"),
|
||||
Value::test_string("a"),
|
||||
Value::test_string("d"),
|
||||
Value::test_string("d"),
|
||||
Value::test_string("d"),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"value".to_string(),
|
||||
vec![
|
||||
Value::test_string("x"),
|
||||
Value::test_string("y"),
|
||||
Value::test_string("z"),
|
||||
Value::test_string("a"),
|
||||
Value::test_string("b"),
|
||||
Value::test_string("c"),
|
||||
],
|
||||
),
|
||||
], None)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
plugin: &Self::Plugin,
|
||||
engine: &EngineInterface,
|
||||
call: &EvaluatedCall,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, LabeledError> {
|
||||
match PolarsPluginObject::try_from_pipeline(plugin, input, call.head)? {
|
||||
PolarsPluginObject::NuDataFrame(df) => command_eager(plugin, engine, call, df),
|
||||
PolarsPluginObject::NuLazyFrame(lazy) => command_lazy(plugin, engine, call, lazy),
|
||||
_ => Err(ShellError::GenericError {
|
||||
error: "Must be a dataframe or lazy dataframe".into(),
|
||||
msg: "".into(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
}),
|
||||
}
|
||||
.map_err(LabeledError::from)
|
||||
}
|
||||
}
|
||||
|
||||
fn command_eager(
|
||||
plugin: &PolarsPlugin,
|
||||
engine: &EngineInterface,
|
||||
call: &EvaluatedCall,
|
||||
df: NuDataFrame,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let id_col: Vec<Value> = call.get_flag("columns")?.expect("required value");
|
||||
let val_col: Vec<Value> = call.get_flag("values")?.expect("required value");
|
||||
|
||||
let value_name: Option<Spanned<String>> = call.get_flag("value-name")?;
|
||||
let variable_name: Option<Spanned<String>> = call.get_flag("variable-name")?;
|
||||
|
||||
let (id_col_string, id_col_span) = convert_columns_string(id_col, call.head)?;
|
||||
let (val_col_string, val_col_span) = convert_columns_string(val_col, call.head)?;
|
||||
|
||||
check_column_datatypes(df.as_ref(), &id_col_string, id_col_span)?;
|
||||
check_column_datatypes(df.as_ref(), &val_col_string, val_col_span)?;
|
||||
|
||||
let mut res = df
|
||||
.as_ref()
|
||||
.unpivot(&val_col_string, &id_col_string)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error calculating unpivot".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
if let Some(name) = &variable_name {
|
||||
res.rename("variable", &name.item)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error renaming column".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(name.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
}
|
||||
|
||||
if let Some(name) = &value_name {
|
||||
res.rename("value", &name.item)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error renaming column".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(name.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
}
|
||||
|
||||
let res = NuDataFrame::new(false, res);
|
||||
res.to_pipeline_data(plugin, engine, call.head)
|
||||
}
|
||||
|
||||
fn command_lazy(
|
||||
plugin: &PolarsPlugin,
|
||||
engine: &EngineInterface,
|
||||
call: &EvaluatedCall,
|
||||
df: NuLazyFrame,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let id_col: Vec<Value> = call.get_flag("columns")?.expect("required value");
|
||||
let val_col: Vec<Value> = call.get_flag("values")?.expect("required value");
|
||||
|
||||
let (id_col_string, _id_col_span) = convert_columns_string(id_col, call.head)?;
|
||||
let (val_col_string, _val_col_span) = convert_columns_string(val_col, call.head)?;
|
||||
|
||||
let value_name: Option<String> = call.get_flag("value-name")?;
|
||||
let variable_name: Option<String> = call.get_flag("variable-name")?;
|
||||
|
||||
let streamable = call.has_flag("streamable")?;
|
||||
|
||||
let unpivot_args = UnpivotArgs {
|
||||
on: val_col_string.iter().map(Into::into).collect(),
|
||||
index: id_col_string.iter().map(Into::into).collect(),
|
||||
value_name: value_name.map(Into::into),
|
||||
variable_name: variable_name.map(Into::into),
|
||||
streamable,
|
||||
};
|
||||
|
||||
let polars_df = df.to_polars().unpivot(unpivot_args);
|
||||
|
||||
let res = NuLazyFrame::new(false, polars_df);
|
||||
res.to_pipeline_data(plugin, engine, call.head)
|
||||
}
|
||||
|
||||
fn check_column_datatypes<T: AsRef<str>>(
|
||||
df: &polars::prelude::DataFrame,
|
||||
cols: &[T],
|
||||
col_span: Span,
|
||||
) -> Result<(), ShellError> {
|
||||
if cols.is_empty() {
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Merge error".into(),
|
||||
msg: "empty column list".into(),
|
||||
span: Some(col_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
// Checking if they are same type
|
||||
if cols.len() > 1 {
|
||||
for w in cols.windows(2) {
|
||||
let l_series = df
|
||||
.column(w[0].as_ref())
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error selecting columns".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(col_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
let r_series = df
|
||||
.column(w[1].as_ref())
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error selecting columns".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(col_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
if l_series.dtype() != r_series.dtype() {
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Merge error".into(),
|
||||
msg: "found different column types in list".into(),
|
||||
span: Some(col_span),
|
||||
help: Some(format!(
|
||||
"datatypes {} and {} are incompatible",
|
||||
l_series.dtype(),
|
||||
r_series.dtype()
|
||||
)),
|
||||
inner: vec![],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::test::test_polars_plugin_command;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_examples() -> Result<(), ShellError> {
|
||||
test_polars_plugin_command(&UnpivotDF)
|
||||
}
|
||||
}
|
|
@ -196,7 +196,8 @@ fn get_col_name(expr: &Expr) -> Option<String> {
|
|||
| Expr::Nth(_)
|
||||
| Expr::SubPlan(_, _)
|
||||
| Expr::IndexColumn(_)
|
||||
| Expr::Selector(_) => None,
|
||||
| Expr::Selector(_)
|
||||
| Expr::Field(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -148,11 +148,11 @@ fn command(
|
|||
plugin: &PolarsPlugin,
|
||||
engine: &EngineInterface,
|
||||
call: &EvaluatedCall,
|
||||
lazy: NuLazyFrame,
|
||||
mut lazy: NuLazyFrame,
|
||||
expressions: Vec<Expr>,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let group_by = lazy.to_polars().group_by(expressions);
|
||||
let group_by = NuLazyGroupBy::new(group_by, lazy.from_eager, lazy.schema()?);
|
||||
let group_by = NuLazyGroupBy::new(group_by, lazy.from_eager, lazy.schema().clone()?);
|
||||
group_by.to_pipeline_data(plugin, engine, call.head)
|
||||
}
|
||||
|
||||
|
|
|
@ -35,7 +35,7 @@ impl PluginCommand for LazyJoin {
|
|||
Some('i'),
|
||||
)
|
||||
.switch("left", "left join between lazyframes", Some('l'))
|
||||
.switch("outer", "outer join between lazyframes", Some('o'))
|
||||
.switch("full", "full join between lazyframes", Some('f'))
|
||||
.switch("cross", "cross join between lazyframes", Some('c'))
|
||||
.named(
|
||||
"suffix",
|
||||
|
@ -183,13 +183,13 @@ impl PluginCommand for LazyJoin {
|
|||
input: PipelineData,
|
||||
) -> Result<PipelineData, LabeledError> {
|
||||
let left = call.has_flag("left")?;
|
||||
let outer = call.has_flag("outer")?;
|
||||
let full = call.has_flag("full")?;
|
||||
let cross = call.has_flag("cross")?;
|
||||
|
||||
let how = if left {
|
||||
JoinType::Left
|
||||
} else if outer {
|
||||
JoinType::Outer
|
||||
} else if full {
|
||||
JoinType::Full
|
||||
} else if cross {
|
||||
JoinType::Cross
|
||||
} else {
|
||||
|
|
|
@ -140,7 +140,7 @@ impl PluginCommand for LazySortBy {
|
|||
|
||||
let sort_options = SortMultipleOptions {
|
||||
descending: reverse,
|
||||
nulls_last,
|
||||
nulls_last: vec![nulls_last],
|
||||
multithreaded: true,
|
||||
maintain_order,
|
||||
};
|
||||
|
|
|
@ -7,7 +7,10 @@ use nu_protocol::{
|
|||
Category, Example, LabeledError, PipelineData, ShellError, Signature, Span, SyntaxShape, Type,
|
||||
Value,
|
||||
};
|
||||
use polars::prelude::{ChunkSet, DataType, IntoSeries};
|
||||
use polars::{
|
||||
chunked_array::cast::CastOptions,
|
||||
prelude::{ChunkSet, DataType, IntoSeries},
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SetWithIndex;
|
||||
|
@ -96,7 +99,7 @@ fn command(
|
|||
let casted = match indices.dtype() {
|
||||
DataType::UInt32 | DataType::UInt64 | DataType::Int32 | DataType::Int64 => indices
|
||||
.as_ref()
|
||||
.cast(&DataType::UInt32)
|
||||
.cast(&DataType::UInt32, CastOptions::default())
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error casting indices".into(),
|
||||
msg: e.to_string(),
|
||||
|
|
|
@ -4,7 +4,8 @@ use super::super::values::{Column, NuDataFrame};
|
|||
|
||||
use nu_plugin::{EngineInterface, EvaluatedCall, PluginCommand};
|
||||
use nu_protocol::{
|
||||
Category, Example, LabeledError, PipelineData, ShellError, Signature, Span, Type, Value,
|
||||
Category, Example, LabeledError, PipelineData, ShellError, Signature, Span, SyntaxShape, Type,
|
||||
Value,
|
||||
};
|
||||
|
||||
use polars::prelude::SeriesMethods;
|
||||
|
@ -25,6 +26,24 @@ impl PluginCommand for ValueCount {
|
|||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.named(
|
||||
"column",
|
||||
SyntaxShape::String,
|
||||
"Provide a custom name for the coutn column",
|
||||
Some('c'),
|
||||
)
|
||||
.switch("sort", "Whether or not values should be sorted", Some('s'))
|
||||
.switch(
|
||||
"parallel",
|
||||
"Use multiple threads when processing",
|
||||
Some('p'),
|
||||
)
|
||||
.named(
|
||||
"normalize",
|
||||
SyntaxShape::String,
|
||||
"Normalize the counts",
|
||||
Some('n'),
|
||||
)
|
||||
.input_output_type(
|
||||
Type::Custom("dataframe".into()),
|
||||
Type::Custom("dataframe".into()),
|
||||
|
@ -73,11 +92,15 @@ fn command(
|
|||
call: &EvaluatedCall,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let column = call.get_flag("column")?.unwrap_or("count".to_string());
|
||||
let parallel = call.has_flag("parallel")?;
|
||||
let sort = call.has_flag("sort")?;
|
||||
let normalize = call.has_flag("normalize")?;
|
||||
let df = NuDataFrame::try_from_pipeline_coerce(plugin, input, call.head)?;
|
||||
let series = df.as_series(call.head)?;
|
||||
|
||||
let res = series
|
||||
.value_counts(false, false)
|
||||
.value_counts(sort, parallel, column, normalize)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error calculating value counts values".into(),
|
||||
msg: e.to_string(),
|
||||
|
|
|
@ -41,19 +41,37 @@ pub(super) fn compute_between_series(
|
|||
let operation_span = Span::merge(left.span(), right.span());
|
||||
match operator.item {
|
||||
Operator::Math(Math::Plus) => {
|
||||
let mut res = lhs + rhs;
|
||||
let mut res = (lhs + rhs).map_err(|e| ShellError::GenericError {
|
||||
error: format!("Addition error: {e}"),
|
||||
msg: "".into(),
|
||||
span: Some(operation_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
let name = format!("sum_{}_{}", lhs.name(), rhs.name());
|
||||
res.rename(&name);
|
||||
NuDataFrame::try_from_series(res, operation_span)
|
||||
}
|
||||
Operator::Math(Math::Minus) => {
|
||||
let mut res = lhs - rhs;
|
||||
let mut res = (lhs - rhs).map_err(|e| ShellError::GenericError {
|
||||
error: format!("Subtraction error: {e}"),
|
||||
msg: "".into(),
|
||||
span: Some(operation_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
let name = format!("sub_{}_{}", lhs.name(), rhs.name());
|
||||
res.rename(&name);
|
||||
NuDataFrame::try_from_series(res, operation_span)
|
||||
}
|
||||
Operator::Math(Math::Multiply) => {
|
||||
let mut res = lhs * rhs;
|
||||
let mut res = (lhs * rhs).map_err(|e| ShellError::GenericError {
|
||||
error: format!("Multiplication error: {e}"),
|
||||
msg: "".into(),
|
||||
span: Some(operation_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
let name = format!("mul_{}_{}", lhs.name(), rhs.name());
|
||||
res.rename(&name);
|
||||
NuDataFrame::try_from_series(res, operation_span)
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
mod custom_value;
|
||||
|
||||
use nu_protocol::{record, ShellError, Span, Value};
|
||||
use polars::prelude::{col, AggExpr, Expr, Literal};
|
||||
use polars::{
|
||||
chunked_array::cast::CastOptions,
|
||||
prelude::{col, AggExpr, Expr, Literal},
|
||||
};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
use uuid::Uuid;
|
||||
|
||||
|
@ -269,15 +272,23 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Result<Value, ShellError> {
|
|||
Expr::Cast {
|
||||
expr,
|
||||
data_type,
|
||||
strict,
|
||||
} => Ok(Value::record(
|
||||
options,
|
||||
} => {
|
||||
let cast_option_str = match options {
|
||||
CastOptions::Strict => "STRICT",
|
||||
CastOptions::NonStrict => "NON_STRICT",
|
||||
CastOptions::Overflowing => "OVERFLOWING",
|
||||
};
|
||||
|
||||
Ok(Value::record(
|
||||
record! {
|
||||
"expr" => expr_to_value(expr.as_ref(), span)?,
|
||||
"dtype" => Value::string(format!("{data_type:?}"), span),
|
||||
"strict" => Value::bool(*strict, span),
|
||||
"cast_options" => Value::string(cast_option_str, span)
|
||||
},
|
||||
span,
|
||||
)),
|
||||
))
|
||||
}
|
||||
Expr::Gather {
|
||||
expr,
|
||||
idx,
|
||||
|
@ -388,6 +399,7 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Result<Value, ShellError> {
|
|||
Expr::Window {
|
||||
function,
|
||||
partition_by,
|
||||
order_by,
|
||||
options,
|
||||
} => {
|
||||
let partition_by: Result<Vec<Value>, ShellError> = partition_by
|
||||
|
@ -399,6 +411,23 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Result<Value, ShellError> {
|
|||
record! {
|
||||
"function" => expr_to_value(function, span)?,
|
||||
"partition_by" => Value::list(partition_by?, span),
|
||||
"order_by" => {
|
||||
if let Some((order_expr, sort_options)) = order_by {
|
||||
Value::record(record! {
|
||||
"expr" => expr_to_value(order_expr.as_ref(), span)?,
|
||||
"sort_options" => {
|
||||
Value::record(record!(
|
||||
"descending" => Value::bool(sort_options.descending, span),
|
||||
"nulls_last"=> Value::bool(sort_options.nulls_last, span),
|
||||
"multithreaded"=> Value::bool(sort_options.multithreaded, span),
|
||||
"maintain_order"=> Value::bool(sort_options.maintain_order, span),
|
||||
), span)
|
||||
}
|
||||
}, span)
|
||||
} else {
|
||||
Value::nothing(span)
|
||||
}
|
||||
},
|
||||
"options" => Value::string(format!("{options:?}"), span),
|
||||
},
|
||||
span,
|
||||
|
@ -424,6 +453,18 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Result<Value, ShellError> {
|
|||
msg_span: span,
|
||||
input_span: Span::unknown(),
|
||||
}),
|
||||
Expr::Field(column_name) => {
|
||||
let fields: Vec<Value> = column_name
|
||||
.iter()
|
||||
.map(|s| Value::string(s.to_string(), span))
|
||||
.collect();
|
||||
Ok(Value::record(
|
||||
record!(
|
||||
"fields" => Value::list(fields, span)
|
||||
),
|
||||
span,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -77,8 +77,11 @@ impl NuLazyFrame {
|
|||
Self::new(self.from_eager, new_frame)
|
||||
}
|
||||
|
||||
pub fn schema(&self) -> Result<NuSchema, ShellError> {
|
||||
let internal_schema = self.lazy.schema().map_err(|e| ShellError::GenericError {
|
||||
pub fn schema(&mut self) -> Result<NuSchema, ShellError> {
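// polars' LazyFrame::schema needs mutable access, so take clone-on-write
// mutable access to the shared frame via Arc::make_mut.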
|
||||
let internal_schema =
|
||||
Arc::make_mut(&mut self.lazy)
|
||||
.schema()
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error getting schema from lazy frame".into(),
|
||||
msg: e.to_string(),
|
||||
span: None,
|
||||
|
|
|
@ -45,52 +45,6 @@ impl EngineWrapper for &EngineInterface {
|
|||
}
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! plugin_debug {
|
||||
($env_var_provider:tt, $($arg:tt)*) => {{
|
||||
if $env_var_provider.get_env_var("POLARS_PLUGIN_DEBUG")
|
||||
.filter(|s| s == "1" || s == "true")
|
||||
.is_some() {
|
||||
eprintln!($($arg)*);
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
pub fn perf(
|
||||
env: impl EngineWrapper,
|
||||
msg: &str,
|
||||
dur: std::time::Instant,
|
||||
file: &str,
|
||||
line: u32,
|
||||
column: u32,
|
||||
) {
|
||||
if env
|
||||
.get_env_var("POLARS_PLUGIN_PERF")
|
||||
.filter(|s| s == "1" || s == "true")
|
||||
.is_some()
|
||||
{
|
||||
if env.use_color() {
|
||||
eprintln!(
|
||||
"perf: {}:{}:{} \x1b[32m{}\x1b[0m took \x1b[33m{:?}\x1b[0m",
|
||||
file,
|
||||
line,
|
||||
column,
|
||||
msg,
|
||||
dur.elapsed(),
|
||||
);
|
||||
} else {
|
||||
eprintln!(
|
||||
"perf: {}:{}:{} {} took {:?}",
|
||||
file,
|
||||
line,
|
||||
column,
|
||||
msg,
|
||||
dur.elapsed(),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
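The plugin-local perf helper above is removed in favor of the perf! macro now imported from nu_utils; the macro itself is not part of this diff. Purely as a hypothetical sketch, inferred from the call sites in this commit (perf!(msg, start_time, use_color) and perf!(msg, start_time, file!(), line!(), column!(), use_color)), such a macro could look roughly like the following. It is not the actual nu_utils implementation, and the real macro may gate or format its output differently.

// Hypothetical sketch only -- not the nu_utils implementation.
#[macro_export]
macro_rules! perf {
    // Three-argument form, as used in src/main.rs and src/run.rs in this commit.
    ($msg:expr, $start:expr, $use_color:expr) => {
        $crate::perf!($msg, $start, file!(), line!(), column!(), $use_color)
    };
    // Six-argument form, as used in the polars plugin call sites.
    ($msg:expr, $start:expr, $file:expr, $line:expr, $column:expr, $use_color:expr) => {{
        if $use_color {
            eprintln!(
                "perf: {}:{}:{} \x1b[32m{}\x1b[0m took \x1b[33m{:?}\x1b[0m",
                $file, $line, $column, $msg, $start.elapsed()
            );
        } else {
            eprintln!(
                "perf: {}:{}:{} {} took {:?}",
                $file, $line, $column, $msg, $start.elapsed()
            );
        }
    }};
}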
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct PolarsPlugin {
|
||||
pub(crate) cache: Cache,
|
||||
|
|
|
@ -5,5 +5,6 @@ use nu_plugin_polars::PolarsPlugin;
|
|||
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
||||
|
||||
fn main() {
|
||||
env_logger::init();
|
||||
serve_plugin(&PolarsPlugin::default(), MsgPackSerializer {})
|
||||
}
|
||||
|
|
|
@ -23,3 +23,6 @@ gjson = "0.8"
|
|||
scraper = { default-features = false, version = "0.19" }
|
||||
sxd-document = "0.3"
|
||||
sxd-xpath = "0.4"
|
||||
webpage = { version = "2.0.1", features = ["serde"] }
|
||||
serde_json.workspace = true
|
||||
serde.workspace = true
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
mod query;
|
||||
mod query_json;
|
||||
mod query_web;
|
||||
mod query_webpage_info;
|
||||
mod query_xml;
|
||||
mod web_tables;
|
||||
|
||||
|
|
|
@ -1,4 +1,7 @@
|
|||
use crate::{query_json::QueryJson, query_web::QueryWeb, query_xml::QueryXml};
|
||||
use crate::{
|
||||
query_json::QueryJson, query_web::QueryWeb, query_webpage_info::QueryWebpageInfo,
|
||||
query_xml::QueryXml,
|
||||
};
|
||||
use nu_plugin::{EvaluatedCall, Plugin, PluginCommand, SimplePluginCommand};
|
||||
use nu_protocol::{Category, LabeledError, Signature, Value};
|
||||
|
||||
|
@ -26,6 +29,7 @@ impl Plugin for Query {
|
|||
Box::new(QueryJson),
|
||||
Box::new(QueryXml),
|
||||
Box::new(QueryWeb),
|
||||
Box::new(QueryWebpageInfo),
|
||||
]
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,10 +24,20 @@ impl SimplePluginCommand for QueryWeb {
|
|||
.switch("as-html", "return the query output as html", Some('m'))
|
||||
.named(
|
||||
"attribute",
|
||||
SyntaxShape::String,
|
||||
SyntaxShape::Any,
|
||||
"downselect based on the given attribute",
|
||||
Some('a'),
|
||||
)
|
||||
// TODO: use detailed shape when https://github.com/nushell/nushell/issues/13253 is resolved
|
||||
// .named(
|
||||
// "attribute",
|
||||
// SyntaxShape::OneOf(vec![
|
||||
// SyntaxShape::List(Box::new(SyntaxShape::String)),
|
||||
// SyntaxShape::String,
|
||||
// ]),
|
||||
// "downselect based on the given attribute",
|
||||
// Some('a'),
|
||||
// )
|
||||
.named(
|
||||
"as-table",
|
||||
SyntaxShape::List(Box::new(SyntaxShape::String)),
|
||||
|
@ -79,6 +89,11 @@ pub fn web_examples() -> Vec<Example<'static>> {
|
|||
example: "http get https://example.org | query web --query a --attribute href",
|
||||
description: "Retrieve a specific html attribute instead of the default text",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
example: r#"http get https://www.rust-lang.org | query web --query 'meta[property^="og:"]' --attribute [ property content ]"#,
|
||||
description: r#"Retrieve the OpenGraph properties (`<meta property="og:...">`) from a web page"#,
|
||||
result: None,
|
||||
}
|
||||
]
|
||||
}
|
||||
|
@ -86,7 +101,7 @@ pub fn web_examples() -> Vec<Example<'static>> {
|
|||
pub struct Selector {
|
||||
pub query: String,
|
||||
pub as_html: bool,
|
||||
pub attribute: String,
|
||||
pub attribute: Value,
|
||||
pub as_table: Value,
|
||||
pub inspect: bool,
|
||||
}
|
||||
|
@ -96,7 +111,7 @@ impl Selector {
|
|||
Selector {
|
||||
query: String::new(),
|
||||
as_html: false,
|
||||
attribute: String::new(),
|
||||
attribute: Value::string("".to_string(), Span::unknown()),
|
||||
as_table: Value::string("".to_string(), Span::unknown()),
|
||||
inspect: false,
|
||||
}
|
||||
|
@ -113,7 +128,9 @@ pub fn parse_selector_params(call: &EvaluatedCall, input: &Value) -> Result<Valu
|
|||
let head = call.head;
|
||||
let query: Option<Spanned<String>> = call.get_flag("query")?;
|
||||
let as_html = call.has_flag("as-html")?;
|
||||
let attribute = call.get_flag("attribute")?.unwrap_or_default();
|
||||
let attribute = call
|
||||
.get_flag("attribute")?
|
||||
.unwrap_or_else(|| Value::nothing(head));
|
||||
let as_table: Value = call
|
||||
.get_flag("as-table")?
|
||||
.unwrap_or_else(|| Value::nothing(head));
|
||||
|
@ -160,11 +177,19 @@ fn begin_selector_query(input_html: String, selector: Selector, span: Span) -> V
|
|||
selector.inspect,
|
||||
span,
|
||||
)
|
||||
} else if let Value::List { .. } = selector.attribute {
|
||||
execute_selector_query_with_attributes(
|
||||
input_html.as_str(),
|
||||
selector.query.as_str(),
|
||||
&selector.attribute,
|
||||
selector.inspect,
|
||||
span,
|
||||
)
|
||||
} else {
|
||||
execute_selector_query_with_attribute(
|
||||
input_html.as_str(),
|
||||
selector.query.as_str(),
|
||||
selector.attribute.as_str(),
|
||||
selector.attribute.as_str().unwrap_or(""),
|
||||
selector.inspect,
|
||||
span,
|
||||
)
|
||||
|
@ -317,6 +342,40 @@ fn execute_selector_query_with_attribute(
|
|||
Value::list(vals, span)
|
||||
}
|
||||
|
||||
fn execute_selector_query_with_attributes(
|
||||
input_string: &str,
|
||||
query_string: &str,
|
||||
attributes: &Value,
|
||||
inspect: bool,
|
||||
span: Span,
|
||||
) -> Value {
|
||||
let doc = Html::parse_fragment(input_string);
|
||||
|
||||
let mut attrs: Vec<String> = Vec::new();
|
||||
if let Value::List { vals, .. } = &attributes {
|
||||
for x in vals {
|
||||
if let Value::String { val, .. } = x {
|
||||
attrs.push(val.to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let vals: Vec<Value> = doc
|
||||
.select(&css(query_string, inspect))
|
||||
.map(|selection| {
|
||||
let mut record = Record::new();
|
||||
for attr in &attrs {
|
||||
record.push(
|
||||
attr.to_string(),
|
||||
Value::string(selection.value().attr(attr).unwrap_or("").to_string(), span),
|
||||
);
|
||||
}
|
||||
Value::record(record, span)
|
||||
})
|
||||
.collect();
|
||||
Value::list(vals, span)
|
||||
}
|
||||
|
||||
fn execute_selector_query(
|
||||
input_string: &str,
|
||||
query_string: &str,
|
||||
|
@ -369,6 +428,10 @@ mod tests {
|
|||
"#;
|
||||
|
||||
const NESTED_TEXT: &str = r#"<p>Hello there, <span style="color: red;">World</span></p>"#;
|
||||
const MULTIPLE_ATTRIBUTES: &str = r#"
|
||||
<a href="https://example.org" target="_blank">Example</a>
|
||||
<a href="https://example.com" target="_self">Example</a>
|
||||
"#;
|
||||
|
||||
#[test]
|
||||
fn test_first_child_is_not_empty() {
|
||||
|
@ -424,4 +487,48 @@ mod tests {
|
|||
vec![vec!["Hello there, ".to_string(), "World".to_string()]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_multiple_attributes() {
|
||||
let item = execute_selector_query_with_attributes(
|
||||
MULTIPLE_ATTRIBUTES,
|
||||
"a",
|
||||
&Value::list(
|
||||
vec![
|
||||
Value::string("href".to_string(), Span::unknown()),
|
||||
Value::string("target".to_string(), Span::unknown()),
|
||||
],
|
||||
Span::unknown(),
|
||||
),
|
||||
false,
|
||||
Span::test_data(),
|
||||
);
|
||||
let out = item
|
||||
.into_list()
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.map(|matches| {
|
||||
matches
|
||||
.into_record()
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.map(|(key, value)| (key, value.coerce_into_string().unwrap()))
|
||||
.collect::<Vec<(String, String)>>()
|
||||
})
|
||||
.collect::<Vec<Vec<(String, String)>>>();
|
||||
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![
|
||||
vec![
|
||||
("href".to_string(), "https://example.org".to_string()),
|
||||
("target".to_string(), "_blank".to_string())
|
||||
],
|
||||
vec![
|
||||
("href".to_string(), "https://example.com".to_string()),
|
||||
("target".to_string(), "_self".to_string())
|
||||
]
|
||||
]
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
478
crates/nu_plugin_query/src/query_webpage_info.rs
Normal file
|
@ -0,0 +1,478 @@
|
|||
use nu_plugin::{EngineInterface, EvaluatedCall, SimplePluginCommand};
|
||||
use nu_protocol::{Category, Example, LabeledError, Record, Signature, Span, Type, Value};
|
||||
|
||||
use crate::Query;
|
||||
|
||||
pub struct QueryWebpageInfo;
|
||||
|
||||
impl SimplePluginCommand for QueryWebpageInfo {
|
||||
type Plugin = Query;
|
||||
|
||||
fn name(&self) -> &str {
|
||||
"query webpage-info"
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"uses the webpage crate to extract info from html: title, description, language, links, RSS feeds, Opengraph, Schema.org, and more"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.input_output_type(Type::String, Type::record())
|
||||
.category(Category::Network)
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
web_examples()
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
_plugin: &Query,
|
||||
_engine: &EngineInterface,
|
||||
_call: &EvaluatedCall,
|
||||
input: &Value,
|
||||
) -> Result<Value, LabeledError> {
|
||||
let span = input.span();
|
||||
match input {
|
||||
Value::String { val, .. } => execute_webpage(val, span),
|
||||
_ => Err(LabeledError::new("Requires text input")
|
||||
.with_label("expected text from pipeline", span)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn web_examples() -> Vec<Example<'static>> {
|
||||
vec![Example {
|
||||
example: "http get https://phoronix.com | query webpage-info",
|
||||
description: "extract detailed info from phoronix.com website",
|
||||
result: None,
|
||||
}]
|
||||
}
|
||||
|
||||
fn execute_webpage(html: &str, span: Span) -> Result<Value, LabeledError> {
|
||||
let info = webpage::HTML::from_string(html.to_string(), None)
|
||||
.map_err(|e| LabeledError::new(e.to_string()).with_label("error parsing html", span))?;
|
||||
|
||||
let value = to_value(info, span).map_err(|e| {
|
||||
LabeledError::new(e.to_string()).with_label("error converting to Value::Record", span)
|
||||
})?;
|
||||
|
||||
Ok(value)
|
||||
}
|
||||
|
||||
// revive nu-serde sketch
|
||||
|
||||
use serde::Serialize;
|
||||
|
||||
/// Convert any serde::Serialize into a `nu_protocol::Value`
|
||||
pub fn to_value<T>(value: T, span: Span) -> Result<Value, Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
value.serialize(&ValueSerializer { span })
|
||||
}
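A brief usage sketch of the to_value helper above: the PageInfo struct and its fields are hypothetical, and this assumes serde's derive feature is enabled for this crate.

// Hypothetical usage sketch; PageInfo is illustrative only.
#[derive(serde::Serialize)]
struct PageInfo {
    title: String,
    links: Vec<String>,
}

fn page_info_to_value(span: Span) -> Result<Value, Error> {
    let info = PageInfo {
        title: "My Title".into(),
        links: vec!["https://example.org".into()],
    };
    // Serializes to a Value::Record with a string "title" and a list "links".
    to_value(info, span)
}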
|
||||
|
||||
struct ValueSerializer {
|
||||
span: Span,
|
||||
}
|
||||
|
||||
struct MapSerializer<'a> {
|
||||
record: Record,
|
||||
serializer: &'a ValueSerializer,
|
||||
current_key: Option<String>,
|
||||
}
|
||||
|
||||
impl<'a> serde::Serializer for &'a ValueSerializer {
|
||||
type Ok = Value;
|
||||
type Error = Error;
|
||||
|
||||
type SerializeSeq = SeqSerializer<'a>;
|
||||
type SerializeTuple = SeqSerializer<'a>;
|
||||
type SerializeTupleStruct = SeqSerializer<'a>;
|
||||
type SerializeTupleVariant = SeqSerializer<'a>;
|
||||
|
||||
type SerializeMap = MapSerializer<'a>;
|
||||
type SerializeStruct = MapSerializer<'a>;
|
||||
type SerializeStructVariant = MapSerializer<'a>;
|
||||
|
||||
fn serialize_bool(self, v: bool) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::bool(v, self.span))
|
||||
}
|
||||
|
||||
fn serialize_i8(self, v: i8) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::int(v.into(), self.span))
|
||||
}
|
||||
|
||||
fn serialize_i16(self, v: i16) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::int(v.into(), self.span))
|
||||
}
|
||||
|
||||
fn serialize_i32(self, v: i32) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::int(v.into(), self.span))
|
||||
}
|
||||
|
||||
fn serialize_i64(self, v: i64) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::int(v, self.span))
|
||||
}
|
||||
|
||||
fn serialize_u8(self, v: u8) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::int(v.into(), self.span))
|
||||
}
|
||||
|
||||
fn serialize_u16(self, v: u16) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::int(v.into(), self.span))
|
||||
}
|
||||
|
||||
fn serialize_u32(self, v: u32) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::int(v.into(), self.span))
|
||||
}
|
||||
|
||||
fn serialize_u64(self, _v: u64) -> Result<Self::Ok, Self::Error> {
|
||||
// TODO: how to represent a u64 value as a Value<i64>?
|
||||
Err(Error::new("the numbers are too big"))
|
||||
// Ok(Value::int(v.into(), self.span))
|
||||
}
|
||||
|
||||
fn serialize_f32(self, v: f32) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::float(v.into(), self.span))
|
||||
}
|
||||
|
||||
fn serialize_f64(self, v: f64) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::float(v, self.span))
|
||||
}
|
||||
|
||||
fn serialize_char(self, v: char) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::string(v, self.span))
|
||||
}
|
||||
|
||||
fn serialize_str(self, v: &str) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::string(v, self.span))
|
||||
}
|
||||
|
||||
fn serialize_bytes(self, v: &[u8]) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::binary(v, self.span))
|
||||
}
|
||||
|
||||
fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::nothing(self.span))
|
||||
}
|
||||
|
||||
fn serialize_some<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
value.serialize(self)
|
||||
}
|
||||
|
||||
fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
|
||||
// TODO: is this OK?
|
||||
Ok(Value::nothing(self.span))
|
||||
}
|
||||
|
||||
fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> {
|
||||
// TODO: is this OK?
|
||||
Ok(Value::nothing(self.span))
|
||||
}
|
||||
|
||||
fn serialize_unit_variant(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_variant_index: u32,
|
||||
_variant: &'static str,
|
||||
) -> Result<Self::Ok, Self::Error> {
|
||||
// TODO: is this OK?
|
||||
Ok(Value::nothing(self.span))
|
||||
}
|
||||
|
||||
fn serialize_newtype_struct<T: ?Sized>(
|
||||
self,
|
||||
_name: &'static str,
|
||||
value: &T,
|
||||
) -> Result<Self::Ok, Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
value.serialize(self)
|
||||
}
|
||||
|
||||
fn serialize_newtype_variant<T: ?Sized>(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_variant_index: u32,
|
||||
_variant: &'static str,
|
||||
value: &T,
|
||||
) -> Result<Self::Ok, Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
value.serialize(self)
|
||||
}
|
||||
|
||||
fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
|
||||
Ok(SeqSerializer::new(self))
|
||||
}
|
||||
|
||||
fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
|
||||
Ok(SeqSerializer::new(self))
|
||||
}
|
||||
|
||||
fn serialize_tuple_struct(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_len: usize,
|
||||
) -> Result<Self::SerializeTupleStruct, Self::Error> {
|
||||
Ok(SeqSerializer::new(self))
|
||||
}
|
||||
|
||||
fn serialize_tuple_variant(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_variant_index: u32,
|
||||
_variant: &'static str,
|
||||
_len: usize,
|
||||
) -> Result<Self::SerializeTupleVariant, Self::Error> {
|
||||
Ok(SeqSerializer::new(self))
|
||||
}
|
||||
|
||||
fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
|
||||
Ok(MapSerializer::new(self))
|
||||
}
|
||||
|
||||
fn serialize_struct(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_len: usize,
|
||||
) -> Result<Self::SerializeStruct, Self::Error> {
|
||||
Ok(MapSerializer::new(self))
|
||||
}
|
||||
|
||||
fn serialize_struct_variant(
|
||||
self,
|
||||
_name: &'static str,
|
||||
_variant_index: u32,
|
||||
_variant: &'static str,
|
||||
_len: usize,
|
||||
) -> Result<Self::SerializeStructVariant, Self::Error> {
|
||||
Ok(MapSerializer::new(self))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Error {
|
||||
message: String,
|
||||
}
|
||||
|
||||
impl Error {
|
||||
pub fn new<T: std::fmt::Display>(msg: T) -> Self {
|
||||
Error {
|
||||
message: msg.to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::ser::Error for Error {
|
||||
fn custom<T: std::fmt::Display>(msg: T) -> Self {
|
||||
Error::new(msg.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for Error {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.message)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.message)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for Error {}
|
||||
|
||||
//
|
||||
// maps
|
||||
impl<'a> MapSerializer<'a> {
|
||||
fn new(serializer: &'a ValueSerializer) -> Self {
|
||||
Self {
|
||||
record: Record::new(),
|
||||
current_key: None,
|
||||
serializer,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> serde::ser::SerializeStruct for MapSerializer<'a> {
|
||||
type Ok = Value;
|
||||
type Error = Error;
|
||||
|
||||
fn serialize_field<T: ?Sized>(
|
||||
&mut self,
|
||||
key: &'static str,
|
||||
value: &T,
|
||||
) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
self.record
|
||||
.insert(key.to_owned(), value.serialize(self.serializer)?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn end(self) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::record(self.record, self.serializer.span))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> serde::ser::SerializeMap for MapSerializer<'a> {
|
||||
type Ok = Value;
|
||||
type Error = Error;
|
||||
|
||||
fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
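// Keys are round-tripped through serde_json so that any string-serializable key
// type is accepted; non-string keys hit the "key must be a string" error below.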
|
||||
let value = serde_json::to_value(key).map_err(Error::new)?;
|
||||
let key = value
|
||||
.as_str()
|
||||
.ok_or(Error::new("key must be a string"))?
|
||||
.to_string();
|
||||
self.current_key = Some(key);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
let key = self.current_key.take().ok_or(Error::new("key expected"))?;
|
||||
self.record.insert(key, value.serialize(self.serializer)?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn end(self) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::record(self.record, self.serializer.span))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> serde::ser::SerializeStructVariant for MapSerializer<'a> {
|
||||
type Ok = Value;
|
||||
type Error = Error;
|
||||
|
||||
fn serialize_field<T: ?Sized>(
|
||||
&mut self,
|
||||
key: &'static str,
|
||||
value: &T,
|
||||
) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
self.record
|
||||
.insert(key.to_owned(), value.serialize(self.serializer)?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn end(self) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::record(self.record, self.serializer.span))
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// sequences
|
||||
struct SeqSerializer<'a> {
|
||||
seq: Vec<Value>,
|
||||
serializer: &'a ValueSerializer,
|
||||
}
|
||||
|
||||
impl<'a> SeqSerializer<'a> {
|
||||
fn new(serializer: &'a ValueSerializer) -> Self {
|
||||
Self {
|
||||
seq: Vec::new(),
|
||||
serializer,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> serde::ser::SerializeSeq for SeqSerializer<'a> {
|
||||
type Ok = Value;
|
||||
type Error = Error;
|
||||
|
||||
fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
self.seq.push(value.serialize(self.serializer)?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn end(self) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::list(self.seq, self.serializer.span))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> serde::ser::SerializeTuple for SeqSerializer<'a> {
|
||||
type Ok = Value;
|
||||
type Error = Error;
|
||||
|
||||
fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
self.seq.push(value.serialize(self.serializer)?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn end(self) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::list(self.seq, self.serializer.span))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> serde::ser::SerializeTupleStruct for SeqSerializer<'a> {
|
||||
type Ok = Value;
|
||||
type Error = Error;
|
||||
|
||||
fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
self.seq.push(value.serialize(self.serializer)?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn end(self) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::list(self.seq, self.serializer.span))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> serde::ser::SerializeTupleVariant for SeqSerializer<'a> {
|
||||
type Ok = Value;
|
||||
type Error = Error;
|
||||
|
||||
fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
self.seq.push(value.serialize(self.serializer)?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn end(self) -> Result<Self::Ok, Self::Error> {
|
||||
Ok(Value::list(self.seq, self.serializer.span))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
const HTML: &str = r#"
|
||||
<html><head><meta><title>My Title</title></head></html>
|
||||
"#;
|
||||
|
||||
#[test]
|
||||
fn test_basics() {
|
||||
let info = execute_webpage(HTML, Span::test_data()).unwrap();
|
||||
let record = info.as_record().unwrap();
|
||||
assert_eq!(record.get("title").unwrap().as_str().unwrap(), "My Title");
|
||||
}
|
||||
}
|
92
src/main.rs
|
@ -29,7 +29,7 @@ use nu_protocol::{
|
|||
report_error_new, ByteStream, PipelineData, ShellError, Span, Spanned, Value,
|
||||
};
|
||||
use nu_std::load_standard_library;
|
||||
use nu_utils::utils::perf;
|
||||
use nu_utils::perf;
|
||||
use run::{run_commands, run_file, run_repl};
|
||||
use signals::ctrlc_protection;
|
||||
use std::{
|
||||
|
@ -220,14 +220,7 @@ fn main() -> Result<()> {
|
|||
|
||||
logger(|builder| configure(&level, &target, filters, builder))?;
|
||||
// info!("start logging {}:{}:{}", file!(), line!(), column!());
|
||||
perf(
|
||||
"start logging",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("start logging", start_time, use_color);
|
||||
}
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
|
@ -246,27 +239,13 @@ fn main() -> Result<()> {
|
|||
"env-path",
|
||||
parsed_nu_cli_args.env_file.as_ref(),
|
||||
);
|
||||
perf(
|
||||
"set_config_path",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("set_config_path", start_time, use_color);
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
start_time = std::time::Instant::now();
|
||||
terminal::acquire(engine_state.is_interactive);
|
||||
perf(
|
||||
"acquire_terminal",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("acquire_terminal", start_time, use_color);
|
||||
}
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
|
@ -280,26 +259,12 @@ fn main() -> Result<()> {
|
|||
|
||||
engine_state.add_env_var("NU_LIB_DIRS".into(), Value::list(vals, span));
|
||||
}
|
||||
perf(
|
||||
"NU_LIB_DIRS setup",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("NU_LIB_DIRS setup", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
// First, set up env vars as strings only
|
||||
gather_parent_env_vars(&mut engine_state, &init_cwd);
|
||||
perf(
|
||||
"gather env vars",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("gather env vars", start_time, use_color);
|
||||
|
||||
engine_state.add_env_var(
|
||||
"NU_VERSION".to_string(),
|
||||
|
@ -360,14 +325,7 @@ fn main() -> Result<()> {
|
|||
}
|
||||
std::process::exit(0)
|
||||
}
|
||||
perf(
|
||||
"run test_bins",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("run test_bins", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
let input = if let Some(redirect_stdin) = &parsed_nu_cli_args.redirect_stdin {
|
||||
|
@ -377,26 +335,12 @@ fn main() -> Result<()> {
|
|||
trace!("not redirecting stdin");
|
||||
PipelineData::empty()
|
||||
};
|
||||
perf(
|
||||
"redirect stdin",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("redirect stdin", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
// Set up the $nu constant before evaluating config files (need to have $nu available in them)
|
||||
engine_state.generate_nu_constant();
|
||||
perf(
|
||||
"create_nu_constant",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("create_nu_constant", start_time, use_color);
|
||||
|
||||
#[cfg(feature = "plugin")]
|
||||
if let Some(plugins) = &parsed_nu_cli_args.plugins {
|
||||
|
@ -434,26 +378,12 @@ fn main() -> Result<()> {
|
|||
}
|
||||
engine_state.merge_delta(working_set.render())?;
|
||||
|
||||
perf(
|
||||
"load plugins specified in --plugins",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
)
|
||||
perf!("load plugins specified in --plugins", start_time, use_color)
|
||||
}
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
if parsed_nu_cli_args.lsp {
|
||||
perf(
|
||||
"lsp starting",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("lsp starting", start_time, use_color);
|
||||
|
||||
if parsed_nu_cli_args.no_config_file.is_none() {
|
||||
let mut stack = nu_protocol::engine::Stack::new();
|
||||
|
|
110
src/run.rs
|
@ -12,7 +12,7 @@ use nu_protocol::{
|
|||
engine::{EngineState, Stack},
|
||||
report_error_new, PipelineData, Spanned,
|
||||
};
|
||||
use nu_utils::utils::perf;
|
||||
use nu_utils::perf;
|
||||
|
||||
pub(crate) fn run_commands(
|
||||
engine_state: &mut EngineState,
|
||||
|
@ -35,14 +35,7 @@ pub(crate) fn run_commands(
|
|||
#[cfg(feature = "plugin")]
|
||||
read_plugin_file(engine_state, parsed_nu_cli_args.plugin_file, NUSHELL_FOLDER);
|
||||
|
||||
perf(
|
||||
"read plugins",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("read plugins", start_time, use_color);
|
||||
|
||||
let start_time = std::time::Instant::now();
|
||||
// If we have a env file parameter *OR* we have a login shell parameter, read the env file
|
||||
|
@ -57,14 +50,7 @@ pub(crate) fn run_commands(
|
|||
config_files::read_default_env_file(engine_state, &mut stack)
|
||||
}
|
||||
|
||||
perf(
|
||||
"read env.nu",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("read env.nu", start_time, use_color);
|
||||
|
||||
let start_time = std::time::Instant::now();
|
||||
// If we have a config file parameter *OR* we have a login shell parameter, read the config file
|
||||
|
@ -77,14 +63,7 @@ pub(crate) fn run_commands(
|
|||
);
|
||||
}
|
||||
|
||||
perf(
|
||||
"read config.nu",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("read config.nu", start_time, use_color);
|
||||
|
||||
// If we have a login shell parameter, read the login file
|
||||
let start_time = std::time::Instant::now();
|
||||
|
@ -92,14 +71,7 @@ pub(crate) fn run_commands(
|
|||
config_files::read_loginshell_file(engine_state, &mut stack);
|
||||
}
|
||||
|
||||
perf(
|
||||
"read login.nu",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("read login.nu", start_time, use_color);
|
||||
}
|
||||
|
||||
// Before running commands, set up the startup time
|
||||
|
@ -123,14 +95,7 @@ pub(crate) fn run_commands(
|
|||
report_error_new(engine_state, &err);
|
||||
std::process::exit(1);
|
||||
}
|
||||
perf(
|
||||
"evaluate_commands",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("evaluate_commands", start_time, use_color);
|
||||
}
|
||||
|
||||
pub(crate) fn run_file(
|
||||
|
@ -153,14 +118,7 @@ pub(crate) fn run_file(
|
|||
let start_time = std::time::Instant::now();
|
||||
#[cfg(feature = "plugin")]
|
||||
read_plugin_file(engine_state, parsed_nu_cli_args.plugin_file, NUSHELL_FOLDER);
|
||||
perf(
|
||||
"read plugins",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("read plugins", start_time, use_color);
|
||||
|
||||
let start_time = std::time::Instant::now();
|
||||
// only want to load config and env if relative argument is provided.
|
||||
|
@ -174,14 +132,7 @@ pub(crate) fn run_file(
|
|||
} else {
|
||||
config_files::read_default_env_file(engine_state, &mut stack)
|
||||
}
|
||||
perf(
|
||||
"read env.nu",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("read env.nu", start_time, use_color);
|
||||
|
||||
let start_time = std::time::Instant::now();
|
||||
if parsed_nu_cli_args.config_file.is_some() {
|
||||
|
@ -192,14 +143,7 @@ pub(crate) fn run_file(
|
|||
false,
|
||||
);
|
||||
}
|
||||
perf(
|
||||
"read config.nu",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("read config.nu", start_time, use_color);
|
||||
}
|
||||
|
||||
// Regenerate the $nu constant to contain the startup time and any other potential updates
|
||||
|
@ -216,14 +160,7 @@ pub(crate) fn run_file(
|
|||
report_error_new(engine_state, &err);
|
||||
std::process::exit(1);
|
||||
}
|
||||
perf(
|
||||
"evaluate_file",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("evaluate_file", start_time, use_color);
|
||||
|
||||
let start_time = std::time::Instant::now();
|
||||
let last_exit_code = stack.get_env_var(&*engine_state, "LAST_EXIT_CODE");
|
||||
|
@ -235,14 +172,7 @@ pub(crate) fn run_file(
|
|||
}
|
||||
}
|
||||
}
|
||||
perf(
|
||||
"get exit code",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("get exit code", start_time, use_color);
|
||||
}
|
||||
|
||||
pub(crate) fn run_repl(
|
||||
|
@ -268,14 +198,7 @@ pub(crate) fn run_repl(
|
|||
|
||||
// Reload use_color from config in case it's different from the default value
|
||||
let use_color = engine_state.get_config().use_ansi_coloring;
|
||||
perf(
|
||||
"setup_config",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("setup_config", start_time, use_color);
|
||||
|
||||
let start_time = std::time::Instant::now();
|
||||
let ret_val = evaluate_repl(
|
||||
|
@ -286,14 +209,7 @@ pub(crate) fn run_repl(
|
|||
parsed_nu_cli_args.no_std_lib,
|
||||
entire_start_time,
|
||||
);
|
||||
perf(
|
||||
"evaluate_repl",
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
use_color,
|
||||
);
|
||||
perf!("evaluate_repl", start_time, use_color);
|
||||
|
||||
ret_val
|
||||
}
|
||||
|
|
|
@ -1,27 +1,5 @@
|
|||
use nu_test_support::nu_with_plugins;
|
||||
|
||||
#[test]
|
||||
fn closure() {
|
||||
let actual = nu_with_plugins!(
|
||||
cwd: "tests",
|
||||
plugin: ("nu_plugin_example"),
|
||||
r#"
|
||||
$env.env_value = "value from env"
|
||||
|
||||
$env.config = {
|
||||
plugins: {
|
||||
example: {||
|
||||
$env.env_value
|
||||
}
|
||||
}
|
||||
}
|
||||
example config
|
||||
"#
|
||||
);
|
||||
|
||||
assert!(actual.out.contains("value from env"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn none() {
|
||||
let actual = nu_with_plugins!(
|
||||
|
@ -34,7 +12,7 @@ fn none() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn record() {
|
||||
fn some() {
|
||||
let actual = nu_with_plugins!(
|
||||
cwd: "tests",
|
||||
plugin: ("nu_plugin_example"),
|
||||
|
@ -42,8 +20,11 @@ fn record() {
|
|||
$env.config = {
|
||||
plugins: {
|
||||
example: {
|
||||
key1: "value"
|
||||
key2: "other"
|
||||
path: "some/path",
|
||||
nested: {
|
||||
bool: true,
|
||||
string: "Hello Example!"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -51,6 +32,6 @@ fn record() {
|
|||
"#
|
||||
);
|
||||
|
||||
assert!(actual.out.contains("value"));
|
||||
assert!(actual.out.contains("other"));
|
||||
assert!(actual.out.contains("some/path"));
|
||||
assert!(actual.out.contains("Hello Example!"));
|
||||
}
|
||||
|
|
|
@ -36,6 +36,12 @@ fn non_string_in_record() -> TestResult {
|
|||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unbalance_string() -> TestResult {
|
||||
fail_test(r#""aaaab"cc"#, "invalid characters")?;
|
||||
fail_test(r#"'aaaab'cc"#, "invalid characters")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn string_in_valuestream() -> TestResult {
|
||||
run_test(
|
||||
|
|