Merge branch 'main' into collect-optimization

Commit: b51bdfaf12

Cargo.lock (generated, 33 lines changed)
@@ -1219,24 +1219,24 @@ dependencies = [
 ]

 [[package]]
-name = "dirs-next"
-version = "2.0.0"
+name = "dirs"
+version = "5.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1"
+checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
 dependencies = [
-  "cfg-if",
-  "dirs-sys-next",
+  "dirs-sys",
 ]

 [[package]]
-name = "dirs-sys-next"
-version = "0.1.2"
+name = "dirs-sys"
+version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
+checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
 dependencies = [
   "libc",
+  "option-ext",
   "redox_users",
-  "winapi",
+  "windows-sys 0.48.0",
 ]

 [[package]]
@@ -2873,7 +2873,7 @@ dependencies = [
  "assert_cmd",
  "crossterm",
  "ctrlc",
- "dirs-next",
+ "dirs",
  "log",
  "miette",
  "mimalloc",
@@ -3047,7 +3047,7 @@ dependencies = [
  "deunicode",
  "dialoguer",
  "digest",
- "dirs-next",
+ "dirs",
  "dtparse",
  "encoding_rs",
  "fancy-regex",
@@ -3195,7 +3195,10 @@ dependencies = [
 name = "nu-json"
 version = "0.95.1"
 dependencies = [
+ "fancy-regex",
  "linked-hash-map",
+ "nu-path",
+ "nu-test-support",
  "num-traits",
  "serde",
  "serde_json",
@@ -3242,7 +3245,7 @@ dependencies = [
 name = "nu-path"
 version = "0.95.1"
 dependencies = [
- "dirs-next",
+ "dirs",
  "omnipath",
  "pwd",
 ]
@@ -3833,6 +3836,12 @@ dependencies = [
  "vcpkg",
 ]

+[[package]]
+name = "option-ext"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
+
 [[package]]
 name = "ordered-multimap"
 version = "0.7.3"
@@ -83,7 +83,7 @@ ctrlc = "3.4"
 deunicode = "1.6.0"
 dialoguer = { default-features = false, version = "0.11" }
 digest = { default-features = false, version = "0.10" }
-dirs-next = "2.0"
+dirs = "5.0"
 dtparse = "2.0"
 encoding_rs = "0.8"
 fancy-regex = "0.13"
@@ -201,7 +201,7 @@ reedline = { workspace = true, features = ["bashisms", "sqlite"] }
 crossterm = { workspace = true }
 ctrlc = { workspace = true }
-dirs-next = { workspace = true }
+dirs = { workspace = true }
 log = { workspace = true }
 miette = { workspace = true, features = ["fancy-no-backtrace", "fancy"] }
 mimalloc = { version = "0.1.42", default-features = false, optional = true }
@@ -229,7 +229,7 @@ nu-test-support = { path = "./crates/nu-test-support", version = "0.95.1" }
 nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.95.1" }
 nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.95.1" }
 assert_cmd = "2.0"
-dirs-next = { workspace = true }
+dirs = { workspace = true }
 tango-bench = "0.5"
 pretty_assertions = { workspace = true }
 regex = { workspace = true }
crates/nu-cli/README.md (new file, 7 lines)

@@ -0,0 +1,7 @@
This crate implements the core functionality of the interactive Nushell REPL and interfaces with `reedline`.
Currently implements the syntax highlighting and completions logic.
Furthermore includes a few commands that are specific to `reedline`

## Internal Nushell crate

This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
@@ -76,12 +76,21 @@ pub fn evaluate_file(
     trace!("parsing file: {}", file_path_str);
     let block = parse(&mut working_set, Some(file_path_str), &file, false);

+    if let Some(warning) = working_set.parse_warnings.first() {
+        report_error(&working_set, warning);
+    }
+
     // If any parse errors were found, report the first error and exit.
     if let Some(err) = working_set.parse_errors.first() {
         report_error(&working_set, err);
         std::process::exit(1);
     }

+    if let Some(err) = working_set.compile_errors.first() {
+        report_error(&working_set, err);
+        // Not a fatal error, for now
+    }
+
     // Look for blocks whose name starts with "main" and replace it with the filename.
     for block in working_set.delta.blocks.iter_mut().map(Arc::make_mut) {
         if block.signature.name == "main" {
@@ -1,3 +1,4 @@
+#![doc = include_str!("../README.md")]
 mod commands;
 mod completions;
 mod config_files;
crates/nu-cmd-base/README.md (new file, 5 lines)

@@ -0,0 +1,5 @@
Utilities used by the different `nu-command`/`nu-cmd-*` crates, should not contain any full `Command` implementations.

## Internal Nushell crate

This crate implements components of Nushell and is not designed to support plugin authors or other users directly.

@@ -1,3 +1,4 @@
+#![doc = include_str!("../README.md")]
 pub mod formats;
 pub mod hook;
 pub mod input_handler;
@@ -1,3 +1,4 @@
+#![doc = include_str!("../README.md")]
 mod example_test;
 pub mod extra;
 pub use extra::*;

@@ -1,3 +1,4 @@
+#![doc = include_str!("../README.md")]
 mod core_commands;
 mod default_context;
 pub mod example_support;

crates/nu-color-config/README.md (new file, 5 lines)

@@ -0,0 +1,5 @@
Logic to resolve colors for syntax highlighting and output formatting

## Internal Nushell crate

This crate implements components of Nushell and is not designed to support plugin authors or other users directly.

@@ -1,3 +1,4 @@
+#![doc = include_str!("../README.md")]
 mod color_config;
 mod matching_brackets_style;
 mod nu_style;
|
@ -140,10 +140,10 @@ trash-support = ["trash"]
|
|||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.95.1" }
|
||||
nu-test-support = { path = "../nu-test-support", version = "0.95.1" }
|
||||
|
||||
dirs-next = { workspace = true }
|
||||
dirs = { workspace = true }
|
||||
mockito = { workspace = true, default-features = false }
|
||||
quickcheck = { workspace = true }
|
||||
quickcheck_macros = { workspace = true }
|
||||
rstest = { workspace = true, default-features = false }
|
||||
pretty_assertions = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
|
|
7
crates/nu-command/README.md
Normal file
7
crates/nu-command/README.md
Normal file
|
@ -0,0 +1,7 @@
|
|||
This crate contains the majority of our commands
|
||||
|
||||
We allow ourselves to move some of the commands in `nu-command` to `nu-cmd-*` crates as needed.
|
||||
|
||||
## Internal Nushell crate
|
||||
|
||||
This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
|
|
@@ -1,5 +1,8 @@
 use nu_engine::{command_prelude::*, ClosureEvalOnce};
-use nu_protocol::{debugger::Profiler, engine::Closure};
+use nu_protocol::{
+    debugger::{Profiler, ProfilerOptions},
+    engine::Closure,
+};

 #[derive(Clone)]
 pub struct DebugProfile;
@@ -28,6 +31,7 @@ impl Command for DebugProfile {
             Some('v'),
         )
         .switch("expr", "Collect expression types", Some('x'))
+        .switch("instructions", "Collect IR instructions", Some('i'))
         .switch("lines", "Collect line numbers", Some('l'))
         .named(
             "max-depth",
@@ -91,19 +95,23 @@ confusing the id/parent_id hierarchy. The --expr flag is helpful for investigati
         let collect_expanded_source = call.has_flag(engine_state, stack, "expanded-source")?;
         let collect_values = call.has_flag(engine_state, stack, "values")?;
         let collect_exprs = call.has_flag(engine_state, stack, "expr")?;
+        let collect_instructions = call.has_flag(engine_state, stack, "instructions")?;
         let collect_lines = call.has_flag(engine_state, stack, "lines")?;
         let max_depth = call
             .get_flag(engine_state, stack, "max-depth")?
             .unwrap_or(2);

         let profiler = Profiler::new(
-            max_depth,
-            collect_spans,
-            true,
-            collect_expanded_source,
-            collect_values,
-            collect_exprs,
-            collect_lines,
+            ProfilerOptions {
+                max_depth,
+                collect_spans,
+                collect_source: true,
+                collect_expanded_source,
+                collect_values,
+                collect_exprs,
+                collect_instructions,
+                collect_lines,
+            },
             call.span(),
         );
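For context, the hunk above folds the profiler's run of positional boolean arguments into a single `ProfilerOptions` value. A minimal standalone sketch of that refactor pattern, with hypothetical names rather than Nushell's actual types:

```rust
/// Before: a constructor with many positional flags is easy to call incorrectly.
/// After: an options struct names each flag at the call site.
#[derive(Debug, Clone, Default)]
struct CollectorOptions {
    max_depth: i64,
    collect_spans: bool,
    collect_source: bool,
    collect_values: bool,
}

struct Collector {
    opts: CollectorOptions,
    events: Vec<String>,
}

impl Collector {
    fn new(opts: CollectorOptions) -> Self {
        Self { opts, events: Vec::new() }
    }
}

fn main() {
    // Field names document what each setting means, unlike a run of `true, false, true, ...`.
    let collector = Collector::new(CollectorOptions {
        max_depth: 2,
        collect_spans: true,
        collect_source: true,
        ..Default::default()
    });
    assert!(collector.opts.collect_spans);
    assert!(collector.events.is_empty());
}
```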
@@ -31,6 +31,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
         All,
         Any,
         Append,
+        Chunks,
         Columns,
         Compact,
         Default,
@@ -290,7 +291,6 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
         ToText,
         ToToml,
         ToTsv,
         Touch,
         Upsert,
         Where,
         ToXml,
crates/nu-command/src/filters/chunks.rs (new file, 153 lines)

@@ -0,0 +1,153 @@
use nu_engine::command_prelude::*;
use nu_protocol::ListStream;

#[derive(Clone)]
pub struct Chunks;

impl Command for Chunks {
    fn name(&self) -> &str {
        "chunks"
    }

    fn signature(&self) -> Signature {
        Signature::build("chunks")
            .input_output_types(vec![
                (Type::table(), Type::list(Type::table())),
                (Type::list(Type::Any), Type::list(Type::list(Type::Any))),
            ])
            .required("chunk_size", SyntaxShape::Int, "The size of each chunk.")
            .category(Category::Filters)
    }

    fn usage(&self) -> &str {
        "Divide a list or table into chunks of `chunk_size`."
    }

    fn extra_usage(&self) -> &str {
        "This command will error if `chunk_size` is negative or zero."
    }

    fn search_terms(&self) -> Vec<&str> {
        vec!["batch", "group"]
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                example: "[1 2 3 4] | chunks 2",
                description: "Chunk a list into pairs",
                result: Some(Value::test_list(vec![
                    Value::test_list(vec![Value::test_int(1), Value::test_int(2)]),
                    Value::test_list(vec![Value::test_int(3), Value::test_int(4)]),
                ])),
            },
            Example {
                example: "[[foo bar]; [0 1] [2 3] [4 5] [6 7] [8 9]] | chunks 3",
                description: "Chunk the rows of a table into triplets",
                result: Some(Value::test_list(vec![
                    Value::test_list(vec![
                        Value::test_record(record! {
                            "foo" => Value::test_int(0),
                            "bar" => Value::test_int(1),
                        }),
                        Value::test_record(record! {
                            "foo" => Value::test_int(2),
                            "bar" => Value::test_int(3),
                        }),
                        Value::test_record(record! {
                            "foo" => Value::test_int(4),
                            "bar" => Value::test_int(5),
                        }),
                    ]),
                    Value::test_list(vec![
                        Value::test_record(record! {
                            "foo" => Value::test_int(6),
                            "bar" => Value::test_int(7),
                        }),
                        Value::test_record(record! {
                            "foo" => Value::test_int(8),
                            "bar" => Value::test_int(9),
                        }),
                    ]),
                ])),
            },
        ]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let head = call.head;
        let chunk_size: Value = call.req(engine_state, stack, 0)?;

        let size =
            usize::try_from(chunk_size.as_int()?).map_err(|_| ShellError::NeedsPositiveValue {
                span: chunk_size.span(),
            })?;

        if size == 0 {
            return Err(ShellError::IncorrectValue {
                msg: "`chunk_size` cannot be zero".into(),
                val_span: chunk_size.span(),
                call_span: head,
            });
        }

        match input {
            PipelineData::Value(Value::List { vals, .. }, metadata) => {
                let chunks = ChunksIter::new(vals, size, head);
                let stream = ListStream::new(chunks, head, engine_state.signals().clone());
                Ok(PipelineData::ListStream(stream, metadata))
            }
            PipelineData::ListStream(stream, metadata) => {
                let stream = stream.modify(|iter| ChunksIter::new(iter, size, head));
                Ok(PipelineData::ListStream(stream, metadata))
            }
            input => Err(input.unsupported_input_error("list", head)),
        }
    }
}

struct ChunksIter<I: Iterator<Item = Value>> {
    iter: I,
    size: usize,
    span: Span,
}

impl<I: Iterator<Item = Value>> ChunksIter<I> {
    fn new(iter: impl IntoIterator<IntoIter = I>, size: usize, span: Span) -> Self {
        Self {
            iter: iter.into_iter(),
            size,
            span,
        }
    }
}

impl<I: Iterator<Item = Value>> Iterator for ChunksIter<I> {
    type Item = Value;

    fn next(&mut self) -> Option<Self::Item> {
        let first = self.iter.next()?;
        let mut chunk = Vec::with_capacity(self.size); // delay allocation to optimize for empty iter
        chunk.push(first);
        chunk.extend((&mut self.iter).take(self.size - 1));
        Some(Value::list(chunk, self.span))
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_examples() {
        use crate::test_examples;

        test_examples(Chunks {})
    }
}
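The `ChunksIter` above delays allocating each chunk until the source yields at least one item, so an exhausted stream never produces an empty trailing chunk. A self-contained sketch of the same idea over a plain iterator (illustrative only, no Nushell types):

```rust
struct ChunkIter<I: Iterator> {
    iter: I,
    // Must be >= 1; the command rejects a zero chunk size before building the iterator.
    size: usize,
}

impl<I: Iterator> Iterator for ChunkIter<I> {
    type Item = Vec<I::Item>;

    fn next(&mut self) -> Option<Self::Item> {
        // Pull one item first: if the source is empty, emit nothing at all.
        let first = self.iter.next()?;
        let mut chunk = Vec::with_capacity(self.size);
        chunk.push(first);
        chunk.extend((&mut self.iter).take(self.size - 1));
        Some(chunk)
    }
}

fn main() {
    let chunks: Vec<Vec<i32>> = ChunkIter { iter: 0..5, size: 2 }.collect();
    assert_eq!(chunks, vec![vec![0, 1], vec![2, 3], vec![4]]);
}
```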
@@ -51,11 +51,11 @@ impl Command for Default {
                 description:
                     "Get the env value of `MY_ENV` with a default value 'abc' if not present",
                 example: "$env | get --ignore-errors MY_ENV | default 'abc'",
-                result: None, // Some(Value::test_string("abc")),
+                result: Some(Value::test_string("abc")),
             },
             Example {
                 description: "Replace the `null` value in a list",
-                example: "[1, 2, null, 4] | default 3",
+                example: "[1, 2, null, 4] | each { default 3 }",
                 result: Some(Value::list(
                     vec![
                         Value::test_int(1),
@@ -113,15 +113,7 @@ fn default(
     } else if input.is_nothing() {
         Ok(value.into_pipeline_data())
     } else {
-        input
-            .map(
-                move |item| match item {
-                    Value::Nothing { .. } => value.clone(),
-                    x => x,
-                },
-                engine_state.signals(),
-            )
-            .map(|x| x.set_metadata(metadata))
+        Ok(input)
     }
 }
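The second hunk narrows `default` so that only an empty or `nothing` input is replaced; `null` items inside a list now pass through untouched, which is why the example switches to `each { default 3 }`. A rough standalone model of the new behavior, using toy types rather than Nushell's real `Value`:

```rust
#[derive(Debug, Clone, PartialEq)]
enum Val {
    Nothing,
    Int(i64),
    List(Vec<Val>),
}

/// New behavior: replace only a top-level Nothing; leave list contents alone.
fn default_value(input: Val, fallback: Val) -> Val {
    match input {
        Val::Nothing => fallback,
        other => other,
    }
}

fn main() {
    assert_eq!(default_value(Val::Nothing, Val::Int(3)), Val::Int(3));
    // A list containing nulls is returned as-is; mapping over items is now the caller's job.
    let list = Val::List(vec![Val::Int(1), Val::Nothing, Val::Int(4)]);
    assert_eq!(default_value(list.clone(), Val::Int(3)), list);
}
```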
@@ -1,5 +1,5 @@
 use nu_engine::command_prelude::*;
-use nu_protocol::ValueIterator;
+use nu_protocol::{report_warning_new, ParseWarning, ValueIterator};

 #[derive(Clone)]
 pub struct Group;
@@ -54,6 +54,17 @@ impl Command for Group {
         input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
         let head = call.head;

+        report_warning_new(
+            engine_state,
+            &ParseWarning::DeprecatedWarning {
+                old_command: "group".into(),
+                new_suggestion: "the new `chunks` command".into(),
+                span: head,
+                url: "`help chunks`".into(),
+            },
+        );
+
         let group_size: Spanned<usize> = call.req(engine_state, stack, 0)?;
         let metadata = input.metadata();
@@ -1,6 +1,7 @@
 mod all;
 mod any;
 mod append;
+mod chunks;
 mod columns;
 mod compact;
 mod default;
@@ -58,6 +59,7 @@ mod zip;
 pub use all::All;
 pub use any::Any;
 pub use append::Append;
+pub use chunks::Chunks;
 pub use columns::Columns;
 pub use compact::Compact;
 pub use default::Default;
@@ -240,7 +240,7 @@ fn select(
                     //FIXME: improve implementation to not clone
                     match input_val.clone().follow_cell_path(&path.members, false) {
                         Ok(fetcher) => {
-                            record.push(path.to_string().replace('.', "_"), fetcher);
+                            record.push(path.to_string(), fetcher);
                             if !columns_with_value.contains(&path) {
                                 columns_with_value.push(path);
                             }
@@ -271,7 +271,7 @@ fn select(
                     // FIXME: remove clone
                     match v.clone().follow_cell_path(&cell_path.members, false) {
                         Ok(result) => {
-                            record.push(cell_path.to_string().replace('.', "_"), result);
+                            record.push(cell_path.to_string(), result);
                         }
                         Err(e) => return Err(e),
                     }
@@ -295,7 +295,7 @@ fn select(
                 //FIXME: improve implementation to not clone
                 match x.clone().follow_cell_path(&path.members, false) {
                     Ok(value) => {
-                        record.push(path.to_string().replace('.', "_"), value);
+                        record.push(path.to_string(), value);
                     }
                     Err(e) => return Err(e),
                 }
@ -1,3 +1,4 @@
|
|||
#![doc = include_str!("../README.md")]
|
||||
mod bytes;
|
||||
mod charting;
|
||||
mod conversions;
|
||||
|
|
|
@ -606,7 +606,7 @@ mod test {
|
|||
assert_eq!(actual, expected);
|
||||
|
||||
let actual = expand_glob("~/foo.txt", cwd, Span::unknown(), &Signals::empty()).unwrap();
|
||||
let home = dirs_next::home_dir().expect("failed to get home dir");
|
||||
let home = dirs::home_dir().expect("failed to get home dir");
|
||||
let expected: Vec<OsString> = vec![home.join("foo.txt").into()];
|
||||
assert_eq!(actual, expected);
|
||||
})
|
||||
|
|
|
@ -151,7 +151,7 @@ fn filesystem_change_to_home_directory() {
|
|||
"
|
||||
);
|
||||
|
||||
assert_eq!(Some(PathBuf::from(actual.out)), dirs_next::home_dir());
|
||||
assert_eq!(Some(PathBuf::from(actual.out)), dirs::home_dir());
|
||||
})
|
||||
}
|
||||
|
||||
|
|
crates/nu-command/tests/commands/chunks.rs (new file, 43 lines)

@@ -0,0 +1,43 @@
use nu_test_support::nu;

#[test]
fn chunk_size_negative() {
    let actual = nu!("[0 1 2] | chunks -1");
    assert!(actual.err.contains("positive"));
}

#[test]
fn chunk_size_zero() {
    let actual = nu!("[0 1 2] | chunks 0");
    assert!(actual.err.contains("zero"));
}

#[test]
fn chunk_size_not_int() {
    let actual = nu!("[0 1 2] | chunks (if true { 1sec })");
    assert!(actual.err.contains("can't convert"));
}

#[test]
fn empty() {
    let actual = nu!("[] | chunks 2 | is-empty");
    assert_eq!(actual.out, "true");
}

#[test]
fn list_stream() {
    let actual = nu!("([0 1 2] | every 1 | chunks 2) == ([0 1 2] | chunks 2)");
    assert_eq!(actual.out, "true");
}

#[test]
fn table_stream() {
    let actual = nu!("([[foo bar]; [0 1] [2 3] [4 5]] | every 1 | chunks 2) == ([[foo bar]; [0 1] [2 3] [4 5]] | chunks 2)");
    assert_eq!(actual.out, "true");
}

#[test]
fn no_empty_chunks() {
    let actual = nu!("([0 1 2 3 4 5] | chunks 3 | length) == 2");
    assert_eq!(actual.out, "true");
}
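The `no_empty_chunks` test relies on the chunk count being the ceiling of `len / chunk_size`. A quick standalone check of that arithmetic (illustrative helper, not part of the crate):

```rust
/// Number of chunks produced for `len` items with a non-zero `size`: ceil(len / size).
fn chunk_count(len: usize, size: usize) -> usize {
    assert!(size > 0, "chunk size must be positive");
    (len + size - 1) / size
}

fn main() {
    assert_eq!(chunk_count(6, 3), 2); // matches `[0 1 2 3 4 5] | chunks 3 | length` == 2
    assert_eq!(chunk_count(5, 2), 3); // the last chunk is shorter, never empty
    assert_eq!(chunk_count(0, 2), 0); // an empty input yields no chunks
}
```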
@ -32,3 +32,15 @@ fn default_after_empty_filter() {
|
|||
|
||||
assert_eq!(actual.out, "d");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn keeps_nulls_in_lists() {
|
||||
let actual = nu!(r#"[null, 2, 3] | default [] | to json -r"#);
|
||||
assert_eq!(actual.out, "[null,2,3]");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_null() {
|
||||
let actual = nu!(r#"null | default 1"#);
|
||||
assert_eq!(actual.out, "1");
|
||||
}
|
||||
|
|
|
@ -7,6 +7,7 @@ mod break_;
|
|||
mod bytes;
|
||||
mod cal;
|
||||
mod cd;
|
||||
mod chunks;
|
||||
mod compact;
|
||||
mod complete;
|
||||
mod config_env_default;
|
||||
|
|
|
@ -309,7 +309,7 @@ fn external_arg_expand_tilde() {
|
|||
"#
|
||||
));
|
||||
|
||||
let home = dirs_next::home_dir().expect("failed to find home dir");
|
||||
let home = dirs::home_dir().expect("failed to find home dir");
|
||||
|
||||
assert_eq!(
|
||||
actual.out,
|
||||
|
|
|
@ -50,7 +50,7 @@ fn complex_nested_columns() {
|
|||
r#"
|
||||
{sample}
|
||||
| select nu."0xATYKARNU" nu.committers.name nu.releases.version
|
||||
| get nu_releases_version
|
||||
| get "nu.releases.version"
|
||||
| where $it > "0.8"
|
||||
| get 0
|
||||
"#
|
||||
|
|
|
@ -2901,3 +2901,9 @@ fn table_general_header_on_separator_trim_algorithm() {
|
|||
let actual = nu!("$env.config.table.header_on_separator = true; [[a b]; ['11111111111111111111111111111111111111' 2] ] | table --width=20 --theme basic");
|
||||
assert_eq!(actual.out, "+-#-+----a-----+-b-+| 0 | 11111111 | 2 || | 11111111 | || | 11111111 | || | 11111111 | || | 111111 | |+---+----------+---+");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn table_general_header_on_separator_issue1() {
|
||||
let actual = nu!("$env.config.table.header_on_separator = true; [['Llll oo Bbbbbbbb' 'Bbbbbbbb Aaaa' Nnnnnn Ggggg 'Xxxxx Llllllll #' Bbb 'Pppp Ccccc' 'Rrrrrrrr Dddd' Rrrrrr 'Rrrrrr Ccccc II' 'Rrrrrr Ccccc Ppppppp II' 'Pppppp Dddddddd Tttt' 'Pppppp Dddddddd Dddd' 'Rrrrrrrrr Trrrrrr' 'Pppppp Ppppp Dddd' 'Ppppp Dddd' Hhhh]; [RRRRRRR FFFFFFFF UUUU VV 202407160001 BBB 1 '7/16/2024' '' AAA-1111 AAA-1111-11 '7 YEARS' 2555 'RRRRRRRR DDDD' '7/16/2031' '7/16/2031' NN]] | table --width=87 --theme basic");
|
||||
assert_eq!(actual.out, "+-#-+-Llll oo Bbbbbbbb-+-Bbbbbbbb Aaaa-+-Nnnnnn-+-Ggggg-+-Xxxxx Llllllll #-+-...-+| 0 | RRRRRRR | FFFFFFFF | UUUU | VV | 202407160001 | ... |+---+------------------+---------------+--------+-------+------------------+-----+");
|
||||
}
|
||||
|
|
crates/nu-engine/README.md (new file, 9 lines)
|
@ -0,0 +1,9 @@
|
|||
This crate primarily drives the evaluation of expressions.
|
||||
|
||||
(Some overlap with nu-protocol)
|
||||
|
||||
- Provides `CallExt`
|
||||
|
||||
## Internal Nushell crate
|
||||
|
||||
This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
|
|
@ -184,11 +184,20 @@ fn eval_ir_block_impl<D: DebugContext>(
|
|||
let instruction = &ir_block.instructions[pc];
|
||||
let span = &ir_block.spans[pc];
|
||||
let ast = &ir_block.ast[pc];
|
||||
log::trace!(
|
||||
"{pc:-4}: {}",
|
||||
instruction.display(ctx.engine_state, ctx.data)
|
||||
|
||||
D::enter_instruction(ctx.engine_state, ir_block, pc, ctx.registers);
|
||||
|
||||
let result = eval_instruction::<D>(ctx, instruction, span, ast);
|
||||
|
||||
D::leave_instruction(
|
||||
ctx.engine_state,
|
||||
ir_block,
|
||||
pc,
|
||||
ctx.registers,
|
||||
result.as_ref().err(),
|
||||
);
|
||||
match eval_instruction::<D>(ctx, instruction, span, ast) {
|
||||
|
||||
match result {
|
||||
Ok(InstructionResult::Continue) => {
|
||||
pc += 1;
|
||||
}
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
#![doc = include_str!("../README.md")]
|
||||
mod call_ext;
|
||||
mod closure_eval;
|
||||
pub mod column;
|
||||
|
|
crates/nu-explore/README.md (new file, 5 lines)
|
@ -0,0 +1,5 @@
|
|||
Implementation of the interactive `explore` command pager.
|
||||
|
||||
## Internal Nushell crate
|
||||
|
||||
This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
|
|
@ -1,3 +1,4 @@
|
|||
#![doc = include_str!("../README.md")]
|
||||
mod commands;
|
||||
mod default_context;
|
||||
mod explore;
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
[package]
|
||||
authors = ["The Nushell Project Developers", "Christian Zangl <laktak@cdak.net>"]
|
||||
authors = [
|
||||
"The Nushell Project Developers",
|
||||
"Christian Zangl <laktak@cdak.net>",
|
||||
]
|
||||
description = "Fork of serde-hjson"
|
||||
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-json"
|
||||
edition = "2021"
|
||||
|
@ -19,9 +22,11 @@ default = ["preserve_order"]
|
|||
[dependencies]
|
||||
linked-hash-map = { version = "0.5", optional = true }
|
||||
num-traits = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
# nu-path = { path="../nu-path", version = "0.95.1" }
|
||||
# serde_json = "1.0"
|
||||
nu-test-support = { path = "../nu-test-support", version = "0.95.1" }
|
||||
nu-path = { path = "../nu-path", version = "0.95.1" }
|
||||
serde_json = "1.0"
|
||||
fancy-regex = "0.13.0"
|
||||
|
|
|
@ -24,7 +24,7 @@ nu-json = "0.76"
|
|||
## From the Commandline
|
||||
|
||||
Add with:
|
||||
```
|
||||
```sh
|
||||
cargo add serde
|
||||
cargo add nu-json
|
||||
```
|
||||
|
@ -43,7 +43,7 @@ fn main() {
|
|||
|
||||
let sample_text=r#"
|
||||
{
|
||||
# specify rate in requests/second
|
||||
## specify rate in requests/second
|
||||
rate: 1000
|
||||
array:
|
||||
[
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
#![doc = include_str!("../README.md")]
|
||||
pub use self::de::{
|
||||
from_iter, from_reader, from_slice, from_str, Deserializer, StreamDeserializer,
|
||||
};
|
||||
|
|
|
@ -714,7 +714,7 @@ impl<'a> HjsonFormatter<'a> {
|
|||
stack: Vec::new(),
|
||||
at_colon: false,
|
||||
indent,
|
||||
braces_same_line: false,
|
||||
braces_same_line: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,7 +1,5 @@
|
|||
// FIXME: re-enable tests
|
||||
/*
|
||||
use nu_json::Value;
|
||||
use fancy_regex::Regex;
|
||||
use nu_json::Value;
|
||||
use std::fs;
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
@ -11,7 +9,7 @@ fn txt(text: &str) -> String {
|
|||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
out.replace("\r\n", "").replace("\n", "")
|
||||
out.replace("\r\n", "").replace('\n', "")
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
|
@ -21,15 +19,7 @@ fn txt(text: &str) -> String {
|
|||
}
|
||||
|
||||
fn hjson_expectations() -> PathBuf {
|
||||
let assets = nu_test_support::fs::assets().join("nu_json");
|
||||
|
||||
nu_path::canonicalize(assets.clone()).unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"Couldn't canonicalize hjson assets path {}: {:?}",
|
||||
assets.display(),
|
||||
e
|
||||
)
|
||||
})
|
||||
nu_test_support::fs::assets().join("nu_json").into()
|
||||
}
|
||||
|
||||
fn get_test_content(name: &str) -> io::Result<String> {
|
||||
|
@ -50,7 +40,7 @@ fn get_result_content(name: &str) -> io::Result<(String, String)> {
|
|||
let p1 = format!("{}/{}_result.json", expectations.display(), name);
|
||||
let p2 = format!("{}/{}_result.hjson", expectations.display(), name);
|
||||
|
||||
Ok((fs::read_to_string(&p1)?, fs::read_to_string(&p2)?))
|
||||
Ok((fs::read_to_string(p1)?, fs::read_to_string(p2)?))
|
||||
}
|
||||
|
||||
macro_rules! run_test {
|
||||
|
@ -73,7 +63,8 @@ macro_rules! run_test {
|
|||
let actual_hjson = txt(&actual_hjson);
|
||||
let actual_json = $fix(serde_json::to_string_pretty(&udata).unwrap());
|
||||
let actual_json = txt(&actual_json);
|
||||
if rhjson != actual_hjson {
|
||||
// nu_json::to_string now outputs json instead of hjson!
|
||||
if rjson != actual_hjson {
|
||||
println!(
|
||||
"{:?}\n---hjson expected\n{}\n---hjson actual\n{}\n---\n",
|
||||
name, rhjson, actual_hjson
|
||||
|
@ -85,7 +76,7 @@ macro_rules! run_test {
|
|||
name, rjson, actual_json
|
||||
);
|
||||
}
|
||||
assert!(rhjson == actual_hjson && rjson == actual_json);
|
||||
assert!(rjson == actual_hjson && rjson == actual_json);
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
@ -198,7 +189,7 @@ fn test_hjson() {
|
|||
|
||||
let missing = all
|
||||
.into_iter()
|
||||
.filter(|x| done.iter().find(|y| &x == y) == None)
|
||||
.filter(|x| !done.iter().any(|y| x == y))
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
if !missing.is_empty() {
|
||||
|
@ -208,5 +199,3 @@ fn test_hjson() {
|
|||
panic!();
|
||||
}
|
||||
}
|
||||
|
||||
*/
|
||||
|
|
crates/nu-lsp/README.md (new file, 7 lines)
|
@ -0,0 +1,7 @@
|
|||
Implementation of the Nushell language server.
|
||||
|
||||
See [the Language Server Protocol specification](https://microsoft.github.io/language-server-protocol/)
|
||||
|
||||
## Internal Nushell crate
|
||||
|
||||
This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
|
|
@ -1,3 +1,4 @@
|
|||
#![doc = include_str!("../README.md")]
|
||||
use lsp_server::{Connection, IoThreads, Message, Response, ResponseError};
|
||||
use lsp_types::{
|
||||
request::{Completion, GotoDefinition, HoverRequest, Request},
|
||||
|
|
|
@ -4,7 +4,7 @@ Nushell's parser is a type-directed parser, meaning that the parser will use typ
|
|||
|
||||
Nushell's base language is whitespace-separated tokens with the command (Nushell's term for a function) name in the head position:
|
||||
|
||||
```
|
||||
```nushell
|
||||
head1 arg1 arg2 | head2
|
||||
```
|
||||
|
||||
|
@ -12,7 +12,7 @@ head1 arg1 arg2 | head2
|
|||
|
||||
The first job of the parser is to a lexical analysis to find where the tokens start and end in the input. This turns the above into:
|
||||
|
||||
```
|
||||
```text
|
||||
<item: "head1">, <item: "arg1">, <item: "arg2">, <pipe>, <item: "head2">
|
||||
```
|
||||
|
||||
|
@ -24,7 +24,7 @@ As Nushell is a language of pipelines, pipes form a key role in both separating
|
|||
|
||||
The above tokens are converted the following during the lite parse phase:
|
||||
|
||||
```
|
||||
```text
|
||||
Pipeline:
|
||||
Command #1:
|
||||
<item: "head1">, <item: "arg1">, <item: "arg2">
|
||||
|
@ -45,7 +45,7 @@ Each command has a shape assigned to each of the arguments it reads in. These sh
|
|||
|
||||
For example, if the command is written as:
|
||||
|
||||
```sql
|
||||
```text
|
||||
where $x > 10
|
||||
```
|
||||
|
||||
|
@ -53,7 +53,7 @@ When the parsing happens, the parser will look up the `where` command and find i
|
|||
|
||||
In the above example, if the Signature of `where` said that it took three String values, the result would be:
|
||||
|
||||
```
|
||||
```text
|
||||
CallInfo:
|
||||
Name: `where`
|
||||
Args:
|
||||
|
@ -64,7 +64,7 @@ CallInfo:
|
|||
|
||||
Or, the Signature could state that it takes in three positional arguments: a Variable, an Operator, and a Number, which would give:
|
||||
|
||||
```
|
||||
```text
|
||||
CallInfo:
|
||||
Name: `where`
|
||||
Args:
|
||||
|
@ -77,7 +77,7 @@ Note that in this case, each would be checked at compile time to confirm that th
|
|||
|
||||
Finally, some Shapes can consume more than one token. In the above, if the `where` command stated it took in a single required argument, and that the Shape of this argument was a MathExpression, then the parser would treat the remaining tokens as part of the math expression.
|
||||
|
||||
```
|
||||
```text
|
||||
CallInfo:
|
||||
Name: `where`
|
||||
Args:
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
#![doc = include_str!("../README.md")]
|
||||
mod deparse;
|
||||
mod exportable;
|
||||
mod flatten;
|
||||
|
|
|
@ -12,10 +12,10 @@ exclude = ["/fuzz"]
|
|||
bench = false
|
||||
|
||||
[dependencies]
|
||||
dirs-next = { workspace = true }
|
||||
dirs = { workspace = true }
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
omnipath = { workspace = true }
|
||||
|
||||
[target.'cfg(all(unix, not(target_os = "macos"), not(target_os = "android")))'.dependencies]
|
||||
pwd = { workspace = true }
|
||||
pwd = { workspace = true }
|
||||
|
|
|
@ -3,14 +3,14 @@ use omnipath::WinPathExt;
|
|||
use std::path::PathBuf;
|
||||
|
||||
pub fn home_dir() -> Option<PathBuf> {
|
||||
dirs_next::home_dir()
|
||||
dirs::home_dir()
|
||||
}
|
||||
|
||||
/// Return the data directory for the current platform or XDG_DATA_HOME if specified.
|
||||
pub fn data_dir() -> Option<PathBuf> {
|
||||
match std::env::var("XDG_DATA_HOME").map(PathBuf::from) {
|
||||
Ok(xdg_data) if xdg_data.is_absolute() => Some(canonicalize(&xdg_data).unwrap_or(xdg_data)),
|
||||
_ => get_canonicalized_path(dirs_next::data_dir()),
|
||||
_ => get_canonicalized_path(dirs::data_dir()),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -20,7 +20,7 @@ pub fn cache_dir() -> Option<PathBuf> {
|
|||
Ok(xdg_cache) if xdg_cache.is_absolute() => {
|
||||
Some(canonicalize(&xdg_cache).unwrap_or(xdg_cache))
|
||||
}
|
||||
_ => get_canonicalized_path(dirs_next::cache_dir()),
|
||||
_ => get_canonicalized_path(dirs::cache_dir()),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -30,7 +30,7 @@ pub fn config_dir() -> Option<PathBuf> {
|
|||
Ok(xdg_config) if xdg_config.is_absolute() => {
|
||||
Some(canonicalize(&xdg_config).unwrap_or(xdg_config))
|
||||
}
|
||||
_ => get_canonicalized_path(dirs_next::config_dir()),
|
||||
_ => get_canonicalized_path(dirs::config_dir()),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
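These helpers all follow the same pattern: honour the corresponding `XDG_*` variable only when it is set to an absolute path, otherwise fall back to the platform default from the `dirs` crate. A minimal sketch of that pattern using only the standard library; the fallback closure stands in for `dirs::config_dir` and friends:

```rust
use std::path::PathBuf;

/// Use `var` if it is set to an absolute path; otherwise use the platform default.
fn xdg_or_default(var: &str, platform_default: impl FnOnce() -> Option<PathBuf>) -> Option<PathBuf> {
    match std::env::var(var).map(PathBuf::from) {
        Ok(p) if p.is_absolute() => Some(p),
        _ => platform_default(),
    }
}

fn main() {
    // A relative or unset XDG_CONFIG_HOME falls through to the platform default.
    let dir = xdg_or_default("XDG_CONFIG_HOME", || std::env::var("HOME").ok().map(PathBuf::from));
    println!("{:?}", dir);
}
```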
@ -1,3 +1,4 @@
|
|||
#![doc = include_str!("../README.md")]
|
||||
mod assert_path_eq;
|
||||
mod components;
|
||||
pub mod dots;
|
||||
|
|
|
@ -77,7 +77,7 @@ fn user_home_dir(username: &str) -> PathBuf {
|
|||
fn user_home_dir(username: &str) -> PathBuf {
|
||||
use std::path::Component;
|
||||
|
||||
match dirs_next::home_dir() {
|
||||
match dirs::home_dir() {
|
||||
None => {
|
||||
// Termux always has the same home directory
|
||||
#[cfg(target_os = "android")]
|
||||
|
@ -145,7 +145,7 @@ fn expand_tilde_with_another_user_home(path: &Path) -> PathBuf {
|
|||
/// Expand tilde ("~") into a home directory if it is the first path component
|
||||
pub fn expand_tilde(path: impl AsRef<Path>) -> PathBuf {
|
||||
// TODO: Extend this to work with "~user" style of home paths
|
||||
expand_tilde_with_home(path, dirs_next::home_dir())
|
||||
expand_tilde_with_home(path, dirs::home_dir())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
use crate::{
|
||||
ast::{Block, PipelineElement},
|
||||
engine::EngineState,
|
||||
ir::IrBlock,
|
||||
PipelineData, ShellError, Span, Value,
|
||||
};
|
||||
use std::{fmt::Debug, ops::DerefMut};
|
||||
|
@ -35,11 +36,11 @@ pub trait DebugContext: Clone + Copy + Debug {
|
|||
#[allow(unused_variables)]
|
||||
fn leave_block(engine_state: &EngineState, block: &Block) {}
|
||||
|
||||
/// Called when the evaluator enters a pipeline element
|
||||
/// Called when the AST evaluator enters a pipeline element
|
||||
#[allow(unused_variables)]
|
||||
fn enter_element(engine_state: &EngineState, element: &PipelineElement) {}
|
||||
|
||||
/// Called when the evaluator leaves a pipeline element
|
||||
/// Called when the AST evaluator leaves a pipeline element
|
||||
#[allow(unused_variables)]
|
||||
fn leave_element(
|
||||
engine_state: &EngineState,
|
||||
|
@ -47,6 +48,27 @@ pub trait DebugContext: Clone + Copy + Debug {
|
|||
result: &Result<PipelineData, ShellError>,
|
||||
) {
|
||||
}
|
||||
|
||||
/// Called before the IR evaluator runs an instruction
|
||||
#[allow(unused_variables)]
|
||||
fn enter_instruction(
|
||||
engine_state: &EngineState,
|
||||
ir_block: &IrBlock,
|
||||
instruction_index: usize,
|
||||
registers: &[PipelineData],
|
||||
) {
|
||||
}
|
||||
|
||||
/// Called after the IR evaluator runs an instruction
|
||||
#[allow(unused_variables)]
|
||||
fn leave_instruction(
|
||||
engine_state: &EngineState,
|
||||
ir_block: &IrBlock,
|
||||
instruction_index: usize,
|
||||
registers: &[PipelineData],
|
||||
error: Option<&ShellError>,
|
||||
) {
|
||||
}
|
||||
}
|
||||
|
||||
/// Marker struct signalizing that evaluation should use a Debugger
|
||||
|
@ -85,6 +107,40 @@ impl DebugContext for WithDebug {
|
|||
.leave_element(engine_state, element, result);
|
||||
}
|
||||
}
|
||||
|
||||
fn enter_instruction(
|
||||
engine_state: &EngineState,
|
||||
ir_block: &IrBlock,
|
||||
instruction_index: usize,
|
||||
registers: &[PipelineData],
|
||||
) {
|
||||
if let Ok(mut debugger) = engine_state.debugger.lock() {
|
||||
debugger.deref_mut().enter_instruction(
|
||||
engine_state,
|
||||
ir_block,
|
||||
instruction_index,
|
||||
registers,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn leave_instruction(
|
||||
engine_state: &EngineState,
|
||||
ir_block: &IrBlock,
|
||||
instruction_index: usize,
|
||||
registers: &[PipelineData],
|
||||
error: Option<&ShellError>,
|
||||
) {
|
||||
if let Ok(mut debugger) = engine_state.debugger.lock() {
|
||||
debugger.deref_mut().leave_instruction(
|
||||
engine_state,
|
||||
ir_block,
|
||||
instruction_index,
|
||||
registers,
|
||||
error,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
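The new `enter_instruction`/`leave_instruction` hooks follow the same shape as the existing block and element hooks: default no-op methods that a debugger can override. A self-contained sketch of that hook pattern with a toy trait, not the real `Debugger` API:

```rust
/// A tracer is called around every step; default methods make every hook optional.
trait StepTracer {
    fn enter_instruction(&mut self, _index: usize) {}
    fn leave_instruction(&mut self, _index: usize, _error: Option<&str>) {}
}

/// A tracer that records which instructions ran.
#[derive(Default)]
struct RecordingTracer {
    visited: Vec<usize>,
}

impl StepTracer for RecordingTracer {
    fn enter_instruction(&mut self, index: usize) {
        self.visited.push(index);
    }
}

fn run_block(tracer: &mut dyn StepTracer, instruction_count: usize) {
    for pc in 0..instruction_count {
        tracer.enter_instruction(pc);
        // ... evaluate the instruction here ...
        tracer.leave_instruction(pc, None);
    }
}

fn main() {
    let mut tracer = RecordingTracer::default();
    run_block(&mut tracer, 3);
    assert_eq!(tracer.visited, vec![0, 1, 2]);
}
```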
/// Marker struct signalizing that evaluation should NOT use a Debugger
|
||||
|
@ -118,11 +174,11 @@ pub trait Debugger: Send + Debug {
|
|||
#[allow(unused_variables)]
|
||||
fn leave_block(&mut self, engine_state: &EngineState, block: &Block) {}
|
||||
|
||||
/// Called when the evaluator enters a pipeline element
|
||||
/// Called when the AST evaluator enters a pipeline element
|
||||
#[allow(unused_variables)]
|
||||
fn enter_element(&mut self, engine_state: &EngineState, pipeline_element: &PipelineElement) {}
|
||||
|
||||
/// Called when the evaluator leaves a pipeline element
|
||||
/// Called when the AST evaluator leaves a pipeline element
|
||||
#[allow(unused_variables)]
|
||||
fn leave_element(
|
||||
&mut self,
|
||||
|
@ -132,6 +188,29 @@ pub trait Debugger: Send + Debug {
|
|||
) {
|
||||
}
|
||||
|
||||
/// Called before the IR evaluator runs an instruction
|
||||
#[allow(unused_variables)]
|
||||
fn enter_instruction(
|
||||
&mut self,
|
||||
engine_state: &EngineState,
|
||||
ir_block: &IrBlock,
|
||||
instruction_index: usize,
|
||||
registers: &[PipelineData],
|
||||
) {
|
||||
}
|
||||
|
||||
/// Called after the IR evaluator runs an instruction
|
||||
#[allow(unused_variables)]
|
||||
fn leave_instruction(
|
||||
&mut self,
|
||||
engine_state: &EngineState,
|
||||
ir_block: &IrBlock,
|
||||
instruction_index: usize,
|
||||
registers: &[PipelineData],
|
||||
error: Option<&ShellError>,
|
||||
) {
|
||||
}
|
||||
|
||||
/// Create a final report as a Value
|
||||
///
|
||||
/// Intended to be called after deactivate()
|
||||
|
|
|
@ -7,10 +7,11 @@ use crate::{
|
|||
ast::{Block, Expr, PipelineElement},
|
||||
debugger::Debugger,
|
||||
engine::EngineState,
|
||||
ir::IrBlock,
|
||||
record, PipelineData, ShellError, Span, Value,
|
||||
};
|
||||
use std::io::BufRead;
|
||||
use std::time::Instant;
|
||||
use std::{borrow::Borrow, io::BufRead};
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
struct ElementId(usize);
|
||||
|
@ -24,6 +25,7 @@ struct ElementInfo {
|
|||
element_span: Span,
|
||||
element_output: Option<Value>,
|
||||
expr: Option<String>,
|
||||
instruction: Option<(usize, String)>,
|
||||
children: Vec<ElementId>,
|
||||
}
|
||||
|
||||
|
@ -36,57 +38,53 @@ impl ElementInfo {
|
|||
element_span,
|
||||
element_output: None,
|
||||
expr: None,
|
||||
instruction: None,
|
||||
children: vec![],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Options for [`Profiler`]
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ProfilerOptions {
|
||||
pub max_depth: i64,
|
||||
pub collect_spans: bool,
|
||||
pub collect_source: bool,
|
||||
pub collect_expanded_source: bool,
|
||||
pub collect_values: bool,
|
||||
pub collect_exprs: bool,
|
||||
pub collect_instructions: bool,
|
||||
pub collect_lines: bool,
|
||||
}
|
||||
|
||||
/// Basic profiler, used in `debug profile`
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Profiler {
|
||||
depth: i64,
|
||||
max_depth: i64,
|
||||
collect_spans: bool,
|
||||
collect_source: bool,
|
||||
collect_expanded_source: bool,
|
||||
collect_values: bool,
|
||||
collect_exprs: bool,
|
||||
collect_lines: bool,
|
||||
opts: ProfilerOptions,
|
||||
elements: Vec<ElementInfo>,
|
||||
element_stack: Vec<ElementId>,
|
||||
}
|
||||
|
||||
impl Profiler {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
max_depth: i64,
|
||||
collect_spans: bool,
|
||||
collect_source: bool,
|
||||
collect_expanded_source: bool,
|
||||
collect_values: bool,
|
||||
collect_exprs: bool,
|
||||
collect_lines: bool,
|
||||
span: Span,
|
||||
) -> Self {
|
||||
pub fn new(opts: ProfilerOptions, span: Span) -> Self {
|
||||
let first = ElementInfo {
|
||||
start: Instant::now(),
|
||||
duration_sec: 0.0,
|
||||
depth: 0,
|
||||
element_span: span,
|
||||
element_output: collect_values.then(|| Value::nothing(span)),
|
||||
expr: collect_exprs.then(|| "call".to_string()),
|
||||
element_output: opts.collect_values.then(|| Value::nothing(span)),
|
||||
expr: opts.collect_exprs.then(|| "call".to_string()),
|
||||
instruction: opts
|
||||
.collect_instructions
|
||||
.then(|| (0, "<start>".to_string())),
|
||||
children: vec![],
|
||||
};
|
||||
|
||||
Profiler {
|
||||
depth: 0,
|
||||
max_depth,
|
||||
collect_spans,
|
||||
collect_source,
|
||||
collect_expanded_source,
|
||||
collect_values,
|
||||
collect_exprs,
|
||||
collect_lines,
|
||||
opts,
|
||||
elements: vec![first],
|
||||
element_stack: vec![ElementId(0)],
|
||||
}
|
||||
|
@ -130,7 +128,7 @@ impl Debugger for Profiler {
|
|||
}
|
||||
|
||||
fn enter_element(&mut self, engine_state: &EngineState, element: &PipelineElement) {
|
||||
if self.depth > self.max_depth {
|
||||
if self.depth > self.opts.max_depth {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -140,6 +138,7 @@ impl Debugger for Profiler {
|
|||
};
|
||||
|
||||
let expr_opt = self
|
||||
.opts
|
||||
.collect_exprs
|
||||
.then(|| expr_to_string(engine_state, &element.expr.expr));
|
||||
|
||||
|
@ -165,13 +164,13 @@ impl Debugger for Profiler {
|
|||
element: &PipelineElement,
|
||||
result: &Result<PipelineData, ShellError>,
|
||||
) {
|
||||
if self.depth > self.max_depth {
|
||||
if self.depth > self.opts.max_depth {
|
||||
return;
|
||||
}
|
||||
|
||||
let element_span = element.expr.span;
|
||||
|
||||
let out_opt = self.collect_values.then(|| match result {
|
||||
let out_opt = self.opts.collect_values.then(|| match result {
|
||||
Ok(pipeline_data) => match pipeline_data {
|
||||
PipelineData::Value(val, ..) => val.clone(),
|
||||
PipelineData::ListStream(..) => Value::string("list stream", element_span),
|
||||
|
@ -192,6 +191,91 @@ impl Debugger for Profiler {
|
|||
self.element_stack.pop();
|
||||
}
|
||||
|
||||
fn enter_instruction(
|
||||
&mut self,
|
||||
engine_state: &EngineState,
|
||||
ir_block: &IrBlock,
|
||||
instruction_index: usize,
|
||||
_registers: &[PipelineData],
|
||||
) {
|
||||
if self.depth > self.opts.max_depth {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(parent_id) = self.last_element_id() else {
|
||||
eprintln!("Profiler Error: Missing parent element ID.");
|
||||
return;
|
||||
};
|
||||
|
||||
let instruction = &ir_block.instructions[instruction_index];
|
||||
let span = ir_block.spans[instruction_index];
|
||||
|
||||
let instruction_opt = self.opts.collect_instructions.then(|| {
|
||||
(
|
||||
instruction_index,
|
||||
instruction
|
||||
.display(engine_state, &ir_block.data)
|
||||
.to_string(),
|
||||
)
|
||||
});
|
||||
|
||||
let new_id = ElementId(self.elements.len());
|
||||
|
||||
let mut new_element = ElementInfo::new(self.depth, span);
|
||||
new_element.instruction = instruction_opt;
|
||||
|
||||
self.elements.push(new_element);
|
||||
|
||||
let Some(parent) = self.elements.get_mut(parent_id.0) else {
|
||||
eprintln!("Profiler Error: Missing parent element.");
|
||||
return;
|
||||
};
|
||||
|
||||
parent.children.push(new_id);
|
||||
self.element_stack.push(new_id);
|
||||
}
|
||||
|
||||
fn leave_instruction(
|
||||
&mut self,
|
||||
_engine_state: &EngineState,
|
||||
ir_block: &IrBlock,
|
||||
instruction_index: usize,
|
||||
registers: &[PipelineData],
|
||||
error: Option<&ShellError>,
|
||||
) {
|
||||
if self.depth > self.opts.max_depth {
|
||||
return;
|
||||
}
|
||||
|
||||
let instruction = &ir_block.instructions[instruction_index];
|
||||
let span = ir_block.spans[instruction_index];
|
||||
|
||||
let out_opt = self
|
||||
.opts
|
||||
.collect_values
|
||||
.then(|| {
|
||||
error
|
||||
.map(Err)
|
||||
.or_else(|| {
|
||||
instruction
|
||||
.output_register()
|
||||
.map(|register| Ok(®isters[register.0 as usize]))
|
||||
})
|
||||
.map(|result| format_result(&result, span))
|
||||
})
|
||||
.flatten();
|
||||
|
||||
let Some(last_element) = self.last_element_mut() else {
|
||||
eprintln!("Profiler Error: Missing last element.");
|
||||
return;
|
||||
};
|
||||
|
||||
last_element.duration_sec = last_element.start.elapsed().as_secs_f64();
|
||||
last_element.element_output = out_opt;
|
||||
|
||||
self.element_stack.pop();
|
||||
}
|
||||
|
||||
fn report(&self, engine_state: &EngineState, profiler_span: Span) -> Result<Value, ShellError> {
|
||||
Ok(Value::list(
|
||||
collect_data(
|
||||
|
@ -269,6 +353,21 @@ fn expr_to_string(engine_state: &EngineState, expr: &Expr) -> String {
|
|||
}
|
||||
}
|
||||
|
||||
fn format_result(
|
||||
result: &Result<impl Borrow<PipelineData>, impl Borrow<ShellError>>,
|
||||
element_span: Span,
|
||||
) -> Value {
|
||||
match result {
|
||||
Ok(pipeline_data) => match pipeline_data.borrow() {
|
||||
PipelineData::Value(val, ..) => val.clone(),
|
||||
PipelineData::ListStream(..) => Value::string("list stream", element_span),
|
||||
PipelineData::ByteStream(..) => Value::string("byte stream", element_span),
|
||||
_ => Value::nothing(element_span),
|
||||
},
|
||||
Err(e) => Value::error(e.borrow().clone(), element_span),
|
||||
}
|
||||
}
|
||||
|
||||
// Find a file name and a line number (indexed from 1) of a span
|
||||
fn find_file_of_span(engine_state: &EngineState, span: Span) -> Option<(&str, usize)> {
|
||||
for file in engine_state.files() {
|
||||
|
@ -309,7 +408,7 @@ fn collect_data(
|
|||
"parent_id" => Value::int(parent_id.0 as i64, profiler_span),
|
||||
};
|
||||
|
||||
if profiler.collect_lines {
|
||||
if profiler.opts.collect_lines {
|
||||
if let Some((fname, line_num)) = find_file_of_span(engine_state, element.element_span) {
|
||||
row.push("file", Value::string(fname, profiler_span));
|
||||
row.push("line", Value::int(line_num as i64, profiler_span));
|
||||
|
@ -319,7 +418,7 @@ fn collect_data(
|
|||
}
|
||||
}
|
||||
|
||||
if profiler.collect_spans {
|
||||
if profiler.opts.collect_spans {
|
||||
let span_start = i64::try_from(element.element_span.start)
|
||||
.map_err(|_| profiler_error("error converting span start to i64", profiler_span))?;
|
||||
let span_end = i64::try_from(element.element_span.end)
|
||||
|
@ -337,12 +436,12 @@ fn collect_data(
|
|||
);
|
||||
}
|
||||
|
||||
if profiler.collect_source {
|
||||
if profiler.opts.collect_source {
|
||||
let val = String::from_utf8_lossy(engine_state.get_span_contents(element.element_span));
|
||||
let val = val.trim();
|
||||
let nlines = val.lines().count();
|
||||
|
||||
let fragment = if profiler.collect_expanded_source {
|
||||
let fragment = if profiler.opts.collect_expanded_source {
|
||||
val.to_string()
|
||||
} else {
|
||||
let mut first_line = val.lines().next().unwrap_or("").to_string();
|
||||
|
@ -361,6 +460,17 @@ fn collect_data(
|
|||
row.push("expr", Value::string(expr_string.clone(), profiler_span));
|
||||
}
|
||||
|
||||
if let Some((instruction_index, instruction)) = &element.instruction {
|
||||
row.push(
|
||||
"pc",
|
||||
(*instruction_index)
|
||||
.try_into()
|
||||
.map(|index| Value::int(index, profiler_span))
|
||||
.unwrap_or(Value::nothing(profiler_span)),
|
||||
);
|
||||
row.push("instruction", Value::string(instruction, profiler_span));
|
||||
}
|
||||
|
||||
if let Some(val) = &element.element_output {
|
||||
row.push("output", val.clone());
|
||||
}
|
||||
|
|
|
@ -25,7 +25,6 @@ use crate::{PluginIdentity, PluginRegistryItem, RegisteredPlugin};
|
|||
pub struct StateWorkingSet<'a> {
|
||||
pub permanent_state: &'a EngineState,
|
||||
pub delta: StateDelta,
|
||||
pub external_commands: Vec<Vec<u8>>,
|
||||
pub files: FileStack,
|
||||
/// Whether or not predeclarations are searched when looking up a command (used with aliases)
|
||||
pub search_predecls: bool,
|
||||
|
@ -46,7 +45,6 @@ impl<'a> StateWorkingSet<'a> {
|
|||
Self {
|
||||
delta: StateDelta::new(permanent_state),
|
||||
permanent_state,
|
||||
external_commands: vec![],
|
||||
files,
|
||||
search_predecls: true,
|
||||
parse_errors: vec![],
|
||||
|
|
|
@ -47,6 +47,26 @@ pub fn report_error_new(
|
|||
report_error(&working_set, error);
|
||||
}
|
||||
|
||||
pub fn report_warning(
|
||||
working_set: &StateWorkingSet,
|
||||
error: &(dyn miette::Diagnostic + Send + Sync + 'static),
|
||||
) {
|
||||
eprintln!("Warning: {:?}", CliError(error, working_set));
|
||||
// reset vt processing, aka ansi because illbehaved externals can break it
|
||||
#[cfg(windows)]
|
||||
{
|
||||
let _ = nu_utils::enable_vt_processing();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn report_warning_new(
|
||||
engine_state: &EngineState,
|
||||
error: &(dyn miette::Diagnostic + Send + Sync + 'static),
|
||||
) {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, error);
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for CliError<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let config = self.1.get_config();
|
||||
|
|
|
@ -5,7 +5,9 @@ mod parse_error;
|
|||
mod parse_warning;
|
||||
mod shell_error;
|
||||
|
||||
pub use cli_error::{format_error, report_error, report_error_new};
|
||||
pub use cli_error::{
|
||||
format_error, report_error, report_error_new, report_warning, report_warning_new,
|
||||
};
|
||||
pub use compile_error::CompileError;
|
||||
pub use labeled_error::{ErrorLabel, LabeledError};
|
||||
pub use parse_error::{DidYouMean, ParseError};
|
||||
|
|
|
@@ -6,11 +6,11 @@ use thiserror::Error;
 #[derive(Clone, Debug, Error, Diagnostic, Serialize, Deserialize)]
 pub enum ParseWarning {
     #[error("Deprecated: {old_command}")]
-    #[diagnostic(help("for more info: {url}"))]
+    #[diagnostic(help("for more info see {url}"))]
     DeprecatedWarning {
         old_command: String,
         new_suggestion: String,
-        #[label("`{old_command}` is deprecated and will be removed in a future release. Please {new_suggestion} instead")]
+        #[label("`{old_command}` is deprecated and will be removed in a future release. Please {new_suggestion} instead.")]
         span: Span,
         url: String,
     },
@ -37,7 +37,7 @@ impl fmt::Debug for IrBlock {
|
|||
.field("data", &self.data)
|
||||
.field("comments", &self.comments)
|
||||
.field("register_count", &self.register_count)
|
||||
.field("file_count", &self.register_count)
|
||||
.field("file_count", &self.file_count)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
@ -278,6 +278,63 @@ impl Instruction {
|
|||
}
|
||||
}
|
||||
|
||||
/// Get the output register, for instructions that produce some kind of immediate result.
|
||||
pub fn output_register(&self) -> Option<RegId> {
|
||||
match *self {
|
||||
Instruction::Unreachable => None,
|
||||
Instruction::LoadLiteral { dst, .. } => Some(dst),
|
||||
Instruction::LoadValue { dst, .. } => Some(dst),
|
||||
Instruction::Move { dst, .. } => Some(dst),
|
||||
Instruction::Clone { dst, .. } => Some(dst),
|
||||
Instruction::Collect { src_dst } => Some(src_dst),
|
||||
Instruction::Span { src_dst } => Some(src_dst),
|
||||
Instruction::Drop { .. } => None,
|
||||
Instruction::Drain { .. } => None,
|
||||
Instruction::LoadVariable { dst, .. } => Some(dst),
|
||||
Instruction::StoreVariable { .. } => None,
|
||||
Instruction::LoadEnv { dst, .. } => Some(dst),
|
||||
Instruction::LoadEnvOpt { dst, .. } => Some(dst),
|
||||
Instruction::StoreEnv { .. } => None,
|
||||
Instruction::PushPositional { .. } => None,
|
||||
Instruction::AppendRest { .. } => None,
|
||||
Instruction::PushFlag { .. } => None,
|
||||
Instruction::PushShortFlag { .. } => None,
|
||||
Instruction::PushNamed { .. } => None,
|
||||
Instruction::PushShortNamed { .. } => None,
|
||||
Instruction::PushParserInfo { .. } => None,
|
||||
Instruction::RedirectOut { .. } => None,
|
||||
Instruction::RedirectErr { .. } => None,
|
||||
Instruction::CheckErrRedirected { .. } => None,
|
||||
Instruction::OpenFile { .. } => None,
|
||||
Instruction::WriteFile { .. } => None,
|
||||
Instruction::CloseFile { .. } => None,
|
||||
Instruction::Call { src_dst, .. } => Some(src_dst),
|
||||
Instruction::StringAppend { src_dst, .. } => Some(src_dst),
|
||||
Instruction::GlobFrom { src_dst, .. } => Some(src_dst),
|
||||
Instruction::ListPush { src_dst, .. } => Some(src_dst),
|
||||
Instruction::ListSpread { src_dst, .. } => Some(src_dst),
|
||||
Instruction::RecordInsert { src_dst, .. } => Some(src_dst),
|
||||
Instruction::RecordSpread { src_dst, .. } => Some(src_dst),
|
||||
Instruction::Not { src_dst } => Some(src_dst),
|
||||
Instruction::BinaryOp { lhs_dst, .. } => Some(lhs_dst),
|
||||
Instruction::FollowCellPath { src_dst, .. } => Some(src_dst),
|
||||
Instruction::CloneCellPath { dst, .. } => Some(dst),
|
||||
Instruction::UpsertCellPath { src_dst, .. } => Some(src_dst),
|
||||
Instruction::Jump { .. } => None,
|
||||
Instruction::BranchIf { .. } => None,
|
||||
Instruction::BranchIfEmpty { .. } => None,
|
||||
Instruction::Match { .. } => None,
|
||||
Instruction::CheckMatchGuard { .. } => None,
|
||||
Instruction::Iterate { dst, .. } => Some(dst),
|
||||
Instruction::OnError { .. } => None,
|
||||
Instruction::OnErrorInto { .. } => None,
|
||||
Instruction::PopErrorHandler => None,
|
||||
Instruction::CheckExternalFailed { dst, .. } => Some(dst),
|
||||
Instruction::ReturnEarly { .. } => None,
|
||||
Instruction::Return { .. } => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the branch target index of the instruction if this is a branching instruction.
|
||||
pub fn branch_target(&self) -> Option<usize> {
|
||||
match self {
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
#![doc = include_str!("../README.md")]
|
||||
mod alias;
|
||||
pub mod ast;
|
||||
pub mod config;
|
||||
|
|
|
@ -159,14 +159,14 @@ impl From<ByteStreamType> for Type {
|
|||
/// Try not to use this method if possible. Rather, please use [`reader`](ByteStream::reader)
|
||||
/// (or [`lines`](ByteStream::lines) if it matches the situation).
|
||||
///
|
||||
/// Additionally, there are few methods to collect a [`Bytestream`] into memory:
|
||||
/// Additionally, there are few methods to collect a [`ByteStream`] into memory:
|
||||
/// - [`into_bytes`](ByteStream::into_bytes): collects all bytes into a [`Vec<u8>`].
|
||||
/// - [`into_string`](ByteStream::into_string): collects all bytes into a [`String`], erroring if utf-8 decoding failed.
|
||||
/// - [`into_value`](ByteStream::into_value): collects all bytes into a value typed appropriately
|
||||
/// for the [type](.type_()) of this stream. If the type is [`Unknown`](ByteStreamType::Unknown),
|
||||
/// it will produce a string value if the data is valid UTF-8, or a binary value otherwise.
|
||||
///
|
||||
/// There are also a few other methods to consume all the data of a [`Bytestream`]:
|
||||
/// There are also a few other methods to consume all the data of a [`ByteStream`]:
|
||||
/// - [`drain`](ByteStream::drain): consumes all bytes and outputs nothing.
|
||||
/// - [`write_to`](ByteStream::write_to): writes all bytes to the given [`Write`] destination.
|
||||
/// - [`print`](ByteStream::print): a convenience wrapper around [`write_to`](ByteStream::write_to).
|
||||
|
|
|
@@ -635,6 +635,34 @@ impl PipelineData {
            Ok(None)
        }
    }

    pub fn unsupported_input_error(
        self,
        expected_type: impl Into<String>,
        span: Span,
    ) -> ShellError {
        match self {
            PipelineData::Empty => ShellError::PipelineEmpty { dst_span: span },
            PipelineData::Value(value, ..) => ShellError::OnlySupportsThisInputType {
                exp_input_type: expected_type.into(),
                wrong_type: value.get_type().get_non_specified_string(),
                dst_span: span,
                src_span: value.span(),
            },
            PipelineData::ListStream(stream, ..) => ShellError::OnlySupportsThisInputType {
                exp_input_type: expected_type.into(),
                wrong_type: "list (stream)".into(),
                dst_span: span,
                src_span: stream.span(),
            },
            PipelineData::ByteStream(stream, ..) => ShellError::OnlySupportsThisInputType {
                exp_input_type: expected_type.into(),
                wrong_type: stream.type_().describe().into(),
                dst_span: span,
                src_span: stream.span(),
            },
        }
    }
}

enum PipelineIteratorInner {
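For context on the new helper, a hedged sketch of a call site; the `require_string` wrapper and the `"string"` expectation are illustrative and not taken from this commit.

```rust
use nu_protocol::{PipelineData, ShellError, Span, Value};

// Illustrative only: accept a string value, and let the new helper build the
// appropriate ShellError for every other PipelineData variant.
fn require_string(input: PipelineData, call_span: Span) -> Result<String, ShellError> {
    match input {
        PipelineData::Value(Value::String { val, .. }, ..) => Ok(val),
        other => Err(other.unsupported_input_error("string", call_span)),
    }
}
```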
@@ -36,6 +36,10 @@ pub enum Type {
}

impl Type {
    pub fn list(inner: Type) -> Self {
        Self::List(Box::new(inner))
    }

    pub fn record() -> Self {
        Self::Record([].into())
    }
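A quick sketch of what the two new constructors save at call sites; the surrounding function is hypothetical.

```rust
use nu_protocol::Type;

// Hypothetical call site: the helpers replace the more verbose
// `Type::List(Box::new(..))` and `Type::Record([].into())` spellings.
fn accepted_input_types() -> Vec<Type> {
    vec![Type::list(Type::String), Type::record()]
}
```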
@@ -7,7 +7,7 @@ The standard library is a pure-`nushell` collection of custom commands which
provide interactive utilities and building blocks for users writing casual scripts or complex applications.

To see what's here:
```
```text
> use std
> scope commands | select name usage | where name =~ "std "
#┬───────────name────────────┬──────────────────────usage──────────────────────
@@ -1,3 +1,4 @@
#![doc = include_str!("../README.md")]
use log::trace;
use nu_engine::eval_block;
use nu_parser::parse;
7
crates/nu-system/README.md
Normal file
@@ -0,0 +1,7 @@
Operating system specific bindings used by Nushell.

Currently primarily wrappers around processes and ways to gather process info from the system

## Internal Nushell crate

This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
@@ -1,3 +1,4 @@
#![doc = include_str!("../README.md")]
mod foreground;

#[cfg(target_os = "freebsd")]
7
crates/nu-table/README.md
Normal file
@@ -0,0 +1,7 @@
The layout logic for Nushell's table viewer.

See also the separate `table` command implementation

## Internal Nushell crate

This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
@@ -1,3 +1,4 @@
#![doc = include_str!("../README.md")]
mod table;
mod table_theme;
mod types;
@@ -418,62 +418,7 @@ impl TableOption<NuRecords, CompleteDimensionVecRecords<'_>, ColoredConfig> for
        // we already must have been estimated that it's safe to do.
        // and all dims will be suffitient
        if self.trim_as_head {
            if recs.is_empty() {
                return;
            }

            // even though it's safe to trim columns by header there might be left unused space
            // so we do use it if possible prioritizing left columns

            let headers = recs[0].to_owned();
            let headers_widths = headers
                .iter()
                .map(CellInfo::width)
                .map(|v| v + self.pad)
                .collect::<Vec<_>>();

            let min_width_use = get_total_width2(&headers_widths, cfg);

            let mut free_width = self.width_max.saturating_sub(min_width_use);

            for (i, head_width) in headers_widths.into_iter().enumerate() {
                let head_width = head_width - self.pad;
                let column_width = self.width[i] - self.pad; // safe to assume width is bigger then paddding

                let mut use_width = head_width;
                if free_width > 0 {
                    // it's safe to assume that column_width is always bigger or equal to head_width
                    debug_assert!(column_width >= head_width);

                    let additional_width = min(free_width, column_width - head_width);
                    free_width -= additional_width;
                    use_width += additional_width;
                }

                match &self.strategy {
                    TrimStrategy::Wrap { try_to_keep_words } => {
                        let mut wrap = Width::wrap(use_width);
                        if *try_to_keep_words {
                            wrap = wrap.keep_words();
                        }

                        Modify::new(Columns::single(i))
                            .with(wrap)
                            .change(recs, cfg, dims);
                    }
                    TrimStrategy::Truncate { suffix } => {
                        let mut truncate = Width::truncate(use_width);
                        if let Some(suffix) = suffix {
                            truncate = truncate.suffix(suffix).suffix_try_color(true);
                        }

                        Modify::new(Columns::single(i))
                            .with(truncate)
                            .change(recs, cfg, dims);
                    }
                }
            }

            trim_as_header(recs, cfg, dims, self);
            return;
        }
@@ -498,6 +443,67 @@ impl TableOption<NuRecords, CompleteDimensionVecRecords<'_>, ColoredConfig> for
    }
}

fn trim_as_header(
    recs: &mut VecRecords<CellInfo<String>>,
    cfg: &mut ColoredConfig,
    dims: &mut CompleteDimensionVecRecords,
    trim: TableTrim,
) {
    if recs.is_empty() {
        return;
    }

    let headers = recs[0].to_owned();
    let headers_widths = headers
        .iter()
        .map(CellInfo::width)
        .map(|v| v + trim.pad)
        .collect::<Vec<_>>();
    let min_width_use = get_total_width2(&headers_widths, cfg);
    let mut free_width = trim.width_max.saturating_sub(min_width_use);

    // even though it's safe to trim columns by header there might be left unused space
    // so we do use it if possible prioritizing left columns

    for (i, head_width) in headers_widths.into_iter().enumerate() {
        let head_width = head_width - trim.pad;
        let column_width = trim.width[i] - trim.pad; // safe to assume width is bigger then paddding

        let mut use_width = head_width;
        if free_width > 0 {
            // it's safe to assume that column_width is always bigger or equal to head_width
            debug_assert!(column_width >= head_width);

            let additional_width = min(free_width, column_width - head_width);
            free_width -= additional_width;
            use_width += additional_width;
        }

        match &trim.strategy {
            TrimStrategy::Wrap { try_to_keep_words } => {
                let mut wrap = Width::wrap(use_width);
                if *try_to_keep_words {
                    wrap = wrap.keep_words();
                }

                Modify::new(Columns::single(i))
                    .with(wrap)
                    .change(recs, cfg, dims);
            }
            TrimStrategy::Truncate { suffix } => {
                let mut truncate = Width::truncate(use_width);
                if let Some(suffix) = suffix {
                    truncate = truncate.suffix(suffix).suffix_try_color(true);
                }

                Modify::new(Columns::single(i))
                    .with(truncate)
                    .change(recs, cfg, dims);
            }
        }
    }
}

fn align_table(
    table: &mut Table,
    alignments: Alignments,
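The comments in `trim_as_header` describe how leftover terminal width is handed out; below is a standalone sketch of that policy under invented names (`distribute_widths` and its slice arguments are not part of this diff).

```rust
use std::cmp::min;

// Each column gets at least its header width; remaining space is granted
// left to right, capped by how much wider the column's content actually is.
fn distribute_widths(width_max: usize, head: &[usize], content: &[usize]) -> Vec<usize> {
    let mut free = width_max.saturating_sub(head.iter().sum());
    head.iter()
        .zip(content)
        .map(|(&head_width, &column_width)| {
            let extra = min(free, column_width.saturating_sub(head_width));
            free -= extra;
            head_width + extra
        })
        .collect()
}
```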
@@ -793,14 +799,14 @@ fn truncate_columns_by_head(
    let mut truncate_pos = 0;
    for (i, column_header) in head.iter().enumerate() {
        let column_header_width = Cell::width(column_header);
        width += column_header_width;
        width += column_header_width + pad;

        if i > 0 {
            width += has_vertical as usize;
        }

        if width >= termwidth {
            width -= column_header_width + (i > 0 && has_vertical) as usize;
            width -= column_header_width + (i > 0 && has_vertical) as usize + pad;
            break;
        }
5
crates/nu-term-grid/README.md
Normal file
@@ -0,0 +1,5 @@
Implementation of the layout engine for the `grid` command.

## Internal Nushell crate

This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
@@ -1,3 +1,4 @@
#![doc = include_str!("../README.md")]
pub mod grid;

pub use grid::Grid;
7
crates/nu-test-support/README.md
Normal file
@@ -0,0 +1,7 @@
This crate provides utilities for testing of Nushell

Plugin authors should instead refer to `nu-plugin-test-support`

## Internal Nushell crate

This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
@@ -102,13 +102,12 @@ pub fn fixtures() -> AbsolutePathBuf {
    path
}

// FIXME: re-enable nu_json tests
// pub fn assets() -> AbsolutePathBuf {
//     let mut path = root();
//     path.push("tests");
//     path.push("assets");
//     path
// }
pub fn assets() -> AbsolutePathBuf {
    let mut path = root();
    path.push("tests");
    path.push("assets");
    path
}

pub fn in_directory(path: impl AsRef<nu_path::Path>) -> AbsolutePathBuf {
    root().join(path)
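A hedged sketch of a test using the re-enabled helper; the `nu_test_support::fs` module path and the asset file name are assumptions for illustration.

```rust
// Hypothetical test: both the module path and the `utf8.hjson` file name are
// assumed, not taken from this commit.
#[test]
fn reads_a_bundled_asset() {
    let sample = nu_test_support::fs::assets().join("utf8.hjson");
    assert!(sample.exists());
}
```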
@@ -1,3 +1,4 @@
#![doc = include_str!("../README.md")]
pub mod commands;
pub mod fs;
pub mod locale_override;
6
crates/nu-utils/README.md
Normal file
@@ -0,0 +1,6 @@
Collection of small utilities that are shared across Nushell crates.

This crate should compile early in the crate graph and thus not depend on major dependencies or core-nushell crates itself.
## Internal Nushell crate

This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
@@ -1,3 +1,4 @@
#![doc = include_str!("../README.md")]
mod casing;
mod deansi;
pub mod emoji;
@@ -4,7 +4,7 @@ The NUON format is a superset of JSON designed to fit the feel of Nushell.
Some of its extra features are
- trailing commas are allowed
- commas are optional in lists
- quotes are not required around keys or any _bare_ string that do not contain spaces
- quotes are not required around keys or any _bare_ string that do not contain spaces or special characters
- comments are allowed, though not preserved when using [`from_nuon`]

## Example
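A small Rust-side sketch exercising the features listed above; the `nuon::from_nuon(input, span)` shape used here is an assumption about the crate this README belongs to.

```rust
use nu_protocol::Span;

fn main() {
    // Bare strings, optional commas in the list, a trailing comma, and a
    // comment, all per the feature list above. The signature is assumed.
    let text = "{ name: nushell, langs: [rust nu,] } # comments are dropped";
    match nuon::from_nuon(text, Some(Span::unknown())) {
        Ok(value) => println!("{value:?}"),
        Err(err) => eprintln!("{err:?}"),
    }
}
```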
@@ -100,7 +100,7 @@ fn main() -> Result<()> {
            },
        );
    } else if let Some(old_config) =
        nu_path::get_canonicalized_path(dirs_next::config_dir()).map(|p| p.join("nushell"))
        nu_path::get_canonicalized_path(dirs::config_dir()).map(|p| p.join("nushell"))
    {
        let xdg_config_empty = nushell_config_path
            .read_dir()
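The replacement above is mechanical; `dirs` 5.x keeps the `config_dir()` shape that `dirs-next` exposed. In isolation:

```rust
// `dirs::config_dir()` returns Option<PathBuf>, just like the dirs-next call
// it replaces; joining "nushell" mirrors the usage in the hunk above.
fn nushell_config_dir() -> Option<std::path::PathBuf> {
    dirs::config_dir().map(|dir| dir.join("nushell"))
}
```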
@@ -235,7 +235,7 @@ fn test_xdg_config_empty() {
        playground.with_env("XDG_CONFIG_HOME", "");

        let actual = run(playground, "$nu.default-config-dir");
        let expected = dirs_next::config_dir().unwrap().join("nushell");
        let expected = dirs::config_dir().unwrap().join("nushell");
        assert_eq!(
            actual,
            adjust_canonicalization(expected.canonicalize().unwrap_or(expected))
@@ -250,7 +250,7 @@ fn test_xdg_config_bad() {
        playground.with_env("XDG_CONFIG_HOME", xdg_config_home);

        let actual = run(playground, "$nu.default-config-dir");
        let expected = dirs_next::config_dir().unwrap().join("nushell");
        let expected = dirs::config_dir().unwrap().join("nushell");
        assert_eq!(
            actual,
            adjust_canonicalization(expected.canonicalize().unwrap_or(expected))