Rename Interrupt to Signals

Ian Manske 2024-07-06 19:27:55 -07:00
parent e659213fc1
commit 9d00d4d6fe
240 changed files with 781 additions and 810 deletions
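This is a mechanical rename across the tree: the `Interrupt` type becomes `Signals`, and the matching accessors follow (`engine_state.interrupt()` → `engine_state.signals()`, `set_interrupt` → `set_signals`, `reset_interrupt` → `reset_signals`, `copy_with_interrupt` → `copy_with_signals`). As a reading aid, here is a minimal sketch of what the renamed handle looks like, reconstructed only from the call sites in the hunks below (`Signals::new`, `Signals::empty`, `.check(span)`, `.interrupted()`); the field layout and the error variant are assumptions, not part of this commit:

```rust
use std::sync::{
    atomic::{AtomicBool, Ordering},
    Arc,
};

use nu_protocol::{ShellError, Span};

// Hypothetical shape of the renamed type, inferred from how it is called in the hunks
// below. The field layout and the exact ShellError variant are assumptions.
#[derive(Clone)]
pub struct Signals {
    interrupt: Option<Arc<AtomicBool>>,
}

impl Signals {
    /// Wrap the engine's shared Ctrl-C flag (as in the benchmark hunk below).
    pub fn new(interrupt: Arc<AtomicBool>) -> Self {
        Self { interrupt: Some(interrupt) }
    }

    /// A handle that can never be interrupted (used for tests, serde defaults, inner iterators).
    pub fn empty() -> Self {
        Self { interrupt: None }
    }

    /// True once an interrupt (Ctrl-C) has been flagged.
    pub fn interrupted(&self) -> bool {
        self.interrupt
            .as_ref()
            .is_some_and(|flag| flag.load(Ordering::Relaxed))
    }

    /// Turn a pending interrupt into an error pointing at the interrupted command.
    pub fn check(&self, span: Span) -> Result<(), ShellError> {
        if self.interrupted() {
            Err(ShellError::Interrupted { span }) // variant name assumed for illustration
        } else {
            Ok(())
        }
    }
}
```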

View File

@ -4,7 +4,7 @@ use nu_plugin_protocol::{PluginCallResponse, PluginOutput};
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, Stack}, engine::{EngineState, Stack},
Interrupt, PipelineData, Span, Spanned, Value, PipelineData, Signals, Span, Spanned, Value,
}; };
use nu_std::load_standard_library; use nu_std::load_standard_library;
use nu_utils::{get_default_config, get_default_env}; use nu_utils::{get_default_config, get_default_env};
@ -253,7 +253,7 @@ fn bench_eval_interleave(n: i32) -> impl IntoBenchmarks {
fn bench_eval_interleave_with_interrupt(n: i32) -> impl IntoBenchmarks { fn bench_eval_interleave_with_interrupt(n: i32) -> impl IntoBenchmarks {
let mut engine = setup_engine(); let mut engine = setup_engine();
engine.set_interrupt(Interrupt::new(Arc::new(AtomicBool::new(false)))); engine.set_signals(Signals::new(Arc::new(AtomicBool::new(false))));
let stack = Stack::new(); let stack = Stack::new();
bench_command( bench_command(
&format!("eval_interleave_with_interrupt_{n}"), &format!("eval_interleave_with_interrupt_{n}"),

View File

@ -47,7 +47,7 @@ impl Command for History {
if let Some(config_path) = nu_path::config_dir() { if let Some(config_path) = nu_path::config_dir() {
let clear = call.has_flag(engine_state, stack, "clear")?; let clear = call.has_flag(engine_state, stack, "clear")?;
let long = call.has_flag(engine_state, stack, "long")?; let long = call.has_flag(engine_state, stack, "long")?;
let interrupt = engine_state.interrupt().clone(); let signals = engine_state.signals().clone();
let mut history_path = config_path; let mut history_path = config_path;
history_path.push("nushell"); history_path.push("nushell");
@ -107,7 +107,7 @@ impl Command for History {
file: history_path.display().to_string(), file: history_path.display().to_string(),
span: head, span: head,
})? })?
.into_pipeline_data(head, interrupt)), .into_pipeline_data(head, signals)),
HistoryFileFormat::Sqlite => Ok(history_reader HistoryFileFormat::Sqlite => Ok(history_reader
.and_then(|h| { .and_then(|h| {
h.search(SearchQuery::everything(SearchDirection::Forward, None)) h.search(SearchQuery::everything(SearchDirection::Forward, None))
@ -122,7 +122,7 @@ impl Command for History {
file: history_path.display().to_string(), file: history_path.display().to_string(),
span: head, span: head,
})? })?
.into_pipeline_data(head, interrupt)), .into_pipeline_data(head, signals)),
} }
} }
} else { } else {

View File

@ -32,7 +32,7 @@ impl Command for NuHighlight {
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let head = call.head; let head = call.head;
let interrupt = engine_state.interrupt(); let signals = engine_state.signals();
let engine_state = std::sync::Arc::new(engine_state.clone()); let engine_state = std::sync::Arc::new(engine_state.clone());
let config = engine_state.get_config().clone(); let config = engine_state.get_config().clone();
@ -50,7 +50,7 @@ impl Command for NuHighlight {
} }
Err(err) => Value::error(err, head), Err(err) => Value::error(err, head),
}, },
interrupt, signals,
) )
} }

View File

@ -271,8 +271,8 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
perf!("merge env", start_time, use_color); perf!("merge env", start_time, use_color);
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
engine_state.reset_interrupt(); engine_state.reset_signals();
perf!("reset interrupt", start_time, use_color); perf!("reset signals", start_time, use_color);
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
// Right before we start our prompt and take input from the user, // Right before we start our prompt and take input from the user,

View File

@ -1,4 +1,4 @@
use nu_protocol::{ast::CellPath, Interrupt, PipelineData, ShellError, Span, Value}; use nu_protocol::{ast::CellPath, PipelineData, ShellError, Signals, Span, Value};
use std::sync::Arc; use std::sync::Arc;
pub trait CmdArgument { pub trait CmdArgument {
@ -40,7 +40,7 @@ pub fn operate<C, A>(
mut arg: A, mut arg: A,
input: PipelineData, input: PipelineData,
span: Span, span: Span,
interrupt: &Interrupt, signals: &Signals,
) -> Result<PipelineData, ShellError> ) -> Result<PipelineData, ShellError>
where where
A: CmdArgument + Send + Sync + 'static, A: CmdArgument + Send + Sync + 'static,
@ -55,7 +55,7 @@ where
_ => cmd(&v, &arg, span), _ => cmd(&v, &arg, span),
} }
}, },
interrupt, signals,
), ),
Some(column_paths) => { Some(column_paths) => {
let arg = Arc::new(arg); let arg = Arc::new(arg);
@ -79,7 +79,7 @@ where
} }
v v
}, },
interrupt, signals,
) )
} }
} }
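The shared `operate` helper above now threads a `&Signals` handle into `input.map(...)`, which is why nearly every caller below passes `engine_state.signals()`. A toy sketch (not nushell's actual `map` implementation) of what such a signal-aware mapping adapter can look like, assuming the `Signals::check` behavior sketched at the top of this page:

```rust
use nu_protocol::{ShellError, Signals, Span, Value};

// Toy adapter: apply `f` to each pipeline value, but stop with an error between items
// once the Signals handle reports an interrupt. Illustrative only.
fn map_with_signals<I, F>(
    iter: I,
    mut f: F,
    signals: Signals,
    span: Span,
) -> impl Iterator<Item = Result<Value, ShellError>>
where
    I: Iterator<Item = Value>,
    F: FnMut(Value) -> Value,
{
    iter.map(move |value| {
        signals.check(span)?; // bail out once Ctrl-C has been flagged
        Ok(f(value))
    })
}
```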

View File

@ -79,7 +79,7 @@ impl Command for BitsAnd {
input.map( input.map(
move |value| binary_op(&value, &target, little_endian, |(l, r)| l & r, head), move |value| binary_op(&value, &target, little_endian, |(l, r)| l & r, head),
engine_state.interrupt(), engine_state.signals(),
) )
} }

View File

@ -123,7 +123,7 @@ fn into_bits(
Ok(Value::binary(stream.into_bytes()?, head).into_pipeline_data()) Ok(Value::binary(stream.into_bytes()?, head).into_pipeline_data())
} else { } else {
let args = Arguments { cell_paths }; let args = Arguments { cell_paths };
operate(action, args, input, call.head, engine_state.interrupt()) operate(action, args, input, call.head, engine_state.signals())
} }
} }

View File

@ -82,7 +82,7 @@ impl Command for BitsNot {
number_size, number_size,
}; };
operate(action, args, input, head, engine_state.interrupt()) operate(action, args, input, head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -80,7 +80,7 @@ impl Command for BitsOr {
input.map( input.map(
move |value| binary_op(&value, &target, little_endian, |(l, r)| l | r, head), move |value| binary_op(&value, &target, little_endian, |(l, r)| l | r, head),
engine_state.interrupt(), engine_state.signals(),
) )
} }

View File

@ -86,7 +86,7 @@ impl Command for BitsRol {
bits, bits,
}; };
operate(action, args, input, head, engine_state.interrupt()) operate(action, args, input, head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -86,7 +86,7 @@ impl Command for BitsRor {
bits, bits,
}; };
operate(action, args, input, head, engine_state.interrupt()) operate(action, args, input, head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -88,7 +88,7 @@ impl Command for BitsShl {
bits, bits,
}; };
operate(action, args, input, head, engine_state.interrupt()) operate(action, args, input, head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -88,7 +88,7 @@ impl Command for BitsShr {
bits, bits,
}; };
operate(action, args, input, head, engine_state.interrupt()) operate(action, args, input, head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -80,7 +80,7 @@ impl Command for BitsXor {
input.map( input.map(
move |value| binary_op(&value, &target, little_endian, |(l, r)| l ^ r, head), move |value| binary_op(&value, &target, little_endian, |(l, r)| l ^ r, head),
engine_state.interrupt(), engine_state.signals(),
) )
} }

View File

@ -59,7 +59,7 @@ fn fmt(
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?; let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let args = CellPathOnlyArgs::from(cell_paths); let args = CellPathOnlyArgs::from(cell_paths);
operate(action, args, input, call.head, engine_state.interrupt()) operate(action, args, input, call.head, engine_state.signals())
} }
fn action(input: &Value, _args: &CellPathOnlyArgs, span: Span) -> Value { fn action(input: &Value, _args: &CellPathOnlyArgs, span: Span) -> Value {

View File

@ -89,7 +89,7 @@ impl Command for EachWhile {
} }
}) })
.fuse() .fuse()
.into_pipeline_data(head, engine_state.interrupt().clone())) .into_pipeline_data(head, engine_state.signals().clone()))
} }
PipelineData::ByteStream(stream, ..) => { PipelineData::ByteStream(stream, ..) => {
let span = stream.span(); let span = stream.span();
@ -107,7 +107,7 @@ impl Command for EachWhile {
} }
}) })
.fuse() .fuse()
.into_pipeline_data(head, engine_state.interrupt().clone())) .into_pipeline_data(head, engine_state.signals().clone()))
} else { } else {
Ok(PipelineData::Empty) Ok(PipelineData::Empty)
} }

View File

@ -108,7 +108,7 @@ impl Command for UpdateCells {
columns, columns,
span: head, span: head,
} }
.into_pipeline_data(head, engine_state.interrupt().clone()) .into_pipeline_data(head, engine_state.signals().clone())
.set_metadata(metadata)) .set_metadata(metadata))
} }
} }

View File

@ -45,7 +45,7 @@ impl Command for SubCommand {
} }
input.map( input.map(
move |value| operate(value, head, use_degrees), move |value| operate(value, head, use_degrees),
engine_state.interrupt(), engine_state.signals(),
) )
} }

View File

@ -41,7 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) { if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head }); return Err(ShellError::PipelineEmpty { dst_span: head });
} }
input.map(move |value| operate(value, head), engine_state.interrupt()) input.map(move |value| operate(value, head), engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -45,7 +45,7 @@ impl Command for SubCommand {
} }
input.map( input.map(
move |value| operate(value, head, use_degrees), move |value| operate(value, head, use_degrees),
engine_state.interrupt(), engine_state.signals(),
) )
} }

View File

@ -41,7 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) { if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head }); return Err(ShellError::PipelineEmpty { dst_span: head });
} }
input.map(move |value| operate(value, head), engine_state.interrupt()) input.map(move |value| operate(value, head), engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -45,7 +45,7 @@ impl Command for SubCommand {
} }
input.map( input.map(
move |value| operate(value, head, use_degrees), move |value| operate(value, head, use_degrees),
engine_state.interrupt(), engine_state.signals(),
) )
} }

View File

@ -41,7 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) { if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head }); return Err(ShellError::PipelineEmpty { dst_span: head });
} }
input.map(move |value| operate(value, head), engine_state.interrupt()) input.map(move |value| operate(value, head), engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -44,7 +44,7 @@ impl Command for SubCommand {
} }
input.map( input.map(
move |value| operate(value, head, use_degrees), move |value| operate(value, head, use_degrees),
engine_state.interrupt(), engine_state.signals(),
) )
} }

View File

@ -41,7 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) { if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head }); return Err(ShellError::PipelineEmpty { dst_span: head });
} }
input.map(move |value| operate(value, head), engine_state.interrupt()) input.map(move |value| operate(value, head), engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -41,7 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) { if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head }); return Err(ShellError::PipelineEmpty { dst_span: head });
} }
input.map(move |value| operate(value, head), engine_state.interrupt()) input.map(move |value| operate(value, head), engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -41,7 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) { if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head }); return Err(ShellError::PipelineEmpty { dst_span: head });
} }
input.map(move |value| operate(value, head), engine_state.interrupt()) input.map(move |value| operate(value, head), engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -44,7 +44,7 @@ impl Command for SubCommand {
} }
input.map( input.map(
move |value| operate(value, head, use_degrees), move |value| operate(value, head, use_degrees),
engine_state.interrupt(), engine_state.signals(),
) )
} }

View File

@ -41,7 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) { if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head }); return Err(ShellError::PipelineEmpty { dst_span: head });
} }
input.map(move |value| operate(value, head), engine_state.interrupt()) input.map(move |value| operate(value, head), engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -44,7 +44,7 @@ impl Command for SubCommand {
} }
input.map( input.map(
move |value| operate(value, head, use_degrees), move |value| operate(value, head, use_degrees),
engine_state.interrupt(), engine_state.signals(),
) )
} }

View File

@ -41,7 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) { if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head }); return Err(ShellError::PipelineEmpty { dst_span: head });
} }
input.map(move |value| operate(value, head), engine_state.interrupt()) input.map(move |value| operate(value, head), engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -140,7 +140,7 @@ fn operate(
ret ret
} }
}, },
engine_state.interrupt(), engine_state.signals(),
) )
} }

View File

@ -88,7 +88,7 @@ pub fn operate(
cell_paths, cell_paths,
}; };
general_operate(action, args, input, call.head, engine_state.interrupt()) general_operate(action, args, input, call.head, engine_state.signals())
} }
fn action( fn action(

View File

@ -223,7 +223,7 @@ fn format(
Ok(ListStream::new( Ok(ListStream::new(
list.into_iter(), list.into_iter(),
head_span, head_span,
engine_state.interrupt().clone(), engine_state.signals().clone(),
) )
.into()) .into())
} }

View File

@ -44,7 +44,7 @@ where
case_operation, case_operation,
cell_paths, cell_paths,
}; };
general_operate(action, args, input, call.head, engine_state.interrupt()) general_operate(action, args, input, call.head, engine_state.signals())
} }
fn action<F>(input: &Value, args: &Arguments<F>, head: Span) -> Value fn action<F>(input: &Value, args: &Arguments<F>, head: Span) -> Value

View File

@ -1,5 +1,5 @@
use nu_engine::{command_prelude::*, get_eval_block, get_eval_expression}; use nu_engine::{command_prelude::*, get_eval_block, get_eval_expression};
use nu_protocol::{engine::CommandType, Interrupt}; use nu_protocol::{engine::CommandType, Signals};
#[derive(Clone)] #[derive(Clone)]
pub struct For; pub struct For;
@ -81,7 +81,7 @@ impl Command for For {
match value { match value {
Value::List { vals, .. } => { Value::List { vals, .. } => {
for x in vals.into_iter() { for x in vals.into_iter() {
engine_state.interrupt().check(head)?; engine_state.signals().check(head)?;
// with_env() is used here to ensure that each iteration uses // with_env() is used here to ensure that each iteration uses
// a different set of environment variables. // a different set of environment variables.
@ -113,8 +113,8 @@ impl Command for For {
} }
} }
Value::Range { val, .. } => { Value::Range { val, .. } => {
for x in val.into_range_iter(span, Interrupt::empty()) { for x in val.into_range_iter(span, Signals::empty()) {
engine_state.interrupt().check(head)?; engine_state.signals().check(head)?;
stack.add_var(var_id, x); stack.add_var(var_id, x);
match eval_block(&engine_state, stack, block, PipelineData::empty()) { match eval_block(&engine_state, stack, block, PipelineData::empty()) {
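`for` (above) and `loop`/`while` (next hunks) share the same shape: cancellation is polled once per iteration through the engine's handle, while inner iterators such as the range iterator get a non-cancellable `Signals::empty()`. A small, self-contained restatement of that pattern; the helper name and signature are illustrative, not from this commit:

```rust
use nu_protocol::{ShellError, Signals, Span, Value};

// Illustrative only: run `body` for each item, checking the engine's Signals handle
// between iterations so a Ctrl-C ends the loop with an error pointing at `head`.
fn run_body_for_each(
    items: impl Iterator<Item = Value>,
    engine_signals: &Signals,
    head: Span,
    mut body: impl FnMut(Value) -> Result<(), ShellError>,
) -> Result<(), ShellError> {
    for item in items {
        engine_signals.check(head)?;
        body(item)?;
    }
    Ok(())
}
```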

View File

@ -50,7 +50,7 @@ impl Command for Loop {
let stack = &mut stack.push_redirection(None, None); let stack = &mut stack.push_redirection(None, None);
loop { loop {
engine_state.interrupt().check(head)?; engine_state.signals().check(head)?;
match eval_block(engine_state, stack, block, PipelineData::empty()) { match eval_block(engine_state, stack, block, PipelineData::empty()) {
Err(ShellError::Break { .. }) => { Err(ShellError::Break { .. }) => {

View File

@ -60,7 +60,7 @@ impl Command for While {
let stack = &mut stack.push_redirection(None, None); let stack = &mut stack.push_redirection(None, None);
loop { loop {
engine_state.interrupt().check(head)?; engine_state.signals().check(head)?;
let result = eval_expression(engine_state, stack, cond)?; let result = eval_expression(engine_state, stack, cond)?;

View File

@ -78,7 +78,7 @@ impl Command for BytesAdd {
end, end,
cell_paths, cell_paths,
}; };
operate(add, arg, input, call.head, engine_state.interrupt()) operate(add, arg, input, call.head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -83,7 +83,7 @@ impl Command for BytesAt {
cell_paths, cell_paths,
}; };
operate(action, args, input, call.head, engine_state.interrupt()) operate(action, args, input, call.head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -63,7 +63,7 @@ impl Command for BytesCollect {
let output = ByteStream::from_result_iter( let output = ByteStream::from_result_iter(
iter, iter,
span, span,
engine_state.interrupt().clone(), engine_state.signals().clone(),
ByteStreamType::Binary, ByteStreamType::Binary,
); );

View File

@ -102,7 +102,7 @@ impl Command for BytesEndsWith {
pattern, pattern,
cell_paths, cell_paths,
}; };
operate(ends_with, arg, input, head, engine_state.interrupt()) operate(ends_with, arg, input, head, engine_state.signals())
} }
} }

View File

@ -71,7 +71,7 @@ impl Command for BytesIndexOf {
all: call.has_flag(engine_state, stack, "all")?, all: call.has_flag(engine_state, stack, "all")?,
cell_paths, cell_paths,
}; };
operate(index_of, arg, input, call.head, engine_state.interrupt()) operate(index_of, arg, input, call.head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -46,7 +46,7 @@ impl Command for BytesLen {
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 1)?; let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 1)?;
let arg = CellPathOnlyArgs::from(cell_paths); let arg = CellPathOnlyArgs::from(cell_paths);
operate(length, arg, input, call.head, engine_state.interrupt()) operate(length, arg, input, call.head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -73,7 +73,7 @@ impl Command for BytesRemove {
all: call.has_flag(engine_state, stack, "all")?, all: call.has_flag(engine_state, stack, "all")?,
}; };
operate(remove, arg, input, call.head, engine_state.interrupt()) operate(remove, arg, input, call.head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -73,7 +73,7 @@ impl Command for BytesReplace {
all: call.has_flag(engine_state, stack, "all")?, all: call.has_flag(engine_state, stack, "all")?,
}; };
operate(replace, arg, input, call.head, engine_state.interrupt()) operate(replace, arg, input, call.head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -42,7 +42,7 @@ impl Command for BytesReverse {
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?; let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let arg = CellPathOnlyArgs::from(cell_paths); let arg = CellPathOnlyArgs::from(cell_paths);
operate(reverse, arg, input, call.head, engine_state.interrupt()) operate(reverse, arg, input, call.head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -79,7 +79,7 @@ impl Command for BytesStartsWith {
pattern, pattern,
cell_paths, cell_paths,
}; };
operate(starts_with, arg, input, head, engine_state.interrupt()) operate(starts_with, arg, input, head, engine_state.signals())
} }
} }

View File

@ -165,7 +165,7 @@ fn fill(
cell_paths, cell_paths,
}; };
operate(action, arg, input, call.head, engine_state.interrupt()) operate(action, arg, input, call.head, engine_state.signals())
} }
fn action(input: &Value, args: &Arguments, span: Span) -> Value { fn action(input: &Value, args: &Arguments, span: Span) -> Value {

View File

@ -138,7 +138,7 @@ fn into_binary(
cell_paths, cell_paths,
compact: call.has_flag(engine_state, stack, "compact")?, compact: call.has_flag(engine_state, stack, "compact")?,
}; };
operate(action, args, input, head, engine_state.interrupt()) operate(action, args, input, head, engine_state.signals())
} }
} }

View File

@ -107,7 +107,7 @@ fn into_bool(
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?; let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let args = CellPathOnlyArgs::from(cell_paths); let args = CellPathOnlyArgs::from(cell_paths);
operate(action, args, input, call.head, engine_state.interrupt()) operate(action, args, input, call.head, engine_state.signals())
} }
fn string_to_boolean(s: &str, span: Span) -> Result<bool, ShellError> { fn string_to_boolean(s: &str, span: Span) -> Result<bool, ShellError> {

View File

@ -141,7 +141,7 @@ impl Command for SubCommand {
zone_options, zone_options,
cell_paths, cell_paths,
}; };
operate(action, args, input, call.head, engine_state.interrupt()) operate(action, args, input, call.head, engine_state.signals())
} }
} }

View File

@ -166,7 +166,7 @@ fn into_duration(
ret ret
} }
}, },
engine_state.interrupt(), engine_state.signals(),
) )
} }

View File

@ -68,7 +68,7 @@ impl Command for SubCommand {
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?; let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let args = CellPathOnlyArgs::from(cell_paths); let args = CellPathOnlyArgs::from(cell_paths);
operate(action, args, input, call.head, engine_state.interrupt()) operate(action, args, input, call.head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -49,7 +49,7 @@ impl Command for SubCommand {
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?; let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let args = CellPathOnlyArgs::from(cell_paths); let args = CellPathOnlyArgs::from(cell_paths);
operate(action, args, input, call.head, engine_state.interrupt()) operate(action, args, input, call.head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -87,7 +87,7 @@ fn glob_helper(
Ok(Value::glob(stream.into_string()?, false, head).into_pipeline_data()) Ok(Value::glob(stream.into_string()?, false, head).into_pipeline_data())
} else { } else {
let args = Arguments { cell_paths }; let args = Arguments { cell_paths };
operate(action, args, input, head, engine_state.interrupt()) operate(action, args, input, head, engine_state.signals())
} }
} }

View File

@ -158,7 +158,7 @@ impl Command for SubCommand {
signed, signed,
cell_paths, cell_paths,
}; };
operate(action, args, input, call.head, engine_state.interrupt()) operate(action, args, input, call.head, engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -125,7 +125,7 @@ fn into_record(
), ),
}, },
Value::Range { val, .. } => Value::record( Value::Range { val, .. } => Value::record(
val.into_range_iter(span, engine_state.interrupt().clone()) val.into_range_iter(span, engine_state.signals().clone())
.enumerate() .enumerate()
.map(|(idx, val)| (format!("{idx}"), val)) .map(|(idx, val)| (format!("{idx}"), val))
.collect(), .collect(),

View File

@ -180,7 +180,7 @@ fn string_helper(
cell_paths, cell_paths,
config, config,
}; };
operate(action, args, input, head, engine_state.interrupt()) operate(action, args, input, head, engine_state.signals())
} }
} }

View File

@ -79,7 +79,7 @@ impl Command for IntoValue {
display_as_filesizes, display_as_filesizes,
span, span,
} }
.into_pipeline_data(span, engine_state.interrupt().clone()) .into_pipeline_data(span, engine_state.signals().clone())
.set_metadata(metadata)) .set_metadata(metadata))
} }
} }

View File

@ -2,7 +2,7 @@ use crate::database::values::sqlite::{open_sqlite_db, values_to_sql};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use itertools::Itertools; use itertools::Itertools;
use nu_protocol::Interrupt; use nu_protocol::Signals;
use std::path::Path; use std::path::Path;
pub const DEFAULT_TABLE_NAME: &str = "main"; pub const DEFAULT_TABLE_NAME: &str = "main";
@ -183,18 +183,18 @@ fn operate(
let file_name: Spanned<String> = call.req(engine_state, stack, 0)?; let file_name: Spanned<String> = call.req(engine_state, stack, 0)?;
let table_name: Option<Spanned<String>> = call.get_flag(engine_state, stack, "table-name")?; let table_name: Option<Spanned<String>> = call.get_flag(engine_state, stack, "table-name")?;
let table = Table::new(&file_name, table_name)?; let table = Table::new(&file_name, table_name)?;
Ok(action(input, table, span, engine_state.interrupt())?.into_pipeline_data()) Ok(action(input, table, span, engine_state.signals())?.into_pipeline_data())
} }
fn action( fn action(
input: PipelineData, input: PipelineData,
table: Table, table: Table,
span: Span, span: Span,
interrupt: &Interrupt, signals: &Signals,
) -> Result<Value, ShellError> { ) -> Result<Value, ShellError> {
match input { match input {
PipelineData::ListStream(stream, _) => { PipelineData::ListStream(stream, _) => {
insert_in_transaction(stream.into_iter(), span, table, interrupt) insert_in_transaction(stream.into_iter(), span, table, signals)
} }
PipelineData::Value( PipelineData::Value(
Value::List { Value::List {
@ -202,9 +202,9 @@ fn action(
internal_span, internal_span,
}, },
_, _,
) => insert_in_transaction(vals.into_iter(), internal_span, table, interrupt), ) => insert_in_transaction(vals.into_iter(), internal_span, table, signals),
PipelineData::Value(val, _) => { PipelineData::Value(val, _) => {
insert_in_transaction(std::iter::once(val), span, table, interrupt) insert_in_transaction(std::iter::once(val), span, table, signals)
} }
_ => Err(ShellError::OnlySupportsThisInputType { _ => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list".into(), exp_input_type: "list".into(),
@ -219,7 +219,7 @@ fn insert_in_transaction(
stream: impl Iterator<Item = Value>, stream: impl Iterator<Item = Value>,
span: Span, span: Span,
mut table: Table, mut table: Table,
interrupt: &Interrupt, signals: &Signals,
) -> Result<Value, ShellError> { ) -> Result<Value, ShellError> {
let mut stream = stream.peekable(); let mut stream = stream.peekable();
let first_val = match stream.peek() { let first_val = match stream.peek() {
@ -241,7 +241,7 @@ fn insert_in_transaction(
let tx = table.try_init(&first_val)?; let tx = table.try_init(&first_val)?;
for stream_value in stream { for stream_value in stream {
if let Err(err) = interrupt.check(span) { if let Err(err) = signals.check(span) {
tx.rollback().map_err(|e| ShellError::GenericError { tx.rollback().map_err(|e| ShellError::GenericError {
error: "Failed to rollback SQLite transaction".into(), error: "Failed to rollback SQLite transaction".into(),
msg: e.to_string(), msg: e.to_string(),

View File

@ -2,7 +2,7 @@ use super::definitions::{
db_column::DbColumn, db_constraint::DbConstraint, db_foreignkey::DbForeignKey, db_column::DbColumn, db_constraint::DbConstraint, db_foreignkey::DbForeignKey,
db_index::DbIndex, db_table::DbTable, db_index::DbIndex, db_table::DbTable,
}; };
use nu_protocol::{CustomValue, Interrupt, PipelineData, Record, ShellError, Span, Spanned, Value}; use nu_protocol::{CustomValue, PipelineData, Record, ShellError, Signals, Span, Spanned, Value};
use rusqlite::{ use rusqlite::{
types::ValueRef, Connection, DatabaseName, Error as SqliteError, OpenFlags, Row, Statement, types::ValueRef, Connection, DatabaseName, Error as SqliteError, OpenFlags, Row, Statement,
ToSql, ToSql,
@ -23,25 +23,21 @@ pub struct SQLiteDatabase {
// 1) YAGNI, 2) it's not obvious how cloning a connection could work, 3) state // 1) YAGNI, 2) it's not obvious how cloning a connection could work, 3) state
// management gets tricky quick. Revisit this approach if we find a compelling use case. // management gets tricky quick. Revisit this approach if we find a compelling use case.
pub path: PathBuf, pub path: PathBuf,
#[serde(skip, default = "Interrupt::empty")] #[serde(skip, default = "Signals::empty")]
// this understandably can't be serialized. think that's OK, I'm not aware of a // this understandably can't be serialized. think that's OK, I'm not aware of a
// reason why a CustomValue would be serialized outside of a plugin // reason why a CustomValue would be serialized outside of a plugin
interrupt: Interrupt, signals: Signals,
} }
impl SQLiteDatabase { impl SQLiteDatabase {
pub fn new(path: &Path, interrupt: Interrupt) -> Self { pub fn new(path: &Path, signals: Signals) -> Self {
Self { Self {
path: PathBuf::from(path), path: PathBuf::from(path),
interrupt, signals,
} }
} }
-    pub fn try_from_path(
-        path: &Path,
-        span: Span,
-        interrupt: Interrupt,
-    ) -> Result<Self, ShellError> {
+    pub fn try_from_path(path: &Path, span: Span, signals: Signals) -> Result<Self, ShellError> {
let mut file = File::open(path).map_err(|e| ShellError::ReadingFile { let mut file = File::open(path).map_err(|e| ShellError::ReadingFile {
msg: e.to_string(), msg: e.to_string(),
span, span,
@ -55,7 +51,7 @@ impl SQLiteDatabase {
}) })
.and_then(|_| { .and_then(|_| {
if buf == SQLITE_MAGIC_BYTES { if buf == SQLITE_MAGIC_BYTES {
Ok(SQLiteDatabase::new(path, interrupt)) Ok(SQLiteDatabase::new(path, signals))
} else { } else {
Err(ShellError::ReadingFile { Err(ShellError::ReadingFile {
msg: "Not a SQLite file".into(), msg: "Not a SQLite file".into(),
@ -71,7 +67,7 @@ impl SQLiteDatabase {
Value::Custom { val, .. } => match val.as_any().downcast_ref::<Self>() { Value::Custom { val, .. } => match val.as_any().downcast_ref::<Self>() {
Some(db) => Ok(Self { Some(db) => Ok(Self {
path: db.path.clone(), path: db.path.clone(),
interrupt: db.interrupt.clone(), signals: db.signals.clone(),
}), }),
None => Err(ShellError::CantConvert { None => Err(ShellError::CantConvert {
to_type: "database".into(), to_type: "database".into(),
@ -106,7 +102,7 @@ impl SQLiteDatabase {
call_span: Span, call_span: Span,
) -> Result<Value, ShellError> { ) -> Result<Value, ShellError> {
let conn = open_sqlite_db(&self.path, call_span)?; let conn = open_sqlite_db(&self.path, call_span)?;
let stream = run_sql_query(conn, sql, params, &self.interrupt) let stream = run_sql_query(conn, sql, params, &self.signals)
.map_err(|e| e.into_shell_error(sql.span, "Failed to query SQLite database"))?; .map_err(|e| e.into_shell_error(sql.span, "Failed to query SQLite database"))?;
Ok(stream) Ok(stream)
@ -352,7 +348,7 @@ impl CustomValue for SQLiteDatabase {
fn to_base_value(&self, span: Span) -> Result<Value, ShellError> { fn to_base_value(&self, span: Span) -> Result<Value, ShellError> {
let db = open_sqlite_db(&self.path, span)?; let db = open_sqlite_db(&self.path, span)?;
read_entire_sqlite_db(db, span, &self.interrupt) read_entire_sqlite_db(db, span, &self.signals)
.map_err(|e| e.into_shell_error(span, "Failed to read from SQLite database")) .map_err(|e| e.into_shell_error(span, "Failed to read from SQLite database"))
} }
@ -381,7 +377,7 @@ impl CustomValue for SQLiteDatabase {
path_span: Span, path_span: Span,
) -> Result<Value, ShellError> { ) -> Result<Value, ShellError> {
let db = open_sqlite_db(&self.path, path_span)?; let db = open_sqlite_db(&self.path, path_span)?;
read_single_table(db, column_name, path_span, &self.interrupt) read_single_table(db, column_name, path_span, &self.signals)
.map_err(|e| e.into_shell_error(path_span, "Failed to read from SQLite database")) .map_err(|e| e.into_shell_error(path_span, "Failed to read from SQLite database"))
} }
@ -413,10 +409,10 @@ fn run_sql_query(
conn: Connection, conn: Connection,
sql: &Spanned<String>, sql: &Spanned<String>,
params: NuSqlParams, params: NuSqlParams,
interrupt: &Interrupt, signals: &Signals,
) -> Result<Value, SqliteOrShellError> { ) -> Result<Value, SqliteOrShellError> {
let stmt = conn.prepare(&sql.item)?; let stmt = conn.prepare(&sql.item)?;
prepared_statement_to_nu_list(stmt, params, sql.span, interrupt) prepared_statement_to_nu_list(stmt, params, sql.span, signals)
} }
// This is taken from to text local_into_string but tweaks it a bit so that certain formatting does not happen // This is taken from to text local_into_string but tweaks it a bit so that certain formatting does not happen
@ -543,18 +539,18 @@ fn read_single_table(
conn: Connection, conn: Connection,
table_name: String, table_name: String,
call_span: Span, call_span: Span,
interrupt: &Interrupt, signals: &Signals,
) -> Result<Value, SqliteOrShellError> { ) -> Result<Value, SqliteOrShellError> {
// TODO: Should use params here? // TODO: Should use params here?
let stmt = conn.prepare(&format!("SELECT * FROM [{table_name}]"))?; let stmt = conn.prepare(&format!("SELECT * FROM [{table_name}]"))?;
prepared_statement_to_nu_list(stmt, NuSqlParams::default(), call_span, interrupt) prepared_statement_to_nu_list(stmt, NuSqlParams::default(), call_span, signals)
} }
fn prepared_statement_to_nu_list( fn prepared_statement_to_nu_list(
mut stmt: Statement, mut stmt: Statement,
params: NuSqlParams, params: NuSqlParams,
call_span: Span, call_span: Span,
interrupt: &Interrupt, signals: &Signals,
) -> Result<Value, SqliteOrShellError> { ) -> Result<Value, SqliteOrShellError> {
let column_names = stmt let column_names = stmt
.column_names() .column_names()
@ -581,7 +577,7 @@ fn prepared_statement_to_nu_list(
let mut row_values = vec![]; let mut row_values = vec![];
for row_result in row_results { for row_result in row_results {
interrupt.check(call_span)?; signals.check(call_span)?;
if let Ok(row_value) = row_result { if let Ok(row_value) = row_result {
row_values.push(row_value); row_values.push(row_value);
} }
@ -607,7 +603,7 @@ fn prepared_statement_to_nu_list(
let mut row_values = vec![]; let mut row_values = vec![];
for row_result in row_results { for row_result in row_results {
interrupt.check(call_span)?; signals.check(call_span)?;
if let Ok(row_value) = row_result { if let Ok(row_value) = row_result {
row_values.push(row_value); row_values.push(row_value);
} }
@ -623,7 +619,7 @@ fn prepared_statement_to_nu_list(
fn read_entire_sqlite_db( fn read_entire_sqlite_db(
conn: Connection, conn: Connection,
call_span: Span, call_span: Span,
interrupt: &Interrupt, signals: &Signals,
) -> Result<Value, SqliteOrShellError> { ) -> Result<Value, SqliteOrShellError> {
let mut tables = Record::new(); let mut tables = Record::new();
@ -635,12 +631,8 @@ fn read_entire_sqlite_db(
let table_name: String = row?; let table_name: String = row?;
// TODO: Should use params here? // TODO: Should use params here?
let table_stmt = conn.prepare(&format!("select * from [{table_name}]"))?; let table_stmt = conn.prepare(&format!("select * from [{table_name}]"))?;
-        let rows = prepared_statement_to_nu_list(
-            table_stmt,
-            NuSqlParams::default(),
-            call_span,
-            interrupt,
-        )?;
+        let rows =
+            prepared_statement_to_nu_list(table_stmt, NuSqlParams::default(), call_span, signals)?;
tables.push(table_name, rows); tables.push(table_name, rows);
} }
@ -707,8 +699,7 @@ mod test {
#[test] #[test]
fn can_read_empty_db() { fn can_read_empty_db() {
let db = open_connection_in_memory().unwrap(); let db = open_connection_in_memory().unwrap();
-        let converted_db =
-            read_entire_sqlite_db(db, Span::test_data(), &Interrupt::empty()).unwrap();
+        let converted_db = read_entire_sqlite_db(db, Span::test_data(), &Signals::empty()).unwrap();
let expected = Value::test_record(Record::new()); let expected = Value::test_record(Record::new());
@ -728,8 +719,7 @@ mod test {
[], [],
) )
.unwrap(); .unwrap();
-        let converted_db =
-            read_entire_sqlite_db(db, Span::test_data(), &Interrupt::empty()).unwrap();
+        let converted_db = read_entire_sqlite_db(db, Span::test_data(), &Signals::empty()).unwrap();
let expected = Value::test_record(record! { let expected = Value::test_record(record! {
"person" => Value::test_list(vec![]), "person" => Value::test_list(vec![]),
@ -758,7 +748,7 @@ mod test {
db.execute("INSERT INTO item (id, name) VALUES (456, 'foo bar')", []) db.execute("INSERT INTO item (id, name) VALUES (456, 'foo bar')", [])
.unwrap(); .unwrap();
let converted_db = read_entire_sqlite_db(db, span, &Interrupt::empty()).unwrap(); let converted_db = read_entire_sqlite_db(db, span, &Signals::empty()).unwrap();
let expected = Value::test_record(record! { let expected = Value::test_record(record! {
"item" => Value::test_list( "item" => Value::test_list(

View File

@ -50,7 +50,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) { if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head }); return Err(ShellError::PipelineEmpty { dst_span: head });
} }
input.map(move |value| helper(value, head), engine_state.interrupt()) input.map(move |value| helper(value, head), engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -40,7 +40,7 @@ impl Command for SubCommand {
head, head,
) )
}) })
.into_pipeline_data(head, engine_state.interrupt().clone())) .into_pipeline_data(head, engine_state.signals().clone()))
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -40,7 +40,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) { if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head }); return Err(ShellError::PipelineEmpty { dst_span: head });
} }
input.map(move |value| helper(value, head), engine_state.interrupt()) input.map(move |value| helper(value, head), engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -40,7 +40,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) { if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head }); return Err(ShellError::PipelineEmpty { dst_span: head });
} }
input.map(move |value| helper(value, head), engine_state.interrupt()) input.map(move |value| helper(value, head), engine_state.signals())
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -55,7 +55,7 @@ impl Command for SubCommand {
} }
input.map( input.map(
move |value| helper(value, head, &timezone), move |value| helper(value, head, &timezone),
engine_state.interrupt(), engine_state.signals(),
) )
} }

View File

@ -46,7 +46,7 @@ impl Command for Debug {
Value::string(x.to_expanded_string(", ", &config), head) Value::string(x.to_expanded_string(", ", &config), head)
} }
}, },
engine_state.interrupt(), engine_state.signals(),
) )
} }

View File

@ -48,7 +48,7 @@ impl Command for MetadataSet {
let ds_fp: Option<String> = call.get_flag(engine_state, stack, "datasource-filepath")?; let ds_fp: Option<String> = call.get_flag(engine_state, stack, "datasource-filepath")?;
let ds_ls = call.has_flag(engine_state, stack, "datasource-ls")?; let ds_ls = call.has_flag(engine_state, stack, "datasource-ls")?;
let content_type: Option<String> = call.get_flag(engine_state, stack, "content-type")?; let content_type: Option<String> = call.get_flag(engine_state, stack, "content-type")?;
let interrupt = engine_state.interrupt().clone(); let signals = engine_state.signals().clone();
let metadata = input let metadata = input
.metadata() .metadata()
.clone() .clone()
@ -58,15 +58,15 @@ impl Command for MetadataSet {
match (ds_fp, ds_ls) { match (ds_fp, ds_ls) {
(Some(path), false) => Ok(input.into_pipeline_data_with_metadata( (Some(path), false) => Ok(input.into_pipeline_data_with_metadata(
head, head,
interrupt, signals,
metadata.with_data_source(DataSource::FilePath(path.into())), metadata.with_data_source(DataSource::FilePath(path.into())),
)), )),
(None, true) => Ok(input.into_pipeline_data_with_metadata( (None, true) => Ok(input.into_pipeline_data_with_metadata(
head, head,
interrupt, signals,
metadata.with_data_source(DataSource::Ls), metadata.with_data_source(DataSource::Ls),
)), )),
_ => Ok(input.into_pipeline_data_with_metadata(head, interrupt, metadata)), _ => Ok(input.into_pipeline_data_with_metadata(head, signals, metadata)),
} }
} }

View File

@ -3,7 +3,7 @@ use crate::{DirBuilder, DirInfo, FileInfo};
#[allow(deprecated)] #[allow(deprecated)]
use nu_engine::{command_prelude::*, current_dir}; use nu_engine::{command_prelude::*, current_dir};
use nu_glob::Pattern; use nu_glob::Pattern;
use nu_protocol::{Interrupt, NuGlob}; use nu_protocol::{NuGlob, Signals};
use serde::Deserialize; use serde::Deserialize;
use std::path::Path; use std::path::Path;
@ -119,8 +119,8 @@ impl Command for Du {
min_size, min_size,
}; };
Ok( Ok(
du_for_one_pattern(args, &current_dir, tag, engine_state.interrupt())? du_for_one_pattern(args, &current_dir, tag, engine_state.signals())?
.into_pipeline_data(tag, engine_state.interrupt().clone()), .into_pipeline_data(tag, engine_state.signals().clone()),
) )
} }
Some(paths) => { Some(paths) => {
@ -138,7 +138,7 @@ impl Command for Du {
args, args,
&current_dir, &current_dir,
tag, tag,
engine_state.interrupt(), engine_state.signals(),
)?) )?)
} }
@ -146,7 +146,7 @@ impl Command for Du {
Ok(result_iters Ok(result_iters
.into_iter() .into_iter()
.flatten() .flatten()
.into_pipeline_data(tag, engine_state.interrupt().clone())) .into_pipeline_data(tag, engine_state.signals().clone()))
} }
} }
} }
@ -164,7 +164,7 @@ fn du_for_one_pattern(
args: DuArgs, args: DuArgs,
current_dir: &Path, current_dir: &Path,
span: Span, span: Span,
interrupt: &Interrupt, signals: &Signals,
) -> Result<impl Iterator<Item = Value> + Send, ShellError> { ) -> Result<impl Iterator<Item = Value> + Send, ShellError> {
let exclude = args.exclude.map_or(Ok(None), move |x| { let exclude = args.exclude.map_or(Ok(None), move |x| {
Pattern::new(x.item.as_ref()) Pattern::new(x.item.as_ref())
@ -216,7 +216,7 @@ fn du_for_one_pattern(
match p { match p {
Ok(a) => { Ok(a) => {
if a.is_dir() { if a.is_dir() {
output.push(DirInfo::new(a, &params, max_depth, span, interrupt)?.into()); output.push(DirInfo::new(a, &params, max_depth, span, signals)?.into());
} else if let Ok(v) = FileInfo::new(a, deref, span) { } else if let Ok(v) = FileInfo::new(a, deref, span) {
output.push(v.into()); output.push(v.into());
} }

View File

@ -1,5 +1,5 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::Interrupt; use nu_protocol::Signals;
use wax::{Glob as WaxGlob, WalkBehavior, WalkEntry}; use wax::{Glob as WaxGlob, WalkBehavior, WalkEntry};
#[derive(Clone)] #[derive(Clone)]
@ -216,7 +216,7 @@ impl Command for Glob {
})? })?
.flatten(); .flatten();
glob_to_value( glob_to_value(
engine_state.interrupt(), engine_state.signals(),
glob_results, glob_results,
no_dirs, no_dirs,
no_files, no_files,
@ -234,7 +234,7 @@ impl Command for Glob {
) )
.flatten(); .flatten();
glob_to_value( glob_to_value(
engine_state.interrupt(), engine_state.signals(),
glob_results, glob_results,
no_dirs, no_dirs,
no_files, no_files,
@ -245,7 +245,7 @@ impl Command for Glob {
Ok(result Ok(result
.into_iter() .into_iter()
.into_pipeline_data(span, engine_state.interrupt().clone())) .into_pipeline_data(span, engine_state.signals().clone()))
} }
} }
@ -265,7 +265,7 @@ fn convert_patterns(columns: &[Value]) -> Result<Vec<String>, ShellError> {
} }
fn glob_to_value<'a>( fn glob_to_value<'a>(
interrupt: &Interrupt, signals: &Signals,
glob_results: impl Iterator<Item = WalkEntry<'a>>, glob_results: impl Iterator<Item = WalkEntry<'a>>,
no_dirs: bool, no_dirs: bool,
no_files: bool, no_files: bool,
@ -274,7 +274,7 @@ fn glob_to_value<'a>(
) -> Result<Vec<Value>, ShellError> { ) -> Result<Vec<Value>, ShellError> {
let mut result: Vec<Value> = Vec::new(); let mut result: Vec<Value> = Vec::new();
for entry in glob_results { for entry in glob_results {
interrupt.check(span)?; signals.check(span)?;
let file_type = entry.file_type(); let file_type = entry.file_type();
if !(no_dirs && file_type.is_dir() if !(no_dirs && file_type.is_dir()

View File

@ -6,7 +6,7 @@ use nu_engine::glob_from;
use nu_engine::{command_prelude::*, env::current_dir}; use nu_engine::{command_prelude::*, env::current_dir};
use nu_glob::MatchOptions; use nu_glob::MatchOptions;
use nu_path::expand_to_real_path; use nu_path::expand_to_real_path;
use nu_protocol::{DataSource, Interrupt, NuGlob, PipelineMetadata}; use nu_protocol::{DataSource, NuGlob, PipelineMetadata, Signals};
use pathdiff::diff_paths; use pathdiff::diff_paths;
#[cfg(unix)] #[cfg(unix)]
@ -114,24 +114,22 @@ impl Command for Ls {
Some(pattern_arg) Some(pattern_arg)
}; };
match input_pattern_arg { match input_pattern_arg {
-            None => Ok(
-                ls_for_one_pattern(None, args, engine_state.interrupt(), cwd)?
-                    .into_pipeline_data_with_metadata(
-                        call_span,
-                        engine_state.interrupt().clone(),
-                        PipelineMetadata {
-                            data_source: DataSource::Ls,
-                            content_type: None,
-                        },
-                    ),
-            ),
+            None => Ok(ls_for_one_pattern(None, args, engine_state.signals(), cwd)?
+                .into_pipeline_data_with_metadata(
+                    call_span,
+                    engine_state.signals().clone(),
+                    PipelineMetadata {
+                        data_source: DataSource::Ls,
+                        content_type: None,
+                    },
+                )),
Some(pattern) => { Some(pattern) => {
let mut result_iters = vec![]; let mut result_iters = vec![];
for pat in pattern { for pat in pattern {
result_iters.push(ls_for_one_pattern( result_iters.push(ls_for_one_pattern(
Some(pat), Some(pat),
args, args,
engine_state.interrupt(), engine_state.signals(),
cwd.clone(), cwd.clone(),
)?) )?)
} }
@ -143,7 +141,7 @@ impl Command for Ls {
.flatten() .flatten()
.into_pipeline_data_with_metadata( .into_pipeline_data_with_metadata(
call_span, call_span,
engine_state.interrupt().clone(), engine_state.signals().clone(),
PipelineMetadata { PipelineMetadata {
data_source: DataSource::Ls, data_source: DataSource::Ls,
content_type: None, content_type: None,
@ -215,7 +213,7 @@ impl Command for Ls {
fn ls_for_one_pattern( fn ls_for_one_pattern(
pattern_arg: Option<Spanned<NuGlob>>, pattern_arg: Option<Spanned<NuGlob>>,
args: Args, args: Args,
interrupt: &Interrupt, signals: &Signals,
cwd: PathBuf, cwd: PathBuf,
) -> Result<Box<dyn Iterator<Item = Value> + Send>, ShellError> { ) -> Result<Box<dyn Iterator<Item = Value> + Send>, ShellError> {
let Args { let Args {
@ -342,7 +340,7 @@ fn ls_for_one_pattern(
let mut hidden_dirs = vec![]; let mut hidden_dirs = vec![];
let interrupt = interrupt.clone(); let signals = signals.clone();
Ok(Box::new(paths_peek.filter_map(move |x| match x { Ok(Box::new(paths_peek.filter_map(move |x| match x {
Ok(path) => { Ok(path) => {
let metadata = match std::fs::symlink_metadata(&path) { let metadata = match std::fs::symlink_metadata(&path) {
@ -412,7 +410,7 @@ fn ls_for_one_pattern(
call_span, call_span,
long, long,
du, du,
&interrupt, &signals,
use_mime_type, use_mime_type,
); );
match entry { match entry {
@ -522,7 +520,7 @@ pub(crate) fn dir_entry_dict(
span: Span, span: Span,
long: bool, long: bool,
du: bool, du: bool,
interrupt: &Interrupt, signals: &Signals,
use_mime_type: bool, use_mime_type: bool,
) -> Result<Value, ShellError> { ) -> Result<Value, ShellError> {
#[cfg(windows)] #[cfg(windows)]
@ -617,8 +615,7 @@ pub(crate) fn dir_entry_dict(
if md.is_dir() { if md.is_dir() {
if du { if du {
let params = DirBuilder::new(Span::new(0, 2), None, false, None, false); let params = DirBuilder::new(Span::new(0, 2), None, false, None, false);
-                let dir_size =
-                    DirInfo::new(filename, &params, None, span, interrupt)?.get_size();
+                let dir_size = DirInfo::new(filename, &params, None, span, signals)?.get_size();
Value::filesize(dir_size as i64, span) Value::filesize(dir_size as i64, span)
} else { } else {

View File

@ -124,7 +124,7 @@ impl Command for Open {
let res = SQLiteDatabase::try_from_path( let res = SQLiteDatabase::try_from_path(
path, path,
arg_span, arg_span,
engine_state.interrupt().clone(), engine_state.signals().clone(),
) )
.map(|db| db.into_value(call.head).into_pipeline_data()); .map(|db| db.into_value(call.head).into_pipeline_data());
@ -147,7 +147,7 @@ impl Command for Open {
}; };
let stream = PipelineData::ByteStream( let stream = PipelineData::ByteStream(
ByteStream::file(file, call_span, engine_state.interrupt().clone()), ByteStream::file(file, call_span, engine_state.signals().clone()),
Some(PipelineMetadata { Some(PipelineMetadata {
data_source: DataSource::FilePath(path.to_path_buf()), data_source: DataSource::FilePath(path.to_path_buf()),
content_type: None, content_type: None,
@ -206,7 +206,7 @@ impl Command for Open {
Ok(output Ok(output
.into_iter() .into_iter()
.flatten() .flatten()
.into_pipeline_data(call_span, engine_state.interrupt().clone())) .into_pipeline_data(call_span, engine_state.signals().clone()))
} }
} }

View File

@ -451,7 +451,7 @@ fn rm(
}); });
for result in iter { for result in iter {
engine_state.interrupt().check(call.head)?; engine_state.signals().check(call.head)?;
match result { match result {
Ok(None) => {} Ok(None) => {}
Ok(Some(msg)) => eprintln!("{msg}"), Ok(Some(msg)) => eprintln!("{msg}"),

View File

@ -5,9 +5,9 @@ use nu_engine::{command_prelude::*, current_dir};
use nu_path::expand_path_with; use nu_path::expand_path_with;
use nu_protocol::{ use nu_protocol::{
ast::{Expr, Expression}, ast::{Expr, Expression},
byte_stream::copy_with_interrupt, byte_stream::copy_with_signals,
process::ChildPipe, process::ChildPipe,
ByteStreamSource, DataSource, Interrupt, OutDest, PipelineMetadata, ByteStreamSource, DataSource, OutDest, PipelineMetadata, Signals,
}; };
use std::{ use std::{
fs::File, fs::File,
@ -119,30 +119,30 @@ impl Command for Save {
)?; )?;
let size = stream.known_size(); let size = stream.known_size();
let interrupt = engine_state.interrupt(); let signals = engine_state.signals();
match stream.into_source() { match stream.into_source() {
ByteStreamSource::Read(read) => { ByteStreamSource::Read(read) => {
stream_to_file(read, size, interrupt, file, span, progress)?; stream_to_file(read, size, signals, file, span, progress)?;
} }
ByteStreamSource::File(source) => { ByteStreamSource::File(source) => {
stream_to_file(source, size, interrupt, file, span, progress)?; stream_to_file(source, size, signals, file, span, progress)?;
} }
ByteStreamSource::Child(mut child) => { ByteStreamSource::Child(mut child) => {
fn write_or_consume_stderr( fn write_or_consume_stderr(
stderr: ChildPipe, stderr: ChildPipe,
file: Option<File>, file: Option<File>,
span: Span, span: Span,
interrupt: &Interrupt, signals: &Signals,
progress: bool, progress: bool,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
if let Some(file) = file { if let Some(file) = file {
match stderr { match stderr {
ChildPipe::Pipe(pipe) => { ChildPipe::Pipe(pipe) => {
stream_to_file(pipe, None, interrupt, file, span, progress) stream_to_file(pipe, None, signals, file, span, progress)
} }
ChildPipe::Tee(tee) => { ChildPipe::Tee(tee) => {
stream_to_file(tee, None, interrupt, file, span, progress) stream_to_file(tee, None, signals, file, span, progress)
} }
}? }?
} else { } else {
@ -162,14 +162,14 @@ impl Command for Save {
// delegate a thread to redirect stderr to result. // delegate a thread to redirect stderr to result.
let handler = stderr let handler = stderr
.map(|stderr| { .map(|stderr| {
let interrupt = interrupt.clone(); let signals = signals.clone();
thread::Builder::new().name("stderr saver".into()).spawn( thread::Builder::new().name("stderr saver".into()).spawn(
move || { move || {
write_or_consume_stderr( write_or_consume_stderr(
stderr, stderr,
stderr_file, stderr_file,
span, span,
&interrupt, &signals,
progress, progress,
) )
}, },
@ -180,10 +180,10 @@ impl Command for Save {
let res = match stdout { let res = match stdout {
ChildPipe::Pipe(pipe) => { ChildPipe::Pipe(pipe) => {
stream_to_file(pipe, None, interrupt, file, span, progress) stream_to_file(pipe, None, signals, file, span, progress)
} }
ChildPipe::Tee(tee) => { ChildPipe::Tee(tee) => {
stream_to_file(tee, None, interrupt, file, span, progress) stream_to_file(tee, None, signals, file, span, progress)
} }
}; };
if let Some(h) = handler { if let Some(h) = handler {
@ -201,7 +201,7 @@ impl Command for Save {
stderr, stderr,
stderr_file, stderr_file,
span, span,
interrupt, signals,
progress, progress,
)?; )?;
} }
@ -509,7 +509,7 @@ fn get_files(
fn stream_to_file( fn stream_to_file(
source: impl Read, source: impl Read,
known_size: Option<u64>, known_size: Option<u64>,
interrupt: &Interrupt, signals: &Signals,
mut file: File, mut file: File,
span: Span, span: Span,
progress: bool, progress: bool,
@ -525,7 +525,7 @@ fn stream_to_file(
let mut reader = BufReader::new(source); let mut reader = BufReader::new(source);
let res = loop { let res = loop {
if let Err(err) = interrupt.check(span) { if let Err(err) = signals.check(span) {
bar.abandoned_msg("# Cancelled #".to_owned()); bar.abandoned_msg("# Cancelled #".to_owned());
return Err(err); return Err(err);
} }
@ -554,7 +554,7 @@ fn stream_to_file(
Ok(()) Ok(())
} }
} else { } else {
copy_with_interrupt(source, file, span, interrupt)?; copy_with_signals(source, file, span, signals)?;
Ok(()) Ok(())
} }
} }

View File

@ -255,7 +255,7 @@ impl Command for Watch {
} }
Err(RecvTimeoutError::Timeout) => {} Err(RecvTimeoutError::Timeout) => {}
} }
if engine_state.interrupt().triggered() { if engine_state.signals().interrupted() {
break; break;
} }
} }
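Two different accessors appear in this commit: `check(span)` converts a pending interrupt into a `ShellError` (used inside evaluation and streaming loops), while `interrupted()` (previously `Interrupt::triggered`) just reports a boolean, which is how `watch` above leaves its event loop. A hedged sketch of the contrast, reusing the assumed `Signals` shape from the top of this page:

```rust
use nu_protocol::{ShellError, Signals, Span};

// Fallible style: the error carries the span of the command that was interrupted.
fn drain(signals: &Signals, span: Span) -> Result<(), ShellError> {
    loop {
        signals.check(span)?; // returns Err(..) once Ctrl-C is seen
        // ... do one unit of work ...
    }
}

// Boolean style: plain control flow, as in the `watch` hunk above.
fn wait_for_events(signals: &Signals) {
    while !signals.interrupted() {
        // ... poll for the next filesystem event ...
    }
}
```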

View File

@ -118,7 +118,7 @@ only unwrap the outer list, and leave the variable's contents untouched."#
.chain(other.into_pipeline_data()) .chain(other.into_pipeline_data())
.into_pipeline_data_with_metadata( .into_pipeline_data_with_metadata(
call.head, call.head,
engine_state.interrupt().clone(), engine_state.signals().clone(),
metadata, metadata,
)) ))
} }

View File

@ -140,7 +140,7 @@ pub fn compact(
_ => true, _ => true,
} }
}, },
engine_state.interrupt(), engine_state.signals(),
) )
.map(|m| m.set_metadata(metadata)) .map(|m| m.set_metadata(metadata))
} }

View File

@ -107,7 +107,7 @@ fn default(
} }
_ => item, _ => item,
}, },
engine_state.interrupt(), engine_state.signals(),
) )
.map(|x| x.set_metadata(metadata)) .map(|x| x.set_metadata(metadata))
} else if input.is_nothing() { } else if input.is_nothing() {
@ -119,7 +119,7 @@ fn default(
Value::Nothing { .. } => value.clone(), Value::Nothing { .. } => value.clone(),
x => x, x => x,
}, },
engine_state.interrupt(), engine_state.signals(),
) )
.map(|x| x.set_metadata(metadata)) .map(|x| x.set_metadata(metadata))
} }

View File

@ -104,7 +104,7 @@ fn drop_cols(
})) }))
.into_pipeline_data_with_metadata( .into_pipeline_data_with_metadata(
head, head,
engine_state.interrupt().clone(), engine_state.signals().clone(),
metadata, metadata,
)) ))
} else { } else {

View File

@ -156,7 +156,7 @@ impl Command for DropNth {
.take(start) .take(start)
.into_pipeline_data_with_metadata( .into_pipeline_data_with_metadata(
head, head,
engine_state.interrupt().clone(), engine_state.signals().clone(),
metadata, metadata,
)) ))
} }
@ -177,7 +177,7 @@ impl Command for DropNth {
rows, rows,
current: 0, current: 0,
} }
.into_pipeline_data_with_metadata(head, engine_state.interrupt().clone(), metadata)) .into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
} }
} }

View File

@ -140,7 +140,7 @@ with 'transpose' first."#
} }
} }
}) })
.into_pipeline_data(head, engine_state.interrupt().clone())) .into_pipeline_data(head, engine_state.signals().clone()))
} }
PipelineData::ByteStream(stream, ..) => { PipelineData::ByteStream(stream, ..) => {
if let Some(chunks) = stream.chunks() { if let Some(chunks) = stream.chunks() {
@ -171,7 +171,7 @@ with 'transpose' first."#
} }
} }
}) })
.into_pipeline_data(head, engine_state.interrupt().clone())) .into_pipeline_data(head, engine_state.signals().clone()))
} else { } else {
Ok(PipelineData::Empty) Ok(PipelineData::Empty)
} }
@ -185,7 +185,7 @@ with 'transpose' first."#
.and_then(|x| { .and_then(|x| {
x.filter( x.filter(
move |x| if !keep_empty { !x.is_nothing() } else { true }, move |x| if !keep_empty { !x.is_nothing() } else { true },
engine_state.interrupt(), engine_state.signals(),
) )
}) })
.map(|data| data.set_metadata(metadata)) .map(|data| data.set_metadata(metadata))

View File

@ -65,7 +65,7 @@ impl Command for Enumerate {
head, head,
) )
}) })
.into_pipeline_data_with_metadata(head, engine_state.interrupt().clone(), metadata)) .into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
} }
} }

View File

@ -80,7 +80,7 @@ impl Command for Every {
}) })
.into_pipeline_data_with_metadata( .into_pipeline_data_with_metadata(
call.head, call.head,
engine_state.interrupt().clone(), engine_state.signals().clone(),
metadata, metadata,
)) ))
} }

View File

@ -72,7 +72,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
} }
} }
}) })
.into_pipeline_data(head, engine_state.interrupt().clone())) .into_pipeline_data(head, engine_state.signals().clone()))
} }
PipelineData::ByteStream(stream, ..) => { PipelineData::ByteStream(stream, ..) => {
if let Some(chunks) = stream.chunks() { if let Some(chunks) = stream.chunks() {
@ -97,7 +97,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
} }
} }
}) })
.into_pipeline_data(head, engine_state.interrupt().clone())) .into_pipeline_data(head, engine_state.signals().clone()))
} else { } else {
Ok(PipelineData::Empty) Ok(PipelineData::Empty)
} }
@ -117,7 +117,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
Some(Value::error(err, span)) Some(Value::error(err, span))
} }
} }
.into_pipeline_data(head, engine_state.interrupt().clone())) .into_pipeline_data(head, engine_state.signals().clone()))
} }
} }
.map(|data| data.set_metadata(metadata)) .map(|data| data.set_metadata(metadata))

View File

@ -245,7 +245,7 @@ fn find_with_regex(
Value::List { vals, .. } => values_match_find(vals, &re, &config, invert), Value::List { vals, .. } => values_match_find(vals, &re, &config, invert),
_ => false, _ => false,
}, },
engine_state.interrupt(), engine_state.signals(),
) )
} }
@ -398,7 +398,7 @@ fn find_with_rest_and_highlight(
_ => x, _ => x,
} }
}, },
engine_state.interrupt(), engine_state.signals(),
)? )?
.filter( .filter(
move |value| { move |value| {
@ -411,7 +411,7 @@ fn find_with_rest_and_highlight(
invert, invert,
) )
}, },
engine_state.interrupt(), engine_state.signals(),
), ),
PipelineData::ListStream(stream, metadata) => { PipelineData::ListStream(stream, metadata) => {
let stream = stream.modify(|iter| { let stream = stream.modify(|iter| {

View File

@ -1,5 +1,5 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::Interrupt; use nu_protocol::Signals;
use std::io::Read; use std::io::Read;
#[derive(Clone)] #[derive(Clone)]
@ -134,7 +134,7 @@ fn first_helper(
} }
} }
Value::Range { val, .. } => { Value::Range { val, .. } => {
let mut iter = val.into_range_iter(span, Interrupt::empty()); let mut iter = val.into_range_iter(span, Signals::empty());
if return_single_element { if return_single_element {
if let Some(v) = iter.next() { if let Some(v) = iter.next() {
Ok(v.into_pipeline_data()) Ok(v.into_pipeline_data())
@ -144,7 +144,7 @@ fn first_helper(
} else { } else {
Ok(iter.take(rows).into_pipeline_data_with_metadata( Ok(iter.take(rows).into_pipeline_data_with_metadata(
span, span,
engine_state.interrupt().clone(), engine_state.signals().clone(),
metadata, metadata,
)) ))
} }
@ -191,7 +191,7 @@ fn first_helper(
ByteStream::read( ByteStream::read(
reader.take(rows as u64), reader.take(rows as u64),
head, head,
Interrupt::empty(), Signals::empty(),
ByteStreamType::Binary, ByteStreamType::Binary,
), ),
metadata, metadata,
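
first falls back to Signals::empty() where the work is already bounded (into_range_iter, the ByteStream::read of a capped reader). A sketch of that handle's semantics, assuming empty() never reports an interrupt, which mirrors how the old Interrupt::empty() behaved; cancellation is left to the surrounding command:

    use nu_protocol::{Signals, Span};

    fn main() {
        // The "never interrupted" handle: interrupted() stays false and
        // check() always succeeds.
        let signals = Signals::empty();
        assert!(!signals.interrupted());
        assert!(signals.check(Span::unknown()).is_ok());
    }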

View File

@ -127,7 +127,7 @@ fn flatten(
input input
.flat_map( .flat_map(
move |item| flat_value(&columns, item, flatten_all), move |item| flat_value(&columns, item, flatten_all),
engine_state.interrupt(), engine_state.signals(),
) )
.map(|x| x.set_metadata(metadata)) .map(|x| x.set_metadata(metadata))
} }

View File

@ -90,7 +90,7 @@ If multiple cell paths are given, this will produce a list of values."#
Ok(output Ok(output
.into_iter() .into_iter()
.into_pipeline_data(span, engine_state.interrupt().clone())) .into_pipeline_data(span, engine_state.signals().clone()))
} }
.map(|x| x.set_metadata(metadata)) .map(|x| x.set_metadata(metadata))
} }

View File

@ -65,7 +65,7 @@ impl Command for Group {
Ok(each_group_iterator.into_pipeline_data_with_metadata( Ok(each_group_iterator.into_pipeline_data_with_metadata(
head, head,
engine_state.interrupt().clone(), engine_state.signals().clone(),
metadata, metadata,
)) ))
} }

View File

@ -224,7 +224,7 @@ fn insert(
.chain(stream) .chain(stream)
.into_pipeline_data_with_metadata( .into_pipeline_data_with_metadata(
head, head,
engine_state.interrupt().clone(), engine_state.signals().clone(),
metadata, metadata,
)) ))
} else if let Value::Closure { val, .. } = replacement { } else if let Value::Closure { val, .. } = replacement {

View File

@ -147,7 +147,7 @@ interleave
// Now that threads are writing to the channel, we just return it as a stream // Now that threads are writing to the channel, we just return it as a stream
Ok(rx Ok(rx
.into_iter() .into_iter()
.into_pipeline_data(head, engine_state.interrupt().clone())) .into_pipeline_data(head, engine_state.signals().clone()))
} }
} }

View File

@ -67,7 +67,7 @@ impl Command for Items {
} }
} }
}) })
.into_pipeline_data(head, engine_state.interrupt().clone())) .into_pipeline_data(head, engine_state.signals().clone()))
} }
Value::Error { error, .. } => Err(*error), Value::Error { error, .. } => Err(*error),
other => Err(ShellError::OnlySupportsThisInputType { other => Err(ShellError::OnlySupportsThisInputType {

View File

@ -99,7 +99,7 @@ impl Command for Last {
let mut buf = VecDeque::new(); let mut buf = VecDeque::new();
for row in iterator { for row in iterator {
engine_state.interrupt().check(head)?; engine_state.signals().check(head)?;
if buf.len() == rows { if buf.len() == rows {
buf.pop_front(); buf.pop_front();
} }
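
last consumes its input eagerly, so it calls check(head)? once per row while buffering. A sketch of what that ? surfaces once the flag is set; the exact error variant is left to unwrap_err and Debug output rather than assumed:

    use std::sync::atomic::AtomicBool;
    use std::sync::Arc;

    use nu_protocol::{Signals, Span};

    fn main() {
        // A handle whose flag is already set, as after a Ctrl-C.
        let signals = Signals::new(Arc::new(AtomicBool::new(true)));

        // In last, the per-row check(head)? aborts the buffering loop with
        // this error rather than returning a partial result.
        let err = signals.check(Span::unknown()).unwrap_err();
        eprintln!("{err:?}");
    }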

View File

@ -90,7 +90,7 @@ impl Command for Lines {
Ok(line) => Value::string(line, head), Ok(line) => Value::string(line, head),
Err(err) => Value::error(err, head), Err(err) => Value::error(err, head),
}) })
.into_pipeline_data(head, engine_state.interrupt().clone())) .into_pipeline_data(head, engine_state.signals().clone()))
} else { } else {
Ok(PipelineData::empty()) Ok(PipelineData::empty())
} }

View File

@ -111,7 +111,7 @@ repeating this process with row 1, and so on."#
Ok(res.into_pipeline_data_with_metadata( Ok(res.into_pipeline_data_with_metadata(
head, head,
engine_state.interrupt().clone(), engine_state.signals().clone(),
metadata, metadata,
)) ))
} }

View File

@ -159,7 +159,7 @@ impl Command for Move {
Ok(res.into_pipeline_data_with_metadata( Ok(res.into_pipeline_data_with_metadata(
head, head,
engine_state.interrupt().clone(), engine_state.signals().clone(),
metadata, metadata,
)) ))
} }

View File

@ -1,6 +1,6 @@
use super::utils::chain_error_with_input; use super::utils::chain_error_with_input;
use nu_engine::{command_prelude::*, ClosureEvalOnce}; use nu_engine::{command_prelude::*, ClosureEvalOnce};
use nu_protocol::{engine::Closure, Interrupt}; use nu_protocol::{engine::Closure, Signals};
use rayon::prelude::*; use rayon::prelude::*;
#[derive(Clone)] #[derive(Clone)]
@ -158,11 +158,11 @@ impl Command for ParEach {
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
apply_order(vec).into_pipeline_data(span, engine_state.interrupt().clone()) apply_order(vec).into_pipeline_data(span, engine_state.signals().clone())
})), })),
Value::Range { val, .. } => Ok(create_pool(max_threads)?.install(|| { Value::Range { val, .. } => Ok(create_pool(max_threads)?.install(|| {
let vec = val let vec = val
.into_range_iter(span, Interrupt::empty()) .into_range_iter(span, Signals::empty())
.enumerate() .enumerate()
.par_bridge() .par_bridge()
.map(move |(index, value)| { .map(move |(index, value)| {
@ -183,7 +183,7 @@ impl Command for ParEach {
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
apply_order(vec).into_pipeline_data(span, engine_state.interrupt().clone()) apply_order(vec).into_pipeline_data(span, engine_state.signals().clone())
})), })),
// This match allows non-iterables to be accepted, // This match allows non-iterables to be accepted,
// which is currently considered undesirable (Nov 2022). // which is currently considered undesirable (Nov 2022).
@ -211,7 +211,7 @@ impl Command for ParEach {
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
apply_order(vec).into_pipeline_data(head, engine_state.interrupt().clone()) apply_order(vec).into_pipeline_data(head, engine_state.signals().clone())
})), })),
PipelineData::ByteStream(stream, ..) => { PipelineData::ByteStream(stream, ..) => {
if let Some(chunks) = stream.chunks() { if let Some(chunks) = stream.chunks() {
@ -235,14 +235,14 @@ impl Command for ParEach {
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
apply_order(vec).into_pipeline_data(head, engine_state.interrupt().clone()) apply_order(vec).into_pipeline_data(head, engine_state.signals().clone())
})) }))
} else { } else {
Ok(PipelineData::empty()) Ok(PipelineData::empty())
} }
} }
} }
.and_then(|x| x.filter(|v| !v.is_nothing(), engine_state.interrupt())) .and_then(|x| x.filter(|v| !v.is_nothing(), engine_state.signals()))
.map(|data| data.set_metadata(metadata)) .map(|data| data.set_metadata(metadata))
} }
} }
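
par-each collects results on worker threads and only afterwards rebuilds a pipeline with the engine's signals. A sketch of the one property that relies on: clones of a Signals handle are cross-thread views of the same flag. Plain std threads stand in for rayon here, and the busy-wait loop is illustrative only:

    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::Arc;
    use std::thread;

    use nu_protocol::Signals;

    fn main() {
        let flag = Arc::new(AtomicBool::new(false));
        let signals = Signals::new(flag.clone());

        // Each worker takes its own clone of the handle, roughly what the
        // rayon closures above get via the captured engine state.
        let workers: Vec<_> = (0..4)
            .map(|_| {
                let signals = signals.clone();
                thread::spawn(move || {
                    while !signals.interrupted() {
                        thread::yield_now(); // stand-in for one unit of work
                    }
                })
            })
            .collect();

        // Simulate Ctrl-C: every clone sees the same flag and all workers stop.
        flag.store(true, Ordering::Relaxed);
        for worker in workers {
            worker.join().unwrap();
        }
    }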

View File

@ -119,7 +119,7 @@ only unwrap the outer list, and leave the variable's contents untouched."#
.chain(input) .chain(input)
.into_pipeline_data_with_metadata( .into_pipeline_data_with_metadata(
call.head, call.head,
engine_state.interrupt().clone(), engine_state.signals().clone(),
metadata, metadata,
)) ))
} }

View File

@ -106,7 +106,7 @@ impl Command for Range {
Ok(PipelineData::Value(Value::nothing(head), None)) Ok(PipelineData::Value(Value::nothing(head), None))
} else { } else {
let iter = v.into_iter().skip(from).take(to - from + 1); let iter = v.into_iter().skip(from).take(to - from + 1);
Ok(iter.into_pipeline_data(head, engine_state.interrupt().clone())) Ok(iter.into_pipeline_data(head, engine_state.signals().clone()))
} }
} else { } else {
let from = start as usize; let from = start as usize;
@ -116,7 +116,7 @@ impl Command for Range {
Ok(PipelineData::Value(Value::nothing(head), None)) Ok(PipelineData::Value(Value::nothing(head), None))
} else { } else {
let iter = input.into_iter().skip(from).take(to - from + 1); let iter = input.into_iter().skip(from).take(to - from + 1);
Ok(iter.into_pipeline_data(head, engine_state.interrupt().clone())) Ok(iter.into_pipeline_data(head, engine_state.signals().clone()))
} }
} }
.map(|x| x.set_metadata(metadata)) .map(|x| x.set_metadata(metadata))

Some files were not shown because too many files have changed in this diff.