Merge 8bf907b7c2 into 948b90299d
Commit 19c4e77e18

Cargo.lock (generated)
@@ -2926,7 +2926,6 @@ dependencies = [
"chrono",
"crossterm",
"fancy-regex",
"fuzzy-matcher",
"is_executable",
"log",
"lscolors",
@@ -2943,6 +2942,7 @@ dependencies = [
"nu-protocol",
"nu-test-support",
"nu-utils",
"nucleo-matcher",
"once_cell",
"percent-encoding",
"reedline",
@@ -3549,6 +3549,16 @@ dependencies = [
"serde_json",
]

[[package]]
name = "nucleo-matcher"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf33f538733d1a5a3494b836ba913207f14d9d4a1d3cd67030c5061bdd2cac85"
dependencies = [
"memchr",
"unicode-segmentation",
]

[[package]]
name = "num"
version = "0.4.2"
@@ -90,7 +90,6 @@ fancy-regex = "0.13"
filesize = "0.2"
filetime = "0.2"
fs_extra = "1.3"
fuzzy-matcher = "0.3"
hamcrest2 = "0.3"
heck = "0.5.0"
human-date-parser = "0.1.1"
@@ -116,6 +115,7 @@ native-tls = "0.2"
nix = { version = "0.28", default-features = false }
notify-debouncer-full = { version = "0.3", default-features = false }
nu-ansi-term = "0.50.0"
nucleo-matcher = "0.3"
num-format = "0.4"
num-traits = "0.2"
omnipath = "0.1"
@@ -32,11 +32,11 @@ reedline = { workspace = true, features = ["bashisms", "sqlite"] }
chrono = { default-features = false, features = ["std"], workspace = true }
crossterm = { workspace = true }
fancy-regex = { workspace = true }
fuzzy-matcher = { workspace = true }
is_executable = { workspace = true }
log = { workspace = true }
miette = { workspace = true, features = ["fancy-no-backtrace"] }
lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
nucleo-matcher = { workspace = true }
once_cell = { workspace = true }
percent-encoding = { workspace = true }
sysinfo = { workspace = true }
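The dependency changes above swap the Skim-based fuzzy-matcher for nucleo-matcher in nu-cli. For reference, here is a minimal standalone sketch of the nucleo-matcher 0.3 calls that the new NuMatcher further down relies on (Matcher, Pattern, Utf32Str). It is not part of this diff, and the needle/haystack strings are placeholders:

```rust
// Minimal sketch of the nucleo-matcher 0.3 API as exercised by this diff;
// illustrative only, not code from the change itself.
use nucleo_matcher::{
    pattern::{AtomKind, CaseMatching, Normalization, Pattern},
    Config, Matcher, Utf32Str,
};

fn main() {
    // `match_paths()` tweaks the scoring config to reward path-like haystacks.
    let mut matcher = Matcher::new(Config::DEFAULT.match_paths());
    // A single fuzzy atom, case-insensitive, with smart Unicode normalization.
    let pat = Pattern::new("foo bar", CaseMatching::Ignore, Normalization::Smart, AtomKind::Fuzzy);

    let mut buf = Vec::new();
    let mut indices = Vec::new();
    // `indices` returns Some(score) on a match and fills in the matched char positions.
    if let Some(score) = pat.indices(Utf32Str::new("foo/bar", &mut buf), &mut matcher, &mut indices) {
        println!("score: {score}, matched chars: {indices:?}");
    }
}
```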
@@ -1,5 +1,7 @@
use std::collections::HashSet;
use crate::{
completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy},
completions::{Completer, CompletionOptions, SortBy},
SuggestionKind,
};
use nu_parser::FlatShape;
@@ -9,7 +11,10 @@ use nu_protocol::{
};
use reedline::Suggestion;
use super::{completion_common::sort_suggestions, SemanticSuggestion};
use super::{
completion_options::{MatcherOptions, NuMatcher},
SemanticSuggestion,
};
pub struct CommandCompletion {
flattened: Vec<(Span, FlatShape)>,
@@ -33,10 +38,11 @@ impl CommandCompletion {
fn external_command_completion(
&self,
working_set: &StateWorkingSet,
prefix: &str,
match_algorithm: MatchAlgorithm,
) -> Vec<String> {
let mut executables = vec![];
sugg_span: reedline::Span,
matched_internal: HashSet<String>,
matcher: &mut NuMatcher<SemanticSuggestion>,
) {
let mut executables = HashSet::new();
// os agnostic way to get the PATH env var
let paths = working_set.permanent_state.get_path_env_var();
@@ -48,27 +54,36 @@ impl CommandCompletion {
if let Ok(mut contents) = std::fs::read_dir(path.as_ref()) {
while let Some(Ok(item)) = contents.next() {
if working_set
.permanent_state
.config
.max_external_completion_results
> executables.len() as i64
&& !executables.contains(
&item
.path()
.file_name()
.map(|x| x.to_string_lossy().to_string())
.unwrap_or_default(),
)
&& matches!(
item.path().file_name().map(|x| match_algorithm
.matches_str(&x.to_string_lossy(), prefix)),
Some(true)
)
&& is_executable::is_executable(item.path())
{
if let Ok(name) = item.file_name().into_string() {
executables.push(name);
if let Ok(name) = item.file_name().into_string() {
if working_set
.permanent_state
.config
.max_external_completion_results
> executables.len() as i64
&& !executables.contains(&name)
&& is_executable::is_executable(item.path())
{
executables.insert(name.clone());
let name = if matched_internal.contains(&name) {
format!("^{}", name)
} else {
name.to_string()
};
matcher.add(
name.clone(),
SemanticSuggestion {
suggestion: Suggestion {
value: name,
description: None,
style: None,
extra: None,
span: sugg_span,
append_whitespace: true,
},
// TODO: is there a way to create a test?
kind: None,
},
);
}
}
}
@@ -76,8 +91,6 @@ impl CommandCompletion {
}
}
}
executables
}
fn complete_commands(
@@ -86,73 +99,47 @@ impl CommandCompletion {
span: Span,
offset: usize,
find_externals: bool,
match_algorithm: MatchAlgorithm,
options: MatcherOptions,
) -> Vec<SemanticSuggestion> {
let partial = working_set.get_span_contents(span);
let partial = String::from_utf8_lossy(partial);
let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial);
let sugg_span = reedline::Span::new(span.start - offset, span.end - offset);
let mut results = working_set
.find_commands_by_predicate(filter_predicate, true)
.into_iter()
.map(move |x| SemanticSuggestion {
let mut matcher = NuMatcher::new(partial, options);
let all_internal_commands = working_set.find_commands_by_predicate(|_| true, true);
let mut matched_internal = HashSet::new();
for (name, usage, typ) in all_internal_commands {
let name = String::from_utf8_lossy(&name);
let sugg = SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&x.0).to_string(),
description: x.1,
value: name.to_string(),
description: usage,
style: None,
extra: None,
span: reedline::Span::new(span.start - offset, span.end - offset),
span: sugg_span,
append_whitespace: true,
},
kind: Some(SuggestionKind::Command(x.2)),
})
.collect::<Vec<_>>();
let partial = working_set.get_span_contents(span);
let partial = String::from_utf8_lossy(partial).to_string();
kind: Some(SuggestionKind::Command(typ)),
};
if matcher.add(&name, sugg) {
matched_internal.insert(name.to_string());
}
}
if find_externals {
let results_external = self
.external_command_completion(working_set, &partial, match_algorithm)
.into_iter()
.map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: x,
description: None,
style: None,
extra: None,
span: reedline::Span::new(span.start - offset, span.end - offset),
append_whitespace: true,
},
// TODO: is there a way to create a test?
kind: None,
});
let results_strings: Vec<String> =
results.iter().map(|x| x.suggestion.value.clone()).collect();
for external in results_external {
if results_strings.contains(&external.suggestion.value) {
results.push(SemanticSuggestion {
suggestion: Suggestion {
value: format!("^{}", external.suggestion.value),
description: None,
style: None,
extra: None,
span: external.suggestion.span,
append_whitespace: true,
},
kind: external.kind,
})
} else {
results.push(external)
}
}
results
} else {
results
self.external_command_completion(
working_set,
sugg_span,
matched_internal,
&mut matcher,
);
}
matcher.get_results()
}
}
@@ -167,6 +154,8 @@ impl Completer for CommandCompletion {
pos: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let matcher_options = MatcherOptions::new(options).sort_by(self.get_sort_by());
let last = self
.flattened
.iter()
@@ -191,7 +180,7 @@ impl Completer for CommandCompletion {
Span::new(last.0.start, pos),
offset,
false,
options.match_algorithm,
matcher_options.clone(),
)
} else {
vec![]
@@ -221,7 +210,7 @@ impl Completer for CommandCompletion {
span,
offset,
config.enable_external_completion,
options.match_algorithm,
matcher_options,
)
} else {
vec![]
@@ -1,7 +1,3 @@
use crate::{
completions::{matches, CompletionOptions},
SemanticSuggestion,
};
use nu_ansi_term::Style;
use nu_engine::env_to_string;
use nu_path::{expand_to_real_path, home_dir};
@@ -14,75 +10,100 @@ use std::path::{
is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR,
};
use super::SortBy;
use super::completion_options::{MatcherOptions, NuMatcher};
#[derive(Clone, Default)]
#[derive(Clone, Debug, Default)]
pub struct PathBuiltFromString {
cwd: PathBuf,
parts: Vec<String>,
isdir: bool,
}
/// Recursively find files matching the search string
///
/// # Arguments
///
/// * `partial` - Remaining components of the partial text the user's typed
/// * `built_paths` - Directories matching the previous components of `partial`
/// * `isdir` - Is the user looking for a directory? (true if partial text ended in a slash)
fn complete_rec(
partial: &[&str],
built: &PathBuiltFromString,
cwd: &Path,
options: &CompletionOptions,
dir: bool,
built_paths: &[PathBuiltFromString],
options: MatcherOptions,
want_dir: bool,
isdir: bool,
) -> Vec<PathBuiltFromString> {
let mut completions = vec![];
if let Some((&base, rest)) = partial.split_first() {
if (base == "." || base == "..") && (isdir || !rest.is_empty()) {
let mut built = built.clone();
built.parts.push(base.to_string());
built.isdir = true;
return complete_rec(rest, &built, cwd, options, dir, isdir);
let builts: Vec<_> = built_paths
.iter()
.map(|built| {
let mut built = built.clone();
built.parts.push(base.to_string());
built.isdir = true;
built
})
.collect();
return complete_rec(rest, &builts, options, want_dir, isdir);
}
}
let mut built_path = cwd.to_path_buf();
for part in &built.parts {
built_path.push(part);
}
let entries: Vec<_> = built_paths
.iter()
.flat_map(|built| {
let mut built_path = built.cwd.clone();
for part in &built.parts {
built_path.push(part);
}
let Ok(result) = built_path.read_dir() else {
return completions;
};
let Ok(result) = built_path.read_dir() else {
return Vec::new();
};
let mut entries = Vec::new();
for entry in result.filter_map(|e| e.ok()) {
let entry_name = entry.file_name().to_string_lossy().into_owned();
let entry_isdir = entry.path().is_dir();
let mut built = built.clone();
built.parts.push(entry_name.clone());
built.isdir = entry_isdir;
result
.filter_map(|e| e.ok())
.filter_map(|entry| {
let entry_name = entry.file_name().to_string_lossy().into_owned();
let entry_isdir = entry.path().is_dir();
let mut built = built.clone();
built.parts.push(entry_name.clone());
built.isdir = entry_isdir;
if !dir || entry_isdir {
entries.push((entry_name, built));
}
}
let prefix = partial.first().unwrap_or(&"");
let sorted_entries = sort_completions(prefix, entries, SortBy::Ascending, |(entry, _)| entry);
for (entry_name, built) in sorted_entries {
match partial.split_first() {
Some((base, rest)) => {
if matches(base, &entry_name, options) {
if !rest.is_empty() || isdir {
completions.extend(complete_rec(rest, &built, cwd, options, dir, isdir));
if !want_dir || entry_isdir {
Some((entry_name, built))
} else {
completions.push(built);
None
}
}
}
None => {
completions.push(built);
}
})
.collect()
})
.collect();
if let Some((base, rest)) = partial.split_first() {
let mut matcher = NuMatcher::new(base, options.clone());
for (entry_name, built) in entries {
matcher.add(entry_name, built);
}
let results = matcher.get_results();
if !rest.is_empty() || isdir {
results
.into_iter()
.flat_map(|built| complete_rec(rest, &[built], options.clone(), want_dir, isdir))
.collect()
} else {
results
}
} else {
// We could directly return the entries, but then they wouldn't be sorted
let mut matcher = NuMatcher::new("", options.clone());
for (entry_name, built) in entries {
matcher.add(entry_name, built);
}
matcher.get_results()
}
completions
}
#[derive(Debug)]
@@ -93,7 +114,7 @@ enum OriginalCwd {
}
impl OriginalCwd {
fn apply(&self, mut p: PathBuiltFromString) -> String {
fn apply(&self, p: &mut PathBuiltFromString) -> String {
match self {
Self::None => {}
Self::Home => p.parts.insert(0, "~".to_string()),
@@ -122,18 +143,22 @@ fn surround_remove(partial: &str) -> String {
partial.to_string()
}
/// Looks inside a set of directories (given by `cwds`) to find files matching
/// `partial` (text the user typed in)
///
/// Returns (span, cwd, path suggestion, style)
pub fn complete_item(
want_directory: bool,
span: nu_protocol::Span,
partial: &str,
cwd: &str,
options: &CompletionOptions,
cwds: &[impl AsRef<str>],
options: MatcherOptions,
engine_state: &EngineState,
stack: &Stack,
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
) -> Vec<(nu_protocol::Span, PathBuf, String, Option<Style>)> {
let partial = surround_remove(partial);
let isdir = partial.ends_with(is_separator);
let cwd_pathbuf = Path::new(cwd).to_path_buf();
let cwd_pathbufs: Vec<_> = cwds.iter().map(|cwd| PathBuf::from(cwd.as_ref())).collect();
let ls_colors = (engine_state.config.use_ls_colors_completions
&& engine_state.config.use_ansi_coloring)
.then(|| {
@@ -144,7 +169,7 @@ pub fn complete_item(
get_ls_colors(ls_colors_env_str)
});
let mut cwd = cwd_pathbuf.clone();
let mut cwds = cwd_pathbufs.clone();
let mut prefix_len = 0;
let mut original_cwd = OriginalCwd::None;
@@ -156,7 +181,7 @@ pub fn complete_item(
if let Some(Component::RootDir) = components.peek().cloned() {
components.next();
};
cwd = [c, Component::RootDir].iter().collect();
cwds = vec![[c, Component::RootDir].iter().collect()];
prefix_len = c.as_os_str().len();
original_cwd = OriginalCwd::Prefix(c.as_os_str().to_string_lossy().into_owned());
}
@@ -164,13 +189,13 @@ pub fn complete_item(
components.next();
// This is kind of a hack. When joining an empty string with the rest,
// we add the slash automagically
cwd = PathBuf::from(c.as_os_str());
cwds = vec![PathBuf::from(c.as_os_str())];
prefix_len = 1;
original_cwd = OriginalCwd::Prefix(String::new());
}
Some(Component::Normal(home)) if home.to_string_lossy() == "~" => {
components.next();
cwd = home_dir().unwrap_or(cwd_pathbuf);
cwds = home_dir().map(|dir| vec![dir]).unwrap_or(cwd_pathbufs);
prefix_len = 1;
original_cwd = OriginalCwd::Home;
}
@@ -187,15 +212,21 @@ pub fn complete_item(
complete_rec(
partial.as_slice(),
&PathBuiltFromString::default(),
&cwd,
&cwds
.into_iter()
.map(|cwd| PathBuiltFromString {
cwd,
parts: Vec::new(),
isdir: false,
})
.collect::<Vec<_>>(),
options,
want_directory,
isdir,
)
.into_iter()
.map(|p| {
let path = original_cwd.apply(p);
.map(|mut p| {
let path = original_cwd.apply(&mut p);
let style = ls_colors.as_ref().map(|lsc| {
lsc.style_for_path_with_metadata(
&path,
@@ -206,7 +237,7 @@ pub fn complete_item(
.map(lscolors::Style::to_nu_ansi_term_style)
.unwrap_or_default()
});
(span, escape_path(path, want_directory), style)
(span, p.cwd, escape_path(path, want_directory), style)
})
.collect()
}
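The reworked complete_rec above threads a set of base directories (built_paths) through the recursion instead of a single cwd: each level filters the entries of every base directory against the next component of the partial path, then recurses into the survivors. Below is a simplified, self-contained sketch of that fan-out idea using only std; complete_component and the example paths are illustrative, not code from this diff:

```rust
use std::path::PathBuf;

/// Illustrative only: walk one component of a partial path across several base
/// directories, keeping every entry whose name starts with that component.
fn complete_component(bases: &[PathBuf], partial: &[&str]) -> Vec<PathBuf> {
    let Some((component, rest)) = partial.split_first() else {
        // Nothing left to match: every base is itself a completion.
        return bases.to_vec();
    };

    let mut next_bases = Vec::new();
    for base in bases {
        let Ok(entries) = base.read_dir() else { continue };
        for entry in entries.filter_map(|e| e.ok()) {
            let name = entry.file_name().to_string_lossy().into_owned();
            if name.starts_with(component) {
                next_bases.push(base.join(&name));
            }
        }
    }

    if rest.is_empty() {
        next_bases
    } else {
        // Fan out: recurse into every entry that matched this component.
        complete_component(&next_bases, rest)
    }
}

fn main() {
    // Placeholder roots and partial input ("src/ma<tab>"-style).
    let bases = [PathBuf::from("."), PathBuf::from("/tmp")];
    for hit in complete_component(&bases, &["src", "ma"]) {
        println!("{}", hit.display());
    }
}
```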
@@ -1,7 +1,11 @@
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use nu_parser::trim_quotes_str;
use nu_protocol::CompletionAlgorithm;
use std::fmt::Display;
use nu_protocol::{levenshtein_distance, CompletionAlgorithm};
use nu_utils::IgnoreCaseExt;
use nucleo_matcher::{
pattern::{AtomKind, CaseMatching, Normalization, Pattern},
Config, Matcher, Utf32Str,
};
use std::{borrow::Cow, cmp::Ordering, fmt::Display, path::MAIN_SEPARATOR};
#[derive(Copy, Clone)]
pub enum SortBy {
@@ -26,33 +30,230 @@ pub enum MatchAlgorithm {
Fuzzy,
}
impl MatchAlgorithm {
/// Returns whether the `needle` search text matches the given `haystack`.
pub fn matches_str(&self, haystack: &str, needle: &str) -> bool {
let haystack = trim_quotes_str(haystack);
let needle = trim_quotes_str(needle);
match *self {
MatchAlgorithm::Prefix => haystack.starts_with(needle),
pub struct NuMatcher<T> {
options: MatcherOptions,
needle: String,
state: State<T>,
}
enum State<T> {
Prefix {
/// Holds (haystack, item)
items: Vec<(String, T)>,
},
Nucleo {
matcher: Matcher,
pat: Pattern,
/// Holds (score, haystack, item, indices of matches)
items: Vec<(u32, String, T, Vec<usize>)>,
},
}
#[derive(Clone)]
pub struct MatcherOptions {
pub match_algorithm: MatchAlgorithm,
pub case_sensitive: bool,
/// How to sort results. [`SortBy::None`] by default.
pub sort_by: SortBy,
/// When fuzzy matching, this will configure Nucleo to reward file paths.
/// When sorting alphabetically, this will disregard trailing slashes.
/// False by default.
pub match_paths: bool,
}
impl<T> NuMatcher<T> {
/// # Arguments
///
/// * `needle` - The text to search for
pub fn new(needle: impl AsRef<str>, options: MatcherOptions) -> NuMatcher<T> {
let orig_needle = trim_quotes_str(needle.as_ref());
let lowercase_needle = if options.case_sensitive {
orig_needle.to_owned()
} else {
orig_needle.to_folded_case()
};
match options.match_algorithm {
MatchAlgorithm::Prefix => NuMatcher {
options,
needle: lowercase_needle,
state: State::Prefix { items: Vec::new() },
},
MatchAlgorithm::Fuzzy => {
let matcher = SkimMatcherV2::default();
matcher.fuzzy_match(haystack, needle).is_some()
let matcher = Matcher::new(if options.match_paths {
Config::DEFAULT.match_paths()
} else {
Config::DEFAULT
});
let pat = Pattern::new(
// Use the original needle even if case sensitive, because Nucleo handles that
orig_needle,
if options.case_sensitive {
CaseMatching::Respect
} else {
CaseMatching::Ignore
},
Normalization::Smart,
AtomKind::Fuzzy,
);
NuMatcher {
options,
// Use lowercase needle here for Levenshtein distance comparison
needle: lowercase_needle,
state: State::Nucleo {
matcher,
pat,
items: Vec::new(),
},
}
}
}
}
/// Returns whether the `needle` search text matches the given `haystack`.
pub fn matches_u8(&self, haystack: &[u8], needle: &[u8]) -> bool {
match *self {
MatchAlgorithm::Prefix => haystack.starts_with(needle),
MatchAlgorithm::Fuzzy => {
let haystack_str = String::from_utf8_lossy(haystack);
let needle_str = String::from_utf8_lossy(needle);
/// Add the given item if the given haystack matches.
///
/// Returns whether the item was added.
pub fn add(&mut self, haystack: impl AsRef<str>, item: T) -> bool {
let haystack = haystack.as_ref();
let matcher = SkimMatcherV2::default();
matcher.fuzzy_match(&haystack_str, &needle_str).is_some()
match &mut self.state {
State::Prefix { items } => {
let haystack = trim_quotes_str(haystack).to_owned();
let haystack_lowercased = if self.options.case_sensitive {
Cow::Borrowed(&haystack)
} else {
Cow::Owned(haystack.to_folded_case())
};
if haystack_lowercased.starts_with(self.needle.as_str()) {
match self.options.sort_by {
SortBy::None => items.push((haystack, item)),
_ => {
let ind = match items.binary_search_by(|(other, _)| {
cmp(
&self.needle,
&self.options,
other.as_str(),
haystack.as_str(),
)
}) {
Ok(i) => i,
Err(i) => i,
};
items.insert(ind, (haystack, item));
}
}
true
} else {
false
}
}
State::Nucleo {
matcher,
pat,
items,
} => {
let mut haystack_buf = Vec::new();
let haystack_utf32 = Utf32Str::new(trim_quotes_str(haystack), &mut haystack_buf);
// todo find out why nucleo uses u32 instead of usize for indices
let mut indices = Vec::new();
match pat.indices(haystack_utf32, matcher, &mut indices) {
Some(score) => {
indices.sort_unstable();
indices.dedup();
let match_record = (
score,
haystack.to_string(),
item,
indices.into_iter().map(|i| i as usize).collect(),
);
let ind =
match items.binary_search_by(|(other_score, other_haystack, _, _)| {
match self.options.sort_by {
SortBy::None => {
// Use alphabetical order if same score
score
.cmp(other_score)
.then(other_haystack.as_str().cmp(haystack))
}
_ => cmp(
&self.needle,
&self.options,
other_haystack.as_str(),
haystack,
),
}
}) {
Ok(i) => i,
Err(i) => i,
};
items.insert(ind, match_record);
true
}
None => false,
}
}
}
}
/// Get all the items that matched
pub fn get_results(self) -> Vec<T> {
let (results, _): (Vec<_>, Vec<_>) = self.get_results_with_inds().into_iter().unzip();
results
}
/// Get all the items that matched, along with the indices in their haystacks that matched
pub fn get_results_with_inds(self) -> Vec<(T, Vec<usize>)> {
match self.state {
State::Prefix { items, .. } => items
.into_iter()
.map(|(_, item)| (item, (0..self.needle.len()).collect()))
.collect(),
State::Nucleo { items, .. } => items
.into_iter()
.map(|(_, _, items, indices)| (items, indices))
.collect(),
}
}
}
fn cmp(needle: &str, options: &MatcherOptions, a: &str, b: &str) -> Ordering {
let alpha_ordering = if options.match_paths {
a.trim_end_matches(MAIN_SEPARATOR)
.cmp(b.trim_end_matches(MAIN_SEPARATOR))
} else {
a.cmp(b)
};
match options.sort_by {
SortBy::LevenshteinDistance => {
let a_distance = levenshtein_distance(needle, a);
let b_distance = levenshtein_distance(needle, b);
a_distance.cmp(&b_distance).then(alpha_ordering)
}
SortBy::Ascending => alpha_ordering,
SortBy::None => Ordering::Less,
}
}
impl MatcherOptions {
pub fn new(completion_options: &CompletionOptions) -> Self {
MatcherOptions {
match_algorithm: completion_options.match_algorithm,
case_sensitive: completion_options.case_sensitive,
sort_by: SortBy::None,
match_paths: false,
}
}
pub fn sort_by(mut self, sort_by: SortBy) -> Self {
self.sort_by = sort_by;
self
}
pub fn match_paths(mut self, match_paths: bool) -> Self {
self.match_paths = match_paths;
self
}
}
impl From<CompletionAlgorithm> for MatchAlgorithm {
@@ -110,35 +311,54 @@ impl Default for CompletionOptions {
#[cfg(test)]
mod test {
use super::MatchAlgorithm;
use rstest::rstest;
#[test]
fn match_algorithm_prefix() {
let algorithm = MatchAlgorithm::Prefix;
use super::{CompletionOptions, MatchAlgorithm, MatcherOptions, NuMatcher};
assert!(algorithm.matches_str("example text", ""));
assert!(algorithm.matches_str("example text", "examp"));
assert!(!algorithm.matches_str("example text", "text"));
#[rstest]
#[case(MatchAlgorithm::Prefix, "example text", "", true)]
#[case(MatchAlgorithm::Prefix, "example text", "examp", true)]
#[case(MatchAlgorithm::Prefix, "example text", "text", false)]
#[case(MatchAlgorithm::Fuzzy, "example text", "", true)]
#[case(MatchAlgorithm::Fuzzy, "example text", "examp", true)]
#[case(MatchAlgorithm::Fuzzy, "example text", "ext", true)]
#[case(MatchAlgorithm::Fuzzy, "example text", "mplxt", true)]
#[case(MatchAlgorithm::Fuzzy, "example text", "mpp", false)]
fn match_algorithm_simple(
#[case] match_algorithm: MatchAlgorithm,
#[case] haystack: &str,
#[case] needle: &str,
#[case] should_match: bool,
) {
let options = MatcherOptions::new(&CompletionOptions {
match_algorithm,
case_sensitive: true,
positional: false,
});
assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
let mut matcher = NuMatcher::new(needle, options);
matcher.add(haystack, haystack);
if should_match {
assert_eq!(vec![haystack], matcher.get_results());
} else {
assert_ne!(vec![haystack], matcher.get_results());
}
}
#[test]
fn match_algorithm_fuzzy() {
let algorithm = MatchAlgorithm::Fuzzy;
fn match_algorithm_fuzzy_sort_score() {
let options = MatcherOptions::new(&CompletionOptions {
match_algorithm: MatchAlgorithm::Fuzzy,
case_sensitive: true,
positional: false,
});
assert!(algorithm.matches_str("example text", ""));
assert!(algorithm.matches_str("example text", "examp"));
assert!(algorithm.matches_str("example text", "ext"));
assert!(algorithm.matches_str("example text", "mplxt"));
assert!(!algorithm.matches_str("example text", "mpp"));
assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
assert!(algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 3]));
assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 2]));
// Taken from the nucleo-matcher crate's examples
// todo more thorough tests
let mut matcher = NuMatcher::new("foo bar", options);
matcher.add("foo/bar", "foo/bar");
matcher.add("bar/foo", "bar/foo");
matcher.add("foobar", "foobar");
assert_eq!(vec!["bar/foo", "foo/bar", "foobar"], matcher.get_results());
}
}
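Both NuMatcher states keep their items vector ordered as matches arrive by inserting at the position reported by binary_search_by (highest score first, with an alphabetical tie-break), so get_results never needs a separate sort. A standalone sketch of that insertion pattern under those assumptions; insert_sorted and the sample data are illustrative, not part of the diff:

```rust
// Keep a Vec sorted as items arrive: insert each new element at the position
// binary_search_by reports, whether or not an equal element already exists.
fn insert_sorted(items: &mut Vec<(u32, String)>, new: (u32, String)) {
    let pos = match items.binary_search_by(|(score, name)| {
        // Higher score first, then alphabetical as a tie-breaker.
        new.0.cmp(score).then(name.as_str().cmp(new.1.as_str()))
    }) {
        Ok(i) => i,
        Err(i) => i,
    };
    items.insert(pos, new);
}

fn main() {
    let mut items = Vec::new();
    // Hypothetical (score, haystack) pairs arriving in arbitrary order.
    for (score, name) in [(10, "foobar"), (40, "foo/bar"), (40, "bar/foo")] {
        insert_sorted(&mut items, (score, name.to_string()));
    }
    // Highest score first; equal scores fall back to alphabetical order.
    assert_eq!(
        items.iter().map(|(_, n)| n.as_str()).collect::<Vec<_>>(),
        ["bar/foo", "foo/bar", "foobar"]
    );
    println!("{items:?}");
}
```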
@@ -9,10 +9,9 @@ use nu_protocol::{
engine::{Stack, StateWorkingSet},
PipelineData, Span, Type, Value,
};
use nu_utils::IgnoreCaseExt;
use std::collections::HashMap;
use super::completion_common::sort_suggestions;
use super::completion_options::{MatcherOptions, NuMatcher};
pub struct CustomCompletion {
stack: Stack,
@@ -124,42 +123,30 @@ impl Completer for CustomCompletion {
})
.unwrap_or_default();
let suggestions = if let Some(custom_completion_options) = custom_completion_options {
filter(&prefix, suggestions, &custom_completion_options)
} else {
filter(&prefix, suggestions, completion_options)
};
sort_suggestions(&String::from_utf8_lossy(&prefix), suggestions, self.sort_by)
filter(
&prefix,
suggestions,
MatcherOptions::new(
custom_completion_options
.as_ref()
.unwrap_or(completion_options),
)
.sort_by(self.sort_by),
)
}
}
fn filter(
prefix: &[u8],
items: Vec<SemanticSuggestion>,
options: &CompletionOptions,
options: MatcherOptions,
) -> Vec<SemanticSuggestion> {
items
.into_iter()
.filter(|it| match options.match_algorithm {
MatchAlgorithm::Prefix => match (options.case_sensitive, options.positional) {
(true, true) => it.suggestion.value.as_bytes().starts_with(prefix),
(true, false) => it
.suggestion
.value
.contains(std::str::from_utf8(prefix).unwrap_or("")),
(false, positional) => {
let value = it.suggestion.value.to_folded_case();
let prefix = std::str::from_utf8(prefix).unwrap_or("").to_folded_case();
if positional {
value.starts_with(&prefix)
} else {
value.contains(&prefix)
}
}
},
MatchAlgorithm::Fuzzy => options
.match_algorithm
.matches_u8(it.suggestion.value.as_bytes(), prefix),
})
.collect()
let prefix = String::from_utf8_lossy(prefix);
let mut matcher = NuMatcher::new(prefix, options);
for it in items {
matcher.add(it.suggestion.value.clone(), it);
}
matcher.get_results()
}
@@ -8,9 +8,9 @@ use nu_protocol::{
Span,
};
use reedline::Suggestion;
use std::path::Path;
use std::path::{Path, PathBuf};
use super::SemanticSuggestion;
use super::{completion_options::MatcherOptions, SemanticSuggestion};
#[derive(Clone, Default)]
pub struct DirectoryCompletion {}
@@ -39,21 +39,23 @@ impl Completer for DirectoryCompletion {
let items: Vec<_> = directory_completion(
span,
&prefix,
&working_set.permanent_state.current_work_dir(),
options,
&[&working_set.permanent_state.current_work_dir()],
MatcherOptions::new(options)
.sort_by(self.get_sort_by())
.match_paths(true),
working_set.permanent_state,
stack,
)
.into_iter()
.map(move |x| SemanticSuggestion {
.map(move |(span, _, path, style)| SemanticSuggestion {
suggestion: Suggestion {
value: x.1,
value: path,
description: None,
style: x.2,
style,
extra: None,
span: reedline::Span {
start: x.0.start - offset,
end: x.0.end - offset,
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: false,
},
@@ -90,10 +92,10 @@ impl Completer for DirectoryCompletion {
pub fn directory_completion(
span: nu_protocol::Span,
partial: &str,
cwd: &str,
options: &CompletionOptions,
cwds: &[&str],
options: MatcherOptions,
engine_state: &EngineState,
stack: &Stack,
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
complete_item(true, span, partial, cwd, options, engine_state, stack)
) -> Vec<(nu_protocol::Span, PathBuf, String, Option<Style>)> {
complete_item(true, span, partial, cwds, options, engine_state, stack)
}
@@ -6,7 +6,7 @@ use nu_protocol::{
use reedline::Suggestion;
use std::path::{is_separator, Path, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR};
use super::{completion_common::sort_suggestions, SemanticSuggestion, SortBy};
use super::{completion_options::MatcherOptions, SemanticSuggestion};
#[derive(Clone, Default)]
pub struct DotNuCompletion {}
@@ -87,50 +87,44 @@ impl Completer for DotNuCompletion {
// Fetch the files filtering the ones that ends with .nu
// and transform them into suggestions
let output: Vec<SemanticSuggestion> = search_dirs
let completions = file_path_completion(
span,
&partial,
&search_dirs,
MatcherOptions::new(options).sort_by(SortBy::LevenshteinDistance),
working_set.permanent_state,
stack,
);
completions
.into_iter()
.flat_map(|search_dir| {
let completions = file_path_completion(
span,
&partial,
&search_dir,
options,
working_set.permanent_state,
stack,
);
completions
.into_iter()
.filter(move |it| {
// Different base dir, so we list the .nu files or folders
if !is_current_folder {
it.1.ends_with(".nu") || it.1.ends_with(SEP)
} else {
// Lib dirs, so we filter only the .nu files or directory modules
if it.1.ends_with(SEP) {
Path::new(&search_dir).join(&it.1).join("mod.nu").exists()
} else {
it.1.ends_with(".nu")
}
}
})
.map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: x.1,
description: None,
style: x.2,
extra: None,
span: reedline::Span {
start: x.0.start - offset,
end: x.0.end - offset,
},
append_whitespace: true,
},
// TODO????
kind: None,
})
.filter(move |(_, search_dir, path, _)| {
// Different base dir, so we list the .nu files or folders
if !is_current_folder {
path.ends_with(".nu") || path.ends_with(SEP)
} else {
// Lib dirs, so we filter only the .nu files or directory modules
if path.ends_with(SEP) {
Path::new(&search_dir).join(path).join("mod.nu").exists()
} else {
path.ends_with(".nu")
}
}
})
.collect();
sort_suggestions(&prefix_str, output, SortBy::Ascending)
.map(move |(span, _, path, style)| SemanticSuggestion {
suggestion: Suggestion {
value: path,
description: None,
style,
extra: None,
span: reedline::Span {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: true,
},
// TODO????
kind: None,
})
.collect()
}
}
@@ -7,11 +7,10 @@ use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet},
Span,
};
use nu_utils::IgnoreCaseExt;
use reedline::Suggestion;
use std::path::Path;
use std::path::{Path, PathBuf};
use super::SemanticSuggestion;
use super::{completion_options::MatcherOptions, SemanticSuggestion};
#[derive(Clone, Default)]
pub struct FileCompletion {}
@@ -44,21 +43,23 @@ impl Completer for FileCompletion {
readjusted,
span,
&prefix,
&working_set.permanent_state.current_work_dir(),
options,
&[&working_set.permanent_state.current_work_dir()],
MatcherOptions::new(options)
.sort_by(self.get_sort_by())
.match_paths(true),
working_set.permanent_state,
stack,
)
.into_iter()
.map(move |x| SemanticSuggestion {
.map(move |(span, _, path, style)| SemanticSuggestion {
suggestion: Suggestion {
value: x.1,
value: path,
description: None,
style: x.2,
style,
extra: None,
span: reedline::Span {
start: x.0.start - offset,
end: x.0.end - offset,
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: false,
},
@@ -97,21 +98,10 @@ impl Completer for FileCompletion {
pub fn file_path_completion(
span: nu_protocol::Span,
partial: &str,
cwd: &str,
options: &CompletionOptions,
cwds: &[impl AsRef<str>],
options: MatcherOptions,
engine_state: &EngineState,
stack: &Stack,
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
complete_item(false, span, partial, cwd, options, engine_state, stack)
}
pub fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {
// Check for case sensitive
if !options.case_sensitive {
return options
.match_algorithm
.matches_str(&from.to_folded_case(), &partial.to_folded_case());
}
options.match_algorithm.matches_str(from, partial)
) -> Vec<(nu_protocol::Span, PathBuf, String, Option<Style>)> {
complete_item(false, span, partial, cwds, options, engine_state, stack)
}
@@ -1,5 +1,6 @@
use crate::completions::{
completion_common::sort_suggestions, Completer, CompletionOptions, SortBy,
completion_options::{MatcherOptions, NuMatcher},
Completer, CompletionOptions,
};
use nu_protocol::{
ast::{Expr, Expression},
@@ -37,19 +38,22 @@ impl Completer for FlagCompletion {
let decl = working_set.get_decl(call.decl_id);
let sig = decl.signature();
let mut output = vec![];
let prefix = String::from_utf8_lossy(&prefix);
let mut matcher = NuMatcher::new(
prefix,
MatcherOptions::new(options).sort_by(self.get_sort_by()),
);
for named in &sig.named {
let flag_desc = &named.desc;
if let Some(short) = named.short {
let mut named = vec![0; short.len_utf8()];
short.encode_utf8(&mut named);
named.insert(0, b'-');
let named = format!("-{}", short);
if options.match_algorithm.matches_u8(&named, &prefix) {
output.push(SemanticSuggestion {
matcher.add(
named.clone(),
SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&named).to_string(),
value: named,
description: Some(flag_desc.to_string()),
style: None,
extra: None,
@@ -61,22 +65,21 @@ impl Completer for FlagCompletion {
},
// TODO????
kind: None,
});
}
},
);
}
if named.long.is_empty() {
continue;
}
let mut named = named.long.as_bytes().to_vec();
named.insert(0, b'-');
named.insert(0, b'-');
let named = format!("--{}", named.long);
if options.match_algorithm.matches_u8(&named, &prefix) {
output.push(SemanticSuggestion {
matcher.add(
named.clone(),
SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&named).to_string(),
value: named,
description: Some(flag_desc.to_string()),
style: None,
extra: None,
@@ -88,11 +91,11 @@ impl Completer for FlagCompletion {
},
// TODO????
kind: None,
});
}
},
);
}
return sort_suggestions(&String::from_utf8_lossy(&prefix), output, SortBy::Ascending);
return matcher.get_results();
}
vec![]
@@ -17,6 +17,6 @@ pub use completion_options::{CompletionOptions, MatchAlgorithm, SortBy};
pub use custom_completions::CustomCompletion;
pub use directory_completions::DirectoryCompletion;
pub use dotnu_completions::DotNuCompletion;
pub use file_completions::{file_path_completion, matches, FileCompletion};
pub use file_completions::{file_path_completion, FileCompletion};
pub use flag_completions::FlagCompletion;
pub use variable_completions::VariableCompletion;
@@ -1,6 +1,4 @@
use crate::completions::{
Completer, CompletionOptions, MatchAlgorithm, SemanticSuggestion, SuggestionKind,
};
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind};
use nu_engine::{column::get_columns, eval_variable};
use nu_protocol::{
engine::{Stack, StateWorkingSet},
@@ -9,7 +7,7 @@ use nu_protocol::{
use reedline::Suggestion;
use std::str;
use super::{completion_common::sort_suggestions, SortBy};
use super::completion_options::{MatcherOptions, NuMatcher};
#[derive(Clone)]
pub struct VariableCompletion {
@@ -33,7 +31,6 @@ impl Completer for VariableCompletion {
_pos: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let mut output = vec![];
let builtins = ["$nu", "$in", "$env"];
let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or("");
let var_id = working_set.find_variable(&self.var_context.0);
@@ -44,6 +41,12 @@ impl Completer for VariableCompletion {
let sublevels_count = self.var_context.1.len();
let prefix_str = String::from_utf8_lossy(&prefix);
let prefix = String::from_utf8_lossy(&prefix);
let mut matcher = NuMatcher::new(
prefix,
MatcherOptions::new(options).sort_by(SortBy::Ascending),
);
// Completions for the given variable
if !var_str.is_empty() {
// Completion for $env.<tab>
@@ -62,41 +65,30 @@ impl Completer for VariableCompletion {
self.var_context.1.clone().into_iter().skip(1).collect();
if let Some(val) = env_vars.get(&target_var_str) {
for suggestion in nested_suggestions(val, &nested_levels, current_span) {
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
suggestion.suggestion.value.as_bytes(),
&prefix,
) {
output.push(suggestion);
}
for it in nested_suggestions(val, &nested_levels, current_span) {
matcher.add(it.suggestion.value.clone(), it);
}
return sort_suggestions(&prefix_str, output, SortBy::Ascending);
return matcher.get_results();
}
} else {
// No nesting provided, return all env vars
for env_var in env_vars {
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
env_var.0.as_bytes(),
&prefix,
) {
output.push(SemanticSuggestion {
for (var_name, value) in env_vars {
matcher.add(
var_name.clone(),
SemanticSuggestion {
suggestion: Suggestion {
value: env_var.0,
value: var_name,
description: None,
style: None,
extra: None,
span: current_span,
append_whitespace: false,
},
kind: Some(SuggestionKind::Type(env_var.1.get_type())),
});
}
kind: Some(SuggestionKind::Type(value.get_type())),
},
);
}
return sort_suggestions(&prefix_str, output, SortBy::Ascending);
return matcher.get_results();
}
}
@@ -109,18 +101,10 @@ impl Completer for VariableCompletion {
nu_protocol::NU_VARIABLE_ID,
nu_protocol::Span::new(current_span.start, current_span.end),
) {
for suggestion in nested_suggestions(&nuval, &self.var_context.1, current_span)
{
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
suggestion.suggestion.value.as_bytes(),
&prefix,
) {
output.push(suggestion);
}
for it in nested_suggestions(&nuval, &self.var_context.1, current_span) {
matcher.add(it.suggestion.value.clone(), it);
}
return sort_suggestions(&prefix_str, output, SortBy::Ascending);
return matcher.get_results();
}
}
@@ -131,30 +115,19 @@ impl Completer for VariableCompletion {
// If the value exists and it's of type Record
if let Ok(value) = var {
for suggestion in nested_suggestions(&value, &self.var_context.1, current_span)
{
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
suggestion.suggestion.value.as_bytes(),
&prefix,
) {
output.push(suggestion);
}
for it in nested_suggestions(&value, &self.var_context.1, current_span) {
matcher.add(it.suggestion.value.clone(), it);
}
return sort_suggestions(&prefix_str, output, SortBy::Ascending);
return matcher.get_results();
}
}
}
// Variable completion (e.g: $en<tab> to complete $env)
for builtin in builtins {
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
builtin.as_bytes(),
&prefix,
) {
output.push(SemanticSuggestion {
matcher.add(
builtin,
SemanticSuggestion {
suggestion: Suggestion {
value: builtin.to_string(),
description: None,
@@ -165,8 +138,8 @@ impl Completer for VariableCompletion {
},
// TODO is there a way to get the VarId to get the type???
kind: None,
});
}
},
);
}
// TODO: The following can be refactored (see find_commands_by_predicate() used in
@@ -175,15 +148,13 @@ impl Completer for VariableCompletion {
// Working set scope vars
for scope_frame in working_set.delta.scope.iter().rev() {
for overlay_frame in scope_frame.active_overlays(&mut removed_overlays).rev() {
for v in &overlay_frame.vars {
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
v.0,
&prefix,
) {
output.push(SemanticSuggestion {
for (name, id) in &overlay_frame.vars {
let name = String::from_utf8_lossy(name);
matcher.add(
name.clone(),
SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(v.0).to_string(),
value: name.to_string(),
description: None,
style: None,
extra: None,
@@ -191,10 +162,10 @@ impl Completer for VariableCompletion {
append_whitespace: false,
},
kind: Some(SuggestionKind::Type(
working_set.get_variable(*v.1).ty.clone(),
working_set.get_variable(*id).ty.clone(),
)),
});
}
},
);
}
}
}
@@ -206,15 +177,13 @@ impl Completer for VariableCompletion {
.active_overlays(&removed_overlays)
.rev()
{
for v in &overlay_frame.vars {
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
v.0,
&prefix,
) {
output.push(SemanticSuggestion {
for (name, id) in &overlay_frame.vars {
let name = String::from_utf8_lossy(name);
matcher.add(
name.clone(),
SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(v.0).to_string(),
value: name.to_string(),
description: None,
style: None,
extra: None,
@@ -222,16 +191,18 @@ impl Completer for VariableCompletion {
append_whitespace: false,
},
kind: Some(SuggestionKind::Type(
working_set.get_variable(*v.1).ty.clone(),
working_set.get_variable(*id).ty.clone(),
)),
});
}
},
);
}
}
output.dedup(); // TODO: Removes only consecutive duplicates, is it intended?
let mut matches = matcher.get_results();
output
matches.dedup(); // TODO: Removes only consecutive duplicates, is it intended?
matches
}
}
@@ -318,13 +289,3 @@ fn recursive_value(val: &Value, sublevels: &[Vec<u8>]) -> Result<Value, Span> {
Ok(val.clone())
}
}
impl MatchAlgorithm {
pub fn matches_u8_insensitive(&self, sensitive: bool, haystack: &[u8], needle: &[u8]) -> bool {
if sensitive {
self.matches_u8(haystack, needle)
} else {
self.matches_u8(&haystack.to_ascii_lowercase(), &needle.to_ascii_lowercase())
}
}
}
@@ -158,6 +158,11 @@ fn variables_customcompletion_subcommands_with_customcompletion_2(
#[test]
fn dotnu_completions() {
#[cfg(windows)]
let expected = vec!["custom_completion.nu", "directory_completion\\"];
#[cfg(not(windows))]
let expected = vec!["custom_completion.nu", "directory_completion/"];
// Create a new engine
let (_, _, engine, stack) = new_dotnu_engine();