Skip to content

Commit

Permalink
feat: Add feature to run incremental search
Browse files Browse the repository at this point in the history
  • Loading branch information
AMythicDev committed Dec 8, 2023
1 parent 3b3f9bd commit 080bd88
Show file tree
Hide file tree
Showing 2 changed files with 52 additions and 31 deletions.
7 changes: 7 additions & 0 deletions src/cli.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,11 @@ pub struct CommandLineInterface {
pub filename: Vec<PathBuf>,
#[arg(short, long)]
pub buffers: Option<isize>,
/// Automatically quit once all files have been viewed. By default the pager
/// can only be quit with the "q" key.
#[arg(short = 'e', long)]
pub quit_on_eof: bool,
/// Whether to use incremental search for searches
#[arg(long = "incsearch")]
pub incsearch: bool,
}
76 changes: 45 additions & 31 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ impl FileList {
async fn main() -> Result<(), Box<(dyn std::error::Error + 'static)>> {
let cl_args = cli::CommandLineInterface::parse();

let mut filenames = cl_args.filename.into_iter();
let mut filenames = cl_args.filename.clone().into_iter();
let bufsize = cl_args.buffers.unwrap_or(64);
// TODO: Introduce proper error handling
assert!(
Expand Down Expand Up @@ -101,7 +101,9 @@ async fn main() -> Result<(), Box<(dyn std::error::Error + 'static)>> {
file_list.lock().push(first_filename, text);
let file_list_clone = file_list.clone();

let pager_run = tokio::task::spawn_blocking(move || start_pager(file_list_clone.clone()));
let pager = configure_pager(&&cl_args, file_list.clone())?;

Check warning on line 104 in src/main.rs

View workflow job for this annotation

GitHub Actions / Clippy Output

this expression creates a reference which is immediately dereferenced by the compiler

warning: this expression creates a reference which is immediately dereferenced by the compiler --> src/main.rs:104:33 | 104 | let pager = configure_pager(&&cl_args, file_list.clone())?; | ^^^^^^^^^ help: change this to: `&cl_args` | = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#needless_borrow = note: `#[warn(clippy::needless_borrow)]` on by default

tokio::task::spawn_blocking(move || start_pager(pager, file_list_clone.clone()));
tokio::spawn(async move {
let mut job_set = read_files_in_parallel(filenames).await;

Expand All @@ -115,35 +117,15 @@ async fn main() -> Result<(), Box<(dyn std::error::Error + 'static)>> {
Ok(())
}

async fn read_files_in_parallel(
filenames: IntoIter<PathBuf>,
) -> JoinSet<Result<(PathBuf, String), std::io::Error>> {
let mut job_set = JoinSet::new();

for fnm in filenames {
job_set.spawn(async move {
let mut buffer = Vec::with_capacity(64 * 1024);
// Immidiately read the first file into buffer
let mut file = File::open(&fnm).await?;
file.read_to_end(&mut buffer).await?;
let text = String::from_utf8_lossy(&buffer).into_owned();

Ok((fnm, text))
});
}
job_set
}

fn start_pager(file_list: Arc<Mutex<FileList>>) -> Result<(), MinusError> {
let mut fl_lock = file_list.lock();
let data = fl_lock.move_next().unwrap();

let (first_filename, first_file_data) = (data.0.clone(), data.1.clone());
drop(fl_lock);

fn configure_pager(
cl_args: &cli::CommandLineInterface,
file_list: Arc<Mutex<FileList>>,
) -> Result<Pager, MinusError> {
let pager = Pager::new();
let mut input_register = minus::input::HashedEventRegister::default();
let to_jump = AtomicBool::new(false);
let quit_on_eof = cl_args.quit_on_eof;
let incsearch = cl_args.incsearch;

let fl_clone = file_list.clone();
let pager_clone = pager.clone();
Expand All @@ -155,12 +137,15 @@ fn start_pager(file_list: Arc<Mutex<FileList>>) -> Result<(), MinusError> {
to_jump.store(false, std::sync::atomic::Ordering::SeqCst);
let mut guard = fl_clone.lock();
if guard.end() {
return InputEvent::Exit;
if quit_on_eof {
return InputEvent::Exit;
} else {
return InputEvent::Ignore;
}
}
let (filename, file_contents) = guard.move_next().unwrap();
let _ = pager_clone.set_text(file_contents);
let _ = pager_clone.set_prompt(filename.to_string_lossy());

InputEvent::Ignore
} else {
to_jump.store(true, std::sync::atomic::Ordering::SeqCst);
Expand All @@ -173,10 +158,39 @@ fn start_pager(file_list: Arc<Mutex<FileList>>) -> Result<(), MinusError> {
InputEvent::UpdateUpperMark(ps.upper_mark.saturating_add(position))
}
});
pager.set_input_classifier(Box::new(input_register))?;
pager.set_incremental_search_condition(Box::new(move |_| incsearch))?;
Ok(pager)
}

/// Spawn one async task per filename, each reading the entire file into a
/// `String`; returns the `JoinSet` holding the in-flight reads. Each task
/// resolves to the `(path, contents)` pair or the `std::io::Error` raised
/// while opening/reading the file.
async fn read_files_in_parallel(
    filenames: IntoIter<PathBuf>,
) -> JoinSet<Result<(PathBuf, String), std::io::Error>> {
    let mut job_set = JoinSet::new();

    for fnm in filenames {
        job_set.spawn(async move {
            // Preallocate 64 KiB so small and medium files avoid buffer
            // regrowth; larger files still grow the Vec as needed.
            let mut buffer = Vec::with_capacity(64 * 1024);
            // Read the whole file; non-UTF-8 bytes are replaced lossily below
            // instead of failing the read.
            let mut file = File::open(&fnm).await?;
            file.read_to_end(&mut buffer).await?;
            let text = String::from_utf8_lossy(&buffer).into_owned();

            Ok((fnm, text))
        });
    }
    job_set
}

/// Load the first file from `file_list` into the pager and hand control to
/// minus's dynamic paging loop, blocking until the user quits.
///
/// The `pager` is expected to arrive fully configured by `configure_pager`
/// (input classifier, incremental-search condition), so this function only
/// seeds the initial text/prompt and starts paging.
fn start_pager(pager: Pager, file_list: Arc<Mutex<FileList>>) -> Result<(), MinusError> {
    // Take the first file under the lock, then release the lock before
    // paging starts so the input handlers (which also lock the list) can
    // make progress.
    let mut fl_lock = file_list.lock();
    // NOTE(review): unwrap assumes main() pushed the first file before
    // spawning this task — confirm against the caller.
    let data = fl_lock.move_next().unwrap();

    let (first_filename, first_file_data) = (data.0.clone(), data.1.clone());
    drop(fl_lock);

    pager.set_text(first_file_data)?;
    pager.set_prompt(first_filename.to_string_lossy())?;
    // Removed stale `set_input_classifier(Box::new(input_register))` call:
    // `input_register` is not in scope here — the classifier is registered
    // in `configure_pager`.

    minus::dynamic_paging(pager)
}
Expand Down

0 comments on commit 080bd88

Please sign in to comment.