citnames: main module methods are sliced up into smaller functions
rizsotto committed Jan 26, 2024
1 parent dce3a1a commit 60f2411
Showing 3 changed files with 86 additions and 102 deletions.
3 changes: 0 additions & 3 deletions source/citnames_rs/Cargo.toml
@@ -15,9 +15,6 @@
lazy_static = "1.4"
serde = { version = "1.0", default-features = false, features = ["derive"] }
serde_json = { version = "1.0", default-features = false, features = ["std"] }
path-absolutize = "3.1"
num_cpus = "1.16"
crossbeam = "0.8"
crossbeam-channel = "0.5"
json_compilation_db = "1.0"
log = "0.4"
simple_logger = { version = "4.2", default-features = false, features = ["timestamps"]}
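The three dropped crates existed only for the bounded-channel, worker-thread pipeline that the main.rs rewrite below removes (num_cpus sized the worker pool; crossbeam and crossbeam-channel carried the entries between threads). With lazy iterators, unreadable events are logged and skipped inline instead of being routed through a channel. A minimal sketch of that pattern, using hypothetical stand-in types rather than the project's own code (the real helpers log via the log crate, not eprintln!):

    // Sketch: log and drop failed reads inside the iterator chain itself,
    // so no crossbeam channels or worker threads are needed downstream.
    fn readable<T, E: std::fmt::Display>(
        candidates: impl Iterator<Item = Result<T, E>>,
    ) -> impl Iterator<Item = T> {
        // flat_map over Option keeps the Ok values and discards the rest.
        candidates.flat_map(|candidate| match candidate {
            Ok(value) => Some(value),
            Err(error) => {
                eprintln!("failed to read entry: {}", error);
                None
            }
        })
    }

    fn main() {
        let events = vec![Ok(1), Err("truncated record"), Ok(2)];
        let kept: Vec<i32> = readable(events.into_iter()).collect();
        assert_eq!(kept, vec![1, 2]);
    }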
4 changes: 2 additions & 2 deletions source/citnames_rs/src/configuration.rs
@@ -90,7 +90,7 @@ impl Default for Format {
// This will act as a filter on the output elements.
// These attributes can be read from the configuration file, and can be
// overridden by command line arguments.
#[derive(Debug, Deserialize, PartialEq)]
#[derive(Clone, Debug, Deserialize, PartialEq)]
pub struct Content {
#[serde(default = "disabled")]
pub include_only_existing_source: bool,
@@ -122,7 +122,7 @@ fn enabled() -> bool {
}

/// Represents how the duplicate filtering detects duplicate entries.
#[derive(Debug, Default, Deserialize, PartialEq)]
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
#[serde(try_from = "String")]
pub enum DuplicateFilterFields {
FileOnly,
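The added Clone derives support the refactored run() in main.rs below, which now converts a clone of self.configuration.output.content into an EntryPredicate so that self stays intact for the later create_entries() call. A reduced sketch of that clone-then-convert step, with simplified stand-in types (the real EntryPredicate and its From impl live in the filter module and may be defined differently):

    // Stand-in types: cloning the filter config before the conversion
    // leaves the original configuration untouched for later use.
    #[derive(Clone, Debug)]
    struct Content {
        include_only_existing_source: bool,
    }

    // Hypothetical stand-in for crate::filter::EntryPredicate.
    type EntryPredicate = Box<dyn Fn(&str) -> bool>;

    impl From<Content> for EntryPredicate {
        fn from(content: Content) -> Self {
            // The predicate takes ownership of the (cloned) config.
            Box::new(move |source| {
                !content.include_only_existing_source
                    || std::path::Path::new(source).exists()
            })
        }
    }

    fn main() {
        let content = Content { include_only_existing_source: false };
        let filter: EntryPredicate = content.clone().into();
        assert!(filter("main.c"));
        println!("config still usable: {:?}", content);
    }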
181 changes: 84 additions & 97 deletions source/citnames_rs/src/main.rs
@@ -20,19 +20,16 @@
extern crate core;

use std::fs::{File, OpenOptions};
use std::io::{BufReader, BufWriter, stdin, stdout};
use std::path::{Path, PathBuf};
use std::thread;
use std::io::{BufReader, BufWriter, Read, stdin, stdout};
use std::path::Path;

use anyhow::{anyhow, Context, Result};
use clap::{arg, ArgAction, command};
use crossbeam_channel::{bounded, Sender};
use json_compilation_db::Entry;
use log::LevelFilter;
use simple_logger::SimpleLogger;

use crate::configuration::{Compilation, Configuration};
use crate::execution::Execution;
use crate::filter::EntryPredicate;
use crate::tools::{RecognitionResult, Semantic, Tool};

@@ -168,31 +165,15 @@ impl Application {
}

fn run(self) -> Result<()> {
let (snd, rcv) = bounded::<Entry>(32);

// Start reading entries (in a new thread), and send them across the channel.
let (compilation_config, output_config) =
(self.configuration.compilation, self.configuration.output);
let output = PathBuf::from(&self.output);
thread::spawn(move || {
process_executions(self.input.as_str(), &compilation_config, &snd)
.expect("Failed to process events.");

if self.append {
copy_entries(output.as_path(), &snd)
.expect("Failed to process existing compilation database");
}
drop(snd);
});

// Start writing the entries (from the channel) to the output.
let filter: EntryPredicate = output_config.content.into();
let entries = rcv.iter()
let filter: EntryPredicate = self.configuration.output.content.clone().into();
let entries = self.create_entries()?
.inspect(|entry| log::debug!("{:?}", entry))
.filter(filter);
match self.output.as_str() {
"-" | "/dev/stdout" =>
json_compilation_db::write(stdout(), entries)?,
"-" | "/dev/stdout" => {
let buffer = BufWriter::new(stdout());
json_compilation_db::write(buffer, entries)?
}
output => {
let temp = format!("{}.tmp", output);
// Create scope for the file, so it will be closed when the scope is over.
@@ -209,70 +190,22 @@

Ok(())
}
}

fn copy_entries(source: &Path, destination: &Sender<Entry>) -> Result<()> {
let mut count: u32 = 0;

let file = OpenOptions::new().read(true).open(source)
.with_context(|| format!("Failed to open file: {:?}", source))?;
let buffer = BufReader::new(file);

for event in json_compilation_db::read(buffer) {
match event {
Ok(value) => {
destination.send(value)?;
count += 1;
}
Err(_error) => {
// todo
log::error!("")
}
fn create_entries(&self) -> Result<Box<dyn Iterator<Item=Entry>>> {
let from_events = from_execution_events(self.input.as_str(), &self.configuration.compilation)?;
// Based on the append flag, we should read the existing compilation database too.
if self.append {
let from_db = from_compilation_db(Path::new(&self.output))?;
Ok(Box::new(from_events.chain(from_db)))
} else {
Ok(Box::new(from_events))
}
}

log::debug!("Found {count} entries from previous run.");
Ok(())
}

fn process_executions(source: &str, config: &Compilation, destination: &Sender<Entry>) -> Result<()> {
let (snd, rcv) = bounded::<Execution>(128);

// Start worker threads, which will process executions and create compilation database entry.
for _ in 0..num_cpus::get() {
let tool: Box<dyn Tool> = config.into();
let captured_sink = destination.clone();
let captured_source = rcv.clone();
thread::spawn(move || {
for execution in captured_source.into_iter() {
let result = tool.recognize(&execution);
match result {
RecognitionResult::Recognized(Ok(Semantic::UnixCommand)) =>
log::debug!("execution recognized as unix command: {:?}", execution),
RecognitionResult::Recognized(Ok(Semantic::BuildCommand)) =>
log::debug!("execution recognized as build command: {:?}", execution),
RecognitionResult::Recognized(Ok(semantic)) => {
log::debug!("execution recognized as compiler call, {:?} : {:?}", semantic, execution);
let entries: Result<Vec<Entry>> = semantic.try_into();
match entries {
Ok(entries) => for entry in entries {
captured_sink.send(entry).expect("")
}
Err(error) =>
log::debug!("can't convert into compilation entry: {}", error),
}
}
RecognitionResult::Recognized(Err(reason)) =>
log::debug!("execution recognized with failure, {:?} : {:?}", reason, execution),
RecognitionResult::NotRecognized =>
log::debug!("execution not recognized: {:?}", execution),
}
}
});
}

// Start sending execution events from the given file.
let buffer: BufReader<Box<dyn std::io::Read>> = match source {
fn from_execution_events(source: &str, config: &Compilation) -> Result<impl Iterator<Item=Entry>> {
let tool: Box<dyn Tool> = config.into();
let reader: BufReader<Box<dyn Read>> = match source {
"-" | "/dev/stdin" =>
BufReader::new(Box::new(stdin())),
_ => {
@@ -281,19 +214,73 @@ fn process_executions(source: &str, config: &Compilation, destination: &Sender<Entry>) -> Result<()> {
BufReader::new(Box::new(file))
}
};
let entries = entries_from_execution_events(reader, tool);

Ok(entries)
}

for execution in events::from_reader(buffer) {
match execution {
Ok(value) => {
snd.send(value)?;
fn entries_from_execution_events(reader: impl std::io::Read, tools: Box<dyn Tool>) -> impl Iterator<Item=Entry> {
events::from_reader(reader)
.flat_map(|candidate| {
match candidate {
Ok(execution) => Some(execution),
Err(error) => {
log::error!("Failed to read entry: {}", error);
None
}
}
Err(_error) => {
// todo
log::error!("")
})
.flat_map(move |execution| {
match tools.recognize(&execution) {
RecognitionResult::Recognized(Ok(Semantic::UnixCommand)) => {
log::debug!("execution recognized as unix command: {:?}", execution);
None
}
RecognitionResult::Recognized(Ok(Semantic::BuildCommand)) => {
log::debug!("execution recognized as build command: {:?}", execution);
None
}
RecognitionResult::Recognized(Ok(semantic)) => {
log::debug!("execution recognized as compiler call, {:?} : {:?}", semantic, execution);
Some(semantic)
}
RecognitionResult::Recognized(Err(reason)) => {
log::debug!("execution recognized with failure, {:?} : {:?}", reason, execution);
None
}
RecognitionResult::NotRecognized => {
log::debug!("execution not recognized: {:?}", execution);
None
}
}
}
}
drop(snd);
})
.flat_map(|semantic| {
let entries: Result<Vec<Entry>, anyhow::Error> = semantic.try_into();
entries.unwrap_or_else(|error| {
log::debug!("compiler call failed to convert to compilation db entry: {}", error);
vec![]
})
})
}

Ok(())
fn from_compilation_db(source: &Path) -> Result<impl Iterator<Item=Entry>> {
let file = OpenOptions::new().read(true).open(source)
.with_context(|| format!("Failed to open file: {:?}", source))?;
let buffer = BufReader::new(file);
let entries = entries_from_compilation_db(buffer);

Ok(entries)
}

fn entries_from_compilation_db(reader: impl std::io::Read) -> impl Iterator<Item=Entry> {
json_compilation_db::read(reader)
.flat_map(|candidate| {
match candidate {
Ok(entry) => Some(entry),
Err(error) => {
log::error!("Failed to read entry: {}", error);
None
}
}
})
}
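One design note on the new helpers: from_execution_events() and from_compilation_db() can return impl Iterator because each has a single concrete iterator type, while create_entries() must box its result, since its two branches (with and without the chained database) are different concrete types. A minimal sketch of that shape, with integers standing in for Entry values:

    // Sketch: both branches are boxed so the arms share one return type;
    // chain() lazily appends the second source only when append is set.
    fn create_entries(append: bool) -> Box<dyn Iterator<Item = u32>> {
        let from_events = [1, 2, 3].into_iter();
        if append {
            let from_db = [10, 11, 12].into_iter();
            Box::new(from_events.chain(from_db))
        } else {
            Box::new(from_events)
        }
    }

    fn main() {
        assert_eq!(create_entries(false).collect::<Vec<_>>(), vec![1, 2, 3]);
        assert_eq!(create_entries(true).collect::<Vec<_>>(), vec![1, 2, 3, 10, 11, 12]);
    }

Boxing costs one allocation and dynamic dispatch per item; an enum such as either::Either could avoid the allocation, but the boxed form keeps the signature simple.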
