Skip to content

Commit

Permalink
removed BufReaderML
Browse files Browse the repository at this point in the history
  • Loading branch information
raabh committed Mar 13, 2023
1 parent 90d90f4 commit 2f6171f
Show file tree
Hide file tree
Showing 9 changed files with 31 additions and 105 deletions.
11 changes: 7 additions & 4 deletions src/bin/ddnnife.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
//#![warn(clippy::all, clippy::pedantic)]

#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

Expand All @@ -18,7 +16,8 @@ use ddnnf_lib::ddnnf::Ddnnf;
use ddnnf_lib::parser::{self as dparser, persisting::write_ddnnf};

#[derive(Parser)]
#[command(author, version, about, arg_required_else_help(true), help_template("\
#[command(author, version, about, arg_required_else_help(true),
help_template("\
{before-help}{name} {version}
{author-with-newline}{about-with-newline}
{usage-heading} {usage}
Expand Down Expand Up @@ -116,6 +115,7 @@ fn main() {
ddnnf = dparser::build_ddnnf(ddnnf_path, cli.ommited_features)
}

// print additional output, iff we are not in the stream mode
if !cli.stream {
let elapsed_time = time.elapsed().as_secs_f32();
println!(
Expand Down Expand Up @@ -215,7 +215,7 @@ fn main() {

let response = ddnnf.handle_stream_msg(&buffer);

if response.as_str() == "exit" { handle_out.write_all("ENDE \\ü/".as_bytes()).unwrap(); break; }
if response.as_str() == "exit" { handle_out.write_all("ENDE \\ü/\n".as_bytes()).unwrap(); break; }

handle_out.write_all(format!("{}\n", response).as_bytes()).unwrap();
handle_out.flush().unwrap();
Expand Down Expand Up @@ -244,6 +244,8 @@ fn main() {
}
}

// Uses the supplied file path if there is any.
// If there is no prefix, we switch to the default fallback.
fn build_file_path(maybe_prefix: Option<Vec<String>>, fallback: &String, postfix: &str) -> String {
let potential_path = maybe_prefix.unwrap();
let mut custom_file_path;
Expand All @@ -257,6 +259,7 @@ fn build_file_path(maybe_prefix: Option<Vec<String>>, fallback: &String, postfix
custom_file_path
}

// spawns a new thread that listens on stdin and delivers its request to the stream message handling
fn spawn_stdin_channel() -> Receiver<String> {
let (tx, rx) = mpsc::channel::<String>();
thread::spawn(move || {
Expand Down
13 changes: 7 additions & 6 deletions src/bin/dhone.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,12 @@
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

use clap::Parser;
use ddnnf_lib::parser::bufreader_for_big_files::BufReaderMl;
use rustc_hash::FxHashMap;

use std::time::Instant;

use std::fs::File;
use std::io::{BufWriter, Write};
use std::io::{BufWriter, Write, BufRead, BufReader};

pub use ddnnf_lib::parser as dparser;
pub use dparser::c2d_lexer;
Expand Down Expand Up @@ -121,14 +120,16 @@ fn preprocess(path: &str) -> Vec<C2DToken> {

// generates a token stream from a file path
fn get_token_stream(path: &str) -> Vec<C2DToken> {
let buf_reader = BufReaderMl::open(path).expect("Unable to open file");
let file = File::open(path).unwrap();
let lines = BufReader::new(file)
.lines()
.map(|line| line.expect("Unable to read line"));
// we do not know the capacity beforehand without applying semantics but we know that the file will often be quite big
let mut parsed_tokens: Vec<C2DToken> = Vec::with_capacity(10000);

// opens the file with a BufReaderMl which is similar to a regular BufReader
// opens the file with a BufReader and
// works off each line of the file data separately
for line in buf_reader {
let line = line.expect("Unable to read line");
for line in lines {
parsed_tokens.push(dparser::c2d_lexer::lex_line(line.as_ref()).unwrap().1);
}

Expand Down
11 changes: 4 additions & 7 deletions src/ddnnf/anomalies/atomic_sets.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,15 @@ use crate::Ddnnf;

use std::hash::Hash;

/// A quite basic union-find implementation that uses ranks and path compression
#[derive(Debug, Clone, PartialEq)]
pub struct UnionFind<N: Hash + Eq + Clone> {
struct UnionFind<N: Hash + Eq + Clone> {
size: usize,
parents: FxHashMap<N, N>,
rank: FxHashMap<N, usize>,
}

pub trait UnionFindTrait<N: Eq + Hash + Clone> {
trait UnionFindTrait<N: Eq + Hash + Clone> {
fn find(&mut self, node: N) -> N;
fn equiv(&mut self, x: N, y: N) -> bool;
fn union(&mut self, x: N, y: N);
Expand All @@ -30,7 +31,7 @@ where T: Eq + Hash + Clone {

impl<T> UnionFind<T>
where T: Eq + Hash + Clone {
pub fn new() -> UnionFind<T> {
fn new() -> UnionFind<T> {
let parents: FxHashMap<T, T> = FxHashMap::default();
let rank: FxHashMap<T, usize> = FxHashMap::default();

Expand All @@ -40,10 +41,6 @@ where T: Eq + Hash + Clone {
rank,
}
}

pub fn entries(&self) -> Vec<T> {
self.rank.clone().into_keys().collect()
}
}

impl<T> UnionFindTrait<T> for UnionFind<T>
Expand Down
2 changes: 1 addition & 1 deletion src/ddnnf/anomalies/core.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use rustc_hash::FxHashSet;
use crate::{Ddnnf};

impl Ddnnf {
/// Computes all core features
/// Computes all core features
/// A feature is a core feature iff there exists only the positive occurrence of that feature
pub(crate) fn get_core(&mut self) {
self.core = (1..=self.number_of_variables as i32)
Expand Down
15 changes: 1 addition & 14 deletions src/ddnnf/anomalies/false_optional.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,19 +6,6 @@ impl Ddnnf {
/// the feature while simultaneously excluding its parent
#[allow(dead_code)]
fn get_false_optional(&mut self) {
/***
* Probleme:
* - Wie findet man den parent eines features?
* - Welches feature ist das root feature?
*
* Beziehung eines features zu parent:
* p => c (Damit das child ausgewählt werden kann muss der parent gewählt sein)
*
* F_FM and p and not c is unsatisfiable
* <=> #SAT(F_FM and p) == #SAT(F_FM and p and c) und f ist nicht mandatory
* Wie ermittelt man ob ein feature mandatory ist?
* -> SAT(F_FM and p and other child von p)
*
*/
// TODO
}
}
2 changes: 1 addition & 1 deletion src/ddnnf/anomalies/sat.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ impl Ddnnf {
}

#[inline]
// CComputes if a node is sat
// Computes if a node is sat
pub(crate) fn sat_node_default(&mut self, i: usize) {
match &self.nodes[i].ntype {
And { children } => {
Expand Down
1 change: 0 additions & 1 deletion src/ddnnf/config_creation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -253,7 +253,6 @@ impl Ddnnf {
}

#[cfg(test)]
#[cfg_attr(coverage_nightly, no_coverage)]
mod test {
use std::{collections::HashSet};

Expand Down
22 changes: 10 additions & 12 deletions src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,6 @@ use std::{
use rug::{Integer, Complete};
use rustc_hash::{FxHashMap, FxHashSet};

pub mod bufreader_for_big_files;
use bufreader_for_big_files::BufReaderMl;

use crate::ddnnf::{Ddnnf, node::Node, node::NodeType};

use petgraph::graph::DiGraph;
Expand Down Expand Up @@ -101,7 +98,7 @@ fn build_c2d_ddnnf(lines: Vec<String>, variables: u32) -> Ddnnf {
let mut literals: FxHashMap<i32, usize> = FxHashMap::default();
let mut true_nodes = Vec::new();

// opens the file with a BufReaderMl which is similar to a regular BufReader
// opens the file with a BufReader and
// works off each line of the file data separately
// skip the first line, because we already looked at the header
for line in lines.into_iter().skip(1) {
Expand Down Expand Up @@ -235,7 +232,7 @@ fn build_d4_ddnnf(lines: Vec<String>, ommited_features: u32) -> Ddnnf {
ddnnf_graph.add_edge(and_node, to, ());
};

// opens the file with a BufReaderMl which is similar to a regular BufReader
// opens the file with a BufReader and
// works off each line of the file data separately
for line in lines {
let next: D4Token = lex_line_d4(line.as_ref()).unwrap().1;
Expand Down Expand Up @@ -507,16 +504,17 @@ fn calc_or_count(
///
/// Panics for a path to a non existing file
pub fn parse_queries_file(path: &str) -> Vec<(usize, Vec<i32>)> {
let buf_reader = BufReaderMl::open(path).expect("Unable to open file");
let mut parsed_queries: Vec<(usize, Vec<i32>)> = Vec::new();

// opens the file with a BufReaderMl which is similar to a regular BufReader
// opens the file with a BufReader and
// works off each line of the file data separately
for (line_number, line) in buf_reader.enumerate() {
let l = line.expect("Unable to read line");
let file = File::open(path).unwrap();
let lines = BufReader::new(file)
.lines()
.map(|line| line.expect("Unable to read line"));
let mut parsed_queries: Vec<(usize, Vec<i32>)> = Vec::new();

for (line_number, line) in lines.enumerate() {
// takes a line of the file and parses the i32 values
let res: Vec<i32> = l.as_ref().split_whitespace().into_iter()
let res: Vec<i32> = line.split_whitespace().into_iter()
.map(|elem| elem.to_string().parse::<i32>()
.unwrap_or_else(|_| panic!("Unable to parse {:?} into an i32 value while trying to parse the querie file at {:?}.\nCheck the help page with \"-h\" or \"--help\" for further information.\n", elem, path))
).collect();
Expand Down
59 changes: 0 additions & 59 deletions src/parser/bufreader_for_big_files.rs

This file was deleted.

0 comments on commit 2f6171f

Please sign in to comment.