From ba3f7b354575e921d38bde96c66bcc34f6e54655 Mon Sep 17 00:00:00 2001 From: Justin Restivo Date: Mon, 26 Apr 2021 23:03:18 -0400 Subject: [PATCH 1/7] starting out the input generation --- Cargo.lock | 1 + Cargo.toml | 1 + src/parser/input_utils.rs | 34 +++++++++++++++++++++++++++++++++ src/parser/input_utils_tests.rs | 4 ++++ src/parser/mod.rs | 5 +++++ 5 files changed, 45 insertions(+) create mode 100644 src/parser/input_utils.rs create mode 100644 src/parser/input_utils_tests.rs diff --git a/Cargo.lock b/Cargo.lock index 62cff95..1f42213 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -380,6 +380,7 @@ name = "flake_generator" version = "0.1.0" dependencies = [ "anyhow", + "either", "parse-display", "rnix", "rowan", diff --git a/Cargo.toml b/Cargo.toml index 98e4c94..d5a3013 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,6 +13,7 @@ rowan = "0.12.6" parse-display = "0.4.1" anyhow = "1.0" rust-nix-templater = { git = "https://github.com/yusdacra/rust-nix-templater.git", branch = "master" } +either = "1.6.1" [package.metadata.nix] app = true diff --git a/src/parser/input_utils.rs b/src/parser/input_utils.rs new file mode 100644 index 0000000..ef89741 --- /dev/null +++ b/src/parser/input_utils.rs @@ -0,0 +1,34 @@ +use crate::parser::utils::{string_to_node, NixNode}; +use anyhow::{anyhow, bail}; +use either::Either; +use rnix::{types::*, NixLanguage, StrPart, SyntaxKind::*}; +use rowan::{api::SyntaxNode, GreenNode, GreenNodeBuilder, Language}; +use std::convert::identity; + +#[derive(Debug, Clone, Eq, PartialEq)] +struct Input { + url: String, + is_flake: bool, +} + +/// inputs are: [(lhs, rhs), ...] +fn create_attr(attr_pairs: Vec<(String, Either)>) -> GreenNode { + attr_pairs + .iter() + .map(|(lhs, rhs)| (lhs, rhs.as_ref().either(identity, |x| x.to_string()))); + let mut node = GreenNodeBuilder::new(); + let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_ATTR_SET); + node.start_node(NixLanguage::kind_to_raw(NODE_ATTR_SET)); + node.finish_node(); + node.finish() +} + +impl From for GreenNode { + fn from(item: Input) -> Self { + let mut node = GreenNodeBuilder::new(); + let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_ATTR_SET); + node.start_node(kind); + node.finish_node(); + node.finish() + } +} diff --git a/src/parser/input_utils_tests.rs b/src/parser/input_utils_tests.rs new file mode 100644 index 0000000..afafbeb --- /dev/null +++ b/src/parser/input_utils_tests.rs @@ -0,0 +1,4 @@ +#[cfg(test)] +mod tests { + use super::*; +} diff --git a/src/parser/mod.rs b/src/parser/mod.rs index 422543d..060435e 100644 --- a/src/parser/mod.rs +++ b/src/parser/mod.rs @@ -1,5 +1,10 @@ pub mod file; pub mod utils; +// TODO better name +pub mod input_utils; #[cfg(test)] mod utils_tests; +// TODO better name +#[cfg(test)] +mod input_utils_tests; From 4b27f8fbcf54454a8f8e24245e3fa2fd5c59ed12 Mon Sep 17 00:00:00 2001 From: Justin Restivo Date: Sun, 2 May 2021 16:21:30 -0400 Subject: [PATCH 2/7] idiomatic attribute set generation --- Cargo.lock | 99 +++++++++++++++++++++++++++++++++ Cargo.toml | 1 + src/parser/input_utils.rs | 94 ++++++++++++++++++++++++++----- src/parser/input_utils_tests.rs | 34 ++++++++++- 4 files changed, 210 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1f42213..de40418 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -90,6 +90,15 @@ dependencies = [ "constant_time_eq", ] +[[package]] +name = "bstr" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a40b47ad93e1a5404e6c18dec46b628214fee441c70f4ab5d6942142cc268a3d" +dependencies = [ + "memchr", +] + [[package]] name = "bumpalo" version = "3.6.1" @@ -183,6 +192,15 @@ dependencies = [ "crossbeam-utils 0.8.3", ] +[[package]] +name = "crossbeam-channel" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ec7fcd21571dc78f96cc96243cab8d8f035247c3efd16c687be154c3fa9efa" +dependencies = [ + "crossbeam-utils 0.6.6", +] + [[package]] name = "crossbeam-channel" version = "0.4.4" @@ -237,6 +255,16 @@ dependencies = [ "crossbeam-utils 0.8.3", ] +[[package]] +name = "crossbeam-utils" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6" +dependencies = [ + "cfg-if 0.1.10", + "lazy_static", +] + [[package]] name = "crossbeam-utils" version = "0.7.2" @@ -381,6 +409,7 @@ version = "0.1.0" dependencies = [ "anyhow", "either", + "nixpkgs-fmt", "parse-display", "rnix", "rowan", @@ -415,6 +444,19 @@ dependencies = [ "wasi 0.9.0+wasi-snapshot-preview1", ] +[[package]] +name = "globset" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c152169ef1e421390738366d2f796655fec62621dabbd0fd476f905934061e4a" +dependencies = [ + "aho-corasick", + "bstr", + "fnv", + "log", + "regex", +] + [[package]] name = "hashbrown" version = "0.9.1" @@ -451,6 +493,24 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "ignore" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b287fb45c60bb826a0dc68ff08742b9d88a2fea13d6e0c286b3172065aaf878c" +dependencies = [ + "crossbeam-utils 0.8.3", + "globset", + "lazy_static", + "log", + "memchr", + "regex", + "same-file", + "thread_local", + "walkdir", + "winapi-util", +] + [[package]] name = "itoa" version = "0.4.7" @@ -524,6 +584,21 @@ dependencies = [ "libc", ] +[[package]] +name = "nixpkgs-fmt" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a82272a0eee2fef69a1af311f01ff84edac1f4f1680095b9a86f409cf00bd00" +dependencies = [ + "clap", + "crossbeam-channel 0.3.9", + "ignore", + "rnix", + "rowan", + "serde_json", + "smol_str", +] + [[package]] name = "num-integer" version = "0.1.44" @@ -712,6 +787,7 @@ dependencies = [ "hashbrown", "memoffset", "rustc-hash", + "serde", "text-size", ] @@ -757,6 +833,15 @@ version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + [[package]] name = "scopeguard" version = "1.1.0" @@ -982,6 +1067,9 @@ name = "text-size" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a" +dependencies = [ + "serde", +] [[package]] name = "textwrap" @@ -1136,6 +1224,17 @@ dependencies = [ "quote", ] +[[package]] +name = "walkdir" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +dependencies = [ 
+ "same-file", + "winapi", + "winapi-util", +] + [[package]] name = "wasi" version = "0.9.0+wasi-snapshot-preview1" diff --git a/Cargo.toml b/Cargo.toml index d5a3013..e3be5ac 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,6 +14,7 @@ parse-display = "0.4.1" anyhow = "1.0" rust-nix-templater = { git = "https://github.com/yusdacra/rust-nix-templater.git", branch = "master" } either = "1.6.1" +nixpkgs-fmt = "1.1.0" [package.metadata.nix] app = true diff --git a/src/parser/input_utils.rs b/src/parser/input_utils.rs index ef89741..cd5ddc5 100644 --- a/src/parser/input_utils.rs +++ b/src/parser/input_utils.rs @@ -1,9 +1,11 @@ use crate::parser::utils::{string_to_node, NixNode}; use anyhow::{anyhow, bail}; use either::Either; -use rnix::{types::*, NixLanguage, StrPart, SyntaxKind::*}; -use rowan::{api::SyntaxNode, GreenNode, GreenNodeBuilder, Language}; -use std::convert::identity; +use rnix::{types::*, NixLanguage, StrPart, SyntaxKind::*, parse}; +use rowan::{api::SyntaxNode, GreenNode, GreenNodeBuilder, Language, NodeOrToken, GreenToken}; +use std::string::ToString; +use nixpkgs_fmt::reformat_node; +use std::ops::Deref; #[derive(Debug, Clone, Eq, PartialEq)] struct Input { @@ -11,24 +13,86 @@ struct Input { is_flake: bool, } -/// inputs are: [(lhs, rhs), ...] -fn create_attr(attr_pairs: Vec<(String, Either)>) -> GreenNode { - attr_pairs - .iter() - .map(|(lhs, rhs)| (lhs, rhs.as_ref().either(identity, |x| x.to_string()))); +pub fn wrap_root(node: GreenNode) -> NixNode { + let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_ROOT); + let root = GreenNode::new(kind, (vec![NodeOrToken::Node(node)])); + SyntaxNode::new_root(root) +} + +pub fn new_string(s : String) -> GreenNode { let mut node = GreenNodeBuilder::new(); - let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_ATTR_SET); - node.start_node(NixLanguage::kind_to_raw(NODE_ATTR_SET)); + let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_STRING); + node.start_node(kind); + let start_string_kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_STRING_START); + node.token(start_string_kind, "\""); + let string_content: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_STRING_CONTENT); + node.token(string_content, &s); + let end_string_kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_STRING_END); + node.token(end_string_kind, "\""); node.finish_node(); node.finish() } +pub fn new_key(s : String) -> GreenNode { + let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_KEY); + let children = vec![NodeOrToken::Node(new_string(s))]; + GreenNode::new(kind, children) +} + +pub fn gen_key_value(key: String, value: String) -> GreenNode { + let key_node : GreenNode = new_key(key); + let value_node : GreenNode = new_string(value); + new_key_value(key_node, value_node) +} + +pub fn new_key_value(key : GreenNode, value: GreenNode) -> GreenNode { + let kind = NixLanguage::kind_to_raw(NODE_KEY_VALUE); + let assign_kind = NixLanguage::kind_to_raw(TOKEN_ASSIGN); + let whitespace_kind = NixLanguage::kind_to_raw(TOKEN_WHITESPACE); + let semicolon_kind = NixLanguage::kind_to_raw(TOKEN_SEMICOLON); + let children = vec![ + NodeOrToken::Node(key), + NodeOrToken::Token(GreenToken::new(whitespace_kind, " ")), + NodeOrToken::Token(GreenToken::new(assign_kind, "=")), + NodeOrToken::Token(GreenToken::new(whitespace_kind, " ")), + NodeOrToken::Node(value), + NodeOrToken::Token(GreenToken::new(semicolon_kind, ";")), + ]; + GreenNode::new(kind, children) +} + +pub fn gen_attr_set(attr_pairs: Vec<(String, String)>) -> GreenNode{ + let 
new_attr_pairs : Vec<(GreenNode, GreenNode)> = + attr_pairs.iter().map(|(key, value)| (new_key(key.to_string()), new_string(value.to_string()))).collect(); + new_attr_set(new_attr_pairs) +} + +// TODO give all the tokens their own constructors +/// inputs are: [(lhs, rhs), ...] +pub fn new_attr_set(attr_pairs: Vec<(GreenNode, GreenNode)>) -> GreenNode { + let pairs : Vec> = attr_pairs + .iter() + .map(move |(k, v)| { + NodeOrToken::Node(new_key_value(k.clone(), v.clone())) + }).collect::>(); + let open_curly_kind = NixLanguage::kind_to_raw(TOKEN_CURLY_B_OPEN); + let close_curly_kind = NixLanguage::kind_to_raw(TOKEN_CURLY_B_CLOSE); + let whitespace_kind = NixLanguage::kind_to_raw(TOKEN_WHITESPACE); + let attr_set_kind = NixLanguage::kind_to_raw(NODE_ATTR_SET); + let mut token_vec = Vec::new(); + token_vec.push(vec![NodeOrToken::Token(GreenToken::new(open_curly_kind, "{"))]); + token_vec.push(pairs); + token_vec.push(vec![NodeOrToken::Token(GreenToken::new(close_curly_kind, "}"))]); + let tokens = token_vec.iter().flatten().cloned().collect::>>(); + GreenNode::new(attr_set_kind, tokens) +} + impl From for GreenNode { fn from(item: Input) -> Self { - let mut node = GreenNodeBuilder::new(); - let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_ATTR_SET); - node.start_node(kind); - node.finish_node(); - node.finish() + let inputs = vec![ + ("url".to_string(), item.url), + ("flake".to_string(), item.is_flake.to_string()) + ]; + gen_attr_set(inputs) } } diff --git a/src/parser/input_utils_tests.rs b/src/parser/input_utils_tests.rs index afafbeb..1fcdf79 100644 --- a/src/parser/input_utils_tests.rs +++ b/src/parser/input_utils_tests.rs @@ -1,4 +1,32 @@ -#[cfg(test)] -mod tests { - use super::*; +use crate::parser::input_utils::{new_string, wrap_root, new_key, gen_attr_set}; +use crate::parser::utils::{node_to_string, string_to_node}; + +use rnix::{types::*, SyntaxKind::*}; + +#[test] +pub fn check_new_string() { + let phrase = "hello_world".to_string(); + let n = new_string(phrase.clone()); + let root = wrap_root(n); + let result = Root::cast(root).unwrap(); + //let ast = rnix::parse(&phrase) + //.as_result() + //.map(|ast| ast.root()).unwrap(); + //assert_eq!(format!("{}", result.dump()).trim(), format!("{}", ast.dump()).trim()) + assert_eq!(format!("{}", result.dump()).trim(), "NODE_ROOT 0..13 {\n NODE_STRING 0..13 {\n TOKEN_STRING_START(\"\\\"\") 0..1\n TOKEN_STRING_CONTENT(\"hello_world\") 1..12\n TOKEN_STRING_END(\"\\\"\") 12..13\n }\n}") } + + +#[test] +pub fn check_new_attr_set() { + let attrset = vec![("test1".to_string(), "value1".to_string()), ("test2".to_string(), "value2".to_string())]; + let result = gen_attr_set(attrset); + let root = wrap_root(result); + // TODO separate this out into a dump ast method.. + //let result = Root::cast(root).unwrap(); + //let r_string = format!("{}", result.dump()); + //let r_string = r_string; + //println!("ast: {}", r_string.clone()); + assert_eq!(root.to_string(), "{\"test1\" = \"value1\";\"test2\" = \"value2\";}"); +} + From fbe3676c30a064ffff89ab4545fe673c6484ed1a Mon Sep 17 00:00:00 2001 From: Justin Restivo Date: Sun, 2 May 2021 17:38:10 -0400 Subject: [PATCH 3/7] updating readme a bit --- README.md | 23 ++++++----------------- 1 file changed, 6 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index ddbc805..369740e 100644 --- a/README.md +++ b/README.md @@ -34,23 +34,12 @@ their package and set of dependencies. 
The idea is to have the user specify the type of dependencies, flake inputs and outputs, and type of the package with `skim`, -then to output a flake. This flake is then validated with `rnix`. +then to output a flake. This flake is then validated with `rnix` +formatted with `nixpkgs-fmt`, then written to a file. If the user wants to modify an existing flake to add or remove dependencies, this will also be possible. The flake shall be -parsed in with `rnix`, and the user will be able to modify it. - -As of now, basically none of the features exist. I've only -got the proof of concept working: skim can be used for a cli -and rnix can be used to modify the AST. - -Further down the line, I'd like to make this even more interactive. -This will involve querying github, crates.io, pypy, nixpkgs and more for packages, -then piping them into skim for selection based on language. - -The hope is to also provide automatic support for pre-existing -nix expression generators such as node2nix, poetry2nix, cabal2nix, -and naersk. +parsed with `rnix`, and the user will be able to modify it. # Dependencies # @@ -60,11 +49,11 @@ and the `skim` fuzzy finder for the cli. # Roadmap # - [x] Proof of concept -- [ ] Flake Input management - - [ ] Add inputs +- [x] Flake Input management + - [x] Add inputs - [ ] Remove inputs - [ ] Change inputs - - [ ] Query github + - [ ] Query github into skim (this is hard...may not be feasible..) - [ ] BuildInput management - [ ] Query nixpkgs - [ ] Modify buildInputs From bd9ea6879569a6bcb688fd1b24dd8d71528ad21f Mon Sep 17 00:00:00 2001 From: Justin Restivo Date: Sun, 9 May 2021 20:19:04 -0400 Subject: [PATCH 4/7] Input generation works! --- src/main.rs | 83 +++++++++++++++++- src/parser/input_utils.rs | 147 +++++++++++++++++++++++++------- src/parser/input_utils_tests.rs | 32 +++++-- src/parser/utils.rs | 30 ++++++- src/user/mod.rs | 25 +++++- 5 files changed, 274 insertions(+), 43 deletions(-) diff --git a/src/main.rs b/src/main.rs index 403266d..8ef4bf2 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,9 +1,15 @@ mod parser; mod user; +use crate::parser::input_utils::SyntaxStructure; use anyhow::anyhow; +use nixpkgs_fmt::reformat_node; use parser::file::{filename_to_node, write_to_node}; -use parser::utils::remove_input; +use parser::input_utils::{merge_attr_sets, Input}; +use parser::utils::{get_node_idx, remove_input, search_for_attr, NixNode}; +use rowan::GreenNode; +use rowan::NodeOrToken; +use rowan::SyntaxNode; use user::*; struct ActionStack { @@ -21,6 +27,12 @@ impl ActionStack { self.inner.push(action) } + fn push_seq(&mut self, actions: impl IntoIterator) { + actions + .into_iter() + .for_each(|action| self.inner.push(action)) + } + fn pop(&mut self) -> UserAction { self.inner.pop().unwrap_or(Self::START_ACTION) } @@ -64,7 +76,14 @@ fn main() { UserPrompt::Create => action_stack.push(UserAction::CreateNew), UserPrompt::Modify => action_stack.push(UserAction::ModifyExisting), UserPrompt::DeleteInput => action_stack.push(UserAction::RemoveInput), - UserPrompt::AddInput => action_stack.push(UserAction::AddInput), + UserPrompt::AddInput => action_stack.push_seq(vec![ + UserAction::IntroParsed, + UserAction::ConfirmWrite, + UserAction::ConfirmInputCorrect, + UserAction::QueryInputName, + UserAction::QueryInputUrl, + UserAction::IsInputFlake, + ]), UserPrompt::SelectLang(lang) => match lang { Lang::Rust => action_stack.push(UserAction::Rust(user::rust::Action::Intro)), lang => todo!("lang {}", lang), @@ -79,6 +98,18 @@ fn main() { .clone() .process_action(other, &mut 
action_stack, &mut user_data) } + UserAction::QueryInputUrl => { + let mut i = user_data.new_input.unwrap_or_default(); + i.url = Some(SyntaxStructure::StringLiteral(other)); + user_data.new_input = Some(i); + action_stack.pop(); + } + UserAction::QueryInputName => { + let mut i = user_data.new_input.unwrap_or_default(); + i.name = Some(SyntaxStructure::StringLiteral(other)); + user_data.new_input = Some(i); + action_stack.pop(); + } UserAction::ModifyExisting => { let filename = other.0.as_str(); match filename_to_node(filename, &other) { @@ -99,7 +130,7 @@ fn main() { ) .unwrap(); user_data.new_root(new_root); - write_to_node(&user_data); + action_stack.pop(); // TODO add in a "write to file" option at the end instead of writing after every modification action_stack.push(UserAction::IntroParsed); @@ -107,6 +138,52 @@ fn main() { _ => unimplemented!(), } } + UserPrompt::Bool(b) => match cur_action { + UserAction::IsInputFlake => { + action_stack.pop(); + let mut i = user_data + .new_input + .as_ref() + .and_then(|x: &Input| Some(x.clone())) + .unwrap_or_default(); + i.is_flake = Some(SyntaxStructure::Bool(b)); + user_data.new_input = Some(i); + } + UserAction::ConfirmInputCorrect => { + let root = user_data.root.as_ref().unwrap(); + let (inputs, _, _) = search_for_attr("inputs", 1, root, None)[0].clone(); + let new_input: GreenNode = user_data + .new_input + .as_ref() + .and_then(|x| Some(x.clone())) + .unwrap() + .into(); + let augmented_input = merge_attr_sets(inputs.green().to_owned(), new_input); + println!("aug: {:?}", augmented_input.to_string()); + let idx = get_node_idx(&inputs).unwrap(); + let parent = inputs.parent().unwrap(); + let new_root = inputs + .parent() + .unwrap() + .green() + .to_owned() + .replace_child(idx, NodeOrToken::Node(augmented_input)); + let mut new_root_wrapped: NixNode = + SyntaxNode::new_root(parent.replace_with(new_root)); + while let Some(parent) = new_root_wrapped.parent() { + new_root_wrapped = parent; + } + action_stack.pop(); + user_data.new_root(reformat_node(&new_root_wrapped)) + } + UserAction::ConfirmWrite => { + action_stack.pop(); + if b { + write_to_node(&user_data) + } + } + _ => unimplemented!("bool not implemented in this case {}", cur_action), + }, } } } diff --git a/src/parser/input_utils.rs b/src/parser/input_utils.rs index cd5ddc5..8b4b772 100644 --- a/src/parser/input_utils.rs +++ b/src/parser/input_utils.rs @@ -1,25 +1,31 @@ +#![feature(iter_intersperse)] use crate::parser::utils::{string_to_node, NixNode}; +use crate::SmlStr; use anyhow::{anyhow, bail}; use either::Either; -use rnix::{types::*, NixLanguage, StrPart, SyntaxKind::*, parse}; -use rowan::{api::SyntaxNode, GreenNode, GreenNodeBuilder, Language, NodeOrToken, GreenToken}; -use std::string::ToString; use nixpkgs_fmt::reformat_node; +use rnix::{parse, types::*, NixLanguage, StrPart, SyntaxKind::*}; +use rowan::{ + api::SyntaxNode, GreenNode, GreenNodeBuilder, GreenNodeData, GreenToken, Language, NodeOrToken, +}; +use std::borrow::Borrow; use std::ops::Deref; +use std::string::ToString; -#[derive(Debug, Clone, Eq, PartialEq)] -struct Input { - url: String, - is_flake: bool, +#[derive(Debug, Clone, Eq, PartialEq, Default)] +pub struct Input { + pub(crate) name: Option, + pub(crate) url: Option, + pub(crate) is_flake: Option, } pub fn wrap_root(node: GreenNode) -> NixNode { let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_ROOT); - let root = GreenNode::new(kind, (vec![NodeOrToken::Node(node)])); + let root = GreenNode::new(kind, vec![NodeOrToken::Node(node)]); 
SyntaxNode::new_root(root) } -pub fn new_string(s : String) -> GreenNode { +pub fn new_string(s: String) -> GreenNode { let mut node = GreenNodeBuilder::new(); let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_STRING); node.start_node(kind); @@ -32,20 +38,35 @@ pub fn new_string(s : String) -> GreenNode { node.finish_node(); node.finish() } +//TOKEN_WHITESPACE(" ") 18..19 +//NODE_IDENT 19..23 { +//TOKEN_IDENT("true") 19..23 +//} +//TOKEN_SEMICOLON(";") 23.. -pub fn new_key(s : String) -> GreenNode { +pub fn new_bool_literal(b: bool) -> GreenNode { + let mut node = GreenNodeBuilder::new(); + let token_ident_kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_IDENT); + let node_ident_kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_IDENT); + node.start_node(node_ident_kind); + node.token(token_ident_kind, b.to_string().as_str()); + node.finish_node(); + node.finish() +} + +pub fn new_key(s: String) -> GreenNode { let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_KEY); let children = vec![NodeOrToken::Node(new_string(s))]; GreenNode::new(kind, children) } pub fn gen_key_value(key: String, value: String) -> GreenNode { - let key_node : GreenNode = new_key(key); - let value_node : GreenNode = new_string(value); + let key_node: GreenNode = new_key(key); + let value_node: GreenNode = new_string(value); new_key_value(key_node, value_node) } -pub fn new_key_value(key : GreenNode, value: GreenNode) -> GreenNode { +pub fn new_key_value(key: GreenNode, value: GreenNode) -> GreenNode { let kind = NixLanguage::kind_to_raw(NODE_KEY_VALUE); let assign_kind = NixLanguage::kind_to_raw(TOKEN_ASSIGN); let whitespace_kind = NixLanguage::kind_to_raw(TOKEN_WHITESPACE); @@ -57,42 +78,110 @@ pub fn new_key_value(key : GreenNode, value: GreenNode) -> GreenNode { NodeOrToken::Token(GreenToken::new(whitespace_kind, " ")), NodeOrToken::Node(value), NodeOrToken::Token(GreenToken::new(semicolon_kind, ";")), + NodeOrToken::Token(GreenToken::new(whitespace_kind, "\n")), ]; GreenNode::new(kind, children) } -pub fn gen_attr_set(attr_pairs: Vec<(String, String)>) -> GreenNode{ - let new_attr_pairs : Vec<(GreenNode, GreenNode)> = - attr_pairs.iter().map(|(key, value)| (new_key(key.to_string()), new_string(value.to_string()))).collect(); +// TODO merge with new_ +pub fn gen_attr_set(attr_pairs: Vec<(String, String)>) -> GreenNode { + let new_attr_pairs: Vec<(GreenNode, GreenNode)> = attr_pairs + .iter() + .map(|(key, value)| (new_key(key.to_string()), new_string(value.to_string()))) + .collect(); new_attr_set(new_attr_pairs) } +pub fn merge_attr_sets(a1: GreenNode, a2: GreenNode) -> GreenNode { + let whitespace_kind = NixLanguage::kind_to_raw(TOKEN_WHITESPACE); + let token = GreenToken::new(whitespace_kind, "\n"); + let delimiter = NodeOrToken::Token(token); + let mut nodes = a2 + .children() + .into_iter() + .filter(|node| node.kind() == NixLanguage::kind_to_raw(NODE_KEY_VALUE)) + .map(|x| match x { + NodeOrToken::Node(x) => NodeOrToken::Node(x.clone()), + NodeOrToken::Token(x) => NodeOrToken::Token(x.clone()), + }) + .flat_map(|x| vec![delimiter.clone(), x]) + .collect::>(); + nodes.push(delimiter); + let idx = a1 + .children() + .position(|x| x.kind() == NixLanguage::kind_to_raw(TOKEN_CURLY_B_OPEN)) + .unwrap() + + 1; + a1.splice_children(idx..idx, nodes) +} + // TODO give all the tokens their own constructors /// inputs are: [(lhs, rhs), ...] 
pub fn new_attr_set(attr_pairs: Vec<(GreenNode, GreenNode)>) -> GreenNode { - let pairs : Vec> = attr_pairs + let pairs: Vec> = attr_pairs .iter() - .map(move |(k, v)| { - NodeOrToken::Node(new_key_value(k.clone(), v.clone())) - }).collect::>(); + .map(move |(k, v)| NodeOrToken::Node(new_key_value(k.clone(), v.clone()))) + .collect::>(); let open_curly_kind = NixLanguage::kind_to_raw(TOKEN_CURLY_B_OPEN); let close_curly_kind = NixLanguage::kind_to_raw(TOKEN_CURLY_B_CLOSE); - let whitespace_kind = NixLanguage::kind_to_raw(TOKEN_WHITESPACE); let attr_set_kind = NixLanguage::kind_to_raw(NODE_ATTR_SET); + let whitespace_kind = NixLanguage::kind_to_raw(TOKEN_WHITESPACE); let mut token_vec = Vec::new(); - token_vec.push(vec![NodeOrToken::Token(GreenToken::new(open_curly_kind, "{"))]); + token_vec.push(vec![NodeOrToken::Token(GreenToken::new( + open_curly_kind, + "{", + ))]); + token_vec.push(vec![NodeOrToken::Token(GreenToken::new( + whitespace_kind, + "\n", + ))]); token_vec.push(pairs); - token_vec.push(vec![NodeOrToken::Token(GreenToken::new(close_curly_kind, "}"))]); - let tokens = token_vec.iter().flatten().cloned().collect::>>(); + token_vec.push(vec![NodeOrToken::Token(GreenToken::new( + close_curly_kind, + "}", + ))]); + let tokens = token_vec + .iter() + .flatten() + .cloned() + .collect::>>(); GreenNode::new(attr_set_kind, tokens) } +#[derive(Debug, Clone, Eq, PartialEq)] +pub(crate) enum SyntaxStructure { + Key(SmlStr), + StringLiteral(SmlStr), + Bool(bool), +} + +impl From for GreenNode { + fn from(ss: SyntaxStructure) -> Self { + match ss { + SyntaxStructure::Key(k) => new_key(k.to_string()), + SyntaxStructure::StringLiteral(sl) => new_string(sl.to_string()), + SyntaxStructure::Bool(b) => new_bool_literal(b), + } + } +} + impl From for GreenNode { fn from(item: Input) -> Self { - let inputs = vec![ - ("url".to_string(), item.url), - ("flake".to_string(), item.is_flake.to_string()) - ]; - gen_attr_set(inputs) + let mut inputs = Vec::new(); + if let Some(s) = item.url { + inputs.push(( + SyntaxStructure::Key(SmlStr::new_inline("url")).into(), + s.into(), + )); + } + if let Some(s) = item.is_flake { + inputs.push(( + SyntaxStructure::Key(SmlStr::new_inline("flake")).into(), + s.into(), + )) + } + let input_name = item.name.unwrap().into(); + let inner_nodes = new_attr_set(inputs); + new_attr_set(vec![(input_name, inner_nodes)]) } } diff --git a/src/parser/input_utils_tests.rs b/src/parser/input_utils_tests.rs index 1fcdf79..8edeca4 100644 --- a/src/parser/input_utils_tests.rs +++ b/src/parser/input_utils_tests.rs @@ -1,8 +1,9 @@ -use crate::parser::input_utils::{new_string, wrap_root, new_key, gen_attr_set}; +use crate::parser::input_utils::{gen_attr_set, merge_attr_sets, new_key, new_string, wrap_root}; use crate::parser::utils::{node_to_string, string_to_node}; use rnix::{types::*, SyntaxKind::*}; +// TODO remove all the gen stuff and jsut use into trait #[test] pub fn check_new_string() { let phrase = "hello_world".to_string(); @@ -10,16 +11,18 @@ pub fn check_new_string() { let root = wrap_root(n); let result = Root::cast(root).unwrap(); //let ast = rnix::parse(&phrase) - //.as_result() - //.map(|ast| ast.root()).unwrap(); + //.as_result() + //.map(|ast| ast.root()).unwrap(); //assert_eq!(format!("{}", result.dump()).trim(), format!("{}", ast.dump()).trim()) assert_eq!(format!("{}", result.dump()).trim(), "NODE_ROOT 0..13 {\n NODE_STRING 0..13 {\n TOKEN_STRING_START(\"\\\"\") 0..1\n TOKEN_STRING_CONTENT(\"hello_world\") 1..12\n TOKEN_STRING_END(\"\\\"\") 12..13\n }\n}") } - #[test] 
pub fn check_new_attr_set() { - let attrset = vec![("test1".to_string(), "value1".to_string()), ("test2".to_string(), "value2".to_string())]; + let attrset = vec![ + ("test1".to_string(), "value1".to_string()), + ("test2".to_string(), "value2".to_string()), + ]; let result = gen_attr_set(attrset); let root = wrap_root(result); // TODO separate this out into a dump ast method.. @@ -27,6 +30,23 @@ pub fn check_new_attr_set() { //let r_string = format!("{}", result.dump()); //let r_string = r_string; //println!("ast: {}", r_string.clone()); - assert_eq!(root.to_string(), "{\"test1\" = \"value1\";\"test2\" = \"value2\";}"); + assert_eq!( + root.to_string(), + "{\n\"test1\" = \"value1\";\n\"test2\" = \"value2\";\n}" + ); } +#[test] +pub fn check_merge_attr_set() { + let attrset = vec![ + ("test1".to_string(), "value1".to_string()), + ("test2".to_string(), "value2".to_string()), + ]; + let result = gen_attr_set(attrset); + let merged = merge_attr_sets(result.clone(), result); + let root = wrap_root(merged); + assert_eq!( + root.to_string(), + "{\n\"test1\" = \"value1\";\n\n\"test2\" = \"value2\";\n\n\n\"test1\" = \"value1\";\n\"test2\" = \"value2\";\n}" + ); +} diff --git a/src/parser/utils.rs b/src/parser/utils.rs index d43a725..3b24722 100644 --- a/src/parser/utils.rs +++ b/src/parser/utils.rs @@ -6,6 +6,33 @@ pub(crate) type NixNode = SyntaxNode; use std::collections::HashMap; +/// Precondition: node is a attribute and parent is an attribute set +/// returns the index in the parent tree of the node +pub fn get_node_idx(node: &NixNode) -> anyhow::Result { + let parent = node.parent().unwrap(); + match parent.kind() { + NODE_ATTR_SET | NODE_PATTERN | NODE_KEY_VALUE => { + let mut child_node_idxs = + parent + .green() + .children() + .enumerate() + .filter_map(|(idx, val)| { + // the '.to_owned()' is required to turn GreenNodeData into GreenNode + // because GreenNodeData doesn't implement PartialEq + val.into_node().and_then(|inner_node| { + (*inner_node == node.green().to_owned()).then(|| idx) + }) + }); + Ok(child_node_idxs.next().expect("Child not in parent tree")) + } + x => Err(anyhow!(format!( + "Precondition violated: parent {:?} was not attribute set.", + x + ))), + } +} + /// Precondition: node is a attribute and parent is an attribute set /// (1) get parent attrset /// (2) iterate through parent's children nodes, searching for node to delete @@ -89,7 +116,7 @@ pub fn string_to_node(content: String) -> anyhow::Result { /// "{\"foo\": \"bar\"}" /// ``` /// Will return [bar_node, "foo", 1] -fn search_for_attr( +pub fn search_for_attr( attr: &str, max_depth: usize, root_node: &NixNode, @@ -255,6 +282,7 @@ pub fn remove_input( dead_node_name: &str, user_inputs: Option<&HashMap>, ) -> anyhow::Result { + // have to use outer scoped lifetime let tmp; let inputs = match user_inputs { Some(inputs) => inputs, diff --git a/src/user/mod.rs b/src/user/mod.rs index 413a6ff..1b867b7 100644 --- a/src/user/mod.rs +++ b/src/user/mod.rs @@ -4,6 +4,7 @@ use crate::parser::utils::{get_inputs, NixNode}; use std::{collections::HashMap, io::Cursor, str::FromStr}; +use crate::parser::input_utils::Input; use parse_display::{Display, FromStr}; use skim::prelude::*; use smol_str::SmolStr; @@ -51,12 +52,14 @@ pub(crate) struct UserMetadata { pub(crate) inputs: Option>, pub(crate) filename: Option, pub(crate) rust_options: rust_nix_templater::Options, + pub(crate) new_input: Option, } impl UserMetadata { pub(crate) fn new_root(&mut self, root: NixNode) { self.inputs = None; self.root = Some(root); + self.new_input 
= None; } fn ensure_inputs(&mut self) -> &mut HashMap { @@ -75,7 +78,12 @@ impl UserMetadata { UserPrompt::Back, ], UserAction::CreateNew => vec![UserPrompt::SelectLang(Lang::Rust), UserPrompt::Back], - UserAction::ModifyExisting => vec![], + UserAction::ModifyExisting | UserAction::QueryInputName | UserAction::QueryInputUrl => { + vec![] + } + UserAction::IsInputFlake | UserAction::ConfirmInputCorrect => { + vec![UserPrompt::Bool(true), UserPrompt::Bool(false)] + } UserAction::RemoveInput => { // check cache self.ensure_inputs() @@ -84,6 +92,7 @@ impl UserMetadata { .chain(std::iter::once(UserPrompt::Back)) .collect() } + UserAction::ConfirmWrite => vec![UserPrompt::Bool(true), UserPrompt::Bool(false)], UserAction::Error(_) => vec![UserPrompt::Back, UserPrompt::StartOver, UserPrompt::Exit], x => unimplemented!("prompt not implemented for: {:?}", x), } @@ -124,6 +133,8 @@ pub(crate) enum UserPrompt { #[display("{0}")] SelectLang(Lang), #[display("{0}")] + Bool(bool), + #[display("{0}")] Other(SmlStr), } @@ -141,18 +152,24 @@ pub(crate) enum UserAction { AddDep, #[display("Remove a dependency from your flake.\nPlease select a input to remove.")] RemoveDep, - #[display( - "Add an input to your flake.\nPlease input a flake url and indicate if it's a flake" - )] + #[display("Add an input to your flake.")] AddInput, #[display("Please select an input to remove.")] RemoveInput, #[display("Is the input a flake?")] IsInputFlake, + #[display("Provide input name.")] + QueryInputName, + #[display("Provide input URL.")] + QueryInputUrl, #[display("Encountered an error: {0}")] Error(anyhow::Error), #[display("{0}")] Rust(rust::Action), + #[display("Is the input correct?")] + ConfirmInputCorrect, + #[display("Write to file?")] + ConfirmWrite, } #[derive(Eq, PartialEq, Debug, Copy, Clone, Display, FromStr)] From 7db4be554fecdf62de8548c5b3cf139fa0b7c3b2 Mon Sep 17 00:00:00 2001 From: Justin Restivo Date: Sun, 9 May 2021 21:08:16 -0400 Subject: [PATCH 5/7] cleanup --- src/main.rs | 85 ++++++++++++++++----------------- src/parser/input_utils.rs | 39 ++++----------- src/parser/input_utils_tests.rs | 29 +++++++---- src/parser/url.rs | 0 4 files changed, 70 insertions(+), 83 deletions(-) create mode 100644 src/parser/url.rs diff --git a/src/main.rs b/src/main.rs index 8ef4bf2..2124b9c 100644 --- a/src/main.rs +++ b/src/main.rs @@ -91,53 +91,49 @@ fn main() { UserPrompt::Rust(prompt) => { prompt.process_prompt(&mut action_stack, &mut user_data); } - UserPrompt::Other(other) => { - match cur_action { - UserAction::Rust(action) => { - action - .clone() - .process_action(other, &mut action_stack, &mut user_data) - } - UserAction::QueryInputUrl => { - let mut i = user_data.new_input.unwrap_or_default(); - i.url = Some(SyntaxStructure::StringLiteral(other)); - user_data.new_input = Some(i); - action_stack.pop(); - } - UserAction::QueryInputName => { - let mut i = user_data.new_input.unwrap_or_default(); - i.name = Some(SyntaxStructure::StringLiteral(other)); - user_data.new_input = Some(i); - action_stack.pop(); - } - UserAction::ModifyExisting => { - let filename = other.0.as_str(); - match filename_to_node(filename, &other) { - Err(err_msg) => action_stack.push(UserAction::Error(err_msg)), - Ok(root) => { - user_data.filename = Some(filename.to_string()); - user_data.root = Some(root); - action_stack.push(UserAction::IntroParsed); - } + UserPrompt::Other(other) => match cur_action { + UserAction::Rust(action) => { + action + .clone() + .process_action(other, &mut action_stack, &mut user_data) + } + 
UserAction::QueryInputUrl => { + let mut i = user_data.new_input.unwrap_or_default(); + i.url = Some(SyntaxStructure::StringLiteral(other)); + user_data.new_input = Some(i); + action_stack.pop(); + } + UserAction::QueryInputName => { + let mut i = user_data.new_input.unwrap_or_default(); + i.name = Some(SyntaxStructure::StringLiteral(other)); + user_data.new_input = Some(i); + action_stack.pop(); + } + UserAction::ModifyExisting => { + let filename = other.0.as_str(); + match filename_to_node(filename, &other) { + Err(err_msg) => action_stack.push(UserAction::Error(err_msg)), + Ok(root) => { + user_data.filename = Some(filename.to_string()); + user_data.root = Some(root); + action_stack.push(UserAction::IntroParsed); } } - UserAction::RemoveInput => { - let inputs = user_data.inputs.as_ref().unwrap(); - let new_root = remove_input( - user_data.root.as_ref().unwrap(), - other.0.as_str(), - Some(inputs), - ) - .unwrap(); - user_data.new_root(new_root); - action_stack.pop(); - - // TODO add in a "write to file" option at the end instead of writing after every modification - action_stack.push(UserAction::IntroParsed); - } - _ => unimplemented!(), } - } + UserAction::RemoveInput => { + let inputs = user_data.inputs.as_ref().unwrap(); + let new_root = remove_input( + user_data.root.as_ref().unwrap(), + other.0.as_str(), + Some(inputs), + ) + .unwrap(); + user_data.new_root(new_root); + action_stack.pop(); + action_stack.push(UserAction::IntroParsed); + } + _ => unimplemented!(), + }, UserPrompt::Bool(b) => match cur_action { UserAction::IsInputFlake => { action_stack.pop(); @@ -159,7 +155,6 @@ fn main() { .unwrap() .into(); let augmented_input = merge_attr_sets(inputs.green().to_owned(), new_input); - println!("aug: {:?}", augmented_input.to_string()); let idx = get_node_idx(&inputs).unwrap(); let parent = inputs.parent().unwrap(); let new_root = inputs diff --git a/src/parser/input_utils.rs b/src/parser/input_utils.rs index 8b4b772..601e38f 100644 --- a/src/parser/input_utils.rs +++ b/src/parser/input_utils.rs @@ -1,15 +1,7 @@ -#![feature(iter_intersperse)] use crate::parser::utils::{string_to_node, NixNode}; use crate::SmlStr; -use anyhow::{anyhow, bail}; -use either::Either; -use nixpkgs_fmt::reformat_node; -use rnix::{parse, types::*, NixLanguage, StrPart, SyntaxKind::*}; -use rowan::{ - api::SyntaxNode, GreenNode, GreenNodeBuilder, GreenNodeData, GreenToken, Language, NodeOrToken, -}; -use std::borrow::Borrow; -use std::ops::Deref; +use rnix::{NixLanguage, SyntaxKind::*}; +use rowan::{api::SyntaxNode, GreenNode, GreenNodeBuilder, GreenToken, Language, NodeOrToken}; use std::string::ToString; #[derive(Debug, Clone, Eq, PartialEq, Default)] @@ -38,12 +30,14 @@ pub fn new_string(s: String) -> GreenNode { node.finish_node(); node.finish() } -//TOKEN_WHITESPACE(" ") 18..19 -//NODE_IDENT 19..23 { -//TOKEN_IDENT("true") 19..23 -//} -//TOKEN_SEMICOLON(";") 23.. +/// creates a bool literal. Example of what +/// this should look like structurally: +/// +/// NODE_IDENT { +/// TOKEN_IDENT("true") +/// } +///TOKEN_SEMICOLON(";") 23.. 
pub fn new_bool_literal(b: bool) -> GreenNode { let mut node = GreenNodeBuilder::new(); let token_ident_kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_IDENT); @@ -60,12 +54,6 @@ pub fn new_key(s: String) -> GreenNode { GreenNode::new(kind, children) } -pub fn gen_key_value(key: String, value: String) -> GreenNode { - let key_node: GreenNode = new_key(key); - let value_node: GreenNode = new_string(value); - new_key_value(key_node, value_node) -} - pub fn new_key_value(key: GreenNode, value: GreenNode) -> GreenNode { let kind = NixLanguage::kind_to_raw(NODE_KEY_VALUE); let assign_kind = NixLanguage::kind_to_raw(TOKEN_ASSIGN); @@ -83,15 +71,6 @@ pub fn new_key_value(key: GreenNode, value: GreenNode) -> GreenNode { GreenNode::new(kind, children) } -// TODO merge with new_ -pub fn gen_attr_set(attr_pairs: Vec<(String, String)>) -> GreenNode { - let new_attr_pairs: Vec<(GreenNode, GreenNode)> = attr_pairs - .iter() - .map(|(key, value)| (new_key(key.to_string()), new_string(value.to_string()))) - .collect(); - new_attr_set(new_attr_pairs) -} - pub fn merge_attr_sets(a1: GreenNode, a2: GreenNode) -> GreenNode { let whitespace_kind = NixLanguage::kind_to_raw(TOKEN_WHITESPACE); let token = GreenToken::new(whitespace_kind, "\n"); diff --git a/src/parser/input_utils_tests.rs b/src/parser/input_utils_tests.rs index 8edeca4..9ab2327 100644 --- a/src/parser/input_utils_tests.rs +++ b/src/parser/input_utils_tests.rs @@ -1,9 +1,10 @@ -use crate::parser::input_utils::{gen_attr_set, merge_attr_sets, new_key, new_string, wrap_root}; +use crate::parser::input_utils::{merge_attr_sets, new_attr_set, new_key, new_string, wrap_root}; use crate::parser::utils::{node_to_string, string_to_node}; +use crate::SmlStr; +use crate::SyntaxStructure; use rnix::{types::*, SyntaxKind::*}; -// TODO remove all the gen stuff and jsut use into trait #[test] pub fn check_new_string() { let phrase = "hello_world".to_string(); @@ -20,10 +21,16 @@ pub fn check_new_string() { #[test] pub fn check_new_attr_set() { let attrset = vec![ - ("test1".to_string(), "value1".to_string()), - ("test2".to_string(), "value2".to_string()), + ( + SyntaxStructure::Key(SmlStr::new_inline("test1")).into(), + SyntaxStructure::StringLiteral(SmlStr::new_inline("value1")).into(), + ), + ( + SyntaxStructure::Key(SmlStr::new_inline("test2")).into(), + SyntaxStructure::StringLiteral(SmlStr::new_inline("value2")).into(), + ), ]; - let result = gen_attr_set(attrset); + let result = new_attr_set(attrset); let root = wrap_root(result); // TODO separate this out into a dump ast method.. 
//let result = Root::cast(root).unwrap(); @@ -39,10 +46,16 @@ pub fn check_new_attr_set() { #[test] pub fn check_merge_attr_set() { let attrset = vec![ - ("test1".to_string(), "value1".to_string()), - ("test2".to_string(), "value2".to_string()), + ( + SyntaxStructure::Key(SmlStr::new_inline("test1")).into(), + SyntaxStructure::StringLiteral(SmlStr::new_inline("value1")).into(), + ), + ( + SyntaxStructure::Key(SmlStr::new_inline("test2")).into(), + SyntaxStructure::StringLiteral(SmlStr::new_inline("value2")).into(), + ), ]; - let result = gen_attr_set(attrset); + let result = new_attr_set(attrset); let merged = merge_attr_sets(result.clone(), result); let root = wrap_root(merged); assert_eq!( diff --git a/src/parser/url.rs b/src/parser/url.rs new file mode 100644 index 0000000..e69de29 From 844d87dd240fb146e4d16b6abb086290eacb8543 Mon Sep 17 00:00:00 2001 From: Justin Restivo Date: Sat, 15 May 2021 09:40:14 -0400 Subject: [PATCH 6/7] more changes --- Cargo.lock | 1 + Cargo.toml | 1 + src/main.rs | 14 ++---- src/parser/body.rs | 8 ++++ src/parser/input_utils.rs | 96 ++++++++++++++++++++++++++------------- src/parser/mod.rs | 1 + src/parser/url.rs | 5 ++ 7 files changed, 84 insertions(+), 42 deletions(-) create mode 100644 src/parser/body.rs diff --git a/Cargo.lock b/Cargo.lock index de40418..d9e7b3b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -411,6 +411,7 @@ dependencies = [ "either", "nixpkgs-fmt", "parse-display", + "regex", "rnix", "rowan", "rust-nix-templater", diff --git a/Cargo.toml b/Cargo.toml index e3be5ac..a0bb092 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,6 +15,7 @@ anyhow = "1.0" rust-nix-templater = { git = "https://github.com/yusdacra/rust-nix-templater.git", branch = "master" } either = "1.6.1" nixpkgs-fmt = "1.1.0" +regex = "1" [package.metadata.nix] app = true diff --git a/src/main.rs b/src/main.rs index 2124b9c..50236f9 100644 --- a/src/main.rs +++ b/src/main.rs @@ -7,9 +7,7 @@ use nixpkgs_fmt::reformat_node; use parser::file::{filename_to_node, write_to_node}; use parser::input_utils::{merge_attr_sets, Input}; use parser::utils::{get_node_idx, remove_input, search_for_attr, NixNode}; -use rowan::GreenNode; -use rowan::NodeOrToken; -use rowan::SyntaxNode; +use rowan::{GreenNode, NodeOrToken, SyntaxNode}; use user::*; struct ActionStack { @@ -28,9 +26,7 @@ impl ActionStack { } fn push_seq(&mut self, actions: impl IntoIterator) { - actions - .into_iter() - .for_each(|action| self.inner.push(action)) + self.inner.extend(actions.into_iter()); } fn pop(&mut self) -> UserAction { @@ -137,11 +133,7 @@ fn main() { UserPrompt::Bool(b) => match cur_action { UserAction::IsInputFlake => { action_stack.pop(); - let mut i = user_data - .new_input - .as_ref() - .and_then(|x: &Input| Some(x.clone())) - .unwrap_or_default(); + let mut i = user_data.new_input.as_ref().cloned().unwrap_or_default(); i.is_flake = Some(SyntaxStructure::Bool(b)); user_data.new_input = Some(i); } diff --git a/src/parser/body.rs b/src/parser/body.rs new file mode 100644 index 0000000..3b88e58 --- /dev/null +++ b/src/parser/body.rs @@ -0,0 +1,8 @@ +#[derive(Debug, Clone, Eq, PartialEq, Default)] +pub struct Input { + pub(crate) name: Option, + pub(crate) url: Option, + pub(crate) is_flake: Option, +} + + diff --git a/src/parser/input_utils.rs b/src/parser/input_utils.rs index 601e38f..11e11d7 100644 --- a/src/parser/input_utils.rs +++ b/src/parser/input_utils.rs @@ -3,6 +3,7 @@ use crate::SmlStr; use rnix::{NixLanguage, SyntaxKind::*}; use rowan::{api::SyntaxNode, GreenNode, GreenNodeBuilder, GreenToken, 
Language, NodeOrToken}; use std::string::ToString; +use NodeOrToken::{Node, Token}; #[derive(Debug, Clone, Eq, PartialEq, Default)] pub struct Input { @@ -13,7 +14,7 @@ pub struct Input { pub fn wrap_root(node: GreenNode) -> NixNode { let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_ROOT); - let root = GreenNode::new(kind, vec![NodeOrToken::Node(node)]); + let root = GreenNode::new(kind, vec![Node(node)]); SyntaxNode::new_root(root) } @@ -48,25 +49,19 @@ pub fn new_bool_literal(b: bool) -> GreenNode { node.finish() } -pub fn new_key(s: String) -> GreenNode { - let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_KEY); - let children = vec![NodeOrToken::Node(new_string(s))]; - GreenNode::new(kind, children) -} - pub fn new_key_value(key: GreenNode, value: GreenNode) -> GreenNode { let kind = NixLanguage::kind_to_raw(NODE_KEY_VALUE); let assign_kind = NixLanguage::kind_to_raw(TOKEN_ASSIGN); let whitespace_kind = NixLanguage::kind_to_raw(TOKEN_WHITESPACE); let semicolon_kind = NixLanguage::kind_to_raw(TOKEN_SEMICOLON); let children = vec![ - NodeOrToken::Node(key), - NodeOrToken::Token(GreenToken::new(whitespace_kind, " ")), - NodeOrToken::Token(GreenToken::new(assign_kind, "=")), - NodeOrToken::Token(GreenToken::new(whitespace_kind, " ")), - NodeOrToken::Node(value), - NodeOrToken::Token(GreenToken::new(semicolon_kind, ";")), - NodeOrToken::Token(GreenToken::new(whitespace_kind, "\n")), + Node(key), + Token(GreenToken::new(whitespace_kind, " ")), + Token(GreenToken::new(assign_kind, "=")), + Token(GreenToken::new(whitespace_kind, " ")), + Node(value), + Token(GreenToken::new(semicolon_kind, ";")), + Token(GreenToken::new(whitespace_kind, "\n")), ]; GreenNode::new(kind, children) } @@ -74,14 +69,14 @@ pub fn new_key_value(key: GreenNode, value: GreenNode) -> GreenNode { pub fn merge_attr_sets(a1: GreenNode, a2: GreenNode) -> GreenNode { let whitespace_kind = NixLanguage::kind_to_raw(TOKEN_WHITESPACE); let token = GreenToken::new(whitespace_kind, "\n"); - let delimiter = NodeOrToken::Token(token); + let delimiter = Token(token); let mut nodes = a2 .children() .into_iter() .filter(|node| node.kind() == NixLanguage::kind_to_raw(NODE_KEY_VALUE)) .map(|x| match x { - NodeOrToken::Node(x) => NodeOrToken::Node(x.clone()), - NodeOrToken::Token(x) => NodeOrToken::Token(x.clone()), + Node(x) => Node(x.clone()), + Token(x) => Token(x.clone()), }) .flat_map(|x| vec![delimiter.clone(), x]) .collect::>(); @@ -99,26 +94,17 @@ pub fn merge_attr_sets(a1: GreenNode, a2: GreenNode) -> GreenNode { pub fn new_attr_set(attr_pairs: Vec<(GreenNode, GreenNode)>) -> GreenNode { let pairs: Vec> = attr_pairs .iter() - .map(move |(k, v)| NodeOrToken::Node(new_key_value(k.clone(), v.clone()))) + .map(move |(k, v)| Node(new_key_value(k.clone(), v.clone()))) .collect::>(); let open_curly_kind = NixLanguage::kind_to_raw(TOKEN_CURLY_B_OPEN); let close_curly_kind = NixLanguage::kind_to_raw(TOKEN_CURLY_B_CLOSE); let attr_set_kind = NixLanguage::kind_to_raw(NODE_ATTR_SET); let whitespace_kind = NixLanguage::kind_to_raw(TOKEN_WHITESPACE); let mut token_vec = Vec::new(); - token_vec.push(vec![NodeOrToken::Token(GreenToken::new( - open_curly_kind, - "{", - ))]); - token_vec.push(vec![NodeOrToken::Token(GreenToken::new( - whitespace_kind, - "\n", - ))]); + token_vec.push(vec![Token(GreenToken::new(open_curly_kind, "{"))]); + token_vec.push(vec![Token(GreenToken::new(whitespace_kind, "\n"))]); token_vec.push(pairs); - token_vec.push(vec![NodeOrToken::Token(GreenToken::new( - close_curly_kind, - "}", - ))]); + 
token_vec.push(vec![Token(GreenToken::new(close_curly_kind, "}"))]); let tokens = token_vec .iter() .flatten() @@ -127,8 +113,55 @@ pub fn new_attr_set(attr_pairs: Vec<(GreenNode, GreenNode)>) -> GreenNode { GreenNode::new(attr_set_kind, tokens) } +pub(crate) struct Key { + val: SmlStr, +} + +impl From for GreenNode { + fn from(item: Key) -> Self { + let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_KEY); + let children = vec![Node(new_string(item.val.to_string()))]; + GreenNode::new(kind, children) + } +} + +pub(crate) struct StringLiteral { + val: SmlStr, +} + +impl From for GreenNode { + fn from(item: StringLiteral) -> Self { + let mut node = GreenNodeBuilder::new(); + let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_STRING); + node.start_node(kind); + let start_string_kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_STRING_START); + node.token(start_string_kind, "\""); + let string_content: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_STRING_CONTENT); + node.token(string_content, &item.val.to_string()); + let end_string_kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_STRING_END); + node.token(end_string_kind, "\""); + node.finish_node(); + node.finish() + } +} + +pub(crate) struct Bool { + val: bool, +} + +pub(crate) struct KeyValue { + key: Key, + val: StringLiteral, +} + +/// rnix::ParsedType is not good for node/creation since it literally wraps +/// SyntaxNode. This is a solution for that +/// This isn't quite a bijection, but we should make a tryFrom implementation +/// both ways (bijection) that fails if SyntaxStructure does not have an analogue ParsedType +/// FIXME this should be nearly trivial given the from implementation with greennode #[derive(Debug, Clone, Eq, PartialEq)] pub(crate) enum SyntaxStructure { + KeyValue(Box, Box), Key(SmlStr), StringLiteral(SmlStr), Bool(bool), @@ -137,9 +170,10 @@ pub(crate) enum SyntaxStructure { impl From for GreenNode { fn from(ss: SyntaxStructure) -> Self { match ss { - SyntaxStructure::Key(k) => new_key(k.to_string()), + SyntaxStructure::Key(k) => (Key { val: k }).into(), SyntaxStructure::StringLiteral(sl) => new_string(sl.to_string()), SyntaxStructure::Bool(b) => new_bool_literal(b), + SyntaxStructure::KeyValue(key, value) => new_key_value((*key).into(), (*value).into()), } } } diff --git a/src/parser/mod.rs b/src/parser/mod.rs index 060435e..d5bd780 100644 --- a/src/parser/mod.rs +++ b/src/parser/mod.rs @@ -2,6 +2,7 @@ pub mod file; pub mod utils; // TODO better name pub mod input_utils; +pub mod url; #[cfg(test)] mod utils_tests; diff --git a/src/parser/url.rs b/src/parser/url.rs index e69de29..0da5d90 100644 --- a/src/parser/url.rs +++ b/src/parser/url.rs @@ -0,0 +1,5 @@ +use regex::Regex; + +pub fn translate_url(s: String) { + let re = Regex::new(r"^(github|gitlab)$").unwrap(); +} From e4439055b8aae44600620d615a829704ed87225a Mon Sep 17 00:00:00 2001 From: Justin Restivo Date: Sun, 13 Jun 2021 18:19:48 -0400 Subject: [PATCH 7/7] redoing the rep --- src/ir/hlir_types.rs | 21 +++ src/ir/mod.rs | 1 + src/main.rs | 12 +- src/parser/input_utils.rs | 273 ++++++++++++++++++++++++-------------- src/user/mod.rs | 4 +- 5 files changed, 202 insertions(+), 109 deletions(-) create mode 100644 src/ir/hlir_types.rs create mode 100644 src/ir/mod.rs diff --git a/src/ir/hlir_types.rs b/src/ir/hlir_types.rs new file mode 100644 index 0000000..1a06bcb --- /dev/null +++ b/src/ir/hlir_types.rs @@ -0,0 +1,21 @@ +use crate::SmlStr; + +enum NixPrimitive { + Bool(bool), + Int(i64), + Str(SmlStr), +} + +trait 
NixValue {} + +enum NixType { + Integer, + String, + Function, + List(Box), + AttrSet(Vec>), +} + +//enum NixStructure { +//List(Vec>), +//} diff --git a/src/ir/mod.rs b/src/ir/mod.rs new file mode 100644 index 0000000..5a95df8 --- /dev/null +++ b/src/ir/mod.rs @@ -0,0 +1 @@ +pub mod hlir_types; diff --git a/src/main.rs b/src/main.rs index 50236f9..a01ea05 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,11 +1,12 @@ +mod ir; mod parser; mod user; -use crate::parser::input_utils::SyntaxStructure; +//use crate::parser::input_utils::SyntaxStructure; use anyhow::anyhow; use nixpkgs_fmt::reformat_node; use parser::file::{filename_to_node, write_to_node}; -use parser::input_utils::{merge_attr_sets, Input}; +use parser::input_utils::{merge_attr_sets, NixFlakeInput, NixKey, NixStringLiteral}; use parser::utils::{get_node_idx, remove_input, search_for_attr, NixNode}; use rowan::{GreenNode, NodeOrToken, SyntaxNode}; use user::*; @@ -95,13 +96,14 @@ fn main() { } UserAction::QueryInputUrl => { let mut i = user_data.new_input.unwrap_or_default(); - i.url = Some(SyntaxStructure::StringLiteral(other)); + i.url = Some(NixStringLiteral { val: other }); user_data.new_input = Some(i); action_stack.pop(); } UserAction::QueryInputName => { let mut i = user_data.new_input.unwrap_or_default(); - i.name = Some(SyntaxStructure::StringLiteral(other)); + // TODO uncomment + //i.name = Some(NixKey { val: other }); user_data.new_input = Some(i); action_stack.pop(); } @@ -134,7 +136,7 @@ fn main() { UserAction::IsInputFlake => { action_stack.pop(); let mut i = user_data.new_input.as_ref().cloned().unwrap_or_default(); - i.is_flake = Some(SyntaxStructure::Bool(b)); + //i.is_flake = Some(SyntaxStructure::Bool(b)); user_data.new_input = Some(i); } UserAction::ConfirmInputCorrect => { diff --git a/src/parser/input_utils.rs b/src/parser/input_utils.rs index 11e11d7..297ea33 100644 --- a/src/parser/input_utils.rs +++ b/src/parser/input_utils.rs @@ -5,28 +5,34 @@ use rowan::{api::SyntaxNode, GreenNode, GreenNodeBuilder, GreenToken, Language, use std::string::ToString; use NodeOrToken::{Node, Token}; +#[inline(always)] +pub fn kind_to_raw(x: rnix::SyntaxKind) -> rowan::SyntaxKind { + NixLanguage::kind_to_raw(x) +} + #[derive(Debug, Clone, Eq, PartialEq, Default)] -pub struct Input { - pub(crate) name: Option, - pub(crate) url: Option, - pub(crate) is_flake: Option, +pub struct NixFlakeInput { + // required + pub(crate) name: NixKey, + pub(crate) url: Option, + pub(crate) is_flake: Option, } pub fn wrap_root(node: GreenNode) -> NixNode { - let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_ROOT); + let kind: rowan::SyntaxKind = kind_to_raw(NODE_ROOT); let root = GreenNode::new(kind, vec![Node(node)]); SyntaxNode::new_root(root) } pub fn new_string(s: String) -> GreenNode { let mut node = GreenNodeBuilder::new(); - let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_STRING); + let kind: rowan::SyntaxKind = kind_to_raw(NODE_STRING); node.start_node(kind); - let start_string_kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_STRING_START); + let start_string_kind: rowan::SyntaxKind = kind_to_raw(TOKEN_STRING_START); node.token(start_string_kind, "\""); - let string_content: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_STRING_CONTENT); + let string_content: rowan::SyntaxKind = kind_to_raw(TOKEN_STRING_CONTENT); node.token(string_content, &s); - let end_string_kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_STRING_END); + let end_string_kind: rowan::SyntaxKind = kind_to_raw(TOKEN_STRING_END); 
node.token(end_string_kind, "\""); node.finish_node(); node.finish() @@ -41,8 +47,8 @@ pub fn new_string(s: String) -> GreenNode { ///TOKEN_SEMICOLON(";") 23.. pub fn new_bool_literal(b: bool) -> GreenNode { let mut node = GreenNodeBuilder::new(); - let token_ident_kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_IDENT); - let node_ident_kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_IDENT); + let token_ident_kind: rowan::SyntaxKind = kind_to_raw(TOKEN_IDENT); + let node_ident_kind: rowan::SyntaxKind = kind_to_raw(NODE_IDENT); node.start_node(node_ident_kind); node.token(token_ident_kind, b.to_string().as_str()); node.finish_node(); @@ -50,10 +56,10 @@ pub fn new_bool_literal(b: bool) -> GreenNode { } pub fn new_key_value(key: GreenNode, value: GreenNode) -> GreenNode { - let kind = NixLanguage::kind_to_raw(NODE_KEY_VALUE); - let assign_kind = NixLanguage::kind_to_raw(TOKEN_ASSIGN); - let whitespace_kind = NixLanguage::kind_to_raw(TOKEN_WHITESPACE); - let semicolon_kind = NixLanguage::kind_to_raw(TOKEN_SEMICOLON); + let kind = kind_to_raw(NODE_KEY_VALUE); + let assign_kind = kind_to_raw(TOKEN_ASSIGN); + let whitespace_kind = kind_to_raw(TOKEN_WHITESPACE); + let semicolon_kind = kind_to_raw(TOKEN_SEMICOLON); let children = vec![ Node(key), Token(GreenToken::new(whitespace_kind, " ")), @@ -67,13 +73,13 @@ pub fn new_key_value(key: GreenNode, value: GreenNode) -> GreenNode { } pub fn merge_attr_sets(a1: GreenNode, a2: GreenNode) -> GreenNode { - let whitespace_kind = NixLanguage::kind_to_raw(TOKEN_WHITESPACE); + let whitespace_kind = kind_to_raw(TOKEN_WHITESPACE); let token = GreenToken::new(whitespace_kind, "\n"); let delimiter = Token(token); let mut nodes = a2 .children() .into_iter() - .filter(|node| node.kind() == NixLanguage::kind_to_raw(NODE_KEY_VALUE)) + .filter(|node| node.kind() == kind_to_raw(NODE_KEY_VALUE)) .map(|x| match x { Node(x) => Node(x.clone()), Token(x) => Token(x.clone()), @@ -83,118 +89,181 @@ pub fn merge_attr_sets(a1: GreenNode, a2: GreenNode) -> GreenNode { nodes.push(delimiter); let idx = a1 .children() - .position(|x| x.kind() == NixLanguage::kind_to_raw(TOKEN_CURLY_B_OPEN)) + .position(|x| x.kind() == kind_to_raw(TOKEN_CURLY_B_OPEN)) .unwrap() + 1; a1.splice_children(idx..idx, nodes) } -// TODO give all the tokens their own constructors -/// inputs are: [(lhs, rhs), ...] 
 
-// TODO give all the tokens their own constructors
-/// inputs are: [(lhs, rhs), ...]
-pub fn new_attr_set(attr_pairs: Vec<(GreenNode, GreenNode)>) -> GreenNode {
-    let pairs: Vec<NodeOrToken<GreenNode, GreenToken>> = attr_pairs
-        .iter()
-        .map(move |(k, v)| Node(new_key_value(k.clone(), v.clone())))
-        .collect::<Vec<_>>();
-    let open_curly_kind = NixLanguage::kind_to_raw(TOKEN_CURLY_B_OPEN);
-    let close_curly_kind = NixLanguage::kind_to_raw(TOKEN_CURLY_B_CLOSE);
-    let attr_set_kind = NixLanguage::kind_to_raw(NODE_ATTR_SET);
-    let whitespace_kind = NixLanguage::kind_to_raw(TOKEN_WHITESPACE);
-    let mut token_vec = Vec::new();
-    token_vec.push(vec![Token(GreenToken::new(open_curly_kind, "{"))]);
-    token_vec.push(vec![Token(GreenToken::new(whitespace_kind, "\n"))]);
-    token_vec.push(pairs);
-    token_vec.push(vec![Token(GreenToken::new(close_curly_kind, "}"))]);
-    let tokens = token_vec
-        .iter()
-        .flatten()
-        .cloned()
-        .collect::<Vec<NodeOrToken<GreenNode, GreenToken>>>();
-    GreenNode::new(attr_set_kind, tokens)
-}
-
-pub(crate) struct Key {
-    val: SmlStr,
-}
-
-impl From<Key> for GreenNode {
-    fn from(item: Key) -> Self {
-        let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_KEY);
+#[derive(Debug, Clone, Eq, PartialEq, Default)]
+pub(crate) struct NixAttrSet<T: Into<GreenNode> + Clone> {
+    kvs: Vec<NixKeyValue<T>>,
+}
+
+impl From<NixFlakeInput> for GreenNode {
+    fn from(input: NixFlakeInput) -> GreenNode {
+        //let mut inputs_gn = Vec::<NixKeyValue<GreenNode>>::new()
+        //if let Some(url) = input.url {
+        //inputs_gn.push((
+        //NixKey{val: SmlStr::new_inline("url")}.into(),
+        //url.into(),
+        //));
+        //let url_gn: GreenNode = url.into();
+        //}
+        //if let Some(is_flake) = input.is_flake {
+        //inputs_gn.push(NixKey{val: SmlStr::new_inline("flake")}.into(), );
+        //}
+        //let input_name = input.name.unwrap().into();
+        //let inner_nodes = inputs;
+        todo!();
+    }
+}
+
+impl<T: Into<GreenNode> + Clone> From<NixAttrSet<T>> for GreenNode {
+    fn from(attrset: NixAttrSet<T>) -> GreenNode {
+        let pairs: Vec<NodeOrToken<GreenNode, GreenToken>> = attrset
+            .kvs
+            .iter()
+            .map(Clone::clone)
+            .map(Into::into)
+            .map(Node)
+            .collect::<Vec<_>>();
+        let open_curly_kind = kind_to_raw(TOKEN_CURLY_B_OPEN);
+        let close_curly_kind = kind_to_raw(TOKEN_CURLY_B_CLOSE);
+        let attr_set_kind = kind_to_raw(NODE_ATTR_SET);
+        let whitespace_kind = kind_to_raw(TOKEN_WHITESPACE);
+        let mut token_vec = Vec::new();
+        token_vec.push(vec![Token(GreenToken::new(open_curly_kind, "{"))]);
+        token_vec.push(vec![Token(GreenToken::new(whitespace_kind, "\n"))]);
+        token_vec.push(pairs);
+        token_vec.push(vec![Token(GreenToken::new(close_curly_kind, "}"))]);
+        let tokens = token_vec
+            .iter()
+            .flatten()
+            .cloned()
+            .collect::<Vec<NodeOrToken<GreenNode, GreenToken>>>();
+        GreenNode::new(attr_set_kind, tokens)
+    }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq, Default)]
+pub(crate) struct NixKey {
+    pub val: SmlStr,
+}
+
+impl From<NixKey> for GreenNode {
+    fn from(item: NixKey) -> Self {
+        let kind: rowan::SyntaxKind = kind_to_raw(NODE_KEY);
         let children = vec![Node(new_string(item.val.to_string()))];
         GreenNode::new(kind, children)
     }
 }
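A sketch of one way the todo!() in From<NixFlakeInput> above could eventually be filled in, following the commented-out draft: build the inner url/flake attrset with the typed builders, then wrap it in an outer attrset keyed by the input's name. Field access and the SmlStr calls assume this lives in input_utils.rs itself; this is illustrative only, not part of the patch.

    impl From<NixFlakeInput> for GreenNode {
        fn from(input: NixFlakeInput) -> GreenNode {
            let mut kvs: Vec<NixKeyValue<GreenNode>> = Vec::new();
            if let Some(url) = input.url {
                kvs.push(NixKeyValue {
                    key: NixKey { val: SmlStr::new_inline("url") },
                    val: url.into(),
                });
            }
            if let Some(is_flake) = input.is_flake {
                kvs.push(NixKeyValue {
                    key: NixKey { val: SmlStr::new_inline("flake") },
                    val: is_flake.into(),
                });
            }
            // inner { url = ...; flake = ...; } attrset
            let inner: GreenNode = NixAttrSet { kvs }.into();
            // outer { <name> = { ... }; } attrset, ready to merge into the flake's inputs
            NixAttrSet {
                kvs: vec![NixKeyValue { key: input.name, val: inner }],
            }
            .into()
        }
    }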
 
-pub(crate) struct StringLiteral {
-    val: SmlStr,
+#[derive(Debug, Clone, Eq, PartialEq, Default)]
+pub(crate) struct NixStringLiteral {
+    pub val: SmlStr,
 }
 
-impl From<StringLiteral> for GreenNode {
-    fn from(item: StringLiteral) -> Self {
+impl From<NixStringLiteral> for GreenNode {
+    fn from(item: NixStringLiteral) -> Self {
         let mut node = GreenNodeBuilder::new();
-        let kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(NODE_STRING);
+        let kind: rowan::SyntaxKind = kind_to_raw(NODE_STRING);
         node.start_node(kind);
-        let start_string_kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_STRING_START);
+        let start_string_kind: rowan::SyntaxKind = kind_to_raw(TOKEN_STRING_START);
         node.token(start_string_kind, "\"");
-        let string_content: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_STRING_CONTENT);
+        let string_content: rowan::SyntaxKind = kind_to_raw(TOKEN_STRING_CONTENT);
         node.token(string_content, &item.val.to_string());
-        let end_string_kind: rowan::SyntaxKind = NixLanguage::kind_to_raw(TOKEN_STRING_END);
+        let end_string_kind: rowan::SyntaxKind = kind_to_raw(TOKEN_STRING_END);
         node.token(end_string_kind, "\"");
         node.finish_node();
         node.finish()
     }
 }
 
-pub(crate) struct Bool {
+#[derive(Debug, Clone, Eq, PartialEq, Default)]
+pub(crate) struct NixBool {
     val: bool,
 }
 
-pub(crate) struct KeyValue {
-    key: Key,
-    val: StringLiteral,
-}
-
-/// rnix::ParsedType is not good for node/creation since it literally wraps
-/// SyntaxNode. This is a solution for that
-/// This isn't quite a bijection, but we should make a tryFrom implementation
-/// both ways (bijection) that fails if SyntaxStructure does not have an analogue ParsedType
-/// FIXME this should be nearly trivial given the from implementation with greennode
-#[derive(Debug, Clone, Eq, PartialEq)]
-pub(crate) enum SyntaxStructure {
-    KeyValue(Box<SyntaxStructure>, Box<SyntaxStructure>),
-    Key(SmlStr),
-    StringLiteral(SmlStr),
-    Bool(bool),
-}
-
-impl From<SyntaxStructure> for GreenNode {
-    fn from(ss: SyntaxStructure) -> Self {
-        match ss {
-            SyntaxStructure::Key(k) => (Key { val: k }).into(),
-            SyntaxStructure::StringLiteral(sl) => new_string(sl.to_string()),
-            SyntaxStructure::Bool(b) => new_bool_literal(b),
-            SyntaxStructure::KeyValue(key, value) => new_key_value((*key).into(), (*value).into()),
-        }
+impl From<NixBool> for GreenNode {
+    fn from(b: NixBool) -> GreenNode {
+        let mut node = GreenNodeBuilder::new();
+        let token_ident_kind: rowan::SyntaxKind = kind_to_raw(TOKEN_IDENT);
+        let node_ident_kind: rowan::SyntaxKind = kind_to_raw(NODE_IDENT);
+        node.start_node(node_ident_kind);
+        node.token(token_ident_kind, b.val.to_string().as_str());
+        node.finish_node();
+        node.finish()
     }
 }
 
-impl From<Input> for GreenNode {
-    fn from(item: Input) -> Self {
-        let mut inputs = Vec::new();
-        if let Some(s) = item.url {
-            inputs.push((
-                SyntaxStructure::Key(SmlStr::new_inline("url")).into(),
-                s.into(),
-            ));
-        }
-        if let Some(s) = item.is_flake {
-            inputs.push((
-                SyntaxStructure::Key(SmlStr::new_inline("flake")).into(),
-                s.into(),
-            ))
-        }
-        let input_name = item.name.unwrap().into();
-        let inner_nodes = new_attr_set(inputs);
-        new_attr_set(vec![(input_name, inner_nodes)])
+// TODO rename to NixKeyValue etc
+#[derive(Debug, Clone, Eq, PartialEq, Default)]
+pub(crate) struct NixKeyValue<T: Into<GreenNode>> {
+    key: NixKey,
+    val: T,
+}
+
+impl<T: Into<GreenNode>> From<NixKeyValue<T>> for GreenNode {
+    fn from(kv: NixKeyValue<T>) -> GreenNode {
+        let kind = kind_to_raw(NODE_KEY_VALUE);
+        let assign_kind = kind_to_raw(TOKEN_ASSIGN);
+        let whitespace_kind = kind_to_raw(TOKEN_WHITESPACE);
+        let semicolon_kind = kind_to_raw(TOKEN_SEMICOLON);
+        let children = vec![
+            Node(kv.key.into()),
+            Token(GreenToken::new(whitespace_kind, " ")),
+            Token(GreenToken::new(assign_kind, "=")),
+            Token(GreenToken::new(whitespace_kind, " ")),
+            Node(kv.val.into()),
+            Token(GreenToken::new(semicolon_kind, ";")),
+            Token(GreenToken::new(whitespace_kind, "\n")),
+        ];
+        GreenNode::new(kind, children)
     }
 }
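A tiny hypothetical check for the new bool builder; it would need to sit somewhere the private val field is visible, e.g. a #[cfg(test)] child module of input_utils rather than input_utils_tests.rs. Since true/false are plain identifiers in the rnix CST, rendering should give the bare word:

    #[test]
    fn nix_bool_renders_as_ident() {
        let node: GreenNode = NixBool { val: true }.into();
        assert_eq!(wrap_root(node).to_string(), "true");
    }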
+
+// rnix::ParsedType is not good for node/creation since it literally wraps
+// SyntaxNode. This is a solution for that
+// This isn't quite a bijection, but we should make a tryFrom implementation
+// both ways (bijection) that fails if SyntaxStructure does not have an analogue ParsedType
+// FIXME this should be nearly trivial given the from implementation with greennode
+//#[derive(Debug, Clone, Eq, PartialEq)]
+//pub(crate) enum SyntaxStructure {
+//KeyValue(Box<SyntaxStructure>, Box<SyntaxStructure>),
+//Key(SmlStr),
+//StringLiteral(SmlStr),
+//Bool(bool),
+//}
+
+//impl From<SyntaxStructure> for GreenNode {
+//fn from(ss: SyntaxStructure) -> Self {
+//match ss {
+//SyntaxStructure::Key(k) => (Key { val: k }).into(),
+//SyntaxStructure::StringLiteral(sl) => new_string(sl.to_string()),
+//SyntaxStructure::Bool(b) => new_bool_literal(b),
+//SyntaxStructure::KeyValue(key, value) => new_key_value((*key).into(), (*value).into()),
+//}
+//}
+//}
+
+//impl From<Input> for GreenNode {
+//fn from(item: Input) -> Self {
+//let mut inputs = Vec::new();
+//if let Some(s) = item.url {
+//inputs.push((
+//SyntaxStructure::Key(SmlStr::new_inline("url")).into(),
+//s.into(),
+//));
+//}
+//if let Some(s) = item.is_flake {
+//inputs.push((
+//SyntaxStructure::Key(SmlStr::new_inline("flake")).into(),
+//s.into(),
+//))
+//}
+//let input_name = item.name.unwrap().into();
+//let inner_nodes = new_attr_set(inputs);
+//new_attr_set(vec![(input_name, inner_nodes)])
+//}
+//}
diff --git a/src/user/mod.rs b/src/user/mod.rs
index 1b867b7..88c7c93 100644
--- a/src/user/mod.rs
+++ b/src/user/mod.rs
@@ -4,7 +4,7 @@ use crate::parser::utils::{get_inputs, NixNode};
 use std::{collections::HashMap, io::Cursor, str::FromStr};
 
-use crate::parser::input_utils::Input;
+use crate::parser::input_utils::NixFlakeInput;
 use parse_display::{Display, FromStr};
 use skim::prelude::*;
 use smol_str::SmolStr;
 
@@ -52,7 +52,7 @@ pub(crate) struct UserMetadata {
     pub(crate) inputs: Option<HashMap<String, NixNode>>,
     pub(crate) filename: Option<String>,
     pub(crate) rust_options: rust_nix_templater::Options,
-    pub(crate) new_input: Option<Input>,
+    pub(crate) new_input: Option<NixFlakeInput>,
 }
 
 impl UserMetadata {