[parser] support all instructions
floatshadow committed Mar 2, 2024
1 parent 5565fef commit babf313
Showing 8 changed files with 283 additions and 72 deletions.
30 changes: 28 additions & 2 deletions examples/add.acc
@@ -27,7 +27,6 @@ fn %add_but_unused_bb(#a: i64, #b: i64) -> i64 {
ret %5
}


fn %add_but_direct_link_bb(#a: i64, #b: i64) -> i64 {
%3:
let %4 = add #a, #b
@@ -37,4 +36,31 @@ fn %add_but_direct_link_bb(#a: i64, #b: i64) -> i64 {
ret %5
%6:
ret %4
}
}

fn %add_with_load_store_alloca(#1: i64, #2: i64) -> i64 {
%entry:
let %arg.1.addr = alloca i64, 1
let %arg.2.addr = alloca i64, 1
let %3 = store #1, %arg.1.addr
let %4 = store #2, %arg.2.addr
jmp label %6
%6:
let %7 = load %arg.1.addr
let %8 = load %arg.2.addr
let %9: i64 = add %7, %8
ret %9
}

fn %load_store_alloca_offset(#1: i64, #2: i64) -> i64 {
%entry:
let %arg.array = alloca i64, 6
let %arg.1.addr = offset i64, %arg.array, [0 < 2], [1 < 3]
let %arg.2.addr = offset i64, %arg.array, [1 < 2], [2 < 3]
let %3 = store #1, %arg.1.addr
let %4 = store #2, %arg.2.addr
jmp label %6
%6:
let %8 = load %arg.2.addr
ret %8
}
20 changes: 18 additions & 2 deletions src/apps/executor.rs
@@ -1,7 +1,7 @@
use core::fmt;
use std::collections::HashMap;

use crate::ir::{structures::*, values};
use crate::ir::{structures::*, values::{self, ConstantInt}};

use slotmap::{SlotMap, SecondaryMap};

@@ -16,6 +16,8 @@ pub enum ExecutionError {
NotImplemented(String),
UnexpectedIncompatibleVal(Val),
UseUndefinedValue,
LexerError,
ParseError
}

/// Trace the source of pointer values,
@@ -290,7 +292,7 @@ pub fn single_step(
// compute accumulated offset
let last_dim_subdim = [Some(1usize)];
let total_offset: usize = indices
.into_iter().zip(inner.bounds.iter().cloned().chain(last_dim_subdim.into_iter()))
.into_iter().zip(inner.bounds.iter().cloned().skip(1).chain(last_dim_subdim.into_iter()))
.fold(0usize, | acc, (index, next_dim_bound) | {
acc + index * next_dim_bound.expect("expected bounded dimension in `Offset`")
});
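For reference, here is a minimal standalone sketch of the corrected fold, run on the 2×3 case from `examples/add.acc` (`[1 < 2], [2 < 3]` over a 6-slot allocation). The `.skip(1)` is the substance of the change: each index is paired with the bound of the *next* dimension, and the last index with 1, which yields the row-major element offset. All names below are local to the sketch, not the executor's.

```rust
// Standalone sketch of the offset fold above, using the 2-D case from
// examples/add.acc: a 6-slot i64 allocation indexed as [1 < 2], [2 < 3].
fn main() {
    let indices: Vec<usize> = vec![1, 2];
    let bounds: Vec<Option<usize>> = vec![Some(2), Some(3)];

    // The last index is always scaled by 1 (innermost dimension).
    let last_dim_subdim = [Some(1usize)];

    // `skip(1)` pairs each index with the bound of the *next* dimension,
    // so for a 2x3 view the flat offset is 1 * 3 + 2 * 1 = 5.
    let total_offset: usize = indices
        .into_iter()
        .zip(bounds.iter().cloned().skip(1).chain(last_dim_subdim.into_iter()))
        .fold(0usize, |acc, (index, next_dim_bound)| {
            acc + index * next_dim_bound.expect("expected bounded dimension in `Offset`")
        });

    assert_eq!(total_offset, 5);
    println!("flat element offset = {total_offset}");
}
```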
@@ -445,6 +447,20 @@ pub fn run_on_module(
entry_fn: &str,
args: Vec<Val>
) -> Result<Val, ExecutionError> {
// set all constant values before execution
module.value_ctx
.iter()
.for_each(| (value, value_data) | {
match &value_data.kind {
ValueKind::ConstantInt(inner) =>
env.set_val(value, Val::Integer(inner.value)),
ValueKind::ConstantBool(inner) =>
env.set_val(value, Val::Bool(inner.value)),
ValueKind::ConstantUnit(_) =>
env.set_val(value, Val::Unit),
_ => None,
};
});
let function = module.get_function_ref(entry_fn);
run_on_function(env, module, function, args)
}
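The new block pre-seeds the execution environment with every constant in the module's value table before the entry function runs, so constant operands are already bound when instructions are evaluated. Below is a rough sketch of the same idea, with mock `Val`/`ValueKind` stand-ins and a plain `HashMap` in place of the real environment (both are assumptions, not the crate's types).

```rust
use std::collections::HashMap;

// Simplified stand-ins for the crate's types; the real `Val`, `ValueKind`,
// and environment live in the executor and IR modules.
#[derive(Clone, Debug, PartialEq)]
enum Val { Unit, Bool(bool), Integer(i64) }

enum ValueKind { ConstantInt(i64), ConstantBool(bool), ConstantUnit, Other }

fn main() {
    // Pretend value table: (value id, kind).
    let value_ctx = vec![
        (0usize, ValueKind::ConstantInt(42)),
        (1, ValueKind::ConstantBool(true)),
        (2, ValueKind::ConstantUnit),
        (3, ValueKind::Other),
    ];

    // Pre-seed the environment so constants resolve without per-instruction lookups.
    let mut env: HashMap<usize, Val> = HashMap::new();
    for (id, kind) in &value_ctx {
        match kind {
            ValueKind::ConstantInt(v) => { env.insert(*id, Val::Integer(*v)); }
            ValueKind::ConstantBool(v) => { env.insert(*id, Val::Bool(*v)); }
            ValueKind::ConstantUnit => { env.insert(*id, Val::Unit); }
            _ => {} // non-constant values are bound during execution
        }
    }

    assert_eq!(env.get(&0), Some(&Val::Integer(42)));
}
```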
27 changes: 15 additions & 12 deletions src/frontend/lexer.rs
@@ -194,18 +194,21 @@ impl Lexer {
}

pub fn lex(input: &str) -> IResult<&str, Vec<Token>> {
many0(alt((
// `let` and `le` have a name collision; lex keywords first.
lex_keyword,
lex_literal,
lex_identifier,
lex_primitive_type,
lex_delimiter,
lex_binary_operator,
lex_offset_operator,
lex_memory_operator,
lex_function_cal_operator,
lex_terminator_operator,
all_consuming(
many1(terminated(alt((
// `let` and `le` have a name collision; lex keywords first.
lex_keyword,
lex_literal,
lex_identifier,
lex_primitive_type,
lex_delimiter,
lex_binary_operator,
lex_offset_operator,
lex_memory_operator,
lex_function_cal_operator,
lex_terminator_operator,
)),
filter_whitespace_and_comment,
)))(input)
}
}
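The lexer now requires at least one token (`many1`), skips whitespace and comments after every token (`terminated(..., filter_whitespace_and_comment)`), and rejects any input it cannot tokenize (`all_consuming`). A toy sketch of that shape on plain `&str`, with a made-up two-token language rather than the project's token set:

```rust
use nom::{
    branch::alt,
    bytes::complete::tag,
    character::complete::{alphanumeric1, multispace0},
    combinator::{all_consuming, map},
    multi::many1,
    sequence::terminated,
    IResult,
};

#[derive(Debug, PartialEq)]
enum Tok<'a> {
    Let,
    Ident(&'a str),
}

// Same shape as the lexer above: every token is followed by skippable
// whitespace, and `all_consuming` rejects anything that is not a token.
fn toy_lex(input: &str) -> IResult<&str, Vec<Tok<'_>>> {
    all_consuming(many1(terminated(
        alt((
            map(tag("let"), |_| Tok::Let), // keywords before identifiers, as in `lex`
            map(alphanumeric1, Tok::Ident),
        )),
        multispace0,
    )))(input)
}

fn main() {
    assert!(toy_lex("let x ").is_ok());
    assert!(toy_lex("let x $").is_err()); // `$` is not a token, so lexing fails
}
```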
152 changes: 135 additions & 17 deletions src/frontend/parser.rs
@@ -1,12 +1,13 @@
use std::cell::RefCell;
use std::rc::Rc;
use nom::{
branch::alt, bytes::complete::{is_not, tag, take, take_until}, character::complete::{
alpha0, alpha1,
alphanumeric0, alphanumeric1,
char, digit0, digit1,
multispace0, multispace1
}, combinator::{all_consuming, map, map_res, opt, peek, recognize, value}, error::{Error, ErrorKind, ParseError}, multi::{fold_many1, many0, many0_count, many1, many1_count, separated_list0}, sequence::{delimited, pair, preceded, separated_pair, terminated, tuple}, Compare, CompareResult, Err, IResult, InputIter, InputLength, InputTake, Needed, Slice
branch::alt,
bytes::complete::take,
combinator::{all_consuming, map, opt, peek, value},
error::{context, Error, ErrorKind, ParseError, VerboseError},
multi::{fold_many1, many0, many0_count, many1, many1_count, separated_list0},
sequence::{delimited, pair, preceded, separated_pair, terminated, tuple},
Compare, CompareResult, Err, InputIter, InputLength, InputTake
};

use crate::ir::{
@@ -16,6 +17,8 @@ use crate::ir::{
use super::{lexer::Lexer, token};
use super::token::{Token, Tokens};

pub type IResult<I, O, E=nom::error::VerboseError<I>> = Result<(I, O), nom::Err<E>>;


fn token<'a, Input, Error: ParseError<Input>>(
t: Token<'a>
@@ -44,23 +47,23 @@ fn identifier(input: Tokens) -> IResult<Tokens, &str> {
// println!("identifier {}, now token: {:?}", id, input);
Ok((input, id))
},
_ => Err(Err::Error(Error::new(input, ErrorKind::Tag)))
_ => Err(Err::Error(VerboseError::from_error_kind(input, ErrorKind::Tag)))
}
}

fn i64_literal(input: Tokens) -> IResult<Tokens, i64> {
let (input, tk) = take(1usize)(input)?;
match tk.iter_elements().next().unwrap() {
Token::LtInt64(value) => Ok((input, value.clone())),
_ => Err(Err::Error(Error::new(input, ErrorKind::Tag)))
_ => Err(Err::Error(VerboseError::from_error_kind(input, ErrorKind::Tag)))
}
}

fn i1_literal(input: Tokens) -> IResult<Tokens, bool> {
let (input, tk) = take(1usize)(input)?;
match tk.iter_elements().next().unwrap() {
Token::LtInt1(value) => Ok((input, value.clone())),
_ => Err(Err::Error(Error::new(input, ErrorKind::Tag)))
_ => Err(Err::Error(VerboseError::from_error_kind(input, ErrorKind::Tag)))
}
}
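The parser-local `IResult` alias above defaults the error type to `nom::error::VerboseError`, and the token helpers now build errors with `VerboseError::from_error_kind`, which keeps a trace of contexts instead of a single `ErrorKind`. A small illustration on `&str` input follows; note that `nom::error::convert_error` only renders string-like input, so the project's `Tokens` type would need its own pretty-printer (this is a sketch, not the crate's error reporting).

```rust
use nom::{
    bytes::complete::tag,
    character::complete::digit1,
    error::{context, convert_error, VerboseError},
    sequence::preceded,
    Err, IResult,
};

// A VerboseError-based parser, mirroring the `IResult` alias above.
fn version(input: &str) -> IResult<&str, &str, VerboseError<&str>> {
    context("version number", preceded(tag("v"), digit1))(input)
}

fn main() {
    match version("x42") {
        Err(Err::Error(e)) | Err(Err::Failure(e)) => {
            // `convert_error` renders the accumulated context stack for &str input.
            println!("{}", convert_error("x42", e));
        }
        other => println!("unexpected: {:?}", other),
    }
}
```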

@@ -167,7 +170,7 @@ impl<'a, 'b: 'a> Parser {
builder
.borrow()
.get_value_ref(name)
.unwrap()
.expect("undefined symbol")
}),
map(parse_literal, | value: Value| {
builder
@@ -222,13 +225,131 @@ impl<'a, 'b: 'a> Parser {
anno_ty
)))
}

fn parse_load(
input: Tokens<'a>,
builder: Rc<RefCell<IRBuilder>>
) -> IResult<Tokens<'a>, ValueRef> {
let (input,(name, anno_ty)) =
delimited(token(Token::KwLet), parse_symbol, token(Token::Equal))(input)?;

let (input, addr) = preceded(
token(Token::TkLoad),
| token: Tokens<'a> | Parser::parse_value(token, builder.clone())
)(input)?;

Ok((input, builder.borrow_mut().emit_load(
Some(String::from(name)),
addr,
anno_ty
)))
}

fn parse_store(
input: Tokens<'a>,
builder: Rc<RefCell<IRBuilder>>
) -> IResult<Tokens<'a>, ValueRef> {
let (input,(name, anno_ty)) =
delimited(token(Token::KwLet), parse_symbol, token(Token::Equal))(input)?;

let (input, (stored, addr)) = preceded(
token(Token::TkStore),
separated_pair(
| token: Tokens<'a> | Parser::parse_value(token, builder.clone()),
token(Token::Comma),
| token: Tokens<'a> | Parser::parse_value(token, builder.clone())
))(input)?;

Ok((input, builder.borrow_mut().emit_store(
Some(String::from(name)),
stored,
addr,
anno_ty
)))
}

fn parse_offset(
input: Tokens<'a>,
builder: Rc<RefCell<IRBuilder>>
) -> IResult<Tokens<'a>, ValueRef> {
let (input,(name, anno_ty)) =
delimited(token(Token::KwLet), parse_symbol, token(Token::Equal))(input)?;

let parse_bounds = alt((
value(None, token(Token::LtNone)),
map(i64_literal, | lit | Some(usize::try_from(lit).expect("expect non-negative offset bound")))
));

let (input, (base_ty, addr, indices_bounds)) = preceded(
token(Token::TkOffset),
tuple((
parse_type,
preceded(
token(Token::Comma),
| token: Tokens<'a> | Parser::parse_value(token, builder.clone()),
),
many1(
preceded(
token(Token::Comma),
delimited(
token(Token::LBracket),
separated_pair(
| token: Tokens<'a> | Parser::parse_value(token, builder.clone()),
token(Token::Less),
parse_bounds),
token(Token::RBracket)
)
)
)
))
)(input)?;

Ok((input, builder.borrow_mut().emit_offset(
Some(String::from(name)),
base_ty,
addr,
indices_bounds,
anno_ty
)))
}

fn parse_fncall(
input: Tokens<'a>,
builder: Rc<RefCell<IRBuilder>>
) -> IResult<Tokens<'a>, ValueRef> {
let (input,(name, anno_ty)) =
delimited(token(Token::KwLet), parse_symbol, token(Token::Equal))(input)?;

let (input, ((callee, _), args)) = preceded(
token(Token::TkFnCall),
tuple((
parse_symbol,
separated_list0(
token(Token::Comma),
| token: Tokens<'a> | Parser::parse_value(token, builder.clone()),
)
))
)(input)?;

Ok((input, builder.borrow_mut().emit_function_call(
Some(String::from(name)),
String::from(callee),
args,
anno_ty
)))
}

fn parse_instruction(
input: Tokens<'a>,
builder: Rc<RefCell<IRBuilder>>,
) -> IResult<Tokens<'a>, ValueRef> {
alt((
| input: Tokens<'a> | Parser::parse_binary_expr(input, builder.clone()),
| input: Tokens<'a> | Parser::parse_alloca(input, builder.clone()),
| input: Tokens<'a> | Parser::parse_load(input, builder.clone()),
| input: Tokens<'a> | Parser::parse_store(input, builder.clone()),
| input: Tokens<'a> | Parser::parse_offset(input, builder.clone()),
| input: Tokens<'a> | Parser::parse_fncall(input, builder.clone()),
))(input)
}
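`alt` needs each branch to be a self-contained parser, while these sub-parsers also need the shared `Rc<RefCell<IRBuilder>>`, so every branch is a closure that clones the `Rc` handle (a cheap reference-count bump) and forwards to the method. A minimal sketch of the same pattern with a dummy builder (all names hypothetical):

```rust
use std::{cell::RefCell, rc::Rc};
use nom::{branch::alt, bytes::complete::tag, IResult};

// Stand-in for the shared IRBuilder: just counts emitted instructions.
#[derive(Default)]
struct ToyBuilder { emitted: usize }

fn parse_a<'a>(input: &'a str, b: Rc<RefCell<ToyBuilder>>) -> IResult<&'a str, ()> {
    let (input, _) = tag("a")(input)?;
    b.borrow_mut().emitted += 1;
    Ok((input, ()))
}

fn parse_b<'a>(input: &'a str, b: Rc<RefCell<ToyBuilder>>) -> IResult<&'a str, ()> {
    let (input, _) = tag("b")(input)?;
    b.borrow_mut().emitted += 1;
    Ok((input, ()))
}

// Same shape as `parse_instruction`: every alt branch is a closure that
// clones the Rc handle and calls a stateful sub-parser.
fn parse_any<'a>(input: &'a str, b: Rc<RefCell<ToyBuilder>>) -> IResult<&'a str, ()> {
    alt((
        |i: &'a str| parse_a(i, b.clone()),
        |i: &'a str| parse_b(i, b.clone()),
    ))(input)
}

fn main() {
    let builder = Rc::new(RefCell::new(ToyBuilder::default()));
    let _ = parse_any("b", builder.clone());
    assert_eq!(builder.borrow().emitted, 1);
}
```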

@@ -386,14 +507,11 @@ impl<'a, 'b: 'a> Parser {
input: Tokens<'a>,
builder: Rc<RefCell<IRBuilder>>
) -> IResult<Tokens<'a>, Module> {
let (input, _) = many0(
let (input, _) = all_consuming(many1(
| input: Tokens<'a> | Parser::parse_function(input, builder.clone())
)(input)?;
if input.input_len() > 0 {
Err(Err::Failure(Error::new(input, ErrorKind::Tag)))
} else {
Ok((input, builder.borrow().module.clone()))
}
))(input)?;

Ok((input, builder.borrow().module.clone()))
}
}
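`parse_module` previously looped with `many0` and then hand-checked for leftover tokens; `all_consuming(many1(...))` now demands at least one function and turns trailing input into a parse error in one step, as the toy example below illustrates (not the project's grammar):

```rust
use nom::{bytes::complete::tag, combinator::all_consuming, multi::many1, IResult};

// all_consuming(many1(...)) fails if anything is left over, which replaces
// the old manual `input.input_len() > 0` check in `parse_module`.
fn fns(input: &str) -> IResult<&str, Vec<&str>> {
    all_consuming(many1(tag("fn;")))(input)
}

fn main() {
    assert!(fns("fn;fn;").is_ok());   // everything consumed
    assert!(fns("fn;junk").is_err()); // leftover input is now a parse error
    assert!(fns("").is_err());        // many1 also requires at least one item
}
```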

2 changes: 1 addition & 1 deletion src/ir/builders.rs
@@ -364,7 +364,7 @@ impl IRBuilder {
inner_name
);

let mut offset = values::Offset::new_value(addr_ty, addr, index, bound);
let mut offset = values::Offset::new_value(base_type, addr, index, bound);
offset.set_name(inner_name);
self.insert_instruction_symbol(offset)
}
(3 remaining changed files not shown)
