feat: add macro for ad hoc parsing rules
This commit is contained in:
parent
551b4468aa
commit
6bec93765a
|
@ -7,6 +7,20 @@ use std::collections::{HashMap, BTreeMap, VecDeque};
|
||||||
|
|
||||||
use ponderosa::state::{Land, Rule};
|
use ponderosa::state::{Land, Rule};
|
||||||
|
|
||||||
|
/// Expands to an expression that validates a token count and then runs an
/// inline parsing rule.
///
/// `$tokens` is the tokenized instruction (`&Vec<&str>`), `$argument_count`
/// the exact number of tokens the rule expects, and `$body` a block that
/// evaluates to `Result<u16, String>`. A mismatched count short-circuits to
/// `Err("Too few arguments")` / `Err("Too many arguments")` without ever
/// entering `$body`.
macro_rules! ad_hoc_rule {
    ($tokens:expr, $argument_count:expr, $body:block) => {
        match $tokens.len() {
            n if n < $argument_count => Err("Too few arguments".to_owned()),
            n if n > $argument_count => Err("Too many arguments".to_owned()),
            _ => {
                // The closure parameter is unused from `$body`'s point of
                // view: macro_rules! hygiene means identifiers inside the
                // caller-supplied block resolve at the call site, hence the
                // #[allow(unused)].
                #[allow(unused)]
                let rule = |args: &Vec<&str>| -> Result<u16, String> { $body };
                rule($tokens)
            }
        }
    };
}
|
||||||
|
|
||||||
macro_rules! assert_argument_count {
|
macro_rules! assert_argument_count {
|
||||||
($tokens:expr, $count:expr) => {
|
($tokens:expr, $count:expr) => {
|
||||||
if $tokens.len() < $count {
|
if $tokens.len() < $count {
|
||||||
|
@ -17,20 +31,17 @@ macro_rules! assert_argument_count {
|
||||||
return Err("Too many arguments".to_owned());
|
return Err("Too many arguments".to_owned());
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
($tokens:expr, $count:expr, $body:block) => {
|
|
||||||
if $tokens.len() < $count {
|
|
||||||
Err("Too few arguments".to_owned());
|
|
||||||
} else if $tokens.len() > $count {
|
|
||||||
Err("Too many arguments".to_owned());
|
|
||||||
} else $body
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Parses `string` as an unsigned 16-bit integer.
///
/// # Panics
/// Panics when `string` is not a valid `u16` (non-numeric, negative, or out
/// of range). The panic message names the offending token — the bare
/// `unwrap()` it replaces gave only the opaque `ParseIntError` — and reuses
/// the file's "Cannot parse number: …" wording so failures read consistently
/// with `parse_safely`.
fn parse_number(string: &str) -> u16 {
    string
        .parse::<u16>()
        .unwrap_or_else(|_| panic!("Cannot parse number: {t}", t = string))
}
|
||||||
|
|
||||||
|
/// Parses `string` into any `FromStr` type, converting a parse failure into
/// a human-readable `String` error that names the offending token instead of
/// surfacing the parser's own error type.
fn parse_safely<T: std::str::FromStr>(string: &str) -> Result<T, String> {
    match string.parse::<T>() {
        Ok(value) => Ok(value),
        Err(_) => Err(format!("Cannot parse number: {t}", t = string)),
    }
}
|
||||||
|
|
||||||
fn stack_bit(token: &str) -> Result<u16, String> {
|
fn stack_bit(token: &str) -> Result<u16, String> {
|
||||||
let c = token.as_bytes()[0] as char;
|
let c = token.as_bytes()[0] as char;
|
||||||
match c {
|
match c {
|
||||||
|
@ -54,11 +65,8 @@ fn parse_binary_op(tokens: &Vec<&str>, opcode: u16, signed: bool) -> Result<u16,
|
||||||
fn parse_binary_immediate_op(tokens: &Vec<&str>, opcode: u16, signed: bool) -> Result<u16, String> {
|
fn parse_binary_immediate_op(tokens: &Vec<&str>, opcode: u16, signed: bool) -> Result<u16, String> {
|
||||||
assert_argument_count!(tokens, 4);
|
assert_argument_count!(tokens, 4);
|
||||||
|
|
||||||
let signed_immediate = tokens[2].parse::<i16>()
|
let signed_immediate = parse_safely::<i16>(tokens[2])?;
|
||||||
.map_err(|_| format!("Cannot parse number: {t}", t=tokens[2]))?;
|
let unsigned_immediate = parse_safely::<u16>(tokens[2])?;
|
||||||
|
|
||||||
let unsigned_immediate = tokens[2].parse::<u16>()
|
|
||||||
.map_err(|_| format!("Cannot parse number: {t}", t=tokens[2]))?;
|
|
||||||
|
|
||||||
if signed {
|
if signed {
|
||||||
if signed_immediate < -32 || signed_immediate > 31 {
|
if signed_immediate < -32 || signed_immediate > 31 {
|
||||||
|
@ -82,11 +90,8 @@ fn parse_push(tokens: &Vec<&str>, opcode: u16, signed: bool) -> Result<u16, Stri
|
||||||
assert_argument_count!(tokens, 3);
|
assert_argument_count!(tokens, 3);
|
||||||
|
|
||||||
let dest = stack_bit(tokens[1])?;
|
let dest = stack_bit(tokens[1])?;
|
||||||
let signed_immediate = tokens[2].parse::<i16>()
|
let signed_immediate = parse_safely::<i16>(tokens[2])?;
|
||||||
.map_err(|_| format!("Cannot parse number: {t}", t=tokens[2]))?;
|
let unsigned_immediate = parse_safely::<u16>(tokens[2])?;
|
||||||
|
|
||||||
let unsigned_immediate = tokens[2].parse::<u16>()
|
|
||||||
.map_err(|_| format!("Cannot parse number: {t}", t=tokens[2]))?;
|
|
||||||
|
|
||||||
let immediate = if signed { signed_immediate as u16 } else { unsigned_immediate };
|
let immediate = if signed { signed_immediate as u16 } else { unsigned_immediate };
|
||||||
|
|
||||||
|
@ -111,11 +116,8 @@ fn parse_forestry_op(tokens: &Vec<&str>, opcode: u16) -> Result<u16, String> {
|
||||||
assert_argument_count!(tokens, 4);
|
assert_argument_count!(tokens, 4);
|
||||||
|
|
||||||
let stack = stack_bit(tokens[1])?;
|
let stack = stack_bit(tokens[1])?;
|
||||||
let relative_x = tokens[2].parse::<i16>()
|
let relative_x = parse_safely::<i16>(tokens[2])?;
|
||||||
.map_err(|_| format!("Cannot parse number: {t}", t=tokens[2]))?;
|
let relative_y = parse_safely::<i16>(tokens[3])?;
|
||||||
|
|
||||||
let relative_y = tokens[3].parse::<i16>()
|
|
||||||
.map_err(|_| format!("Cannot parse number: {t}", t=tokens[2]))?;
|
|
||||||
|
|
||||||
if relative_x > 15 || relative_x < -16 {
|
if relative_x > 15 || relative_x < -16 {
|
||||||
Err(format!("Cannot reach cell at {relative_x} along the X-axis; trees may only access cells in the range of -16 to 15"))
|
Err(format!("Cannot reach cell at {relative_x} along the X-axis; trees may only access cells in the range of -16 to 15"))
|
||||||
|
@ -210,7 +212,7 @@ fn main() -> io::Result<()> {
|
||||||
"mult" => parse_binary_op(&tokens, 0b00111, true),
|
"mult" => parse_binary_op(&tokens, 0b00111, true),
|
||||||
"div" => parse_binary_op(&tokens, 0b01000, true),
|
"div" => parse_binary_op(&tokens, 0b01000, true),
|
||||||
"slt" => parse_binary_op(&tokens, 0b01001, true),
|
"slt" => parse_binary_op(&tokens, 0b01001, true),
|
||||||
"not" => assert_argument_count!(tokens, 3, {
|
"not" => ad_hoc_rule!(&tokens, 3, {
|
||||||
let opcode = 0b10010;
|
let opcode = 0b10010;
|
||||||
let (src, dest) = (stack_bit(tokens[1])?, stack_bit(tokens[2])?);
|
let (src, dest) = (stack_bit(tokens[1])?, stack_bit(tokens[2])?);
|
||||||
Ok(opcode<<11 & src<<9 & dest<<8)
|
Ok(opcode<<11 & src<<9 & dest<<8)
|
||||||
|
@ -244,23 +246,20 @@ fn main() -> io::Result<()> {
|
||||||
"swap" => parse_stack_op(&tokens, 0b01111),
|
"swap" => parse_stack_op(&tokens, 0b01111),
|
||||||
"rotate" => parse_stack_op(&tokens, 0b10000),
|
"rotate" => parse_stack_op(&tokens, 0b10000),
|
||||||
"flip" => parse_stack_op(&tokens, 0b10001),
|
"flip" => parse_stack_op(&tokens, 0b10001),
|
||||||
"clear" => {
|
"clear" => ad_hoc_rule!(&tokens, 2, {
|
||||||
let opcode = 0b10010;
|
let opcode = 0b10010;
|
||||||
assert_argument_count!(tokens, 2);
|
|
||||||
let to_clear = stack_bit(tokens[1])?;
|
let to_clear = stack_bit(tokens[1])?;
|
||||||
Ok(opcode<<11 | to_clear<<10)
|
Ok(opcode<<11 | to_clear<<10)
|
||||||
},
|
}),
|
||||||
|
|
||||||
"skip" => {
|
"skip" => ad_hoc_rule!(&tokens, 2, {
|
||||||
let opcode = 0b10011;
|
let opcode = 0b10011;
|
||||||
assert_argument_count!(tokens, 2);
|
Ok(opcode<<11 | (parse_safely::<u16>(tokens[1])? & 0b111111111))
|
||||||
Ok(opcode<<11 | (tokens[1].parse::<u16>() & 0b111111111))
|
}),
|
||||||
},
|
"skim" => ad_hoc_rule!(&tokens, 2, {
|
||||||
"skim" => {
|
|
||||||
let opcode = 0b10100;
|
let opcode = 0b10100;
|
||||||
assert_argument_count!(tokens, 2);
|
|
||||||
Ok(opcode<<11 | stack_bit(tokens[1])? << 9)
|
Ok(opcode<<11 | stack_bit(tokens[1])? << 9)
|
||||||
},
|
}),
|
||||||
|
|
||||||
"sow" => parse_forestry_op(&tokens, 0b10101),
|
"sow" => parse_forestry_op(&tokens, 0b10101),
|
||||||
"reap" => parse_forestry_op(&tokens, 0b10110),
|
"reap" => parse_forestry_op(&tokens, 0b10110),
|
||||||
|
@ -268,11 +267,10 @@ fn main() -> io::Result<()> {
|
||||||
"replant" => parse_forestry_op(&tokens, 0b11000),
|
"replant" => parse_forestry_op(&tokens, 0b11000),
|
||||||
"yield" => Ok(0b11001_0_00000_00000),
|
"yield" => Ok(0b11001_0_00000_00000),
|
||||||
"compost" => Ok(0b11010_0_00000_00000),
|
"compost" => Ok(0b11010_0_00000_00000),
|
||||||
"define" => {
|
"define" => ad_hoc_rule!(&tokens, 2, {
|
||||||
let opcode = 0b11011;
|
let opcode = 0b11011;
|
||||||
assert_argument_count!(tokens, 2);
|
|
||||||
Ok(opcode<<11 | stack_bit(tokens[1])? << 10)
|
Ok(opcode<<11 | stack_bit(tokens[1])? << 10)
|
||||||
},
|
}),
|
||||||
|
|
||||||
"wait" => parse_forestry_op(&tokens, 0b11100),
|
"wait" => parse_forestry_op(&tokens, 0b11100),
|
||||||
"signal" => parse_forestry_op(&tokens, 0b11101),
|
"signal" => parse_forestry_op(&tokens, 0b11101),
|
||||||
|
|
Loading…
Reference in New Issue