Skip to content

Commit

Permalink
Merge branch 'functions'
Browse files Browse the repository at this point in the history
  • Loading branch information
michaeljones committed Oct 23, 2022
2 parents d54472f + 9ec1497 commit 7623900
Show file tree
Hide file tree
Showing 47 changed files with 926 additions and 72 deletions.
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
/target
/test/template/*.gleam
/test/templates/*.gleam
45 changes: 45 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,51 @@ to use with the `with` syntax below to help Gleam check variables used in the te
{> import my_user.{MyUser}
```

### Functions

You can use the `{> fn ... {> endfn` syntax to add a local function to your template:

```
{> fn full_name(second_name: String)
Lucy {{ second_name }}
{> endfn
```

The function always returns a `StringBuilder` value so you must use `{[ ... ]}` syntax to insert
them into templates. The function body has its last new line trimmed, so the above function called
as `full_name("Gleam")` would result in `Lucy Gleam` and not `\nLucy Gleam\n` or any other
variation. If you want a trailing new line in the output then add an extra blank line before the `{> endfn`.

The function declaration has no impact on the final template as all lines are removed from the
final text.

Like in normal code, functions make it easier to deal with repeated components within your template.

```
{> fn item(name: String)
<li class="px-2 py-1 font-bold">{{ name }}</li>
{> endfn
<ul>
{[ item(name: "Alice") ]}
{[ item(name: "Bob") ]}
{[ item(name: "Cary") ]}
</ul>
```

You can use the `pub` keyword to declare the function as public, in which case other modules will
be able to import it from the Gleam module compiled from the template.

```
{> pub fn user_item(name: String)
<li class="px-2 py-1 font-bold">{{ name }}</li>
{> endfn
```

If a template only includes function declarations and no meaningful template content then matcha
will not add the `render` and `render_builder` functions. Instead, the module will act as a library
of functions where each function body is a template.

## Output

A template like:
Expand Down
30 changes: 29 additions & 1 deletion src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use crate::parser::ParserError;
use crate::renderer::RenderError;
use crate::scanner::{Range, ScanError, Token};

#[derive(Debug, PartialEq, Clone)]
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Source {
pub filename: String,
pub contents: String,
Expand Down Expand Up @@ -106,6 +106,12 @@ pub fn write<W: termcolor::WriteColor>(writer: &mut W, error: Error) {
ParserError::UnexpectedEnd => {
let _ = write!(writer, "Unexpected end");
}
ParserError::FunctionWithinStatement(range) => explain_with_source(
writer,
"Functions must be declared at the top level.",
source,
range,
),
},
Error::Render(error, source) => match error {
RenderError::DuplicateParamName(name, range) => explain_with_source(
Expand Down Expand Up @@ -220,4 +226,26 @@ mod test {
Hello"#
);
}

#[test]
fn test_function_in_for_loop_error() {
assert_error!(
r#"{% for item in list %}
{> fn full_name(second_name: String)
Lucy {{ second_name }}
{> endfn
{% endfor %}"#
);
}

#[test]
fn test_public_function_in_for_loop_error() {
assert_error!(
r#"{% for item in list %}
{> pub fn full_name(second_name: String)
Lucy {{ second_name }}
{> endfn
{% endfor %}"#
);
}
}
4 changes: 2 additions & 2 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ fn convert(prog_name: &str, file_path: &std::path::Path) -> Result<(), ()> {
let from_file_name = file_path
.file_name()
.map(|name| name.to_string_lossy().into_owned())
.unwrap_or(String::from("unknown"));
.unwrap_or_else(|| String::from("unknown"));

let result = std::fs::read_to_string(file_path)
.map_err(|err| Error::IO(err, file_path.to_path_buf()))
Expand Down Expand Up @@ -89,7 +89,7 @@ fn main() {
if opt.verbose {
println!("Converting {}", path.display());
}
Some(convert(NAME, &path.to_path_buf()))
Some(convert(NAME, path))
} else {
None
}
Expand Down
151 changes: 134 additions & 17 deletions src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,12 +13,20 @@ pub enum Node {
For(String, Option<Type>, String, Vec<Node>),
Import(String),
With((String, Range), Type),
BlockFunction(Visibility, String, Vec<Node>, Range),
}

#[derive(Debug)]
pub enum ParserError {
UnexpectedToken(Token, Range, Vec<Token>),
UnexpectedEnd,
FunctionWithinStatement(Range),
}

#[derive(Debug)]
pub enum Visibility {
Public,
Private,
}

pub fn parse(tokens: &mut TokenIter) -> Result<Vec<Node>, ParserError> {
Expand All @@ -27,6 +35,7 @@ pub fn parse(tokens: &mut TokenIter) -> Result<Vec<Node>, ParserError> {
}

fn parse_statement(tokens: &mut TokenIter) -> Result<Node, ParserError> {
log::trace!("parse_statement");
match tokens.next() {
Some((Token::If, _)) => parse_if_statement(tokens),
Some((Token::For, _)) => parse_for_statement(tokens),
Expand All @@ -51,12 +60,12 @@ fn parse_inner(tokens: &mut TokenIter, in_statement: bool) -> Result<Vec<Node>,
Some((Token::OpenValue, _)) => {
let (name, _) = extract_code(tokens)?;
ast.push(Node::Identifier(name.clone()));
consume_token(tokens, Token::CloseValue)?;
consume_token(tokens, Token::CloseValue, false)?;
}
Some((Token::OpenBuilder, _)) => {
let (name, _) = extract_code(tokens)?;
ast.push(Node::Builder(name.clone()));
consume_token(tokens, Token::CloseBuilder)?;
consume_token(tokens, Token::CloseBuilder, false)?;
}
Some((Token::OpenStmt, _)) => {
if let Some((Token::Else, _)) | Some((Token::EndIf, _)) | Some((Token::EndFor, _)) =
Expand All @@ -81,20 +90,46 @@ fn parse_inner(tokens: &mut TokenIter, in_statement: bool) -> Result<Vec<Node>,
ast.push(node);
}
Some((Token::OpenLine, _)) => {
if let Some((Token::EndFn, _)) = tokens.peek() {
break;
}

match tokens.next() {
Some((Token::Import, _)) => {
let import_details = extract_import_details(tokens)?;
ast.push(Node::Import(import_details))
}
Some((Token::With, _)) => {
let (identifier, range) = extract_identifier(tokens)?;
consume_token(tokens, Token::As)?;
consume_token(tokens, Token::As, false)?;
let (type_, _) = extract_identifier(tokens)?;
ast.push(Node::With((identifier, range), type_))
}
Some((Token::Fn, range)) => {
if in_statement {
return Err(ParserError::FunctionWithinStatement(range.clone()));
}

let node = parse_function(tokens, Visibility::Private)?;
ast.push(node);
}
Some((Token::Pub, pub_range)) => {
let fn_range = consume_token(tokens, Token::Fn, false)?;
let range = fn_range
.map(|range| Range {
start: std::cmp::min(range.start, pub_range.start),
end: std::cmp::max(range.end, pub_range.end),
})
.unwrap_or_else(|| pub_range.clone());
if in_statement {
return Err(ParserError::FunctionWithinStatement(range));
}
let node = parse_function(tokens, Visibility::Public)?;
ast.push(node);
}
_ => {}
}
consume_token(tokens, Token::CloseLine)?;
consume_token(tokens, Token::CloseLine, false)?;
}
Some((token, range)) => {
return Err(ParserError::UnexpectedToken(
Expand All @@ -112,24 +147,34 @@ fn parse_inner(tokens: &mut TokenIter, in_statement: bool) -> Result<Vec<Node>,
Ok(ast)
}

fn parse_function(tokens: &mut TokenIter, visibility: Visibility) -> Result<Node, ParserError> {
let (head, range) = extract_code(tokens)?;
consume_token(tokens, Token::CloseLine, false)?;
let body = parse_inner(tokens, true)?;
let body = trim_trailing_newline(body);
consume_token(tokens, Token::EndFn, false)?;

Ok(Node::BlockFunction(visibility, head, body, range))
}

fn parse_if_statement(tokens: &mut TokenIter) -> Result<Node, ParserError> {
log::trace!("parse_if_statement");
let (name, _) = extract_code(tokens)?;
consume_token(tokens, Token::CloseStmt)?;
consume_token(tokens, Token::CloseStmt, false)?;

let if_nodes = parse_inner(tokens, true)?;
let mut else_nodes = vec![];

match tokens.next() {
Some((Token::EndIf, _)) => {
consume_token(tokens, Token::CloseStmt)?;
consume_token(tokens, Token::CloseStmt, false)?;
}
Some((Token::Else, _)) => {
consume_token(tokens, Token::CloseStmt)?;
consume_token(tokens, Token::CloseStmt, false)?;

else_nodes = parse_inner(tokens, true)?;
consume_token(tokens, Token::EndIf)?;
consume_token(tokens, Token::CloseStmt)?;
consume_token(tokens, Token::EndIf, false)?;
consume_token(tokens, Token::CloseStmt, false)?;
}
Some((token, range)) => {
return Err(ParserError::UnexpectedToken(
Expand All @@ -147,11 +192,12 @@ fn parse_if_statement(tokens: &mut TokenIter) -> Result<Node, ParserError> {
}

fn parse_for_statement(tokens: &mut TokenIter) -> Result<Node, ParserError> {
log::trace!("parse_for_statement");
let (entry_identifier, _) = extract_identifier(tokens)?;
let entry_type = match tokens.next() {
Some((Token::As, _)) => {
let (type_identifier, _) = extract_identifier(tokens)?;
consume_token(tokens, Token::In)?;
consume_token(tokens, Token::In, false)?;
Some(type_identifier)
}
Some((Token::In, _)) => None,
Expand All @@ -166,12 +212,12 @@ fn parse_for_statement(tokens: &mut TokenIter) -> Result<Node, ParserError> {
};

let (list_identifier, _) = extract_code(tokens)?;
consume_token(tokens, Token::CloseStmt)?;
consume_token(tokens, Token::CloseStmt, false)?;

let loop_nodes = parse_inner(tokens, true)?;

consume_token(tokens, Token::EndFor)?;
consume_token(tokens, Token::CloseStmt)?;
consume_token(tokens, Token::EndFor, false)?;
consume_token(tokens, Token::CloseStmt, false)?;

Ok(Node::For(
entry_identifier,
Expand Down Expand Up @@ -222,6 +268,7 @@ fn extract_code(tokens: &mut TokenIter) -> Result<(String, Range), ParserError>
Some((Token::CloseStmt, _)) => break,
Some((Token::CloseValue, _)) => break,
Some((Token::CloseBuilder, _)) => break,
Some((Token::CloseLine, _)) => break,
Some((token, range)) => {
if code.is_empty() {
return Err(ParserError::UnexpectedToken(
Expand Down Expand Up @@ -253,19 +300,64 @@ fn extract_import_details(tokens: &mut TokenIter) -> Result<String, ParserError>
}
}

fn consume_token(tokens: &mut TokenIter, expected_token: Token) -> Result<(), ParserError> {
log::trace!("consume_token");
fn consume_token(
tokens: &mut TokenIter,
expected_token: Token,
accept_end: bool,
) -> Result<Option<Range>, ParserError> {
log::trace!("consume_token: {:?}", expected_token);
match tokens.next() {
Some((matched_token, _)) if *matched_token == expected_token => Ok(()),
Some((matched_token, range)) if *matched_token == expected_token => Ok(Some(range.clone())),
Some((matched_token, range)) => Err(ParserError::UnexpectedToken(
matched_token.clone(),
range.clone(),
vec![expected_token],
)),
None => Err(ParserError::UnexpectedEnd),
None => {
if accept_end {
Ok(None)
} else {
log::error!(
"consume_token - found: None, expected_token: {:?}",
expected_token
);
Err(ParserError::UnexpectedEnd)
}
}
}
}

/// Find the last item in the nodes and if it is a Text node then trim the final '\n' from it. If
/// it is just a '\n' then drop the node entirely
fn trim_trailing_newline(nodes: Vec<Node>) -> Vec<Node> {
let length = nodes.len();
nodes
.into_iter()
.enumerate()
.flat_map(|(i, node)| {
if i == length - 1 {
match node {
Node::Text(text) => {
if text == "\n" {
None
} else {
Some(Node::Text(
text.strip_suffix('\n')
.map(String::from)
.unwrap_or_else(|| text.to_string()),
))
}
}

node => Some(node),
}
} else {
Some(node)
}
})
.collect()
}

#[cfg(test)]
mod test {
use std::fmt::Debug;
Expand Down Expand Up @@ -397,4 +489,29 @@ mod test {
fn test_parse_builder_expression() {
assert_parse!("Hello {[ string_builder.from_strings([\"Anna\", \" and \", \"Bob\"]) ]}, good to meet you");
}

#[test]
fn test_parse_function() {
assert_parse!("{> fn classes()\na b c d\n{> endfn\n");
}

#[test]
fn test_parse_function_with_trailing_new_line() {
assert_parse!("{> fn classes()\na b c d\n\n{> endfn\n");
}

#[test]
fn test_parse_public_function() {
assert_parse!("{> pub fn classes()\na b c d\n{> endfn\n");
}

#[test]
fn test_parse_function_with_arg_and_usage() {
assert_parse!(
r#"{> fn full_name(second_name: String)
Lucy {{ second_name }}
{> endfn
Hello {[ full_name("Gleam") ]}"#
);
}
}
Loading

0 comments on commit 7623900

Please sign in to comment.