Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 43 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,49 @@ fun main() {

---

## Build From Source

```bash
git clone https://github.com/wavefnd/Wave.git
cd Wave
cargo build
```

Compiler binary paths:

- `target/debug/wavec` (development build)
- `target/release/wavec` (release build)

---

## Target Support

- Linux `x86_64`
- macOS (Darwin) `arm64` (Apple Silicon)
- Windows: not supported yet

---

## CLI Usage

```bash
wavec run <file>
wavec build <file>
wavec build -o <file>
wavec img <file>
```

Useful global options:

- `-O0..-O3`, `-Os`, `-Oz`, `-Ofast`
- `--debug-wave=tokens,ast,ir,mc,hex,all`
- `--link=<lib>`
- `-L <path>`
- `--dep-root=<path>`
- `--dep=<name>=<path>`

---

<p align="center">
<a href="https://star-history.com/#wavefnd/Wave&Date">
<img src="https://api.star-history.com/svg?repos=wavefnd/Wave&type=Date" alt="Star History Chart" width="80%">
Expand Down
16 changes: 15 additions & 1 deletion front/lexer/src/ident.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,11 @@ use crate::{Lexer, Token};

impl<'a> Lexer<'a> {
pub(crate) fn identifier(&mut self) -> String {
let start = if self.current > 0 { self.current - 1 } else { 0 };
let start = if self.current > 0 {
self.current - 1
} else {
0
};

while !self.is_at_end() {
let c = self.peek();
Expand Down Expand Up @@ -50,6 +54,11 @@ impl<'a> Lexer<'a> {
lexeme: "enum".to_string(),
line: self.line,
},
"static" => Token {
token_type: TokenType::Static,
lexeme: "static".to_string(),
line: self.line,
},
"var" => Token {
token_type: TokenType::Var,
lexeme: "var".to_string(),
Expand Down Expand Up @@ -135,6 +144,11 @@ impl<'a> Lexer<'a> {
lexeme: "is".to_string(),
line: self.line,
},
"as" => Token {
token_type: TokenType::As,
lexeme: "as".to_string(),
line: self.line,
},
"asm" => Token {
token_type: TokenType::Asm,
lexeme: "asm".to_string(),
Expand Down
2 changes: 2 additions & 0 deletions front/lexer/src/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,7 @@ pub enum TokenType {
Extern,
Type,
Enum,
Static,
Var,
Let,
Mut,
Expand Down Expand Up @@ -118,6 +119,7 @@ pub enum TokenType {
In, // in
Out, // out
Is, // is
As, // as
Asm,
Rol,
Ror,
Expand Down
16 changes: 10 additions & 6 deletions front/parser/src/ast.rs
Original file line number Diff line number Diff line change
Expand Up @@ -178,6 +178,10 @@ pub enum Expression {
operator: Operator,
expr: Box<Expression>,
},
Cast {
expr: Box<Expression>,
target_type: WaveType,
},
IncDec {
kind: IncDecKind,
target: Box<Expression>,
Expand Down Expand Up @@ -300,6 +304,7 @@ pub enum StatementNode {

#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Mutability {
Static,
Var,
Let,
LetMut,
Expand Down Expand Up @@ -354,17 +359,16 @@ impl Expression {
Expression::Unary { operator, expr } => {
let t = expr.get_wave_type(variables);
match operator {
Operator::Neg => {
match &t {
WaveType::Int(_) | WaveType::Uint(_) | WaveType::Float(_) => t,
_ => panic!("unary '-' not allowed for type {:?}", t),
}
}
Operator::Neg => match &t {
WaveType::Int(_) | WaveType::Uint(_) | WaveType::Float(_) => t,
_ => panic!("unary '-' not allowed for type {:?}", t),
},
Operator::Not | Operator::LogicalNot => WaveType::Bool,
Operator::BitwiseNot => t,
_ => panic!("unary op type inference not supported: {:?}", operator),
}
}
Expression::Cast { target_type, .. } => target_type.clone(),
_ => panic!("get_wave_type not implemented for {:?}", self),
}
}
Expand Down
65 changes: 52 additions & 13 deletions front/parser/src/expr/binary.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,18 +9,22 @@
//
// SPDX-License-Identifier: MPL-2.0

use lexer::Token;
use lexer::token::TokenType;
use crate::ast::{Expression, Operator};
use crate::expr::unary::parse_unary_expression;
use crate::types::parse_type_from_stream;
use lexer::token::TokenType;
use lexer::Token;

pub fn parse_logical_or_expression<'a, T>(tokens: &mut std::iter::Peekable<T>) -> Option<Expression>
where
T: Iterator<Item = &'a Token>,
{
let mut left = parse_logical_and_expression(tokens)?;

while matches!(tokens.peek().map(|t| &t.token_type), Some(TokenType::LogicalOr)) {
while matches!(
tokens.peek().map(|t| &t.token_type),
Some(TokenType::LogicalOr)
) {
tokens.next();
let right = parse_logical_and_expression(tokens)?;
left = Expression::BinaryExpression {
Expand All @@ -33,13 +37,18 @@ where
Some(left)
}

pub fn parse_logical_and_expression<'a, T>(tokens: &mut std::iter::Peekable<T>) -> Option<Expression>
pub fn parse_logical_and_expression<'a, T>(
tokens: &mut std::iter::Peekable<T>,
) -> Option<Expression>
where
T: Iterator<Item = &'a Token>,
{
let mut left = parse_bitwise_or_expression(tokens)?;

while matches!(tokens.peek().map(|t| &t.token_type), Some(TokenType::LogicalAnd)) {
while matches!(
tokens.peek().map(|t| &t.token_type),
Some(TokenType::LogicalAnd)
) {
tokens.next();
let right = parse_bitwise_or_expression(tokens)?;
left = Expression::BinaryExpression {
Expand All @@ -58,7 +67,10 @@ where
{
let mut left = parse_bitwise_xor_expression(tokens)?;

while matches!(tokens.peek().map(|t| &t.token_type), Some(TokenType::BitwiseOr)) {
while matches!(
tokens.peek().map(|t| &t.token_type),
Some(TokenType::BitwiseOr)
) {
tokens.next();
let right = parse_bitwise_xor_expression(tokens)?;
left = Expression::BinaryExpression {
Expand All @@ -71,7 +83,9 @@ where
Some(left)
}

pub fn parse_bitwise_xor_expression<'a, T>(tokens: &mut std::iter::Peekable<T>) -> Option<Expression>
pub fn parse_bitwise_xor_expression<'a, T>(
tokens: &mut std::iter::Peekable<T>,
) -> Option<Expression>
where
T: Iterator<Item = &'a Token>,
{
Expand All @@ -90,13 +104,18 @@ where
Some(left)
}

pub fn parse_bitwise_and_expression<'a, T>(tokens: &mut std::iter::Peekable<T>) -> Option<Expression>
pub fn parse_bitwise_and_expression<'a, T>(
tokens: &mut std::iter::Peekable<T>,
) -> Option<Expression>
where
T: Iterator<Item = &'a Token>,
{
let mut left = parse_equality_expression(tokens)?;

while matches!(tokens.peek().map(|t| &t.token_type), Some(TokenType::AddressOf)) {
while matches!(
tokens.peek().map(|t| &t.token_type),
Some(TokenType::AddressOf)
) {
tokens.next();
let right = parse_equality_expression(tokens)?;
left = Expression::BinaryExpression {
Expand Down Expand Up @@ -208,11 +227,13 @@ where
Some(left)
}

pub fn parse_multiplicative_expression<'a, T>(tokens: &mut std::iter::Peekable<T>) -> Option<Expression>
pub fn parse_multiplicative_expression<'a, T>(
tokens: &mut std::iter::Peekable<T>,
) -> Option<Expression>
where
T: Iterator<Item = &'a Token>,
{
let mut left = parse_unary_expression(tokens)?;
let mut left = parse_cast_expression(tokens)?;

while let Some(token) = tokens.peek() {
let op = match token.token_type {
Expand All @@ -222,7 +243,7 @@ where
_ => break,
};
tokens.next();
let right = parse_unary_expression(tokens)?;
let right = parse_cast_expression(tokens)?;
left = Expression::BinaryExpression {
left: Box::new(left),
operator: op,
Expand All @@ -231,4 +252,22 @@ where
}

Some(left)
}
}

/// Parses a cast expression: a unary operand optionally followed by one or
/// more `as <type>` suffixes.
///
/// Casts chain left-associatively, so `x as i32 as f64` becomes
/// `Cast(Cast(x, i32), f64)`. Returns `None` if the operand or any target
/// type fails to parse.
fn parse_cast_expression<'a, T>(tokens: &mut std::iter::Peekable<T>) -> Option<Expression>
where
    T: Iterator<Item = &'a Token>,
{
    // Parse the operand first; every subsequent `as` wraps the result so far.
    let mut result = parse_unary_expression(tokens)?;

    loop {
        match tokens.peek().map(|t| &t.token_type) {
            Some(TokenType::As) => {
                tokens.next(); // skip the `as` keyword itself
                let ty = parse_type_from_stream(tokens)?;
                result = Expression::Cast {
                    expr: Box::new(result),
                    target_type: ty,
                };
            }
            _ => break,
        }
    }

    Some(result)
}
4 changes: 2 additions & 2 deletions front/parser/src/import.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
// SPDX-License-Identifier: MPL-2.0

use crate::ast::{ASTNode};
use crate::{parse, ParseError};
use crate::{parse_syntax_only, ParseError};
use error::error::{WaveError, WaveErrorKind};
use lexer::Lexer;
use std::collections::{HashMap, HashSet};
Expand Down Expand Up @@ -347,7 +347,7 @@ fn parse_wave_file(
let mut lexer = Lexer::new_with_file(&content, abs_path.display().to_string());
let tokens = lexer.tokenize()?;

let ast = parse(&tokens).map_err(|e| {
let ast = parse_syntax_only(&tokens).map_err(|e| {
let (kind, phase, code) = match &e {
ParseError::Syntax(_) => (WaveErrorKind::SyntaxError(e.message().to_string()), "syntax", "E2001"),
ParseError::Semantic(_) => (WaveErrorKind::InvalidStatement(e.message().to_string()), "semantic", "E3001"),
Expand Down
8 changes: 6 additions & 2 deletions front/parser/src/parser/control.rs
Original file line number Diff line number Diff line change
Expand Up @@ -230,8 +230,12 @@ fn parse_for_initializer(tokens: &mut Peekable<Iter<Token>>) -> Option<ASTNode>
parse_typed_for_initializer(tokens, mutability)
}
Some(TokenType::Const) => {
tokens.next(); // consume `const`
parse_typed_for_initializer(tokens, Mutability::Const)
println!("Error: `const` is not allowed in local for-loop initializer");
None
}
Some(TokenType::Static) => {
println!("Error: `static` is not allowed in local for-loop initializer");
None
}
_ if is_typed_for_initializer(tokens) => parse_typed_for_initializer(tokens, Mutability::Var),
_ => {
Expand Down
Loading