diff --git a/README.md b/README.md
index a3e2b982..fdf77a78 100644
--- a/README.md
+++ b/README.md
@@ -77,9 +77,45 @@ Compiler binary path:
## Target Support
-- Linux `x86_64`
-- macOS (Darwin) `arm64` (Apple Silicon)
-- Windows: not supported yet
+
+Wave follows a tiered platform policy to set clear expectations for stability, CI, and standard library coverage.
+
+
+
+ 🥇 Tier 1 · Primary — Linux, Darwin, WaveOS
+
+ - Full standard library support
+ - Required CI coverage
+ - ABI stability commitment
+ - Release-blocking platforms
+
+
+
+
+ 🥈 Tier 2 · Secondary — FreeBSD, Redox, Fuchsia
+
+ - Build support maintained
+ - Partial standard library coverage
+ - Open to community collaboration
+
+
+
+
+ 🥉 Tier 3 · Experimental — OpenBSD
+
+ - Compiler build/compile path prioritized
+ - Minimal standard library coverage
+
+
+
+
+ 🪦 Tier 4 · Unofficial — Windows
+
+ - Build may work in some environments, but is not guaranteed
+ - No official standard library target at this time
+ - Community-maintained status
+
+
---
@@ -88,8 +124,8 @@ Compiler binary path:
```bash
wavec run
wavec build
-wavec build -o
-wavec img
+wavec build -o
+wavec build -c
```
Useful global options:
diff --git a/examples/doom.wave b/examples/doom.wave
new file mode 100644
index 00000000..196be566
--- /dev/null
+++ b/examples/doom.wave
@@ -0,0 +1,297 @@
+import("std::sys::linux::fs");
+import("std::sys::linux::tty");
+import("std::time::clock");
+import("std::time::sleep");
+import("std::math::trig");
+
+const STDIN_FILENO: i32 = 0;
+const STDOUT_FILENO: i32 = 1;
+
+const SCREEN_W: i32 = 120;
+const SCREEN_H: i32 = 40;
+const MAP_W: i32 = 24;
+const MAP_H: i32 = 24;
+const ROW_STRIDE: i32 = 121;
+const SCREEN_SIZE: i32 = 4840;
+
+const FOV: f64 = 1.0471975511965976;
+const DEPTH: f64 = 20.0;
+const MOVE_SPEED: f64 = 5.0;
+const ROT_SPEED: f64 = 2.2;
+const STATUS_TEXT_LEN: i32 = 52;
+
+fun cstrlen(s: str) -> i32 {
+ var i: i32 = 0;
+ while (s[i] != 0) {
+ i += 1;
+ }
+ return i;
+}
+
+fun write_str(s: str) {
+ write(STDOUT_FILENO as i64, s as ptr, cstrlen(s) as i64);
+}
+
+fun is_wall(map_rows: ptr, x: f64, y: f64) -> bool {
+ var tx: i32 = x as i32;
+ var ty: i32 = y as i32;
+
+ if (tx < 0 || tx >= MAP_W || ty < 0 || ty >= MAP_H) {
+ return true;
+ }
+
+ var row: str = map_rows[ty];
+ if (row[tx] == 35) {
+ return true;
+ }
+
+ return false;
+}
+
+fun main() {
+ var map_data: array = [
+ "########################",
+ "#..............##......#",
+ "#......###.....##......#",
+ "#......#...............#",
+ "#......#.......#####...#",
+ "#...............#......#",
+ "#....######.....#......#",
+ "#....#..........#......#",
+ "#....#..........#......#",
+ "#....#....###...#......#",
+ "#....#....#.....#......#",
+ "#....#....#.....#......#",
+ "#....######.....####...#",
+ "#......................#",
+ "#.....#####............#",
+ "#.....#................#",
+ "#.....#.........###....#",
+ "#.....#................#",
+ "#.....##########.......#",
+ "#......................#",
+ "#.......########.......#",
+ "#......................#",
+ "#......................#",
+ "########################"
+ ];
+ var status_text: str = "Doom-ish Raycaster | WASD move, Q/E turn, X/ESC exit";
+
+ var screen: array;
+
+ var term: TtyRawState;
+ if (tty_enable_raw_nonblock(STDIN_FILENO, &term) < 0) {
+ println("Failed to enable raw terminal mode.");
+ return;
+ }
+
+ write_str("\x1b[2J\x1b[H\x1b[?25l");
+
+ var player_x: f64 = 8.0;
+ var player_y: f64 = 8.0;
+ var player_a: f64 = 0.0;
+
+ var prev_ns: i64 = time_now_monotonic_ns();
+ if (prev_ns < 0) {
+ prev_ns = 0;
+ }
+ var running: bool = true;
+ var input_buf: array;
+
+ while (running) {
+ var now_ns: i64 = time_now_monotonic_ns();
+ var dt: f64 = 0.016;
+ if (now_ns > 0 && prev_ns > 0 && now_ns >= prev_ns) {
+ dt = (now_ns - prev_ns) as f64 / 1000000000.0;
+ }
+ prev_ns = now_ns;
+ if (dt > 0.2) {
+ dt = 0.2;
+ }
+
+ var bytes_read: i64 = read(STDIN_FILENO as i64, &input_buf[0], 16);
+ if (bytes_read > 0) {
+ var i: i32 = 0;
+ var n: i32 = bytes_read as i32;
+ while (i < n) {
+ var ch: u8 = input_buf[i];
+
+ if (ch == 113 || ch == 81) {
+ player_a -= ROT_SPEED * dt;
+ } else if (ch == 101 || ch == 69) {
+ player_a += ROT_SPEED * dt;
+ } else if (ch == 119 || ch == 87) {
+ var nx: f64 = player_x + cos_f64(player_a) * MOVE_SPEED * dt;
+ var ny: f64 = player_y + sin_f64(player_a) * MOVE_SPEED * dt;
+ if (!is_wall(&map_data[0], nx, ny)) {
+ player_x = nx;
+ player_y = ny;
+ }
+ } else if (ch == 115 || ch == 83) {
+ var nx: f64 = player_x - cos_f64(player_a) * MOVE_SPEED * dt;
+ var ny: f64 = player_y - sin_f64(player_a) * MOVE_SPEED * dt;
+ if (!is_wall(&map_data[0], nx, ny)) {
+ player_x = nx;
+ player_y = ny;
+ }
+ } else if (ch == 97 || ch == 65) {
+ var nx: f64 = player_x + sin_f64(player_a) * MOVE_SPEED * dt;
+ var ny: f64 = player_y - cos_f64(player_a) * MOVE_SPEED * dt;
+ if (!is_wall(&map_data[0], nx, ny)) {
+ player_x = nx;
+ player_y = ny;
+ }
+ } else if (ch == 100 || ch == 68) {
+ var nx: f64 = player_x - sin_f64(player_a) * MOVE_SPEED * dt;
+ var ny: f64 = player_y + cos_f64(player_a) * MOVE_SPEED * dt;
+ if (!is_wall(&map_data[0], nx, ny)) {
+ player_x = nx;
+ player_y = ny;
+ }
+ } else if (ch == 120 || ch == 88 || ch == 27) {
+ running = false;
+ }
+
+ i += 1;
+ }
+ }
+
+ var x: i32 = 0;
+ while (x < SCREEN_W) {
+ var ray_a: f64 = (player_a - (FOV / 2.0)) + ((x as f64) / (SCREEN_W as f64)) * FOV;
+ var eye_x: f64 = cos_f64(ray_a);
+ var eye_y: f64 = sin_f64(ray_a);
+
+ var dist: f64 = 0.0;
+ var hit_wall: bool = false;
+ var boundary: bool = false;
+
+ while (!hit_wall && dist < DEPTH) {
+ dist += 0.05;
+
+ var test_x: i32 = (player_x + eye_x * dist) as i32;
+ var test_y: i32 = (player_y + eye_y * dist) as i32;
+
+ if (test_x < 0 || test_x >= MAP_W || test_y < 0 || test_y >= MAP_H) {
+ hit_wall = true;
+ dist = DEPTH;
+ } else if (map_data[test_y][test_x] == 35) {
+ hit_wall = true;
+
+ var tx: i32 = 0;
+ while (tx < 2) {
+ var ty: i32 = 0;
+ while (ty < 2) {
+ var vx: f64 = (test_x as f64) + (tx as f64) - player_x;
+ var vy: f64 = (test_y as f64) + (ty as f64) - player_y;
+ var d: f64 = sqrt_f64(vx * vx + vy * vy);
+ if (d > 0.000001) {
+ var dot: f64 = (eye_x * vx / d) + (eye_y * vy / d);
+ if (dot > 0.995) {
+ boundary = true;
+ }
+ }
+ ty += 1;
+ }
+ tx += 1;
+ }
+ }
+ }
+
+ var ceiling: i32 = ((SCREEN_H as f64) / 2.0 - (SCREEN_H as f64) / dist) as i32;
+ var floor: i32 = SCREEN_H - ceiling;
+ if (ceiling < 0) {
+ ceiling = 0;
+ }
+ if (floor >= SCREEN_H) {
+ floor = SCREEN_H - 1;
+ }
+
+ var wall_shade: u8 = 32;
+ if (dist <= DEPTH / 6.0) {
+ wall_shade = 64;
+ } else if (dist <= DEPTH / 4.0) {
+ wall_shade = 35;
+ } else if (dist <= DEPTH / 3.0) {
+ wall_shade = 79;
+ } else if (dist <= DEPTH / 2.0) {
+ wall_shade = 61;
+ } else if (dist <= DEPTH) {
+ wall_shade = 45;
+ }
+
+ if (boundary) {
+ wall_shade = 124;
+ }
+
+ var y: i32 = 0;
+ while (y < SCREEN_H) {
+ var idx: i32 = y * ROW_STRIDE + x;
+
+ if (y < ceiling) {
+ deref screen[idx] = 32;
+ } else if (y >= ceiling && y <= floor) {
+ deref screen[idx] = wall_shade;
+ } else {
+ var b: f64 = 1.0 - (((y as f64) - (SCREEN_H as f64) / 2.0) / ((SCREEN_H as f64) / 2.0));
+ if (b < 0.25) {
+ deref screen[idx] = 35;
+ } else if (b < 0.5) {
+ deref screen[idx] = 120;
+ } else if (b < 0.75) {
+ deref screen[idx] = 46;
+ } else {
+ deref screen[idx] = 32;
+ }
+ }
+
+ y += 1;
+ }
+
+ x += 1;
+ }
+
+ var y2: i32 = 0;
+ while (y2 < SCREEN_H) {
+ var row_end: i32 = y2 * ROW_STRIDE + SCREEN_W;
+ deref screen[row_end] = 10;
+ y2 += 1;
+ }
+
+ var si: i32 = 0;
+ while (si < SCREEN_W && si < STATUS_TEXT_LEN) {
+ deref screen[si] = status_text[si];
+ si += 1;
+ }
+
+ var my: i32 = 0;
+ while (my < MAP_H) {
+ if (my + 1 >= SCREEN_H) {
+ my = MAP_H;
+ } else {
+ var mx: i32 = 0;
+ while (mx < MAP_W && mx < SCREEN_W) {
+ var idx2: i32 = (my + 1) * ROW_STRIDE + mx;
+ deref screen[idx2] = map_data[my][mx];
+ mx += 1;
+ }
+ my += 1;
+ }
+ }
+
+ var px: i32 = player_x as i32;
+ var py: i32 = player_y as i32;
+ if (px >= 0 && px < MAP_W && py >= 0 && py < MAP_H && py + 1 < SCREEN_H) {
+ var pidx: i32 = (py + 1) * ROW_STRIDE + px;
+ deref screen[pidx] = 80;
+ }
+
+ write_str("\x1b[H");
+ write(STDOUT_FILENO as i64, &screen[0], SCREEN_SIZE as i64);
+
+ time_sleep_us(16000);
+ }
+
+ tty_restore(STDIN_FILENO, &term);
+ write_str("\x1b[?25h\n");
+}
diff --git a/front/error/src/error.rs b/front/error/src/error.rs
index dde181cb..54e06163 100644
--- a/front/error/src/error.rs
+++ b/front/error/src/error.rs
@@ -255,14 +255,24 @@ impl WaveError {
for i in start..=end {
let ln = i + 1;
let ln_str = format!("{:>width$}", ln, width = width);
- eprintln!(" {} {} {}", ln_str.color("38,139,235").bold(), pipe, lines[i]);
+ eprintln!(
+ " {} {} {}",
+ ln_str.color("38,139,235").bold(),
+ pipe,
+ lines[i]
+ );
}
let pad = " ".repeat(width);
let spaces = " ".repeat(col.saturating_sub(1));
- let marks = "^".repeat(self.span_len.max(1)).color(self.severity_color()).bold();
+ let marks = "^"
+ .repeat(self.span_len.max(1))
+ .color(self.severity_color())
+ .bold();
match &self.label {
- Some(label) => eprintln!(" {} {} {}{} {}", pad, pipe, spaces, marks, label.dim()),
+ Some(label) => {
+ eprintln!(" {} {} {}{} {}", pad, pipe, spaces, marks, label.dim())
+ }
None => eprintln!(" {} {} {}{}", pad, pipe, spaces, marks),
}
@@ -273,11 +283,19 @@ impl WaveError {
if let Some(source_line) = &self.source {
let width = line.to_string().len().max(2);
let ln_str = format!("{:>width$}", line, width = width);
- eprintln!(" {} {} {}", ln_str.color("38,139,235").bold(), pipe, source_line);
+ eprintln!(
+ " {} {} {}",
+ ln_str.color("38,139,235").bold(),
+ pipe,
+ source_line
+ );
let pad = " ".repeat(width);
let spaces = " ".repeat(col.saturating_sub(1));
- let marks = "^".repeat(self.span_len.max(1)).color(self.severity_color()).bold();
+ let marks = "^"
+ .repeat(self.span_len.max(1))
+ .color(self.severity_color())
+ .bold();
match &self.label {
Some(label) => eprintln!(" {} {} {}{} {}", pad, pipe, spaces, marks, label.dim()),
None => eprintln!(" {} {} {}{}", pad, pipe, spaces, marks),
@@ -299,7 +317,12 @@ impl WaveError {
let code = self
.code
.as_ref()
- .map(|c| format!("[{}]", c).color(self.severity_color()).bold().to_string())
+ .map(|c| {
+ format!("[{}]", c)
+ .color(self.severity_color())
+ .bold()
+ .to_string()
+ })
.unwrap_or_default();
if code.is_empty() {
diff --git a/front/lexer/src/core.rs b/front/lexer/src/core.rs
index 697c7d82..f25a7580 100644
--- a/front/lexer/src/core.rs
+++ b/front/lexer/src/core.rs
@@ -21,7 +21,11 @@ pub struct Token {
impl Token {
pub fn new(token_type: TokenType, lexeme: String, line: usize) -> Self {
- Token { token_type, lexeme, line }
+ Token {
+ token_type,
+ lexeme,
+ line,
+ }
}
}
diff --git a/front/lexer/src/cursor.rs b/front/lexer/src/cursor.rs
index d00a0313..16779de4 100644
--- a/front/lexer/src/cursor.rs
+++ b/front/lexer/src/cursor.rs
@@ -9,7 +9,7 @@
//
// SPDX-License-Identifier: MPL-2.0
-use crate::{Lexer};
+use crate::Lexer;
impl<'a> Lexer<'a> {
pub(crate) fn is_at_end(&self) -> bool {
@@ -25,7 +25,11 @@ impl<'a> Lexer<'a> {
let (ch, size) = match std::str::from_utf8(rest.as_ref()) {
Ok(s) => {
let mut chars = s.chars();
- if let Some(c) = chars.next() { (c, c.len_utf8()) } else { ('\0', 1) }
+ if let Some(c) = chars.next() {
+ (c, c.len_utf8())
+ } else {
+ ('\0', 1)
+ }
}
Err(_) => ('\0', 1),
};
@@ -60,8 +64,12 @@ impl<'a> Lexer<'a> {
}
pub(crate) fn match_next(&mut self, expected: char) -> bool {
- if self.is_at_end() { return false; }
- if self.peek() != expected { return false; }
+ if self.is_at_end() {
+ return false;
+ }
+ if self.peek() != expected {
+ return false;
+ }
self.advance();
true
}
diff --git a/front/lexer/src/lib.rs b/front/lexer/src/lib.rs
index 428cb30a..838e7c1b 100644
--- a/front/lexer/src/lib.rs
+++ b/front/lexer/src/lib.rs
@@ -9,12 +9,12 @@
//
// SPDX-License-Identifier: MPL-2.0
-pub mod token;
pub mod core;
pub mod cursor;
-pub mod trivia;
-pub mod literals;
pub mod ident;
+pub mod literals;
pub mod scan;
+pub mod token;
+pub mod trivia;
pub use crate::core::{Lexer, Token};
diff --git a/front/lexer/src/literals.rs b/front/lexer/src/literals.rs
index 44b82e8c..300584bf 100644
--- a/front/lexer/src/literals.rs
+++ b/front/lexer/src/literals.rs
@@ -20,8 +20,8 @@ impl<'a> Lexer<'a> {
while !self.is_at_end() && self.peek() != '"' {
if self.peek() == '\n' {
- return Err(
- self.make_error(
+ return Err(self
+ .make_error(
WaveErrorKind::UnterminatedString,
"unterminated string literal (newline encountered before closing quote)",
start_line,
@@ -30,16 +30,15 @@ impl<'a> Lexer<'a> {
.with_code("E1003")
.with_label("string literal starts here")
.with_help("close the string with `\"` before the end of line")
- .with_suggestion("use `\\n` if you meant to embed a newline"),
- );
+ .with_suggestion("use `\\n` if you meant to embed a newline"));
}
let c = self.advance();
if c == '\\' {
if self.is_at_end() {
- return Err(
- self.make_error_here(
+ return Err(self
+ .make_error_here(
WaveErrorKind::InvalidString(
"dangling escape sequence in string literal".to_string(),
),
@@ -47,8 +46,9 @@ impl<'a> Lexer<'a> {
)
.with_code("E1004")
.with_label("escape sequence is incomplete")
- .with_help("append a valid escape character such as `n`, `t`, `\"`, or `\\`"),
- );
+ .with_help(
+ "append a valid escape character such as `n`, `t`, `\"`, or `\\`",
+ ));
}
let next = self.advance();
@@ -60,29 +60,27 @@ impl<'a> Lexer<'a> {
'"' => string_literal.push('"'),
'x' => {
if self.is_at_end() {
- return Err(
- self.make_error_here(
+ return Err(self
+ .make_error_here(
WaveErrorKind::InvalidString(
"incomplete hex escape sequence".to_string(),
),
"invalid escape sequence: expected two hex digits after `\\x`",
)
.with_code("E1004")
- .with_help("example: `\\x41` for `A`"),
- );
+ .with_help("example: `\\x41` for `A`"));
}
let h1 = self.advance();
if self.is_at_end() {
- return Err(
- self.make_error_here(
+ return Err(self
+ .make_error_here(
WaveErrorKind::InvalidString(
"incomplete hex escape sequence".to_string(),
),
"invalid escape sequence: expected two hex digits after `\\x`",
)
.with_code("E1004")
- .with_help("example: `\\x41` for `A`"),
- );
+ .with_help("example: `\\x41` for `A`"));
}
let h2 = self.advance();
@@ -90,8 +88,8 @@ impl<'a> Lexer<'a> {
let value = match u8::from_str_radix(&hex, 16) {
Ok(v) => v,
Err(_) => {
- return Err(
- self.make_error_here(
+ return Err(self
+ .make_error_here(
WaveErrorKind::InvalidString(format!(
"invalid hex escape: \\x{}",
hex
@@ -102,17 +100,18 @@ impl<'a> Lexer<'a> {
),
)
.with_code("E1004")
- .with_label("hex escapes must be exactly two hexadecimal digits")
- .with_help("valid range: `00` to `FF`"),
- );
+ .with_label(
+ "hex escapes must be exactly two hexadecimal digits",
+ )
+ .with_help("valid range: `00` to `FF`"));
}
};
string_literal.push(value as char);
}
_ => {
- return Err(
- self.make_error_here(
+ return Err(self
+ .make_error_here(
WaveErrorKind::InvalidString(format!(
"unknown escape sequence: \\{}",
next
@@ -121,19 +120,17 @@ impl<'a> Lexer<'a> {
)
.with_code("E1004")
.with_label("unsupported escape sequence")
- .with_help("supported escapes: \\\\, \\\", \\n, \\t, \\r, \\xNN"),
- );
+ .with_help("supported escapes: \\\\, \\\", \\n, \\t, \\r, \\xNN"));
}
}
- }
- else {
+ } else {
string_literal.push(c);
}
}
if self.is_at_end() {
- return Err(
- self.make_error(
+ return Err(self
+ .make_error(
WaveErrorKind::UnterminatedString,
"unterminated string literal; missing closing quote",
start_line,
@@ -141,8 +138,7 @@ impl<'a> Lexer<'a> {
)
.with_code("E1003")
.with_label("string literal starts here")
- .with_help("add `\"` to close the string"),
- );
+ .with_help("add `\"` to close the string"));
}
self.advance(); // closing quote
@@ -154,32 +150,30 @@ impl<'a> Lexer<'a> {
let start_col = self.current_column().saturating_sub(1).max(1);
if self.is_at_end() {
- return Err(
- self.make_error(
+ return Err(self
+ .make_error(
WaveErrorKind::InvalidString("empty char literal".to_string()),
"unterminated char literal; expected a character before closing quote",
start_line,
start_col,
)
.with_code("E1005")
- .with_help("write a single character like `'a'` or an escape like `'\\n'`"),
- );
+ .with_help("write a single character like `'a'` or an escape like `'\\n'`"));
}
let c = if self.peek() == '\\' {
self.advance();
if self.is_at_end() {
- return Err(
- self.make_error(
+ return Err(self
+ .make_error(
WaveErrorKind::InvalidString("dangling char escape".to_string()),
"unterminated char literal; dangling escape sequence",
start_line,
start_col,
)
.with_code("E1005")
- .with_help("complete the escape and close the char literal with `'`"),
- );
+ .with_help("complete the escape and close the char literal with `'`"));
}
let escaped = self.advance();
@@ -192,8 +186,8 @@ impl<'a> Lexer<'a> {
'"' => '"',
'x' => {
if self.is_at_end() {
- return Err(
- self.make_error(
+ return Err(self
+ .make_error(
WaveErrorKind::InvalidString(
"incomplete hex escape in char literal".to_string(),
),
@@ -202,13 +196,12 @@ impl<'a> Lexer<'a> {
start_col,
)
.with_code("E1005")
- .with_help("example: `'\\x41'` for `A`"),
- );
+ .with_help("example: `'\\x41'` for `A`"));
}
let h1 = self.advance();
if self.is_at_end() {
- return Err(
- self.make_error(
+ return Err(self
+ .make_error(
WaveErrorKind::InvalidString(
"incomplete hex escape in char literal".to_string(),
),
@@ -217,16 +210,15 @@ impl<'a> Lexer<'a> {
start_col,
)
.with_code("E1005")
- .with_help("example: `'\\x41'` for `A`"),
- );
+ .with_help("example: `'\\x41'` for `A`"));
}
let h2 = self.advance();
let hex = format!("{}{}", h1, h2);
let value = match u8::from_str_radix(&hex, 16) {
Ok(v) => v,
Err(_) => {
- return Err(
- self.make_error(
+ return Err(self
+ .make_error(
WaveErrorKind::InvalidString(format!(
"invalid hex escape in char literal: \\x{}",
hex
@@ -239,15 +231,14 @@ impl<'a> Lexer<'a> {
start_col,
)
.with_code("E1005")
- .with_help("hex escapes must be two hexadecimal digits"),
- );
+ .with_help("hex escapes must be two hexadecimal digits"));
}
};
value as char
}
_ => {
- return Err(
- self.make_error(
+ return Err(self
+ .make_error(
WaveErrorKind::InvalidString(format!(
"invalid escape sequence in char literal: \\{}",
escaped
@@ -257,8 +248,7 @@ impl<'a> Lexer<'a> {
start_col,
)
.with_code("E1005")
- .with_help("supported escapes: \\\\, \\\', \\n, \\t, \\r, \\xNN"),
- );
+ .with_help("supported escapes: \\\\, \\\', \\n, \\t, \\r, \\xNN"));
}
}
} else {
@@ -266,8 +256,8 @@ impl<'a> Lexer<'a> {
};
if self.peek() != '\'' {
- return Err(
- self.make_error(
+ return Err(self
+ .make_error(
WaveErrorKind::InvalidString("unterminated char literal".to_string()),
"unterminated or invalid char literal",
start_line,
@@ -275,8 +265,7 @@ impl<'a> Lexer<'a> {
)
.with_code("E1005")
.with_label("char literal must contain exactly one character")
- .with_help("close with `'` and ensure exactly one character value"),
- );
+ .with_help("close with `'` and ensure exactly one character value"));
}
self.advance(); // closing '
Ok(c)
diff --git a/front/lexer/src/scan.rs b/front/lexer/src/scan.rs
index 2e6f252d..858cae95 100644
--- a/front/lexer/src/scan.rs
+++ b/front/lexer/src/scan.rs
@@ -19,7 +19,11 @@ impl<'a> Lexer<'a> {
self.skip_trivia()?;
if self.is_at_end() {
- return Ok(Token { token_type: TokenType::Eof, lexeme: String::new(), line: self.line });
+ return Ok(Token {
+ token_type: TokenType::Eof,
+ lexeme: String::new(),
+ line: self.line,
+ });
}
let c = self.advance();
@@ -31,19 +35,19 @@ impl<'a> Lexer<'a> {
token_type: TokenType::Increment,
lexeme: "++".to_string(),
line: self.line,
- })
+ });
} else if self.match_next('=') {
return Ok(Token {
token_type: TokenType::PlusEq,
lexeme: "+=".to_string(),
line: self.line,
- })
+ });
} else {
return Ok(Token {
token_type: TokenType::Plus,
lexeme: "+".to_string(),
line: self.line,
- })
+ });
}
}
'-' => {
@@ -52,25 +56,25 @@ impl<'a> Lexer<'a> {
token_type: TokenType::Decrement,
lexeme: "--".to_string(),
line: self.line,
- })
+ });
} else if self.match_next('>') {
return Ok(Token {
token_type: TokenType::Arrow,
lexeme: "->".to_string(),
line: self.line,
- })
+ });
} else if self.match_next('=') {
return Ok(Token {
token_type: TokenType::MinusEq,
lexeme: "-=".to_string(),
line: self.line,
- })
+ });
} else {
return Ok(Token {
token_type: TokenType::Minus,
lexeme: "-".to_string(),
line: self.line,
- })
+ });
}
}
'*' => {
@@ -79,25 +83,35 @@ impl<'a> Lexer<'a> {
token_type: TokenType::StarEq,
lexeme: "*=".to_string(),
line: self.line,
- })
+ });
} else {
return Ok(Token {
token_type: TokenType::Star,
lexeme: "*".to_string(),
line: self.line,
- })
+ });
}
}
- '.' => return Ok(Token {
- token_type: TokenType::Dot,
- lexeme: ".".to_string(),
- line: self.line,
- }),
+ '.' => {
+ return Ok(Token {
+ token_type: TokenType::Dot,
+ lexeme: ".".to_string(),
+ line: self.line,
+ })
+ }
'/' => {
if self.match_next('=') {
- return Ok(Token { token_type: TokenType::DivEq, lexeme: "/=".to_string(), line: self.line });
+ return Ok(Token {
+ token_type: TokenType::DivEq,
+ lexeme: "/=".to_string(),
+ line: self.line,
+ });
} else {
- return Ok(Token { token_type: TokenType::Div, lexeme: "/".to_string(), line: self.line });
+ return Ok(Token {
+ token_type: TokenType::Div,
+ lexeme: "/".to_string(),
+ line: self.line,
+ });
}
}
'%' => {
@@ -106,44 +120,48 @@ impl<'a> Lexer<'a> {
token_type: TokenType::RemainderEq,
lexeme: "%=".to_string(),
line: self.line,
- })
+ });
} else {
return Ok(Token {
token_type: TokenType::Remainder,
lexeme: "%".to_string(),
line: self.line,
- })
+ });
}
}
- ';' => return Ok(Token {
- token_type: TokenType::SemiColon,
- lexeme: ";".to_string(),
- line: self.line,
- }),
- ':' => return Ok(Token {
- token_type: TokenType::Colon,
- lexeme: ":".to_string(),
- line: self.line,
- }),
+ ';' => {
+ return Ok(Token {
+ token_type: TokenType::SemiColon,
+ lexeme: ";".to_string(),
+ line: self.line,
+ })
+ }
+ ':' => {
+ return Ok(Token {
+ token_type: TokenType::Colon,
+ lexeme: ":".to_string(),
+ line: self.line,
+ })
+ }
'<' => {
if self.match_next('<') {
return Ok(Token {
token_type: TokenType::Rol,
lexeme: "<<".to_string(),
line: self.line,
- })
+ });
} else if self.match_next('=') {
return Ok(Token {
token_type: TokenType::LchevrEq,
lexeme: "<=".to_string(),
line: self.line,
- })
+ });
} else {
return Ok(Token {
token_type: TokenType::Lchevr,
lexeme: "<".to_string(),
line: self.line,
- })
+ });
}
}
'>' => {
@@ -152,64 +170,76 @@ impl<'a> Lexer<'a> {
token_type: TokenType::Ror,
lexeme: ">>".to_string(),
line: self.line,
- })
+ });
} else if self.match_next('=') {
return Ok(Token {
token_type: TokenType::RchevrEq,
lexeme: ">=".to_string(),
line: self.line,
- })
+ });
} else {
return Ok(Token {
token_type: TokenType::Rchevr,
lexeme: ">".to_string(),
line: self.line,
- })
+ });
}
}
- '(' => return Ok(Token {
- token_type: TokenType::Lparen,
- lexeme: "(".to_string(),
- line: self.line,
- }),
- ')' => return Ok(Token {
- token_type: TokenType::Rparen,
- lexeme: ")".to_string(),
- line: self.line,
- }),
- '{' => return Ok(Token {
- token_type: TokenType::Lbrace,
- lexeme: "{".to_string(),
- line: self.line,
- }),
- '}' => return Ok(Token {
- token_type: TokenType::Rbrace,
- lexeme: "}".to_string(),
- line: self.line,
- }),
- '[' => return Ok(Token {
- token_type: TokenType::Lbrack,
- lexeme: "[".to_string(),
- line: self.line,
- }),
- ']' => return Ok(Token {
- token_type: TokenType::Rbrack,
- lexeme: "]".to_string(),
- line: self.line,
- }),
+ '(' => {
+ return Ok(Token {
+ token_type: TokenType::Lparen,
+ lexeme: "(".to_string(),
+ line: self.line,
+ })
+ }
+ ')' => {
+ return Ok(Token {
+ token_type: TokenType::Rparen,
+ lexeme: ")".to_string(),
+ line: self.line,
+ })
+ }
+ '{' => {
+ return Ok(Token {
+ token_type: TokenType::Lbrace,
+ lexeme: "{".to_string(),
+ line: self.line,
+ })
+ }
+ '}' => {
+ return Ok(Token {
+ token_type: TokenType::Rbrace,
+ lexeme: "}".to_string(),
+ line: self.line,
+ })
+ }
+ '[' => {
+ return Ok(Token {
+ token_type: TokenType::Lbrack,
+ lexeme: "[".to_string(),
+ line: self.line,
+ })
+ }
+ ']' => {
+ return Ok(Token {
+ token_type: TokenType::Rbrack,
+ lexeme: "]".to_string(),
+ line: self.line,
+ })
+ }
'=' => {
if self.match_next('=') {
return Ok(Token {
token_type: TokenType::EqualTwo,
lexeme: "==".to_string(),
line: self.line,
- })
+ });
} else {
return Ok(Token {
token_type: TokenType::Equal,
lexeme: "=".to_string(),
line: self.line,
- })
+ });
}
}
'&' => {
@@ -218,13 +248,13 @@ impl<'a> Lexer<'a> {
token_type: TokenType::LogicalAnd,
lexeme: "&&".to_string(),
line: self.line,
- })
+ });
} else {
return Ok(Token {
token_type: TokenType::AddressOf,
lexeme: "&".to_string(),
line: self.line,
- })
+ });
}
}
'|' => {
@@ -233,13 +263,13 @@ impl<'a> Lexer<'a> {
token_type: TokenType::LogicalOr,
lexeme: "||".to_string(),
line: self.line,
- })
+ });
} else {
return Ok(Token {
token_type: TokenType::BitwiseOr,
lexeme: "|".to_string(),
line: self.line,
- })
+ });
}
}
'!' => {
@@ -248,45 +278,47 @@ impl<'a> Lexer<'a> {
token_type: TokenType::NotEqual,
lexeme: "!=".to_string(),
line: self.line,
- })
+ });
} else if self.match_next('&') {
return Ok(Token {
token_type: TokenType::Nand,
lexeme: "!&".to_string(),
line: self.line,
- })
+ });
} else if self.match_next('|') {
return Ok(Token {
token_type: TokenType::Nor,
lexeme: "!|".to_string(),
line: self.line,
- })
+ });
} else {
return Ok(Token {
token_type: TokenType::Not,
lexeme: "!".to_string(),
line: self.line,
- })
+ });
}
}
- '^' => return Ok(Token {
- token_type: TokenType::Xor,
- lexeme: "^".to_string(),
- line: self.line,
- }),
+ '^' => {
+ return Ok(Token {
+ token_type: TokenType::Xor,
+ lexeme: "^".to_string(),
+ line: self.line,
+ })
+ }
'~' => {
if self.match_next('^') {
return Ok(Token {
token_type: TokenType::Xnor,
lexeme: "~^".to_string(),
line: self.line,
- })
+ });
} else {
return Ok(Token {
token_type: TokenType::BitwiseNot,
lexeme: "~".to_string(),
line: self.line,
- })
+ });
}
}
'?' => {
@@ -295,35 +327,37 @@ impl<'a> Lexer<'a> {
token_type: TokenType::NullCoalesce,
lexeme: "??".to_string(),
line: self.line,
- })
+ });
} else {
return Ok(Token {
token_type: TokenType::Condition,
lexeme: "?".to_string(),
line: self.line,
- })
+ });
}
}
- ',' => return Ok(Token {
- token_type: TokenType::Comma,
- lexeme: ",".to_string(),
- line: self.line,
- }),
+ ',' => {
+ return Ok(Token {
+ token_type: TokenType::Comma,
+ lexeme: ",".to_string(),
+ line: self.line,
+ })
+ }
'\'' => {
let value = self.char_literal()?;
return Ok(Token {
token_type: TokenType::CharLiteral(value),
lexeme: format!("'{}'", value),
line: self.line,
- })
- },
+ });
+ }
'"' => {
let string_value = self.string()?;
return Ok(Token {
token_type: TokenType::String(string_value.clone()),
lexeme: format!("\"{}\"", string_value),
line: self.line,
- })
+ });
}
'a'..='z' | 'A'..='Z' | '_' => {
@@ -406,15 +440,14 @@ impl<'a> Lexer<'a> {
                    match num_str.parse::<f64>() {
Ok(v) => TokenType::Float(v),
Err(_) => {
- return Err(
- self.make_error_here(
+ return Err(self
+ .make_error_here(
WaveErrorKind::InvalidNumber(num_str.clone()),
format!("invalid floating-point literal `{}`", num_str),
)
.with_code("E1006")
.with_label("cannot parse float literal")
- .with_help("check decimal point placement and digits"),
- );
+ .with_help("check decimal point placement and digits"));
}
}
} else {
@@ -425,40 +458,39 @@ impl<'a> Lexer<'a> {
token_type,
lexeme: num_str,
line: self.line,
- })
+ });
}
_ => {
if c == '\0' {
- return Err(
- self.make_error_here(
+ return Err(self
+ .make_error_here(
WaveErrorKind::UnexpectedChar(c),
"null character (`\\0`) is not allowed in source",
)
.with_code("E1001")
.with_label("unexpected null byte in source")
- .with_help("remove the null byte and save the file as plain UTF-8 text"),
- );
+ .with_help(
+ "remove the null byte and save the file as plain UTF-8 text",
+ ));
} else if c == '\\' {
- return Err(
- self.make_error_here(
+ return Err(self
+ .make_error_here(
WaveErrorKind::UnexpectedChar(c),
"unexpected backslash outside of string literal",
)
.with_code("E1001")
.with_label("`\\` is only valid inside string/char literals")
- .with_help("if you intended a string, wrap it with quotes"),
- );
+ .with_help("if you intended a string, wrap it with quotes"));
} else {
- return Err(
- self.make_error_here(
+ return Err(self
+ .make_error_here(
WaveErrorKind::UnexpectedChar(c),
format!("unexpected character `{}` (U+{:04X})", c, c as u32),
)
.with_code("E1001")
.with_label("this character is not valid in Wave syntax")
- .with_help("remove it or replace it with a valid token"),
- );
+ .with_help("remove it or replace it with a valid token"));
}
}
}
diff --git a/front/lexer/src/trivia.rs b/front/lexer/src/trivia.rs
index d129831e..4258ba38 100644
--- a/front/lexer/src/trivia.rs
+++ b/front/lexer/src/trivia.rs
@@ -10,8 +10,8 @@
// SPDX-License-Identifier: MPL-2.0
use super::Lexer;
-use error::WaveErrorKind;
use error::WaveError;
+use error::WaveErrorKind;
impl<'a> Lexer<'a> {
pub(crate) fn skip_trivia(&mut self) -> Result<(), WaveError> {
@@ -48,7 +48,9 @@ impl<'a> Lexer<'a> {
while !self.is_at_end() {
let c = self.peek();
match c {
- ' ' | '\r' | '\t' => { self.advance(); }
+ ' ' | '\r' | '\t' => {
+ self.advance();
+ }
'\n' => {
self.advance();
self.line += 1;
@@ -96,15 +98,14 @@ impl<'a> Lexer<'a> {
self.advance();
}
- Err(
- self.make_error_here(
+ Err(self
+ .make_error_here(
WaveErrorKind::UnterminatedComment,
"unterminated block comment; expected closing `*/`",
)
.with_code("E1002")
.with_label("block comment starts here and never closes")
.with_help("add `*/` to close the block comment")
- .with_suggestion("if you intended a line comment, use `// ...`"),
- )
+ .with_suggestion("if you intended a line comment, use `// ...`"))
}
}
diff --git a/front/parser/src/expr/assign.rs b/front/parser/src/expr/assign.rs
index 1590d7fa..3ae84a7b 100644
--- a/front/parser/src/expr/assign.rs
+++ b/front/parser/src/expr/assign.rs
@@ -9,10 +9,10 @@
//
// SPDX-License-Identifier: MPL-2.0
-use lexer::Token;
-use lexer::token::TokenType;
use crate::ast::{AssignOperator, Expression};
use crate::expr::binary::parse_logical_or_expression;
+use lexer::token::TokenType;
+use lexer::Token;
 pub fn parse_expression<'a, T>(tokens: &mut std::iter::Peekable<T>) -> Option<Expression>
where
@@ -49,4 +49,4 @@ where
}
Some(left)
-}
\ No newline at end of file
+}
diff --git a/front/parser/src/expr/helpers.rs b/front/parser/src/expr/helpers.rs
index f2c59d45..4e1a703d 100644
--- a/front/parser/src/expr/helpers.rs
+++ b/front/parser/src/expr/helpers.rs
@@ -9,13 +9,13 @@
//
// SPDX-License-Identifier: MPL-2.0
-use std::iter::Peekable;
-use std::slice::Iter;
-use lexer::Token;
-use lexer::token::TokenType;
use crate::ast::Expression;
use crate::expr::parse_expression;
use crate::expr::unary::parse_unary_expression;
+use lexer::token::TokenType;
+use lexer::Token;
+use std::iter::Peekable;
+use std::slice::Iter;
pub fn is_assignable(expr: &Expression) -> bool {
match expr {
@@ -40,7 +40,10 @@ fn parse_lvalue_tail(
Some(TokenType::Dot) => {
tokens.next(); // '.'
let field = match tokens.next() {
- Some(Token { token_type: TokenType::Identifier(s), .. }) => s.clone(),
+ Some(Token {
+ token_type: TokenType::Identifier(s),
+ ..
+ }) => s.clone(),
_ => {
println!("Error: Expected identifier after '.'");
return None;
@@ -93,4 +96,4 @@ pub fn parse_expression_from_token(
_ => None,
}
-}
\ No newline at end of file
+}
diff --git a/front/parser/src/expr/mod.rs b/front/parser/src/expr/mod.rs
index 57be7762..24a5af64 100644
--- a/front/parser/src/expr/mod.rs
+++ b/front/parser/src/expr/mod.rs
@@ -9,12 +9,12 @@
//
// SPDX-License-Identifier: MPL-2.0
+mod assign;
+mod binary;
mod helpers;
-mod primary;
mod postfix;
+mod primary;
mod unary;
-mod binary;
-mod assign;
-pub use helpers::*;
pub use assign::parse_expression;
+pub use helpers::*;
diff --git a/front/parser/src/expr/postfix.rs b/front/parser/src/expr/postfix.rs
index 24d830d4..2733e7cd 100644
--- a/front/parser/src/expr/postfix.rs
+++ b/front/parser/src/expr/postfix.rs
@@ -30,9 +30,9 @@ where
tokens.next(); // consume '.'
let name = if let Some(Token {
- token_type: TokenType::Identifier(name),
- ..
- }) = tokens.next()
+ token_type: TokenType::Identifier(name),
+ ..
+ }) = tokens.next()
{
name.clone()
} else {
@@ -41,9 +41,9 @@ where
};
if let Some(Token {
- token_type: TokenType::Lparen,
- ..
- }) = tokens.peek()
+ token_type: TokenType::Lparen,
+ ..
+ }) = tokens.peek()
{
// ----- MethodCall -----
tokens.next(); // consume '('
@@ -58,9 +58,9 @@ where
args.push(arg);
if let Some(Token {
- token_type: TokenType::Comma,
- ..
- }) = tokens.peek()
+ token_type: TokenType::Comma,
+ ..
+ }) = tokens.peek()
{
tokens.next(); // consume ','
} else {
@@ -119,7 +119,10 @@ where
tokens.next(); // consume '++'
if !is_assignable(&expr) {
- println!("Error: postfix ++ target must be assignable (line {})", line);
+ println!(
+ "Error: postfix ++ target must be assignable (line {})",
+ line
+ );
return None;
}
@@ -137,7 +140,10 @@ where
tokens.next(); // consume '--'
if !is_assignable(&expr) {
- println!("Error: postfix -- target must be assignable (line {})", line);
+ println!(
+ "Error: postfix -- target must be assignable (line {})",
+ line
+ );
return None;
}
diff --git a/front/parser/src/expr/primary.rs b/front/parser/src/expr/primary.rs
index d1027e1a..6526d552 100644
--- a/front/parser/src/expr/primary.rs
+++ b/front/parser/src/expr/primary.rs
@@ -65,9 +65,9 @@ where
args.push(arg);
if let Some(Token {
- token_type: TokenType::Comma,
- ..
- }) = tokens.peek()
+ token_type: TokenType::Comma,
+ ..
+ }) = tokens.peek()
{
tokens.next();
} else {
@@ -96,9 +96,9 @@ where
.map_or(false, |t| t.token_type != TokenType::Rbrace)
{
let field_name = if let Some(Token {
- token_type: TokenType::Identifier(n),
- ..
- }) = tokens.next()
+ token_type: TokenType::Identifier(n),
+ ..
+ }) = tokens.next()
{
n.clone()
} else {
@@ -119,9 +119,9 @@ where
fields.push((field_name, value));
if let Some(Token {
- token_type: TokenType::Comma,
- ..
- }) = tokens.peek()
+ token_type: TokenType::Comma,
+ ..
+ }) = tokens.peek()
{
tokens.next();
} else {
@@ -175,9 +175,9 @@ where
loop {
elements.push(parse_expression(tokens)?);
if let Some(Token {
- token_type: TokenType::Comma,
- ..
- }) = tokens.peek()
+ token_type: TokenType::Comma,
+ ..
+ }) = tokens.peek()
{
tokens.next();
} else {
@@ -265,10 +265,18 @@ where
})
}
_ => match token.token_type {
- TokenType::Continue | TokenType::Break | TokenType::Return | TokenType::SemiColon => None,
+ TokenType::Continue | TokenType::Break | TokenType::Return | TokenType::SemiColon => {
+ None
+ }
_ => {
- println!("Error: Expected primary expression, found {:?}", token.token_type);
- println!("Error: Expected primary expression, found {:?}", token.lexeme);
+ println!(
+ "Error: Expected primary expression, found {:?}",
+ token.token_type
+ );
+ println!(
+ "Error: Expected primary expression, found {:?}",
+ token.lexeme
+ );
println!("Error: Expected primary expression, found {:?}", token.line);
None
}
diff --git a/front/parser/src/expr/unary.rs b/front/parser/src/expr/unary.rs
index ec1b7715..963d09ef 100644
--- a/front/parser/src/expr/unary.rs
+++ b/front/parser/src/expr/unary.rs
@@ -9,11 +9,11 @@
//
// SPDX-License-Identifier: MPL-2.0
-use lexer::Token;
-use lexer::token::TokenType;
use crate::ast::{Expression, IncDecKind, Literal, Operator};
use crate::expr::is_assignable;
use crate::expr::primary::parse_primary_expression;
+use lexer::token::TokenType;
+use lexer::Token;
pub fn parse_unary_expression<'a, T>(tokens: &mut std::iter::Peekable<T>) -> Option<Expression>
where
@@ -102,4 +102,4 @@ where
}
parse_primary_expression(tokens)
-}
\ No newline at end of file
+}
diff --git a/front/parser/src/import.rs b/front/parser/src/import.rs
index 0ca6d850..a503a423 100644
--- a/front/parser/src/import.rs
+++ b/front/parser/src/import.rs
@@ -9,13 +9,157 @@
//
// SPDX-License-Identifier: MPL-2.0
-use crate::ast::{ASTNode};
+use crate::ast::ASTNode;
use crate::{parse_syntax_only, ParseError};
use error::error::{WaveError, WaveErrorKind};
use lexer::Lexer;
use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};
+enum TargetAttr<'a> {
+ Supported(&'a str),
+ Unsupported,
+}
+
+fn parse_target_os_attr(line: &str) -> Option<TargetAttr<'_>> {
+ let trimmed = line.trim();
+ if !trimmed.starts_with("#[target(os=\"") || !trimmed.ends_with("\")]") {
+ return None;
+ }
+
+ let start = "#[target(os=\"".len();
+ let end = trimmed.len() - 3; // ")]"
+ let os = &trimmed[start..end];
+ if os == "linux" || os == "macos" {
+ Some(TargetAttr::Supported(os))
+ } else {
+ Some(TargetAttr::Unsupported)
+ }
+}
+
+fn is_supported_target_item_start(line: &str) -> bool {
+ fn has_ident_boundary(rest: &str) -> bool {
+ match rest.chars().next() {
+ None => true,
+ Some(c) => !(c.is_ascii_alphanumeric() || c == '_'),
+ }
+ }
+
+ let trimmed = line.trim_start();
+ for kw in ["fun", "struct", "enum", "const", "static", "type", "proto"] {
+ if let Some(rest) = trimmed.strip_prefix(kw) {
+ if has_ident_boundary(rest) {
+ return true;
+ }
+ }
+ }
+
+ false
+}
+
+fn consume_target_item(
+ lines: &[&str],
+ mut idx: usize,
+ keep: bool,
+ out: &mut Vec,
+) -> usize {
+ let mut depth: i32 = 0;
+ let mut seen_open = false;
+
+ while idx < lines.len() {
+ let line = lines[idx];
+ if keep {
+ out.push(line.to_string());
+ } else {
+ out.push(String::new());
+ }
+
+ for ch in line.chars() {
+ if ch == '{' {
+ depth += 1;
+ seen_open = true;
+ } else if ch == '}' && depth > 0 {
+ depth -= 1;
+ }
+ }
+
+ idx += 1;
+
+ let trimmed = line.trim_end();
+ if seen_open && depth == 0 {
+ break;
+ }
+ }
+
+ idx
+}
+
+fn preprocess_target_attrs(source: &str) -> String {
+ let host = std::env::consts::OS;
+ let lines: Vec<&str> = source.lines().collect();
+    let mut out: Vec<String> = Vec::with_capacity(lines.len());
+ let mut idx: usize = 0;
+
+ while idx < lines.len() {
+ let line = lines[idx];
+ if let Some(target_attr) = parse_target_os_attr(line) {
+ // Attribute line is removed for parser compatibility,
+ // but we keep its line slot to preserve diagnostics.
+ out.push(String::new());
+ idx += 1;
+
+ let keep_item = match target_attr {
+ TargetAttr::Supported(target_os) => target_os == host,
+ // Ignore unsupported target values.
+ TargetAttr::Unsupported => true,
+ };
+
+ // Attribute applies to the next top-level item.
+ // Preserve line count for any leading blanks/comments.
+ while idx < lines.len() {
+ let item_line = lines[idx];
+ let trimmed = item_line.trim_start();
+
+ let is_leading_comment = trimmed.starts_with("//")
+ || trimmed.starts_with("/*")
+ || trimmed.starts_with('*')
+ || trimmed.starts_with("*/");
+
+ if trimmed.is_empty() || is_leading_comment {
+ if keep_item {
+ out.push(item_line.to_string());
+ } else {
+ out.push(String::new());
+ }
+ idx += 1;
+ continue;
+ }
+
+ if is_supported_target_item_start(trimmed) {
+ idx = consume_target_item(&lines, idx, keep_item, &mut out);
+ } else if keep_item {
+ out.push(item_line.to_string());
+ idx += 1;
+ } else {
+ out.push(String::new());
+ idx += 1;
+ }
+ break;
+ }
+ continue;
+ }
+
+ out.push(line.to_string());
+ idx += 1;
+ }
+
+ let mut processed = out.join("\n");
+ if source.ends_with('\n') {
+ processed.push('\n');
+ }
+ processed
+}
+
pub struct ImportedUnit {
pub abs_path: PathBuf,
pub ast: Vec<ASTNode>,
@@ -84,7 +228,10 @@ pub fn local_import(
already_imported: &mut HashSet<PathBuf>,
base_dir: &Path,
) -> Result<Vec<ASTNode>, WaveError> {
- Ok(local_import_unit_with_config(path, already_imported, base_dir, &ImportConfig::default())?.ast)
+ Ok(
+ local_import_unit_with_config(path, already_imported, base_dir, &ImportConfig::default())?
+ .ast,
+ )
}
pub fn local_import_with_config(
@@ -96,7 +243,10 @@ pub fn local_import_with_config(
Ok(local_import_unit_with_config(path, already_imported, base_dir, config)?.ast)
}
-fn resolve_external_package_root(package: &str, config: &ImportConfig) -> Result