compiling expressions (ch17)

Patrick MARIE 2024-08-25 13:09:52 +02:00
parent 4d04208b2a
commit f9c7a531dc
6 changed files with 303 additions and 77 deletions

View File

@@ -15,19 +15,21 @@ pub const Chunk = struct {
code: []u8,
lines: []usize,
constants: ValueArray,
+allocator: Allocator,
-pub fn new() Chunk {
+pub fn new(allocator: Allocator) Chunk {
return Chunk{
.count = 0,
.capacity = 0,
.code = &.{},
.lines = &.{},
.constants = ValueArray.new(),
+.allocator = allocator,
};
}
-pub fn init(self: *Chunk, allocator: Allocator) !void {
-self.deinit(allocator);
+pub fn init(self: *Chunk) !void {
+self.deinit(self.allocator);
self.count = 0;
self.capacity = 0;
@@ -36,12 +38,12 @@ pub const Chunk = struct {
self.constants = ValueArray.new();
}
-pub fn write(self: *Chunk, allocator: Allocator, byte: u8, line: usize) !void {
+pub fn write(self: *Chunk, byte: u8, line: usize) !void {
if (self.capacity < self.count + 1) {
const old_capacity = self.capacity;
self.capacity = grow_capacity(old_capacity);
-self.code = try allocator.realloc(self.code, self.capacity);
-self.lines = try allocator.realloc(self.lines, self.capacity);
+self.code = try self.allocator.realloc(self.code, self.capacity);
+self.lines = try self.allocator.realloc(self.lines, self.capacity);
}
self.code[self.count] = byte;
@@ -59,7 +61,7 @@ pub const Chunk = struct {
var offset: usize = 0;
while (offset < self.count) {
-offset += self.dissassemble_instruction(offset);
+offset = self.dissassemble_instruction(offset);
}
debug.print("== end of {s} ==\n\n", .{name});
}
@@ -90,17 +92,17 @@ pub const Chunk = struct {
}
}
-pub fn deinit(self: *Chunk, allocator: Allocator) void {
-self.constants.free(allocator);
+pub fn deinit(self: *Chunk) void {
+self.constants.free(self.allocator);
if (self.capacity > 0) {
-allocator.free(self.code);
-allocator.free(self.lines);
+self.allocator.free(self.code);
+self.allocator.free(self.lines);
}
}
-pub fn add_constant(self: *Chunk, allocator: Allocator, value: Value) !usize {
-try self.constants.write(allocator, value);
+pub fn add_constant(self: *Chunk, value: Value) !usize {
+try self.constants.write(self.allocator, value);
return self.constants.count - 1;
}
};
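
With the allocator now stored on the struct, call sites drop the allocator argument from write/add_constant/deinit. A minimal usage sketch of the reworked API (test name illustrative, ValueArray assumed unchanged):

const std = @import("std");
const Chunk = @import("./chunk.zig").Chunk;
const OpCode = @import("./opcode.zig").OpCode;

test "chunk owns its allocator" {
    // new() keeps the allocator; later calls no longer take one.
    var chunk = Chunk.new(std.testing.allocator);
    defer chunk.deinit();

    const constant = try chunk.add_constant(1.2);
    try chunk.write(@intFromEnum(OpCode.OP_CONSTANT), 123);
    try chunk.write(@intCast(constant), 123);
    try chunk.write(@intFromEnum(OpCode.OP_RETURN), 123);
    try std.testing.expect(chunk.count == 3);
}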

View File

@@ -2,28 +2,265 @@ const std = @import("std");
const debug = std.debug;
const Allocator = std.mem.Allocator;
+const OpCode = @import("./opcode.zig").OpCode;
const Scanner = @import("./scanner.zig").Scanner;
const Token = @import("./scanner.zig").Token;
const TokenType = @import("./scanner.zig").TokenType;
+const Chunk = @import("./chunk.zig").Chunk;
+const Value = @import("./values.zig").Value;
-pub fn compile(allocator: Allocator, contents: []const u8) !void {
-var line: ?usize = null;
+const ParsingError = @import("./errors.zig").ParsingError;
+const DEBUG_TRACE_EXECUTION = @import("./main.zig").DEBUG_TRACE_EXECUTION;
const Precedence = enum {
None,
Assignement,
Or,
And,
Equality,
Comparison,
Term,
Factor,
Unary,
Call,
Primary,
};
const ParserRule = struct {
prefix: ?*const fn (*Parser) ParsingError!void,
infix: ?*const fn (*Parser) ParsingError!void,
precedence: Precedence,
};
const Parser = struct {
current: ?Token,
previous: ?Token,
scanner: *Scanner,
had_error: bool,
panic_mode: bool,
chunk: *Chunk,
fn new(scanner: *Scanner, chunk: *Chunk) Parser {
return Parser{
.current = null,
.previous = null,
.scanner = scanner,
.had_error = false,
.panic_mode = false,
.chunk = chunk,
};
}
fn advance(self: *Parser) void {
self.previous = self.current;
while (true) {
self.current = self.scanner.scan_token();
if (self.current.?.token_type != TokenType.ERROR) {
break;
}
self.error_at_current(self.current.?.start);
}
}
fn expression(self: *Parser) ParsingError!void {
try self.parse_precedence(Precedence.Assignement);
}
fn consume(self: *Parser, token_type: TokenType, error_message: []const u8) void {
if (self.current.?.token_type == token_type) {
self.advance();
return;
}
self.error_at_current(error_message);
}
fn error_at_current(self: *Parser, error_message: []const u8) void {
self.error_at(self.current.?, error_message);
}
fn error_at(self: *Parser, token: Token, error_message: []const u8) void {
if (self.panic_mode) {
return;
}
self.panic_mode = true;
debug.print("[line {d}] Error", .{token.line});
if (token.token_type == TokenType.EOF) {
debug.print(" at end", .{});
} else if (token.token_type == TokenType.ERROR) {
// Nothing
} else {
const expr = std.mem.trimRight(u8, token.start[0..token.length], "\n");
debug.print(" at '{s}'", .{expr});
}
debug.print(": {s}\n", .{error_message});
self.had_error = true;
}
fn error_msg(self: *Parser, error_message: []const u8) void {
self.error_at(self.previous.?, error_message);
}
fn emit_byte(self: *Parser, byte: u8) !void {
try self.chunk.write(byte, self.previous.?.line);
}
fn emit_bytes(self: *Parser, byte0: u8, byte1: u8) !void {
try self.emit_byte(byte0);
try self.emit_byte(byte1);
}
fn emit_return(self: *Parser) !void {
try self.emit_byte(@intFromEnum(OpCode.OP_RETURN));
}
fn end_parser(self: *Parser) !void {
if (!self.had_error and DEBUG_TRACE_EXECUTION) {
self.chunk.dissassemble("code");
}
try self.emit_return();
}
fn number(self: *Parser) ParsingError!void {
const value = std.fmt.parseFloat(f64, self.previous.?.start[0..self.previous.?.length]) catch {
self.error_msg("Failed converting float.");
return ParsingError.FloatConv;
};
self.emit_constant(value) catch {
self.error_msg("Failed emiting constant.");
return ParsingError.ChunkError;
};
}
fn emit_constant(self: *Parser, value: Value) !void {
const constant = try self.make_constant(value);
try self.emit_bytes(@intFromEnum(OpCode.OP_CONSTANT), constant);
}
fn make_constant(self: *Parser, value: Value) !u8 {
const constant = try self.chunk.add_constant(value);
if (constant > 256) {
self.error_msg("Too many constants in one chunk.");
return 0;
}
return @intCast(constant);
}
fn grouping(self: *Parser) ParsingError!void {
try self.expression();
self.consume(TokenType.RIGHT_PAREN, "Expect ')' after expression.");
}
fn unary(self: *Parser) ParsingError!void {
const operation_type = self.previous.?.token_type;
// Compile the operand
try self.parse_precedence(Precedence.Unary);
// Emit the operator instruction
switch (operation_type) {
TokenType.MINUS => self.emit_byte(@intFromEnum(OpCode.OP_NEGATE)) catch {
self.error_msg("Failed emiting NEGATE opcode.");
return ParsingError.ChunkError;
},
else => {},
}
}
fn binary(self: *Parser) ParsingError!void {
const operator_type = self.previous.?.token_type;
const parser_rule = Parser.get_rule(operator_type);
try self.parse_precedence(@enumFromInt(1 + @intFromEnum(parser_rule.precedence)));
switch (operator_type) {
TokenType.PLUS => self.emit_byte(@intFromEnum(OpCode.OP_ADD)) catch {},
TokenType.MINUS => self.emit_byte(@intFromEnum(OpCode.OP_SUBSTRACT)) catch {},
TokenType.STAR => self.emit_byte(@intFromEnum(OpCode.OP_MULTIPLY)) catch {},
TokenType.SLASH => self.emit_byte(@intFromEnum(OpCode.OP_DIVIDE)) catch {},
else => return,
}
}
fn get_rule(operator_type: TokenType) ParserRule {
return switch (operator_type) {
TokenType.LEFT_PAREN => ParserRule{ .prefix = grouping, .infix = null, .precedence = Precedence.None },
TokenType.RIGHT_PAREN => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.LEFT_BRACE => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.RIGHT_BRACE => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.COMMA => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.DOT => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.MINUS => ParserRule{ .prefix = unary, .infix = binary, .precedence = Precedence.Term },
TokenType.PLUS => ParserRule{ .prefix = null, .infix = binary, .precedence = Precedence.Term },
TokenType.SEMICOLON => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.SLASH => ParserRule{ .prefix = null, .infix = binary, .precedence = Precedence.Factor },
TokenType.STAR => ParserRule{ .prefix = null, .infix = binary, .precedence = Precedence.Factor },
TokenType.BANG => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.BANG_EQUAL => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.EQUAL => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.EQUAL_EQUAL => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.GREATER => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.GREATER_EQUAL => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.LESS => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.LESS_EQUAL => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.IDENTIFIER => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.STRING => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.NUMBER => ParserRule{ .prefix = number, .infix = null, .precedence = Precedence.None },
TokenType.AND => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.CLASS => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.ELSE => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.FALSE => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.FOR => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.FUN => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.IF => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.NIL => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.OR => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.PRINT => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.RETURN => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.SUPER => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.THIS => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.TRUE => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.VAR => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.WHILE => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.ERROR => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
TokenType.EOF => ParserRule{ .prefix = null, .infix = null, .precedence = Precedence.None },
};
}
fn parse_precedence(self: *Parser, precedence: Precedence) ParsingError!void {
self.advance();
const prefix_rule = Parser.get_rule(self.previous.?.token_type).prefix;
if (prefix_rule == null) {
self.error_msg("Expect expression.");
return;
}
try prefix_rule.?(self);
while (@intFromEnum(precedence) <= @intFromEnum(Parser.get_rule(self.current.?.token_type).precedence)) {
self.advance();
const infix_rule = Parser.get_rule(self.previous.?.token_type).infix;
try infix_rule.?(self);
}
}
};
+pub fn compile(allocator: Allocator, contents: []const u8, chunk: *Chunk) !bool {
_ = allocator;
var scanner = Scanner.init(contents);
+var parser = Parser.new(&scanner, chunk);
-while (true) {
-const token = scanner.scan_token();
-if (line == null or token.line != line.?) {
-debug.print("{d:4} ", .{token.line});
-line = token.line;
-} else {
-debug.print(" | ", .{});
-}
-debug.print("{s:12} len:{d:2} '{s}'\n", .{ token.token_type.string(), token.length, token.start[0..token.length] });
-if (token.token_type == TokenType.EOF) {
-break;
-}
-}
+parser.advance();
+try parser.expression();
+parser.consume(TokenType.EOF, "Expect end of expression.");
+try parser.end_parser();
+return !parser.had_error;
}
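
The new compile() drives a Pratt parser: advance once, parse a single expression at Precedence.Assignement, then require EOF; the emitted bytecode lands in the caller's chunk. A rough sketch of calling it directly, assuming the scanner accepts a bare expression string (test name illustrative):

const std = @import("std");
const Chunk = @import("./chunk.zig").Chunk;
const compile = @import("./compile.zig").compile;

test "compile a simple expression" {
    var chunk = Chunk.new(std.testing.allocator);
    defer chunk.deinit();

    // Unary binds tighter than '*', which binds tighter than '+', so this
    // should emit: CONSTANT 1, NEGATE, CONSTANT 2, CONSTANT 3, MULTIPLY,
    // ADD, RETURN.
    const ok = try compile(std.testing.allocator, "-1 + 2 * 3", &chunk);
    try std.testing.expect(ok);
}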

src/errors.zig Normal file
View File

@@ -0,0 +1,4 @@
pub const ParsingError = error{
FloatConv,
ChunkError,
};

View File

@@ -7,11 +7,9 @@ const OpCode = @import("./opcode.zig").OpCode;
const VM = @import("./vm.zig").VM;
const InterpretResult = @import("./vm.zig").InterpretResult;
-const compile = @import("./compile.zig").compile;
pub const DEBUG_TRACE_EXECUTION = true;
-pub fn repl(allocator: Allocator) !void {
+pub fn repl(allocator: Allocator, vm: *VM) !void {
var line: [1024]u8 = undefined;
const stdin = std.io.getStdIn().reader();
@@ -29,18 +27,18 @@ pub fn repl(allocator: Allocator) !void {
break;
}
-_ = try interpret(allocator, &line);
+_ = try vm.interpret(allocator, &line);
}
}
-pub fn run_file(allocator: Allocator, filepath: []const u8) !void {
+pub fn run_file(allocator: Allocator, vm: *VM, filepath: []const u8) !void {
const file = try std.fs.cwd().openFile(filepath, .{});
defer file.close();
const file_content = try file.readToEndAlloc(allocator, 1024 * 1024);
defer allocator.free(file_content);
-const result = try interpret(allocator, file_content);
+const result = try vm.interpret(allocator, file_content);
switch (result) {
InterpretResult.COMPILE_ERROR => std.process.exit(65),
@@ -49,12 +47,6 @@ pub fn run_file(allocator: Allocator, filepath: []const u8) !void {
}
}
-pub fn interpret(allocator: Allocator, content: []const u8) !InterpretResult {
-// XXX catch and return InterpretResult.COMPILE_ERROR ?
-try compile(allocator, content);
-return InterpretResult.OK;
-}
pub fn main() !void {
var gpa = std.heap.GeneralPurposeAllocator(.{ .safety = true }){};
defer _ = debug.assert(gpa.deinit() == .ok);
@@ -63,44 +55,16 @@ pub fn main() !void {
const args = try std.process.argsAlloc(allocator);
defer std.process.argsFree(allocator, args);
+var vm = VM.new(allocator);
+defer vm.free();
if (args.len == 1) {
-try repl(allocator);
+try repl(allocator, &vm);
} else if (args.len == 2) {
-try run_file(allocator, args[1]);
+try run_file(allocator, &vm, args[1]);
} else {
const stdout = std.io.getStdOut().writer();
try stdout.print("Usage: clox [path]\n", .{});
std.process.exit(64);
}
-// var vm = VM.new(allocator);
-// var chunk = Chunk.new();
-// try chunk.init(allocator);
-// var constant = try chunk.add_constant(allocator, 1.2);
-// try chunk.write(allocator, @intFromEnum(OpCode.OP_CONSTANT), 123);
-// try chunk.write(allocator, @intCast(constant), 123);
-// constant = try chunk.add_constant(allocator, 3.4);
-// try chunk.write(allocator, @intFromEnum(OpCode.OP_CONSTANT), 123);
-// try chunk.write(allocator, @intCast(constant), 123);
-// try chunk.write(allocator, @intFromEnum(OpCode.OP_ADD), 123);
-// constant = try chunk.add_constant(allocator, 5.6);
-// try chunk.write(allocator, @intFromEnum(OpCode.OP_CONSTANT), 123);
-// try chunk.write(allocator, @intCast(constant), 123);
-// try chunk.write(allocator, @intFromEnum(OpCode.OP_DIVIDE), 123);
-// try chunk.write(allocator, @intFromEnum(OpCode.OP_NEGATE), 123);
-// try chunk.write(allocator, @intFromEnum(OpCode.OP_RETURN), 123);
-// chunk.dissassemble("test chunk");
-// _ = try vm.interpret(&chunk);
-// vm.free();
-// chunk.deinit(allocator);
}

View File

@@ -1 +1,9 @@
-pub const OpCode = enum(u8) { OP_CONSTANT, OP_ADD, OP_SUBSTRACT, OP_MULTIPLY, OP_DIVIDE, OP_NEGATE, OP_RETURN };
+pub const OpCode = enum(u8) {
OP_CONSTANT,
OP_ADD,
OP_SUBSTRACT,
OP_MULTIPLY,
OP_DIVIDE,
OP_NEGATE,
OP_RETURN,
};

View File

@@ -6,6 +6,8 @@ const Chunk = @import("./chunk.zig").Chunk;
const OpCode = @import("./opcode.zig").OpCode;
const Value = @import("./values.zig").Value;
+const compile = @import("./compile.zig").compile;
const DEBUG_TRACE_EXECUTION = @import("./main.zig").DEBUG_TRACE_EXECUTION;
const print_value = @import("./values.zig").print_value;
@@ -35,11 +37,19 @@ pub const VM = struct {
self.stack.deinit();
}
-pub fn interpret(self: *VM, chunk: *Chunk) !InterpretResult {
-self.chunk = chunk;
+pub fn interpret(self: *VM, allocator: Allocator, content: []const u8) !InterpretResult {
+var chunk = Chunk.new(allocator);
+defer chunk.deinit();
+const res = try compile(allocator, content, &chunk);
+if (!res) {
+return InterpretResult.COMPILE_ERROR;
+}
+self.chunk = &chunk;
self.ip = 0;
-return self.run();
+return try self.run();
}
pub fn run(self: *VM) !InterpretResult {
@@ -73,6 +83,7 @@ pub const VM = struct {
},
@intFromEnum(OpCode.OP_RETURN) => {
print_value(self.pop());
+debug.print("\n", .{});
return InterpretResult.OK;
},
else => {
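
With interpret() now taking source text instead of a prebuilt chunk, the full scan, compile, and run pipeline hangs off the VM. A minimal end-to-end sketch using the API as wired up in main.zig above (test name illustrative):

const std = @import("std");
const VM = @import("./vm.zig").VM;
const InterpretResult = @import("./vm.zig").InterpretResult;

test "interpret source end to end" {
    var vm = VM.new(std.testing.allocator);
    defer vm.free();

    // Compiles "1 + 2" into a fresh chunk, runs it, and prints 3 on OP_RETURN.
    const result = try vm.interpret(std.testing.allocator, "1 + 2");
    try std.testing.expect(result == InterpretResult.OK);
}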