language

Some fool's attempt at an interpreted language

commit ae7f80e5114b977dee545b4ebfd631c7f21d25b9
parent 2ca2d45b1126d5cfc5052e5e186b379b7bce4ef6
Author: Paul Longtine <paullongtine@gmail.com>
Date:   Mon Apr  4 09:15:02 2016

More files

Diffstat:
 src/lexer_prototype/interpreter.py        |  28 ++++-
 src/lexer_prototype/lexer.py              | 160 +--------------------------
 src/lexer_prototype/main.py               |   7 +-
 src/lexer_prototype/parser.py             | 193 +++++++++++++++++++++++++++++++-
 src/lexer_prototype/test_files/testing.ti |   9 +-
 src/vm/Makefile                           |   2 +-
 src/vm/src/ns.c                           |  22 ++--
 7 files changed, 257 insertions(+), 164 deletions(-)
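
The interesting change in this commit is that Statement objects now carry an
action callback (the new func= argument), and the new Interpreter walks the
parsed program and fires that callback for each matched line. A minimal,
self-contained sketch of that dispatch pattern follows; it is not code from
this commit, and MiniStatement/MiniInterpreter are illustrative stand-ins for
the real Statement, Parser and Interpreter classes in the diff below.

    # Sketch of the Statement.action / Interpreter dispatch introduced here.
    # Assumes a "program" shaped like Parser.get_statements() output:
    # a list of (statement, match) tuples.

    class MiniStatement:
        def __init__(self, name, func=None):
            self.name = name
            self.action = func              # same role as Statement.action

    class MiniInterpreter:
        def __init__(self, program):
            self.line = (None, None)
            for self.line in program:
                # each statement decides which operands it wants evaluated
                print(self.line[0].name, self.line[0].action(self))

        def eval_expr(self, index):
            # placeholder, mirroring the stub eval_* methods in interpreter.py
            return self.line[1][index]

    if __name__ == "__main__":
        demo = [(MiniStatement("if", func=lambda x: [x.eval_expr(1)]),
                 ["if", ["x", "==", "3"], ":"])]
        MiniInterpreter(demo)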

diff --git a/src/lexer_prototype/interpreter.py b/src/lexer_prototype/interpreter.py
@@ -0,0 +1,28 @@
+from parser import *
+
+class Interpreter():
+    def __init__(self, filename):
+        self.p = Parser(filename)
+
+        self.program = self.p.get_statements()
+
+        self.line = (None, None)
+
+        for self.line in self.program:
+            self.line[0].action(self)
+
+    def eval_label(self, index):
+        return(self.line)
+
+    def eval_args(self, index):
+        return(self.line)
+
+    def eval_type(self, index):
+        return(self.line)
+
+    def eval_param(self, index):
+        return(self.line)
+
+    def eval_expr(self, index):
+        return(self.line)
+
diff --git a/src/lexer_prototype/lexer.py b/src/lexer_prototype/lexer.py
@@ -94,9 +94,10 @@ class GroupingSymbol(PolySymbol):
         return [index, rv] if len(rv) > 0 else [False, None]
 
 class Statement():
-    def __init__(self, name, expression=[]):
-        self.name = name
-        self.expr = expression
+    def __init__(self, name, expression=[], func=None):
+        self.name = name
+        self.expr = expression
+        self.action = func
 
     def match(self, tokenstring):
         rv = []
@@ -173,156 +174,3 @@ class Tokenizer():
 
         return rv
 
-def lex(file_name):
-    splitters = [
-        ":",
-        ";",
-        "\(",
-        "\)",
-        "\[",
-        "\]",
-        "{",
-        "}",
-        ",",
-        " "
-    ]
-    end_statements = [
-        ":",
-        ";",
-        "{",
-        "}"
-    ]
-
-    known_tokens = [
-        "if",
-        "for",
-        "func"
-    ]
-
-    defined_types = [
-        "int",
-        "float",
-        "array"
-    ]
-
-    number_def = AtomicSymbol("[0-9]+")
-
-    type_def = InclusiveSymbol(defined_types)
-    label_def = ExclusiveSymbol(defined_types + known_tokens)
-
-    paramlist_def = GroupingSymbol( [
-        AtomicSymbol("\("),
-        AtomicSymbol("\)")
-    ] )
-
-    expr_def = PolySymbol( [
-        label_def,
-        number_def,
-        AtomicSymbol("\("),
-        AtomicSymbol("\)"),
-        AtomicSymbol("\+"),
-        AtomicSymbol("\-"),
-        AtomicSymbol("\*"),
-        AtomicSymbol("\/"),
-        AtomicSymbol("\>"),
-        AtomicSymbol("\<"),
-        AtomicSymbol("=\<"),
-        AtomicSymbol("\>="),
-        AtomicSymbol("=="),
-        AtomicSymbol("\""),
-        AtomicSymbol("'")
-    ], terminator=[
-        AtomicSymbol(";"),
-        AtomicSymbol(":")
-    ])
-
-    active_tokens = [
-        Statement(
-            "codeblock_begin",
-            expression=[
-                AtomicSymbol("{")
-            ]
-        ),
-        Statement(
-            "codeblock_end",
-            expression=[
-                AtomicSymbol("}")
-            ]
-        ),
-        Statement(
-            "if",
-            expression=[
-                AtomicSymbol("if"),
-                expr_def,
-                AtomicSymbol(":")
-            ]
-        ),
-        Statement(
-            "for",
-            expression=[
-                AtomicSymbol("for"),
-                expr_def,
-                AtomicSymbol(":")
-            ]
-        ),
-        Statement(
-            "function",
-            expression=[
-                AtomicSymbol("func"),
-                label_def,
-                paramlist_def,
-                AtomicSymbol("->"),
-                type_def,
-                AtomicSymbol(":")
-            ]
-        ),
-        Statement(
-            "instantiation",
-            expression=[
-                type_def,
-                label_def,
-                AtomicSymbol("="),
-                expr_def,
-                AtomicSymbol(";")
-            ]
-        ),
-        Statement(
-            "assignment",
-            expression=[
-                label_def,
-                AtomicSymbol("="),
-                expr_def,
-                AtomicSymbol(";")
-            ]
-        ),
-        Statement(
-            "expression",
-            expression=[
-                expr_def,
-                AtomicSymbol(";")
-            ]
-        )
-    ]
-    data=""
-    with open(file_name, 'r') as program:
-        data=program.read().replace('\n', '')
-
-    symbols = Tokenizer(splitters, end_statements)
-
-    symbols.generate_symbols(data)
-
-    lines = symbols.generate_statements()
-    rv = []
-    for l in lines:
-        for a in active_tokens:
-            r = a.match(l)
-            if r:
-                rv.append((a.name,r))
-                break
-
-    return rv
-
-if __name__ == "__main__":
-    import sys
-    for i in lex(sys.argv[1]):
-        print(i)
diff --git a/src/lexer_prototype/main.py b/src/lexer_prototype/main.py
@@ -0,0 +1,7 @@
+from parser import *
+if __name__ == "__main__":
+    import sys
+
+    thing = Parser(sys.argv[1])
+    for i in thing.get_statements():
+        print(i)
diff --git a/src/lexer_prototype/parser.py b/src/lexer_prototype/parser.py
@@ -0,0 +1,193 @@
+from lexer import *
+
+class Parser():
+    def __init__(self, file_name):
+        self.splitters = [
+            ":",
+            ";",
+            "\(",
+            "\)",
+            "\[",
+            "\]",
+            "{",
+            "}",
+            ",",
+            " "
+        ]
+        self.end_statements = [
+            ":",
+            ";",
+            "{",
+            "}"
+        ]
+
+        self.known_tokens = [
+            "if",
+            "else",
+            "for",
+            "while",
+            "func"
+        ]
+
+        self.defined_types = [
+            "int",
+            "float",
+            "array",
+            "object",
+            "ptr"
+        ]
+
+        self.number_def = AtomicSymbol("[0-9]+")
+
+        self.type_def = InclusiveSymbol(self.defined_types)
+        self.label_def = ExclusiveSymbol(self.defined_types +
+                                         self.known_tokens )
+
+        self.paramlist_def = GroupingSymbol( [
+            AtomicSymbol("\("),
+            AtomicSymbol("\)")
+        ] )
+
+        self.expr_def = PolySymbol( [
+            self.label_def,
+            self.number_def,
+            AtomicSymbol("\("),
+            AtomicSymbol("\)"),
+            AtomicSymbol("\+"),
+            AtomicSymbol("\-"),
+            AtomicSymbol("\*"),
+            AtomicSymbol("\/"),
+            AtomicSymbol("\>"),
+            AtomicSymbol("\<"),
+            AtomicSymbol("=\<"),
+            AtomicSymbol("\>="),
+            AtomicSymbol("=="),
+            AtomicSymbol("\""),
+            AtomicSymbol("'")
+        ], terminator=[
+            AtomicSymbol(";"),
+            AtomicSymbol(":")
+        ])
+
+        self.active_tokens = [
+            Statement(
+                "codeblock_begin",
+                expression=[
+                    AtomicSymbol("{")
+                ],
+                func=(lambda x: [])
+            ),
+            Statement(
+                "codeblock_end",
+                expression=[
+                    AtomicSymbol("}")
+                ],
+                func=(lambda x: [])
+            ),
+            Statement(
+                "if",
+                expression=[
+                    AtomicSymbol("if"),
+                    self.expr_def,
+                    AtomicSymbol(":")
+                ],
+                func=(lambda x: [x.eval_expr(1)])
+            ),
+            Statement(
+                "else",
+                expression=[
+                    AtomicSymbol("else"),
+                    AtomicSymbol(":")
+                ],
+                func=(lambda x: [])
+            ),
+            Statement(
+                "for",
+                expression=[
+                    AtomicSymbol("for"),
+                    self.expr_def,
+                    AtomicSymbol(":")
+                ],
+                func=(lambda x: [x.eval_expr(1)])
+            ),
+            Statement(
+                "while",
+                expression=[
+                    AtomicSymbol("while"),
+                    self.expr_def,
+                    AtomicSymbol(":")
+                ],
+                func=(lambda x: [x.eval_expr(1)])
+            ),
+            Statement(
+                "function",
+                expression=[
+                    AtomicSymbol("func"),
+                    self.label_def,
+                    self.paramlist_def,
+                    AtomicSymbol("->"),
+                    self.type_def,
+                    AtomicSymbol(":")
+                ],
+                func=(lambda x: [x.eval_label(1), x.eval_param(2), x.eval_type(5)])
+            ),
+            Statement(
+                "instantiation",
+                expression=[
+                    self.type_def,
+                    self.label_def,
+                    AtomicSymbol("="),
+                    self.expr_def,
+                    AtomicSymbol(";")
+                ],
+                func=(lambda x: [x.eval_type(0), x.eval_label(1), x.eval_expr(3)])
+            ),
+            Statement(
+                "assignment",
+                expression=[
+                    self.label_def,
+                    AtomicSymbol("="),
+                    self.expr_def,
+                    AtomicSymbol(";")
+                ],
+                func=(lambda x: [x.eval_label(0), x.eval_expr(2)])
+            ),
+            Statement(
+                "func_call",
+                expression=[
+                    self.label_def,
+                    self.paramlist_def,
+                    AtomicSymbol(";")
+                ],
+                func=(lambda x: [x.eval_label(0), x.eval_args(1)])
+            ),
+            Statement(
+                "expression",
+                expression=[
+                    self.expr_def,
+                    AtomicSymbol(";")
+                ],
+                func=(lambda x: [x.eval_expr(0)])
+            )
+        ]
+        data=""
+        with open(file_name, 'r') as program:
+            data=program.read().replace('\n', '')
+
+        self.symbols = Tokenizer(self.splitters, self.end_statements)
+
+        self.symbols.generate_symbols(data)
+
+        self.lines = self.symbols.generate_statements()
+
+    def get_statements(self):
+        rv = []
+        for l in self.lines:
+            for a in self.active_tokens:
+                r = a.match(l)
+                if r:
+                    rv.append((a,r))
+                    break
+
+        return rv
+
diff --git a/src/lexer_prototype/test_files/testing.ti b/src/lexer_prototype/test_files/testing.ti
@@ -17,4 +17,13 @@ if x == 3:
     print("Potatoes are good for \"the\" soul.");
 }
 
+for x in y:
+{
+    print(x);
+}
+
 some_important_function(3, 4, 2);
+
+ptr something *= ObjectClass();
+
+something.potato();
diff --git a/src/vm/Makefile b/src/vm/Makefile
@@ -2,7 +2,7 @@ SRC_DIR = src
 INC_DIR = inc
 
 CC     = gcc
-CFLAGS = -std=c99 -Wall -I$(INC_DIR)
+CFLAGS = -std=c99 -Wall -Wconversion -I$(INC_DIR)
 
 DEPS = $(INC_DIR)/is_mdata.h \
        helper.h              \
diff --git a/src/vm/src/ns.c b/src/vm/src/ns.c
@@ -123,12 +123,15 @@ var_cont* ns_pop(ns_t* ns)
 {
     N_ASSERT(ns, "ns_pop\n");
+    // Define our return value
     var_cont* rv;
+    // Is this the last link on the chain?
     if (ns->last->next != NULL)
     {
+        // Get the next to last link on the chain
         ns_cont* newlast = ns->last->next;
-
+        // Try to delete that namespace container
         rv = ns_cont_del(ns->last, 0);
-
+        // Set the new last to the last link on the chain
         ns->last = newlast;
     }
     return rv;
@@ -148,8 +151,9 @@ void ns_dec(ns_t* ns, b_type type, int scope, ns_addr address)
 {
     N_ASSERT(ns, "ns_dec\n");
 
+    // Which namespace are we searching in? (scope == 0 -> local)
     ns_cont* scoped_ns = scope ? ns->root : ns->last;
-
+    // Declare the name in the correct namespace
     ns_cont_dec(scoped_ns, type, address);
 }
 
@@ -162,10 +166,11 @@ void ns_dec(ns_t* ns, b_type type, int scope, ns_addr address)
 void ns_cont_dec(ns_cont* container, b_type type, ns_addr address)
 {
     N_ASSERT(container, "ns_cont_dec\n");
-
+    // Address must be in range
     SIZE_ASSERT( container->size > address );
-
+    // Initalize a variable container
     container->names[ address ] = var_new(type);
+    // Set the ownership of this
     container->names[ address ]->ownership = container->level;
 }
 
@@ -197,12 +202,15 @@ void ns_set(ns_t* ns, int scope, ns_addr address, var_cont* var)
 void ns_cont_set(ns_cont* container, var_cont* var, ns_addr address)
 {
     N_ASSERT(container, "ns_cont_set\n");
-    N_ASSERT(var, "ns_cont_set\n");
+    N_ASSERT(var,       "ns_cont_set\n");
 
+    // Addresss must be in range
     SIZE_ASSERT( container->size > address );
 
-    N_ASSERT(container->names[ address ], "Attempt to set an undeclared variable\n");
+    N_ASSERT(container->names[ address ],
+        "Attempt to set an undeclared variable\n");
 
     if (var->ownership < 0)
     {
+        // This is my variable now
        var->ownership = container->level;
     }
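
For reference, here is the kind of source the expanded grammar is meant to
accept, exercising the statement forms added in parser.py (while, else and
func_call) alongside the existing ones. This is an illustrative snippet, not a
file from the repository, and it has not been run against this revision:

    int x = 0;

    while x < 3:
    {
        x = x + 1;
    }

    if x == 3:
    {
        print("done");
    }
    else:
    {
        print("something is off");
    }

    some_important_function(3, 4, 2);

With the prototype files in place, a statement dump can be produced from
src/lexer_prototype/ with, e.g., python main.py test_files/testing.ti, which
is what the new main.py wires up.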