Let's now add If-statement support to the language; this will let bkcalclang handle more complex problems.
The code in this article builds on the previous installment, 《使程序语言支持变量》 (Adding Variable Support to the Language). If anything below feels unfamiliar, it may help to review the earlier articles first.
Dumping the full code listing right away would be a bit off-putting, so let's walk through the pieces first.
Defining the If node struct
type If struct {
condition Node
then *Block
}
func NewIf(condition Node, then *Block) *If {
return &If{condition: condition, then: then}
}
In the If struct, condition stores the node for the expression to test and then stores the code block to run; instances are created with the NewIf function.
Defining the If node's evaluation method
func (_if *If) Eval() float64 {
condition := _if.condition.Eval()
if condition != 0 {
_if.then.Eval()
}
return 0.
}
If evaluating the If node's condition yields a nonzero value, the then block is executed; otherwise it is skipped.
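To make the semantics concrete, here is a minimal sketch (assuming it lives in the same main package as the full listing below, so the unexported value field of Number is accessible) that hand-builds the equivalent of "if 1 then echo 42 end" and evaluates it:
then := NewBlock()
then.AddStatement(NewEcho(&Number{value: 42}))
stmt := NewIf(&Number{value: 1}, then)
stmt.Eval() // the condition is nonzero, so the then block runs and prints ":= 42"
// With &Number{value: 0} as the condition, Eval would print nothing.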
Parsing the If statement
} else if token.Name == "IF" {
lexer.NextToken()
condition := parse_binary_add(lexer)
if condition == nil {
return nil
}
token = lexer.GetToken()
if token.Name != "THEN" {
return nil
}
then := parse(lexer)
if then == nil {
return nil
}
isBlockEnd = false
return NewIf(condition, then)
} else if token.Name == "END" {
lexer.NextToken()
isBlockEnd = true
return nil
}
We handle the If statement inside the parse_statement function. When token.Name is IF, we try to parse an If statement: first skip the current token, then parse an expression; if that succeeds, it becomes the condition member of the If struct:
} else if token.Name == "IF" {
lexer.NextToken()
condition := parse_binary_add(lexer)
if condition == nil {
return nil
}
Next we look at the token following the expression: if it is THEN we parse a code block, otherwise we return nil. Before returning the NewIf node we must reset isBlockEnd to false; it is a global variable that tells the parse function whether it needs to return early:
token = lexer.GetToken()
if token.Name != "THEN" {
return nil
}
then := parse(lexer)
if then == nil {
return nil
}
isBlockEnd = false
return NewIf(condition, then)
In the parse function we use isBlockEnd to decide whether the current code block has ended:
func parse(lexer *BKLexer.Lexer) *Block {
block := NewBlock()
token := lexer.NextToken()
for token.TType == BKLexer.TOKEN_TYPE_NEWLINE {
token = lexer.NextToken()
}
for token.TType != BKLexer.TOKEN_TYPE_EOF {
statement := parse_statement(lexer)
if isBlockEnd {
return block
}
When token.Name is END, we set isBlockEnd to true to terminate the current code block. Note that parse_statement returns nil in this case, which is why parse checks isBlockEnd before its nil check: a nil statement with isBlockEnd set means the block ended normally rather than a parse error.
} else if token.Name == "END" {
lexer.NextToken()
isBlockEnd = true
return nil
}
Defining the lexer rules
lexer.AddRule("\\d+\\.?\\d*", "NUMBER")
lexer.AddRule("[\\p{L}\\d_]+", "NAME")
lexer.AddRule("\\+", "PLUS")
lexer.AddRule("-", "MINUS")
lexer.AddRule("\\*", "MUL")
lexer.AddRule("/", "DIV")
lexer.AddRule("\\(", "LPAR")
lexer.AddRule("\\)", "RPAR")
lexer.AddRule("=", "ASSIGN")
lexer.AddIgnores("[ \\f\\t]+")
lexer.AddIgnores("#[^\\r\\n]*")
lexer.AddReserve("set")
lexer.AddReserve("echo")
lexer.AddReserve("if")
lexer.AddReserve("then")
lexer.AddReserve("end")
Here we add the if, then, and end reserved words.
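As a quick sanity check of the reserved words, the sketch below (dropped into main, which already imports fmt) tokenizes a single line and prints each token. The expected names are an assumption based on how parse_statement compares token.Name, namely that reserved words come back with an upper-cased Name:
lexer := BKLexer.NewLexer()
lexer.AddRule("[\\p{L}\\d_]+", "NAME")
lexer.AddIgnores("[ \\f\\t]+")
lexer.AddReserve("if")
lexer.AddReserve("then")
lexer.Build("if count then")
// Expected (assumption): IF "if", NAME "count", THEN "then"
for token := lexer.NextToken(); token.TType != BKLexer.TOKEN_TYPE_EOF; token = lexer.NextToken() {
    fmt.Println(token.Name, token.Source)
}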
Testing with a sample script
The test script:
set a = 2
if a then
    echo a
    set a = a - 1
    if a then
        echo a
        set a = a - 1
        if a then
            echo a
        end
    end
end
echo a
Output (the outer two then blocks each echo a and then decrement it; by the time the innermost if runs, a is 0, so its block is skipped, and the final echo prints 0):
➜ go run calc.go
:= 2
:= 1
:= 0
Full code listing
package main
import (
"fmt"
"strconv"
"io/ioutil"
"./bklexer"
)
var valueDict map[string]float64
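// isBlockEnd is set when parse_statement consumes an END token; it tells the
// enclosing parse call that the current block is finished.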
var isBlockEnd bool = false
type Node interface {
Eval() float64
}
type Block struct {
statements []Node
}
func NewBlock() *Block {
return &Block{}
}
func (block *Block) AddStatement(statement Node) {
block.statements = append(block.statements, statement)
}
func (block *Block) Eval() {
for _, statement := range block.statements {
statement.Eval()
}
}
type Number struct {
value float64
}
func NewNumber(token *BKLexer.Token) *Number {
value, _ := strconv.ParseFloat(token.Source, 64)
return &Number{value: value}
}
func (number *Number) Eval() float64 {
return number.value
}
type Name struct {
name string
}
func NewName(token *BKLexer.Token) *Name {
return &Name{name: token.Source}
}
func (name *Name) Eval() float64 {
if value, found := valueDict[name.name]; found {
return value
}
return 0.
}
type BinaryOpt struct {
opt string
lhs Node
rhs Node
}
func NewBinaryOpt(token *BKLexer.Token, lhs Node, rhs Node) *BinaryOpt {
return &BinaryOpt{opt: token.Source, lhs: lhs, rhs: rhs}
}
func (binaryOpt *BinaryOpt) Eval() float64 {
lhs, rhs := binaryOpt.lhs, binaryOpt.rhs
switch binaryOpt.opt {
case "+": return lhs.Eval() + rhs.Eval()
case "-": return lhs.Eval() - rhs.Eval()
case "*": return lhs.Eval() * rhs.Eval()
case "/": return lhs.Eval() / rhs.Eval()
}
return 0
}
type Assign struct {
name string
value Node
}
func NewAssign(token *BKLexer.Token, value Node) *Assign {
return &Assign{name: token.Source, value: value}
}
func (assign *Assign) Eval() float64 {
value := assign.value.Eval()
valueDict[assign.name] = value
return value
}
type Echo struct {
value Node
}
func NewEcho(value Node) *Echo {
return &Echo{value: value}
}
func (echo *Echo) Eval() float64 {
value := echo.value.Eval()
fmt.Println(":=", value)
return value
}
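// If runs its then block only when condition evaluates to a nonzero value.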
type If struct {
condition Node
then *Block
}
func NewIf(condition Node, then *Block) *If {
return &If{condition: condition, then: then}
}
func (_if *If) Eval() float64 {
condition := _if.condition.Eval()
if condition != 0 {
_if.then.Eval()
}
return 0.
}
func parse(lexer *BKLexer.Lexer) *Block {
block := NewBlock()
token := lexer.NextToken()
for token.TType == BKLexer.TOKEN_TYPE_NEWLINE {
token = lexer.NextToken()
}
for token.TType != BKLexer.TOKEN_TYPE_EOF {
statement := parse_statement(lexer)
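// parse_statement consumed an END token: the current block is complete.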
if isBlockEnd {
return block
}
if statement == nil {
return nil
}
token = lexer.GetToken()
if token.TType != BKLexer.TOKEN_TYPE_NEWLINE &&
token.TType != BKLexer.TOKEN_TYPE_EOF {
return nil
}
block.AddStatement(statement)
for token.TType == BKLexer.TOKEN_TYPE_NEWLINE {
token = lexer.NextToken()
}
}
return block
}
func parse_statement(lexer *BKLexer.Lexer) Node {
token := lexer.GetToken()
if token.Name == "SET" {
name := lexer.NextToken()
if name.Name != "NAME" {
return nil
}
token = lexer.NextToken()
if token.Name != "ASSIGN" {
return nil
}
lexer.NextToken()
value := parse_binary_add(lexer)
if value == nil {
return nil
}
return NewAssign(name, value)
} else if token.Name == "ECHO" {
lexer.NextToken()
value := parse_binary_add(lexer)
if value == nil {
return nil
}
return NewEcho(value)
} else if token.Name == "IF" {
lexer.NextToken()
condition := parse_binary_add(lexer)
if condition == nil {
return nil
}
token = lexer.GetToken()
if token.Name != "THEN" {
return nil
}
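// recursively parse the statements between THEN and the matching END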
then := parse(lexer)
if then == nil {
return nil
}
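// the recursive parse call above stopped at the matching END; reset the flag
// so the enclosing block keeps parsing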
isBlockEnd = false
return NewIf(condition, then)
} else if token.Name == "END" {
lexer.NextToken()
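// tell the enclosing parse call that its block has ended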
isBlockEnd = true
return nil
}
return parse_binary_add(lexer)
}
func parse_binary_add(lexer *BKLexer.Lexer) Node {
lhs := parse_binary_mul(lexer)
if lhs == nil {
return nil
}
token := lexer.GetToken()
for token.Source == "+" || token.Source == "-" {
lexer.NextToken()
rhs := parse_binary_mul(lexer)
if rhs == nil {
return nil
}
lhs = NewBinaryOpt(token, lhs, rhs)
token = lexer.GetToken()
}
return lhs
}
func parse_binary_mul(lexer *BKLexer.Lexer) Node {
lhs := factor(lexer)
if lhs == nil {
return nil
}
token := lexer.GetToken()
for token.Source == "*" || token.Source == "/" {
lexer.NextToken()
rhs := factor(lexer)
if rhs == nil {
return nil
}
lhs = NewBinaryOpt(token, lhs, rhs)
token = lexer.GetToken()
}
return lhs
}
func factor(lexer *BKLexer.Lexer) Node {
token := lexer.GetToken()
if token.Name == "LPAR" {
lexer.NextToken()
expr := parse_binary_add(lexer)
if expr == nil {
return nil
}
token := lexer.GetToken()
if token.Name != "RPAR" {
return nil
}
lexer.NextToken()
return expr
}
if token.Name == "NUMBER" {
number := NewNumber(token)
lexer.NextToken()
return number
}
if token.Name == "NAME" {
name := NewName(token)
lexer.NextToken()
return name
}
return nil
}
func main() {
lexer := BKLexer.NewLexer()
lexer.AddRule("\\d+\\.?\\d*", "NUMBER")
lexer.AddRule("[\\p{L}\\d_]+", "NAME")
lexer.AddRule("\\+", "PLUS")
lexer.AddRule("-", "MINUS")
lexer.AddRule("\\*", "MUL")
lexer.AddRule("/", "DIV")
lexer.AddRule("\\(", "LPAR")
lexer.AddRule("\\)", "RPAR")
lexer.AddRule("=", "ASSIGN")
lexer.AddIgnores("[ \\f\\t]+")
lexer.AddIgnores("#[^\\r\\n]*")
lexer.AddReserve("set")
lexer.AddReserve("echo")
lexer.AddReserve("if")
lexer.AddReserve("then")
lexer.AddReserve("end")
bytes, err := ioutil.ReadFile("../test.txt")
if err != nil {
fmt.Println("read faild")
return
}
code := string(bytes)
lexer.Build(code)
result := parse(lexer)
if result == nil {
fmt.Println("null result")
return
}
valueDict = make(map[string]float64)
result.Eval()
}