Skip to content

Commit 9dbbc67

Browse files
stable commit
1 parent 183e46f commit 9dbbc67

File tree

5 files changed

+93
-58
lines changed

5 files changed

+93
-58
lines changed

eval.go

+21-1
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@ package main
22

33
import (
44
"errors"
5+
"math"
56
"strconv"
67
)
78

@@ -15,6 +16,20 @@ func evalNumber(ast *astNode) (any, error) {
1516
return strconv.ParseFloat(string(ast.token.val), 64)
1617
}
1718

19+
func evalUnary(ast *astNode) (any, error) {
20+
right, err := eval(ast.right)
21+
if err != nil {
22+
return nil, err
23+
}
24+
switch ast.token.tokenType {
25+
case Minus:
26+
return -right.(float64), nil
27+
case Not:
28+
return !right.(bool), nil
29+
}
30+
return nil, nil
31+
}
32+
1833
func evalInfix(ast *astNode) (any, error) {
1934
var left, right any
2035
left, err := eval(ast.left)
@@ -43,6 +58,9 @@ func evalInfix(ast *astNode) (any, error) {
4358
case Mod:
4459
return float64(int64(leftF) % int64(rightF)), nil
4560

61+
case RaisePower:
62+
return math.Pow(leftF, rightF), nil
63+
4664
case LT:
4765
return leftF < rightF, nil
4866
case GT:
@@ -53,7 +71,9 @@ func evalInfix(ast *astNode) (any, error) {
5371
return leftF >= rightF, nil
5472
case Eq:
5573
return leftF == rightF, nil
74+
case NotEq:
75+
return leftF != rightF, nil
5676

5777
}
58-
return 0, nil // Need to take care of this
78+
return nil, nil // Need to take care of this
5979
}

eval_test.go

+2-2
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ import (
66
)
77

88
func TestEval(t *testing.T) {
9-
lexer := NewLexer(bytes.NewReader([]byte("(2*3*2+32) == 8")))
9+
lexer := NewLexer(bytes.NewReader([]byte("(2+3)*(6/2)*(4**4**(4-3))")))
1010

1111
err := lexer.Analyze()
1212
if err != nil {
@@ -22,7 +22,7 @@ func TestEval(t *testing.T) {
2222
t.Error("error occured while evaluting ast: ", err)
2323
return
2424
}
25-
expectedVal := false
25+
expectedVal := 65536.0
2626
if actualResult != expectedVal {
2727
t.Errorf("Wrong Evaluation Actual: %v, Expected: %v", actualResult, expectedVal)
2828
}

lexer.go

+31-37
Original file line numberDiff line numberDiff line change
@@ -27,92 +27,84 @@ func (lexer *Lexer) addT(token Token) {
2727
lexer.tokenList = append(lexer.tokenList, token)
2828
}
2929

30-
func (lexer *Lexer) retriveByte() byte {
31-
bytes, err := lexer.reader.Peek(lexer.unProcessedBytes)
32-
if err != nil {
33-
lexer.lastErr = err
34-
return 0
35-
}
36-
if lexer.unProcessedBytes > 0 {
37-
return bytes[lexer.unProcessedBytes-1]
38-
}
39-
return 0
40-
}
41-
4230
func (lexer *Lexer) eof() bool {
4331
return lexer.lastErr != nil && lexer.lastErr == io.EOF
4432
}
4533

46-
func (lexer *Lexer) nextByte() byte {
47-
lexer.unProcessedBytes++
48-
return lexer.retriveByte()
49-
}
50-
func (lexer *Lexer) prevByte() byte {
51-
lexer.unProcessedBytes--
52-
return lexer.retriveByte()
34+
func (lexer *Lexer) next() byte {
35+
by, err := lexer.reader.ReadByte()
36+
if err != nil {
37+
lexer.lastErr = err
38+
return 0
39+
}
40+
return by
5341
}
5442

55-
func (lexer *Lexer) drain() {
56-
_, lexer.lastErr = lexer.reader.Discard(lexer.unProcessedBytes)
57-
lexer.unProcessedBytes = 0
43+
func (lexer *Lexer) peek() byte {
44+
by, err := lexer.reader.Peek(1)
45+
if err != nil {
46+
lexer.lastErr = err
47+
return 0
48+
}
49+
return by[0]
5850
}
5951

6052
func (lexer *Lexer) anaEq() {
61-
nextByte := lexer.nextByte()
53+
nextByte := lexer.peek()
6254
if !lexer.eof() && nextByte == '=' {
55+
lexer.next() // consume
6356
lexer.addT(NewToken(Eq, '=', '='))
6457
return
6558
}
6659
lexer.addT(NewToken(Assign, '='))
67-
lexer.prevByte()
6860
}
6961

7062
func (lexer *Lexer) anaAsterisk() {
71-
nextByte := lexer.nextByte()
63+
nextByte := lexer.peek()
7264
if !lexer.eof() && nextByte == '*' {
65+
lexer.next() // consume
7366
lexer.addT(NewToken(RaisePower, '*', '*'))
7467
return
7568
}
7669
lexer.addT(NewToken(Asterisk, '*'))
77-
lexer.prevByte()
7870
}
7971

8072
func (lexer *Lexer) anaNot() {
81-
nextByte := lexer.nextByte()
73+
nextByte := lexer.peek()
8274
if !lexer.eof() && nextByte == '=' {
75+
lexer.next() // consume
8376
lexer.addT(NewToken(NotEq, '!', '='))
8477
return
8578
}
8679
lexer.addT(NewToken(Not, '!'))
87-
lexer.prevByte()
8880
}
8981

9082
func (lexer *Lexer) anaLT() {
91-
nextByte := lexer.nextByte()
83+
nextByte := lexer.peek()
9284
if !lexer.eof() && nextByte == '=' {
85+
lexer.next() //consume
9386
lexer.addT(NewToken(LTEQ, '<', '='))
9487
return
9588
}
9689
lexer.addT(NewToken(LT, '<'))
97-
lexer.prevByte()
9890

9991
}
10092

10193
func (lexer *Lexer) anaGT() {
102-
nextByte := lexer.nextByte()
94+
nextByte := lexer.peek()
10395
if !lexer.eof() && nextByte == '=' {
96+
lexer.next() //consume
10497
lexer.addT(NewToken(GTEQ, '>', '='))
10598
return
10699
}
107100
lexer.addT(NewToken(GT, '>'))
108-
lexer.prevByte()
109101

110102
}
111103

112104
func (lexer *Lexer) anaComment() {
113105
lexer.addT(NewToken(CommentHash, '#'))
114106
for {
115-
nextByte := lexer.nextByte()
107+
nextByte := lexer.next()
116108
if lexer.eof() || nextByte == '\n' { // Read until next line
117109
return
118110
}
@@ -124,10 +116,12 @@ func (lexer *Lexer) anaDigits(startingDigit byte) error {
124116
var fractionStarted bool
125117

126118
for {
127-
nextByte := lexer.nextByte()
119+
nextByte := lexer.peek()
128120
if isDigit(nextByte) {
121+
lexer.next() // consume
129122
number = append(number, nextByte)
130123
} else if nextByte == '.' {
124+
lexer.next() //consume
131125
if fractionStarted {
132126
return illegalToken
133127
}
@@ -136,7 +130,6 @@ func (lexer *Lexer) anaDigits(startingDigit byte) error {
136130

137131
} else {
138132
lexer.addT(NewToken(Number, number...))
139-
lexer.prevByte()
140133
return nil
141134
}
142135
}
@@ -152,8 +145,7 @@ func isWhiteSpace(b byte) bool {
152145
}
153146

154147
func (lexer *Lexer) Analyze() error {
155-
for ; lexer.lastErr == nil; lexer.drain() {
156-
nextByte := lexer.nextByte()
148+
for nextByte := lexer.next(); lexer.lastErr == nil; nextByte = lexer.next() {
157149

158150
switch nextByte {
159151
case '+':
@@ -180,6 +172,8 @@ func (lexer *Lexer) Analyze() error {
180172
lexer.anaLT()
181173
case '>':
182174
lexer.anaGT()
175+
case '!':
176+
lexer.anaNot()
183177
default:
184178
if isDigit(nextByte) {
185179
err := lexer.anaDigits(nextByte)

lexer_test.go

+3-1
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ func TestLexer(t *testing.T) {
1212
NewToken(Plus, '+'),
1313
NewToken(Number, []byte("2.343")...),
1414
},
15-
"(<><=>= ===> =)": {
15+
"(<><=>= ===> = != !)": {
1616
NewToken(LeftBrace, '('),
1717
NewToken(LT, '<'),
1818
NewToken(GT, '>'),
@@ -22,6 +22,8 @@ func TestLexer(t *testing.T) {
2222
NewToken(Assign, '='),
2323
NewToken(GT, '>'),
2424
NewToken(Assign, '='),
25+
NewToken(NotEq, '!', '='),
26+
NewToken(Not, '!'),
2527
NewToken(RightBrace, ')'),
2628
},
2729
}

parser_combinators.go

+36-17
Original file line numberDiff line numberDiff line change
@@ -27,40 +27,59 @@ func parsePrimary(pr *parser) *astNode {
2727
return nil
2828
}
2929

30-
// unary -> (-) unary | priamry
30+
// unary -> (- | !) unary | primary
3131
func parseUnary(pr *parser) *astNode {
3232
log.Print("inside unary")
3333
token := pr.peek()
3434
if pr.eof() {
3535
return nil
3636
}
37-
if token.tokenType == Minus { // (-) unary
38-
pr.next() // consuming minus
39-
// not checking for eof as we have checked while calling peek()
40-
return newASTNode(token, newASTNode(NewToken(Number, '0'), nilASTNode, nilASTNode, evalNumber), parseUnary(pr), evalInfix)
37+
if token.tokenType == Minus || token.tokenType == Not {
38+
pr.next() // consume minus or not
39+
return newASTNode(token, nilASTNode, parseUnary(pr), evalUnary)
4140
}
4241

4342
return parsePrimary(pr)
4443

4544
}
4645

47-
// factor -> unary ( ( "/" | "*" | "%" ) unary)*
48-
func parseFactor(pr *parser) *astNode {
49-
log.Print("inside factor")
46+
func parseExponent(pr *parser) *astNode {
47+
log.Print("inside exponent")
5048

5149
leftUnary := parseUnary(pr) // consume first unary
5250
var parent = leftUnary
5351

52+
for token := pr.peek(); token.tokenType == RaisePower; token = pr.peek() {
53+
54+
if pr.eof() {
55+
break // we are just breaking the rule as we are in the recursive expansion of (**) unary
56+
}
57+
pr.next() // consume **
58+
// not checking for eof as we have checked while calling peek()
59+
rightUnary := parseExponent(pr)
60+
61+
parent = newASTNode(token, parent, rightUnary, evalInfix) // exponent is right associated operation for us
62+
}
63+
return parent
64+
}
65+
66+
// factor -> exponent ( ( "/" | "*" | "%" ) exponent)*
67+
func parseFactor(pr *parser) *astNode {
68+
log.Print("inside factor")
69+
70+
leftExponent := parseExponent(pr) // consume first unary
71+
var parent = leftExponent
72+
5473
for token := pr.peek(); token.tokenType == Mod || token.tokenType == Asterisk || token.tokenType == Slash; token = pr.peek() {
5574

5675
if pr.eof() {
5776
break // we are just breaking the rule as we are in the recursive expansion of (*|/) unary
5877
}
5978
pr.next() // consume * or /
6079
// not checking for eof as we have checked while calling peek()
61-
rightUnary := parseUnary(pr)
80+
rightExponent := parseFactor(pr) // Makr
6281

63-
parent = newASTNode(token, parent, rightUnary, evalInfix)
82+
parent = newASTNode(token, parent, rightExponent, evalInfix)
6483
}
6584
return parent
6685
}
@@ -78,7 +97,7 @@ func parseTerm(pr *parser) *astNode {
7897
}
7998

8099
pr.next() // consume + or -
81-
rightUnary := parseFactor(pr)
100+
rightUnary := parseTerm(pr)
82101

83102
parent = newASTNode(token, parent, rightUnary, evalInfix)
84103

@@ -102,7 +121,7 @@ func parseComp(pr *parser) *astNode {
102121
switch token.tokenType {
103122
case LT, GT, LTEQ, GTEQ:
104123
pr.next() // consume > or >= or < or <=
105-
rightUnary := parseTerm(pr)
124+
rightUnary := parseComp(pr)
106125

107126
parent = newASTNode(token, parent, rightUnary, evalInfix)
108127
default:
@@ -114,20 +133,20 @@ func parseComp(pr *parser) *astNode {
114133
return parent
115134
}
116135

117-
// expr -> comparison ( ( "==" ) comparison )*
136+
// expr -> comparison ( ( "==" | "!=" ) comparison )*
118137
func parseExpr(pr *parser) *astNode {
119138
log.Print("inside expr")
120139

121140
leftUnary := parseComp(pr) // consume first unary
122141
var parent = leftUnary
123142

124-
for token := pr.peek(); token.tokenType == Eq; token = pr.peek() {
143+
for token := pr.peek(); token.tokenType == Eq || token.tokenType == NotEq; token = pr.peek() {
125144
if pr.eof() {
126-
break // we are just breaking the rule as we in the recusrive expansion of (==) comparison
145+
break // we are just breaking the rule as we are in the recursive expansion of (== or !=) comparison
127146
}
128147

129-
pr.next() // consume ==
130-
rightUnary := parseComp(pr)
148+
pr.next() // consume == or !=
149+
rightUnary := parseExpr(pr)
131150

132151
parent = newASTNode(token, parent, rightUnary, evalInfix)
133152

0 commit comments

Comments
 (0)