-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtokenizer.go
More file actions
103 lines (82 loc) · 1.91 KB
/
tokenizer.go
File metadata and controls
103 lines (82 loc) · 1.91 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
package main
import (
"fmt"
"os"
"strings"
)
// Tokenizer splits source text into a flat list of string tokens and
// supports sequential consumption of them via PullToken (advance) and
// CheckCurentToken (peek).
type Tokenizer struct {
	tokens          []string // tokens produced by chars_to_tokens, in source order
	parser_position int      // index into tokens of the next token PullToken will return
}

// TOKEN_EOF is the sentinel token returned once the token stream is exhausted.
const TOKEN_EOF = "EOF"
// PullToken consumes and returns the next token, advancing the parser
// position by one. After every token has been consumed it keeps
// returning TOKEN_EOF on each subsequent call.
func (t *Tokenizer) PullToken() (token string) {
	if t.parser_position >= len(t.tokens) {
		return TOKEN_EOF
	}
	tok := t.tokens[t.parser_position]
	t.parser_position++
	return tok
}
// CheckCurentToken returns the token at the current parser position
// without consuming it (a peek). When the stream is exhausted it
// returns TOKEN_EOF instead of panicking with an index-out-of-range,
// matching PullToken's end-of-stream behavior.
//
// NOTE(review): the name keeps its historical misspelling ("Curent")
// because renaming would break existing callers.
func (t *Tokenizer) CheckCurentToken() (token string) {
	if t.parser_position >= len(t.tokens) {
		return TOKEN_EOF
	}
	return t.tokens[t.parser_position]
}
// tokenize_entire_file reads the file at path and tokenizes its whole
// contents into t.tokens. The returned error is non-nil only when the
// file could not be read.
func (t *Tokenizer) tokenize_entire_file(path string) (err error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	// Split into one string per UTF-8 character for the tokenizer core.
	t.chars_to_tokens(strings.Split(string(data), ""))
	return nil
}
// print_tokens dumps the current token slice to stdout in Go syntax,
// intended for debugging.
func (t *Tokenizer) print_tokens() {
	fmt.Fprintf(os.Stdout, "Parsed tokens: %#v \n", t.tokens)
}
// chars_to_tokens converts a slice of single-character strings (as
// produced by strings.Split(s, "")) into tokens appended to t.tokens.
//
// Rules, preserved from the original implementation:
//   - ":", "{", "}", "," are emitted as single-character tokens,
//     flushing any accumulated word first;
//   - spaces and newlines terminate the accumulated word;
//   - a word starting with "#" begins a comment that runs to end of line;
//   - the flags "-m", "-s", "-pr" are emitted as soon as they are fully
//     accumulated.
//
// Fixes over the original:
//   - the final character of the input is no longer skipped, and a
//     trailing accumulated word is flushed at end of input;
//   - empty input no longer panics (the old loop indexed chars[0]
//     before its break condition could fire);
//   - the index now advances by one slice element instead of by the
//     character's byte length, which skipped elements for multi-byte
//     UTF-8 characters;
//   - a "#" comment at end of file without a trailing newline no longer
//     runs past the end of the slice (panic);
//   - after emitting a "-m"/"-s"/"-pr" flag the current character is
//     re-examined instead of being silently dropped.
func (t *Tokenizer) chars_to_tokens(chars []string) {
	accumulator := ""
	// flush emits the accumulated word as a token, if any, and resets it.
	flush := func() {
		if len(accumulator) != 0 && accumulator != " " {
			t.tokens = append(t.tokens, accumulator)
		}
		accumulator = ""
	}

	char_index := 0
	for char_index < len(chars) {
		char := chars[char_index]
		switch accumulator {
		case "#":
			// Comment: discard everything up to (but not past) the end
			// of the line or the end of the input.
			for char_index < len(chars) && chars[char_index] != "\n" {
				char_index++
			}
			accumulator = ""
		case "-m", "-s", "-pr":
			// Flag fully accumulated: emit it. Do not advance, so the
			// current character is processed normally next iteration.
			t.tokens = append(t.tokens, accumulator)
			accumulator = ""
		default:
			switch char {
			case ":", "{", "}", ",":
				flush()
				t.tokens = append(t.tokens, char)
			case " ":
				flush()
			default:
				if char != "\n" {
					accumulator += char
				}
			}
			char_index++
		}
	}
	// Emit any word still pending at end of input.
	flush()
}