Repository: elvin-du/tinyscript
Branch: master
Commit: 5e109414914e
Files: 76
Total size: 86.4 KB
Directory structure:
gitextract_72t38b4e/
├── .gitignore
├── Makefile
├── README.md
├── gen/
│ ├── instruction.go
│ ├── opcode.go
│ ├── opcode_gen.go
│ ├── opcode_gen_test.go
│ ├── opcode_program.go
│ ├── opcode_test.go
│ ├── operand/
│ │ ├── immediate_number.go
│ │ ├── label.go
│ │ ├── offset.go
│ │ ├── oprand.go
│ │ ├── register.go
│ │ └── types.go
│ └── types.go
├── go.mod
├── go.sum
├── lexer/
│ ├── alphabet.go
│ ├── alphabet_test.go
│ ├── keywords.go
│ ├── lexer.go
│ ├── lexer_test.go
│ ├── token.go
│ └── util/
│ ├── stream.go
│ └── stream_test.go
├── main.go
├── parser/
│ ├── ast/
│ │ ├── ast.go
│ │ ├── block.go
│ │ ├── expr.go
│ │ ├── expr_call.go
│ │ ├── factor.go
│ │ ├── func_args.go
│ │ ├── priority_table.go
│ │ ├── program.go
│ │ ├── scalar.go
│ │ ├── stmt.go
│ │ ├── stmt_assign.go
│ │ ├── stmt_assign_test.go
│ │ ├── stmt_declare.go
│ │ ├── stmt_declare_test.go
│ │ ├── stmt_for.go
│ │ ├── stmt_func_declare.go
│ │ ├── stmt_func_declare_test.go
│ │ ├── stmt_if.go
│ │ ├── stmt_if_test.go
│ │ ├── stmt_return.go
│ │ ├── stream.go
│ │ ├── stream_test.go
│ │ ├── type.go
│ │ ├── util.go
│ │ └── variable.go
│ ├── parser.go
│ └── parser_test.go
├── tests/
│ ├── add.ts
│ ├── complex-if.ts
│ ├── fact2.ts
│ ├── fact5.ts
│ ├── function.ts
│ └── recursion.ts
├── translator/
│ ├── static_table_test.go
│ ├── symbol/
│ │ ├── static_table.go
│ │ ├── symbol.go
│ │ ├── table.go
│ │ ├── table_test.go
│ │ ├── types.go
│ │ └── util.go
│ ├── tainstruction.go
│ ├── tainstruction_type.go
│ ├── taprogram.go
│ ├── translator.go
│ ├── translator_test.go
│ ├── util.go
│ └── util_test.go
└── vm/
├── vm.go
└── vm_test.go
================================================
FILE CONTENTS
================================================
================================================
FILE: .gitignore
================================================
.idea/
/tinyscript
================================================
FILE: Makefile
================================================
test:
go test ./...
================================================
FILE: README.md
================================================
# tinyscript
整个项目包括三个东西:
1. 创建了一个自己的语言
2. 编译器
3. 虚拟机
golang实现的一个编译器,用来编译一个自己创建的语言(用来玩的),最后写了一个自定义虚拟机用来运行自定义语言。
## 语言介绍
为了跨平台(其实是为了方便开发 ^ ^),所以这个语言没有静态编译成硬件指令集,最后的机器码是我自己的定义的,和MIPS类似的(其实就是一个mips子集)虚拟指令集。为了运行这些指令集,我写了一个虚拟机。
语言和golang和javascript类似,实现了函数,类型声明,函数调用等最基本的一些语言元素,没有实现类,结构体,接口等复杂数据结构。
下面是用这个语言编程的例子:
```
func fact(int n) int {
if(n == 0) {
return 1
}
return fact(n-1) * n
}
func main() void {
return fact(2)
}
```
每个函数都实现了相应的UnitTest,单元测试真香~
## 声明:
工程思路不是我自己想出来的,来自于慕课网《大学计算机必修课新讲--编译原理+操作系统+图形学》这个课程。
理论主要是看《龙书》的一部分,《自己动手写编译器,连接器》和bilibili上的中科大华保健老师的《编译原理》和哈尔滨工业大学的《编译原理》课程的一部分。
================================================
FILE: gen/instruction.go
================================================
package gen
import (
"fmt"
"reflect"
"strings"
"tinyscript/gen/operand"
"tinyscript/translator/symbol"
)
// Bit-field masks for the 32-bit instruction encoding.
// Layout: [31:26] opcode, [25:21] r0, [20:16] r1, [15:11] r2, with the
// remaining low bits holding a sign-magnitude offset whose width depends
// on how many register fields the addressing mode consumes.
const (
	MASK_OPCODE = 0xfc000000  // bits 31-26: opcode
	MASK_R0 = 0x03e00000      // bits 25-21: first register
	MASK_R1 = 0x001f0000      // bits 20-16: second register
	MASK_R2 = 0x0000f800      // bits 15-11: third register
	MASK_OFFSET0 = 0x03ffffff // bits 25-0: offset when no register is encoded (jumps)
	MASK_OFFSET1 = 0x001fffff // bits 20-0: offset/immediate after one register
	MASK_OFFSET2 = 0x000007ff // bits 10-0: offset after two registers
)
// Instruction is one machine instruction: an opcode plus its ordered
// operand list (registers, immediates, labels or offsets).
type Instruction struct {
	Code   *OpCode
	OpList []operand.Operand
}

// NewInstruction returns an instruction for code with an empty operand list.
func NewInstruction(code *OpCode) *Instruction {
	return &Instruction{Code: code, OpList: []operand.Operand{}}
}

// NewJumpInstruction builds a jump-style instruction carrying one offset.
func NewJumpInstruction(code *OpCode, offset int) *Instruction {
	instr := NewInstruction(code)
	instr.AddOperand(operand.NewOffset(offset))
	return instr
}

// NewOffsetInstruction builds an offset-addressed instruction:
// two registers followed by an offset.
func NewOffsetInstruction(code *OpCode, r1, r2 *operand.Register, offset *operand.Offset) *Instruction {
	instr := NewInstruction(code)
	instr.AddOperand(r1)
	instr.AddOperand(r2)
	instr.AddOperand(offset)
	return instr
}

// NewRegisterInstruction builds a register-addressed instruction with up
// to three registers; r2 and r3 may be nil and are then omitted.
func NewRegisterInstruction(code *OpCode, r1, r2, r3 *operand.Register) *Instruction {
	instr := NewInstruction(code)
	instr.AddOperand(r1)
	for _, reg := range []*operand.Register{r2, r3} {
		if reg != nil {
			instr.AddOperand(reg)
		}
	}
	return instr
}

// NewBNEInstruction builds a BNE comparing r1 and r2 that branches to label.
func NewBNEInstruction(r1, r2 *operand.Register, label string) *Instruction {
	instr := NewInstruction(BNE)
	instr.AddOperand(r1)
	instr.AddOperand(r2)
	instr.AddOperand(operand.NewLabel(label))
	return instr
}

// NewImmediateInstruction builds an immediate-addressed instruction:
// one register plus an immediate value.
func NewImmediateInstruction(code *OpCode, r1 *operand.Register, number *operand.ImmediateNumber) *Instruction {
	instr := NewInstruction(code)
	instr.AddOperand(r1)
	instr.AddOperand(number)
	return instr
}

// AddOperand appends o to the operand list.
func (i *Instruction) AddOperand(o operand.Operand) {
	i.OpList = append(i.OpList, o)
}

// String renders the mnemonic followed by the operands, space separated.
// Note: an instruction with no operands renders with a trailing space;
// the generated listings depend on the exact layout, so keep it.
func (i *Instruction) String() string {
	opParts := make([]string, 0, len(i.OpList))
	for _, op := range i.OpList {
		opParts = append(opParts, op.String())
	}
	return i.Code.String() + " " + strings.Join(opParts, " ")
}
// ToByteCode packs the instruction into a single 32-bit word.
// Bits 31-26 carry the opcode; the remaining fields depend on the
// addressing mode (see the MASK_* constants for the layout).
func (i *Instruction) ToByteCode() int {
	code := 0
	x := i.Code.Value
	code |= int(x) << 26
	switch i.Code.AddrType {
	case ADDRESSING_TYPE_IMMEDIATE:
		// one register + the raw immediate value in the low bits
		r0 := i.OpList[0].(*operand.Register)
		code |= int(r0.Addr) << 21
		code |= i.OpList[1].(*operand.ImmediateNumber).Value
		return code
	case ADDRESSING_TYPE_REGISTER:
		// up to three registers; absent ones simply leave zero fields
		r1 := i.OpList[0].(*operand.Register)
		code |= int(r1.Addr) << 21
		if len(i.OpList) > 1 {
			code |= int(i.OpList[1].(*operand.Register).Addr) << 16
			if len(i.OpList) > 2 {
				r2 := int(i.OpList[2].(*operand.Register).Addr)
				code |= r2 << 11
			}
		}
	case ADDRESSING_TYPE_JUMP:
		// a single resolved label offset; RETURN carries no operand at all
		if len(i.OpList) > 0 {
			code |= i.OpList[0].(*operand.Label).Offset.GetEncodedOffset()
		}
	case ADDRESSING_TYPE_OFFSET:
		// two registers + offset; the third operand may be either a raw
		// Offset or a Label whose embedded Offset was already resolved
		r1 := i.OpList[0].(*operand.Register)
		r2 := i.OpList[1].(*operand.Register)
		var offset *operand.Offset = nil
		if reflect.TypeOf(i.OpList[2]).String() == reflect.TypeOf(&operand.Label{}).String() {
			offset = i.OpList[2].(*operand.Label).Offset
		} else {
			offset = i.OpList[2].(*operand.Offset)
		}
		code |= int(r1.Addr) << 21
		code |= int(r2.Addr) << 16
		code |= offset.GetEncodedOffset()
	}
	return code
}
// LoadToRegister emits a LW that loads arg into target. Stack-resident
// symbols (SYMBOL_ADDRESS) are addressed relative to SP with a negated
// offset (the stack grows downward); constants (SYMBOL_IMMEDIATE) are
// read from the STATIC area. Any other symbol type panics.
func LoadToRegister(target *operand.Register, arg *symbol.Symbol) *Instruction {
	// Loads an integer value; only integers are supported at present.
	if arg.Typ == symbol.SYMBOL_ADDRESS {
		return NewOffsetInstruction(LW, target, operand.SP, operand.NewOffset(-arg.Offset))
	} else if arg.Typ == symbol.SYMBOL_IMMEDIATE {
		return NewOffsetInstruction(LW, target, operand.STATIC, operand.NewOffset(arg.Offset))
	}
	panic(fmt.Sprintf("Cannot load type %v symbol to register", arg.Typ))
}
// SaveToMemory emits a SW that stores source into arg's stack slot
// (stack offsets grow downward, hence the negation).
func SaveToMemory(source *operand.Register, arg *symbol.Symbol) *Instruction {
	slot := operand.NewOffset(-arg.Offset)
	return NewOffsetInstruction(SW, source, operand.SP, slot)
}
// FromByCode decodes a 32-bit instruction word back into an Instruction.
// It is the inverse of ToByteCode. A zero register field in register
// addressing is interpreted as "operand absent".
func FromByCode(code int) *Instruction {
	byteOpcode := (byte)(int(code&MASK_OPCODE) >> 26)
	opcode := FromByte(byteOpcode)
	i := NewInstruction(opcode)
	switch opcode.AddrType {
	case ADDRESSING_TYPE_IMMEDIATE:
		reg := (code & MASK_R0) >> 21
		number := code & MASK_OFFSET1
		i.OpList = append(i.OpList, operand.RegisterFromAddr(reg))
		i.OpList = append(i.OpList, operand.NewImmediateNumber(number))
	case ADDRESSING_TYPE_REGISTER:
		r1Addr := (code & MASK_R0) >> 21
		r2Addr := (code & MASK_R1) >> 16
		r3Addr := (code & MASK_R2) >> 11
		r1 := operand.RegisterFromAddr(r1Addr)
		// zero means the optional second/third register was not encoded
		var r2 *operand.Register = nil
		if r2Addr != 0 {
			r2 = operand.RegisterFromAddr(r2Addr)
		}
		var r3 *operand.Register = nil
		if r3Addr != 0 {
			r3 = operand.RegisterFromAddr(r3Addr)
		}
		i.OpList = append(i.OpList, r1)
		if nil != r2 {
			i.OpList = append(i.OpList, r2)
		}
		if nil != r3 {
			i.OpList = append(i.OpList, r3)
		}
	case ADDRESSING_TYPE_JUMP:
		// jumps decode to a bare Offset, not a Label: the symbolic name
		// is not recoverable from the byte code
		offset := code & MASK_OFFSET0
		i.OpList = append(i.OpList, operand.DecodeOffset(offset))
	case ADDRESSING_TYPE_OFFSET:
		r1Addr := (code & MASK_R0) >> 21
		r2Addr := (code & MASK_R1) >> 16
		offset := code & MASK_OFFSET2
		i.OpList = append(i.OpList, operand.RegisterFromAddr(r1Addr))
		i.OpList = append(i.OpList, operand.RegisterFromAddr(r2Addr))
		i.OpList = append(i.OpList, operand.DecodeOffset(offset))
	}
	return i
}
// GetOperand returns the operand at position index
// (panics if index is out of range).
func (i *Instruction) GetOperand(index int) operand.Operand {
	ops := i.OpList
	return ops[index]
}
================================================
FILE: gen/opcode.go
================================================
package gen
import "fmt"
// Codes maps a 6-bit opcode value to its definition. The opcode field is
// six bits wide, so values 0..63 must all be representable; the previous
// size of 63 made NewOpCode/FromByte panic with a bare index-out-of-range
// for value 63.
var Codes = [64]*OpCode{}
// Registered opcodes. Constructing each OpCode also records it in Codes
// so FromByte can decode a byte value back to its definition.
var (
	ADD = NewOpCode(ADDRESSING_TYPE_REGISTER, "ADD", 0x01)
	SUB = NewOpCode(ADDRESSING_TYPE_REGISTER, "SUB", 0x02)
	MULT = NewOpCode(ADDRESSING_TYPE_REGISTER, "MULT", 0x03)
	ADDI = NewOpCode(ADDRESSING_TYPE_IMMEDIATE, "ADDI", 0x05) // immediate add
	SUBI = NewOpCode(ADDRESSING_TYPE_IMMEDIATE, "SUBI", 0x06)
	MULTI = NewOpCode(ADDRESSING_TYPE_IMMEDIATE, "MULTI", 0x07)
	MFLO = NewOpCode(ADDRESSING_TYPE_REGISTER, "MFLO", 0x08) // the result of MULT/MULTI is stored in this register
	EQ = NewOpCode(ADDRESSING_TYPE_REGISTER, "EQ", 0x09)
	BNE = NewOpCode(ADDRESSING_TYPE_OFFSET, "BNE", 0x15) // branch when not equal
	SW = NewOpCode(ADDRESSING_TYPE_OFFSET, "SW", 0x10) // store a register back to memory
	LW = NewOpCode(ADDRESSING_TYPE_OFFSET, "LW", 0x11) // load from memory into a register
	JUMP = NewOpCode(ADDRESSING_TYPE_JUMP, "JUMP", 0x20)
	JR = NewOpCode(ADDRESSING_TYPE_JUMP, "JR", 0x21) // function-call jump
	RETURN = NewOpCode(ADDRESSING_TYPE_JUMP, "RETURN", 0x22)
)
// OpCode describes one machine opcode: its mnemonic, 6-bit byte value
// and the addressing mode that fixes how operands are encoded.
type OpCode struct {
	Name string // mnemonic used in textual listings
	Value byte // 6-bit value placed in bits 31-26 of an instruction word
	AddrType AddressingType // operand-encoding scheme for this opcode
}
// NewOpCode registers a new opcode under its byte value and returns it.
func NewOpCode(addrType AddressingType, name string, value byte) *OpCode {
	code := &OpCode{
		Name:     name,
		Value:    value,
		AddrType: addrType,
	}
	Codes[value] = code
	return code
}
// String returns the opcode's mnemonic.
func (oc *OpCode) String() string {
	name := oc.Name
	return name
}
// FromByte decodes a 6-bit opcode value back to its OpCode definition.
// It panics with a descriptive message for any value that was never
// registered — including values past the end of the table, which would
// previously panic with a bare index-out-of-range error instead.
func FromByte(byteOpcode byte) *OpCode {
	if int(byteOpcode) >= len(Codes) || Codes[byteOpcode] == nil {
		panic(fmt.Sprintf("%x opcode undefined", byteOpcode))
	}
	return Codes[byteOpcode]
}
================================================
FILE: gen/opcode_gen.go
================================================
package gen
import (
"fmt"
"tinyscript/gen/operand"
"tinyscript/translator"
"tinyscript/translator/symbol"
)
// OpCodeGen lowers a three-address-code program to machine instructions.
// It holds no state; all context is passed through Gen.
type OpCodeGen struct{}

// NewOpCodeGen returns a fresh, stateless code generator.
func NewOpCodeGen() *OpCodeGen {
	g := new(OpCodeGen)
	return g
}
// Gen walks the three-address instructions, lowers each one to machine
// instructions while recording where every label lands, then patches all
// jump targets in a final Relabel pass.
func (g *OpCodeGen) Gen(taProgram *translator.TAProgram) *OpCodeProgram {
	program := NewOpCodeProgram()
	taInstrs := taProgram.Instructions
	// label name -> index of the next instruction to be emitted
	labelHash := make(map[string]int)
	for _, taInstr := range taInstrs {
		// keep the source TA instruction as a listing comment
		program.AddComment(taInstr.String())
		switch taInstr.Typ {
		case translator.TAINSTR_TYPE_ASSIGN:
			g.GenCopy(program, taInstr)
		case translator.TAINSTR_TYPE_GOTO:
			g.GenGoTo(program, taInstr)
		case translator.TAINSTR_TYPE_CALL:
			g.GenCall(program, taInstr)
		case translator.TAINSTR_TYPE_PARAM:
			g.GenPass(program, taInstr)
		case translator.TAINSTR_TYPE_SP:
			g.GenSP(program, taInstr)
		case translator.TAINSTR_TYPE_LABEL:
			// the label tagged "main" marks the program entry point
			if taInstr.Arg2 != nil && taInstr.Arg2.(string) == "main" {
				size := len(program.Instructions)
				program.SetEntry(&size)
			}
			// record the label's instruction index for the Relabel pass
			labelHash[taInstr.Arg1.(string)] = len(program.Instructions)
		case translator.TAINSTR_TYPE_RETURN:
			g.GenReturn(program, taInstr)
		case translator.TAINSTR_TYPE_FUNC_BEGIN:
			g.GenFuncBegin(program, taInstr)
		case translator.TAINSTR_TYPE_IF:
			g.GenIf(program, taInstr)
		default:
			panic(fmt.Sprintf("unknown type %d", taInstr.Typ))
		}
	}
	g.Relabel(program, labelHash)
	return program
}
// GenGoTo emits an unconditional JUMP to a label; the label's numeric
// position is resolved later by Relabel.
func (g *OpCodeGen) GenGoTo(program *OpCodeProgram, ta *translator.TAInstruction) {
	target := ta.Arg1.(string)
	instr := NewInstruction(JUMP)
	instr.AddOperand(operand.NewLabel(target))
	program.Add(instr)
}
// GenIf lowers a three-address IF to a BNE comparing S2 against ZERO,
// branching to the label in Arg2. The translator arranges the condition
// value in S2 and picks the label so this implements the if-jump.
func (g *OpCodeGen) GenIf(program *OpCodeProgram, ta *translator.TAInstruction) {
	target := ta.Arg2.(string)
	branch := NewBNEInstruction(operand.S2, operand.ZERO, target)
	program.Add(branch)
}
// Relabel patches every jump-like instruction (JUMP, JR, BNE) so its
// label operand carries the instruction index recorded during Gen.
// The label is the first operand for JUMP/JR and the third for BNE.
func (g *OpCodeGen) Relabel(program *OpCodeProgram, labelMap map[string]int) {
	for _, instr := range program.Instructions {
		if instr.Code == JUMP || instr.Code == JR || instr.Code == BNE {
			idx := 0
			if instr.Code == BNE {
				idx = 2
			}
			labelOperand := instr.OpList[idx].(*operand.Label)
			label := labelOperand.Label
			// NOTE(review): a label missing from labelMap silently
			// resolves to 0 — confirm the translator always defines
			// every referenced label.
			offset := labelMap[label]
			labelOperand.Offset.Offset = offset
		}
	}
}
// GenReturn lowers a three-address RETURN. When the instruction carries a
// return value, it is loaded into S0 first; S0 is then stored into the
// caller-visible slot at SP+1 and a RETURN is emitted.
func (g *OpCodeGen) GenReturn(program *OpCodeProgram, ta *translator.TAInstruction) {
	// Use the comma-ok assertion: Arg1 may be a nil interface for a bare
	// "RETURN" with no value, and a plain .(*symbol.Symbol) would panic
	// before the nil check could run.
	ret, _ := ta.Arg1.(*symbol.Symbol)
	if ret != nil {
		program.Add(LoadToRegister(operand.S0, ret))
	}
	program.Add(NewOffsetInstruction(SW, operand.S0, operand.SP, operand.NewOffset(1)))
	program.Add(NewInstruction(RETURN))
}
// GenSP adjusts the stack pointer by the amount in Arg1: ADDI for a
// positive delta, SUBI with the absolute value otherwise.
func (g *OpCodeGen) GenSP(program *OpCodeProgram, ta *translator.TAInstruction) {
	delta := ta.Arg1.(int)
	code, amount := ADDI, delta
	if delta <= 0 {
		code, amount = SUBI, -delta
	}
	program.Add(NewImmediateInstruction(code, operand.SP, operand.NewImmediateNumber(amount)))
}
// GenPass passes one call argument: load the symbol into S0, then store
// it into the callee's parameter slot, Arg2 slots below the current SP.
func (g *OpCodeGen) GenPass(program *OpCodeProgram, ta *translator.TAInstruction) {
	sym := ta.Arg1.(*symbol.Symbol)
	slot := ta.Arg2.(int)
	program.Add(LoadToRegister(operand.S0, sym))
	program.Add(NewOffsetInstruction(SW, operand.S0, operand.SP, operand.NewOffset(-slot)))
}
// GenFuncBegin emits the function prologue: save the return address (RA)
// at the current stack top so that nested calls can restore it.
func (g *OpCodeGen) GenFuncBegin(program *OpCodeProgram, ta *translator.TAInstruction) {
	prologue := NewOffsetInstruction(SW, operand.RA, operand.SP, operand.NewOffset(0))
	program.Add(prologue)
}
// GenCall emits a JR to the callee's label; JR stores the PC into RA
// before jumping so that RETURN can come back.
func (g *OpCodeGen) GenCall(program *OpCodeProgram, ta *translator.TAInstruction) {
	callee := ta.Arg1.(*symbol.Symbol)
	jump := NewInstruction(JR)
	jump.AddOperand(operand.NewLabel(callee.Label))
	program.Add(jump)
}
// GenCopy lowers an assignment. The unary form (no Arg2) is a plain
// memory-to-memory copy through S0; the binary form loads both operands,
// applies the operator leaving the result in S2, and stores S2.
func (g *OpCodeGen) GenCopy(program *OpCodeProgram, ta *translator.TAInstruction) {
	result := ta.Result
	arg1 := ta.Arg1.(*symbol.Symbol)
	program.Add(LoadToRegister(operand.S0, arg1))
	if ta.Arg2 == nil {
		// simple copy: result = arg1
		program.Add(SaveToMemory(operand.S0, result))
		return
	}
	arg2 := ta.Arg2.(*symbol.Symbol)
	program.Add(LoadToRegister(operand.S1, arg2))
	switch ta.Op {
	case "+":
		program.Add(NewRegisterInstruction(ADD, operand.S2, operand.S0, operand.S1))
	case "-":
		program.Add(NewRegisterInstruction(SUB, operand.S2, operand.S0, operand.S1))
	case "*":
		// MULT leaves the product in LO; MFLO moves it into S2.
		program.Add(NewRegisterInstruction(MULT, operand.S0, operand.S1, nil))
		program.Add(NewRegisterInstruction(MFLO, operand.S2, nil, nil))
	case "==":
		program.Add(NewRegisterInstruction(EQ, operand.S2, operand.S1, operand.S0))
	default:
		// Previously an unknown operator fell through silently and
		// stored whatever stale value was left in S2.
		panic(fmt.Sprintf("GenCopy: unsupported operator %q", ta.Op))
	}
	program.Add(SaveToMemory(operand.S2, result))
}
================================================
FILE: gen/opcode_gen_test.go
================================================
package gen
import (
"github.com/magiconair/properties/assert"
"testing"
"tinyscript/parser"
"tinyscript/translator"
)
// TestExprEvaluate lowers a nested arithmetic declaration and checks both
// the static constant table and the generated instruction listing.
func TestExprEvaluate(t *testing.T) {
	source := "var a = 3 * 2*(5+1)"
	node := parser.Parse(source)
	taprog := translator.NewTranslator().Translate(node)
	assert.Equal(t, taprog.StaticTable.String(), `0:3
1:2
2:5
3:1`)
	g := NewOpCodeGen()
	prog := g.Gen(taprog)
	expected := `#p0 = 5 + 1
LW S0 STATIC 2
LW S1 STATIC 3
ADD S2 S0 S1
SW S2 SP -1
#p1 = 2 * p0
LW S0 STATIC 1
LW S1 SP -1
MULT S0 S1
MFLO S2
SW S2 SP -2
#p2 = 3 * p1
LW S0 STATIC 0
LW S1 SP -2
MULT S0 S1
MFLO S2
SW S2 SP -3
#a = p2
LW S0 SP -3
SW S0 SP 0`
	assert.Equal(t, prog.String(), expected)
}
// TestFuncEvaluate compiles tests/add.ts (a two-argument function plus a
// main) and checks the full listing, including the MAIN: entry marker,
// parameter passing and the SP adjustments around each call.
func TestFuncEvaluate(t *testing.T) {
	node := parser.ParseFromFile("../tests/add.ts")
	taprog := translator.NewTranslator().Translate(node)
	g := NewOpCodeGen()
	prog := g.Gen(taprog)
	expected := `#FUNC_BEGIN
SW RA SP 0
#p1 = a + b
LW S0 SP -1
LW S1 SP -2
ADD S2 S0 S1
SW S2 SP -3
#RETURN p1
LW S0 SP -3
SW S0 SP 1
RETURN
#FUNC_BEGIN
MAIN:SW RA SP 0
#PARAM 10 3
LW S0 STATIC 0
SW S0 SP -3
#PARAM 20 4
LW S0 STATIC 1
SW S0 SP -4
#SP -2
SUBI SP 2
#CALL L0
JR L0
#SP 2
ADDI SP 2
#RETURN
SW S0 SP 1
RETURN
#SP -1
SUBI SP 1
#CALL L1
JR L1
#SP 1
ADDI SP 1`
	assert.Equal(t,prog.String(),expected)
}
================================================
FILE: gen/opcode_program.go
================================================
package gen
import (
"strconv"
"strings"
"tinyscript/translator"
)
// OpCodeProgram is the ordered list of generated machine instructions,
// together with an optional entry index (the "main" label) and per-line
// comments used when rendering listings.
type OpCodeProgram struct {
	Entry        *int
	Instructions []*Instruction
	Comments     map[int]string // instruction index -> comment text
}

// NewOpCodeProgram returns an empty program with no entry point.
func NewOpCodeProgram() *OpCodeProgram {
	return &OpCodeProgram{
		Entry:        nil,
		Instructions: []*Instruction{},
		Comments:     map[int]string{},
	}
}

// Add appends one instruction to the program.
func (o *OpCodeProgram) Add(instr *Instruction) {
	o.Instructions = append(o.Instructions, instr)
}

// String renders the listing: each comment on its own "#" line before its
// instruction, and the entry instruction prefixed with "MAIN:".
func (o *OpCodeProgram) String() string {
	lines := make([]string, 0, len(o.Instructions))
	for idx, instr := range o.Instructions {
		if comment, ok := o.Comments[idx]; ok {
			lines = append(lines, "#"+comment)
		}
		text := instr.String()
		if o.Entry != nil && *o.Entry == idx {
			text = "MAIN:" + text
		}
		lines = append(lines, text)
	}
	return strings.Join(lines, "\n")
}

// SetEntry records the index of the program's entry instruction.
func (o *OpCodeProgram) SetEntry(entry *int) {
	o.Entry = entry
}

// AddComment attaches a comment to the next instruction to be added.
func (o *OpCodeProgram) AddComment(comment string) {
	o.Comments[len(o.Instructions)] = comment
}

// ToByteCode encodes every instruction into its 32-bit word.
func (o *OpCodeProgram) ToByteCode() []int {
	codes := make([]int, 0, len(o.Instructions))
	for _, instr := range o.Instructions {
		codes = append(codes, instr.ToByteCode())
	}
	return codes
}

// GetStaticArea collects the integer values held in the static symbol
// table; the VM copies them into its static memory region at start-up.
func (o *OpCodeProgram) GetStaticArea(taProgram *translator.TAProgram) []int {
	values := make([]int, 0, len(taProgram.StaticTable.Symbols))
	for _, sym := range taProgram.StaticTable.Symbols {
		value, err := strconv.Atoi(sym.Lexeme.Value)
		if err != nil {
			panic(err)
		}
		values = append(values, value)
	}
	return values
}
================================================
FILE: gen/opcode_test.go
================================================
package gen
import (
"github.com/magiconair/properties/assert"
"reflect"
"testing"
"tinyscript/gen/operand"
symbol2 "tinyscript/translator/symbol"
)
// TestAdd round-trips a three-register ADD through encode/decode.
func TestAdd(t *testing.T) {
	a := NewInstruction(ADD)
	a.AddOperand(operand.S2)
	a.AddOperand(operand.S0)
	a.AddOperand(operand.S1)
	AssertSameInstruction(t, a, FromByCode(a.ToByteCode()))
}

// TestMult round-trips a two-register MULT (no destination register).
func TestMult(t *testing.T) {
	a := NewInstruction(MULT)
	a.AddOperand(operand.S0)
	a.AddOperand(operand.S1)
	AssertSameInstruction(t, a, FromByCode(a.ToByteCode()))
}

// TestNewJumpInstruction round-trips a JUMP with a resolved label offset.
func TestNewJumpInstruction(t *testing.T) {
	a := NewInstruction(JUMP)
	label := operand.NewLabel("L0")
	a.AddOperand(label)
	label.SetOffset(100)
	AssertSameInstruction(t, a, FromByCode(a.ToByteCode()))
}

// TestJR round-trips a JR (call jump) with a resolved label offset.
func TestJR(t *testing.T) {
	a := NewInstruction(JR)
	label := operand.NewLabel("L0")
	a.AddOperand(label)
	label.SetOffset(100)
	AssertSameInstruction(t, a, FromByCode(a.ToByteCode()))
}

// TestSW round-trips SaveToMemory with a negative symbol offset
// (encoded as SW ... SP 100 because SaveToMemory negates the offset).
func TestSW(t *testing.T) {
	symbol := symbol2.NewSymbol(symbol2.SYMBOL_IMMEDIATE)
	symbol.Offset = -100
	a := SaveToMemory(operand.S0, symbol)
	AssertSameInstruction(t, a, FromByCode(a.ToByteCode()))
}

// TestSW1 round-trips SaveToMemory with a positive symbol offset.
func TestSW1(t *testing.T) {
	symbol := symbol2.NewSymbol(symbol2.SYMBOL_IMMEDIATE)
	symbol.Offset = 100
	a := SaveToMemory(operand.S0, symbol)
	AssertSameInstruction(t, a, FromByCode(a.ToByteCode()))
}

// TestLW round-trips a load from the STATIC area (immediate symbol).
func TestLW(t *testing.T) {
	symbol := symbol2.NewSymbol(symbol2.SYMBOL_IMMEDIATE)
	symbol.Offset = 100
	a := LoadToRegister(operand.S0, symbol)
	AssertSameInstruction(t, a, FromByCode(a.ToByteCode()))
}

// TestLW2 round-trips a load from the stack (address symbol).
func TestLW2(t *testing.T) {
	symbol := symbol2.NewSymbol(symbol2.SYMBOL_ADDRESS)
	symbol.Offset = 100
	a := LoadToRegister(operand.S0, symbol)
	AssertSameInstruction(t, a, FromByCode(a.ToByteCode()))
}

// TestSP round-trips an immediate stack-pointer adjustment.
func TestSP(t *testing.T) {
	a := NewImmediateInstruction(ADDI, operand.SP, operand.NewImmediateNumber(100))
	AssertSameInstruction(t, a, FromByCode(a.ToByteCode()))
}

// TestBNE round-trips a conditional branch with a resolved label.
func TestBNE(t *testing.T) {
	a := NewBNEInstruction(operand.S0, operand.S1, "L0")
	a.GetOperand(2).(*operand.Label).SetOffset(100)
	AssertSameInstruction(t, a, FromByCode(a.ToByteCode()))
}
// AssertSameInstruction checks that a decoded instruction b matches the
// original a, operand by operand. Labels need special handling: encoding
// keeps only the numeric offset, so a Label in a is compared against the
// bare Offset that decoding produces.
func AssertSameInstruction(t *testing.T, a, b *Instruction) {
	assert.Equal(t, a.Code, b.Code)
	assert.Equal(t, len(a.OpList), len(b.OpList))
	for i, av := range a.OpList {
		bv := b.GetOperand(i)
		// A Label encodes to its offset only; compare against the
		// decoded Offset instead of the Label itself.
		if reflect.ValueOf(av).Type().String() == reflect.TypeOf(&operand.Label{}).String() {
			assert.Equal(t, bv, av.(*operand.Label).Offset)
		}else {
			assert.Equal(t, bv, av)
		}
		// Per-field re-checks by operand type (partly redundant with the
		// whole-value comparison above, but kept for clearer failures).
		if reflect.ValueOf(av).Type().String() == reflect.TypeOf(&operand.ImmediateNumber{}).String() {
			assert.Equal(t, av.(*operand.ImmediateNumber).Value, bv.(*operand.ImmediateNumber).Value)
		} else if reflect.ValueOf(av).Type().String() == reflect.TypeOf(&operand.Offset{}).String() {
			assert.Equal(t, av.(*operand.Offset).Offset, bv.(*operand.Offset).Offset)
		} else if reflect.ValueOf(av).Type().String() == reflect.TypeOf(&operand.Register{}).String() {
			assert.Equal(t, av.(*operand.Register).Addr, bv.(*operand.Register).Addr)
			assert.Equal(t, av.(*operand.Register).Name, bv.(*operand.Register).Name)
		} else if reflect.ValueOf(av).Type().String() == reflect.TypeOf(&operand.Label{}).String() {
			assert.Equal(t, av.(*operand.Label).Offset.Offset, bv.(*operand.Offset).Offset)
		} else {
			panic("unsupported encode/decode type" + av.String())
		}
	}
}
================================================
FILE: gen/operand/immediate_number.go
================================================
package operand
import "fmt"
// Compile-time check that *ImmediateNumber satisfies Operand.
var _ Operand = (*ImmediateNumber)(nil)

// ImmediateNumber is an integer literal operand encoded directly inside
// the instruction word.
type ImmediateNumber struct {
	Value int
}

// NewImmediateNumber wraps value as an operand.
func NewImmediateNumber(value int) *ImmediateNumber {
	return &ImmediateNumber{Value: value}
}

// String renders the literal in decimal.
func (i *ImmediateNumber) String() string {
	return fmt.Sprintf("%d", i.Value)
}

// Typ reports TYPE_IMMEDIATE.
func (*ImmediateNumber) Typ() OperandType {
	return TYPE_IMMEDIATE
}
================================================
FILE: gen/operand/label.go
================================================
package operand
// Compile-time check that *Label satisfies Operand.
var _ Operand = (*Label)(nil)

// Label is a symbolic jump target. It embeds an *Offset that is filled in
// once the label's numeric position is known (see SetOffset).
type Label struct {
	Label string
	*Offset
}

// NewLabel creates a label whose offset is still unresolved (zero).
func NewLabel(label string) *Label {
	return &Label{
		Label:  label,
		Offset: NewOffset(0),
	}
}

// String renders the symbolic name, not the resolved offset.
func (l *Label) String() string {
	return l.Label
}

// Typ reports TYPE_LABEL.
func (*Label) Typ() OperandType {
	return TYPE_LABEL
}

// SetOffset resolves the label to a concrete numeric offset.
func (l *Label) SetOffset(offset int) {
	l.Offset = NewOffset(offset)
}
================================================
FILE: gen/operand/offset.go
================================================
package operand
import "fmt"
// Compile-time check that *Offset satisfies Operand.
var _ Operand = (*Offset)(nil)

// Offset is a signed displacement operand. The instruction encoding has
// no two's-complement field, so the sign is carried in bit 10 (0x400)
// with the magnitude in the low ten bits.
type Offset struct {
	Offset int
}

// NewOffset wraps a signed displacement.
func NewOffset(offset int) *Offset {
	return &Offset{Offset: offset}
}

// String renders the signed value in decimal.
func (o *Offset) String() string {
	return fmt.Sprintf("%d", o.Offset)
}

// GetEncodedOffset returns the sign-magnitude encoding: positive values
// pass through unchanged; zero and negatives set bit 0x400 and store the
// magnitude.
func (o *Offset) GetEncodedOffset() int {
	if o.Offset > 0 {
		return o.Offset
	}
	return 0x400 | -o.Offset
}

// DecodeOffset reverses GetEncodedOffset: bit 0x400 marks a negative
// value whose magnitude sits in the low ten bits.
func DecodeOffset(offset int) *Offset {
	if offset&0x400 != 0 {
		offset = -(offset & 0x3ff)
	}
	return NewOffset(offset)
}

// Typ reports TYPE_OFFSET.
func (*Offset) Typ() OperandType {
	return TYPE_OFFSET
}
================================================
FILE: gen/operand/oprand.go
================================================
package operand
// Operand is anything that can appear as an instruction operand:
// registers, immediate numbers, labels and offsets.
type Operand interface {
	String() string // textual form used in listings
	Typ() OperandType // operand category tag
}
================================================
FILE: gen/operand/register.go
================================================
package operand
import "fmt"
var _ Operand = &Register{}
var (
Registers = [31]*Register{}
ZERO = NewRegister("ZERO", 1)
PC = NewRegister("PC", 2)
SP = NewRegister("SP", 3)
STATIC = NewRegister("STATIC", 4)
RA = NewRegister("RA", 5)
S0 = NewRegister("S0", 10)
S1 = NewRegister("S1", 11)
S2 = NewRegister("S2", 12)
L0 = NewRegister("L0", 20)
)
type Register struct {
Addr byte
Name string
}
func NewRegister(name string, addr byte) *Register {
reg := &Register{Addr: addr, Name: name}
Registers[addr] = reg
return reg
}
func (reg *Register) Typ() OperandType {
return TYPE_REGISTER
}
func (reg *Register) String() string {
return reg.Name
}
func RegisterFromAddr(reg int) *Register {
if reg < 0 || reg >= len(Registers) {
panic(fmt.Sprintf("no register's address is %d", reg))
}
return Registers[reg]
}
================================================
FILE: gen/operand/types.go
================================================
package operand
// OperandType tags the concrete kind of an Operand.
type OperandType int

// Operand categories. Declared with an explicit OperandType (the original
// constants were untyped ints, which weakened the tag type).
const (
	TYPE_REGISTER  OperandType = iota // register operand
	TYPE_IMMEDIATE                    // immediate integer
	TYPE_LABEL                        // symbolic jump target
	TYPE_OFFSET                       // signed displacement
)
================================================
FILE: gen/types.go
================================================
package gen
// AddressingType selects how an instruction's operands are encoded,
// i.e. which register/offset fields are present in the 32-bit word.
type AddressingType int

const (
	ADDRESSING_TYPE_IMMEDIATE AddressingType = iota // one register + immediate value
	ADDRESSING_TYPE_REGISTER                        // up to three registers
	ADDRESSING_TYPE_JUMP                            // single jump offset
	ADDRESSING_TYPE_BRANCH                          // reserved; not used by the opcodes defined here
	ADDRESSING_TYPE_OFFSET                          // two registers + signed offset
)
================================================
FILE: go.mod
================================================
module tinyscript
go 1.14
require github.com/magiconair/properties v1.8.1
================================================
FILE: go.sum
================================================
github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4=
github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
================================================
FILE: lexer/alphabet.go
================================================
package lexer
import "regexp"
// Single-character class predicates used by the lexer; each pattern
// matches exactly one character.
var (
	ptnLetter  = regexp.MustCompile("^[a-zA-Z]$")
	ptnNumber  = regexp.MustCompile("^[0-9]$")
	ptnLiteral = regexp.MustCompile("^[_a-zA-Z0-9]$")
	// Operator characters. '-' is listed first so it cannot form a range:
	// the previous pattern "[+-\\*<>=!&|^%/]" contained the range
	// '+'..'\\', which wrongly classified digits and stray punctuation
	// (e.g. '0', ':', '?') as operators. ',' and ';' are now listed
	// explicitly because MakeOp tokenizes them (the old pattern matched
	// them only by accident through that range).
	ptnOperator = regexp.MustCompile(`^[-+*/<>=!&|^%,;]$`)
)

// IsLetter reports whether c is a single ASCII letter.
func IsLetter(c string) bool {
	return ptnLetter.MatchString(c)
}

// IsNumber reports whether c is a single decimal digit.
func IsNumber(c string) bool {
	return ptnNumber.MatchString(c)
}

// IsLiteral reports whether c may appear inside an identifier.
func IsLiteral(c string) bool {
	return ptnLiteral.MatchString(c)
}

// IsOperator reports whether c can start an operator token.
func IsOperator(c string) bool {
	return ptnOperator.MatchString(c)
}
================================================
FILE: lexer/alphabet_test.go
================================================
package lexer
import (
"github.com/magiconair/properties/assert"
"testing"
)
// TestAlphabet spot-checks the single-character classifiers.
func TestAlphabet(t *testing.T) {
	assert.Equal(t, IsLetter("a"), true)
	assert.Equal(t, IsLiteral("a"), true)
	assert.Equal(t, IsNumber("2"), true)
	assert.Equal(t, IsOperator("*"), true)
	assert.Equal(t, IsOperator("^"), true)
	assert.Equal(t, IsOperator("-"), true)
	assert.Equal(t, IsOperator("="), true)
	assert.Equal(t, IsOperator("/"), true)
	assert.Equal(t, IsOperator("%"), true)
}
================================================
FILE: lexer/keywords.go
================================================
package lexer
// KeyWords is the set of reserved words of the language.
var KeyWords = map[string]bool{
	"var":    true,
	"if":     true,
	"else":   true,
	"for":    true,
	"while":  true,
	"break":  true,
	"func":   true,
	"return": true,
}

// IsKeyword reports whether key is a reserved word.
func IsKeyword(key string) bool {
	_, ok := KeyWords[key]
	return ok
}
================================================
FILE: lexer/lexer.go
================================================
package lexer
import (
"bytes"
"io"
"os"
"path/filepath"
"tinyscript/lexer/util"
)
// EndToken is the sentinel the stream appends to mark end of input.
const EndToken = "$"

// Lexer turns a character stream into tokens. It embeds util.Stream for
// character-level Next/Peek/PutBack operations.
type Lexer struct {
	*util.Stream
	endToken string // sentinel that terminates analysis
}
// FromFile tokenizes the source file at path; any I/O failure panics.
func FromFile(path string) []*Token {
	absPath, err := filepath.Abs(path)
	if err != nil {
		panic(err)
	}
	file, err := os.Open(absPath)
	if err != nil {
		panic(err)
	}
	defer file.Close()
	return NewLexer(file, EndToken).Analyse()
}
// Analyse tokenizes an in-memory source string.
func Analyse(source string) []*Token {
	reader := bytes.NewBufferString(source)
	return NewLexer(reader, EndToken).Analyse()
}
// NewLexer wraps r in a character stream terminated by the sentinel et.
func NewLexer(r io.Reader, et string) *Lexer {
	// Pass the caller's terminator through to the stream; the original
	// hard-coded EndToken here and silently ignored et (every visible
	// caller passes "$" anyway, so behavior is unchanged for them).
	s := util.NewStream(r, et)
	return &Lexer{Stream: s, endToken: et}
}
// Analyse scans the stream and returns the token list. It skips
// whitespace and comments, dispatches to the Make* helpers on the first
// significant character, and stops at the end sentinel.
func (l *Lexer) Analyse() []*Token {
	tokens := make([]*Token, 0)
	for l.HasNext() {
		c := l.Next()
		if c == EndToken {
			break
		}
		lookahead := l.Peek()
		if c == " " || c == "\n" || c == "\t" {
			continue
		}
		if "/" == c {
			if lookahead == "/" {
				// line comment: discard up to end of line
				for l.HasNext() {
					if "\n" == l.Next() {
						break
					}
				}
				continue
			} else if lookahead == "*" {
				// block comment: discard up to the closing */
				valid := false
				for l.HasNext() {
					p := l.Next()
					if "*" == p && l.Peek() == "/" {
						l.Next()
						valid = true
						break
					}
				}
				if !valid {
					panic("source comment invalid")
				}
				continue
			}
			// BUGFIX: a lone '/' is the division operator. The original
			// code executed an unconditional continue here, dropping the
			// token from the stream; now it falls through to MakeOp.
		}
		if c == "{" || c == "}" || c == "(" || c == ")" {
			tokens = append(tokens, NewToken(BRACKET, c))
			continue
		}
		if c == `"` || c == `'` {
			l.PutBack(c)
			tokens = append(tokens, l.MakeString())
			continue
		}
		if IsLetter(c) {
			l.PutBack(c)
			tokens = append(tokens, l.MakeVarOrKeyword())
			continue
		}
		if IsNumber(c) {
			l.PutBack(c)
			tokens = append(tokens, l.MakeNumber())
			continue
		}
		// A leading +, - or . may start a signed/decimal number literal
		// (e.g. 3 * -5) when the previous token cannot end an expression.
		if (c == "+" || c == "-" || c == ".") && IsNumber(lookahead) {
			var lastToken *Token = nil
			if len(tokens) > 0 {
				lastToken = tokens[len(tokens)-1]
			}
			if nil == lastToken || !lastToken.IsValue() || lastToken.IsOperator() {
				l.PutBack(c)
				tokens = append(tokens, l.MakeNumber())
				continue
			}
		}
		if IsOperator(c) {
			l.PutBack(c)
			tokens = append(tokens, l.MakeOp())
			continue
		}
		panic("unexpected character" + c)
	}
	return tokens
}
// MakeString consumes a complete string literal, quotes included.
// State 0 reads the opening quote: ' selects state 1, anything else
// (the caller guarantees a quote character) selects state 2 for ".
// NOTE(review): there is no escape handling — the first matching quote
// always terminates the literal.
func (l *Lexer) MakeString() *Token {
	s := ""
	state := 0
	for ; l.HasNext(); {
		c := l.Next()
		switch state {
		case 0:
			if c == `'` {
				state = 1
			} else {
				state = 2
			}
			s += c
		case 1:
			// single-quoted: close on the next '
			if `'` == c {
				return NewToken(STRING, s+c)
			} else {
				s += c
			}
		case 2:
			// double-quoted: close on the next "
			if `"` == c {
				return NewToken(STRING, s+c)
			} else {
				s += c
			}
		}
	}
	// stream ended before the closing quote
	panic("make string failed")
}
// MakeVarOrKeyword consumes a run of identifier characters and classifies
// it as a keyword, a boolean literal, or a plain variable name.
func (l *Lexer) MakeVarOrKeyword() *Token {
	word := ""
	for l.HasNext() {
		next := l.Peek()
		if !IsLiteral(next) {
			break
		}
		word += next
		l.Next()
	}
	switch {
	case IsKeyword(word):
		return NewToken(KEYWORD, word)
	case "true" == word || "false" == word:
		return NewToken(BOOLEAN, word)
	default:
		return NewToken(VARIABLE, word)
	}
}
// MakeOp consumes one operator token. A single-character operator may
// combine with the following character into a two-character operator
// (e.g. "+="), in which case both characters are consumed; otherwise the
// second character is pushed back. ',' and ';' are emitted immediately.
// Characters that cannot start an operator are skipped, mirroring the
// original state machine's handling of stray input.
func (l *Lexer) MakeOp() *Token {
	// Every recognized two-character operator.
	doubles := map[string]bool{
		"++": true, "+=": true,
		"--": true, "-=": true,
		"*=": true, "/=": true,
		">=": true, ">>": true,
		"<=": true, "<<": true,
		"==": true, "!=": true,
		"&&": true, "&=": true,
		"||": true, "|=": true,
		"^^": true, "^=": true,
		"%=": true,
	}
	for l.HasNext() {
		head := l.Next()
		if head == "," || head == ";" {
			return NewToken(OPERATOR, head)
		}
		switch head {
		case "+", "-", "*", "/", ">", "<", "=", "!", "&", "|", "^", "%":
			// operator starter: try the two-character form below
		default:
			continue // not an operator starter; skip it
		}
		if !l.HasNext() {
			break // stream exhausted mid-operator
		}
		tail := l.Next()
		if doubles[head+tail] {
			return NewToken(OPERATOR, head+tail)
		}
		l.PutBack(tail)
		return NewToken(OPERATOR, head)
	}
	panic("makeOp failed")
}
// MakeNumber consumes a numeric literal and classifies it as INTEGER or
// FLOAT. It is a small DFA driven by the peeked character:
//   state 0  – start (zero / digit / sign / dot picks the next state)
//   state 1  – leading zero(s) seen
//   state 2  – integer digits
//   state 3  – sign seen; digits or a dot must follow
//   state 4  – '.' after an integer part
//   state 5  – '.' with no integer part yet (".5", "+.5")
//   state 20 – fractional digits
// The matched character is appended after the switch, so every state
// decision is made before the character is consumed.
func (l *Lexer) MakeNumber() *Token {
	state := 0
	s := ""
	for ; l.HasNext(); {
		lookahead := l.Peek()
		switch state {
		case 0:
			if "0" == lookahead {
				state = 1
			} else if IsNumber(lookahead) {
				state = 2
			} else if `+` == lookahead || `-` == lookahead {
				state = 3
			} else if lookahead == `.` {
				state = 5
			}
		case 1:
			// NOTE(review): "00" stays in state 1 and is accepted as an
			// INTEGER with leading zeros — confirm this is intended.
			if lookahead == "0" {
				state = 1
			} else if IsNumber(lookahead) {
				state = 2
			} else if lookahead == "." {
				state = 4
			} else {
				return NewToken(INTEGER, s)
			}
		case 2:
			if IsNumber(lookahead) {
				state = 2
			} else if lookahead == "." {
				state = 4
			} else {
				return NewToken(INTEGER, s)
			}
		case 3:
			if IsNumber(lookahead) {
				state = 2
			} else if lookahead == "." {
				state = 5
			} else {
				panic("unexpected character " + lookahead)
			}
		case 4:
			if "." == lookahead {
				panic("unexpected character" + lookahead)
			} else if IsNumber(lookahead) {
				state = 20
			} else {
				// an integer part followed by a bare '.' (e.g. "3.")
				return NewToken(FLOAT, s)
			}
		case 5:
			if IsNumber(lookahead) {
				state = 20
			} else {
				panic("unexpected character" + lookahead)
			}
		case 20:
			if IsNumber(lookahead) {
				state = 20
			} else if "." == lookahead {
				// a second dot inside the fraction is malformed
				panic("unexpected character" + lookahead)
			} else {
				return NewToken(FLOAT, s)
			}
		}
		l.Next()
		s += lookahead
	}
	panic("makeNumber failed")
}
================================================
FILE: lexer/lexer_test.go
================================================
package lexer
import (
"bytes"
"github.com/magiconair/properties/assert"
"regexp"
"strings"
"testing"
)
// TestLexer_MakeVarOrKeyword checks that identifiers, keywords and
// boolean literals are classified correctly.
func TestLexer_MakeVarOrKeyword(t *testing.T) {
	l := NewLexer(bytes.NewBufferString("if abc"), "$")
	token := l.MakeVarOrKeyword()
	token2 := NewLexer(bytes.NewBufferString("true abc"), EndToken).MakeVarOrKeyword()
	assert.Equal(t, token.Typ, KEYWORD)
	assert.Equal(t, token.Value, "if")
	assert.Equal(t, token2.Typ, BOOLEAN)
	assert.Equal(t, token2.Value, "true")
	l.Next() // consume the space separating "if" and "abc"
	token3 := l.MakeVarOrKeyword()
	assert.Equal(t, token3.Typ, VARIABLE)
	assert.Equal(t, token3.Value, "abc")
}

// TestLexer_MakeString checks that a quoted literal keeps its quotes.
func TestLexer_MakeString(t *testing.T) {
	token := NewLexer(bytes.NewBufferString(`"123"`), "$").MakeString()
	assert.Equal(t, token.Typ, STRING)
	assert.Equal(t, token.Value, `"123"`)
}

// TestLexer_MakeOp checks that a range of one- and two-character
// operators are all tokenized as OPERATOR.
func TestLexer_MakeOp(t *testing.T) {
	tests := []string{
		"+ xxx",
		"++mmm",
		"/=g",
		"==1",
		"&=3434",
		"&8888",
		"||xxxx",
		"^=111",
		"%79",
	}
	for _, test := range tests {
		token := NewLexer(bytes.NewBufferString(test), "$").MakeOp()
		assert.Equal(t, token.Typ, OPERATOR)
	}
}
// TestLexer_MakeNumber verifies that MakeNumber stops at the first
// non-numeric character and classifies the literal as INTEGER or FLOAT.
func TestLexer_MakeNumber(t *testing.T) {
	tests := []string{
		"+0 aa",
		"-0 aa",
		".3000 aa",
		".55 ww",
		"778.99 aa",
		"355 kkk",
		"-888*234aa",
	}
	for _, test := range tests {
		token := NewLexer(bytes.NewBufferString(test), "$").MakeNumber()
		// The expected lexeme is the raw input up to the first separator
		// ('*' or space). The previous code split token.Value with limit 1,
		// which returns the input unchanged and made the assertion vacuous.
		want := regexp.MustCompile("[* ]+").Split(test, 2)[0]
		assert.Equal(t, token.Value, want)
		if strings.Contains(token.Value, ".") {
			assert.Equal(t, token.Typ, FLOAT)
		} else {
			assert.Equal(t, token.Typ, INTEGER)
		}
	}
}
// TestLexer_Analyse checks full tokenization of a mixed expression.
func TestLexer_Analyse(t *testing.T) {
	source := `(w+c)^100.12==+100-30eee`
	lexer := NewLexer(bytes.NewBufferString(source), EndToken)
	tokens := lexer.Analyse()
	assert.Equal(t, len(tokens), 12)
	assert.Equal(t, tokens[0].Value, "(")
	assert.Equal(t, tokens[1].Value, "w")
	assert.Equal(t, tokens[2].Value, "+")
	assert.Equal(t, tokens[3].Value, "c")
	assert.Equal(t, tokens[4].Value, ")")
	assert.Equal(t, tokens[5].Value, "^")
	assert.Equal(t, tokens[6].Value, "100.12")
	assert.Equal(t, tokens[7].Value, "==")
	assert.Equal(t, tokens[8].Value, "+100")
	assert.Equal(t, tokens[9].Value, "-")
	assert.Equal(t, tokens[10].Value, "30")
	assert.Equal(t, tokens[11].Value, "eee")
}

// Test_Function checks tokenization of a function declaration followed
// by a call with a negative float argument.
func Test_Function(t *testing.T) {
	source := `func foo(a,b){
print(a+b)
}
foo(-100.0,100)`
	lexer := NewLexer(bytes.NewBufferString(source), EndToken)
	tokens := lexer.Analyse()
	assertToken(t, tokens[0], "func", KEYWORD)
	assertToken(t, tokens[1], "foo", VARIABLE)
	assertToken(t, tokens[2], "(", BRACKET)
	assertToken(t, tokens[3], "a", VARIABLE)
	assertToken(t, tokens[4], ",", OPERATOR)
	assertToken(t, tokens[5], "b", VARIABLE)
	assertToken(t, tokens[6], ")", BRACKET)
	assertToken(t, tokens[7], "{", BRACKET)
	assertToken(t, tokens[8], "print", VARIABLE)
	assertToken(t, tokens[9], "(", BRACKET)
	assertToken(t, tokens[10], "a", VARIABLE)
	assertToken(t, tokens[11], "+", OPERATOR)
	assertToken(t, tokens[12], "b", VARIABLE)
	assertToken(t, tokens[13], ")", BRACKET)
	assertToken(t, tokens[14], "}", BRACKET)
	assertToken(t, tokens[15], "foo", VARIABLE)
	assertToken(t, tokens[16], "(", BRACKET)
	assertToken(t, tokens[17], "-100.0", FLOAT)
	assertToken(t, tokens[18], ",", OPERATOR)
	assertToken(t, tokens[19], "100", INTEGER)
	assertToken(t, tokens[20], ")", BRACKET)
}

// TestDeleteComment checks that block comments are stripped before
// tokenization (only "a", "=", "1" remain).
func TestDeleteComment(t *testing.T) {
	source := `/*12324abdfda
34fa9kfjl*/a=1
`
	lexer := NewLexer(bytes.NewBufferString(source), EndToken)
	tokens := lexer.Analyse()
	assert.Equal(t, len(tokens), 3)
}

// assertToken checks both the value and the type of a token.
func assertToken(t *testing.T, token *Token, wantValue string, wantType TokenType) {
	assert.Equal(t, token.Typ, wantType)
	assert.Equal(t, token.Value, wantValue)
}

// TestFromFile checks lexing straight from a source file on disk.
func TestFromFile(t *testing.T) {
	tokens := FromFile("./../tests/function.ts")
	assert.Equal(t, len(tokens), 16)
}
================================================
FILE: lexer/token.go
================================================
package lexer
import "fmt"
// TokenType identifies the lexical category of a Token.
type TokenType int

const (
	KEYWORD  TokenType = 1
	VARIABLE TokenType = 2
	OPERATOR TokenType = 3
	BRACKET  TokenType = 4
	STRING   TokenType = 5
	FLOAT    TokenType = 6
	BOOLEAN  TokenType = 7
	INTEGER  TokenType = 8
)

// String returns a human-readable name for the token type. It panics
// on an unknown TokenType, which indicates a programmer error.
func (tt TokenType) String() string {
	switch tt {
	case KEYWORD:
		return "keyword"
	case VARIABLE:
		return "variable"
	case OPERATOR:
		return "operator"
	case BRACKET:
		return "bracket"
	case STRING:
		// was "string`" — stray backtick typo fixed
		return "string"
	case FLOAT:
		return "float"
	case BOOLEAN:
		return "boolean"
	case INTEGER:
		return "integer"
	}
	panic("unexpected token type")
}
// Token is a single lexical unit produced by the lexer.
type Token struct {
	Typ   TokenType
	Value string
}

// NewToken builds a Token of type t holding the lexeme v.
func NewToken(t TokenType, v string) *Token {
	return &Token{Typ: t, Value: v}
}

// IsVariable reports whether the token is an identifier.
func (t *Token) IsVariable() bool {
	return VARIABLE == t.Typ
}

// IsScalar reports whether the token is a literal value.
func (t *Token) IsScalar() bool {
	switch t.Typ {
	case FLOAT, BOOLEAN, INTEGER, STRING:
		return true
	}
	return false
}

// IsNumber reports whether the token is an integer or float literal.
func (t *Token) IsNumber() bool {
	switch t.Typ {
	case INTEGER, FLOAT:
		return true
	}
	return false
}

// IsOperator reports whether the token is an operator.
func (t *Token) IsOperator() bool {
	return OPERATOR == t.Typ
}

// String renders the token for debugging and error messages.
func (t *Token) String() string {
	return fmt.Sprintf("type:%v,value:%s", t.Typ, t.Value)
}

// IsValue reports whether the token can appear as an operand: an
// identifier or a literal.
func (t *Token) IsValue() bool {
	return t.IsVariable() || t.IsScalar()
}

// IsType reports whether the token names one of the built-in types.
func (t *Token) IsType() bool {
	switch t.Value {
	case "bool", "int", "float", "void", "string":
		return true
	}
	return false
}
================================================
FILE: lexer/util/stream.go
================================================
package util
import (
"bufio"
"container/list"
"io"
)
type Stream struct {
scanner *bufio.Scanner
queueCache *list.List
endToken string
isEnd bool
}
func NewStream(r io.Reader, et string) *Stream {
s := bufio.NewScanner(r)
s.Split(bufio.ScanRunes)
return &Stream{scanner: s, queueCache: list.New(), endToken: et, isEnd: false}
}
func (s *Stream) Next() string {
if s.queueCache.Len() != 0 {
e := s.queueCache.Front()
return s.queueCache.Remove(e).(string)
}
if s.scanner.Scan() {
return s.scanner.Text()
}
s.isEnd = true
return s.endToken
}
func (s *Stream) HasNext() bool {
if s.queueCache.Len() != 0 {
return true
}
if s.scanner.Scan() {
s.queueCache.PushBack(s.scanner.Text())
return true
}
if !s.isEnd {
return true
}
return false
}
func (s *Stream) Peek() string {
if s.queueCache.Len() != 0 {
return s.queueCache.Front().Value.(string)
}
if s.scanner.Scan() {
e := s.scanner.Text()
s.queueCache.PushBack(e)
return e
}
return s.endToken
}
func (s *Stream) PutBack(e string) {
s.queueCache.PushFront(e)
}
================================================
FILE: lexer/util/stream_test.go
================================================
package util
import (
"bytes"
"github.com/magiconair/properties/assert"
"testing"
)
// TestNewStream walks a four-rune stream through Next/Peek/PutBack and
// the end-of-input sentinel behavior (HasNext stays true until the
// sentinel has been returned by Next).
func TestNewStream(t *testing.T) {
	str := "abcd"
	s := NewStream(bytes.NewReader([]byte(str)), "$")
	assert.Equal(t, s.Next(), "a")
	assert.Equal(t, s.Next(), "b")
	assert.Equal(t, s.Peek(), "c")
	assert.Equal(t, s.Peek(), "c")
	s.PutBack("b")
	assert.Equal(t, s.Peek(), "b")
	assert.Equal(t, s.Next(), "b")
	assert.Equal(t, s.Next(), "c")
	assert.Equal(t, s.HasNext(), true, "hasnext failed")
	assert.Equal(t, s.Next(), "d")
	assert.Equal(t, s.Next(), "$")
	assert.Equal(t, s.Next(), "$")
	assert.Equal(t, s.HasNext(), false, "hasnext failed")
}
================================================
FILE: main.go
================================================
package main
// main is the program entry point. It is currently empty; the compiler
// pipeline is exercised through the package tests instead.
func main() {
}
================================================
FILE: parser/ast/ast.go
================================================
package ast
import (
"fmt"
"strings"
"tinyscript/lexer"
)
// ASTNode is the interface implemented by every AST node.
type ASTNode interface {
	// getters
	Lexeme() *lexer.Token // the token this AST node was built from
	Type() NodeType
	Label() string // human-readable tag for the node, mainly used when printing/logging the tree
	Children() []ASTNode
	GetChild(uint) ASTNode
	Parent() ASTNode
	Print(indent int)
	TypeLexeme() *lexer.Token // declared type of a variable / return type of a function; other node kinds leave it unset
	IsValueType() bool
	Prop(string) interface{}
	// setters
	AddChild(ASTNode)
	SetLexeme(*lexer.Token)
	SetTypeLexeme(*lexer.Token)
	SetType(NodeType)
	SetLabel(string)
	SetParent(ASTNode)
	SetProp(string, interface{})
}

// node is the shared base implementation embedded by every concrete
// AST node type in this package.
type node struct {
	parent     ASTNode
	children   []ASTNode
	label      string // human-readable tag
	typ        NodeType
	lexeme     *lexer.Token           // the lexical unit behind this node
	typeLexeme *lexer.Token           // e.g. for "func foo(int a)" the parameter node's typeLexeme is the "int" token
	prop       map[string]interface{} // extra properties for the symbol table; unused during parsing
}

// Compile-time check that *node satisfies ASTNode.
var _ ASTNode = &node{}

// MakeNode returns an empty node with initialized children slice and
// property map.
func MakeNode() *node {
	return &node{children: make([]ASTNode, 0), prop: make(map[string]interface{})}
}
// Prop returns the symbol-table property stored under key (nil if absent).
func (n *node) Prop(key string) interface{} {
	return n.prop[key]
}

// SetProp stores a symbol-table property under key.
func (n *node) SetProp(key string, value interface{}) {
	n.prop[key] = value
}

// Lexeme returns the token this node was built from.
func (n *node) Lexeme() *lexer.Token {
	return n.lexeme
}

// TypeLexeme returns the declared-type token, if any.
func (n *node) TypeLexeme() *lexer.Token {
	return n.typeLexeme
}

// IsValueType reports whether the node is a variable or scalar leaf.
func (n *node) IsValueType() bool {
	return n.typ == ASTNODE_TYPE_VARIABLE || n.typ == ASTNODE_TYPE_SCALAR
}

// Type returns the node's kind.
func (n *node) Type() NodeType {
	return n.typ
}

// Label returns the node's human-readable tag.
func (n *node) Label() string {
	return n.label
}

// Children returns the node's child list.
func (n *node) Children() []ASTNode {
	return n.children
}

// GetChild returns the index-th child, or nil when index is out of range.
func (n *node) GetChild(index uint) ASTNode {
	if int(index) >= len(n.children) {
		return nil
	}
	return n.children[index]
}

// Parent returns the node's parent (nil for the root).
func (n *node) Parent() ASTNode {
	return n.parent
}

// AddChild appends node as the last child and sets its parent to n.
func (n *node) AddChild(node ASTNode) {
	node.SetParent(n)
	n.children = append(n.children, node)
}

// SetLexeme records the token this node was built from.
func (n *node) SetLexeme(lexeme *lexer.Token) {
	n.lexeme = lexeme
}

// SetTypeLexeme records the declared-type token.
func (n *node) SetTypeLexeme(lexeme *lexer.Token) {
	n.typeLexeme = lexeme
}

// SetType sets the node's kind.
func (n *node) SetType(t NodeType) {
	n.typ = t
}

// SetLabel sets the node's human-readable tag.
func (n *node) SetLabel(str string) {
	n.label = str
}

// SetParent sets the node's parent pointer.
func (n *node) SetParent(node ASTNode) {
	n.parent = node
}

// Print writes the subtree to stdout, two spaces per indent unit
// (children are printed with indent+2, so nesting widens quickly).
func (n *node) Print(indent int) {
	fmt.Printf("%s%s\n", strings.Repeat(" ", indent*2), n.label)
	for _, child := range n.children {
		child.Print(indent + 2)
	}
}
================================================
FILE: parser/ast/block.go
================================================
package ast
// DefaultBlock is a shared empty block node.
var DefaultBlock ASTNode = MakeBlock()

// Block is a brace-delimited list of statements.
type Block struct {
	*Stmt
}

// MakeBlock creates an empty block node labeled "block".
func MakeBlock() *Block {
	b := &Block{MakeStmt()}
	b.SetType(ASTNODE_TYPE_BLOCK)
	b.SetLabel("block")
	return b
}
// BlockParse parses a brace-delimited statement block: "{" Stmt* "}".
func BlockParse(stream *PeekTokenStream) ASTNode {
	stream.NextMatch("{")
	block := MakeBlock()
	for {
		stmt := StmtParse(stream)
		if stmt == nil {
			break
		}
		block.AddChild(stmt)
	}
	stream.NextMatch("}")
	return block
}
================================================
FILE: parser/ast/expr.go
================================================
package ast
import (
"tinyscript/lexer"
)
// Expr is a generic expression node (unary or binary).
type Expr struct {
	*node
}

// MakeExpr creates an empty expression node.
func MakeExpr() *Expr {
	e := &Expr{MakeNode()}
	return e
}

// NewExpr creates an expression node of the given kind whose lexeme
// and label are taken from token (typically an operator token).
func NewExpr(typ NodeType, token *lexer.Token) *Expr {
	expr := MakeExpr()
	expr.SetType(typ)
	expr.SetLexeme(token)
	expr.SetLabel(token.Value)
	return expr
}
// ExprHOF is a deferred sub-parser passed to combine and race.
type ExprHOF func() ASTNode

// Left-recursive form:  E(k) -> E(k) op(k) E(k+1) | E(k+1)
// Right-recursive form used here:
//   E(k)  -> E(k+1) E_(k)
//   E_(k) -> op(k) E(k+1) E_(k) | epsilon
// Highest precedence level:
//   E(t)  -> F E_(k) | U E_(k)
//   E_(t) -> op(t) E(t) E_(t) | epsilon
func E(stream *PeekTokenStream, k int) ASTNode {
	if k < PriorityTable.Size()-1 {
		return combine(
			stream,
			func() ASTNode {
				return E(stream, k+1)
			},
			func() ASTNode {
				return E_(stream, k)
			},
		)
	}
	// highest level: try a factor/call first, then a unary expression
	return race(
		stream,
		func() ASTNode {
			return combine(
				stream,
				func() ASTNode {
					return F(stream)
				},
				func() ASTNode {
					return E_(stream, k)
				},
			)
		},
		func() ASTNode {
			return combine(
				stream,
				func() ASTNode {
					return U(stream)
				},
				func() ASTNode {
					return E_(stream, k)
				},
			)
		},
	)
}
// U parses a unary expression: a parenthesized sub-expression or a
// prefix operator (++, --, !) applied to an expression. Returns nil
// when the lookahead starts neither form.
func U(stream *PeekTokenStream) ASTNode {
	token := stream.Peek()
	value := token.Value
	if value == "(" {
		stream.NextMatch("(")
		expr := E(stream, 0)
		stream.NextMatch(")")
		return expr
	} else if value == "++" || value == "--" || value == "!" {
		t := stream.Peek()
		stream.NextMatch(value)
		unaryExpr := NewExpr(ASTNODE_TYPE_UNARY_EXPR, t)
		unaryExpr.AddChild(E(stream, 0))
		return unaryExpr
	}
	return nil
}
// F parses a factor and, when it is immediately followed by "(",
// promotes it into a call expression.
func F(stream *PeekTokenStream) ASTNode {
	f := FactorParse(stream)
	if f == nil {
		return nil
	}
	if stream.HasNext() && "(" == stream.Peek().Value {
		return CallExprParse(f, stream)
	}
	return f
}
// E_ parses the right-recursive tail op(k) E(k+1) E_(k) of precedence
// level k; it returns nil (deriving epsilon) when the lookahead is not
// an operator of that level. The returned node holds only the right
// operand; combine later attaches the left one.
func E_(stream *PeekTokenStream, k int) ASTNode {
	token := stream.Peek()
	value := token.Value
	if PriorityTable.IsContain(k, value) {
		expr := NewExpr(ASTNODE_TYPE_BINARY_EXPR, stream.NextMatch(value))
		expr.AddChild(
			combine(
				stream,
				func() ASTNode {
					return E(stream, k+1)
				},
				func() ASTNode {
					return E_(stream, k)
				},
			),
		)
		return expr
	}
	return nil
}
// race returns the first non-nil result of af then bf, or nil when the
// stream is already exhausted.
func race(stream *PeekTokenStream, af ExprHOF, bf ExprHOF) ASTNode {
	if !stream.HasNext() {
		return nil
	}
	if result := af(); result != nil {
		return result
	}
	return bf()
}
// combine runs the two sub-parsers in sequence and merges the results.
// b, when non-nil, is a partial binary node produced by E_: its lexeme
// is the operator and its single child is the right operand. combine
// therefore builds the full node op(a, b.child). When either side is
// missing, the other (or nil) is returned unchanged.
func combine(stream *PeekTokenStream, af ExprHOF, bf ExprHOF) ASTNode {
	a := af()
	if nil == a {
		if stream.HasNext() {
			return bf()
		}
		return nil
	}
	var b ASTNode = nil
	if stream.HasNext() {
		b = bf()
		if nil == b {
			return a
		}
	} else {
		return a
	}
	// merge: operator from b, left operand a, right operand b's child
	expr := NewExpr(ASTNODE_TYPE_BINARY_EXPR, b.Lexeme())
	expr.AddChild(a)
	expr.AddChild(b.GetChild(0))
	return expr
}

// ExprParse parses a full expression starting at the lowest precedence.
func ExprParse(stream *PeekTokenStream) ASTNode {
	return E(stream, 0)
}
================================================
FILE: parser/ast/expr_call.go
================================================
package ast
// Compile-time check that *CallExpr satisfies ASTNode.
var _ ASTNode = &CallExpr{}

// CallExpr is a function-call node: child 0 is the callee factor,
// children 1..n are the argument expressions.
type CallExpr struct {
	*node
}

// MakeCallExpr creates an empty call node labeled "call".
func MakeCallExpr() *CallExpr {
	e := &CallExpr{MakeNode()}
	e.SetType(ASTNODE_TYPE_CALL_EXPR)
	e.SetLabel("call")
	return e
}

// CallExprParse parses "(" expr ("," expr)* ")" after the callee
// factor has already been consumed by the caller.
func CallExprParse(factor ASTNode, stream *PeekTokenStream) ASTNode {
	expr := MakeCallExpr()
	expr.AddChild(factor)
	stream.NextMatch("(")
	for p := ExprParse(stream); p != nil; p = ExprParse(stream) {
		expr.AddChild(p)
		if stream.Peek().Value != ")" {
			stream.NextMatch(",")
		}
	}
	stream.NextMatch(")")
	return expr
}
================================================
FILE: parser/ast/factor.go
================================================
package ast
import (
"tinyscript/lexer"
)
// Compile-time check that *Factor satisfies ASTNode.
var _ ASTNode = &Factor{}

// Factor is a leaf operand: a variable reference or a scalar literal.
type Factor struct {
	*node
}

// MakeFactor creates an empty factor node.
func MakeFactor() *Factor {
	return &Factor{MakeNode()}
}

// NewFactor consumes the next token and classifies the resulting node:
// VARIABLE tokens become variable nodes, everything else scalar nodes.
func NewFactor(stream *PeekTokenStream) *Factor {
	factor := &Factor{MakeNode()}
	token := stream.Next()
	factor.SetLexeme(token)
	factor.SetLabel(token.Value)
	if lexer.VARIABLE == token.Typ {
		factor.SetType(ASTNODE_TYPE_VARIABLE)
	} else {
		factor.SetType(ASTNODE_TYPE_SCALAR)
	}
	return factor
}
// FactorParse consumes the next token and wraps it in a Variable or
// Scalar leaf node; for any other token kind it consumes nothing and
// returns nil.
func FactorParse(stream *PeekTokenStream) ASTNode {
	token := stream.Peek()
	switch {
	case lexer.VARIABLE == token.Typ:
		stream.Next()
		variable := MakeVariable()
		variable.SetLabel(token.Value)
		variable.SetLexeme(token)
		return variable
	case token.IsScalar():
		stream.Next()
		scalar := MakeScalar()
		scalar.SetLabel(token.Value)
		scalar.SetLexeme(token)
		return scalar
	default:
		return nil
	}
}
================================================
FILE: parser/ast/func_args.go
================================================
package ast
// Compile-time check that *FuncArgs satisfies ASTNode.
// (Previously asserted &Factor{}, a copy-paste slip; factor.go already
// carries that assertion.)
var _ ASTNode = &FuncArgs{}

// FuncArgs holds a function declaration's formal parameter list; each
// child is a typed variable node.
type FuncArgs struct {
	*node
}

// MakeFuncArgs creates an empty parameter-list node labeled "args".
func MakeFuncArgs() *FuncArgs {
	s := &FuncArgs{MakeNode()}
	s.SetLabel("args")
	return s
}
// FuncArgsParse parses a typed formal-parameter list, e.g.
// "int a, int b" (the surrounding parentheses are handled by the
// caller). Each parameter node gets its declared-type token attached
// for later semantic analysis.
func FuncArgsParse(stream *PeekTokenStream) ASTNode {
	args := MakeFuncArgs()
	for stream.Peek().IsType() {
		typ := stream.Next()
		v := FactorParse(stream)
		v.SetTypeLexeme(typ) // record the parameter's declared type for semantic analysis
		args.AddChild(v)
		if stream.Peek().Value != ")" {
			stream.NextMatch(",")
		}
	}
	return args
}
================================================
FILE: parser/ast/priority_table.go
================================================
package ast
// PriorityTable is the operator-precedence table shared by the
// expression parser; a lower index binds less tightly.
var PriorityTable = NewPriorityTable()

// priorityTable groups binary operators by precedence level.
type priorityTable struct {
	table [][]string
}

// NewPriorityTable builds the fixed precedence table used by the parser.
func NewPriorityTable() *priorityTable {
	levels := [][]string{
		{"&", "|", "^"},
		{"==", "!=", ">", "<", ">=", "<="},
		{"+", "-"},
		{"*", "/"},
		{"<<", ">>"},
	}
	return &priorityTable{table: levels}
}

// Size returns the number of precedence levels.
func (pt *priorityTable) Size() int {
	return len(pt.table)
}

// Get returns the operators registered at the given precedence level.
func (pt *priorityTable) Get(level int) []string {
	return pt.table[level]
}

// IsContain reports whether key is one of the operators at level.
func (pt *priorityTable) IsContain(level int, key string) bool {
	for _, op := range pt.Get(level) {
		if op == key {
			return true
		}
	}
	return false
}
================================================
FILE: parser/ast/program.go
================================================
package ast
// Compile-time check that *Program satisfies ASTNode.
// (Previously asserted &Block{}, a copy-paste slip.)
var _ ASTNode = &Program{}

// Program is the root AST node: a top-level block of statements.
type Program struct {
	*Block
}

// MakeProgram creates an empty program node labeled "program".
func MakeProgram() *Program {
	b := &Program{MakeBlock()}
	b.SetLabel("program")
	return b
}
// ProgramParse parses every statement in the stream into a Program node.
func ProgramParse(stream *PeekTokenStream) ASTNode {
	p := MakeProgram()
	for {
		stmt := StmtParse(stream)
		if nil == stmt {
			break
		}
		p.AddChild(stmt)
	}
	return p
}
================================================
FILE: parser/ast/scalar.go
================================================
package ast
// Compile-time check that *Scalar satisfies ASTNode.
// (Previously asserted &Factor{}, a copy-paste slip; factor.go already
// carries that assertion.)
var _ ASTNode = &Scalar{}

// Scalar is a literal-value leaf node (e.g. 1.0, true, "s").
type Scalar struct {
	*Factor
}

// NewScalar consumes the next token from stream and wraps it as a
// factor; the node type is whatever NewFactor assigns from the token.
func NewScalar(stream *PeekTokenStream) *Scalar {
	return &Scalar{NewFactor(stream)}
}

// MakeScalar creates an empty node typed ASTNODE_TYPE_SCALAR.
func MakeScalar() *Scalar {
	s := &Scalar{MakeFactor()}
	s.SetType(ASTNODE_TYPE_SCALAR)
	return s
}
================================================
FILE: parser/ast/stmt.go
================================================
package ast
// DefaultStmt is a shared empty statement node.
var DefaultStmt ASTNode = MakeStmt()

// Stmt is the base type embedded by all statement nodes.
type Stmt struct {
	*node
}

// MakeStmt creates an empty statement node.
func MakeStmt() *Stmt {
	s := &Stmt{MakeNode()}
	return s
}
// StmtParse dispatches on the next one or two tokens to the matching
// statement parser; expressions are the fallback. Returns nil when the
// stream is exhausted.
func StmtParse(stream *PeekTokenStream) ASTNode {
	if !stream.HasNext() {
		return nil
	}
	token := stream.Next()
	lookahead := stream.Peek()
	stream.PutBack(1) // only dispatch here; the chosen parser re-reads the tokens

	if token.IsVariable() && lookahead != nil && "=" == lookahead.Value {
		return AssignStmtParse(stream)
	}
	switch token.Value {
	case "var":
		return DeclareStmtParse(stream)
	case "func":
		return FuncDeclareStmtParse(stream)
	case "return":
		return ReturnStmtParse(stream)
	case "if":
		return IfStmtParse(stream)
	case "{":
		return BlockParse(stream)
	default:
		return ExprParse(stream)
	}
}
================================================
FILE: parser/ast/stmt_assign.go
================================================
package ast
// DefaultAssignStmt is a shared empty assignment node.
var DefaultAssignStmt ASTNode = MakeAssignStmt()

// AssignStmt is an assignment: child 0 is the target factor, child 1
// the value expression; the node's lexeme is the "=" token.
type AssignStmt struct {
	*Stmt
}

// MakeAssignStmt creates an empty assignment node labeled "assign_stmt".
func MakeAssignStmt() *AssignStmt {
	v := &AssignStmt{MakeStmt()}
	v.SetType(ASTNODE_TYPE_ASSIGN_STMT)
	v.SetLabel("assign_stmt")
	return v
}

// AssignStmtParse parses "target = expr"; panics when no target factor
// is present.
func AssignStmtParse(stream *PeekTokenStream) ASTNode {
	stmt := MakeAssignStmt()
	tkn := stream.Peek()
	factor := FactorParse(stream)
	if nil == factor {
		panic("syntax error:" + tkn.String())
	}
	stmt.AddChild(factor)
	lexeme := stream.NextMatch("=")
	stmt.SetLexeme(lexeme)
	expr := ExprParse(stream)
	stmt.AddChild(expr)
	return stmt
}
================================================
FILE: parser/ast/stmt_assign_test.go
================================================
package ast
import (
"bytes"
"github.com/magiconair/properties/assert"
"testing"
"tinyscript/lexer"
)
// TestAssignStmtParse checks "i = 100*2" parses to the expected
// postfix form.
func TestAssignStmtParse(t *testing.T) {
	src := "i = 100*2"
	tokens := lexer.NewLexer(bytes.NewBufferString(src), lexer.EndToken).Analyse()
	stream := NewPeekTokenStream(tokens)
	stmt := AssignStmtParse(stream)
	assert.Equal(t, ToPostfixExpr(stmt), "i 100 2 * =")
}
================================================
FILE: parser/ast/stmt_declare.go
================================================
package ast
// DefaultDeclareStmt is a shared empty declaration node.
var DefaultDeclareStmt ASTNode = MakeDeclareStmt()

// DeclareStmt is a "var x = expr" declaration: child 0 is the declared
// variable, child 1 the initializer; the lexeme is the "=" token.
type DeclareStmt struct {
	*Stmt
}

// NewDeclareStmt is a thin alias of MakeDeclareStmt.
func NewDeclareStmt() *DeclareStmt {
	d := MakeDeclareStmt()
	return d
}

// MakeDeclareStmt creates an empty declaration node labeled "declare_stmt".
func MakeDeclareStmt() *DeclareStmt {
	v := &DeclareStmt{MakeStmt()}
	v.SetType(ASTNODE_TYPE_DECLARE_STMT)
	v.SetLabel("declare_stmt")
	return v
}

// DeclareStmtParse parses "var target = expr"; panics when no target
// factor follows "var".
func DeclareStmtParse(stream *PeekTokenStream) ASTNode {
	stmt := NewDeclareStmt()
	stream.NextMatch("var")
	tkn := stream.Peek()
	factor := FactorParse(stream)
	if nil == factor {
		panic("syntax error:" + tkn.String())
	}
	stmt.AddChild(factor)
	lexeme := stream.NextMatch("=")
	stmt.SetLexeme(lexeme)
	expr := ExprParse(stream)
	stmt.AddChild(expr)
	return stmt
}
================================================
FILE: parser/ast/stmt_declare_test.go
================================================
package ast
import (
"bytes"
"github.com/magiconair/properties/assert"
"testing"
"tinyscript/lexer"
)
// TestDeclareStmtParse checks "var i = 100*2" parses to the expected
// postfix form.
func TestDeclareStmtParse(t *testing.T) {
	src := "var i = 100*2"
	tokens := lexer.NewLexer(bytes.NewBufferString(src), lexer.EndToken).Analyse()
	stream := NewPeekTokenStream(tokens)
	stmt := DeclareStmtParse(stream)
	assert.Equal(t, ToPostfixExpr(stmt), "i 100 2 * =")
}
================================================
FILE: parser/ast/stmt_for.go
================================================
package ast
// Compile-time check that *ForStmt satisfies ASTNode.
var _ ASTNode = MakeForStmt()

// ForStmt is a for-loop node.
// NOTE(review): no ForStmtParse exists in this package yet — confirm
// whether for-loops are parsed elsewhere or still unimplemented.
type ForStmt struct {
	*Stmt
}

// MakeForStmt creates an empty for-loop node labeled "for".
func MakeForStmt() *ForStmt {
	v := &ForStmt{MakeStmt()}
	v.SetType(ASTNODE_TYPE_FOR_STMT)
	v.SetLabel("for")
	return v
}
================================================
FILE: parser/ast/stmt_func_declare.go
================================================
package ast
// Compile-time check that *FuncDeclareStmt satisfies ASTNode.
var _ ASTNode = MakeFuncDeclareStmt()

// FuncDeclareStmt is a function declaration: child 0 is the
// function-name variable, child 1 the parameter list, child 2 the body
// block.
type FuncDeclareStmt struct {
	*Stmt
}

// MakeFuncDeclareStmt creates an empty function-declaration node
// labeled "func".
func MakeFuncDeclareStmt() *FuncDeclareStmt {
	v := &FuncDeclareStmt{MakeStmt()}
	v.SetType(ASTNODE_TYPE_FUNCTION_DECLARE_STMT)
	v.SetLabel("func")
	return v
}

// FuncDeclareStmtParse parses e.g. "func add(int a, int b) int { ... }".
func FuncDeclareStmtParse(stream *PeekTokenStream) *FuncDeclareStmt {
	stream.NextMatch("func")
	fn := MakeFuncDeclareStmt()
	lexeme := stream.Peek()
	fnV := FactorParse(stream) // the function-name node
	fn.SetLexeme(lexeme)       // the function-name token doubles as this node's lexeme
	fn.AddChild(fnV)
	stream.NextMatch("(")
	args := FuncArgsParse(stream)
	stream.NextMatch(")")
	fn.AddChild(args)
	keyword := stream.Next()
	if !keyword.IsType() {
		panic("syntax error: unexpected " + keyword.Value)
	}
	fnV.SetTypeLexeme(keyword) // return type token, attached to the function-name node
	block := BlockParse(stream)
	fn.AddChild(block)
	return fn
}

// FuncVariable returns the function-name node (child 0).
func (f *FuncDeclareStmt) FuncVariable() ASTNode {
	return f.GetChild(0)
}

// Args returns the parameter-list node (child 1).
func (f *FuncDeclareStmt) Args() ASTNode {
	return f.GetChild(1)
}

// FuncType returns the declared return type, e.g. "int".
func (f *FuncDeclareStmt) FuncType() string {
	return f.FuncVariable().TypeLexeme().Value
}

// Block returns the body block (child 2).
func (f *FuncDeclareStmt) Block() ASTNode {
	return f.GetChild(2)
}
================================================
FILE: parser/ast/stmt_func_declare_test.go
================================================
package ast
import (
"github.com/magiconair/properties/assert"
"testing"
"tinyscript/lexer"
)
// TestFuncDeclareStmtParse checks the shape of a parsed function
// declaration: parameters, return type, name and body.
func TestFuncDeclareStmtParse(t *testing.T) {
	stream := NewPeekTokenStream(lexer.FromFile("./../../tests/function.ts"))
	stmt := StmtParse(stream).(*FuncDeclareStmt)
	args := stmt.Args()
	assert.Equal(t, args.GetChild(0).Lexeme().Value, "a")
	assert.Equal(t, args.GetChild(1).Lexeme().Value, "b")
	typ := stmt.FuncType()
	assert.Equal(t, typ, "int")
	funcVariable := stmt.FuncVariable()
	assert.Equal(t, funcVariable.Lexeme().Value, "add")
	block := stmt.Block()
	assert.Equal(t, block.GetChild(0).Lexeme().Value, "return")
}

// TestFunctionRecursion checks a recursive function's tree shape via
// breadth-first label dumps.
func TestFunctionRecursion(t *testing.T) {
	stream := NewPeekTokenStream(lexer.FromFile("./../../tests/recursion.ts"))
	stmt := StmtParse(stream).(*FuncDeclareStmt)
	assert.Equal(t, ToBFSString(stmt, 4), "func fact args block")
	assert.Equal(t, ToBFSString(stmt.Args(), 2), "args n")
	assert.Equal(t, ToBFSString(stmt.Block(), 3), "block if return")
}
================================================
FILE: parser/ast/stmt_if.go
================================================
package ast
// Compile-time check that *IfStmt satisfies ASTNode.
var _ ASTNode = MakeIfStmt()

// IfStmt is a conditional: child 0 the condition expression, child 1
// the then-block, optional child 2 either an else-block or a chained
// else-if IfStmt.
type IfStmt struct {
	*Stmt
}

// MakeIfStmt creates an empty if node labeled "if".
func MakeIfStmt() *IfStmt {
	v := &IfStmt{MakeStmt()}
	v.SetType(ASTNODE_TYPE_IF_STMT)
	v.SetLabel("if")
	return v
}

// IfStmtParse parses an if statement (entry point kept for symmetry
// with the other XxxStmtParse functions).
func IfStmtParse(stream *PeekTokenStream) ASTNode {
	return IfParse(stream)
}

// IfParse implements: IfStmt -> if ( Expr ) Block Tail
func IfParse(stream *PeekTokenStream) ASTNode {
	lexeme := stream.NextMatch("if")
	stream.NextMatch("(")
	ifStmt := MakeIfStmt()
	ifStmt.SetLexeme(lexeme)
	e := ExprParse(stream)
	ifStmt.AddChild(e)
	stream.NextMatch(")")
	block := BlockParse(stream)
	ifStmt.AddChild(block)
	tail := TailParse(stream)
	if tail != nil {
		ifStmt.AddChild(tail)
	}
	return ifStmt
}

// TailParse implements: Tail -> else Block | else IfStmt | epsilon
// NOTE(review): when "else" is followed by neither "{" nor "if", the
// consumed "else" is silently dropped and nil is returned — confirm
// whether this should be a syntax error instead.
func TailParse(stream *PeekTokenStream) ASTNode {
	if !stream.HasNext() || stream.Peek().Value != "else" {
		return nil
	}
	stream.NextMatch("else")
	lookahead := stream.Peek()
	if lookahead.Value == "{" {
		return BlockParse(stream)
	} else if lookahead.Value == "if" {
		return IfParse(stream)
	}
	return nil
}

// GetExpr returns the condition expression (child 0).
func (i *IfStmt) GetExpr() ASTNode {
	return i.GetChild(0)
}

// GetBlock returns the then-block (child 1).
func (i *IfStmt) GetBlock() ASTNode {
	return i.GetChild(1)
}

// GetElseBlock returns child 2 when it is a plain else-block, else nil.
func (i *IfStmt) GetElseBlock() ASTNode {
	block := i.GetChild(2)
	if block != nil && block.Type() == ASTNODE_TYPE_BLOCK {
		return block
	}
	return nil
}

// GetElseIfStmt returns child 2 when it is a chained else-if, else nil.
func (i *IfStmt) GetElseIfStmt() ASTNode {
	ifStmt := i.GetChild(2)
	if ifStmt != nil && ifStmt.Type() == ASTNODE_TYPE_IF_STMT {
		return ifStmt
	}
	return nil
}
================================================
FILE: parser/ast/stmt_if_test.go
================================================
package ast
import (
"bytes"
"github.com/magiconair/properties/assert"
"testing"
"tinyscript/lexer"
)
// TestIfStmtParse checks the condition and then-block of a simple if.
func TestIfStmtParse(t *testing.T) {
	stream := createTokenStream(`if(a){
a = 1
}`)
	stmt := IfStmtParse(stream)
	e := stmt.GetChild(0)
	block := stmt.GetChild(1)
	assignStmt := block.GetChild(0)
	assert.Equal(t, e.Lexeme().Value, "a")
	assert.Equal(t, assignStmt.Lexeme().Value, "=")
}

// createTokenStream lexes src into a peekable token stream.
func createTokenStream(src string) *PeekTokenStream {
	tokens := lexer.NewLexer(bytes.NewBufferString(src), lexer.EndToken).Analyse()
	stream := NewPeekTokenStream(tokens)
	return stream
}

// TestIfElseStmtParse checks that the else branch becomes child 2.
func TestIfElseStmtParse(t *testing.T) {
	stream := createTokenStream(`if(a){
a = 1
}else{
a = 2
a = a * 3
}`)
	stmt := IfStmtParse(stream)
	expr := stmt.GetChild(0)
	block := stmt.GetChild(1)
	assignStmt := block.GetChild(0)
	elseBlock := stmt.GetChild(2)
	assignStmt2 := elseBlock.GetChild(0)
	assert.Equal(t, expr.Lexeme().Value, "a")
	assert.Equal(t, assignStmt.Lexeme().Value, "=")
	assert.Equal(t, assignStmt2.Lexeme().Value, "=")
	assert.Equal(t, len(elseBlock.Children()), 2)
}
================================================
FILE: parser/ast/stmt_return.go
================================================
package ast
// Compile-time check that *ReturnStmt satisfies ASTNode.
var _ ASTNode = &ReturnStmt{}

// ReturnStmt is a return statement; its optional single child is the
// returned expression.
type ReturnStmt struct {
	*Stmt
}

// MakeReturnStmt creates an empty return node labeled "return".
func MakeReturnStmt() *ReturnStmt {
	v := &ReturnStmt{MakeStmt()}
	v.SetType(ASTNODE_TYPE_RETURN_STMT)
	v.SetLabel("return")
	return v
}

// ReturnStmtParse parses "return [expr]"; a bare return yields a node
// with no children.
func ReturnStmtParse(stream *PeekTokenStream) ASTNode {
	var lexeme = stream.NextMatch("return")
	var expr = ExprParse(stream)
	var stmt = MakeReturnStmt()
	stmt.SetLexeme(lexeme)
	if expr != nil {
		stmt.AddChild(expr)
	}
	return stmt
}
================================================
FILE: parser/ast/stream.go
================================================
package ast
import (
"fmt"
"tinyscript/lexer"
)
// PeekTokenStream is a token stream with lookahead and rewind over a
// fully materialized token slice.
type PeekTokenStream struct {
	tokens  []*lexer.Token // TODO: stream tokens instead of holding them all in memory
	current int            // index of the next unread token
}

// NewPeekTokenStream wraps tokens in a stream positioned at the first token.
func NewPeekTokenStream(tokens []*lexer.Token) *PeekTokenStream {
	return &PeekTokenStream{tokens: tokens}
}
// Next consumes and returns the next token, or nil when the stream is
// exhausted.
func (pt *PeekTokenStream) Next() *lexer.Token {
	if !pt.HasNext() {
		return nil
	}
	token := pt.tokens[pt.current]
	pt.current++
	return token
}

// HasNext reports whether at least one unread token remains.
func (pt *PeekTokenStream) HasNext() bool {
	return pt.current < len(pt.tokens)
}
// Peek returns the next token without consuming it, or nil when the
// stream is exhausted.
func (pt *PeekTokenStream) Peek() *lexer.Token {
	t := pt.Next()
	if nil == t {
		return nil
	}
	pt.current -= 1
	return t
}

// PutBack rewinds the stream by n tokens so they can be re-read.
// Panics when n exceeds the number of already-consumed tokens.
func (pt *PeekTokenStream) PutBack(n int) {
	if pt.current-n < 0 {
		panic("putback parameter is invalid")
	}
	pt.current -= n
}

// NextMatch consumes the next token and returns it when its value
// equals the argument; otherwise it panics with a syntax error.
// NOTE(review): panics with a nil dereference when the stream is
// already exhausted — confirm callers always check HasNext first.
func (pt *PeekTokenStream) NextMatch(value string) *lexer.Token {
	token := pt.Next()
	if token.Value != value {
		panic(fmt.Sprintf("syntax err: want value:%s,got %s", value, token.Value))
	}
	return token
}

// NextMatchType consumes the next token and returns it when its type
// equals the argument; otherwise it panics with a syntax error.
func (pt *PeekTokenStream) NextMatchType(typ lexer.TokenType) *lexer.Token {
	token := pt.Next()
	if token.Typ != typ {
		panic(fmt.Sprintf("syntax err: want type: %s,got %s", typ, token.Value))
	}
	return token
}
================================================
FILE: parser/ast/stream_test.go
================================================
package ast
import (
"bytes"
"github.com/magiconair/properties/assert"
"testing"
lexer2 "tinyscript/lexer"
)
// TestNewPeekTokenStream exercises Next/Peek/PutBack over "a+b*c".
func TestNewPeekTokenStream(t *testing.T) {
	tokens := lexer2.NewLexer(bytes.NewBufferString("a+b*c"), lexer2.EndToken).Analyse()
	peekts := NewPeekTokenStream(tokens)
	assert.Equal(t, peekts.HasNext(), true)
	assertToken(t, peekts.Next(), "a", lexer2.VARIABLE)
	assertToken(t, peekts.Next(), "+", lexer2.OPERATOR)
	assertToken(t, peekts.Peek(), "b", lexer2.VARIABLE)
	assertToken(t, peekts.Next(), "b", lexer2.VARIABLE)
	peekts.PutBack(3)
	assertToken(t, peekts.Peek(), "a", lexer2.VARIABLE)
	assertToken(t, peekts.Next(), "a", lexer2.VARIABLE)
}

// assertToken checks both the type and the value of a token.
func assertToken(t *testing.T, token *lexer2.Token, wantValue string, wantType lexer2.TokenType) {
	assert.Equal(t, token.Typ, wantType, "err detail:"+token.String())
	assert.Equal(t, token.Value, wantValue, "err detail:"+token.String())
}
================================================
FILE: parser/ast/type.go
================================================
package ast
// NodeType classifies AST nodes.
type NodeType int

const (
	ASTNODE_TYPE_BLOCK       NodeType = iota
	ASTNODE_TYPE_BINARY_EXPR          // e.g. 1+1
	ASTNODE_TYPE_UNARY_EXPR           // e.g. ++1
	ASTNODE_TYPE_CALL_EXPR
	ASTNODE_TYPE_VARIABLE
	ASTNODE_TYPE_SCALAR // e.g. 1.0, true
	ASTNODE_TYPE_IF_STMT
	ASTNODE_TYPE_WHILE_STMT
	ASTNODE_TYPE_FOR_STMT
	ASTNODE_TYPE_RETURN_STMT
	ASTNODE_TYPE_ASSIGN_STMT
	ASTNODE_TYPE_FUNCTION_DECLARE_STMT
	ASTNODE_TYPE_DECLARE_STMT
)

// NodeTypeStringMap maps each node kind to its display name.
var NodeTypeStringMap = map[NodeType]string{
	ASTNODE_TYPE_BLOCK:                 "block",
	ASTNODE_TYPE_BINARY_EXPR:           "binary_expr",
	ASTNODE_TYPE_UNARY_EXPR:            "unary_expr",
	ASTNODE_TYPE_CALL_EXPR:             "call_expr",
	ASTNODE_TYPE_VARIABLE:              "variable",
	ASTNODE_TYPE_SCALAR:                "scalar",
	ASTNODE_TYPE_IF_STMT:               "if_stmt",
	ASTNODE_TYPE_WHILE_STMT:            "while_stmt",
	ASTNODE_TYPE_FOR_STMT:              "for_stmt",
	ASTNODE_TYPE_RETURN_STMT:           "return_stmt",
	ASTNODE_TYPE_ASSIGN_STMT:           "assign_stmt",
	ASTNODE_TYPE_FUNCTION_DECLARE_STMT: "function_declare_stmt",
	ASTNODE_TYPE_DECLARE_STMT:          "declare_stmt",
}

// String returns the display name of the node kind, or the empty
// string for an unknown kind.
func (nt NodeType) String() string {
	name, ok := NodeTypeStringMap[nt]
	if !ok {
		return ""
	}
	return name
}
================================================
FILE: parser/ast/util.go
================================================
package ast
import (
"container/list"
"strings"
)
// ToPostfixExpr renders an expression subtree in postfix (RPN) order,
// e.g. "1+2*3" becomes "1 2 3 * +". Leaf nodes print their lexeme;
// interior nodes print their children left-to-right followed by their
// own lexeme, when they have one. (The superseded commented-out
// implementation that handled only binary expressions was removed.)
func ToPostfixExpr(node ASTNode) string {
	if node.Type() == ASTNODE_TYPE_SCALAR || node.Type() == ASTNODE_TYPE_VARIABLE {
		return node.Lexeme().Value
	}
	parts := make([]string, 0, len(node.Children()))
	for _, child := range node.Children() {
		parts = append(parts, ToPostfixExpr(child))
	}
	op := ""
	if nil != node.Lexeme() {
		op = node.Lexeme().Value
	}
	if len(op) > 0 {
		return strings.Join(parts, " ") + " " + op
	}
	return strings.Join(parts, " ")
}
// ToBFSString walks the tree breadth-first and joins the labels of the
// first max visited nodes with spaces; children beyond the cap are
// enqueued but never emitted.
func ToBFSString(node ASTNode, max int) string {
	l := list.New()
	l.PushBack(node)
	strs := []string{}
	for e, i := l.Front(), 0; nil != e && i < max; e = l.Front() {
		i += 1
		parent := l.Remove(e).(ASTNode)
		strs = append(strs, parent.Label())
		for _, child := range parent.Children() {
			l.PushBack(child)
		}
	}
	return strings.Join(strs, " ")
}
================================================
FILE: parser/ast/variable.go
================================================
package ast
// Compile-time check that *Variable satisfies ASTNode.
var _ ASTNode = &Variable{}

// Variable is an identifier-reference leaf node.
type Variable struct {
	*Factor
}

// NewVariable consumes the next token from stream and wraps it as a
// factor; the node type is whatever NewFactor assigns from the token.
func NewVariable(stream *PeekTokenStream) *Variable {
	return &Variable{NewFactor(stream)}
}

// MakeVariable creates an empty node typed ASTNODE_TYPE_VARIABLE.
func MakeVariable() *Variable {
	v := &Variable{MakeFactor()}
	v.SetType(ASTNODE_TYPE_VARIABLE)
	return v
}
================================================
FILE: parser/parser.go
================================================
package parser
import (
"tinyscript/lexer"
"tinyscript/parser/ast"
)
// Parser drives AST construction over a peekable token stream.
type Parser struct {
	stream *ast.PeekTokenStream
}

// Parse lexes source and parses it into a program AST.
func Parse(source string) ast.ASTNode {
	return NewParser(lexer.Analyse(source)).parse()
}

// ParseFromFile lexes the named file and parses it into a program AST.
func ParseFromFile(file string) ast.ASTNode {
	tokens := lexer.FromFile(file)
	return NewParser(tokens).parse()
}

// NewParser builds a Parser over the given token slice.
func NewParser(tokens []*lexer.Token) *Parser {
	return &Parser{stream: ast.NewPeekTokenStream(tokens)}
}

// parse parses the whole token stream as a program.
func (p *Parser) parse() ast.ASTNode {
	return ast.ProgramParse(p.stream)
}

// SimpleParse is a minimal recursive-descent parser for the grammar:
//   Expr  -> digit + Expr | digit
//   digit -> 0|1|2|....|9
// (comment typo "d|igit" fixed)
func (p *Parser) SimpleParse() ast.ASTNode {
	expr := ast.MakeExpr()
	scalar := ast.NewScalar(p.stream)
	if !p.stream.HasNext() {
		return scalar
	}
	expr.SetLexeme(p.stream.Peek())
	p.stream.NextMatch("+")
	expr.SetLabel("+")
	expr.SetType(ast.ASTNODE_TYPE_BINARY_EXPR)
	expr.AddChild(scalar)
	rightNode := p.SimpleParse()
	expr.AddChild(rightNode)
	return expr
}
================================================
FILE: parser/parser_test.go
================================================
package parser
import (
"bytes"
"github.com/magiconair/properties/assert"
"testing"
lexer "tinyscript/lexer"
"tinyscript/parser/ast"
)
// TestParser_Parse checks that SimpleParse builds a right-leaning tree
// for "1+2+3+4".
func TestParser_Parse(t *testing.T) {
	source := "1+2+3+4"
	parser := NewParser(lexer.NewLexer(bytes.NewBufferString(source), lexer.EndToken).Analyse())
	expr := parser.SimpleParse()
	assert.Equal(t, len(expr.Children()), 2)
	v1 := expr.GetChild(0)
	assert.Equal(t, v1.Lexeme().Value, "1")
	assert.Equal(t, expr.Lexeme().Value, "+")
	e2 := expr.GetChild(1)
	v2 := e2.GetChild(0)
	assert.Equal(t, v2.Lexeme().Value, "2")
	assert.Equal(t, e2.Lexeme().Value, "+")
	e3 := e2.GetChild(1)
	v3 := e3.GetChild(0)
	assert.Equal(t, v3.Lexeme().Value, "3")
	assert.Equal(t, e3.Lexeme().Value, "+")
	v4 := e3.GetChild(1)
	assert.Equal(t, v4.Lexeme().Value, "4")
}

// createExpr lexes src and parses it as a single expression.
func createExpr(src string) ast.ASTNode {
	tokens := lexer.NewLexer(bytes.NewBufferString(src), lexer.EndToken).Analyse()
	stream := ast.NewPeekTokenStream(tokens)
	return ast.ExprParse(stream)
}

// TestSimple checks the postfix rendering of chained "+".
func TestSimple(t *testing.T) {
	expr := createExpr("1+1+1")
	assert.Equal(t, ast.ToPostfixExpr(expr), "1 1 1 + +")
}

// TestSimple1 checks string-literal comparison expressions.
func TestSimple1(t *testing.T) {
	expr := createExpr(`"123" == ""`)
	assert.Equal(t, ast.ToPostfixExpr(expr), `"123" "" ==`)
}

// TestComplex checks operator precedence and parentheses via postfix dumps.
func TestComplex(t *testing.T) {
	expr1 := createExpr("1+2*3")
	expr2 := createExpr("1*2+3")
	e3 := createExpr("10 * (7+4)")
	e4 := createExpr("(1*2!=7)==3!=4*5+6")
	assert.Equal(t, ast.ToPostfixExpr(expr1), "1 2 3 * +")
	assert.Equal(t, ast.ToPostfixExpr(expr2), "1 2 * 3 +")
	assert.Equal(t, ast.ToPostfixExpr(e3), "10 7 4 + *")
	assert.Equal(t, ast.ToPostfixExpr(e4), "1 2 * 7 != 3 4 5 * 6 + != ==")
}
================================================
FILE: tests/add.ts
================================================
func add(int a, int b) int {
return a + b
}
func main() void {
add(10, 20)
return
}
================================================
FILE: tests/complex-if.ts
================================================
if(a == 1) {
b = 100
} else if(a == 2) {
b = 500
} else if(a == 3) {
b = a * 1000
} else {
b = -1
}
================================================
FILE: tests/fact2.ts
================================================
func fact(int n) int {
if(n == 0) {
return 1
}
return fact(n-1) * n
}
func main() void {
return fact(2)
}
================================================
FILE: tests/fact5.ts
================================================
func fact(int n) int {
if(n == 0) {
return 1
}
return fact(n-1) * n
}
func main() void {
return fact(5)
}
================================================
FILE: tests/function.ts
================================================
func add(int a,int b)int{
return a + b
}
================================================
FILE: tests/recursion.ts
================================================
func fact(int n)int{
if (n ==0){
return 1
}
return fact(n-1)*n
}
================================================
FILE: translator/static_table_test.go
================================================
package translator
import (
"github.com/magiconair/properties/assert"
"testing"
"tinyscript/parser"
)
// TestStaticTable checks that translating a program with two distinct
// constants (1 and 5) yields a static table of size 2.
func TestStaticTable(t *testing.T) {
	source := `if(a){a=1}else{b=a+1*5}`
	node := parser.Parse(source)
	program := NewTranslator().Translate(node)
	assert.Equal(t, program.StaticTable.Size(), 2)
}
================================================
FILE: translator/symbol/static_table.go
================================================
package symbol
import (
"fmt"
"strings"
)
// StaticSymbolTable assigns a stable offset to each distinct immediate
// (constant) symbol; it backs the VM's static data area.
type StaticSymbolTable struct {
	OffsetMap     map[string]*Symbol // lexeme value -> first symbol registered for it
	OffsetCounter int                // next free offset in the static area
	Symbols       []*Symbol          // distinct symbols, in registration order
}
// NewStaticSymbolTable returns an empty static symbol table.
func NewStaticSymbolTable() *StaticSymbolTable {
	return &StaticSymbolTable{OffsetCounter: 0, OffsetMap: make(map[string]*Symbol), Symbols: make([]*Symbol, 0)}
}
// Add registers symbol in the static area. The first symbol seen for a
// given lexeme value is appended and receives the next free offset;
// later symbols with the same value are deduplicated and simply
// inherit that offset.
func (s *StaticSymbolTable) Add(symbol *Symbol) {
	lexval := symbol.Lexeme.Value
	// Single map lookup instead of a membership test followed by a
	// second read of the same key.
	if same, ok := s.OffsetMap[lexval]; ok {
		symbol.Offset = same.Offset
		return
	}
	s.OffsetMap[lexval] = symbol
	symbol.Offset = s.OffsetCounter
	s.OffsetCounter++
	s.Symbols = append(s.Symbols, symbol)
}
// Size returns the number of distinct symbols registered.
func (s *StaticSymbolTable) Size() int {
	return len(s.Symbols)
}
// String renders the table one symbol per line as "index:symbol".
func (s *StaticSymbolTable) String() string {
	var b strings.Builder
	for i, sym := range s.Symbols {
		if i > 0 {
			b.WriteByte('\n')
		}
		fmt.Fprintf(&b, "%d:%s", i, sym)
	}
	return b.String()
}
================================================
FILE: translator/symbol/symbol.go
================================================
package symbol
import "tinyscript/lexer"
// Symbol is one entry of a symbol table: an address (variable slot),
// an immediate (constant), or a label, per Typ.
type Symbol struct {
	Parent      *Table       // table that owns this symbol
	Lexeme      *lexer.Token // source token the symbol was created from
	Label       string       // label text; meaningful only for SYMBOL_LABEL
	Offset      int          // slot offset inside its activation record / static area
	LayerOffset int          // how many scopes up the declaration lives (see CloneFromSymbolTree)
	Typ         SymbolType
}
// NewSymbol returns a zero-valued symbol of the given type.
func NewSymbol(typ SymbolType) *Symbol {
	return &Symbol{Typ: typ}
}
// String renders labels by their label text and all other symbols by
// their source lexeme value.
func (s *Symbol) String() string {
	if SYMBOL_LABEL == s.Typ {
		return s.Label
	}
	return s.Lexeme.Value
}
// MakeAddressSymbol builds an address-typed symbol bound to lexeme at
// the given activation-record offset.
func MakeAddressSymbol(lexeme *lexer.Token, offset int) *Symbol {
	return &Symbol{Typ: SYMBOL_ADDRESS, Lexeme: lexeme, Offset: offset}
}
// MakeImmediateSymbol builds an immediate (constant) symbol for the
// given scalar lexeme.
func MakeImmediateSymbol(lexeme *lexer.Token) *Symbol {
	return &Symbol{Typ: SYMBOL_IMMEDIATE, Lexeme: lexeme}
}
// MakeLabelSymbol builds a label symbol carrying both the label text
// and the lexeme it was created for.
func MakeLabelSymbol(label string, lexeme *lexer.Token) *Symbol {
	return &Symbol{Typ: SYMBOL_LABEL, Lexeme: lexeme, Label: label}
}
================================================
FILE: translator/symbol/table.go
================================================
package symbol
import (
"fmt"
"tinyscript/lexer"
)
/*
A symbol table is the compile-time template for an activation record:
at runtime one table may correspond to many activation records (for
example under recursion).
*/
type Table struct {
	Parent      *Table
	Children    []*Table
	Symbols     []*Symbol
	TempIndex   int // counter for generated temporaries (p0, p1, ...)
	OffsetIndex int // next free slot offset inside the activation record
	Level       int // nesting depth assigned by AddChild
}
// NewTable returns an empty, parentless symbol table.
func NewTable() *Table {
	return &Table{
		Symbols:  make([]*Symbol, 0),
		Children: make([]*Table, 0),
	}
}
// AddSymbol appends symbol to this table and records the table as its
// parent.
func (t *Table) AddSymbol(symbol *Symbol) {
	t.Symbols = append(t.Symbols, symbol)
	symbol.Parent = t
}
// symbolByLexeme returns the symbol in this table (this table only,
// not ancestors) whose lexeme value matches, or nil when absent.
func (t *Table) symbolByLexeme(lexeme *lexer.Token) *Symbol {
	for i := range t.Symbols {
		if t.Symbols[i].Lexeme.Value == lexeme.Value {
			return t.Symbols[i]
		}
	}
	return nil
}
// Exists reports whether lexeme is declared in this table or in any
// ancestor table.
func (t *Table) Exists(lexeme *lexer.Token) bool {
	if t.symbolByLexeme(lexeme) != nil {
		return true
	}
	return t.Parent != nil && t.Parent.Exists(lexeme)
}
// CloneFromSymbolTree looks lexeme up in this table and then up the
// parent chain; on a hit it returns a shallow copy of the symbol with
// LayerOffset set to how many scopes up the match was found.
// Returns nil when the lexeme is not declared anywhere on the chain.
func (t *Table) CloneFromSymbolTree(lexeme *lexer.Token, layoutOffset int) *Symbol {
	symbl := t.symbolByLexeme(lexeme)
	if nil != symbl {
		// Copy by value so the caller can set LayerOffset without
		// mutating the declaration's own symbol.
		symbol := *symbl
		symbol.LayerOffset = layoutOffset
		return &symbol
	}
	if nil != t.Parent {
		return t.Parent.CloneFromSymbolTree(lexeme, layoutOffset+1)
	}
	return nil
}
// CreateSymbolByLexeme resolves lexeme to a symbol in this table,
// creating one if needed:
//   - scalar literals always become a fresh immediate symbol;
//   - identifiers reuse a local symbol if present, else clone one
//     found in an ancestor scope, else allocate a new address symbol
//     at the next free offset.
func (t *Table) CreateSymbolByLexeme(lexeme *lexer.Token) *Symbol {
	var symbol *Symbol = nil
	if lexeme.IsScalar() {
		symbol = MakeImmediateSymbol(lexeme)
		t.AddSymbol(symbol)
	} else {
		symbol2 := t.symbolByLexeme(lexeme)
		if nil == symbol2 {
			symbol = t.CloneFromSymbolTree(lexeme, 0)
			if symbol == nil {
				// Not declared anywhere: allocate a new local slot.
				symbol = MakeAddressSymbol(lexeme, t.OffsetIndex)
				t.OffsetIndex += 1
			}
			t.AddSymbol(symbol)
		} else {
			symbol = symbol2
		}
	}
	return symbol
}
// CreateVariable allocates a fresh compiler temporary named p<N> at
// the next free offset of this activation record and registers it.
func (t *Table) CreateVariable() *Symbol {
	// Single Sprintf instead of concatenating a literal with a
	// separately formatted fragment.
	lexeme := lexer.NewToken(lexer.VARIABLE, fmt.Sprintf("p%d", t.TempIndex))
	t.TempIndex++
	symbol := MakeAddressSymbol(lexeme, t.OffsetIndex)
	t.OffsetIndex++
	t.AddSymbol(symbol)
	return symbol
}
// AddChild links child under this table and assigns its nesting level.
func (t *Table) AddChild(child *Table) {
	child.Parent = t
	child.Level = t.Level + 1
	t.Children = append(t.Children, child)
}
// LocalSize is the number of slots allocated in this table's
// activation record so far.
func (t *Table) LocalSize() int {
	return t.OffsetIndex
}
// CreateLabel registers a label symbol (e.g. a function entry label)
// under this table.
func (t *Table) CreateLabel(label string, lexeme *lexer.Token) {
	t.AddSymbol(MakeLabelSymbol(label, lexeme))
}
================================================
FILE: translator/symbol/table_test.go
================================================
package symbol
import (
"github.com/magiconair/properties/assert"
"testing"
"tinyscript/lexer"
)
// TestSymbolTable: labels take no record slot; the temporary and the
// reused "foo" identifier share bookkeeping so only one slot is used.
func TestSymbolTable(t *testing.T) {
	table := NewTable()
	table.CreateLabel("L0", lexer.NewToken(lexer.VARIABLE, "foo"))
	table.CreateVariable()
	table.CreateSymbolByLexeme(lexer.NewToken(lexer.VARIABLE, "foo"))
	assert.Equal(t, table.LocalSize(), 1)
}

// TestTableChain: Exists resolves identifiers through the parent chain.
func TestTableChain(t *testing.T) {
	table := NewTable()
	table.CreateSymbolByLexeme(lexer.NewToken(lexer.VARIABLE, "a"))
	childTable := NewTable()
	table.AddChild(childTable)
	childChildTable := NewTable()
	childTable.AddChild(childChildTable)
	assert.Equal(t, childChildTable.Exists(lexer.NewToken(lexer.VARIABLE, "a")), true)
	assert.Equal(t, childTable.Exists(lexer.NewToken(lexer.VARIABLE, "a")), true)
}

// TestOffset: immediates take no address slot; clones from an ancestor
// keep the ancestor's offset but record the layer distance.
func TestOffset(t *testing.T) {
	table := NewTable()
	table.CreateSymbolByLexeme(lexer.NewToken(lexer.INTEGER, "100"))
	symbola := table.CreateSymbolByLexeme(lexer.NewToken(lexer.VARIABLE, "a"))
	symbolb := table.CreateSymbolByLexeme(lexer.NewToken(lexer.VARIABLE, "b"))
	childTable := NewTable()
	table.AddChild(childTable)
	anotherSymbolB := childTable.CreateSymbolByLexeme(lexer.NewToken(lexer.VARIABLE, "b"))
	symbolC := childTable.CreateSymbolByLexeme(lexer.NewToken(lexer.VARIABLE, "c"))
	assert.Equal(t, symbola.Offset, 0)
	assert.Equal(t, symbolb.Offset, 1)
	assert.Equal(t, anotherSymbolB.Offset, 1)
	assert.Equal(t, anotherSymbolB.LayerOffset, 1)
	assert.Equal(t, symbolC.Offset, 0)
	assert.Equal(t, symbolC.LayerOffset, 0)
}
================================================
FILE: translator/symbol/types.go
================================================
package symbol
// SymbolType classifies symbol-table entries.
type SymbolType int

const (
	SYMBOL_ADDRESS   SymbolType = iota // a variable slot in an activation record
	SYMBOL_IMMEDIATE                   // a constant (scalar literal)
	SYMBOL_LABEL                       // a code label (e.g. function entry)
)
// String returns a lowercase debug name for the symbol type; panics on
// values outside the declared enum.
func (s SymbolType) String() string {
	switch s {
	case SYMBOL_ADDRESS:
		return "symbol_address"
	case SYMBOL_IMMEDIATE:
		return "symbol_immediate"
	case SYMBOL_LABEL:
		return "symbol_label"
	}
	panic("unknown symbol type")
}
================================================
FILE: translator/symbol/util.go
================================================
package symbol
================================================
FILE: translator/tainstruction.go
================================================
package translator
import (
"fmt"
"tinyscript/translator/symbol"
)
// TAInstruction is one three-address-code instruction; the meaning of
// Arg1/Arg2/Result/Op depends on Typ (see String for the rendering).
type TAInstruction struct {
	Arg1   interface{}    // first operand (symbol, label string, or int, per Typ)
	Arg2   interface{}    // second operand; nil for unary forms
	Op     string         // operator text for assignments
	Result *symbol.Symbol // destination of an assignment
	Typ    TAInstructionType
	Label  string
}
// NewTAInstruction builds an instruction from its parts; Label is left
// empty.
func NewTAInstruction(typ TAInstructionType, result *symbol.Symbol, op string, arg1 interface{}, arg2 interface{}) *TAInstruction {
	return &TAInstruction{Arg1: arg1, Arg2: arg2, Op: op, Result: result, Typ: typ}
}
// String renders the instruction in the human-readable assembly-like
// form that the translator tests assert against. Panics on an unknown
// instruction type.
func (t TAInstruction) String() string {
	switch t.Typ {
	case TAINSTR_TYPE_ASSIGN:
		if nil != t.Arg2 {
			// Binary form: result = arg1 op arg2.
			return fmt.Sprintf("%v = %v %v %v", t.Result, t.Arg1, t.Op, t.Arg2)
		}
		// Copy form: result = arg1.
		return fmt.Sprintf("%v = %v", t.Result, t.Arg1)
	case TAINSTR_TYPE_IF:
		return fmt.Sprintf("IF %v ELSE %v", t.Arg1, t.Arg2)
	case TAINSTR_TYPE_GOTO:
		return fmt.Sprintf("GOTO %v", t.Arg1)
	case TAINSTR_TYPE_LABEL:
		return fmt.Sprintf("%v:", t.Arg1)
	case TAINSTR_TYPE_FUNC_BEGIN:
		return "FUNC_BEGIN"
	case TAINSTR_TYPE_RETURN:
		// Arg1 may hold a typed-nil *symbol.Symbol, so a plain
		// `t.Arg1 != nil` test is insufficient here.
		if !IsNil(t.Arg1) {
			return fmt.Sprintf("RETURN %v", t.Arg1)
		}
		// Plain literal: Sprintf with no directives is a vet finding.
		return "RETURN"
	case TAINSTR_TYPE_PARAM:
		return fmt.Sprintf("PARAM %v %v", t.Arg1, t.Arg2)
	case TAINSTR_TYPE_SP:
		return fmt.Sprintf("SP %v", t.Arg1)
	case TAINSTR_TYPE_CALL:
		return fmt.Sprintf("CALL %v", t.Arg1)
	}
	panic("unknown opcode type")
}
================================================
FILE: translator/tainstruction_type.go
================================================
package translator
// TAInstructionType enumerates the three-address-code instruction
// kinds emitted by the translator.
type TAInstructionType int

const (
	TAINSTR_TYPE_ASSIGN TAInstructionType = iota // result = arg1 [op arg2]
	TAINSTR_TYPE_GOTO                            // unconditional jump
	TAINSTR_TYPE_IF                              // conditional branch to the else label
	TAINSTR_TYPE_LABEL                           // label definition
	TAINSTR_TYPE_CALL                            // function call
	TAINSTR_TYPE_RETURN                          // return, optionally with a value
	TAINSTR_TYPE_SP                              // stack-pointer adjustment
	TAINSTR_TYPE_PARAM                           // argument copy into the callee frame
	TAINSTR_TYPE_FUNC_BEGIN                      // function prologue marker
)
================================================
FILE: translator/taprogram.go
================================================
package translator
import (
"fmt"
"strings"
"tinyscript/translator/symbol"
)
// TAProgram is the translator's output: the three-address instruction
// list plus the static (constant) symbol table.
type TAProgram struct {
	Instructions []*TAInstruction
	LabelCounter int // next label number for AddLabel (L0, L1, ...)
	StaticTable  *symbol.StaticSymbolTable
}
// NewTAProgram returns an empty program with a fresh static table.
func NewTAProgram() *TAProgram {
	return &TAProgram{Instructions: make([]*TAInstruction, 0), StaticTable: symbol.NewStaticSymbolTable()}
}
// Add appends one instruction to the program.
func (t *TAProgram) Add(instr *TAInstruction) {
	t.Instructions = append(t.Instructions, instr)
}
// AddLabel appends a fresh label instruction (L0, L1, ...) to the
// program and returns it so callers can patch jumps to it.
func (t *TAProgram) AddLabel() *TAInstruction {
	label := fmt.Sprintf("L%d", t.LabelCounter)
	t.LabelCounter++
	// Pass the label as Arg1 directly and go through Add, for
	// consistency with every other emission site.
	taCode := NewTAInstruction(TAINSTR_TYPE_LABEL, nil, "", label, nil)
	t.Add(taCode)
	return taCode
}
// String renders the program one instruction per line.
func (t *TAProgram) String() string {
	var b strings.Builder
	for i, instr := range t.Instructions {
		if i > 0 {
			b.WriteByte('\n')
		}
		b.WriteString(instr.String())
	}
	return b.String()
}
// SetStaticSymbols walks the symbol-table tree and registers every
// SYMBOL_IMMEDIATE (constant) symbol in the program's static table,
// which assigns each distinct constant its static-area offset.
func (t *TAProgram) SetStaticSymbols(table *symbol.Table) {
	for _, v := range table.Symbols {
		if symbol.SYMBOL_IMMEDIATE == v.Typ {
			t.StaticTable.Add(v)
		}
	}
	// Recurse into nested scopes.
	for _, child := range table.Children {
		t.SetStaticSymbols(child)
	}
}
================================================
FILE: translator/translator.go
================================================
package translator
import (
"fmt"
"tinyscript/lexer"
"tinyscript/parser/ast"
"tinyscript/translator/symbol"
)
// Translator lowers an AST into three-address code; it is stateless.
type Translator struct {
}

// NewTranslator returns a ready-to-use Translator.
func NewTranslator() *Translator {
	return &Translator{}
}
/*
Translate walks the AST, building the symbol table as a side product,
and emits the three-address program from it. When a `main` function is
declared, a bootstrap sequence (SP down / CALL main / SP up) is
appended at the end of the program.
*/
func (t *Translator) Translate(node ast.ASTNode) *TAProgram {
	program := NewTAProgram()
	table := symbol.NewTable()
	for _, child := range node.Children() {
		t.TranslateStmt(program, child, table)
	}
	program.SetStaticSymbols(table)
	mainFn := lexer.NewToken(lexer.VARIABLE, "main")
	if table.Exists(mainFn) {
		table.CreateVariable() // slot for main's return value
		program.Add(NewTAInstruction(TAINSTR_TYPE_SP, nil, "", -table.LocalSize(), nil))
		program.Add(NewTAInstruction(TAINSTR_TYPE_CALL, nil, "", table.CloneFromSymbolTree(mainFn, 0), nil))
		program.Add(NewTAInstruction(TAINSTR_TYPE_SP, nil, "", table.LocalSize(), nil))
	}
	return program
}
// TranslateStmt dispatches one statement node to its specific
// translator; panics on node types that are not statements.
func (t *Translator) TranslateStmt(program *TAProgram, node ast.ASTNode, table *symbol.Table) {
	switch node.Type() {
	case ast.ASTNODE_TYPE_BLOCK:
		t.TranslateBlock(program, node, table)
	case ast.ASTNODE_TYPE_IF_STMT:
		t.TranslateIfStmt(program, node.(*ast.IfStmt), table)
	case ast.ASTNODE_TYPE_ASSIGN_STMT:
		t.TranslateAssignStmt(program, node, table)
	case ast.ASTNODE_TYPE_DECLARE_STMT:
		t.TranslateDeclareStmt(program, node, table)
	case ast.ASTNODE_TYPE_FUNCTION_DECLARE_STMT:
		t.TranslateFunctionDeclareStmt(program, node, table)
	case ast.ASTNODE_TYPE_RETURN_STMT:
		t.TranslateReturnStmt(program, node, table)
	case ast.ASTNODE_TYPE_CALL_EXPR:
		t.TranslateCallExpr(program, node, table)
	default:
		// Include a separator so the type name is readable; the old
		// message concatenated it directly ("unknown node typeXXX").
		panic("unknown node type: " + node.Type().String())
	}
}
// TranslateDeclareStmt translates `var x = expr`. Redeclaring an
// identifier already visible in scope is a syntax error.
func (t *Translator) TranslateDeclareStmt(program *TAProgram, node ast.ASTNode, table *symbol.Table) {
	lexeme := node.GetChild(0).Lexeme()
	if table.Exists(lexeme) {
		panic("Syntax Error, Identifier " + lexeme.Value + " is already defined")
	}
	assigned := table.CreateSymbolByLexeme(lexeme)
	expr := node.GetChild(1)
	addr := t.TranslateExpr(program, expr, table)
	program.Add(NewTAInstruction(TAINSTR_TYPE_ASSIGN, assigned, "=", addr, nil))
}
// TranslateAssignStmt translates `x = expr`, resolving or creating the
// target symbol and copying the expression's result into it.
func (t *Translator) TranslateAssignStmt(program *TAProgram, node ast.ASTNode, table *symbol.Table) {
	assigned := table.CreateSymbolByLexeme(node.GetChild(0).Lexeme())
	expr := node.GetChild(1)
	addr := t.TranslateExpr(program, expr, table)
	program.Add(NewTAInstruction(TAINSTR_TYPE_ASSIGN, assigned, "=", addr, nil))
}
/*
TranslateExpr translates an expression subtree and returns the symbol
holding its value. SDD:
E -> E1 op E2
E -> F
Each node caches its result symbol in its "addr" prop.
*/
func (t *Translator) TranslateExpr(program *TAProgram, node ast.ASTNode, table *symbol.Table) *symbol.Symbol {
	if node.IsValueType() {
		// Leaf (literal or identifier): resolve/allocate its symbol.
		addr := table.CreateSymbolByLexeme(node.Lexeme())
		node.SetProp("addr", addr)
		return addr
	} else if node.Type() == ast.ASTNODE_TYPE_CALL_EXPR {
		addr := t.TranslateCallExpr(program, node, table)
		node.SetProp("addr", addr)
		return addr
	} else if IsInstanceOfExpr(node) {
		// Post-order: translate children first so their "addr" props
		// are available below.
		for _, child := range node.Children() {
			t.TranslateExpr(program, child, table)
		}
		if node.Prop("addr") == nil {
			// Fresh temporary for this node's result.
			node.SetProp("addr", table.CreateVariable())
		}
		instr := NewTAInstruction(
			TAINSTR_TYPE_ASSIGN,
			node.Prop("addr").(*symbol.Symbol),
			node.Lexeme().Value,
			node.GetChild(0).Prop("addr").(*symbol.Symbol),
			node.GetChild(1).Prop("addr").(*symbol.Symbol),
		)
		program.Add(instr)
		return instr.Result
	}
	panic("unexpected node type :" + node.Type().String())
}
// TranslateBlock translates a lexical block inside a child symbol
// table. The block's first slot is a temporary whose lexeme is patched
// to the parent's frame size; the commented-out SP push/pop shows the
// originally planned per-block record management.
func (t *Translator) TranslateBlock(program *TAProgram, node ast.ASTNode, parent *symbol.Table) {
	table := symbol.NewTable()
	parent.AddChild(table)
	parentOffset := table.CreateVariable()
	parentOffset.Lexeme = lexer.NewToken(lexer.INTEGER, fmt.Sprintf("%d", parent.LocalSize()))
	//pushRecord := NewTAInstruction(TAINSTR_TYPE_SP, nil, "", nil, nil)
	//program.Add(pushRecord)
	for _, stmt := range node.Children() {
		t.TranslateStmt(program, stmt, table)
	}
	//popRecord := NewTAInstruction(TAINSTR_TYPE_SP, nil, "", nil, nil)
	//program.Add(popRecord)
	//
	//pushRecord.Arg1 = -parent.LocalSize()
	//popRecord.Arg1 = parent.LocalSize()
}
// TranslateIfStmt emits code for if / else-if / else chains: the
// condition, an IF (branch to the else label when false), the
// then-block, and — when an else part exists — a GOTO over it. Label
// operands are patched after the labels are allocated.
func (t *Translator) TranslateIfStmt(program *TAProgram, node *ast.IfStmt, table *symbol.Table) {
	expr := node.GetExpr()
	exprAddr := t.TranslateExpr(program, expr, table)
	ifOpCode := NewTAInstruction(TAINSTR_TYPE_IF, nil, "", exprAddr, nil)
	program.Add(ifOpCode)
	t.TranslateBlock(program, node.GetBlock(), table)
	var gotoInstr *TAInstruction = nil
	// A third child means there is an else/else-if part.
	if node.GetChild(2) != nil {
		gotoInstr = NewTAInstruction(TAINSTR_TYPE_GOTO, nil, "", nil, nil)
		program.Add(gotoInstr)
		labelEndIf := program.AddLabel()
		// Condition false: jump past the then-block.
		ifOpCode.Arg2 = labelEndIf.Arg1
	}
	if node.GetElseBlock() != nil {
		t.TranslateBlock(program, node.GetElseBlock(), table)
	} else if node.GetElseIfStmt() != nil {
		t.TranslateIfStmt(program, node.GetElseIfStmt().(*ast.IfStmt), table)
	}
	labelEnd := program.AddLabel()
	if node.GetChild(2) == nil {
		ifOpCode.Arg2 = labelEnd.Arg1
	} else {
		// The then-block skips the else part via this GOTO.
		gotoInstr.Arg1 = labelEnd.Arg1
	}
}
// TranslateFunctionDeclareStmt emits a function body: an entry label
// (registered in the parent scope under the function's name),
// FUNC_BEGIN, parameter symbols in a child table, then the translated
// body statements.
func (t *Translator) TranslateFunctionDeclareStmt(program *TAProgram, node ast.ASTNode, parent *symbol.Table) {
	label := program.AddLabel()
	table := symbol.NewTable()
	program.Add(NewTAInstruction(TAINSTR_TYPE_FUNC_BEGIN, nil, "", nil, nil))
	table.CreateVariable() // slot 0: return address
	label.Arg2 = node.Lexeme().Value
	fn := node.(*ast.FuncDeclareStmt)
	args := fn.Args()
	parent.AddChild(table)
	// Make the function callable by name: bind the label in the
	// enclosing scope.
	parent.CreateLabel(label.Arg1.(string), node.Lexeme())
	for _, arg := range args.Children() {
		table.CreateSymbolByLexeme(arg.Lexeme())
	}
	for _, child := range fn.Block().Children() {
		t.TranslateStmt(program, child, table)
	}
}
// TranslateCallExpr emits argument PARAMs, allocates a slot for the
// return value, then wraps the CALL in SP-down/SP-up adjustments.
// Returns the symbol that will hold the call's return value.
func (t *Translator) TranslateCallExpr(program *TAProgram, node ast.ASTNode, table *symbol.Table) *symbol.Symbol {
	//foo()
	factor := node.GetChild(0)
	//foo -> symbol(foo) L0
	//table.CreateVariable() // return address
	var l = make([]*TAInstruction, 0)
	for i := 1; i < len(node.Children()); i++ {
		expr := node.GetChild(uint(i))
		addr := t.TranslateExpr(program, expr, table)
		l = append(l, NewTAInstruction(TAINSTR_TYPE_PARAM, nil, "", addr, i-1))
	}
	// Arg2 is patched only after all arguments are translated, because
	// translating later arguments can grow the frame (LocalSize). The
	// +2 skips two bookkeeping slots — presumably return address and
	// return value; confirm against the code generator.
	for _, instr := range l {
		instr.Arg2 = table.LocalSize() + instr.Arg2.(int) + 2
		program.Add(instr)
	}
	returnValue := table.CreateVariable() // slot for the return value
	funcAddr := table.CloneFromSymbolTree(factor.Lexeme(), 0)
	if nil == funcAddr {
		panic("function " + factor.Lexeme().Value + " not found")
	}
	program.Add(NewTAInstruction(TAINSTR_TYPE_SP, nil, "", -table.LocalSize(), nil))
	program.Add(NewTAInstruction(TAINSTR_TYPE_CALL, nil, "", funcAddr, nil))
	program.Add(NewTAInstruction(TAINSTR_TYPE_SP, nil, "", table.LocalSize(), nil))
	return returnValue
}
// TranslateReturnStmt emits RETURN, with the translated expression's
// result symbol when the statement has a value (nil otherwise).
func (t *Translator) TranslateReturnStmt(program *TAProgram, node ast.ASTNode, table *symbol.Table) {
	var resultValue *symbol.Symbol = nil
	if node.GetChild(0) != nil {
		resultValue = t.TranslateExpr(program, node.GetChild(0), table)
	}
	program.Add(NewTAInstruction(TAINSTR_TYPE_RETURN, nil, "", resultValue, nil))
}
================================================
FILE: translator/translator_test.go
================================================
package translator
import (
"github.com/magiconair/properties/assert"
"testing"
"tinyscript/parser"
"tinyscript/translator/symbol"
)
// TestExprTranslator pins temporary allocation and evaluation order
// for a compound arithmetic expression.
func TestExprTranslator(t *testing.T) {
	source := `a+(b-c)+d*(b-c)*2`
	p := parser.Parse(source)
	exprNode := p.GetChild(0)
	translator := NewTranslator()
	table := symbol.NewTable()
	program := NewTAProgram()
	translator.TranslateExpr(program, exprNode, table)
	expected := `p0 = b - c
p1 = b - c
p2 = p1 * 2
p3 = d * p2
p4 = p0 + p3
p5 = a + p4`
	assert.Equal(t, program.String(), expected)
}

// TestAssignStmt pins the code emitted for a chained-multiply
// assignment.
func TestAssignStmt(t *testing.T) {
	source := "a=1.0*2.0*3.0"
	node := parser.Parse(source)
	translator := NewTranslator()
	program := translator.Translate(node)
	expected := `p0 = 2.0 * 3.0
p1 = 1.0 * p0
a = p1`
	assert.Equal(t, program.String(), expected)
}

// TestTranslator_TranslateDeclareStmt: `var` declarations emit the
// same code shape as assignments.
func TestTranslator_TranslateDeclareStmt(t *testing.T) {
	source := "var a=1.0*2.0*3.0"
	node := parser.Parse(source)
	translator := NewTranslator()
	program := translator.Translate(node)
	expected := `p0 = 2.0 * 3.0
p1 = 1.0 * p0
a = p1`
	assert.Equal(t, program.String(), expected)
}

// TestAssignStmt2: a constant assignment needs no temporaries.
func TestAssignStmt2(t *testing.T) {
	source := "a=1"
	node := parser.Parse(source)
	translator := NewTranslator()
	program := translator.Translate(node)
	assert.Equal(t, program.String(), "a = 1")
}
// TestBlock: sibling blocks each get their own scope; their
// temporaries reuse the same p1 name.
func TestBlock(t *testing.T) {
	sourc := `var a = 1
{
var b = 1 * 100
}
{
var b = a * 100
}`
	ast := parser.Parse(sourc)
	translator := NewTranslator()
	program := translator.Translate(ast)
	expected := `a = 1
p1 = 1 * 100
b = p1
p1 = a * 100
b = p1`
	assert.Equal(t, program.String(), expected)
}

// TestTranslator_TranslateIfStmt: a plain if branches to the end label
// when false.
func TestTranslator_TranslateIfStmt(t *testing.T) {
	source := `if(a){
b=1
}`
	astNode := parser.Parse(source)
	translator := NewTranslator()
	program := translator.Translate(astNode)
	expected := `IF a ELSE L0
b = 1
L0:`
	assert.Equal(t, program.String(), expected)
}

// TestTranslator_TranslateIfElseStmt: the then-block jumps over the
// else-block via GOTO.
func TestTranslator_TranslateIfElseStmt(t *testing.T) {
	source := `if(a){
b=1
}else{
b=2
}`
	astNode := parser.Parse(source)
	translator := NewTranslator()
	program := translator.Translate(astNode)
	expected := `IF a ELSE L0
b = 1
GOTO L1
L0:
b = 2
L1:`
	assert.Equal(t, program.String(), expected)
}
// TestTranslator_TranslateIfElseIfStmt pins the full label/GOTO
// structure of a 4-way if / else-if chain.
func TestTranslator_TranslateIfElseIfStmt(t *testing.T) {
	astNode := parser.ParseFromFile("../tests/complex-if.ts")
	translator := NewTranslator()
	program := translator.Translate(astNode)
	expected := `p0 = a == 1
IF p0 ELSE L0
b = 100
GOTO L5
L0:
p1 = a == 2
IF p1 ELSE L1
b = 500
GOTO L4
L1:
p2 = a == 3
IF p2 ELSE L2
p1 = a * 1000
b = p1
GOTO L3
L2:
b = -1
L3:
L4:
L5:`
	assert.Equal(t, program.String(), expected)
}

// TestSimpleFunction pins the entry label, prologue marker, and body
// of a one-expression function.
func TestSimpleFunction(t *testing.T) {
	node := parser.ParseFromFile("../tests/function.ts")
	translator := NewTranslator()
	program := translator.Translate(node)
	expected := `L0:
FUNC_BEGIN
p1 = a + b
RETURN p1`
	assert.Equal(t, program.String(), expected)
}

// TestRecursionFunc pins the PARAM/SP/CALL/SP calling sequence emitted
// for a self-recursive call.
func TestRecursionFunc(t *testing.T) {
	node := parser.ParseFromFile("../tests/recursion.ts")
	translator := NewTranslator()
	program := translator.Translate(node)
	expected := `L0:
FUNC_BEGIN
p1 = n == 0
IF p1 ELSE L1
RETURN 1
L1:
p2 = n - 1
PARAM p2 6
SP -5
CALL L0
SP 5
p4 = p3 * n
RETURN p4`
	assert.Equal(t, program.String(), expected)
}
================================================
FILE: translator/util.go
================================================
package translator
import (
"reflect"
"strings"
)
// IsInstanceOfExpr reports whether instance's dynamic type name
// contains "ast.Expr" (e.g. *ast.Expr). Type-name matching is fragile,
// but mirrors the original instanceof-style check.
func IsInstanceOfExpr(instance interface{}) bool {
	ival := reflect.ValueOf(instance)
	// strings.Contains is the idiomatic spelling of LastIndex != -1.
	return strings.Contains(ival.Type().String(), "ast.Expr")
}
// IsNil reports whether i is nil: either an untyped nil interface or a
// typed nil (pointer, map, slice, chan, func, ...) boxed in an
// interface. The previous implementation called reflect.Value.IsNil
// unconditionally, which panics for non-nilable kinds (ints, structs,
// strings) and for an untyped nil interface.
func IsNil(i interface{}) bool {
	if i == nil {
		return true
	}
	vi := reflect.ValueOf(i)
	switch vi.Kind() {
	case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan, reflect.Func, reflect.Interface, reflect.UnsafePointer:
		return vi.IsNil()
	}
	// Non-nilable kinds can never be nil.
	return false
}
================================================
FILE: translator/util_test.go
================================================
package translator
import (
"github.com/magiconair/properties/assert"
"testing"
"tinyscript/parser/ast"
)
// TestIsInstanceOf: a *ast.Expr boxed in an interface is recognized.
func TestIsInstanceOf(t *testing.T) {
	var i interface{}
	i = &ast.Expr{}
	assert.Equal(t, IsInstanceOfExpr(i), true)
}
================================================
FILE: vm/vm.go
================================================
package vm
import (
"log"
"tinyscript/gen"
"tinyscript/gen/operand"
)
// VM is the bytecode interpreter: a register file plus a flat memory
// holding the static area, the program, and the downward-growing stack.
type VM struct {
	Registers         [31]int  // register file, indexed by operand.*.Addr
	Memory            [4096]int
	EndProgramSection int // first memory index past the loaded program
	StartProgram      int // memory index where the program section begins
}
// NewVM builds a VM image: the static (constant) area is copied to the
// bottom of memory, immediately followed by the program opcodes.
// PC starts three words before the program's end — presumably at the
// generated SP/CALL main/SP bootstrap sequence; confirm against the
// code generator. The entry parameter is currently unused (see the
// commented-out mainStart line).
func NewVM(staticArea []int, opcodes []int, entry *int) *VM {
	vm := &VM{}
	i := 0
	for ; i < len(staticArea); i++ {
		vm.Memory[i] = staticArea[i]
	}
	j := i
	vm.StartProgram = i
	//mainStart := *entry + i
	for ; i < len(opcodes)+j; i++ {
		vm.Memory[i] = opcodes[i-j]
	}
	vm.Registers[operand.PC.Addr] = i - 3
	vm.EndProgramSection = i
	// Stack grows downward from the top of memory.
	vm.Registers[operand.SP.Addr] = 4095
	return vm
}
// Fetch reads the instruction word at the current program counter.
func (vm *VM) Fetch() int {
	return vm.Memory[vm.Registers[operand.PC.Addr]]
}
// Decode turns a raw instruction word back into a gen.Instruction.
func (vm *VM) Decode(code int) *gen.Instruction {
	return gen.FromByCode(code)
}
// Exec interprets one decoded instruction, updating registers and
// memory. Jump targets are program-relative, so StartProgram is added
// and 1 is subtracted because runOneStep increments PC after Exec.
func (vm *VM) Exec(instr *gen.Instruction) {
	code := instr.Code.Value
	log.Println("exec:", instr)
	switch code {
	case 0x01: //ADD: r0 = r1 + r2
		r0 := instr.GetOperand(0).(*operand.Register)
		r1 := instr.GetOperand(1).(*operand.Register)
		r2 := instr.GetOperand(2).(*operand.Register)
		vm.Registers[r0.Addr] = vm.Registers[r1.Addr] + vm.Registers[r2.Addr]
	//case 0x09: //
	case 0x09, 0x02: //SUB: r0 = r1 - r2 (0x09 handled as an alias here — confirm intent)
		r0 := instr.GetOperand(0).(*operand.Register)
		r1 := instr.GetOperand(1).(*operand.Register)
		r2 := instr.GetOperand(2).(*operand.Register)
		vm.Registers[r0.Addr] = vm.Registers[r1.Addr] - vm.Registers[r2.Addr]
	case 0x03: //MULT: result lands in the L0 (LO) register
		r0 := instr.GetOperand(0).(*operand.Register)
		r1 := instr.GetOperand(1).(*operand.Register)
		vm.Registers[operand.L0.Addr] = vm.Registers[r0.Addr] * vm.Registers[r1.Addr]
	case 0x05: //ADDI: r0 += immediate
		r0 := instr.GetOperand(0).(*operand.Register)
		r1 := instr.GetOperand(1).(*operand.ImmediateNumber)
		vm.Registers[r0.Addr] += r1.Value
	case 0x06: //SUBI: r0 -= immediate
		r0 := instr.GetOperand(0).(*operand.Register)
		r1 := instr.GetOperand(1).(*operand.ImmediateNumber)
		vm.Registers[r0.Addr] -= r1.Value
	//case 0x07: //MULI
	case 0x08: //MFLO: r0 = L0 (move from LO)
		r0 := instr.GetOperand(0).(*operand.Register)
		vm.Registers[r0.Addr] = vm.Registers[operand.L0.Addr]
	case 0x10: //SW: Memory[r1 + offset] = r0
		r0 := instr.GetOperand(0).(*operand.Register)
		r1 := instr.GetOperand(1).(*operand.Register)
		offset := instr.GetOperand(2).(*operand.Offset)
		R1VAL := vm.Registers[r1.Addr]
		vm.Memory[R1VAL+offset.Offset] = vm.Registers[r0.Addr]
	case 0x11: //LW: r0 = Memory[r1 + offset]
		r0 := instr.GetOperand(0).(*operand.Register)
		r1 := instr.GetOperand(1).(*operand.Register)
		offset := instr.GetOperand(2).(*operand.Offset)
		R1VAL := vm.Registers[r1.Addr]
		vm.Registers[r0.Addr] = vm.Memory[R1VAL+offset.Offset]
	case 0x15: //BNE: branch to program-relative offset when r0 != r1
		r0 := instr.GetOperand(0).(*operand.Register)
		r1 := instr.GetOperand(1).(*operand.Register)
		offset := instr.GetOperand(2).(*operand.Offset)
		if vm.Registers[r0.Addr] != vm.Registers[r1.Addr] {
			// -1 compensates for the PC increment after Exec.
			vm.Registers[operand.PC.Addr] = offset.Offset + vm.StartProgram - 1
		}
	case 0x20: //JUMP: unconditional program-relative jump
		r0 := instr.GetOperand(0).(*operand.Offset)
		vm.Registers[operand.PC.Addr] = r0.Offset + vm.StartProgram - 1
	case 0x21: //JR: save the current PC into RA, then jump
		r0 := instr.GetOperand(0).(*operand.Offset)
		vm.Registers[operand.RA.Addr] = vm.Registers[operand.PC.Addr]
		vm.Registers[operand.PC.Addr] = r0.Offset + vm.StartProgram - 1
	case 0x22: //RETURN: pop the return address at SP into PC
		if instr.GetOperand(0) != nil {
			// Operand would carry the return value; currently unused.
		}
		spVal := vm.Registers[operand.SP.Addr]
		vm.Registers[operand.PC.Addr] = vm.Memory[spVal]
	}
}
// run executes the simulated CPU cycle (fetch, decode, exec, PC++) via
// runOneStep until the program section is exhausted.
func (vm *VM) run() {
	// gofmt/staticcheck-clean spelling of `for ; cond; {}`.
	for vm.runOneStep() {
	}
}
// GetSpMemory reads the memory word at SP+offset (offset is usually
// negative since the stack grows downward).
func (vm *VM) GetSpMemory(offset int) int {
	sp := vm.Registers[operand.SP.Addr]
	return vm.Memory[sp+offset]
}
// runOneStep performs one fetch/decode/exec cycle, advances PC, and
// reports whether PC is still inside the program section.
func (vm *VM) runOneStep() bool {
	code := vm.Fetch()
	instr := vm.Decode(code)
	vm.Exec(instr)
	vm.Registers[operand.PC.Addr] += 1
	log.Println(vm.Registers[operand.PC.Addr], "|", vm.EndProgramSection)
	return vm.Registers[operand.PC.Addr] < vm.EndProgramSection
}
================================================
FILE: vm/vm_test.go
================================================
package vm
import (
"github.com/magiconair/properties/assert"
"testing"
"tinyscript/gen"
"tinyscript/gen/operand"
"tinyscript/parser"
"tinyscript/translator"
)
// TestCalcExpr single-steps the VM through `var a = 2 * 3 + 4` inside
// main, asserting register and stack contents after each opcode group.
// Step counts mirror the code generator's exact output — update them
// if codegen changes.
func TestCalcExpr(t *testing.T) {
	source := `func main()int{var a = 2 * 3 + 4
return
}`
	taProg := translator.NewTranslator().Translate(parser.Parse(source))
	prog := gen.NewOpCodeGen().Gen(taProg)
	staticTable := prog.GetStaticArea(taProg)
	opcodes := prog.ToByteCode()
	vm := NewVM(staticTable, opcodes, prog.Entry)
	// CALL main
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	t.Log("RA:", vm.Registers[operand.RA.Addr])
	assert.Equal(t, vm.GetSpMemory(0), 18)
	// p0 = 2 * 3
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	assert.Equal(t, vm.Registers[operand.S0.Addr], 2)
	assert.Equal(t, vm.Registers[operand.S1.Addr], 3)
	assert.Equal(t, vm.Registers[operand.L0.Addr], 6)
	assert.Equal(t, vm.Registers[operand.S2.Addr], 6)
	assert.Equal(t, vm.GetSpMemory(-2), 6)
	// p1 = p0 + 4
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	assert.Equal(t, vm.Registers[operand.S0.Addr], 6)
	assert.Equal(t, vm.Registers[operand.S1.Addr], 4)
	assert.Equal(t, vm.Registers[operand.S2.Addr], 10)
	assert.Equal(t, vm.GetSpMemory(-3), 10)
	// a = p1
	vm.runOneStep()
	vm.runOneStep()
	assert.Equal(t, vm.GetSpMemory(-1), 10)
	assert.Equal(t, vm.Registers[operand.S0.Addr], 10)
	// RETURN null
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	t.Log("SP:", vm.Registers[operand.SP.Addr])
}

// TestRecursiveFunction single-steps fact(2), following the two
// recursive activations down to the base case and back, checking
// frame slots at each stage. Step counts mirror codegen output.
func TestRecursiveFunction(t *testing.T) {
	taProg := translator.NewTranslator().Translate(parser.ParseFromFile("../tests/fact2.ts"))
	t.Log(taProg)
	prog := gen.NewOpCodeGen().Gen(taProg)
	staticTable := prog.GetStaticArea(taProg)
	opcodes := prog.ToByteCode()
	t.Log(prog)
	t.Log(taProg.StaticTable)
	vm := NewVM(staticTable, opcodes, prog.Entry)
	// CALL main
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	t.Log("RA:", vm.Registers[operand.RA.Addr])
	assert.Equal(t, vm.GetSpMemory(0), 39)
	// PARAM 10 0
	vm.runOneStep()
	vm.runOneStep()
	assert.Equal(t, vm.GetSpMemory(-3), 2)
	// SP -2
	vm.runOneStep()
	vm.runOneStep()
	t.Log("RA:", vm.Registers[operand.RA.Addr])
	// #FUNC_BEGIN
	vm.runOneStep()
	assert.Equal(t, vm.GetSpMemory(0), 33)
	// #p1 = n == 0
	assert.Equal(t, vm.GetSpMemory(-1), 2)
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	assert.Equal(t, vm.GetSpMemory(-2) == 0, false)
	// #IF p1 ELSE L1
	vm.runOneStep()
	vm.runOneStep()
	// #p3 = n - 1
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	assert.Equal(t, 1, vm.GetSpMemory(-3))
	// #PARAM p3 0
	// #SP-5
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	assert.Equal(t, 1, vm.GetSpMemory(-1))
	vm.runOneStep()
	vm.runOneStep()
	// #p1 = n == 0
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	assert.Equal(t, false, vm.GetSpMemory(-2) == 0)
	// #IF p1 ELSE L1
	vm.runOneStep()
	// #p3 = n - 1
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	// #PARAM p3 0
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	// CALL
	vm.runOneStep()
	vm.runOneStep()
	// #p1 = n == 0
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	assert.Equal(t, true, vm.GetSpMemory(-2) == 0)
	// #IF p1 ELSE L1
	vm.runOneStep()
	// RETURN 1
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	// #p4 = p2 * n — compute this level's recursive product
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	// #RETURN p4
	vm.runOneStep()
	vm.runOneStep()
	//RETURN
	vm.runOneStep()
	vm.runOneStep()
	//#p4 = p2 * n
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	vm.runOneStep()
	assert.Equal(t, 2, vm.GetSpMemory(-5))
	vm.runOneStep()
	vm.runOneStep()
	// RETURN MAIN
	vm.runOneStep()
	// SP 2
	vm.runOneStep()
	// #RETURN p1 : from main
	vm.runOneStep()
	assert.Equal(t, 2, vm.GetSpMemory(-1))
	for ; vm.runOneStep(); {
	}
	assert.Equal(t, 2, vm.GetSpMemory(0))
}

// TestRecursivefunction1 runs fact(5) to completion and checks the
// final result (5! = 120) left at the stack top.
func TestRecursivefunction1(t *testing.T) {
	taProg := translator.NewTranslator().Translate(parser.ParseFromFile("../tests/fact5.ts"))
	prog := gen.NewOpCodeGen().Gen(taProg)
	staticTable := prog.GetStaticArea(taProg)
	opcodes := prog.ToByteCode()
	//t.Log(prog)
	//t.Log(taProg.StaticTable)
	vm := NewVM(staticTable, opcodes, prog.Entry)
	vm.run()
	assert.Equal(t, 120, vm.GetSpMemory(0))
}
gitextract_72t38b4e/
├── .gitignore
├── Makefile
├── README.md
├── gen/
│ ├── instruction.go
│ ├── opcode.go
│ ├── opcode_gen.go
│ ├── opcode_gen_test.go
│ ├── opcode_program.go
│ ├── opcode_test.go
│ ├── operand/
│ │ ├── immediate_number.go
│ │ ├── label.go
│ │ ├── offset.go
│ │ ├── oprand.go
│ │ ├── register.go
│ │ └── types.go
│ └── types.go
├── go.mod
├── go.sum
├── lexer/
│ ├── alphabet.go
│ ├── alphabet_test.go
│ ├── keywords.go
│ ├── lexer.go
│ ├── lexer_test.go
│ ├── token.go
│ └── util/
│ ├── stream.go
│ └── stream_test.go
├── main.go
├── parser/
│ ├── ast/
│ │ ├── ast.go
│ │ ├── block.go
│ │ ├── expr.go
│ │ ├── expr_call.go
│ │ ├── factor.go
│ │ ├── func_args.go
│ │ ├── priority_table.go
│ │ ├── program.go
│ │ ├── scalar.go
│ │ ├── stmt.go
│ │ ├── stmt_assign.go
│ │ ├── stmt_assign_test.go
│ │ ├── stmt_declare.go
│ │ ├── stmt_declare_test.go
│ │ ├── stmt_for.go
│ │ ├── stmt_func_declare.go
│ │ ├── stmt_func_declare_test.go
│ │ ├── stmt_if.go
│ │ ├── stmt_if_test.go
│ │ ├── stmt_return.go
│ │ ├── stream.go
│ │ ├── stream_test.go
│ │ ├── type.go
│ │ ├── util.go
│ │ └── variable.go
│ ├── parser.go
│ └── parser_test.go
├── tests/
│ ├── add.ts
│ ├── complex-if.ts
│ ├── fact2.ts
│ ├── fact5.ts
│ ├── function.ts
│ └── recursion.ts
├── translator/
│ ├── static_table_test.go
│ ├── symbol/
│ │ ├── static_table.go
│ │ ├── symbol.go
│ │ ├── table.go
│ │ ├── table_test.go
│ │ ├── types.go
│ │ └── util.go
│ ├── tainstruction.go
│ ├── tainstruction_type.go
│ ├── taprogram.go
│ ├── translator.go
│ ├── translator_test.go
│ ├── util.go
│ └── util_test.go
└── vm/
├── vm.go
└── vm_test.go
SYMBOL INDEX (362 symbols across 64 files)
FILE: gen/instruction.go
constant MASK_OPCODE (line 12) | MASK_OPCODE = 0xfc000000
constant MASK_R0 (line 13) | MASK_R0 = 0x03e00000
constant MASK_R1 (line 14) | MASK_R1 = 0x001f0000
constant MASK_R2 (line 15) | MASK_R2 = 0x0000f800
constant MASK_OFFSET0 (line 16) | MASK_OFFSET0 = 0x03ffffff
constant MASK_OFFSET1 (line 17) | MASK_OFFSET1 = 0x001fffff
constant MASK_OFFSET2 (line 18) | MASK_OFFSET2 = 0x000007ff
type Instruction (line 21) | type Instruction struct
method AddOperand (line 70) | func (i *Instruction) AddOperand(o operand.Operand) {
method String (line 74) | func (i *Instruction) String() string {
method ToByteCode (line 83) | func (i *Instruction) ToByteCode() int {
method GetOperand (line 190) | func (i *Instruction) GetOperand(index int) operand.Operand {
function NewInstruction (line 26) | func NewInstruction(code *OpCode) *Instruction {
function NewJumpInstruction (line 29) | func NewJumpInstruction(code *OpCode, offset int) *Instruction {
function NewOffsetInstruction (line 35) | func NewOffsetInstruction(code *OpCode, r1, r2 *operand.Register, offset...
function NewRegisterInstruction (line 43) | func NewRegisterInstruction(code *OpCode, r1, r2, r3 *operand.Register) ...
function NewBNEInstruction (line 55) | func NewBNEInstruction(r1, r2 *operand.Register, label string) *Instruct...
function NewImmediateInstruction (line 63) | func NewImmediateInstruction(code *OpCode, r1 *operand.Register, number ...
function LoadToRegister (line 125) | func LoadToRegister(target *operand.Register, arg *symbol.Symbol) *Instr...
function SaveToMemory (line 136) | func SaveToMemory(source *operand.Register, arg *symbol.Symbol) *Instruc...
function FromByCode (line 140) | func FromByCode(code int) *Instruction {
FILE: gen/opcode.go
type OpCode (line 29) | type OpCode struct
method String (line 41) | func (oc *OpCode) String() string {
function NewOpCode (line 35) | func NewOpCode(addrType AddressingType, name string, value byte) *OpCode {
function FromByte (line 45) | func FromByte(byteOpcode byte) *OpCode {
FILE: gen/opcode_gen.go
type OpCodeGen (line 10) | type OpCodeGen struct
method Gen (line 17) | func (g *OpCodeGen) Gen(taProgram *translator.TAProgram) *OpCodeProgram {
method GenGoTo (line 58) | func (g *OpCodeGen) GenGoTo(program *OpCodeProgram, ta *translator.TAI...
method GenIf (line 66) | func (g *OpCodeGen) GenIf(program *OpCodeProgram, ta *translator.TAIns...
method Relabel (line 71) | func (g *OpCodeGen) Relabel(program *OpCodeProgram, labelMap map[strin...
method GenReturn (line 86) | func (g *OpCodeGen) GenReturn(program *OpCodeProgram, ta *translator.T...
method GenSP (line 96) | func (g *OpCodeGen) GenSP(program *OpCodeProgram, ta *translator.TAIns...
method GenPass (line 105) | func (g *OpCodeGen) GenPass(program *OpCodeProgram, ta *translator.TAI...
method GenFuncBegin (line 113) | func (g *OpCodeGen) GenFuncBegin(program *OpCodeProgram, ta *translato...
method GenCall (line 118) | func (g *OpCodeGen) GenCall(program *OpCodeProgram, ta *translator.TAI...
method GenCopy (line 125) | func (g *OpCodeGen) GenCopy(program *OpCodeProgram, ta *translator.TAI...
function NewOpCodeGen (line 13) | func NewOpCodeGen() *OpCodeGen {
FILE: gen/opcode_gen_test.go
function TestExprEvaluate (line 10) | func TestExprEvaluate(t *testing.T) {
function TestFuncEvaluate (line 45) | func TestFuncEvaluate(t *testing.T) {
FILE: gen/opcode_program.go
type OpCodeProgram (line 9) | type OpCodeProgram struct
method Add (line 19) | func (o *OpCodeProgram) Add(instr *Instruction) {
method String (line 22) | func (o *OpCodeProgram) String() string {
method SetEntry (line 38) | func (o *OpCodeProgram) SetEntry(entry *int) {
method AddComment (line 43) | func (o *OpCodeProgram) AddComment(comment string) {
method ToByteCode (line 47) | func (o *OpCodeProgram) ToByteCode() []int {
method GetStaticArea (line 57) | func (o *OpCodeProgram) GetStaticArea(taProgram *translator.TAProgram)...
function NewOpCodeProgram (line 15) | func NewOpCodeProgram() *OpCodeProgram {
FILE: gen/opcode_test.go
function TestAdd (line 11) | func TestAdd(t *testing.T) {
function TestMult (line 18) | func TestMult(t *testing.T) {
function TestNewJumpInstruction (line 25) | func TestNewJumpInstruction(t *testing.T) {
function TestJR (line 33) | func TestJR(t *testing.T) {
function TestSW (line 41) | func TestSW(t *testing.T) {
function TestSW1 (line 48) | func TestSW1(t *testing.T) {
function TestLW (line 55) | func TestLW(t *testing.T) {
function TestLW2 (line 62) | func TestLW2(t *testing.T) {
function TestSP (line 69) | func TestSP(t *testing.T) {
function TestBNE (line 74) | func TestBNE(t *testing.T) {
function AssertSameInstruction (line 80) | func AssertSameInstruction(t *testing.T, a, b *Instruction) {
FILE: gen/operand/immediate_number.go
type ImmediateNumber (line 7) | type ImmediateNumber struct
method String (line 15) | func (i *ImmediateNumber) String() string {
method Typ (line 19) | func (*ImmediateNumber) Typ() OperandType {
function NewImmediateNumber (line 11) | func NewImmediateNumber(value int) *ImmediateNumber {
FILE: gen/operand/label.go
type Label (line 5) | type Label struct
method String (line 14) | func (l *Label) String() string {
method Typ (line 18) | func (*Label) Typ() OperandType {
method SetOffset (line 21) | func (l *Label) SetOffset(offset int) {
function NewLabel (line 10) | func NewLabel(label string) *Label {
FILE: gen/operand/offset.go
type Offset (line 7) | type Offset struct
method String (line 15) | func (o *Offset) String() string {
method GetEncodedOffset (line 19) | func (o *Offset) GetEncodedOffset() int {
method Typ (line 33) | func (*Offset) Typ() OperandType {
function NewOffset (line 11) | func NewOffset(offset int) *Offset {
function DecodeOffset (line 26) | func DecodeOffset(offset int) *Offset {
FILE: gen/operand/oprand.go
type Operand (line 3) | type Operand interface
FILE: gen/operand/register.go
type Register (line 23) | type Register struct
method Typ (line 34) | func (reg *Register) Typ() OperandType {
method String (line 37) | func (reg *Register) String() string {
function NewRegister (line 28) | func NewRegister(name string, addr byte) *Register {
function RegisterFromAddr (line 41) | func RegisterFromAddr(reg int) *Register {
FILE: gen/operand/types.go
type OperandType (line 3) | type OperandType
constant TYPE_REGISTER (line 6) | TYPE_REGISTER = iota
constant TYPE_IMMEDIATE (line 7) | TYPE_IMMEDIATE
constant TYPE_LABEL (line 8) | TYPE_LABEL
constant TYPE_OFFSET (line 9) | TYPE_OFFSET
FILE: gen/types.go
type AddressingType (line 4) | type AddressingType
constant ADDRESSING_TYPE_IMMEDIATE (line 7) | ADDRESSING_TYPE_IMMEDIATE AddressingType = iota
constant ADDRESSING_TYPE_REGISTER (line 8) | ADDRESSING_TYPE_REGISTER
constant ADDRESSING_TYPE_JUMP (line 9) | ADDRESSING_TYPE_JUMP
constant ADDRESSING_TYPE_BRANCH (line 10) | ADDRESSING_TYPE_BRANCH
constant ADDRESSING_TYPE_OFFSET (line 11) | ADDRESSING_TYPE_OFFSET
FILE: lexer/alphabet.go
function IsLetter (line 12) | func IsLetter(c string) bool {
function IsNumber (line 16) | func IsNumber(c string) bool {
function IsLiteral (line 20) | func IsLiteral(c string) bool {
function IsOperator (line 24) | func IsOperator(c string) bool {
FILE: lexer/alphabet_test.go
function TestAlphabet (line 8) | func TestAlphabet(t *testing.T) {
FILE: lexer/keywords.go
function IsKeyword (line 14) | func IsKeyword(key string) bool {
FILE: lexer/lexer.go
constant EndToken (line 11) | EndToken = "$"
type Lexer (line 13) | type Lexer struct
method Analyse (line 41) | func (l *Lexer) Analyse() []*Token {
method MakeString (line 128) | func (l *Lexer) MakeString() *Token {
method MakeVarOrKeyword (line 159) | func (l *Lexer) MakeVarOrKeyword() *Token {
method MakeOp (line 181) | func (l *Lexer) MakeOp() *Token {
method MakeNumber (line 334) | func (l *Lexer) MakeNumber() *Token {
function FromFile (line 18) | func FromFile(path string) []*Token {
function Analyse (line 32) | func Analyse(source string) []*Token {
function NewLexer (line 36) | func NewLexer(r io.Reader, et string) *Lexer {
FILE: lexer/lexer_test.go
function TestLexer_MakeVarOrKeyword (line 11) | func TestLexer_MakeVarOrKeyword(t *testing.T) {
function TestLexer_MakeString (line 27) | func TestLexer_MakeString(t *testing.T) {
function TestLexer_MakeOp (line 33) | func TestLexer_MakeOp(t *testing.T) {
function TestLexer_MakeNumber (line 51) | func TestLexer_MakeNumber(t *testing.T) {
function TestLexer_Analyse (line 76) | func TestLexer_Analyse(t *testing.T) {
function Test_Function (line 95) | func Test_Function(t *testing.T) {
function TestDeleteComment (line 127) | func TestDeleteComment(t *testing.T) {
function assertToken (line 136) | func assertToken(t *testing.T, token *Token, wantValue string, wantType ...
function TestFromFile (line 141) | func TestFromFile(t *testing.T) {
FILE: lexer/token.go
type TokenType (line 5) | type TokenType
method String (line 18) | func (tt TokenType) String() string {
constant KEYWORD (line 8) | KEYWORD TokenType = 1
constant VARIABLE (line 9) | VARIABLE TokenType = 2
constant OPERATOR (line 10) | OPERATOR TokenType = 3
constant BRACKET (line 11) | BRACKET TokenType = 4
constant STRING (line 12) | STRING TokenType = 5
constant FLOAT (line 13) | FLOAT TokenType = 6
constant BOOLEAN (line 14) | BOOLEAN TokenType = 7
constant INTEGER (line 15) | INTEGER TokenType = 8
type Token (line 42) | type Token struct
method IsVariable (line 51) | func (t *Token) IsVariable() bool {
method IsScalar (line 55) | func (t *Token) IsScalar() bool {
method IsNumber (line 59) | func (t *Token) IsNumber() bool {
method IsOperator (line 63) | func (t *Token) IsOperator() bool {
method String (line 67) | func (t *Token) String() string {
method IsValue (line 71) | func (t *Token) IsValue() bool {
method IsType (line 75) | func (t *Token) IsType() bool {
function NewToken (line 47) | func NewToken(t TokenType, v string) *Token {
FILE: lexer/util/stream.go
type Stream (line 9) | type Stream struct
method Next (line 22) | func (s *Stream) Next() string {
method HasNext (line 37) | func (s *Stream) HasNext() bool {
method Peek (line 54) | func (s *Stream) Peek() string {
method PutBack (line 68) | func (s *Stream) PutBack(e string) {
function NewStream (line 16) | func NewStream(r io.Reader, et string) *Stream {
FILE: lexer/util/stream_test.go
function TestNewStream (line 9) | func TestNewStream(t *testing.T) {
FILE: main.go
function main (line 3) | func main() {
FILE: parser/ast/ast.go
type ASTNode (line 9) | type ASTNode interface
type node (line 32) | type node struct
method Prop (line 48) | func (n *node) Prop(key string) interface{} {
method SetProp (line 51) | func (n *node) SetProp(key string, value interface{}) {
method Lexeme (line 54) | func (n *node) Lexeme() *lexer.Token {
method TypeLexeme (line 57) | func (n *node) TypeLexeme() *lexer.Token {
method IsValueType (line 60) | func (n *node) IsValueType() bool {
method Type (line 63) | func (n *node) Type() NodeType {
method Label (line 66) | func (n *node) Label() string {
method Children (line 69) | func (n *node) Children() []ASTNode {
method GetChild (line 72) | func (n *node) GetChild(index uint) ASTNode {
method Parent (line 78) | func (n *node) Parent() ASTNode {
method AddChild (line 81) | func (n *node) AddChild(node ASTNode) {
method SetLexeme (line 85) | func (n *node) SetLexeme(lexeme *lexer.Token) {
method SetTypeLexeme (line 88) | func (n *node) SetTypeLexeme(lexeme *lexer.Token) {
method SetType (line 91) | func (n *node) SetType(t NodeType) {
method SetLabel (line 94) | func (n *node) SetLabel(str string) {
method SetParent (line 97) | func (n *node) SetParent(node ASTNode) {
method Print (line 100) | func (n *node) Print(indent int) {
function MakeNode (line 45) | func MakeNode() *node {
FILE: parser/ast/block.go
type Block (line 5) | type Block struct
function MakeBlock (line 9) | func MakeBlock() *Block {
function BlockParse (line 16) | func BlockParse(stream *PeekTokenStream) ASTNode {
FILE: parser/ast/expr.go
type Expr (line 7) | type Expr struct
function MakeExpr (line 11) | func MakeExpr() *Expr {
function NewExpr (line 16) | func NewExpr(typ NodeType, token *lexer.Token) *Expr {
type ExprHOF (line 24) | type ExprHOF
function E (line 34) | func E(stream *PeekTokenStream, k int) ASTNode {
function U (line 74) | func U(stream *PeekTokenStream) ASTNode {
function F (line 93) | func F(stream *PeekTokenStream) ASTNode {
function E_ (line 106) | func E_(stream *PeekTokenStream, k int) ASTNode {
function race (line 128) | func race(stream *PeekTokenStream, af ExprHOF, bf ExprHOF) ASTNode {
function combine (line 141) | func combine(stream *PeekTokenStream, af ExprHOF, bf ExprHOF) ASTNode {
function ExprParse (line 167) | func ExprParse(stream *PeekTokenStream) ASTNode {
FILE: parser/ast/expr_call.go
type CallExpr (line 5) | type CallExpr struct
function MakeCallExpr (line 9) | func MakeCallExpr() *CallExpr {
function CallExprParse (line 16) | func CallExprParse(factor ASTNode, stream *PeekTokenStream) ASTNode {
FILE: parser/ast/factor.go
type Factor (line 9) | type Factor struct
function MakeFactor (line 13) | func MakeFactor() *Factor {
function NewFactor (line 17) | func NewFactor(stream *PeekTokenStream) *Factor {
function FactorParse (line 32) | func FactorParse(stream *PeekTokenStream) ASTNode {
FILE: parser/ast/func_args.go
type FuncArgs (line 5) | type FuncArgs struct
function MakeFuncArgs (line 9) | func MakeFuncArgs() *FuncArgs {
function FuncArgsParse (line 15) | func FuncArgsParse(stream *PeekTokenStream) ASTNode {
FILE: parser/ast/priority_table.go
type priorityTable (line 5) | type priorityTable struct
method Size (line 19) | func (pt *priorityTable) Size() int {
method Get (line 22) | func (pt *priorityTable) Get(level int) []string {
method IsContain (line 25) | func (pt *priorityTable) IsContain(level int, key string) bool {
function NewPriorityTable (line 9) | func NewPriorityTable() *priorityTable {
FILE: parser/ast/program.go
type Program (line 5) | type Program struct
function MakeProgram (line 9) | func MakeProgram() *Program {
function ProgramParse (line 15) | func ProgramParse(stream *PeekTokenStream) ASTNode {
FILE: parser/ast/scalar.go
type Scalar (line 5) | type Scalar struct
function NewScalar (line 9) | func NewScalar(stream *PeekTokenStream) *Scalar {
function MakeScalar (line 13) | func MakeScalar() *Scalar {
FILE: parser/ast/stmt.go
type Stmt (line 5) | type Stmt struct
function MakeStmt (line 9) | func MakeStmt() *Stmt {
function StmtParse (line 14) | func StmtParse(stream *PeekTokenStream) ASTNode {
FILE: parser/ast/stmt_assign.go
type AssignStmt (line 5) | type AssignStmt struct
function MakeAssignStmt (line 9) | func MakeAssignStmt() *AssignStmt {
function AssignStmtParse (line 16) | func AssignStmtParse(stream *PeekTokenStream) ASTNode {
FILE: parser/ast/stmt_assign_test.go
function TestAssignStmtParse (line 10) | func TestAssignStmtParse(t *testing.T) {
FILE: parser/ast/stmt_declare.go
type DeclareStmt (line 5) | type DeclareStmt struct
function NewDeclareStmt (line 9) | func NewDeclareStmt() *DeclareStmt {
function MakeDeclareStmt (line 15) | func MakeDeclareStmt() *DeclareStmt {
function DeclareStmtParse (line 22) | func DeclareStmtParse(stream *PeekTokenStream) ASTNode {
FILE: parser/ast/stmt_declare_test.go
function TestDeclareStmtParse (line 10) | func TestDeclareStmtParse(t *testing.T) {
FILE: parser/ast/stmt_for.go
type ForStmt (line 5) | type ForStmt struct
function MakeForStmt (line 9) | func MakeForStmt() *ForStmt {
FILE: parser/ast/stmt_func_declare.go
type FuncDeclareStmt (line 5) | type FuncDeclareStmt struct
method FuncVariable (line 42) | func (f *FuncDeclareStmt) FuncVariable() ASTNode {
method Args (line 45) | func (f *FuncDeclareStmt) Args() ASTNode {
method FuncType (line 48) | func (f *FuncDeclareStmt) FuncType() string {
method Block (line 51) | func (f *FuncDeclareStmt) Block() ASTNode {
function MakeFuncDeclareStmt (line 9) | func MakeFuncDeclareStmt() *FuncDeclareStmt {
function FuncDeclareStmtParse (line 16) | func FuncDeclareStmtParse(stream *PeekTokenStream) *FuncDeclareStmt {
FILE: parser/ast/stmt_func_declare_test.go
function TestFuncDeclareStmtParse (line 9) | func TestFuncDeclareStmtParse(t *testing.T) {
function TestFunctionRecursion (line 26) | func TestFunctionRecursion(t *testing.T) {
FILE: parser/ast/stmt_if.go
type IfStmt (line 5) | type IfStmt struct
method GetExpr (line 59) | func (i *IfStmt) GetExpr() ASTNode {
method GetBlock (line 63) | func (i *IfStmt) GetBlock() ASTNode {
method GetElseBlock (line 66) | func (i *IfStmt) GetElseBlock() ASTNode {
method GetElseIfStmt (line 74) | func (i *IfStmt) GetElseIfStmt() ASTNode {
function MakeIfStmt (line 9) | func MakeIfStmt() *IfStmt {
function IfStmtParse (line 16) | func IfStmtParse(stream *PeekTokenStream) ASTNode {
function IfParse (line 21) | func IfParse(stream *PeekTokenStream) ASTNode {
function TailParse (line 43) | func TailParse(stream *PeekTokenStream) ASTNode {
FILE: parser/ast/stmt_if_test.go
function TestIfStmtParse (line 10) | func TestIfStmtParse(t *testing.T) {
function createTokenStream (line 23) | func createTokenStream(src string) *PeekTokenStream {
function TestIfElseStmtParse (line 29) | func TestIfElseStmtParse(t *testing.T) {
FILE: parser/ast/stmt_return.go
type ReturnStmt (line 5) | type ReturnStmt struct
function MakeReturnStmt (line 9) | func MakeReturnStmt() *ReturnStmt {
function ReturnStmtParse (line 16) | func ReturnStmtParse(stream *PeekTokenStream) ASTNode {
FILE: parser/ast/stream.go
type PeekTokenStream (line 8) | type PeekTokenStream struct
method Next (line 17) | func (pt *PeekTokenStream) Next() *lexer.Token {
method HasNext (line 26) | func (pt *PeekTokenStream) HasNext() bool {
method Peek (line 33) | func (pt *PeekTokenStream) Peek() *lexer.Token {
method PutBack (line 44) | func (pt *PeekTokenStream) PutBack(n int) {
method NextMatch (line 52) | func (pt *PeekTokenStream) NextMatch(value string) *lexer.Token {
method NextMatchType (line 61) | func (pt *PeekTokenStream) NextMatchType(typ lexer.TokenType) *lexer.T...
function NewPeekTokenStream (line 13) | func NewPeekTokenStream(tokens []*lexer.Token) *PeekTokenStream {
FILE: parser/ast/stream_test.go
function TestNewPeekTokenStream (line 10) | func TestNewPeekTokenStream(t *testing.T) {
function assertToken (line 23) | func assertToken(t *testing.T, token *lexer2.Token, wantValue string, wa...
FILE: parser/ast/type.go
type NodeType (line 3) | type NodeType
method String (line 40) | func (nt NodeType) String() string {
constant ASTNODE_TYPE_BLOCK (line 6) | ASTNODE_TYPE_BLOCK NodeType = iota
constant ASTNODE_TYPE_BINARY_EXPR (line 8) | ASTNODE_TYPE_BINARY_EXPR
constant ASTNODE_TYPE_UNARY_EXPR (line 9) | ASTNODE_TYPE_UNARY_EXPR
constant ASTNODE_TYPE_CALL_EXPR (line 10) | ASTNODE_TYPE_CALL_EXPR
constant ASTNODE_TYPE_VARIABLE (line 12) | ASTNODE_TYPE_VARIABLE
constant ASTNODE_TYPE_SCALAR (line 13) | ASTNODE_TYPE_SCALAR
constant ASTNODE_TYPE_IF_STMT (line 15) | ASTNODE_TYPE_IF_STMT
constant ASTNODE_TYPE_WHILE_STMT (line 16) | ASTNODE_TYPE_WHILE_STMT
constant ASTNODE_TYPE_FOR_STMT (line 17) | ASTNODE_TYPE_FOR_STMT
constant ASTNODE_TYPE_RETURN_STMT (line 18) | ASTNODE_TYPE_RETURN_STMT
constant ASTNODE_TYPE_ASSIGN_STMT (line 19) | ASTNODE_TYPE_ASSIGN_STMT
constant ASTNODE_TYPE_FUNCTION_DECLARE_STMT (line 20) | ASTNODE_TYPE_FUNCTION_DECLARE_STMT
constant ASTNODE_TYPE_DECLARE_STMT (line 21) | ASTNODE_TYPE_DECLARE_STMT
FILE: parser/ast/util.go
function ToPostfixExpr (line 8) | func ToPostfixExpr(node ASTNode) string {
function ToBFSString (line 42) | func ToBFSString(node ASTNode, max int) string {
FILE: parser/ast/variable.go
type Variable (line 5) | type Variable struct
function NewVariable (line 9) | func NewVariable(stream *PeekTokenStream) *Variable {
function MakeVariable (line 13) | func MakeVariable() *Variable {
FILE: parser/parser.go
type Parser (line 8) | type Parser struct
method parse (line 25) | func (p *Parser) parse() ast.ASTNode {
method SimpleParse (line 31) | func (p *Parser) SimpleParse() ast.ASTNode {
function Parse (line 12) | func Parse(source string) ast.ASTNode {
function ParseFromFile (line 16) | func ParseFromFile(file string) ast.ASTNode {
function NewParser (line 21) | func NewParser(tokens []*lexer.Token) *Parser {
FILE: parser/parser_test.go
function TestParser_Parse (line 11) | func TestParser_Parse(t *testing.T) {
function createExpr (line 36) | func createExpr(src string) ast.ASTNode {
function TestSimple (line 42) | func TestSimple(t *testing.T) {
function TestSimple1 (line 47) | func TestSimple1(t *testing.T) {
function TestComplex (line 52) | func TestComplex(t *testing.T) {
FILE: translator/static_table_test.go
function TestStaticTable (line 9) | func TestStaticTable(t *testing.T) {
FILE: translator/symbol/static_table.go
type StaticSymbolTable (line 8) | type StaticSymbolTable struct
method Add (line 18) | func (s *StaticSymbolTable) Add(symbol *Symbol) {
method Size (line 31) | func (s *StaticSymbolTable) Size() int {
method String (line 35) | func (s *StaticSymbolTable) String() string {
function NewStaticSymbolTable (line 14) | func NewStaticSymbolTable() *StaticSymbolTable {
FILE: translator/symbol/symbol.go
type Symbol (line 5) | type Symbol struct
method String (line 18) | func (s *Symbol) String() string {
function NewSymbol (line 14) | func NewSymbol(typ SymbolType) *Symbol {
function MakeAddressSymbol (line 26) | func MakeAddressSymbol(lexeme *lexer.Token, offset int) *Symbol {
function MakeImmediateSymbol (line 34) | func MakeImmediateSymbol(lexeme *lexer.Token) *Symbol {
function MakeLabelSymbol (line 41) | func MakeLabelSymbol(label string, lexeme *lexer.Token) *Symbol {
FILE: translator/symbol/table.go
type Table (line 10) | type Table struct
method AddSymbol (line 26) | func (t *Table) AddSymbol(symbol *Symbol) {
method symbolByLexeme (line 31) | func (t *Table) symbolByLexeme(lexeme *lexer.Token) *Symbol {
method Exists (line 40) | func (t *Table) Exists(lexeme *lexer.Token) bool {
method CloneFromSymbolTree (line 53) | func (t *Table) CloneFromSymbolTree(lexeme *lexer.Token, layoutOffset ...
method CreateSymbolByLexeme (line 67) | func (t *Table) CreateSymbolByLexeme(lexeme *lexer.Token) *Symbol {
method CreateVariable (line 89) | func (t *Table) CreateVariable() *Symbol {
method AddChild (line 98) | func (t *Table) AddChild(child *Table) {
method LocalSize (line 104) | func (t *Table) LocalSize() int {
method CreateLabel (line 108) | func (t *Table) CreateLabel(label string, lexeme *lexer.Token) {
function NewTable (line 19) | func NewTable() *Table {
FILE: translator/symbol/table_test.go
function TestSymbolTable (line 9) | func TestSymbolTable(t *testing.T) {
function TestTableChain (line 17) | func TestTableChain(t *testing.T) {
function TestOffset (line 31) | func TestOffset(t *testing.T) {
FILE: translator/symbol/types.go
type SymbolType (line 3) | type SymbolType
method String (line 11) | func (s SymbolType) String() string {
constant SYMBOL_ADDRESS (line 6) | SYMBOL_ADDRESS SymbolType = iota
constant SYMBOL_IMMEDIATE (line 7) | SYMBOL_IMMEDIATE
constant SYMBOL_LABEL (line 8) | SYMBOL_LABEL
FILE: translator/tainstruction.go
type TAInstruction (line 8) | type TAInstruction struct
method String (line 21) | func (t TAInstruction) String() string {
function NewTAInstruction (line 17) | func NewTAInstruction(typ TAInstructionType, result *symbol.Symbol, op s...
FILE: translator/tainstruction_type.go
type TAInstructionType (line 3) | type TAInstructionType
constant TAINSTR_TYPE_ASSIGN (line 6) | TAINSTR_TYPE_ASSIGN TAInstructionType = iota
constant TAINSTR_TYPE_GOTO (line 7) | TAINSTR_TYPE_GOTO
constant TAINSTR_TYPE_IF (line 8) | TAINSTR_TYPE_IF
constant TAINSTR_TYPE_LABEL (line 9) | TAINSTR_TYPE_LABEL
constant TAINSTR_TYPE_CALL (line 10) | TAINSTR_TYPE_CALL
constant TAINSTR_TYPE_RETURN (line 11) | TAINSTR_TYPE_RETURN
constant TAINSTR_TYPE_SP (line 12) | TAINSTR_TYPE_SP
constant TAINSTR_TYPE_PARAM (line 13) | TAINSTR_TYPE_PARAM
constant TAINSTR_TYPE_FUNC_BEGIN (line 14) | TAINSTR_TYPE_FUNC_BEGIN
FILE: translator/taprogram.go
type TAProgram (line 9) | type TAProgram struct
method Add (line 19) | func (t *TAProgram) Add(instr *TAInstruction) {
method AddLabel (line 23) | func (t *TAProgram) AddLabel() *TAInstruction {
method String (line 32) | func (t *TAProgram) String() string {
method SetStaticSymbols (line 42) | func (t *TAProgram) SetStaticSymbols(table *symbol.Table) {
function NewTAProgram (line 15) | func NewTAProgram() *TAProgram {
FILE: translator/translator.go
type Translator (line 10) | type Translator struct
method Translate (line 20) | func (t *Translator) Translate(node ast.ASTNode) *TAProgram {
method TranslateStmt (line 40) | func (t *Translator) TranslateStmt(program *TAProgram, node ast.ASTNod...
method TranslateDeclareStmt (line 68) | func (t *Translator) TranslateDeclareStmt(program *TAProgram, node ast...
method TranslateAssignStmt (line 79) | func (t *Translator) TranslateAssignStmt(program *TAProgram, node ast....
method TranslateExpr (line 91) | func (t *Translator) TranslateExpr(program *TAProgram, node ast.ASTNod...
method TranslateBlock (line 124) | func (t *Translator) TranslateBlock(program *TAProgram, node ast.ASTNo...
method TranslateIfStmt (line 143) | func (t *Translator) TranslateIfStmt(program *TAProgram, node *ast.IfS...
method TranslateFunctionDeclareStmt (line 173) | func (t *Translator) TranslateFunctionDeclareStmt(program *TAProgram, ...
method TranslateCallExpr (line 196) | func (t *Translator) TranslateCallExpr(program *TAProgram, node ast.AS...
method TranslateReturnStmt (line 227) | func (t *Translator) TranslateReturnStmt(program *TAProgram, node ast....
function NewTranslator (line 13) | func NewTranslator() *Translator {
FILE: translator/translator_test.go
function TestExprTranslator (line 10) | func TestExprTranslator(t *testing.T) {
function TestAssignStmt (line 28) | func TestAssignStmt(t *testing.T) {
function TestTranslator_TranslateDeclareStmt (line 40) | func TestTranslator_TranslateDeclareStmt(t *testing.T) {
function TestAssignStmt2 (line 52) | func TestAssignStmt2(t *testing.T) {
function TestBlock (line 61) | func TestBlock(t *testing.T) {
function TestTranslator_TranslateIfStmt (line 81) | func TestTranslator_TranslateIfStmt(t *testing.T) {
function TestTranslator_TranslateIfElseStmt (line 95) | func TestTranslator_TranslateIfElseStmt(t *testing.T) {
function TestTranslator_TranslateIfElseIfStmt (line 114) | func TestTranslator_TranslateIfElseIfStmt(t *testing.T) {
function TestSimpleFunction (line 141) | func TestSimpleFunction(t *testing.T) {
function TestRecursionFunc (line 152) | func TestRecursionFunc(t *testing.T) {
FILE: translator/util.go
function IsInstanceOfExpr (line 8) | func IsInstanceOfExpr(instance interface{}) bool {
function IsNil (line 13) | func IsNil(i interface{}) bool {
FILE: translator/util_test.go
function TestIsInstanceOf (line 9) | func TestIsInstanceOf(t *testing.T) {
FILE: vm/vm.go
type VM (line 9) | type VM struct
method Fetch (line 37) | func (vm *VM) Fetch() int {
method Decode (line 42) | func (vm *VM) Decode(code int) *gen.Instruction {
method Exec (line 46) | func (vm *VM) Exec(instr *gen.Instruction) {
method run (line 114) | func (vm *VM) run() {
method GetSpMemory (line 124) | func (vm *VM) GetSpMemory(offset int) int {
method runOneStep (line 129) | func (vm *VM) runOneStep() bool {
function NewVM (line 16) | func NewVM(staticArea []int, opcodes []int, entry *int) *VM {
FILE: vm/vm_test.go
function TestCalcExpr (line 12) | func TestCalcExpr(t *testing.T) {
function TestRecursiveFunction (line 66) | func TestRecursiveFunction(t *testing.T) {
function TestRecursivefunction1 (line 205) | func TestRecursivefunction1(t *testing.T) {
Condensed preview — 76 files, each showing path, character count, and a content snippet. Download the .json file or copy it to obtain the full structured content (103K chars).
[
{
"path": ".gitignore",
"chars": 20,
"preview": ".idea/\n./tinyscript\n"
},
{
"path": "Makefile",
"chars": 21,
"preview": "test:\n\tgo test ./...\n"
},
{
"path": "README.md",
"chars": 624,
"preview": "# tinyscript\n\n整个项目包括三个东西:\n1. 创建了一个自己的语言\n2. 编译器\n3. 虚拟机\n \ngolang实现的一个编译器,用来编译一个自己创建的语言(用来玩的),最后写了一个自定义虚拟机用来运行自定义语言。\n\n\n## 语"
},
{
"path": "gen/instruction.go",
"chars": 5002,
"preview": "package gen\n\nimport (\n\t\"fmt\"\n\t\"reflect\"\n\t\"strings\"\n\t\"tinyscript/gen/operand\"\n\t\"tinyscript/translator/symbol\"\n)\n\nconst (\n"
},
{
"path": "gen/opcode.go",
"chars": 1399,
"preview": "package gen\n\nimport \"fmt\"\n\nvar Codes = [63]*OpCode{}\n\nvar (\n\tADD = NewOpCode(ADDRESSING_TYPE_REGISTER, \"ADD\", 0x01)\n\tSU"
},
{
"path": "gen/opcode_gen.go",
"chars": 4523,
"preview": "package gen\n\nimport (\n\t\"fmt\"\n\t\"tinyscript/gen/operand\"\n\t\"tinyscript/translator\"\n\t\"tinyscript/translator/symbol\"\n)\n\ntype "
},
{
"path": "gen/opcode_gen_test.go",
"chars": 1289,
"preview": "package gen\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/parser\"\n\t\"tinyscript/translator"
},
{
"path": "gen/opcode_program.go",
"chars": 1452,
"preview": "package gen\n\nimport (\n\t\"strconv\"\n\t\"strings\"\n\t\"tinyscript/translator\"\n)\n\ntype OpCodeProgram struct {\n\tEntry *int\n\t"
},
{
"path": "gen/opcode_test.go",
"chars": 3340,
"preview": "package gen\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"reflect\"\n\t\"testing\"\n\t\"tinyscript/gen/operand\"\n\tsymbol"
},
{
"path": "gen/operand/immediate_number.go",
"chars": 358,
"preview": "package operand\n\nimport \"fmt\"\n\nvar _ Operand = &ImmediateNumber{}\n\ntype ImmediateNumber struct {\n\tValue int\n}\n\nfunc NewI"
},
{
"path": "gen/operand/label.go",
"chars": 360,
"preview": "package operand\n\nvar _ Operand = &Label{}\n\ntype Label struct {\n\tLabel string\n\t*Offset\n}\n\nfunc NewLabel(label string) *La"
},
{
"path": "gen/operand/offset.go",
"chars": 547,
"preview": "package operand\n\nimport \"fmt\"\n\nvar _ Operand = &Offset{}\n\ntype Offset struct {\n\tOffset int\n}\n\nfunc NewOffset(offset int)"
},
{
"path": "gen/operand/oprand.go",
"chars": 80,
"preview": "package operand\n\ntype Operand interface {\n\tString() string\n\tTyp() OperandType\n}\n"
},
{
"path": "gen/operand/register.go",
"chars": 849,
"preview": "package operand\n\nimport \"fmt\"\n\nvar _ Operand = &Register{}\n\nvar (\n\tRegisters = [31]*Register{}\n\n\tZERO = NewRegister(\"Z"
},
{
"path": "gen/operand/types.go",
"chars": 112,
"preview": "package operand\n\ntype OperandType int\n\nconst (\n\tTYPE_REGISTER = iota\n\tTYPE_IMMEDIATE\n\tTYPE_LABEL\n\tTYPE_OFFSET\n)\n"
},
{
"path": "gen/types.go",
"chars": 200,
"preview": "package gen\n\n//寻址类型\ntype AddressingType int\n\nconst (\n\tADDRESSING_TYPE_IMMEDIATE AddressingType = iota\n\tADDRESSING_TYPE_R"
},
{
"path": "go.mod",
"chars": 76,
"preview": "module tinyscript\n\ngo 1.14\n\nrequire github.com/magiconair/properties v1.8.1\n"
},
{
"path": "go.sum",
"chars": 183,
"preview": "github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4=\ngithub.com/magiconair/properties"
},
{
"path": "lexer/alphabet.go",
"chars": 517,
"preview": "package lexer\n\nimport \"regexp\"\n\nvar (\n\tptnLetter = regexp.MustCompile(\"^[a-zA-Z]$\")\n\tptnNumber = regexp.MustCompile("
},
{
"path": "lexer/alphabet_test.go",
"chars": 472,
"preview": "package lexer\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n)\n\nfunc TestAlphabet(t *testing.T) {\n\tasse"
},
{
"path": "lexer/keywords.go",
"chars": 244,
"preview": "package lexer\n\nvar KeyWords = map[string]bool{\n\t\"var\": true,\n\t\"if\": true,\n\t\"else\": true,\n\t\"for\": true,\n\t\"whi"
},
{
"path": "lexer/lexer.go",
"chars": 7142,
"preview": "package lexer\n\nimport (\n\t\"bytes\"\n\t\"io\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"tinyscript/lexer/util\"\n)\n\nconst EndToken = \"$\"\n\ntype Lex"
},
{
"path": "lexer/lexer_test.go",
"chars": 3865,
"preview": "package lexer\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"regexp\"\n\t\"strings\"\n\t\"testing\"\n)\n\nfunc Test"
},
{
"path": "lexer/token.go",
"chars": 1379,
"preview": "package lexer\n\nimport \"fmt\"\n\ntype TokenType int\n\nconst (\n\tKEYWORD TokenType = 1\n\tVARIABLE TokenType = 2\n\tOPERATOR Token"
},
{
"path": "lexer/util/stream.go",
"chars": 1085,
"preview": "package util\n\nimport (\n\t\"bufio\"\n\t\"container/list\"\n\t\"io\"\n)\n\ntype Stream struct {\n\tscanner *bufio.Scanner\n\tqueueCache *"
},
{
"path": "lexer/util/stream_test.go",
"chars": 641,
"preview": "package util\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n)\n\nfunc TestNewStream(t *testing.T"
},
{
"path": "main.go",
"chars": 33,
"preview": "package main\n\nfunc main() {\n\t\n}\n"
},
{
"path": "parser/ast/ast.go",
"chars": 2277,
"preview": "package ast\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n\t\"tinyscript/lexer\"\n)\n\ntype ASTNode interface {\n\t//get\n\tLexeme() *lexer.Token //"
},
{
"path": "parser/ast/block.go",
"chars": 435,
"preview": "package ast\n\nvar DefaultBlock ASTNode = MakeBlock()\n\ntype Block struct {\n\t*Stmt\n}\n\nfunc MakeBlock() *Block {\n\tb := &Bloc"
},
{
"path": "parser/ast/expr.go",
"chars": 2769,
"preview": "package ast\n\nimport (\n\t\"tinyscript/lexer\"\n)\n\ntype Expr struct {\n\t*node\n}\n\nfunc MakeExpr() *Expr {\n\te := &Expr{MakeNode()"
},
{
"path": "parser/ast/expr_call.go",
"chars": 530,
"preview": "package ast\n\nvar _ ASTNode = &CallExpr{}\n\ntype CallExpr struct {\n\t*node\n}\n\nfunc MakeCallExpr() *CallExpr {\n\te := &CallEx"
},
{
"path": "parser/ast/factor.go",
"chars": 846,
"preview": "package ast\n\nimport (\n\t\"tinyscript/lexer\"\n)\n\nvar _ ASTNode = &Factor{}\n\ntype Factor struct {\n\t*node\n}\n\nfunc MakeFactor()"
},
{
"path": "parser/ast/func_args.go",
"chars": 469,
"preview": "package ast\n\nvar _ ASTNode = &Factor{}\n\ntype FuncArgs struct {\n\t*node\n}\n\nfunc MakeFuncArgs() *FuncArgs {\n\ts := &FuncArgs"
},
{
"path": "parser/ast/priority_table.go",
"chars": 638,
"preview": "package ast\n\nvar PriorityTable = NewPriorityTable()\n\ntype priorityTable struct {\n\ttable [][]string\n}\n\nfunc NewPriorityTa"
},
{
"path": "parser/ast/program.go",
"chars": 348,
"preview": "package ast\n\nvar _ ASTNode = &Block{}\n\ntype Program struct {\n\t*Block\n}\n\nfunc MakeProgram() *Program {\n\tb := &Program{Mak"
},
{
"path": "parser/ast/scalar.go",
"chars": 261,
"preview": "package ast\n\nvar _ ASTNode = &Factor{}\n\ntype Scalar struct {\n\t*Factor\n}\n\nfunc NewScalar(stream *PeekTokenStream) *Scalar"
},
{
"path": "parser/ast/stmt.go",
"chars": 782,
"preview": "package ast\n\nvar DefaultStmt ASTNode = MakeStmt()\n\ntype Stmt struct {\n\t*node\n}\n\nfunc MakeStmt() *Stmt {\n\ts := &Stmt{Make"
},
{
"path": "parser/ast/stmt_assign.go",
"chars": 609,
"preview": "package ast\n\nvar DefaultAssignStmt ASTNode = MakeAssignStmt()\n\ntype AssignStmt struct {\n\t*Stmt\n}\n\nfunc MakeAssignStmt() "
},
{
"path": "parser/ast/stmt_assign_test.go",
"chars": 375,
"preview": "package ast\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/lexer\"\n)\n\nfunc TestAss"
},
{
"path": "parser/ast/stmt_declare.go",
"chars": 692,
"preview": "package ast\n\nvar DefaultDeclareStmt ASTNode = MakeDeclareStmt()\n\ntype DeclareStmt struct {\n\t*Stmt\n}\n\nfunc NewDeclareStmt"
},
{
"path": "parser/ast/stmt_declare_test.go",
"chars": 381,
"preview": "package ast\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/lexer\"\n)\n\nfunc TestDec"
},
{
"path": "parser/ast/stmt_for.go",
"chars": 198,
"preview": "package ast\n\nvar _ ASTNode = MakeForStmt()\n\ntype ForStmt struct {\n\t*Stmt\n}\n\nfunc MakeForStmt() *ForStmt {\n\tv := &ForStmt"
},
{
"path": "parser/ast/stmt_func_declare.go",
"chars": 1168,
"preview": "package ast\n\nvar _ ASTNode = MakeFuncDeclareStmt()\n\ntype FuncDeclareStmt struct {\n\t*Stmt\n}\n\nfunc MakeFuncDeclareStmt() *"
},
{
"path": "parser/ast/stmt_func_declare_test.go",
"chars": 982,
"preview": "package ast\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/lexer\"\n)\n\nfunc TestFuncDeclareS"
},
{
"path": "parser/ast/stmt_if.go",
"chars": 1471,
"preview": "package ast\n\nvar _ ASTNode = MakeIfStmt()\n\ntype IfStmt struct {\n\t*Stmt\n}\n\nfunc MakeIfStmt() *IfStmt {\n\tv := &IfStmt{Make"
},
{
"path": "parser/ast/stmt_if_test.go",
"chars": 1075,
"preview": "package ast\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/lexer\"\n)\n\nfunc TestIfS"
},
{
"path": "parser/ast/stmt_return.go",
"chars": 457,
"preview": "package ast\n\nvar _ ASTNode = &ReturnStmt{}\n\ntype ReturnStmt struct {\n\t*Stmt\n}\n\nfunc MakeReturnStmt() *ReturnStmt {\n\tv :="
},
{
"path": "parser/ast/stream.go",
"chars": 1351,
"preview": "package ast\n\nimport (\n\t\"fmt\"\n\t\"tinyscript/lexer\"\n)\n\ntype PeekTokenStream struct {\n\ttokens []*lexer.Token //TODO 保存lexer"
},
{
"path": "parser/ast/stream_test.go",
"chars": 904,
"preview": "package ast\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\tlexer2 \"tinyscript/lexer\"\n)\n\nfunc "
},
{
"path": "parser/ast/type.go",
"chars": 1191,
"preview": "package ast\n\ntype NodeType int\n\nconst (\n\tASTNODE_TYPE_BLOCK NodeType = iota\n\n\tASTNODE_TYPE_BINARY_EXPR // 1+1\n\tASTNODE_T"
},
{
"path": "parser/ast/util.go",
"chars": 1204,
"preview": "package ast\n\nimport (\n\t\"container/list\"\n\t\"strings\"\n)\n\nfunc ToPostfixExpr(node ASTNode) string {\n\tif node.Type() == ASTNO"
},
{
"path": "parser/ast/variable.go",
"chars": 279,
"preview": "package ast\n\nvar _ ASTNode = &Variable{}\n\ntype Variable struct {\n\t*Factor\n}\n\nfunc NewVariable(stream *PeekTokenStream) *"
},
{
"path": "parser/parser.go",
"chars": 941,
"preview": "package parser\n\nimport (\n\t\"tinyscript/lexer\"\n\t\"tinyscript/parser/ast\"\n)\n\ntype Parser struct {\n\tstream *ast.PeekTokenStre"
},
{
"path": "parser/parser_test.go",
"chars": 1650,
"preview": "package parser\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\tlexer \"tinyscript/lexer\"\n\t\"tiny"
},
{
"path": "tests/add.ts",
"chars": 94,
"preview": "func add(int a, int b) int {\n return a + b\n}\n\nfunc main() void {\n add(10, 20)\nreturn\n}\n"
},
{
"path": "tests/complex-if.ts",
"chars": 115,
"preview": "if(a == 1) {\n b = 100\n} else if(a == 2) {\n b = 500\n} else if(a == 3) {\n b = a * 1000\n} else {\n b = -1\n}"
},
{
"path": "tests/fact2.ts",
"chars": 130,
"preview": "func fact(int n) int {\n if(n == 0) {\n return 1\n }\n return fact(n-1) * n\n}\nfunc main() void {\n return"
},
{
"path": "tests/fact5.ts",
"chars": 130,
"preview": "func fact(int n) int {\n if(n == 0) {\n return 1\n }\n return fact(n-1) * n\n}\nfunc main() void {\n return"
},
{
"path": "tests/function.ts",
"chars": 44,
"preview": "func add(int a,int b)int{\n return a + b\n}"
},
{
"path": "tests/recursion.ts",
"chars": 85,
"preview": "func fact(int n)int{\n if (n ==0){\n return 1\n }\n\n return fact(n-1)*n\n}"
},
{
"path": "translator/static_table_test.go",
"chars": 305,
"preview": "package translator\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/parser\"\n)\n\nfunc TestStat"
},
{
"path": "translator/symbol/static_table.go",
"chars": 911,
"preview": "package symbol\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n)\n\ntype StaticSymbolTable struct {\n\tOffsetMap map[string]*Symbol\n\tOffsetC"
},
{
"path": "translator/symbol/symbol.go",
"chars": 811,
"preview": "package symbol\n\nimport \"tinyscript/lexer\"\n\ntype Symbol struct {\n\tParent *Table\n\tLexeme *lexer.Token\n\tLabel "
},
{
"path": "translator/symbol/table.go",
"chars": 2204,
"preview": "package symbol\n\nimport (\n\t\"fmt\"\n\t\"tinyscript/lexer\"\n)\n/*\n一个符号表在运行时就是活动记录,一个符号表可以对应多个活动记录(递归),符号表这个时候就是一个模板\n */\ntype Tabl"
},
{
"path": "translator/symbol/table_test.go",
"chars": 1510,
"preview": "package symbol\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/lexer\"\n)\n\nfunc TestSymbolTab"
},
{
"path": "translator/symbol/types.go",
"chars": 344,
"preview": "package symbol\n\ntype SymbolType int\n\nconst (\n\tSYMBOL_ADDRESS SymbolType = iota\n\tSYMBOL_IMMEDIATE\n\tSYMBOL_LABEL\n)\n\nfunc ("
},
{
"path": "translator/symbol/util.go",
"chars": 16,
"preview": "package symbol\n\n"
},
{
"path": "translator/tainstruction.go",
"chars": 1303,
"preview": "package translator\n\nimport (\n\t\"fmt\"\n\t\"tinyscript/translator/symbol\"\n)\n\ntype TAInstruction struct {\n\tArg1 interface{}\n\t"
},
{
"path": "translator/tainstruction_type.go",
"chars": 262,
"preview": "package translator\n\ntype TAInstructionType int\n\nconst (\n\tTAINSTR_TYPE_ASSIGN TAInstructionType = iota\n\tTAINSTR_TYPE_GOTO"
},
{
"path": "translator/taprogram.go",
"chars": 1178,
"preview": "package translator\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n\t\"tinyscript/translator/symbol\"\n)\n\ntype TAProgram struct {\n\tInstructions "
},
{
"path": "translator/translator.go",
"chars": 6960,
"preview": "package translator\n\nimport (\n\t\"fmt\"\n\t\"tinyscript/lexer\"\n\t\"tinyscript/parser/ast\"\n\t\"tinyscript/translator/symbol\"\n)\n\ntype"
},
{
"path": "translator/translator_test.go",
"chars": 3194,
"preview": "package translator\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/parser\"\n\t\"tinyscript/tra"
},
{
"path": "translator/util.go",
"chars": 343,
"preview": "package translator\n\nimport (\n\t\"reflect\"\n\t\"strings\"\n)\n\nfunc IsInstanceOfExpr(instance interface{}) bool {\n\tival := reflec"
},
{
"path": "translator/util_test.go",
"chars": 231,
"preview": "package translator\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/parser/ast\"\n)\n\nfunc Test"
},
{
"path": "vm/vm.go",
"chars": 3803,
"preview": "package vm\n\nimport (\n\t\"log\"\n\t\"tinyscript/gen\"\n\t\"tinyscript/gen/operand\"\n)\n\ntype VM struct {\n\tRegisters [31]int\n\t"
},
{
"path": "vm/vm_test.go",
"chars": 4376,
"preview": "package vm\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/gen\"\n\t\"tinyscript/gen/operand\"\n\t"
}
]
About this extraction
This page contains the full source code of the elvin-du/tinyscript GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 76 files (86.4 KB), approximately 28.7k tokens, and a symbol index with 362 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.