[
  {
    "path": ".gitignore",
    "content": ".idea/\n/tinyscript\n"
  },
  {
    "path": "Makefile",
    "content": "test:\n\tgo test ./...\n"
  },
  {
    "path": "README.md",
    "content": "# tinyscript\n\n整个项目包括三个东西：\n1. 创建了一个自己的语言\n2. 编译器\n3. 虚拟机\n \ngolang实现的一个编译器，用来编译一个自己创建的语言（用来玩的），最后写了一个自定义虚拟机用来运行自定义语言。\n\n\n## 语言介绍\n\n为了跨平台（其实是为了方便开发 ^ ^），所以这个语言没有静态编译成硬件指令集，最后的机器码是我自己的定义的，和MIPS类似的（其实就是一个mips子集）虚拟指令集。为了运行这些指令集，我写了一个虚拟机。\n\n\n语言和golang和javascript类似，实现了函数，类型声明，函数调用等最基本的一些语言元素，没有实现类，结构体，接口等复杂数据结构。\n下面是用这个语言编程的例子：\n```\nfunc fact(int n)  int {\n    if(n == 0) {\n        return 1\n    }\n    return fact(n-1) * n\n}\nfunc main() void {\n    return fact(2)\n}\n```\n\n每个函数都实现了相应的UnitTest，单元测试真香～\n\n## 声明：\n工程思路不是我自己想出来的，来自于慕课网《大学计算机必修课新讲--编译原理+操作系统+图形学》这个课程。\n理论主要是看《龙书》的一部分，《自己动手写编译器，连接器》和bilibili上的中科大华保健老师的《编译原理》和哈尔滨工业大学的《编译原理》课程的一部分。\n\n"
  },
  {
    "path": "gen/instruction.go",
    "content": "package gen\n\nimport (\n\t\"fmt\"\n\t\"reflect\"\n\t\"strings\"\n\t\"tinyscript/gen/operand\"\n\t\"tinyscript/translator/symbol\"\n)\n\nconst (\n\tMASK_OPCODE  = 0xfc000000\n\tMASK_R0      = 0x03e00000\n\tMASK_R1      = 0x001f0000\n\tMASK_R2      = 0x0000f800\n\tMASK_OFFSET0 = 0x03ffffff\n\tMASK_OFFSET1 = 0x001fffff\n\tMASK_OFFSET2 = 0x000007ff\n)\n\ntype Instruction struct {\n\tCode   *OpCode\n\tOpList []operand.Operand\n}\n\nfunc NewInstruction(code *OpCode) *Instruction {\n\treturn &Instruction{Code: code, OpList: make([]operand.Operand, 0)}\n}\nfunc NewJumpInstruction(code *OpCode, offset int) *Instruction {\n\ti := NewInstruction(code)\n\ti.AddOperand(operand.NewOffset(offset))\n\treturn i\n}\n\nfunc NewOffsetInstruction(code *OpCode, r1, r2 *operand.Register, offset *operand.Offset) *Instruction {\n\ti := NewInstruction(code)\n\ti.AddOperand(r1)\n\ti.AddOperand(r2)\n\ti.AddOperand(offset)\n\treturn i\n}\n\nfunc NewRegisterInstruction(code *OpCode, r1, r2, r3 *operand.Register) *Instruction {\n\ti := NewInstruction(code)\n\ti.AddOperand(r1)\n\tif r2 != nil {\n\t\ti.AddOperand(r2)\n\t}\n\tif r3 != nil {\n\t\ti.AddOperand(r3)\n\t}\n\treturn i\n}\n\nfunc NewBNEInstruction(r1, r2 *operand.Register, label string) *Instruction {\n\ti := NewInstruction(BNE)\n\ti.AddOperand(r1)\n\ti.AddOperand(r2)\n\ti.AddOperand(operand.NewLabel(label))\n\treturn i\n}\n\nfunc NewImmediateInstruction(code *OpCode, r1 *operand.Register, number *operand.ImmediateNumber) *Instruction {\n\ti := NewInstruction(code)\n\ti.AddOperand(r1)\n\ti.AddOperand(number)\n\treturn i\n}\n\nfunc (i *Instruction) AddOperand(o operand.Operand) {\n\ti.OpList = append(i.OpList, o)\n}\n\nfunc (i *Instruction) String() string {\n\ts := i.Code.String()\n\tprts := make([]string, 0, len(i.OpList)+1)\n\tfor _, op := range i.OpList {\n\t\tprts = append(prts, op.String())\n\t}\n\treturn s + \" \" + strings.Join(prts, \" \")\n}\n\nfunc (i *Instruction) ToByteCode() int {\n\tcode := 0\n\tx := 
i.Code.Value\n\tcode |= int(x) << 26\n\tswitch i.Code.AddrType {\n\tcase ADDRESSING_TYPE_IMMEDIATE:\n\t\tr0 := i.OpList[0].(*operand.Register)\n\t\tcode |= int(r0.Addr) << 21\n\t\tcode |= i.OpList[1].(*operand.ImmediateNumber).Value\n\t\treturn code\n\tcase ADDRESSING_TYPE_REGISTER:\n\t\tr1 := i.OpList[0].(*operand.Register)\n\t\tcode |= int(r1.Addr) << 21\n\t\tif len(i.OpList) > 1 {\n\t\t\tcode |= int(i.OpList[1].(*operand.Register).Addr) << 16\n\t\t\tif len(i.OpList) > 2 {\n\t\t\t\tr2 := int(i.OpList[2].(*operand.Register).Addr)\n\t\t\t\tcode |= r2 << 11\n\t\t\t}\n\t\t}\n\tcase ADDRESSING_TYPE_JUMP:\n\t\tif len(i.OpList) > 0 {\n\t\t\tcode |= i.OpList[0].(*operand.Label).Offset.GetEncodedOffset()\n\t\t}\n\tcase ADDRESSING_TYPE_OFFSET:\n\t\tr1 := i.OpList[0].(*operand.Register)\n\t\tr2 := i.OpList[1].(*operand.Register)\n\t\tvar offset *operand.Offset = nil\n\t\tif reflect.TypeOf(i.OpList[2]).String() == reflect.TypeOf(&operand.Label{}).String() {\n\t\t\toffset = i.OpList[2].(*operand.Label).Offset\n\t\t} else {\n\t\t\toffset = i.OpList[2].(*operand.Offset)\n\t\t}\n\n\t\tcode |= int(r1.Addr) << 21\n\t\tcode |= int(r2.Addr) << 16\n\t\tcode |= offset.GetEncodedOffset()\n\t}\n\n\treturn code\n}\n\nfunc LoadToRegister(target *operand.Register, arg *symbol.Symbol) *Instruction {\n\t//转成证书，目前只支持整数\n\tif arg.Typ == symbol.SYMBOL_ADDRESS {\n\t\treturn NewOffsetInstruction(LW, target, operand.SP, operand.NewOffset(-arg.Offset))\n\t} else if arg.Typ == symbol.SYMBOL_IMMEDIATE {\n\t\treturn NewOffsetInstruction(LW, target, operand.STATIC, operand.NewOffset(arg.Offset))\n\t}\n\n\tpanic(fmt.Sprintf(\"Cannot load type %v symbol to register\", arg.Typ))\n}\n\nfunc SaveToMemory(source *operand.Register, arg *symbol.Symbol) *Instruction {\n\treturn NewOffsetInstruction(SW, source, operand.SP, operand.NewOffset(-arg.Offset))\n}\n\nfunc FromByCode(code int) *Instruction {\n\tbyteOpcode := (byte)(int(code&MASK_OPCODE) >> 26)\n\topcode := FromByte(byteOpcode)\n\ti := 
NewInstruction(opcode)\n\n\tswitch opcode.AddrType {\n\tcase ADDRESSING_TYPE_IMMEDIATE:\n\t\treg := (code & MASK_R0) >> 21\n\t\tnumber := code & MASK_OFFSET1\n\t\ti.OpList = append(i.OpList, operand.RegisterFromAddr(reg))\n\t\ti.OpList = append(i.OpList, operand.NewImmediateNumber(number))\n\tcase ADDRESSING_TYPE_REGISTER:\n\t\tr1Addr := (code & MASK_R0) >> 21\n\t\tr2Addr := (code & MASK_R1) >> 16\n\t\tr3Addr := (code & MASK_R2) >> 11\n\t\tr1 := operand.RegisterFromAddr(r1Addr)\n\n\t\tvar r2 *operand.Register = nil\n\t\tif r2Addr != 0 {\n\t\t\tr2 = operand.RegisterFromAddr(r2Addr)\n\t\t}\n\n\t\tvar r3 *operand.Register = nil\n\t\tif r3Addr != 0 {\n\t\t\tr3 = operand.RegisterFromAddr(r3Addr)\n\t\t}\n\n\t\ti.OpList = append(i.OpList, r1)\n\n\t\tif nil != r2 {\n\t\t\ti.OpList = append(i.OpList, r2)\n\t\t}\n\t\tif nil != r3 {\n\t\t\ti.OpList = append(i.OpList, r3)\n\t\t}\n\tcase ADDRESSING_TYPE_JUMP:\n\t\toffset := code & MASK_OFFSET0\n\t\ti.OpList = append(i.OpList, operand.DecodeOffset(offset))\n\tcase ADDRESSING_TYPE_OFFSET:\n\t\tr1Addr := (code & MASK_R0) >> 21\n\t\tr2Addr := (code & MASK_R1) >> 16\n\t\toffset := code & MASK_OFFSET2\n\t\ti.OpList = append(i.OpList, operand.RegisterFromAddr(r1Addr))\n\t\ti.OpList = append(i.OpList, operand.RegisterFromAddr(r2Addr))\n\t\ti.OpList = append(i.OpList, operand.DecodeOffset(offset))\n\t}\n\n\treturn i\n}\n\nfunc (i *Instruction) GetOperand(index int) operand.Operand {\n\treturn i.OpList[index]\n}\n"
  },
  {
    "path": "gen/opcode.go",
    "content": "package gen\n\nimport \"fmt\"\n\nvar Codes = [63]*OpCode{}\n\nvar (\n\tADD  = NewOpCode(ADDRESSING_TYPE_REGISTER, \"ADD\", 0x01)\n\tSUB  = NewOpCode(ADDRESSING_TYPE_REGISTER, \"SUB\", 0x02)\n\tMULT = NewOpCode(ADDRESSING_TYPE_REGISTER, \"MULT\", 0x03)\n\n\tADDI  = NewOpCode(ADDRESSING_TYPE_IMMEDIATE, \"ADDI\", 0x05) //立即数加\n\tSUBI  = NewOpCode(ADDRESSING_TYPE_IMMEDIATE, \"SUBI\", 0x06)\n\tMULTI = NewOpCode(ADDRESSING_TYPE_IMMEDIATE, \"MULTI\", 0x07)\n\n\tMFLO = NewOpCode(ADDRESSING_TYPE_REGISTER, \"MFLO\", 0x08) //MULT/MULTI操作码的结果会存储到这个寄存器中\n\n\tEQ  = NewOpCode(ADDRESSING_TYPE_REGISTER, \"EQ\", 0x09)\n\tBNE = NewOpCode(ADDRESSING_TYPE_OFFSET, \"BNE\", 0x15) //不相等\n\n\tSW = NewOpCode(ADDRESSING_TYPE_OFFSET, \"SW\", 0x10) //从寄存器写回内存\n\tLW = NewOpCode(ADDRESSING_TYPE_OFFSET, \"LW\", 0x11) //从内存读入到寄存器\n\n\tJUMP   = NewOpCode(ADDRESSING_TYPE_JUMP, \"JUMP\", 0x20)\n\tJR     = NewOpCode(ADDRESSING_TYPE_JUMP, \"JR\", 0x21) //函数的跳转\n\tRETURN = NewOpCode(ADDRESSING_TYPE_JUMP, \"RETURN\", 0x22)\n)\n\ntype OpCode struct {\n\tName     string\n\tValue    byte\n\tAddrType AddressingType\n}\n\nfunc NewOpCode(addrType AddressingType, name string, value byte) *OpCode {\n\toc := &OpCode{Name: name, Value: value, AddrType: addrType}\n\tCodes[value] = oc\n\treturn oc\n}\n\nfunc (oc *OpCode) String() string {\n\treturn oc.Name\n}\n\nfunc FromByte(byteOpcode byte) *OpCode {\n\tcode := Codes[byteOpcode]\n\tif nil == code {\n\t\tpanic(fmt.Sprintf(\"%x opcode undefined\", byteOpcode))\n\t}\n\n\treturn code\n}\n"
  },
  {
    "path": "gen/opcode_gen.go",
    "content": "package gen\n\nimport (\n\t\"fmt\"\n\t\"tinyscript/gen/operand\"\n\t\"tinyscript/translator\"\n\t\"tinyscript/translator/symbol\"\n)\n\ntype OpCodeGen struct {\n}\n\nfunc NewOpCodeGen() *OpCodeGen {\n\treturn &OpCodeGen{}\n}\n\nfunc (g *OpCodeGen) Gen(taProgram *translator.TAProgram) *OpCodeProgram {\n\tprogram := NewOpCodeProgram()\n\ttaInstrs := taProgram.Instructions\n\tlabelHash := make(map[string]int)\n\n\tfor _, taInstr := range taInstrs {\n\t\tprogram.AddComment(taInstr.String())\n\t\tswitch taInstr.Typ {\n\t\tcase translator.TAINSTR_TYPE_ASSIGN:\n\t\t\tg.GenCopy(program, taInstr)\n\t\tcase translator.TAINSTR_TYPE_GOTO:\n\t\t\tg.GenGoTo(program, taInstr)\n\t\tcase translator.TAINSTR_TYPE_CALL:\n\t\t\tg.GenCall(program, taInstr)\n\t\tcase translator.TAINSTR_TYPE_PARAM:\n\t\t\tg.GenPass(program, taInstr)\n\t\tcase translator.TAINSTR_TYPE_SP:\n\t\t\tg.GenSP(program, taInstr)\n\t\tcase translator.TAINSTR_TYPE_LABEL:\n\t\t\tif taInstr.Arg2 != nil && taInstr.Arg2.(string) == \"main\" {\n\t\t\t\tsize := len(program.Instructions)\n\t\t\t\tprogram.SetEntry(&size)\n\t\t\t}\n\t\t\t//这里用于给计算label在代码中的行号做基础\n\t\t\tlabelHash[taInstr.Arg1.(string)] = len(program.Instructions)\n\t\tcase translator.TAINSTR_TYPE_RETURN:\n\t\t\tg.GenReturn(program, taInstr)\n\t\tcase translator.TAINSTR_TYPE_FUNC_BEGIN:\n\t\t\tg.GenFuncBegin(program, taInstr)\n\t\tcase translator.TAINSTR_TYPE_IF:\n\t\t\tg.GenIf(program, taInstr)\n\t\tdefault:\n\t\t\tpanic(fmt.Sprintf(\"unknown type %d\", taInstr.Typ))\n\t\t}\n\t}\n\n\tg.Relabel(program, labelHash)\n\n\treturn program\n}\n\nfunc (g *OpCodeGen) GenGoTo(program *OpCodeProgram, ta *translator.TAInstruction) {\n\tlabel := ta.Arg1.(string)\n\ti := NewInstruction(JUMP)\n\t//label对应的未知在relabel阶段计算\n\ti.OpList = append(i.OpList, operand.NewLabel(label))\n\tprogram.Add(i)\n}\n\nfunc (g *OpCodeGen) GenIf(program *OpCodeProgram, ta *translator.TAInstruction) {\n\tlabel := ta.Arg2\n\tprogram.Add(NewBNEInstruction(operand.S2, operand.ZERO, 
label.(string)))\n}\n\nfunc (g *OpCodeGen) Relabel(program *OpCodeProgram, labelMap map[string]int) {\n\tfor _, instr := range program.Instructions {\n\t\tif instr.Code == JUMP || instr.Code == JR || instr.Code == BNE {\n\t\t\tidx := 0\n\t\t\tif instr.Code == BNE {\n\t\t\t\tidx = 2\n\t\t\t}\n\t\t\tlabelOperand := instr.OpList[idx].(*operand.Label)\n\t\t\tlabel := labelOperand.Label\n\t\t\toffset := labelMap[label]\n\t\t\tlabelOperand.Offset.Offset = offset\n\t\t}\n\t}\n}\n\nfunc (g *OpCodeGen) GenReturn(program *OpCodeProgram, ta *translator.TAInstruction) {\n\tret := ta.Arg1.(*symbol.Symbol)\n\tif nil != ret {\n\t\tprogram.Add(LoadToRegister(operand.S0, ret))\n\t}\n\tprogram.Add(NewOffsetInstruction(SW, operand.S0, operand.SP, operand.NewOffset(1)))\n\ti := NewInstruction(RETURN)\n\tprogram.Add(i)\n}\n\nfunc (g *OpCodeGen) GenSP(program *OpCodeProgram, ta *translator.TAInstruction) {\n\toffset := ta.Arg1.(int)\n\tif offset > 0 {\n\t\tprogram.Add(NewImmediateInstruction(ADDI, operand.SP, operand.NewImmediateNumber(offset)))\n\t} else {\n\t\tprogram.Add(NewImmediateInstruction(SUBI, operand.SP, operand.NewImmediateNumber(-offset)))\n\t}\n}\n\nfunc (g *OpCodeGen) GenPass(program *OpCodeProgram, ta *translator.TAInstruction) {\n\targ1 := ta.Arg1.(*symbol.Symbol)\n\tnumber := ta.Arg2.(int)\n\tprogram.Add(LoadToRegister(operand.S0, arg1))\n\t//pass a\n\tprogram.Add(NewOffsetInstruction(SW, operand.S0, operand.SP, operand.NewOffset(-number)))\n}\n\nfunc (g *OpCodeGen) GenFuncBegin(program *OpCodeProgram, ta *translator.TAInstruction) {\n\ti := NewOffsetInstruction(SW, operand.RA, operand.SP, operand.NewOffset(0))\n\tprogram.Add(i)\n}\n\nfunc (g *OpCodeGen) GenCall(program *OpCodeProgram, ta *translator.TAInstruction) {\n\tlabel := ta.Arg1.(*symbol.Symbol)\n\ti := NewInstruction(JR) //跳转之前会把PC寄存器的值存储到RA寄存器\n\ti.OpList = append(i.OpList, operand.NewLabel(label.Label))\n\tprogram.Add(i)\n}\n\nfunc (g *OpCodeGen) GenCopy(program *OpCodeProgram, ta *translator.TAInstruction) 
{\n\tresult := ta.Result\n\top := ta.Op\n\targ1 := ta.Arg1.(*symbol.Symbol)\n\n\tif nil == ta.Arg2 {\n\t\tprogram.Add(LoadToRegister(operand.S0, arg1))\n\t\tprogram.Add(SaveToMemory(operand.S0, result))\n\t} else {\n\t\tprogram.Add(LoadToRegister(operand.S0, arg1))\n\t\targ2 := ta.Arg2.(*symbol.Symbol)\n\t\tprogram.Add(LoadToRegister(operand.S1, arg2))\n\t\tswitch op {\n\t\tcase \"+\":\n\t\t\tprogram.Add(NewRegisterInstruction(ADD, operand.S2, operand.S0, operand.S1))\n\t\tcase \"-\":\n\t\t\tprogram.Add(NewRegisterInstruction(SUB, operand.S2, operand.S0, operand.S1))\n\t\tcase \"*\":\n\t\t\tprogram.Add(NewRegisterInstruction(MULT, operand.S0, operand.S1, nil))\n\t\t\tprogram.Add(NewRegisterInstruction(MFLO, operand.S2, nil, nil))\n\t\tcase \"==\":\n\t\t\tprogram.Add(NewRegisterInstruction(EQ, operand.S2, operand.S1, operand.S0))\n\t\t}\n\t\tprogram.Add(SaveToMemory(operand.S2, result))\n\t}\n}\n"
  },
  {
    "path": "gen/opcode_gen_test.go",
    "content": "package gen\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/parser\"\n\t\"tinyscript/translator\"\n)\n\nfunc TestExprEvaluate(t *testing.T) {\n\tsource := \"var a = 3 * 2*(5+1)\"\n\tnode := parser.Parse(source)\n\ttaprog := translator.NewTranslator().Translate(node)\n\tassert.Equal(t, taprog.StaticTable.String(), `0:3\n1:2\n2:5\n3:1`)\n\n\tg := NewOpCodeGen()\n\tprog := g.Gen(taprog)\n\texpected := `#p0 = 5 + 1\nLW S0 STATIC 2\nLW S1 STATIC 3\nADD S2 S0 S1\nSW S2 SP -1\n#p1 = 2 * p0\nLW S0 STATIC 1\nLW S1 SP -1\nMULT S0 S1\nMFLO S2\nSW S2 SP -2\n#p2 = 3 * p1\nLW S0 STATIC 0\nLW S1 SP -2\nMULT S0 S1\nMFLO S2\nSW S2 SP -3\n#a = p2\nLW S0 SP -3\nSW S0 SP 0`\n\n\tassert.Equal(t, prog.String(), expected)\n}\n\nfunc TestFuncEvaluate(t *testing.T) {\n\tnode := parser.ParseFromFile(\"../tests/add.ts\")\n\ttaprog := translator.NewTranslator().Translate(node)\n\tg := NewOpCodeGen()\n\tprog := g.Gen(taprog)\n\texpected := `#FUNC_BEGIN\nSW RA SP 0\n#p1 = a + b\nLW S0 SP -1\nLW S1 SP -2\nADD S2 S0 S1\nSW S2 SP -3\n#RETURN p1\nLW S0 SP -3\nSW S0 SP 1\nRETURN \n#FUNC_BEGIN\nMAIN:SW RA SP 0\n#PARAM 10 3\nLW S0 STATIC 0\nSW S0 SP -3\n#PARAM 20 4\nLW S0 STATIC 1\nSW S0 SP -4\n#SP -2\nSUBI SP 2\n#CALL L0\nJR L0\n#SP 2\nADDI SP 2\n#RETURN\nSW S0 SP 1\nRETURN \n#SP -1\nSUBI SP 1\n#CALL L1\nJR L1\n#SP 1\nADDI SP 1`\n\n\tassert.Equal(t,prog.String(),expected)\n}\n"
  },
  {
    "path": "gen/opcode_program.go",
    "content": "package gen\n\nimport (\n\t\"strconv\"\n\t\"strings\"\n\t\"tinyscript/translator\"\n)\n\ntype OpCodeProgram struct {\n\tEntry        *int\n\tInstructions []*Instruction\n\tComments     map[int]string //注释；行号：注释内容\n}\n\nfunc NewOpCodeProgram() *OpCodeProgram {\n\treturn &OpCodeProgram{Entry: nil, Instructions: make([]*Instruction, 0), Comments: make(map[int]string)}\n}\n\nfunc (o *OpCodeProgram) Add(instr *Instruction) {\n\to.Instructions = append(o.Instructions, instr)\n}\nfunc (o *OpCodeProgram) String() string {\n\tprts := make([]string, 0, len(o.Instructions))\n\tfor i, instr := range o.Instructions {\n\t\tif c, ok := o.Comments[i]; ok {\n\t\t\tprts = append(prts, \"#\"+c)\n\t\t}\n\t\tstr := instr.String()\n\t\tif o.Entry != nil && *o.Entry == i {\n\t\t\tstr = \"MAIN:\" + str\n\t\t}\n\t\tprts = append(prts, str)\n\t}\n\n\treturn strings.Join(prts, \"\\n\")\n}\n\nfunc (o *OpCodeProgram) SetEntry(entry *int) {\n\to.Entry = entry\n}\n\n//当前指令的位置添加一行注释\nfunc (o *OpCodeProgram) AddComment(comment string) {\n\to.Comments[len(o.Instructions)] = comment\n}\n\nfunc (o *OpCodeProgram) ToByteCode() []int {\n\tcodes := []int{}\n\tfor _, instr := range o.Instructions {\n\t\tcodes = append(codes, instr.ToByteCode())\n\t}\n\n\treturn codes\n}\n\n//从三地址代码中获取静态符号表中的值，存起来在虚拟机实例化时写入内存静态区\nfunc (o *OpCodeProgram) GetStaticArea(taProgram *translator.TAProgram) []int {\n\tl := []int{}\n\tfor _, symbol := range taProgram.StaticTable.Symbols {\n\t\ti, err := strconv.Atoi(symbol.Lexeme.Value)\n\t\tif nil != err {\n\t\t\tpanic(err)\n\t\t}\n\t\tl = append(l, i)\n\t}\n\treturn l\n}\n"
  },
  {
    "path": "gen/opcode_test.go",
    "content": "package gen\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"reflect\"\n\t\"testing\"\n\t\"tinyscript/gen/operand\"\n\tsymbol2 \"tinyscript/translator/symbol\"\n)\n\nfunc TestAdd(t *testing.T) {\n\ta := NewInstruction(ADD)\n\ta.AddOperand(operand.S2)\n\ta.AddOperand(operand.S0)\n\ta.AddOperand(operand.S1)\n\tAssertSameInstruction(t, a, FromByCode(a.ToByteCode()))\n}\nfunc TestMult(t *testing.T) {\n\ta := NewInstruction(MULT)\n\ta.AddOperand(operand.S0)\n\ta.AddOperand(operand.S1)\n\tAssertSameInstruction(t, a, FromByCode(a.ToByteCode()))\n}\n\nfunc TestNewJumpInstruction(t *testing.T) {\n\ta := NewInstruction(JUMP)\n\tlabel := operand.NewLabel(\"L0\")\n\ta.AddOperand(label)\n\tlabel.SetOffset(100)\n\tAssertSameInstruction(t, a, FromByCode(a.ToByteCode()))\n}\n\nfunc TestJR(t *testing.T) {\n\ta := NewInstruction(JR)\n\tlabel := operand.NewLabel(\"L0\")\n\ta.AddOperand(label)\n\tlabel.SetOffset(100)\n\tAssertSameInstruction(t, a, FromByCode(a.ToByteCode()))\n}\n\nfunc TestSW(t *testing.T) {\n\tsymbol := symbol2.NewSymbol(symbol2.SYMBOL_IMMEDIATE)\n\tsymbol.Offset = -100\n\ta := SaveToMemory(operand.S0, symbol)\n\tAssertSameInstruction(t, a, FromByCode(a.ToByteCode()))\n}\n\nfunc TestSW1(t *testing.T) {\n\tsymbol := symbol2.NewSymbol(symbol2.SYMBOL_IMMEDIATE)\n\tsymbol.Offset = 100\n\ta := SaveToMemory(operand.S0, symbol)\n\tAssertSameInstruction(t, a, FromByCode(a.ToByteCode()))\n}\n\nfunc TestLW(t *testing.T) {\n\tsymbol := symbol2.NewSymbol(symbol2.SYMBOL_IMMEDIATE)\n\tsymbol.Offset = 100\n\ta := LoadToRegister(operand.S0, symbol)\n\tAssertSameInstruction(t, a, FromByCode(a.ToByteCode()))\n}\n\nfunc TestLW2(t *testing.T) {\n\tsymbol := symbol2.NewSymbol(symbol2.SYMBOL_ADDRESS)\n\tsymbol.Offset = 100\n\ta := LoadToRegister(operand.S0, symbol)\n\tAssertSameInstruction(t, a, FromByCode(a.ToByteCode()))\n}\n\nfunc TestSP(t *testing.T) {\n\ta := NewImmediateInstruction(ADDI, operand.SP, 
operand.NewImmediateNumber(100))\n\tAssertSameInstruction(t, a, FromByCode(a.ToByteCode()))\n}\n\nfunc TestBNE(t *testing.T) {\n\ta := NewBNEInstruction(operand.S0, operand.S1, \"L0\")\n\ta.GetOperand(2).(*operand.Label).SetOffset(100)\n\tAssertSameInstruction(t, a, FromByCode(a.ToByteCode()))\n}\n\nfunc AssertSameInstruction(t *testing.T, a, b *Instruction) {\n\tassert.Equal(t, a.Code, b.Code)\n\tassert.Equal(t, len(a.OpList), len(b.OpList))\n\tfor i, av := range a.OpList {\n\t\tbv := b.GetOperand(i)\n\t\tif reflect.ValueOf(av).Type().String() == reflect.TypeOf(&operand.Label{}).String() {\n\t\t\tassert.Equal(t, bv, av.(*operand.Label).Offset)\n\t\t}else {\n\t\t\tassert.Equal(t, bv, av)\n\t\t}\n\n\t\tif reflect.ValueOf(av).Type().String() == reflect.TypeOf(&operand.ImmediateNumber{}).String() {\n\t\t\tassert.Equal(t, av.(*operand.ImmediateNumber).Value, bv.(*operand.ImmediateNumber).Value)\n\t\t} else if reflect.ValueOf(av).Type().String() == reflect.TypeOf(&operand.Offset{}).String() {\n\t\t\tassert.Equal(t, av.(*operand.Offset).Offset, bv.(*operand.Offset).Offset)\n\t\t} else if reflect.ValueOf(av).Type().String() == reflect.TypeOf(&operand.Register{}).String() {\n\t\t\tassert.Equal(t, av.(*operand.Register).Addr, bv.(*operand.Register).Addr)\n\t\t\tassert.Equal(t, av.(*operand.Register).Name, bv.(*operand.Register).Name)\n\t\t} else if reflect.ValueOf(av).Type().String() == reflect.TypeOf(&operand.Label{}).String() {\n\t\t\tassert.Equal(t, av.(*operand.Label).Offset.Offset, bv.(*operand.Offset).Offset)\n\t\t} else {\n\t\t\tpanic(\"unsupported encode/decode type\" + av.String())\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "gen/operand/immediate_number.go",
    "content": "package operand\n\nimport \"fmt\"\n\nvar _ Operand = &ImmediateNumber{}\n\ntype ImmediateNumber struct {\n\tValue int\n}\n\nfunc NewImmediateNumber(value int) *ImmediateNumber {\n\treturn &ImmediateNumber{Value: value}\n}\n\nfunc (i *ImmediateNumber) String() string {\n\treturn fmt.Sprintf(\"%d\", i.Value)\n}\n\nfunc (*ImmediateNumber) Typ() OperandType {\n\treturn TYPE_IMMEDIATE\n}\n"
  },
  {
    "path": "gen/operand/label.go",
    "content": "package operand\n\nvar _ Operand = &Label{}\n\ntype Label struct {\n\tLabel string\n\t*Offset\n}\n\nfunc NewLabel(label string) *Label {\n\treturn &Label{Label: label, Offset: NewOffset(0)}\n}\n\nfunc (l *Label) String() string {\n\treturn l.Label\n}\n\nfunc (*Label) Typ() OperandType {\n\treturn TYPE_LABEL\n}\nfunc (l *Label) SetOffset(offset int) {\n\tl.Offset = NewOffset(offset)\n}\n"
  },
  {
    "path": "gen/operand/offset.go",
    "content": "package operand\n\nimport \"fmt\"\n\nvar _ Operand = &Offset{}\n\ntype Offset struct {\n\tOffset int\n}\n\nfunc NewOffset(offset int) *Offset {\n\treturn &Offset{Offset: offset}\n}\n\nfunc (o *Offset) String() string {\n\treturn fmt.Sprintf(\"%d\", o.Offset)\n}\n\nfunc (o *Offset) GetEncodedOffset() int {\n\tif o.Offset > 0 {\n\t\treturn o.Offset\n\t}\n\n\treturn 0x400 | -o.Offset\n}\nfunc DecodeOffset(offset int) *Offset {\n\tif offset&0x400 > 0 {\n\t\toffset = offset & 0x3ff\n\t\toffset = -offset\n\t}\n\treturn NewOffset(offset)\n}\nfunc (*Offset) Typ() OperandType {\n\treturn TYPE_OFFSET\n}\n"
  },
  {
    "path": "gen/operand/operand.go",
    "content": "package operand\n\ntype Operand interface {\n\tString() string\n\tTyp() OperandType\n}\n"
  },
  {
    "path": "gen/operand/register.go",
    "content": "package operand\n\nimport \"fmt\"\n\nvar _ Operand = &Register{}\n\nvar (\n\tRegisters = [31]*Register{}\n\n\tZERO   = NewRegister(\"ZERO\", 1)\n\tPC     = NewRegister(\"PC\", 2)\n\tSP     = NewRegister(\"SP\", 3)\n\tSTATIC = NewRegister(\"STATIC\", 4)\n\tRA     = NewRegister(\"RA\", 5)\n\n\tS0 = NewRegister(\"S0\", 10)\n\tS1 = NewRegister(\"S1\", 11)\n\tS2 = NewRegister(\"S2\", 12)\n\n\tL0 = NewRegister(\"L0\", 20)\n)\n\ntype Register struct {\n\tAddr byte\n\tName string\n}\n\nfunc NewRegister(name string, addr byte) *Register {\n\treg := &Register{Addr: addr, Name: name}\n\tRegisters[addr] = reg\n\treturn reg\n}\n\nfunc (reg *Register) Typ() OperandType {\n\treturn TYPE_REGISTER\n}\nfunc (reg *Register) String() string {\n\treturn reg.Name\n}\n\nfunc RegisterFromAddr(reg int) *Register {\n\tif reg < 0 || reg >= len(Registers) {\n\t\tpanic(fmt.Sprintf(\"no register's address is %d\", reg))\n\t}\n\n\treturn Registers[reg]\n}\n"
  },
  {
    "path": "gen/operand/types.go",
    "content": "package operand\n\ntype OperandType int\n\nconst (\n\tTYPE_REGISTER = iota\n\tTYPE_IMMEDIATE\n\tTYPE_LABEL\n\tTYPE_OFFSET\n)\n"
  },
  {
    "path": "gen/types.go",
    "content": "package gen\n\n//寻址类型\ntype AddressingType int\n\nconst (\n\tADDRESSING_TYPE_IMMEDIATE AddressingType = iota\n\tADDRESSING_TYPE_REGISTER\n\tADDRESSING_TYPE_JUMP\n\tADDRESSING_TYPE_BRANCH\n\tADDRESSING_TYPE_OFFSET\n)\n"
  },
  {
    "path": "go.mod",
    "content": "module tinyscript\n\ngo 1.14\n\nrequire github.com/magiconair/properties v1.8.1\n"
  },
  {
    "path": "go.sum",
    "content": "github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4=\ngithub.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=\n"
  },
  {
    "path": "lexer/alphabet.go",
    "content": "package lexer\n\nimport \"regexp\"\n\nvar (\n\tptnLetter   = regexp.MustCompile(\"^[a-zA-Z]$\")\n\tptnNumber   = regexp.MustCompile(\"^[0-9]$\")\n\tptnLiteral  = regexp.MustCompile(\"^[_a-zA-Z0-9]$\")\n\tptnOperator = regexp.MustCompile(\"^[+-\\\\\\\\*<>=!&|^%/]$\")\n)\n\nfunc IsLetter(c string) bool {\n\treturn ptnLetter.MatchString(c)\n}\n\nfunc IsNumber(c string) bool {\n\treturn ptnNumber.MatchString(c)\n}\n\nfunc IsLiteral(c string) bool {\n\treturn ptnLiteral.MatchString(c)\n}\n\nfunc IsOperator(c string) bool {\n\treturn ptnOperator.MatchString(c)\n}\n"
  },
  {
    "path": "lexer/alphabet_test.go",
    "content": "package lexer\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n)\n\nfunc TestAlphabet(t *testing.T) {\n\tassert.Equal(t, IsLetter(\"a\"), true)\n\tassert.Equal(t, IsLiteral(\"a\"), true)\n\tassert.Equal(t, IsNumber(\"2\"), true)\n\tassert.Equal(t, IsOperator(\"*\"), true)\n\tassert.Equal(t, IsOperator(\"^\"), true)\n\tassert.Equal(t, IsOperator(\"-\"), true)\n\tassert.Equal(t, IsOperator(\"=\"), true)\n\tassert.Equal(t, IsOperator(\"/\"), true)\n\tassert.Equal(t, IsOperator(\"%\"), true)\n}\n"
  },
  {
    "path": "lexer/keywords.go",
    "content": "package lexer\n\nvar KeyWords = map[string]bool{\n\t\"var\":    true,\n\t\"if\":     true,\n\t\"else\":   true,\n\t\"for\":    true,\n\t\"while\":  true,\n\t\"break\":  true,\n\t\"func\":   true,\n\t\"return\": true,\n}\n\nfunc IsKeyword(key string) bool {\n\treturn KeyWords[key]\n}\n"
  },
  {
    "path": "lexer/lexer.go",
    "content": "package lexer\n\nimport (\n\t\"bytes\"\n\t\"io\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"tinyscript/lexer/util\"\n)\n\nconst EndToken = \"$\"\n\ntype Lexer struct {\n\t*util.Stream\n\tendToken string\n}\n\nfunc FromFile(path string) []*Token {\n\tabsPath, err := filepath.Abs(path)\n\tif nil != err {\n\t\tpanic(err)\n\t}\n\tf, err := os.Open(absPath)\n\tif nil != err {\n\t\tpanic(err)\n\t}\n\tdefer f.Close()\n\n\treturn NewLexer(f, EndToken).Analyse()\n}\n\nfunc Analyse(source string) []*Token {\n\treturn NewLexer(bytes.NewBufferString(source), EndToken).Analyse()\n}\n\nfunc NewLexer(r io.Reader, et string) *Lexer {\n\ts := util.NewStream(r, EndToken)\n\treturn &Lexer{Stream: s, endToken: et}\n}\n\nfunc (l *Lexer) Analyse() []*Token {\n\ttokens := make([]*Token, 0)\n\tfor ; l.HasNext(); {\n\t\tc := l.Next()\n\t\tif c == EndToken {\n\t\t\tbreak\n\t\t}\n\t\tlookahead := l.Peek()\n\n\t\tif c == \" \" || c == \"\\n\" || c == \"\\t\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tif \"/\" == c {\n\t\t\tif lookahead == \"/\" {\n\t\t\t\tfor ; l.HasNext(); {\n\t\t\t\t\tif \"\\n\" == l.Next() {\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else if lookahead == \"*\" {\n\t\t\t\tvalid := false\n\t\t\t\tfor ; l.HasNext(); {\n\t\t\t\t\tp := l.Next()\n\t\t\t\t\tif \"*\" == p && l.Peek() == \"/\" {\n\t\t\t\t\t\tl.Next()\n\t\t\t\t\t\tvalid = true\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif !valid {\n\t\t\t\t\tpanic(\"source comment invalid\")\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tcontinue\n\t\t}\n\n\t\tif c == \"{\" || c == \"}\" || c == \"(\" || c == \")\" {\n\t\t\ttokens = append(tokens, NewToken(BRACKET, c))\n\t\t\tcontinue\n\t\t}\n\n\t\tif c == `\"` || c == `'` {\n\t\t\tl.PutBack(c)\n\t\t\ttokens = append(tokens, l.MakeString())\n\t\t\tcontinue\n\t\t}\n\n\t\tif IsLetter(c) {\n\t\t\tl.PutBack(c)\n\t\t\ttokens = append(tokens, l.MakeVarOrKeyword())\n\t\t\tcontinue\n\t\t}\n\t\tif IsNumber(c) {\n\t\t\tl.PutBack(c)\n\t\t\ttokens = append(tokens, 
l.MakeNumber())\n\t\t\tcontinue\n\t\t}\n\n\t\t//+ - .\n\t\t//+-: 3+5, +5, 3 * -5\n\t\tif (c == \"+\" || c == \"-\" || c == \".\") && IsNumber(lookahead) {\n\t\t\tvar lastToken *Token = nil\n\t\t\tif len(tokens) > 0 {\n\t\t\t\tlastToken = tokens[len(tokens)-1]\n\t\t\t}\n\n\t\t\tif nil == lastToken || !lastToken.IsValue() || lastToken.IsOperator() {\n\t\t\t\tl.PutBack(c)\n\t\t\t\ttokens = append(tokens, l.MakeNumber())\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\n\t\tif IsOperator(c) {\n\t\t\tl.PutBack(c)\n\t\t\ttokens = append(tokens, l.MakeOp())\n\t\t\tcontinue\n\t\t}\n\n\t\tpanic(\"unexpected character\" + c)\n\t}\n\n\treturn tokens\n}\n\nfunc (l *Lexer) MakeString() *Token {\n\ts := \"\"\n\tstate := 0\n\tfor ; l.HasNext(); {\n\t\tc := l.Next()\n\t\tswitch state {\n\t\tcase 0:\n\t\t\tif c == `'` {\n\t\t\t\tstate = 1\n\t\t\t} else {\n\t\t\t\tstate = 2\n\t\t\t}\n\t\t\ts += c\n\t\tcase 1:\n\t\t\tif `'` == c {\n\t\t\t\treturn NewToken(STRING, s+c)\n\t\t\t} else {\n\t\t\t\ts += c\n\t\t\t}\n\t\tcase 2:\n\t\t\tif `\"` == c {\n\t\t\t\treturn NewToken(STRING, s+c)\n\t\t\t} else {\n\t\t\t\ts += c\n\t\t\t}\n\t\t}\n\t}\n\n\tpanic(\"make string failed\")\n}\n\nfunc (l *Lexer) MakeVarOrKeyword() *Token {\n\ts := \"\"\n\tfor ; l.HasNext(); {\n\t\tlookahead := l.Peek()\n\t\tif IsLiteral(lookahead) {\n\t\t\ts += lookahead\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t\tl.Next()\n\t}\n\n\tif IsKeyword(s) {\n\t\treturn NewToken(KEYWORD, s)\n\t}\n\n\tif \"true\" == s || \"false\" == s {\n\t\treturn NewToken(BOOLEAN, s)\n\t}\n\n\treturn NewToken(VARIABLE, s)\n}\nfunc (l *Lexer) MakeOp() *Token {\n\tstate := 0\n\n\tfor ; l.HasNext(); {\n\t\tlookahead := l.Next()\n\t\tswitch state {\n\t\tcase 0:\n\t\t\tswitch lookahead {\n\t\t\tcase \"+\":\n\t\t\t\tstate = 1\n\t\t\tcase \"-\":\n\t\t\t\tstate = 2\n\t\t\tcase \"*\":\n\t\t\t\tstate = 3\n\t\t\tcase `/`:\n\t\t\t\tstate = 4\n\t\t\tcase `>`:\n\t\t\t\tstate = 5\n\t\t\tcase `<`:\n\t\t\t\tstate = 6\n\t\t\tcase `=`:\n\t\t\t\tstate = 7\n\t\t\tcase 
`!`:\n\t\t\t\tstate = 8\n\t\t\tcase `&`:\n\t\t\t\tstate = 9\n\t\t\tcase `|`:\n\t\t\t\tstate = 10\n\t\t\tcase `^`:\n\t\t\t\tstate = 11\n\t\t\tcase `%`:\n\t\t\t\tstate = 12\n\t\t\tcase \",\":\n\t\t\t\treturn NewToken(OPERATOR, \",\")\n\t\t\tcase \";\":\n\t\t\t\treturn NewToken(OPERATOR, \";\")\n\t\t\t}\n\t\tcase 1:\n\t\t\tswitch lookahead {\n\t\t\tcase `+`:\n\t\t\t\treturn NewToken(OPERATOR, \"++\")\n\t\t\tcase `=`:\n\t\t\t\treturn NewToken(OPERATOR, \"+=\")\n\t\t\tdefault:\n\t\t\t\tl.PutBack(lookahead)\n\t\t\t\treturn NewToken(OPERATOR, \"+\")\n\t\t\t}\n\t\tcase 2:\n\t\t\tswitch lookahead {\n\t\t\tcase `-`:\n\t\t\t\treturn NewToken(OPERATOR, \"--\")\n\t\t\tcase `=`:\n\t\t\t\treturn NewToken(OPERATOR, \"-=\")\n\t\t\tdefault:\n\t\t\t\tl.PutBack(lookahead)\n\t\t\t\treturn NewToken(OPERATOR, \"-\")\n\t\t\t}\n\t\tcase 3:\n\t\t\tswitch lookahead {\n\t\t\tcase `=`:\n\t\t\t\treturn NewToken(OPERATOR, \"*=\")\n\t\t\tdefault:\n\t\t\t\tl.PutBack(lookahead)\n\t\t\t\treturn NewToken(OPERATOR, \"*\")\n\t\t\t}\n\t\tcase 4:\n\t\t\tswitch lookahead {\n\t\t\tcase `=`:\n\t\t\t\treturn NewToken(OPERATOR, \"/=\")\n\t\t\tdefault:\n\t\t\t\tl.PutBack(lookahead)\n\t\t\t\treturn NewToken(OPERATOR, \"/\")\n\t\t\t}\n\t\tcase 5:\n\t\t\tswitch lookahead {\n\t\t\tcase `=`:\n\t\t\t\treturn NewToken(OPERATOR, \">=\")\n\t\t\tcase `>`:\n\t\t\t\treturn NewToken(OPERATOR, \">>\")\n\t\t\tdefault:\n\t\t\t\tl.PutBack(lookahead)\n\t\t\t\treturn NewToken(OPERATOR, \">\")\n\t\t\t}\n\t\tcase 6:\n\t\t\tswitch lookahead {\n\t\t\tcase `=`:\n\t\t\t\treturn NewToken(OPERATOR, \"<=\")\n\t\t\tcase `<`:\n\t\t\t\treturn NewToken(OPERATOR, \"<<\")\n\t\t\tdefault:\n\t\t\t\tl.PutBack(lookahead)\n\t\t\t\treturn NewToken(OPERATOR, \"<\")\n\t\t\t}\n\t\tcase 7:\n\t\t\tswitch lookahead {\n\t\t\tcase `=`:\n\t\t\t\treturn NewToken(OPERATOR, \"==\")\n\t\t\tdefault:\n\t\t\t\tl.PutBack(lookahead)\n\t\t\t\treturn NewToken(OPERATOR, \"=\")\n\t\t\t}\n\t\tcase 8:\n\t\t\tswitch lookahead {\n\t\t\tcase `=`:\n\t\t\t\treturn 
NewToken(OPERATOR, \"!=\")\n\t\t\tdefault:\n\t\t\t\tl.PutBack(lookahead)\n\t\t\t\treturn NewToken(OPERATOR, \"!\")\n\t\t\t}\n\t\tcase 9:\n\t\t\tswitch lookahead {\n\t\t\tcase `&`:\n\t\t\t\treturn NewToken(OPERATOR, \"&&\")\n\t\t\tcase `=`:\n\t\t\t\treturn NewToken(OPERATOR, \"&=\")\n\t\t\tdefault:\n\t\t\t\tl.PutBack(lookahead)\n\t\t\t\treturn NewToken(OPERATOR, \"&\")\n\t\t\t}\n\t\tcase 10:\n\t\t\tswitch lookahead {\n\t\t\tcase `|`:\n\t\t\t\treturn NewToken(OPERATOR, \"||\")\n\t\t\tcase `=`:\n\t\t\t\treturn NewToken(OPERATOR, \"|=\")\n\t\t\tdefault:\n\t\t\t\tl.PutBack(lookahead)\n\t\t\t\treturn NewToken(OPERATOR, \"|\")\n\t\t\t}\n\t\tcase 11:\n\t\t\tswitch lookahead {\n\t\t\tcase `^`:\n\t\t\t\treturn NewToken(OPERATOR, \"^^\")\n\t\t\tcase `=`:\n\t\t\t\treturn NewToken(OPERATOR, \"^=\")\n\t\t\tdefault:\n\t\t\t\tl.PutBack(lookahead)\n\t\t\t\treturn NewToken(OPERATOR, \"^\")\n\t\t\t}\n\t\tcase 12:\n\t\t\tswitch lookahead {\n\t\t\tcase `=`:\n\t\t\t\treturn NewToken(OPERATOR, \"%=\")\n\t\t\tdefault:\n\t\t\t\tl.PutBack(lookahead)\n\t\t\t\treturn NewToken(OPERATOR, \"%\")\n\t\t\t}\n\t\t}\n\t}\n\n\tpanic(\"makeOp failed\")\n}\n\nfunc (l *Lexer) MakeNumber() *Token {\n\tstate := 0\n\ts := \"\"\n\tfor ; l.HasNext(); {\n\t\tlookahead := l.Peek()\n\t\tswitch state {\n\t\tcase 0:\n\t\t\tif \"0\" == lookahead {\n\t\t\t\tstate = 1\n\t\t\t} else if IsNumber(lookahead) {\n\t\t\t\tstate = 2\n\t\t\t} else if `+` == lookahead || `-` == lookahead {\n\t\t\t\tstate = 3\n\t\t\t} else if lookahead == `.` {\n\t\t\t\tstate = 5\n\t\t\t}\n\t\tcase 1:\n\t\t\tif lookahead == \"0\" {\n\t\t\t\tstate = 1\n\t\t\t} else if IsNumber(lookahead) {\n\t\t\t\tstate = 2\n\t\t\t} else if lookahead == \".\" {\n\t\t\t\tstate = 4\n\t\t\t} else {\n\t\t\t\treturn NewToken(INTEGER, s)\n\t\t\t}\n\t\tcase 2:\n\t\t\tif IsNumber(lookahead) {\n\t\t\t\tstate = 2\n\t\t\t} else if lookahead == \".\" {\n\t\t\t\tstate = 4\n\t\t\t} else {\n\t\t\t\treturn NewToken(INTEGER, s)\n\t\t\t}\n\t\tcase 3:\n\t\t\tif 
IsNumber(lookahead) {\n\t\t\t\tstate = 2\n\t\t\t} else if lookahead == \".\" {\n\t\t\t\tstate = 5\n\t\t\t} else {\n\t\t\t\tpanic(\"unexpected character \" + lookahead)\n\t\t\t}\n\t\tcase 4:\n\t\t\tif \".\" == lookahead {\n\t\t\t\tpanic(\"unexpected character \" + lookahead)\n\t\t\t} else if IsNumber(lookahead) {\n\t\t\t\tstate = 20\n\t\t\t} else {\n\t\t\t\treturn NewToken(FLOAT, s)\n\t\t\t}\n\t\tcase 5:\n\t\t\tif IsNumber(lookahead) {\n\t\t\t\tstate = 20\n\t\t\t} else {\n\t\t\t\tpanic(\"unexpected character \" + lookahead)\n\t\t\t}\n\t\tcase 20:\n\t\t\tif IsNumber(lookahead) {\n\t\t\t\tstate = 20\n\t\t\t} else if \".\" == lookahead {\n\t\t\t\tpanic(\"unexpected character \" + lookahead)\n\t\t\t} else {\n\t\t\t\treturn NewToken(FLOAT, s)\n\t\t\t}\n\t\t}\n\n\t\tl.Next()\n\t\ts += lookahead\n\t}\n\n\tpanic(\"makeNumber failed\")\n}\n"
  },
  {
    "path": "lexer/lexer_test.go",
    "content": "package lexer\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"regexp\"\n\t\"strings\"\n\t\"testing\"\n)\n\nfunc TestLexer_MakeVarOrKeyword(t *testing.T) {\n\tl := NewLexer(bytes.NewBufferString(\"if abc\"), \"$\")\n\ttoken := l.MakeVarOrKeyword()\n\ttoken2 := NewLexer(bytes.NewBufferString(\"true abc\"), EndToken).MakeVarOrKeyword()\n\n\tassert.Equal(t, token.Typ, KEYWORD)\n\tassert.Equal(t, token.Value, \"if\")\n\tassert.Equal(t, token2.Typ, BOOLEAN)\n\tassert.Equal(t, token2.Value, \"true\")\n\n\tl.Next()\n\ttoken3 := l.MakeVarOrKeyword()\n\tassert.Equal(t, token3.Typ, VARIABLE)\n\tassert.Equal(t, token3.Value, \"abc\")\n}\n\nfunc TestLexer_MakeString(t *testing.T) {\n\ttoken := NewLexer(bytes.NewBufferString(`\"123\"`), \"$\").MakeString()\n\tassert.Equal(t, token.Typ, STRING)\n\tassert.Equal(t, token.Value, `\"123\"`)\n}\n\nfunc TestLexer_MakeOp(t *testing.T) {\n\ttests := []string{\n\t\t\"+ xxx\",\n\t\t\"++mmm\",\n\t\t\"/=g\",\n\t\t\"==1\",\n\t\t\"&=3434\",\n\t\t\"&8888\",\n\t\t\"||xxxx\",\n\t\t\"^=111\",\n\t\t\"%79\",\n\t}\n\n\tfor _, test := range tests {\n\t\ttoken := NewLexer(bytes.NewBufferString(test), \"$\").MakeOp()\n\t\tassert.Equal(t, token.Typ, OPERATOR)\n\t}\n}\nfunc TestLexer_MakeNumber(t *testing.T) {\n\ttests := []string{\n\t\t\"+0 aa\",\n\t\t\"-0 aa\",\n\t\t\".3000 aa\",\n\t\t\".55 ww\",\n\t\t\"778.99 aa\",\n\t\t\"355 kkk\",\n\t\t\"-888*234aa\",\n\t}\n\n\tfor _, test := range tests {\n\t\ttoken := NewLexer(bytes.NewBufferString(test), \"$\").MakeNumber()\n\n\t\tvalue := regexp.MustCompile(\"[* ]+\").Split(token.Value, 1)[0]\n\t\t//t.Log(value)\n\t\tassert.Equal(t, token.Value, value)\n\t\tif strings.Contains(token.Value, \".\") {\n\t\t\tassert.Equal(t, token.Typ, FLOAT)\n\t\t} else {\n\t\t\tassert.Equal(t, token.Typ, INTEGER)\n\t\t}\n\t}\n}\n\nfunc TestLexer_Analyse(t *testing.T) {\n\tsource := `(w+c)^100.12==+100-30eee`\n\tlexer := NewLexer(bytes.NewBufferString(source), EndToken)\n\ttokens := 
lexer.Analyse()\n\tassert.Equal(t, len(tokens), 12)\n\tassert.Equal(t, tokens[0].Value, \"(\")\n\tassert.Equal(t, tokens[1].Value, \"w\")\n\tassert.Equal(t, tokens[2].Value, \"+\")\n\tassert.Equal(t, tokens[3].Value, \"c\")\n\tassert.Equal(t, tokens[4].Value, \")\")\n\tassert.Equal(t, tokens[5].Value, \"^\")\n\tassert.Equal(t, tokens[6].Value, \"100.12\")\n\tassert.Equal(t, tokens[7].Value, \"==\")\n\tassert.Equal(t, tokens[8].Value, \"+100\")\n\tassert.Equal(t, tokens[9].Value, \"-\")\n\tassert.Equal(t, tokens[10].Value, \"30\")\n\tassert.Equal(t, tokens[11].Value, \"eee\")\n}\n\nfunc Test_Function(t *testing.T) {\n\tsource := `func foo(a,b){ \n\t\tprint(a+b) \n\t\t}\n\t\tfoo(-100.0,100)`\n\n\tlexer := NewLexer(bytes.NewBufferString(source), EndToken)\n\ttokens := lexer.Analyse()\n\n\tassertToken(t, tokens[0], \"func\", KEYWORD)\n\tassertToken(t, tokens[1], \"foo\", VARIABLE)\n\tassertToken(t, tokens[2], \"(\", BRACKET)\n\tassertToken(t, tokens[3], \"a\", VARIABLE)\n\tassertToken(t, tokens[4], \",\", OPERATOR)\n\tassertToken(t, tokens[5], \"b\", VARIABLE)\n\tassertToken(t, tokens[6], \")\", BRACKET)\n\tassertToken(t, tokens[7], \"{\", BRACKET)\n\tassertToken(t, tokens[8], \"print\", VARIABLE)\n\tassertToken(t, tokens[9], \"(\", BRACKET)\n\tassertToken(t, tokens[10], \"a\", VARIABLE)\n\tassertToken(t, tokens[11], \"+\", OPERATOR)\n\tassertToken(t, tokens[12], \"b\", VARIABLE)\n\tassertToken(t, tokens[13], \")\", BRACKET)\n\tassertToken(t, tokens[14], \"}\", BRACKET)\n\tassertToken(t, tokens[15], \"foo\", VARIABLE)\n\tassertToken(t, tokens[16], \"(\", BRACKET)\n\tassertToken(t, tokens[17], \"-100.0\", FLOAT)\n\tassertToken(t, tokens[18], \",\", OPERATOR)\n\tassertToken(t, tokens[19], \"100\", INTEGER)\n\tassertToken(t, tokens[20], \")\", BRACKET)\n}\n\nfunc TestDeleteComment(t *testing.T) {\n\tsource := `/*12324abdfda\n\t\t\t\t34fa9kfjl*/a=1\n\t\t`\n\tlexer := NewLexer(bytes.NewBufferString(source), EndToken)\n\ttokens := lexer.Analyse()\n\tassert.Equal(t, 
len(tokens), 3)\n}\n\nfunc assertToken(t *testing.T, token *Token, wantValue string, wantType TokenType) {\n\tassert.Equal(t, token.Typ, wantType)\n\tassert.Equal(t, token.Value, wantValue)\n}\n\nfunc TestFromFile(t *testing.T) {\n\ttokens := FromFile(\"./../tests/function.ts\")\n\tassert.Equal(t, len(tokens), 16)\n}\n"
  },
  {
    "path": "lexer/token.go",
    "content": "package lexer\n\nimport \"fmt\"\n\ntype TokenType int\n\nconst (\n\tKEYWORD  TokenType = 1\n\tVARIABLE TokenType = 2\n\tOPERATOR TokenType = 3\n\tBRACKET  TokenType = 4\n\tSTRING   TokenType = 5\n\tFLOAT    TokenType = 6\n\tBOOLEAN  TokenType = 7\n\tINTEGER  TokenType = 8\n)\n\nfunc (tt TokenType) String() string {\n\tswitch tt {\n\n\tcase KEYWORD:\n\t\treturn \"keyword\"\n\tcase VARIABLE:\n\t\treturn \"variable\"\n\tcase OPERATOR:\n\t\treturn \"operator\"\n\tcase BRACKET:\n\t\treturn \"bracket\"\n\tcase STRING:\n\t\treturn \"string\"\n\tcase FLOAT:\n\t\treturn \"float\"\n\tcase BOOLEAN:\n\t\treturn \"boolean\"\n\tcase INTEGER:\n\t\treturn \"integer\"\n\t}\n\n\tpanic(\"unexpected token type\")\n}\n\ntype Token struct {\n\tTyp   TokenType\n\tValue string\n}\n\nfunc NewToken(t TokenType, v string) *Token {\n\treturn &Token{Typ: t, Value: v}\n}\n\nfunc (t *Token) IsVariable() bool {\n\treturn t.Typ == VARIABLE\n}\n\nfunc (t *Token) IsScalar() bool {\n\treturn t.Typ == FLOAT || t.Typ == BOOLEAN || t.Typ == INTEGER || t.Typ == STRING\n}\n\nfunc (t *Token) IsNumber() bool {\n\treturn t.Typ == INTEGER || t.Typ == FLOAT\n}\n\nfunc (t *Token) IsOperator() bool {\n\treturn t.Typ == OPERATOR\n}\n\nfunc (t *Token) String() string {\n\treturn fmt.Sprintf(\"type:%v,value:%s\", t.Typ, t.Value)\n}\n\nfunc (t *Token) IsValue() bool {\n\treturn t.IsVariable() || t.IsScalar()\n}\n\nfunc (t *Token) IsType() bool {\n\tswitch t.Value {\n\tcase \"bool\", \"int\", \"float\", \"void\", \"string\":\n\t\treturn true\n\t}\n\n\treturn false\n}\n"
  },
  {
    "path": "lexer/util/stream.go",
    "content": "package util\n\nimport (\n\t\"bufio\"\n\t\"container/list\"\n\t\"io\"\n)\n\ntype Stream struct {\n\tscanner    *bufio.Scanner\n\tqueueCache *list.List\n\tendToken   string\n\tisEnd      bool\n}\n\nfunc NewStream(r io.Reader, et string) *Stream {\n\ts := bufio.NewScanner(r)\n\ts.Split(bufio.ScanRunes)\n\treturn &Stream{scanner: s, queueCache: list.New(), endToken: et, isEnd: false}\n}\n\nfunc (s *Stream) Next() string {\n\tif s.queueCache.Len() != 0 {\n\t\te := s.queueCache.Front()\n\t\treturn s.queueCache.Remove(e).(string)\n\t}\n\n\tif s.scanner.Scan() {\n\t\treturn s.scanner.Text()\n\t}\n\n\ts.isEnd = true\n\n\treturn s.endToken\n}\n\nfunc (s *Stream) HasNext() bool {\n\tif s.queueCache.Len() != 0 {\n\t\treturn true\n\t}\n\n\tif s.scanner.Scan() {\n\t\ts.queueCache.PushBack(s.scanner.Text())\n\t\treturn true\n\t}\n\n\tif !s.isEnd {\n\t\treturn true\n\t}\n\n\treturn false\n}\n\nfunc (s *Stream) Peek() string {\n\tif s.queueCache.Len() != 0 {\n\t\treturn s.queueCache.Front().Value.(string)\n\t}\n\n\tif s.scanner.Scan() {\n\t\te := s.scanner.Text()\n\t\ts.queueCache.PushBack(e)\n\t\treturn e\n\t}\n\n\treturn s.endToken\n}\n\nfunc (s *Stream) PutBack(e string) {\n\ts.queueCache.PushFront(e)\n}\n"
  },
  {
    "path": "lexer/util/stream_test.go",
    "content": "package util\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n)\n\nfunc TestNewStream(t *testing.T) {\n\tstr := \"abcd\"\n\ts := NewStream(bytes.NewReader([]byte(str)), \"$\")\n\tassert.Equal(t, s.Next(), \"a\")\n\tassert.Equal(t, s.Next(), \"b\")\n\n\tassert.Equal(t, s.Peek(), \"c\")\n\tassert.Equal(t, s.Peek(), \"c\")\n\n\ts.PutBack(\"b\")\n\tassert.Equal(t, s.Peek(), \"b\")\n\tassert.Equal(t, s.Next(), \"b\")\n\tassert.Equal(t, s.Next(), \"c\")\n\n\tassert.Equal(t, s.HasNext(), true, \"hasnext failed\")\n\tassert.Equal(t, s.Next(), \"d\")\n\tassert.Equal(t, s.Next(), \"$\")\n\tassert.Equal(t, s.Next(), \"$\")\n\n\tassert.Equal(t, s.HasNext(), false, \"hasnext failed\")\n}\n"
  },
  {
    "path": "main.go",
    "content": "package main\n\nfunc main()  {\n\t\n}\n"
  },
  {
    "path": "parser/ast/ast.go",
    "content": "package ast\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n\t\"tinyscript/lexer\"\n)\n\ntype ASTNode interface {\n\t//get\n\tLexeme() *lexer.Token //ast节点对应的token是什么\n\tType() NodeType\n\tLabel() string //用字符串标识ast节点的含义，主要用于打印日志\n\tChildren() []ASTNode\n\tGetChild(uint) ASTNode\n\tParent() ASTNode\n\tPrint(indent int)\n\tTypeLexeme() *lexer.Token //标识变量的类型和函数的返回值类型；别的ast节点没有必要设置这个属性\n\tIsValueType() bool\n\tProp(string) interface{}\n\n\t//set\n\tAddChild(ASTNode)\n\tSetLexeme(*lexer.Token)\n\tSetTypeLexeme(*lexer.Token)\n\tSetType(NodeType)\n\tSetLabel(string)\n\tSetParent(ASTNode)\n\tSetProp(string, interface{})\n}\n\ntype node struct {\n\tparent     ASTNode\n\tchildren   []ASTNode\n\tlabel      string //备注（标签）\n\ttyp        NodeType\n\tlexeme     *lexer.Token           //词法单元\n\ttypeLexeme *lexer.Token           //func foo(int a); 这时typelexeme等于int型的token\n\tprop       map[string]interface{} //用于符号表，语法分析不会用到\n}\n\n//test\nvar _ ASTNode = &node{}\n\nfunc MakeNode() *node {\n\treturn &node{children: make([]ASTNode, 0), prop: make(map[string]interface{})}\n}\nfunc (n *node) Prop(key string) interface{} {\n\treturn n.prop[key]\n}\nfunc (n *node) SetProp(key string, value interface{}) {\n\tn.prop[key] = value\n}\nfunc (n *node) Lexeme() *lexer.Token {\n\treturn n.lexeme\n}\nfunc (n *node) TypeLexeme() *lexer.Token {\n\treturn n.typeLexeme\n}\nfunc (n *node) IsValueType() bool {\n\treturn n.typ == ASTNODE_TYPE_VARIABLE || n.typ == ASTNODE_TYPE_SCALAR\n}\nfunc (n *node) Type() NodeType {\n\treturn n.typ\n}\nfunc (n *node) Label() string {\n\treturn n.label\n}\nfunc (n *node) Children() []ASTNode {\n\treturn n.children\n}\nfunc (n *node) GetChild(index uint) ASTNode {\n\tif int(index) >= len(n.children) {\n\t\treturn nil\n\t}\n\treturn n.children[index]\n}\nfunc (n *node) Parent() ASTNode {\n\treturn n.parent\n}\nfunc (n *node) AddChild(node ASTNode) {\n\tnode.SetParent(n)\n\tn.children = append(n.children, node)\n}\nfunc (n *node) SetLexeme(lexeme *lexer.Token) 
{\n\tn.lexeme = lexeme\n}\nfunc (n *node) SetTypeLexeme(lexeme *lexer.Token) {\n\tn.typeLexeme = lexeme\n}\nfunc (n *node) SetType(t NodeType) {\n\tn.typ = t\n}\nfunc (n *node) SetLabel(str string) {\n\tn.label = str\n}\nfunc (n *node) SetParent(node ASTNode) {\n\tn.parent = node\n}\nfunc (n *node) Print(indent int) {\n\tfmt.Printf(\"%s%s\\n\", strings.Repeat(\"  \", indent*2), n.label)\n\tfor _, child := range n.children {\n\t\tchild.Print(indent + 2)\n\t}\n}\n"
  },
  {
    "path": "parser/ast/block.go",
    "content": "package ast\n\nvar DefaultBlock ASTNode = MakeBlock()\n\ntype Block struct {\n\t*Stmt\n}\n\nfunc MakeBlock() *Block {\n\tb := &Block{MakeStmt()}\n\tb.SetType(ASTNODE_TYPE_BLOCK)\n\tb.SetLabel(\"block\")\n\treturn b\n}\n\nfunc BlockParse(stream *PeekTokenStream) ASTNode {\n\tstream.NextMatch(\"{\")\n\tblock := MakeBlock()\n\tfor stmt := StmtParse(stream); nil != stmt; {\n\t\tblock.AddChild(stmt)\n\t\tstmt = StmtParse(stream)\n\t}\n\tstream.NextMatch(\"}\")\n\n\treturn block\n}\n"
  },
  {
    "path": "parser/ast/expr.go",
    "content": "package ast\n\nimport (\n\t\"tinyscript/lexer\"\n)\n\ntype Expr struct {\n\t*node\n}\n\nfunc MakeExpr() *Expr {\n\te := &Expr{MakeNode()}\n\treturn e\n}\n\nfunc NewExpr(typ NodeType, token *lexer.Token) *Expr {\n\texpr := MakeExpr()\n\texpr.SetType(typ)\n\texpr.SetLexeme(token)\n\texpr.SetLabel(token.Value)\n\treturn expr\n}\n\ntype ExprHOF func() ASTNode\n\n//left: E(k) -> E(k) op(k) E(k+1) | E(k+1)\n//right:\n//\t\tE(k) -> E(k+1) E_(k)\n//\t\tE_(k) -> op(k) E(k+1) E_(k) | ⍷\n// 最高优先级：\n// \t\tE(t) -> F E_(k) | U E_(k)\n//\t\tE_(t) -> op(t) E(t) E_(t) | ⍷\n\nfunc E(stream *PeekTokenStream, k int) ASTNode {\n\tif k < PriorityTable.Size()-1 {\n\t\treturn combine(\n\t\t\tstream,\n\t\t\tfunc() ASTNode {\n\t\t\t\treturn E(stream, k+1)\n\t\t\t},\n\t\t\tfunc() ASTNode {\n\t\t\t\treturn E_(stream, k)\n\t\t\t},\n\t\t)\n\t}\n\n\treturn race(\n\t\tstream,\n\t\tfunc() ASTNode {\n\t\t\treturn combine(\n\t\t\t\tstream,\n\t\t\t\tfunc() ASTNode {\n\t\t\t\t\treturn F(stream)\n\t\t\t\t},\n\t\t\t\tfunc() ASTNode {\n\t\t\t\t\treturn E_(stream, k)\n\t\t\t\t},\n\t\t\t)\n\t\t},\n\t\tfunc() ASTNode {\n\t\t\treturn combine(\n\t\t\t\tstream,\n\t\t\t\tfunc() ASTNode {\n\t\t\t\t\treturn U(stream)\n\t\t\t\t},\n\t\t\t\tfunc() ASTNode {\n\t\t\t\t\treturn E_(stream, k)\n\t\t\t\t},\n\t\t\t)\n\t\t},\n\t)\n}\n\nfunc U(stream *PeekTokenStream) ASTNode {\n\ttoken := stream.Peek()\n\tvalue := token.Value\n\tif value == \"(\" {\n\t\tstream.NextMatch(\"(\")\n\t\texpr := E(stream, 0)\n\t\tstream.NextMatch(\")\")\n\t\treturn expr\n\t} else if value == \"++\" || value == \"--\" || value == \"!\" {\n\t\tt := stream.Peek()\n\t\tstream.NextMatch(value)\n\t\tunaryExpr := NewExpr(ASTNODE_TYPE_UNARY_EXPR, t)\n\t\tunaryExpr.AddChild(E(stream, 0))\n\t\treturn unaryExpr\n\t}\n\n\treturn nil\n}\n\nfunc F(stream *PeekTokenStream) ASTNode {\n\tfactor := FactorParse(stream)\n\tif nil == factor {\n\t\treturn nil\n\t}\n\n\tif stream.HasNext() && stream.Peek().Value == \"(\" {\n\t\treturn 
CallExprParse(factor, stream)\n\t}\n\n\treturn factor\n}\n\nfunc E_(stream *PeekTokenStream, k int) ASTNode {\n\ttoken := stream.Peek()\n\tvalue := token.Value\n\tif PriorityTable.IsContain(k, value) {\n\t\texpr := NewExpr(ASTNODE_TYPE_BINARY_EXPR, stream.NextMatch(value))\n\t\texpr.AddChild(\n\t\t\tcombine(\n\t\t\t\tstream,\n\t\t\t\tfunc() ASTNode {\n\t\t\t\t\treturn E(stream, k+1)\n\t\t\t\t},\n\t\t\t\tfunc() ASTNode {\n\t\t\t\t\treturn E_(stream, k)\n\t\t\t\t},\n\t\t\t),\n\t\t)\n\t\treturn expr\n\t}\n\n\treturn nil\n}\n\nfunc race(stream *PeekTokenStream, af ExprHOF, bf ExprHOF) ASTNode {\n\tif !stream.HasNext() {\n\t\treturn nil\n\t}\n\n\ta := af()\n\tif nil != a {\n\t\treturn a\n\t}\n\n\treturn bf()\n}\n\nfunc combine(stream *PeekTokenStream, af ExprHOF, bf ExprHOF) ASTNode {\n\ta := af()\n\tif nil == a {\n\t\tif stream.HasNext() {\n\t\t\treturn bf()\n\t\t}\n\t\treturn nil\n\t}\n\n\tvar b ASTNode = nil\n\tif stream.HasNext() {\n\t\tb = bf()\n\t\tif nil == b {\n\t\t\treturn a\n\t\t}\n\t} else {\n\t\treturn a\n\t}\n\n\texpr := NewExpr(ASTNODE_TYPE_BINARY_EXPR, b.Lexeme())\n\texpr.AddChild(a)\n\texpr.AddChild(b.GetChild(0))\n\n\treturn expr\n}\n\nfunc ExprParse(stream *PeekTokenStream) ASTNode {\n\treturn E(stream, 0)\n}\n"
  },
  {
    "path": "parser/ast/expr_call.go",
    "content": "package ast\n\nvar _ ASTNode = &CallExpr{}\n\ntype CallExpr struct {\n\t*node\n}\n\nfunc MakeCallExpr() *CallExpr {\n\te := &CallExpr{MakeNode()}\n\te.SetType(ASTNODE_TYPE_CALL_EXPR)\n\te.SetLabel(\"call\")\n\treturn e\n}\n\nfunc CallExprParse(factor ASTNode, stream *PeekTokenStream) ASTNode {\n\texpr := MakeCallExpr()\n\texpr.AddChild(factor)\n\tstream.NextMatch(\"(\")\n\tfor p := ExprParse(stream); p != nil; p = ExprParse(stream) {\n\t\texpr.AddChild(p)\n\t\tif stream.Peek().Value != \")\" {\n\t\t\tstream.NextMatch(\",\")\n\t\t}\n\t}\n\n\tstream.NextMatch(\")\")\n\treturn expr\n}\n"
  },
  {
    "path": "parser/ast/factor.go",
    "content": "package ast\n\nimport (\n\t\"tinyscript/lexer\"\n)\n\nvar _ ASTNode = &Factor{}\n\ntype Factor struct {\n\t*node\n}\n\nfunc MakeFactor() *Factor {\n\treturn &Factor{MakeNode()}\n}\n\nfunc NewFactor(stream *PeekTokenStream) *Factor {\n\tfactor := &Factor{MakeNode()}\n\ttoken := stream.Next()\n\tfactor.SetLexeme(token)\n\tfactor.SetLabel(token.Value)\n\n\tif lexer.VARIABLE == token.Typ {\n\t\tfactor.SetType(ASTNODE_TYPE_VARIABLE)\n\t} else {\n\t\tfactor.SetType(ASTNODE_TYPE_SCALAR)\n\t}\n\n\treturn factor\n}\n\nfunc FactorParse(stream *PeekTokenStream) ASTNode {\n\ttoken := stream.Peek()\n\ttyp := token.Typ\n\tif lexer.VARIABLE == typ {\n\t\tstream.Next()\n\t\tv := MakeVariable()\n\t\tv.SetLabel(token.Value)\n\t\tv.SetLexeme(token)\n\t\treturn v\n\t} else if token.IsScalar() {\n\t\tstream.Next()\n\t\tscalar := MakeScalar()\n\t\tscalar.SetLabel(token.Value)\n\t\tscalar.SetLexeme(token)\n\t\treturn scalar\n\t}\n\treturn nil\n}\n"
  },
  {
    "path": "parser/ast/func_args.go",
    "content": "package ast\n\nvar _ ASTNode = &Factor{}\n\ntype FuncArgs struct {\n\t*node\n}\n\nfunc MakeFuncArgs() *FuncArgs {\n\ts := &FuncArgs{MakeNode()}\n\ts.SetLabel(\"args\")\n\treturn s\n}\n\nfunc FuncArgsParse(stream *PeekTokenStream) ASTNode {\n\targs := MakeFuncArgs()\n\tfor stream.Peek().IsType() {\n\t\ttyp := stream.Next()\n\t\tv := FactorParse(stream)\n\t\tv.SetTypeLexeme(typ) //为语义分析做准备，设置参数变量的类型\n\t\targs.AddChild(v)\n\t\tif stream.Peek().Value != \")\" {\n\t\t\tstream.NextMatch(\",\")\n\t\t}\n\t}\n\n\treturn args\n}\n"
  },
  {
    "path": "parser/ast/priority_table.go",
    "content": "package ast\n\nvar PriorityTable = NewPriorityTable()\n\ntype priorityTable struct {\n\ttable [][]string\n}\n\nfunc NewPriorityTable() *priorityTable {\n\treturn &priorityTable{[][]string{\n\t\t[]string{\"&\", \"|\", \"^\"},\n\t\t[]string{\"==\", \"!=\", \">\", \"<\", \">=\", \"<=\"},\n\t\t[]string{\"+\", \"-\"},\n\t\t[]string{\"*\", \"/\"},\n\t\t[]string{\"<<\", \">>\"},\n\t}}\n}\n\nfunc (pt *priorityTable) Size() int {\n\treturn len(pt.table)\n}\nfunc (pt *priorityTable) Get(level int) []string {\n\treturn pt.table[level]\n}\nfunc (pt *priorityTable) IsContain(level int, key string) bool {\n\tstrs := pt.Get(level)\n\tfor _, str := range strs {\n\t\tif str == key {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}\n"
  },
  {
    "path": "parser/ast/program.go",
    "content": "package ast\n\nvar _ ASTNode = &Block{}\n\ntype Program struct {\n\t*Block\n}\n\nfunc MakeProgram() *Program {\n\tb := &Program{MakeBlock()}\n\tb.SetLabel(\"program\")\n\treturn b\n}\n\nfunc ProgramParse(stream *PeekTokenStream) ASTNode {\n\tp := MakeProgram()\n\tfor stmt := StmtParse(stream); nil != stmt; {\n\t\tp.AddChild(stmt)\n\t\tstmt = StmtParse(stream)\n\t}\n\n\treturn p\n}\n"
  },
  {
    "path": "parser/ast/scalar.go",
    "content": "package ast\n\nvar _ ASTNode = &Factor{}\n\ntype Scalar struct {\n\t*Factor\n}\n\nfunc NewScalar(stream *PeekTokenStream) *Scalar {\n\treturn &Scalar{NewFactor(stream)}\n}\n\nfunc MakeScalar() *Scalar {\n\ts := &Scalar{MakeFactor()}\n\ts.SetType(ASTNODE_TYPE_SCALAR)\n\treturn s\n}\n"
  },
  {
    "path": "parser/ast/stmt.go",
    "content": "package ast\n\nvar DefaultStmt ASTNode = MakeStmt()\n\ntype Stmt struct {\n\t*node\n}\n\nfunc MakeStmt() *Stmt {\n\ts := &Stmt{MakeNode()}\n\treturn s\n}\n\nfunc StmtParse(stream *PeekTokenStream) ASTNode {\n\tif !stream.HasNext(){\n\t\treturn nil\n\t}\n\n\ttoken := stream.Next()\n\tlookahead := stream.Peek()\n\tstream.PutBack(1)\n\n\tif token.IsVariable() && lookahead != nil && lookahead.Value == \"=\" {\n\t\treturn AssignStmtParse(stream)\n\t} else if token.Value == \"var\" {\n\t\treturn DeclareStmtParse(stream)\n\t} else if token.Value == \"func\" {\n\t\treturn FuncDeclareStmtParse(stream)\n\t} else if token.Value == \"return\" {\n\t\treturn ReturnStmtParse(stream)\n\t} else if token.Value == \"if\" {\n\t\treturn IfStmtParse(stream)\n\t} else if token.Value == \"{\" {\n\t\treturn BlockParse(stream)\n\t} else {\n\t\treturn ExprParse(stream)\n\t}\n}\n"
  },
  {
    "path": "parser/ast/stmt_assign.go",
    "content": "package ast\n\nvar DefaultAssignStmt ASTNode = MakeAssignStmt()\n\ntype AssignStmt struct {\n\t*Stmt\n}\n\nfunc MakeAssignStmt() *AssignStmt {\n\tv := &AssignStmt{MakeStmt()}\n\tv.SetType(ASTNODE_TYPE_ASSIGN_STMT)\n\tv.SetLabel(\"assign_stmt\")\n\treturn v\n}\n\nfunc AssignStmtParse(stream *PeekTokenStream) ASTNode {\n\tstmt := MakeAssignStmt()\n\t//stmt.SetParent(parent)\n\ttkn := stream.Peek()\n\tfactor := FactorParse(stream)\n\tif nil == factor {\n\t\tpanic(\"syntax error:\" + tkn.String())\n\t}\n\tstmt.AddChild(factor)\n\tlexeme := stream.NextMatch(\"=\")\n\tstmt.SetLexeme(lexeme)\n\texpr := ExprParse(stream)\n\tstmt.AddChild(expr)\n\n\treturn stmt\n}\n"
  },
  {
    "path": "parser/ast/stmt_assign_test.go",
    "content": "package ast\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/lexer\"\n)\n\nfunc TestAssignStmtParse(t *testing.T) {\n\tsrc := \"i = 100*2\"\n\ttokens := lexer.NewLexer(bytes.NewBufferString(src), lexer.EndToken).Analyse()\n\tstream := NewPeekTokenStream(tokens)\n\tstmt := AssignStmtParse(stream)\n\tassert.Equal(t, ToPostfixExpr(stmt), \"i 100 2 * =\")\n}\n"
  },
  {
    "path": "parser/ast/stmt_declare.go",
    "content": "package ast\n\nvar DefaultDeclareStmt ASTNode = MakeDeclareStmt()\n\ntype DeclareStmt struct {\n\t*Stmt\n}\n\nfunc NewDeclareStmt() *DeclareStmt {\n\td := MakeDeclareStmt()\n\n\treturn d\n}\n\nfunc MakeDeclareStmt() *DeclareStmt {\n\tv := &DeclareStmt{MakeStmt()}\n\tv.SetType(ASTNODE_TYPE_DECLARE_STMT)\n\tv.SetLabel(\"declare_stmt\")\n\treturn v\n}\n\nfunc DeclareStmtParse(stream *PeekTokenStream) ASTNode {\n\tstmt := NewDeclareStmt()\n\tstream.NextMatch(\"var\")\n\ttkn := stream.Peek()\n\tfactor := FactorParse(stream)\n\tif nil == factor {\n\t\tpanic(\"syntax error:\" + tkn.String())\n\t}\n\tstmt.AddChild(factor)\n\tlexeme := stream.NextMatch(\"=\")\n\tstmt.SetLexeme(lexeme)\n\texpr := ExprParse(stream)\n\tstmt.AddChild(expr)\n\n\treturn stmt\n}\n"
  },
  {
    "path": "parser/ast/stmt_declare_test.go",
    "content": "package ast\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/lexer\"\n)\n\nfunc TestDeclareStmtParse(t *testing.T) {\n\tsrc := \"var i = 100*2\"\n\ttokens := lexer.NewLexer(bytes.NewBufferString(src), lexer.EndToken).Analyse()\n\tstream := NewPeekTokenStream(tokens)\n\tstmt := DeclareStmtParse(stream)\n\tassert.Equal(t, ToPostfixExpr(stmt), \"i 100 2 * =\")\n}\n"
  },
  {
    "path": "parser/ast/stmt_for.go",
    "content": "package ast\n\nvar _ ASTNode = MakeForStmt()\n\ntype ForStmt struct {\n\t*Stmt\n}\n\nfunc MakeForStmt() *ForStmt {\n\tv := &ForStmt{MakeStmt()}\n\tv.SetType(ASTNODE_TYPE_FOR_STMT)\n\tv.SetLabel(\"for\")\n\treturn v\n}\n"
  },
  {
    "path": "parser/ast/stmt_func_declare.go",
    "content": "package ast\n\nvar _ ASTNode = MakeFuncDeclareStmt()\n\ntype FuncDeclareStmt struct {\n\t*Stmt\n}\n\nfunc MakeFuncDeclareStmt() *FuncDeclareStmt {\n\tv := &FuncDeclareStmt{MakeStmt()}\n\tv.SetType(ASTNODE_TYPE_FUNCTION_DECLARE_STMT)\n\tv.SetLabel(\"func\")\n\treturn v\n}\n\nfunc FuncDeclareStmtParse(stream *PeekTokenStream) *FuncDeclareStmt {\n\tstream.NextMatch(\"func\")\n\t//func add() int {}\n\tfn := MakeFuncDeclareStmt()\n\tlexeme := stream.Peek()\n\tfnV := FactorParse(stream) //函数名ast节点\n\tfn.SetLexeme(lexeme)       //函数名token作为这个ast节点的lexeme\n\tfn.AddChild(fnV)\n\n\tstream.NextMatch(\"(\")\n\targs := FuncArgsParse(stream)\n\tstream.NextMatch(\")\")\n\tfn.AddChild(args)\n\n\tkeyword := stream.Next()\n\tif !keyword.IsType() {\n\t\tpanic(\"syntax error: unexpected \" + keyword.Value)\n\t}\n\n\tfnV.SetTypeLexeme(keyword) //函数名ast节点的类型，即：函数返回值类型，用token表示\n\tblock := BlockParse(stream)\n\tfn.AddChild(block)\n\n\treturn fn\n}\n\nfunc (f *FuncDeclareStmt) FuncVariable() ASTNode {\n\treturn f.GetChild(0)\n}\nfunc (f *FuncDeclareStmt) Args() ASTNode {\n\treturn f.GetChild(1)\n}\nfunc (f *FuncDeclareStmt) FuncType() string {\n\treturn f.FuncVariable().TypeLexeme().Value\n}\nfunc (f *FuncDeclareStmt) Block() ASTNode {\n\treturn f.GetChild(2)\n}\n"
  },
  {
    "path": "parser/ast/stmt_func_declare_test.go",
    "content": "package ast\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/lexer\"\n)\n\nfunc TestFuncDeclareStmtParse(t *testing.T) {\n\tstream := NewPeekTokenStream(lexer.FromFile(\"./../../tests/function.ts\"))\n\tstmt := StmtParse(stream).(*FuncDeclareStmt)\n\targs := stmt.Args()\n\tassert.Equal(t, args.GetChild(0).Lexeme().Value, \"a\")\n\tassert.Equal(t, args.GetChild(1).Lexeme().Value, \"b\")\n\n\ttyp := stmt.FuncType()\n\tassert.Equal(t, typ, \"int\")\n\n\tfuncVariable := stmt.FuncVariable()\n\tassert.Equal(t, funcVariable.Lexeme().Value, \"add\")\n\n\tblock := stmt.Block()\n\tassert.Equal(t, block.GetChild(0).Lexeme().Value, \"return\")\n}\n\nfunc TestFunctionRecursion(t *testing.T) {\n\tstream := NewPeekTokenStream(lexer.FromFile(\"./../../tests/recursion.ts\"))\n\tstmt := StmtParse(stream).(*FuncDeclareStmt)\n\tassert.Equal(t, ToBFSString(stmt, 4), \"func fact args block\")\n\tassert.Equal(t, ToBFSString(stmt.Args(), 2), \"args n\")\n\tassert.Equal(t, ToBFSString(stmt.Block(), 3), \"block if return\")\n}\n"
  },
  {
    "path": "parser/ast/stmt_if.go",
    "content": "package ast\n\nvar _ ASTNode = MakeIfStmt()\n\ntype IfStmt struct {\n\t*Stmt\n}\n\nfunc MakeIfStmt() *IfStmt {\n\tv := &IfStmt{MakeStmt()}\n\tv.SetType(ASTNODE_TYPE_IF_STMT)\n\tv.SetLabel(\"if\")\n\treturn v\n}\n\nfunc IfStmtParse(stream *PeekTokenStream) ASTNode {\n\treturn IfParse(stream)\n}\n\n//IfStmt -> If(Expr) Block Tail\nfunc IfParse(stream *PeekTokenStream) ASTNode {\n\tlexeme := stream.NextMatch(\"if\")\n\tstream.NextMatch(\"(\")\n\tifStmt := MakeIfStmt()\n\tifStmt.SetLexeme(lexeme)\n\n\te := ExprParse(stream)\n\tifStmt.AddChild(e)\n\tstream.NextMatch(\")\")\n\n\tblock := BlockParse(stream)\n\tifStmt.AddChild(block)\n\n\ttail := TailParse(stream)\n\tif tail != nil {\n\t\tifStmt.AddChild(tail)\n\t}\n\n\treturn ifStmt\n}\n\n//Tail -> else {Block} | else IfStmt | ⍷\nfunc TailParse(stream *PeekTokenStream) ASTNode {\n\tif !stream.HasNext() || stream.Peek().Value != \"else\" {\n\t\treturn nil\n\t}\n\tstream.NextMatch(\"else\")\n\tlookahead := stream.Peek()\n\n\tif lookahead.Value == \"{\" {\n\t\treturn BlockParse(stream)\n\t} else if lookahead.Value == \"if\" {\n\t\treturn IfParse(stream)\n\t}\n\n\treturn nil\n}\n\nfunc (i *IfStmt) GetExpr() ASTNode {\n\treturn i.GetChild(0)\n}\n\nfunc (i *IfStmt) GetBlock() ASTNode {\n\treturn i.GetChild(1)\n}\nfunc (i *IfStmt) GetElseBlock() ASTNode {\n\tblock := i.GetChild(2)\n\tif block != nil && block.Type() == ASTNODE_TYPE_BLOCK {\n\t\treturn block\n\t}\n\n\treturn nil\n}\nfunc (i *IfStmt) GetElseIfStmt() ASTNode {\n\tifStmt := i.GetChild(2)\n\tif ifStmt != nil && ifStmt.Type() == ASTNODE_TYPE_IF_STMT {\n\t\treturn ifStmt\n\t}\n\treturn nil\n}\n"
  },
  {
    "path": "parser/ast/stmt_if_test.go",
    "content": "package ast\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/lexer\"\n)\n\nfunc TestIfStmtParse(t *testing.T) {\n\tstream := createTokenStream(`if(a){\na = 1\n}`)\n\tstmt := IfStmtParse(stream)\n\te := stmt.GetChild(0)\n\tblock := stmt.GetChild(1)\n\tassignStmt := block.GetChild(0)\n\n\tassert.Equal(t, e.Lexeme().Value, \"a\")\n\tassert.Equal(t, assignStmt.Lexeme().Value, \"=\")\n}\n\nfunc createTokenStream(src string) *PeekTokenStream {\n\ttokens := lexer.NewLexer(bytes.NewBufferString(src), lexer.EndToken).Analyse()\n\tstream := NewPeekTokenStream(tokens)\n\treturn stream\n}\n\nfunc TestIfElseStmtParse(t *testing.T) {\n\tstream := createTokenStream(`if(a){\na = 1\n}else{\na = 2\na = a * 3\n}`)\n\n\tstmt := IfStmtParse(stream)\n\texpr := stmt.GetChild(0)\n\tblock := stmt.GetChild(1)\n\tassignStmt := block.GetChild(0)\n\telseBlock := stmt.GetChild(2)\n\tassignStmt2 := elseBlock.GetChild(0)\n\n\tassert.Equal(t, expr.Lexeme().Value, \"a\")\n\tassert.Equal(t, assignStmt.Lexeme().Value, \"=\")\n\tassert.Equal(t, assignStmt2.Lexeme().Value, \"=\")\n\tassert.Equal(t, len(elseBlock.Children()), 2)\n}\n"
  },
  {
    "path": "parser/ast/stmt_return.go",
    "content": "package ast\n\nvar _ ASTNode = &ReturnStmt{}\n\ntype ReturnStmt struct {\n\t*Stmt\n}\n\nfunc MakeReturnStmt() *ReturnStmt {\n\tv := &ReturnStmt{MakeStmt()}\n\tv.SetType(ASTNODE_TYPE_RETURN_STMT)\n\tv.SetLabel(\"return\")\n\treturn v\n}\n\nfunc ReturnStmtParse(stream *PeekTokenStream) ASTNode {\n\tvar lexeme = stream.NextMatch(\"return\")\n\tvar expr = ExprParse(stream)\n\n\tvar stmt = MakeReturnStmt()\n\tstmt.SetLexeme(lexeme)\n\tif expr != nil {\n\t\tstmt.AddChild(expr)\n\t}\n\n\treturn stmt\n}\n"
  },
  {
    "path": "parser/ast/stream.go",
    "content": "package ast\n\nimport (\n\t\"fmt\"\n\t\"tinyscript/lexer\"\n)\n\ntype PeekTokenStream struct {\n\ttokens  []*lexer.Token //TODO 保存lexer生成的tokens，更加好的方式不是全部存储，这样内存消耗会比较大\n\tcurrent int\n}\n\nfunc NewPeekTokenStream(tokens []*lexer.Token) *PeekTokenStream {\n\treturn &PeekTokenStream{tokens: tokens}\n}\n\nfunc (pt *PeekTokenStream) Next() *lexer.Token {\n\tif pt.current >= len(pt.tokens) {\n\t\treturn nil\n\t}\n\tt := pt.tokens[pt.current]\n\tpt.current++\n\treturn t\n}\n\nfunc (pt *PeekTokenStream) HasNext() bool {\n\tif pt.current >= len(pt.tokens) {\n\t\treturn false\n\t}\n\treturn true\n}\n\nfunc (pt *PeekTokenStream) Peek() *lexer.Token {\n\tt := pt.Next()\n\tif nil == t {\n\t\treturn nil\n\t}\n\n\tpt.current -= 1\n\treturn t\n}\n\n//参数：n 表示退回多少个token\nfunc (pt *PeekTokenStream) PutBack(n int) {\n\tif pt.current-n < 0 {\n\t\tpanic(\"putback parameter is invalid\")\n\t}\n\tpt.current -= n //必须+1，因为初始化时current就指向第一个元素\n}\n\n//下一个token的value匹配实参字符的话，返回这个token，否则panic\nfunc (pt *PeekTokenStream) NextMatch(value string) *lexer.Token {\n\ttoken := pt.Next()\n\tif token.Value != value {\n\t\tpanic(fmt.Sprintf(\"syntax err: want value:%s,got %s\", value, token.Value))\n\t}\n\treturn token\n}\n\n//下一个token匹配实参类型的话，返回下一个token\nfunc (pt *PeekTokenStream) NextMatchType(typ lexer.TokenType) *lexer.Token {\n\ttoken := pt.Next()\n\tif token.Typ != typ {\n\t\tpanic(fmt.Sprintf(\"syntax err: want type: %s,got %s\", typ, token.Value))\n\t}\n\treturn token\n}\n"
  },
  {
    "path": "parser/ast/stream_test.go",
    "content": "package ast\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\tlexer2 \"tinyscript/lexer\"\n)\n\nfunc TestNewPeekTokenStream(t *testing.T) {\n\ttokens := lexer2.NewLexer(bytes.NewBufferString(\"a+b*c\"), lexer2.EndToken).Analyse()\n\tpeekts := NewPeekTokenStream(tokens)\n\tassert.Equal(t, peekts.HasNext(), true)\n\tassertToken(t, peekts.Next(), \"a\", lexer2.VARIABLE)\n\tassertToken(t, peekts.Next(), \"+\", lexer2.OPERATOR)\n\tassertToken(t, peekts.Peek(), \"b\", lexer2.VARIABLE)\n\tassertToken(t, peekts.Next(), \"b\", lexer2.VARIABLE)\n\tpeekts.PutBack(3)\n\tassertToken(t, peekts.Peek(), \"a\", lexer2.VARIABLE)\n\tassertToken(t, peekts.Next(), \"a\", lexer2.VARIABLE)\n}\n\nfunc assertToken(t *testing.T, token *lexer2.Token, wantValue string, wantType lexer2.TokenType) {\n\tassert.Equal(t, token.Typ, wantType, \"err detail:\"+token.String())\n\tassert.Equal(t, token.Value, wantValue, \"err detail:\"+token.String())\n}\n"
  },
  {
    "path": "parser/ast/type.go",
    "content": "package ast\n\ntype NodeType int\n\nconst (\n\tASTNODE_TYPE_BLOCK NodeType = iota\n\n\tASTNODE_TYPE_BINARY_EXPR // 1+1\n\tASTNODE_TYPE_UNARY_EXPR  //++1\n\tASTNODE_TYPE_CALL_EXPR\n\n\tASTNODE_TYPE_VARIABLE\n\tASTNODE_TYPE_SCALAR // 1.0 true\n\n\tASTNODE_TYPE_IF_STMT\n\tASTNODE_TYPE_WHILE_STMT\n\tASTNODE_TYPE_FOR_STMT\n\tASTNODE_TYPE_RETURN_STMT\n\tASTNODE_TYPE_ASSIGN_STMT\n\tASTNODE_TYPE_FUNCTION_DECLARE_STMT\n\tASTNODE_TYPE_DECLARE_STMT\n)\n\nvar NodeTypeStringMap = map[NodeType]string{\n\tASTNODE_TYPE_BLOCK:                 \"block\",\n\tASTNODE_TYPE_ASSIGN_STMT:           \"assign_stmt\",\n\tASTNODE_TYPE_BINARY_EXPR:           \"binary_expr\",\n\tASTNODE_TYPE_UNARY_EXPR:            \"unary_expr\",\n\tASTNODE_TYPE_CALL_EXPR:             \"call_expr\",\n\tASTNODE_TYPE_DECLARE_STMT:          \"declare_stmt\",\n\tASTNODE_TYPE_FOR_STMT:              \"for_stmt\",\n\tASTNODE_TYPE_FUNCTION_DECLARE_STMT: \"function_declare_stmt\",\n\tASTNODE_TYPE_IF_STMT:               \"if_stmt\",\n\tASTNODE_TYPE_RETURN_STMT:           \"return_stmt\",\n\tASTNODE_TYPE_SCALAR:                \"scalar\",\n\tASTNODE_TYPE_VARIABLE:              \"variable\",\n\tASTNODE_TYPE_WHILE_STMT:            \"while_stmt\",\n}\n\nfunc (nt NodeType) String() string {\n\treturn NodeTypeStringMap[nt]\n}\n"
  },
  {
    "path": "parser/ast/util.go",
    "content": "package ast\n\nimport (\n\t\"container/list\"\n\t\"strings\"\n)\n\nfunc ToPostfixExpr(node ASTNode) string {\n\tif node.Type() == ASTNODE_TYPE_SCALAR || node.Type() == ASTNODE_TYPE_VARIABLE {\n\t\treturn node.Lexeme().Value\n\t}\n\n\tarr := []string{}\n\tfor _, child := range node.Children() {\n\t\tarr = append(arr, ToPostfixExpr(child))\n\t}\n\tstr := \"\"\n\tif nil != node.Lexeme() {\n\t\tstr = node.Lexeme().Value\n\t}\n\n\tif len(str) > 0 {\n\t\treturn strings.Join(arr, \" \") + \" \" + str\n\t}\n\treturn strings.Join(arr, \" \")\n\n\t//left := \"\"\n\t//right := \"\"\n\t//\n\t//switch node.Type() {\n\t//case ASTNODE_TYPE_BINARY_EXPR:\n\t//\tleft = ToPostfixExpr(node.GetChild(0))\n\t//\tright = ToPostfixExpr(node.GetChild(1))\n\t//\treturn left + \" \" + right + \" \" + node.Lexeme().Value\n\t//case ASTNODE_TYPE_SCALAR, ASTNODE_TYPE_VARIABLE:\n\t//\treturn node.Lexeme().Value\n\t//}\n\t//\n\t//panic(\"ToPostfixExpr failed\")\n}\n\nfunc ToBFSString(node ASTNode, max int) string {\n\tl := list.New()\n\tl.PushBack(node)\n\tstrs := []string{}\n\tfor e, i := l.Front(), 0; nil != e && i < max; e = l.Front() {\n\t\ti += 1\n\t\tparent := l.Remove(e).(ASTNode)\n\t\tstrs = append(strs, parent.Label())\n\n\t\tfor _, child := range parent.Children() {\n\t\t\tl.PushBack(child)\n\t\t}\n\t}\n\n\treturn strings.Join(strs, \" \")\n}\n"
  },
  {
    "path": "parser/ast/variable.go",
    "content": "package ast\n\nvar _ ASTNode = &Variable{}\n\ntype Variable struct {\n\t*Factor\n}\n\nfunc NewVariable(stream *PeekTokenStream) *Variable {\n\treturn &Variable{NewFactor(stream)}\n}\n\nfunc MakeVariable() *Variable {\n\tv := &Variable{MakeFactor()}\n\tv.SetType(ASTNODE_TYPE_VARIABLE)\n\treturn v\n}\n"
  },
  {
    "path": "parser/parser.go",
    "content": "package parser\n\nimport (\n\t\"tinyscript/lexer\"\n\t\"tinyscript/parser/ast\"\n)\n\ntype Parser struct {\n\tstream *ast.PeekTokenStream\n}\n\nfunc Parse(source string) ast.ASTNode {\n\treturn NewParser(lexer.Analyse(source)).parse()\n}\n\nfunc ParseFromFile(file string) ast.ASTNode {\n\ttokens := lexer.FromFile(file)\n\treturn NewParser(tokens).parse()\n}\n\nfunc NewParser(tokens []*lexer.Token) *Parser {\n\treturn &Parser{stream: ast.NewPeekTokenStream(tokens)}\n}\n\nfunc (p *Parser) parse() ast.ASTNode {\n\treturn ast.ProgramParse(p.stream)\n}\n\n//Expr -> digit + Expr | digit\n//digit -> 0|1|2|....|9\nfunc (p *Parser) SimpleParse() ast.ASTNode {\n\texpr := ast.MakeExpr()\n\tscalar := ast.NewScalar(p.stream)\n\n\tif !p.stream.HasNext() {\n\t\treturn scalar\n\t}\n\n\texpr.SetLexeme(p.stream.Peek())\n\tp.stream.NextMatch(\"+\")\n\texpr.SetLabel(\"+\")\n\texpr.SetType(ast.ASTNODE_TYPE_BINARY_EXPR)\n\texpr.AddChild(scalar)\n\trightNode := p.SimpleParse()\n\texpr.AddChild(rightNode)\n\n\treturn expr\n}\n"
  },
  {
    "path": "parser/parser_test.go",
    "content": "package parser\n\nimport (\n\t\"bytes\"\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\tlexer \"tinyscript/lexer\"\n\t\"tinyscript/parser/ast\"\n)\n\nfunc TestParser_Parse(t *testing.T) {\n\tsource := \"1+2+3+4\"\n\tparser := NewParser(lexer.NewLexer(bytes.NewBufferString(source), lexer.EndToken).Analyse())\n\texpr := parser.SimpleParse()\n\n\tassert.Equal(t, len(expr.Children()), 2)\n\n\tv1 := expr.GetChild(0)\n\tassert.Equal(t, v1.Lexeme().Value, \"1\")\n\tassert.Equal(t, expr.Lexeme().Value, \"+\")\n\n\te2 := expr.GetChild(1)\n\tv2 := e2.GetChild(0)\n\tassert.Equal(t, v2.Lexeme().Value, \"2\")\n\tassert.Equal(t, e2.Lexeme().Value, \"+\")\n\n\te3 := e2.GetChild(1)\n\tv3 := e3.GetChild(0)\n\tassert.Equal(t, v3.Lexeme().Value, \"3\")\n\tassert.Equal(t, e3.Lexeme().Value, \"+\")\n\n\tv4 := e3.GetChild(1)\n\tassert.Equal(t, v4.Lexeme().Value, \"4\")\n}\n\nfunc createExpr(src string) ast.ASTNode {\n\ttokens := lexer.NewLexer(bytes.NewBufferString(src), lexer.EndToken).Analyse()\n\tstream := ast.NewPeekTokenStream(tokens)\n\treturn ast.ExprParse(stream)\n}\n\nfunc TestSimple(t *testing.T) {\n\texpr := createExpr(\"1+1+1\")\n\tassert.Equal(t, ast.ToPostfixExpr(expr), \"1 1 1 + +\")\n}\n\nfunc TestSimple1(t *testing.T) {\n\texpr := createExpr(`\"123\" == \"\"`)\n\tassert.Equal(t, ast.ToPostfixExpr(expr), `\"123\" \"\" ==`)\n}\n\nfunc TestComplex(t *testing.T) {\n\texpr1 := createExpr(\"1+2*3\")\n\texpr2 := createExpr(\"1*2+3\")\n\te3 := createExpr(\"10 * (7+4)\")\n\te4 := createExpr(\"(1*2!=7)==3!=4*5+6\")\n\n\tassert.Equal(t, ast.ToPostfixExpr(expr1), \"1 2 3 * +\")\n\tassert.Equal(t, ast.ToPostfixExpr(expr2), \"1 2 * 3 +\")\n\tassert.Equal(t, ast.ToPostfixExpr(e3), \"10 7 4 + *\")\n\tassert.Equal(t, ast.ToPostfixExpr(e4), \"1 2 * 7 != 3 4 5 * 6 + != ==\")\n}\n"
  },
  {
    "path": "tests/add.ts",
    "content": "func add(int a, int b) int {\n    return a + b\n}\n\nfunc main()  void {\n    add(10, 20)\nreturn\n}\n"
  },
  {
    "path": "tests/complex-if.ts",
    "content": "if(a == 1) {\n    b = 100\n} else if(a == 2) {\n    b = 500\n} else if(a == 3) {\n    b = a * 1000\n} else {\n    b = -1\n}"
  },
  {
    "path": "tests/fact2.ts",
    "content": "func fact(int n)  int {\n    if(n == 0) {\n        return 1\n    }\n    return fact(n-1) * n\n}\nfunc main() void {\n    return fact(2)\n}"
  },
  {
    "path": "tests/fact5.ts",
    "content": "func fact(int n)  int {\n    if(n == 0) {\n        return 1\n    }\n    return fact(n-1) * n\n}\nfunc main() void {\n    return fact(5)\n}"
  },
  {
    "path": "tests/function.ts",
    "content": "func add(int a,int b)int{\n    return a + b\n}"
  },
  {
    "path": "tests/recursion.ts",
    "content": "func fact(int n)int{\n    if (n ==0){\n        return 1\n    }\n\n    return fact(n-1)*n\n}"
  },
  {
    "path": "translator/static_table_test.go",
    "content": "package translator\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/parser\"\n)\n\nfunc TestStaticTable(t *testing.T) {\n\tsource := `if(a){a=1}else{b=a+1*5}`\n\tnode := parser.Parse(source)\n\tprogram := NewTranslator().Translate(node)\n\tassert.Equal(t, program.StaticTable.Size(), 2)\n}\n"
  },
  {
    "path": "translator/symbol/static_table.go",
    "content": "package symbol\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n)\n\ntype StaticSymbolTable struct {\n\tOffsetMap     map[string]*Symbol\n\tOffsetCounter int\n\tSymbols       []*Symbol\n}\n\nfunc NewStaticSymbolTable() *StaticSymbolTable {\n\treturn &StaticSymbolTable{OffsetCounter: 0, OffsetMap: make(map[string]*Symbol), Symbols: make([]*Symbol, 0)}\n}\n\nfunc (s *StaticSymbolTable) Add(symbol *Symbol) {\n\tlexval := symbol.Lexeme.Value\n\tif _, ok := s.OffsetMap[lexval]; !ok {\n\t\ts.OffsetMap[lexval] = symbol\n\t\tsymbol.Offset = s.OffsetCounter\n\t\ts.OffsetCounter += 1\n\t\ts.Symbols = append(s.Symbols, symbol)\n\t} else {\n\t\tsameSymbol := s.OffsetMap[lexval]\n\t\tsymbol.Offset = sameSymbol.Offset\n\t}\n}\n\nfunc (s *StaticSymbolTable) Size() int {\n\treturn len(s.Symbols)\n}\n\nfunc (s *StaticSymbolTable) String() string {\n\tvar list []string\n\tfor i, v := range s.Symbols {\n\t\tlist = append(list, fmt.Sprintf(\"%d:%s\", i, v))\n\t}\n\n\treturn strings.Join(list, \"\\n\")\n}\n"
  },
  {
    "path": "translator/symbol/symbol.go",
    "content": "package symbol\n\nimport \"tinyscript/lexer\"\n\ntype Symbol struct {\n\tParent      *Table\n\tLexeme      *lexer.Token\n\tLabel       string\n\tOffset      int\n\tLayerOffset int\n\tTyp         SymbolType\n}\n\nfunc NewSymbol(typ SymbolType) *Symbol {\n\treturn &Symbol{Typ: typ}\n}\n\nfunc (s *Symbol) String() string {\n\tif SYMBOL_LABEL == s.Typ {\n\t\treturn s.Label\n\t}\n\n\treturn s.Lexeme.Value\n}\n\nfunc MakeAddressSymbol(lexeme *lexer.Token, offset int) *Symbol {\n\tsyb := NewSymbol(SYMBOL_ADDRESS)\n\tsyb.Lexeme = lexeme\n\tsyb.Offset = offset\n\n\treturn syb\n}\n\nfunc MakeImmediateSymbol(lexeme *lexer.Token) *Symbol {\n\tsyb := NewSymbol(SYMBOL_IMMEDIATE)\n\tsyb.Lexeme = lexeme\n\n\treturn syb\n}\n\nfunc MakeLabelSymbol(label string, lexeme *lexer.Token) *Symbol {\n\tsyb := NewSymbol(SYMBOL_LABEL)\n\tsyb.Lexeme = lexeme\n\tsyb.Label = label\n\n\treturn syb\n}\n"
  },
  {
    "path": "translator/symbol/table.go",
    "content": "package symbol\n\nimport (\n\t\"fmt\"\n\t\"tinyscript/lexer\"\n)\n/*\n一个符号表在运行时就是活动记录，一个符号表可以对应多个活动记录（递归），符号表这个时候就是一个模板\n */\ntype Table struct {\n\tParent      *Table\n\tChildren    []*Table\n\tSymbols     []*Symbol\n\tTempIndex   int\n\tOffsetIndex int\n\tLevel       int\n}\n\nfunc NewTable() *Table {\n\treturn &Table{\n\t\tSymbols:  make([]*Symbol, 0),\n\t\tChildren: make([]*Table, 0),\n\t}\n}\n\nfunc (t *Table) AddSymbol(symbol *Symbol) {\n\tt.Symbols = append(t.Symbols, symbol)\n\tsymbol.Parent = t\n}\n\nfunc (t *Table) symbolByLexeme(lexeme *lexer.Token) *Symbol {\n\tfor _, v := range t.Symbols {\n\t\tif lexeme.Value == v.Lexeme.Value {\n\t\t\treturn v\n\t\t}\n\t}\n\treturn nil\n}\n\nfunc (t *Table) Exists(lexeme *lexer.Token) bool {\n\tsymbol := t.symbolByLexeme(lexeme)\n\tif nil != symbol {\n\t\treturn true\n\t}\n\n\tif t.Parent != nil {\n\t\treturn t.Parent.Exists(lexeme)\n\t}\n\n\treturn false\n}\n\nfunc (t *Table) CloneFromSymbolTree(lexeme *lexer.Token, layoutOffset int) *Symbol {\n\tsymbl := t.symbolByLexeme(lexeme)\n\tif nil != symbl {\n\t\tsymbol := *symbl\n\t\tsymbol.LayerOffset = layoutOffset\n\t\treturn &symbol\n\t}\n\tif nil != t.Parent {\n\t\treturn t.Parent.CloneFromSymbolTree(lexeme, layoutOffset+1)\n\t}\n\n\treturn nil\n}\n\nfunc (t *Table) CreateSymbolByLexeme(lexeme *lexer.Token) *Symbol {\n\tvar symbol *Symbol = nil\n\tif lexeme.IsScalar() {\n\t\tsymbol = MakeImmediateSymbol(lexeme)\n\t\tt.AddSymbol(symbol)\n\t} else {\n\t\tsymbol2 := t.symbolByLexeme(lexeme)\n\t\tif nil == symbol2 {\n\t\t\tsymbol = t.CloneFromSymbolTree(lexeme, 0)\n\t\t\tif symbol == nil {\n\t\t\t\tsymbol = MakeAddressSymbol(lexeme, t.OffsetIndex)\n\t\t\t\tt.OffsetIndex += 1\n\t\t\t}\n\t\t\tt.AddSymbol(symbol)\n\t\t} else {\n\t\t\tsymbol = symbol2\n\t\t}\n\t}\n\n\treturn symbol\n}\n\nfunc (t *Table) CreateVariable() *Symbol {\n\tlexeme := lexer.NewToken(lexer.VARIABLE, \"p\"+fmt.Sprintf(\"%d\", t.TempIndex))\n\tt.TempIndex += 1\n\tsymbol := 
MakeAddressSymbol(lexeme, t.OffsetIndex)\n\tt.OffsetIndex += 1\n\tt.AddSymbol(symbol)\n\treturn symbol\n}\n\nfunc (t *Table) AddChild(child *Table) {\n\tchild.Parent = t\n\tchild.Level = t.Level + 1\n\tt.Children = append(t.Children, child)\n}\n\nfunc (t *Table) LocalSize() int {\n\treturn t.OffsetIndex\n}\n\nfunc (t *Table) CreateLabel(label string, lexeme *lexer.Token) {\n\tlabelSymbol := MakeLabelSymbol(label, lexeme)\n\tt.AddSymbol(labelSymbol)\n}\n"
  },
  {
    "path": "translator/symbol/table_test.go",
    "content": "package symbol\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/lexer\"\n)\n\nfunc TestSymbolTable(t *testing.T) {\n\ttable := NewTable()\n\ttable.CreateLabel(\"L0\", lexer.NewToken(lexer.VARIABLE, \"foo\"))\n\ttable.CreateVariable()\n\ttable.CreateSymbolByLexeme(lexer.NewToken(lexer.VARIABLE, \"foo\"))\n\tassert.Equal(t, table.LocalSize(), 1)\n}\n\nfunc TestTableChain(t *testing.T) {\n\ttable := NewTable()\n\ttable.CreateSymbolByLexeme(lexer.NewToken(lexer.VARIABLE, \"a\"))\n\n\tchildTable := NewTable()\n\ttable.AddChild(childTable)\n\n\tchildChildTable := NewTable()\n\tchildTable.AddChild(childChildTable)\n\n\tassert.Equal(t, childChildTable.Exists(lexer.NewToken(lexer.VARIABLE, \"a\")), true)\n\tassert.Equal(t, childTable.Exists(lexer.NewToken(lexer.VARIABLE, \"a\")), true)\n}\n\nfunc TestOffset(t *testing.T) {\n\ttable := NewTable()\n\n\ttable.CreateSymbolByLexeme(lexer.NewToken(lexer.INTEGER, \"100\"))\n\tsymbola := table.CreateSymbolByLexeme(lexer.NewToken(lexer.VARIABLE, \"a\"))\n\tsymbolb := table.CreateSymbolByLexeme(lexer.NewToken(lexer.VARIABLE, \"b\"))\n\n\tchildTable := NewTable()\n\ttable.AddChild(childTable)\n\tanotherSymbolB := childTable.CreateSymbolByLexeme(lexer.NewToken(lexer.VARIABLE, \"b\"))\n\tsymbolC := childTable.CreateSymbolByLexeme(lexer.NewToken(lexer.VARIABLE, \"c\"))\n\n\tassert.Equal(t, symbola.Offset, 0)\n\tassert.Equal(t, symbolb.Offset, 1)\n\tassert.Equal(t, anotherSymbolB.Offset, 1)\n\tassert.Equal(t, anotherSymbolB.LayerOffset, 1)\n\tassert.Equal(t, symbolC.Offset, 0)\n\tassert.Equal(t, symbolC.LayerOffset, 0)\n}\n"
  },
  {
    "path": "translator/symbol/types.go",
    "content": "package symbol\n\ntype SymbolType int\n\nconst (\n\tSYMBOL_ADDRESS SymbolType = iota\n\tSYMBOL_IMMEDIATE\n\tSYMBOL_LABEL\n)\n\nfunc (s SymbolType) String() string {\n\tswitch s {\n\tcase SYMBOL_ADDRESS:\n\t\treturn \"symbol_address\"\n\tcase SYMBOL_IMMEDIATE:\n\t\treturn \"symbol_immediate\"\n\tcase SYMBOL_LABEL:\n\t\treturn \"symbol_label\"\n\t}\n\n\tpanic(\"unknown symbol type\")\n}\n"
  },
  {
    "path": "translator/symbol/util.go",
    "content": "package symbol\n\n"
  },
  {
    "path": "translator/tainstruction.go",
    "content": "package translator\n\nimport (\n\t\"fmt\"\n\t\"tinyscript/translator/symbol\"\n)\n\ntype TAInstruction struct {\n\tArg1   interface{}\n\tArg2   interface{}\n\tOp     string\n\tResult *symbol.Symbol\n\tTyp    TAInstructionType\n\tLabel  string\n}\n\nfunc NewTAInstruction(typ TAInstructionType, result *symbol.Symbol, op string, arg1 interface{}, arg2 interface{}) *TAInstruction {\n\treturn &TAInstruction{Arg1: arg1, Arg2: arg2, Op: op, Result: result, Typ: typ}\n}\n\nfunc (t TAInstruction) String() string {\n\tswitch t.Typ {\n\tcase TAINSTR_TYPE_ASSIGN:\n\t\tif nil != t.Arg2 {\n\t\t\treturn fmt.Sprintf(\"%v = %v %v %v\", t.Result, t.Arg1, t.Op, t.Arg2)\n\t\t} else {\n\t\t\treturn fmt.Sprintf(\"%v = %v\", t.Result, t.Arg1)\n\t\t}\n\tcase TAINSTR_TYPE_IF:\n\t\treturn fmt.Sprintf(\"IF %v ELSE %v\", t.Arg1, t.Arg2)\n\tcase TAINSTR_TYPE_GOTO:\n\t\treturn fmt.Sprintf(\"GOTO %v\", t.Arg1)\n\tcase TAINSTR_TYPE_LABEL:\n\t\treturn fmt.Sprintf(\"%v:\", t.Arg1)\n\tcase TAINSTR_TYPE_FUNC_BEGIN:\n\t\treturn \"FUNC_BEGIN\"\n\tcase TAINSTR_TYPE_RETURN:\n\t\tif !IsNil(t.Arg1) {\n\t\t\treturn fmt.Sprintf(\"RETURN %v\", t.Arg1)\n\t\t}\n\t\treturn fmt.Sprintf(\"RETURN\")\n\tcase TAINSTR_TYPE_PARAM:\n\t\treturn fmt.Sprintf(\"PARAM %v %v\", t.Arg1, t.Arg2)\n\tcase TAINSTR_TYPE_SP:\n\t\treturn fmt.Sprintf(\"SP %v\", t.Arg1)\n\tcase TAINSTR_TYPE_CALL:\n\t\treturn fmt.Sprintf(\"CALL %v\", t.Arg1)\n\t}\n\n\tpanic(\"unknown opcode type\")\n}\n"
  },
  {
    "path": "translator/tainstruction_type.go",
    "content": "package translator\n\ntype TAInstructionType int\n\nconst (\n\tTAINSTR_TYPE_ASSIGN TAInstructionType = iota\n\tTAINSTR_TYPE_GOTO\n\tTAINSTR_TYPE_IF\n\tTAINSTR_TYPE_LABEL\n\tTAINSTR_TYPE_CALL\n\tTAINSTR_TYPE_RETURN\n\tTAINSTR_TYPE_SP\n\tTAINSTR_TYPE_PARAM\n\tTAINSTR_TYPE_FUNC_BEGIN\n)\n"
  },
  {
    "path": "translator/taprogram.go",
    "content": "package translator\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n\t\"tinyscript/translator/symbol\"\n)\n\ntype TAProgram struct {\n\tInstructions []*TAInstruction\n\tLabelCounter int\n\tStaticTable  *symbol.StaticSymbolTable\n}\n\nfunc NewTAProgram() *TAProgram {\n\treturn &TAProgram{Instructions: make([]*TAInstruction, 0), StaticTable: symbol.NewStaticSymbolTable()}\n}\n\nfunc (t *TAProgram) Add(instr *TAInstruction) {\n\tt.Instructions = append(t.Instructions, instr)\n}\n\nfunc (t *TAProgram) AddLabel() *TAInstruction {\n\tlabel := fmt.Sprintf(\"L%d\", t.LabelCounter)\n\tt.LabelCounter += 1\n\ttaCode := NewTAInstruction(TAINSTR_TYPE_LABEL, nil, \"\", nil, nil)\n\ttaCode.Arg1 = label\n\tt.Instructions = append(t.Instructions, taCode)\n\treturn taCode\n}\n\nfunc (t *TAProgram) String() string {\n\tvar lines []string\n\tfor _, v := range t.Instructions {\n\t\tlines = append(lines, v.String())\n\t}\n\n\treturn strings.Join(lines, \"\\n\")\n}\n\n//根据符号表的内容，判断符号类型，如果是SYMBOL_IMMEDIATE，则加入静态符号表，以此来设置静态符号表的信息\nfunc (t *TAProgram) SetStaticSymbols(table *symbol.Table) {\n\tfor _, v := range table.Symbols {\n\t\tif symbol.SYMBOL_IMMEDIATE == v.Typ {\n\t\t\tt.StaticTable.Add(v)\n\t\t}\n\t}\n\n\tfor _, child := range table.Children {\n\t\tt.SetStaticSymbols(child)\n\t}\n}\n"
  },
  {
    "path": "translator/translator.go",
    "content": "package translator\n\nimport (\n\t\"fmt\"\n\t\"tinyscript/lexer\"\n\t\"tinyscript/parser/ast\"\n\t\"tinyscript/translator/symbol\"\n)\n\ntype Translator struct {\n}\n\nfunc NewTranslator() *Translator {\n\treturn &Translator{}\n}\n\n/*\n符号表是辅助工具。对ast遍历的同时产生符号表，根据符号表的内容产生TAProgram。\n*/\nfunc (t *Translator) Translate(node ast.ASTNode) *TAProgram {\n\tprogram := NewTAProgram()\n\n\ttable := symbol.NewTable()\n\tfor _, child := range node.Children() {\n\t\tt.TranslateStmt(program, child, table)\n\t}\n\tprogram.SetStaticSymbols(table)\n\n\tmainFn := lexer.NewToken(lexer.VARIABLE, \"main\")\n\tif table.Exists(mainFn) {\n\t\ttable.CreateVariable() //返回值\n\t\tprogram.Add(NewTAInstruction(TAINSTR_TYPE_SP, nil, \"\", -table.LocalSize(), nil))\n\t\tprogram.Add(NewTAInstruction(TAINSTR_TYPE_CALL, nil, \"\", table.CloneFromSymbolTree(mainFn, 0), nil))\n\t\tprogram.Add(NewTAInstruction(TAINSTR_TYPE_SP, nil, \"\", table.LocalSize(), nil))\n\t}\n\n\treturn program\n}\n\nfunc (t *Translator) TranslateStmt(program *TAProgram, node ast.ASTNode, table *symbol.Table) {\n\tswitch node.Type() {\n\tcase ast.ASTNODE_TYPE_BLOCK:\n\t\tt.TranslateBlock(program, node, table)\n\t\treturn\n\tcase ast.ASTNODE_TYPE_IF_STMT:\n\t\tt.TranslateIfStmt(program, node.(*ast.IfStmt), table)\n\t\treturn\n\tcase ast.ASTNODE_TYPE_ASSIGN_STMT:\n\t\tt.TranslateAssignStmt(program, node, table)\n\t\treturn\n\tcase ast.ASTNODE_TYPE_DECLARE_STMT:\n\t\tt.TranslateDeclareStmt(program, node, table)\n\t\treturn\n\tcase ast.ASTNODE_TYPE_FUNCTION_DECLARE_STMT:\n\t\tt.TranslateFunctionDeclareStmt(program, node, table)\n\t\treturn\n\tcase ast.ASTNODE_TYPE_RETURN_STMT:\n\t\tt.TranslateReturnStmt(program, node, table)\n\t\treturn\n\tcase ast.ASTNODE_TYPE_CALL_EXPR:\n\t\tt.TranslateCallExpr(program, node, table)\n\t\treturn\n\t}\n\n\tpanic(\"unknown node type\" + node.Type().String())\n}\n\nfunc (t *Translator) TranslateDeclareStmt(program *TAProgram, node ast.ASTNode, table *symbol.Table) {\n\tlexeme := 
node.GetChild(0).Lexeme()\n\tif table.Exists(lexeme) {\n\t\tpanic(\"Syntax Error, Identifier \" + lexeme.Value + \" is already defined\")\n\t}\n\tassigned := table.CreateSymbolByLexeme(lexeme)\n\texpr := node.GetChild(1)\n\taddr := t.TranslateExpr(program, expr, table)\n\tprogram.Add(NewTAInstruction(TAINSTR_TYPE_ASSIGN, assigned, \"=\", addr, nil))\n}\n\nfunc (t *Translator) TranslateAssignStmt(program *TAProgram, node ast.ASTNode, table *symbol.Table) {\n\tassigned := table.CreateSymbolByLexeme(node.GetChild(0).Lexeme())\n\texpr := node.GetChild(1)\n\taddr := t.TranslateExpr(program, expr, table)\n\tprogram.Add(NewTAInstruction(TAINSTR_TYPE_ASSIGN, assigned, \"=\", addr, nil))\n}\n\n/*\nSDD:\n\tE -> E1 op E2\n\tE -> F\n*/\nfunc (t *Translator) TranslateExpr(program *TAProgram, node ast.ASTNode, table *symbol.Table) *symbol.Symbol {\n\tif node.IsValueType() {\n\t\taddr := table.CreateSymbolByLexeme(node.Lexeme())\n\t\tnode.SetProp(\"addr\", addr)\n\t\treturn addr\n\t} else if node.Type() == ast.ASTNODE_TYPE_CALL_EXPR {\n\t\taddr := t.TranslateCallExpr(program, node, table)\n\t\tnode.SetProp(\"addr\", addr)\n\t\treturn addr\n\t} else if IsInstanceOfExpr(node) {\n\t\tfor _, child := range node.Children() {\n\t\t\tt.TranslateExpr(program, child, table)\n\t\t}\n\n\t\tif node.Prop(\"addr\") == nil {\n\t\t\tnode.SetProp(\"addr\", table.CreateVariable())\n\t\t}\n\n\t\tinstr := NewTAInstruction(\n\t\t\tTAINSTR_TYPE_ASSIGN,\n\t\t\tnode.Prop(\"addr\").(*symbol.Symbol),\n\t\t\tnode.Lexeme().Value,\n\t\t\tnode.GetChild(0).Prop(\"addr\").(*symbol.Symbol),\n\t\t\tnode.GetChild(1).Prop(\"addr\").(*symbol.Symbol),\n\t\t)\n\n\t\tprogram.Add(instr)\n\t\treturn instr.Result\n\t}\n\n\tpanic(\"unexpected node type :\" + node.Type().String())\n}\n\nfunc (t *Translator) TranslateBlock(program *TAProgram, node ast.ASTNode, parent *symbol.Table) {\n\ttable := symbol.NewTable()\n\tparent.AddChild(table)\n\tparentOffset := table.CreateVariable()\n\tparentOffset.Lexeme = 
lexer.NewToken(lexer.INTEGER, fmt.Sprintf(\"%d\", parent.LocalSize()))\n\n\t//pushRecord := NewTAInstruction(TAINSTR_TYPE_SP, nil, \"\", nil, nil)\n\t//program.Add(pushRecord)\n\tfor _, stmt := range node.Children() {\n\t\tt.TranslateStmt(program, stmt, table)\n\t}\n\n\t//popRecord := NewTAInstruction(TAINSTR_TYPE_SP, nil, \"\", nil, nil)\n\t//program.Add(popRecord)\n\t//\n\t//pushRecord.Arg1 = -parent.LocalSize()\n\t//popRecord.Arg1 = parent.LocalSize()\n}\n\nfunc (t *Translator) TranslateIfStmt(program *TAProgram, node *ast.IfStmt, table *symbol.Table) {\n\texpr := node.GetExpr()\n\texprAddr := t.TranslateExpr(program, expr, table)\n\tifOpCode := NewTAInstruction(TAINSTR_TYPE_IF, nil, \"\", exprAddr, nil)\n\tprogram.Add(ifOpCode)\n\n\tt.TranslateBlock(program, node.GetBlock(), table)\n\n\tvar gotoInstr *TAInstruction = nil\n\tif node.GetChild(2) != nil {\n\t\tgotoInstr = NewTAInstruction(TAINSTR_TYPE_GOTO, nil, \"\", nil, nil)\n\t\tprogram.Add(gotoInstr)\n\t\tlabelEndIf := program.AddLabel()\n\t\tifOpCode.Arg2 = labelEndIf.Arg1\n\t}\n\n\tif node.GetElseBlock() != nil {\n\t\tt.TranslateBlock(program, node.GetElseBlock(), table)\n\t} else if node.GetElseIfStmt() != nil {\n\t\tt.TranslateIfStmt(program, node.GetElseIfStmt().(*ast.IfStmt), table)\n\t}\n\n\tlabelEnd := program.AddLabel()\n\tif node.GetChild(2) == nil {\n\t\tifOpCode.Arg2 = labelEnd.Arg1\n\t} else {\n\t\tgotoInstr.Arg1 = labelEnd.Arg1\n\t}\n}\n\nfunc (t *Translator) TranslateFunctionDeclareStmt(program *TAProgram, node ast.ASTNode, parent *symbol.Table) {\n\tlabel := program.AddLabel()\n\n\ttable := symbol.NewTable()\n\n\tprogram.Add(NewTAInstruction(TAINSTR_TYPE_FUNC_BEGIN, nil, \"\", nil, nil))\n\ttable.CreateVariable() //返回地址\n\n\tlabel.Arg2 = node.Lexeme().Value\n\n\tfn := node.(*ast.FuncDeclareStmt)\n\targs := fn.Args()\n\tparent.AddChild(table)\n\tparent.CreateLabel(label.Arg1.(string), node.Lexeme())\n\tfor _, arg := range args.Children() 
{\n\t\ttable.CreateSymbolByLexeme(arg.Lexeme())\n\t}\n\n\tfor _, child := range fn.Block().Children() {\n\t\tt.TranslateStmt(program, child, table)\n\t}\n}\n\nfunc (t *Translator) TranslateCallExpr(program *TAProgram, node ast.ASTNode, table *symbol.Table) *symbol.Symbol {\n\t//foo()\n\tfactor := node.GetChild(0)\n\n\t//foo -> symbol(foo) L0\n\t//table.CreateVariable()                //返回地址\n\n\tvar l = make([]*TAInstruction, 0)\n\tfor i := 1; i < len(node.Children()); i++ {\n\t\texpr := node.GetChild(uint(i))\n\t\taddr := t.TranslateExpr(program, expr, table)\n\t\tl = append(l, NewTAInstruction(TAINSTR_TYPE_PARAM, nil, \"\", addr, i-1))\n\t}\n\n\tfor _, instr := range l {\n\t\tinstr.Arg2 = table.LocalSize() + instr.Arg2.(int) + 2\n\t\tprogram.Add(instr)\n\t}\n\n\treturnValue := table.CreateVariable() //返回值\n\tfuncAddr := table.CloneFromSymbolTree(factor.Lexeme(), 0)\n\tif nil == funcAddr {\n\t\tpanic(\"function \" + factor.Lexeme().Value + \" not found\")\n\t}\n\tprogram.Add(NewTAInstruction(TAINSTR_TYPE_SP, nil, \"\", -table.LocalSize(), nil))\n\tprogram.Add(NewTAInstruction(TAINSTR_TYPE_CALL, nil, \"\", funcAddr, nil))\n\tprogram.Add(NewTAInstruction(TAINSTR_TYPE_SP, nil, \"\", table.LocalSize(), nil))\n\n\treturn returnValue\n}\n\nfunc (t *Translator) TranslateReturnStmt(program *TAProgram, node ast.ASTNode, table *symbol.Table) {\n\tvar resultValue *symbol.Symbol = nil\n\tif node.GetChild(0) != nil {\n\t\tresultValue = t.TranslateExpr(program, node.GetChild(0), table)\n\t}\n\tprogram.Add(NewTAInstruction(TAINSTR_TYPE_RETURN, nil, \"\", resultValue, nil))\n}\n"
  },
  {
    "path": "translator/translator_test.go",
    "content": "package translator\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/parser\"\n\t\"tinyscript/translator/symbol\"\n)\n\nfunc TestExprTranslator(t *testing.T) {\n\tsource := `a+(b-c)+d*(b-c)*2`\n\tp := parser.Parse(source)\n\texprNode := p.GetChild(0)\n\ttranslator := NewTranslator()\n\ttable := symbol.NewTable()\n\tprogram := NewTAProgram()\n\ttranslator.TranslateExpr(program, exprNode, table)\n\texpected := `p0 = b - c\np1 = b - c\np2 = p1 * 2\np3 = d * p2\np4 = p0 + p3\np5 = a + p4`\n\n\tassert.Equal(t, program.String(), expected)\n}\n\nfunc TestAssignStmt(t *testing.T) {\n\tsource := \"a=1.0*2.0*3.0\"\n\tnode := parser.Parse(source)\n\ttranslator := NewTranslator()\n\tprogram := translator.Translate(node)\n\n\texpected := `p0 = 2.0 * 3.0\np1 = 1.0 * p0\na = p1`\n\tassert.Equal(t, program.String(), expected)\n}\n\nfunc TestTranslator_TranslateDeclareStmt(t *testing.T) {\n\tsource := \"var a=1.0*2.0*3.0\"\n\tnode := parser.Parse(source)\n\ttranslator := NewTranslator()\n\tprogram := translator.Translate(node)\n\n\texpected := `p0 = 2.0 * 3.0\np1 = 1.0 * p0\na = p1`\n\tassert.Equal(t, program.String(), expected)\n}\n\nfunc TestAssignStmt2(t *testing.T) {\n\tsource := \"a=1\"\n\tnode := parser.Parse(source)\n\ttranslator := NewTranslator()\n\tprogram := translator.Translate(node)\n\n\tassert.Equal(t, program.String(), \"a = 1\")\n}\n\nfunc TestBlock(t *testing.T) {\n\tsourc := `var a = 1\n{\nvar b = 1 * 100\n}\n{\nvar b = a * 100\n}`\n\n\tast := parser.Parse(sourc)\n\ttranslator := NewTranslator()\n\tprogram := translator.Translate(ast)\n\texpected := `a = 1\np1 = 1 * 100\nb = p1\np1 = a * 100\nb = p1`\n\tassert.Equal(t, program.String(), expected)\n}\n\nfunc TestTranslator_TranslateIfStmt(t *testing.T) {\n\tsource := `if(a){\nb=1\n}`\n\n\tastNode := parser.Parse(source)\n\ttranslator := NewTranslator()\n\tprogram := translator.Translate(astNode)\n\texpected := `IF a ELSE L0\nb = 1\nL0:`\n\tassert.Equal(t, 
program.String(), expected)\n}\n\nfunc TestTranslator_TranslateIfElseStmt(t *testing.T) {\n\tsource := `if(a){\nb=1\n}else{\nb=2\n}`\n\n\tastNode := parser.Parse(source)\n\ttranslator := NewTranslator()\n\tprogram := translator.Translate(astNode)\n\texpected := `IF a ELSE L0\nb = 1\nGOTO L1\nL0:\nb = 2\nL1:`\n\tassert.Equal(t, program.String(), expected)\n}\n\nfunc TestTranslator_TranslateIfElseIfStmt(t *testing.T) {\n\tastNode := parser.ParseFromFile(\"../tests/complex-if.ts\")\n\ttranslator := NewTranslator()\n\tprogram := translator.Translate(astNode)\n\texpected := `p0 = a == 1\nIF p0 ELSE L0\nb = 100\nGOTO L5\nL0:\np1 = a == 2\nIF p1 ELSE L1\nb = 500\nGOTO L4\nL1:\np2 = a == 3\nIF p2 ELSE L2\np1 = a * 1000\nb = p1\nGOTO L3\nL2:\nb = -1\nL3:\nL4:\nL5:`\n\tassert.Equal(t, program.String(), expected)\n}\n\nfunc TestSimpleFunction(t *testing.T) {\n\tnode := parser.ParseFromFile(\"../tests/function.ts\")\n\ttranslator := NewTranslator()\n\tprogram := translator.Translate(node)\n\texpected := `L0:\nFUNC_BEGIN\np1 = a + b\nRETURN p1`\n\tassert.Equal(t, program.String(), expected)\n}\n\nfunc TestRecursionFunc(t *testing.T) {\n\tnode := parser.ParseFromFile(\"../tests/recursion.ts\")\n\ttranslator := NewTranslator()\n\tprogram := translator.Translate(node)\n\texpected := `L0:\nFUNC_BEGIN\np1 = n == 0\nIF p1 ELSE L1\nRETURN 1\nL1:\np2 = n - 1\nPARAM p2 6\nSP -5\nCALL L0\nSP 5\np4 = p3 * n\nRETURN p4`\n\tassert.Equal(t, program.String(), expected)\n}\n"
  },
  {
    "path": "translator/util.go",
    "content": "package translator\n\nimport (\n\t\"reflect\"\n\t\"strings\"\n)\n\nfunc IsInstanceOfExpr(instance interface{}) bool {\n\tival := reflect.ValueOf(instance)\n\treturn strings.LastIndex(ival.Type().String(), \"ast.Expr\") != -1\n}\n\nfunc IsNil(i interface{}) bool {\n\tvi := reflect.ValueOf(i)\n\t//if vi.Kind() == reflect.Ptr {\n\treturn vi.IsNil()\n\t//}\n\t//return false\n}\n"
  },
  {
    "path": "translator/util_test.go",
    "content": "package translator\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/parser/ast\"\n)\n\nfunc TestIsInstanceOf(t *testing.T) {\n\tvar i interface{}\n\ti = &ast.Expr{}\n\tassert.Equal(t, IsInstanceOfExpr(i), true)\n}\n"
  },
  {
    "path": "vm/vm.go",
    "content": "package vm\n\nimport (\n\t\"log\"\n\t\"tinyscript/gen\"\n\t\"tinyscript/gen/operand\"\n)\n\ntype VM struct {\n\tRegisters         [31]int\n\tMemory            [4096]int\n\tEndProgramSection int\n\tStartProgram      int\n}\n\nfunc NewVM(staticArea []int, opcodes []int, entry *int) *VM {\n\tvm := &VM{}\n\n\ti := 0\n\tfor ; i < len(staticArea); i++ {\n\t\tvm.Memory[i] = staticArea[i]\n\t}\n\n\tj := i\n\tvm.StartProgram = i\n\t//mainStart := *entry + i\n\tfor ; i < len(opcodes)+j; i++ {\n\t\tvm.Memory[i] = opcodes[i-j]\n\t}\n\n\tvm.Registers[operand.PC.Addr] = i - 3\n\tvm.EndProgramSection = i\n\n\tvm.Registers[operand.SP.Addr] = 4095\n\treturn vm\n}\nfunc (vm *VM) Fetch() int {\n\tpc := vm.Registers[operand.PC.Addr]\n\treturn vm.Memory[pc]\n}\n\nfunc (vm *VM) Decode(code int) *gen.Instruction {\n\treturn gen.FromByCode(code)\n}\n\nfunc (vm *VM) Exec(instr *gen.Instruction) {\n\tcode := instr.Code.Value\n\tlog.Println(\"exec:\", instr)\n\n\tswitch code {\n\tcase 0x01: //ADD\n\t\tr0 := instr.GetOperand(0).(*operand.Register)\n\t\tr1 := instr.GetOperand(1).(*operand.Register)\n\t\tr2 := instr.GetOperand(2).(*operand.Register)\n\t\tvm.Registers[r0.Addr] = vm.Registers[r1.Addr] + vm.Registers[r2.Addr]\n\t//case 0x09: //\n\tcase 0x09, 0x02: //SUB\n\t\tr0 := instr.GetOperand(0).(*operand.Register)\n\t\tr1 := instr.GetOperand(1).(*operand.Register)\n\t\tr2 := instr.GetOperand(2).(*operand.Register)\n\t\tvm.Registers[r0.Addr] = vm.Registers[r1.Addr] - vm.Registers[r2.Addr]\n\tcase 0x03: //MULT\n\t\tr0 := instr.GetOperand(0).(*operand.Register)\n\t\tr1 := instr.GetOperand(1).(*operand.Register)\n\t\tvm.Registers[operand.L0.Addr] = vm.Registers[r0.Addr] * vm.Registers[r1.Addr]\n\tcase 0x05: //ADDI\n\t\tr0 := instr.GetOperand(0).(*operand.Register)\n\t\tr1 := instr.GetOperand(1).(*operand.ImmediateNumber)\n\t\tvm.Registers[r0.Addr] += r1.Value\n\tcase 0x06: //SUBI\n\t\tr0 := instr.GetOperand(0).(*operand.Register)\n\t\tr1 := 
instr.GetOperand(1).(*operand.ImmediateNumber)\n\t\tvm.Registers[r0.Addr] -= r1.Value\n\t//case 0x07: //MULI\n\tcase 0x08: //MFLO\n\t\tr0 := instr.GetOperand(0).(*operand.Register)\n\t\tvm.Registers[r0.Addr] = vm.Registers[operand.L0.Addr]\n\tcase 0x10: //SW\n\t\tr0 := instr.GetOperand(0).(*operand.Register)\n\t\tr1 := instr.GetOperand(1).(*operand.Register)\n\t\toffset := instr.GetOperand(2).(*operand.Offset)\n\t\tR1VAL := vm.Registers[r1.Addr]\n\t\tvm.Memory[R1VAL+offset.Offset] = vm.Registers[r0.Addr]\n\tcase 0x11: //LW\n\t\tr0 := instr.GetOperand(0).(*operand.Register)\n\t\tr1 := instr.GetOperand(1).(*operand.Register)\n\t\toffset := instr.GetOperand(2).(*operand.Offset)\n\t\tR1VAL := vm.Registers[r1.Addr]\n\t\tvm.Registers[r0.Addr] = vm.Memory[R1VAL+offset.Offset]\n\tcase 0x15: //BNE\n\t\tr0 := instr.GetOperand(0).(*operand.Register)\n\t\tr1 := instr.GetOperand(1).(*operand.Register)\n\t\toffset := instr.GetOperand(2).(*operand.Offset)\n\t\tif vm.Registers[r0.Addr] != vm.Registers[r1.Addr] {\n\t\t\tvm.Registers[operand.PC.Addr] = offset.Offset + vm.StartProgram - 1\n\t\t}\n\tcase 0x20: //JUMP\n\t\tr0 := instr.GetOperand(0).(*operand.Offset)\n\t\tvm.Registers[operand.PC.Addr] = r0.Offset + vm.StartProgram - 1\n\tcase 0x21: //JR\n\t\tr0 := instr.GetOperand(0).(*operand.Offset)\n\t\tvm.Registers[operand.RA.Addr] = vm.Registers[operand.PC.Addr]\n\t\tvm.Registers[operand.PC.Addr] = r0.Offset + vm.StartProgram - 1\n\tcase 0x22: //RETURN\n\t\tif instr.GetOperand(0) != nil {\n\t\t\t//match 返回值\n\t\t}\n\n\t\tspVal := vm.Registers[operand.SP.Addr]\n\t\tvm.Registers[operand.PC.Addr] = vm.Memory[spVal]\n\t}\n}\n\nfunc (vm *VM) run() {\n\t//模拟CPU循环\n\t// fetch\n\t// decode\n\t// exec\n\t// pc++\n\tfor ; vm.runOneStep(); {\n\t}\n}\n\nfunc (vm *VM) GetSpMemory(offset int) int {\n\tsp := vm.Registers[operand.SP.Addr]\n\treturn vm.Memory[sp+offset]\n}\n\nfunc (vm *VM) runOneStep() bool {\n\tcode := vm.Fetch()\n\tinstr := 
vm.Decode(code)\n\tvm.Exec(instr)\n\tvm.Registers[operand.PC.Addr] += 1\n\tlog.Println(vm.Registers[operand.PC.Addr], \"|\", vm.EndProgramSection)\n\treturn vm.Registers[operand.PC.Addr] < vm.EndProgramSection\n}\n"
  },
  {
    "path": "vm/vm_test.go",
    "content": "package vm\n\nimport (\n\t\"github.com/magiconair/properties/assert\"\n\t\"testing\"\n\t\"tinyscript/gen\"\n\t\"tinyscript/gen/operand\"\n\t\"tinyscript/parser\"\n\t\"tinyscript/translator\"\n)\n\nfunc TestCalcExpr(t *testing.T) {\n\tsource := `func main()int{var a = 2 * 3 + 4 \nreturn\n}`\n\ttaProg := translator.NewTranslator().Translate(parser.Parse(source))\n\tprog := gen.NewOpCodeGen().Gen(taProg)\n\tstaticTable := prog.GetStaticArea(taProg)\n\topcodes := prog.ToByteCode()\n\tvm := NewVM(staticTable, opcodes, prog.Entry)\n\n\t// CALL main\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\n\tt.Log(\"RA:\", vm.Registers[operand.RA.Addr])\n\tassert.Equal(t, vm.GetSpMemory(0), 18)\n\n\t// p0 = 2 * 3\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tassert.Equal(t, vm.Registers[operand.S0.Addr], 2)\n\tassert.Equal(t, vm.Registers[operand.S1.Addr], 3)\n\tassert.Equal(t, vm.Registers[operand.L0.Addr], 6)\n\tassert.Equal(t, vm.Registers[operand.S2.Addr], 6)\n\tassert.Equal(t, vm.GetSpMemory(-2), 6)\n\n\t// p1 = p0 + 4\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tassert.Equal(t, vm.Registers[operand.S0.Addr], 6)\n\tassert.Equal(t, vm.Registers[operand.S1.Addr], 4)\n\tassert.Equal(t, vm.Registers[operand.S2.Addr], 10)\n\tassert.Equal(t, vm.GetSpMemory(-3), 10)\n\n\t// a = p1\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tassert.Equal(t, vm.GetSpMemory(-1), 10)\n\tassert.Equal(t, vm.Registers[operand.S0.Addr], 10)\n\n\t// RETURN null\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\n\tt.Log(\"SP:\", vm.Registers[operand.SP.Addr])\n}\n\nfunc TestRecursiveFunction(t *testing.T) {\n\ttaProg := translator.NewTranslator().Translate(parser.ParseFromFile(\"../tests/fact2.ts\"))\n\tt.Log(taProg)\n\tprog := gen.NewOpCodeGen().Gen(taProg)\n\tstaticTable := prog.GetStaticArea(taProg)\n\topcodes := prog.ToByteCode()\n\tt.Log(prog)\n\tt.Log(taProg.StaticTable)\n\tvm := 
NewVM(staticTable, opcodes, prog.Entry)\n\t// CALL main\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tt.Log(\"RA:\", vm.Registers[operand.RA.Addr])\n\tassert.Equal(t, vm.GetSpMemory(0), 39)\n\n\t// PARAM 10 0\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tassert.Equal(t, vm.GetSpMemory(-3), 2)\n\n\t// SP -2\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tt.Log(\"RA:\", vm.Registers[operand.RA.Addr])\n\n\t// #FUNC_BEGIN\n\tvm.runOneStep()\n\tassert.Equal(t, vm.GetSpMemory(0), 33)\n\n\t// #p1 = n == 0\n\tassert.Equal(t, vm.GetSpMemory(-1), 2)\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tassert.Equal(t, vm.GetSpMemory(-2) == 0, false)\n\n\t// #IF p1 ELSE L1\n\tvm.runOneStep()\n\tvm.runOneStep()\n\n\t// #p3 = n - 1\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tassert.Equal(t, 1, vm.GetSpMemory(-3))\n\n\t// #PARAM p3 0\n\t// #SP-5\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tassert.Equal(t, 1, vm.GetSpMemory(-1))\n\n\tvm.runOneStep()\n\tvm.runOneStep()\n\n\t// #p1 = n == 0\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tassert.Equal(t, false, vm.GetSpMemory(-2) == 0)\n\n\t// #IF p1 ELSE L1\n\tvm.runOneStep()\n\n\t// #p3 = n - 1\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\n\t// #PARAM p3 0\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\n\t// CALL\n\tvm.runOneStep()\n\tvm.runOneStep()\n\n\t// #p1 = n == 0\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tassert.Equal(t, true, vm.GetSpMemory(-2) == 0)\n\n\t// #IF p1 ELSE L1\n\tvm.runOneStep()\n\n\t// RETURN 1\n\tvm.runOneStep()\n\tvm.runOneStep()\n\n\tvm.runOneStep()\n\tvm.runOneStep()\n\n\t// #p4 = p2 * n 计算递归值\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\t// #RETURN p4\n\tvm.runOneStep()\n\tvm.runOneStep()\n\t//RETURN\n\tvm.runOneStep()\n\tvm.runOneStep()\n\n\t//#p4 = p2 * 
n\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\tvm.runOneStep()\n\n\tassert.Equal(t, 2, vm.GetSpMemory(-5))\n\n\tvm.runOneStep()\n\tvm.runOneStep()\n\t// RETURN MAIN\n\tvm.runOneStep()\n\n\t// SP 2\n\tvm.runOneStep()\n\n\t// #RETURN p1 : from main\n\tvm.runOneStep()\n\tassert.Equal(t, 2, vm.GetSpMemory(-1))\n\n\tfor ; vm.runOneStep(); {\n\t}\n\tassert.Equal(t, 2, vm.GetSpMemory(0))\n}\n\nfunc TestRecursivefunction1(t *testing.T) {\n\ttaProg := translator.NewTranslator().Translate(parser.ParseFromFile(\"../tests/fact5.ts\"))\n\tprog := gen.NewOpCodeGen().Gen(taProg)\n\tstaticTable := prog.GetStaticArea(taProg)\n\topcodes := prog.ToByteCode()\n\t//t.Log(prog)\n\t//t.Log(taProg.StaticTable)\n\tvm := NewVM(staticTable, opcodes, prog.Entry)\n\tvm.run()\n\tassert.Equal(t, 120, vm.GetSpMemory(0))\n}\n"
  }
]