diff --git a/.gitignore b/.gitignore
index 08533bdc9..e460b6412 100644
--- a/.gitignore
+++ b/.gitignore
@@ -101,3 +101,7 @@ fabric.properties
 out
 gen
 *.rdb
+*.db
+cmd/cxbenchmark/bin/cx
+*.out
+*.test
diff --git a/Makefile b/Makefile
index e6e6df3bc..e5ea574b5 100644
--- a/Makefile
+++ b/Makefile
@@ -143,3 +143,13 @@ dep: ## Update go vendor
 
 help:
 	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
+
+benchmark: #Benchmark
+ifndef CXVERSION
+	@echo "$(CXEXE) not found in $(PWD)/bin, please run make install first"
+else
+	mkdir -p $(PWD)/cmd/cxbenchmark/bin/
+	rm -f $(PWD)/cmd/cxbenchmark/bin/$(CXEXE)
+	cp $(PWD)/bin/$(CXEXE) $(PWD)/cmd/cxbenchmark/bin/
+	go test $(PWD)/cmd/cxbenchmark -run BenchmarkCX -tags ptr32 -count=10 -bench=. -benchmem
+endif
\ No newline at end of file
diff --git a/cmd/cx/helpers.go b/cmd/cx/helpers.go
index 4fffda660..2f3e70ea7 100644
--- a/cmd/cx/helpers.go
+++ b/cmd/cx/helpers.go
@@ -40,7 +40,7 @@ func parseProgram(options cxCmdFlags, fileNames []string, sourceCode []*os.File)
 	}
 
 	// Parsing all the source code files sent as CLI arguments to CX.
-	cxparsing.ParseSourceCode(sourceCode, fileNames)
+	cxparsing.ParseSourceCode(sourceCode)
 
 	// Checking if a main package exists. If not, create and add it to `AST`.
 	if _, err := actions.AST.GetFunction(constants.MAIN_FUNC, constants.MAIN_PKG); err != nil {
diff --git a/cmd/cxbenchmark/cxbenchmark_test.go b/cmd/cxbenchmark/cxbenchmark_test.go
new file mode 100644
index 000000000..9d5734a3f
--- /dev/null
+++ b/cmd/cxbenchmark/cxbenchmark_test.go
@@ -0,0 +1,18 @@
+package cxbenchmark_test
+
+import (
+	"os/exec"
+	"testing"
+)
+
+func BenchmarkCX(b *testing.B) {
+	for i := 0; i < b.N; i++ {
+
+		cmd := exec.Command("./bin/cx", "./test_files/test.cx")
+		_, err := cmd.CombinedOutput()
+		if err != nil {
+			b.Fatal(err)
+		}
+
+	}
+}
diff --git a/cmd/cxbenchmark/test_files/test.cx b/cmd/cxbenchmark/test_files/test.cx
new file mode 100644
index 000000000..ba9a0c0a2
--- /dev/null
+++ b/cmd/cxbenchmark/test_files/test.cx
@@ -0,0 +1,79 @@
+package main
+
+var Bool bool = true
+var Byte i8 = 8B
+var I16 i16 = 16H
+var I32 i32 = 32
+var I64 i64 = 64L
+var UByte ui8 = 9UB
+var UI16 ui16 = 17UH
+var UI32 ui32 = 33U
+var UI64 ui64 = 65UL
+var F32 f32 = 0.32
+var F64 f64 = 0.64D
+var string str = "Hello World"
+var Affordance aff
+
+var intArray [5]i32
+// var stringPointer *str
+
+// var abc string Var in comment
+/*
+	var apple int
+	- Global in a multiline comment
+*/
+
+type CustomType struct {
+	fieldA str
+	fieldB i32
+}
+
+func (customType *CustomType) setFieldA (string str) {
+	customType.fieldA = string
+}
+
+func main () {
+
+	bool.print(Bool)
+	i8.print(Byte)
+	i16.print(I16)
+	i32.print(I32)
+	i64.print(I64)
+	ui8.print(UByte)
+	ui16.print(UI16)
+	ui32.print(UI32)
+	ui64.print(UI64)
+	f32.print(F32)
+	f64.print(F64)
+	str.print(string)
+
+	//Addition
+	answer := add(I32, 6)
+	i32.print(answer)
+
+	//Multiply
+	var quotient i32
+	var remainder f32
+	quotient, remainder = divide(9, 4)
+	i32.print(quotient)
+	f32.print(remainder)
+
+	printer("Print me")
+}
+
+func add(a i32, b i32)(answer i32) {
+	answer = a + b
+}
+
+func divide(c i32, d i32)(quotient i32, remainder f32) {
+	quotient = c/d
+	remainder = i32.f32(c)%i32.f32(d)
+}
+
+func printer(message str)() {
+	str.print(message)
+}
+
+type AnotherType struct {
+	name str
+}
\ No newline at end of file
diff --git a/cmd/cxtest/main.go b/cmd/cxtest/main.go
index 6a379a651..8c8c67e3d 100644
---
a/cmd/cxtest/main.go +++ b/cmd/cxtest/main.go @@ -113,7 +113,9 @@ func Execute(c *cli.Context) error { var start = time.Now().Unix() fmt.Printf("Running CX tests in dir: '%s'\n", workingDir) + timerStart := time.Now() runTestCases(tester) + timerTime := time.Since(timerStart) end := time.Now().Unix() if runner.Has(logMask, runner.LogTime) { @@ -124,6 +126,7 @@ func Execute(c *cli.Context) error { fmt.Printf("%d were successful\n", tester.TestSuccess) fmt.Printf("%d failed\n", tester.TestCount-tester.TestSuccess) fmt.Printf("%d skipped\n", tester.TestSkipped) + fmt.Printf("Time took %v\n", timerTime) if tester.TestCount == 0 || (tester.TestSuccess != tester.TestCount) { return errors.New("not all test succeeded") diff --git a/cmd/declaration_extractor/declaration_extractor.go b/cmd/declaration_extractor/declaration_extractor.go index ff3a70e66..cf084da07 100644 --- a/cmd/declaration_extractor/declaration_extractor.go +++ b/cmd/declaration_extractor/declaration_extractor.go @@ -3,11 +3,36 @@ package declaration_extractor import ( "fmt" "path/filepath" + "regexp" "sync" - - "github.com/skycoin/cx/cmd/packageloader/loader" ) +//Regexes +var reName = regexp.MustCompile(`[_a-zA-Z][_a-zA-Z0-9]*`) +var reEnumInit = regexp.MustCompile(`const\s+\(`) +var reEnumDec = regexp.MustCompile(`([_a-zA-Z][_a-zA-Z0-9]*)(?:\s+([_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*)){0,1}(?:\s*\=\s*[\s\S]+\S+){0,1}`) +var reNotSpace = regexp.MustCompile(`\S+`) + +// Func Declaration regex for name extraction and syntax checking +// Components: +// func - func keyword +// (?:\s*\(\s*[_a-zA-Z]\w*\s+\*{0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*\s*\)\s*)|\s+) - [space/no space] [([reciever object]) [name] | [space]] +// ([_a-zA-Z]\w*) - name of func +// (?:\s*\(\s*(?:(?:[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*\s*,\s*)+[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*|(?:[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*){0,1})\s*\)){1,2} - [[space/no space] ([params])]{1,2} +// (?:(?:[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*\s*,\s*)+[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*|(?:[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*){0,1}) - [[param name] [data type] [,]]{0,1} [param name] [data type] | [param name] [data type] +// (?:[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)* - [param name] [*]{0,1} [\[[1-9][0-9]+|[0-9]\][*]{0,1}]{0,1} [word] [[.][word]]* +// +// First, finds the func keyword +// Second, finds out whether the function has a receiver object or not and extracts the func name +// Third, finds whether there's one or two pairs of parenthesis +// Forth, finds whether there's one or more params in the parenthesis +var reFuncDec = regexp.MustCompile(`(func(?:(?:\s*\(\s*[_a-zA-Z]\w*\s+\*{0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*\s*\)\s*)|\s+)([_a-zA-Z]\w*)(?:\s*\(\s*(?:(?:[_a-zA-Z]\w*(?:\s*\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*|\s+)[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*\s*,\s*)+[_a-zA-Z]\w*(?:\s*\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}|\s+)\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*|(?:[_a-zA-Z]\w*(?:\s*\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*|\s+)[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*){0,1})\s*\)){1,2})(?:\s*{){0,1}`) +var 
reGlobalName = regexp.MustCompile(`var\s+([_a-zA-Z]\w*)\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*(?:\s*\=\s*[\s\S]+\S+){0,1}`) +var reStruct = regexp.MustCompile(`type\s+[_a-zA-Z][_a-zA-Z0-9]*\s+struct`) +var reStructHeader = regexp.MustCompile(`type\s+([_a-zA-Z][_a-zA-Z0-9]*)\s+struct\s*{`) +var reStructField = regexp.MustCompile(`([_a-zA-Z][_a-zA-Z0-9]*)\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*`) +var reTypeDefinition = regexp.MustCompile(`type\s+([_a-zA-Z][_a-zA-Z0-9]*)\s+([_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*)`) + func ReDeclarationCheck(Import []ImportDeclaration, Glbl []GlobalDeclaration, Enum []EnumDeclaration, TypeDef []TypeDefinitionDeclaration, Strct []StructDeclaration, Func []FuncDeclaration) error { // Checks for the first declaration redeclared @@ -67,7 +92,17 @@ func ReDeclarationCheck(Import []ImportDeclaration, Glbl []GlobalDeclaration, En for i := 0; i < len(Func); i++ { for j := i + 1; j < len(Func); j++ { if Func[i].FuncName == Func[j].FuncName && Func[i].PackageID == Func[j].PackageID { - return fmt.Errorf("%v:%v: redeclaration error: func: %v", filepath.Base(Func[j].FileID), Func[j].LineNumber, Func[i].FuncName) + funcMethod1, err := ExtractMethod(Func[i]) + if err != nil { + return err + } + funcMethod2, err := ExtractMethod(Func[j]) + if err != nil { + return err + } + if funcMethod1 == funcMethod2 { + return fmt.Errorf("%v:%v: redeclaration error: func: %v", filepath.Base(Func[j].FileID), Func[j].LineNumber, Func[i].FuncName) + } } } } @@ -102,7 +137,7 @@ func GetDeclarations(source []byte, Glbls []GlobalDeclaration, Enums []EnumDecla return declarations } -func ExtractAllDeclarations(source []*loader.File) ([]ImportDeclaration, []GlobalDeclaration, []EnumDeclaration, []TypeDefinitionDeclaration, []StructDeclaration, []FuncDeclaration, error) { +func ExtractAllDeclarations(sourceCodeStrings []string, fileNames []string) ([]ImportDeclaration, []GlobalDeclaration, []EnumDeclaration, []TypeDefinitionDeclaration, []StructDeclaration, []FuncDeclaration, error) { //Variable declarations var Imports []ImportDeclaration @@ -113,28 +148,26 @@ func ExtractAllDeclarations(source []*loader.File) ([]ImportDeclaration, []Globa var Funcs []FuncDeclaration //Channel declarations - importChannel := make(chan []ImportDeclaration, len(source)) - globalChannel := make(chan []GlobalDeclaration, len(source)) - enumChannel := make(chan []EnumDeclaration, len(source)) - typeDefinitionChannel := make(chan []TypeDefinitionDeclaration, len(source)) - structChannel := make(chan []StructDeclaration, len(source)) - funcChannel := make(chan []FuncDeclaration, len(source)) - errorChannel := make(chan error, len(source)) + importChannel := make(chan []ImportDeclaration, len(sourceCodeStrings)) + globalChannel := make(chan []GlobalDeclaration, len(sourceCodeStrings)) + enumChannel := make(chan []EnumDeclaration, len(sourceCodeStrings)) + typeDefinitionChannel := make(chan []TypeDefinitionDeclaration, len(sourceCodeStrings)) + structChannel := make(chan []StructDeclaration, len(sourceCodeStrings)) + funcChannel := make(chan []FuncDeclaration, len(sourceCodeStrings)) + errorChannel := make(chan error, len(sourceCodeStrings)) var wg sync.WaitGroup // concurrent extractions start - for _, currentFile := range source { + for i, sourceCode := range sourceCodeStrings { wg.Add(1) - go func(currentFile *loader.File, globalChannel chan<- []GlobalDeclaration, enumChannel chan<- []EnumDeclaration, typeDefinition chan<- 
[]TypeDefinitionDeclaration, structChannel chan<- []StructDeclaration, funcChannel chan<- []FuncDeclaration, errorChannel chan<- error, wg *sync.WaitGroup) { + go func(sourceCode string, fileName string, globalChannel chan<- []GlobalDeclaration, enumChannel chan<- []EnumDeclaration, typeDefinition chan<- []TypeDefinitionDeclaration, structChannel chan<- []StructDeclaration, funcChannel chan<- []FuncDeclaration, errorChannel chan<- error, wg *sync.WaitGroup) { defer wg.Done() - srcBytes := currentFile.Content - - fileName := currentFile.FileName + srcBytes := []byte(sourceCode) replaceComments := ReplaceCommentsWithWhitespaces(srcBytes) replaceStringContents, err := ReplaceStringContentsWithWhitespaces(replaceComments) if err != nil { @@ -234,7 +267,7 @@ func ExtractAllDeclarations(source []*loader.File) ([]ImportDeclaration, []Globa }(funcChannel, replaceStringContents, fileName, wg) - }(currentFile, globalChannel, enumChannel, typeDefinitionChannel, structChannel, funcChannel, errorChannel, &wg) + }(sourceCode, fileNames[i], globalChannel, enumChannel, typeDefinitionChannel, structChannel, funcChannel, errorChannel, &wg) } wg.Wait() diff --git a/cmd/declaration_extractor/declaration_extractor_test.go b/cmd/declaration_extractor/declaration_extractor_test.go index cc741d44d..6b93b3f94 100644 --- a/cmd/declaration_extractor/declaration_extractor_test.go +++ b/cmd/declaration_extractor/declaration_extractor_test.go @@ -3,14 +3,17 @@ package declaration_extractor_test import ( "bytes" "errors" + "io" "os" "path/filepath" "runtime" "testing" "github.com/skycoin/cx/cmd/declaration_extractor" - "github.com/skycoin/cx/cmd/packageloader/file_output" - "github.com/skycoin/cx/cmd/packageloader/loader" + "github.com/skycoin/cx/cmd/fileloader" + "github.com/skycoin/cx/cx/ast" + cxinit "github.com/skycoin/cx/cx/init" + "github.com/skycoin/cx/cxparser/actions" ) //Sets the offset for windows or other os @@ -45,14 +48,22 @@ func TestDeclarationExtractor_ReplaceCommentsWithWhitespaces(t *testing.T) { for _, tc := range tests { t.Run(tc.scenario, func(t *testing.T) { - srcBytes, err := os.ReadFile(tc.testDir) + file, err := os.Open(tc.testDir) if err != nil { t.Fatal(err) } - wantBytes, err := os.ReadFile(tc.wantCommentReplaced) + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() + + wantFile, err := os.Open(tc.wantCommentReplaced) if err != nil { t.Fatal(err) } + wantSrc := bytes.NewBuffer(nil) + io.Copy(wantSrc, wantFile) + wantBytes := wantSrc.Bytes() + commentReplaced := declaration_extractor.ReplaceCommentsWithWhitespaces(srcBytes) if len(srcBytes) != len(commentReplaced) { @@ -102,14 +113,21 @@ func TestDeclarationExtractor_ReplaceStringContentsWithWhitespaces(t *testing.T) for _, tc := range tests { t.Run(tc.scenario, func(t *testing.T) { - srcBytes, err := os.ReadFile(tc.testDir) + file, err := os.Open(tc.testDir) if err != nil { t.Fatal(err) } - wantBytes, err := os.ReadFile(tc.wantStringContentsReplaced) + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() + + wantFile, err := os.Open(tc.wantStringContentsReplaced) if err != nil { t.Fatal(err) } + wantSrc := bytes.NewBuffer(nil) + io.Copy(wantSrc, wantFile) + wantBytes := wantSrc.Bytes() stringContentsReplaced, gotErr := declaration_extractor.ReplaceStringContentsWithWhitespaces(srcBytes) if len(srcBytes) != len(stringContentsReplaced) { @@ -208,10 +226,13 @@ func TestDeclarationExtractor_ExtractImports(t *testing.T) { for _, tc := range tests { t.Run(tc.scenario, func(t *testing.T) { - srcBytes, 
err := os.ReadFile(tc.testDir) + file, err := os.Open(tc.testDir) if err != nil { t.Fatal(err) } + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(srcBytes) if err != nil { @@ -335,10 +356,13 @@ func TestDeclarationExtractor_ExtractGlobals(t *testing.T) { for _, tc := range tests { t.Run(tc.scenario, func(t *testing.T) { - srcBytes, err := os.ReadFile(tc.testDir) + file, err := os.Open(tc.testDir) if err != nil { t.Fatal(err) } + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(srcBytes) ReplaceStringContentsWithWhitespaces, err := declaration_extractor.ReplaceStringContentsWithWhitespaces(ReplaceCommentsWithWhitespaces) @@ -540,10 +564,13 @@ func TestDeclarationExtractor_ExtractEnums(t *testing.T) { for _, tc := range tests { t.Run(tc.scenario, func(t *testing.T) { - srcBytes, err := os.ReadFile(tc.testDir) + file, err := os.Open(tc.testDir) if err != nil { t.Fatal(err) } + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(srcBytes) ReplaceStringContentsWithWhitespaces, err := declaration_extractor.ReplaceStringContentsWithWhitespaces(ReplaceCommentsWithWhitespaces) @@ -676,10 +703,13 @@ func TestDeclarationExtractor_ExtractTypeDefinitions(t *testing.T) { for _, tc := range tests { t.Run(tc.scenario, func(t *testing.T) { - srcBytes, err := os.ReadFile(tc.testDir) + file, err := os.Open(tc.testDir) if err != nil { t.Fatal(err) } + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(srcBytes) ReplaceStringContentsWithWhitespaces, err := declaration_extractor.ReplaceStringContentsWithWhitespaces(ReplaceCommentsWithWhitespaces) @@ -911,10 +941,13 @@ func TestDeclarationExtractor_ExtractStructs(t *testing.T) { for _, tc := range tests { t.Run(tc.scenario, func(t *testing.T) { - srcBytes, err := os.ReadFile(tc.testDir) + file, err := os.Open(tc.testDir) if err != nil { t.Fatal(err) } + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(srcBytes) ReplaceStringContentsWithWhitespaces, err := declaration_extractor.ReplaceStringContentsWithWhitespaces(ReplaceCommentsWithWhitespaces) if err != nil { @@ -1085,10 +1118,13 @@ func TestDeclarationExtractor_ExtractFuncs(t *testing.T) { for _, tc := range tests { t.Run(tc.scenario, func(t *testing.T) { - srcBytes, err := os.ReadFile(tc.testDir) + file, err := os.Open(tc.testDir) if err != nil { t.Fatal(err) } + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(srcBytes) ReplaceStringContentsWithWhitespaces, err := declaration_extractor.ReplaceStringContentsWithWhitespaces(ReplaceCommentsWithWhitespaces) @@ -1184,10 +1220,13 @@ func TestDeclarationExtractor_ReDeclarationCheck(t *testing.T) { for _, tc := range tests { t.Run(tc.scenario, func(t *testing.T) { - srcBytes, err := os.ReadFile(tc.testDir) + file, err := os.Open(tc.testDir) if err != nil { t.Fatal(err) } + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() ReplaceCommentsWithWhitespaces := 
declaration_extractor.ReplaceCommentsWithWhitespaces(srcBytes) ReplaceStringContentsWithWhitespaces, err := declaration_extractor.ReplaceStringContentsWithWhitespaces(ReplaceCommentsWithWhitespaces) @@ -1267,10 +1306,13 @@ func TestDeclarationExtractor_GetDeclarations(t *testing.T) { for _, tc := range tests { t.Run(tc.scenario, func(t *testing.T) { - srcBytes, err := os.ReadFile(tc.testDir) + file, err := os.Open(tc.testDir) if err != nil { t.Fatal(err) } + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(srcBytes) ReplaceStringContentsWithWhitespaces, err := declaration_extractor.ReplaceStringContentsWithWhitespaces(ReplaceCommentsWithWhitespaces) @@ -1440,18 +1482,16 @@ func TestDeclarationExtractor_ExtractAllDeclarations(t *testing.T) { for _, tc := range tests { t.Run(tc.scenario, func(t *testing.T) { - _, sourceCodes, _ := loader.ParseArgsForCX([]string{tc.testDir}, true) - err := loader.LoadCXProgram("mypkg1", sourceCodes, "bolt") - if err != nil { - t.Fatal(err) - } + actions.AST = cxinit.MakeProgram() - files, err := file_output.GetImportFiles("mypkg1", "bolt") + _, sourceCodes, _ := ast.ParseArgsForCX([]string{tc.testDir}, true) + + sourceCodesStrings, fileNames, err := fileloader.LoadFiles(sourceCodes) if err != nil { t.Fatal(err) } - Imports, Globals, Enums, TypeDefinitions, Structs, Funcs, gotErr := declaration_extractor.ExtractAllDeclarations(files) + Imports, Globals, Enums, TypeDefinitions, Structs, Funcs, gotErr := declaration_extractor.ExtractAllDeclarations(sourceCodesStrings, fileNames) if len(Imports) == 0 && len(Globals) == 0 && len(Enums) == 0 && len(Structs) == 0 && len(Funcs) == 0 { t.Error("No Declarations found") @@ -1502,18 +1542,42 @@ func BenchmarkDeclarationExtractor_ExtractFuncs(b *testing.B) { scenario string testDir string }{ - {scenario: "regular funcs", testDir: "./test_files/ExtractFuncs/HasFuncs.cx"}, + { + scenario: "regular funcs", + testDir: "./test_files/ExtractFuncs/HasFuncs.cx", + }, } for _, bm := range benchmarks { b.Run(bm.scenario, func(b *testing.B) { - srcBytes, err := os.ReadFile(bm.testDir) + file, err := os.Open(bm.testDir) if err != nil { b.Fatal(err) } + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() for n := 0; n < b.N; n++ { declaration_extractor.ExtractFuncs(srcBytes, bm.testDir) } }) } } + +func BenchmarkDeclarationExtractor_ExtractAllDeclarations(b *testing.B) { + for i := 0; i < b.N; i++ { + actions.AST = cxinit.MakeProgram() + + _, sourceCodes, _ := ast.ParseArgsForCX([]string{"./test_files/ExtractAllDeclarations/test.cx"}, true) + + sourceCodeStrings, fileNames, err := fileloader.LoadFiles(sourceCodes) + if err != nil { + b.Fatal(err) + } + + _, _, _, _, _, _, err = declaration_extractor.ExtractAllDeclarations(sourceCodeStrings, fileNames) + if err != nil { + b.Fatal(err) + } + } +} diff --git a/cmd/declaration_extractor/enums.go b/cmd/declaration_extractor/enums.go index 3e7c897a4..1f780393f 100644 --- a/cmd/declaration_extractor/enums.go +++ b/cmd/declaration_extractor/enums.go @@ -5,7 +5,6 @@ import ( "bytes" "fmt" "path/filepath" - "regexp" ) type EnumDeclaration struct { @@ -24,13 +23,6 @@ func ExtractEnums(source []byte, fileName string) ([]EnumDeclaration, error) { var EnumDeclarationsArray []EnumDeclaration var pkg string - //Regexes - rePkg := regexp.MustCompile(`^(?:.+\s+|\s*)package(?:\s+[\S\s]+|\s*)$`) - rePkgName := regexp.MustCompile(`package\s+([_a-zA-Z][_a-zA-Z0-9]*)`) - 
reEnumInit := regexp.MustCompile(`const\s+\(`) - rePrtsClose := regexp.MustCompile(`\)`) - reEnumDec := regexp.MustCompile(`([_a-zA-Z][_a-zA-Z0-9]*)(?:\s+([_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*)){0,1}(?:\s*\=\s*[\s\S]+\S+){0,1}`) - reader := bytes.NewReader(source) scanner := bufio.NewScanner(reader) scanner.Split(scanLinesWithLineTerminator) // set scanner SplitFunc to custom ScanLines func at line 55 @@ -45,30 +37,35 @@ func ExtractEnums(source []byte, fileName string) ([]EnumDeclaration, error) { line := scanner.Bytes() lineno++ + tokens := bytes.Fields(line) // Package declaration extraction - if rePkg.FindIndex(line) != nil { + if ContainsTokenByte(tokens, []byte("package")) { + + if len(tokens) != 2 { + return EnumDeclarationsArray, fmt.Errorf("%v:%v: syntax error: package declaration", filepath.Base(fileName), lineno) + } - matchPkg := rePkgName.FindSubmatch(line) + name := reName.Find(tokens[1]) - if matchPkg == nil || !bytes.Equal(matchPkg[0], bytes.TrimSpace(line)) { + if len(tokens[1]) != len(name) { return EnumDeclarationsArray, fmt.Errorf("%v:%v: syntax error: package declaration", filepath.Base(fileName), lineno) } - pkg = string(matchPkg[1]) + pkg = string(name) } // initialize enum, increment parenthesis depth and skip to next line // if const ( is found - if locs := reEnumInit.FindAllIndex(line, -1); locs != nil { + if reEnumInit.Find(line) != nil { EnumInit = true currentOffset += len(line) // increments the currentOffset by line len continue } // if ) is found and enum intialized, decrement parenthesis depth - if locs := rePrtsClose.FindAllIndex(line, -1); locs != nil && EnumInit { + if ContainsTokenByte(tokens, []byte(")")) && EnumInit { EnumInit = false Type = "" Index = 0 @@ -77,6 +74,10 @@ func ExtractEnums(source []byte, fileName string) ([]EnumDeclaration, error) { // if match is found and enum initialized and parenthesis depth is 1 if enumDec := reEnumDec.FindSubmatch(line); enumDec != nil && EnumInit { + if pkg == "" { + return EnumDeclarationsArray, fmt.Errorf("%v:%v: syntax error: missing package", filepath.Base(fileName), lineno) + } + if !bytes.Equal(enumDec[0], bytes.TrimSpace(line)) { return EnumDeclarationsArray, fmt.Errorf("%v:%v: syntax error: enum declaration", filepath.Base(fileName), lineno) } diff --git a/cmd/declaration_extractor/functions.go b/cmd/declaration_extractor/functions.go index c394886c0..cdb219e72 100644 --- a/cmd/declaration_extractor/functions.go +++ b/cmd/declaration_extractor/functions.go @@ -4,6 +4,8 @@ import ( "bufio" "bytes" "fmt" + "io" + "os" "path/filepath" "regexp" ) @@ -22,27 +24,6 @@ func ExtractFuncs(source []byte, fileName string) ([]FuncDeclaration, error) { var FuncDeclarationsArray []FuncDeclaration var pkg string - // Regexes - rePkg := regexp.MustCompile(`^(?:.+\s+|\s*)package(?:\s+[\S\s]+|\s*)$`) - rePkgName := regexp.MustCompile(`package\s+([_a-zA-Z][_a-zA-Z0-9]*)`) - reFunc := regexp.MustCompile(`^(?:.+\s+|\s*)func(?:\s+[\S\s]+|\([\S\s]+|\s*)$`) - reNotSpace := regexp.MustCompile(`\S+`) - - // Func Declaration regex for name extraction and syntax checking - // Components: - // func - func keyword - // (?:\s*\(\s*[_a-zA-Z]\w*\s+\*{0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*\s*\)\s*)|\s+) - [space/no space] [([reciever object]) [name] | [space]] - // ([_a-zA-Z]\w*) - name of func - // 
(?:\s*\(\s*(?:(?:[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*\s*,\s*)+[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*|(?:[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*){0,1})\s*\)){1,2} - [[space/no space] ([params])]{1,2} - // (?:(?:[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*\s*,\s*)+[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*|(?:[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*){0,1}) - [[param name] [data type] [,]]{0,1} [param name] [data type] | [param name] [data type] - // (?:[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)* - [param name] [*]{0,1} [\[[1-9][0-9]+|[0-9]\][*]{0,1}]{0,1} [word] [[.][word]]* - // - // First, finds the func keyword - // Second, finds out whether the function has a receiver object or not and extracts the func name - // Third, finds whether there's one or two pairs of parenthesis - // Forth, finds whether there's one or more params in the parenthesis - reFuncDec := regexp.MustCompile(`(func(?:(?:\s*\(\s*[_a-zA-Z]\w*\s+\*{0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*\s*\)\s*)|\s+)([_a-zA-Z]\w*)(?:\s*\(\s*(?:(?:[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*\s*,\s*)+[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*|(?:[_a-zA-Z]\w*\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*){0,1})\s*\)){1,2})(?:\s*{){0,1}`) - reader := bytes.NewReader(source) scanner := bufio.NewScanner(reader) scanner.Split(scanLinesWithLineTerminator) // set scanner SplitFunc to custom ScanLines func at line 55 @@ -54,25 +35,34 @@ func ExtractFuncs(source []byte, fileName string) ([]FuncDeclaration, error) { for scanner.Scan() { line := scanner.Bytes() lineno++ + tokens := bytes.Fields(line) // Package declaration extraction - if rePkg.FindIndex(line) != nil { + if ContainsTokenByte(tokens, []byte("package")) { + + if len(tokens) != 2 { + return FuncDeclarationsArray, fmt.Errorf("%v:%v: syntax error: package declaration", filepath.Base(fileName), lineno) + } - matchPkg := rePkgName.FindSubmatch(line) + name := reName.Find(tokens[1]) - if matchPkg == nil || !bytes.Equal(matchPkg[0], bytes.TrimSpace(line)) && reNotSpace.Find(line) != nil { + if len(tokens[1]) != len(name) { return FuncDeclarationsArray, fmt.Errorf("%v:%v: syntax error: package declaration", filepath.Base(fileName), lineno) } - pkg = string(matchPkg[1]) + pkg = string(name) } - if match := reFunc.FindIndex(line); match != nil { + if ContainsTokenByte(tokens, []byte("func")) || ContainsTokenByte(tokens, []byte("func(")) { funcBytes := reFuncDec.FindSubmatch(line) funcIdx := reFuncDec.FindSubmatchIndex(line) + if pkg == "" { + return FuncDeclarationsArray, fmt.Errorf("%v:%v: syntax error: missing package", filepath.Base(fileName), lineno) + } + if funcBytes == nil || !bytes.Equal(funcBytes[0], bytes.TrimSpace(line)) { return FuncDeclarationsArray, fmt.Errorf("%v:%v: syntax error: func declaration", filepath.Base(fileName), lineno) } @@ -94,3 +84,22 @@ func ExtractFuncs(source []byte, fileName string) ([]FuncDeclaration, error) { return FuncDeclarationsArray, nil } + +func ExtractMethod(fun 
FuncDeclaration) (string, error) { + + file, err := os.Open(fun.FileID) + if err != nil { + return "", err + } + + tmp := bytes.NewBuffer(nil) + io.Copy(tmp, file) + bytes := tmp.Bytes() + + reFuncMethod := regexp.MustCompile(`func\s*\(\s*\w+\s+([\w\*]+)\s*\)`) + funcMethod := reFuncMethod.FindSubmatch(bytes[fun.StartOffset : fun.StartOffset+fun.Length]) + if funcMethod == nil { + return "", nil + } + return string(funcMethod[1]), nil +} diff --git a/cmd/declaration_extractor/globals.go b/cmd/declaration_extractor/globals.go index 81e5eca24..a45ba3c7a 100644 --- a/cmd/declaration_extractor/globals.go +++ b/cmd/declaration_extractor/globals.go @@ -5,7 +5,6 @@ import ( "bytes" "fmt" "path/filepath" - "regexp" ) // All units for offset/length are in counted in bytes @@ -24,14 +23,6 @@ func ExtractGlobals(source []byte, fileName string) ([]GlobalDeclaration, error) var GlobalDeclarationsArray []GlobalDeclaration var pkg string - //Regexs - rePkg := regexp.MustCompile(`^(?:.+\s+|\s*)package(?:\s+[\S\s]+|\s*)$`) - rePkgName := regexp.MustCompile(`package\s+([_a-zA-Z][_a-zA-Z0-9]*)`) - reGlobal := regexp.MustCompile(`^(?:.+\s+|\s*)var(?:\s+[\S\s]+|\s*)$`) - reGlobalName := regexp.MustCompile(`var\s+([_a-zA-Z]\w*)\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*(?:\s*\=\s*[\s\S]+\S+){0,1}`) - reBodyOpen := regexp.MustCompile("{") - reBodyClose := regexp.MustCompile("}") - reader := bytes.NewReader(source) scanner := bufio.NewScanner(reader) scanner.Split(scanLinesWithLineTerminator) // set scanner SplitFunc to custom ScanLines func at line 55 @@ -44,41 +35,50 @@ func ExtractGlobals(source []byte, fileName string) ([]GlobalDeclaration, error) line := scanner.Bytes() lineno++ + tokens := bytes.Fields(line) // Package declaration extraction - if rePkg.FindIndex(line) != nil { + if ContainsTokenByte(tokens, []byte("package")) { + + if len(tokens) != 2 { + return GlobalDeclarationsArray, fmt.Errorf("%v:%v: syntax error: package declaration", filepath.Base(fileName), lineno) + } - matchPkg := rePkgName.FindSubmatch(line) + name := reName.Find(tokens[1]) - if matchPkg == nil || !bytes.Equal(matchPkg[0], bytes.TrimSpace(line)) { + if len(tokens[1]) != len(name) { return GlobalDeclarationsArray, fmt.Errorf("%v:%v: syntax error: package declaration", filepath.Base(fileName), lineno) } - pkg = string(matchPkg[1]) + pkg = string(name) } // if { is found increment body depth - if locs := reBodyOpen.FindAllIndex(line, -1); locs != nil { + if ContainsTokenByteInToken(tokens, []byte("{")) { inBlock++ } // if } is found decrement body depth - if locs := reBodyClose.FindAllIndex(line, -1); locs != nil { + if ContainsTokenByteInToken(tokens, []byte("}")) { inBlock-- } - // if match is found and body depth is 0 - if reGlobal.FindIndex(line) != nil { + if inBlock == 0 { - matchGlobal := reGlobalName.FindSubmatch(line) - matchGlobalIdx := reGlobalName.FindIndex(line) + // if match is found and body depth is 0 + if ContainsTokenByte(tokens, []byte("var")) { - if matchGlobal == nil || !bytes.Equal(matchGlobal[0], bytes.TrimSpace(line)) { - return GlobalDeclarationsArray, fmt.Errorf("%v:%v: syntax error: global declaration", filepath.Base(fileName), lineno) - } + if pkg == "" { + return GlobalDeclarationsArray, fmt.Errorf("%v:%v: syntax error: missing package", filepath.Base(fileName), lineno) + } + + matchGlobal := reGlobalName.FindSubmatch(line) + matchGlobalIdx := reGlobalName.FindIndex(line) - if inBlock == 0 { + if matchGlobal == nil || !bytes.Equal(matchGlobal[0], 
bytes.TrimSpace(line)) { + return GlobalDeclarationsArray, fmt.Errorf("%v:%v: syntax error: global declaration", filepath.Base(fileName), lineno) + } var tmp GlobalDeclaration diff --git a/cmd/declaration_extractor/imports.go b/cmd/declaration_extractor/imports.go index 400b4d213..9c00cdd98 100644 --- a/cmd/declaration_extractor/imports.go +++ b/cmd/declaration_extractor/imports.go @@ -5,7 +5,6 @@ import ( "bytes" "fmt" "path/filepath" - "regexp" ) type ImportDeclaration struct { @@ -20,10 +19,6 @@ func ExtractImports(source []byte, fileName string) ([]ImportDeclaration, error) var ImportDeclarationsArray []ImportDeclaration var pkg string - //Regexs - rePkg := regexp.MustCompile(`^(?:.+\s+|\s*)package(?:\s+[\S\s]+|\s*)$`) - rePkgName := regexp.MustCompile(`^\s*package\s+([_a-zA-Z][_a-zA-Z0-9]*)`) - reader := bytes.NewReader(source) scanner := bufio.NewScanner(reader) @@ -33,23 +28,31 @@ func ExtractImports(source []byte, fileName string) ([]ImportDeclaration, error) line := scanner.Bytes() lineno++ + tokens := bytes.Fields(line) // Package declaration extraction - if rePkg.FindIndex(line) != nil { + if ContainsTokenByte(tokens, []byte("package")) { + + if len(tokens) != 2 { + return ImportDeclarationsArray, fmt.Errorf("%v:%v: syntax error: package declaration", filepath.Base(fileName), lineno) + } - matchPkg := rePkgName.FindSubmatch(line) + name := reName.Find(tokens[1]) - if matchPkg == nil || !bytes.Equal(matchPkg[0], bytes.TrimSpace(line)) { + if len(tokens) != 2 || len(tokens[1]) != len(name) { return ImportDeclarationsArray, fmt.Errorf("%v:%v: syntax error: package declaration", filepath.Base(fileName), lineno) } - pkg = string(matchPkg[1]) + pkg = string(name) } // Extract Import - checkLine := bytes.Split(line, []byte(" ")) - if bytes.Equal(checkLine[0], []byte("import")) { + if ContainsTokenByte(tokens, []byte("import")) { + + if pkg == "" { + return ImportDeclarationsArray, fmt.Errorf("%v:%v: syntax error: missing package", filepath.Base(fileName), lineno) + } var tmp ImportDeclaration @@ -57,7 +60,7 @@ func ExtractImports(source []byte, fileName string) ([]ImportDeclaration, error) tmp.FileID = fileName tmp.LineNumber = lineno - tmp.ImportName = string(checkLine[1][1 : len(checkLine[1])-1]) + tmp.ImportName = string(tokens[1][1 : len(tokens[1])-1]) ImportDeclarationsArray = append(ImportDeclarationsArray, tmp) } diff --git a/cmd/declaration_extractor/structs.go b/cmd/declaration_extractor/structs.go index d60a97150..7da7018a8 100644 --- a/cmd/declaration_extractor/structs.go +++ b/cmd/declaration_extractor/structs.go @@ -5,7 +5,6 @@ import ( "bytes" "fmt" "path/filepath" - "regexp" ) type StructDeclaration struct { @@ -30,15 +29,6 @@ func ExtractStructs(source []byte, fileName string) ([]StructDeclaration, error) var StructDeclarationsArray []StructDeclaration var pkg string - // Regexes - reNotSpace := regexp.MustCompile(`\S+`) - rePkg := regexp.MustCompile(`^(?:.+\s+|\s*)package(?:\s+[\S\s]+|\s*)$`) - rePkgName := regexp.MustCompile(`package\s+([_a-zA-Z][_a-zA-Z0-9]*)`) - reStruct := regexp.MustCompile(`type\s+[_a-zA-Z][_a-zA-Z0-9]*\s+struct`) - reStructHeader := regexp.MustCompile(`type\s+([_a-zA-Z][_a-zA-Z0-9]*)\s+struct\s*{`) - reRightBrace := regexp.MustCompile("}") - reStructField := regexp.MustCompile(`([_a-zA-Z][_a-zA-Z0-9]*)\s+\*{0,1}\s*(?:\[(?:[1-9]\d+|[0-9]){0,1}\]\*{0,1}){0,1}\s*[_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*`) - reader := bytes.NewReader(source) scanner := bufio.NewScanner(reader) scanner.Split(scanLinesWithLineTerminator) // set scanner SplitFunc to custom 
ScanLines func at line 55 @@ -53,17 +43,22 @@ func ExtractStructs(source []byte, fileName string) ([]StructDeclaration, error) for scanner.Scan() { line := scanner.Bytes() lineno++ + tokens := bytes.Fields(line) // Package declaration extraction - if rePkg.FindIndex(line) != nil { + if ContainsTokenByte(tokens, []byte("package")) { + + if len(tokens) != 2 { + return StructDeclarationsArray, fmt.Errorf("%v:%v: syntax error: package declaration", filepath.Base(fileName), lineno) + } - matchPkg := rePkgName.FindSubmatch(line) + name := reName.Find(tokens[1]) - if matchPkg == nil || !bytes.Equal(matchPkg[0], bytes.TrimSpace(line)) { + if len(tokens) != 2 || len(tokens[1]) != len(name) { return StructDeclarationsArray, fmt.Errorf("%v:%v: syntax error: package declaration", filepath.Base(fileName), lineno) } - pkg = string(matchPkg[1]) + pkg = string(name) } @@ -89,7 +84,7 @@ func ExtractStructs(source []byte, fileName string) ([]StructDeclaration, error) } - if match := reRightBrace.FindIndex(line); match != nil && inBlock { + if ContainsTokenByteInToken(tokens, []byte("}")) && inBlock { inBlock = false structDeclaration.StructFields = structFieldsArray @@ -100,6 +95,10 @@ func ExtractStructs(source []byte, fileName string) ([]StructDeclaration, error) if inBlock && structDeclaration.LineNumber < lineno { + if pkg == "" { + return StructDeclarationsArray, fmt.Errorf("%v:%v: syntax error: missing package", filepath.Base(fileName), lineno) + } + var structField StructField matchStructField := reStructField.FindSubmatch(line) matchStructFieldIdx := reStructField.FindSubmatchIndex(line) diff --git a/cmd/declaration_extractor/test_files/ExtractAllDeclarations/test.cx b/cmd/declaration_extractor/test_files/ExtractAllDeclarations/test.cx new file mode 100644 index 000000000..ba9a0c0a2 --- /dev/null +++ b/cmd/declaration_extractor/test_files/ExtractAllDeclarations/test.cx @@ -0,0 +1,79 @@ +package main + +var Bool bool = true +var Byte i8 = 8B +var I16 i16 = 16H +var I32 i32 = 32 +var I64 i64 = 64L +var UByte ui8 = 9UB +var UI16 ui16 = 17UH +var UI32 ui32 = 33U +var UI64 ui64 = 65UL +var F32 f32 = 0.32 +var F64 f64 = 0.64D +var string str = "Hello World" +var Affordance aff + +var intArray [5]i32 +// var stringPointer *str + +// var abc string Var in comment +/* + var apple int + - Global in a multiline comment +*/ + +type CustomType struct { + fieldA str + fieldB i32 +} + +func (customType *CustomType) setFieldA (string str) { + customType.fieldA = string +} + +func main () { + + bool.print(Bool) + i8.print(Byte) + i16.print(I16) + i32.print(I32) + i64.print(I64) + ui8.print(UByte) + ui16.print(UI16) + ui32.print(UI32) + ui64.print(UI64) + f32.print(F32) + f64.print(F64) + str.print(string) + + //Addition + answer := add(I32, 6) + i32.print(answer) + + //Multiply + var quotient i32 + var remainder f32 + quotient, remainder = divide(9, 4) + i32.print(quotient) + f32.print(remainder) + + printer("Print me") +} + +func add(a i32, b i32)(answer i32) { + answer = a + b +} + +func divide(c i32, d i32)(quotient i32, remainder f32) { + quotient = c/d + remainder = i32.f32(c)%i32.f32(d) +} + +func printer(message str)() { + str.print(message) +} + +type AnotherType struct { + name str +} \ No newline at end of file diff --git a/cmd/declaration_extractor/type_definitions.go b/cmd/declaration_extractor/type_definitions.go index 1f6b992e8..af4cc9526 100644 --- a/cmd/declaration_extractor/type_definitions.go +++ b/cmd/declaration_extractor/type_definitions.go @@ -5,7 +5,6 @@ import ( "bytes" "fmt" "path/filepath" 
- "regexp" ) type TypeDefinitionDeclaration struct { @@ -21,12 +20,6 @@ func ExtractTypeDefinitions(source []byte, fileName string) ([]TypeDefinitionDec var TypeDefinitionDeclarationsArray []TypeDefinitionDeclaration var pkg string - // Regexes - rePkg := regexp.MustCompile(`^(?:.+\s+|\s*)package(?:\s+[\S\s]+|\s*)$`) - rePkgName := regexp.MustCompile(`package\s+([_a-zA-Z][_a-zA-Z0-9]*)`) - reType := regexp.MustCompile(`(?:.+\s+|\s*)type(?:\s+[\S\s]+|\s*)$`) - reTypeDefinition := regexp.MustCompile(`type\s+([_a-zA-Z][_a-zA-Z0-9]*)\s+([_a-zA-Z]\w*(?:\.[_a-zA-Z]\w*)*)`) - reader := bytes.NewReader(source) scanner := bufio.NewScanner(reader) scanner.Split(scanLinesWithLineTerminator) // set scanner SplitFunc to custom ScanLines func at line 55 @@ -38,21 +31,30 @@ func ExtractTypeDefinitions(source []byte, fileName string) ([]TypeDefinitionDec for scanner.Scan() { line := scanner.Bytes() lineno++ + tokens := bytes.Fields(line) // Package declaration extraction - if rePkg.FindIndex(line) != nil { + if ContainsTokenByte(tokens, []byte("package")) { + + if len(tokens) != 2 { + return TypeDefinitionDeclarationsArray, fmt.Errorf("%v:%v: syntax error: package declaration", filepath.Base(fileName), lineno) + } - matchPkg := rePkgName.FindSubmatch(line) + name := reName.Find(tokens[1]) - if matchPkg == nil || !bytes.Equal(matchPkg[0], bytes.TrimSpace(line)) { + if len(tokens) != 2 || len(tokens[1]) != len(name) { return TypeDefinitionDeclarationsArray, fmt.Errorf("%v:%v: syntax error: package declaration", filepath.Base(fileName), lineno) } - pkg = string(matchPkg[1]) + pkg = string(name) } - if reType.Find(line) != nil { + if ContainsTokenByte(tokens, []byte("type")) { + + if pkg == "" { + return TypeDefinitionDeclarationsArray, fmt.Errorf("%v:%v: syntax error: missing package", filepath.Base(fileName), lineno) + } typeDefinition := reTypeDefinition.FindSubmatch(line) typeDefinitionIdx := reTypeDefinition.FindSubmatchIndex(line) diff --git a/cmd/declaration_extractor/utils.go b/cmd/declaration_extractor/utils.go index ab2db3adc..7a07f4ad7 100644 --- a/cmd/declaration_extractor/utils.go +++ b/cmd/declaration_extractor/utils.go @@ -97,3 +97,21 @@ func ReplaceStringContentsWithWhitespaces(source []byte) ([]byte, error) { return sourceWithoutStringContents, nil } + +func ContainsTokenByte(tokenList [][]byte, tokenByte []byte) bool { + for _, token := range tokenList { + if bytes.Equal(token, tokenByte) { + return true + } + } + return false +} + +func ContainsTokenByteInToken(tokenList [][]byte, tokenByte []byte) bool { + for _, token := range tokenList { + if bytes.Contains(token, tokenByte) { + return true + } + } + return false +} diff --git a/cmd/fileloader/fileloader.go b/cmd/fileloader/fileloader.go new file mode 100644 index 000000000..b608414d9 --- /dev/null +++ b/cmd/fileloader/fileloader.go @@ -0,0 +1,153 @@ +package fileloader + +import ( + "bufio" + "bytes" + "io" + "os" + "path/filepath" + "regexp" + "strings" + + "github.com/skycoin/cx/cx/ast" + "github.com/skycoin/cx/cx/globals" + "github.com/skycoin/cx/cx/packages" + "github.com/skycoin/cx/cxparser/actions" + "github.com/skycoin/cx/cxparser/util/profiling" +) + +func LoadFiles(sourceCode []*os.File) (sourceCodeStrings []string, fileNames []string, err error) { + for _, source := range sourceCode { + tmp := bytes.NewBuffer(nil) + io.Copy(tmp, source) + sourceCodeStrings = append(sourceCodeStrings, tmp.String()) + fileNames = append(fileNames, source.Name()) + } + + reMultiCommentOpen := regexp.MustCompile(`/\*`) + reMultiCommentClose := 
regexp.MustCompile(`\*/`) + reComment := regexp.MustCompile("//") + + rePkg := regexp.MustCompile("package") + rePkgName := regexp.MustCompile(`(^|[\s])package\s+([_a-zA-Z][_a-zA-Z0-9]*)`) + + reImp := regexp.MustCompile("import") + reImpName := regexp.MustCompile(`(^|[\s])import\s+"([_a-zA-Z][_a-zA-Z0-9/-]*)"`) + + profiling.StartProfile("1. packages") + // 1. Identify all the packages and structs + for srcI, srcStr := range sourceCodeStrings { + srcName := fileNames[srcI] + profiling.StartProfile(srcName) + + reader := strings.NewReader(srcStr) + scanner := bufio.NewScanner(reader) + var commentedCode bool + for scanner.Scan() { + line := scanner.Bytes() + + // Identify whether we are in a comment or not. + commentLoc := reComment.FindIndex(line) + multiCommentOpenLoc := reMultiCommentOpen.FindIndex(line) + multiCommentCloseLoc := reMultiCommentClose.FindIndex(line) + if commentedCode && multiCommentCloseLoc != nil { + commentedCode = false + } + if commentedCode { + continue + } + if multiCommentOpenLoc != nil && !commentedCode && multiCommentCloseLoc == nil { + commentedCode = true + continue + } + + // At this point we know that we are *not* in a comment + // 1-a. Identify all the packages + if loc := rePkg.FindIndex(line); loc != nil { + if (commentLoc != nil && commentLoc[0] < loc[0]) || + (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] < loc[0]) || + (multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) { + // then it's commented out + continue + } + + if match := rePkgName.FindStringSubmatch(string(line)); match != nil { + if _, err := actions.AST.GetPackage(match[len(match)-1]); err != nil { + // then it hasn't been added + newPkg := ast.MakePackage(match[len(match)-1]) + pkgIdx := actions.AST.AddPackage(newPkg) + newPkg, err = actions.AST.GetPackageFromArray(pkgIdx) + if err != nil { + panic(err) + } + } + } + } + + } + profiling.StopProfile(srcName) + } // for range sourceCodeStrings + profiling.StopProfile("1. packages") + + profiling.StartProfile("2. imports") + for i, source := range sourceCodeStrings { + profiling.StartProfile(fileNames[i]) + + var commentedCode bool + + scanner := bufio.NewScanner(strings.NewReader(source)) + for scanner.Scan() { + line := scanner.Bytes() + + // we need to ignore function bodies + // it'll also ignore struct declaration's bodies, but this doesn't matter + commentLoc := reComment.FindIndex(line) + + multiCommentOpenLoc := reMultiCommentOpen.FindIndex(line) + multiCommentCloseLoc := reMultiCommentClose.FindIndex(line) + + if commentedCode && multiCommentCloseLoc != nil { + commentedCode = false + } + + if commentedCode { + continue + } + + if multiCommentOpenLoc != nil && !commentedCode && multiCommentCloseLoc == nil { + commentedCode = true + // continue + } + + // Identify all the package imports. + if loc := reImp.FindIndex(line); loc != nil { + if (commentLoc != nil && commentLoc[0] < loc[0]) || + (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] < loc[0]) || + (multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) { + // then it's commented out + continue + } + + if match := reImpName.FindStringSubmatch(string(line)); match != nil { + pkgName := match[len(match)-1] + // Checking if `pkgName` already exists and if it's not a standard library package. 
+ if _, err := actions.AST.GetPackage(pkgName); err != nil && !packages.IsDefaultPackage(pkgName) { + // _, sourceCode, srcNames := ParseArgsForCX([]string{fmt.Sprintf("%s%s", SRCPATH, pkgName)}, false) + _, sourceCode, _ := ast.ParseArgsForCX([]string{filepath.Join(globals.SRCPATH, pkgName)}, false) + s, f, err := LoadFiles(sourceCode) + if err != nil { + return nil, nil, err + } + sourceCodeStrings = append(sourceCodeStrings, s...) + fileNames = append(fileNames, f...) + } + } + } + + } + profiling.StopProfile(fileNames[i]) + } + profiling.StopProfile("2. imports") + + return sourceCodeStrings, fileNames, nil +} diff --git a/cmd/fileloader/fileloader_test.go b/cmd/fileloader/fileloader_test.go new file mode 100644 index 000000000..6a7c012bf --- /dev/null +++ b/cmd/fileloader/fileloader_test.go @@ -0,0 +1,84 @@ +package fileloader_test + +import ( + "testing" + + "github.com/skycoin/cx/cmd/fileloader" + "github.com/skycoin/cx/cx/ast" + cxinit "github.com/skycoin/cx/cx/init" + "github.com/skycoin/cx/cxparser/actions" +) + +func TestFileLoader_LoadFiles(t *testing.T) { + tests := []struct { + scenario string + testDir string + wantFiles []string + }{ + { + scenario: "One file one package", + testDir: "./test_files/One_file_one_package.cx", + wantFiles: []string{ + "test_files/One_file_one_package.cx", + }, + }, + { + scenario: "Has multiple packages in one file", + testDir: "./test_files/Has_multiple_packages_in_file.cx", + wantFiles: []string{ + "test_files/Has_multiple_packages_in_file.cx", + }, + }, + { + scenario: "Has imports", + testDir: "./test_files/Has_Imports/", + wantFiles: []string{ + "test_files/Has_Imports/testimport1/testimport1file1.cx", + "test_files/Has_Imports/testimport1/testimport1file2.cx", + "test_files/Has_Imports/testimport2/testimport2file.cx", + "test_files/Has_Imports/testimport3/testimport3file1.cx", + "test_files/Has_Imports/testimport4/testimport1file1.cx", + "test_files/Has_Imports/testimport4/testimport1file2.cx", + "test_files/Has_Imports/testmain.cx", + }, + }, + } + for _, tc := range tests { + t.Run(tc.scenario, func(t *testing.T) { + actions.AST = cxinit.MakeProgram() + + _, sourceCodes, _ := ast.ParseArgsForCX([]string{tc.testDir}, true) + + _, fileNames, err := fileloader.LoadFiles(sourceCodes) + if err != nil { + t.Fatal(err) + } + + for _, wantFile := range tc.wantFiles { + var match bool + for _, file := range fileNames { + if file == wantFile { + match = true + break + } + } + if !match { + t.Errorf("missing file: %s", wantFile) + } + } + }) + } +} + +func BenchmarkFileLoader_LoadFiles(b *testing.B) { + for i := 0; i < b.N; i++ { + actions.AST = cxinit.MakeProgram() + + _, sourceCodes, _ := ast.ParseArgsForCX([]string{"./test_files/One_file_one_package.cx"}, true) + + _, _, err := fileloader.LoadFiles(sourceCodes) + if err != nil { + b.Fatal(err) + } + } +} diff --git a/cmd/fileloader/test_files/Has_Imports/testimport1/testimport1file1.cx b/cmd/fileloader/test_files/Has_Imports/testimport1/testimport1file1.cx new file mode 100644 index 000000000..3f74cded8 --- /dev/null +++ b/cmd/fileloader/test_files/Has_Imports/testimport1/testimport1file1.cx @@ -0,0 +1,3 @@ +package testimport1 + +import "gl" \ No newline at end of file diff --git a/cmd/fileloader/test_files/Has_Imports/testimport1/testimport1file2.cx b/cmd/fileloader/test_files/Has_Imports/testimport1/testimport1file2.cx new file mode 100644 index 000000000..c1230a389 --- /dev/null +++ b/cmd/fileloader/test_files/Has_Imports/testimport1/testimport1file2.cx @@ -0,0 +1,2 @@ +package testimport1 
+ diff --git a/cmd/fileloader/test_files/Has_Imports/testimport2/testimport2file.cx b/cmd/fileloader/test_files/Has_Imports/testimport2/testimport2file.cx new file mode 100644 index 000000000..06195d84d --- /dev/null +++ b/cmd/fileloader/test_files/Has_Imports/testimport2/testimport2file.cx @@ -0,0 +1,4 @@ +package testimport2 + +import "testimport1" +import "testimport3" \ No newline at end of file diff --git a/cmd/fileloader/test_files/Has_Imports/testimport3/testimport3file1.cx b/cmd/fileloader/test_files/Has_Imports/testimport3/testimport3file1.cx new file mode 100644 index 000000000..a6d21942f --- /dev/null +++ b/cmd/fileloader/test_files/Has_Imports/testimport3/testimport3file1.cx @@ -0,0 +1,3 @@ +package testimport3 + +import "testimport1" \ No newline at end of file diff --git a/cmd/fileloader/test_files/Has_Imports/testimport4/testimport1file1.cx b/cmd/fileloader/test_files/Has_Imports/testimport4/testimport1file1.cx new file mode 100644 index 000000000..e295e858d --- /dev/null +++ b/cmd/fileloader/test_files/Has_Imports/testimport4/testimport1file1.cx @@ -0,0 +1,3 @@ +package testimport4 + +import "gl" \ No newline at end of file diff --git a/cmd/fileloader/test_files/Has_Imports/testimport4/testimport1file2.cx b/cmd/fileloader/test_files/Has_Imports/testimport4/testimport1file2.cx new file mode 100644 index 000000000..2bc44c965 --- /dev/null +++ b/cmd/fileloader/test_files/Has_Imports/testimport4/testimport1file2.cx @@ -0,0 +1,2 @@ +package testimport4 + diff --git a/cmd/fileloader/test_files/Has_Imports/testmain.cx b/cmd/fileloader/test_files/Has_Imports/testmain.cx new file mode 100644 index 000000000..d3f8e27c4 --- /dev/null +++ b/cmd/fileloader/test_files/Has_Imports/testmain.cx @@ -0,0 +1,5 @@ +package main + +import "os" +import "testimport2" +import "testimport1" \ No newline at end of file diff --git a/cmd/fileloader/test_files/Has_multiple_packages_in_file.cx b/cmd/fileloader/test_files/Has_multiple_packages_in_file.cx new file mode 100644 index 000000000..4afe6e362 --- /dev/null +++ b/cmd/fileloader/test_files/Has_multiple_packages_in_file.cx @@ -0,0 +1,84 @@ + +/* ---------------------------------------------- +This example explains, how we can use different packages in our main package and +how the Global variables and structures can be used in the main package from the other imported packages. +*/ + + +//Package for adding +// +package foo1 + +// Defining global structure in the foo1 package +//Point is the name of the structure +// +type Point struct { + +// x , y are the variables with file type i32 + x i32 + y i32 +} + +// This function is an example of adding variables +// +func double (n i32) (res i32) { + res = i32.add(n, n) +} + +// Package for multiplication +// +package foo2 + +// This function allow a variable to multiply with 3 +// +func triple (n i32) (res i32) { + res = i32.mul(n, 3) +} + + + + + +// Main package +// +package main + +// This is how to import various packages in the main package +import "foo1" +import "foo2" + +// main() is the starting function in the program. +// +func main () { + +// Point is the structure from foo1 package +// we are defining a variable p of type Point +//This is how we can define a variable from another package, by considering it as a type. 
+// + var p Point + +// we are assigning the values to the p variable +// Tis is how we can assign the values to a variable of type structure from an other package +// In foo1.Point{} foo1 is package and Point is the structure, by writing foo1.Point{} we can access Point from the foo1 and assign the values to x and y + p = foo1.Point{x: 10,y: 20} + +// p.x will allow to access the value in x +// p.y will allow to access the values in y +//simple print the i32 file type in the terminal +// + i32.print(p.x) + i32.print(p.y) + + // Defining variables + var double_return i32 + var triple_return i32 + +// here we are using a function double() from package foo1 and triple() from package foo2 +// + double_return = foo1.double(10) + triple_return = foo2.triple(10) + +// simple print the i32 file type in the terminal + i32.print(double_return) + i32.print(triple_return) +} \ No newline at end of file diff --git a/cmd/fileloader/test_files/One_file_one_package.cx b/cmd/fileloader/test_files/One_file_one_package.cx new file mode 100644 index 000000000..ba9a0c0a2 --- /dev/null +++ b/cmd/fileloader/test_files/One_file_one_package.cx @@ -0,0 +1,79 @@ +package main + +var Bool bool = true +var Byte i8 = 8B +var I16 i16 = 16H +var I32 i32 = 32 +var I64 i64 = 64L +var UByte ui8 = 9UB +var UI16 ui16 = 17UH +var UI32 ui32 = 33U +var UI64 ui64 = 65UL +var F32 f32 = 0.32 +var F64 f64 = 0.64D +var string str = "Hello World" +var Affordance aff + +var intArray [5]i32 +// var stringPointer *str + +// var abc string Var in comment +/* + var apple int + - Global in a multiline comment +*/ + +type CustomType struct { + fieldA str + fieldB i32 +} + +func (customType *CustomType) setFieldA (string str) { + customType.fieldA = string +} + +func main () { + + bool.print(Bool) + i8.print(Byte) + i16.print(I16) + i32.print(I32) + i64.print(I64) + ui8.print(UByte) + ui16.print(UI16) + ui32.print(UI32) + ui64.print(UI64) + f32.print(F32) + f64.print(F64) + str.print(string) + + //Addition + answer := add(I32, 6) + i32.print(answer) + + //Multiply + var quotient i32 + var remainder f32 + quotient, remainder = divide(9, 4) + i32.print(quotient) + f32.print(remainder) + + printer("Print me") +} + +func add(a i32, b i32)(answer i32) { + answer = a + b +} + +func divide(c i32, d i32)(quotient i32, remainder f32) { + quotient = c/d + remainder = i32.f32(c)%i32.f32(d) +} + +func printer(message str)() { + str.print(message) +} + +type AnotherType struct { + name str +} \ No newline at end of file diff --git a/cmd/type_checker/functions.go b/cmd/type_checker/functions.go index 4adea8e72..525397c6f 100644 --- a/cmd/type_checker/functions.go +++ b/cmd/type_checker/functions.go @@ -2,10 +2,11 @@ package type_checker import ( "bytes" + "io" + "os" "regexp" "github.com/skycoin/cx/cmd/declaration_extractor" - "github.com/skycoin/cx/cmd/packageloader/loader" "github.com/skycoin/cx/cx/ast" "github.com/skycoin/cx/cxparser/actions" ) @@ -13,7 +14,7 @@ import ( // Parse Function Headers // - takes in funcs from cx/cmd/declaration_extractor // - adds func headers to AST -func ParseFuncHeaders(files []*loader.File, funcs []declaration_extractor.FuncDeclaration) error { +func ParseFuncHeaders(funcs []declaration_extractor.FuncDeclaration) error { for _, fun := range funcs { @@ -36,38 +37,52 @@ func ParseFuncHeaders(files []*loader.File, funcs []declaration_extractor.FuncDe actions.AST.SelectPackage(fun.PackageID) - source, err := GetSourceBytes(files, fun.FileID) + file, err := os.Open(fun.FileID) if err != nil { return err } + tmp := 
bytes.NewBuffer(nil) + io.Copy(tmp, file) + source := tmp.Bytes() + funcDeclarationLine := source[fun.StartOffset : fun.StartOffset+fun.Length] - reFuncMethod := regexp.MustCompile(`func\s*\(\s*(.+)\s*\)\s*\S+\s*\(([\s\w,]*)\)(?:\s*\(([\s\w,]*)\))*`) - funcMethod := reFuncMethod.FindSubmatch(funcDeclarationLine) + reFuncMethod := regexp.MustCompile(`func\s*\(\s*.+\s*\)`) + funcMethod := reFuncMethod.Find(funcDeclarationLine) + reParams := regexp.MustCompile(`\(([\s\w\*\[\],\.]*)\)`) + params := reParams.FindAllSubmatch(funcDeclarationLine, -1) if funcMethod != nil { - receiverArg, err := ParseParameterDeclaration(funcMethod[1], pkg, fun.FileID, fun.LineNumber) + + receiverArg, err := ParseParameterDeclaration(params[0][1], pkg, fun.FileID, fun.LineNumber) if err != nil { return err } - fnIdx := actions.FunctionHeader(actions.AST, fun.FuncName, []*ast.CXArgument{receiverArg}, true) + fnName := receiverArg.StructType.Name + "." + fun.FuncName + + fn := ast.MakeFunction(fnName, actions.CurrentFile, fun.LineNumber) + _, fnIdx := pkg.AddFunction(actions.AST, fn) + newFn := actions.AST.GetFunctionFromArray(fnIdx) + newFn.AddInput(actions.AST, receiverArg) var inputs []*ast.CXArgument var outputs []*ast.CXArgument - if funcMethod[2] != nil && len(funcMethod[2]) != 0 { - inputs, err = ParseFuncParameters(funcMethod[2], pkg, fun.FileID, fun.LineNumber) + if params[1][1] != nil && len(params[1][1]) != 0 { + inputs, err = ParseFuncParameters(params[1][1], pkg, fun.FileID, fun.LineNumber) if err != nil { return err } } - if funcMethod[3] != nil && len(funcMethod[3]) != 0 { - outputs, err = ParseFuncParameters(funcMethod[3], pkg, fun.FileID, fun.LineNumber) - if err != nil { - return err + if len(params) == 3 { + if params[2][1] != nil && len(params[2][1]) != 0 { + outputs, err = ParseFuncParameters(params[2][1], pkg, fun.FileID, fun.LineNumber) + if err != nil { + return err + } } } @@ -75,25 +90,25 @@ func ParseFuncHeaders(files []*loader.File, funcs []declaration_extractor.FuncDe } else { - reFuncRegular := regexp.MustCompile(`func\s*\S+\s*\(([\s\w,]*)\)(?:\s*\(([\s\w,]*)\))*`) - funcRegular := reFuncRegular.FindSubmatch(funcDeclarationLine) - - fnIdx := actions.FunctionHeader(actions.AST, fun.FuncName, nil, false) + fn := ast.MakeFunction(fun.FuncName, fun.FileID, fun.LineNumber) + _, fnIdx := pkg.AddFunction(actions.AST, fn) var inputs []*ast.CXArgument var outputs []*ast.CXArgument - if funcRegular[1] != nil && len(funcRegular[1]) != 0 { - inputs, err = ParseFuncParameters(funcRegular[1], pkg, fun.FileID, fun.LineNumber) + if params[0][1] != nil && len(params[0][1]) != 0 { + inputs, err = ParseFuncParameters(params[0][1], pkg, fun.FileID, fun.LineNumber) if err != nil { return err } } - if funcRegular[2] != nil && len(funcRegular[2]) != 0 { - outputs, err = ParseFuncParameters(funcRegular[2], pkg, fun.FileID, fun.LineNumber) - if err != nil { - return err + if len(params) == 2 { + if params[1][1] != nil && len(params[1][1]) != 0 { + outputs, err = ParseFuncParameters(params[1][1], pkg, fun.FileID, fun.LineNumber) + if err != nil { + return err + } } } @@ -101,6 +116,7 @@ func ParseFuncHeaders(files []*loader.File, funcs []declaration_extractor.FuncDe } } + return nil } diff --git a/cmd/type_checker/globals.go b/cmd/type_checker/globals.go index 136faead7..8c2658689 100644 --- a/cmd/type_checker/globals.go +++ b/cmd/type_checker/globals.go @@ -1,10 +1,12 @@ package type_checker import ( + "bytes" + "io" + "os" "regexp" "github.com/skycoin/cx/cmd/declaration_extractor" - 
"github.com/skycoin/cx/cmd/packageloader/loader" "github.com/skycoin/cx/cx/ast" "github.com/skycoin/cx/cx/types" "github.com/skycoin/cx/cxparser/actions" @@ -13,7 +15,7 @@ import ( // Parse Globals // - takes in globals from cx/cmd/declaration_extractor // - adds globals to AST -func ParseGlobals(files []*loader.File, globals []declaration_extractor.GlobalDeclaration) error { +func ParseGlobals(globals []declaration_extractor.GlobalDeclaration) error { // Range over global declarations and parse for _, global := range globals { @@ -38,13 +40,17 @@ func ParseGlobals(files []*loader.File, globals []declaration_extractor.GlobalDe actions.AST.SelectPackage(global.PackageID) // Read File - source, err := GetSourceBytes(files, global.FileID) + file, err := os.Open(global.FileID) if err != nil { return err } + tmp := bytes.NewBuffer(nil) + io.Copy(tmp, file) + source := tmp.Bytes() + // Extract Declaration from file - reGlobalDeclaration := regexp.MustCompile(`var\s+(\w*)\s+([\*\[\]\w\.]+)(?:\s*\=\s*[\s\S]+\S+){0,1}`) + reGlobalDeclaration := regexp.MustCompile(`var\s+(\w*)\s+([\*\[\]\w\.]+)`) globalDeclaration := source[global.StartOffset : global.StartOffset+global.Length] globalTokens := reGlobalDeclaration.FindSubmatch(globalDeclaration) @@ -56,9 +62,7 @@ func ParseGlobals(files []*loader.File, globals []declaration_extractor.GlobalDe pkg.AddGlobal(actions.AST, globalArgIdx) - var declarationSpecifier *ast.CXArgument - - declarationSpecifier, err = ParseDeclarationSpecifier(globalTokens[2], global.FileID, global.LineNumber, declarationSpecifier) + declarationSpecifier, err := ParseDeclarationSpecifier(globalTokens[2], global.FileID, global.LineNumber) if err != nil { return err } diff --git a/cmd/type_checker/imports.go b/cmd/type_checker/imports.go index d550c58e6..6351b616d 100644 --- a/cmd/type_checker/imports.go +++ b/cmd/type_checker/imports.go @@ -1,22 +1,25 @@ package type_checker import ( + "strings" + "github.com/skycoin/cx/cmd/declaration_extractor" "github.com/skycoin/cx/cx/ast" + "github.com/skycoin/cx/cx/packages" "github.com/skycoin/cx/cxparser/actions" ) func ParseImports(imports []declaration_extractor.ImportDeclaration) error { - // Make and add import packages to AST + // Declare import in the correct packages for _, imprt := range imports { - // Get Package - pkg, err := actions.AST.GetPackage(imprt.ImportName) + // Get Package + pkg, err := actions.AST.GetPackage(imprt.PackageID) // If package not in AST if err != nil || pkg == nil { - newPkg := ast.MakePackage(imprt.ImportName) + newPkg := ast.MakePackage(imprt.PackageID) pkgIdx := actions.AST.AddPackage(newPkg) newPkg, err := actions.AST.GetPackageFromArray(pkgIdx) @@ -26,18 +29,18 @@ func ParseImports(imports []declaration_extractor.ImportDeclaration) error { pkg = newPkg } - } - // Declare import in the correct packages - for _, imprt := range imports { + impPkg, err := actions.AST.GetPackage(imprt.ImportName) + if (err != nil || impPkg == nil) && !packages.IsDefaultPackage(imprt.ImportName) { - // Get Package - pkg, err := actions.AST.GetPackage(imprt.PackageID) + var imprtName string = imprt.ImportName - // If package not in AST - if err != nil || pkg == nil { + if strings.Contains(imprt.ImportName, "/") { + tokens := strings.Split(imprtName, "/") + imprtName = tokens[len(tokens)-1] + } - newPkg := ast.MakePackage(imprt.PackageID) + newPkg := ast.MakePackage(imprtName) pkgIdx := actions.AST.AddPackage(newPkg) newPkg, err := actions.AST.GetPackageFromArray(pkgIdx) @@ -45,7 +48,7 @@ func ParseImports(imports 
[]declaration_extractor.ImportDeclaration) error { return err } - pkg = newPkg + impPkg = newPkg } actions.AST.SelectPackage(imprt.PackageID) diff --git a/cmd/type_checker/structs.go b/cmd/type_checker/structs.go index 65ed78ea8..da91f7964 100644 --- a/cmd/type_checker/structs.go +++ b/cmd/type_checker/structs.go @@ -1,8 +1,11 @@ package type_checker import ( + "bytes" + "io" + "os" + "github.com/skycoin/cx/cmd/declaration_extractor" - "github.com/skycoin/cx/cmd/packageloader/loader" "github.com/skycoin/cx/cx/ast" "github.com/skycoin/cx/cxparser/actions" ) @@ -10,7 +13,7 @@ import ( // Parse Structs // - takes in structs from cx/cmd/declaration_extractor // - adds structs to AST -func ParseStructs(files []*loader.File, structs []declaration_extractor.StructDeclaration) error { +func ParseStructs(structs []declaration_extractor.StructDeclaration) error { // Get Package for _, strct := range structs { @@ -32,19 +35,42 @@ func ParseStructs(files []*loader.File, structs []declaration_extractor.StructDe } - // Select Package to Add to AST - actions.AST.SelectPackage(strct.PackageID) - structCX := ast.MakeStruct(strct.StructName) - structCX.Package = ast.CXPackageIndex(pkg.Index) + pkg.AddStruct(actions.AST, structCX) + } + + // Get Package + for _, strct := range structs { + + pkg, err := actions.AST.GetPackage(strct.PackageID) + + // If package is not found + if err != nil { + + newPkg := ast.MakePackage(strct.PackageID) + pkgIdx := actions.AST.AddPackage(newPkg) + newPkg, err = actions.AST.GetPackageFromArray(pkgIdx) + + if err != nil { + return err + } - pkg = pkg.AddStruct(actions.AST, structCX) + pkg = newPkg + + } - src, err := GetSourceBytes(files, strct.FileID) + // Select Package to Add to AST + actions.AST.SelectPackage(strct.PackageID) + + file, err := os.Open(strct.FileID) if err != nil { return err } + tmp := bytes.NewBuffer(nil) + io.Copy(tmp, file) + src := tmp.Bytes() + var structFields []*ast.CXArgument for _, strctFieldDec := range strct.StructFields { diff --git a/cmd/type_checker/test_files/ParseFuncs/HasFuncs/src/test.cx b/cmd/type_checker/test_files/ParseFuncs/test.cx similarity index 100% rename from cmd/type_checker/test_files/ParseFuncs/HasFuncs/src/test.cx rename to cmd/type_checker/test_files/ParseFuncs/test.cx diff --git a/cmd/type_checker/test_files/ParseFuncs/HasFuncs2/src/testFile.cx b/cmd/type_checker/test_files/ParseFuncs/testFile.cx similarity index 100% rename from cmd/type_checker/test_files/ParseFuncs/HasFuncs2/src/testFile.cx rename to cmd/type_checker/test_files/ParseFuncs/testFile.cx diff --git a/cmd/type_checker/test_files/ParseStructs/HasStructs/src/test.cx b/cmd/type_checker/test_files/ParseStructs/test.cx similarity index 100% rename from cmd/type_checker/test_files/ParseStructs/HasStructs/src/test.cx rename to cmd/type_checker/test_files/ParseStructs/test.cx diff --git a/cmd/type_checker/test_files/ParseStructs/HasStructs2/src/testFile.cx b/cmd/type_checker/test_files/ParseStructs/testFile.cx similarity index 100% rename from cmd/type_checker/test_files/ParseStructs/HasStructs2/src/testFile.cx rename to cmd/type_checker/test_files/ParseStructs/testFile.cx diff --git a/cmd/type_checker/type_checker.go b/cmd/type_checker/type_checker.go index 145c4f1bf..95a1ade5b 100644 --- a/cmd/type_checker/type_checker.go +++ b/cmd/type_checker/type_checker.go @@ -2,12 +2,11 @@ package type_checker import ( "github.com/skycoin/cx/cmd/declaration_extractor" - "github.com/skycoin/cx/cmd/packageloader/loader" cxinit "github.com/skycoin/cx/cx/init" 
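ParseFuncHeaders, ParseGlobals and ParseStructs above all read a source file through the same os.Open / bytes.Buffer / io.Copy sequence, with the io.Copy error and the file handle left unchecked. A minimal sketch of an equivalent single-call read using os.ReadFile, which reports read errors and closes the file itself (the helper name and the path are illustrative, not part of this change):

    package main

    import (
    	"fmt"
    	"os"
    )

    // readSource mirrors what the functions above do with os.Open, bytes.Buffer
    // and io.Copy, but in one call that also surfaces the read error.
    func readSource(fileID string) ([]byte, error) {
    	return os.ReadFile(fileID)
    }

    func main() {
    	// "some_file.cx" is a placeholder path, not a file from this change.
    	src, err := readSource("some_file.cx")
    	if err != nil {
    		fmt.Println(err)
    		return
    	}
    	fmt.Printf("read %d bytes\n", len(src))
    }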
"github.com/skycoin/cx/cxparser/actions" ) -func ParseAllDeclarations(files []*loader.File, imports []declaration_extractor.ImportDeclaration, globals []declaration_extractor.GlobalDeclaration, structs []declaration_extractor.StructDeclaration, funcs []declaration_extractor.FuncDeclaration) error { +func ParseAllDeclarations(imports []declaration_extractor.ImportDeclaration, globals []declaration_extractor.GlobalDeclaration, structs []declaration_extractor.StructDeclaration, funcs []declaration_extractor.FuncDeclaration) error { // Make AST if not made yet if actions.AST == nil { @@ -19,17 +18,17 @@ func ParseAllDeclarations(files []*loader.File, imports []declaration_extractor. return err } - err = ParseStructs(files, structs) + err = ParseStructs(structs) if err != nil { return err } - err = ParseGlobals(files, globals) + err = ParseGlobals(globals) if err != nil { return err } - err = ParseFuncHeaders(files, funcs) + err = ParseFuncHeaders(funcs) if err != nil { return err } diff --git a/cmd/type_checker/type_checker_test.go b/cmd/type_checker/type_checker_test.go index 9fee71d2b..8dbffaebc 100644 --- a/cmd/type_checker/type_checker_test.go +++ b/cmd/type_checker/type_checker_test.go @@ -3,12 +3,13 @@ package type_checker_test import ( "bytes" "fmt" + "io" "os" + "path/filepath" "testing" "github.com/skycoin/cx/cmd/declaration_extractor" - "github.com/skycoin/cx/cmd/packageloader/file_output" - "github.com/skycoin/cx/cmd/packageloader/loader" + "github.com/skycoin/cx/cmd/fileloader" "github.com/skycoin/cx/cmd/type_checker" "github.com/skycoin/cx/cx/ast" "github.com/skycoin/cx/cx/constants" @@ -142,8 +143,7 @@ func TestTypeChecker_ParseDeclarationSpecifier(t *testing.T) { actions.DeclareImport(actions.AST, tc.pkgName, tc.fileName, tc.lineno) } - var gotDeclarationSpecifier *ast.CXArgument - gotDeclarationSpecifier, gotErr := type_checker.ParseDeclarationSpecifier([]byte(tc.testString), tc.fileName, tc.lineno, gotDeclarationSpecifier) + gotDeclarationSpecifier, gotErr := type_checker.ParseDeclarationSpecifier([]byte(tc.testString), tc.fileName, tc.lineno) gotTypeSignature := ast.GetCXTypeSignatureRepresentationOfCXArg(actions.AST, gotDeclarationSpecifier) gotDeclarationSpecifierFormattedString := ast.GetFormattedType(actions.AST, gotTypeSignature) @@ -314,10 +314,13 @@ func TestTypeChecker_ParseImports(t *testing.T) { actions.AST = cxinit.MakeProgram() - srcBytes, err := os.ReadFile(tc.testDir) + file, err := os.Open(tc.testDir) if err != nil { - t.Error(err) + t.Fatal(err) } + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(srcBytes) @@ -491,30 +494,26 @@ func TestTypeChecker_ParseGlobals(t *testing.T) { actions.AST = cxinit.MakeProgram() - _, sourceCode, _ := loader.ParseArgsForCX([]string{tc.testDir}, true) - - err := loader.LoadCXProgram("test", sourceCode, "bolt") - if err != nil { - t.Fatal(err) - } - - files, err := file_output.GetImportFiles("test", "bolt") + file, err := os.Open(tc.testDir) if err != nil { t.Fatal(err) } + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() - ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(files[0].Content) + ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(srcBytes) ReplaceStringContentsWithWhitespaces, err := declaration_extractor.ReplaceStringContentsWithWhitespaces(ReplaceCommentsWithWhitespaces) if err != nil { t.Fatal(err) } - 
Globals, err := declaration_extractor.ExtractGlobals(ReplaceStringContentsWithWhitespaces, files[0].FileName) + Globals, err := declaration_extractor.ExtractGlobals(ReplaceStringContentsWithWhitespaces, filepath.Base(tc.testDir)) if err != nil { t.Fatal(err) } - err = type_checker.ParseGlobals(files, Globals) + err = type_checker.ParseGlobals(Globals) if err != nil { t.Fatal(err) } @@ -613,7 +612,7 @@ func TestTypeChecker_ParseStructs(t *testing.T) { }{ { scenario: "Has Structs", - testDir: "./test_files/ParseStructs/HasStructs", + testDir: "./test_files/ParseStructs/test.cx", structTypeSignatures: []StructTypeSignature{ { Name: "CustomType", @@ -648,7 +647,7 @@ func TestTypeChecker_ParseStructs(t *testing.T) { }, { scenario: "Has Structs 2", - testDir: "./test_files/ParseStructs/HasStructs2", + testDir: "./test_files/ParseStructs/testFile.cx", structTypeSignatures: []StructTypeSignature{ { Name: "animal", @@ -676,24 +675,23 @@ func TestTypeChecker_ParseStructs(t *testing.T) { actions.AST = cxinit.MakeProgram() - _, sourceCode, _ := loader.ParseArgsForCX([]string{tc.testDir}, true) - - err := loader.LoadCXProgram("test", sourceCode, "bolt") + file, err := os.Open(tc.testDir) if err != nil { t.Fatal(err) } + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() + + ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(srcBytes) - files, err := file_output.GetImportFiles("test", "bolt") + structs, err := declaration_extractor.ExtractStructs(ReplaceCommentsWithWhitespaces, tc.testDir) + + err = type_checker.ParseStructs(structs) if err != nil { t.Fatal(err) } - ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(files[0].Content) - - structs, err := declaration_extractor.ExtractStructs(ReplaceCommentsWithWhitespaces, files[0].FileName) - - type_checker.ParseStructs(files, structs) - program := actions.AST for _, wantStruct := range tc.structTypeSignatures { @@ -787,7 +785,7 @@ func TestTypeChecker_ParseFuncHeaders(t *testing.T) { }{ { scenario: "Has funcs", - testDir: "./test_files/ParseFuncs/HasFuncs", + testDir: "./test_files/ParseFuncs/test.cx", functionCXs: []CXFunc{ { Name: "main", @@ -818,7 +816,7 @@ func TestTypeChecker_ParseFuncHeaders(t *testing.T) { }, { scenario: "Has funcs 2", - testDir: "./test_files/ParseFuncs/HasFuncs2", + testDir: "./test_files/ParseFuncs/testFile.cx", functionCXs: []CXFunc{ { Name: "main", @@ -841,27 +839,29 @@ func TestTypeChecker_ParseFuncHeaders(t *testing.T) { actions.AST = cxinit.MakeProgram() - _, sourceCode, _ := loader.ParseArgsForCX([]string{tc.testDir}, true) - - err := loader.LoadCXProgram("test", sourceCode, "bolt") + file, err := os.Open(tc.testDir) if err != nil { t.Fatal(err) } + src := bytes.NewBuffer(nil) + io.Copy(src, file) + srcBytes := src.Bytes() - files, err := file_output.GetImportFiles("test", "bolt") + ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(srcBytes) + ReplaceStringContentsWithWhitespaces, err := declaration_extractor.ReplaceStringContentsWithWhitespaces(ReplaceCommentsWithWhitespaces) if err != nil { t.Fatal(err) } - ReplaceCommentsWithWhitespaces := declaration_extractor.ReplaceCommentsWithWhitespaces(files[0].Content) - ReplaceStringContentsWithWhitespaces, err := declaration_extractor.ReplaceStringContentsWithWhitespaces(ReplaceCommentsWithWhitespaces) + funcs, err := declaration_extractor.ExtractFuncs(ReplaceStringContentsWithWhitespaces, tc.testDir) if err != nil { t.Fatal(err) } - funcs, err 
:= declaration_extractor.ExtractFuncs(ReplaceStringContentsWithWhitespaces, files[0].FileName) - - type_checker.ParseFuncHeaders(files, funcs) + err = type_checker.ParseFuncHeaders(funcs) + if err != nil { + t.Fatal(err) + } program := actions.AST @@ -1070,7 +1070,7 @@ func TestTypeChecker_ParseAllDeclarations(t *testing.T) { }, { Name: "CustomType.setFieldA", - Inputs: "main.customType *CustomType, string str", + Inputs: "customType *CustomType, string str", }, }, }, @@ -1142,7 +1142,7 @@ func TestTypeChecker_ParseAllDeclarations(t *testing.T) { Funcs: []Func{ { Name: "Animal.Speak", - Inputs: "helper.a *Animal", + Inputs: "a *Animal", }, }, }, @@ -1168,26 +1168,21 @@ func TestTypeChecker_ParseAllDeclarations(t *testing.T) { for _, tc := range tests { t.Run(tc.scenario, func(t *testing.T) { - actions.AST = nil + actions.AST = cxinit.MakeProgram() - _, sourceCode, _ := loader.ParseArgsForCX([]string{tc.testDir}, true) + _, sourceCodes, _ := ast.ParseArgsForCX([]string{tc.testDir}, true) - err := loader.LoadCXProgram("test", sourceCode, "bolt") + sourceCodeStrings, fileNames, err := fileloader.LoadFiles(sourceCodes) if err != nil { t.Fatal(err) } - files, err := file_output.GetImportFiles("test", "bolt") - if err != nil { - t.Fatal(err) - } - - Imports, Globals, Enums, TypeDefinitions, Structs, Funcs, gotErr := declaration_extractor.ExtractAllDeclarations(files) - if (Enums != nil && TypeDefinitions != nil) || gotErr != nil { + Imports, Globals, _, _, Structs, Funcs, gotErr := declaration_extractor.ExtractAllDeclarations(sourceCodeStrings, fileNames) + if gotErr != nil { t.Fatal(gotErr) } - err = type_checker.ParseAllDeclarations(files, Imports, Globals, Structs, Funcs) + err = type_checker.ParseAllDeclarations(Imports, Globals, Structs, Funcs) if err != nil { t.Fatal(err) } @@ -1361,3 +1356,26 @@ func getFormattedParam(prgrm *ast.CXProgram, paramTypeSigIdxs []ast.CXTypeSignat } } + +func BenchmarkTypeChecker_ParseAllDeclarations(b *testing.B) { + for i := 0; i < b.N; i++ { + actions.AST = cxinit.MakeProgram() + + _, sourceCodes, _ := ast.ParseArgsForCX([]string{"./test_files/ParseAllDeclarations/HasDeclarations"}, true) + + sourceCodeStrings, fileNames, err := fileloader.LoadFiles(sourceCodes) + if err != nil { + b.Fatal(err) + } + + Imports, Globals, _, _, Structs, Funcs, err := declaration_extractor.ExtractAllDeclarations(sourceCodeStrings, fileNames) + if err != nil { + b.Fatal(err) + } + + err = type_checker.ParseAllDeclarations(Imports, Globals, Structs, Funcs) + if err != nil { + b.Fatal(err) + } + } +} diff --git a/cmd/type_checker/util.go b/cmd/type_checker/util.go index dbc198db4..0c41dc11f 100644 --- a/cmd/type_checker/util.go +++ b/cmd/type_checker/util.go @@ -8,7 +8,6 @@ import ( "strconv" "unicode" - "github.com/skycoin/cx/cmd/packageloader/loader" "github.com/skycoin/cx/cx/ast" "github.com/skycoin/cx/cx/constants" "github.com/skycoin/cx/cx/types" @@ -17,7 +16,7 @@ import ( func ParseParameterDeclaration(parameterString []byte, pkg *ast.CXPackage, fileName string, lineno int) (*ast.CXArgument, error) { var parameterDeclaration *ast.CXArgument - reParameterDeclaration := regexp.MustCompile(`(\w+)\s+(.+)`) + reParameterDeclaration := regexp.MustCompile(`(\w+)((?:(?:\s*[\[\]\*\d]+|\s+)\w+(?:\.\w+)*))`) parameterDeclarationTokens := reParameterDeclaration.FindSubmatch(parameterString) // Check if the tokenized result is empty @@ -32,7 +31,7 @@ func ParseParameterDeclaration(parameterString []byte, pkg *ast.CXPackage, fileN declarator.Name = string(parameterDeclarationTokens[1]) 
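The new reParameterDeclaration pattern above splits a parameter into its name and a type specifier that may carry array, slice or pointer prefixes and a package-qualified type. A small, self-contained sketch of how that regex behaves (the sample parameter strings are illustrative, not taken from the test suite):

    package main

    import (
    	"bytes"
    	"fmt"
    	"regexp"
    )

    // The parameter pattern from the change above: group 1 captures the name,
    // group 2 the type specifier.
    var reParam = regexp.MustCompile(`(\w+)((?:(?:\s*[\[\]\*\d]+|\s+)\w+(?:\.\w+)*))`)

    func main() {
    	for _, p := range [][]byte{[]byte("x i32"), []byte("arr [5]*foo.Bar")} {
    		m := reParam.FindSubmatch(p)
    		// TrimSpace matches the bytes.TrimSpace applied before parsing the type.
    		fmt.Printf("name=%q type=%q\n", m[1], bytes.TrimSpace(m[2]))
    	}
    }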
//Set the decalaration type - parameterDeclaration, err := ParseDeclarationSpecifier(parameterDeclarationTokens[2], fileName, lineno, parameterDeclaration) + parameterDeclaration, err := ParseDeclarationSpecifier(bytes.TrimSpace(parameterDeclarationTokens[2]), fileName, lineno) if err != nil { return nil, err } @@ -44,97 +43,85 @@ func ParseParameterDeclaration(parameterString []byte, pkg *ast.CXPackage, fileN return parameterDeclaration, nil } -func ParseDeclarationSpecifier(declarationSpecifierByte []byte, fileName string, lineno int, declarationSpecifier *ast.CXArgument) (*ast.CXArgument, error) { +func ParseDeclarationSpecifier(declarationSpecifierByte []byte, fileName string, lineno int) (*ast.CXArgument, error) { + var declarationSpecifier *ast.CXArgument // Base case if all parts are parsed - if declarationSpecifierByte == nil || len(declarationSpecifierByte) == 0 { - return declarationSpecifier, nil - } + for len(declarationSpecifierByte) > 0 { + // Checks last byte to determine what to parse + lastByte := declarationSpecifierByte[len(declarationSpecifierByte)-1] - // Checks last byte to determine what to parse - lastByte := declarationSpecifierByte[len(declarationSpecifierByte)-1] + if unicode.IsLetter(rune(lastByte)) || unicode.IsNumber(rune(lastByte)) || lastByte == '_' { - if unicode.IsLetter(rune(lastByte)) || unicode.IsNumber(rune(lastByte)) || lastByte == '_' { + reWords := regexp.MustCompile(`[\w\.]+`) + words := reWords.FindAll(declarationSpecifierByte, -1) + wordsIdx := reWords.FindAllIndex(declarationSpecifierByte, -1) + newLastIdx := wordsIdx[len(wordsIdx)-1][0] - reWords := regexp.MustCompile(`[\w\.]+`) - words := reWords.FindAll(declarationSpecifierByte, -1) - wordsIdx := reWords.FindAllIndex(declarationSpecifierByte, -1) - newLastIdx := wordsIdx[len(wordsIdx)-1][0] + dataType := words[len(words)-1] + splitDataType := bytes.Split(dataType, []byte(".")) - dataType := words[len(words)-1] - splitDataType := bytes.Split(dataType, []byte(".")) + declarationSpecifierByte = declarationSpecifierByte[:newLastIdx] - newDeclarationSpecifierByte := declarationSpecifierByte[:newLastIdx] + if len(splitDataType) == 1 { - if len(splitDataType) == 1 { + // Types like i32, str, aff, etc... + if val, ok := TypesMap[string(splitDataType[0])]; ok { + declarationSpecifier = actions.DeclarationSpecifiersBasic(val) + continue + } - // Types like i32, str, aff, etc... 
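ParseDeclarationSpecifier now walks the specifier iteratively instead of recursing: it inspects the last byte and peels off the base type first, then any pointer, array or slice markers working leftwards (the remaining branches appear just below in this hunk). A rough, standalone sketch of that right-to-left order, using simplified patterns and an illustrative input rather than the CX AST calls:

    package main

    import (
    	"fmt"
    	"regexp"
    	"strings"
    )

    // parseOrder lists the order in which the loop would peel a declaration
    // specifier from the right: base type, then each pointer/array/slice wrapper.
    func parseOrder(spec string) []string {
    	reWord := regexp.MustCompile(`[\w\.]+$`)
    	reBracket := regexp.MustCompile(`\[\s*\d*\s*\]$`)
    	var steps []string
    	for len(spec) > 0 {
    		switch {
    		case reWord.MatchString(spec):
    			m := reWord.FindString(spec)
    			steps = append(steps, "type "+m)
    			spec = strings.TrimSuffix(spec, m)
    		case strings.HasSuffix(spec, "*"):
    			steps = append(steps, "pointer")
    			spec = strings.TrimSuffix(spec, "*")
    		case reBracket.MatchString(spec):
    			m := reBracket.FindString(spec)
    			steps = append(steps, "array/slice "+m)
    			spec = strings.TrimSuffix(spec, m)
    		default:
    			return append(steps, "error")
    		}
    	}
    	return steps
    }

    func main() {
    	fmt.Println(parseOrder("[5]*foo.Bar")) // [type foo.Bar pointer array/slice [5]]
    }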
- if val, ok := TypesMap[string(splitDataType[0])]; ok { - newDeclarationSpecifierArg := actions.DeclarationSpecifiersBasic(val) - return ParseDeclarationSpecifier(newDeclarationSpecifierByte, fileName, lineno, newDeclarationSpecifierArg) + // Structs + declarationSpecifier = actions.DeclarationSpecifiersStruct(actions.AST, string(splitDataType[0]), "", false, fileName, lineno) + continue } - // Structs - newDeclarationSpecifierArg := actions.DeclarationSpecifiersStruct(actions.AST, string(splitDataType[0]), "", false, fileName, lineno) - return ParseDeclarationSpecifier(newDeclarationSpecifierByte, fileName, lineno, newDeclarationSpecifierArg) - } + // External types + if val, ok := TypesMap[string(splitDataType[0])]; ok { + declarationSpecifier = actions.DeclarationSpecifiersStruct(actions.AST, string(splitDataType[1]), val.Name(), true, fileName, lineno) + continue + } - // External types - if val, ok := TypesMap[string(splitDataType[0])]; ok { - newDeclarationSpecifierArg := actions.DeclarationSpecifiersStruct(actions.AST, string(splitDataType[1]), val.Name(), true, fileName, lineno) - return ParseDeclarationSpecifier(newDeclarationSpecifierByte, fileName, lineno, newDeclarationSpecifierArg) + // External structs + declarationSpecifier = actions.DeclarationSpecifiersStruct(actions.AST, string(splitDataType[1]), string(splitDataType[0]), true, fileName, lineno) + continue } - // External structs - newDeclarationSpecifierArg := actions.DeclarationSpecifiersStruct(actions.AST, string(splitDataType[1]), string(splitDataType[0]), true, fileName, lineno) - return ParseDeclarationSpecifier(newDeclarationSpecifierByte, fileName, lineno, newDeclarationSpecifierArg) - } - - if lastByte == ']' { - reBrackets := regexp.MustCompile(`\[\s*(\d*)\s*\]`) - brackets := reBrackets.FindAllSubmatch(declarationSpecifierByte, -1) - bracketsIdx := reBrackets.FindAllIndex(declarationSpecifierByte, -1) - newLastIdx := bracketsIdx[len(bracketsIdx)-1][0] - reNumber := regexp.MustCompile(`\d+`) - number := reNumber.Find(brackets[len(brackets)-1][1]) - newDeclarationSpecifierByte := declarationSpecifierByte[:newLastIdx] - - // Arrays - if number != nil { - byteToInt, err := strconv.Atoi(string(number)) - if err != nil { - return declarationSpecifier, err + if lastByte == ']' { + reBrackets := regexp.MustCompile(`\[\s*(\d*)\s*\]`) + brackets := reBrackets.FindAllSubmatch(declarationSpecifierByte, -1) + bracketsIdx := reBrackets.FindAllIndex(declarationSpecifierByte, -1) + newLastIdx := bracketsIdx[len(bracketsIdx)-1][0] + reNumber := regexp.MustCompile(`\d+`) + number := reNumber.Find(brackets[len(brackets)-1][1]) + declarationSpecifierByte = declarationSpecifierByte[:newLastIdx] + + // Arrays + if number != nil { + byteToInt, err := strconv.Atoi(string(number)) + if err != nil { + return declarationSpecifier, err + } + + declarationSpecifier.Lengths = append(declarationSpecifier.Lengths, types.Pointer(byteToInt)) + declarationSpecifier = actions.DeclarationSpecifiers(declarationSpecifier, declarationSpecifier.Lengths, constants.DECL_ARRAY) + continue } - declarationSpecifier.Lengths = append(declarationSpecifier.Lengths, types.Pointer(byteToInt)) - newDeclarationSpecifierArg := actions.DeclarationSpecifiers(declarationSpecifier, declarationSpecifier.Lengths, constants.DECL_ARRAY) - return ParseDeclarationSpecifier(newDeclarationSpecifierByte, fileName, lineno, newDeclarationSpecifierArg) + // Slices + declarationSpecifier = actions.DeclarationSpecifiers(declarationSpecifier, []types.Pointer{0}, 
constants.DECL_SLICE) + continue } - // Slices - newDeclarationSpecifierArg := actions.DeclarationSpecifiers(declarationSpecifier, []types.Pointer{0}, constants.DECL_SLICE) - return ParseDeclarationSpecifier(newDeclarationSpecifierByte, fileName, lineno, newDeclarationSpecifierArg) - } - - // Pointer - if lastByte == '*' { - newLastIdx := bytes.LastIndex(declarationSpecifierByte, []byte("*")) - newDeclarationSpecifierByte := declarationSpecifierByte[:newLastIdx] - newDeclarationSpecifierArg := actions.DeclarationSpecifiers(declarationSpecifier, []types.Pointer{0}, constants.DECL_POINTER) - return ParseDeclarationSpecifier(newDeclarationSpecifierByte, fileName, lineno, newDeclarationSpecifierArg) - - } - - // If bytes don't match any of the cases - return nil, fmt.Errorf("%v: %d: declaration specifier error", fileName, lineno) -} - -// Finds the SourceBytes from the files array -func GetSourceBytes(files []*loader.File, fileName string) ([]byte, error) { - for _, file := range files { - if file.FileName == fileName { - return file.Content, nil + // Pointer + if lastByte == '*' { + newLastIdx := bytes.LastIndex(declarationSpecifierByte, []byte("*")) + declarationSpecifierByte = declarationSpecifierByte[:newLastIdx] + declarationSpecifier = actions.DeclarationSpecifiers(declarationSpecifier, []types.Pointer{0}, constants.DECL_POINTER) + continue } + // If bytes don't match any of the cases + return declarationSpecifier, fmt.Errorf("%v: %d: declaration specifier error", fileName, lineno) } - return nil, fmt.Errorf("%s not found", fileName) + return declarationSpecifier, nil } diff --git a/cxparser/cxparsing/cxparsing.go b/cxparser/cxparsing/cxparsing.go index 22e525266..0761b636c 100644 --- a/cxparser/cxparsing/cxparsing.go +++ b/cxparser/cxparsing/cxparsing.go @@ -2,16 +2,18 @@ package cxparsering import ( "bytes" - "io" + "fmt" "os" + "github.com/skycoin/cx/cmd/declaration_extractor" + "github.com/skycoin/cx/cmd/fileloader" + "github.com/skycoin/cx/cmd/type_checker" "github.com/skycoin/cx/cx/ast" "github.com/skycoin/cx/cx/constants" "github.com/skycoin/cx/cx/globals" "github.com/skycoin/cx/cx/types" "github.com/skycoin/cx/cxparser/actions" - cxpartialparsing "github.com/skycoin/cx/cxparser/cxpartialparsing" "github.com/skycoin/cx/cxparser/util/profiling" ) @@ -27,21 +29,23 @@ import ( step 2 : passtwo */ -func ParseSourceCode(sourceCode []*os.File, fileNames []string) { +func ParseSourceCode(sourceCode []*os.File) { //local - cxpartialparsing.Program = actions.AST + // cxpartialparsing.Program = actions.AST /* Copy the contents of the file pointers containing the CX source code into sourceCodeStrings */ - sourceCodeStrings := make([]string, len(sourceCode)) - for i, source := range sourceCode { - tmp := bytes.NewBuffer(nil) - io.Copy(tmp, source) - sourceCodeStrings[i] = tmp.String() - } + // sourceCodeStrings := make([]string, len(sourceCode)) + // for i, source := range sourceCode { + // tmp := bytes.NewBuffer(nil) + // io.Copy(tmp, source) + // sourceCodeStrings[i] = tmp.String() + // } + var sourceCodeStrings []string + var fileNames []string /* We need to traverse the elements by hierarchy first add all the @@ -51,10 +55,28 @@ func ParseSourceCode(sourceCode []*os.File, fileNames []string) { */ parseErrors := 0 if len(sourceCode) > 0 { - parseErrors = Preliminarystage(sourceCodeStrings, fileNames) + var err error + sourceCodeStrings, fileNames, err = fileloader.LoadFiles(sourceCode) + if err != nil { + parseErrors++ + fmt.Print(err) + } + + Imports, Globals, _, _, Structs, Funcs, err := 
declaration_extractor.ExtractAllDeclarations(sourceCodeStrings, fileNames) + if err != nil { + parseErrors++ + fmt.Println(err) + } + + err = type_checker.ParseAllDeclarations(Imports, Globals, Structs, Funcs) + if err != nil { + parseErrors++ + fmt.Println(err) + } + } - actions.AST = cxpartialparsing.Program + // actions.AST = cxpartialparsing.Program if globals.FoundCompileErrors || parseErrors > 0 { profiling.CleanupAndExit(constants.CX_COMPILATION_ERROR) diff --git a/cxparser/cxparsing/utils.go b/cxparser/cxparsing/utils.go index ec38a5a50..673f0b146 100644 --- a/cxparser/cxparsing/utils.go +++ b/cxparser/cxparsing/utils.go @@ -2,24 +2,20 @@ package cxparsering import ( "bufio" - "path/filepath" "regexp" "strings" "github.com/skycoin/cx/cx/ast" "github.com/skycoin/cx/cx/constants" - "github.com/skycoin/cx/cx/globals" cxpackages "github.com/skycoin/cx/cx/packages" - "github.com/skycoin/cx/cx/types" "github.com/skycoin/cx/cxparser/actions" - cxpartialparsing "github.com/skycoin/cx/cxparser/cxpartialparsing" "github.com/skycoin/cx/cxparser/util/profiling" ) // preliminarystage performs a first pass for the CX cxgo. Globals, packages and // custom types are added to `cxpartialparsing.Program`. func Preliminarystage(srcStrs, srcNames []string) int { - var prePkg *ast.CXPackage + // var prePkg *ast.CXPackage parseErrors := 0 reMultiCommentOpen := regexp.MustCompile(`/\*`) @@ -28,14 +24,14 @@ func Preliminarystage(srcStrs, srcNames []string) int { rePkg := regexp.MustCompile("package") rePkgName := regexp.MustCompile(`(^|[\s])package\s+([_a-zA-Z][_a-zA-Z0-9]*)`) - reStrct := regexp.MustCompile("type") - reStrctName := regexp.MustCompile(`(^|[\s])type\s+([_a-zA-Z][_a-zA-Z0-9]*)?\s`) + // reStrct := regexp.MustCompile("type") + // reStrctName := regexp.MustCompile(`(^|[\s])type\s+([_a-zA-Z][_a-zA-Z0-9]*)?\s`) - reGlbl := regexp.MustCompile("var") - reGlblName := regexp.MustCompile(`(^|[\s])var\s([_a-zA-Z][_a-zA-Z0-9]*)`) + // reGlbl := regexp.MustCompile("var") + // reGlblName := regexp.MustCompile(`(^|[\s])var\s([_a-zA-Z][_a-zA-Z0-9]*)`) - reBodyOpen := regexp.MustCompile("{") - reBodyClose := regexp.MustCompile("}") + // reBodyOpen := regexp.MustCompile("{") + // reBodyClose := regexp.MustCompile("}") reImp := regexp.MustCompile("import") reImpName := regexp.MustCompile(`(^|[\s])import\s+"([_a-zA-Z][_a-zA-Z0-9/-]*)"`) @@ -80,41 +76,41 @@ func Preliminarystage(srcStrs, srcNames []string) int { } if match := rePkgName.FindStringSubmatch(string(line)); match != nil { - if pkg, err := cxpartialparsing.Program.GetPackage(match[len(match)-1]); err != nil { + if _, err := actions.AST.GetPackage(match[len(match)-1]); err != nil { // then it hasn't been added newPkg := ast.MakePackage(match[len(match)-1]) - pkgIdx := cxpartialparsing.Program.AddPackage(newPkg) - newPkg, err = cxpartialparsing.Program.GetPackageFromArray(pkgIdx) + pkgIdx := actions.AST.AddPackage(newPkg) + newPkg, err = actions.AST.GetPackageFromArray(pkgIdx) if err != nil { panic(err) } - prePkg = newPkg + // prePkg = newPkg } else { - prePkg = pkg + // prePkg = pkg } } } - // 1-b. 
Identify all the structs - if loc := reStrct.FindIndex(line); loc != nil { - if (commentLoc != nil && commentLoc[0] < loc[0]) || - (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] < loc[0]) || - (multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) { - // then it's commented out - continue - } - - if match := reStrctName.FindStringSubmatch(string(line)); match != nil { - if prePkg == nil { - println(ast.CompilationError(srcName, lineno), - "No package defined") - } else if _, err := cxpartialparsing.Program.GetStruct(match[len(match)-1], prePkg.Name); err != nil { - // then it hasn't been added - strct := ast.MakeStruct(match[len(match)-1]) - prePkg.AddStruct(cxpartialparsing.Program, strct) - } - } - } + // // 1-b. Identify all the structs + // if loc := reStrct.FindIndex(line); loc != nil { + // if (commentLoc != nil && commentLoc[0] < loc[0]) || + // (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] < loc[0]) || + // (multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) { + // // then it's commented out + // continue + // } + + // if match := reStrctName.FindStringSubmatch(string(line)); match != nil { + // if prePkg == nil { + // println(ast.CompilationError(srcName, lineno), + // "No package defined") + // } else if _, err := cxpartialparsing.Program.GetStruct(match[len(match)-1], prePkg.Name); err != nil { + // // then it hasn't been added + // strct := ast.MakeStruct(match[len(match)-1]) + // prePkg.AddStruct(cxpartialparsing.Program, strct) + // } + // } + // } } profiling.StopProfile(srcName) } // for range srcStrs @@ -127,7 +123,7 @@ func Preliminarystage(srcStrs, srcNames []string) int { for i, source := range srcStrs { profiling.StartProfile(srcNames[i]) // inBlock needs to be 0 to guarantee that we're in the global scope - var inBlock int + // var inBlock int var commentedCode bool scanner := bufio.NewScanner(strings.NewReader(source)) @@ -166,108 +162,108 @@ func Preliminarystage(srcStrs, srcNames []string) int { if match := reImpName.FindStringSubmatch(string(line)); match != nil { pkgName := match[len(match)-1] // Checking if `pkgName` already exists and if it's not a standard library package. 
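The recursive re-parse of SRCPATH imports is commented out here; packages for imports are now created in ParseImports (cmd/type_checker/imports.go, earlier in this diff), where a slash-separated import path is registered under its final segment. A minimal sketch of that name derivation, with an illustrative input path:

    package main

    import (
    	"fmt"
    	"strings"
    )

    // lastSegment mirrors the strings.Split logic added to ParseImports:
    // a slash-separated import path is registered under its last segment.
    func lastSegment(importName string) string {
    	if !strings.Contains(importName, "/") {
    		return importName
    	}
    	tokens := strings.Split(importName, "/")
    	return tokens[len(tokens)-1]
    }

    func main() {
    	fmt.Println(lastSegment("app/helper")) // prints: helper
    }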
- if _, err := cxpartialparsing.Program.GetPackage(pkgName); err != nil && !cxpackages.IsDefaultPackage(pkgName) { + if _, err := actions.AST.GetPackage(pkgName); err != nil && !cxpackages.IsDefaultPackage(pkgName) { // _, sourceCode, srcNames := ParseArgsForCX([]string{fmt.Sprintf("%s%s", SRCPATH, pkgName)}, false) - _, sourceCode, fileNames := ast.ParseArgsForCX([]string{filepath.Join(globals.SRCPATH, pkgName)}, false) - ParseSourceCode(sourceCode, fileNames) - } - } - } - - // we search for packages at the same time, so we can know to what package to add the global - if loc := rePkg.FindIndex(line); loc != nil { - if (commentLoc != nil && commentLoc[0] < loc[0]) || - (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] < loc[0]) || - (multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) { - // then it's commented out - continue - } - - if match := rePkgName.FindStringSubmatch(string(line)); match != nil { - if pkg, err := cxpartialparsing.Program.GetPackage(match[len(match)-1]); err != nil { - // then it hasn't been added - prePkg = ast.MakePackage(match[len(match)-1]) - pkgIdx := cxpartialparsing.Program.AddPackage(prePkg) - prePkg, err = cxpartialparsing.Program.GetPackageFromArray(pkgIdx) - if err != nil { - panic(err) - } - } else { - prePkg = pkg + // _, sourceCode, fileNames := ast.ParseArgsForCX([]string{filepath.Join(globals.SRCPATH, pkgName)}, false) + // ParseSourceCode(sourceCode, fileNames) } } } - if locs := reBodyOpen.FindAllIndex(line, -1); locs != nil { - for _, loc := range locs { - if !(multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) { - // then it's outside of a */, e.g. `*/ }` - if (commentLoc == nil && multiCommentOpenLoc == nil && multiCommentCloseLoc == nil) || - (commentLoc != nil && commentLoc[0] > loc[0]) || - (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] > loc[0]) || - (multiCommentCloseLoc != nil && multiCommentCloseLoc[0] < loc[0]) { - // then we have an uncommented opening bracket - inBlock++ - } - } - } - } - if locs := reBodyClose.FindAllIndex(line, -1); locs != nil { - for _, loc := range locs { - if !(multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) { - if (commentLoc == nil && multiCommentOpenLoc == nil && multiCommentCloseLoc == nil) || - (commentLoc != nil && commentLoc[0] > loc[0]) || - (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] > loc[0]) || - (multiCommentCloseLoc != nil && multiCommentCloseLoc[0] < loc[0]) { - // then we have an uncommented closing bracket - inBlock-- - } - } - } - } - - // we could have this situation: {var local i32} - // but we don't care about this, as the later passes will throw an error as it's invalid syntax - - if loc := rePkg.FindIndex(line); loc != nil { - if (commentLoc != nil && commentLoc[0] < loc[0]) || - (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] < loc[0]) || - (multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) { - // then it's commented out - continue - } - - if match := rePkgName.FindStringSubmatch(string(line)); match != nil { - if pkg, err := cxpartialparsing.Program.GetPackage(match[len(match)-1]); err != nil { - // it should be already present - panic(err) - } else { - prePkg = pkg - } - } - } - - // finally, if we read a "var" and we're in global scope, we add the global without any type - // the type will be determined later on - if loc := reGlbl.FindIndex(line); loc != nil { - if (commentLoc != nil && commentLoc[0] < loc[0]) || - (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] < loc[0]) || - 
(multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) || inBlock != 0 { - // then it's commented out or inside a block - continue - } - if match := reGlblName.FindStringSubmatch(string(line)); match != nil { - if _, err := prePkg.GetGlobal(actions.AST, match[len(match)-1]); err != nil { - // then it hasn't been added - arg := ast.MakeArgument(match[len(match)-1], "", 0) - arg.Offset = types.InvalidPointer - arg.Package = ast.CXPackageIndex(prePkg.Index) - argIdx := actions.AST.AddCXArgInArray(arg) - - prePkg.AddGlobal(actions.AST, argIdx) - } - } - } + // // we search for packages at the same time, so we can know to what package to add the global + // if loc := rePkg.FindIndex(line); loc != nil { + // if (commentLoc != nil && commentLoc[0] < loc[0]) || + // (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] < loc[0]) || + // (multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) { + // // then it's commented out + // continue + // } + + // if match := rePkgName.FindStringSubmatch(string(line)); match != nil { + // if pkg, err := cxpartialparsing.Program.GetPackage(match[len(match)-1]); err != nil { + // // then it hasn't been added + // prePkg = ast.MakePackage(match[len(match)-1]) + // pkgIdx := cxpartialparsing.Program.AddPackage(prePkg) + // prePkg, err = cxpartialparsing.Program.GetPackageFromArray(pkgIdx) + // if err != nil { + // panic(err) + // } + // } else { + // prePkg = pkg + // } + // } + // } + + // if locs := reBodyOpen.FindAllIndex(line, -1); locs != nil { + // for _, loc := range locs { + // if !(multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) { + // // then it's outside of a */, e.g. `*/ }` + // if (commentLoc == nil && multiCommentOpenLoc == nil && multiCommentCloseLoc == nil) || + // (commentLoc != nil && commentLoc[0] > loc[0]) || + // (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] > loc[0]) || + // (multiCommentCloseLoc != nil && multiCommentCloseLoc[0] < loc[0]) { + // // then we have an uncommented opening bracket + // inBlock++ + // } + // } + // } + // } + // if locs := reBodyClose.FindAllIndex(line, -1); locs != nil { + // for _, loc := range locs { + // if !(multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) { + // if (commentLoc == nil && multiCommentOpenLoc == nil && multiCommentCloseLoc == nil) || + // (commentLoc != nil && commentLoc[0] > loc[0]) || + // (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] > loc[0]) || + // (multiCommentCloseLoc != nil && multiCommentCloseLoc[0] < loc[0]) { + // // then we have an uncommented closing bracket + // inBlock-- + // } + // } + // } + // } + + // // we could have this situation: {var local i32} + // // but we don't care about this, as the later passes will throw an error as it's invalid syntax + + // if loc := rePkg.FindIndex(line); loc != nil { + // if (commentLoc != nil && commentLoc[0] < loc[0]) || + // (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] < loc[0]) || + // (multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) { + // // then it's commented out + // continue + // } + + // if match := rePkgName.FindStringSubmatch(string(line)); match != nil { + // if pkg, err := cxpartialparsing.Program.GetPackage(match[len(match)-1]); err != nil { + // // it should be already present + // panic(err) + // } else { + // prePkg = pkg + // } + // } + // } + + // // finally, if we read a "var" and we're in global scope, we add the global without any type + // // the type will be determined later on + // if loc := reGlbl.FindIndex(line); loc 
!= nil { + // if (commentLoc != nil && commentLoc[0] < loc[0]) || + // (multiCommentOpenLoc != nil && multiCommentOpenLoc[0] < loc[0]) || + // (multiCommentCloseLoc != nil && multiCommentCloseLoc[0] > loc[0]) || inBlock != 0 { + // // then it's commented out or inside a block + // continue + // } + // if match := reGlblName.FindStringSubmatch(string(line)); match != nil { + // if _, err := prePkg.GetGlobal(actions.AST, match[len(match)-1]); err != nil { + // // then it hasn't been added + // arg := ast.MakeArgument(match[len(match)-1], "", 0) + // arg.Offset = types.InvalidPointer + // arg.Package = ast.CXPackageIndex(prePkg.Index) + // argIdx := actions.AST.AddCXArgInArray(arg) + + // prePkg.AddGlobal(actions.AST, argIdx) + // } + // } + // } } profiling.StopProfile(srcNames[i]) } @@ -275,18 +271,18 @@ func Preliminarystage(srcStrs, srcNames []string) int { profiling.StartProfile("3. cxpartialparsing") - for i, source := range srcStrs { - profiling.StartProfile(srcNames[i]) - source = source + "\n" - if len(srcNames) > 0 { - cxpartialparsing.CurrentFileName = srcNames[i] - } - /* - passone - */ - parseErrors += Passone(source) - profiling.StopProfile(srcNames[i]) - } + // for i, source := range srcStrs { + // profiling.StartProfile(srcNames[i]) + // source = source + "\n" + // if len(srcNames) > 0 { + // cxpartialparsing.CurrentFileName = srcNames[i] + // } + // /* + // passone + // */ + // parseErrors += Passone(source) + // profiling.StopProfile(srcNames[i]) + // } profiling.StopProfile("3. cxpartialparsing") return parseErrors diff --git a/lib/args.cx b/lib/args.cx new file mode 100644 index 000000000..14b57de96 --- /dev/null +++ b/lib/args.cx @@ -0,0 +1,159 @@ +package args + +import "cx" +import "os" + +// Str ... +func Str(arg str, pattern str, value *str, match *bool) (success bool) { + success = getStr(arg, pattern, false, value, match) +} + +// StrSlice ... +func StrSlice(arg str, pattern str, value *[]str, match *bool) (success bool) { + var filterList str = "" + success = Str(arg, pattern, &filterList, match) + if success { + var index i32 = 0 + for index >= 0 { + index = str.index(filterList, ",") + var filter str = filterList + var filterListLen i32 = len(filterList) + if index >= 0 { + filter = str.substr(filterList, 0, index) + filterList = str.substr(filterList, index + 1, filterListLen) + } else { + filterList = "" + } + filter = str.trimspace(filter) + if len(filter) > 0 || filterListLen > 0 { + *value = append(*value, filter) + } + } + } +} + +// I32 ... +func I32(arg str, pattern str, value *i32, match *bool) (success bool) { + var strValue str = "" + success = getStr(arg, pattern, true, &strValue, match) + if success { + var i32Value i32 = str.i32(strValue) + *value = i32Value + } +} + +// Bool ... +func Bool(arg str, pattern str, value *bool, match *bool) (success bool) { + var strValue str = "" + success = Str(arg, pattern, &strValue, match) + if success { + if strValue == "true" { + *value = true + } else if strValue == "false" { + *value = false + } else { + success = false + } + return + } + + *match = false + success = matchStr(arg, pattern, match) + if success { + *value = true + } +} + +// Flags ... 
+func Flags(arg str, pattern str, flags *i32, match *bool, names []str, values []i32) (success bool) { + success = false + var list str = "" + var slice []str + if StrSlice(arg, pattern, &slice, match) { + var nameCount i32 = len(names) + var valueCount i32 = len(values) + if nameCount == valueCount { + var inputCount i32 = len(slice) + success = true + for i := 0; i < inputCount; i++ { + var value i32 = 0 + for k := 0; k < nameCount; k++ { + if slice[i] == names[k] { + value = values[k] + k = nameCount // CX Issue break statement + } + } + + if value != 0 { + *flags = *flags | value + } else { + success = false + return + } + } + } + } +} + +// PrintFlags ... +func PrintFlags(name str, flags i32, names []str, values[]i32) (success bool) { + printf("%s : %d : [ ", name, flags) + var nameCount i32 = len(names) + var count i32 = len(values) + if nameCount == count { + for i := 0; i < count && flags > 0; i++ { + var flag i32 = values[i] + if (flags & flag) == flag { + printf("%s, ", names[i]) + flags = flags & (-1 ^ flag) + } + } + success = flags == 0 + } + printf(" ]\n") +} + +// matchStr ... +func matchStr(arg str, pattern str, match *bool) (success bool) { + success = false + if len(arg) <= 0 { + return + } + pattern = sprintf("++%s", pattern) + var argLen i32 = len(arg) + var patternLen i32 = len(pattern) + if argLen >= patternLen { + name := str.substr(arg, 0, patternLen) + if (name == pattern) { + var valid bool = false + if argLen == patternLen { + valid = true + } else if str.substr(arg, patternLen, patternLen + 1) == "=" { + valid = true + } + + if valid == true { + if *match { + success = true + return + } + *match = true + success = true + } + } + } +} + +// getStr ... +func getStr(arg str, pattern str, strict bool, value *str, match *bool) (success bool) { + if matchStr(arg, pattern, match) { + var argLen i32 = len(arg) + var patternLen i32 = len(pattern) + 3 // + len("++=") + if argLen > patternLen || (!strict && argLen == patternLen) { + *value = str.substr(arg, patternLen, argLen) + success = true + } + } +} + + diff --git a/lib/json.cx b/lib/json.cx new file mode 100644 index 000000000..b271a1c28 --- /dev/null +++ b/lib/json.cx @@ -0,0 +1,320 @@ +package json + +import "json" + +// Read next token in json file and check if it's a delimiter matching the delim value, returns true on success +func MatchDelim(file i32, delim i32) (success bool) { + var value i32 + if ReadDelim(file, &value) { + success = value == delim + } +} + +// Read and interpret next token in json file as delim value, returns true on success +func ReadDelim(file i32, value *i32) (success bool) { + var tokenType i32 + tokenType, success = json.Next(file) + if success { + *value, success = json.Delim(file) + if success { + return + } + } + + //debugToken(file, tokenType) +} + + +// Read and interpret next token in json file as str value, returns true on success +func ReadStr(file i32, value *str) (success bool) { + var tokenType i32 + tokenType, success = json.Next(file) + if success { + *value, success = json.Str(file) + if success { + return + } + } + + //debugToken(file, tokenType) +} + +// Read and interpret next token in json file as bool value, returns true on success +func ReadBool(file i32, value *bool) (success bool) { + var tokenType i32 + tokenType, success = json.Next(file) + if success { + *value, success = json.Bool(file) + if success { + return + } + } + + //debugToken(file, tokenType) +} + +// Read and interpret next token in json file as i64 value, returns true on success +func ReadI64(file
i32, value *i64) (success bool) { + var tokenType i32 + tokenType, success = json.Next(file) + if success { + *value, success = json.Int64(file) + if success { + return + } + } + + //debugToken(file, tokenType) +} + +// Read and interpret next token in json file as f64 value, returns true on success +func ReadF64(file i32, value *f64) (success bool) { + var tokenType i32 + tokenType, success = json.Next(file) + if success { + *value, success = json.Float64(file) + if success { + return + } + } + + //debugToken(file, tokenType) +} + +// Read and interpret next token in json file as i32 value, returns true on success +func ReadI32(file i32, value *i32) (success bool) { + var tokenType i32 + tokenType, success = json.Next(file) + if success { + var valueI64 i64 + valueI64, success = json.Int64(file) + if success { + *value = i64.i32(valueI64) + return + } + } + + //debugToken(file, tokenType) +} + +// Read and interpret next token in json file as f32 value, returns true on success +func ReadF32(file i32, value *f32) (success bool) { + var tokenType i32 + tokenType, success = json.Next(file) + if success { + var valueF64 f64 + valueF64, success = json.Float64(file) + if success { + *value = f64.f32(valueF64) + return + } + } + + //debugToken(file, tokenType) +} + +// Read and interpret next array in json file as [3]f32, returns true on success +// This function moves the current token +func ReadF32Vec3(file i32, array *[3]f32) (success bool) { + if MatchDelim(file, json.DELIM_SQUARE_LEFT) == false { + return + } + + var i i32 + for i < 3 { + var more bool + more, success = json.More(file) + if more == false || success == false { + success = false + return + } + var value f32 + if ReadF32(file, &value) == false { + success = false + return + } + (*array)[i] = value + i++ + } + + success = MatchDelim(file, json.DELIM_SQUARE_RIGHT) +} + +// Read next array in json file as [4]f32, returns true on success +// This function moves the current token +func ReadF32Vec4(file i32, array *[4]f32) (success bool) { + if MatchDelim(file, json.DELIM_SQUARE_LEFT) == false { + return + } + + var i i32 + for i < 4 { + var more bool + more, success = json.More(file) + if more == false || success == false { + success = false + return + } + var value f32 + if ReadF32(file, &value) == false { + success = false + return + } + (*array)[i] = value + i++ + } + + success = MatchDelim(file, json.DELIM_SQUARE_RIGHT) +} + +// Read next array in json file as []i32, returns true on success +// This function moves the current token +func ReadI32Slice(file i32, array *[]i32) (success bool) { + if MatchDelim(file, json.DELIM_SQUARE_LEFT) == false { + return + } + + var more bool = true + for more == true { + more, success = json.More(file) + if success == false { + return + } + if more { + var value i32 + if ReadI32(file, &value) == false { + success = false + return + } + *array = append(*array, value) + } + } + + success = MatchDelim(file, json.DELIM_SQUARE_RIGHT) +} + +// Read next array in json file as []f32, returns true on success +// This function moves the current token +func ReadF32Slice(file i32, array *[]f32) (success bool) { + if MatchDelim(file, json.DELIM_SQUARE_LEFT) == false { + return + } + + var more bool = true + for more == true { + more, success = json.More(file) + if success == false { + return + } + if more { + var value f32 + if ReadF32(file, &value) == false { + success = false + return + } + *array = append(*array, value) + } + } + + success = MatchDelim(file, json.DELIM_SQUARE_RIGHT) +} + +// Read next
array in json file as []str, returns true on success +// This function moves the current token +func ReadStrSlice(file i32, array *[]str) (success bool) { + if MatchDelim(file, json.DELIM_SQUARE_LEFT) == false { + return + } + + var more bool = true + for more == true { + more, success = json.More(file) + if success == false { + return + } + if more { + var value str + if ReadStr(file, &value) == false { + success = false + return + } + *array = append(*array, value) + } + } + + success = MatchDelim(file, json.DELIM_SQUARE_RIGHT) +} + +// debug helper +func debugToken(file i32, t i32) { + printf("DEBUG_JSON_TYPE %d\n", t) + if t == json.TOKEN_DELIM { + printf("DEBUG_JSON_DELIM\n") + var value i32 + var success bool + value, success = json.Delim(file) + if success { + if value == json.DELIM_CURLY_LEFT { + printf("{\n") + } else if value == json.DELIM_CURLY_RIGHT { + printf("}\n") + } else if value == json.DELIM_SQUARE_LEFT { + printf("[\n") + } else if value == json.DELIM_SQUARE_RIGHT { + printf("]\n") + } else { + printf("invalid delimiter\n") + } + } else { + panic(true, false, "failed to parse delimiter value") + } + } else if t == json.TOKEN_BOOL { + printf("DEBUG_JSON_BOOL\n") + var value bool + var success bool + value, success = json.Bool(file) + if success { + if value { + printf("true\n") + } else { + printf("false\n") + } + } else { + panic(true, false, "failed to parse bool value") + } + } else if t == json.TOKEN_F64 { + printf("DEBUG_JSON_F64\n") + var value f64 + var success bool + value, success = json.Float64(file) + if success { + printf("%f\n", value) + } else { + panic(true, false, "failed to parse f64 value") + } + } else if t == json.TOKEN_NUMBER { + printf("DEBUG_JSON_NUMBER\n") + var value f64 + var success bool + value, success = json.Float64(file) + if success { + printf("%f\n", value) + } else { + panic(true, false, "failed to parse number value") + } + } else if t == json.TOKEN_STR { + printf("DEBUG_JSON_STR\n") + var value str + var success bool + value, success = json.Str(file) + if success { + printf("%s\n", value) + } else { + panic(true, false, "failed to parse str value") + } + } else { + panic(true, false, sprintf("invalid token type : %d", t)) + } +} + + diff --git a/tests/issue-53-a.cx b/tests/issue-53-a.cx index d3b57a4ee..ad36ccc5b 100644 --- a/tests/issue-53-a.cx +++ b/tests/issue-53-a.cx @@ -1,9 +1,9 @@ // https://github.com/skycoin/cx/issues/53 -import "cx" - package main +import "cx" + type Too struct { a i32 sb []i8