diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 745a6017..4083e982 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -2,6 +2,7 @@ on: pull_request: paths: - 'language/**' + - 'tests/**' jobs: release: @@ -11,6 +12,7 @@ jobs: - uses: actions/checkout@v2 with: fetch-depth: 1 + submodules: true - run: npm install - run: node ./node_modules/eslint/bin/eslint src/** --no-error-on-unmatched-pattern - run: npm run test \ No newline at end of file diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..3d798013 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,24 @@ +[submodule "tests/sources/bob-recursive-example"] + path = tests/sources/bob-recursive-example + url = git@github.com:IBM/bob-recursive-example.git +[submodule "tests/sources/BBS400"] + path = tests/sources/BBS400 + url = git@github.com:worksofliam/BBS400.git +[submodule "tests/sources/noxDB"] + path = tests/sources/noxDB + url = git@github.com:sitemule/noxDB.git +[submodule "tests/sources/xmlservice"] + path = tests/sources/xmlservice + url = git@github.com:IBM/xmlservice.git +[submodule "tests/sources/I_builder"] + path = tests/sources/I_builder + url = git@github.com:ibmiiste/I_builder.git +[submodule "tests/sources/ibmi-company_system"] + path = tests/sources/ibmi-company_system + url = git@github.com:IBM/ibmi-company_system.git +[submodule "tests/sources/httpapi"] + path = tests/sources/httpapi + url = git@github.com:ScottKlement/httpapi.git +[submodule "tests/sources/rpgle-repl"] + path = tests/sources/rpgle-repl + url = git@github.com:tom-writes-code/rpgle-repl.git diff --git a/extension/server/src/connection.ts b/extension/server/src/connection.ts index 5beef8bc..0e10e86c 100644 --- a/extension/server/src/connection.ts +++ b/extension/server/src/connection.ts @@ -63,15 +63,23 @@ export async function memberResolve(baseUri: string, member: string, file: strin if (resolvedMembers[baseUri] && resolvedMembers[baseUri][fileKey]) return 
resolvedMembers[baseUri][fileKey]; - const resolvedMember = await queue.add(() => {return connection.sendRequest("memberResolve", [member, file])}) as IBMiMember|undefined; - // const resolvedMember = await connection.sendRequest("memberResolve", [member, file]) as IBMiMember|undefined; - - if (resolvedMember) { - if (!resolvedMembers[baseUri]) resolvedMembers[baseUri] = {}; - resolvedMembers[baseUri][fileKey] = resolvedMember; + try { + const resolvedMember = await queue.add(() => {return connection.sendRequest("memberResolve", [member, file])}) as IBMiMember|undefined; + // const resolvedMember = await connection.sendRequest("memberResolve", [member, file]) as IBMiMember|undefined; + + if (resolvedMember) { + if (!resolvedMembers[baseUri]) resolvedMembers[baseUri] = {}; + resolvedMembers[baseUri][fileKey] = resolvedMember; + } + + return resolvedMember; + } catch (e) { + console.log(`Member resolve failed.`); + console.log(JSON.stringify({baseUri, member, file})); + console.log(e); } - return resolvedMember; + return undefined; } export async function streamfileResolve(baseUri: string, base: string[]): Promise { @@ -82,15 +90,23 @@ export async function streamfileResolve(baseUri: string, base: string[]): Promis const paths = (workspace ? 
includePath[workspace.uri] : []) || []; - const resolvedPath = await queue.add(() => {return connection.sendRequest("streamfileResolve", [base, paths])}) as string|undefined; - // const resolvedPath = await connection.sendRequest("streamfileResolve", [base, paths]) as string|undefined; + try { + const resolvedPath = await queue.add(() => {return connection.sendRequest("streamfileResolve", [base, paths])}) as string|undefined; + // const resolvedPath = await connection.sendRequest("streamfileResolve", [base, paths]) as string|undefined; + + if (resolvedPath) { + if (!resolvedStreamfiles[baseUri]) resolvedStreamfiles[baseUri] = {}; + resolvedStreamfiles[baseUri][baseString] = resolvedPath; + } - if (resolvedPath) { - if (!resolvedStreamfiles[baseUri]) resolvedStreamfiles[baseUri] = {}; - resolvedStreamfiles[baseUri][baseString] = resolvedPath; + return resolvedPath; + } catch (e) { + console.log(`Streamfile resolve failed.`); + console.log(JSON.stringify({baseUri, base, paths})); + console.log(e); } - return resolvedPath; + return undefined; } export function getWorkingDirectory(): Promise { diff --git a/extension/server/src/data.ts b/extension/server/src/data.ts index 08a13128..42ee3b5e 100644 --- a/extension/server/src/data.ts +++ b/extension/server/src/data.ts @@ -55,6 +55,8 @@ export function dspffdToRecordFormats(data: any, aliases = false): Declaration[] len: digits === 0 ? 
strLength : digits, decimals: decimals, keywords, + field: ``, + pos: `` }); currentSubfield.description = text.trim(); diff --git a/extension/server/src/providers/definition.ts b/extension/server/src/providers/definition.ts index 2260d089..3d816da0 100644 --- a/extension/server/src/providers/definition.ts +++ b/extension/server/src/providers/definition.ts @@ -5,18 +5,18 @@ import Cache from '../../../../language/models/cache'; import Declaration from '../../../../language/models/declaration'; export default async function definitionProvider(handler: DefinitionParams): Promise { - const currentPath = handler.textDocument.uri; + const currentUri = handler.textDocument.uri; const lineNumber = handler.position.line; - const document = documents.get(currentPath); + const document = documents.get(currentUri); if (document) { - const doc = await parser.getDocs(currentPath, document.getText()); + const doc = await parser.getDocs(currentUri, document.getText()); if (doc) { const editingLine = document.getText(Range.create(lineNumber, 0, lineNumber, 200)); const possibleInclude = Parser.getIncludeFromDirective(editingLine); - if (possibleInclude) { - const include = await parser.includeFileFetch(currentPath, possibleInclude); + if (possibleInclude && parser.includeFileFetch) { + const include = await parser.includeFileFetch(currentUri, possibleInclude); if (include.found && include.uri) { return Location.create(include.uri, Range.create(0, 0, 0, 0)); } @@ -25,7 +25,7 @@ export default async function definitionProvider(handler: DefinitionParams): Pro let def: Declaration|undefined; // First, we try and get the reference by offset - def = Cache.referenceByOffset(doc, document.offsetAt(handler.position)); + def = Cache.referenceByOffset(currentUri, doc, document.offsetAt(handler.position)); if (def) { return Location.create( diff --git a/extension/server/src/providers/hover.ts b/extension/server/src/providers/hover.ts index 547fd85f..70c099bf 100644 --- 
a/extension/server/src/providers/hover.ts +++ b/extension/server/src/providers/hover.ts @@ -83,7 +83,9 @@ export default async function hoverProvider(params: HoverParams): Promise { if (cache) { @@ -198,7 +199,6 @@ export async function refreshLinterDiagnostics(document: TextDocument, docs: Cac let availableIncludes: string[] | undefined; if (Project.isEnabled) { - options.CollectReferences = true; const headers = await Project.getIncludes(document.uri); availableIncludes = headers.map(header => header.relative); } @@ -538,7 +538,7 @@ function caseInsensitiveReplaceAll(text: string, search: string, replace: string function createExtract(document: TextDocument, userRange: Range, docs: Cache) { const range = Range.create(userRange.start.line, 0, userRange.end.line, 1000); - const references = docs.referencesInRange({position: document.offsetAt(range.start), end: document.offsetAt(range.end)}); + const references = docs.referencesInRange(document.uri, {position: document.offsetAt(range.start), end: document.offsetAt(range.end)}); const validRefs = references.filter(ref => [`struct`, `subitem`, `variable`].includes(ref.dec.type)); const nameDiffSize = 1; // Always once since we only add 'p' at the start diff --git a/extension/server/src/providers/project/index.ts b/extension/server/src/providers/project/index.ts index 14e85b9a..4c50cb4a 100644 --- a/extension/server/src/providers/project/index.ts +++ b/extension/server/src/providers/project/index.ts @@ -141,17 +141,7 @@ async function loadLocalFile(uri: string) { if (document) { const content = document?.getText(); - const cache = await parser.getDocs(uri, content); - if (cache) { - if (content.length >= 6 && content.substring(0, 6).toUpperCase() === `**FREE`) { - Linter.getErrors({ - uri, - content, - }, { - CollectReferences: true - }, cache); - } - } + await parser.getDocs(uri, content); } } diff --git a/extension/server/src/providers/project/references.ts b/extension/server/src/providers/project/references.ts index 
449b2f5f..d2db50bd 100644 --- a/extension/server/src/providers/project/references.ts +++ b/extension/server/src/providers/project/references.ts @@ -5,22 +5,11 @@ import { calculateOffset } from '../linter'; import { documents, parser } from '..'; import { getTextDoc, isEnabled } from '.'; -export async function findAllLocalReferences(def: Declaration): Promise { +export async function findAllProjectReferences(def: Declaration): Promise { let locations: Location[] = []; if (isEnabled) { const parsedFiles = Object.keys(parser.parsedCache); - - const document = documents.get(def.position.path); - - if (document) { - locations.push( - ...def.references.map(ref => Location.create( - def.position.path, - calculateOffset(document, ref) - )) - ); - } if (def.keyword[`EXPORT`]) { // If we are looking for references to an export function @@ -57,10 +46,6 @@ export async function findAllLocalReferences(def: Declaration): Promise loc.uri === keyPath)) { locations.push( - // First, we push the copybook where it is brought in. - // We do this because we don't have references for non-**free - Location.create(proc.position.path, Range.create(proc.position.line, 0, proc.position.line, 0)), - // Then we push the references. Empty for non-**free ...proc.references.map(ref => Location.create( keyPath, @@ -74,45 +59,6 @@ export async function findAllLocalReferences(def: Declaration): Promise include.toPath === baseUri) - if (foundInclude) { - const possibleDef = cache.find(def.name); - - // Okay, we found something with a similar name in another file... - if (possibleDef) { - if (possibleDef.position.path === def.position.path) { - if (document.getText(Range.create(0, 0, 0, 6)).toUpperCase() !== `**FREE` || possibleDef.references.length === 0) { - locations.push( - // First, we push the copybook where it is brought in. 
- // We do this because we don't have references for non-**free - Location.create(uri, Range.create(foundInclude.line, 0, foundInclude.line, 0)), - ); - } else { - // But since it's **free, and we probably have referneces... - locations.push( - // Then we push the references. Empty for non-**free - ...possibleDef.references.map(ref => Location.create( - uri, - calculateOffset(document, ref) - )) - ); - } - - } - } - } - } - } } } diff --git a/extension/server/src/providers/reference.ts b/extension/server/src/providers/reference.ts index 86f15172..1f80f7da 100644 --- a/extension/server/src/providers/reference.ts +++ b/extension/server/src/providers/reference.ts @@ -4,7 +4,7 @@ import Linter from '../../../../language/linter'; import { calculateOffset } from './linter'; import * as Project from "./project"; -import { findAllLocalReferences } from './project/references'; +import { findAllProjectReferences as getAllProcedureReferences } from './project/references'; import Cache from '../../../../language/models/cache'; export async function referenceProvider(params: ReferenceParams): Promise { @@ -18,35 +18,29 @@ export async function referenceProvider(params: ReferenceParams): Promise Location.create( - def.position.path, - calculateOffset(document, ref) - )); + const procRefs = await getAllProcedureReferences(def); + locations.push(...procRefs); } + + for (const ref of def.references) { + let refDoc = documents.get(ref.uri); + if (refDoc) { + locations.push(Location.create( + ref.uri, + calculateOffset(refDoc, ref) + )); + } + } + + return locations; } } } diff --git a/extension/server/src/providers/rename.ts b/extension/server/src/providers/rename.ts index 6734bd8c..4cc468ef 100644 --- a/extension/server/src/providers/rename.ts +++ b/extension/server/src/providers/rename.ts @@ -11,27 +11,18 @@ export async function renamePrepareProvider(params: PrepareRenameParams): Promis const document = documents.get(uri); if (document) { - const isFree = 
(document.getText(Range.create(0, 0, 0, 6)).toUpperCase() === `**FREE`); - const doc = await parser.getDocs(uri, document.getText()); if (doc) { - if (isFree) { - Linter.getErrors( - { - uri, - content: document.getText() - }, - { - CollectReferences: true - }, - doc - ); - } - - const def = Cache.referenceByOffset(doc, document.offsetAt(currentPos)); + const def = Cache.referenceByOffset(uri, doc, document.offsetAt(currentPos)); if (def) { + const uniqueUris = def.references.map(ref => ref.uri).filter((value, index, self) => self.indexOf(value) === index); + + if (uniqueUris.length > 1) { + return; + } + const currentSelectedRef = def?.references.find(r => document.positionAt(r.offset.position).line === currentPos.line); if (currentSelectedRef) { @@ -58,34 +49,25 @@ export async function renameRequestProvider(params: RenameParams): Promise ({ - newText: params.newName, - range: Range.create( - document.positionAt(ref.offset.position), - document.positionAt(ref.offset.end) - ) - })); + let edits: {[uri: string]: TextEdit[]} = {}; + + const uniqueUris = def.references.map(ref => ref.uri).filter((value, index, self) => self.indexOf(value) === index); + + for (const uri of uniqueUris) { + edits[uri] = def.references.filter(ref => ref.uri === uri).map(ref => ({ + newText: params.newName, + range: Range.create( + document.positionAt(ref.offset.position), + document.positionAt(ref.offset.end) + ) + })); + } const workspaceEdit: WorkspaceEdit = { - changes: { - [document.uri]: edits - } + changes: edits } return workspaceEdit; diff --git a/extension/server/src/server.ts b/extension/server/src/server.ts index 2e366749..eb838e48 100644 --- a/extension/server/src/server.ts +++ b/extension/server/src/server.ts @@ -272,7 +272,7 @@ parser.setIncludeFileFetch(async (stringUri: string, includeString: string) => { return { found: true, uri: validUri, - lines: validSource.split(`\n`) + content: validSource }; } } @@ -311,7 +311,8 @@ documents.onDidChangeContent(handler => { 
handler.document.getText(), { withIncludes: true, - ignoreCache: true + ignoreCache: true, + collectReferences: true } ).then(cache => { if (cache) { diff --git a/language/linter.ts b/language/linter.ts index 01c4f145..7d3e9010 100644 --- a/language/linter.ts +++ b/language/linter.ts @@ -104,16 +104,6 @@ export default class Linter { let opcode: string; - if (rules.NoUnreferenced) { - // We need to collect references for this to work correctly. - rules.CollectReferences = true; - } - - // Clear out all the old references. - if (rules.CollectReferences) { - globalScope.clearReferences(); - } - // Make all external refs uppercase. if (rules.NoExternalTo && rules.NoExternalTo.length) { rules.NoExternalTo = rules.NoExternalTo.map(val => val.toUpperCase()); @@ -809,21 +799,6 @@ export default class Linter { } break; - - case `special`: - if (rules.CollectReferences) { - value = part.value.substring(1).toUpperCase(); - const defRef = globalScope.find(value); - - if (defRef) { - defRef.references.push({ - offset: { position: part.range.start, end: part.range.end }, - type: null, - }); - } - } - break; - case `word`: const upperName = part.value.toUpperCase(); @@ -886,81 +861,6 @@ export default class Linter { } } } - - if (rules.CollectReferences) { - if (statement[i - 1] && statement[i - 1].type === `dot`) break; - - // We might be referencing a subfield in a structure, so we grab the name in scope - // then we actually do the lookup against that - const parentDeclareBlock: string | undefined = inStruct[inStruct.length - 1]; - const inDeclareBlock = parentDeclareBlock && parentDeclareBlock.toUpperCase() !== upperName; - const lookupName = inDeclareBlock ? 
parentDeclareBlock : upperName; - - let defRef: Declaration; - if (currentProcedure && currentProcedure.scope) { - defRef = currentProcedure.scope.find(lookupName); - - if (!defRef) { - defRef = currentProcedure.subItems.find(def => def.name.toUpperCase() === lookupName); - } - } - - if (!defRef) { - defRef = globalScope.find(lookupName); - } - - if (defRef) { - - if (inDeclareBlock) { - // If we did the lookup against a parent DS, look for the subfield and add the reference there - defRef = defRef.subItems.find(sub => sub.name.toUpperCase() === upperName); - - if (defRef) { - defRef.references.push({ - offset: { position: part.range.start, end: part.range.end }, - }); - } - - } else { - - defRef.references.push({ - offset: { position: part.range.start, end: part.range.end }, - }); - - if (defRef.keyword[`QUALIFIED`]) { - let nextPartIndex = i + 1; - - if (statement[nextPartIndex]) { - // First, check if there is an array call here and skip over it - if (statement[nextPartIndex].type === `openbracket`) { - nextPartIndex = statement.findIndex((value, index) => index > nextPartIndex && value.type === `closebracket`); - - if (nextPartIndex >= 0) nextPartIndex++; - } - - // Check if the next part is a dot - if (statement[nextPartIndex] && statement[nextPartIndex].type === `dot`) { - nextPartIndex++; - - // Check if the next part is a word - if (statement[nextPartIndex] && statement[nextPartIndex].type === `word` && statement[nextPartIndex].value) { - const subItemPart = statement[nextPartIndex]; - const subItemName = subItemPart.value.toUpperCase(); - - // Find the subitem - const subItemDef = defRef.subItems.find(subfield => subfield.name.toUpperCase() == subItemName); - if (subItemDef) { - subItemDef.references.push({ - offset: { position: subItemPart.range.start, end: subItemPart.range.end }, - }); - } - } - } - } - } - } - } - } break; case `string`: diff --git a/language/models/cache.ts b/language/models/cache.ts index dae0cb25..5b913b1b 100644 --- 
a/language/models/cache.ts +++ b/language/models/cache.ts @@ -5,7 +5,7 @@ const newInds = () => { return [...Array(98).keys(), `LR`, `KL`].map(val => `IN${val.toString().padStart(2, `0`)}`).map(ind => { const indDef = new Declaration(`variable`); indDef.name = ind; - indDef.keywords = [`IND`]; + indDef.keyword = {IND: true}; return indDef; }) }; @@ -21,41 +21,22 @@ export default class Cache { constants: Declaration[]; sqlReferences: Declaration[]; indicators: Declaration[]; + tags: Declaration[]; includes: IncludeStatement[]; constructor(cache: CacheProps = {}) { - /** @type {import("../parserTypes").Keywords} */ this.keyword = {}; - - /** @type {Declaration[]} */ this.parameters = cache.parameters || []; - - /** @type {Declaration[]} */ this.subroutines = cache.subroutines || []; - - /** @type {Declaration[]} */ this.procedures = cache.procedures || []; - - /** @type {Declaration[]} */ this.files = cache.files || []; - - /** @type {Declaration[]} */ this.variables = cache.variables || []; - - /** @type {Declaration[]} */ this.structs = cache.structs || []; - - /** @type {Declaration[]} */ this.constants = cache.constants || []; - - /** @type {Declaration[]} */ this.sqlReferences = cache.sqlReferences || []; - - /** @type {Declaration[]} */ this.indicators = cache.indicators || [...newInds()]; - - /** @type {import("../parserTypes").IncludeStatement[]} */ this.includes = cache.includes || []; + this.tags = cache.tags || []; } /** @@ -84,17 +65,19 @@ export default class Cache { * @returns {String[]} */ getNames() { - const fileStructNames = this.files.map(file => file.subItems.map(sub => sub.name)).flat(); - return [ - ...this.parameters.map(def => def.name), - ...this.constants.map(def => def.name), - ...this.procedures.map(def => def.name), - ...this.files.map(def => def.name), - ...fileStructNames, - ...this.subroutines.map(def => def.name), - ...this.variables.map(def => def.name), - ...this.structs.map(def => def.name), - ].filter(name => name); + const names 
= new Set(); + + this.parameters.forEach(def => names.add(def.name)); + this.constants.forEach(def => names.add(def.name)); + this.procedures.forEach(def => names.add(def.name)); + this.files.forEach(def => names.add(def.name)); + this.files.forEach(file => file.subItems.forEach(sub => names.add(sub.name))); + this.subroutines.forEach(def => names.add(def.name)); + this.variables.forEach(def => names.add(def.name)); + this.structs.forEach(def => names.add(def.name)); + this.tags.forEach(def => names.add(def.name)); + + return Array.from(names); } /** @@ -123,46 +106,37 @@ export default class Cache { */ find(name) { name = name.toUpperCase(); - const fileStructs = this.files.map(file => file.subItems).flat(); + + const fileStructs = this.files.flatMap(file => file.subItems); const allStructs = [...fileStructs, ...this.structs]; - const possibles = [ - ...this.parameters.filter(def => def.name.toUpperCase() === name), - ...this.constants.filter(def => def.name.toUpperCase() === name), - ...this.procedures.filter(def => def.name.toUpperCase() === name), - ...this.files.filter(def => def.name.toUpperCase() === name), - ...allStructs.filter(def => def.name.toUpperCase() === name), - ...this.subroutines.filter(def => def.name.toUpperCase() === name), - ...this.variables.filter(def => def.name.toUpperCase() === name), - ...this.indicators.filter(def => def.name.toUpperCase() === name), + const searchIn = [ + this.parameters, + this.constants, + this.procedures, + this.files, + allStructs, + this.subroutines, + this.variables, + this.indicators, + this.tags ]; - if (allStructs.length > 0 && possibles.length === 0) { - allStructs.filter(def => def.keyword[`QUALIFIED`] !== true).forEach(def => { - possibles.push(...def.subItems.filter(sub => sub.name.toUpperCase() === name)); - }); + for (const list of searchIn) { + const found = list.find(def => def.name.toUpperCase() === name); + if (found) return found; } - if (possibles.length > 0) { - return possibles[0]; - } else { - 
return null; + if (allStructs.length > 0) { + for (const def of allStructs) { + if (def.keyword[`QUALIFIED`] !== true) { + const subItem = def.subItems.find(sub => sub.name.toUpperCase() === name); + if (subItem) return subItem; + } + } } - } - clearReferences() { - const fileStructs = this.files.map(file => file.subItems).flat(); - - [...fileStructs, ...this.parameters, ...this.constants, ...this.files, ...this.procedures, ...this.subroutines, ...this.variables, ...this.structs].forEach(def => { - def.references = []; - def.subItems.forEach(sub => sub.references = []); - }); - - this.procedures.forEach(proc => { - if (proc.scope) { - proc.scope.clearReferences(); - } - }); + return null; } findDefinition(lineNumber, word) { @@ -185,20 +159,18 @@ export default class Cache { } findConstByValue(lineNumber: number, value: string) { - const upperValue = value.toUpperCase(); // Keywords are stored in uppercase - // If they're typing inside of a procedure, let's get the stuff from there too const currentProcedure = this.procedures.find(proc => lineNumber >= proc.range.start && lineNumber <= proc.range.end); if (currentProcedure && currentProcedure.scope) { - const localDef = currentProcedure.scope.constants.find(def => def.keyword[upperValue] === true); + const localDef = currentProcedure.scope.constants.find(def => def.keyword[`CONST`] === value); if (localDef) { return localDef; } } - const globalDef = this.constants.find(def => def.keyword[upperValue] === true); + const globalDef = this.constants.find(def => def.keyword[`CONST`] === value); if (globalDef) { return globalDef; @@ -219,11 +191,11 @@ export default class Cache { } } - referencesInRange(range: Offset): { dec: Declaration, refs: Offset[] }[] { + referencesInRange(baseUri: string, range: Offset): { dec: Declaration, refs: Offset[] }[] { let list: { dec: Declaration, refs: Offset[] }[] = []; for (let i = range.position; i <= range.end; i++) { - const ref = Cache.referenceByOffset(this, i); + const ref = 
Cache.referenceByOffset(baseUri, this, i); if (ref) { // No duplicates allowed if (list.some(item => item.dec.name === ref.name)) continue; @@ -238,29 +210,28 @@ export default class Cache { return list; } - static referenceByOffset(scope: Cache, offset: number): Declaration | undefined { - const props: (keyof Cache)[] = [`parameters`, `subroutines`, `procedures`, `files`, `variables`, `structs`, `constants`, `indicators`]; - + static referenceByOffset(baseUri: string, scope: Cache, offset: number): Declaration | undefined { + const props: (keyof Cache)[] = [`parameters`, `subroutines`, `procedures`, `files`, `variables`, `structs`, `constants`, `indicators`, `tags`]; for (const prop of props) { const list = scope[prop] as unknown as Declaration[]; for (const def of list) { let possibleRef: boolean; // Search top level - possibleRef = def.references.some(r => offset >= r.offset.position && offset <= r.offset.end); + possibleRef = def.references.some(r => r.uri === baseUri && offset >= r.offset.position && offset <= r.offset.end); if (possibleRef) return def; // Search any subitems if (def.subItems.length > 0) { for (const subItem of def.subItems) { - possibleRef = subItem.references.some(r => offset >= r.offset.position && offset <= r.offset.end); + possibleRef = subItem.references.some(r => r.uri === baseUri && offset >= r.offset.position && offset <= r.offset.end); if (possibleRef) return subItem; } } // Search scope if any if (def.scope) { - const inScope = Cache.referenceByOffset(def.scope, offset); + const inScope = Cache.referenceByOffset(baseUri, def.scope, offset); if (inScope) return inScope; } } diff --git a/language/models/declaration.js b/language/models/declaration.js index be230ae3..7a564f4d 100644 --- a/language/models/declaration.js +++ b/language/models/declaration.js @@ -4,7 +4,7 @@ import Cache from "./cache"; export default class Declaration { /** * - * @param {"procedure"|"subroutine"|"file"|"struct"|"subitem"|"variable"|"constant"} type + * 
@param {"procedure"|"subroutine"|"file"|"struct"|"subitem"|"variable"|"constant"|"tag"} type */ constructor(type) { this.type = type; @@ -21,7 +21,7 @@ export default class Declaration { /** @type {import("../parserTypes").DefinitionPosition} */ this.position = undefined; - /** @type {import("../parserTypes").IssueRange[]} */ + /** @type {import("../parserTypes").Reference[]} */ this.references = []; // Not used in subitem: diff --git a/language/models/fixed.js b/language/models/fixed.js index 54322336..62be4a36 100644 --- a/language/models/fixed.js +++ b/language/models/fixed.js @@ -1,5 +1,37 @@ import Parser from "../parser"; +/** + * @param {number} lineNumber + * @param {number} startingPos + * @param {string} value + * @param {string} [type] + * @returns {import("../types").Token|undefined} + */ +function calculateToken(lineNumber, startingPos, value, type) { + let resultValue = value.trim(); + + if (resultValue === ``) { + return; + } + + if (type === `special-ind`) { + type = `special`; + resultValue = `*IN` + resultValue; + } + + const frontSpaces = value.length - value.trimStart().length; + const start = startingPos + frontSpaces; + return { + type: type || `word`, + value: resultValue, + range: { + start, + end: start + resultValue.length, + line: lineNumber + } + } +} + /** * @param {string} line */ @@ -12,84 +44,101 @@ export function parseFLine(line) { return { name, - keywords: Parser.expandKeywords(keywords) + keywords: Parser.expandKeywords(Parser.getTokens(keywords)) }; } /** - * @param {string} line + * @param {string} content */ -export function parseCLine(line) { - line = line.padEnd(80); - const factor1 = line.substr(11, 14).trim(); - const opcode = line.substr(25, 10).trim().toUpperCase(); - const factor2 = line.substr(35, 14).trim(); - const extended = line.substr(35).trim(); - const result = line.substr(49, 14).trim(); - - const ind1 = line.substr(70, 2).trim(); - const ind2 = line.substr(72, 2).trim(); - const ind3 = line.substr(74, 
2).trim(); +export function parseCLine(lineNumber, lineIndex, content) { + content = content.padEnd(80); + const factor1 = content.substr(11, 14); + const opcode = content.substr(25, 10).toUpperCase(); + const factor2 = content.substr(35, 14); + const extended = content.substr(35); + const result = content.substr(49, 14); + + const ind1 = content.substr(70, 2); + const ind2 = content.substr(72, 2); + const ind3 = content.substr(74, 2); return { - opcode, - factor1, - factor2, - result, - extended, - ind1, - ind2, - ind3 + opcode: calculateToken(lineNumber, lineIndex+25, opcode, `opcode`), + factor1: calculateToken(lineNumber, lineIndex+11, factor1), + factor2: calculateToken(lineNumber, lineIndex+35, factor2), + result: calculateToken(lineNumber, lineIndex+49, result), + extended: calculateToken(lineNumber, lineIndex+35, extended), + ind1: calculateToken(lineNumber, lineIndex+70, ind1, `special-ind`), + ind2: calculateToken(lineNumber, lineIndex+72, ind2, `special-ind`), + ind3: calculateToken(lineNumber, lineIndex+74, ind3, `special-ind`) }; } /** - * @param {string} line + * @param {string} content */ -export function parseDLine(line) { - line = line.padEnd(80); - const potentialName = line.substring(6).trim(); - const name = line.substr(6, 15).trim(); - const pos = line.substr(25, 7).trim(); - const len = line.substr(32, 7).trim(); - const type = line.substr(39, 1).trim(); - const decimals = line.substr(40, 3).trim(); - const field = line.substr(23, 2).trim().toUpperCase(); - const keywords = line.substr(43).trim().toUpperCase(); +export function parseDLine(lineNumber, lineIndex, content) { + content = content.padEnd(80); + const longForm = content.substring(6).trimEnd(); + const potentialName = longForm.endsWith(`...`) ? 
calculateToken(lineNumber, lineIndex+6, longForm.substring(0, longForm.length - 3)) : undefined; + const name = content.substr(6, 15); + const pos = content.substr(25, 7); + const len = content.substr(32, 7); + const type = content.substr(39, 1); + const decimals = content.substr(40, 3); + const field = content.substr(23, 2).toUpperCase(); + const keywords = content.substr(43); + const keywordTokens = Parser.getTokens(keywords, lineNumber, lineIndex+43); return { - potentialName, - name, - pos, - len, - type, - decimals, - field, - keywords: Parser.expandKeywords(keywords) + potentialName: potentialName, + name: calculateToken(lineNumber, lineIndex+6, name), + pos: calculateToken(lineNumber, lineIndex+25, pos), + len: calculateToken(lineNumber, lineIndex+32, len), + type: calculateToken(lineNumber, lineIndex+39, type), + decimals: calculateToken(lineNumber, lineIndex+40, decimals), + field: calculateToken(lineNumber, lineIndex+23, field), + keywordsRaw: keywordTokens, + keywords: Parser.expandKeywords(keywordTokens, field.trim() === `C`) }; } /** - * @param {string} line + * @param {string} content */ -export function parsePLine(line) { - line = line.padEnd(80); - const name = line.substr(6, 16).trim(); - const potentialName = line.substring(6).trim(); - const start = line[23].toUpperCase() === `B`; - const keywords = line.substr(43).trim().toUpperCase(); +export function parsePLine(content, lineNumber, lineIndex) { + content = content.padEnd(80); + const name = content.substr(6, 16) + const longForm = content.substring(6).trimEnd(); + const potentialName = longForm.endsWith(`...`) ? 
calculateToken(lineNumber, lineIndex+6, longForm.substring(0, longForm.length - 3)) : undefined; + const start = content[23].toUpperCase() === `B`; + const keywords = content.substr(43) + const keywordTokens = Parser.getTokens(keywords, lineNumber, lineIndex+43); return { - name, + name: calculateToken(lineNumber, lineIndex+6, name), potentialName, - keywords: Parser.expandKeywords(keywords), + keywordsRaw: keywordTokens, + keywords: Parser.expandKeywords(keywordTokens), start }; } +export function prettyTypeFromToken(dSpec) { + return getPrettyType({ + type: dSpec.type ? dSpec.type.value : ``, + keywords: dSpec.keywords, + len: dSpec.len ? dSpec.len.value : ``, + pos: dSpec.pos ? dSpec.pos.value : ``, + decimals: dSpec.decimals ? dSpec.decimals.value : ``, + field: dSpec.field ? dSpec.field.value : `` + }) +} + /** * - * @param {{type: string, keywords: import("../parserTypes").Keywords, len: string, pos?: string, decimals?: string, field?: string}} lineData + * @param {{type: string, keywords: import("../parserTypes").Keywords, len: string, pos: string, decimals: string, field: string}} lineData * @returns {import("../parserTypes").Keywords} */ export function getPrettyType(lineData) { @@ -100,6 +149,10 @@ export function getPrettyType(lineData) { length = length - Number(lineData.pos) + 1; } + if (!lineData.decimals) { + lineData.decimals = ``; + } + switch (lineData.type.toUpperCase()) { case `A`: if (Number(lineData.keywords[`VARYING`]) >= 0) { @@ -224,5 +277,5 @@ export function getPrettyType(lineData) { break; } - return Parser.expandKeywords(outType); + return Parser.expandKeywords(Parser.getTokens(outType)); } \ No newline at end of file diff --git a/language/parser.js b/language/parser.ts similarity index 66% rename from language/parser.js rename to language/parser.ts index 03a05e7c..edd2c307 100644 --- a/language/parser.js +++ b/language/parser.ts @@ -1,65 +1,51 @@ /* eslint-disable no-case-declarations */ -import { createBlocks, tokenise } from 
"./tokens"; +import { ALLOWS_EXTENDED, createBlocks, tokenise } from "./tokens"; import Cache from "./models/cache"; import Declaration from "./models/declaration"; import oneLineTriggers from "./models/oneLineTriggers"; -import { parseFLine, parseCLine, parsePLine, parseDLine, getPrettyType } from "./models/fixed"; +import { parseFLine, parseCLine, parsePLine, parseDLine, getPrettyType, prettyTypeFromToken } from "./models/fixed"; +import path from "path"; +import { Token } from "./types"; +import { Keywords } from "./parserTypes"; const HALF_HOUR = (30 * 60 * 1000); -/** - * @callback tablePromise - * @param {string} name Table name - * @param {boolean} [aliases] Table name - * @returns {Promise} - */ - -/** - * @callback includeFilePromise - * @param {string} baseFile - * @param {string} includeString - * @returns {Promise<{found: boolean, uri?: string, lines?: string[]}>} - */ +export type tablePromise = (name: string, aliases?: boolean) => Promise; +export type includeFilePromise = (baseFile: string, includeString: string) => Promise<{found: boolean, uri?: string, content?: string}>; +export type TableDetail = {[name: string]: {fetched: number, fetching?: boolean, recordFormats: Declaration[]}}; +export interface ParseOptions {withIncludes?: boolean, ignoreCache?: boolean, collectReferences?: boolean}; + +const lineTokens = (input: string, lineNumber: number, lineIndex: number): Token[] => { + let tokens = tokenise(input, { + baseIndex: lineIndex, + lineNumber, + ignoreTypes: [`tab`] + }); + + return tokens; +} export default class Parser { - constructor() { - /** @type {{[path: string]: Cache}} */ - this.parsedCache = {}; + parsedCache: {[thePath: string]: Cache} = {}; + tables: TableDetail = {}; + tableFetch: tablePromise|undefined; + includeFileFetch: includeFilePromise|undefined; - /** @type {{[name: string]: {fetched: number, fetching?: boolean, recordFormats: Declaration[]}}} */ - this.tables = {}; - - /** @type {tablePromise} */ - this.tableFetch = 
undefined; - - /** @type {includeFilePromise} */ - this.includeFileFetch = undefined; + constructor() { } - /** - * @param {tablePromise} promise - */ - setTableFetch(promise) { + setTableFetch(promise: tablePromise) { this.tableFetch = promise; } - /** - * @param {includeFilePromise} promise - */ - setIncludeFileFetch(promise) { + setIncludeFileFetch(promise: includeFilePromise) { this.includeFileFetch = promise; } - /** - * @param {string} name - * @param {string} keyVersion - * @param {boolean} [aliases] - * @returns {Promise} - */ - async fetchTable(name, keyVersion = ``, aliases) { + async fetchTable(name: string, keyVersion = ``, aliases?: boolean): Promise { if (name === undefined || (name && name.trim() === ``)) return []; if (!this.tableFetch) return []; const table = name.toUpperCase(); @@ -82,8 +68,7 @@ export default class Parser { recordFormats: [] }; - /** @type {Declaration[]} */ - let newDefs; + let newDefs: Declaration[]; try { newDefs = await this.tableFetch(table, aliases); @@ -124,7 +109,7 @@ export default class Parser { * @param {string} line * @returns {string|undefined} */ - static getIncludeFromDirective(line) { + static getIncludeFromDirective(line: string): string|undefined { if (line.includes(`*`)) return; // Likely comment if (line.trim().startsWith(`//`)) return; // Likely comment @@ -143,8 +128,7 @@ export default class Parser { directiveLength = 9 }; - /** @type {string|undefined} */ - let directiveValue; + let directiveValue: string|undefined; if (directivePosition >= 0) { if (comment >= 0) { @@ -162,144 +146,239 @@ export default class Parser { return directiveValue; } - } - /** - * @param {string} workingUri - * @param {string} [content] - * @param {{withIncludes?: boolean, ignoreCache?: boolean}} options - * @returns {Promise} - */ - async getDocs(workingUri, content, options = {withIncludes: true}) { + async getDocs(workingUri: string, baseContent?: string, options: ParseOptions = {withIncludes: true, collectReferences: true}): 
Promise { const existingCache = this.getParsedCache(workingUri); if (options.ignoreCache !== true && existingCache) { return existingCache; } - if (!content) return null; - - let baseLines = content.replace(new RegExp(`\\\r`, `g`), ``).split(`\n`); + if (!baseContent) return null; - /** @type {Cache[]} */ - let scopes = []; + let scopes: Cache[] = []; - /** @type {Declaration[]} Free format struct scopes. Used for free-format only */ - let dsScopes = []; + /** Free format struct scopes. Used for free-format only */ + let dsScopes: Declaration[] = []; - /** @type {string[]} */ - let globalKeyword = []; + let globalKeyword: string[] = []; // Global scope bits scopes.push(new Cache()); - /** - * Gets value of EXTFILE if it exists. - * @param {string} defaultName - * @param {import("./parserTypes").Keywords} keywords - * @returns {string} - */ - const getObjectName = (defaultName, keywords) => { + const getObjectName = (defaultName: string, keywords: Keywords): string => { let objectName = defaultName; - const extObjKeywords = [`EXTFILE`]; - const extObjKeywordsDesc = [`EXTDESC`]; // Check for external object - extObjKeywords.forEach(keyword => { - const keywordValue = keywords[keyword]; - if (keywordValue && typeof keywordValue === `string`) { - objectName = keywordValue.substring(keyword.length+1, keywordValue.length - 1).toUpperCase(); - - if (objectName.startsWith(`'`) && objectName.endsWith(`'`)) { - objectName = objectName.substring(1, objectName.length - 1); - } + const extFile = keywords[`EXTFILE`]; + if (extFile && typeof extFile === `string`) { + objectName = extFile.toUpperCase(); + if (objectName.startsWith(`'`) && objectName.endsWith(`'`)) { + objectName = objectName.substring(1, objectName.length - 1); } - }); + } if(objectName === `*EXTDESC`){ // Check for external object - extObjKeywordsDesc.forEach(keyword => { - const keywordValue = keywords[keyword]; - if (keywordValue && typeof keywordValue === `string`) { - objectName = 
keywordValue.substring(keyword.length+1, keywordValue.length - 1).toUpperCase(); + const extDesc = keywords['EXTDESC']; + if (extDesc && typeof extDesc === `string`) { + objectName = extDesc.toUpperCase(); - if (objectName.startsWith(`'`) && objectName.endsWith(`'`)) { - objectName = objectName.substring(1, objectName.length - 1); - } + if (objectName.startsWith(`'`) && objectName.endsWith(`'`)) { + objectName = objectName.substring(1, objectName.length - 1); } - }); + } } return objectName; }; - let potentialName; + let potentialName: string|undefined; let potentialNameUsed = false; - /** @type {"structs"|"procedures"|"constants"} */ - let currentGroup; + let currentGroup: "structs"|"procedures"|"constants"; - /** @type {string[]} */ - let definedMacros = []; + let definedMacros: string[] = []; - //Now the real work /** - * @param {string} file - * @param {string[]} lines + * Parse the tokens and add references to the definitions + * The statement is modified in place and sets tokens undefined when are references */ - const parseContent = async (file, lines) => { + const collectReferences = (currentUri: string, statement: Token[], currentProcedure?: Declaration, currentDef?: Declaration, isExec = false) => { + if (statement[0]?.value?.toUpperCase() === `EXEC`) { + isExec = true; + } + + const removeCollectedToken = (at: number) => { + statement[at] = undefined; + } + + const addReference = (def: Declaration, part: Token, at: number) => { + def.references.push({ + uri: currentUri, + offset: { position: part.range.start, end: part.range.end }, + }); + + removeCollectedToken(at); + } + + for (let i = 0; i < statement.length; i++) { + const part = statement[i]; + if (part === undefined) continue; + + if (![`special`, `word`].includes(part.type)) continue; + if (statement[i - 1] && statement[i - 1].type === `dot`) break; + + if (isExec && statement[i-1]) { + if (statement[i-1].type !== `seperator`) { + continue; + } + } + + const isSpecial = part.type === `special`; + 
const lookupName = (isSpecial ? part.value.substring(1) : part.value).toUpperCase(); + + let defRef: Declaration|undefined; + + if (isSpecial) { + // The only specials that can be looked up at global indicators + defRef = scopes[0].indicators.find(ind => ind.name.toUpperCase() === lookupName); + + } else { + if (currentDef) { + if (currentDef.name.toUpperCase() === lookupName) { + defRef = currentDef; + } else if (currentDef.subItems.length > 0) { + defRef = currentDef.subItems.find(sub => sub.name.toUpperCase() === lookupName); + } + } + + if (!defRef && currentProcedure && currentProcedure.scope) { + defRef = currentProcedure.scope.find(lookupName); + + if (!defRef) { + defRef = currentProcedure.subItems.find(def => def.name.toUpperCase() === lookupName); + } + } + + if (!defRef) { + defRef = scopes[0].find(lookupName); + } + } + + if (defRef) { + addReference(defRef, part, i); + + if (defRef.keyword[`QUALIFIED`]) { + let nextPartIndex = i + 1; + + if (statement[nextPartIndex]) { + // First, check if there is an array call here and skip over it + if (statement[nextPartIndex].type === `openbracket`) { + nextPartIndex = statement.findIndex((value, index) => index > nextPartIndex && value.type === `closebracket`); + + if (nextPartIndex >= 0) nextPartIndex++; + } + + // Check if the next part is a dot + if (statement[nextPartIndex] && statement[nextPartIndex].type === `dot`) { + nextPartIndex++; + + // Check if the next part is a word + if (statement[nextPartIndex] && statement[nextPartIndex].type === `word` && statement[nextPartIndex].value) { + const subItemPart = statement[nextPartIndex]; + const subItemName = subItemPart.value.toUpperCase(); + + // Find the subitem + const subItemDef = defRef.subItems.find(subfield => subfield.name.toUpperCase() == subItemName); + if (subItemDef) { + addReference(subItemDef, subItemPart, i); + } + } + } + } + } + } + } + } + + //Now the real work + const parseContent = async (fileUri: string, allContent: string) => { + const EOL 
= allContent.includes(`\r\n`) ? `\r\n` : `\n`; + const LINEEND = ``.padEnd(EOL.length); + let lines = allContent.split(EOL); + + let postProcessingStatements: {[procedure: string]: Token[][]} = {'GLOBAL': []}; + + const addPostProcessingStatements = (procedure = `GLOBAL`, statement: Token[]) => { + if (!options.collectReferences) return; + + if (!postProcessingStatements[procedure]) { + postProcessingStatements[procedure] = []; + } + + postProcessingStatements[procedure].push(statement); + } + + const scanScopeForReferences = () => { + for (const procedure in postProcessingStatements) { + const currentProcedure = scopes[0].procedures.find(proc => proc.name === procedure) ; + const statements = postProcessingStatements[procedure]; + for (const statement of statements) { + collectReferences(fileUri, statement, currentProcedure); + } + } + } + if (lines.length === 0) return; let currentTitle = undefined, currentDescription = []; - /** @type {{tag: string, content: string}[]} */ - let currentTags = []; + let currentTags: {tag: string, content: string}[] = []; - /** @type {Declaration} */ - let currentItem; - /** @type {Declaration} */ - let currentSub; - let currentProcName; + let currentItem: Declaration|undefined; + let currentSub: Declaration|undefined; + let currentProcName: string|undefined; let resetDefinition = false; //Set to true when you're done defining a new item let docs = false; // If section is for ILEDocs let lineNumber = -1; - - /** - * @type {string[]} - */ - let parts; - - /** - * @type {string[]} - */ - let partsLower; - - /** - * @type {string[]} - */ - let pieces; + let lineIndex = 0; let isFullyFree = lines[0].toUpperCase().startsWith(`**FREE`); let lineIsFree = false; - /** @type {string|undefined} */ - let currentStatement; - /** @type {number|undefined} */ - let statementStartingLine; + /** Used for handling multiline statements */ + let currentStmtStart: {content?: string, line: number, index: number}|undefined; - /** @type {{condition: 
boolean}[]} */ - let directIfScope = []; + let directIfScope: {condition: boolean}[] = []; let lineCanRun = () => { return directIfScope.length === 0 || directIfScope.every(scope => scope.condition); } - /** - * Expands LIKEDS, LIKEREC and EXTNAME. - * @param {string} file - * @param {Declaration} ds - */ - const expandDs = async (file, ds) => { + const stripComment = (inputLine: string) => { + const comment = inputLine.indexOf(`//`); + const quote = inputLine.lastIndexOf(`'`); + if (comment >= 0 && comment < quote) { + return inputLine; + } + + return (comment >= 0 ? inputLine.substring(0, comment).trimEnd() : inputLine); + } + + const getValidStatement = (inputLine: string, withSep?: boolean) => { + const comment = inputLine.indexOf(`//`); + const quote = inputLine.lastIndexOf(`'`); + const sep = inputLine.indexOf(`;`, quote >= 0 ? quote : 0); + + if (comment >= 0 && comment < sep) { + return inputLine; + } + + return (sep >= 0 ? inputLine.substring(0, sep + (withSep ? 1 : 0)) : inputLine); + } + + const expandDs = async (file: string, ds: Declaration): Promise => { const tags = [`LIKEDS`, `LIKEREC`, `EXTNAME`]; const keywords = ds.keyword; for (const tag of tags) { @@ -319,13 +398,12 @@ export default class Parser { keywordValue = keywordValue.substring(1, keywordValue.length - 1); } - if ([`EXTNAME`].includes(tag)) { + if (tag === `EXTNAME`) { // Fetch from external definitions const keywordLength = Object.keys(ds.keyword); const recordFormats = await this.fetchTable(keywordValue, keywordLength.length.toString(), ds.keyword[`ALIAS`] !== undefined); if (recordFormats.length > 0) { - // Got to fix the positions for the defintions to be the declare. recordFormats.forEach(recordFormat => { recordFormat.subItems.forEach(subItem => { @@ -342,7 +420,7 @@ export default class Parser { } else { // We need to add qualified as it is qualified by default. 
if (!ds.keyword[`QUALIFIED`]) - ds.keyword[`QUALIFIED`]; + ds.keyword[`QUALIFIED`] = true; // Fetch from local definitions for (let i = scopes.length - 1; i >= 0; i--) { @@ -368,75 +446,117 @@ export default class Parser { } }; - for (let line of lines) { + let fixedExec = false; + + for (let li = 0; li < lines.length; li++) { + if (li >= 1) { + lineIndex += lines[li-1].length + EOL.length; + } + const scope = scopes[scopes.length - 1]; - let spec; + + let baseLine = lines[li]; + let spec: string|undefined; lineIsFree = false; lineNumber += 1; - if (line.startsWith(`**`)) { + if (baseLine.startsWith(`**`)) { // Usually is **FREE if (lineNumber === 0) continue; // After compile time data, we're done else break; } - if (isFullyFree === false && line.length > 6) { - const comment = line[6]; - spec = line[5].toUpperCase(); + if (isFullyFree === false && baseLine.length > 6) { + const comment = baseLine[6]; + spec = baseLine[5].toUpperCase(); if ([spec, comment].includes(`*`)) { + if (currentStmtStart && currentStmtStart.content) { + // Since we're in an extended statement (usually fixed exec), we still need to collect the lengths for the tokeniser + currentStmtStart.content += ``.padEnd(baseLine.length) + LINEEND; + } + continue; } if (comment === `/`) { // Directives can be parsed by the free format parser - line = line.substring(6); + baseLine = ``.padEnd(6) + baseLine.substring(6); lineIsFree = true; + } else if (comment === `+` && fixedExec && currentStmtStart.content) { + // Fixed format EXEC SQL + baseLine = ``.padEnd(7) + baseLine.substring(7); + currentStmtStart.content += baseLine + LINEEND; + continue; } else { if (spec === ` `) { - //Clear out stupid comments - line = line.substring(7); - + //Clear out stupid comments + baseLine = ``.padEnd(7) + baseLine.substring(7); lineIsFree = true; + } else if (![`D`, `P`, `C`, `F`, `H`].includes(spec)) { continue; - } else { - if (spec === `C`) { - // We don't want to waste precious time parsing all C specs, so we make 
sure it's got - // BEGSR or ENDSR in it first. - const upperLine = line.toUpperCase(); - if ([`BEGSR`, `ENDSR`, `CALL`].some(v => upperLine.includes(v)) === false) { - continue; - } - } } } - - if (line.length > 80) { - // Remove ending comments - line = line.substring(0, 80); + } else { + // Even if the line is useless, we need to capture the characters to be + // parsed in case it's a statement spread over multiple lines + if (!isFullyFree && currentStmtStart && currentStmtStart.content) { + currentStmtStart.content += ``.padEnd(baseLine.length + EOL.length); } } - pieces = []; - parts = []; + let line = baseLine; + if (!isFullyFree && line.length > 80) { + // Remove ending comments + line = line.substring(0, 80); + } + + let tokens: Token[] = []; + let parts: string[]; + let partsLower: string[]; if (isFullyFree || lineIsFree) { // Free format! - line = line.trim(); + if (line.trim() === ``) continue; - if (line === ``) continue; - - pieces = line.split(`;`); - let tokens = tokenise(pieces[0], lineNumber); + const lineIsComment = line.trim().startsWith(`//`); + tokens = lineTokens(getValidStatement(line), lineNumber, lineIndex); partsLower = tokens.filter(piece => piece.value).map(piece => piece.value); parts = partsLower.map(piece => piece.toUpperCase()); - const lineIsComment = line.startsWith(`//`); + line = line.trim(); if (!lineIsComment) { + + // First, we need a seperate switch for EXEC statements + switch (parts[0]) { + case '/EXEC': + fixedExec = true; + baseLine = ``.padEnd(7) + baseLine.substring(7); + currentStmtStart = { + line: lineNumber, + index: lineIndex, + content: baseLine + LINEEND + } + continue; + case '/END': + line = `;`; + baseLine = ``.padEnd(baseLine.length) + `;`; + fixedExec = false; + break; + default: + // Maybe we're in a fixed exec statement, but a directive is being used. 
+ // See test case references_21_fixed_exec1 + if (fixedExec && currentStmtStart && currentStmtStart.content) { + currentStmtStart.content += ``.padEnd(baseLine.length) + LINEEND; + } + break; + } + + // Then we do regular parsing if (parts[0] === `/EOF` && lineCanRun()) { // End of parsing for this file return; @@ -449,17 +569,19 @@ export default class Parser { if (includePath) { const include = await this.includeFileFetch(workingUri, includePath); - if (include.found) { - scopes[0].includes.push({ - toPath: include.uri, - line: lineNumber - }); - - try { - await parseContent(include.uri, include.lines); - } catch (e) { - console.log(`Error parsing include: ${include.uri}`); - console.log(e); + if (include.found && include.uri) { + if (!scopes[0].includes.some(inc => inc.toPath === include.uri)) { + scopes[0].includes.push({ + toPath: include.uri, + line: lineNumber + }); + + try { + await parseContent(include.uri, include.content); + } catch (e) { + console.log(`Error parsing include: ${include.uri}`); + console.log(e); + } } } } @@ -516,30 +638,38 @@ export default class Parser { } } - if (pieces.length > 1 && pieces[1].includes(`//`)) line = pieces[0] + `;`; - if (!currentStatement) statementStartingLine = lineNumber; + if (!currentStmtStart || !currentStmtStart.content) { + currentStmtStart = {line: lineNumber, index: lineIndex}; + } - if (!lineIsComment) { - if (line.endsWith(`;`)) { - if (currentStatement) { + if (lineIsComment) { + // This happens when we put a comment on a line which is part of one long statement. + // See references_24_comment_in_statement + if (currentStmtStart.content) { + currentStmtStart.content += ``.padEnd(baseLine.length) + LINEEND; + } + } else { + if (stripComment(line).endsWith(`;`)) { + + if (currentStmtStart.content) { // This means the line is just part of the end of the last statement as well. 
- line = currentStatement + line; - currentStatement = undefined; + line = currentStmtStart.content + getValidStatement(baseLine); - pieces = line.split(`;`); - tokens = tokenise(pieces[0], lineNumber); + tokens = lineTokens(line, currentStmtStart.line, currentStmtStart.index); partsLower = tokens.filter(piece => piece.value).map(piece => piece.value); parts = partsLower.map(piece => piece.toUpperCase()); + + currentStmtStart.content = undefined; } } else if (!line.endsWith(`;`)) { + currentStmtStart.content = (currentStmtStart.content || ``) + baseLine; + + if (currentStmtStart.content.endsWith(`-`)) + currentStmtStart.content = currentStmtStart.content.substring(0, currentStmtStart.content.length - 1) + ` `; - currentStatement = (currentStatement || ``) + line.trim(); - if (currentStatement.endsWith(`-`)) - currentStatement = currentStatement.substring(0, currentStatement.length - 1); - else - currentStatement += ` `; + currentStmtStart.content += LINEEND; continue; } @@ -559,7 +689,7 @@ export default class Parser { currentItem.description = currentDescription.join(`\n`); currentItem.position = { - path: file, + path: fileUri, line: lineNumber }; @@ -606,12 +736,12 @@ export default class Parser { if (parts.length > 1) { currentItem = new Declaration(`constant`); currentItem.name = partsLower[1]; - currentItem.keyword = Parser.expandKeywords(tokens.slice(2)); + currentItem.keyword = Parser.expandKeywords(tokens.slice(2), true); currentItem.description = currentDescription.join(`\n`); currentItem.position = { - path: file, - line: statementStartingLine + path: fileUri, + line: currentStmtStart.line }; scope.constants.push(currentItem); @@ -630,8 +760,8 @@ export default class Parser { currentItem.tags = currentTags; currentItem.position = { - path: file, - line: statementStartingLine + path: fileUri, + line: currentStmtStart.line }; scope.variables.push(currentItem); @@ -649,13 +779,13 @@ export default class Parser { currentItem.description = 
currentDescription.join(`\n`); currentItem.position = { - path: file, - line: statementStartingLine + path: fileUri, + line: currentStmtStart.line }; currentItem.range = { - start: statementStartingLine, - end: statementStartingLine + start: currentStmtStart.line, + end: currentStmtStart.line }; currentItem.readParms = true; @@ -669,7 +799,7 @@ export default class Parser { case `END-ENUM`: if (currentItem && currentItem.type === `constant`) { - currentItem.range.end = statementStartingLine; + currentItem.range.end = currentStmtStart.line; scope.constants.push(currentItem); @@ -687,23 +817,23 @@ export default class Parser { currentItem.tags = currentTags; currentItem.position = { - path: file, - line: statementStartingLine + path: fileUri, + line: currentStmtStart.line }; currentItem.range = { - start: statementStartingLine, - end: statementStartingLine + start: currentStmtStart.line, + end: currentStmtStart.line }; currentGroup = `structs`; // Expand the LIKEDS value if there is one. - await expandDs(file, currentItem); + await expandDs(fileUri, currentItem); // Does the keywords include a keyword that makes end-ds useless? 
if (Object.keys(currentItem.keyword).some(keyword => oneLineTriggers[`DCL-DS`].some(trigger => keyword.startsWith(trigger)))) { - currentItem.range.end = statementStartingLine; + currentItem.range.end = currentStmtStart.line; scope.structs.push(currentItem); } else { currentItem.readParms = true; @@ -720,7 +850,7 @@ export default class Parser { case `END-DS`: if (dsScopes.length > 0) { const currentDs = dsScopes[dsScopes.length - 1]; - currentDs.range.end = statementStartingLine; + currentDs.range.end = currentStmtStart.line; } if (dsScopes.length === 1) { @@ -743,20 +873,20 @@ export default class Parser { currentItem.tags = currentTags; currentItem.position = { - path: file, - line: statementStartingLine + path: fileUri, + line: currentStmtStart.line }; currentItem.readParms = true; currentItem.range = { - start: statementStartingLine, - end: statementStartingLine + start: currentStmtStart.line, + end: currentStmtStart.line }; // Does the keywords include a keyword that makes end-ds useless? 
if (Object.keys(currentItem.keyword).some(keyword => oneLineTriggers[`DCL-PR`].some(trigger => keyword.startsWith(trigger)))) { - currentItem.range.end = statementStartingLine; + currentItem.range.end = currentStmtStart.line; scope.procedures.push(currentItem); resetDefinition = true; } @@ -769,7 +899,7 @@ export default class Parser { case `END-PR`: if (currentItem && currentItem.type === `procedure`) { - currentItem.range.end = statementStartingLine; + currentItem.range.end = currentStmtStart.line; const isDefinedGlobally = scopes[0].procedures.some(proc => proc.name.toUpperCase() === currentItem.name.toUpperCase()); @@ -800,15 +930,15 @@ export default class Parser { currentItem.tags = currentTags; currentItem.position = { - path: file, - line: statementStartingLine + path: fileUri, + line: currentStmtStart.line }; currentItem.readParms = false; currentItem.range = { - start: statementStartingLine, - end: statementStartingLine + start: currentStmtStart.line, + end: currentStmtStart.line }; currentItem.scope = new Cache(); @@ -867,7 +997,7 @@ export default class Parser { if (currentItem && currentItem.type === `procedure`) { scopes.pop(); - currentItem.range.end = statementStartingLine; + currentItem.range.end = currentStmtStart.line; resetDefinition = true; } } @@ -882,13 +1012,13 @@ export default class Parser { currentItem.keyword = {'Subroutine': true}; currentItem.position = { - path: file, - line: statementStartingLine + path: fileUri, + line: currentStmtStart.line }; currentItem.range = { - start: statementStartingLine, - end: statementStartingLine + start: currentStmtStart.line, + end: currentStmtStart.line }; currentDescription = []; @@ -898,7 +1028,7 @@ export default class Parser { case `ENDSR`: if (currentItem && currentItem.type === `subroutine`) { - currentItem.range.end = statementStartingLine; + currentItem.range.end = currentStmtStart.line; scope.subroutines.push(currentItem); resetDefinition = true; } @@ -943,8 +1073,7 @@ export default class 
Parser { let isContinued = false; - /** @type {string[]} */ - let ignoreCtes = []; + let ignoreCtes: string[] = []; if (pIncludes(`WITH`)) { for (let index = 4; index < tokens.length; index++) { @@ -980,8 +1109,8 @@ export default class Parser { currentSqlItem.description = qualifiedObjectPath.schema || ``; currentSqlItem.position = { - path: file, - line: statementStartingLine + path: fileUri, + line: currentStmtStart.line }; scope.sqlReferences.push(currentSqlItem); @@ -1029,7 +1158,7 @@ export default class Parser { } } else { - //Do nothing because it's a regular comment + //Do nothing because it's a regular comment } } else { @@ -1048,12 +1177,12 @@ export default class Parser { } currentSub = new Declaration(`subitem`); - currentSub.name = (parts[0] === `*N` ? `parm${currentItem.subItems.length+1}` : partsLower[0]) ; + currentSub.name = (parts[0] === `*N` ? `parm${currentItem.subItems.length+1}` : partsLower[0]); currentSub.keyword = Parser.expandKeywords(tokens.slice(1)); currentSub.position = { - path: file, - line: statementStartingLine + path: fileUri, + line: currentStmtStart.line }; // Add comments from the tags @@ -1066,7 +1195,7 @@ export default class Parser { } // If the parameter has likeds, add the subitems to make it a struct. - await expandDs(file, currentSub); + await expandDs(fileUri, currentSub); currentItem.subItems.push(currentSub); currentSub = undefined; @@ -1102,7 +1231,7 @@ export default class Parser { currentItem.keyword = fSpec.keywords; currentItem.position = { - path: file, + path: fileUri, line: lineNumber }; @@ -1153,11 +1282,29 @@ export default class Parser { break; case `C`: - const cSpec = parseCLine(line); + const cSpec = parseCLine(lineNumber, lineIndex, line); + + tokens = [cSpec.ind1, cSpec.ind2, cSpec.ind3]; - potentialName = cSpec.factor1; + const fromToken = (token?: Token) => { + return token ? 
lineTokens(token.value, lineNumber, token.range.start) : []; + }; - switch (cSpec.opcode) { + if (cSpec.opcode && ALLOWS_EXTENDED.includes(cSpec.opcode.value) && !cSpec.factor1 && cSpec.extended) { + tokens.push(...fromToken(cSpec.extended)); + } else if (!cSpec.factor1 && !cSpec.opcode && cSpec.extended) { + tokens.push(...fromToken(cSpec.extended)); + } else { + tokens.push( + ...fromToken(cSpec.factor1), + ...fromToken(cSpec.factor2), + ...fromToken(cSpec.result), + ); + } + + potentialName = cSpec.factor1 ? cSpec.factor1.value : ``; + + switch (cSpec.opcode && cSpec.opcode.value) { case `BEGSR`: if (!scope.subroutines.find(sub => sub.name && sub.name.toUpperCase() === potentialName)) { currentItem = new Declaration(`subroutine`); @@ -1165,7 +1312,7 @@ export default class Parser { currentItem.keyword = {'Subroutine': true}; currentItem.position = { - path: file, + path: fileUri, line: lineNumber }; @@ -1188,37 +1335,58 @@ export default class Parser { case `CALL`: const callItem = new Declaration(`procedure`); - callItem.name = (cSpec.factor2.startsWith(`'`) && cSpec.factor2.endsWith(`'`) ? cSpec.factor2.substring(1, cSpec.factor2.length-1) : cSpec.factor2); - callItem.keyword = {'EXTPGM': true} - callItem.description = currentDescription.join(`\n`); - callItem.tags = currentTags; + if (cSpec.factor2) { + const f2Value = cSpec.factor2.value; + callItem.name = (f2Value.startsWith(`'`) && f2Value.endsWith(`'`) ? 
f2Value.substring(1, f2Value.length-1) : f2Value); + callItem.keyword = {'EXTPGM': true} + callItem.description = currentDescription.join(`\n`); + callItem.tags = currentTags; + + callItem.position = { + path: fileUri, + line: lineNumber + }; - callItem.position = { - path: file, - line: lineNumber - }; + callItem.range = { + start: lineNumber, + end: lineNumber + }; - callItem.range = { - start: lineNumber, - end: lineNumber - }; + scope.procedures.push(callItem); + } + break; + + case `TAG`: + const tagItem = new Declaration(`tag`); + if (cSpec.factor1) { + tagItem.name = cSpec.factor1.value; + tagItem.position = { + path: fileUri, + line: lineNumber + }; + + tagItem.range = { + start: lineNumber, + end: lineNumber + }; - scope.procedures.push(callItem); + scope.tags.push(tagItem); + } break; } break; case `P`: - const pSpec = parsePLine(line); + const pSpec = parsePLine(line, lineNumber, lineIndex); - if (pSpec.potentialName === ``) continue; - - if (pSpec.potentialName.endsWith(`...`)) { - potentialName = pSpec.potentialName.substring(0, pSpec.potentialName.length - 3); + if (pSpec.potentialName) { + potentialName = pSpec.potentialName.value.substring(0, pSpec.potentialName.value.length - 3); potentialNameUsed = true; + tokens = [pSpec.potentialName]; } else { if (pSpec.start) { - potentialName = pSpec.name.length > 0 ? pSpec.name : potentialName; + tokens = [...pSpec.keywordsRaw, pSpec.name] + potentialName = pSpec.name && pSpec.name.value.length > 0 ? pSpec.name.value : potentialName; if (potentialName) { //We can overwrite it.. it might have been a PR before. @@ -1234,7 +1402,7 @@ export default class Parser { currentItem.keyword = pSpec.keywords; currentItem.position = { - path: file, + path: fileUri, line: lineNumber - (potentialNameUsed ? 
1 : 0) // Account that name is on line before }; @@ -1266,26 +1434,26 @@ export default class Parser { break; case `D`: - const dSpec = parseDLine(line); - - if (dSpec.potentialName === ``) continue; + const dSpec = parseDLine(lineNumber, lineIndex, line); - if (dSpec.potentialName.endsWith(`...`)) { - potentialName = dSpec.potentialName.substring(0, dSpec.potentialName.length - 3); + if (dSpec.potentialName && dSpec.potentialName) { + potentialName = dSpec.potentialName.value; potentialNameUsed = true; + tokens = [dSpec.potentialName]; continue; } else { - potentialName = dSpec.name.length > 0 ? dSpec.name : potentialName ? potentialName : ``; + potentialName = dSpec.name && dSpec.name.value.length > 0 ? dSpec.name.value : (potentialName ? potentialName : ``); + tokens = [dSpec.field, ...dSpec.keywordsRaw, dSpec.name] - switch (dSpec.field) { + switch (dSpec.field && dSpec.field.value) { case `C`: currentItem = new Declaration(`constant`); currentItem.name = potentialName || `*N`; - currentItem.keyword = dSpec.keyword; + currentItem.keyword = dSpec.keywords || {}; // TODO: line number might be different with ...? currentItem.position = { - path: file, + path: fileUri, line: lineNumber - (potentialNameUsed ? 1 : 0) // Account that name is on line before }; @@ -1297,12 +1465,12 @@ export default class Parser { currentItem.name = potentialName || `*N`; currentItem.keyword = { ...dSpec.keywords, - ...getPrettyType(dSpec), + ...prettyTypeFromToken(dSpec), } // TODO: line number might be different with ...? currentItem.position = { - path: file, + path: fileUri, line: lineNumber - (potentialNameUsed ? 1 : 0) // Account that name is on line before }; @@ -1316,7 +1484,7 @@ export default class Parser { currentItem.keyword = dSpec.keywords; currentItem.position = { - path: file, + path: fileUri, line: lineNumber - (potentialNameUsed ? 
1 : 0) // Account that name is on line before }; @@ -1325,7 +1493,7 @@ export default class Parser { end: currentItem.position.line }; - expandDs(file, currentItem); + expandDs(fileUri, currentItem); currentGroup = `structs`; scope.structs.push(currentItem); @@ -1338,12 +1506,12 @@ export default class Parser { currentItem = new Declaration(`procedure`); currentItem.name = potentialName || `*N`; currentItem.keyword = { - ...getPrettyType(dSpec), + ...prettyTypeFromToken(dSpec), ...dSpec.keywords } currentItem.position = { - path: file, + path: fileUri, line: lineNumber - (potentialNameUsed ? 1 : 0) // Account that name is on line before }; @@ -1367,7 +1535,7 @@ export default class Parser { if (currentItem) { currentItem.keyword = { ...currentItem.keyword, - ...getPrettyType(dSpec), + ...prettyTypeFromToken(dSpec), ...dSpec.keywords } } @@ -1405,17 +1573,17 @@ export default class Parser { currentSub = new Declaration(`subitem`); currentSub.name = potentialName; currentSub.keyword = { - ...getPrettyType(dSpec), + ...prettyTypeFromToken(dSpec), ...dSpec.keywords } currentSub.position = { - path: file, + path: fileUri, line: lineNumber }; // If the parameter has likeds, add the subitems to make it a struct. 
- await expandDs(file, currentSub); + await expandDs(fileUri, currentSub); currentItem.subItems.push(currentSub); currentSub = undefined; @@ -1426,7 +1594,7 @@ export default class Parser { if (currentItem.subItems.length > 0) { currentItem.subItems[currentItem.subItems.length - 1].keyword = { ...currentItem.subItems[currentItem.subItems.length - 1].keyword, - ...getPrettyType(dSpec), + ...prettyTypeFromToken(dSpec), ...dSpec.keywords } } else { @@ -1449,6 +1617,12 @@ export default class Parser { } } + if (options.collectReferences && tokens.length > 0) { + const currentProc = scopes[0].procedures.find(proc => proc.name === currentProcName); + collectReferences(fileUri, tokens, currentProc, currentItem); + addPostProcessingStatements(currentProcName, tokens); + } + if (resetDefinition) { potentialName = undefined; potentialNameUsed = false; @@ -1460,12 +1634,16 @@ export default class Parser { resetDefinition = false; } } + + if (options.collectReferences) { + scanScopeForReferences(); + } } - await parseContent(workingUri, baseLines); + await parseContent(workingUri, baseContent); if (scopes.length > 0) { - scopes[0].keyword = Parser.expandKeywords(globalKeyword); + scopes[0].keyword = Parser.expandKeywords(Parser.getTokens(globalKeyword)); } scopes[0].fixProcedures(); @@ -1477,29 +1655,22 @@ export default class Parser { return parsedData; } - /** - * @param {import("./types").Token[]|string|string[]} tokens - */ - static expandKeywords(tokens) { - /** @type {import("./parserTypes").Keywords} */ - const keyvalues = {}; - - /** @type {import("./types").Token[]} */ - let validTokens; - - if (Array.isArray(tokens) && typeof tokens[0] === `string`) { - validTokens = tokenise(tokens.join(` `), 0); + static getTokens(content: string|string[]|Token[], lineNumber?: number, baseIndex?: number): Token[] { + if (Array.isArray(content) && typeof content[0] === `string`) { + return lineTokens(content.join(` `), lineNumber, baseIndex); } else - if (typeof tokens === `string`) 
{ - validTokens = tokenise(tokens, 0); + if (typeof content === `string`) { + return lineTokens(content, lineNumber, baseIndex); } else { - // @ts-ignore - validTokens = tokens; + return content as Token[]; } - + } + + static expandKeywords(tokens: Token[], isConst = false): Keywords { + const keyvalues: Keywords = {}; if (tokens.length > 0) { - const keywordParts = createBlocks(validTokens); + const keywordParts = createBlocks(tokens.slice(0)); for (let i = 0; i < keywordParts.length; i++) { if (keywordParts[i].value) { @@ -1507,7 +1678,11 @@ export default class Parser { keyvalues[keywordParts[i].value.toUpperCase()] = keywordParts[i+1].block.map(part => part.value).join(``); i++; // Skip one for the block. } else { - keyvalues[keywordParts[i].value.toUpperCase()] = true; + if (isConst) { + keyvalues[`CONST`] = keywordParts[i].value; + } else { + keyvalues[keywordParts[i].value.toUpperCase()] = true; + } } } } diff --git a/language/parserTypes.ts b/language/parserTypes.ts index 085404d0..43b6b38e 100644 --- a/language/parserTypes.ts +++ b/language/parserTypes.ts @@ -22,6 +22,7 @@ export interface CacheProps { sqlReferences?: Declaration[]; indicators?: Declaration[]; includes?: IncludeStatement[]; + tags?: Declaration[]; } export interface Rules { @@ -76,6 +77,12 @@ export interface Offset { end: number } + +export interface Reference { + uri: string; + offset: Offset; +} + export interface IssueRange { offset: Offset; type?: keyof Rules; diff --git a/language/tokens.ts b/language/tokens.ts index 673bcf9e..daa3021a 100644 --- a/language/tokens.ts +++ b/language/tokens.ts @@ -35,7 +35,7 @@ const commonMatchers: Matcher[] = [ { type: `divide` }, { type: `word`, - match: (word) => [`TITLE`, `EJECT`, `SPACE`, `COPY`, `INCLUDE`, `SET`, `RESTORE`, `OVERLOAD`, `DEFINE`, `UNDEFINE`, `IF`, `ELSE`, `ELSEIF`, `ENDIF`, `EOF`, `CHARCOUNT`].includes(word.toUpperCase()) + match: (word) => [`TITLE`, `EJECT`, `SPACE`, `COPY`, `INCLUDE`, `SET`, `RESTORE`, `OVERLOAD`, `DEFINE`, 
`UNDEFINE`, `IF`, `ELSE`, `ELSEIF`, `ENDIF`, `EOF`, `CHARCOUNT`, `EXEC`, `END`].includes(word.toUpperCase()) }, ], becomes: { @@ -232,7 +232,7 @@ const commonMatchers: Matcher[] = [ }, ]; -const splitParts = [`%`, `.`, `(`, `)`, `+`, `-`, `*`, `/`, `=`, `:`, `,`, `;`, `\n`, `\r`, ` `]; +const splitParts = [`%`, `.`, `(`, `)`, `+`, `-`, `*`, `/`, `=`, `:`, `,`, `;`, `\n`, `\r`, `\t`, ` `]; const types = { '%': `percent`, '.': `dot`, @@ -248,17 +248,46 @@ const types = { ',': `comma`, '\n': `newline`, '\r': `newliner`, + '\t': `tab`, }; const stringChar: string = `'`; const startCommentString = `//`; const endCommentString = `\n`; +export const ALLOWS_EXTENDED = [ + `CALLP`, + `DATA-GEN`, + `DATA-INTO`, + `DOU`, + `DOW`, + `ELSEIF`, + `EVAL`, + `EVAL-CORR`, + `EVALR`, + `FOR`, + `FOR-EACH`, + `IF`, + `ON-ERROR`, + `ON-EXCP`, + `ON-EXIT`, + `RETURN`, + `SND-MSG`, + `SORTA`, + `WHEN`, + `XML-INTO`, + `XML-SAX` +] + +export type TokeniseOptions = {lineNumber?: number, baseIndex?: number, ignoreTypes?: string[]}; + /** - * @param {string} statement * @returns {{value?: string, block?: object[], type: string, position: number}[]} */ -export function tokenise(statement, lineNumber = 0) { +export function tokenise(statement: string, options: TokeniseOptions = {}): Token[] { + let lineNumber = options.lineNumber || 0; + let baseIndex = options.baseIndex || 0; + let commentStart = -1; let state: ReadState = ReadState.NORMAL; @@ -300,17 +329,23 @@ export function tokenise(statement, lineNumber = 0) { switch (statement[i]) { // When it's the string character.. 
case stringChar: + const possibleEscape = statement[i+1] === stringChar; if (state === ReadState.IN_STRING) { - currentText += statement[i]; - result.push({ value: currentText, type: `string`, range: { start: startsAt, end: startsAt + currentText.length, line: lineNumber } }); - currentText = ``; + if (possibleEscape) { + currentText += `''`; + i += 2; + } else { + currentText += statement[i]; + result.push({ value: currentText, type: `string`, range: { start: startsAt, end: startsAt + currentText.length, line: lineNumber } }); + currentText = ``; + } } else { startsAt = i; currentText += statement[i]; } // @ts-ignore - state = state === ReadState.IN_STRING ? ReadState.NORMAL : ReadState.IN_STRING; + state = state === ReadState.IN_STRING && !possibleEscape ? ReadState.NORMAL : ReadState.IN_STRING; break; // When it's any other character... @@ -322,7 +357,13 @@ export function tokenise(statement, lineNumber = 0) { } if (statement[i] !== ` `) { - result.push({ value: statement[i], type: types[statement[i]], range: { start: i, end: i + statement[i].length, line: lineNumber } }); + const type = types[statement[i]]; + + if (options.ignoreTypes && options.ignoreTypes.includes(type)) { + continue; + } + + result.push({ value: statement[i], type, range: { start: i, end: i + statement[i].length, line: lineNumber } }); } startsAt = i + 1; @@ -341,12 +382,21 @@ export function tokenise(statement, lineNumber = 0) { result.push({ value: currentText, type: state === ReadState.NORMAL ? 
`word` : `string`, range: { start: startsAt, end: startsAt + currentText.length, line: lineNumber } }); currentText = ``; } else { - result.push({ value: currentText, type: `comment`, range: { start: startsAt, end: startsAt + currentText.length, line: lineNumber } }); + if (currentText.trim().length > 0) { + result.push({ value: currentText, type: `comment`, range: { start: startsAt, end: startsAt + currentText.length, line: lineNumber } }); + } } result = fixStatement(result); //result = createBlocks(result); + if (baseIndex) { + for (let i = 0; i < result.length; i++) { + result[i].range.start += baseIndex; + result[i].range.end += baseIndex; + } + } + return result; } diff --git a/tests/parserSetup.ts b/tests/parserSetup.ts index f1171837..55d13c4a 100644 --- a/tests/parserSetup.ts +++ b/tests/parserSetup.ts @@ -2,15 +2,16 @@ import Parser from '../language/parser'; import glob from "glob"; import path from 'path'; +import fs from 'fs'; import { readFile } from 'fs/promises'; import tables from './tables'; import { dspffdToRecordFormats } from '../extension/server/src/data'; -const includeDir = process.env.INCLUDE_DIR || path.join(__dirname, `..`); +const TEST_INCLUDE_DIR = process.env.INCLUDE_DIR || path.join(__dirname, `..`, `tests`); -export default function setupParser(): Parser { +export default function setupParser(projectRoot = TEST_INCLUDE_DIR): Parser { const parser = new Parser(); parser.setIncludeFileFetch(async (baseFile: string, includeFile: string) => { @@ -18,25 +19,26 @@ export default function setupParser(): Parser { includeFile = includeFile.substring(1, includeFile.length - 1); } - if (includeFile.includes(`,`)) { - includeFile = includeFile.split(`,`).join(`/`) + `.*`; + if (includeFile.includes(`,`) || !includeFile.includes(`.`)) { + includeFile = includeFile.split(`,`).join(`/`) + `.*rpgl*`; } const globPath = path.join(`**`, includeFile); - const files = glob.sync(globPath, { - cwd: includeDir, + const files: string[] = glob.sync(globPath, 
{ + cwd: projectRoot, absolute: true, nocase: true, }); if (files.length >= 1) { - const file = files[0]; + const file = files.find(f => f.toLowerCase().endsWith(`rpgleinc`)) || files[0]; const content = await readFile(file, { encoding: `utf-8` }); + return { found: true, uri: file, - lines: content.split(`\n`) + content } } @@ -54,4 +56,24 @@ export default function setupParser(): Parser { }); return parser; +} + +export function getTestProjectsDir(): string[] { + // get a list of directories in the test directory using fs + const sourcesDir = path.join(TEST_INCLUDE_DIR, `sources`); + return fs.readdirSync(sourcesDir) + .filter((f) => fs.statSync(path.join(TEST_INCLUDE_DIR, `sources`, f)).isDirectory()) + .map((f) => path.join(sourcesDir, f)); +} + +export function getSourcesList(fullDirPath: string): string[] { + return glob.sync(`**/*.{rpgle,sqlrpgle}`, { + cwd: fullDirPath, + absolute: true, + nocase: true, + }); +} + +export function getFileContent(fullPath: string) { + return readFile(fullPath, { encoding: `utf-8` }); } \ No newline at end of file diff --git a/tests/rpgle/CBKDTAARA.rpgle b/tests/rpgle/CBKDTAARA.rpgle new file mode 100644 index 00000000..aba827cd --- /dev/null +++ b/tests/rpgle/CBKDTAARA.rpgle @@ -0,0 +1,25 @@ + D wDTAARA UDS DTAARA(BBS400DTA) + D wSoftVer 6A + D wBBSNAM 40A + D wLOCCRY 2A + D wLOCCTY 40A + D wTIMZON 3A + D wCLOSED 1A + D wNUSLVL 2A + D wNUSSVY 4A + D wMAINTM 1A + D wSHWALD 1A + D wUser 10A + D wUserLvl 2A + D wLvlDescr 15A + D wUserLstLogin 8A + D wLvlAuths 5A + D wNewUsrNtfy 10A + D wHIDESO 1A + D wHLSOMS 1A + D wUserLvlAuths DS + D wAuthListUser 1A + D wAuthSysInfor 1A + D wAuthPostMsgs 1A + D wAuthMsgUsers 1A + D wAuthWhosOnli 1A diff --git a/tests/rpgle/CBKHEADER.rpgle b/tests/rpgle/CBKHEADER.rpgle new file mode 100644 index 00000000..2b6ee435 --- /dev/null +++ b/tests/rpgle/CBKHEADER.rpgle @@ -0,0 +1,15 @@ + * Get values from DTAARA and put them on screen + C IN wDTAARA + * Name of this BBS + C EVAL SCRBBS = wBBSNAM + * 
Nickname of logged in user + C EVAL SCRNCK = wUser + * Access Level of logged in user + C MOVEL wUserLvl SCRLVL + * Description of Access Level + C IF wSHWALD = 'Y' + C EVAL SCRLVD = wLvlDescr + C ELSE + C EVAL SCRLVD = *BLANK + C ENDIF + C UNLOCK wDTAARA diff --git a/tests/rpgle/CBKPCFGDCL.rpgle b/tests/rpgle/CBKPCFGDCL.rpgle new file mode 100644 index 00000000..2d5c5dd8 --- /dev/null +++ b/tests/rpgle/CBKPCFGDCL.rpgle @@ -0,0 +1,17 @@ + D wCfgBBSNAM S 30A + D wCfgLOCCRY S 2A + D wCfgLOCCTY S 40A + D wCfgTIMZON S 3A + D wCfgCLOSED S 1A + D wCfgNUSLVL S 2P 0 + D wCfgNUSSVY S 4A + D wCfgSHWALD S 1A + D wCfgSHWWEL S 1A + D wCfgNUSRNF S 10A + D wCfgHIDESO S 1A + D wCfgHLSOMS S 1A + D wCfgSurvey DS + D wCfgSvyRName 1A + D wCfgSvyGendr 1A + D wCfgSvyLocat 1A + D wCfgSvyEmail 1A diff --git a/tests/rpgle/CBKPCFGREA.rpgle b/tests/rpgle/CBKPCFGREA.rpgle new file mode 100644 index 00000000..998d624c --- /dev/null +++ b/tests/rpgle/CBKPCFGREA.rpgle @@ -0,0 +1,76 @@ + ********************************************************************** + * Read configuration values from PCONFIG into variables + * Uses *IN81 for CHAIN Not Found + * This CopyBook needs: to be used with CBKPCFGDCL + * FPCONFIG IF E K DISK + ********************************************************************** + C GetConfig BEGSR + * Get BBS Name + C EVAL wCfgKey = 'BBSNAM' + C wCfgKey CHAIN PCONFIG 81 + C 81 GOTO ENDOFSR + C EVAL wCfgBBSNAM = CNFVAL + * Get BBS Location Country Code + C EVAL wCfgKey = 'LOCCRY' + C wCfgKey CHAIN PCONFIG 81 + C 81 GOTO ENDOFSR + C EVAL wCfgLOCCRY = CNFVAL + * Get BBS Location City + C EVAL wCfgKey = 'LOCCTY' + C wCfgKey CHAIN PCONFIG 81 + C 81 GOTO ENDOFSR + C EVAL wCfgLOCCTY = CNFVAL + * Get BBS Time Zone + C EVAL wCfgKey = 'TIMZON' + C wCfgKey CHAIN PCONFIG 81 + C 81 GOTO ENDOFSR + C EVAL wCfgTIMZON = CNFVAL + * Get BBS closed to new users? 
+ C EVAL wCfgKey = 'CLOSED' + C wCfgKey CHAIN PCONFIG 81 + C 81 GOTO ENDOFSR + C EVAL wCfgCLOSED = CNFVAL + * Get BBS New User Level + C EVAL wCfgKey = 'NUSLVL' + C wCfgKey CHAIN PCONFIG 81 + C 81 GOTO ENDOFSR + C MOVEL CNFVAL wCfgNUSLVL + * Get BBS New User enabled survey questions + C EVAL wCfgKey = 'NUSSVY' + C wCfgKey CHAIN PCONFIG 81 + C 81 GOTO ENDOFSR + C EVAL wCfgNUSSVY = CNFVAL + * Get Show Access Level Description? + C EVAL wCfgKey = 'SHWALD' + C wCfgKey CHAIN PCONFIG 81 + C 81 GOTO ENDOFSR + C EVAL wCfgSHWALD = CNFVAL + C OUT wDTAARA + C UNLOCK wDTAARA + * Get Show Welcome screen + C EVAL wCfgKey = 'SHWWEL' + C wCfgKey CHAIN PCONFIG 81 + C 81 GOTO ENDOFSR + C EVAL wCfgSHWWEL = CNFVAL + * Get New User default Survey questions + C EVAL wCfgKey = 'NUSSVY' + C wCfgKey CHAIN PCONFIG 81 + C 81 GOTO ENDOFSR + C EVAL wCfgSurvey = CNFVAL + * Get New User Registration notify + C EVAL wCfgKey = 'NUSRNF' + C wCfgKey CHAIN PCONFIG 81 + C 81 GOTO ENDOFSR + C EVAL wCfgNUSRNF = CNFVAL + * Get Hide SysOp from Users List + C EVAL wCfgKey = 'HIDESO' + C wCfgKey CHAIN PCONFIG 81 + C 81 GOTO ENDOFSR + C EVAL wCfgHIDESO = CNFVAL + * Get Highlight SysOp's messages + C EVAL wCfgKey = 'HLSOMS' + C wCfgKey CHAIN PCONFIG 81 + C 81 GOTO ENDOFSR + C EVAL wCfgHLSOMS = CNFVAL + C ENDOFSR TAG + C ENDSR diff --git a/tests/rpgle/copy5.rpgleinc b/tests/rpgle/copy5.rpgleinc new file mode 100644 index 00000000..8ec9603d --- /dev/null +++ b/tests/rpgle/copy5.rpgleinc @@ -0,0 +1,7 @@ +**FREE + +Dcl-C LENGTH_t 20; + +Dcl-Ds TheStruct Qualified; + SubItem Char(length_t); +End-Ds; \ No newline at end of file diff --git a/tests/sources/BBS400 b/tests/sources/BBS400 new file mode 160000 index 00000000..22761d35 --- /dev/null +++ b/tests/sources/BBS400 @@ -0,0 +1 @@ +Subproject commit 22761d3570c05f97fa3dffc3e41c082a03444093 diff --git a/tests/sources/I_builder b/tests/sources/I_builder new file mode 160000 index 00000000..0c1eeb95 --- /dev/null +++ b/tests/sources/I_builder @@ -0,0 +1 @@ 
+Subproject commit 0c1eeb95e27182fc7643d01f0e6f10bebc844c22 diff --git a/tests/sources/bob-recursive-example b/tests/sources/bob-recursive-example new file mode 160000 index 00000000..9a11c819 --- /dev/null +++ b/tests/sources/bob-recursive-example @@ -0,0 +1 @@ +Subproject commit 9a11c819b443c1c33ba4139b6a9234993e2f337a diff --git a/tests/sources/httpapi b/tests/sources/httpapi new file mode 160000 index 00000000..cf6013ca --- /dev/null +++ b/tests/sources/httpapi @@ -0,0 +1 @@ +Subproject commit cf6013ca0ad37f2ce8ff897ed02f837f0084ce5d diff --git a/tests/sources/ibmi-company_system b/tests/sources/ibmi-company_system new file mode 160000 index 00000000..f1831ab6 --- /dev/null +++ b/tests/sources/ibmi-company_system @@ -0,0 +1 @@ +Subproject commit f1831ab6c9a7bc770849cf6330ed1cb1d79bdbc2 diff --git a/tests/sources/noxDB b/tests/sources/noxDB new file mode 160000 index 00000000..0599b53f --- /dev/null +++ b/tests/sources/noxDB @@ -0,0 +1 @@ +Subproject commit 0599b53fda96819dff810ec4109ff38c5ff0c528 diff --git a/tests/sources/random/hello.test.rpgle b/tests/sources/random/hello.test.rpgle new file mode 100644 index 00000000..1ad69928 --- /dev/null +++ b/tests/sources/random/hello.test.rpgle @@ -0,0 +1,9 @@ +**free + +dcl-s text char(20); + +text = 'Hello, world!'; + +dsply text; + +return; \ No newline at end of file diff --git a/tests/sources/rpgle-repl b/tests/sources/rpgle-repl new file mode 160000 index 00000000..b4240b03 --- /dev/null +++ b/tests/sources/rpgle-repl @@ -0,0 +1 @@ +Subproject commit b4240b0314ce9d90dc59c48858e3809ba0924815 diff --git a/tests/sources/xmlservice b/tests/sources/xmlservice new file mode 160000 index 00000000..84de343d --- /dev/null +++ b/tests/sources/xmlservice @@ -0,0 +1 @@ +Subproject commit 84de343df1dc513d5774f5c1f470ada6701f6488 diff --git a/tests/suite/basics.test.ts b/tests/suite/basics.test.ts index 0193c758..1720922b 100644 --- a/tests/suite/basics.test.ts +++ b/tests/suite/basics.test.ts @@ -248,7 +248,7 @@ 
test('vitestTest10', async () => { ``, `Ctl-Opt DftActGrp(*No);`, ``, - `/copy './tests/rpgle/copy1.rpgle'`, + `/copy './rpgle/copy1.rpgle'`, ``, `Dcl-s MyVariable2 Char(20);`, ``, @@ -280,7 +280,7 @@ test('test10_local_fixedcopy', async () => { ``, `Ctl-Opt DftActGrp(*No);`, ``, - `/copy tests,eof4`, + `/copy eof4`, ``, `Dcl-s MyVariable2 Char(20);`, ``, @@ -313,8 +313,8 @@ test('test11', async () => { ``, `Ctl-Opt DftActGrp(*No);`, ``, - `/copy './tests/rpgle/copy1.rpgle'`, - `/include './tests/rpgle/copy2.rpgle'`, + `/copy './rpgle/copy1.rpgle'`, + `/include './rpgle/copy2.rpgle'`, ``, `Dcl-s MyVariable2 Char(20);`, ``, @@ -344,7 +344,7 @@ test('test12', async () => { ``, `Ctl-Opt DftActGrp(*No);`, ``, - `/copy './tests/rpgle/copy1.rpgle'`, + `/copy './rpgle/copy1.rpgle'`, ``, `Dcl-S globalVar Char(20);`, ``, @@ -404,7 +404,7 @@ test('test13', async () => { ``, `Ctl-Opt DftActGrp(*No);`, ``, - `/copy './tests/rpgle/copy1.rpgle' // Test copy`, + `/copy './rpgle/copy1.rpgle' // Test copy`, ``, `Dcl-S globalVar Char(20);`, ``, @@ -458,31 +458,6 @@ test('test13', async () => { expect(theLocalProc.scope.variables.length).toBe(1); }); -test('indicators1', async () => { - const lines = [ - `**FREE`, - `Dcl-S MyVar char(10);`, - ``, - `*IN10 = *ON;`, - `MyVar = 'Hi';`, - ``, - `DSply Myvar;`, - `*INLR = *IN10;`, - `Return;`, - ].join(`\n`); - - const cache = await parser.getDocs(uri, lines, {withIncludes: true, ignoreCache: true}); - - Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, - }, cache); - - const in10 = cache.find(`IN10`); - - expect(in10.references.length).toBe(2); -}); - - test('subds1', async () => { const lines = [ `**FREE`, @@ -790,9 +765,7 @@ test('issue_195a', async () => { `End-Proc ScomponiStringa;`, ].join(`\n`); - const cache = await parser.getDocs(uri, lines, {withIncludes: true, ignoreCache: true}); - - cache.clearReferences(); + const cache = await parser.getDocs(uri, lines, {withIncludes: true, ignoreCache: true, 
collectReferences: true}); }); test('issue_195b', async () => { @@ -1303,4 +1276,25 @@ test('keywords over multiple lines', async () => { const error = invoice_get_invoice.subItems[4]; expect(error.name).toBe(`error`); expect(error.keyword[`LIKE`]).toBe(`TError`); -}) \ No newline at end of file +}); + +test(`const keyword check`, async () => { + const lines = [ + ``, + ` dcl-c hello 556;`, + ` d act c 'act'`, + ``, + ].join(`\r\n`); + + const cache = await parser.getDocs(uri, lines, {withIncludes: true, ignoreCache: true}); + + expect(cache.constants.length).toBe(2); + + const act = cache.find(`act`); + expect(act.name).toBe(`act`); + expect(act.keyword[`CONST`]).toBe(`'act'`); + + const hello = cache.find(`hello`); + expect(hello.name).toBe(`hello`); + expect(hello.keyword[`CONST`]).toBe(`556`); +}); \ No newline at end of file diff --git a/tests/suite/directives.test.ts b/tests/suite/directives.test.ts index b8817381..6d9c7450 100644 --- a/tests/suite/directives.test.ts +++ b/tests/suite/directives.test.ts @@ -235,7 +235,7 @@ test('eof4', async () => { ``, `Ctl-Opt DftActGrp(*No);`, ``, - `/copy './tests/rpgle/eof4.rpgle'`, + `/copy './rpgle/eof4.rpgle'`, ``, `Dcl-s MyVariable2 Char(20);`, ``, @@ -466,7 +466,7 @@ test('variable_case1', async () => { const lines = [ `**FREE`, `Ctl-Opt DftActGrp(*No);`, - `/copy './tests/rpgle/copy3.rpgle'`, + `/copy './rpgle/copy3.rpgle'`, `Dcl-S MyCustomerName1 like(customername_t);`, `Dcl-S MyCustomerName2 like(CustomerName_t);`, `Dcl-S MyCustomerName3 like(CUSTOMERNAME_t);`, @@ -484,19 +484,19 @@ test('variable_case1', async () => { expect(errors.length).toBe(3); expect(errors[0]).toEqual({ - offset: { position: 92, end: 106 }, + offset: { position: 86, end: 100 }, type: `IncorrectVariableCase`, newValue: `CustomerName_t` }); expect(errors[1]).toEqual({ - offset: { position: 180, end: 194 }, + offset: { position: 174, end: 188 }, type: `IncorrectVariableCase`, newValue: `CustomerName_t` }); expect(errors[2]).toEqual({ - 
offset: { position: 224, end: 238 }, + offset: { position: 218, end: 232 }, type: `IncorrectVariableCase`, newValue: `CustomerName_t` }); @@ -506,7 +506,7 @@ test('variable_case1 commented out', async () => { const lines = [ `**FREE`, `Ctl-Opt DftActGrp(*No);`, - `// /copy './tests/rpgle/copy3.rpgle'`, + `// /copy './rpgle/copy3.rpgle'`, `Dcl-S MyCustomerName1 like(customername_t);`, `Dcl-S MyCustomerName2 like(CustomerName_t);`, `Dcl-S MyCustomerName3 like(CUSTOMERNAME_t);`, @@ -528,9 +528,9 @@ test('uppercase1', async () => { const lines = [ `**FREE`, `Ctl-Opt DftActGrp(*No);`, - `/copy './tests/rpgle/copy1.rpgle'`, - `/Copy './tests/rpgle/copy2.rpgle'`, - `/COPY './tests/rpgle/copy3.rpgle'`, + `/copy './rpgle/copy1.rpgle'`, + `/Copy './rpgle/copy2.rpgle'`, + `/COPY './rpgle/copy3.rpgle'`, `Dcl-S MyCustomerName1 like(CustomerName_t);`, `MyCustomerName1 = 'John Smith';`, `dsply MyCustomerName1;`, @@ -551,7 +551,7 @@ test('uppercase1', async () => { }); expect(errors[1]).toEqual({ - offset: { position: 65, end: 70 }, + offset: { position: 59, end: 64 }, type: `DirectiveCase`, newValue: `/COPY` }); @@ -561,9 +561,9 @@ test('lowercase1', async () => { const lines = [ `**FREE`, `Ctl-Opt DftActGrp(*No);`, - `/copy './tests/rpgle/copy1.rpgle'`, - `/Copy './tests/rpgle/copy2.rpgle'`, - `/COPY './tests/rpgle/copy3.rpgle'`, + `/copy './rpgle/copy1.rpgle'`, + `/Copy './rpgle/copy2.rpgle'`, + `/COPY './rpgle/copy3.rpgle'`, `Dcl-S MyCustomerName1 like(CustomerName_t);`, `MyCustomerName1 = 'John Smith';`, `dsply MyCustomerName1;`, @@ -578,13 +578,13 @@ test('lowercase1', async () => { expect(errors.length).toBe(2); expect(errors[0]).toEqual({ - offset: { position: 65, end: 70 }, + offset: { position: 59, end: 64 }, type: `DirectiveCase`, newValue: `/copy` }); expect(errors[1]).toEqual({ - offset: { position: 99, end: 104 }, + offset: { position: 87, end: 92 }, type: `DirectiveCase`, newValue: `/copy` }); @@ -594,7 +594,7 @@ test('macro defined test 1', async () => { const 
lines = [ `**FREE`, `Ctl-Opt DftActGrp(*No);`, - `/copy './tests/rpgle/copy4.rpgleinc'`, + `/copy './rpgle/copy4.rpgleinc'`, `Dcl-S MyCustomerName1 char(5);`, `MyCustomerName1 = 'John Smith';`, `dsply MyCustomerName1;`, @@ -612,7 +612,7 @@ test('macro defined test 2', async () => { `**FREE`, `Ctl-Opt DftActGrp(*No);`, `/DEFINE QRPGLEH_RPMAR001`, - `/copy './tests/rpgle/copy4.rpgleinc'`, + `/copy './rpgle/copy4.rpgleinc'`, `Dcl-S MyCustomerName1 char(5);`, `MyCustomerName1 = 'John Smith';`, `dsply MyCustomerName1;`, @@ -630,7 +630,7 @@ test('depth test', async () => { const lines = [ `**FREE`, `Ctl-Opt DftActGrp(*No);`, - `/copy './tests/rpgle/depth1.rpgleinc'`, + `/copy './rpgle/depth1.rpgleinc'`, `Dcl-S MyCustomerName1 char(5);`, `MyCustomerName1 = 'John Smith';`, `dsply MyCustomerName1;`, diff --git a/tests/suite/files.test.ts b/tests/suite/files.test.ts index e260cbce..17ff0dfa 100644 --- a/tests/suite/files.test.ts +++ b/tests/suite/files.test.ts @@ -161,7 +161,7 @@ test('file DS in a copy book', async () => { const lines = [ `**free`, `ctl-opt main(Main);`, - `/copy './tests/rpgle/file1.rpgleinc'`, + `/copy './rpgle/file1.rpgleinc'`, ``, `dcl-proc Main;`, `dcl-pi *n;`, diff --git a/tests/suite/fixed.test.ts b/tests/suite/fixed.test.ts index 5177a634..8ed4e933 100644 --- a/tests/suite/fixed.test.ts +++ b/tests/suite/fixed.test.ts @@ -258,7 +258,7 @@ test('fixed7', async () => { expect(Obj_Next.name).to.equal(`Obj_Next`); expect(Obj_Next.position.line).to.equal(3); expect(Obj_Next.keyword[`EXPORT`]).to.equal(true); - expect(Obj_Next.keyword[`LIKEDS`]).to.equal(`OBJECTDS`); + expect(Obj_Next.keyword[`LIKEDS`]).to.equal(`ObjectDs`); expect(Obj_Next.subItems.length).to.equal(0); }); @@ -324,7 +324,7 @@ test('fixed9', async () => { const lines = [ ``, ` // -----------------------`, - ` /copy './tests/rpgle/copy1.rpgle'`, + ` /copy './rpgle/copy1.rpgle'`, ` // -----------------------`, ``, ` P Obj_Next B Export`, @@ -350,7 +350,7 @@ test('fixed9', async () => { 
expect(Obj_Next.name).to.equal(`Obj_Next`); expect(Obj_Next.position.line).to.equal(5); expect(Obj_Next.keyword[`EXPORT`]).to.equal(true); - expect(Obj_Next.keyword[`LIKEDS`]).to.equal(`OBJECTDS`); + expect(Obj_Next.keyword[`LIKEDS`]).to.equal(`ObjectDs`); expect(Obj_Next.subItems.length).to.equal(0); const theExtProcedure = cache.find(`theExtProcedure`); @@ -364,8 +364,8 @@ test('fixed9_2', async () => { const lines = [ ``, ` // -----------------------`, - ` d/copy './tests/rpgle/copy1.rpgle'`, - ` */copy './tests/rpgle/copy2.rpgle'`, + ` d/copy './rpgle/copy1.rpgle'`, + ` */copy './rpgle/copy2.rpgle'`, ` // -----------------------`, ` P Obj_Next B Export`, ` D Obj_Next PI LikeDS(ObjectDs)`, @@ -390,7 +390,7 @@ test('fixed9_2', async () => { expect(Obj_Next.name).to.equal(`Obj_Next`); expect(Obj_Next.position.line).to.equal(5); expect(Obj_Next.keyword[`EXPORT`]).to.equal(true); - expect(Obj_Next.keyword[`LIKEDS`]).to.equal(`OBJECTDS`); + expect(Obj_Next.keyword[`LIKEDS`]).to.equal(`ObjectDs`); expect(Obj_Next.subItems.length).to.equal(0); const theExtProcedure = cache.find(`theExtProcedure`); @@ -404,7 +404,7 @@ test('fixed9_3', async () => { const lines = [ ``, ` Ctl-Opt DftActGrp(*No);`, - ` /copy tests,eof4 Call plist update program ESF`, + ` /copy eof4 Call plist update program ESF`, ` *COPY EQCPYLESRC,PLUPT_SB Call plist update program ESF`, ``, ` Dcl-s MyVariable2 Char(20);`, @@ -1145,4 +1145,70 @@ test('plist_test', async () => { const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true }); expect(cache.variables.length).to.equal(0); -}); \ No newline at end of file +}); + +test(`range test 2`, async () => { + const lines = [ + ` D TYPEMST_F Ds LikeDs(TYPEMST_T)`, + ` D*‚ -------------------------------------------------------------------`, + ` D*‚ Service Program Procedures`, + ` D*‚ -------------------------------------------------------------------`, + ` D $Validate_TYPEMST...`, + ` D Pr n`, + ` D $i_Action 4 Const`, + ` D 
$i_Pointer * Const`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true }); + + const TYPEMST_F = cache.find(`TYPEMST_F`); + expect(TYPEMST_F.range).to.deep.equal({ + start: 0, + end: 0 + }); +}); + +test(`test document build up`, async () => { + const lines = [ + ` H*****************************************************************`, + ` D*`, + ` DRESSTR DS INZ`, + ` D VARNAM 1 10 INZ('VAR018 ')`, + ` D PF 11 11 INZ('F')`, + ` D ERRMSG 12 90`, + ` D EXP 12 16 INZ('EXP: ')`, + ` D EXPCOD 17 21 INZ`, + ` D RCV 22 27 INZ(' RCV:')`, + ` D RECODE 28 32`, + ` D TNAME 92 101 INZ('SQRPGNRUN ')`, + ` D LIB 102 111 INZ('SQTEST ')`, + ` D FILE 112 121 INZ('RPGNRSLTS ')`, + ` D LIBLEN 122 125B 0 INZ(8)`, + ` D FILLEN 126 129B 0 INZ(10)`, + ` D*`, + ` D ACTSQL S 4 0`, + ` D CMPCOD S 4 0`, + ` D*`, + ` D DATHV S 10D DATFMT(*ISO-) INZ(D'2025-12-10')`, + ` D CHKDAT S 10D DATFMT(*ISO-) INZ(D'2025-12-02')`, + ` D*`, + ` C/EXEC SQL`, + ` C+ WHENEVER SQLERROR CONTINUE`, + ` C/END-EXEC`, + ` C*`, + ` C*****************************************************************`, + ` C*`, + ` C*****************************************************************`, + ` C*`, + ` C MOVEL 'VAR018' VARNAM`, + ` C Z-ADD -180 CMPCOD`, + ` C*`, + ].join(`\n`); + + let document = ``; + + for (let c of lines.split(``)) { + document += c; + await parser.getDocs(uri, document, { ignoreCache: true, withIncludes: true, collectReferences: true }); + } +}) \ No newline at end of file diff --git a/tests/suite/linter.test.ts b/tests/suite/linter.test.ts index 1ffd1ec8..8ef32cf4 100644 --- a/tests/suite/linter.test.ts +++ b/tests/suite/linter.test.ts @@ -1438,7 +1438,7 @@ test('linter19', async () => { `End-Proc;`, ].join(`\n`); - const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true}); + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); const { errors } = 
Linter.getErrors({ uri, content: lines }, { NoUnreferenced: true }, cache); @@ -1531,7 +1531,7 @@ test('linter21', async () => { `End-Proc;`, ].join(`\n`); - const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true}); + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); const { errors } = Linter.getErrors({ uri, content: lines }, { NoUnreferenced: true }, cache); @@ -1611,7 +1611,7 @@ test("linter23", async () => { `End-Proc;` ].join(`\n`); - const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true}); + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); const { errors } = Linter.getErrors({ uri, content: lines }, { NoUnreferenced: true }, cache); @@ -1773,7 +1773,7 @@ test("linter26", async () => { `Return;`, ].join(`\n`); - const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true}); + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); const { errors } = Linter.getErrors({ uri, content: lines }, { NoUnreferenced: true }, cache); @@ -1840,7 +1840,7 @@ test("linter29", async () => { ``, `Ctl-Opt DftActGrp(*No);`, ``, - `/copy './tests/rpgle/copy1.rpgle'`, + `/copy './rpgle/copy1.rpgle'`, ``, `Dcl-s MyVariable2 Char(20);`, ``, @@ -1951,7 +1951,7 @@ test("linter31_b", async () => { const { errors } = Linter.getErrors({ uri, content: lines, - availableIncludes: [`tests/rpgle/copy1.rpgle`] + availableIncludes: [`rpgle/copy1.rpgle`] }, { IncludeMustBeRelative: true, }, cache); @@ -1964,7 +1964,7 @@ test("linter31_b", async () => { expect(errors[0]).toEqual({ offset: { position: 39, end: 50 }, type: `IncludeMustBeRelative`, - newValue: `'tests/rpgle/copy1.rpgle'` + newValue: `'rpgle/copy1.rpgle'` }); }); @@ -1974,7 +1974,7 @@ test("linter32", async () => { ``, `Ctl-Opt DftActGrp(*No);`, ``, - `/copy 
'tests/rpgle/copy1.rpgle'`, + `/copy 'rpgle/copy1.rpgle'`, ``, `Dcl-s MyVariable2 Char(20);`, ``, @@ -2017,7 +2017,7 @@ test("linter32_b", async () => { const { errors } = Linter.getErrors({ uri, content: lines, - availableIncludes: [`tests/rpgle/copy1.rpgle`] + availableIncludes: [`rpgle/copy1.rpgle`] }, { IncludeMustBeRelative: true }, cache); @@ -2030,7 +2030,7 @@ test("linter32_b", async () => { expect(errors[0]).toEqual({ offset: { position: 39, end: 52 }, type: `IncludeMustBeRelative`, - newValue: `'tests/rpgle/copy1.rpgle'` + newValue: `'rpgle/copy1.rpgle'` }); }); @@ -2040,7 +2040,7 @@ test("linter33", async () => { ``, `Ctl-Opt DftActGrp(*No);`, ``, - `/copy '/tests/rpgle/copy1.rpgle'`, + `/copy '/rpgle/copy1.rpgle'`, ``, `Dcl-s MyVariable2 Char(20);`, ``, @@ -2062,7 +2062,7 @@ test("linter33", async () => { expect(errors.length).toBe(1); expect(errors[0]).toEqual({ - offset: { position: 39, end: 65 }, type: `IncludeMustBeRelative` + offset: { position: 39, end: 59 }, type: `IncludeMustBeRelative` }); }); @@ -2163,7 +2163,7 @@ test("linter37", async () => { ``, `Ctl-Opt DftActGrp(*No);`, ``, - `/copy 'tests/rpgle/copy1.rpgle'`, + `/copy 'rpgle/copy1.rpgle'`, ``, `Dcl-s MyVariable2 Char(20);`, ``, @@ -2186,188 +2186,11 @@ test("linter37", async () => { expect(errors.length).toBe(1); expect(errors[0]).toEqual({ - offset: { position: 129, end: 135 }, + offset: { position: 123, end: 129 }, type: `UselessOperationCheck` }); }); -test("linter38_subrefs", async () => { - const lines = [ - `**free`, - ``, - `dcl-s localVarYes Char(1);`, - `Dcl-s localVarForProc Int(20);`, - `dcl-s localVarNo Ind;`, - ``, - `dcl-ds structYes;`, - ` subfa varchar(12);`, - `End-ds;`, - ``, - `dcl-ds structNo;`, - ` subfb packed(12);`, - `End-ds;`, - ``, - `Dcl-ds structYesAlso;`, - ` subfc char(20);`, - `End-Ds;`, - ``, - `dcl-ds qualStructYes Qualified;`, - ` qualsubA zoned(5);`, - `end-ds;`, - ``, - `dcl-ds qualStructNo Qualified;`, - ` qualsubA zoned(5);`, - `end-ds;`, - ``, - 
`dcl-ds qualDimStructYup Qualified Dim(2);`, - ` boopABC zoned(5);`, - `end-ds;`, - ``, - `localVarYes = 'Y';`, - `procYes();`, - ``, - `subfa = 'Yes!';`, - `structYesAlso = 'Really yes';`, - ``, - `qualStructYes.qualsubA = 5;`, - ``, - `qualDimStructYup(1).boopabc = 5;`, - `qualDimStructYup(localVarForProc).boopAbc = 5;`, - `qualDimStructYup(localVarForProc - 1).boopABC = 5;`, - ``, - `return;`, - ``, - `Dcl-Proc procYes;`, - ` dcl-s reallyLocalYes bindec(9);`, - ` dcl-s reallyLocalNo Char(1);`, - ``, - ` dcl-ds localStructYes;`, - ` subfd char(12);`, - ` end-ds;`, - ``, - ` dcl-ds localStructAlsoYes;`, - ` subfe char(12);`, - ` end-ds;`, - ``, - ` dcl-ds localStructNo;`, - ` subfg char(12);`, - ` end-ds;`, - ``, - ` dcl-ds localQualStructYes Qualified;`, - ` qualsubA zoned(5);`, - ` end-ds;`, - ``, - ` dcl-ds localQualStructNo Qualified;`, - ` qualsubA zoned(5);`, - ` end-ds;`, - ``, - ` reallyLocalYes = 1;`, - ` localStructYes = 'Helloworld';`, - ` subfe = 'Otherworld';`, - ` localQualStructYes.qualsubA = 55;`, - ``, - ` localVarForProc = 12398;`, - `End-Proc;`, - ``, - `Dcl-Proc procNo;`, - ` localVarForProc = 1190348;`, - `End-Proc;`, - ].join(`\n`); - - const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true}); - Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, - }, cache); - - const subfa = cache.find(`subfa`); - expect(subfa.references.length).toBe(2); - expect(subfa.references[1]).toEqual({ - offset: { position: 469, end: 474 } - }); - - const structYesAlso = cache.find(`structYesAlso`); - expect(structYesAlso.references.length).toBe(2); - expect(structYesAlso.references[1]).toEqual({ - offset: { position: 485, end: 498 } - }); - - const subfc = structYesAlso.subItems[0]; - expect(subfc.name).toBe(`subfc`); - expect(subfc.references.length).toBe(1); - - const qualStructYes = cache.find(`qualStructYes`); - expect(qualStructYes.references.length).toBe(2); - expect(qualStructYes.references[1]).toEqual({ - 
offset: { position: 516, end: 529 } - }); - - const qualsubA = qualStructYes.subItems[0]; - expect(qualsubA.name).toBe(`qualsubA`); - expect(qualsubA.references.length).toBe(2); - - expect(qualsubA.references[0]).toEqual({ - offset: { position: 274, end: 282 } - }); - - expect(qualsubA.references[1]).toEqual({ - offset: { position: 530, end: 538 } - }); - - const procYes = cache.find(`procYes`); - const subProc = procYes.scope; - - const localStructYes = subProc.find(`localStructYes`); - expect(localStructYes.references.length).toBe(2); - expect(localStructYes.references[1]).toEqual({ - offset: { position: 1158, end: 1172 } - }); - - const localStructAlsoYes = subProc.find(`localStructAlsoYes`); - expect(localStructAlsoYes.references.length).toBe(1); - - const subfe = localStructAlsoYes.subItems[0]; - expect(subfe.name).toBe(`subfe`); - expect(subfe.references.length).toBe(2); - expect(subfe.references[1]).toEqual({ - offset: { position: 1193, end: 1198 } - }); - - const qualDimStructYup = cache.find(`qualDimStructYup`); - expect(qualDimStructYup.references.length).toBe(4) - - expect(qualDimStructYup.references[1]).toEqual({ - offset: { position: 545, end: 561 } - }); - - expect(qualDimStructYup.references[2]).toEqual({ - offset: { position: 578, end: 594 } - }); - - expect(qualDimStructYup.references[3]).toEqual({ - offset: { position: 625, end: 641 } - }); - - const boopABC = qualDimStructYup.subItems[0]; - expect(boopABC.name).toBe(`boopABC`); - expect(boopABC.references.length).toBe(4); - - expect(boopABC.references[0]).toEqual({ - offset: { position: 411, end: 418 } - }); - - expect(boopABC.references[1]).toEqual({ - offset: { position: 565, end: 572 } - }); - - expect(boopABC.references[2]).toEqual({ - offset: { position: 612, end: 619 } - }); - - expect(boopABC.references[3]).toEqual({ - offset: { position: 663, end: 670 } - }); -}); - test("linter39", async () => { const lines = [ `**FREE`, @@ -2426,51 +2249,6 @@ test("linter40", async () => { 
expect(errors.length).toEqual(0); }); -test("linter40_return", async () => { - const lines = [ - `**free`, - `Dcl-Proc InputIsValid;`, - ` Dcl-PI InputIsValid likeds(validationResult);`, - ` comp Char(1);`, - ` End-PI;`, - ``, - ` Dcl-S isValid Ind inz(*on);`, - ` Dcl-S isFound Ind inz(*on);`, - ``, - ` Dcl-DS validationResult Qualified;`, - ` isValid Ind inz(*on);`, - ` errorField Char(20) inz(*blanks);`, - ` errorMessage Char(100) inz(*blanks);`, - ` End-DS;`, - ``, - ` // Validate company value`, - ` isFound = company_getrecord(comp);`, - ` if (isFound = *off);`, - ` validationResult.isValid = *off;`, - ` validationResult.errorField = 'comp';`, - ` validationResult.errorMessage = 'Company value inva lid';`, - ``, - ` return validationResult;`, - ` endif;`, - ``, - ` // Validate other input parameters...`, - ``, - ` return validationResult;`, - ``, - `End-Proc;`, - ].join(`\n`); - - const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true}); - Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, - }, cache); - - const procedure = cache.find(`InputIsValid`); - const validationResult = procedure.scope.find(`validationResult`); - - expect(validationResult.references.length).toEqual(7); -}); - test("linter41", async () => { const lines = [ `**FREE`, @@ -2661,7 +2439,7 @@ test("issue_170", async () => { `Endsr;`, ].join(`\n`); - const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true}); + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); const { errors } = Linter.getErrors({ uri, content: lines }, { NoUnreferenced: true }, cache); @@ -2684,7 +2462,7 @@ test("issue_170a", async () => { ].join(`\n`); - const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true}); + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); const { errors } = 
Linter.getErrors({ uri, content: lines }, { NoUnreferenced: true }, cache); @@ -2713,9 +2491,8 @@ test("linter40_keywordrefs", async () => { `Dcl-s somevar Int(10) inz(randomLen);`, ].join(`\n`); - const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true}); + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); const { errors } = Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, IncorrectVariableCase: true }, cache); @@ -2723,7 +2500,8 @@ test("linter40_keywordrefs", async () => { expect(RANDOMLEN.references.length).toBe(2); expect(RANDOMLEN.references[1]).toEqual({ - offset: { position: 64, end: 73 } + offset: { position: 64, end: 73 }, + uri: uri, }); expect(errors.length).toBe(1); @@ -2745,9 +2523,8 @@ test("linter_casing_on_error_not_a_variable", async () => { `endmon;`, ].join(`\n`); - const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true}); + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); const { errors } = Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, IncorrectVariableCase: true }, cache); @@ -2788,7 +2565,7 @@ test("issue_175", async () => { `End-Proc;`, ].join(`\n`); - const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true}); + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); const { errors } = Linter.getErrors({ uri, content: lines }, { NoUnreferenced: true }, cache); @@ -2828,10 +2605,9 @@ test("issue180", async () => { `Endsr;`, ].join(`\n`); - const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true}); + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, }, cache); 
}); @@ -3170,43 +2946,6 @@ test('on_excp_2', async () => { }); }); -test('range_1', async () => { - const lines = [ - `**free`, - `ctl-opt debug option(*nodebugio: *srcstmt) dftactgrp(*no) actgrp(*caller)`, - `main(Main);`, - `dcl-s x timestamp;`, - `dcl-s y timestamp;`, - `dcl-proc Main;`, - ` dsply %CHAR(CalcDiscount(10000));`, - ` dsply %char(CalcDiscount(1000));`, - ` x = %TIMESTAMP(y);`, - ` y = %TimeStamp(x);`, - ` return;`, - `end-proc;`, - ].join(`\n`); - - const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true}); - Linter.getErrors({ uri, content: lines }, { - CollectReferences: true - }, cache); - - const rangeRefs = cache.referencesInRange({position: 220, end: 260}); - expect(rangeRefs.length).toBe(2); - expect(rangeRefs[0].dec.name).toBe(`x`); - expect(rangeRefs[1].dec.name).toBe(`y`); - - expect(rangeRefs[0].refs).toEqual([ - { position: 220, end: 221 }, - { position: 256, end: 257 } - ]); - - expect(rangeRefs[1].refs).toEqual([ - { position: 235, end: 236 }, - { position: 241, end: 242 } - ]); -}); - test('sqlRunner1_1', async () => { const lines = [ `**free`, @@ -3316,7 +3055,7 @@ test('linter with non-free copybook', async () => { `Ctl-opt Bnddir('PCRPROCS');`, `Ctl-opt ExtBinInt(*Yes);`, ` `, - `/copy './tests/rpgle/fixed1.rpgleinc'`, + `/copy './rpgle/fixed1.rpgleinc'`, ` `, `Dcl-Proc Engage_Usage_Report;`, ` `, @@ -3339,7 +3078,7 @@ test('linter with non-free copybook', async () => { `End-Proc Engage_Usage_Report;`, ].join(`\n`); - const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true }); + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); expect(cache.includes.length).toBe(1); @@ -3413,7 +3152,7 @@ test('Linter running on rpgleinc', async () => { `Dcl-S CustomerName_t varchar(40) template;`, ].join(`\n`); - const cache = await parser.getDocs(includeUri, lines, { ignoreCache: true, withIncludes: true }); + const cache 
= await parser.getDocs(includeUri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); const { errors } = Linter.getErrors({ uri: includeUri, content: lines }, { IncorrectVariableCase: true, NoUnreferenced: true, @@ -3434,7 +3173,7 @@ test('Linter running on member rpgleinc', async () => { `Dcl-S CustomerName_t varchar(40) template;`, ].join(`\n`); - const cache = await parser.getDocs(memberIncludeUri, lines, { ignoreCache: true, withIncludes: true }); + const cache = await parser.getDocs(memberIncludeUri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); const { errors } = Linter.getErrors({ uri: memberIncludeUri, content: lines }, { IncorrectVariableCase: true, NoUnreferenced: true, diff --git a/tests/suite/partial.test.ts b/tests/suite/partial.test.ts new file mode 100644 index 00000000..1190e26c --- /dev/null +++ b/tests/suite/partial.test.ts @@ -0,0 +1,58 @@ +import setupParser, { getFileContent, getSourcesList, getTestProjectsDir } from "../parserSetup"; +import { test, expect, describe } from "vitest"; +import path from "path"; + +const timeout = 1000 * 60 * 20; // 20 minutes +const parser = setupParser(); + +// The purpose of this file is to test the parser against all the sources in the sources directory to ensure it doesn't crash. + +test("Parser partial tests", { timeout }, async () => { + const projects = getTestProjectsDir(); + + for (const projectPath of projects) { + const parser = setupParser(projectPath); + const list = await getSourcesList(projectPath); + + for (let i = 0; i < list.length; i++) { + const relativePath = list[i]; + const basename = path.basename(relativePath); + + const rs = performance.now(); + const baseContent = await getFileContent(relativePath); + const re = performance.now(); + + // These are typing tests. Can the parser accept half documents without crashing? 
+ + let content = ``; + + let baseContentSplitUpIntoPieces = []; + + const pieceSize = Math.ceil(baseContent.length / 10); + for (let i = 0; i < baseContent.length; i += pieceSize) { + baseContentSplitUpIntoPieces.push(baseContent.substring(i, i + pieceSize)); + } + + // console.log(`Testing ${basename} (${i}/${list.length})...`); + + let lengths: number[] = []; + for (let i = 0; i < baseContentSplitUpIntoPieces.length; i++) { + content += baseContentSplitUpIntoPieces[i]; + + const ps = performance.now(); + const doc = await parser.getDocs(basename, content, { collectReferences: true, ignoreCache: true, withIncludes: false }); + const pe = performance.now(); + + // console.log(`\tParsed ${i+1}/${baseContentSplitUpIntoPieces.length} (${content.length}) in ${pe - ps}ms. Got ${doc.getNames().length} names.`); + + lengths.push(pe - ps); + } + + const lengthsAverage = lengths.reduce((a, b) => a + b, 0) / lengths.length; + const total = lengths.reduce((a, b) => a + b, 0); + const last = lengths[lengths.length - 1]; + // console.log(`\tAverage: ${lengthsAverage}ms, Full: ${last}ms, Total: ${total}`); + // console.log(``); + } + } +}); \ No newline at end of file diff --git a/tests/suite/references.test.ts b/tests/suite/references.test.ts index 6c452c65..be868b0a 100644 --- a/tests/suite/references.test.ts +++ b/tests/suite/references.test.ts @@ -1,13 +1,13 @@ -import setupParser from "../parserSetup"; -import Linter from "../../language/linter"; +import setupParser, { getFileContent } from "../parserSetup"; import Cache from "../../language/models/cache"; import { test, expect } from "vitest"; +import { readFile } from "fs/promises"; const parser = setupParser(); const uri = `source.rpgle`; -const lines = [ +const bigLines = [ `**free`, ``, `dcl-c FALSE '0';`, @@ -57,125 +57,1773 @@ const lines = [ ].join(`\n`); test("references_1_const", async () => { - const cache = await parser.getDocs(uri, lines, {ignoreCache: true}); + const cache = await parser.getDocs(uri, 
bigLines, {ignoreCache: true, collectReferences: true}); - Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, - }, cache); + const falseConstIndex = bigLines.indexOf(`dcl-c FALSE`) + 7; - const falseConstIndex = lines.indexOf(`dcl-c FALSE`) + 7; - - const falseConst = Cache.referenceByOffset(cache, falseConstIndex); + const falseConst = Cache.referenceByOffset(uri, cache, falseConstIndex); expect(falseConst.name).toBe(`FALSE`); expect(falseConst.references.length).not.toBe(0); }); test("references_2_const", async () => { - const cache = await parser.getDocs(uri, lines, {ignoreCache: true}); - - Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, - }, cache); + const cache = await parser.getDocs(uri, bigLines, {ignoreCache: true, collectReferences: true}); - const trueConstIndex = lines.indexOf(`var1 = TRUE`) + 7; + const trueConstIndex = bigLines.indexOf(`var1 = TRUE`) + 7; - const trueConst = Cache.referenceByOffset(cache, trueConstIndex); + const trueConst = Cache.referenceByOffset(uri, cache, trueConstIndex); expect(trueConst.name).toBe(`TRUE`); expect(trueConst.references.length).toBe(2); }); test("references_3_enum", async () => { - const cache = await parser.getDocs(uri, lines, {ignoreCache: true}); - - Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, - }, cache); + const cache = await parser.getDocs(uri, bigLines, {ignoreCache: true, collectReferences: true}); - const colorsConstIndex = lines.indexOf(`var1 = COLORS`) + 7; + const colorsConstIndex = bigLines.indexOf(`var1 = COLORS`) + 7; - const colorsConst = Cache.referenceByOffset(cache, colorsConstIndex); + const colorsConst = Cache.referenceByOffset(uri, cache, colorsConstIndex); expect(colorsConst.name).toBe(`COLORS`); expect(colorsConst.references.length).toBe(2); }); test("references_4_subfield_a", async () => { - const cache = await parser.getDocs(uri, lines, {ignoreCache: true}); + const cache = await parser.getDocs(uri, bigLines, 
{ignoreCache: true, collectReferences: true}); - Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, - }, cache); + const greenSubfieldIndex = bigLines.indexOf(`var1 = COLORS.GREEN`) + 17; - const greenSubfieldIndex = lines.indexOf(`var1 = COLORS.GREEN`) + 17; + const greenConst = Cache.referenceByOffset(uri, cache, greenSubfieldIndex); - const greenConst = Cache.referenceByOffset(cache, greenSubfieldIndex); - expect(greenConst.name).toBe(`GREEN`); expect(greenConst.references.length).toBe(2); }); test("references_4_subfield_b", async () => { - const cache = await parser.getDocs(uri, lines, {ignoreCache: true}); - - Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, - }, cache); + const cache = await parser.getDocs(uri, bigLines, {ignoreCache: true, collectReferences: true}); - const greenSubfieldIndex = lines.indexOf(` GREEN 1`) + 3; + const greenSubfieldIndex = bigLines.indexOf(` GREEN 1`) + 3; - const greenConst = Cache.referenceByOffset(cache, greenSubfieldIndex); + const greenConst = Cache.referenceByOffset(uri, cache, greenSubfieldIndex); expect(greenConst.name).toBe(`GREEN`); expect(greenConst.references.length).toBe(2); - const refSubfieldIndex = lines.indexOf(` RED 2`) + 3; - const redConst = Cache.referenceByOffset(cache, refSubfieldIndex); + + const colours = cache.find(`COLORS`); + const red = colours.subItems.find(sub => sub.name === `RED`); + + + + const refSubfieldIndex = bigLines.indexOf(` RED 2`) + 3; + const redConst = Cache.referenceByOffset(uri, cache, refSubfieldIndex); + expect(redConst.name).toBe(`RED`); expect(redConst.references.length).toBe(1); }); test("references_5", async () => { - const cache = await parser.getDocs(uri, lines, {ignoreCache: true}); + const cache = await parser.getDocs(uri, bigLines, {ignoreCache: true, collectReferences: true}); - Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, - }, cache); + const var1Index = bigLines.indexOf(`var1 = TRUE`); - const 
var1Index = lines.indexOf(`var1 = TRUE`); - - const var1Var = Cache.referenceByOffset(cache, var1Index); + const var1Var = Cache.referenceByOffset(uri, cache, var1Index); expect(var1Var.name).toBe(`var1`); expect(var1Var.references.length).toBe(5); }); test("references_6_subfield_dim", async () => { - const cache = await parser.getDocs(uri, lines, {ignoreCache: true}); - - Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, - }, cache); + const cache = await parser.getDocs(uri, bigLines, {ignoreCache: true, collectReferences: true}); - const baseIndex = lines.indexOf(`var4 = varColors(1).red`); + const baseIndex = bigLines.indexOf(`var4 = varColors(1).red`); const varColorsIndex = baseIndex + 9; const redSubfieldIndex = baseIndex + 22; - const varColors = Cache.referenceByOffset(cache, varColorsIndex); + const varColors = Cache.referenceByOffset(uri, cache, varColorsIndex); expect(varColors.name).toBe(`varColors`); + expect(varColors.references.length).toBe(2); - const redSubfield = Cache.referenceByOffset(cache, redSubfieldIndex); + const redSubfield = Cache.referenceByOffset(uri, cache, redSubfieldIndex); expect(redSubfield.name).toBe(`red`); expect(redSubfield.references.length).toBe(2); }); test("references_7", async () => { - const cache = await parser.getDocs(uri, lines, {ignoreCache: true}); - - Linter.getErrors({ uri, content: lines }, { - CollectReferences: true, - }, cache); + const cache = await parser.getDocs(uri, bigLines, {ignoreCache: true, collectReferences: true}); - const declareAbcIndex = lines.indexOf(`dcl-proc abc`) + 10; + const declareAbcIndex = bigLines.indexOf(`dcl-proc abc`) + 10; - const varColors = Cache.referenceByOffset(cache, declareAbcIndex); + const varColors = Cache.referenceByOffset(uri, cache, declareAbcIndex); expect(varColors.name).toEqual(`abc`); expect(varColors.references.length).toEqual(1); }); + +test("references_8", async () => { + const lines = [ + `**free`, + ``, + `dcl-s localVarYes Char(1);`, + 
`Dcl-s localVarForProc Int(20);`, + `dcl-s localVarNo Ind;`, + ``, + `dcl-ds structYes;`, + ` subfa varchar(12);`, + `End-ds;`, + ``, + `dcl-ds structNo;`, + ` subfb packed(12);`, + `End-ds;`, + ``, + `Dcl-ds structYesAlso;`, + ` subfc char(20);`, + `End-Ds;`, + ``, + `dcl-ds qualStructYes Qualified;`, + ` qualsubA zoned(5);`, + `end-ds;`, + ``, + `dcl-ds qualStructNo Qualified;`, + ` qualsubA zoned(5);`, + `end-ds;`, + ``, + `dcl-ds qualDimStructYup Qualified Dim(2);`, + ` boopABC zoned(5);`, + `end-ds;`, + ``, + `localVarYes = 'Y';`, + `procYes();`, + ``, + `subfa = 'Yes!';`, + `structYesAlso = 'Really yes';`, + ``, + `qualStructYes.qualsubA = 5;`, + ``, + `qualDimStructYup(1).boopabc = 5;`, + `qualDimStructYup(localVarForProc).boopAbc = 5;`, + `qualDimStructYup(localVarForProc - 1).boopABC = 5;`, + ``, + `return;`, + ``, + `Dcl-Proc procYes;`, + ` dcl-s reallyLocalYes bindec(9);`, + ` dcl-s reallyLocalNo Char(1);`, + ``, + ` dcl-ds localStructYes;`, + ` subfd char(12);`, + ` end-ds;`, + ``, + ` dcl-ds localStructAlsoYes;`, + ` subfe char(12);`, + ` end-ds;`, + ``, + ` dcl-ds localStructNo;`, + ` subfg char(12);`, + ` end-ds;`, + ``, + ` dcl-ds localQualStructYes Qualified;`, + ` qualsubA zoned(5);`, + ` end-ds;`, + ``, + ` dcl-ds localQualStructNo Qualified;`, + ` qualsubA zoned(5);`, + ` end-ds;`, + ``, + ` reallyLocalYes = 1;`, + ` localStructYes = 'Helloworld';`, + ` subfe = 'Otherworld';`, + ` localQualStructYes.qualsubA = 55;`, + ``, + ` localVarForProc = 12398;`, + `End-Proc;`, + ``, + `Dcl-Proc procNo;`, + ` localVarForProc = 1190348;`, + `End-Proc;`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); + + const subfa = cache.find(`subfa`); + expect(subfa.references.length).toBe(2); + expect(subfa.references[1]).toEqual({ + offset: { position: 469, end: 474 }, + uri: uri + }); + + const structYesAlso = cache.find(`structYesAlso`); + 
expect(structYesAlso.references.length).toBe(2); + expect(structYesAlso.references[1]).toEqual({ + offset: { position: 485, end: 498 }, + uri: uri + }); + + const subfc = structYesAlso.subItems[0]; + expect(subfc.name).toBe(`subfc`); + expect(subfc.references.length).toBe(1); + + const qualStructYes = cache.find(`qualStructYes`); + expect(qualStructYes.references.length).toBe(2); + expect(qualStructYes.references[1]).toEqual({ + offset: { position: 516, end: 529 }, + uri: uri + }); + + const qualsubA = qualStructYes.subItems[0]; + expect(qualsubA.name).toBe(`qualsubA`); + expect(qualsubA.references.length).toBe(2); + + expect(qualsubA.references[0]).toEqual({ + offset: { position: 274, end: 282 }, + uri: uri + }); + + expect(qualsubA.references[1]).toEqual({ + offset: { position: 530, end: 538 }, + uri: uri + }); + + const procYes = cache.find(`procYes`); + const subProc = procYes.scope; + + const localStructYes = subProc.find(`localStructYes`); + expect(localStructYes.references.length).toBe(2); + expect(localStructYes.references[1]).toEqual({ + offset: { position: 1158, end: 1172 }, + uri: uri + }); + + const localStructAlsoYes = subProc.find(`localStructAlsoYes`); + expect(localStructAlsoYes.references.length).toBe(1); + + const subfe = localStructAlsoYes.subItems[0]; + expect(subfe.name).toBe(`subfe`); + expect(subfe.references.length).toBe(2); + expect(subfe.references[1]).toEqual({ + offset: { position: 1193, end: 1198 }, + uri: uri + }); + + const qualDimStructYup = cache.find(`qualDimStructYup`); + expect(qualDimStructYup.references.length).toBe(4) + + expect(qualDimStructYup.references[1]).toEqual({ + offset: { position: 545, end: 561 }, + uri: uri + }); + + expect(qualDimStructYup.references[2]).toEqual({ + offset: { position: 578, end: 594 }, + uri: uri + }); + + expect(qualDimStructYup.references[3]).toEqual({ + offset: { position: 625, end: 641 }, + uri: uri + }); + + const boopABC = qualDimStructYup.subItems[0]; + expect(boopABC.name).toBe(`boopABC`); 
+ expect(boopABC.references.length).toBe(4); + + expect(boopABC.references[0]).toEqual({ + offset: { position: 411, end: 418 }, + uri: uri + }); + + expect(boopABC.references[1]).toEqual({ + offset: { position: 565, end: 572 }, + uri: uri + }); + + expect(boopABC.references[2]).toEqual({ + offset: { position: 612, end: 619 }, + uri: uri + }); + + expect(boopABC.references[3]).toEqual({ + offset: { position: 663, end: 670 }, + uri: uri + }); +}); + +test("references_9", async () => { + const lines = [ + `**free`, + `Dcl-Proc InputIsValid;`, + ` Dcl-PI InputIsValid likeds(validationResult);`, + ` comp Char(1);`, + ` End-PI;`, + ``, + ` Dcl-S isValid Ind inz(*on);`, + ` Dcl-S isFound Ind inz(*on);`, + ``, + ` Dcl-DS validationResult Qualified;`, + ` isValid Ind inz(*on);`, + ` errorField Char(20) inz(*blanks);`, + ` errorMessage Char(100) inz(*blanks);`, + ` End-DS;`, + ``, + ` // Validate company value`, + ` isFound = company_getrecord(comp);`, + ` if (isFound = *off);`, + ` validationResult.isValid = *off;`, + ` validationResult.errorField = 'comp';`, + ` validationResult.errorMessage = 'Company value inva lid';`, + ``, + ` return validationResult;`, + ` endif;`, + ``, + ` // Validate other input parameters...`, + ``, + ` return validationResult;`, + ``, + `End-Proc;`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); + + const procedure = cache.find(`InputIsValid`); + const validationResult = procedure.scope.find(`validationResult`); + + expect(validationResult.references.length).toEqual(7); + expect(validationResult.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `validationResult`)).toBe(true); +}); + +test('references_10', async () => { + const lines = [ + `**free`, + `ctl-opt debug option(*nodebugio: *srcstmt) dftactgrp(*no) actgrp(*caller)`, + `main(Main);`, + `dcl-s x timestamp;`, + `dcl-s y timestamp;`, + `dcl-proc Main;`, + ` dsply 
%CHAR(CalcDiscount(10000));`, + ` dsply %char(CalcDiscount(1000));`, + ` x = %TIMESTAMP(y);`, + ` y = %TimeStamp(x);`, + ` return;`, + `end-proc;`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); + + const rangeRefs = cache.referencesInRange(uri, {position: 220, end: 260}); + expect(rangeRefs.length).toBe(2); + expect(rangeRefs[0].dec.name).toBe(`x`); + expect(rangeRefs[1].dec.name).toBe(`y`); + + expect(rangeRefs[0].refs).toEqual([ + { position: 220, end: 221 }, + { position: 256, end: 257 } + ]); + + expect(rangeRefs[1].refs).toEqual([ + { position: 235, end: 236 }, + { position: 241, end: 242 } + ]); +}); + +test("references_11_issue_175", async () => { + const lines = [ + `**FREE`, + ``, + `Dcl-S Field Char(1);`, + ``, + `Field = SubProc('A');`, + ``, + `*INLR = *ON;`, + `Return;`, + ``, + ` ///`, + ` // SubProc`, + ` // Description of SubProc()`, + ` // Description can be multiline`, + ` // @param Parm_A`, + ` // @return Return_1`, + ` ///`, + `Dcl-Proc SubProc;`, + ` Dcl-Pi *N Like( ReturnValue );`, + ` PP_PARM1 Char(1);`, + ` End-Pi;`, + ` Dcl-S ReturnValue Char(1);`, + ` // Your code goes here`, + ` ReturnValue = PP_PARM1;`, + ` ReturnValue= 'Q';`, + ` Return ReturnValue;`, + ` Begsr *PSSR;`, + ` *INLR = *ON;`, + ` Return;`, + ` Endsr;`, + `End-Proc;`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, {ignoreCache: true, withIncludes: true, collectReferences: true}); + + const procedure = cache.find(`SubProc`); + expect(procedure).toBeDefined(); + expect(procedure.references.length).toBe(2); +}); + +test('references_12_fixed_1', async () => { + const lines = [ + ``, + ` FINVMST IF E K DISK`, + ` `, + ` D wkCorp S 10 inz('100')`, + ` D wkInvoice S 15`, + ` `, + ` C eval wkInvoice = 'I035552120'`, + ` `, + ` C eval *inlr = *on`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true 
}); + + expect(cache.variables.length).to.equal(2); + + const wkInvoice = cache.find(`wkInvoice`); + + for (const ref of wkInvoice.references) { + // console.log({ + // ref, + // text: lines.substring(ref.offset.position, ref.offset.end), + // about: lines.substring(ref.offset.position - 10, ref.offset.end + 10) + // }); + expect(lines.substring(ref.offset.position, ref.offset.end)).to.equal(`wkInvoice`); + } + + expect(wkInvoice.references.length).to.equal(2); +}); + + +test('references_13_fixed_2', async () => { + const lines = [ + ` * ********************************************************************/€`, + ` * *`, + ` * Last Amend No. MIDAS2122 Date 26Jul18 Author ABDN198 *`, + ` * Prev Amend No. MIDAS1939 Date 25May18 Author ABDN198 *`, + ` * Prev Amend No. MIDAS841 Date 23Jun17 Author ABDN198 *`, + ` * *`, + ` * ********************************************************************/€`, + ` * *`, + ` * MIDAS2122 - Added $F4_TYPEMST procedure *`, + ` * MIDAS1939 - Added $Close_ procedure *`, + ` * MIDAS841 - Add additional columns *`, + ` * *`, + ` * ********************************************************************/€`, + ` D*‚ -------------------------------------------------------------------`, + ` D*‚ TYPEMSTPF`, + ` D*‚ -------------------------------------------------------------------`, + ` D TYPEMST_T E Ds ExtName(TYPEMSTPF) Qualified Template`, + ` D`, + ` D TYPEMST_P s *`, + ` D/IF DEFINED(TYPEMSTPF)`, + ` D TYPEMST_K E Ds ExtName(TYPEMSTPF: *KEY)`, + ` D Qualified`, + ` D TYPEMST_R E Ds ExtName(TYPEMSTPF)`, + ` D Based(TYPEMST_P)`, + ` D/ELSEIF DEFINED(TYPEMSTPF_PREFIX)`, + ` D TYPEMST_K E Ds ExtName(TYPEMSTPF: *KEY)`, + ` D Prefix('KTM1')`, + ` D TYPEMST_R E Ds ExtName(TYPEMSTPF)`, + ` D Based(TYPEMST_P)`, + ` D Prefix('TM1')`, + ` D/ELSE`, + ` D TYPEMST_K E Ds ExtName(TYPEMSTPF: *KEY)`, + ` D Qualified`, + ` D TYPEMST_R E Ds ExtName(TYPEMSTPF)`, + ` D Based(TYPEMST_P)`, + ` D Qualified`, + ` D/ENDIF`, + ` D TYPEMST_Ds E Ds ExtName(TYPEMSTPF)`, + ` 
D Qualified`, + ` D Dim(TYPEMST_Dim)`, + ` D`, + ` D TYPEMST_F Ds LikeDs(TYPEMST_T)`, + ` D*‚ -------------------------------------------------------------------`, + ` D*‚ Service Program Procedures`, + ` D*‚ -------------------------------------------------------------------`, + ` D $Validate_TYPEMST...`, + ` D Pr n`, + ` D $i_Action 4 Const`, + ` D $i_Pointer * Const`, + ` D`, + ` D $GetError_TYPEMST...`, + ` D Pr 80a Varying`, + ` D $o_ErrNo 10i 0 Options(*NoPass: *Omit)`, + ` D`, + ` D $GetErrors_TYPEMST...`, + ` D Pr 10i 0`, + ` D $o_ErrDs likeds($ErrorDs_TYPEMST)`, + ` D Dim(TYPEMST_Dim)`, + ` D`, + ` D*‚ Input Handler`, + ` D`, + ` D $SetLL_TYPEMST Pr n`, + ` D $i_Pointer * Const`, + ` D $i_Key Const`, + ` D likeds(TYPEMST_K)`, + ` D`, + ` D $Read_TYPEMST Pr n`, + ` D $i_Pointer * Const`, + ` D`, + ` D $ReadE_TYPEMST Pr n`, + ` D $i_Pointer * Const`, + ` D $i_Key LikeDs(TYPEMST_K)`, + ` D Const Options(*NoPass)`, + ` D`, + ` D $Chain_TYPEMST Pr n`, + ` D $i_Pointer * Const`, + ` D $i_Key LikeDs(TYPEMST_K)`, + ` D Const Options(*NoPass)`, + ` D`, + ` D $CloseI_TYPEMST...`, + ` D Pr`, + ` D`, + ` D $Close_TYPEMST...`, + ` D Pr`, + ` D`, + ` D $SetGT_TYPEMST Pr n`, + ` D $i_Pointer * Const`, + ` D $i_Key LikeDs(TYPEMST_K)`, + ` D Const Options(*NoPass)`, + ` D`, + ` D $ReadPE_TYPEMST...`, + ` D Pr n`, + ` D $i_Pointer * Const`, + ` D $i_Key LikeDs(TYPEMST_K)`, + ` D Const Options(*NoPass)`, + ` D`, + ` D $ReadP_TYPEMST Pr n`, + ` D $i_Pointer * Const`, + ` D`, + ` D $SaveKeys_TYPEMST...`, + ` D Pr n`, + ` D $i_Pointer * Const`, + ` D $i_Key LikeDs(TYPEMST_K)`, + ` D Const Options(*NoPass)`, + ` D`, + ` D $Restore_TYPEMST...`, + ` D Pr n`, + ` D`, + ` D*‚ Update Handler`, + ` D`, + ` D $CloseU_TYPEMST...`, + ` D Pr`, + ` D`, + ` D $Write_TYPEMST Pr n`, + ` D $i_Pointer * Const`, + ` D $i_Key LikeDs(TYPEMST_K)`, + ` D Const Options(*NoPass)`, + ` D`, + ` D $Update_TYPEMST...`, + ` D Pr n`, + ` D $i_Pointer * Const`, + ` D $i_Key LikeDs(TYPEMST_K)`, + ` D Const 
Options(*NoPass)`, + ` D`, + ` D $Delete_TYPEMST...`, + ` D Pr n`, + ` D $i_Pointer * Const`, + ` D $i_Key LikeDs(TYPEMST_K)`, + ` D Const Options(*NoPass)`, + ` D`, + ` D*‚ SQL Handler`, + ` D`, + ` D $SQLRead_TYPEMST...`, + ` D Pr n`, + ` D $i_Pointer * Const`, + ` D $i_Statement 500a Const Options(*NoPass: *VarSize)`, + ` D`, + ` D $Select_TYPEMST...`, + ` D Pr n`, + ` D $o_TYPEMST_Ds LikeDs(TYPEMST_R) Dim(TYPEMST_Dim)`, + ` D $o_TYPEMST_Elem...`, + ` D 10i 0`, + ` D $i_SQLWhere 200a Const Options(*NoPass)`, + ` D $i_SQLOrder 200a Const Options(*NoPass)`, + ` D`, + ` D $SQLFetch_TYPEMST...`, + ` D Pr n`, + ` D $i_Pointer * Const`, + ` D $i_Procedure 10a Const`, + ` D`, + ` D $F4_TYPEMST...`, + ` D Pr LikeDs(TYPEMST_K)`, + ` D $i_Filter LikeDs(TYPEMST_F)`, + ` D Const`, + ` D $i_Row 3s 0 Const Options(*NoPass)`, + ` D $i_Col 3s 0 Const Options(*NoPass)`, + ` D`, + ` D $GetFilter_TYPEMST...`, + ` D Pr 5000a Varying`, + ` D $i_Filter LikeDs(TYPEMST_F)`, + ` D Const`, + ` D*‚ -------------------------------------------------------------------`, + ` D*‚ Data Structure`, + ` D*‚ -------------------------------------------------------------------`, + ` D $ErrorDS_TYPEMST...`, + ` D Ds Qualified Dim(TYPEMST_Dim)`, + ` D Column 10a`, + ` D Message 70a`, + ` D`, + ` D*‚ -------------------------------------------------------------------`, + ` D*‚ Constants`, + ` D*‚ -------------------------------------------------------------------`, + ` D TYPEMST_FILENAME...`, + ` D c 'TYPEMSTPF' FILENAME`, + ` D TYPEMST_Dim c 100`, + ` D TYPEMST_IN51 c 51 FILENAME`, + ` D TYPEMST_IN52 c 52 TYPE`, + ` D TYPEMST_IN53 c 53 TYPNAME`, + ` D TYPEMST_IN54 c 54 TYPSNAME`, + ` D TYPEMST_IN55 c 55 ACTION`, + ` D TYPEMST_IN56 c 56 PROC1`, + ` D TYPEMST_IN57 c 57 PROC2`, + ` D TYPEMST_IN58 c 58 PROC3`, + ` D`, + ` /*MIDAS560 ABDN198 */`, + ` /*MIDAS1939 ABDN198 */`, + ``, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: 
true }); + + const TYPEMST_T = cache.find(`TYPEMST_T`); + expect(TYPEMST_T.references.length).toBe(2); + expect(TYPEMST_T.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `TYPEMST_T`)).toBe(true); + + const TYPEMST_Ds = cache.find(`TYPEMST_Ds`); + expect(TYPEMST_Ds.references.length).toBe(1); + expect(TYPEMST_Ds.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `TYPEMST_Ds`)).toBe(true); + + const TYPEMST_Dim = cache.find(`TYPEMST_Dim`); + expect(TYPEMST_Dim.references.length).toBe(5); + expect(TYPEMST_Dim.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `TYPEMST_Dim`)).toBe(true); +}); + +test('references_14_fixed_3', async () => { + const lines = [ + ` *?--------------------------------------------------------------?`, + ` D frdt s 7 0`, + ` D todt s 7 0`, + ` D per s 6`, + ` D year s 2 0`, + ` D month s 2 0`, + ` `, + ` *?--------------------------------------------------------------?`, + ` D DS`, + ` D filedt_c 1 1 0`, + ` D filedt_yy 2 3 0`, + ` D filedt_mm 4 5 0`, + ` D filedt_dd 6 7 0`, + ` D filedt 1 7 0`, + ` *?--------------------------------------------------------------?`, + ` D DS`, + ` D today 1 7 0`, + ` D udatc 1 1 0`, + ` D udatyy 2 3 0`, + ` D udatmm 4 5 0`, + ` D udatdd 6 7 0`, + ` *?--------------------------------------------------------------?`, + ` D PARMDS DS 6`, + ` D pmcc 1 2 0`, + ` D pmyy 3 4 0`, + ` D pmmm 5 6 0`, + ` *?---?`, + ` *?--------------------------------------------------------------?`, + ` C *ENTRY plist`, + ` C parm parmds`, + ` *??`, + ` *??`, + ` *?---?`, + ` C if parmds = *blank`, + ` c eval year = Uyear`, + ` C eval month = umonth`, + ` C movel *year per --> cyymmdd`, + ` C move umonth per`, + ` C else`, + ` C eval year = pmyy`, + ` C eval month = pmmm`, + ` C eval per = parmds --> cyymmdd`, + ` C endif`, + ` *?---?`, + ` C eval filedt_c = 1`, + ` C eval filedt_yy = year`, + ` C eval filedt_mm = month`, + ` C eval filedt_dd = 
1`, + ` C eval frdt = filedt --> cyymmdd`, + ` *??`, + ` C eval filedt_dd = 31`, + ` C eval todt = filedt --> cyymmdd`, + ` *??`, + ` *?SQL-Delete if there are already records for given period?`, + ` C/EXEC SQL`, + ` C+ delete from WOOPS/SCOOBYDO where period = :per`, + ` C/END-EXEC`, + ` *?==============================================================?`, + ` *?SQL-Insert in file SCOOBYDO for the given period?`, + ` C/EXEC SQL`, + ` C+ insert into WOOPS/SCOOBYDO (geco,nuco,period,lati,cicn,cdt3,nao2,`, + ` C* this is intentially broken, because we don't parse SQL`, + ` C+ substr(rtrim('0000000' concat cast(fhnuco as char(7))),`, + ` C+ length(rtrim('0000000' concat cast(fhnuco as char(7))))-6, 5 )`, + ` C+ concat '-'`, + ` C+ concat substr(rtrim('0000000' concat cast(fhnuco as char(7))),`, + ` C+ length(rtrim('0000000' concat cast(fhnuco as char(7))))-1, 2 ),`, + ` C+ ftlet1, ftlet2 from pcsiti, pchico, pcsiko`, + ` C+ where fhgeco = fkgeco and fhnuco = fknuco`, + ` C+ and fkgeco = 2 and (fkcgko in ('B', 'C'))`, + ` C+ and fkrpko not in ('110', '130', '135', '140', '199', '235')`, + ` C+ and fhnao1 in ('C', 'H', 'O', 'S')`, + ` C+ and fhanop = ' ' and fhdaop between :frdt and :todt`, + ` C+ and fhcdt3 = ftcdt3`, + ` C+ and fhssor = 'T'`, + ` C+ and fhfcds <> 0`, + ` C+ group by fhgeco, fhnuco, fkleti, ftlati,`, + ` C+ fhcicn, fhnao2, fhlads, fhcdt3, ftlet1, ftlet2`, + ` C+ order by fhnuco`, + ` C/END-EXEC`, + ` *?==============================================================?`, + ` C call 'HICO_TAXE'`, + ` C parm per`, + ` C call 'HICO_BRK2'`, + ` C parm per`, + ` C seton LR`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); + + const per = cache.find(`per`); + expect(per.references.length).toBe(7); + + // for (const ref of per.references) { + // console.log({ + // ref, + // text: lines.substring(ref.offset.position, ref.offset.end), + // about: 
lines.substring(ref.offset.position - 10, ref.offset.end + 10) + // }) + // } + + expect(per.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `per`)).toBe(true); + + const pmyy = cache.find(`pmyy`); + expect(pmyy.references.length).toBe(2); + expect(pmyy.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `pmyy`)).toBe(true); + + const filedt = cache.find(`filedt`); + expect(filedt.references.length).toBe(3); + expect(filedt.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `filedt`)).toBe(true); + + const lr = cache.find(`INLR`); + expect(lr.references.length).toBe(1); +}); + +test('indicators1', async () => { + const lines = [ + `**FREE`, + `Dcl-S MyVar char(10);`, + ``, + `*IN10 = *ON;`, + `MyVar = 'Hi';`, + ``, + `DSply Myvar;`, + `*INLR = *IN10;`, + `Return;`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, {withIncludes: true, ignoreCache: true, collectReferences: true}); + + const in10 = cache.find(`IN10`); + expect(in10.references.length).toBe(2); +}); + +test('references_15_fixed_4', async () => { + const lines = [ + ` *****************************************************************?`, + ` FGltbsVar UF e k disk`, + ` FGlRcvvar IF a e k disk`, + ` FGllogvar o a f 500 disk`, + ` *---------------------------------------------------------------*?`, + ` `, + ` D EntryParm ds 19`, + ` D Type_SLX 3 :SLE, SLC...`, + ` D AdrIP 2 :last chars adr ip`, + ` D Version 2 :V4 ou V5`, + ` D PortNumber 6 :P + port nr`, + ` D SubNode 6 :S + sub-node`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` D RCV s 5083 Received`, + ` D* GLrcv1 1694`, + ` D* GLrcv2 1694`, + ` D* GLrcv3 1694`, + ` D* GLrcvLibre 1`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` D ebcd s 1 dim(223) ebcdic "length"`, + ` D Hms s t timfmt(*HMS)`, + ` D HmsLast s t timfmt(*HMS)`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - -*?`, + ` D r_saved_lg s 5 0 longueur r_saved`, + ` D r_saved s 19600 saved buffer`, + ` D r_transf s 19600 transfer data`, + ` D r_lg5 s 5 0 longueur modulo 256`, + ` *---------------------------------------------------------------*?`, + ` C *entry plist`, + ` C parm EntryParm`, + ``, + ` *---------------------------------------------------------------*?`, + ` C Decalage begsr`, + ` C clear r_long_lue 1 0`, + ` C if r_saved_lg = r_lg5`, + ` C clear r_saved_lg`, + ` C clear r_saved`, + ` C movel x'0000' r_saved`, + ` C else`, + ` C eval r_transf = r_saved`, + ` C eval r_saved = %subst(r_transf:(r_lg5+1))`, + ` C eval r_saved_lg = r_saved_lg - r_lg5`, + ` C endif`, + ` C clear r_lg5`, + ` C endsr`, + ` *---------------------------------------------------------------*?`, + ].join(`\r\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); + + const EntryParm = cache.find(`EntryParm`) + expect(EntryParm.references.length).toBe(2); + expect(EntryParm.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `EntryParm`)).toBe(true); + + const r_transf = cache.find(`r_transf`); + expect(r_transf.references.length).toBe(3); + expect(r_transf.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `r_transf`)).toBe(true); +}); + +test('references_16_fixed_5', async () => { + const lines = [ + ` Fqlaser O F 198 PRINTER OFLIND(*INOV) usropn`, + ` *---------------------------------------------------------------*?`, + ` D USRS S 35 DIM(10)`, + ` D Dadp s 6 0`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` D MinEur s 1 0`, + ` D MinEur1 c 1`, + ` D MinEur2 c 2`, + ` D MinEur3 c 5`, + ` D MinEur4 c 10`, + ` D MinEur5 c 20`, + ` D MinEur6 c 50`, + ` D MinEur7 c 100`, + ` D MinEur8 c 200`, + ` D MinEur9 c 500`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` D line s 3 0`, + ` D date7sub1 s 7 0`, + 
` D date7 s 7 0`, + ` D jour s 2`, + ` D tcds s 15 7`, + ` D tcde s 15 7`, + ` D tc$ s 24`, + ``, + ` D mntA s 13 2`, + ` D mntB s 15 2`, + ` D mntC s 15 2`, + ` D mntD s 15 7`, + ` D mntD$ s 19`, + ` D mntE s 15 7`, + ` D mntE$ s 19`, + ` D mntF s 15 2`, + ` D mntG s 15 2`, + ` D mntH s 15 2`, + ` D mntI s 15 2`, + ` D mntJ s 15 2`, + ` D mntJtot s 15 2`, + ` D mntK s 15 2`, + ` D PrintTotal s 1`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` D UDS`, + ` D DATE 1 6 0`, + ` D INTER 7 7`, + ` D DadpIn 11 16`, + ` D CodeIn 17 17`, + ` D #p4 84 84`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` /copy i,#$edti`, + ` /copy i,#$rnse`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` /copy i,#$rnhi`, + ` D mmdshi ds 900`, + ` D mmREJO 100 101P 0`, + ` D mmNUEN 102 105P 0`, + ` D mmAEAN 106 109P 0`, + ` *---------------------------------------------------------------*?`, + ` C eval *inLR = *ON`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` C if #p4 = '$'`, + ``, + ` C if %check('0123456789':dadpin) > 0`, + ` C or %check('0123456789':codein) > 0`, + ` C return`, + ` C endif`, + ``, + ` C else`, + ` C movel date dadpin`, + ` C eval codein = '5'`, + ` C endif`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` C movel dadpin dadp`, + ` C if dadp < 010101`, + ` C return`, + ` C endif`, + ` C movel codein MinEur`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` C call 'DAIS7'`, + ` C parm dadp date7`, + ` C if date7 < 1040102 or date7 > 1991230`, + ` C return`, + ` C endif`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` *?sub 1 day?`, + ` C call 'DACLTT7'`, + ` C parm date7 date7sub1`, + ` C parm -1 JOURS 3 0`, + ` C parm '-' RTNCOD 1`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` C if INTER = 'I'`, + ` C eval EDTIMC = 'N'`, + ` C endif`, + 
` /copy i,##edti#ini`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` *?Compte de position:?lecture de tous les soldes (1 par devise)`, + ` C eval FSDADP = date7sub1`, + ` C eval secode = 'S'`, + ` C eval FSGECO = 1`, + ` C eval FSNUCO = 9000074`, + ` C eval fssuco = 'C'`, + ` C eval sezone = '+S '`, + ` C eval sekylf = 'GNLS '`, + ` C call 'RNSE'`, + ` C parm DSSE`, + ``, + ` C do *hival`, + ` C eval secode = '%'`, + ` C call 'RNSE'`, + ` C parm DSSE`, + ``, + ` C if SECODE = 'E'`, + ` C leave`, + ` C endif`, + ``, + ` C if FSLADE = 'EUR'`, + ` C iter`, + ` C endif`, + ``, + ` C clear PrintTotal`, + ` C eval mntA = FSMOSO`, + ``, + ` *?Conversion du solde de la veille, au taux de la veille?`, + ` c call 'EURMODV15'`, + ` c parm fslade`, + ` c parm 'EUR' syde 3`, + ` c parm date7sub1 dacr 7 0`, + ` c parm 0 mntD`, + ` c parm 0 dvfrto 1 0`, + ` c parm ' ' isfrto 1`, + ` C parm fsmoso mntF`, + ``, + ` *?Conversion du solde de la veille, au taux du jour?`, + ` c call 'EURMODV15'`, + ` c parm fslade`, + ` c parm 'EUR' syde 3`, + ` c parm date7 dacr 7 0`, + ` c parm 0 mntE`, + ` c parm 0 dvfrto 1 0`, + ` c parm ' ' isfrto 1`, + ` C parm fsmoso mntG`, + ``, + ` C eval mntH = mntG - mntF`, + ``, + ` C if line = 0 or line > edtiov`, + ` C exsr newpag`, + ` C endif`, + ``, + ` C if MinEur = 0 and %abs(mntH) > 0`, + ` C or MinEur = 1 and %abs(mntH) >= MinEur1`, + ` C or MinEur = 2 and %abs(mntH) >= MinEur2`, + ` C or MinEur = 3 and %abs(mntH) >= MinEur3`, + ` C or MinEur = 4 and %abs(mntH) >= MinEur4`, + ` C or MinEur = 5 and %abs(mntH) >= MinEur5`, + ` C or MinEur = 6 and %abs(mntH) >= MinEur6`, + ` C or MinEur = 7 and %abs(mntH) >= MinEur7`, + ` C or MinEur = 8 and %abs(mntH) >= MinEur8`, + ` C or MinEur = 9 and %abs(mntH) >= MinEur9`, + ` C except solde`, + ` C add 1 line`, + ` C eval PrintTotal = '*'`, + ` C endif`, + ``, + ` C clear mntJtot`, + ``, + ` *?Historique du compte du jour, dans la devise?`, + ` C clear dshi`, + ` C eval hilf = 
'LHIGLDP'`, + ` C eval higenu = '+'`, + ` C eval fhgeco = fsgeco`, + ` C eval fhnuco = fsnuco`, + ` C eval hidadp = '='`, + ` C eval fhdadp = date7`, + ` C eval hilade = '='`, + ` C eval fhlade = fslade`, + ` *?setll histo:?`, + ` C eval hicode = 'S'`, + ` C call 'RNHI'`, + ` C parm dshi`, + ` *?read histo:?`, + ` C do *hival`, + ` C eval hicode = 'R'`, + ` C call 'RNHI'`, + ` C parm dshi`, + ``, + ` C if hicode = 'E'`, + ` C exsr PrtTotal`, + ` C leave`, + ` C endif`, + ``, + ` C eval mmdshi = dshi`, + ` C exsr operation`, + ` C eval dshi = mmdshi`, + ``, + ` C enddo`, + ``, + ` C enddo`, + ].join(`\r\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); + + const MinEur2 = cache.find(`MinEur2`); + expect(MinEur2.references.length).toBe(2); + expect(MinEur2.references.every(ref => lines.substring(ref.offset.position, ref.offset.end).toUpperCase() === `MINEUR2`)).toBe(true); + + const MinEur = cache.find(`MinEur`); + expect(MinEur.references.length).toBe(12); + expect(MinEur.references.every(ref => lines.substring(ref.offset.position, ref.offset.end).toUpperCase() === `MINEUR`)).toBe(true); + + const line = cache.find(`line`); + expect(line.references.length).toBe(4); + expect(line.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `line`)).toBe(true); + + const DadpIn = cache.find(`DadpIn`); + expect(DadpIn.references.length).toBe(4); + expect(DadpIn.references.every(ref => lines.substring(ref.offset.position, ref.offset.end).toUpperCase() === `DADPIN`)).toBe(true); + + const mntJtot = cache.find(`mntJtot`); + expect(mntJtot.references.length).toBe(2); + expect(mntJtot.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `mntJtot`)).toBe(true); +}); + +test('references_17_fixed_6', async () => { + const lines = [ + ` *%CSTD===========================================================*`, + ` ** Application. : PEC PECAM *`, + ` ** Component. 
. : AUD003 Type: RPGLE *`, + ` **===============================================================*`, + ` ** Sub-system . : *`, + ` ** Function . . : *`, + ` ** Sub-function : *`, + ` **%S=============================================================*`, + ` ** Description of functions: *`, + ` ** *`, + ` ** *`, + ` ** *`, + ` **%E=============================================================*`, + ` *%ECSTD==========================================================*`, + ``, + ``, + ` fLSIKODO iP e k disk`, + ` faud003O o e k disk usropn extfile('QTEMP/AUD003O')`, + ``, + ` d AUD003 PR`, + ` d type 5 const`, + ` d req 1 const`, + ``, + ` d AUD003 PI`, + ` d type 5 const`, + ` d req 1 const`, + ``, + ` /copy qprotosrc,excproto`, + ` /copy qprotosrc,mail`, + ` /copy qprotosrc,maildist`, + ` /copy qprotosrc,toolproc`, + ` /copy qprotosrc,user`, + ` /copy qprotosrc,path`, + ` /copy qprotosrc,list`, + ` /copy qprotosrc,siko`, + ` /copy qprotosrc,adpo`, + ``, + ` d user s 10 inz(*user)`, + ` d path s 100`, + ` d file s 150`, + ` d msgkey s 4`, + ` d dest s 4`, + ` d datiso s 5i 0`, + ` d dec2 s 5i 0`, + ` d cent s 5i 0`, + ` d today s d inz(*sys)`, + ` d automat s n`, + ` d catbil s 6`, + ``, + ` * common error data structure`, + ``, + ` D errcod ds`, + ` D bytpro 10i 0 INZ(256)`, + ` D bytava 10i 0`, + ` D errmsgid 7`, + ` D 1`, + ` D errmsgdta 240`, + ``, + ` /free`, + ``, + ``, + ` begsr *inzsr;`, + ` // si rapport automatique envoi à la liste et chaque gestionnaire`, + ` automat = req <> 'Y';`, + ` if automat;`, + ` callp(e) docmd('x');`, + ` if %error;`, + ` callp(e) docmd('clrpfm x ');`, + ` endif;`, + ` open aud003o;`, + ` endif;`, + ` path = getpath('*DFT');`, + ` file = %trim(path) + 'x ' + %trim(type) + '.xls';`, + ` exopenfile(file);`, + ` // creation des styles`, + ` datiso = ExCreateStyle ();`, + ` ExSetFormat(datiso :FORMAT_CUSTOM :'d/m/yyyy');`, + ` cent = ExCreateStyle ();`, + ` exsetAlign(cent:ALIGN_CENTER);`, + ` dec2 = ExCreateStyle ();`, + ` ExSetFormat(dec2 
:FORMAT_4);`, + ` exaddsheet();`, + ` exFreezePane(2:2);`, + ` exsethead(HF_FILE);`, + ` exsetfoot(HF_CURTOTPAGE:POS_RIGHT);`, + ` exsetlandscape(*ON);`, + ` exsetgridprint(*ON);`, + ` exSetRepRowCol(1:1:0:0);`, + ` exaddrow(0:3);`, + ``, + ` endsr;`, + ` begsr srend;`, + ` exclosefile();`, + ` // Si appelé par menu envoi au requester`, + ` if automat;`, + ` if type = 'ALL'`, + ` or type = 'GEST' and %subst(catbil:1:4) = 'GEST'`, + ` or type = 'OTHER' and %subst(catbil:1:4) <> 'GEST';`, + ` crtdstmail('SLEE1_GEST');`, + ` elseif type = 'OTHER';`, + ` crtdstmail('SLEE1_OTH');`, + ` else;`, + ` crtdstmail('SLEEPING');`, + ` endif;`, + ` else;`, + ` openMail('x');`, + ` MailAddSender(' ' :getusremail());`, + ` MailAddDist(' ':getUsreMail():0);`, + ` endif;`, + ``, + ` endsr ;`, + ` /end-free`, + ].join(`\r\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); + + const automat = cache.find(`automat`); + expect(automat.references.length).toBe(4); + expect(automat.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `automat`)).toBe(true); +}); + +test('references_18_fixed_7', async () => { + const lines = [ + ``, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` D Z160 DS 160`, + ` D SRCDTA 1 120`, + ` D LAS 1 132 dim(132)`, + ` *- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*?`, + ` C movea 'Solde titres'las(ll)`, + ` C add 12 ll`, + ` C movea ' du Client: 'las(ll)`, + ` C add 12 ll`, + ``, + ].join(`\r\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); + + const las = cache.find(`LAS`); + expect(las.references.length).toBe(3); + expect(las.references.every(ref => lines.substring(ref.offset.position, ref.offset.end).toUpperCase() === `LAS`)).toBe(true); +}); + +test('references_19_fixed_8', async () => { + const lines = [ + ` d data6g ds dim(7) export 
qualified`, + ` d inz`, + ` d rg 9 4 dim(12)`, + ``, + ` d data6d s 7 0 dim(12) export`, + ``, + ` d dat2 s 7 0 import`, + ``, + ` d xdate s d`, + ` d sdat s 7 0`, + ` d i s 5i 0`, + ` d j s 5i 0`, + ` d data ds inz`, + ` d sumx 13 0`, + ` d retx 13 0`, + ` d sumy 13 0`, + ` d rety 13 0`, + ` d sum1 13 0`, + ` d ret1 13 0`, + ` d sum2 13 0`, + ` d ret2 13 0`, + ` d sum3 13 0`, + ` d ret3 13 0`, + ` d sum5 13 0`, + ` d ret5 13 0`, + ` d sum0 13 0`, + ` d ret0 13 0`, + ` /free`, + ` reset data6g;`, + ` // hello world`, + ` i = 1;`, + ` data6d(i) = dat2;`, + ` sdat = data6d(i);`, + ` exsr sql1;`, + ` xdate = %date(dat2:*cymd);`, + ` for i = 2 to 12;`, + ` xdate -= %days(%subdt(xdate:*d));`, + ` data6d(i) = %dec(xdate:*cymd);`, + ` sdat = data6d(i);`, + ` exsr sql1;`, + ` endfor;`, + ` *inlr = *on;`, + ``, + ` begsr sql1;`, + ` reset data;`, + ` exec sql SELECT`, + ``, + ` sum(whoops) as sumx,`, + ` sum(awesome) as retx,`, + ` sum(case when scooby <> ' ' then whoops else 0 end ) as sumy,`, + ` sum(case when scooby <> ' ' then awesome else 0 end ) as rety,`, + ` sum(case when scooby = '1' then whoops else 0 end ) as sum1,`, + ` sum(case when scooby = '1' then awesome else 0 end ) as ret1,`, + ` sum(case when scooby = '2' then whoops else 0 end ) as sum2,`, + ` sum(case when scooby = '2' then awesome else 0 end ) as ret2,`, + ` sum(case when scooby = '3' then whoops else 0 end ) as sum3,`, + ` sum(case when scooby = '3' then awesome else 0 end ) as ret3,`, + ` sum(case when scooby = '5' then whoops else 0 end ) as sum5,`, + ` sum(case when scooby = '5' then awesome else 0 end ) as ret5,`, + ` sum(case when scooby = ' ' then whoops else 0 end ) as sum0,`, + ` sum(case when scooby = ' ' then awesome else 0 end ) as ret0`, + ` into :data`, + ` FROM pcstatko`, + ` WHERE skprive = '1' and skdate = :sdat and`, + ` skgest in (select cluser from pcusrlst where clname = 'RRO')`, + ` and scooby in (' ', '1', '2', '3', '5')`, + ` and whoops > 0 ;`, + ].join(`\r\n`); + + const cache 
= await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); + + const data = cache.find(`data`); + expect(data.references.length).toBe(3); + expect(data.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `data`)).toBe(true); + + const sumy = cache.find(`sumy`); + expect(sumy.references.length).toBe(1); + const aroundSumy = lines.substring(sumy.references[0].offset.position - 10, sumy.references[0].offset.end + 10); + expect(aroundSumy).toContain(`d sumy`); +}); + +test(`references_20`, async () => { + const lines = [ + `**FREE`, + `Ctl-Opt DftActGrp(*No);`, + `/copy './rpgle/copy5.rpgleinc'`, + ``, + `dcl-s MyVar char(LENGTH_t);`, + ``, + `dsply MyVar;`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); + + const Length_t = cache.find(`LENGTH_t`); + expect(Length_t.references.length).toBe(3); + + expect(Length_t.position.path).not.toBe(uri); + + const uniqueUris = Length_t.references.map(ref => ref.uri).filter((value, index, self) => self.indexOf(value) === index); + expect(uniqueUris.length).toBe(2); + + const include = uniqueUris.find(uri => uri.endsWith(`copy5.rpgleinc`)); + expect(include).toBeDefined(); + + const rawCopyBook = await readFile(include, 'utf-8'); + const copyRefs = Length_t.references.filter(ref => ref.uri === include); + expect(copyRefs.length).toBe(2); + expect(copyRefs.every(ref => rawCopyBook.substring(ref.offset.position, ref.offset.end).toUpperCase() === `LENGTH_T`)).toBe(true); + + const baseRefs = Length_t.references.filter(ref => ref.uri === uri); + expect(baseRefs.length).toBe(1); + expect(baseRefs.every(ref => lines.substring(ref.offset.position, ref.offset.end).toUpperCase() === `LENGTH_T`)).toBe(true); +}); + +test(`references_21_fixed_exec1`, async () => { + const lines = [ + ``, + ` d tlst s 10 inz('RRO')`, + ``, + ` c sropen begsr`, + ` c if byList or gest = '*'`, + ` c if 
byList`, + ` c eval tlst = liste`, + ` c else`, + ` c reset tlst`, + ` c endif`, + ` c/exec sql`, + ` C+ declare C1 cursor for`, + ` /copy copy,rro100k1`, + ` c+ and skgest in (select cluser from pcusrlst where clname = :tlst)`, + ` C+ group by stcdpy`, + ` C+ order by xx desc`, + ` c/end-exec`, + ` c/exec sql`, + ` C+ open C1`, + ` c/end-exec`, + ].join(`\r\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); + + const tlst = cache.find(`tlst`); + expect(tlst.references.length).toBe(4); + expect(tlst.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `tlst`)).toBe(true); +}); + + +// Test case is from noxDb +test(`references_22_long_lines`, async () => { + const lines = [ + `**free`, + `// --------------------------------------------------------------`, + `// Next departure from Mols Linien`, + `// --------------------------------------------------------------`, + `dcl-proc jsonRequest;`, + ``, + ` dcl-s pReq pointer;`, + ` dcl-s pResponse pointer;`, + ` dcl-s url varchar(1024);`, + ``, + ` // parameters on URL`, + ` url = 'https://www.molslinjen.dk/umbraco/api/departure/getnextdepartures?departureRegionId=JYL';`, + ``, + ` // Note: No payload in the request. use *null - here we pass a null pointer `, + ` // Note: No options in the request. 
use *null - here we pass the *null literal value`, + ` `, + ` // Do the http request to get next depature`, + ` // Use YUM to install curl, which is the tool used by httpRequest`, + ` pResponse = json_httpRequest (url: pReq:*null:'JSON');`, + ``, + ` json_WriteJsonStmf(pResponse:'/prj/noxdb/testout/httpdump.json':1208:*OFF);`, + ``, + ` json_delete(pReq);`, + ` json_delete(pResponse);`, + ``, + `end-proc;`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); + + const jsonRequest = cache.find(`jsonRequest`); + const pReq = jsonRequest.scope.find(`pReq`); + expect(pReq.references.length).toBe(3); + expect(pReq.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `pReq`)).toBe(true); +}); + +// Test case is from noxDb +test('references_23_before_spaces', async () => { + const lines = [ + `**free`, + ``, + `dcl-s err ind;`, + `dcl-s row varChar(32766);`, + `dcl-s pRow pointer;`, + `dcl-s id int(10);`, + ``, + ` // now get that row: here we use the a stringed object to build the where statement via the`, + ` pRow = json_sqlResultRow ((`, + ` 'Select * -`, + ` from noxdbdemo.note2 -`, + ` where id = $id -`, + ` ')`, + ` :'{id:' + %char(id) +'}'`, + ` );`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); + + const id = cache.find(`id`); + expect(id.references.length).toBe(2); + expect(id.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `id`)).toBe(true); +}); + +// Test case is from rpgle-repl +test('references_24_comment_in_statement', async () => { + const lines = [ + `**free`, + ``, + `dcl-proc freeFormatEvaluationFound export;`, + ` dcl-pi *n ind;`, + ` code like(t_longLineOfCode) const;`, + ` triggerType like(t_triggerType);`, + ` end-pi;`, + ``, + ` if %len(%trim(code)) >= 10 `, + ` and %lower(%subst(%trim(code): 1: 10)) = 'replprint(';`, 
+ ` triggerType = c_replPrintStatement;`, + ` return *on;`, + ` endif;`, + ``, + ` if %len(%trim(code)) >= 11`, + ` and %lower(%subst(%trim(code): 1: 11)) = 'replequals(';`, + ` triggerType = c_replEqualsStatement;`, + ` return *on;`, + ` endif;`, + ``, + ` if %scan('IF ': %trim(toUpperCase(code))) = 1`, + ` or %scan('IF%': %trim(toUpperCase(code))) = 1`, + ` or %scan('ELSE ': %trim(toUpperCase(code))) = 1`, + ` // why do we need the next case?`, + ` //or %scan('ELSE%': %trim(toUpperCase(code))) = 1`, + ` or %scan('ELSE;': %trim(toUpperCase(code))) = 1`, + ` or %scan('ELSEIF ': %trim(toUpperCase(code))) = 1`, + ` or %scan('ELSEIF%': %trim(toUpperCase(code))) = 1`, + ` or %scan('WHEN ': %trim(toUpperCase(code))) = 1`, + ` or %scan('WHEN%': %trim(toUpperCase(code))) = 1`, + ` or %scan('OTHER ': %trim(toUpperCase(code))) = 1`, + ` or %scan('OTHER;': %trim(toUpperCase(code))) = 1`, + ` or %scan('ON-ERROR ': %trim(toUpperCase(code))) = 1`, + ` or %scan('ON-ERROR;': %trim(toUpperCase(code))) = 1;`, + ``, + ` triggerType = c_conditionalStatement;`, + ` return *on;`, + ``, + ` endif;`, + ``, + ` if %scan('EXEC SQL': %trim(toUpperCase(code))) = 1;`, + ``, + ` // DECLARE statement are non-executable, so don't`, + ` // evaluate them - check by removing blanks, uppercasing,`, + ` // and trimming.`, + ` if %scan('EXECSQLDECLARE':`, + ` %trim(toUpperCase(%scanrpl(' ':'':code)))`, + ` ) <> 1;`, + ``, + ` triggerType = c_sqlStatement;`, + ` return *on;`, + ``, + ` endif;`, + ``, + ` endif;`, + ``, + ` if %scan('DOW ': %trim(toUpperCase(code))) = 1`, + ` or %scan('DOW%': %trim(toUpperCase(code))) = 1`, + ` or %scan('DOU ': %trim(toUpperCase(code))) = 1`, + ` or %scan('DOU%': %trim(toUpperCase(code))) = 1`, + ` or %scan('FOR ': %trim(toUpperCase(code))) = 1;`, + ` // why do we need the next case?`, + ` //or %scan('FOR%': %trim(toUpperCase(code))) = 1;`, + ``, + ` triggerType = c_loopStatement;`, + ` return *on;`, + ``, + ` endif;`, + ``, + ` if %scan('=': %trim(code)) <> 0;`, + ``, 
+ ` triggerType = c_setValueStatement;`, + ` return *on;`, + ``, + ` endif;`, + ``, + ` return *off;`, + ``, + `end-proc;`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); + const freeFormatEvaluationFound = cache.find(`freeFormatEvaluationFound`); + const code = freeFormatEvaluationFound.scope.find(`code`); + expect(code.references.length).toBe(25); + expect(code.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `code`)).toBe(true); +}); + +// Test case is from httpapi +test('references_25_fixed_string', async () => { + const lines = [ + ` D http s 5050a varying`, + ` D rc s 10i 0`, + ``, + ` /free`, + ` http_debug(*on: '/tmp/example9-debug.txt');`, + ``, + ` if %parms < 3;`, + ` http_comp('To call, type: +`, + ` EXAMPLE9 URL(''http://google.com'') STMF(''/tmp/google.pdf'')');`, + ` return;`, + ` endif;`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); + const http = cache.find(`http`); + expect(http.references.length).toBe(1); + expect(http.references.every(ref => lines.substring(ref.offset.position, ref.offset.end) === `http`)).toBe(true); +}); + +test('references_26_fixed_tag', async () => { + const lines = [ + ` C SCRNFY IFNE wCfgNUSRNF`, + ` C EVAL wCfgKey = 'NUSRNF'`, + ` C wCfgKey CHAIN PCONFIG 81`, + ` C N81 EVAL CNFVAL = SCRNFY`, + ` C N81 UPDATE CONFIG 81`, + ` C 81 GOTO UPDKO`, + ` C ENDIF`, + ` C UPDOK TAG`, + ` C EVAL MSGLIN = cSavedOK`, + ` C GOTO UPDEND`, + ` C UPDKO TAG`, + ` C EVAL MSGLIN = cSavedKO`, + ` C UPDEND TAG`, + ` C EVAL *IN80 = *ON`, + ` C ENDSR`, + ].join(`\n`); + + const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true }); + const updok = cache.find(`UPDKO`); + expect(updok.references.length).toBe(2); + expect(updok.references.every(ref => lines.substring(ref.offset.position, 
ref.offset.end) === `UPDKO`)).toBe(true);
});

// This test case is from bbs400
test('references_27_fixed_reference', async () => {
  const lines = [
    ` H/TITLE Administration - General Configuration`,
    ` H COPYRIGHT('(C) 2020 David Asta under MIT License')`,
    ` * SYSTEM : V4R5`,
    ` * PROGRAMMER : David Asta`,
    ` * DATE-WRITTEN: 12/NOV/2020`,
    ` *`,
    ` * This program allows an Administrator user to display/change the`,
    ` * general Configuration values of the BBS stored in PCONFIG`,
    ` **********************************************************************`,
    ` /copy 'CBKOPTIMIZ.rpgle'`,
    ` **********************************************************************`,
    ` * INDICATORS USED:`,
    ` * 80 - *ON turns DSPATR(PR), which protects fields from being changed`,
    ` * 81 - CBKPCFGREA CHAIN Not Found`,
    ` **********************************************************************`,
    ` FBBSADGCD CF E WORKSTN`,
    ` FPCONFIG UF E K DISK`,
    ` **********************************************************************`,
    ` * Data structures`,
    ` /copy 'CBKDTAARA.rpgle'`,
    ` * Constants`,
    ` D cKeysDft C CONST('F10=Edit F12=Go back')`,
    ` D cKeysEdit C CONST('F10=Confirm Changes F12=Can-`,
    ` D cel')`,
    ` D cSavedOK C CONST('Configuration was changed suc-`,
    ` D cessfully.')`,
    ` D cSavedKO C CONST('There was an error while writ-`,
    ` D ting to PCONFIG.')`,
    ` * Variables`,
    ` /copy 'CBKPCFGDCL.rpgle'`,
    ` D wCfgKey S 6A`,
    ` D wShowWelcome S 1A`,
    ` ***********************************************************************`,
    ` C WRITE HEADER`,
    ` C 80 EVAL KEYSLS = cKeysDft`,
    ` C N80 EVAL KEYSLS = cKeysEdit`,
    ` C WRITE FOOTER`,
    ` C EXFMT BODY`,
    ` C CLEAR MSGLIN`,
    ` C EXSR CheckFkeys`,
    ` **********************************************************************`,
    ` * Subroutine called automatically at startup`,
    ` **********************************************************************`,
    ` C *INZSR BEGSR`,
    ` C EVAL SCRSCR = 'BBSADGC'`,
    ` * Protect fields from being modified`,
    ` C EVAL *IN80 = *ON`,
    ` * Get values from PCONFIG and show them on screen`,
    ` C EXSR GetConfig`,
    ` C EVAL SCRNAM = wCfgBBSNAM`,
    ` C EVAL SCRLCR = WCfgLOCCRY`,
    ` C EVAL SCRLCT = wCfgLOCCTY`,
    ` C EVAL SCRTZC = wCfgTIMZON`,
    ` C EVAL SCRCLO = wCfgCLOSED`,
    ` C EVAL SCRSAL = wCfgSHWALD`,
    ` C EVAL SCRSWE = wCfgSHWWEL`,
    ` C EVAL SCRHID = wCfgHIDESO`,
    ` C EVAL SCRHLS = wCfgHLSOMS`,
    ` C EVAL SCRNFY = wCfgNUSRNF`,
    ` * Get values from DATAARA and show them on screen`,
    ` /copy 'CBKHEADER.rpgle'`,
    ` C ENDSR`,
    ` **********************************************************************`,
    ` * Check Function keys pressed by the user`,
    ` **********************************************************************`,
    ` C CheckFkeys BEGSR`,
    ` * F10=Edit`,
    ` C IF *IN10 = *ON`,
    ` C* N80 EXSR SavePCONFIG`,
    ` C* 80 EVAL *IN80 = *OFF`,
    ` C IF *IN80 = *ON`,
    ` C EVAL *IN80 = *OFF`,
    ` C ELSE`,
    ` C EXSR SavePCONFIG`,
    ` C ENDIF`,
    ` C ENDIF`,
    ` * F12=Go back or F12=Cancel`,
    ` C IF *IN12 = *ON`,
    ` C 80 MOVE *ON *INLR`,
    ` C 80 RETURN`,
    ` C N80 EVAL *IN80 = *ON`,
    ` C ENDIF`,
    ` C ENDSR`,
    ` **********************************************************************`,
    ` * Save changed values to PCONFIG`,
    ` **********************************************************************`,
    ` C SavePCONFIG BEGSR`,
    ` * BBS Name`,
    ` C SCRNAM IFNE wCfgBBSNAM`,
    ` C EVAL wCfgKey = 'BBSNAM'`,
    ` C wCfgKey CHAIN PCONFIG 81`,
    ` C N81 EVAL CNFVAL = SCRNAM`,
    ` C N81 UPDATE CONFIG 81`,
    ` C 81 GOTO UPDKO`,
    ` C ENDIF`,
    ` * BBS Location Country`,
    ` C SCRLCR IFNE WCfgLOCCRY`,
    ` C EVAL wCfgKey = 'LOCCRY'`,
    ` C wCfgKey CHAIN PCONFIG 81`,
    ` C N81 EVAL CNFVAL = SCRLCR`,
    ` C N81 UPDATE CONFIG 81`,
    ` C 81 GOTO UPDKO`,
    ` C ENDIF`,
    ` * BBS Location City`,
    ` C SCRLCT IFNE wCfgLOCCTY`,
    ` C EVAL wCfgKey = 'LOCCTY'`,
    ` C wCfgKey CHAIN PCONFIG 81`,
    ` C N81 EVAL CNFVAL = SCRLCT`,
    ` C N81 UPDATE CONFIG 81`,
    ` C 81 GOTO UPDKO`,
    ` C ENDIF`,
    ` * BBS Time Zone`,
    ` C SCRTZC IFNE wCfgTIMZON`,
    ` C EVAL wCfgKey = 'TIMZON'`,
    ` C wCfgKey CHAIN PCONFIG 81`,
    ` C N81 EVAL CNFVAL = SCRTZC`,
    ` C N81 UPDATE CONFIG 81`,
    ` C 81 GOTO UPDKO`,
    ` C ENDIF`,
    ` * Closed to New Users?`,
    ` C SCRCLO IFNE wCfgCLOSED`,
    ` C EVAL wCfgKey = 'CLOSED'`,
    ` C wCfgKey CHAIN PCONFIG 81`,
    ` C N81 EVAL CNFVAL = SCRCLO`,
    ` C N81 UPDATE CONFIG 81`,
    ` C 81 GOTO UPDKO`,
    ` C ENDIF`,
    ` * Show Access Level Description?`,
    ` C SCRSAL IFNE wCfgSHWALD`,
    ` C EVAL wCfgKey = 'SHWALD'`,
    ` C wCfgKey CHAIN PCONFIG 81`,
    ` C N81 EVAL CNFVAL = SCRSAL`,
    ` C N81 UPDATE CONFIG 81`,
    ` C 81 GOTO UPDKO`,
    ` C ENDIF`,
    ` * Show Welcome screen?`,
    ` C SCRSWE IFNE wCfgSHWWEL`,
    ` C EVAL wCfgKey = 'SHWWEL'`,
    ` C wCfgKey CHAIN PCONFIG 81`,
    ` C N81 EVAL CNFVAL = SCRSWE`,
    ` C N81 UPDATE CONFIG 81`,
    ` C 81 GOTO UPDKO`,
    ` C ENDIF`,
    ` * Hide SysOp from User Lists?`,
    ` C SCRHID IFNE wCfgHIDESO`,
    ` C EVAL wCfgKey = 'HIDESO'`,
    ` C wCfgKey CHAIN PCONFIG 81`,
    ` C N81 EVAL CNFVAL = SCRHID`,
    ` C N81 UPDATE CONFIG 81`,
    ` C 81 GOTO UPDKO`,
    ` C ENDIF`,
    ` * Hide SysOp from User Lists?`,
    ` C SCRHLS IFNE wCfgHLSOMS`,
    ` C EVAL wCfgKey = 'HLSOMS'`,
    ` C wCfgKey CHAIN PCONFIG 81`,
    ` C N81 EVAL CNFVAL = SCRHLS`,
    ` C N81 UPDATE CONFIG 81`,
    ` C 81 GOTO UPDKO`,
    ` C ENDIF`,
    ` * Notify New User Registration`,
    ` C SCRNFY IFNE wCfgNUSRNF`,
    ` C EVAL wCfgKey = 'NUSRNF'`,
    ` C wCfgKey CHAIN PCONFIG 81`,
    ` C N81 EVAL CNFVAL = SCRNFY`,
    ` C N81 UPDATE CONFIG 81`,
    ` C 81 GOTO UPDKO`,
    ` C ENDIF`,
    ` C UPDOK TAG`,
    ` C EVAL MSGLIN = cSavedOK`,
    ` C GOTO UPDEND`,
    ` C UPDKO TAG`,
    ` C EVAL MSGLIN = cSavedKO`,
    ` C UPDEND TAG`,
    ` C EVAL *IN80 = *ON`,
    ` C ENDSR`,
    ` **********************************************************************`,
    ` /copy 'CBKPCFGREA.rpgle'`,
    ``,
  ].join(`\n`);

  // Parse the member with include expansion and reference collection enabled.
  const cache = await parser.getDocs(uri, lines, { ignoreCache: true, withIncludes: true, collectReferences: true });
  expect(cache.includes.length).toBe(5);

  const wCfgKey = cache.find(`wCfgKey`);

  // Every reference recorded against the base member must cover exactly the
  // identifier text (RPG is case-insensitive, so compare uppercased).
  const baseRefs = wCfgKey.references.filter(ref => ref.uri === uri);
  for (const ref of baseRefs) {
    expect(lines.substring(ref.offset.position, ref.offset.end).toUpperCase()).toBe(`WCFGKEY`);
  }
  expect(baseRefs.length).toBe(21);

  // Fetch the content of each distinct file holding a reference once,
  // reusing the in-memory source for the base member itself.
  const contentByUri: { [uri: string]: string } = {};
  for (const refUri of new Set(wCfgKey.references.map(ref => ref.uri))) {
    contentByUri[refUri] = refUri === uri ? lines : await getFileContent(refUri);
  }

  // All references - including those found inside copy members - must point
  // exactly at the identifier.
  for (const ref of wCfgKey.references) {
    const text = contentByUri[ref.uri].substring(ref.offset.position, ref.offset.end);
    expect(text.toUpperCase()).toBe(`WCFGKEY`);
  }
});

// >>> new file boundary: tests/suite/sources.test.ts
import setupParser, { getFileContent, getSourcesList, getTestProjectsDir } from "../parserSetup";
import { test } from "vitest";
import path from "path";
import { fail } from "assert";
import Declaration from "../../language/models/declaration";
import Cache from "../../language/models/cache";
import { Reference } from "../../language/parserTypes";

const timeout = 1000 * 60 * 10; // 10 minutes

// The purpose of this file is to test the parser against all the sources in the sources directory to ensure it doesn't crash.
+ +test("Generic reference tests", { timeout }, async () => { + const projects = getTestProjectsDir(); + + for (const projectPath of projects) { + const parser = setupParser(projectPath); + const list = await getSourcesList(projectPath); + + for (let i = 0; i < list.length; i++) { + const relativePath = list[i]; + const basename = path.basename(relativePath); + + const baseContent = await getFileContent(relativePath); + + const ps = performance.now(); + const doc = await parser.getDocs(basename, baseContent, { collectReferences: true, ignoreCache: true, withIncludes: true }); + const pe = performance.now(); + + let cachedFiles: {[uri: string]: string} = {}; + let referencesCollected = 0; + let errorCount = 0; + + const printReference = (def: Declaration, content: string, ref: Reference) => { + console.log({ + def: def.name, + uri: ref.uri, + offset: ref.offset, + content: content.substring(ref.offset.position, ref.offset.end), + about: content.substring(ref.offset.position - 10, ref.offset.end + 10) + }) + } + + const checkReferences = async (def: Declaration) => { + const refs = def.references; + const uniqueUris = refs.map(r => r.uri).filter((value, index, self) => self.indexOf(value) === index); + + for (const refUri of uniqueUris) { + if (refUri === basename) { + cachedFiles[refUri] = baseContent; + } + + if (!cachedFiles[refUri]) { + cachedFiles[refUri] = await getFileContent(refUri); + } + } + + for (const ref of refs) { + const offsetContent = cachedFiles[ref.uri].substring(ref.offset.position, ref.offset.end); + + if (offsetContent.toUpperCase() === def.name.toUpperCase()) { + referencesCollected++; + } else { + errorCount++; + printReference(def, cachedFiles[ref.uri], ref); + } + } + } + + const checkScope = async (scope: Cache) => { + for (const def of [...scope.variables, ...scope.subroutines, ...scope.procedures, ...scope.constants, ...scope.structs, ...scope.files, ...scope.tags]) { + await checkReferences(def); + + if (def.subItems && 
def.subItems.length > 0) { + for (const sub of def.subItems) { + await checkReferences(sub); + } + } + + if (def.scope) { + await checkScope(def.scope); + } + } + } + + const ss = performance.now(); + await checkScope(doc); + const se = performance.now(); + + if (errorCount > 0) { + fail(`Found ${errorCount} errors in ${basename}`); + } + + // console.log(`Parsed ${basename} in ${pe - ps}ms. Validated in ${se-ss} (${i+1}/${list.length}). Found ${referencesCollected} references.`); + } + } +}); \ No newline at end of file