TypeScript LSP plugin that finds GraphQL documents in your code and provides diagnostics, auto-complete and hover-information.
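For context, a minimal sketch of the kind of document the plugin picks up; the import and call shape follow gql.tada's `graphql()` API, but the schema fields are made up for illustration:

import { graphql } from 'gql.tada';

// graphqlsp locates the embedded document below and can offer diagnostics,
// completions, and hover information inside the template string.
const PokemonQuery = graphql(`
  query Pokemon($name: String!) {
    pokemon(name: $name) {
      id
      name
    }
  }
`);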

fix(graphqlsp): Fix unchecked index accesses (#335)

+5
.changeset/dry-moose-camp.md
···
+ ---
+ '@0no-co/graphqlsp': patch
+ ---
+
+ Address potential crashes on malformed TypeScript AST input (such as missing function arguments where they were previously assumed to be passed)
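For context, the mechanism behind this fix is the `noUncheckedIndexedAccess` compiler option enabled in both tsconfig files below: indexed reads become `T | undefined`, so accesses that previously assumed an element exists now need a guard or a non-null assertion. A rough sketch of the effect (illustrative values, not code from this repository):

const lines: string[] = ['first', 'second'];
const line = lines[5];               // typed string | undefined once the flag is on
// line.length;                      // compile error: 'line' is possibly 'undefined'
if (line) console.log(line.length);  // guard the value...
console.log(lines[0]!.length);       // ...or assert when the index is known to be valid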
+7 -2
packages/graphqlsp/src/ast/checks.ts
···
return false;
} else if (node.arguments.length < 1 || node.arguments.length > 2) {
return false;
- } else if (!ts.isStringLiteralLike(node.arguments[0])) {
+ } else if (!ts.isStringLiteralLike(node.arguments[0]!)) {
return false;
}
return checker ? isTadaGraphQLFunction(node.expression, checker) : false;
···
}
};
+ // As per check in `isGraphQLCall()` below, enforces arguments length
+ export type GraphQLCallNode = ts.CallExpression & {
+ arguments: [ts.Expression] | [ts.Expression, ts.Expression];
+ };
+
/** Checks if node is a gql.tada or regular graphql() call */
export const isGraphQLCall = (
node: ts.Node,
checker: ts.TypeChecker | undefined
- ): node is ts.CallExpression => {
+ ): node is GraphQLCallNode => {
return (
ts.isCallExpression(node) &&
node.arguments.length >= 1 &&
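A brief usage sketch of the new `GraphQLCallNode` narrowing (hypothetical caller; `isGraphQLCall` is the guard from the diff above, and the import path assumes a file inside packages/graphqlsp/src): once the guard passes, `arguments` is a one- or two-element tuple, so the document argument no longer needs an undefined check.

import ts from 'typescript';
import { isGraphQLCall } from './ast/checks';

declare const node: ts.Node;
declare const checker: ts.TypeChecker | undefined;

if (isGraphQLCall(node, checker)) {
  // node.arguments is [ts.Expression] | [ts.Expression, ts.Expression]
  const [document, fragments] = node.arguments;
  console.log(document.getText(), fragments?.getText());
}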
+3 -3
packages/graphqlsp/src/ast/index.ts
···
element.getStart()
);
- if (!definitions || !definitions.length) return fragments;
-
- const [fragment] = definitions;
+ const fragment = definitions && definitions[0];
+ if (!fragment) return fragments;
const externalSource = getSource(info, fragment.fileName);
if (!externalSource) return fragments;
···
if (
ts.isVariableStatement(node) &&
node.declarationList &&
+ node.declarationList.declarations[0] &&
node.declarationList.declarations[0].name.getText() === 'documents'
) {
const [declaration] = node.declarationList.declarations;
+3 -2
packages/graphqlsp/src/ast/resolve.ts
···
filename,
span.expression.getStart()
);
- if (!definitions || !definitions.length) return;
+
+ const def = definitions && definitions[0];
+ if (!def) return;
- const def = definitions[0];
const src = getSource(info, def.fileName);
if (!src) return;
+1 -1
packages/graphqlsp/src/ast/token.ts
···
}
}
- cPos += input[line].length + 1;
+ cPos += input[line]!.length + 1;
}
return foundToken;
+1 -1
packages/graphqlsp/src/autoComplete.ts
···
let stream: CharacterStream = new CharacterStream('');
for (let i = 0; i < lines.length; i++) {
- stream = new CharacterStream(lines[i]);
+ stream = new CharacterStream(lines[i]!);
while (!stream.eol()) {
style = parser.token(stream, state);
const code = callback(stream, state, style, i);
+24 -42
packages/graphqlsp/src/checkImports.ts
···
source.fileName,
imp.importClause.name.getStart()
);
- if (definitions && definitions.length) {
- const [def] = definitions;
+ const def = definitions && definitions[0];
+ if (def) {
if (def.fileName.includes('node_modules')) return;
const externalSource = getSource(info, def.fileName);
···
);
const names = fragmentsForImport.map(fragment => fragment.name.value);
- if (
- names.length &&
- !importSpecifierToFragments[imp.moduleSpecifier.getText()]
- ) {
- importSpecifierToFragments[imp.moduleSpecifier.getText()] = {
+ const key = imp.moduleSpecifier.getText();
+ let fragmentsEntry = importSpecifierToFragments[key];
+ if (names.length && fragmentsEntry) {
+ fragmentsEntry.fragments = fragmentsEntry.fragments.concat(names);
+ } else if (names.length && !fragmentsEntry) {
+ importSpecifierToFragments[key] = fragmentsEntry = {
start: imp.moduleSpecifier.getStart(),
length: imp.moduleSpecifier.getText().length,
fragments: names,
};
- } else if (names.length) {
- importSpecifierToFragments[
- imp.moduleSpecifier.getText()
- ].fragments =
- importSpecifierToFragments[
- imp.moduleSpecifier.getText()
- ].fragments.concat(names);
}
}
}
···
source.fileName,
imp.importClause.namedBindings.getStart()
);
- if (definitions && definitions.length) {
- const [def] = definitions;
+ const def = definitions && definitions[0];
+ if (def) {
if (def.fileName.includes('node_modules')) return;
const externalSource = getSource(info, def.fileName);
···
info
);
const names = fragmentsForImport.map(fragment => fragment.name.value);
- if (
- names.length &&
- !importSpecifierToFragments[imp.moduleSpecifier.getText()]
- ) {
- importSpecifierToFragments[imp.moduleSpecifier.getText()] = {
+ const key = imp.moduleSpecifier.getText();
+ let fragmentsEntry = importSpecifierToFragments[key];
+ if (names.length && fragmentsEntry) {
+ fragmentsEntry.fragments = fragmentsEntry.fragments.concat(names);
+ } else if (names.length && !fragmentsEntry) {
+ importSpecifierToFragments[key] = fragmentsEntry = {
start: imp.moduleSpecifier.getStart(),
length: imp.moduleSpecifier.getText().length,
fragments: names,
};
- } else if (names.length) {
- importSpecifierToFragments[
- imp.moduleSpecifier.getText()
- ].fragments =
- importSpecifierToFragments[
- imp.moduleSpecifier.getText()
- ].fragments.concat(names);
}
}
} else if (
···
source.fileName,
el.getStart()
);
- if (definitions && definitions.length) {
- const [def] = definitions;
+ const def = definitions && definitions[0];
+ if (def) {
if (def.fileName.includes('node_modules')) return;
const externalSource = getSource(info, def.fileName);
···
const names = fragmentsForImport.map(
fragment => fragment.name.value
);
- if (
- names.length &&
- !importSpecifierToFragments[imp.moduleSpecifier.getText()]
- ) {
- importSpecifierToFragments[imp.moduleSpecifier.getText()] = {
+ const key = imp.moduleSpecifier.getText();
+ let fragmentsEntry = importSpecifierToFragments[key];
+ if (names.length && fragmentsEntry) {
+ fragmentsEntry.fragments = fragmentsEntry.fragments.concat(names);
+ } else if (names.length && !fragmentsEntry) {
+ importSpecifierToFragments[key] = fragmentsEntry = {
start: imp.moduleSpecifier.getStart(),
length: imp.moduleSpecifier.getText().length,
fragments: names,
};
- } else if (names.length) {
- importSpecifierToFragments[
- imp.moduleSpecifier.getText()
- ].fragments =
- importSpecifierToFragments[
- imp.moduleSpecifier.getText()
- ].fragments.concat(names);
}
}
});
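The checkImports rewrite above repeats one pattern three times: read the record entry into a local once, let TypeScript narrow that local, and reuse it instead of repeating the bracket access. A stripped-down sketch of the same shape (names are illustrative, not the plugin's real types):

type FragmentsEntry = { start: number; length: number; fragments: string[] };
const importSpecifierToFragments: Record<string, FragmentsEntry> = {};

function recordFragments(key: string, start: number, length: number, names: string[]) {
  // Under noUncheckedIndexedAccess the lookup is FragmentsEntry | undefined.
  let entry = importSpecifierToFragments[key];
  if (names.length && entry) {
    entry.fragments = entry.fragments.concat(names);
  } else if (names.length && !entry) {
    importSpecifierToFragments[key] = entry = { start, length, fragments: names };
  }
}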
+36 -29
packages/graphqlsp/src/diagnostics.ts
···
: texts.join('-') + schema.version
);
- let tsDiagnostics;
+ let tsDiagnostics: ts.Diagnostic[];
if (cache.has(cacheKey)) {
tsDiagnostics = cache.get(cacheKey)!;
} else {
···
ref,
start,
length;
- if (callExpression.typeArguments) {
- const [typeQuery] = callExpression.typeArguments;
+ const typeQuery =
+ callExpression.typeArguments && callExpression.typeArguments[0];
+ if (typeQuery) {
start = typeQuery.getStart();
length = typeQuery.getEnd() - typeQuery.getStart();
···
if (
!initializer ||
!ts.isCallExpression(initializer) ||
+ !initializer.arguments[0] ||
!ts.isStringLiteralLike(initializer.arguments[0])
) {
// TODO: we can make this check more stringent where we also parse and resolve
···
info,
initializer.arguments[0],
foundFilename,
- ts.isArrayLiteralExpression(initializer.arguments[1])
+ initializer.arguments[1] &&
+ ts.isArrayLiteralExpression(initializer.arguments[1])
? initializer.arguments[1]
: undefined
);
···
fragments: fragmentNames,
start,
length,
- } = moduleSpecifierToFragments[moduleSpecifier];
+ } = moduleSpecifierToFragments[moduleSpecifier]!;
const missingFragments = Array.from(
new Set(fragmentNames.filter(x => !usedFragments.has(x)))
);
···
},
schema: SchemaRef,
info: ts.server.PluginCreateInfo
- ) => {
+ ): ts.Diagnostic[] => {
const filename = source.fileName;
const isCallExpression = info.config.templateIsCallExpression ?? true;
···
if (!diag.message.includes('Unknown directive')) return true;
const [message] = diag.message.split('(');
- const matches = /Unknown directive "@([^)]+)"/g.exec(message);
+ const matches =
+ message && /Unknown directive "@([^)]+)"/g.exec(message);
if (!matches) return true;
- const directiveNmae = matches[1];
- return !clientDirectives.has(directiveNmae);
+ const directiveName = matches[1];
+ return directiveName && !clientDirectives.has(directiveName);
})
.map(x => {
const { start, end } = x.range;
···
// We add the start.line to account for newline characters which are
// split out
let startChar = startingPosition + start.line;
- for (let i = 0; i <= start.line; i++) {
+ for (let i = 0; i <= start.line && i < lines.length; i++) {
if (i === start.line) startChar += start.character;
- else if (lines[i]) startChar += lines[i].length;
+ else if (lines[i]) startChar += lines[i]!.length;
}
let endChar = startingPosition + end.line;
- for (let i = 0; i <= end.line; i++) {
+ for (let i = 0; i <= end.line && i < lines.length; i++) {
if (i === end.line) endChar += end.character;
- else if (lines[i]) endChar += lines[i].length;
+ else if (lines[i]) endChar += lines[i]!.length;
}
const locatedInFragment = resolvedSpans.find(x => {
···
.flat()
.filter(Boolean) as Array<Diagnostic & { length: number; start: number }>;
- const tsDiagnostics = diagnostics.map(diag => ({
- file: source,
- length: diag.length,
- start: diag.start,
- category:
- diag.severity === 2
- ? ts.DiagnosticCategory.Warning
- : ts.DiagnosticCategory.Error,
- code:
- typeof diag.code === 'number'
- ? diag.code
- : diag.severity === 2
- ? USING_DEPRECATED_FIELD_CODE
- : SEMANTIC_DIAGNOSTIC_CODE,
- messageText: diag.message.split('\n')[0],
- }));
+ const tsDiagnostics = diagnostics.map(
+ diag =>
+ ({
+ file: source,
+ length: diag.length,
+ start: diag.start,
+ category:
+ diag.severity === 2
+ ? ts.DiagnosticCategory.Warning
+ : ts.DiagnosticCategory.Error,
+ code:
+ typeof diag.code === 'number'
+ ? diag.code
+ : diag.severity === 2
+ ? USING_DEPRECATED_FIELD_CODE
+ : SEMANTIC_DIAGNOSTIC_CODE,
+ messageText: diag.message.split('\n')[0],
+ } as ts.Diagnostic)
+ );
if (isCallExpression) {
const usageDiagnostics =
+27 -24
packages/graphqlsp/src/fieldUsage.ts
···
const isSomeOrEvery =
foundRef.name.text === 'every' || foundRef.name.text === 'some';
const callExpression = foundRef.parent;
- let func: ts.Expression | ts.FunctionDeclaration =
+ let func: ts.Expression | ts.FunctionDeclaration | undefined =
callExpression.arguments[0];
- if (ts.isIdentifier(func)) {
+ if (func && ts.isIdentifier(func)) {
// TODO: Scope utilities in checkFieldUsageInFile to deduplicate
const checker = info.languageService.getProgram()!.getTypeChecker();
···
}
if (
- ts.isFunctionDeclaration(func) ||
- ts.isFunctionExpression(func) ||
- ts.isArrowFunction(func)
+ func &&
+ (ts.isFunctionDeclaration(func) ||
+ ts.isFunctionExpression(func) ||
+ ts.isArrowFunction(func))
) {
const param = func.parameters[isReduce ? 1 : 0];
- const res = crawlScope(
- param.name,
- pathParts,
- allFields,
- source,
- info,
- true
- );
-
- if (
- ts.isVariableDeclaration(callExpression.parent) &&
- !isSomeOrEvery
- ) {
- const varRes = crawlScope(
- callExpression.parent.name,
+ if (param) {
+ const res = crawlScope(
+ param.name,
pathParts,
allFields,
source,
info,
true
);
- res.push(...varRes);
- }
- return res;
+ if (
+ ts.isVariableDeclaration(callExpression.parent) &&
+ !isSomeOrEvery
+ ) {
+ const varRes = crawlScope(
+ callExpression.parent.name,
+ pathParts,
+ allFields,
+ source,
+ info,
+ true
+ );
+ res.push(...varRes);
+ }
+
+ return res;
+ }
}
} else if (
ts.isPropertyAccessExpression(foundRef) &&
···
if (loc) {
aggregatedUnusedFields.add(parentField);
if (unusedChildren[parentField]) {
- unusedChildren[parentField].add(unusedField);
+ unusedChildren[parentField]!.add(unusedField);
} else {
unusedChildren[parentField] = new Set([unusedField]);
}
+7 -7
packages/graphqlsp/src/graphql/getFragmentSpreadSuggestions.ts
···
}
for (j = 1; j <= bLength; j++) {
- d[0][j] = j;
+ d[0]![j] = j;
}
for (i = 1; i <= aLength; i++) {
for (j = 1; j <= bLength; j++) {
const cost = a[i - 1] === b[j - 1] ? 0 : 1;
- d[i][j] = Math.min(
- d[i - 1][j] + 1,
- d[i][j - 1] + 1,
- d[i - 1][j - 1] + cost
+ d[i]![j] = Math.min(
+ d[i - 1]![j]! + 1,
+ d[i]![j - 1]! + 1,
+ d[i - 1]![j - 1]! + cost
);
if (i > 1 && j > 1 && a[i - 1] === b[j - 2] && a[i - 2] === b[j - 1]) {
- d[i][j] = Math.min(d[i][j], d[i - 2][j - 2] + cost);
+ d[i]![j] = Math.min(d[i]![j]!, d[i - 2]![j - 2]! + cost);
}
}
}
- return d[aLength][bLength];
+ return d[aLength]![bLength]!;
}
export type AllTypeInfo = {
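The suggestion-distance helper above takes the other route: its matrix is fully sized and filled before any read, so the change uses non-null assertions instead of guards. A tiny sketch of that trade-off (illustrative values):

const d: number[][] = [
  [0, 1],
  [1, 0],
];

// d[1][0] is typed number | undefined under noUncheckedIndexedAccess, so a
// '!' assertion restores plain number where the index is known to be valid.
const cell: number = d[1]![0]!;
console.log(cell);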
+12 -5
packages/graphqlsp/src/persisted.ts
···
foundFilename = filename;
if (callExpression.typeArguments) {
const [typeQuery] = callExpression.typeArguments;
- if (!ts.isTypeQueryNode(typeQuery)) return undefined;
+ if (!typeQuery || !ts.isTypeQueryNode(typeQuery)) return undefined;
const { node: found, filename: fileName } =
getDocumentReferenceFromTypeQuery(typeQuery, filename, info);
foundNode = found;
···
if (
!initializer ||
!ts.isCallExpression(initializer) ||
+ !initializer.arguments[0] ||
!ts.isStringLiteralLike(initializer.arguments[0])
- )
+ ) {
return undefined;
+ }
const hash = generateHashForDocument(
info,
initializer.arguments[0],
foundFilename,
- ts.isArrayLiteralExpression(initializer.arguments[1])
+ initializer.arguments[1] &&
+ ts.isArrayLiteralExpression(initializer.arguments[1])
? initializer.arguments[1]
: undefined
);
···
fragments.forEach(fragmentDefinition => {
text = `${text}\n\n${print(fragmentDefinition)}`;
});
- return createHash('sha256').update(print(parse(text))).digest('hex');
+ return createHash('sha256')
+ .update(print(parse(text)))
+ .digest('hex');
} else {
const externalSource = getSource(info, foundFilename)!;
const { fragments } = findAllCallExpressions(externalSource, info);
···
resolvedText = `${resolvedText}\n\n${print(fragmentDefinition)}`;
}
- return createHash('sha256').update(print(parse(resolvedText))).digest('hex');
+ return createHash('sha256')
+ .update(print(parse(resolvedText)))
+ .digest('hex');
}
};
+1
packages/graphqlsp/tsconfig.json
···
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"strict": true,
+ "noUncheckedIndexedAccess": true,
"skipLibCheck": true
}
}
+1
tsconfig.json
···
"forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,
/* Type Checking */
"strict": true /* Enable all strict type-checking options. */,
+ "noUncheckedIndexedAccess": true,
"skipLibCheck": true /* Skip type checking all .d.ts files. */
}
}