12 changes: 0 additions & 12 deletions internal/ast/diagnostic.go
@@ -195,17 +195,11 @@ func getDiagnosticPath(d *Diagnostic) string {
}

func EqualDiagnostics(d1, d2 *Diagnostic) bool {
if d1 == d2 {
return true
}
return EqualDiagnosticsNoRelatedInfo(d1, d2) &&
slices.EqualFunc(d1.RelatedInformation(), d2.RelatedInformation(), EqualDiagnostics)
}

func EqualDiagnosticsNoRelatedInfo(d1, d2 *Diagnostic) bool {
if d1 == d2 {
return true
}
return getDiagnosticPath(d1) == getDiagnosticPath(d2) &&
d1.Loc() == d2.Loc() &&
d1.Code() == d2.Code() &&
@@ -214,9 +208,6 @@ func EqualDiagnosticsNoRelatedInfo(d1, d2 *Diagnostic) bool {
}

func equalMessageChain(c1, c2 *Diagnostic) bool {
if c1 == c2 {
return true
}
return c1.Code() == c2.Code() &&
slices.Equal(c1.MessageArgs(), c2.MessageArgs()) &&
slices.EqualFunc(c1.MessageChain(), c2.MessageChain(), equalMessageChain)
@@ -267,9 +258,6 @@ func compareRelatedInfo(r1, r2 []*Diagnostic) int {
}

func CompareDiagnostics(d1, d2 *Diagnostic) int {
if d1 == d2 {
return 0
}
c := strings.Compare(getDiagnosticPath(d1), getDiagnosticPath(d2))
if c != 0 {
return c
19 changes: 19 additions & 0 deletions internal/checker/checker_test.go
@@ -61,6 +61,25 @@ foo.bar;`
}
}

func TestCheckSrcCompiler(t *testing.T) {
t.Parallel()

repo.SkipIfNoTypeScriptSubmodule(t)
fs := osvfs.FS()
fs = bundled.WrapFS(fs)

rootPath := tspath.CombinePaths(tspath.NormalizeSlashes(repo.TypeScriptSubmodulePath), "src", "compiler")

host := compiler.NewCompilerHost(rootPath, fs, bundled.LibPath(), nil, nil)
parsed, errors := tsoptions.GetParsedCommandLineOfConfigFile(tspath.CombinePaths(rootPath, "tsconfig.json"), &core.CompilerOptions{}, nil, host, nil)
assert.Equal(t, len(errors), 0, "Expected no errors in parsed command line")
p := compiler.NewProgram(compiler.ProgramOptions{
Config: parsed,
Host: host,
})
p.CheckSourceFiles(t.Context(), nil)
}

func BenchmarkNewChecker(b *testing.B) {
repo.SkipIfNoTypeScriptSubmodule(b)
fs := osvfs.FS()
3 changes: 2 additions & 1 deletion internal/compiler/checkerpool.go
@@ -16,6 +16,7 @@ type CheckerPool interface {
GetChecker(ctx context.Context) (*checker.Checker, func())
GetCheckerForFile(ctx context.Context, file *ast.SourceFile) (*checker.Checker, func())
GetCheckerForFileExclusive(ctx context.Context, file *ast.SourceFile) (*checker.Checker, func())
ForEachCheckerParallel(ctx context.Context, cb func(idx int, c *checker.Checker))
Files(checker *checker.Checker) iter.Seq[*ast.SourceFile]
}

@@ -97,7 +98,7 @@ func (p *checkerPool) createCheckers() {

// Runs `cb` for each checker in the pool concurrently, locking and unlocking checker mutexes as it goes,
// making it safe to call `ForEachCheckerParallel` from many threads simultaneously.
func (p *checkerPool) ForEachCheckerParallel(cb func(idx int, c *checker.Checker)) {
func (p *checkerPool) ForEachCheckerParallel(ctx context.Context, cb func(idx int, c *checker.Checker)) {
p.createCheckers()
wg := core.NewWorkGroup(p.program.SingleThreaded())
for idx, checker := range p.checkers {
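For context, a minimal sketch (not part of this diff) of the calling pattern the new context-aware signature is meant to support. It mirrors how GetGlobalDiagnostics in program.go (further down in this diff) aggregates per-checker results; collectGlobalDiags is a hypothetical helper assumed to live in the compiler package with its existing imports (context, slices, internal/ast, internal/checker).

// Hypothetical helper, not part of this PR. Each callback invocation writes
// only to its own slice index, so no synchronization is needed beyond the
// per-checker locking that ForEachCheckerParallel already performs.
func collectGlobalDiags(ctx context.Context, p *Program) []*ast.Diagnostic {
	perChecker := make([][]*ast.Diagnostic, p.checkerPool.Count())
	p.checkerPool.ForEachCheckerParallel(ctx, func(idx int, c *checker.Checker) {
		perChecker[idx] = c.GetGlobalDiagnostics()
	})
	return SortAndDeduplicateDiagnostics(slices.Concat(perChecker...))
}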
173 changes: 115 additions & 58 deletions internal/compiler/program.go
@@ -371,15 +371,23 @@ func (p *Program) BindSourceFiles() {
wg.RunAndWait()
}

func (p *Program) CheckSourceFiles(ctx context.Context, files []*ast.SourceFile) {
p.checkerPool.ForEachCheckerParallel(ctx, func(_ int, checker *checker.Checker) {
for file := range p.checkerPool.Files(checker) {
if files == nil || slices.Contains(files, file) {
checker.CheckSourceFile(ctx, file)
}
}
})
}

// Return the type checker associated with the program.
func (p *Program) GetTypeChecker(ctx context.Context) (*checker.Checker, func()) {
return p.checkerPool.GetChecker(ctx)
}

func (p *Program) ForEachCheckerParallel(cb func(idx int, c *checker.Checker)) {
if pool, ok := p.checkerPool.(*checkerPool); ok {
pool.ForEachCheckerParallel(cb)
}
func (p *Program) ForEachCheckerParallel(ctx context.Context, cb func(idx int, c *checker.Checker)) {
p.checkerPool.ForEachCheckerParallel(ctx, cb)
}

// Return a checker for the given file. We may have multiple checkers in concurrent scenarios and this
@@ -417,59 +425,39 @@ func (p *Program) GetResolvedModules() map[tspath.Path]module.ModeAwareCache[*mo
return p.resolvedModules
}

// collectDiagnostics collects diagnostics from a single file or all files.
// If sourceFile is non-nil, returns diagnostics for just that file.
// If sourceFile is nil, returns diagnostics for all files in the program.
func (p *Program) collectDiagnostics(ctx context.Context, sourceFile *ast.SourceFile, collect func(context.Context, *ast.SourceFile) []*ast.Diagnostic) []*ast.Diagnostic {
var result []*ast.Diagnostic
if sourceFile != nil {
result = collect(ctx, sourceFile)
} else {
for _, file := range p.files {
result = append(result, collect(ctx, file)...)
}
}
return SortAndDeduplicateDiagnostics(result)
}

func (p *Program) GetSyntacticDiagnostics(ctx context.Context, sourceFile *ast.SourceFile) []*ast.Diagnostic {
return p.collectDiagnostics(ctx, sourceFile, func(_ context.Context, file *ast.SourceFile) []*ast.Diagnostic {
return core.Concatenate(file.Diagnostics(), file.JSDiagnostics())
})
return p.getDiagnosticsHelper(ctx, sourceFile, false /*ensureBound*/, false /*ensureChecked*/, p.getSyntacticDiagnosticsForFile)
}

func (p *Program) GetBindDiagnostics(ctx context.Context, sourceFile *ast.SourceFile) []*ast.Diagnostic {
if sourceFile != nil {
binder.BindSourceFile(sourceFile)
} else {
p.BindSourceFiles()
}
return p.collectDiagnostics(ctx, sourceFile, func(_ context.Context, file *ast.SourceFile) []*ast.Diagnostic {
return file.BindDiagnostics()
})
return p.getDiagnosticsHelper(ctx, sourceFile, true /*ensureBound*/, false /*ensureChecked*/, p.getBindDiagnosticsForFile)
}

func (p *Program) GetSemanticDiagnostics(ctx context.Context, sourceFile *ast.SourceFile) []*ast.Diagnostic {
return p.collectDiagnostics(ctx, sourceFile, p.getSemanticDiagnosticsForFile)
return p.getDiagnosticsHelper(ctx, sourceFile, true /*ensureBound*/, true /*ensureChecked*/, p.getSemanticDiagnosticsForFile)
}

func (p *Program) GetSemanticDiagnosticsWithoutNoEmitFiltering(ctx context.Context, sourceFiles []*ast.SourceFile) map[*ast.SourceFile][]*ast.Diagnostic {
func (p *Program) GetSemanticDiagnosticsNoFilter(ctx context.Context, sourceFiles []*ast.SourceFile) map[*ast.SourceFile][]*ast.Diagnostic {
p.BindSourceFiles()
p.CheckSourceFiles(ctx, sourceFiles)
if ctx.Err() != nil {
return nil
}
result := make(map[*ast.SourceFile][]*ast.Diagnostic, len(sourceFiles))
for _, file := range sourceFiles {
result[file] = SortAndDeduplicateDiagnostics(p.getBindAndCheckDiagnosticsForFile(ctx, file))
result[file] = SortAndDeduplicateDiagnostics(p.getSemanticDiagnosticsForFileNotFilter(ctx, file))
}
return result
}

func (p *Program) GetSuggestionDiagnostics(ctx context.Context, sourceFile *ast.SourceFile) []*ast.Diagnostic {
return p.collectDiagnostics(ctx, sourceFile, p.getSuggestionDiagnosticsForFile)
return p.getDiagnosticsHelper(ctx, sourceFile, true /*ensureBound*/, true /*ensureChecked*/, p.getSuggestionDiagnosticsForFile)
}

func (p *Program) GetProgramDiagnostics() []*ast.Diagnostic {
return SortAndDeduplicateDiagnostics(core.Concatenate(
return SortAndDeduplicateDiagnostics(slices.Concat(
p.programDiagnostics,
p.includeProcessor.getDiagnostics(p).GetGlobalDiagnostics(),
))
p.includeProcessor.getDiagnostics(p).GetGlobalDiagnostics()))
}

func (p *Program) GetIncludeProcessorDiagnostics(sourceFile *ast.SourceFile) []*ast.Diagnostic {
@@ -998,26 +986,40 @@ func (p *Program) GetGlobalDiagnostics(ctx context.Context) []*ast.Diagnostic {
}

globalDiagnostics := make([][]*ast.Diagnostic, p.checkerPool.Count())
p.ForEachCheckerParallel(func(idx int, checker *checker.Checker) {
p.checkerPool.ForEachCheckerParallel(ctx, func(idx int, checker *checker.Checker) {
globalDiagnostics[idx] = checker.GetGlobalDiagnostics()
})

return SortAndDeduplicateDiagnostics(slices.Concat(globalDiagnostics...))
}

func (p *Program) GetDeclarationDiagnostics(ctx context.Context, sourceFile *ast.SourceFile) []*ast.Diagnostic {
return p.collectDiagnostics(ctx, sourceFile, p.getDeclarationDiagnosticsForFile)
return p.getDiagnosticsHelper(ctx, sourceFile, true /*ensureBound*/, true /*ensureChecked*/, p.getDeclarationDiagnosticsForFile)
}

func (p *Program) GetOptionsDiagnostics(ctx context.Context) []*ast.Diagnostic {
return SortAndDeduplicateDiagnostics(core.Concatenate(p.GetGlobalDiagnostics(ctx), p.getOptionsDiagnosticsOfConfigFile()))
return SortAndDeduplicateDiagnostics(append(p.GetGlobalDiagnostics(ctx), p.getOptionsDiagnosticsOfConfigFile()...))
}

func (p *Program) getOptionsDiagnosticsOfConfigFile() []*ast.Diagnostic {
// todo update p.configParsingDiagnostics when updateAndGetProgramDiagnostics is implemented
if p.Options() == nil || p.Options().ConfigFilePath == "" {
return nil
}
return p.GetConfigFileParsingDiagnostics()
return p.GetConfigFileParsingDiagnostics() // TODO: actually call getDiagnosticsHelper on config path
}

func (p *Program) getSyntacticDiagnosticsForFile(ctx context.Context, sourceFile *ast.SourceFile) []*ast.Diagnostic {
return core.Concatenate(sourceFile.Diagnostics(), sourceFile.JSDiagnostics())
}

func (p *Program) getBindDiagnosticsForFile(ctx context.Context, sourceFile *ast.SourceFile) []*ast.Diagnostic {
// TODO: restore this; tsgo's main depends on this function binding all files for timing.
// if checker.SkipTypeChecking(sourceFile, p.compilerOptions) {
// return nil
// }

return sourceFile.BindDiagnostics()
}

func FilterNoEmitSemanticDiagnostics(diagnostics []*ast.Diagnostic, options *core.CompilerOptions) []*ast.Diagnostic {
@@ -1030,26 +1032,40 @@ func FilterNoEmitSemanticDiagnostics(diagnostics []*ast.Diagnostic, options *cor
}

func (p *Program) getSemanticDiagnosticsForFile(ctx context.Context, sourceFile *ast.SourceFile) []*ast.Diagnostic {
return core.Concatenate(
FilterNoEmitSemanticDiagnostics(p.getBindAndCheckDiagnosticsForFile(ctx, sourceFile), p.Options()),
return slices.Concat(
FilterNoEmitSemanticDiagnostics(p.getSemanticDiagnosticsForFileNotFilter(ctx, sourceFile), p.Options()),
p.GetIncludeProcessorDiagnostics(sourceFile),
)
}

// getBindAndCheckDiagnosticsForFile gets semantic diagnostics for a single file,
// including bind diagnostics, checker diagnostics, and handling of @ts-ignore/@ts-expect-error directives.
func (p *Program) getBindAndCheckDiagnosticsForFile(ctx context.Context, sourceFile *ast.SourceFile) []*ast.Diagnostic {
func (p *Program) getSemanticDiagnosticsForFileNotFilter(ctx context.Context, sourceFile *ast.SourceFile) []*ast.Diagnostic {
compilerOptions := p.Options()
if checker.SkipTypeChecking(sourceFile, compilerOptions, p, false) {
return nil
}

fileChecker, done := p.checkerPool.GetCheckerForFile(ctx, sourceFile)
defer done()

// Getting a checker will force a bind, so this will be populated.
var fileChecker *checker.Checker
var done func()
if sourceFile != nil {
fileChecker, done = p.checkerPool.GetCheckerForFile(ctx, sourceFile)
defer done()
}
diags := slices.Clip(sourceFile.BindDiagnostics())

Copilot AI Dec 6, 2025

This function will panic if sourceFile is nil. The code on lines 1047-1052 attempts to handle the nil case by conditionally getting a file checker, but then line 1053 unconditionally accesses sourceFile.BindDiagnostics(), which will cause a nil pointer dereference.

Either remove the nil handling code (lines 1047-1052) if this function should never be called with nil, or add a nil check before line 1053 to handle the nil case properly.

Suggested change
diags := slices.Clip(sourceFile.BindDiagnostics())
var diags []*ast.Diagnostic
if sourceFile != nil {
diags = slices.Clip(sourceFile.BindDiagnostics())
} else {
diags = nil
}

diags = append(diags, fileChecker.GetDiagnostics(ctx, sourceFile)...)
// Ask for diags from all checkers; checking one file may add diagnostics to other files.
// These are deduplicated later.
checkerDiags := make([][]*ast.Diagnostic, p.checkerPool.Count())
p.checkerPool.ForEachCheckerParallel(ctx, func(idx int, checker *checker.Checker) {
if sourceFile == nil || checker == fileChecker {
checkerDiags[idx] = checker.GetDiagnostics(ctx, sourceFile)
}
})
if ctx.Err() != nil {
return nil
}

diags = append(diags, slices.Concat(checkerDiags...)...)

// !!! This should be rewritten to work like getBindAndCheckDiagnosticsForFileNoCache.

isPlainJS := ast.IsPlainJSFile(sourceFile, compilerOptions.CheckJs)
if isPlainJS {
@@ -1131,12 +1147,28 @@ func (p *Program) getSuggestionDiagnosticsForFile(ctx context.Context, sourceFil
return nil
}

fileChecker, done := p.checkerPool.GetCheckerForFile(ctx, sourceFile)
defer done()
var fileChecker *checker.Checker
var done func()
if sourceFile != nil {
fileChecker, done = p.checkerPool.GetCheckerForFile(ctx, sourceFile)
defer done()
}

// Getting a checker will force a bind, so this will be populated.
diags := slices.Clip(sourceFile.BindSuggestionDiagnostics)

Copilot AI Dec 6, 2025

This function will panic if sourceFile is nil. The code on lines 1150-1155 attempts to handle the nil case by conditionally getting a file checker, but then line 1157 unconditionally accesses sourceFile.BindSuggestionDiagnostics, which will cause a nil pointer dereference.

Either remove the nil handling code (lines 1150-1155) if this function should never be called with nil, or add a nil check before line 1157 to handle the nil case properly.

Suggested change
diags := slices.Clip(sourceFile.BindSuggestionDiagnostics)
var diags []*ast.Diagnostic
if sourceFile != nil {
diags = slices.Clip(sourceFile.BindSuggestionDiagnostics)
} else {
diags = []*ast.Diagnostic{}
}

diags = append(diags, fileChecker.GetSuggestionDiagnostics(ctx, sourceFile)...)

checkerDiags := make([][]*ast.Diagnostic, p.checkerPool.Count())
p.checkerPool.ForEachCheckerParallel(ctx, func(idx int, checker *checker.Checker) {
if sourceFile == nil || checker == fileChecker {
checkerDiags[idx] = checker.GetSuggestionDiagnostics(ctx, sourceFile)
} else {
// !!! is there any case where suggestion diagnostics are produced in other checkers?
}
})
if ctx.Err() != nil {
return nil
}

diags = append(diags, slices.Concat(checkerDiags...)...)

return diags
}
@@ -1189,6 +1221,29 @@ func compactAndMergeRelatedInfos(diagnostics []*ast.Diagnostic) []*ast.Diagnosti
return diagnostics[:j]
}

func (p *Program) getDiagnosticsHelper(ctx context.Context, sourceFile *ast.SourceFile, ensureBound bool, ensureChecked bool, getDiagnostics func(context.Context, *ast.SourceFile) []*ast.Diagnostic) []*ast.Diagnostic {
if sourceFile != nil {
if ensureBound {
binder.BindSourceFile(sourceFile)
}
return SortAndDeduplicateDiagnostics(getDiagnostics(ctx, sourceFile))
}
if ensureBound {
p.BindSourceFiles()
}
if ensureChecked {
p.CheckSourceFiles(ctx, nil)
if ctx.Err() != nil {
return nil
}
}
var result []*ast.Diagnostic
for _, file := range p.files {
result = append(result, getDiagnostics(ctx, file)...)
}
return SortAndDeduplicateDiagnostics(result)
}

func (p *Program) LineCount() int {
var count int
for _, file := range p.files {
@@ -1212,23 +1267,23 @@ func (p *Program) SymbolCount() int {
}
var val atomic.Uint32
val.Store(uint32(count))
p.ForEachCheckerParallel(func(_ int, c *checker.Checker) {
p.checkerPool.ForEachCheckerParallel(context.Background(), func(idx int, c *checker.Checker) {

Copilot AI Dec 6, 2025

Calling ForEachCheckerParallel with context.Background() will cause a panic in the project CheckerPool implementation. The project CheckerPool's ForEachCheckerParallel method (lines 106-108 in internal/project/checkerpool.go) panics when the context has no request ID.

The counting methods SymbolCount(), TypeCount(), and InstantiationCount() don't receive a context parameter, so they use context.Background(). This will work fine with the compiler's checkerPool but will panic when used with a project CheckerPool.

Consider either:

  1. Adding a context parameter to these counting methods
  2. Creating a variant of ForEachCheckerParallel that doesn't require a request ID for read-only operations like counting
  3. Using the compiler's checkerPool directly instead of the interface when the implementation is known

val.Add(c.SymbolCount)
})
return int(val.Load())
}

func (p *Program) TypeCount() int {
var val atomic.Uint32
p.ForEachCheckerParallel(func(_ int, c *checker.Checker) {
p.checkerPool.ForEachCheckerParallel(context.Background(), func(idx int, c *checker.Checker) {

Copilot AI Dec 6, 2025

Calling ForEachCheckerParallel with context.Background() will cause a panic in the project CheckerPool implementation. The project CheckerPool's ForEachCheckerParallel method (lines 106-108 in internal/project/checkerpool.go) panics when the context has no request ID.

The TypeCount() method doesn't receive a context parameter, so it uses context.Background(). This will work fine with the compiler's checkerPool but will panic when used with a project CheckerPool.

Consider either:

  1. Adding a context parameter to this method
  2. Creating a variant of ForEachCheckerParallel that doesn't require a request ID for read-only operations like counting
  3. Using the compiler's checkerPool directly instead of the interface when the implementation is known

val.Add(c.TypeCount)
})
return int(val.Load())
}

func (p *Program) InstantiationCount() int {
var val atomic.Uint32
p.ForEachCheckerParallel(func(_ int, c *checker.Checker) {
p.checkerPool.ForEachCheckerParallel(context.Background(), func(idx int, c *checker.Checker) {

Copilot AI Dec 6, 2025

Calling ForEachCheckerParallel with context.Background() will cause a panic in the project CheckerPool implementation. The project CheckerPool's ForEachCheckerParallel method (lines 106-108 in internal/project/checkerpool.go) panics when the context has no request ID.

The InstantiationCount() method doesn't receive a context parameter, so it uses context.Background(). This will work fine with the compiler's checkerPool but will panic when used with a project CheckerPool.

Consider either:

  1. Adding a context parameter to this method
  2. Creating a variant of ForEachCheckerParallel that doesn't require a request ID for read-only operations like counting
  3. Using the compiler's checkerPool directly instead of the interface when the implementation is known

val.Add(c.TotalInstantiationCount)
})
return int(val.Load())
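One possible way to address the review comments above (a sketch only, not code from this PR) is option 1: give the counting helpers a context parameter so that pool implementations that validate the context, such as the project CheckerPool with its request-ID requirement, are never handed context.Background(). The TypeCountWithContext name is hypothetical.

// Hypothetical variant of TypeCount that threads the caller's context
// through to the pool instead of using context.Background().
func (p *Program) TypeCountWithContext(ctx context.Context) int {
	var val atomic.Uint32
	p.checkerPool.ForEachCheckerParallel(ctx, func(_ int, c *checker.Checker) {
		val.Add(c.TypeCount)
	})
	return int(val.Load())
}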
@@ -1323,6 +1378,8 @@ type SourceMapEmitResult struct {
}

func (p *Program) Emit(ctx context.Context, options EmitOptions) *EmitResult {
// !!! performance measurement
p.BindSourceFiles()
if options.EmitOnly != EmitOnlyForcedDts {
result := HandleNoEmitOnError(
ctx,
2 changes: 1 addition & 1 deletion internal/execute/incremental/program.go
@@ -262,7 +262,7 @@ func (p *Program) collectSemanticDiagnosticsOfAffectedFiles(ctx context.Context,
}

// Get their diagnostics and cache them
diagnosticsPerFile := p.program.GetSemanticDiagnosticsWithoutNoEmitFiltering(ctx, affectedFiles)
diagnosticsPerFile := p.program.GetSemanticDiagnosticsNoFilter(ctx, affectedFiles)
// commit changes if no err
if ctx.Err() != nil {
return