Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
142 changes: 142 additions & 0 deletions configProcessor.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,140 @@ import (
"sync"
)

// getWorkspaceEntryPointFiles computes absolute file paths for workspace package
// entry points (Main, Module, and root Exports) so they are not flagged as orphans.
func getWorkspaceEntryPointFiles(resolverManager *ResolverManager) map[string]bool {
	if resolverManager == nil || resolverManager.monorepoContext == nil {
		return nil
	}

	ctx := resolverManager.monorepoContext
	result := map[string]bool{}

	for _, pkgPath := range ctx.PackageToPath {
		cfg, err := ctx.GetPackageConfig(pkgPath)
		if err != nil || cfg == nil {
			continue
		}

		// Gather relative entry-point candidates from package.json fields,
		// keeping the Main-then-Module ordering.
		var relPaths []string
		for _, field := range []string{cfg.Main, cfg.Module} {
			if field != "" {
				relPaths = append(relPaths, field)
			}
		}
		// The "." (root) export in the exports field contributes more candidates.
		relPaths = append(relPaths, collectExportsRootPaths(cfg.Exports)...)

		normalized := NormalizePathForInternal(pkgPath)
		for _, rel := range relPaths {
			resolveEntryPointCandidate(normalized, rel, result)
		}
	}

	if len(result) == 0 {
		return nil
	}
	return result
}

// resolveEntryPointCandidate resolves a relative entry point path to an absolute
// file path and adds it to the entryPoints map. Resolution mirrors Node-style
// lookup order: exact path first, then extension-completed siblings, then
// index files inside the path treated as a directory.
func resolveEntryPointCandidate(pkgPath string, candidate string, entryPoints map[string]bool) {
	rel := strings.TrimPrefix(candidate, "./")
	base := NormalizePathForInternal(filepath.Clean(pkgPath + "/" + rel))

	exts := []string{".ts", ".tsx", ".js", ".jsx"}

	// tryAdd records the path and reports success when it names an existing file.
	tryAdd := func(p string) bool {
		if fileExists(p) {
			entryPoints[p] = true
			return true
		}
		return false
	}

	if tryAdd(base) {
		return
	}
	for _, ext := range exts {
		if tryAdd(base + ext) {
			return
		}
	}
	for _, ext := range exts {
		if tryAdd(base + "/index" + ext) {
			return
		}
	}
}

// collectExportsRootPaths extracts all file path strings from the "." (root)
// export entry in a package.json exports field. Handles:
//   - string: "./src/index.ts"
//   - map with "." key: { ".": "./src/index.ts" } or { ".": { "import": "./src/index.ts" } }
//   - map without "." (sugar for root): { "import": "./src/index.ts", "types": "./src/index.ts" }
func collectExportsRootPaths(exports interface{}) []string {
	switch v := exports.(type) {
	case nil:
		return nil
	case string:
		// Shorthand form: "exports": "./src/index.ts"
		return []string{v}
	case map[string]interface{}:
		// If any key is a subpath (starts with "."), the map is keyed by
		// subpath and only the "." entry describes the root export.
		for key := range v {
			if strings.HasPrefix(key, ".") {
				if root, ok := v["."]; ok {
					return collectPathsFromExportValue(root)
				}
				return nil
			}
		}
		// Otherwise the whole map is a condition map for the root export
		// (condition-map sugar).
		return collectPathsFromExportValue(v)
	default:
		return nil
	}
}

// collectPathsFromExportValue recursively extracts all string file paths from
// an export value. Per the Node.js "exports" specification, a value may be:
//   - a string path ("./src/index.ts"),
//   - a condition map ({ "import": ..., "require": ... }), possibly nested,
//   - an array of ordered fallback targets (["./modern.ts", "./legacy.js"]).
//
// Only relative paths (starting with ".") are collected; bare specifiers
// and null-like values are ignored.
func collectPathsFromExportValue(value interface{}) []string {
	switch v := value.(type) {
	case string:
		if strings.HasPrefix(v, ".") {
			return []string{v}
		}
		return nil
	case map[string]interface{}:
		var paths []string
		for _, entry := range v {
			paths = append(paths, collectPathsFromExportValue(entry)...)
		}
		return paths
	case []interface{}:
		// Array fallback form: collect every candidate so all possible
		// entry-point files are accounted for.
		var paths []string
		for _, entry := range v {
			paths = append(paths, collectPathsFromExportValue(entry)...)
		}
		return paths
	default:
		// nil, numbers, booleans, or anything else carries no paths.
		return nil
	}
}

func fileExists(path string) bool {
info, err := os.Stat(path)
return err == nil && !info.IsDir()
}

// RestrictedDevDependenciesUsageViolation represents a violation where a dev dependency is used in production code
type RestrictedDevDependenciesUsageViolation struct {
DevDependency string `json:"devDependency"`
Expand Down Expand Up @@ -316,13 +450,20 @@ func processRuleChecks(
wg.Add(1)
go func() {
defer wg.Done()
// Compute workspace entry points so that package entry files from
// followed monorepo packages are not flagged as orphans.
var workspaceEntryPoints map[string]bool
if rule.FollowMonorepoPackages.IsEnabled() {
workspaceEntryPoints = getWorkspaceEntryPointFiles(resolverManager)
}
orphanFiles := FindOrphanFiles(
ruleTree,
rule.OrphanFilesDetection.ValidEntryPoints,
rule.OrphanFilesDetection.GraphExclude,
rule.OrphanFilesDetection.IgnoreTypeImports,
fullRulePath,
moduleSuffixVariants,
workspaceEntryPoints,
)

mu.Lock()
Expand Down Expand Up @@ -387,6 +528,7 @@ func processRuleChecks(
rule.MissingNodeModulesDetection.IncludeModules,
rule.MissingNodeModulesDetection.ExcludeModules,
rulePathNodeModules,
resolverManager,
)

mu.Lock()
Expand Down
7 changes: 6 additions & 1 deletion monorepo.go
Original file line number Diff line number Diff line change
Expand Up @@ -187,7 +187,12 @@ func (ctx *MonorepoContext) FindWorkspacePackages(root string, excludeFilePatter
continue
}

if strings.HasSuffix(pattern, "/**") {
if strings.HasSuffix(pattern, "/**/*") {
positive = append(positive, positivePattern{
basePath: strings.TrimSuffix(pattern, "/**/*"),
isDeep: true,
})
} else if strings.HasSuffix(pattern, "/**") {
positive = append(positive, positivePattern{
basePath: strings.TrimSuffix(pattern, "/**"),
isDeep: true,
Expand Down
48 changes: 48 additions & 0 deletions monorepo_discovery_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -592,6 +592,54 @@ func TestWorkspacesArrayAndPackagesObject(t *testing.T) {
}
}

// TestPnpmWorkspaceDeepGlobStarStar verifies that /**/* glob patterns in
// pnpm-workspace.yaml are recognised as deep patterns. This was a real-world
// root cause: repos using /**/* instead of /** had zero subpackage resolvers
// discovered, making the per-file resolver lookup a no-op and causing all
// workspace dependencies to be falsely flagged as missing.
func TestPnpmWorkspaceDeepGlobStarStar(t *testing.T) {
	// t.TempDir handles creation and cleanup, removing the manual
	// MkdirTemp/RemoveAll bookkeeping.
	tmpDir := t.TempDir()

	files := map[string]string{
		// pnpm-workspace.yaml using /**/* (not /**)
		"pnpm-workspace.yaml": `packages:
- 'packages/**/*'
`,
		"package.json":                           `{}`,
		"packages/apps/app1/package.json":        `{ "name": "@scope/app1", "version": "1.0.0" }`,
		"packages/libs/shared/package.json":      `{ "name": "@scope/shared", "version": "1.0.0" }`,
		"packages/libs/nested/deep/package.json": `{ "name": "@scope/deep", "version": "1.0.0" }`,
	}

	// Fail fast on fixture-setup errors instead of silently ignoring them —
	// a failed write here would otherwise surface as a confusing discovery failure.
	for path, content := range files {
		fullPath := filepath.Join(tmpDir, path)
		if err := os.MkdirAll(filepath.Dir(fullPath), 0755); err != nil {
			t.Fatalf("Failed to create dir for %s: %v", path, err)
		}
		if err := os.WriteFile(fullPath, []byte(content), 0644); err != nil {
			t.Fatalf("Failed to write %s: %v", path, err)
		}
	}

	monorepoCtx := DetectMonorepo(tmpDir)
	if monorepoCtx == nil {
		t.Fatalf("Failed to detect monorepo via pnpm-workspace.yaml")
	}

	monorepoCtx.FindWorkspacePackages(monorepoCtx.WorkspaceRoot, []GlobMatcher{})

	expected := []string{"@scope/app1", "@scope/shared", "@scope/deep"}
	for _, pkg := range expected {
		if _, ok := monorepoCtx.PackageToPath[pkg]; !ok {
			t.Errorf("Expected to find package %s with /**/* glob pattern, but didn't", pkg)
		}
	}

	if len(monorepoCtx.PackageToPath) != len(expected) {
		t.Errorf("Expected %d packages, got %d: %v", len(expected), len(monorepoCtx.PackageToPath), monorepoCtx.PackageToPath)
	}
}

func TestPnpmTakesPrecedenceOverPackageJson(t *testing.T) {
tmpDir, err := os.MkdirTemp("", "rev-dep-pnpm-vs-packagejson")
if err != nil {
Expand Down
19 changes: 12 additions & 7 deletions nodeModules.go
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ func NodeModulesCmd(
}

if listMissing {
missingResults := GetMissingNodeModulesFromTree(minimalTree, modulesToInclude, modulesToExclude, cwdNodeModules)
missingResults := GetMissingNodeModulesFromTree(minimalTree, modulesToInclude, modulesToExclude, cwdNodeModules, resolverManager)
return formatMissingNodeModulesResults(missingResults, cwd, countFlag, groupByModule, groupByFile, groupByModuleFilesCount)
}

Expand Down Expand Up @@ -434,20 +434,25 @@ func GetMissingNodeModulesFromTree(
modulesToInclude []string,
modulesToExclude []string,
workingDirNodeModules map[string]bool,
resolverManager *ResolverManager,
) []MissingNodeModuleResult {
shouldIncludeModule := createShouldModuleByIncluded(modulesToInclude, modulesToExclude)
unresolved := map[string]map[string]bool{}

for filePath, fileDeps := range minimalTree {
// In monorepos, each file may belong to a different workspace package with its own
// package.json dependencies. Use per-file resolver lookup when available.
fileNodeModules := workingDirNodeModules
if resolverManager != nil {
resolver := resolverManager.GetResolverForFile(filePath)
if resolver != nil {
fileNodeModules = resolver.nodeModules
}
}
for _, dependency := range fileDeps {
// If following monorepo packages is enabled, files in minimal tree might not belong to the cwd.
// During resolution, node modules are looked up by package.json that belongs to the file location
// To capture missing modules correctly (let's say for `app` that imports `shared` package), meaning
// Capture modules declared in `shared` package.json, used by files from `shared`, but bundled by app
//
if dependency.ResolvedType == NotResolvedModule || dependency.ResolvedType == NodeModule {
moduleName := GetNodeModuleName(dependency.Request)
if _, exists := workingDirNodeModules[moduleName]; !exists {
if _, exists := fileNodeModules[moduleName]; !exists {
setFilePathInNodeModuleFilesMap(&unresolved, moduleName, filePath)
}
}
Expand Down
57 changes: 57 additions & 0 deletions nodeModules_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -1502,3 +1502,60 @@ func TestUsedNodeModulesGroupedByEntryPoint(t *testing.T) {
t.Errorf("Incorrect grouped-by-entry-point modules output for glob '%s'. Expected '%s'", resultFromGlob, expectedGrouped)
}
}

// TestGetMissingNodeModulesFromTree_PnpmWorkspace verifies that workspace-specific
// dependencies are not falsely flagged as missing in a pnpm strict node_modules layout.
//
// In pnpm monorepos each workspace package has its own node_modules/ directory.
// The root node_modules/ only contains root-level dependencies, so checking only
// the root resolver produces false positives for every workspace dependency.
// The fix is per-file resolver lookup in GetMissingNodeModulesFromTree.
func TestGetMissingNodeModulesFromTree_PnpmWorkspace(t *testing.T) {
	const srcFile = "/workspace/packages/llm-clients/src/index.ts"

	deps := MinimalDependencyTree{
		srcFile: {
			// @ai-sdk/cerebras is declared in the workspace package.json and lives
			// only in packages/llm-clients/node_modules/ — not at the root.
			{Request: "@ai-sdk/cerebras", ResolvedType: NodeModule},
			// some-root-dep is installed at the root, so it must never be flagged.
			{Request: "some-root-dep", ResolvedType: NodeModule},
		},
	}

	rootModules := map[string]bool{
		"some-root-dep": true,
		// @ai-sdk/cerebras intentionally absent — pnpm strict layout.
	}
	root := &ModuleResolver{nodeModules: rootModules}
	workspace := &ModuleResolver{nodeModules: map[string]bool{
		"@ai-sdk/cerebras": true,
		"some-root-dep":    true,
	}}

	manager := &ResolverManager{
		rootResolver: root,
		subpackageResolvers: []SubpackageResolver{
			{PkgPath: "/workspace/packages/llm-clients", Resolver: workspace},
		},
	}

	t.Run("workspace dependency not flagged as missing with per-file resolver", func(t *testing.T) {
		missing := GetMissingNodeModulesFromTree(deps, nil, nil, rootModules, manager)
		if len(missing) != 0 {
			t.Errorf("expected no missing modules, got: %v", missing)
		}
	})

	t.Run("workspace dependency falsely flagged without per-file resolver (regression guard)", func(t *testing.T) {
		// A nil resolverManager replicates the pre-fix behavior: only the root
		// nodeModules map is consulted, yielding a false positive for @ai-sdk/cerebras.
		missing := GetMissingNodeModulesFromTree(deps, nil, nil, rootModules, nil)
		if len(missing) != 1 || missing[0].ModuleName != "@ai-sdk/cerebras" {
			t.Errorf("expected @ai-sdk/cerebras to be falsely flagged (regression guard), got: %v", missing)
		}
	})
}
5 changes: 4 additions & 1 deletion orphanFiles.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ func FindOrphanFiles(
ignoreTypeImports bool,
cwd string,
moduleSuffixVariants map[string]bool,
additionalEntryPointFiles map[string]bool,
) []string {
// Create glob matchers for valid entry points and graph exclusions
entryPointGlobs := CreateGlobMatchers(validEntryPoints, cwd)
Expand Down Expand Up @@ -55,10 +56,12 @@ func FindOrphanFiles(
isReferenced := referencedFiles[filePath]
isEntryPoint := len(entryPointGlobs) > 0 && MatchesAnyGlobMatcher(filePath, entryPointGlobs, false)
isVariant := moduleSuffixVariants != nil && moduleSuffixVariants[filePath]
isWorkspaceEntryPoint := additionalEntryPointFiles != nil && additionalEntryPointFiles[filePath]

// A file is orphan if it's not referenced by other files AND it's not a valid entry point
// AND it's not a module-suffix variant (platform-specific sibling)
if !isReferenced && !isEntryPoint && !isVariant {
// AND it's not a workspace package entry point
if !isReferenced && !isEntryPoint && !isVariant && !isWorkspaceEntryPoint {
orphanFiles = append(orphanFiles, filePath)
}
}
Expand Down
Loading