diff --git a/pkg/github/issues.go b/pkg/github/issues.go
index cd7085550..aa8a9b775 100644
--- a/pkg/github/issues.go
+++ b/pkg/github/issues.go
@@ -13,6 +13,7 @@ import (
 	ghErrors "github.com/github/github-mcp-server/pkg/errors"
 	"github.com/github/github-mcp-server/pkg/inventory"
 	"github.com/github/github-mcp-server/pkg/octicons"
+	"github.com/github/github-mcp-server/pkg/response"
 	"github.com/github/github-mcp-server/pkg/sanitize"
 	"github.com/github/github-mcp-server/pkg/scopes"
 	"github.com/github/github-mcp-server/pkg/translations"
@@ -1601,9 +1602,16 @@ func ListIssues(t translations.TranslationHelperFunc) inventory.ServerTool {
 				totalCount = fragment.TotalCount
 			}
 
-			// Create response with issues
-			response := map[string]any{
-				"issues": issues,
+			optimizedIssues, err := response.OptimizeList(issues,
+				response.WithCollectionExtractors(map[string][]string{"labels": {"name"}}),
+			)
+			if err != nil {
+				return nil, nil, fmt.Errorf("failed to optimize issues: %w", err)
+			}
+
+			// Wrap optimized issues with pagination metadata
+			issueResponse := map[string]any{
+				"issues": json.RawMessage(optimizedIssues),
 				"pageInfo": map[string]any{
 					"hasNextPage":     pageInfo.HasNextPage,
 					"hasPreviousPage": pageInfo.HasPreviousPage,
@@ -1612,7 +1620,7 @@ func ListIssues(t translations.TranslationHelperFunc) inventory.ServerTool {
 				},
 				"totalCount": totalCount,
 			}
-			out, err := json.Marshal(response)
+			out, err := json.Marshal(issueResponse)
 			if err != nil {
 				return nil, nil, fmt.Errorf("failed to marshal issues: %w", err)
 			}
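A quick illustration of what this buys for `list_issues`. The sketch below is a reviewer's sketch, not code from this PR: the issue shape is a stand-in for the real GraphQL fragment, and the printed line assumes the options used in the hunk above.

```go
package main

import (
	"fmt"

	"github.com/github/github-mcp-server/pkg/response"
)

func main() {
	// Stand-in issue shape; the real tool feeds the GraphQL issue fragment.
	issues := []map[string]any{{
		"number": 42,
		"title":  "Crash  on\nstartup",
		"state":  "OPEN",
		"url":    "https://api.github.com/repos/o/r/issues/42", // stripped: URL key
		"labels": []any{
			map[string]any{"name": "bug"},
			map[string]any{"name": "p1"},
		},
		"author": map[string]any{"login": "octocat"},
	}}

	out, err := response.OptimizeList(issues,
		response.WithCollectionExtractors(map[string][]string{"labels": {"name"}}),
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// [{"author.login":"octocat","labels":"bug, p1","number":42,"state":"OPEN","title":"Crash on startup"}]
}
```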
"Issue should have number") + assert.NotNil(t, m["title"], "Issue should have title") + assert.NotNil(t, m["state"], "Issue should have state") } }) } diff --git a/pkg/github/pullrequests.go b/pkg/github/pullrequests.go index 58edc07dc..edd76ff29 100644 --- a/pkg/github/pullrequests.go +++ b/pkg/github/pullrequests.go @@ -16,6 +16,7 @@ import ( ghErrors "github.com/github/github-mcp-server/pkg/errors" "github.com/github/github-mcp-server/pkg/inventory" "github.com/github/github-mcp-server/pkg/octicons" + "github.com/github/github-mcp-server/pkg/response" "github.com/github/github-mcp-server/pkg/sanitize" "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" @@ -1168,7 +1169,14 @@ func ListPullRequests(t translations.TranslationHelperFunc) inventory.ServerTool } } - r, err := json.Marshal(prs) + r, err := response.OptimizeList(prs, + response.WithPreservedFields("html_url", "draft"), + response.WithCollectionExtractors(map[string][]string{ + "labels": {"name"}, + "requested_reviewers": {"login"}, + "requested_teams": {"name"}, + }), + ) if err != nil { return utils.NewToolResultErrorFromErr("failed to marshal response", err), nil, nil } diff --git a/pkg/github/repositories.go b/pkg/github/repositories.go index 4433fe64c..b6a1f17dd 100644 --- a/pkg/github/repositories.go +++ b/pkg/github/repositories.go @@ -13,6 +13,7 @@ import ( ghErrors "github.com/github/github-mcp-server/pkg/errors" "github.com/github/github-mcp-server/pkg/inventory" "github.com/github/github-mcp-server/pkg/octicons" + "github.com/github/github-mcp-server/pkg/response" "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" @@ -216,7 +217,10 @@ func ListCommits(t translations.TranslationHelperFunc) inventory.ServerTool { minimalCommits[i] = convertToMinimalCommit(commit, false) } - r, err := json.Marshal(minimalCommits) + r, err := response.OptimizeList(minimalCommits, + response.WithMaxDepth(3), + response.WithPreservedFields("html_url"), + ) if err != nil { return nil, nil, fmt.Errorf("failed to marshal response: %w", err) } @@ -303,7 +307,7 @@ func ListBranches(t translations.TranslationHelperFunc) inventory.ServerTool { minimalBranches = append(minimalBranches, convertToMinimalBranch(branch)) } - r, err := json.Marshal(minimalBranches) + r, err := response.OptimizeList(minimalBranches) if err != nil { return nil, nil, fmt.Errorf("failed to marshal response: %w", err) } @@ -1497,7 +1501,7 @@ func ListTags(t translations.TranslationHelperFunc) inventory.ServerTool { return ghErrors.NewGitHubAPIStatusErrorResponse(ctx, "failed to list tags", resp, body), nil, nil } - r, err := json.Marshal(tags) + r, err := response.OptimizeList(tags) if err != nil { return nil, nil, fmt.Errorf("failed to marshal response: %w", err) } @@ -1670,7 +1674,9 @@ func ListReleases(t translations.TranslationHelperFunc) inventory.ServerTool { return ghErrors.NewGitHubAPIStatusErrorResponse(ctx, "failed to list releases", resp, body), nil, nil } - r, err := json.Marshal(releases) + r, err := response.OptimizeList(releases, + response.WithPreservedFields("html_url", "prerelease"), + ) if err != nil { return nil, nil, fmt.Errorf("failed to marshal response: %w", err) } diff --git a/pkg/github/repositories_test.go b/pkg/github/repositories_test.go index 76628283d..c41ed5f76 100644 --- a/pkg/github/repositories_test.go +++ b/pkg/github/repositories_test.go @@ -1053,23 +1053,30 @@ func Test_ListCommits(t 
diff --git a/pkg/github/repositories.go b/pkg/github/repositories.go
index 4433fe64c..b6a1f17dd 100644
--- a/pkg/github/repositories.go
+++ b/pkg/github/repositories.go
@@ -13,6 +13,7 @@ import (
 	ghErrors "github.com/github/github-mcp-server/pkg/errors"
 	"github.com/github/github-mcp-server/pkg/inventory"
 	"github.com/github/github-mcp-server/pkg/octicons"
+	"github.com/github/github-mcp-server/pkg/response"
 	"github.com/github/github-mcp-server/pkg/scopes"
 	"github.com/github/github-mcp-server/pkg/translations"
 	"github.com/github/github-mcp-server/pkg/utils"
@@ -216,7 +217,10 @@ func ListCommits(t translations.TranslationHelperFunc) inventory.ServerTool {
 				minimalCommits[i] = convertToMinimalCommit(commit, false)
 			}
 
-			r, err := json.Marshal(minimalCommits)
+			r, err := response.OptimizeList(minimalCommits,
+				response.WithMaxDepth(3),
+				response.WithPreservedFields("html_url"),
+			)
 			if err != nil {
 				return nil, nil, fmt.Errorf("failed to marshal response: %w", err)
 			}
@@ -303,7 +307,7 @@ func ListBranches(t translations.TranslationHelperFunc) inventory.ServerTool {
 				minimalBranches = append(minimalBranches, convertToMinimalBranch(branch))
 			}
 
-			r, err := json.Marshal(minimalBranches)
+			r, err := response.OptimizeList(minimalBranches)
 			if err != nil {
 				return nil, nil, fmt.Errorf("failed to marshal response: %w", err)
 			}
@@ -1497,7 +1501,7 @@ func ListTags(t translations.TranslationHelperFunc) inventory.ServerTool {
 				return ghErrors.NewGitHubAPIStatusErrorResponse(ctx, "failed to list tags", resp, body), nil, nil
 			}
 
-			r, err := json.Marshal(tags)
+			r, err := response.OptimizeList(tags)
 			if err != nil {
 				return nil, nil, fmt.Errorf("failed to marshal response: %w", err)
 			}
@@ -1670,7 +1674,9 @@ func ListReleases(t translations.TranslationHelperFunc) inventory.ServerTool {
 				return ghErrors.NewGitHubAPIStatusErrorResponse(ctx, "failed to list releases", resp, body), nil, nil
 			}
 
-			r, err := json.Marshal(releases)
+			r, err := response.OptimizeList(releases,
+				response.WithPreservedFields("html_url", "prerelease"),
+			)
 			if err != nil {
 				return nil, nil, fmt.Errorf("failed to marshal response: %w", err)
 			}
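ListCommits is the one caller that raises the flatten depth. A sketch of why (commit shape assumed): with the default depth of 2, `commit.author.*` sits one level too deep and would be dropped.

```go
package main

import (
	"fmt"

	"github.com/github/github-mcp-server/pkg/response"
)

func main() {
	commits := []map[string]any{{
		"sha": "abc123",
		"commit": map[string]any{
			"message": "fix: retry on 502",
			"author":  map[string]any{"name": "octocat", "date": "2024-01-01T00:00:00Z"},
		},
	}}

	shallow, _ := response.OptimizeList(commits) // default depth 2
	fmt.Println(string(shallow))
	// [{"commit.message":"fix: retry on 502","sha":"abc123"}]

	deep, _ := response.OptimizeList(commits, response.WithMaxDepth(3))
	fmt.Println(string(deep))
	// [{"commit.author.date":"2024-01-01T00:00:00Z","commit.author.name":"octocat","commit.message":"fix: retry on 502","sha":"abc123"}]
}
```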
diff --git a/pkg/github/repositories_test.go b/pkg/github/repositories_test.go
index 76628283d..c41ed5f76 100644
--- a/pkg/github/repositories_test.go
+++ b/pkg/github/repositories_test.go
@@ -1053,23 +1053,30 @@ func Test_ListCommits(t *testing.T) {
 			textContent := getTextResult(t, result)
 
 			// Unmarshal and verify the result
-			var returnedCommits []MinimalCommit
+			var returnedCommits []map[string]any
 			err = json.Unmarshal([]byte(textContent.Text), &returnedCommits)
 			require.NoError(t, err)
 			assert.Len(t, returnedCommits, len(tc.expectedCommits))
 			for i, commit := range returnedCommits {
-				assert.Equal(t, tc.expectedCommits[i].GetSHA(), commit.SHA)
-				assert.Equal(t, tc.expectedCommits[i].GetHTMLURL(), commit.HTMLURL)
+				assert.Equal(t, tc.expectedCommits[i].GetSHA(), commit["sha"])
+				assert.Equal(t, tc.expectedCommits[i].GetHTMLURL(), commit["html_url"])
 				if tc.expectedCommits[i].Commit != nil {
-					assert.Equal(t, tc.expectedCommits[i].Commit.GetMessage(), commit.Commit.Message)
+					assert.Equal(t, tc.expectedCommits[i].Commit.GetMessage(), commit["commit.message"])
+					if tc.expectedCommits[i].Commit.Author != nil {
+						assert.Equal(t, tc.expectedCommits[i].Commit.Author.GetName(), commit["commit.author.name"])
+						assert.Equal(t, tc.expectedCommits[i].Commit.Author.GetEmail(), commit["commit.author.email"])
+						if tc.expectedCommits[i].Commit.Author.Date != nil {
+							assert.NotEmpty(t, commit["commit.author.date"], "commit.author.date should be present")
+						}
+					}
 				}
 				if tc.expectedCommits[i].Author != nil {
-					assert.Equal(t, tc.expectedCommits[i].Author.GetLogin(), commit.Author.Login)
+					assert.Equal(t, tc.expectedCommits[i].Author.GetLogin(), commit["author.login"])
 				}
 				// Files and stats are never included in list_commits
-				assert.Nil(t, commit.Files)
-				assert.Nil(t, commit.Stats)
+				assert.Nil(t, commit["files"])
+				assert.Nil(t, commit["stats"])
 			}
 		})
 	}
@@ -2791,15 +2798,15 @@ func Test_ListTags(t *testing.T) {
 			textContent := getTextResult(t, result)
 
 			// Parse and verify the result
-			var returnedTags []*github.RepositoryTag
+			var returnedTags []map[string]any
 			err = json.Unmarshal([]byte(textContent.Text), &returnedTags)
 			require.NoError(t, err)
 
 			// Verify each tag
 			require.Equal(t, len(tc.expectedTags), len(returnedTags))
 			for i, expectedTag := range tc.expectedTags {
-				assert.Equal(t, *expectedTag.Name, *returnedTags[i].Name)
-				assert.Equal(t, *expectedTag.Commit.SHA, *returnedTags[i].Commit.SHA)
+				assert.Equal(t, *expectedTag.Name, returnedTags[i]["name"])
+				assert.Equal(t, *expectedTag.Commit.SHA, returnedTags[i]["commit.sha"])
 			}
 		})
 	}
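Before reading the new package, the fill-rate pass deserves a worked example: with the default 0.1 threshold, `minCount = int(threshold * len(items))`, and a key survives only if it appears on more than `minCount` items (unless preserved or extracted). Illustrative sketch:

```go
package main

import (
	"fmt"

	"github.com/github/github-mcp-server/pkg/response"
)

func main() {
	// Ten items; "milestone" is set on exactly one (fill rate 0.1).
	items := make([]map[string]any, 10)
	for i := range items {
		items[i] = map[string]any{"title": fmt.Sprintf("issue %d", i)}
	}
	items[0]["milestone"] = "v1"

	// minCount = int(0.1 * 10) = 1; "milestone" appears once, and 1 > 1 is
	// false, so the key is dropped from the output entirely.
	out, err := response.OptimizeList(items)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // ten items, each only {"title": ...}
}
```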
diff --git a/pkg/response/optimize.go b/pkg/response/optimize.go
new file mode 100644
index 000000000..34c31e486
--- /dev/null
+++ b/pkg/response/optimize.go
@@ -0,0 +1,255 @@
+package response
+
+import (
+	"encoding/json"
+	"fmt"
+	"strings"
+)
+
+const (
+	defaultFillRateThreshold = 0.1 // default proportion of items that must have a key for it to survive
+	minFillRateRows          = 3   // minimum number of items required to apply fill-rate filtering
+	defaultMaxDepth          = 2   // default nesting depth that flatten will recurse into
+)
+
+// OptimizeListConfig controls the optimization pipeline behavior.
+type OptimizeListConfig struct {
+	maxDepth             int
+	preservedFields      map[string]bool
+	collectionExtractors map[string][]string
+}
+
+type OptimizeListOption func(*OptimizeListConfig)
+
+// WithMaxDepth sets the maximum nesting depth for flattening.
+// Deeper nested maps are silently dropped.
+func WithMaxDepth(d int) OptimizeListOption {
+	return func(c *OptimizeListConfig) {
+		c.maxDepth = d
+	}
+}
+
+// WithPreservedFields sets keys that are exempt from all destructive strategies except whitespace normalization.
+// Keys are matched against post-flatten map keys, so for nested fields like "user.html_url", the dotted key must be
+// added explicitly. Empty collections are still dropped. Wins over collectionExtractors.
+func WithPreservedFields(fields ...string) OptimizeListOption {
+	return func(c *OptimizeListConfig) {
+		c.preservedFields = make(map[string]bool, len(fields))
+		for _, f := range fields {
+			c.preservedFields[f] = true
+		}
+	}
+}
+
+// WithCollectionExtractors controls how array fields are handled instead of being summarized as "[N items]".
+// - 1 sub-field: comma-joined into a flat string ("bug, enhancement").
+// - Multiple sub-fields: keep the array, but trim each element to only those fields.
+//
+// These are explicitly exempt from fill-rate filtering; if we asked for the extraction, it's likely important
+// to preserve the data even if only one item has it.
+func WithCollectionExtractors(extractors map[string][]string) OptimizeListOption {
+	return func(c *OptimizeListConfig) {
+		c.collectionExtractors = extractors
+	}
+}
+
+// OptimizeList optimizes a list of items by applying flattening, URL removal, zero-value removal,
+// whitespace normalization, collection summarization, and fill-rate filtering.
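+//
+// Illustrative usage (option names as defined in this file; item shape assumed):
+//
+//	out, err := OptimizeList(prs,
+//		WithPreservedFields("html_url", "draft"),
+//		WithCollectionExtractors(map[string][]string{"labels": {"name"}}),
+//	)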
+func OptimizeList[T any](items []T, opts ...OptimizeListOption) ([]byte, error) {
+	cfg := OptimizeListConfig{maxDepth: defaultMaxDepth}
+	for _, opt := range opts {
+		opt(&cfg)
+	}
+
+	raw, err := json.Marshal(items)
+	if err != nil {
+		return nil, fmt.Errorf("failed to marshal data: %w", err)
+	}
+
+	var maps []map[string]any
+	if err := json.Unmarshal(raw, &maps); err != nil {
+		return nil, fmt.Errorf("failed to unmarshal JSON: %w", err)
+	}
+
+	for i, item := range maps {
+		flattenedItem := flattenTo(item, cfg.maxDepth)
+		maps[i] = optimizeItem(flattenedItem, cfg)
+	}
+
+	if len(maps) >= minFillRateRows {
+		maps = filterByFillRate(maps, defaultFillRateThreshold, cfg)
+	}
+
+	return json.Marshal(maps)
+}
+
+// flattenTo recursively promotes values from nested maps into the parent
+// using dot-notation keys ("user.login", "commit.author.date"). Arrays
+// within nested maps are preserved at their dotted key position.
+// Recursion stops at the given maxDepth; deeper nested maps are dropped.
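+//
+// For example (illustrative):
+//
+//	flattenTo(map[string]any{"user": map[string]any{"login": "octocat"}}, 2)
+//	// => map[string]any{"user.login": "octocat"}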
+func flattenTo(item map[string]any, maxDepth int) map[string]any {
+	result := make(map[string]any, len(item))
+	flattenInto(item, "", result, 1, maxDepth)
+	return result
+}
+
+// flattenInto is the recursive worker for flattenTo.
+func flattenInto(item map[string]any, prefix string, result map[string]any, depth int, maxDepth int) {
+	for key, value := range item {
+		fullKey := prefix + key
+		if nested, ok := value.(map[string]any); ok && depth < maxDepth {
+			flattenInto(nested, fullKey+".", result, depth+1, maxDepth)
+		} else if !ok {
+			result[fullKey] = value
+		}
+	}
+}
+
+// filterByFillRate drops keys that appear on no more than the threshold proportion of items
+// (a key survives only if its count exceeds int(threshold * len(items))).
+// Preserved fields and extractor keys always survive.
+func filterByFillRate(items []map[string]any, threshold float64, cfg OptimizeListConfig) []map[string]any {
+	keyCounts := make(map[string]int)
+	for _, item := range items {
+		for key := range item {
+			keyCounts[key]++
+		}
+	}
+
+	minCount := int(threshold * float64(len(items)))
+	keepKeys := make(map[string]bool, len(keyCounts))
+	for key, count := range keyCounts {
+		_, hasExtractor := cfg.collectionExtractors[key]
+		if count > minCount || cfg.preservedFields[key] || hasExtractor {
+			keepKeys[key] = true
+		}
+	}
+
+	for i, item := range items {
+		filtered := make(map[string]any, len(keepKeys))
+		for key, value := range item {
+			if keepKeys[key] {
+				filtered[key] = value
+			}
+		}
+		items[i] = filtered
+	}
+
+	return items
+}
+
+// optimizeItem applies per-item strategies in a single pass: remove URLs,
+// remove zero-values, normalize whitespace, summarize collections.
+// Preserved fields skip everything except whitespace normalization.
+func optimizeItem(item map[string]any, cfg OptimizeListConfig) map[string]any {
+	result := make(map[string]any, len(item))
+	for key, value := range item {
+		preserved := cfg.preservedFields[key]
+		if !preserved && isURLKey(key) {
+			continue
+		}
+		if !preserved && isZeroValue(value) {
+			continue
+		}
+
+		switch v := value.(type) {
+		case string:
+			result[key] = strings.Join(strings.Fields(v), " ")
+		case []any:
+			if len(v) == 0 {
+				continue
+			}
+
+			if preserved {
+				result[key] = value
+			} else if fields, ok := cfg.collectionExtractors[key]; ok {
+				if len(fields) == 1 {
+					result[key] = extractSubField(v, fields[0])
+				} else {
+					result[key] = trimArrayFields(v, fields)
+				}
+			} else {
+				result[key] = fmt.Sprintf("[%d items]", len(v))
+			}
+		default:
+			result[key] = value
+		}
+	}
+
+	return result
+}
+
+// extractSubField pulls a named sub-field from each slice element and joins
+// them with ", ". Elements missing the field are silently skipped.
+func extractSubField(items []any, field string) string {
+	var vals []string
+	for _, item := range items {
+		m, ok := item.(map[string]any)
+		if !ok {
+			continue
+		}
+
+		v, ok := m[field]
+		if !ok || v == nil {
+			continue
+		}
+
+		switch s := v.(type) {
+		case string:
+			if s != "" {
+				vals = append(vals, s)
+			}
+		default:
+			vals = append(vals, fmt.Sprintf("%v", v))
+		}
+	}
+
+	return strings.Join(vals, ", ")
+}
+
+// trimArrayFields keeps only the specified fields from each object in a slice.
+// The trimmed objects are returned as-is; no further strategies are applied.
+func trimArrayFields(items []any, fields []string) []any {
+	result := make([]any, 0, len(items))
+	for _, item := range items {
+		m, ok := item.(map[string]any)
+		if !ok {
+			continue
+		}
+
+		trimmed := make(map[string]any, len(fields))
+		for _, f := range fields {
+			if v, exists := m[f]; exists {
+				trimmed[f] = v
+			}
+		}
+
+		if len(trimmed) > 0 {
+			result = append(result, trimmed)
+		}
+	}
+
+	return result
+}
+
+// isURLKey matches "url", "*_url", and their dot-prefixed variants.
+func isURLKey(key string) bool {
+	if idx := strings.LastIndex(key, "."); idx >= 0 {
+		key = key[idx+1:]
+	}
+	return key == "url" || strings.HasSuffix(key, "_url")
+}
+
+// isZeroValue reports whether a JSON-decoded value is its type's zero value:
+// nil, "", false, or 0.
+func isZeroValue(v any) bool {
+	switch val := v.(type) {
+	case nil:
+		return true
+	case string:
+		return val == ""
+	case bool:
+		return !val
+	case float64:
+		return val == 0
+	default:
+		return false
+	}
+}
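A sketch of the per-item pass in isolation, with default options and an assumed release-like shape: zero values and URL keys disappear, and arrays without an extractor collapse to a count.

```go
package main

import (
	"fmt"

	"github.com/github/github-mcp-server/pkg/response"
)

func main() {
	items := []map[string]any{{
		"name":        "v1.2.0",
		"body":        "",            // zero value: dropped
		"tarball_url": "https://...", // "*_url" key: dropped
		"assets":      []any{"a", "b", "c"},
	}}

	out, err := response.OptimizeList(items)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// [{"assets":"[3 items]","name":"v1.2.0"}]
}
```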
"[2 items]", result["comments"]) + }) +} + +func TestCollectionFieldExtractors_SurviveFillRate(t *testing.T) { + cfg := OptimizeListConfig{ + collectionExtractors: map[string][]string{"labels": {"name"}}, + } + + items := []map[string]any{ + {"title": "PR 1", "labels": "bug"}, + {"title": "PR 2"}, + {"title": "PR 3"}, + {"title": "PR 4"}, + } + + result := filterByFillRate(items, defaultFillRateThreshold, cfg) + + assert.Contains(t, result[0], "labels") +}