diff --git a/routers/api/v2/ai_context.go b/routers/api/v2/ai_context.go
index fb2d3717a0..fe62011f92 100644
--- a/routers/api/v2/ai_context.go
+++ b/routers/api/v2/ai_context.go
@@ -4,9 +4,9 @@ package v2
 
 import (
-	"encoding/json"
 	"net/http"
 	"path"
+	"slices"
 	"sort"
 	"strings"
 
@@ -16,6 +16,7 @@ import (
 	apierrors "code.gitea.io/gitea/modules/errors"
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/gitrepo"
+	"code.gitea.io/gitea/modules/json"
 	"code.gitea.io/gitea/services/context"
 )
 
@@ -513,12 +514,7 @@ func isConfigFile(name string) bool {
 		"webpack.config.js", "vite.config.js", "rollup.config.js",
 		".env.example", ".env.sample",
 	}
-	for _, cf := range configFiles {
-		if name == cf {
-			return true
-		}
-	}
-	return false
+	return slices.Contains(configFiles, name)
 }
 
 func countFiles(commit *git.Commit, dir string, depth, maxDepth int) int {
@@ -545,7 +541,7 @@
 	return count
 }
 
-func detectProjectType(files, configFiles []string, primaryLang string) struct {
+func detectProjectType(files, configFiles []string, _ string) struct {
 	SuggestedEntryPoints []string `json:"suggested_entry_points"`
 	ProjectType          string   `json:"project_type"`
 	BuildSystem          string   `json:"build_system,omitempty"`
@@ -707,8 +703,7 @@ func extractCodeReferences(text string) []string {
 	// Simple extraction of file paths mentioned in text
 	// Look for patterns like `path/to/file.ext` or file.ext
 	refs := []string{}
-	words := strings.Fields(text)
-	for _, word := range words {
+	for word := range strings.FieldsSeq(text) {
 		// Clean up markdown code blocks
 		word = strings.Trim(word, "`*_[]()\"'")
 		if strings.Contains(word, ".") && (strings.Contains(word, "/") || strings.Contains(word, "\\")) {
diff --git a/routers/api/v2/api.go b/routers/api/v2/api.go
index 24f04ed04a..1d78ebf6a6 100644
--- a/routers/api/v2/api.go
+++ b/routers/api/v2/api.go
@@ -141,7 +141,7 @@ func securityHeaders() func(http.Handler) http.Handler {
 	return func(next http.Handler) http.Handler {
 		return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
 			// CORS preflight
-			if req.Method == "OPTIONS" {
+			if req.Method == http.MethodOptions {
 				return
 			}
 			next.ServeHTTP(w, req)
diff --git a/routers/api/v2/batch.go b/routers/api/v2/batch.go
index dcb7ca4b87..87a8d7d0e6 100644
--- a/routers/api/v2/batch.go
+++ b/routers/api/v2/batch.go
@@ -4,12 +4,12 @@ package v2
 
 import (
-	"encoding/json"
 	"net/http"
 
 	repo_model "code.gitea.io/gitea/models/repo"
 	apierrors "code.gitea.io/gitea/modules/errors"
 	"code.gitea.io/gitea/modules/gitrepo"
+	"code.gitea.io/gitea/modules/json"
 	"code.gitea.io/gitea/services/context"
 )
diff --git a/routers/api/v2/docs.go b/routers/api/v2/docs.go
index 8e1978c848..19ef224130 100644
--- a/routers/api/v2/docs.go
+++ b/routers/api/v2/docs.go
@@ -72,13 +72,13 @@ func DocsScalar(ctx *context.APIContext) {
 	for key, value := range data {
 		html = replaceTemplateVar(html, key, value)
 	}
-	ctx.Resp.Write([]byte(html))
+	_, _ = ctx.Resp.Write([]byte(html))
 }
 
 func replaceTemplateVar(template, key, value string) string {
 	placeholder := "{{." + key + "}}"
 	result := template
-	for i := 0; i < 10; i++ { // Replace up to 10 occurrences
+	for range 10 { // Replace up to 10 occurrences
 		newResult := ""
 		idx := 0
 		for {
diff --git a/routers/api/v2/health.go b/routers/api/v2/health.go
index 723bb38245..c9b48aca56 100644
--- a/routers/api/v2/health.go
+++ b/routers/api/v2/health.go
@@ -67,11 +67,14 @@ func HealthCheck(ctx *context.APIContext) {
 	}
 
 	// Set status code based on health
-	statusCode := http.StatusOK
-	if result.Status == health.StatusUnhealthy {
+	var statusCode int
+	switch result.Status {
+	case health.StatusUnhealthy:
 		statusCode = http.StatusServiceUnavailable
-	} else if result.Status == health.StatusDegraded {
+	case health.StatusDegraded:
 		statusCode = http.StatusOK // Still OK but degraded
+	default:
+		statusCode = http.StatusOK
 	}
 
 	ctx.JSON(statusCode, response)
diff --git a/routers/api/v2/streaming.go b/routers/api/v2/streaming.go
index 24113a50f2..f8ba760871 100644
--- a/routers/api/v2/streaming.go
+++ b/routers/api/v2/streaming.go
@@ -4,8 +4,6 @@ package v2
 
 import (
-	"bufio"
-	"encoding/json"
 	"net/http"
 
 	"code.gitea.io/gitea/models/db"
@@ -13,6 +11,7 @@ import (
 	repo_model "code.gitea.io/gitea/models/repo"
 	apierrors "code.gitea.io/gitea/modules/errors"
 	"code.gitea.io/gitea/modules/gitrepo"
+	"code.gitea.io/gitea/modules/json"
 	"code.gitea.io/gitea/modules/optional"
 	"code.gitea.io/gitea/services/context"
 )
@@ -21,7 +20,7 @@
 type StreamWriter struct {
 	w       http.ResponseWriter
 	flusher http.Flusher
-	encoder *json.Encoder
+	encoder json.Encoder
 }
 
 // NewStreamWriter creates a new NDJSON stream writer
@@ -160,7 +159,7 @@ func StreamFiles(ctx *context.APIContext) {
 		if err != nil {
 			item.Type = "error"
 			item.Error = "file not found"
-			sw.WriteItem(item)
+			_ = sw.WriteItem(item)
 			continue
 		}
 
@@ -170,7 +169,7 @@
 		if entry.IsDir() {
 			item.Type = "error"
 			item.Error = "path is a directory"
-			sw.WriteItem(item)
+			_ = sw.WriteItem(item)
 			continue
 		}
 
@@ -178,7 +177,7 @@
 		if entry.Size() > 5*1024*1024 { // 5MB per file in stream
 			item.Type = "error"
 			item.Error = "file too large for streaming (>5MB)"
-			sw.WriteItem(item)
+			_ = sw.WriteItem(item)
 			continue
 		}
 
@@ -187,7 +186,7 @@
 		if err != nil {
 			item.Type = "error"
 			item.Error = "failed to read content"
-			sw.WriteItem(item)
+			_ = sw.WriteItem(item)
 			continue
 		}
 
@@ -196,11 +195,11 @@
 		reader.Close()
 
 		item.Content = string(content[:n])
-		sw.WriteItem(item)
+		_ = sw.WriteItem(item)
 	}
 
 	// Send completion marker
-	sw.WriteItem(StreamFileItem{
+	_ = sw.WriteItem(StreamFileItem{
 		Type:  "done",
 		Total: total,
 	})
@@ -298,7 +297,7 @@
 	// Get commits from head commit
 	commits, err := headCommit.CommitsByRange(req.Offset/req.Limit+1, req.Limit, "", "", "")
 	if err != nil {
-		sw.WriteItem(StreamCommitItem{
+		_ = sw.WriteItem(StreamCommitItem{
 			Type:  "error",
 			Error: "failed to get commits",
 		})
@@ -317,10 +316,10 @@
 			Index: i,
 			Total: total,
 		}
-		sw.WriteItem(item)
+		_ = sw.WriteItem(item)
 	}
 
-	sw.WriteItem(StreamCommitItem{
+	_ = sw.WriteItem(StreamCommitItem{
 		Type:  "done",
 		Total: total,
 	})
@@ -419,7 +418,7 @@
 		},
 	})
 	if err != nil {
-		sw.WriteItem(StreamIssueItem{
+		_ = sw.WriteItem(StreamIssueItem{
 			Type:  "error",
 			Error: "failed to get issues",
 		})
@@ -458,36 +457,11 @@
 			Index: i,
 			Total: total,
 		}
-		sw.WriteItem(item)
+		_ = sw.WriteItem(item)
 	}
 
-	sw.WriteItem(StreamIssueItem{
+	_ = sw.WriteItem(StreamIssueItem{
 		Type:  "done",
 		Total: total,
 	})
 }
-
-// Helper for line-by-line streaming of large files
-func streamLargeFile(ctx *context.APIContext, content []byte) {
-	ctx.Resp.Header().Set("Content-Type", "application/x-ndjson")
-	ctx.Resp.Header().Set("Transfer-Encoding", "chunked")
-	ctx.Resp.WriteHeader(http.StatusOK)
-
-	sw := NewStreamWriter(ctx.Resp)
-	scanner := bufio.NewScanner(bufio.NewReader(nil)) // placeholder
-
-	lineNum := 0
-	for scanner.Scan() {
-		lineNum++
-		sw.WriteItem(map[string]any{
-			"type": "line",
-			"line": lineNum,
-			"text": scanner.Text(),
-		})
-	}
-
-	sw.WriteItem(map[string]any{
-		"type": "done",
-		"lines": lineNum,
-	})
-}
diff --git a/routers/api/v2/wiki.go b/routers/api/v2/wiki.go
index 83200b710b..9aba9c22bb 100644
--- a/routers/api/v2/wiki.go
+++ b/routers/api/v2/wiki.go
@@ -4,7 +4,6 @@ package v2
 
 import (
-	"encoding/json"
 	"net/http"
 	"strings"
 	"time"
@@ -14,6 +13,7 @@ import (
 	apierrors "code.gitea.io/gitea/modules/errors"
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/gitrepo"
+	"code.gitea.io/gitea/modules/json"
 	"code.gitea.io/gitea/modules/markup"
 	"code.gitea.io/gitea/modules/markup/markdown"
 	"code.gitea.io/gitea/modules/setting"
@@ -98,10 +98,7 @@ func ListWikiPagesV2(ctx *context.APIContext) {
 	totalCount := int64(len(mdEntries))
 
 	skip := (page - 1) * limit
-	end := skip + limit
-	if end > len(mdEntries) {
-		end = len(mdEntries)
-	}
+	end := min(skip+limit, len(mdEntries))
 
 	pages := make([]*api.WikiPageV2, 0, limit)
 	for i := skip; i < end; i++ {
@@ -207,10 +204,9 @@ func GetWikiPageV2(ctx *context.APIContext) {
 	// Render HTML
 	var htmlContent string
 	rd := charset.ToUTF8WithFallbackReader(strings.NewReader(content), charset.ConvertOpts{})
-	if buf := new(strings.Builder); buf != nil {
-		if err := markdown.Render(markup.NewRenderContext(ctx).WithRelativePath(gitFilename), rd, buf); err == nil {
-			htmlContent = buf.String()
-		}
+	buf := new(strings.Builder)
+	if err := markdown.Render(markup.NewRenderContext(ctx).WithRelativePath(gitFilename), rd, buf); err == nil {
+		htmlContent = buf.String()
 	}
 
 	// Get last commit
@@ -227,7 +223,7 @@
 	var linksOut, linksIn []string
 	if idx, _ := repo_model.GetWikiIndex(ctx, repo.ID, string(wikiName)); idx != nil {
 		if idx.LinksOut != "" {
-			json.Unmarshal([]byte(idx.LinksOut), &linksOut)
+			_ = json.Unmarshal([]byte(idx.LinksOut), &linksOut)
 		}
 	}
 
@@ -617,7 +613,7 @@ func GetWikiStatsV2(ctx *context.APIContext) {
 	for _, idx := range indexes {
 		var links []string
 		if idx.LinksOut != "" {
-			json.Unmarshal([]byte(idx.LinksOut), &links)
+			_ = json.Unmarshal([]byte(idx.LinksOut), &links)
 		}
 		for _, link := range links {
 			linkCounts[link]++
@@ -858,23 +854,17 @@ func createSearchSnippet(content, query string, maxLen int) string {
 	}
 
 	// Find start position
-	start := idx - maxLen/4
-	if start < 0 {
-		start = 0
-	}
+	start := max(idx-maxLen/4, 0)
 
 	// Find end position
-	end := start + maxLen
-	if end > len(content) {
-		end = len(content)
-	}
+	end := min(start+maxLen, len(content))
 
 	snippet := content[start:end]
 	if start > 0 {
 		snippet = "..." + snippet
 	}
 	if end < len(content) {
-		snippet = snippet + "..."
+		snippet += "..."
 	}
 
 	return snippet
@@ -902,7 +892,7 @@ func calculateSearchScore(idx *repo_model.WikiIndex, query string) float32 {
 	// Longer pages might have more matches but aren't necessarily more relevant
 	// Normalize by word count
 	if idx.WordCount > 0 {
-		score = score / (float32(idx.WordCount) / 100.0)
+		score /= float32(idx.WordCount) / 100.0
 	}
 
 	return score
diff --git a/services/wiki/wiki_index.go b/services/wiki/wiki_index.go
index 72deffaf7b..114701c572 100644
--- a/services/wiki/wiki_index.go
+++ b/services/wiki/wiki_index.go
@@ -7,13 +7,14 @@ import (
 	"context"
 	"crypto/sha256"
 	"encoding/hex"
-	"encoding/json"
 	"regexp"
+	"slices"
 	"strings"
 
 	repo_model "code.gitea.io/gitea/models/repo"
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/gitrepo"
+	"code.gitea.io/gitea/modules/json"
 	"code.gitea.io/gitea/modules/log"
 )
 
@@ -155,20 +156,24 @@ func ClearWikiIndex(ctx context.Context, repoID int64) error {
 }
 
 // GetWikiGraph returns the link graph for a wiki
-func GetWikiGraph(ctx context.Context, repoID int64) (nodes []map[string]interface{}, edges []map[string]interface{}, err error) {
+func GetWikiGraph(ctx context.Context, repoID int64) (
+	nodes []map[string]any,
+	edges []map[string]any,
+	err error,
+) {
 	indexes, err := repo_model.GetWikiIndexByRepo(ctx, repoID)
 	if err != nil {
 		return nil, nil, err
 	}
 
-	nodes = make([]map[string]interface{}, 0, len(indexes))
-	edges = make([]map[string]interface{}, 0)
+	nodes = make([]map[string]any, 0, len(indexes))
+	edges = make([]map[string]any, 0)
 	pageSet := make(map[string]bool)
 
 	// Build nodes
 	for _, idx := range indexes {
 		pageSet[idx.PageName] = true
-		nodes = append(nodes, map[string]interface{}{
+		nodes = append(nodes, map[string]any{
 			"name":       idx.PageName,
 			"title":      idx.Title,
 			"word_count": idx.WordCount,
@@ -179,11 +184,11 @@ func GetWikiGraph(ctx context.Context, repoID int64) (nodes []map[string]interfa
 	for _, idx := range indexes {
 		var links []string
 		if idx.LinksOut != "" {
-			json.Unmarshal([]byte(idx.LinksOut), &links)
+			_ = json.Unmarshal([]byte(idx.LinksOut), &links)
 		}
 		for _, link := range links {
 			if pageSet[link] { // Only include links to existing pages
-				edges = append(edges, map[string]interface{}{
+				edges = append(edges, map[string]any{
 					"source": idx.PageName,
 					"target": link,
 				})
@@ -205,13 +210,10 @@ func GetWikiIncomingLinks(ctx context.Context, repoID int64, pageName string) ([
 	for _, idx := range indexes {
 		var links []string
 		if idx.LinksOut != "" {
-			json.Unmarshal([]byte(idx.LinksOut), &links)
+			_ = json.Unmarshal([]byte(idx.LinksOut), &links)
 		}
-		for _, link := range links {
-			if link == pageName {
-				incoming = append(incoming, idx.PageName)
-				break
-			}
+		if slices.Contains(links, pageName) {
+			incoming = append(incoming, idx.PageName)
 		}
 	}
 
@@ -230,7 +232,7 @@ func GetOrphanedPages(ctx context.Context, repoID int64) ([]*repo_model.WikiInde
 	for _, idx := range indexes {
 		var links []string
 		if idx.LinksOut != "" {
-			json.Unmarshal([]byte(idx.LinksOut), &links)
+			_ = json.Unmarshal([]byte(idx.LinksOut), &links)
 		}
 		for _, link := range links {
 			linkedPages[link] = true
@@ -266,7 +268,7 @@ func GetDeadLinks(ctx context.Context, repoID int64) ([]map[string]string, error
 	for _, idx := range indexes {
 		var links []string
 		if idx.LinksOut != "" {
-			json.Unmarshal([]byte(idx.LinksOut), &links)
+			_ = json.Unmarshal([]byte(idx.LinksOut), &links)
 		}
 		for _, link := range links {
 			if !existingPages[link] {
@@ -354,11 +356,10 @@ func extractWikiLinks(content string) []string {
 // extractTitle extracts the title from markdown content
 func extractTitle(content, defaultTitle string) string {
 	// Look for first H1 heading
-	lines := strings.Split(content, "\n")
-	for _, line := range lines {
+	for line := range strings.SplitSeq(content, "\n") {
 		line = strings.TrimSpace(line)
-		if strings.HasPrefix(line, "# ") {
-			return strings.TrimPrefix(line, "# ")
+		if title, ok := strings.CutPrefix(line, "# "); ok {
+			return title
 		}
 	}
 	return defaultTitle