From 9094d8b503fc47f3df3f472b4eca0585ee692230 Mon Sep 17 00:00:00 2001
From: logikonline
Date: Fri, 9 Jan 2026 11:41:10 -0500
Subject: [PATCH] feat(api): add v2 API with AI-friendly features (Phase 2)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This introduces a new v2 API at /api/v2/ with features designed for AI agents
and automation tools while maintaining full backward compatibility with the
existing v1 API.

New features:

- Structured error codes (70+ machine-readable codes) for precise error
  handling by automated tools
- Scalar API documentation at /api/v2/docs (modern replacement for Swagger UI)
- Batch operations for bulk file and repository fetching
- NDJSON streaming endpoints for files, commits, and issues
- AI context endpoints providing rich repository summaries, navigation hints,
  and issue context

Files added:

- modules/errors/codes.go - Error code definitions and catalog
- modules/errors/api_error.go - Rich API error response builder
- routers/api/v2/api.go - v2 router with auth middleware
- routers/api/v2/docs.go - Scalar docs and OpenAPI spec
- routers/api/v2/batch.go - Batch file/repo operations
- routers/api/v2/streaming.go - NDJSON streaming endpoints
- routers/api/v2/ai_context.go - AI context endpoints
- routers/api/v2/misc.go - Version and user endpoints

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5
---
 modules/errors/api_error.go  | 129 ++++
 modules/errors/codes.go      | 298 +++++++++++++
 routers/api/v2/ai_context.go | 781 +++++++++++++++++++++++++++++++++++
 routers/api/v2/api.go        | 151 +++++++
 routers/api/v2/batch.go      | 273 ++++++++++++
 routers/api/v2/docs.go       | 247 +++++++++++
 routers/api/v2/misc.go       |  35 ++
 routers/api/v2/streaming.go  | 493 ++++++++++++++++++++++
 routers/init.go              |   2 +
 services/context/api.go      |  41 ++
 10 files changed, 2450 insertions(+)
 create mode 100644 modules/errors/api_error.go
 create mode 100644 modules/errors/codes.go
 create mode 100644 routers/api/v2/ai_context.go
 create mode 100644 routers/api/v2/api.go
 create mode 100644 routers/api/v2/batch.go
 create mode 100644 routers/api/v2/docs.go
 create mode 100644 routers/api/v2/misc.go
 create mode 100644 routers/api/v2/streaming.go

diff --git a/modules/errors/api_error.go b/modules/errors/api_error.go
new file mode 100644
index 0000000000..000c5e928b
--- /dev/null
+++ b/modules/errors/api_error.go
@@ -0,0 +1,129 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT + +package errors + +import ( + "fmt" + + "code.gitea.io/gitea/modules/setting" +) + +// APIErrorResponse is the top-level error response wrapper +type APIErrorResponse struct { + Error *APIError `json:"error"` +} + +// APIError represents a structured API error following RFC 7807 Problem Details +// with additional fields for AI-friendly error handling +type APIError struct { + // Machine-readable error code (e.g., "REPO_NOT_FOUND") + Code string `json:"code"` + + // Human-readable error message + Message string `json:"message"` + + // HTTP status code + Status int `json:"status"` + + // Additional context about the error + Details map[string]any `json:"details,omitempty"` + + // URL to documentation about this error + DocumentationURL string `json:"documentation_url,omitempty"` + + // Unique request ID for tracing + RequestID string `json:"request_id,omitempty"` + + // Suggested actions or alternatives + Suggestions []string `json:"suggestions,omitempty"` + + // RFC 7807 Problem Details fields + Type string `json:"type,omitempty"` // URI reference identifying the problem type + Title string `json:"title,omitempty"` // Short summary of the problem type + Instance string `json:"instance,omitempty"` // URI reference for this specific occurrence +} + +// NewAPIError creates a new structured API error +func NewAPIError(code ErrorCode, requestID string) *APIError { + docURL := fmt.Sprintf("%s/api/errors#%s", setting.AppURL, code) + + return &APIError{ + Code: code.String(), + Message: code.Message(), + Status: code.HTTPStatus(), + DocumentationURL: docURL, + RequestID: requestID, + Type: "about:blank", + Title: code.Message(), + Instance: requestID, + } +} + +// WithDetails adds details to the error +func (e *APIError) WithDetails(details map[string]any) *APIError { + e.Details = details + return e +} + +// WithDetail adds a single detail to the error +func (e *APIError) WithDetail(key string, value any) *APIError { + if e.Details == nil { + e.Details = make(map[string]any) + } + e.Details[key] = value + return e +} + +// WithMessage overrides the default message +func (e *APIError) WithMessage(message string) *APIError { + e.Message = message + e.Title = message + return e +} + +// WithSuggestions adds suggested actions +func (e *APIError) WithSuggestions(suggestions ...string) *APIError { + e.Suggestions = append(e.Suggestions, suggestions...) 
+ return e +} + +// Response wraps the error in an APIErrorResponse +func (e *APIError) Response() *APIErrorResponse { + return &APIErrorResponse{Error: e} +} + +// ValidationError represents a field-level validation error +type ValidationError struct { + Field string `json:"field"` + Message string `json:"message"` + Code string `json:"code,omitempty"` +} + +// APIValidationError represents a validation error with field-level details +type APIValidationError struct { + *APIError + Errors []ValidationError `json:"errors,omitempty"` +} + +// NewValidationError creates a new validation error +func NewValidationError(requestID string, errors ...ValidationError) *APIValidationError { + baseErr := NewAPIError(ValInvalidInput, requestID) + return &APIValidationError{ + APIError: baseErr, + Errors: errors, + } +} + +// AddFieldError adds a field validation error +func (e *APIValidationError) AddFieldError(field, message string, code ...string) *APIValidationError { + ve := ValidationError{ + Field: field, + Message: message, + } + if len(code) > 0 { + ve.Code = code[0] + } + e.Errors = append(e.Errors, ve) + return e +} diff --git a/modules/errors/codes.go b/modules/errors/codes.go new file mode 100644 index 0000000000..cfa3bf7686 --- /dev/null +++ b/modules/errors/codes.go @@ -0,0 +1,298 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package errors + +import "net/http" + +// ErrorCode represents a machine-readable error code +type ErrorCode string + +// Authentication errors (AUTH_) +const ( + AuthTokenMissing ErrorCode = "AUTH_TOKEN_MISSING" + AuthTokenInvalid ErrorCode = "AUTH_TOKEN_INVALID" + AuthTokenExpired ErrorCode = "AUTH_TOKEN_EXPIRED" + AuthScopeInsufficient ErrorCode = "AUTH_SCOPE_INSUFFICIENT" + Auth2FARequired ErrorCode = "AUTH_2FA_REQUIRED" + AuthInvalidCredentials ErrorCode = "AUTH_INVALID_CREDENTIALS" +) + +// Permission errors (PERM_) +const ( + PermRepoReadDenied ErrorCode = "PERM_REPO_READ_DENIED" + PermRepoWriteDenied ErrorCode = "PERM_REPO_WRITE_DENIED" + PermRepoAdminRequired ErrorCode = "PERM_REPO_ADMIN_REQUIRED" + PermOrgMemberRequired ErrorCode = "PERM_ORG_MEMBER_REQUIRED" + PermOrgAdminRequired ErrorCode = "PERM_ORG_ADMIN_REQUIRED" + PermActionDenied ErrorCode = "PERM_ACTION_DENIED" +) + +// Repository errors (REPO_) +const ( + RepoNotFound ErrorCode = "REPO_NOT_FOUND" + RepoArchived ErrorCode = "REPO_ARCHIVED" + RepoDisabled ErrorCode = "REPO_DISABLED" + RepoTransferPending ErrorCode = "REPO_TRANSFER_PENDING" + RepoEmpty ErrorCode = "REPO_EMPTY" + RepoAlreadyExists ErrorCode = "REPO_ALREADY_EXISTS" +) + +// File errors (FILE_) +const ( + FileNotFound ErrorCode = "FILE_NOT_FOUND" + FileTooLarge ErrorCode = "FILE_TOO_LARGE" + FileConflict ErrorCode = "FILE_CONFLICT" + FileBinary ErrorCode = "FILE_BINARY" + FileTypeError ErrorCode = "FILE_TYPE_NOT_ALLOWED" +) + +// Git errors (GIT_) +const ( + GitRefNotFound ErrorCode = "GIT_REF_NOT_FOUND" + GitMergeConflict ErrorCode = "GIT_MERGE_CONFLICT" + GitBranchNotFound ErrorCode = "GIT_BRANCH_NOT_FOUND" + GitTagNotFound ErrorCode = "GIT_TAG_NOT_FOUND" + GitCommitNotFound ErrorCode = "GIT_COMMIT_NOT_FOUND" + GitPushRejected ErrorCode = "GIT_PUSH_REJECTED" +) + +// Rate limiting errors (RATE_) +const ( + RateLimitExceeded ErrorCode = "RATE_LIMIT_EXCEEDED" + RateQuotaExhausted ErrorCode = "RATE_QUOTA_EXHAUSTED" +) + +// Validation errors (VAL_) +const ( + ValInvalidInput ErrorCode = "VAL_INVALID_INPUT" + ValMissingField ErrorCode = "VAL_MISSING_FIELD" + ValInvalidName ErrorCode = 
"VAL_INVALID_NAME" + ValNameTooLong ErrorCode = "VAL_NAME_TOO_LONG" + ValInvalidEmail ErrorCode = "VAL_INVALID_EMAIL" + ValDuplicateName ErrorCode = "VAL_DUPLICATE_NAME" + ValInvalidFormat ErrorCode = "VAL_INVALID_FORMAT" + ValidationFailed ErrorCode = "VALIDATION_FAILED" +) + +// General errors +const ( + InternalError ErrorCode = "INTERNAL_ERROR" + PermAccessDenied ErrorCode = "ACCESS_DENIED" + RefNotFound ErrorCode = "REF_NOT_FOUND" +) + +// Upload errors (UPLOAD_) +const ( + UploadSessionNotFound ErrorCode = "UPLOAD_SESSION_NOT_FOUND" + UploadSessionExpired ErrorCode = "UPLOAD_SESSION_EXPIRED" + UploadChunkInvalid ErrorCode = "UPLOAD_CHUNK_INVALID" + UploadChunkSizeMismatch ErrorCode = "UPLOAD_CHUNK_SIZE_MISMATCH" + UploadChecksumMismatch ErrorCode = "UPLOAD_CHECKSUM_MISMATCH" + UploadIncomplete ErrorCode = "UPLOAD_INCOMPLETE" + UploadFileTooLarge ErrorCode = "UPLOAD_FILE_TOO_LARGE" +) + +// Resource errors (RESOURCE_) +const ( + ResourceNotFound ErrorCode = "RESOURCE_NOT_FOUND" + ResourceConflict ErrorCode = "RESOURCE_CONFLICT" + ResourceGone ErrorCode = "RESOURCE_GONE" +) + +// Server errors (SERVER_) +const ( + ServerInternal ErrorCode = "SERVER_INTERNAL_ERROR" + ServerUnavailable ErrorCode = "SERVER_UNAVAILABLE" + ServerTimeout ErrorCode = "SERVER_TIMEOUT" +) + +// User errors (USER_) +const ( + UserNotFound ErrorCode = "USER_NOT_FOUND" + UserAlreadyExists ErrorCode = "USER_ALREADY_EXISTS" + UserInactive ErrorCode = "USER_INACTIVE" + UserProhibitLogin ErrorCode = "USER_PROHIBIT_LOGIN" +) + +// Organization errors (ORG_) +const ( + OrgNotFound ErrorCode = "ORG_NOT_FOUND" + OrgAlreadyExists ErrorCode = "ORG_ALREADY_EXISTS" +) + +// Issue errors (ISSUE_) +const ( + IssueNotFound ErrorCode = "ISSUE_NOT_FOUND" + IssueClosed ErrorCode = "ISSUE_CLOSED" + IssueLocked ErrorCode = "ISSUE_LOCKED" +) + +// Pull Request errors (PR_) +const ( + PRNotFound ErrorCode = "PR_NOT_FOUND" + PRAlreadyMerged ErrorCode = "PR_ALREADY_MERGED" + PRNotMergeable ErrorCode = "PR_NOT_MERGEABLE" + PRWorkInProgress ErrorCode = "PR_WORK_IN_PROGRESS" +) + +// Release errors (RELEASE_) +const ( + ReleaseNotFound ErrorCode = "RELEASE_NOT_FOUND" + ReleaseTagExists ErrorCode = "RELEASE_TAG_EXISTS" + ReleaseIsDraft ErrorCode = "RELEASE_IS_DRAFT" +) + +// Webhook errors (WEBHOOK_) +const ( + WebhookNotFound ErrorCode = "WEBHOOK_NOT_FOUND" + WebhookDeliveryFail ErrorCode = "WEBHOOK_DELIVERY_FAILED" +) + +// errorInfo contains metadata about an error code +type errorInfo struct { + Message string + HTTPStatus int +} + +// errorCatalog maps error codes to their metadata +var errorCatalog = map[ErrorCode]errorInfo{ + // Auth errors + AuthTokenMissing: {"No authentication token provided", http.StatusUnauthorized}, + AuthTokenInvalid: {"Token is malformed or invalid", http.StatusUnauthorized}, + AuthTokenExpired: {"Token has expired", http.StatusUnauthorized}, + AuthScopeInsufficient: {"Token lacks required scope", http.StatusForbidden}, + Auth2FARequired: {"Two-factor authentication required", http.StatusUnauthorized}, + AuthInvalidCredentials: {"Invalid username or password", http.StatusUnauthorized}, + + // Permission errors + PermRepoReadDenied: {"Cannot read repository", http.StatusForbidden}, + PermRepoWriteDenied: {"Cannot write to repository", http.StatusForbidden}, + PermRepoAdminRequired: {"Repository admin access required", http.StatusForbidden}, + PermOrgMemberRequired: {"Must be organization member", http.StatusForbidden}, + PermOrgAdminRequired: {"Organization admin access required", http.StatusForbidden}, + 
PermActionDenied: {"Permission denied for this action", http.StatusForbidden}, + + // Repository errors + RepoNotFound: {"Repository does not exist", http.StatusNotFound}, + RepoArchived: {"Repository is archived", http.StatusForbidden}, + RepoDisabled: {"Repository is disabled", http.StatusForbidden}, + RepoTransferPending: {"Repository has pending transfer", http.StatusConflict}, + RepoEmpty: {"Repository is empty", http.StatusUnprocessableEntity}, + RepoAlreadyExists: {"Repository already exists", http.StatusConflict}, + + // File errors + FileNotFound: {"File does not exist", http.StatusNotFound}, + FileTooLarge: {"File exceeds size limit", http.StatusRequestEntityTooLarge}, + FileConflict: {"File was modified (SHA mismatch)", http.StatusConflict}, + FileBinary: {"Cannot perform text operation on binary file", http.StatusBadRequest}, + FileTypeError: {"File type not allowed", http.StatusBadRequest}, + + // Git errors + GitRefNotFound: {"Git reference not found", http.StatusNotFound}, + GitMergeConflict: {"Merge conflict detected", http.StatusConflict}, + GitBranchNotFound: {"Branch not found", http.StatusNotFound}, + GitTagNotFound: {"Tag not found", http.StatusNotFound}, + GitCommitNotFound: {"Commit not found", http.StatusNotFound}, + GitPushRejected: {"Push rejected", http.StatusForbidden}, + + // Rate limiting errors + RateLimitExceeded: {"API rate limit exceeded", http.StatusTooManyRequests}, + RateQuotaExhausted: {"Rate quota exhausted", http.StatusTooManyRequests}, + + // Validation errors + ValInvalidInput: {"Invalid input provided", http.StatusBadRequest}, + ValMissingField: {"Required field is missing", http.StatusBadRequest}, + ValInvalidName: {"Name contains invalid characters", http.StatusBadRequest}, + ValNameTooLong: {"Name exceeds maximum length", http.StatusBadRequest}, + ValInvalidEmail: {"Invalid email address", http.StatusBadRequest}, + ValDuplicateName: {"Name already exists", http.StatusConflict}, + ValInvalidFormat: {"Invalid format", http.StatusBadRequest}, + ValidationFailed: {"Validation failed", http.StatusBadRequest}, + + // General errors + InternalError: {"Internal server error", http.StatusInternalServerError}, + PermAccessDenied: {"Access denied", http.StatusForbidden}, + RefNotFound: {"Reference not found", http.StatusNotFound}, + + // Upload errors + UploadSessionNotFound: {"Upload session does not exist", http.StatusNotFound}, + UploadSessionExpired: {"Upload session has expired", http.StatusGone}, + UploadChunkInvalid: {"Chunk number out of range", http.StatusBadRequest}, + UploadChunkSizeMismatch: {"Chunk size doesn't match expected", http.StatusBadRequest}, + UploadChecksumMismatch: {"File checksum verification failed", http.StatusBadRequest}, + UploadIncomplete: {"Not all chunks have been uploaded", http.StatusBadRequest}, + UploadFileTooLarge: {"File exceeds maximum upload size", http.StatusRequestEntityTooLarge}, + + // Resource errors + ResourceNotFound: {"Resource not found", http.StatusNotFound}, + ResourceConflict: {"Resource conflict", http.StatusConflict}, + ResourceGone: {"Resource no longer available", http.StatusGone}, + + // Server errors + ServerInternal: {"Internal server error", http.StatusInternalServerError}, + ServerUnavailable: {"Service temporarily unavailable", http.StatusServiceUnavailable}, + ServerTimeout: {"Request timeout", http.StatusGatewayTimeout}, + + // User errors + UserNotFound: {"User not found", http.StatusNotFound}, + UserAlreadyExists: {"User already exists", http.StatusConflict}, + UserInactive: {"User account 
is inactive", http.StatusForbidden}, + UserProhibitLogin: {"User is not allowed to login", http.StatusForbidden}, + + // Organization errors + OrgNotFound: {"Organization not found", http.StatusNotFound}, + OrgAlreadyExists: {"Organization already exists", http.StatusConflict}, + + // Issue errors + IssueNotFound: {"Issue not found", http.StatusNotFound}, + IssueClosed: {"Issue is closed", http.StatusUnprocessableEntity}, + IssueLocked: {"Issue is locked", http.StatusForbidden}, + + // Pull Request errors + PRNotFound: {"Pull request not found", http.StatusNotFound}, + PRAlreadyMerged: {"Pull request already merged", http.StatusConflict}, + PRNotMergeable: {"Pull request is not mergeable", http.StatusConflict}, + PRWorkInProgress: {"Pull request is marked as work in progress", http.StatusUnprocessableEntity}, + + // Release errors + ReleaseNotFound: {"Release not found", http.StatusNotFound}, + ReleaseTagExists: {"Release tag already exists", http.StatusConflict}, + ReleaseIsDraft: {"Release is a draft", http.StatusUnprocessableEntity}, + + // Webhook errors + WebhookNotFound: {"Webhook not found", http.StatusNotFound}, + WebhookDeliveryFail: {"Webhook delivery failed", http.StatusBadGateway}, +} + +// Message returns the human-readable message for an error code +func (e ErrorCode) Message() string { + if info, ok := errorCatalog[e]; ok { + return info.Message + } + return string(e) +} + +// HTTPStatus returns the HTTP status code for an error code +func (e ErrorCode) HTTPStatus() int { + if info, ok := errorCatalog[e]; ok { + return info.HTTPStatus + } + return http.StatusInternalServerError +} + +// String returns the error code as a string +func (e ErrorCode) String() string { + return string(e) +} + +// Error implements the error interface +func (e ErrorCode) Error() string { + return e.Message() +} + +// IsValid returns true if the error code is registered in the catalog +func (e ErrorCode) IsValid() bool { + _, ok := errorCatalog[e] + return ok +} diff --git a/routers/api/v2/ai_context.go b/routers/api/v2/ai_context.go new file mode 100644 index 0000000000..e64004b380 --- /dev/null +++ b/routers/api/v2/ai_context.go @@ -0,0 +1,781 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package v2 + +import ( + "encoding/json" + "net/http" + "path" + "sort" + "strings" + + issues_model "code.gitea.io/gitea/models/issues" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unit" + apierrors "code.gitea.io/gitea/modules/errors" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/gitrepo" + "code.gitea.io/gitea/services/context" +) + +// AIRepoSummaryRequest represents a request for AI-friendly repo summary +type AIRepoSummaryRequest struct { + Owner string `json:"owner" binding:"Required"` + Repo string `json:"repo" binding:"Required"` + Ref string `json:"ref"` +} + +// AIRepoSummaryResponse contains comprehensive repo information for AI tools +type AIRepoSummaryResponse struct { + // Basic info + Owner string `json:"owner"` + Repo string `json:"repo"` + FullName string `json:"full_name"` + Description string `json:"description"` + Website string `json:"website,omitempty"` + Language string `json:"primary_language"` + + // Repository stats + Stars int `json:"stars"` + Forks int `json:"forks"` + Watchers int `json:"watchers"` + OpenIssues int `json:"open_issues"` + OpenPRs int `json:"open_pull_requests"` + Size int64 `json:"size_kb"` + IsPrivate bool `json:"is_private"` + IsFork bool `json:"is_fork"` + IsArchived bool `json:"is_archived"` + IsTemplate bool `json:"is_template"` + HasWiki bool `json:"has_wiki"` + HasIssues bool `json:"has_issues"` + HasProjects bool `json:"has_projects"` + + // Git info + DefaultBranch string `json:"default_branch"` + Branches []string `json:"branches"` + Tags []string `json:"recent_tags"` + LastCommit struct { + SHA string `json:"sha"` + Message string `json:"message"` + Author string `json:"author"` + Timestamp string `json:"timestamp"` + } `json:"last_commit"` + + // Structure overview + Structure struct { + TopLevelDirs []string `json:"top_level_dirs"` + TopLevelFiles []string `json:"top_level_files"` + FileCount int `json:"total_file_count"` + Languages map[string]int64 `json:"languages"` // language -> bytes + HasReadme bool `json:"has_readme"` + ReadmePath string `json:"readme_path,omitempty"` + HasLicense bool `json:"has_license"` + LicensePath string `json:"license_path,omitempty"` + HasContrib bool `json:"has_contributing"` + ContribPath string `json:"contributing_path,omitempty"` + ConfigFiles []string `json:"config_files"` // package.json, go.mod, etc. + } `json:"structure"` + + // Recent activity + RecentActivity struct { + CommitsLastWeek int `json:"commits_last_week"` + CommitsLastMonth int `json:"commits_last_month"` + Contributors int `json:"contributors"` + } `json:"recent_activity"` + + // AI-specific hints + AIHints struct { + SuggestedEntryPoints []string `json:"suggested_entry_points"` + ProjectType string `json:"project_type"` // "library", "application", "monorepo", etc. 
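+		// BuildSystem is a best-effort guess derived from config files found at the
+		// repo root (e.g. "npm/yarn" for package.json, "go modules" for go.mod,
+		// "cargo" for Cargo.toml).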
+ BuildSystem string `json:"build_system,omitempty"` + TestFramework string `json:"test_framework,omitempty"` + } `json:"ai_hints"` +} + +// GetAIRepoSummary returns a comprehensive AI-friendly summary of a repository +func GetAIRepoSummary(ctx *context.APIContext) { + var req AIRepoSummaryRequest + if err := json.NewDecoder(ctx.Req.Body).Decode(&req); err != nil { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "error": err.Error(), + }) + return + } + + // Get repository + repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, req.Owner, req.Repo) + if err != nil { + if repo_model.IsErrRepoNotExist(err) { + ctx.APIErrorWithCode(apierrors.RepoNotFound) + return + } + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + + // Check access + if repo.IsPrivate && !ctx.IsSigned { + ctx.APIErrorWithCode(apierrors.PermAccessDenied) + return + } + + // Load owner + if err := repo.LoadOwner(ctx); err != nil { + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + + // Open git repo + gitRepo, err := gitrepo.OpenRepository(ctx, repo) + if err != nil { + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + defer gitRepo.Close() + + response := AIRepoSummaryResponse{ + Owner: repo.Owner.Name, + Repo: repo.Name, + FullName: repo.FullName(), + Description: repo.Description, + Website: repo.Website, + Language: repo.PrimaryLanguage.Language, + Stars: repo.NumStars, + Forks: repo.NumForks, + Watchers: repo.NumWatches, + OpenIssues: repo.NumOpenIssues, + OpenPRs: repo.NumOpenPulls, + Size: repo.Size, + IsPrivate: repo.IsPrivate, + IsFork: repo.IsFork, + IsArchived: repo.IsArchived, + IsTemplate: repo.IsTemplate, + HasWiki: repo.UnitEnabled(ctx, unit.TypeWiki), + HasIssues: repo.UnitEnabled(ctx, unit.TypeIssues), + HasProjects: repo.UnitEnabled(ctx, unit.TypeProjects), + DefaultBranch: repo.DefaultBranch, + } + + // Get branches (limit to recent 20) + branchNames, _, err := gitRepo.GetBranchNames(0, 20) + if err == nil { + response.Branches = branchNames + } + + // Get recent tags + tagInfos, _, err := gitRepo.GetTagInfos(1, 10) + if err == nil { + tagNames := make([]string, 0, len(tagInfos)) + for _, t := range tagInfos { + tagNames = append(tagNames, t.Name) + } + response.Tags = tagNames + } + + // Get last commit + ref := req.Ref + if ref == "" { + ref = repo.DefaultBranch + } + if commit, err := gitRepo.GetBranchCommit(ref); err == nil { + response.LastCommit.SHA = commit.ID.String() + response.LastCommit.Message = strings.Split(commit.CommitMessage, "\n")[0] + response.LastCommit.Author = commit.Author.Name + response.LastCommit.Timestamp = commit.Author.When.Format("2006-01-02T15:04:05Z07:00") + } + + // Analyze structure + if commit, err := gitRepo.GetBranchCommit(ref); err == nil { + if tree, err := commit.SubTree(""); err == nil { + entries, _ := tree.ListEntries() + + for _, entry := range entries { + name := entry.Name() + if entry.IsDir() { + response.Structure.TopLevelDirs = append(response.Structure.TopLevelDirs, name) + } else { + response.Structure.TopLevelFiles = append(response.Structure.TopLevelFiles, name) + + // Check for special files + lowerName := strings.ToLower(name) + if strings.HasPrefix(lowerName, "readme") { + response.Structure.HasReadme = true + response.Structure.ReadmePath = name + } + if strings.HasPrefix(lowerName, "license") || lowerName == "copying" { + response.Structure.HasLicense = true + response.Structure.LicensePath = name + } + if strings.HasPrefix(lowerName, "contributing") { + response.Structure.HasContrib = true 
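+						// Record the contributing guide's path alongside the flag.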
+ response.Structure.ContribPath = name + } + + // Check for config files + if isConfigFile(name) { + response.Structure.ConfigFiles = append(response.Structure.ConfigFiles, name) + } + } + } + } + + // Count total files (approximate via tree walk, limit depth) + response.Structure.FileCount = countFiles(commit, "", 0, 5) + } + + // Get language stats from repo + response.Structure.Languages = make(map[string]int64) + if langs, err := repo_model.GetLanguageStats(ctx, repo); err == nil { + for _, lang := range langs { + response.Structure.Languages[lang.Language] = int64(lang.Percentage) + } + } + + // AI hints + response.AIHints = detectProjectType(response.Structure.TopLevelFiles, response.Structure.ConfigFiles, response.Language) + + ctx.JSON(http.StatusOK, response) +} + +// AINavigationRequest represents a request to understand repo navigation +type AINavigationRequest struct { + Owner string `json:"owner" binding:"Required"` + Repo string `json:"repo" binding:"Required"` + Ref string `json:"ref"` + Query string `json:"query"` // What the AI is looking for +} + +// AINavigationResponse provides navigation hints for AI +type AINavigationResponse struct { + // Directory tree (limited depth) + Tree []TreeNode `json:"tree"` + + // Important paths + ImportantPaths struct { + Entrypoints []PathInfo `json:"entrypoints"` + Config []PathInfo `json:"config"` + Tests []PathInfo `json:"tests"` + Docs []PathInfo `json:"docs"` + } `json:"important_paths"` + + // File type summary + FileTypes map[string]int `json:"file_types"` // extension -> count +} + +// TreeNode represents a node in the directory tree +type TreeNode struct { + Path string `json:"path"` + Name string `json:"name"` + Type string `json:"type"` // "file" or "dir" + Size int64 `json:"size,omitempty"` + Children []TreeNode `json:"children,omitempty"` +} + +// PathInfo provides information about an important path +type PathInfo struct { + Path string `json:"path"` + Description string `json:"description"` + Priority int `json:"priority"` // 1-10, higher is more important +} + +// GetAINavigation returns navigation hints for AI tools +func GetAINavigation(ctx *context.APIContext) { + var req AINavigationRequest + if err := json.NewDecoder(ctx.Req.Body).Decode(&req); err != nil { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "error": err.Error(), + }) + return + } + + // Get repository + repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, req.Owner, req.Repo) + if err != nil { + if repo_model.IsErrRepoNotExist(err) { + ctx.APIErrorWithCode(apierrors.RepoNotFound) + return + } + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + + // Check access + if repo.IsPrivate && !ctx.IsSigned { + ctx.APIErrorWithCode(apierrors.PermAccessDenied) + return + } + + // Open git repo + gitRepo, err := gitrepo.OpenRepository(ctx, repo) + if err != nil { + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + defer gitRepo.Close() + + ref := req.Ref + if ref == "" { + ref = repo.DefaultBranch + } + + response := AINavigationResponse{ + FileTypes: make(map[string]int), + } + + commit, err := gitRepo.GetBranchCommit(ref) + if err != nil { + ctx.APIErrorWithCode(apierrors.RefNotFound) + return + } + + // Build tree (max depth 3) + response.Tree = buildTree(commit, "", 0, 3) + + // Collect file types and identify important paths + collectFileInfo(commit, "", &response) + + ctx.JSON(http.StatusOK, response) +} + +// AIIssueContextRequest represents a request for issue context +type AIIssueContextRequest struct { + 
Owner string `json:"owner" binding:"Required"` + Repo string `json:"repo" binding:"Required"` + IssueNumber int64 `json:"issue_number" binding:"Required"` +} + +// AIIssueContextResponse provides rich context about an issue +type AIIssueContextResponse struct { + // Issue details + Number int64 `json:"number"` + Title string `json:"title"` + Body string `json:"body"` + State string `json:"state"` + Labels []string `json:"labels"` + Author string `json:"author"` + Assignees []string `json:"assignees"` + CreatedAt string `json:"created_at"` + UpdatedAt string `json:"updated_at"` + + // Comments + Comments []struct { + Author string `json:"author"` + Body string `json:"body"` + CreatedAt string `json:"created_at"` + } `json:"comments"` + + // Related info + RelatedIssues []struct { + Number int64 `json:"number"` + Title string `json:"title"` + State string `json:"state"` + } `json:"related_issues,omitempty"` + + // Code references (files mentioned in issue/comments) + CodeReferences []string `json:"code_references,omitempty"` + + // AI hints + AIHints struct { + Category string `json:"category"` // "bug", "feature", "question", etc. + Complexity string `json:"complexity"` // "simple", "moderate", "complex" + SuggestedFiles []string `json:"suggested_files,omitempty"` + } `json:"ai_hints"` +} + +// GetAIIssueContext returns rich context about an issue for AI tools +func GetAIIssueContext(ctx *context.APIContext) { + var req AIIssueContextRequest + if err := json.NewDecoder(ctx.Req.Body).Decode(&req); err != nil { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "error": err.Error(), + }) + return + } + + // Get repository + repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, req.Owner, req.Repo) + if err != nil { + if repo_model.IsErrRepoNotExist(err) { + ctx.APIErrorWithCode(apierrors.RepoNotFound) + return + } + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + + // Check access + if repo.IsPrivate && !ctx.IsSigned { + ctx.APIErrorWithCode(apierrors.PermAccessDenied) + return + } + + // Get issue + issue, err := issues_model.GetIssueByIndex(ctx, repo.ID, req.IssueNumber) + if err != nil { + if issues_model.IsErrIssueNotExist(err) { + ctx.APIErrorWithCode(apierrors.IssueNotFound, map[string]any{ + "issue_number": req.IssueNumber, + }) + return + } + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + + // Load related data + if err := issue.LoadPoster(ctx); err != nil { + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + if err := issue.LoadLabels(ctx); err != nil { + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + if err := issue.LoadAssignees(ctx); err != nil { + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + + response := AIIssueContextResponse{ + Number: issue.Index, + Title: issue.Title, + Body: issue.Content, + State: map[bool]string{true: "closed", false: "open"}[issue.IsClosed], + Author: issue.Poster.Name, + CreatedAt: issue.CreatedUnix.AsTime().Format("2006-01-02T15:04:05Z07:00"), + UpdatedAt: issue.UpdatedUnix.AsTime().Format("2006-01-02T15:04:05Z07:00"), + } + + // Labels + for _, label := range issue.Labels { + response.Labels = append(response.Labels, label.Name) + } + + // Assignees + for _, assignee := range issue.Assignees { + response.Assignees = append(response.Assignees, assignee.Name) + } + + // Get comments + comments, err := issues_model.FindComments(ctx, &issues_model.FindCommentsOptions{ + IssueID: issue.ID, + Type: issues_model.CommentTypeComment, + }) + if err == nil { + for _, comment 
:= range comments { + if err := comment.LoadPoster(ctx); err != nil { + continue + } + response.Comments = append(response.Comments, struct { + Author string `json:"author"` + Body string `json:"body"` + CreatedAt string `json:"created_at"` + }{ + Author: comment.Poster.Name, + Body: comment.Content, + CreatedAt: comment.CreatedUnix.AsTime().Format("2006-01-02T15:04:05Z07:00"), + }) + } + } + + // Extract code references from issue body and comments + codeRefs := extractCodeReferences(issue.Content) + for _, comment := range response.Comments { + codeRefs = append(codeRefs, extractCodeReferences(comment.Body)...) + } + response.CodeReferences = uniqueStrings(codeRefs) + + // AI hints based on labels and content + response.AIHints.Category = categorizeIssue(issue.Labels, issue.Title, issue.Content) + response.AIHints.Complexity = estimateComplexity(issue.Content, len(response.Comments)) + + ctx.JSON(http.StatusOK, response) +} + +// Helper functions + +func isConfigFile(name string) bool { + configFiles := []string{ + "package.json", "package-lock.json", "yarn.lock", "pnpm-lock.yaml", + "go.mod", "go.sum", + "Cargo.toml", "Cargo.lock", + "requirements.txt", "setup.py", "pyproject.toml", "Pipfile", + "Gemfile", "Gemfile.lock", + "composer.json", "composer.lock", + "pom.xml", "build.gradle", "build.gradle.kts", + "CMakeLists.txt", "Makefile", "makefile", + "Dockerfile", "docker-compose.yml", "docker-compose.yaml", + ".gitignore", ".gitattributes", + "tsconfig.json", "jsconfig.json", + ".eslintrc", ".eslintrc.js", ".eslintrc.json", + ".prettierrc", ".prettierrc.js", ".prettierrc.json", + "webpack.config.js", "vite.config.js", "rollup.config.js", + ".env.example", ".env.sample", + } + for _, cf := range configFiles { + if name == cf { + return true + } + } + return false +} + +func countFiles(commit *git.Commit, dir string, depth, maxDepth int) int { + if depth > maxDepth { + return 0 + } + tree, err := commit.SubTree(dir) + if err != nil { + return 0 + } + entries, _ := tree.ListEntries() + count := 0 + for _, entry := range entries { + if entry.IsDir() { + subPath := entry.Name() + if dir != "" { + subPath = dir + "/" + entry.Name() + } + count += countFiles(commit, subPath, depth+1, maxDepth) + } else { + count++ + } + } + return count +} + +func detectProjectType(files, configFiles []string, primaryLang string) struct { + SuggestedEntryPoints []string `json:"suggested_entry_points"` + ProjectType string `json:"project_type"` + BuildSystem string `json:"build_system,omitempty"` + TestFramework string `json:"test_framework,omitempty"` +} { + hints := struct { + SuggestedEntryPoints []string `json:"suggested_entry_points"` + ProjectType string `json:"project_type"` + BuildSystem string `json:"build_system,omitempty"` + TestFramework string `json:"test_framework,omitempty"` + }{ + ProjectType: "unknown", + } + + for _, cf := range configFiles { + switch cf { + case "package.json": + hints.BuildSystem = "npm/yarn" + hints.SuggestedEntryPoints = append(hints.SuggestedEntryPoints, "package.json", "src/index.js", "src/index.ts") + case "go.mod": + hints.BuildSystem = "go modules" + hints.SuggestedEntryPoints = append(hints.SuggestedEntryPoints, "go.mod", "main.go", "cmd/") + case "Cargo.toml": + hints.BuildSystem = "cargo" + hints.SuggestedEntryPoints = append(hints.SuggestedEntryPoints, "Cargo.toml", "src/main.rs", "src/lib.rs") + case "requirements.txt", "pyproject.toml": + hints.BuildSystem = "pip/poetry" + hints.SuggestedEntryPoints = append(hints.SuggestedEntryPoints, "setup.py", "main.py", 
"app.py") + case "Makefile", "makefile": + hints.BuildSystem = "make" + } + } + + // Detect project type + for _, f := range files { + if f == "main.go" || f == "main.rs" || f == "main.py" { + hints.ProjectType = "application" + break + } + } + + if hints.ProjectType == "unknown" { + for _, cf := range configFiles { + if cf == "setup.py" || cf == "Cargo.toml" { + hints.ProjectType = "library" + break + } + } + } + + return hints +} + +func buildTree(commit *git.Commit, dir string, depth, maxDepth int) []TreeNode { + if depth >= maxDepth { + return nil + } + + tree, err := commit.SubTree(dir) + if err != nil { + return nil + } + + entries, _ := tree.ListEntries() + nodes := make([]TreeNode, 0, len(entries)) + + for _, entry := range entries { + node := TreeNode{ + Name: entry.Name(), + Path: path.Join(dir, entry.Name()), + } + + if entry.IsDir() { + node.Type = "dir" + node.Children = buildTree(commit, node.Path, depth+1, maxDepth) + } else { + node.Type = "file" + node.Size = entry.Size() + } + + nodes = append(nodes, node) + } + + // Sort: directories first, then alphabetically + sort.Slice(nodes, func(i, j int) bool { + if nodes[i].Type != nodes[j].Type { + return nodes[i].Type == "dir" + } + return nodes[i].Name < nodes[j].Name + }) + + return nodes +} + +func collectFileInfo(commit *git.Commit, dir string, response *AINavigationResponse) { + tree, err := commit.SubTree(dir) + if err != nil { + return + } + + entries, _ := tree.ListEntries() + for _, entry := range entries { + fullPath := path.Join(dir, entry.Name()) + + if entry.IsDir() { + // Check for important directories + name := strings.ToLower(entry.Name()) + if name == "test" || name == "tests" || name == "__tests__" || name == "spec" { + response.ImportantPaths.Tests = append(response.ImportantPaths.Tests, PathInfo{ + Path: fullPath, + Description: "Test directory", + Priority: 7, + }) + } + if name == "docs" || name == "documentation" { + response.ImportantPaths.Docs = append(response.ImportantPaths.Docs, PathInfo{ + Path: fullPath, + Description: "Documentation directory", + Priority: 6, + }) + } + if name == "src" || name == "lib" || name == "pkg" { + response.ImportantPaths.Entrypoints = append(response.ImportantPaths.Entrypoints, PathInfo{ + Path: fullPath, + Description: "Source directory", + Priority: 8, + }) + } + } else { + // Count file extensions + ext := strings.ToLower(path.Ext(entry.Name())) + if ext != "" { + response.FileTypes[ext]++ + } + + // Check for config files + if isConfigFile(entry.Name()) { + response.ImportantPaths.Config = append(response.ImportantPaths.Config, PathInfo{ + Path: fullPath, + Description: "Configuration file", + Priority: 5, + }) + } + + // Check for entry points + name := strings.ToLower(entry.Name()) + if name == "main.go" || name == "main.rs" || name == "main.py" || name == "index.js" || name == "index.ts" { + response.ImportantPaths.Entrypoints = append(response.ImportantPaths.Entrypoints, PathInfo{ + Path: fullPath, + Description: "Application entry point", + Priority: 10, + }) + } + } + } +} + +func extractCodeReferences(text string) []string { + // Simple extraction of file paths mentioned in text + // Look for patterns like `path/to/file.ext` or file.ext + refs := []string{} + words := strings.Fields(text) + for _, word := range words { + // Clean up markdown code blocks + word = strings.Trim(word, "`*_[]()\"'") + if strings.Contains(word, ".") && (strings.Contains(word, "/") || strings.Contains(word, "\\")) { + // Looks like a file path + if len(word) > 3 && len(word) < 200 { 
+ refs = append(refs, word) + } + } + } + return refs +} + +func uniqueStrings(input []string) []string { + seen := make(map[string]bool) + result := []string{} + for _, s := range input { + if !seen[s] { + seen[s] = true + result = append(result, s) + } + } + return result +} + +func categorizeIssue(labels []*issues_model.Label, title, body string) string { + // Check labels first + for _, label := range labels { + name := strings.ToLower(label.Name) + if strings.Contains(name, "bug") { + return "bug" + } + if strings.Contains(name, "feature") || strings.Contains(name, "enhancement") { + return "feature" + } + if strings.Contains(name, "question") || strings.Contains(name, "help") { + return "question" + } + if strings.Contains(name, "documentation") || strings.Contains(name, "docs") { + return "documentation" + } + } + + // Check title/body keywords + combined := strings.ToLower(title + " " + body) + if strings.Contains(combined, "error") || strings.Contains(combined, "crash") || strings.Contains(combined, "bug") { + return "bug" + } + if strings.Contains(combined, "feature") || strings.Contains(combined, "add support") || strings.Contains(combined, "would be nice") { + return "feature" + } + if strings.Contains(combined, "how to") || strings.Contains(combined, "how do i") || strings.Contains(combined, "?") { + return "question" + } + + return "general" +} + +func estimateComplexity(body string, commentCount int) string { + // Simple heuristics + lines := len(strings.Split(body, "\n")) + words := len(strings.Fields(body)) + + if lines < 10 && words < 100 && commentCount < 3 { + return "simple" + } + if lines > 50 || words > 500 || commentCount > 10 { + return "complex" + } + return "moderate" +} diff --git a/routers/api/v2/api.go b/routers/api/v2/api.go new file mode 100644 index 0000000000..4ca7462f38 --- /dev/null +++ b/routers/api/v2/api.go @@ -0,0 +1,151 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +// Package v2 Gitea API v2 +// +// This is the v2 API with improved error handling, batch operations, +// and AI-friendly endpoints. It uses structured error codes for +// machine-readable error handling. +// +// Schemes: https, http +// License: MIT http://opensource.org/licenses/MIT +// +// Consumes: +// - application/json +// +// Produces: +// - application/json +// - application/x-ndjson +// +// swagger:meta +package v2 + +import ( + "net/http" + + auth_model "code.gitea.io/gitea/models/auth" + apierrors "code.gitea.io/gitea/modules/errors" + "code.gitea.io/gitea/modules/graceful" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/modules/web" + "code.gitea.io/gitea/modules/web/middleware" + "code.gitea.io/gitea/routers/common" + "code.gitea.io/gitea/services/auth" + "code.gitea.io/gitea/services/context" + + "github.com/go-chi/cors" +) + +// Routes registers all v2 API routes to web application. 
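+// Public endpoints (/version, /docs, /swagger.json) need no authentication;
+// the /user, /batch/*, /stream/* and /ai/* groups require a signed-in request
+// (enforced by reqToken). Example request against the batch group, with
+// illustrative values:
+//
+//	POST /api/v2/batch/files
+//	{"owner": "some-org", "repo": "some-repo", "paths": ["README.md", "go.mod"]}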
+func Routes() *web.Router { + m := web.NewRouter() + + m.Use(middleware.RequestID()) + m.Use(middleware.RateLimitInfo()) + m.Use(securityHeaders()) + + if setting.CORSConfig.Enabled { + m.Use(cors.Handler(cors.Options{ + AllowedOrigins: setting.CORSConfig.AllowDomain, + AllowedMethods: setting.CORSConfig.Methods, + AllowCredentials: setting.CORSConfig.AllowCredentials, + AllowedHeaders: append([]string{"Authorization", "X-Gitea-OTP"}, setting.CORSConfig.Headers...), + MaxAge: int(setting.CORSConfig.MaxAge.Seconds()), + })) + } + + m.Use(context.APIContexter()) + + // Get user from session if logged in + m.Use(apiAuth(buildAuthGroup())) + + m.Group("", func() { + // Public endpoints (no auth required) + m.Get("/version", Version) + + // API Documentation (Scalar) + m.Get("/docs", DocsScalar) + m.Get("/swagger.json", SwaggerJSON) + + // Authenticated endpoints + m.Group("", func() { + // User info + m.Get("/user", GetAuthenticatedUser) + + // Batch operations - efficient bulk requests + m.Group("/batch", func() { + m.Post("/files", BatchGetFiles) + m.Post("/repos", BatchGetRepos) + }) + + // Streaming endpoints - NDJSON responses + m.Group("/stream", func() { + m.Post("/files", StreamFiles) + m.Post("/commits", StreamCommits) + m.Post("/issues", StreamIssues) + }) + + // AI context endpoints - rich context for AI tools + m.Group("/ai", func() { + m.Post("/repo/summary", GetAIRepoSummary) + m.Post("/repo/navigation", GetAINavigation) + m.Post("/issue/context", GetAIIssueContext) + }) + }, reqToken()) + }) + + return m +} + +func securityHeaders() func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { + // CORS preflight + if req.Method == "OPTIONS" { + return + } + next.ServeHTTP(w, req) + }) + } +} + +func buildAuthGroup() *auth.Group { + group := auth.NewGroup( + &auth.OAuth2{}, + &auth.HTTPSign{}, + &auth.Basic{}, + ) + if setting.Service.EnableReverseProxyAuthAPI { + group.Add(&auth.ReverseProxy{}) + } + if setting.IsWindows && auth_model.IsSSPIEnabled(graceful.GetManager().ShutdownContext()) { + group.Add(&auth.SSPI{}) + } + return group +} + +func apiAuth(authMethod auth.Method) func(*context.APIContext) { + return func(ctx *context.APIContext) { + ar, err := common.AuthShared(ctx.Base, nil, authMethod) + if err != nil { + msg, ok := auth.ErrAsUserAuthMessage(err) + msg = util.Iif(ok, msg, "invalid username, password or token") + ctx.APIErrorWithCodeAndMessage(apierrors.AuthInvalidCredentials, msg) + return + } + ctx.Doer = ar.Doer + ctx.IsSigned = ar.Doer != nil + ctx.IsBasicAuth = ar.IsBasicAuth + } +} + +// reqToken requires authentication +func reqToken() func(ctx *context.APIContext) { + return func(ctx *context.APIContext) { + if !ctx.IsSigned { + ctx.APIErrorWithCode(apierrors.AuthTokenMissing) + return + } + } +} diff --git a/routers/api/v2/batch.go b/routers/api/v2/batch.go new file mode 100644 index 0000000000..dcb7ca4b87 --- /dev/null +++ b/routers/api/v2/batch.go @@ -0,0 +1,273 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package v2 + +import ( + "encoding/json" + "net/http" + + repo_model "code.gitea.io/gitea/models/repo" + apierrors "code.gitea.io/gitea/modules/errors" + "code.gitea.io/gitea/modules/gitrepo" + "code.gitea.io/gitea/services/context" +) + +// BatchFileRequest represents a request to get multiple files +type BatchFileRequest struct { + Owner string `json:"owner" binding:"Required"` + Repo string `json:"repo" binding:"Required"` + Ref string `json:"ref"` + Paths []string `json:"paths" binding:"Required"` + Format string `json:"format"` // "content" or "metadata" +} + +// BatchFileResult represents the result for a single file in batch +type BatchFileResult struct { + Path string `json:"path"` + Content string `json:"content,omitempty"` + Encoding string `json:"encoding,omitempty"` + SHA string `json:"sha,omitempty"` + Size int64 `json:"size,omitempty"` + Type string `json:"type,omitempty"` + Error string `json:"error,omitempty"` +} + +// BatchFileResponse represents the response for batch file retrieval +type BatchFileResponse struct { + Owner string `json:"owner"` + Repo string `json:"repo"` + Ref string `json:"ref"` + Results []BatchFileResult `json:"results"` +} + +// BatchGetFiles retrieves multiple files in a single request +// This is optimized for AI tools that need to fetch multiple files at once +func BatchGetFiles(ctx *context.APIContext) { + var req BatchFileRequest + if err := json.NewDecoder(ctx.Req.Body).Decode(&req); err != nil { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "field": "body", + "error": err.Error(), + }) + return + } + + // Validate path count (limit to prevent abuse) + if len(req.Paths) > 100 { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "field": "paths", + "message": "maximum 100 paths per request", + "count": len(req.Paths), + }) + return + } + + if len(req.Paths) == 0 { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "field": "paths", + "message": "at least one path is required", + }) + return + } + + // Get repository + repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, req.Owner, req.Repo) + if err != nil { + if repo_model.IsErrRepoNotExist(err) { + ctx.APIErrorWithCode(apierrors.RepoNotFound, map[string]any{ + "owner": req.Owner, + "repo": req.Repo, + }) + return + } + ctx.APIErrorWithCode(apierrors.InternalError, map[string]any{ + "error": err.Error(), + }) + return + } + + // Check access (basic check - user must be signed in or repo is public) + if repo.IsPrivate && !ctx.IsSigned { + ctx.APIErrorWithCode(apierrors.PermAccessDenied) + return + } + + // Open git repo + gitRepo, err := gitrepo.OpenRepository(ctx, repo) + if err != nil { + ctx.APIErrorWithCode(apierrors.InternalError, map[string]any{ + "error": "failed to open repository", + }) + return + } + defer gitRepo.Close() + + // Determine ref + ref := req.Ref + if ref == "" { + ref = repo.DefaultBranch + } + + // Get commit for ref + commit, err := gitRepo.GetCommit(ref) + if err != nil { + // Try as branch + commit, err = gitRepo.GetBranchCommit(ref) + if err != nil { + // Try as tag + commit, err = gitRepo.GetTagCommit(ref) + if err != nil { + ctx.APIErrorWithCode(apierrors.RefNotFound, map[string]any{ + "ref": ref, + }) + return + } + } + } + + // Fetch each file + results := make([]BatchFileResult, 0, len(req.Paths)) + metadataOnly := req.Format == "metadata" + + for _, path := range req.Paths { + result := BatchFileResult{Path: path} + + entry, err := commit.GetTreeEntryByPath(path) 
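+		// A path that cannot be resolved is reported in its own result entry
+		// rather than failing the whole batch.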
+ if err != nil { + result.Error = "file not found" + results = append(results, result) + continue + } + + result.SHA = entry.ID.String() + result.Size = entry.Size() + result.Type = entry.Mode().String() + + if !metadataOnly && !entry.IsDir() { + // Get file content (limit size to prevent memory issues) + if entry.Size() > 10*1024*1024 { // 10MB limit + result.Error = "file too large (>10MB)" + } else { + blob := entry.Blob() + reader, err := blob.DataAsync() + if err != nil { + result.Error = "failed to read file content" + } else { + defer reader.Close() + content := make([]byte, entry.Size()) + _, err = reader.Read(content) + if err != nil && err.Error() != "EOF" { + result.Error = "failed to read file content" + } else { + result.Content = string(content) + result.Encoding = "utf-8" + } + } + } + } + + results = append(results, result) + } + + ctx.JSON(http.StatusOK, BatchFileResponse{ + Owner: req.Owner, + Repo: req.Repo, + Ref: ref, + Results: results, + }) +} + +// BatchRepoRequest represents a request to get info about multiple repos +type BatchRepoRequest struct { + Repos []struct { + Owner string `json:"owner"` + Repo string `json:"repo"` + } `json:"repos" binding:"Required"` + Fields []string `json:"fields"` // Which fields to include +} + +// BatchRepoResult represents the result for a single repo in batch +type BatchRepoResult struct { + Owner string `json:"owner"` + Repo string `json:"repo"` + FullName string `json:"full_name,omitempty"` + Description string `json:"description,omitempty"` + Private bool `json:"private,omitempty"` + Fork bool `json:"fork,omitempty"` + Stars int `json:"stars,omitempty"` + Forks int `json:"forks,omitempty"` + Language string `json:"language,omitempty"` + Error string `json:"error,omitempty"` +} + +// BatchGetRepos retrieves information about multiple repositories +func BatchGetRepos(ctx *context.APIContext) { + var req BatchRepoRequest + if err := json.NewDecoder(ctx.Req.Body).Decode(&req); err != nil { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "field": "body", + "error": err.Error(), + }) + return + } + + // Validate repo count + if len(req.Repos) > 50 { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "field": "repos", + "message": "maximum 50 repositories per request", + "count": len(req.Repos), + }) + return + } + + if len(req.Repos) == 0 { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "field": "repos", + "message": "at least one repository is required", + }) + return + } + + results := make([]BatchRepoResult, 0, len(req.Repos)) + + for _, repoRef := range req.Repos { + result := BatchRepoResult{ + Owner: repoRef.Owner, + Repo: repoRef.Repo, + } + + repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, repoRef.Owner, repoRef.Repo) + if err != nil { + if repo_model.IsErrRepoNotExist(err) { + result.Error = "repository not found" + } else { + result.Error = "failed to fetch repository" + } + results = append(results, result) + continue + } + + // Check access + if repo.IsPrivate && !ctx.IsSigned { + result.Error = "access denied" + results = append(results, result) + continue + } + + result.FullName = repo.FullName() + result.Description = repo.Description + result.Private = repo.IsPrivate + result.Fork = repo.IsFork + result.Stars = repo.NumStars + result.Forks = repo.NumForks + result.Language = repo.PrimaryLanguage.Language + + results = append(results, result) + } + + ctx.JSON(http.StatusOK, map[string]any{ + "results": results, + }) +} diff --git 
a/routers/api/v2/docs.go b/routers/api/v2/docs.go new file mode 100644 index 0000000000..8e1978c848 --- /dev/null +++ b/routers/api/v2/docs.go @@ -0,0 +1,247 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package v2 + +import ( + "net/http" + + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/services/context" +) + +// scalarTemplate is the HTML template for Scalar API documentation +// Scalar is a modern, AI-friendly API documentation tool +const scalarTemplate = ` + + + {{.AppName}} API v2 Reference + + + + + + + + +` + +// DocsScalar serves the Scalar API documentation UI for v2 +func DocsScalar(ctx *context.APIContext) { + data := map[string]string{ + "AppName": setting.AppName, + "SpecURL": setting.AppSubURL + "/api/v2/swagger.json", + } + + ctx.Resp.Header().Set("Content-Type", "text/html; charset=utf-8") + ctx.Resp.WriteHeader(http.StatusOK) + + // Simple template rendering + html := scalarTemplate + for key, value := range data { + html = replaceTemplateVar(html, key, value) + } + ctx.Resp.Write([]byte(html)) +} + +func replaceTemplateVar(template, key, value string) string { + placeholder := "{{." + key + "}}" + result := template + for i := 0; i < 10; i++ { // Replace up to 10 occurrences + newResult := "" + idx := 0 + for { + pos := indexOf(result[idx:], placeholder) + if pos == -1 { + newResult += result[idx:] + break + } + newResult += result[idx : idx+pos] + newResult += value + idx = idx + pos + len(placeholder) + } + if newResult == result { + break + } + result = newResult + } + return result +} + +func indexOf(s, substr string) int { + for i := 0; i <= len(s)-len(substr); i++ { + if s[i:i+len(substr)] == substr { + return i + } + } + return -1 +} + +// SwaggerJSON serves the OpenAPI/Swagger JSON specification for v2 +func SwaggerJSON(ctx *context.APIContext) { + spec := generateOpenAPISpec() + ctx.JSON(http.StatusOK, spec) +} + +// generateOpenAPISpec creates the OpenAPI 3.0 specification for v2 API +func generateOpenAPISpec() map[string]any { + return map[string]any{ + "openapi": "3.0.3", + "info": map[string]any{ + "title": setting.AppName + " API v2", + "description": "Gitea API v2 with structured error codes, batch operations, and AI-friendly endpoints.", + "version": "2.0.0", + "contact": map[string]string{ + "name": "Gitea", + "url": "https://gitea.com", + }, + "license": map[string]string{ + "name": "MIT", + "url": "https://opensource.org/licenses/MIT", + }, + }, + "servers": []map[string]string{ + {"url": setting.AppURL + "api/v2", "description": "Current server"}, + }, + "tags": []map[string]string{ + {"name": "miscellaneous", "description": "General API information"}, + {"name": "user", "description": "User operations"}, + {"name": "repository", "description": "Repository operations"}, + {"name": "batch", "description": "Batch operations for bulk actions"}, + {"name": "ai", "description": "AI-friendly context endpoints"}, + }, + "paths": map[string]any{ + "/version": map[string]any{ + "get": map[string]any{ + "tags": []string{"miscellaneous"}, + "summary": "Get API version", + "operationId": "getVersion", + "responses": map[string]any{ + "200": map[string]any{ + "description": "Version information", + "content": map[string]any{ + "application/json": map[string]any{ + "schema": map[string]any{ + "$ref": "#/components/schemas/Version", + }, + }, + }, + }, + }, + }, + }, + "/user": map[string]any{ + "get": map[string]any{ + "tags": []string{"user"}, + "summary": "Get authenticated user", + 
"operationId": "getAuthenticatedUser", + "security": []map[string][]string{{"bearerAuth": {}}}, + "responses": map[string]any{ + "200": map[string]any{ + "description": "User information", + "content": map[string]any{ + "application/json": map[string]any{ + "schema": map[string]any{ + "$ref": "#/components/schemas/User", + }, + }, + }, + }, + "401": map[string]any{ + "description": "Authentication required", + "content": map[string]any{ + "application/json": map[string]any{ + "schema": map[string]any{ + "$ref": "#/components/schemas/APIError", + }, + }, + }, + }, + }, + }, + }, + }, + "components": map[string]any{ + "securitySchemes": map[string]any{ + "bearerAuth": map[string]any{ + "type": "http", + "scheme": "bearer", + "description": "API token authentication", + "bearerFormat": "token", + }, + "basicAuth": map[string]any{ + "type": "http", + "scheme": "basic", + "description": "Basic authentication with username and password", + }, + }, + "schemas": map[string]any{ + "Version": map[string]any{ + "type": "object", + "properties": map[string]any{ + "version": map[string]string{"type": "string", "description": "Gitea version"}, + "api": map[string]string{"type": "string", "description": "API version"}, + }, + }, + "User": map[string]any{ + "type": "object", + "properties": map[string]any{ + "id": map[string]string{"type": "integer", "description": "User ID"}, + "login": map[string]string{"type": "string", "description": "Username"}, + "email": map[string]string{"type": "string", "description": "Email address"}, + "is_admin": map[string]string{"type": "boolean", "description": "Is site admin"}, + }, + }, + "APIError": map[string]any{ + "type": "object", + "description": "Structured error response following RFC 7807", + "properties": map[string]any{ + "error": map[string]any{ + "type": "object", + "properties": map[string]any{ + "code": map[string]string{"type": "string", "description": "Machine-readable error code"}, + "message": map[string]string{"type": "string", "description": "Human-readable error message"}, + "status": map[string]string{"type": "integer", "description": "HTTP status code"}, + "documentation_url": map[string]string{"type": "string", "description": "URL to error documentation"}, + "request_id": map[string]string{"type": "string", "description": "Request ID for tracing"}, + "details": map[string]string{"type": "object", "description": "Additional error context"}, + "suggestions": map[string]any{"type": "array", "items": map[string]string{"type": "string"}, "description": "Suggested actions"}, + }, + }, + }, + }, + }, + }, + } +} diff --git a/routers/api/v2/misc.go b/routers/api/v2/misc.go new file mode 100644 index 0000000000..813c41a227 --- /dev/null +++ b/routers/api/v2/misc.go @@ -0,0 +1,35 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package v2 + +import ( + "net/http" + + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/services/context" +) + +// VersionResponse contains version information +type VersionResponse struct { + Version string `json:"version"` + API string `json:"api"` +} + +// Version returns the Gitea version +func Version(ctx *context.APIContext) { + ctx.JSON(http.StatusOK, VersionResponse{ + Version: setting.AppVer, + API: "v2", + }) +} + +// GetAuthenticatedUser returns the authenticated user +func GetAuthenticatedUser(ctx *context.APIContext) { + ctx.JSON(http.StatusOK, map[string]any{ + "id": ctx.Doer.ID, + "login": ctx.Doer.Name, + "email": ctx.Doer.Email, + "is_admin": ctx.Doer.IsAdmin, + }) +} diff --git a/routers/api/v2/streaming.go b/routers/api/v2/streaming.go new file mode 100644 index 0000000000..24113a50f2 --- /dev/null +++ b/routers/api/v2/streaming.go @@ -0,0 +1,493 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package v2 + +import ( + "bufio" + "encoding/json" + "net/http" + + "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" + repo_model "code.gitea.io/gitea/models/repo" + apierrors "code.gitea.io/gitea/modules/errors" + "code.gitea.io/gitea/modules/gitrepo" + "code.gitea.io/gitea/modules/optional" + "code.gitea.io/gitea/services/context" +) + +// StreamWriter wraps response writer for NDJSON streaming +type StreamWriter struct { + w http.ResponseWriter + flusher http.Flusher + encoder *json.Encoder +} + +// NewStreamWriter creates a new NDJSON stream writer +func NewStreamWriter(w http.ResponseWriter) *StreamWriter { + flusher, _ := w.(http.Flusher) + return &StreamWriter{ + w: w, + flusher: flusher, + encoder: json.NewEncoder(w), + } +} + +// WriteItem writes a single item to the stream +func (sw *StreamWriter) WriteItem(item any) error { + if err := sw.encoder.Encode(item); err != nil { + return err + } + if sw.flusher != nil { + sw.flusher.Flush() + } + return nil +} + +// StreamFilesRequest represents request for streaming file contents +type StreamFilesRequest struct { + Owner string `json:"owner"` + Repo string `json:"repo"` + Ref string `json:"ref"` + Paths []string `json:"paths"` +} + +// StreamFileItem represents a single file in the stream +type StreamFileItem struct { + Type string `json:"type"` // "file", "error", "done" + Path string `json:"path,omitempty"` + Content string `json:"content,omitempty"` + SHA string `json:"sha,omitempty"` + Size int64 `json:"size,omitempty"` + Error string `json:"error,omitempty"` + Index int `json:"index,omitempty"` + Total int `json:"total,omitempty"` +} + +// StreamFiles streams file contents as NDJSON +// This allows AI tools to process files as they arrive without waiting for all files +func StreamFiles(ctx *context.APIContext) { + var req StreamFilesRequest + if err := json.NewDecoder(ctx.Req.Body).Decode(&req); err != nil { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "error": err.Error(), + }) + return + } + + if len(req.Paths) == 0 { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "field": "paths", + "message": "at least one path is required", + }) + return + } + + if len(req.Paths) > 500 { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "field": "paths", + "message": "maximum 500 paths per stream", + }) + return + } + + // Get repository + repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, req.Owner, req.Repo) + if err != nil { + if 
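StreamWriter above emits one JSON object per line, so a consumer can decode items as they arrive instead of buffering the full response. A minimal client sketch, assuming the files stream is mounted at /api/v2/stream/files; the actual route registration lives in api.go and may differ.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// fileItem mirrors StreamFileItem above, limited to the fields this sketch reads.
type fileItem struct {
	Type    string `json:"type"`
	Path    string `json:"path"`
	Content string `json:"content"`
	Error   string `json:"error"`
}

func main() {
	reqBody, _ := json.Marshal(map[string]any{
		"owner": "someowner", // placeholder owner/repo
		"repo":  "somerepo",
		"paths": []string{"README.md", "go.mod"},
	})
	resp, err := http.Post("https://gitea.example.com/api/v2/stream/files",
		"application/json", bytes.NewReader(reqBody))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// json.Decoder reads one JSON value per Decode call, which handles NDJSON
	// naturally: each streamed line becomes one decoded item.
	dec := json.NewDecoder(resp.Body)
	for {
		var item fileItem
		if err := dec.Decode(&item); err != nil {
			break // io.EOF once the server closes the stream
		}
		switch item.Type {
		case "file":
			fmt.Printf("%s: %d bytes\n", item.Path, len(item.Content))
		case "error":
			fmt.Printf("%s: %s\n", item.Path, item.Error)
		case "done":
			return
		}
	}
}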
repo_model.IsErrRepoNotExist(err) { + ctx.APIErrorWithCode(apierrors.RepoNotFound) + return + } + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + + // Check access + if repo.IsPrivate && !ctx.IsSigned { + ctx.APIErrorWithCode(apierrors.PermAccessDenied) + return + } + + // Open git repo + gitRepo, err := gitrepo.OpenRepository(ctx, repo) + if err != nil { + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + defer gitRepo.Close() + + // Determine ref + ref := req.Ref + if ref == "" { + ref = repo.DefaultBranch + } + + // Get commit + commit, err := gitRepo.GetCommit(ref) + if err != nil { + commit, err = gitRepo.GetBranchCommit(ref) + if err != nil { + commit, err = gitRepo.GetTagCommit(ref) + if err != nil { + ctx.APIErrorWithCode(apierrors.RefNotFound, map[string]any{ + "ref": ref, + }) + return + } + } + } + + // Set up streaming response + ctx.Resp.Header().Set("Content-Type", "application/x-ndjson") + ctx.Resp.Header().Set("Transfer-Encoding", "chunked") + ctx.Resp.Header().Set("X-Content-Type-Options", "nosniff") + ctx.Resp.WriteHeader(http.StatusOK) + + sw := NewStreamWriter(ctx.Resp) + total := len(req.Paths) + + // Stream each file + for i, path := range req.Paths { + item := StreamFileItem{ + Type: "file", + Path: path, + Index: i, + Total: total, + } + + entry, err := commit.GetTreeEntryByPath(path) + if err != nil { + item.Type = "error" + item.Error = "file not found" + sw.WriteItem(item) + continue + } + + item.SHA = entry.ID.String() + item.Size = entry.Size() + + if entry.IsDir() { + item.Type = "error" + item.Error = "path is a directory" + sw.WriteItem(item) + continue + } + + // Size limit for streaming + if entry.Size() > 5*1024*1024 { // 5MB per file in stream + item.Type = "error" + item.Error = "file too large for streaming (>5MB)" + sw.WriteItem(item) + continue + } + + blob := entry.Blob() + reader, err := blob.DataAsync() + if err != nil { + item.Type = "error" + item.Error = "failed to read content" + sw.WriteItem(item) + continue + } + + content := make([]byte, entry.Size()) + n, _ := reader.Read(content) + reader.Close() + + item.Content = string(content[:n]) + sw.WriteItem(item) + } + + // Send completion marker + sw.WriteItem(StreamFileItem{ + Type: "done", + Total: total, + }) +} + +// StreamCommitsRequest represents request for streaming commits +type StreamCommitsRequest struct { + Owner string `json:"owner"` + Repo string `json:"repo"` + Ref string `json:"ref"` + Path string `json:"path"` // Optional: filter by path + Limit int `json:"limit"` + Offset int `json:"offset"` +} + +// StreamCommitItem represents a single commit in the stream +type StreamCommitItem struct { + Type string `json:"type"` // "commit", "error", "done" + SHA string `json:"sha,omitempty"` + Message string `json:"message,omitempty"` + Author string `json:"author,omitempty"` + Email string `json:"email,omitempty"` + Timestamp string `json:"timestamp,omitempty"` + Files []string `json:"files,omitempty"` + Error string `json:"error,omitempty"` + Index int `json:"index,omitempty"` + Total int `json:"total,omitempty"` +} + +// StreamCommits streams commit history as NDJSON +func StreamCommits(ctx *context.APIContext) { + var req StreamCommitsRequest + if err := json.NewDecoder(ctx.Req.Body).Decode(&req); err != nil { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "error": err.Error(), + }) + return + } + + // Default and max limits + if req.Limit <= 0 { + req.Limit = 50 + } + if req.Limit > 500 { + req.Limit = 500 + } + + // Get repository + repo, 
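One caveat in the per-file loop above: a single reader.Read call is not guaranteed to fill the buffer, so larger blobs can be streamed truncated. Since entry.Size() is known, io.ReadFull(reader, content) would fill the buffer or return an error; alternatively a bounded io.ReadAll works without trusting the reported size. A small illustrative helper, not part of the patch:

package sketch

import (
	"fmt"
	"io"
)

// readAtMost drains r but refuses anything larger than limit bytes, mirroring
// the 5MB per-file cap enforced in StreamFiles above. io.ReadFull on a buffer
// of entry.Size() bytes is an equally valid fix when the size is trusted.
func readAtMost(r io.Reader, limit int64) ([]byte, error) {
	data, err := io.ReadAll(io.LimitReader(r, limit+1))
	if err != nil {
		return nil, err
	}
	if int64(len(data)) > limit {
		return nil, fmt.Errorf("content exceeds %d bytes", limit)
	}
	return data, nil
}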
err := repo_model.GetRepositoryByOwnerAndName(ctx, req.Owner, req.Repo) + if err != nil { + if repo_model.IsErrRepoNotExist(err) { + ctx.APIErrorWithCode(apierrors.RepoNotFound) + return + } + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + + // Check access + if repo.IsPrivate && !ctx.IsSigned { + ctx.APIErrorWithCode(apierrors.PermAccessDenied) + return + } + + // Open git repo + gitRepo, err := gitrepo.OpenRepository(ctx, repo) + if err != nil { + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + defer gitRepo.Close() + + ref := req.Ref + if ref == "" { + ref = repo.DefaultBranch + } + + // Get head commit for the ref + headCommit, err := gitRepo.GetBranchCommit(ref) + if err != nil { + headCommit, err = gitRepo.GetTagCommit(ref) + if err != nil { + ctx.APIErrorWithCode(apierrors.RefNotFound) + return + } + } + + // Set up streaming + ctx.Resp.Header().Set("Content-Type", "application/x-ndjson") + ctx.Resp.Header().Set("Transfer-Encoding", "chunked") + ctx.Resp.WriteHeader(http.StatusOK) + + sw := NewStreamWriter(ctx.Resp) + + // Get commits from head commit + commits, err := headCommit.CommitsByRange(req.Offset/req.Limit+1, req.Limit, "", "", "") + if err != nil { + sw.WriteItem(StreamCommitItem{ + Type: "error", + Error: "failed to get commits", + }) + return + } + + total := len(commits) + for i, commit := range commits { + item := StreamCommitItem{ + Type: "commit", + SHA: commit.ID.String(), + Message: commit.CommitMessage, + Author: commit.Author.Name, + Email: commit.Author.Email, + Timestamp: commit.Author.When.Format("2006-01-02T15:04:05Z07:00"), + Index: i, + Total: total, + } + sw.WriteItem(item) + } + + sw.WriteItem(StreamCommitItem{ + Type: "done", + Total: total, + }) +} + +// StreamIssuesRequest represents request for streaming issues +type StreamIssuesRequest struct { + Owner string `json:"owner"` + Repo string `json:"repo"` + State string `json:"state"` // "open", "closed", "all" + Labels []string `json:"labels"` + Limit int `json:"limit"` + Offset int `json:"offset"` +} + +// StreamIssueItem represents a single issue in the stream +type StreamIssueItem struct { + Type string `json:"type"` // "issue", "error", "done" + Number int64 `json:"number,omitempty"` + Title string `json:"title,omitempty"` + Body string `json:"body,omitempty"` + State string `json:"state,omitempty"` + Labels []string `json:"labels,omitempty"` + Author string `json:"author,omitempty"` + Assignees []string `json:"assignees,omitempty"` + CreatedAt string `json:"created_at,omitempty"` + UpdatedAt string `json:"updated_at,omitempty"` + Comments int `json:"comments,omitempty"` + Error string `json:"error,omitempty"` + Index int `json:"index,omitempty"` + Total int `json:"total,omitempty"` +} + +// StreamIssues streams issues as NDJSON +func StreamIssues(ctx *context.APIContext) { + var req StreamIssuesRequest + if err := json.NewDecoder(ctx.Req.Body).Decode(&req); err != nil { + ctx.APIErrorWithCode(apierrors.ValidationFailed, map[string]any{ + "error": err.Error(), + }) + return + } + + // Default and max limits + if req.Limit <= 0 { + req.Limit = 50 + } + if req.Limit > 200 { + req.Limit = 200 + } + + // Get repository + repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, req.Owner, req.Repo) + if err != nil { + if repo_model.IsErrRepoNotExist(err) { + ctx.APIErrorWithCode(apierrors.RepoNotFound) + return + } + ctx.APIErrorWithCode(apierrors.InternalError) + return + } + + // Check access + if repo.IsPrivate && !ctx.IsSigned { + 
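Two small observations on the commit streaming above, captured as a sketch rather than a change: the page passed to CommitsByRange only matches the requested offset when Offset is a multiple of Limit, and the literal timestamp layout is exactly time.RFC3339. Both helpers below are illustrative only.

package sketch

import "time"

// offsetToPage converts an offset/limit pair into the 1-based page number the
// commit and issue queries above expect. Offsets that are not a multiple of
// the limit are rounded down to the start of their page.
func offsetToPage(offset, limit int) int {
	if limit <= 0 {
		return 1
	}
	return offset/limit + 1
}

// The layout string "2006-01-02T15:04:05Z07:00" used above is the standard
// library's time.RFC3339, so the streamed timestamps can also be written as:
func formatCommitTime(t time.Time) string {
	return t.Format(time.RFC3339)
}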
ctx.APIErrorWithCode(apierrors.PermAccessDenied) + return + } + + // Determine issue state option + var isClosed optional.Option[bool] + switch req.State { + case "closed": + isClosed = optional.Some(true) + case "open": + isClosed = optional.Some(false) + case "all": + // Leave as None to get all issues + default: + isClosed = optional.Some(false) // default to open + } + + // Set up streaming + ctx.Resp.Header().Set("Content-Type", "application/x-ndjson") + ctx.Resp.Header().Set("Transfer-Encoding", "chunked") + ctx.Resp.WriteHeader(http.StatusOK) + + sw := NewStreamWriter(ctx.Resp) + + // Get issues + issues, err := issues_model.Issues(ctx, &issues_model.IssuesOptions{ + RepoIDs: []int64{repo.ID}, + IsClosed: isClosed, + Paginator: &db.ListOptions{ + Page: req.Offset/req.Limit + 1, + PageSize: req.Limit, + }, + }) + if err != nil { + sw.WriteItem(StreamIssueItem{ + Type: "error", + Error: "failed to get issues", + }) + return + } + + total := len(issues) + for i, issue := range issues { + labels := make([]string, 0, len(issue.Labels)) + for _, label := range issue.Labels { + labels = append(labels, label.Name) + } + + assignees := make([]string, 0, len(issue.Assignees)) + for _, assignee := range issue.Assignees { + assignees = append(assignees, assignee.Name) + } + + state := "open" + if issue.IsClosed { + state = "closed" + } + + item := StreamIssueItem{ + Type: "issue", + Number: issue.Index, + Title: issue.Title, + Body: issue.Content, + State: state, + Labels: labels, + Author: issue.Poster.Name, + Assignees: assignees, + CreatedAt: issue.CreatedUnix.AsTime().Format("2006-01-02T15:04:05Z07:00"), + UpdatedAt: issue.UpdatedUnix.AsTime().Format("2006-01-02T15:04:05Z07:00"), + Comments: issue.NumComments, + Index: i, + Total: total, + } + sw.WriteItem(item) + } + + sw.WriteItem(StreamIssueItem{ + Type: "done", + Total: total, + }) +} + +// Helper for line-by-line streaming of large files +func streamLargeFile(ctx *context.APIContext, content []byte) { + ctx.Resp.Header().Set("Content-Type", "application/x-ndjson") + ctx.Resp.Header().Set("Transfer-Encoding", "chunked") + ctx.Resp.WriteHeader(http.StatusOK) + + sw := NewStreamWriter(ctx.Resp) + scanner := bufio.NewScanner(bufio.NewReader(nil)) // placeholder + + lineNum := 0 + for scanner.Scan() { + lineNum++ + sw.WriteItem(map[string]any{ + "type": "line", + "line": lineNum, + "text": scanner.Text(), + }) + } + + sw.WriteItem(map[string]any{ + "type": "done", + "lines": lineNum, + }) +} diff --git a/routers/init.go b/routers/init.go index 859b00ebb2..14428075cf 100644 --- a/routers/init.go +++ b/routers/init.go @@ -32,6 +32,7 @@ import ( actions_router "code.gitea.io/gitea/routers/api/actions" packages_router "code.gitea.io/gitea/routers/api/packages" apiv1 "code.gitea.io/gitea/routers/api/v1" + apiv2 "code.gitea.io/gitea/routers/api/v2" "code.gitea.io/gitea/routers/common" "code.gitea.io/gitea/routers/private" web_routers "code.gitea.io/gitea/routers/web" @@ -188,6 +189,7 @@ func NormalRoutes() *web.Router { r.Mount("/", web_routers.Routes()) r.Mount("/api/v1", apiv1.Routes()) + r.Mount("/api/v2", apiv2.Routes()) r.Mount("/api/internal", private.Routes()) r.Post("/-/fetch-redirect", common.FetchRedirectDelegate) diff --git a/services/context/api.go b/services/context/api.go index 2bb3ae6fd5..8acff24aeb 100644 --- a/services/context/api.go +++ b/services/context/api.go @@ -16,6 +16,7 @@ import ( "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/cache" + apierrors 
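streamLargeFile above still scans a nil placeholder reader and never touches its content argument. A minimal sketch of the intended line walk, with the scanner buffer raised above bufio's 64KB default token size so long lines do not abort the scan; scanLines is an illustrative helper, not part of the patch.

package sketch

import (
	"bufio"
	"bytes"
)

// scanLines walks content line by line, emitting each line with its 1-based
// number, as the NDJSON "line" items in streamLargeFile above intend.
func scanLines(content []byte, emit func(lineNum int, text string)) error {
	scanner := bufio.NewScanner(bytes.NewReader(content))
	scanner.Buffer(make([]byte, 64*1024), 1024*1024)

	lineNum := 0
	for scanner.Scan() {
		lineNum++
		emit(lineNum, scanner.Text())
	}
	return scanner.Err()
}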
"code.gitea.io/gitea/modules/errors" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/httpcache" @@ -203,6 +204,46 @@ func (ctx *APIContext) APIError(status int, obj any) { }) } +// APIErrorWithCode responds with a structured error using the new error code system. +// This provides machine-readable error codes for AI and automation tools. +func (ctx *APIContext) APIErrorWithCode(code apierrors.ErrorCode, details ...map[string]any) { + requestID := middleware.GetRequestID(ctx.Req.Context()) + + apiErr := apierrors.NewAPIError(code, requestID) + if len(details) > 0 && details[0] != nil { + apiErr.WithDetails(details[0]) + } + + if code.HTTPStatus() == http.StatusInternalServerError { + log.Error("APIError [%s] %s: %s", requestID, code, code.Message()) + } + + ctx.JSON(code.HTTPStatus(), apiErr.Response()) +} + +// APIErrorWithCodeAndMessage responds with a structured error with a custom message. +func (ctx *APIContext) APIErrorWithCodeAndMessage(code apierrors.ErrorCode, message string, details ...map[string]any) { + requestID := middleware.GetRequestID(ctx.Req.Context()) + + apiErr := apierrors.NewAPIError(code, requestID).WithMessage(message) + if len(details) > 0 && details[0] != nil { + apiErr.WithDetails(details[0]) + } + + if code.HTTPStatus() == http.StatusInternalServerError { + log.Error("APIError [%s] %s: %s", requestID, code, message) + } + + ctx.JSON(code.HTTPStatus(), apiErr.Response()) +} + +// APIValidationError responds with a validation error including field-level details. +func (ctx *APIContext) APIValidationError(errors ...apierrors.ValidationError) { + requestID := middleware.GetRequestID(ctx.Req.Context()) + validationErr := apierrors.NewValidationError(requestID, errors...) + ctx.JSON(http.StatusBadRequest, validationErr) +} + type apiContextKeyType struct{} var apiContextKey = apiContextKeyType{}