style: fix gofmt formatting across codebase

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
David H. Friedel Jr. 2026-01-09 15:51:36 -05:00
parent dfc94f6408
commit 74c6389454
13 changed files with 154 additions and 154 deletions
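
The hunks below are formatting-only: gofmt realigns struct field names, types, tags, constant values, and trailing comments into columns, and sorts the entries within each import block, without changing behavior. As a minimal sketch of the kind of realignment involved (hypothetical code, not taken from this repository):

// Hypothetical example of the realignment gofmt applies; not code from this diff.
package example

// Before formatting: columns separated by single spaces.
type sessionBefore struct {
	ID string `json:"id"`
	FileName string `json:"file_name"`
	ChunksReceived int64 `json:"chunks_received"`
}

// After gofmt: field names, types, and struct tags line up in columns.
type sessionAfter struct {
	ID             string `json:"id"`
	FileName       string `json:"file_name"`
	ChunksReceived int64  `json:"chunks_received"`
}

Running `gofmt -l .` lists files whose formatting differs from gofmt's output; `gofmt -w .` rewrites them in place.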

View File

@@ -23,7 +23,7 @@ import (
 )
 const (
-	defaultChunkSize = 10 * 1024 * 1024 // 10MB
+	defaultChunkSize = 10 * 1024 * 1024 // 10MB
 	maxChunkSize = 100 * 1024 * 1024 // 100MB
 )
@@ -55,11 +55,11 @@ Interrupted uploads can be resumed using the session ID.`,
 }
 var resumeCmd = &cobra.Command{
-	Use: "resume",
-	Short: "Resume an interrupted upload",
-	Long: `Resume a previously interrupted chunked upload using its session ID.`,
+	Use: "resume",
+	Short: "Resume an interrupted upload",
+	Long: `Resume a previously interrupted chunked upload using its session ID.`,
 	Example: ` gitea-cli upload resume --session sess_abc123 --file ./app.tar.gz`,
-	RunE: runResumeUpload,
+	RunE: runResumeUpload,
 }
 var listCmd = &cobra.Command{
@@ -100,15 +100,15 @@ func init() {
 // UploadSession represents a chunked upload session
 type UploadSession struct {
-	ID string `json:"id"`
-	FileName string `json:"file_name"`
-	FileSize int64 `json:"file_size"`
-	ChunkSize int64 `json:"chunk_size"`
-	TotalChunks int64 `json:"total_chunks"`
-	ChunksReceived int64 `json:"chunks_received"`
-	Status string `json:"status"`
-	ExpiresAt time.Time `json:"expires_at"`
-	Checksum string `json:"checksum,omitempty"`
+	ID string `json:"id"`
+	FileName string `json:"file_name"`
+	FileSize int64 `json:"file_size"`
+	ChunkSize int64 `json:"chunk_size"`
+	TotalChunks int64 `json:"total_chunks"`
+	ChunksReceived int64 `json:"chunks_received"`
+	Status string `json:"status"`
+	ExpiresAt time.Time `json:"expires_at"`
+	Checksum string `json:"checksum,omitempty"`
 }
 // ProgressTracker tracks upload progress

View File

@@ -66,7 +66,7 @@ type PagesConfig struct {
 	RepoID int64 `xorm:"UNIQUE NOT NULL"`
 	Enabled bool `xorm:"DEFAULT false"`
 	Template PagesTemplate `xorm:"VARCHAR(32) DEFAULT 'simple'"`
-	ConfigJSON string `xorm:"TEXT"` // Cached parsed config from landing.yaml
+	ConfigJSON string `xorm:"TEXT"` // Cached parsed config from landing.yaml
 	ConfigHash string `xorm:"VARCHAR(64)"` // Hash for invalidation
 	UpdatedUnix timeutil.TimeStamp `xorm:"updated"`

View File

@@ -22,11 +22,11 @@ type WikiIndex struct {
 	PageName string `xorm:"VARCHAR(255) NOT NULL"`
 	PagePath string `xorm:"VARCHAR(512) NOT NULL"` // Git path
 	Title string `xorm:"VARCHAR(255)"`
-	Content string `xorm:"LONGTEXT"` // Full content for search
-	ContentHash string `xorm:"VARCHAR(64)"` // For change detection
-	CommitSHA string `xorm:"VARCHAR(64)"` // Last indexed commit
+	Content string `xorm:"LONGTEXT"` // Full content for search
+	ContentHash string `xorm:"VARCHAR(64)"` // For change detection
+	CommitSHA string `xorm:"VARCHAR(64)"` // Last indexed commit
 	WordCount int `xorm:"DEFAULT 0"`
-	LinksOut string `xorm:"TEXT"` // JSON array of outgoing links
+	LinksOut string `xorm:"TEXT"` // JSON array of outgoing links
 	UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
 	CreatedUnix timeutil.TimeStamp `xorm:"created"`
 }
@@ -86,10 +86,10 @@ func DeleteWikiIndexByRepo(ctx context.Context, repoID int64) error {
 // SearchWikiOptions contains options for wiki search
 type SearchWikiOptions struct {
-	RepoID int64
-	Query string
-	Limit int
-	Offset int
+	RepoID int64
+	Query string
+	Limit int
+	Offset int
 }
 // SearchWikiPages searches wiki pages by content

View File

@@ -43,7 +43,7 @@ func (s State) String() string {
 // Common errors
 var (
-	ErrCircuitOpen = errors.New("circuit breaker is open")
+	ErrCircuitOpen = errors.New("circuit breaker is open")
 	ErrTooManyRequests = errors.New("too many requests in half-open state")
 )

View File

@@ -59,20 +59,20 @@ const (
 // Rate limiting errors (RATE_)
 const (
-	RateLimitExceeded ErrorCode = "RATE_LIMIT_EXCEEDED"
+	RateLimitExceeded ErrorCode = "RATE_LIMIT_EXCEEDED"
 	RateQuotaExhausted ErrorCode = "RATE_QUOTA_EXHAUSTED"
 )
 // Validation errors (VAL_)
 const (
-	ValInvalidInput ErrorCode = "VAL_INVALID_INPUT"
-	ValMissingField ErrorCode = "VAL_MISSING_FIELD"
-	ValInvalidName ErrorCode = "VAL_INVALID_NAME"
-	ValNameTooLong ErrorCode = "VAL_NAME_TOO_LONG"
-	ValInvalidEmail ErrorCode = "VAL_INVALID_EMAIL"
-	ValDuplicateName ErrorCode = "VAL_DUPLICATE_NAME"
-	ValInvalidFormat ErrorCode = "VAL_INVALID_FORMAT"
-	ValidationFailed ErrorCode = "VALIDATION_FAILED"
+	ValInvalidInput ErrorCode = "VAL_INVALID_INPUT"
+	ValMissingField ErrorCode = "VAL_MISSING_FIELD"
+	ValInvalidName ErrorCode = "VAL_INVALID_NAME"
+	ValNameTooLong ErrorCode = "VAL_NAME_TOO_LONG"
+	ValInvalidEmail ErrorCode = "VAL_INVALID_EMAIL"
+	ValDuplicateName ErrorCode = "VAL_DUPLICATE_NAME"
+	ValInvalidFormat ErrorCode = "VAL_INVALID_FORMAT"
+	ValidationFailed ErrorCode = "VALIDATION_FAILED"
 )
 // General errors
@@ -84,20 +84,20 @@ const (
 // Upload errors (UPLOAD_)
 const (
-	UploadSessionNotFound ErrorCode = "UPLOAD_SESSION_NOT_FOUND"
-	UploadSessionExpired ErrorCode = "UPLOAD_SESSION_EXPIRED"
-	UploadChunkInvalid ErrorCode = "UPLOAD_CHUNK_INVALID"
-	UploadChunkSizeMismatch ErrorCode = "UPLOAD_CHUNK_SIZE_MISMATCH"
-	UploadChecksumMismatch ErrorCode = "UPLOAD_CHECKSUM_MISMATCH"
-	UploadIncomplete ErrorCode = "UPLOAD_INCOMPLETE"
-	UploadFileTooLarge ErrorCode = "UPLOAD_FILE_TOO_LARGE"
+	UploadSessionNotFound ErrorCode = "UPLOAD_SESSION_NOT_FOUND"
+	UploadSessionExpired ErrorCode = "UPLOAD_SESSION_EXPIRED"
+	UploadChunkInvalid ErrorCode = "UPLOAD_CHUNK_INVALID"
+	UploadChunkSizeMismatch ErrorCode = "UPLOAD_CHUNK_SIZE_MISMATCH"
+	UploadChecksumMismatch ErrorCode = "UPLOAD_CHECKSUM_MISMATCH"
+	UploadIncomplete ErrorCode = "UPLOAD_INCOMPLETE"
+	UploadFileTooLarge ErrorCode = "UPLOAD_FILE_TOO_LARGE"
 )
 // Resource errors (RESOURCE_)
 const (
-	ResourceNotFound ErrorCode = "RESOURCE_NOT_FOUND"
-	ResourceConflict ErrorCode = "RESOURCE_CONFLICT"
-	ResourceGone ErrorCode = "RESOURCE_GONE"
+	ResourceNotFound ErrorCode = "RESOURCE_NOT_FOUND"
+	ResourceConflict ErrorCode = "RESOURCE_CONFLICT"
+	ResourceGone ErrorCode = "RESOURCE_GONE"
 )
 // Server errors (SERVER_)
@@ -123,24 +123,24 @@ const (
 // Issue errors (ISSUE_)
 const (
-	IssueNotFound ErrorCode = "ISSUE_NOT_FOUND"
-	IssueClosed ErrorCode = "ISSUE_CLOSED"
-	IssueLocked ErrorCode = "ISSUE_LOCKED"
+	IssueNotFound ErrorCode = "ISSUE_NOT_FOUND"
+	IssueClosed ErrorCode = "ISSUE_CLOSED"
+	IssueLocked ErrorCode = "ISSUE_LOCKED"
 )
 // Pull Request errors (PR_)
 const (
-	PRNotFound ErrorCode = "PR_NOT_FOUND"
-	PRAlreadyMerged ErrorCode = "PR_ALREADY_MERGED"
-	PRNotMergeable ErrorCode = "PR_NOT_MERGEABLE"
-	PRWorkInProgress ErrorCode = "PR_WORK_IN_PROGRESS"
+	PRNotFound ErrorCode = "PR_NOT_FOUND"
+	PRAlreadyMerged ErrorCode = "PR_ALREADY_MERGED"
+	PRNotMergeable ErrorCode = "PR_NOT_MERGEABLE"
+	PRWorkInProgress ErrorCode = "PR_WORK_IN_PROGRESS"
 )
 // Release errors (RELEASE_)
 const (
-	ReleaseNotFound ErrorCode = "RELEASE_NOT_FOUND"
-	ReleaseTagExists ErrorCode = "RELEASE_TAG_EXISTS"
-	ReleaseIsDraft ErrorCode = "RELEASE_IS_DRAFT"
+	ReleaseNotFound ErrorCode = "RELEASE_NOT_FOUND"
+	ReleaseTagExists ErrorCode = "RELEASE_TAG_EXISTS"
+	ReleaseIsDraft ErrorCode = "RELEASE_IS_DRAFT"
 )
 // Webhook errors (WEBHOOK_)

View File

@@ -27,33 +27,33 @@ const (
 // ComponentCheck represents a health check for a single component
 type ComponentCheck struct {
-	Name string `json:"name"`
-	Status Status `json:"status"`
-	Message string `json:"message,omitempty"`
-	Duration time.Duration `json:"duration_ms"`
-	LastChecked time.Time `json:"last_checked"`
-	Metadata map[string]any `json:"metadata,omitempty"`
+	Name string `json:"name"`
+	Status Status `json:"status"`
+	Message string `json:"message,omitempty"`
+	Duration time.Duration `json:"duration_ms"`
+	LastChecked time.Time `json:"last_checked"`
+	Metadata map[string]any `json:"metadata,omitempty"`
 }
 // HealthResponse represents the complete health check response
 type HealthResponse struct {
-	Status Status `json:"status"`
-	Version string `json:"version"`
-	Uptime time.Duration `json:"uptime_seconds"`
-	Timestamp time.Time `json:"timestamp"`
-	Components map[string]*ComponentCheck `json:"components"`
-	System *SystemInfo `json:"system,omitempty"`
-	Circuits map[string]circuitbreaker.Stats `json:"circuit_breakers,omitempty"`
+	Status Status `json:"status"`
+	Version string `json:"version"`
+	Uptime time.Duration `json:"uptime_seconds"`
+	Timestamp time.Time `json:"timestamp"`
+	Components map[string]*ComponentCheck `json:"components"`
+	System *SystemInfo `json:"system,omitempty"`
+	Circuits map[string]circuitbreaker.Stats `json:"circuit_breakers,omitempty"`
 }
 // SystemInfo contains system-level health information
 type SystemInfo struct {
-	GoVersion string `json:"go_version"`
-	NumGoroutines int `json:"goroutines"`
-	MemoryAllocMB float64 `json:"memory_alloc_mb"`
-	MemorySysMB float64 `json:"memory_sys_mb"`
-	NumCPU int `json:"num_cpu"`
-	GOMAXPROCS int `json:"gomaxprocs"`
+	GoVersion string `json:"go_version"`
+	NumGoroutines int `json:"goroutines"`
+	MemoryAllocMB float64 `json:"memory_alloc_mb"`
+	MemorySysMB float64 `json:"memory_sys_mb"`
+	NumCPU int `json:"num_cpu"`
+	GOMAXPROCS int `json:"gomaxprocs"`
 }
 // Checker is a function that performs a health check
@@ -61,12 +61,12 @@ type Checker func(ctx context.Context) *ComponentCheck
 // Manager manages health checks
 type Manager struct {
-	mu sync.RWMutex
-	checkers map[string]Checker
-	cache map[string]*ComponentCheck
-	cacheTTL time.Duration
-	startTime time.Time
-	version string
+	mu sync.RWMutex
+	checkers map[string]Checker
+	cache map[string]*ComponentCheck
+	cacheTTL time.Duration
+	startTime time.Time
+	version string
 }
 var (

View File

@@ -32,11 +32,11 @@ const (
 type OperationStatus string
 const (
-	StatusPending OperationStatus = "pending"
-	StatusRunning OperationStatus = "running"
-	StatusComplete OperationStatus = "complete"
-	StatusFailed OperationStatus = "failed"
-	StatusCancelled OperationStatus = "cancelled"
+	StatusPending OperationStatus = "pending"
+	StatusRunning OperationStatus = "running"
+	StatusComplete OperationStatus = "complete"
+	StatusFailed OperationStatus = "failed"
+	StatusCancelled OperationStatus = "cancelled"
 )
 // Phase represents a phase within an operation
@@ -49,23 +49,23 @@ type Phase struct {
 // ProgressUpdate represents a progress update event
 type ProgressUpdate struct {
-	OperationID string `json:"operation_id"`
-	Type OperationType `json:"type"`
-	Status OperationStatus `json:"status"`
-	CurrentPhase string `json:"current_phase,omitempty"`
-	Phases []Phase `json:"phases,omitempty"`
-	Progress int `json:"progress"` // Overall progress 0-100
-	BytesTotal int64 `json:"bytes_total,omitempty"`
-	BytesDone int64 `json:"bytes_done,omitempty"`
-	ItemsTotal int `json:"items_total,omitempty"`
-	ItemsDone int `json:"items_done,omitempty"`
-	Message string `json:"message,omitempty"`
-	Error string `json:"error,omitempty"`
-	StartedAt time.Time `json:"started_at"`
-	UpdatedAt time.Time `json:"updated_at"`
-	EstimatedETA *time.Time `json:"estimated_eta,omitempty"`
-	SpeedBPS int64 `json:"speed_bps,omitempty"` // bytes per second
-	Metadata map[string]any `json:"metadata,omitempty"`
+	OperationID string `json:"operation_id"`
+	Type OperationType `json:"type"`
+	Status OperationStatus `json:"status"`
+	CurrentPhase string `json:"current_phase,omitempty"`
+	Phases []Phase `json:"phases,omitempty"`
+	Progress int `json:"progress"` // Overall progress 0-100
+	BytesTotal int64 `json:"bytes_total,omitempty"`
+	BytesDone int64 `json:"bytes_done,omitempty"`
+	ItemsTotal int `json:"items_total,omitempty"`
+	ItemsDone int `json:"items_done,omitempty"`
+	Message string `json:"message,omitempty"`
+	Error string `json:"error,omitempty"`
+	StartedAt time.Time `json:"started_at"`
+	UpdatedAt time.Time `json:"updated_at"`
+	EstimatedETA *time.Time `json:"estimated_eta,omitempty"`
+	SpeedBPS int64 `json:"speed_bps,omitempty"` // bytes per second
+	Metadata map[string]any `json:"metadata,omitempty"`
 }
 // Operation tracks a long-running operation

View File

@@ -168,9 +168,9 @@ type SEOConfig struct {
 // AnalyticsConfig represents analytics settings
 type AnalyticsConfig struct {
-	Plausible string `yaml:"plausible,omitempty"`
-	Umami UmamiConfig `yaml:"umami,omitempty"`
-	GoogleAnalytics string `yaml:"google_analytics,omitempty"`
+	Plausible string `yaml:"plausible,omitempty"`
+	Umami UmamiConfig `yaml:"umami,omitempty"`
+	GoogleAnalytics string `yaml:"google_analytics,omitempty"`
 }
 // UmamiConfig represents Umami analytics settings

View File

@@ -98,12 +98,12 @@ type RenameOrgOption struct {
 // OrgPinnedRepo represents a pinned repository for an organization
 type OrgPinnedRepo struct {
-	ID int64 `json:"id"`
-	RepoID int64 `json:"repo_id"`
-	GroupID int64 `json:"group_id,omitempty"`
-	DisplayOrder int `json:"display_order"`
-	Repo *Repository `json:"repo,omitempty"`
-	Group *OrgPinnedGroup `json:"group,omitempty"`
+	ID int64 `json:"id"`
+	RepoID int64 `json:"repo_id"`
+	GroupID int64 `json:"group_id,omitempty"`
+	DisplayOrder int `json:"display_order"`
+	Repo *Repository `json:"repo,omitempty"`
+	Group *OrgPinnedGroup `json:"group,omitempty"`
 }
 // OrgPinnedGroup represents a group of pinned repositories
@@ -168,13 +168,13 @@ type OrgPublicMember struct {
 // OrgOverview represents the organization overview for the profile page
 type OrgOverview struct {
-	Organization *Organization `json:"organization"`
-	PinnedRepos []*OrgPinnedRepo `json:"pinned_repos"`
-	PinnedGroups []*OrgPinnedGroup `json:"pinned_groups"`
-	PublicMembers []*OrgPublicMember `json:"public_members"`
-	TotalMembers int64 `json:"total_members"`
-	Stats *OrgOverviewStats `json:"stats"`
-	Profile *OrgProfileContent `json:"profile,omitempty"`
+	Organization *Organization `json:"organization"`
+	PinnedRepos []*OrgPinnedRepo `json:"pinned_repos"`
+	PinnedGroups []*OrgPinnedGroup `json:"pinned_groups"`
+	PublicMembers []*OrgPublicMember `json:"public_members"`
+	TotalMembers int64 `json:"total_members"`
+	Stats *OrgOverviewStats `json:"stats"`
+	Profile *OrgProfileContent `json:"profile,omitempty"`
 }
 // OrgOverviewStats represents organization statistics

View File

@@ -7,29 +7,29 @@ import "time"
 // WikiPageV2 represents a wiki page in v2 API format
 type WikiPageV2 struct {
-	Name string `json:"name"`
-	Title string `json:"title"`
-	Path string `json:"path"`
-	URL string `json:"url"`
-	HTMLURL string `json:"html_url"`
-	Content string `json:"content,omitempty"`
-	ContentHTML string `json:"content_html,omitempty"`
-	WordCount int `json:"word_count"`
-	LinksOut []string `json:"links_out,omitempty"`
-	LinksIn []string `json:"links_in,omitempty"`
-	Sidebar string `json:"sidebar,omitempty"`
-	Footer string `json:"footer,omitempty"`
-	LastCommit *WikiCommitV2 `json:"last_commit,omitempty"`
-	HistoryURL string `json:"history_url,omitempty"`
+	Name string `json:"name"`
+	Title string `json:"title"`
+	Path string `json:"path"`
+	URL string `json:"url"`
+	HTMLURL string `json:"html_url"`
+	Content string `json:"content,omitempty"`
+	ContentHTML string `json:"content_html,omitempty"`
+	WordCount int `json:"word_count"`
+	LinksOut []string `json:"links_out,omitempty"`
+	LinksIn []string `json:"links_in,omitempty"`
+	Sidebar string `json:"sidebar,omitempty"`
+	Footer string `json:"footer,omitempty"`
+	LastCommit *WikiCommitV2 `json:"last_commit,omitempty"`
+	HistoryURL string `json:"history_url,omitempty"`
 }
 // WikiCommitV2 represents a wiki commit in v2 API format
 type WikiCommitV2 struct {
-	SHA string `json:"sha"`
-	Author *WikiAuthorV2 `json:"author"`
-	Committer *WikiAuthorV2 `json:"committer,omitempty"`
-	Message string `json:"message"`
-	Date time.Time `json:"date"`
+	SHA string `json:"sha"`
+	Author *WikiAuthorV2 `json:"author"`
+	Committer *WikiAuthorV2 `json:"committer,omitempty"`
+	Message string `json:"message"`
+	Date time.Time `json:"date"`
 }
 // WikiAuthorV2 represents a wiki commit author
@@ -116,20 +116,20 @@ type WikiTopLinkedPageV2 struct {
 // WikiHealthV2 represents wiki health metrics
 type WikiHealthV2 struct {
-	OrphanedPages []*WikiOrphanedPageV2 `json:"orphaned_pages"`
-	DeadLinks []*WikiDeadLinkV2 `json:"dead_links"`
-	OutdatedPages []*WikiOutdatedPageV2 `json:"outdated_pages"`
-	ShortPages []*WikiShortPageV2 `json:"short_pages"`
+	OrphanedPages []*WikiOrphanedPageV2 `json:"orphaned_pages"`
+	DeadLinks []*WikiDeadLinkV2 `json:"dead_links"`
+	OutdatedPages []*WikiOutdatedPageV2 `json:"outdated_pages"`
+	ShortPages []*WikiShortPageV2 `json:"short_pages"`
 }
 // WikiStatsV2 represents wiki statistics
 type WikiStatsV2 struct {
-	TotalPages int64 `json:"total_pages"`
-	TotalWords int64 `json:"total_words"`
-	TotalCommits int64 `json:"total_commits"`
-	LastUpdated time.Time `json:"last_updated"`
-	Contributors int64 `json:"contributors"`
-	Health *WikiHealthV2 `json:"health"`
+	TotalPages int64 `json:"total_pages"`
+	TotalWords int64 `json:"total_words"`
+	TotalCommits int64 `json:"total_commits"`
+	LastUpdated time.Time `json:"last_updated"`
+	Contributors int64 `json:"contributors"`
+	Health *WikiHealthV2 `json:"health"`
 	TopLinked []*WikiTopLinkedPageV2 `json:"top_linked"`
 }

View File

@@ -6,9 +6,9 @@ package org
 import (
 	"net/http"
-	access_model "code.gitea.io/gitea/models/perm/access"
 	"code.gitea.io/gitea/models/organization"
 	"code.gitea.io/gitea/models/perm"
+	access_model "code.gitea.io/gitea/models/perm/access"
 	repo_model "code.gitea.io/gitea/models/repo"
 	api "code.gitea.io/gitea/modules/structs"
 	"code.gitea.io/gitea/modules/web"

View File

@@ -10,8 +10,8 @@ import (
 	"path"
 	"strings"
-	repo_model "code.gitea.io/gitea/models/repo"
 	"code.gitea.io/gitea/models/renderhelper"
+	repo_model "code.gitea.io/gitea/models/repo"
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/markup/markdown"

View File

@@ -241,13 +241,13 @@ func (c *Client) GetRepository(ctx context.Context, owner, repo string) (*Reposi
 // Release represents a Gitea release
 type Release struct {
-	ID int64 `json:"id"`
-	TagName string `json:"tag_name"`
-	Name string `json:"name"`
-	Body string `json:"body"`
-	Draft bool `json:"draft"`
-	Prerelease bool `json:"prerelease"`
-	PublishedAt time.Time `json:"published_at"`
+	ID int64 `json:"id"`
+	TagName string `json:"tag_name"`
+	Name string `json:"name"`
+	Body string `json:"body"`
+	Draft bool `json:"draft"`
+	Prerelease bool `json:"prerelease"`
+	PublishedAt time.Time `json:"published_at"`
 	Assets []Attachment `json:"assets"`
 }