Compare commits

..

2 Commits

Author: Lasse Brandt Thomsen
SHA1: 0fe32826ed
Message: Allow read/write to user root and only read to group git on documentation (#15041)
Co-authored-by: Lasse Brandt Thomsen <lasse@bitmand.dk>
Date: 2021-03-20 00:45:04 +01:00

Author: Lunny Xiao
SHA1: cf549500e0
Message: Fix bug when upload on web (#15042)
* Fix bug when upload on web
* move into own function
Co-authored-by: 6543 <6543@obermui.de>
Co-authored-by: zeripath <art27@cantab.net>
Date: 2021-03-19 23:49:29 +01:00
458 changed files with 6209 additions and 11565 deletions

View File

@@ -522,7 +522,7 @@ steps:
image: plugins/s3:1
settings:
acl: public-read
bucket: gitea-artifacts
bucket: releases
endpoint: https://storage.gitea.io
path_style: true
source: "dist/release/*"
@@ -543,7 +543,7 @@ steps:
image: plugins/s3:1
settings:
acl: public-read
bucket: gitea-artifacts
bucket: releases
endpoint: https://storage.gitea.io
path_style: true
source: "dist/release/*"
@@ -618,7 +618,7 @@ steps:
image: plugins/s3:1
settings:
acl: public-read
bucket: gitea-artifacts
bucket: releases
endpoint: https://storage.gitea.io
path_style: true
source: "dist/release/*"

View File

@@ -110,7 +110,3 @@ issues:
- text: "exitAfterDefer:"
linters:
- gocritic
- path: modules/graceful/manager_windows.go
linters:
- staticcheck
text: "svc.IsAnInteractiveSession is deprecated: Use IsWindowsService instead."

View File

@@ -4,146 +4,14 @@ This changelog goes through all the changes that have been made in each release
without substantial changes to our git log; to see the highlights of what has
been added to each release, please refer to the [blog](https://blog.gitea.io).
## [1.14.3](https://github.com/go-gitea/gitea/releases/tag/v1.14.3) - 2021-06-10
* SECURITY
* Encrypt migration credentials at rest (#15895) (#16187)
* Only check access tokens if they are likely to be tokens (#16164) (#16171)
* Add missing SameSite settings for the i_like_gitea cookie (#16037) (#16039)
* Fix setting of SameSite on cookies (#15989) (#15991)
* API
* Repository object only count releases as releases (#16184) (#16190)
* EditOrg respect RepoAdminChangeTeamAccess option (#16184) (#16190)
* Fix overly strict edit pr permissions (#15900) (#16081)
* BUGFIXES
* Run processors on whole of text (#16155) (#16185)
* Class `issue-keyword` is being incorrectly stripped off spans (#16163) (#16172)
* Fix language switch for install page (#16043) (#16128)
* Fix bug on getIssueIDsByRepoID (#16119) (#16124)
* Set self-adjusting deadline for connection writing (#16068) (#16123)
* Fix http path bug (#16117) (#16120)
* Fix data URI scramble (#16098) (#16118)
* Merge all deleteBranch as one function and also fix bug when delete branch don't close related PRs (#16067) (#16097)
* git migration: don't prompt interactively for clone credentials (#15902) (#16082)
* Fix case change in ownernames (#16045) (#16050)
* Don't manipulate input params in email notification (#16011) (#16033)
* Remove branch URL before IssueRefURL (#15968) (#15970)
* Fix layout of milestone view (#15927) (#15940)
* GitHub Migration, migrate draft releases too (#15884) (#15888)
* Close the gitrepo when deleting the repository (#15876) (#15887)
* Upgrade xorm to v1.1.0 (#15869) (#15885)
* Fix blame row height alignment (#15863) (#15883)
* Fix error message when saving generated LOCAL_ROOT_URL config (#15880) (#15882)
* Backport Fix LFS commit finder not working (#15856) (#15874)
* Stop calling WriteHeader in Write (#15862) (#15873)
* Add timeout to writing to responses (#15831) (#15872)
* Return go-get info on subdirs (#15642) (#15871)
* Restore PAM user autocreation functionality (#15825) (#15867)
* Fix truncate utf8 string (#15828) (#15854)
* Fix bound address/port for caddy's certmagic library (#15758) (#15848)
* Upgrade unrolled/render to v1.1.1 (#15845) (#15846)
* Queue manager FlushAll can loop rapidly - add delay (#15733) (#15840)
* Tagger can be empty, as can Commit and Author - tolerate this (#15835) (#15839)
* Set autocomplete off on branches selector (#15809) (#15833)
* Add missing error to Doctor log (#15813) (#15824)
* Move restore repo to internal router and invoke from command to avoid open the same db file or queues files (#15790) (#15816)
* ENHANCEMENTS
* Removable media support to snap package (#16136) (#16138)
* Move sans-serif fallback font higher than emoji fonts (#15855) (#15892)
* DOCKER
* Only write config in environment-to-ini if there are changes (#15861) (#15868)
* Only offer hostcertificates if they exist (#15849) (#15853)
## [1.14.2](https://github.com/go-gitea/gitea/releases/tag/v1.14.2) - 2021-05-08
* API
* Make change repo settings work on empty repos (#15778) (#15789)
* Add pull "merged" notification subject status to API (#15344) (#15654)
* BUGFIXES
* Ensure that ctx.Written is checked after issues(...) calls (#15797) (#15798)
* Use pulls in commit graph unless pulls are disabled (#15734 & #15740 & #15774) (#15775)
* Set GIT_DIR correctly if it is not set (#15751) (#15769)
* Fix bug where repositories appear unadopted (#15757) (#15767)
* Not show `ref-in-new-issue` pop when issue was disabled (#15761) (#15765)
* Drop back to use IsAnInteractiveSession for SVC (#15749) (#15762)
* Fix setting version table in dump (#15753) (#15759)
* Fix close button change on delete in simplemde area (#15737) (#15747)
* Defer closing the gitrepo until the end of the wrapped context functions (#15653) (#15746)
* Fix some ui bug about draft release (#15137) (#15745)
* Only log Error on getLastCommitStatus error to let pull list still be visible (#15716) (#15715)
* Move tooltip down to allow selection of Remove File on error (#15672) (#15714)
* Fix setting redis db path (#15698) (#15708)
* Fix DB session cleanup (#15697) (#15700)
* Fixed several activation bugs (#15473) (#15685)
* Delete references if repository gets deleted (#15681) (#15684)
* Fix orphaned objects deletion bug (#15657) (#15683)
* Delete protected branch if repository gets removed (#15658) (#15676)
* Remove spurious set name from eventsource.sharedworker.js (#15643) (#15652)
* Not update updated unix for `git gc` (#15637) (#15641)
* Fix commit graph author link (#15627) (#15630)
* Fix webhook timeout bug (#15613) (#15621)
* Resolve panic on failed interface conversion in migration v156 (#15604) (#15610)
* Fix missing storage init (#15589) (#15598)
* If the default branch is not present do not report error on stats indexing (#15546 & #15583) (#15594)
* Fix lfs management find (#15537) (#15578)
* Fix NPE on view commit with notes (#15561) (#15573)
* Fix bug on commit graph (#15517) (#15530)
* Send size to /avatars if requested (#15459) (#15528)
* Prevent migration 156 failure if tag commit missing (#15519) (#15527)
* ENHANCEMENTS
* Display conflict-free merge messages for pull requests (#15773) (#15796)
* Exponential Backoff for ByteFIFO (#15724) (#15793)
* Issue list alignment tweaks (#15483) (#15766)
* Implement delete release attachments and update release attachments' name (#14130) (#15666)
* Add placeholder text to deploy key textarea (#15575) (#15576)
* Project board improvements (#15429) (#15560)
* Repo branch page: label size, PR ref, new PR button alignment (#15363) (#15365)
* MISC
* Fix webkit calendar icon color on arc-green (#15713) (#15728)
* Performance improvement for last commit cache and show-ref (#15455) (#15701)
* Bump unrolled/render to v1.1.0 (#15581) (#15608)
* Add ETag header (#15370) (#15552)
## [1.14.1](https://github.com/go-gitea/gitea/releases/tag/v1.14.1) - 2021-04-15
* BUGFIXES
* Fix bug clone wiki (#15499) (#15502)
* Github Migration ignore rate limit, if not enabled (#15490) (#15495)
* Use subdir for URL (#15446) (#15493)
* Query the DB for the hash before inserting in to email_hash (#15457) (#15491)
* Ensure review dismissal only dismisses the correct review (#15477) (#15489)
* Use index of the supported tags to choose user lang (#15452) (#15488)
* Fix wrong file link in code search page (#15466) (#15486)
* Quick template fix for built-in SSH server in admin config (#15464) (#15481)
* Prevent superfluous response.WriteHeader (#15456) (#15476)
* Fix ambiguous argument error on tags (#15432) (#15474)
* Add created_unix instead of expiry to migration (#15458) (#15463)
* Fix repository search (#15428) (#15442)
* Prevent NPE on avatar direct rendering if federated avatars disabled (#15434) (#15439)
* Fix wiki clone urls (#15430) (#15431)
* Fix dingtalk icon url at webhook (#15417) (#15426)
* Standardise icon on projects PR page (#15387) (#15408)
* ENHANCEMENTS
* Add option to skip LFS/attachment files for `dump` (#15407) (#15492)
* Clone panel fixes (#15436)
* Use semantic dropdown for code search query type (#15276) (#15364)
* BUILD
* Build go-git variants for windows (#15482) (#15487)
* Lock down build-images dependencies (Partial #15479) (#15480)
* MISC
* Performance improvement for list pull requests (#15447) (#15500)
* Fix potential copy lfs records failure when fork a repository (#15441) (#15485)
## [1.14.0](https://github.com/go-gitea/gitea/releases/tag/v1.14.0) - 2021-04-11
## [1.14.0-RC1](https://github.com/go-gitea/gitea/releases/tag/v1.14.0) - 2021-03-19
* SECURITY
* Respect approved email domain list for externally validated user registration (#15014)
* Add reverse proxy configuration support for remote IP address detection (#14959)
* Ensure validation occurs on clone addresses too (#14994)
* Fix several render issues highlighted during fuzzing (#14986)
* BREAKING
* Fix double 'push tag' action feed (#15078) (#15083)
* Remove possible resource leak (#15067) (#15082)
* Handle unauthorized user events gracefully (#15071) (#15074)
* Restore Access.log following migration to Chi framework (Stops access logging of /api/internal routes) (#14475)
* Migrate from Macaron to Chi framework (#14293)
* Deprecate building for mips (#14174)
@@ -174,7 +42,6 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
* Dump github/gitlab/gitea repository data to a local directory and restore to gitea (#12244)
* Create Rootless Docker image (#10154)
* API
* Speedup issue search (#15179) (#15192)
* Get pull, return head branch sha, even if deleted (#14931)
* Export LFS & TimeTracking function status (#14753)
* Show Gitea version in swagger (#14654)
@@ -199,20 +66,6 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
* Add more filters to issues search (#13514)
* Add review request api (#11355)
* BUGFIXES
* Fix delete nonexist oauth application 500 and prevent deadlock (#15384) (#15396)
* Always set the merge base used to merge the commit (#15352) (#15385)
* Upgrade to bluemonday 1.0.7 (#15379) (#15380)
* Turn RepoRef and RepoAssignment back into func(*Context) (#15372) (#15377)
* Move FCGI req.URL.Path fix-up to the FCGI listener (#15292) (#15361)
* Show diff on rename with diff changes (#15338) (#15339)
* Fix handling of logout event (#15323) (#15337)
* Fix CanCreateRepo check (#15311) (#15321)
* Fix xorm log stack level (#15285) (#15316)
* Fix bug in Wrap (#15302) (#15309)
* Drop the event source if we are unauthorized (#15275) (#15280)
* Backport Fix graph pagination (#15225) (#15249)
* Prevent NPE in CommentMustAsDiff if no hunk header (#15199) (#15200)
* should run RetrieveRepoMetas() for empty pr (#15187) (#15190)
* Move setting to enable closing issue via commit in non default branch to repo settings (#14965)
* Show correct issues for team dashboard (#14952)
* Ensure that new pull request button works on forked forks owned by owner of the root and reduce ambiguity (#14932)
@@ -269,9 +122,6 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
* Use GO variable in go-check target (#13146) (#13147)
* ENHANCEMENTS
* UI style improvements
* Dropzone styling improvements (#15291) (#15374)
* Add size to Save function (#15264) (#15270)
* Monaco improvements (#15333) (#15345)
* Support .mailmap in code activity stats (#15009)
* Sort release attachments by name (#15008)
* Add ui.explore settings to control view of explore pages (#14094)
@@ -417,52 +267,6 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
* Reduce make verbosity (#13803)
* Add git command error directory on log (#13194)
## [1.13.7](https://github.com/go-gitea/gitea/releases/tag/v1.13.7) - 2021-04-07
* SECURITY
* Update to bluemonday-1.0.6 (#15294) (#15298)
* Clusterfuzz found another way (#15160) (#15169)
* API
* Fix wrong user returned in API (#15139) (#15150)
* BUGFIXES
* Add 'fonts' into 'KnownPublicEntries' (#15188) (#15317)
* Speed up `enry.IsVendor` (#15213) (#15246)
* Response 404 for diff/patch of a commit that not exist (#15221) (#15238)
* Prevent NPE in CommentMustAsDiff if no hunk header (#15199) (#15201)
* MISC
* Add size to Save function (#15264) (#15271)
## [1.13.6](https://github.com/go-gitea/gitea/releases/tag/v1.13.6) - 2021-03-23
* SECURITY
* Fix bug on avatar middleware (#15124) (#15125)
* Fix another clusterfuzz identified issue (#15096) (#15114)
* API
* Fix nil pointer exception in get pull reviews API (#15106)
* BUGFIXES
* Fix markdown rendering in milestone content (#15056) (#15092)
## [1.13.5](https://github.com/go-gitea/gitea/releases/tag/v1.13.5) - 2021-03-21
* SECURITY
* Update to goldmark 1.3.3 (#15059) (#15061)
* Another clusterfuzz spotted issue (#15032) (#15034)
* API
* Fix set milestone on PR creation (#14981) (#15001)
* Prevent panic when editing forked repos by API (#14960) (#14963)
* BUGFIXES
* Fix bug when upload on web (#15042) (#15055)
* Delete Labels & IssueLabels on Repo Delete too (#15039) (#15051)
* Fix postgres ID sequences broken by recreate-table (#15015) (#15029)
* Fix several render issues (#14986) (#15013)
* Make sure sibling images get a link too (#14979) (#14995)
* Fix Anchor jumping with escaped query components (#14969) (#14977)
* Fix release mail html template (#14976)
* Fix excluding more than two labels on issues list (#14962) (#14973)
* Don't mark each comment poster as OP (#14971) (#14972)
* Add "captcha" to list of reserved usernames (#14930)
* Re-enable import local paths after reversion from #13610 (#14925) (#14927)
## [1.13.4](https://github.com/go-gitea/gitea/releases/tag/v1.13.4) - 2021-03-07
* SECURITY

View File

@@ -577,9 +577,6 @@ release-windows: | $(DIST_DIRS)
$(GO) install src.techknowlogick.com/xgo@latest; \
fi
CGO_CFLAGS="$(CGO_CFLAGS)" xgo -go $(XGO_VERSION) -buildmode exe -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION) .
ifeq (,$(findstring gogit,$(TAGS)))
CGO_CFLAGS="$(CGO_CFLAGS)" xgo -go $(XGO_VERSION) -buildmode exe -dest $(DIST)/binaries -tags 'netgo osusergo gogit $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION)-gogit .
endif
ifeq ($(CI),drone)
cp /build/* $(DIST)/binaries
endif
@@ -702,8 +699,8 @@ generate-gitignore:
GO111MODULE=on $(GO) run build/generate-gitignores.go
.PHONY: generate-images
generate-images: | node_modules
npm install --no-save --no-package-lock fabric@4 imagemin-zopfli@7
generate-images:
npm install --no-save --no-package-lock fabric imagemin-zopfli
node build/generate-images.js $(TAGS)
.PHONY: generate-manpage

View File

@@ -21,7 +21,6 @@ import (
pwd "code.gitea.io/gitea/modules/password"
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"github.com/urfave/cli"
)
@@ -490,10 +489,6 @@ func runDeleteUser(c *cli.Context) error {
return err
}
if err := storage.Init(); err != nil {
return err
}
var err error
var user *models.User
if c.IsSet("email") {

View File

@@ -129,14 +129,6 @@ It can be used for backup and capture Gitea server image to send to maintainer`,
Name: "skip-custom-dir",
Usage: "Skip custom directory",
},
cli.BoolFlag{
Name: "skip-lfs-data",
Usage: "Skip LFS data",
},
cli.BoolFlag{
Name: "skip-attachment-data",
Usage: "Skip attachment data",
},
cli.GenericFlag{
Name: "type",
Value: outputTypeEnum,
@@ -222,9 +214,7 @@ func runDump(ctx *cli.Context) error {
fatal("Failed to include repositories: %v", err)
}
if ctx.IsSet("skip-lfs-data") && ctx.Bool("skip-lfs-data") {
log.Info("Skip dumping LFS data")
} else if err := storage.LFS.IterateObjects(func(objPath string, object storage.Object) error {
if err := storage.LFS.IterateObjects(func(objPath string, object storage.Object) error {
info, err := object.Stat()
if err != nil {
return err
@@ -323,9 +313,7 @@ func runDump(ctx *cli.Context) error {
}
}
if ctx.IsSet("skip-attachment-data") && ctx.Bool("skip-attachment-data") {
log.Info("Skip dumping attachment data")
} else if err := storage.Attachments.IterateObjects(func(objPath string, object storage.Object) error {
if err := storage.Attachments.IterateObjects(func(objPath string, object storage.Object) error {
info, err := object.Stat()
if err != nil {
return err
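
One side of the hunks above gates each storage iteration behind a boolean CLI flag. Below is a minimal, self-contained sketch of that flag-gating shape using urfave/cli v1; the flag names come from the hunk, while the printed messages merely stand in for the real LFS/attachment iteration.

```go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli"
)

func main() {
	app := cli.NewApp()
	app.Name = "dump-sketch"
	app.Flags = []cli.Flag{
		cli.BoolFlag{Name: "skip-lfs-data", Usage: "Skip LFS data"},
		cli.BoolFlag{Name: "skip-attachment-data", Usage: "Skip attachment data"},
	}
	app.Action = func(ctx *cli.Context) error {
		// Same gating shape as in the hunk: only walk the store when the
		// corresponding skip flag is absent or false.
		if ctx.IsSet("skip-lfs-data") && ctx.Bool("skip-lfs-data") {
			fmt.Println("Skip dumping LFS data")
		} else {
			fmt.Println("would iterate LFS objects here")
		}
		if ctx.IsSet("skip-attachment-data") && ctx.Bool("skip-attachment-data") {
			fmt.Println("Skip dumping attachment data")
		} else {
			fmt.Println("would iterate attachment objects here")
		}
		return nil
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}
```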

View File

@@ -5,12 +5,15 @@
package cmd
import (
"errors"
"net/http"
"context"
"strings"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/private"
"code.gitea.io/gitea/modules/migrations"
"code.gitea.io/gitea/modules/migrations/base"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
pull_service "code.gitea.io/gitea/services/pull"
"github.com/urfave/cli"
)
@@ -47,18 +50,70 @@ wiki, issues, labels, releases, release_assets, milestones, pull_requests, comme
}
func runRestoreRepository(ctx *cli.Context) error {
setting.NewContext()
if err := initDB(); err != nil {
return err
}
statusCode, errStr := private.RestoreRepo(
log.Trace("AppPath: %s", setting.AppPath)
log.Trace("AppWorkPath: %s", setting.AppWorkPath)
log.Trace("Custom path: %s", setting.CustomPath)
log.Trace("Log path: %s", setting.LogRootPath)
setting.InitDBConfig()
if err := storage.Init(); err != nil {
return err
}
if err := pull_service.Init(); err != nil {
return err
}
var opts = base.MigrateOptions{
RepoName: ctx.String("repo_name"),
}
if len(ctx.String("units")) == 0 {
opts.Wiki = true
opts.Issues = true
opts.Milestones = true
opts.Labels = true
opts.Releases = true
opts.Comments = true
opts.PullRequests = true
opts.ReleaseAssets = true
} else {
units := strings.Split(ctx.String("units"), ",")
for _, unit := range units {
switch strings.ToLower(unit) {
case "wiki":
opts.Wiki = true
case "issues":
opts.Issues = true
case "milestones":
opts.Milestones = true
case "labels":
opts.Labels = true
case "releases":
opts.Releases = true
case "release_assets":
opts.ReleaseAssets = true
case "comments":
opts.Comments = true
case "pull_requests":
opts.PullRequests = true
}
}
}
if err := migrations.RestoreRepository(
context.Background(),
ctx.String("repo_dir"),
ctx.String("owner_name"),
ctx.String("repo_name"),
ctx.StringSlice("units"),
)
if statusCode == http.StatusOK {
return nil
); err != nil {
log.Fatal("Failed to restore repository: %v", err)
return err
}
log.Fatal("Failed to restore repository: %v", errStr)
return errors.New(errStr)
return nil
}
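
The longer side of this hunk expands an empty or comma-separated `units` value into per-unit booleans before running the restore. Here is a tiny sketch of just that parsing step; the `Units` struct is illustrative and stands in for `base.MigrateOptions`.

```go
package main

import (
	"fmt"
	"strings"
)

// Units is an illustrative stand-in for the per-unit booleans on base.MigrateOptions.
type Units struct {
	Wiki, Issues, Milestones, Labels, Releases, ReleaseAssets, Comments, PullRequests bool
}

// parseUnits mirrors the switch in the hunk: an empty spec selects everything,
// otherwise each listed unit name turns on its flag.
func parseUnits(spec string) Units {
	if len(spec) == 0 {
		return Units{true, true, true, true, true, true, true, true}
	}
	var u Units
	for _, unit := range strings.Split(spec, ",") {
		switch strings.ToLower(unit) {
		case "wiki":
			u.Wiki = true
		case "issues":
			u.Issues = true
		case "milestones":
			u.Milestones = true
		case "labels":
			u.Labels = true
		case "releases":
			u.Releases = true
		case "release_assets":
			u.ReleaseAssets = true
		case "comments":
			u.Comments = true
		case "pull_requests":
			u.PullRequests = true
		}
	}
	return u
}

func main() {
	fmt.Printf("%+v\n", parseUnits("wiki,issues,pull_requests"))
}
```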

View File

@@ -175,7 +175,7 @@ func setPort(port string) error {
cfg.Section("server").Key("LOCAL_ROOT_URL").SetValue(defaultLocalURL)
if err := cfg.SaveTo(setting.CustomConf); err != nil {
return fmt.Errorf("Error saving generated LOCAL_ROOT_URL to custom config: %v", err)
return fmt.Errorf("Error saving generated JWT Secret to custom config: %v", err)
}
}
return nil

View File

@@ -9,11 +9,9 @@ import (
"net"
"net/http"
"net/http/fcgi"
"strings"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
)
func runHTTP(network, listenAddr, name string, m http.Handler) error {
@@ -50,12 +48,7 @@ func runFCGI(network, listenAddr, name string, m http.Handler) error {
fcgiServer := graceful.NewServer(network, listenAddr, name)
err := fcgiServer.ListenAndServe(func(listener net.Listener) error {
return fcgi.Serve(listener, http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
if setting.AppSubURL != "" {
req.URL.Path = strings.TrimPrefix(req.URL.Path, setting.AppSubURL)
}
m.ServeHTTP(resp, req)
}))
return fcgi.Serve(listener, m)
})
if err != nil {
log.Fatal("Failed to start FCGI main server: %v", err)
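
One side of this hunk trims the configured sub-URL from the request path before the router sees it. A stripped-down sketch of that wrapper using only the standard library follows; the `/gitea` sub-URL and the listen address are made-up example values, not taken from this diff.

```go
package main

import (
	"net"
	"net/http"
	"net/http/fcgi"
	"strings"
)

// subURLStripper mimics the handler wrapper in the hunk above: it removes the
// application sub-URL prefix before the mux sees the path.
func subURLStripper(appSubURL string, next http.Handler) http.Handler {
	return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
		if appSubURL != "" {
			req.URL.Path = strings.TrimPrefix(req.URL.Path, appSubURL)
		}
		next.ServeHTTP(resp, req)
	})
}

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		_, _ = w.Write([]byte("path after stripping: " + r.URL.Path))
	})

	// Example FCGI listen address.
	listener, err := net.Listen("tcp", "127.0.0.1:9000")
	if err != nil {
		panic(err)
	}
	_ = fcgi.Serve(listener, subURLStripper("/gitea", mux))
}
```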

View File

@@ -6,7 +6,6 @@ package cmd
import (
"net/http"
"strconv"
"strings"
"code.gitea.io/gitea/modules/log"
@@ -23,15 +22,6 @@ func runLetsEncrypt(listenAddr, domain, directory, email string, m http.Handler)
// TODO: these are placeholders until we add options for each in settings with appropriate warning
enableHTTPChallenge := true
enableTLSALPNChallenge := true
altHTTPPort := 0
altTLSALPNPort := 0
if p, err := strconv.Atoi(setting.PortToRedirect); err == nil {
altHTTPPort = p
}
if p, err := strconv.Atoi(setting.HTTPPort); err == nil {
altTLSALPNPort = p
}
magic := certmagic.NewDefault()
magic.Storage = &certmagic.FileStorage{Path: directory}
@@ -40,9 +30,6 @@ func runLetsEncrypt(listenAddr, domain, directory, email string, m http.Handler)
Agreed: setting.LetsEncryptTOS,
DisableHTTPChallenge: !enableHTTPChallenge,
DisableTLSALPNChallenge: !enableTLSALPNChallenge,
ListenHost: setting.HTTPAddr,
AltTLSALPNPort: altTLSALPNPort,
AltHTTPPort: altHTTPPort,
})
magic.Issuer = myACME

View File

@@ -110,8 +110,6 @@ func runEnvironmentToIni(c *cli.Context) error {
}
cfg.NameMapper = ini.SnackCase
changed := false
prefix := c.String("prefix") + "__"
for _, kv := range os.Environ() {
@@ -145,21 +143,15 @@ func runEnvironmentToIni(c *cli.Context) error {
continue
}
}
oldValue := key.Value()
if !changed && oldValue != value {
changed = true
}
key.SetValue(value)
}
destination := c.String("out")
if len(destination) == 0 {
destination = setting.CustomConf
}
if destination != setting.CustomConf || changed {
err = cfg.SaveTo(destination)
if err != nil {
return err
}
err = cfg.SaveTo(destination)
if err != nil {
return err
}
if c.Bool("clear") {
for _, kv := range os.Environ() {
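
One side of this hunk tracks whether any key actually changed and only saves the INI file when it did, or when the output path differs from the source. A small sketch of that change-tracking idea with go-ini is shown below; the paths and the single key set here are illustrative, not the real environment-to-ini inputs.

```go
package main

import (
	"log"

	ini "gopkg.in/ini.v1"
)

func main() {
	// Illustrative paths; the real command reads setting.CustomConf and an --out flag.
	source, destination := "app.ini", "app.ini"

	cfg, err := ini.Load(source)
	if err != nil {
		log.Fatal(err)
	}

	changed := false
	set := func(section, name, value string) {
		key := cfg.Section(section).Key(name)
		// Record a change only when the stored value would actually differ.
		if key.Value() != value {
			changed = true
		}
		key.SetValue(value)
	}

	// In the real command these values come from GITEA__* environment variables.
	set("server", "HTTP_PORT", "3000")

	// Only touch the file when something changed or we write to a different path.
	if destination != source || changed {
		if err := cfg.SaveTo(destination); err != nil {
			log.Fatal(err)
		}
	}
}
```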

View File

@@ -281,10 +281,6 @@ HTTP_PORT = 3000
; PORT_TO_REDIRECT.
REDIRECT_OTHER_PORT = false
PORT_TO_REDIRECT = 80
; Timeout for any write to the connection. (Set to 0 to disable all timeouts.)
PER_WRITE_TIMEOUT = 30s
; Timeout per Kb written to connections.
PER_WRITE_PER_KB_TIMEOUT = 30s
; Permission for unix socket
UNIX_SOCKET_PERMISSION = 666
; Local (DMZ) URL for Gitea workers (such as SSH update) accessing web service.

View File

@@ -24,29 +24,9 @@ if [ ! -f /data/ssh/ssh_host_ecdsa_key ]; then
ssh-keygen -t ecdsa -b 256 -f /data/ssh/ssh_host_ecdsa_key -N "" > /dev/null
fi
if [ -e /data/ssh/ssh_host_ed25519_cert ]; then
SSH_ED25519_CERT=${SSH_ED25519_CERT:-"/data/ssh/ssh_host_ed25519_cert"}
fi
if [ -e /data/ssh/ssh_host_rsa_cert ]; then
SSH_RSA_CERT=${SSH_RSA_CERT:-"/data/ssh/ssh_host_rsa_cert"}
fi
if [ -e /data/ssh/ssh_host_ecdsa_cert ]; then
SSH_ECDSA_CERT=${SSH_ECDSA_CERT:-"/data/ssh/ssh_host_ecdsa_cert"}
fi
if [ -e /data/ssh/ssh_host_dsa_cert ]; then
SSH_DSA_CERT=${SSH_DSA_CERT:-"/data/ssh/ssh_host_dsa_cert"}
fi
if [ -d /etc/ssh ]; then
SSH_PORT=${SSH_PORT:-"22"} \
SSH_LISTEN_PORT=${SSH_LISTEN_PORT:-"${SSH_PORT}"} \
SSH_ED25519_CERT="${SSH_ED25519_CERT:+"HostCertificate "}${SSH_ED25519_CERT}" \
SSH_RSA_CERT="${SSH_RSA_CERT:+"HostCertificate "}${SSH_RSA_CERT}" \
SSH_ECDSA_CERT="${SSH_ECDSA_CERT:+"HostCertificate "}${SSH_ECDSA_CERT}" \
SSH_DSA_CERT="${SSH_DSA_CERT:+"HostCertificate "}${SSH_DSA_CERT}" \
envsubst < /etc/templates/sshd_config > /etc/ssh/sshd_config
chmod 0644 /etc/ssh/sshd_config

View File

@@ -8,13 +8,13 @@ ListenAddress ::
LogLevel INFO
HostKey /data/ssh/ssh_host_ed25519_key
${SSH_ED25519_CERT}
HostCertificate /data/ssh/ssh_host_ed25519_cert
HostKey /data/ssh/ssh_host_rsa_key
${SSH_RSA_CERT}
HostCertificate /data/ssh/ssh_host_rsa_cert
HostKey /data/ssh/ssh_host_ecdsa_key
${SSH_ECDSA_CERT}
HostCertificate /data/ssh/ssh_host_ecdsa_cert
HostKey /data/ssh/ssh_host_dsa_key
${SSH_DSA_CERT}
HostCertificate /data/ssh/ssh_host_dsa_cert
AuthorizedKeysFile .ssh/authorized_keys
AuthorizedPrincipalsFile .ssh/authorized_principals

View File

@@ -31,4 +31,4 @@ update: $(THEME)
$(THEME): $(THEME)/theme.toml
$(THEME)/theme.toml:
mkdir -p $$(dirname $@)
curl -L -s $(ARCHIVE) | tar xz -C $$(dirname $@)
curl -s $(ARCHIVE) | tar xz -C $$(dirname $@)

View File

@@ -237,9 +237,6 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`.
most cases you do not need to change the default value. Alter it only if
your SSH server node is not the same as HTTP node. Do not set this variable
if `PROTOCOL` is set to `unix`.
- `PER_WRITE_TIMEOUT`: **30s**: Timeout for any write to the connection. (Set to 0 to
disable all timeouts.)
- `PER_WRITE_PER_KB_TIMEOUT`: **10s**: Timeout per Kb written to connections.
- `DISABLE_SSH`: **false**: Disable SSH feature when it's not available.
- `START_SSH_SERVER`: **false**: When enabled, use the built-in SSH server.
@@ -263,9 +260,6 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`.
- `SSH_KEY_TEST_PATH`: **/tmp**: Directory to create temporary files in when testing public keys using ssh-keygen, default is the system temporary directory.
- `SSH_KEYGEN_PATH`: **ssh-keygen**: Path to ssh-keygen, default is 'ssh-keygen' which means the shell is responsible for finding out which one to call.
- `SSH_EXPOSE_ANONYMOUS`: **false**: Enable exposure of SSH clone URL to anonymous visitors, default is false.
- `SSH_PER_WRITE_TIMEOUT`: **30s**: Timeout for any write to the SSH connections. (Set to
0 to disable all timeouts.)
- `SSH_PER_WRITE_PER_KB_TIMEOUT`: **10s**: Timeout per Kb written to SSH connections.
- `MINIMUM_KEY_SIZE_CHECK`: **true**: Indicate whether to check minimum key size with corresponding type.
- `OFFLINE_MODE`: **false**: Disables use of CDN for static files and Gravatar for profile pictures.

View File

@@ -79,7 +79,7 @@ chmod 770 /etc/gitea
chmod 750 /etc/gitea
chmod 640 /etc/gitea/app.ini
```
If you don't want the web installer to be able to write the config file at all, it is also possible to make the config file read-only for the gitea user (owner/group `root:root`, mode `0660`), and set `INSTALL_LOCK = true`. In that case all database configuration details must be set beforehand in the config file, as well as the `SECRET_KEY` and `INTERNAL_TOKEN` values. See the [command line documentation]({{< relref "doc/usage/command-line.en-us.md" >}}) for information on using `gitea generate secret INTERNAL_TOKEN`.
If you don't want the web installer to be able to write the config file at all, it is also possible to make the config file read-only for the gitea user (owner/group `root:git`, mode `0640`), and set `INSTALL_LOCK = true`. In that case all database configuration details must be set beforehand in the config file, as well as the `SECRET_KEY` and `INTERNAL_TOKEN` values. See the [command line documentation]({{< relref "doc/usage/command-line.en-us.md" >}}) for information on using `gitea generate secret INTERNAL_TOKEN`.
### Configure Gitea's working directory

go.mod (16 changed lines)
View File

@@ -5,7 +5,7 @@ go 1.14
require (
cloud.google.com/go v0.78.0 // indirect
code.gitea.io/gitea-vet v0.2.1
code.gitea.io/sdk/gitea v0.14.0
code.gitea.io/sdk/gitea v0.13.2
gitea.com/go-chi/binding v0.0.0-20210301195521-1fe1c9a555e7
gitea.com/go-chi/cache v0.0.0-20210110083709-82c4c9ce2d5e
gitea.com/go-chi/captcha v0.0.0-20210110083842-e7696c336a1e
@@ -86,7 +86,7 @@ require (
github.com/mgechev/revive v1.0.3
github.com/mholt/acmez v0.1.3 // indirect
github.com/mholt/archiver/v3 v3.5.0
github.com/microcosm-cc/bluemonday v1.0.7
github.com/microcosm-cc/bluemonday v1.0.4
github.com/miekg/dns v1.1.40 // indirect
github.com/minio/md5-simd v1.1.2 // indirect
github.com/minio/minio-go/v7 v7.0.10
@@ -122,13 +122,13 @@ require (
github.com/unknwon/com v1.0.1
github.com/unknwon/i18n v0.0.0-20200823051745-09abd91c7f2c
github.com/unknwon/paginater v0.0.0-20200328080006-042474bd0eae
github.com/unrolled/render v1.1.1
github.com/unrolled/render v1.0.3
github.com/urfave/cli v1.22.5
github.com/willf/bitset v1.1.11 // indirect
github.com/xanzy/go-gitlab v0.44.0
github.com/xanzy/ssh-agent v0.3.0 // indirect
github.com/yohcop/openid-go v1.0.0
github.com/yuin/goldmark v1.3.3
github.com/yuin/goldmark v1.3.2
github.com/yuin/goldmark-highlighting v0.0.0-20200307114337-60d527fdb691
github.com/yuin/goldmark-meta v1.0.0
go.jolheiser.com/hcaptcha v0.0.4
@@ -136,9 +136,9 @@ require (
go.uber.org/multierr v1.6.0 // indirect
go.uber.org/zap v1.16.0 // indirect
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110
golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44
golang.org/x/sys v0.0.0-20210228012217-479acdf4ea46
golang.org/x/text v0.3.5
golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect
golang.org/x/tools v0.1.0
@@ -149,7 +149,9 @@ require (
mvdan.cc/xurls/v2 v2.2.0
strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251
xorm.io/builder v0.3.9
xorm.io/xorm v1.1.0
xorm.io/xorm v1.0.7
)
replace github.com/hashicorp/go-version => github.com/6543/go-version v1.2.4
replace github.com/microcosm-cc/bluemonday => github.com/lunny/bluemonday v1.0.5-0.20201227154428-ca34796141e8

go.sum (62 changed lines)
View File

@@ -38,8 +38,8 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
code.gitea.io/gitea-vet v0.2.1 h1:b30by7+3SkmiftK0RjuXqFvZg2q4p68uoPGuxhzBN0s=
code.gitea.io/gitea-vet v0.2.1/go.mod h1:zcNbT/aJEmivCAhfmkHOlT645KNOf9W2KnkLgFjGGfE=
code.gitea.io/sdk/gitea v0.14.0 h1:m4J352I3p9+bmJUfS+g0odeQzBY/5OXP91Gv6D4fnJ0=
code.gitea.io/sdk/gitea v0.14.0/go.mod h1:89WiyOX1KEcvjP66sRHdu0RafojGo60bT9UqW17VbWs=
code.gitea.io/sdk/gitea v0.13.2 h1:wAnT/J7Z62q3fJXbgnecoaOBh8CM1Qq0/DakWxiv4yA=
code.gitea.io/sdk/gitea v0.13.2/go.mod h1:lee2y8LeV3kQb2iK+hHlMqoadL4bp27QOkOV/hawLKg=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
gitea.com/go-chi/binding v0.0.0-20210301195521-1fe1c9a555e7 h1:xCVJPY823C8RWpgMabTw2kOglDrg0iS3GcQU6wdwHkU=
gitea.com/go-chi/binding v0.0.0-20210301195521-1fe1c9a555e7/go.mod h1:AyfTrwtfYN54R/HmVvMYPnSTenH5bVoyh8x6tBluxEA=
@@ -196,6 +196,8 @@ github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chi-middleware/proxy v1.1.1 h1:4HaXUp8o2+bhHr1OhVy+VjN0+L7/07JDcn6v7YrTjrQ=
github.com/chi-middleware/proxy v1.1.1/go.mod h1:jQwMEJct2tz9VmtCELxvnXoMfa+SOdikvbVJVHv/M+0=
github.com/chris-ramon/douceur v0.2.0 h1:IDMEdxlEUUBYBKE4z/mJnFyVXox+MjuEVDJNN27glkU=
github.com/chris-ramon/douceur v0.2.0/go.mod h1:wDW5xjJdeoMm1mRt4sD4c/LbF/mWdEpRXQKjTR8nIBE=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
@@ -774,6 +776,8 @@ github.com/libdns/libdns v0.2.0 h1:ewg3ByWrdUrxrje8ChPVMBNcotg7H9LQYg+u5De2RzI=
github.com/libdns/libdns v0.2.0/go.mod h1:yQCXzk1lEZmmCPa857bnk4TsOiqYasqpyOEeSObbb40=
github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM=
github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4=
github.com/lunny/bluemonday v1.0.5-0.20201227154428-ca34796141e8 h1:1omo92DLtxQu6VwVPSZAmduHaK5zssed6cvkHyl1XOg=
github.com/lunny/bluemonday v1.0.5-0.20201227154428-ca34796141e8/go.mod h1:8iwZnFn2CDDNZ0r6UXhF4xawGvzaqzCRa1n3/lO3W2w=
github.com/lunny/dingtalk_webhook v0.0.0-20171025031554-e3534c89ef96 h1:uNwtsDp7ci48vBTTxDuwcoTXz4lwtDTe7TjCQ0noaWY=
github.com/lunny/dingtalk_webhook v0.0.0-20171025031554-e3534c89ef96/go.mod h1:mmIfjCSQlGYXmJ95jFN84AkQFnVABtKuJL8IrzwvUKQ=
github.com/lunny/log v0.0.0-20160921050905-7887c61bf0de/go.mod h1:3q8WtuPQsoRbatJuy3nvq/hRSvuBJrHHr+ybPPiNvHQ=
@@ -830,8 +834,6 @@ github.com/mholt/acmez v0.1.3 h1:J7MmNIk4Qf9b8mAGqAh4XkNeowv3f1zW816yf4zt7Qk=
github.com/mholt/acmez v0.1.3/go.mod h1:8qnn8QA/Ewx8E3ZSsmscqsIjhhpxuy9vqdgbX2ceceM=
github.com/mholt/archiver/v3 v3.5.0 h1:nE8gZIrw66cu4osS/U7UW7YDuGMHssxKutU8IfWxwWE=
github.com/mholt/archiver/v3 v3.5.0/go.mod h1:qqTTPUK/HZPFgFQ/TJ3BzvTpF/dPtFVJXdQbCmeMxwc=
github.com/microcosm-cc/bluemonday v1.0.7 h1:6yAQfk4XT+PI/dk1ZeBp1gr3Q2Hd1DR0O3aEyPUJVTE=
github.com/microcosm-cc/bluemonday v1.0.7/go.mod h1:HOT/6NaBlR0f9XlxD3zolN6Z3N8Lp4pvhp+jLS5ihnI=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/miekg/dns v1.1.30/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
github.com/miekg/dns v1.1.40 h1:pyyPFfGMnciYUk/mXpKkVmeMQjfXqt3FAJ2hy7tPiLA=
@@ -996,8 +998,6 @@ github.com/quasoft/websspi v1.0.0 h1:5nDgdM5xSur9s+B5w2xQ5kxf5nUGqgFgU4W0aDLZ8Mw
github.com/quasoft/websspi v1.0.0/go.mod h1:HmVdl939dQ0WIXZhyik+ARdI03M6bQzaSEKcgpFmewk=
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk=
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
@@ -1115,8 +1115,8 @@ github.com/unknwon/i18n v0.0.0-20200823051745-09abd91c7f2c h1:679/gJXwrsHC3RATr0
github.com/unknwon/i18n v0.0.0-20200823051745-09abd91c7f2c/go.mod h1:+5rDk6sDGpl3azws3O+f+GpFSyN9GVr0K8cvQLQM2ZQ=
github.com/unknwon/paginater v0.0.0-20200328080006-042474bd0eae h1:ihaXiJkaca54IaCSnEXtE/uSZOmPxKZhDfVLrzZLFDs=
github.com/unknwon/paginater v0.0.0-20200328080006-042474bd0eae/go.mod h1:1fdkY6xxl6ExVs2QFv7R0F5IRZHKA8RahhB9fMC9RvM=
github.com/unrolled/render v1.1.1 h1:FpzNzkvlJQIlVdVaqeVBGWiCS8gpbmjtrKpDmCn6p64=
github.com/unrolled/render v1.1.1/go.mod h1:gN9T0NhL4Bfbwu8ann7Ry/TGHYfosul+J0obPf6NBdM=
github.com/unrolled/render v1.0.3 h1:baO+NG1bZSF2WR4zwh+0bMWauWky7DVrTOfvE2w+aFo=
github.com/unrolled/render v1.0.3/go.mod h1:gN9T0NhL4Bfbwu8ann7Ry/TGHYfosul+J0obPf6NBdM=
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/urfave/cli v1.22.5 h1:lNq9sAHXK2qfdI8W+GRItjCEkI+2oR4d+MEHy1CKXoU=
@@ -1145,8 +1145,8 @@ github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.3 h1:37BdQwPx8VOSic8eDSWee6QL9mRpZRm9VJp/QugNrW0=
github.com/yuin/goldmark v1.3.3/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.3.2 h1:YjHC5TgyMmHpicTgEqDN0Q96Xo8K6tLXPnmNOHXCgs0=
github.com/yuin/goldmark v1.3.2/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark-highlighting v0.0.0-20200307114337-60d527fdb691 h1:VWSxtAiQNh3zgHJpdpkpVYjTPqRE3P6UZCOPa1nRDio=
github.com/yuin/goldmark-highlighting v0.0.0-20200307114337-60d527fdb691/go.mod h1:YLF3kDffRfUH/bTxOxHhV6lxwIB3Vfj91rEwNMS9MXo=
github.com/yuin/goldmark-meta v1.0.0 h1:ScsatUIT2gFS6azqzLGUjgOnELsBOxMXerM3ogdJhAM=
@@ -1321,9 +1321,8 @@ golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwY
golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210331212208-0fccb6fa2b5c/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 h1:4nGaVu0QrbjT/AK2PRLuQfQuh6DJve+pELhqTdAj3x0=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 h1:qWPm9rbaAMKs8Bq/9LRpbMqxWRVUAQwMI9fVrssnTfw=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -1419,8 +1418,8 @@ golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44 h1:Bli41pIlzTzf3KEY06n+xnzK/BESIg2ze4Pgfh/aI8c=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210228012217-479acdf4ea46 h1:V066+OYJ66oTjnhm4Yrn7SXIwSCiDQJxpBxmvqb1N1c=
golang.org/x/sys v0.0.0-20210228012217-479acdf4ea46/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
@@ -1502,7 +1501,6 @@ golang.org/x/tools v0.0.0-20200928182047-19e03678916f/go.mod h1:z6u4i615ZeAfBE4X
golang.org/x/tools v0.0.0-20200929161345-d7fc70abf50f/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU=
golang.org/x/tools v0.0.0-20201022035929-9cf592e881e9/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201124115921-2c860bdd6e78/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201125231158-b5590deeca9b/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
@@ -1669,33 +1667,6 @@ honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.4 h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8=
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
modernc.org/cc/v3 v3.31.5-0.20210308123301-7a3e9dab9009 h1:u0oCo5b9wyLr++HF3AN9JicGhkUxJhMz51+8TIZH9N0=
modernc.org/cc/v3 v3.31.5-0.20210308123301-7a3e9dab9009/go.mod h1:0R6jl1aZlIl2avnYfbfHBS1QB6/f+16mihBObaBC878=
modernc.org/ccgo/v3 v3.9.0 h1:JbcEIqjw4Agf+0g3Tc85YvfYqkkFOv6xBwS4zkfqSoA=
modernc.org/ccgo/v3 v3.9.0/go.mod h1:nQbgkn8mwzPdp4mm6BT6+p85ugQ7FrGgIcYaE7nSrpY=
modernc.org/httpfs v1.0.6 h1:AAgIpFZRXuYnkjftxTAZwMIiwEqAfk8aVB2/oA6nAeM=
modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM=
modernc.org/libc v1.7.13-0.20210308123627-12f642a52bb8/go.mod h1:U1eq8YWr/Kc1RWCMFUWEdkTg8OTcfLw2kY8EDwl039w=
modernc.org/libc v1.8.0 h1:Pp4uv9g0csgBMpGPABKtkieF6O5MGhfGo6ZiOdlYfR8=
modernc.org/libc v1.8.0/go.mod h1:U1eq8YWr/Kc1RWCMFUWEdkTg8OTcfLw2kY8EDwl039w=
modernc.org/mathutil v1.1.1/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
modernc.org/mathutil v1.2.2 h1:+yFk8hBprV+4c0U9GjFtL+dV3N8hOJ8JCituQcMShFY=
modernc.org/mathutil v1.2.2/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
modernc.org/memory v1.0.4 h1:utMBrFcpnQDdNsmM6asmyH/FM9TqLPS7XF7otpJmrwM=
modernc.org/memory v1.0.4/go.mod h1:nV2OApxradM3/OVbs2/0OsP6nPfakXpi50C7dcoHXlc=
modernc.org/opt v0.1.1 h1:/0RX92k9vwVeDXj+Xn23DKp2VJubL7k8qNffND6qn3A=
modernc.org/opt v0.1.1/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0=
modernc.org/sqlite v1.10.1-0.20210314190707-798bbeb9bb84 h1:rgEUzE849tFlHSoeCrKyS9cZAljC+DY7MdMHKq6R6sY=
modernc.org/sqlite v1.10.1-0.20210314190707-798bbeb9bb84/go.mod h1:PGzq6qlhyYjL6uVbSgS6WoF7ZopTW/sI7+7p+mb4ZVU=
modernc.org/strutil v1.1.0 h1:+1/yCzZxY2pZwwrsbH+4T7BQMoLQ9QiBshRC9eicYsc=
modernc.org/strutil v1.1.0/go.mod h1:lstksw84oURvj9y3tn8lGvRxyRC1S2+g5uuIzNfIOBs=
modernc.org/tcl v1.5.0 h1:euZSUNfE0Fd4W8VqXI1Ly1v7fqDJoBuAV88Ea+SnaSs=
modernc.org/tcl v1.5.0/go.mod h1:gb57hj4pO8fRrK54zveIfFXBaMHK3SKJNWcmRw1cRzc=
modernc.org/token v1.0.0 h1:a0jaWiNMDhDUtqOj09wvjWWAqd3q7WpBulmL9H2egsk=
modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
modernc.org/z v1.0.1-0.20210308123920-1f282aa71362/go.mod h1:8/SRk5C/HgiQWCgXdfpb+1RvhORdkz5sw72d3jjtyqA=
modernc.org/z v1.0.1 h1:WyIDpEpAIx4Hel6q/Pcgj/VhaQV5XPJ2I6ryIYbjnpc=
modernc.org/z v1.0.1/go.mod h1:8/SRk5C/HgiQWCgXdfpb+1RvhORdkz5sw72d3jjtyqA=
mvdan.cc/xurls/v2 v2.2.0 h1:NSZPykBXJFCetGZykLAxaL6SIpvbVy/UFEniIfHAa8A=
mvdan.cc/xurls/v2 v2.2.0/go.mod h1:EV1RMtya9D6G5DMYPGD8zTQzaHet6Jh8gFlRgGRJeO8=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
@@ -1706,9 +1677,8 @@ sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1
strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 h1:mUcz5b3FJbP5Cvdq7Khzn6J9OCUQJaBwgBkCR+MOwSs=
strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251/go.mod h1:FJGmPh3vz9jSos1L/F91iAgnC/aejc0wIIrF2ZwJxdY=
xorm.io/builder v0.3.7/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE=
xorm.io/builder v0.3.8/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE=
xorm.io/builder v0.3.9 h1:Sd65/LdWyO7LR8+Cbd+e7mm3sK/7U9k0jS3999IDHMc=
xorm.io/builder v0.3.9/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE=
xorm.io/xorm v1.0.6/go.mod h1:uF9EtbhODq5kNWxMbnBEj8hRRZnlcNSz2t2N7HW/+A4=
xorm.io/xorm v1.1.0 h1:mkEsQXLauZajiOld2cB2PkFcUZKePepPgs1bC1dw8RA=
xorm.io/xorm v1.1.0/go.mod h1:EDzNHMuCVZNszkIRSLL2nI0zX+nQE8RstAVranlSfqI=
xorm.io/xorm v1.0.7 h1:26yBTDVI+CfQpVz2Y88fISh+aiJXIPP4eNoTJlwzsC4=
xorm.io/xorm v1.0.7/go.mod h1:uF9EtbhODq5kNWxMbnBEj8hRRZnlcNSz2t2N7HW/+A4=

View File

@@ -239,26 +239,6 @@ func doAPICreatePullRequest(ctx APITestContext, owner, repo, baseBranch, headBra
}
}
func doAPIGetPullRequest(ctx APITestContext, owner, repo string, index int64) func(*testing.T) (api.PullRequest, error) {
return func(t *testing.T) (api.PullRequest, error) {
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d?token=%s",
owner, repo, index, ctx.Token)
req := NewRequest(t, http.MethodGet, urlStr)
expected := 200
if ctx.ExpectedCode != 0 {
expected = ctx.ExpectedCode
}
resp := ctx.Session.MakeRequest(t, req, expected)
json := jsoniter.ConfigCompatibleWithStandardLibrary
decoder := json.NewDecoder(resp.Body)
pr := api.PullRequest{}
err := decoder.Decode(&pr)
return pr, err
}
}
func doAPIMergePullRequest(ctx APITestContext, owner, repo string, index int64) func(*testing.T) {
return func(t *testing.T) {
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/merge?token=%s",

View File

@@ -92,10 +92,6 @@ func testAPIDeleteOAuth2Application(t *testing.T) {
session.MakeRequest(t, req, http.StatusNoContent)
models.AssertNotExistsBean(t, &models.OAuth2Application{UID: oldApp.UID, Name: oldApp.Name})
// Delete again will return not found
req = NewRequest(t, "DELETE", urlStr)
session.MakeRequest(t, req, http.StatusNotFound)
}
func testAPIGetOAuth2Application(t *testing.T) {

View File

@@ -130,14 +130,11 @@ func getNewRepoEditOption(opts *api.EditRepoOption) *api.EditRepoOption {
func TestAPIRepoEdit(t *testing.T) {
onGiteaRun(t, func(t *testing.T, u *url.URL) {
bFalse, bTrue := false, true
user2 := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) // owner of the repo1 & repo16
user3 := models.AssertExistsAndLoadBean(t, &models.User{ID: 3}).(*models.User) // owner of the repo3, is an org
user4 := models.AssertExistsAndLoadBean(t, &models.User{ID: 4}).(*models.User) // owner of neither repos
repo1 := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) // public repo
repo3 := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 3}).(*models.Repository) // public repo
repo15 := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 15}).(*models.Repository) // empty repo
repo16 := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 16}).(*models.Repository) // private repo
// Get user2's token
@@ -289,8 +286,9 @@ func TestAPIRepoEdit(t *testing.T) {
// Test making a repo public that is private
repo16 = models.AssertExistsAndLoadBean(t, &models.Repository{ID: 16}).(*models.Repository)
assert.True(t, repo16.IsPrivate)
private := false
repoEditOption = &api.EditRepoOption{
Private: &bFalse,
Private: &private,
}
url = fmt.Sprintf("/api/v1/repos/%s/%s?token=%s", user2.Name, repo16.Name, token2)
req = NewRequestWithJSON(t, "PATCH", url, &repoEditOption)
@@ -298,24 +296,11 @@ func TestAPIRepoEdit(t *testing.T) {
repo16 = models.AssertExistsAndLoadBean(t, &models.Repository{ID: 16}).(*models.Repository)
assert.False(t, repo16.IsPrivate)
// Make it private again
repoEditOption.Private = &bTrue
private = true
repoEditOption.Private = &private
req = NewRequestWithJSON(t, "PATCH", url, &repoEditOption)
_ = session.MakeRequest(t, req, http.StatusOK)
// Test to change empty repo
assert.False(t, repo15.IsArchived)
url = fmt.Sprintf("/api/v1/repos/%s/%s?token=%s", user2.Name, repo15.Name, token2)
req = NewRequestWithJSON(t, "PATCH", url, &api.EditRepoOption{
Archived: &bTrue,
})
_ = session.MakeRequest(t, req, http.StatusOK)
repo15 = models.AssertExistsAndLoadBean(t, &models.Repository{ID: 15}).(*models.Repository)
assert.True(t, repo15.IsArchived)
req = NewRequestWithJSON(t, "PATCH", url, &api.EditRepoOption{
Archived: &bFalse,
})
_ = session.MakeRequest(t, req, http.StatusOK)
// Test using org repo "user3/repo3" where user2 is a collaborator
origRepoEditOption = getRepoEditOptionFromRepo(repo3)
repoEditOption = getNewRepoEditOption(origRepoEditOption)

View File

@@ -223,7 +223,7 @@ func TestAPIViewRepo(t *testing.T) {
DecodeJSON(t, resp, &repo)
assert.EqualValues(t, 1, repo.ID)
assert.EqualValues(t, "repo1", repo.Name)
assert.EqualValues(t, 1, repo.Releases)
assert.EqualValues(t, 2, repo.Releases)
assert.EqualValues(t, 1, repo.OpenIssues)
assert.EqualValues(t, 3, repo.OpenPulls)

View File

@@ -122,7 +122,7 @@ func TestGetAttachment(t *testing.T) {
t.Run(tc.name, func(t *testing.T) {
//Write empty file to be available for response
if tc.createFile {
_, err := storage.Attachments.Save(models.AttachmentRelativePath(tc.uuid), strings.NewReader("hello world"), -1)
_, err := storage.Attachments.Save(models.AttachmentRelativePath(tc.uuid), strings.NewReader("hello world"))
assert.NoError(t, err)
}
//Actual test

View File

@@ -1,69 +0,0 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package integrations
import (
"io/ioutil"
"net/http"
"net/url"
"testing"
"github.com/stretchr/testify/assert"
)
func TestGitSmartHTTP(t *testing.T) {
onGiteaRun(t, testGitSmartHTTP)
}
func testGitSmartHTTP(t *testing.T, u *url.URL) {
var kases = []struct {
p string
code int
}{
{
p: "user2/repo1/info/refs",
code: 200,
},
{
p: "user2/repo1/HEAD",
code: 200,
},
{
p: "user2/repo1/objects/info/alternates",
code: 404,
},
{
p: "user2/repo1/objects/info/http-alternates",
code: 404,
},
{
p: "user2/repo1/../../custom/conf/app.ini",
code: 404,
},
{
p: "user2/repo1/objects/info/../../../../custom/conf/app.ini",
code: 404,
},
{
p: `user2/repo1/objects/info/..\..\..\..\custom\conf\app.ini`,
code: 400,
},
}
for _, kase := range kases {
t.Run(kase.p, func(t *testing.T) {
p := u.String() + kase.p
req, err := http.NewRequest("GET", p, nil)
assert.NoError(t, err)
req.SetBasicAuth("user2", userPassword)
resp, err := http.DefaultClient.Do(req)
assert.NoError(t, err)
defer resp.Body.Close()
assert.EqualValues(t, kase.code, resp.StatusCode)
_, err = ioutil.ReadAll(resp.Body)
assert.NoError(t, err)
})
}
}

View File

@@ -5,7 +5,6 @@
package integrations
import (
"encoding/hex"
"fmt"
"io/ioutil"
"math/rand"
@@ -209,13 +208,13 @@ func rawTest(t *testing.T, ctx *APITestContext, little, big, littleLFS, bigLFS s
// Request raw paths
req := NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", little))
resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
assert.Equal(t, littleSize, resp.Length)
resp := session.MakeRequest(t, req, http.StatusOK)
assert.Equal(t, littleSize, resp.Body.Len())
setting.CheckLFSVersion()
if setting.LFS.StartServer {
req = NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", littleLFS))
resp := session.MakeRequest(t, req, http.StatusOK)
resp = session.MakeRequest(t, req, http.StatusOK)
assert.NotEqual(t, littleSize, resp.Body.Len())
assert.LessOrEqual(t, resp.Body.Len(), 1024)
if resp.Body.Len() != littleSize && resp.Body.Len() <= 1024 {
@@ -225,12 +224,12 @@ func rawTest(t *testing.T, ctx *APITestContext, little, big, littleLFS, bigLFS s
if !testing.Short() {
req = NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", big))
resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
assert.Equal(t, bigSize, resp.Length)
resp = session.MakeRequest(t, req, http.StatusOK)
assert.Equal(t, bigSize, resp.Body.Len())
if setting.LFS.StartServer {
req = NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", bigLFS))
resp := session.MakeRequest(t, req, http.StatusOK)
resp = session.MakeRequest(t, req, http.StatusOK)
assert.NotEqual(t, bigSize, resp.Body.Len())
if resp.Body.Len() != bigSize && resp.Body.Len() <= 1024 {
assert.Contains(t, resp.Body.String(), models.LFSMetaFileIdentifier)
@@ -451,35 +450,27 @@ func doMergeFork(ctx, baseCtx APITestContext, baseBranch, headBranch string) fun
t.Run("EnsureCanSeePull", doEnsureCanSeePull(baseCtx, pr))
// Then get the diff string
var diffHash string
var diffLength int
var diffStr string
t.Run("GetDiff", func(t *testing.T) {
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d.diff", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
resp := ctx.Session.MakeRequestNilResponseHashSumRecorder(t, req, http.StatusOK)
diffHash = string(resp.Hash.Sum(nil))
diffLength = resp.Length
resp := ctx.Session.MakeRequest(t, req, http.StatusOK)
diffStr = resp.Body.String()
})
// Now: Merge the PR & make sure that doesn't break the PR page or change its diff
t.Run("MergePR", doAPIMergePullRequest(baseCtx, baseCtx.Username, baseCtx.Reponame, pr.Index))
t.Run("EnsureCanSeePull", doEnsureCanSeePull(baseCtx, pr))
t.Run("CheckPR", func(t *testing.T) {
oldMergeBase := pr.MergeBase
pr2, err := doAPIGetPullRequest(baseCtx, baseCtx.Username, baseCtx.Reponame, pr.Index)(t)
assert.NoError(t, err)
assert.Equal(t, oldMergeBase, pr2.MergeBase)
})
t.Run("EnsurDiffNoChange", doEnsureDiffNoChange(baseCtx, pr, diffHash, diffLength))
t.Run("EnsureDiffNoChange", doEnsureDiffNoChange(baseCtx, pr, diffStr))
// Then: Delete the head branch & make sure that doesn't break the PR page or change its diff
t.Run("DeleteHeadBranch", doBranchDelete(baseCtx, baseCtx.Username, baseCtx.Reponame, headBranch))
t.Run("EnsureCanSeePull", doEnsureCanSeePull(baseCtx, pr))
t.Run("EnsureDiffNoChange", doEnsureDiffNoChange(baseCtx, pr, diffHash, diffLength))
t.Run("EnsureDiffNoChange", doEnsureDiffNoChange(baseCtx, pr, diffStr))
// Delete the head repository & make sure that doesn't break the PR page or change its diff
t.Run("DeleteHeadRepository", doAPIDeleteRepository(ctx))
t.Run("EnsureCanSeePull", doEnsureCanSeePull(baseCtx, pr))
t.Run("EnsureDiffNoChange", doEnsureDiffNoChange(baseCtx, pr, diffHash, diffLength))
t.Run("EnsureDiffNoChange", doEnsureDiffNoChange(baseCtx, pr, diffStr))
}
}
@@ -523,15 +514,20 @@ func doEnsureCanSeePull(ctx APITestContext, pr api.PullRequest) func(t *testing.
}
}
func doEnsureDiffNoChange(ctx APITestContext, pr api.PullRequest, diffHash string, diffLength int) func(t *testing.T) {
func doEnsureDiffNoChange(ctx APITestContext, pr api.PullRequest, diffStr string) func(t *testing.T) {
return func(t *testing.T) {
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d.diff", url.PathEscape(ctx.Username), url.PathEscape(ctx.Reponame), pr.Index))
resp := ctx.Session.MakeRequestNilResponseHashSumRecorder(t, req, http.StatusOK)
actual := string(resp.Hash.Sum(nil))
actualLength := resp.Length
equal := diffHash == actual
assert.True(t, equal, "Unexpected change in the diff string: expected hash: %s size: %d but was actually: %s size: %d", hex.EncodeToString([]byte(diffHash)), diffLength, hex.EncodeToString([]byte(actual)), actualLength)
resp := ctx.Session.MakeRequest(t, req, http.StatusOK)
expectedMaxLen := len(diffStr)
if expectedMaxLen > 800 {
expectedMaxLen = 800
}
actual := resp.Body.String()
actualMaxLen := len(actual)
if actualMaxLen > 800 {
actualMaxLen = 800
}
assert.Equal(t, diffStr, actual, "Unexpected change in the diff string: expected: %s but was actually: %s", diffStr[:expectedMaxLen], actual[:actualMaxLen])
}
}

View File

@@ -1,35 +0,0 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package integrations
import (
"fmt"
"net/http"
"testing"
"code.gitea.io/gitea/modules/setting"
"github.com/stretchr/testify/assert"
)
func TestGoGet(t *testing.T) {
defer prepareTestEnv(t)()
req := NewRequest(t, "GET", "/blah/glah/plah?go-get=1")
resp := MakeRequest(t, req, http.StatusOK)
expected := fmt.Sprintf(`<!doctype html>
<html>
<head>
<meta name="go-import" content="%[1]s:%[2]s/blah/glah git %[3]sblah/glah.git">
<meta name="go-source" content="%[1]s:%[2]s/blah/glah _ %[3]sblah/glah/src/branch/master{/dir} %[3]sblah/glah/src/branch/master{/dir}/{file}#L{line}">
</head>
<body>
go get --insecure %[1]s:%[2]s/blah/glah
</body>
</html>
`, setting.Domain, setting.HTTPPort, setting.AppURL)
assert.Equal(t, expected, resp.Body.String())
}

View File

@@ -9,8 +9,6 @@ import (
"context"
"database/sql"
"fmt"
"hash"
"hash/fnv"
"io"
"net/http"
"net/http/cookiejar"
@@ -60,26 +58,6 @@ func NewNilResponseRecorder() *NilResponseRecorder {
}
}
type NilResponseHashSumRecorder struct {
httptest.ResponseRecorder
Hash hash.Hash
Length int
}
func (n *NilResponseHashSumRecorder) Write(b []byte) (int, error) {
_, _ = n.Hash.Write(b)
n.Length += len(b)
return len(b), nil
}
// NewRecorder returns an initialized ResponseRecorder.
func NewNilResponseHashSumRecorder() *NilResponseHashSumRecorder {
return &NilResponseHashSumRecorder{
Hash: fnv.New32(),
ResponseRecorder: *httptest.NewRecorder(),
}
}
func TestMain(m *testing.M) {
defer log.Close()
@@ -306,23 +284,6 @@ func (s *TestSession) MakeRequestNilResponseRecorder(t testing.TB, req *http.Req
return resp
}
func (s *TestSession) MakeRequestNilResponseHashSumRecorder(t testing.TB, req *http.Request, expectedStatus int) *NilResponseHashSumRecorder {
t.Helper()
baseURL, err := url.Parse(setting.AppURL)
assert.NoError(t, err)
for _, c := range s.jar.Cookies(baseURL) {
req.AddCookie(c)
}
resp := MakeRequestNilResponseHashSumRecorder(t, req, expectedStatus)
ch := http.Header{}
ch.Add("Cookie", strings.Join(resp.Header()["Set-Cookie"], ";"))
cr := http.Request{Header: ch}
s.jar.SetCookies(baseURL, cr.Cookies())
return resp
}
const userPassword = "password"
var loginSessionCache = make(map[string]*TestSession, 10)
@@ -468,19 +429,6 @@ func MakeRequestNilResponseRecorder(t testing.TB, req *http.Request, expectedSta
return recorder
}
func MakeRequestNilResponseHashSumRecorder(t testing.TB, req *http.Request, expectedStatus int) *NilResponseHashSumRecorder {
t.Helper()
recorder := NewNilResponseHashSumRecorder()
c.ServeHTTP(recorder, req)
if expectedStatus != NoExpectedStatus {
if !assert.EqualValues(t, expectedStatus, recorder.Code,
"Request: %s %s", req.Method, req.URL.String()) {
logUnexpectedResponse(t, &recorder.ResponseRecorder)
}
}
return recorder
}
// logUnexpectedResponse logs the contents of an unexpected response.
func logUnexpectedResponse(t testing.TB, recorder *httptest.ResponseRecorder) {
t.Helper()

View File

@@ -10,11 +10,9 @@ import (
"testing"
"time"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/test"
"github.com/PuerkitoBio/goquery"
"github.com/stretchr/testify/assert"
"github.com/unknwon/i18n"
)
@@ -85,7 +83,7 @@ func TestCreateRelease(t *testing.T) {
session := loginUser(t, "user2")
createNewRelease(t, session, "/user2/repo1", "v0.0.1", "v0.0.1", false, false)
checkLatestReleaseAndCount(t, session, "/user2/repo1", "v0.0.1", i18n.Tr("en", "repo.release.stable"), 3)
checkLatestReleaseAndCount(t, session, "/user2/repo1", "v0.0.1", i18n.Tr("en", "repo.release.stable"), 2)
}
func TestCreateReleasePreRelease(t *testing.T) {
@@ -94,7 +92,7 @@ func TestCreateReleasePreRelease(t *testing.T) {
session := loginUser(t, "user2")
createNewRelease(t, session, "/user2/repo1", "v0.0.1", "v0.0.1", true, false)
checkLatestReleaseAndCount(t, session, "/user2/repo1", "v0.0.1", i18n.Tr("en", "repo.release.prerelease"), 3)
checkLatestReleaseAndCount(t, session, "/user2/repo1", "v0.0.1", i18n.Tr("en", "repo.release.prerelease"), 2)
}
func TestCreateReleaseDraft(t *testing.T) {
@@ -103,7 +101,7 @@ func TestCreateReleaseDraft(t *testing.T) {
session := loginUser(t, "user2")
createNewRelease(t, session, "/user2/repo1", "v0.0.1", "v0.0.1", false, true)
checkLatestReleaseAndCount(t, session, "/user2/repo1", "v0.0.1", i18n.Tr("en", "repo.release.draft"), 3)
checkLatestReleaseAndCount(t, session, "/user2/repo1", "v0.0.1", i18n.Tr("en", "repo.release.draft"), 2)
}
func TestCreateReleasePaging(t *testing.T) {
@@ -129,80 +127,3 @@ func TestCreateReleasePaging(t *testing.T) {
session2 := loginUser(t, "user4")
checkLatestReleaseAndCount(t, session2, "/user2/repo1", "v0.0.11", i18n.Tr("en", "repo.release.stable"), 10)
}
func TestViewReleaseListNoLogin(t *testing.T) {
defer prepareTestEnv(t)()
repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
link := repo.Link() + "/releases"
req := NewRequest(t, "GET", link)
rsp := MakeRequest(t, req, http.StatusOK)
htmlDoc := NewHTMLParser(t, rsp.Body)
releases := htmlDoc.Find("#release-list li.ui.grid")
assert.Equal(t, 1, releases.Length())
links := make([]string, 0, 5)
releases.Each(func(i int, s *goquery.Selection) {
link, exist := s.Find(".release-list-title a").Attr("href")
if !exist {
return
}
links = append(links, link)
})
assert.EqualValues(t, []string{"/user2/repo1/releases/tag/v1.1"}, links)
}
func TestViewReleaseListLogin(t *testing.T) {
defer prepareTestEnv(t)()
repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
link := repo.Link() + "/releases"
session := loginUser(t, "user1")
req := NewRequest(t, "GET", link)
rsp := session.MakeRequest(t, req, http.StatusOK)
htmlDoc := NewHTMLParser(t, rsp.Body)
releases := htmlDoc.Find("#release-list li.ui.grid")
assert.Equal(t, 2, releases.Length())
links := make([]string, 0, 5)
releases.Each(func(i int, s *goquery.Selection) {
link, exist := s.Find(".release-list-title a").Attr("href")
if !exist {
return
}
links = append(links, link)
})
assert.EqualValues(t, []string{"/user2/repo1/releases/tag/draft-release",
"/user2/repo1/releases/tag/v1.1"}, links)
}
func TestViewTagsList(t *testing.T) {
defer prepareTestEnv(t)()
repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
link := repo.Link() + "/tags"
session := loginUser(t, "user1")
req := NewRequest(t, "GET", link)
rsp := session.MakeRequest(t, req, http.StatusOK)
htmlDoc := NewHTMLParser(t, rsp.Body)
tags := htmlDoc.Find(".tag-list tr")
assert.Equal(t, 2, tags.Length())
tagNames := make([]string, 0, 5)
tags.Each(func(i int, s *goquery.Selection) {
tagNames = append(tagNames, s.Find(".tag a.df.ac").Text())
})
assert.EqualValues(t, []string{"delete-tag", "v1.1"}, tagNames)
}

View File

@@ -382,7 +382,7 @@ func activityQueryCondition(opts GetFeedsOptions) (builder.Cond, error) {
}
if opts.Date != "" {
dateLow, err := time.ParseInLocation("2006-01-02", opts.Date, setting.DefaultUILocation)
dateLow, err := time.Parse("2006-01-02", opts.Date)
if err != nil {
log.Warn("Unable to parse %s, filter not applied: %v", opts.Date, err)
} else {

View File

@@ -85,7 +85,7 @@ func (a *Attachment) LinkedRepository() (*Repository, UnitType, error) {
func NewAttachment(attach *Attachment, buf []byte, file io.Reader) (_ *Attachment, err error) {
attach.UUID = gouuid.New().String()
size, err := storage.Attachments.Save(attach.RelativePath(), io.MultiReader(bytes.NewReader(buf), file), -1)
size, err := storage.Attachments.Save(attach.RelativePath(), io.MultiReader(bytes.NewReader(buf), file))
if err != nil {
return nil, fmt.Errorf("Create: %v", err)
}
@@ -125,8 +125,8 @@ func getAttachmentByUUID(e Engine, uuid string) (*Attachment, error) {
}
// GetAttachmentsByUUIDs returns attachment by given UUID list.
func GetAttachmentsByUUIDs(ctx DBContext, uuids []string) ([]*Attachment, error) {
return getAttachmentsByUUIDs(ctx.e, uuids)
func GetAttachmentsByUUIDs(uuids []string) ([]*Attachment, error) {
return getAttachmentsByUUIDs(x, uuids)
}
func getAttachmentsByUUIDs(e Engine, uuids []string) ([]*Attachment, error) {
@@ -183,12 +183,12 @@ func getAttachmentByReleaseIDFileName(e Engine, releaseID int64, fileName string
// DeleteAttachment deletes the given attachment and optionally the associated file.
func DeleteAttachment(a *Attachment, remove bool) error {
_, err := DeleteAttachments(DefaultDBContext(), []*Attachment{a}, remove)
_, err := DeleteAttachments([]*Attachment{a}, remove)
return err
}
// DeleteAttachments deletes the given attachments and optionally the associated files.
func DeleteAttachments(ctx DBContext, attachments []*Attachment, remove bool) (int, error) {
func DeleteAttachments(attachments []*Attachment, remove bool) (int, error) {
if len(attachments) == 0 {
return 0, nil
}
@@ -198,7 +198,7 @@ func DeleteAttachments(ctx DBContext, attachments []*Attachment, remove bool) (i
ids = append(ids, a.ID)
}
cnt, err := ctx.e.In("id", ids).NoAutoCondition().Delete(attachments[0])
cnt, err := x.In("id", ids).NoAutoCondition().Delete(attachments[0])
if err != nil {
return 0, err
}
@@ -220,7 +220,7 @@ func DeleteAttachmentsByIssue(issueID int64, remove bool) (int, error) {
return 0, err
}
return DeleteAttachments(DefaultDBContext(), attachments, remove)
return DeleteAttachments(attachments, remove)
}
// DeleteAttachmentsByComment deletes all attachments associated with the given comment.
@@ -230,7 +230,7 @@ func DeleteAttachmentsByComment(commentID int64, remove bool) (int, error) {
return 0, err
}
return DeleteAttachments(DefaultDBContext(), attachments, remove)
return DeleteAttachments(attachments, remove)
}
// UpdateAttachment updates the given attachment in database
@@ -238,15 +238,6 @@ func UpdateAttachment(atta *Attachment) error {
return updateAttachment(x, atta)
}
// UpdateAttachmentByUUID Updates attachment via uuid
func UpdateAttachmentByUUID(ctx DBContext, attach *Attachment, cols ...string) error {
if attach.UUID == "" {
return fmt.Errorf("Attachement uuid should not blank")
}
_, err := ctx.e.Where("uuid=?", attach.UUID).Cols(cols...).Update(attach)
return err
}
func updateAttachment(e Engine, atta *Attachment) error {
var sess *xorm.Session
if atta.ID != 0 && atta.UUID == "" {

View File

@@ -120,7 +120,7 @@ func TestUpdateAttachment(t *testing.T) {
func TestGetAttachmentsByUUIDs(t *testing.T) {
assert.NoError(t, PrepareTestDatabase())
attachList, err := GetAttachmentsByUUIDs(DefaultDBContext(), []string{"a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a17", "not-existing-uuid"})
attachList, err := GetAttachmentsByUUIDs([]string{"a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a17", "not-existing-uuid"})
assert.NoError(t, err)
assert.Equal(t, 2, len(attachList))
assert.Equal(t, "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11", attachList[0].UUID)

View File

@@ -81,7 +81,7 @@ func LibravatarURL(email string) (*url.URL, error) {
}
// HashedAvatarLink returns an avatar link for a provided email
func HashedAvatarLink(email string, size int) string {
func HashedAvatarLink(email string) string {
lowerEmail := strings.ToLower(strings.TrimSpace(email))
sum := fmt.Sprintf("%x", md5.Sum([]byte(lowerEmail)))
_, _ = cache.GetString("Avatar:"+sum, func() (string, error) {
@@ -96,11 +96,6 @@ func HashedAvatarLink(email string, size int) string {
// we don't care about any DB problem just return the lowerEmail
return lowerEmail, nil
}
has, err := sess.Where("email = ? AND hash = ?", emailHash.Email, emailHash.Hash).Get(new(EmailHash))
if has || err != nil {
// Seriously we don't care about any DB problems just return the lowerEmail - we expect the transaction to fail most of the time
return lowerEmail, nil
}
_, _ = sess.Insert(emailHash)
if err := sess.Commit(); err != nil {
// Seriously we don't care about any DB problems just return the lowerEmail - we expect the transaction to fail most of the time
@@ -108,9 +103,6 @@ func HashedAvatarLink(email string, size int) string {
}
return lowerEmail, nil
})
if size > 0 {
return setting.AppSubURL + "/avatar/" + url.PathEscape(sum) + "?size=" + strconv.Itoa(size)
}
return setting.AppSubURL + "/avatar/" + url.PathEscape(sum)
}
@@ -132,7 +124,7 @@ func SizedAvatarLink(email string, size int) string {
// This is the slow path that would need to call LibravatarURL() which
// does DNS lookups. Avoid it by issuing a redirect so we don't block
// the template render with network requests.
return HashedAvatarLink(email, size)
return HashedAvatarLink(email)
} else if !setting.DisableGravatar {
// copy GravatarSourceURL, because we will modify its Path.
copyOfGravatarSourceURL := *setting.GravatarSourceURL

View File

@@ -296,15 +296,11 @@ func CountOrphanedObjects(subject, refobject, joinCond string) (int64, error) {
// DeleteOrphanedObjects deletes subjects which have no existing refobject anymore
func DeleteOrphanedObjects(subject, refobject, joinCond string) error {
subQuery := builder.Select("`"+subject+"`.id").
_, err := x.In("id", builder.Select("`"+subject+"`.id").
From("`"+subject+"`").
Join("LEFT", "`"+refobject+"`", joinCond).
Where(builder.IsNull{"`" + refobject + "`.id"})
sql, args, err := builder.Delete(builder.In("id", subQuery)).From("`" + subject + "`").ToSQL()
if err != nil {
return err
}
_, err = x.Exec(append([]interface{}{sql}, args...)...)
Where(builder.IsNull{"`" + refobject + "`.id"})).
Delete("`" + subject + "`")
return err
}
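As a rough standalone illustration of the change above (assuming only xorm.io/builder; the pull_request/issue table names mirror the orphaned-objects test elsewhere in this compare), the subquery-based delete can be built and inspected with ToSQL before it is executed:

package main

import (
	"fmt"

	"xorm.io/builder"
)

func main() {
	// Ids of pull_request rows whose referenced issue no longer exists.
	subQuery := builder.Select("`pull_request`.id").
		From("`pull_request`").
		Join("LEFT", "`issue`", "`pull_request`.issue_id = `issue`.id").
		Where(builder.IsNull{"`issue`.id"})

	// DELETE restricted to those ids, kept portable across databases.
	sql, args, err := builder.Delete(builder.In("id", subQuery)).From("`pull_request`").ToSQL()
	if err != nil {
		panic(err)
	}
	fmt.Println(sql, args)
}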
@@ -342,7 +338,7 @@ func FixCommentTypeLabelWithEmptyLabel() (int64, error) {
// CountCommentTypeLabelWithOutsideLabels count label comments with outside label
func CountCommentTypeLabelWithOutsideLabels() (int64, error) {
return x.Where("comment.type = ? AND ((label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != repository.owner_id))", CommentTypeLabel).
return x.Where("comment.type = ? AND (issue.repo_id != label.repo_id OR (label.repo_id = 0 AND repository.owner_id != label.org_id))", CommentTypeLabel).
Table("comment").
Join("inner", "label", "label.id = comment.label_id").
Join("inner", "issue", "issue.id = comment.issue_id ").
@@ -358,9 +354,8 @@ func FixCommentTypeLabelWithOutsideLabels() (int64, error) {
FROM comment AS com
INNER JOIN label ON com.label_id = label.id
INNER JOIN issue on issue.id = com.issue_id
INNER JOIN repository ON issue.repo_id = repository.id
WHERE
com.type = ? AND ((label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != repository.owner_id))
com.type = ? AND (issue.repo_id != label.repo_id OR (label.repo_id = 0 AND label.org_id != repo.owner_id))
) AS il_too)`, CommentTypeLabel)
if err != nil {
return 0, err
@@ -371,9 +366,9 @@ func FixCommentTypeLabelWithOutsideLabels() (int64, error) {
// CountIssueLabelWithOutsideLabels count label comments with outside label
func CountIssueLabelWithOutsideLabels() (int64, error) {
return x.Where(builder.Expr("(label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != repository.owner_id)")).
return x.Where(builder.Expr("issue.repo_id != label.repo_id OR (label.repo_id = 0 AND repository.owner_id != label.org_id)")).
Table("issue_label").
Join("inner", "label", "issue_label.label_id = label.id ").
Join("inner", "label", "issue_label.id = label.id ").
Join("inner", "issue", "issue.id = issue_label.issue_id ").
Join("inner", "repository", "issue.repo_id = repository.id").
Count(new(IssueLabel))
@@ -385,11 +380,11 @@ func FixIssueLabelWithOutsideLabels() (int64, error) {
SELECT il_too.id FROM (
SELECT il_too_too.id
FROM issue_label AS il_too_too
INNER JOIN label ON il_too_too.label_id = label.id
INNER JOIN label ON il_too_too.id = label.id
INNER JOIN issue on issue.id = il_too_too.issue_id
INNER JOIN repository on repository.id = issue.repo_id
WHERE
(label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != repository.owner_id)
issue.repo_id != label.repo_id OR (label.repo_id = 0 AND label.org_id != repository.owner_id)
) AS il_too )`)
if err != nil {

View File

@@ -1,32 +0,0 @@
// Copyright 2021 Gitea. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package models
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestDeleteOrphanedObjects(t *testing.T) {
assert.NoError(t, PrepareTestDatabase())
countBefore, err := x.Count(&PullRequest{})
assert.NoError(t, err)
_, err = x.Insert(&PullRequest{IssueID: 1000}, &PullRequest{IssueID: 1001}, &PullRequest{IssueID: 1003})
assert.NoError(t, err)
orphaned, err := CountOrphanedObjects("pull_request", "issue", "pull_request.issue_id=issue.id")
assert.NoError(t, err)
assert.EqualValues(t, 3, orphaned)
err = DeleteOrphanedObjects("pull_request", "issue", "pull_request.issue_id=issue.id")
assert.NoError(t, err)
countAfter, err := x.Count(&PullRequest{})
assert.NoError(t, err)
assert.EqualValues(t, countBefore, countAfter)
}

View File

@@ -43,15 +43,3 @@
is_tag: true
created_unix: 946684800
-
id: 4
repo_id: 1
publisher_id: 2
tag_name: "draft-release"
lower_tag_name: "draft-release"
target: "master"
title: "draft-release"
is_draft: true
is_prerelease: false
is_tag: false
created_unix: 1619524806

View File

@@ -1086,7 +1086,7 @@ func getIssuesByIDs(e Engine, issueIDs []int64) ([]*Issue, error) {
func getIssueIDsByRepoID(e Engine, repoID int64) ([]int64, error) {
ids := make([]int64, 0, 10)
err := e.Table("issue").Cols("id").Where("repo_id = ?", repoID).Find(&ids)
err := e.Table("issue").Where("repo_id = ?", repoID).Find(&ids)
return ids, err
}

View File

@@ -53,9 +53,6 @@ func (issues IssueList) loadRepositories(e Engine) ([]*Repository, error) {
for _, issue := range issues {
issue.Repo = repoMaps[issue.RepoID]
if issue.PullRequest != nil {
issue.PullRequest.BaseRepo = issue.Repo
}
}
return valuesRepository(repoMaps), nil
}
@@ -519,11 +516,6 @@ func (issues IssueList) LoadDiscussComments() error {
return issues.loadComments(x, builder.Eq{"comment.type": CommentTypeComment})
}
// LoadPullRequests loads pull requests
func (issues IssueList) LoadPullRequests() error {
return issues.loadPullRequests(x)
}
// GetApprovalCounts returns a map of issue ID to slice of approval counts
// FIXME: only returns official counts due to double counting of non-official approvals
func (issues IssueList) GetApprovalCounts() (map[int64][]*ReviewCount, error) {

View File

@@ -36,14 +36,6 @@ func TestIssue_ReplaceLabels(t *testing.T) {
testSuccess(1, []int64{})
}
func Test_GetIssueIDsByRepoID(t *testing.T) {
assert.NoError(t, PrepareTestDatabase())
ids, err := GetIssueIDsByRepoID(1)
assert.NoError(t, err)
assert.Len(t, ids, 5)
}
func TestIssueAPIURL(t *testing.T) {
assert.NoError(t, PrepareTestDatabase())
issue := AssertExistsAndLoadBean(t, &Issue{ID: 1}).(*Issue)

View File

@@ -26,8 +26,6 @@ func NewXORMLogger(showSQL bool) xormlog.Logger {
}
}
const stackLevel = 8
// Log a message with defined skip and at logging level
func (l *XORMLogBridge) Log(skip int, level log.Level, format string, v ...interface{}) error {
return l.logger.Log(skip+1, level, format, v...)
@@ -35,42 +33,42 @@ func (l *XORMLogBridge) Log(skip int, level log.Level, format string, v ...inter
// Debug show debug log
func (l *XORMLogBridge) Debug(v ...interface{}) {
_ = l.Log(stackLevel, log.DEBUG, fmt.Sprint(v...))
_ = l.Log(2, log.DEBUG, fmt.Sprint(v...))
}
// Debugf show debug log
func (l *XORMLogBridge) Debugf(format string, v ...interface{}) {
_ = l.Log(stackLevel, log.DEBUG, format, v...)
_ = l.Log(2, log.DEBUG, format, v...)
}
// Error show error log
func (l *XORMLogBridge) Error(v ...interface{}) {
_ = l.Log(stackLevel, log.ERROR, fmt.Sprint(v...))
_ = l.Log(2, log.ERROR, fmt.Sprint(v...))
}
// Errorf show error log
func (l *XORMLogBridge) Errorf(format string, v ...interface{}) {
_ = l.Log(stackLevel, log.ERROR, format, v...)
_ = l.Log(2, log.ERROR, format, v...)
}
// Info show information level log
func (l *XORMLogBridge) Info(v ...interface{}) {
_ = l.Log(stackLevel, log.INFO, fmt.Sprint(v...))
_ = l.Log(2, log.INFO, fmt.Sprint(v...))
}
// Infof show information level log
func (l *XORMLogBridge) Infof(format string, v ...interface{}) {
_ = l.Log(stackLevel, log.INFO, format, v...)
_ = l.Log(2, log.INFO, format, v...)
}
// Warn show warning log
func (l *XORMLogBridge) Warn(v ...interface{}) {
_ = l.Log(stackLevel, log.WARN, fmt.Sprint(v...))
_ = l.Log(2, log.WARN, fmt.Sprint(v...))
}
// Warnf show warning log
func (l *XORMLogBridge) Warnf(format string, v ...interface{}) {
_ = l.Log(stackLevel, log.WARN, format, v...)
_ = l.Log(2, log.WARN, format, v...)
}
// Level get logger level

View File

@@ -21,7 +21,6 @@ import (
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
gouuid "github.com/google/uuid"
jsoniter "github.com/json-iterator/go"
"xorm.io/xorm"
@@ -117,7 +116,6 @@ func (cfg *SMTPConfig) ToDB() ([]byte, error) {
// PAMConfig holds configuration for the PAM login source.
type PAMConfig struct {
ServiceName string // pam service (e.g. system-auth)
EmailDomain string
}
// FromDB fills up a PAMConfig from serialized format.
@@ -698,26 +696,15 @@ func LoginViaPAM(user *User, login, password string, sourceID int64, cfg *PAMCon
// Allow PAM sources with `@` in their name, like from Active Directory
username := pamLogin
email := pamLogin
idx := strings.Index(pamLogin, "@")
if idx > -1 {
username = pamLogin[:idx]
}
if ValidateEmail(email) != nil {
if cfg.EmailDomain != "" {
email = fmt.Sprintf("%s@%s", username, cfg.EmailDomain)
} else {
email = fmt.Sprintf("%s@%s", username, setting.Service.NoReplyAddress)
}
if ValidateEmail(email) != nil {
email = gouuid.New().String() + "@localhost"
}
}
user = &User{
LowerName: strings.ToLower(username),
Name: username,
Email: email,
Email: pamLogin,
Passwd: password,
LoginType: LoginPAM,
LoginSource: sourceID,

View File

@@ -39,7 +39,6 @@ func InsertMilestones(ms ...*Milestone) (err error) {
// InsertIssues insert issues to database
func InsertIssues(issues ...*Issue) error {
sess := x.NewSession()
defer sess.Close()
if err := sess.Begin(); err != nil {
return err
}
@@ -195,7 +194,6 @@ func InsertPullRequests(prs ...*PullRequest) error {
// InsertReleases migrates release
func InsertReleases(rels ...*Release) error {
sess := x.NewSession()
defer sess.Close()
if err := sess.Begin(); err != nil {
return err
}

View File

@@ -88,7 +88,6 @@ func fixPublisherIDforTagReleases(x *xorm.Engine) error {
repo = new(Repository)
has, err := sess.ID(release.RepoID).Get(repo)
if err != nil {
log.Error("Error whilst loading repository[%d] for release[%d] with tag name %s. Error: %v", release.RepoID, release.ID, release.TagName, err)
return err
} else if !has {
log.Warn("Release[%d] is orphaned and refers to non-existing repository %d", release.ID, release.RepoID)
@@ -100,55 +99,28 @@ func fixPublisherIDforTagReleases(x *xorm.Engine) error {
// v120.go migration may not have been run correctly - we'll just replicate it here
// because this appears to be a common-ish problem.
if _, err := sess.Exec("UPDATE repository SET owner_name = (SELECT name FROM `user` WHERE `user`.id = repository.owner_id)"); err != nil {
log.Error("Error whilst updating repository[%d] owner name", repo.ID)
return err
}
if _, err := sess.ID(release.RepoID).Get(repo); err != nil {
log.Error("Error whilst loading repository[%d] for release[%d] with tag name %s. Error: %v", release.RepoID, release.ID, release.TagName, err)
return err
}
}
gitRepo, err = git.OpenRepository(repoPath(repo.OwnerName, repo.Name))
if err != nil {
log.Error("Error whilst opening git repo for [%d]%s/%s. Error: %v", repo.ID, repo.OwnerName, repo.Name, err)
return err
}
}
commit, err := gitRepo.GetTagCommit(release.TagName)
if err != nil {
if git.IsErrNotExist(err) {
log.Warn("Unable to find commit %s for Tag: %s in [%d]%s/%s. Cannot update publisher ID.", err.(git.ErrNotExist).ID, release.TagName, repo.ID, repo.OwnerName, repo.Name)
continue
}
log.Error("Error whilst getting commit for Tag: %s in [%d]%s/%s. Error: %v", release.TagName, repo.ID, repo.OwnerName, repo.Name, err)
return fmt.Errorf("GetTagCommit: %v", err)
}
if commit.Author.Email == "" {
log.Warn("Tag: %s in Repo[%d]%s/%s does not have a tagger.", release.TagName, repo.ID, repo.OwnerName, repo.Name)
commit, err = gitRepo.GetCommit(commit.ID.String())
if err != nil {
if git.IsErrNotExist(err) {
log.Warn("Unable to find commit %s for Tag: %s in [%d]%s/%s. Cannot update publisher ID.", err.(git.ErrNotExist).ID, release.TagName, repo.ID, repo.OwnerName, repo.Name)
continue
}
log.Error("Error whilst getting commit for Tag: %s in [%d]%s/%s. Error: %v", release.TagName, repo.ID, repo.OwnerName, repo.Name, err)
return fmt.Errorf("GetCommit: %v", err)
}
}
if commit.Author.Email == "" {
log.Warn("Tag: %s in Repo[%d]%s/%s does not have a Tagger and its underlying commit does not have an Author either!", release.TagName, repo.ID, repo.OwnerName, repo.Name)
continue
}
if user == nil || !strings.EqualFold(user.Email, commit.Author.Email) {
user = new(User)
_, err = sess.Where("email=?", commit.Author.Email).Get(user)
if err != nil {
log.Error("Error whilst getting commit author by email: %s for Tag: %s in [%d]%s/%s. Error: %v", commit.Author.Email, release.TagName, repo.ID, repo.OwnerName, repo.Name, err)
return err
}
@@ -161,7 +133,6 @@ func fixPublisherIDforTagReleases(x *xorm.Engine) error {
release.PublisherID = user.ID
if _, err := sess.ID(release.ID).Cols("publisher_id").Update(release); err != nil {
log.Error("Error whilst updating publisher[%d] for release[%d] with tag name %s. Error: %v", release.PublisherID, release.ID, release.TagName, err)
return err
}
}

View File

@@ -12,9 +12,9 @@ import (
func addSessionTable(x *xorm.Engine) error {
type Session struct {
Key string `xorm:"pk CHAR(16)"`
Data []byte `xorm:"BLOB"`
Expiry timeutil.TimeStamp
Key string `xorm:"pk CHAR(16)"`
Data []byte `xorm:"BLOB"`
CreatedUnix timeutil.TimeStamp
}
return x.Sync2(new(Session))
}

View File

@@ -48,11 +48,11 @@ func removeInvalidLabels(x *xorm.Engine) error {
SELECT il_too.id FROM (
SELECT il_too_too.id
FROM issue_label AS il_too_too
INNER JOIN label ON il_too_too.label_id = label.id
INNER JOIN label ON il_too_too.id = label.id
INNER JOIN issue on issue.id = il_too_too.issue_id
INNER JOIN repository on repository.id = issue.repo_id
WHERE
(label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != repository.owner_id)
issue.repo_id != label.repo_id OR (label.repo_id = 0 AND label.org_id != repository.owner_id)
) AS il_too )`); err != nil {
return err
}
@@ -65,7 +65,7 @@ func removeInvalidLabels(x *xorm.Engine) error {
INNER JOIN issue on issue.id = com.issue_id
INNER JOIN repository on repository.id = issue.repo_id
WHERE
com.type = ? AND ((label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != repository.owner_id))
com.type = ? AND (issue.repo_id != label.repo_id OR (label.repo_id = 0 AND label.org_id != repository.owner_id))
) AS il_too)`, 7); err != nil {
return err
}

View File

@@ -319,7 +319,7 @@ func DumpDatabase(filePath, dbType string) error {
ID int64 `xorm:"pk autoincr"`
Version int64
}
t, err := x.TableInfo(&Version{})
t, err := x.TableInfo(Version{})
if err != nil {
return err
}

View File

@@ -25,7 +25,7 @@ func TestDumpDatabase(t *testing.T) {
ID int64 `xorm:"pk autoincr"`
Version int64
}
assert.NoError(t, x.Sync2(new(Version)))
assert.NoError(t, x.Sync2(Version{}))
for _, dbName := range setting.SupportedDatabases {
dbType := setting.GetDBTypeByName(dbName)

View File

@@ -235,7 +235,7 @@ func deleteOAuth2Application(sess *xorm.Session, id, userid int64) error {
if deleted, err := sess.Delete(&OAuth2Application{ID: id, UID: userid}); err != nil {
return err
} else if deleted == 0 {
return ErrOAuthApplicationNotFound{ID: id}
return fmt.Errorf("cannot find oauth2 application")
}
codes := make([]*OAuth2AuthorizationCode, 0)
// delete correlating auth codes
@@ -261,7 +261,6 @@ func deleteOAuth2Application(sess *xorm.Session, id, userid int64) error {
// DeleteOAuth2Application deletes the application with the given id and the grants and auth codes related to it. It checks if the userid was the creator of the app.
func DeleteOAuth2Application(id, userid int64) error {
sess := x.NewSession()
defer sess.Close()
if err := sess.Begin(); err != nil {
return err
}

View File

@@ -212,21 +212,12 @@ func (pr *PullRequest) GetDefaultMergeMessage() string {
log.Error("Cannot load issue %d for PR id %d: Error: %v", pr.IssueID, pr.ID, err)
return ""
}
if err := pr.LoadBaseRepo(); err != nil {
log.Error("LoadBaseRepo: %v", err)
return ""
}
issueReference := "#"
if pr.BaseRepo.UnitEnabled(UnitTypeExternalTracker) {
issueReference = "!"
}
if pr.BaseRepoID == pr.HeadRepoID {
return fmt.Sprintf("Merge pull request '%s' (%s%d) from %s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadBranch, pr.BaseBranch)
return fmt.Sprintf("Merge pull request '%s' (#%d) from %s into %s", pr.Issue.Title, pr.Issue.Index, pr.HeadBranch, pr.BaseBranch)
}
return fmt.Sprintf("Merge pull request '%s' (%s%d) from %s:%s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseBranch)
return fmt.Sprintf("Merge pull request '%s' (#%d) from %s:%s into %s", pr.Issue.Title, pr.Issue.Index, pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseBranch)
}
// ReviewCount represents a count of Reviews
@@ -415,8 +406,7 @@ func (pr *PullRequest) SetMerged() (bool, error) {
return false, fmt.Errorf("Issue.changeStatus: %v", err)
}
// We need to save all of the data used to compute this merge as it may have already been changed by TestPatch. FIXME: need to set some state to prevent TestPatch from running whilst we are merging.
if _, err := sess.Where("id = ?", pr.ID).Cols("has_merged, status, merge_base, merged_commit_id, merger_id, merged_unix").Update(pr); err != nil {
if _, err := sess.Where("id = ?", pr.ID).Cols("has_merged, status, merged_commit_id, merger_id, merged_unix").Update(pr); err != nil {
return false, fmt.Errorf("Failed to update pr[%d]: %v", pr.ID, err)
}

View File

@@ -234,36 +234,3 @@ func TestPullRequest_GetWorkInProgressPrefixWorkInProgress(t *testing.T) {
pr.Issue.Title = "[wip] " + original
assert.Equal(t, "[wip]", pr.GetWorkInProgressPrefix())
}
func TestPullRequest_GetDefaultMergeMessage_InternalTracker(t *testing.T) {
assert.NoError(t, PrepareTestDatabase())
pr := AssertExistsAndLoadBean(t, &PullRequest{ID: 2}).(*PullRequest)
assert.Equal(t, "Merge pull request 'issue3' (#3) from branch2 into master", pr.GetDefaultMergeMessage())
pr.BaseRepoID = 1
pr.HeadRepoID = 2
assert.Equal(t, "Merge pull request 'issue3' (#3) from user2/repo1:branch2 into master", pr.GetDefaultMergeMessage())
}
func TestPullRequest_GetDefaultMergeMessage_ExternalTracker(t *testing.T) {
assert.NoError(t, PrepareTestDatabase())
externalTracker := RepoUnit{
Type: UnitTypeExternalTracker,
Config: &ExternalTrackerConfig{
ExternalTrackerFormat: "https://someurl.com/{user}/{repo}/{issue}",
},
}
baseRepo := &Repository{Name: "testRepo", ID: 1}
baseRepo.Owner = &User{Name: "testOwner"}
baseRepo.Units = []*RepoUnit{&externalTracker}
pr := AssertExistsAndLoadBean(t, &PullRequest{ID: 2, BaseRepo: baseRepo}).(*PullRequest)
assert.Equal(t, "Merge pull request 'issue3' (!3) from branch2 into master", pr.GetDefaultMergeMessage())
pr.BaseRepoID = 1
pr.HeadRepoID = 2
assert.Equal(t, "Merge pull request 'issue3' (!3) from user2/repo1:branch2 into master", pr.GetDefaultMergeMessage())
}

View File

@@ -6,7 +6,6 @@
package models
import (
"errors"
"fmt"
"sort"
"strings"
@@ -118,20 +117,17 @@ func UpdateRelease(ctx DBContext, rel *Release) error {
}
// AddReleaseAttachments adds release attachments
func AddReleaseAttachments(ctx DBContext, releaseID int64, attachmentUUIDs []string) (err error) {
func AddReleaseAttachments(releaseID int64, attachmentUUIDs []string) (err error) {
// Check attachments
attachments, err := getAttachmentsByUUIDs(ctx.e, attachmentUUIDs)
attachments, err := GetAttachmentsByUUIDs(attachmentUUIDs)
if err != nil {
return fmt.Errorf("GetAttachmentsByUUIDs [uuids: %v]: %v", attachmentUUIDs, err)
}
for i := range attachments {
if attachments[i].ReleaseID != 0 {
return errors.New("release permission denied")
}
attachments[i].ReleaseID = releaseID
// No assign value could be 0, so ignore AllCols().
if _, err = ctx.e.ID(attachments[i].ID).Update(attachments[i]); err != nil {
if _, err = x.ID(attachments[i].ID).Update(attachments[i]); err != nil {
return fmt.Errorf("update attachment [%d]: %v", attachments[i].ID, err)
}
}

View File

@@ -749,7 +749,7 @@ func (repo *Repository) updateSize(e Engine) error {
}
repo.Size = size
_, err = e.ID(repo.ID).Cols("size").NoAutoTime().Update(repo)
_, err = e.ID(repo.ID).Cols("size").Update(repo)
return err
}
@@ -1349,26 +1349,6 @@ func UpdateRepository(repo *Repository, visibilityChanged bool) (err error) {
return sess.Commit()
}
// UpdateRepositoryOwnerNames updates repository owner_names (this should only be used when the ownerName has changed case)
func UpdateRepositoryOwnerNames(ownerID int64, ownerName string) error {
if ownerID == 0 {
return nil
}
sess := x.NewSession()
defer sess.Close()
if err := sess.Begin(); err != nil {
return err
}
if _, err := sess.Where("owner_id = ?", ownerID).Cols("owner_name").Update(&Repository{
OwnerName: ownerName,
}); err != nil {
return err
}
return sess.Commit()
}
// UpdateRepositoryUpdatedTime updates a repository's updated time
func UpdateRepositoryUpdatedTime(repoID int64, updateTime time.Time) error {
_, err := x.Exec("UPDATE repository SET updated_unix = ? WHERE id = ?", updateTime.Unix(), repoID)
@@ -1474,26 +1454,23 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
if err := deleteBeans(sess,
&Access{RepoID: repo.ID},
&Action{RepoID: repo.ID},
&Collaboration{RepoID: repoID},
&Comment{RefRepoID: repoID},
&CommitStatus{RepoID: repoID},
&DeletedBranch{RepoID: repoID},
&HookTask{RepoID: repoID},
&LFSLock{RepoID: repoID},
&LanguageStat{RepoID: repoID},
&Milestone{RepoID: repoID},
&Mirror{RepoID: repoID},
&Notification{RepoID: repoID},
&ProtectedBranch{RepoID: repoID},
&PullRequest{BaseRepoID: repoID},
&Release{RepoID: repoID},
&RepoIndexerStatus{RepoID: repoID},
&RepoRedirect{RedirectRepoID: repoID},
&RepoUnit{RepoID: repoID},
&Star{RepoID: repoID},
&Task{RepoID: repoID},
&Watch{RepoID: repoID},
&Star{RepoID: repoID},
&Mirror{RepoID: repoID},
&Milestone{RepoID: repoID},
&Release{RepoID: repoID},
&Collaboration{RepoID: repoID},
&PullRequest{BaseRepoID: repoID},
&RepoUnit{RepoID: repoID},
&RepoRedirect{RedirectRepoID: repoID},
&Webhook{RepoID: repoID},
&HookTask{RepoID: repoID},
&Notification{RepoID: repoID},
&CommitStatus{RepoID: repoID},
&RepoIndexerStatus{RepoID: repoID},
&LanguageStat{RepoID: repoID},
&Comment{RefRepoID: repoID},
&Task{RepoID: repoID},
); err != nil {
return fmt.Errorf("deleteBeans: %v", err)
}
@@ -1509,6 +1486,10 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
return err
}
if _, err := sess.Where("repo_id = ?", repoID).Delete(new(RepoUnit)); err != nil {
return err
}
if repo.IsFork {
if _, err := sess.Exec("UPDATE `repository` SET num_forks=num_forks-1 WHERE id=?", repo.ForkID); err != nil {
return fmt.Errorf("decrease fork count: %v", err)

View File

@@ -12,7 +12,6 @@ import (
"code.gitea.io/gitea/modules/util"
"xorm.io/builder"
"xorm.io/xorm"
)
// RepositoryListDefaultPageSize is the default number of repositories
@@ -364,35 +363,6 @@ func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {
// SearchRepositoryByCondition search repositories by condition
func SearchRepositoryByCondition(opts *SearchRepoOptions, cond builder.Cond, loadAttributes bool) (RepositoryList, int64, error) {
sess, count, err := searchRepositoryByCondition(opts, cond)
if err != nil {
return nil, 0, err
}
defer sess.Close()
defaultSize := 50
if opts.PageSize > 0 {
defaultSize = opts.PageSize
}
repos := make(RepositoryList, 0, defaultSize)
if err := sess.Find(&repos); err != nil {
return nil, 0, fmt.Errorf("Repo: %v", err)
}
if opts.PageSize <= 0 {
count = int64(len(repos))
}
if loadAttributes {
if err := repos.loadAttributes(sess); err != nil {
return nil, 0, fmt.Errorf("LoadAttributes: %v", err)
}
}
return repos, count, nil
}
func searchRepositoryByCondition(opts *SearchRepoOptions, cond builder.Cond) (*xorm.Session, int64, error) {
if opts.Page <= 0 {
opts.Page = 1
}
@@ -406,24 +376,31 @@ func searchRepositoryByCondition(opts *SearchRepoOptions, cond builder.Cond) (*x
}
sess := x.NewSession()
defer sess.Close()
var count int64
if opts.PageSize > 0 {
var err error
count, err = sess.
Where(cond).
Count(new(Repository))
if err != nil {
_ = sess.Close()
return nil, 0, fmt.Errorf("Count: %v", err)
}
count, err := sess.
Where(cond).
Count(new(Repository))
if err != nil {
return nil, 0, fmt.Errorf("Count: %v", err)
}
repos := make(RepositoryList, 0, opts.PageSize)
sess.Where(cond).OrderBy(opts.OrderBy.String())
if opts.PageSize > 0 {
sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
}
return sess, count, nil
if err = sess.Find(&repos); err != nil {
return nil, 0, fmt.Errorf("Repo: %v", err)
}
if loadAttributes {
if err = repos.loadAttributes(sess); err != nil {
return nil, 0, fmt.Errorf("LoadAttributes: %v", err)
}
}
return repos, count, nil
}
// accessibleRepositoryCondition takes a user and returns a condition for checking if a repository is accessible
@@ -479,33 +456,6 @@ func SearchRepositoryByName(opts *SearchRepoOptions) (RepositoryList, int64, err
return SearchRepository(opts)
}
// SearchRepositoryIDs takes keyword and part of repository name to search,
// it returns results in given range and number of total results.
func SearchRepositoryIDs(opts *SearchRepoOptions) ([]int64, int64, error) {
opts.IncludeDescription = false
cond := SearchRepositoryCondition(opts)
sess, count, err := searchRepositoryByCondition(opts, cond)
if err != nil {
return nil, 0, err
}
defer sess.Close()
defaultSize := 50
if opts.PageSize > 0 {
defaultSize = opts.PageSize
}
ids := make([]int64, 0, defaultSize)
err = sess.Select("id").Table("repository").Find(&ids)
if opts.PageSize <= 0 {
count = int64(len(ids))
}
return ids, count, err
}
// AccessibleRepoIDsQuery queries accessible repository ids. Usable as a subquery wherever repo ids need to be filtered.
func AccessibleRepoIDsQuery(user *User) *builder.Builder {
// NB: Please note this code needs to still work if user is nil

View File

@@ -330,10 +330,10 @@ func TransferOwnership(doer *User, newOwnerName string, repo *Repository) (err e
SELECT il_too.id FROM (
SELECT il_too_too.id
FROM issue_label AS il_too_too
INNER JOIN label ON il_too_too.label_id = label.id
INNER JOIN label ON il_too_too.id = label.id
INNER JOIN issue on issue.id = il_too_too.issue_id
WHERE
issue.repo_id = ? AND ((label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != ?))
issue.repo_id = ? AND (issue.repo_id != label.repo_id OR (label.repo_id = 0 AND label.org_id != ?))
) AS il_too )`, repo.ID, newOwner.ID); err != nil {
return fmt.Errorf("Unable to remove old org labels: %v", err)
}
@@ -343,9 +343,9 @@ func TransferOwnership(doer *User, newOwnerName string, repo *Repository) (err e
SELECT com.id
FROM comment AS com
INNER JOIN label ON com.label_id = label.id
INNER JOIN issue ON issue.id = com.issue_id
INNER JOIN issue on issue.id = com.issue_id
WHERE
com.type = ? AND issue.repo_id = ? AND ((label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != ?))
com.type = ? AND issue.repo_id = ? AND (issue.repo_id != label.repo_id OR (label.repo_id = 0 AND label.org_id != ?))
) AS il_too)`, CommentTypeLabel, repo.ID, newOwner.ID); err != nil {
return fmt.Errorf("Unable to remove old org label comments: %v", err)
}

View File

@@ -566,11 +566,7 @@ func DismissReview(review *Review, isDismiss bool) (err error) {
review.Dismissed = isDismiss
if review.ID == 0 {
return ErrReviewNotExist{}
}
_, err = x.ID(review.ID).Cols("dismissed").Update(review)
_, err = x.Cols("dismissed").Update(review)
return
}

View File

@@ -143,57 +143,11 @@ func TestGetReviewersByIssueID(t *testing.T) {
}
func TestDismissReview(t *testing.T) {
assert.NoError(t, PrepareTestDatabase())
rejectReviewExample := AssertExistsAndLoadBean(t, &Review{ID: 9}).(*Review)
requestReviewExample := AssertExistsAndLoadBean(t, &Review{ID: 11}).(*Review)
approveReviewExample := AssertExistsAndLoadBean(t, &Review{ID: 8}).(*Review)
assert.False(t, rejectReviewExample.Dismissed)
assert.False(t, requestReviewExample.Dismissed)
assert.False(t, approveReviewExample.Dismissed)
assert.NoError(t, DismissReview(rejectReviewExample, true))
rejectReviewExample = AssertExistsAndLoadBean(t, &Review{ID: 9}).(*Review)
requestReviewExample = AssertExistsAndLoadBean(t, &Review{ID: 11}).(*Review)
assert.True(t, rejectReviewExample.Dismissed)
assert.False(t, requestReviewExample.Dismissed)
assert.NoError(t, DismissReview(requestReviewExample, true))
rejectReviewExample = AssertExistsAndLoadBean(t, &Review{ID: 9}).(*Review)
requestReviewExample = AssertExistsAndLoadBean(t, &Review{ID: 11}).(*Review)
assert.True(t, rejectReviewExample.Dismissed)
assert.False(t, requestReviewExample.Dismissed)
assert.False(t, approveReviewExample.Dismissed)
assert.NoError(t, DismissReview(requestReviewExample, true))
rejectReviewExample = AssertExistsAndLoadBean(t, &Review{ID: 9}).(*Review)
requestReviewExample = AssertExistsAndLoadBean(t, &Review{ID: 11}).(*Review)
assert.True(t, rejectReviewExample.Dismissed)
assert.False(t, requestReviewExample.Dismissed)
assert.False(t, approveReviewExample.Dismissed)
assert.NoError(t, DismissReview(requestReviewExample, false))
rejectReviewExample = AssertExistsAndLoadBean(t, &Review{ID: 9}).(*Review)
requestReviewExample = AssertExistsAndLoadBean(t, &Review{ID: 11}).(*Review)
assert.True(t, rejectReviewExample.Dismissed)
assert.False(t, requestReviewExample.Dismissed)
assert.False(t, approveReviewExample.Dismissed)
assert.NoError(t, DismissReview(requestReviewExample, false))
rejectReviewExample = AssertExistsAndLoadBean(t, &Review{ID: 9}).(*Review)
requestReviewExample = AssertExistsAndLoadBean(t, &Review{ID: 11}).(*Review)
assert.True(t, rejectReviewExample.Dismissed)
assert.False(t, requestReviewExample.Dismissed)
assert.False(t, approveReviewExample.Dismissed)
assert.NoError(t, DismissReview(rejectReviewExample, false))
assert.False(t, rejectReviewExample.Dismissed)
assert.False(t, requestReviewExample.Dismissed)
assert.False(t, approveReviewExample.Dismissed)
assert.NoError(t, DismissReview(approveReviewExample, true))
assert.False(t, rejectReviewExample.Dismissed)
assert.False(t, requestReviewExample.Dismissed)
assert.True(t, approveReviewExample.Dismissed)
review1 := AssertExistsAndLoadBean(t, &Review{ID: 9}).(*Review)
review2 := AssertExistsAndLoadBean(t, &Review{ID: 11}).(*Review)
assert.NoError(t, DismissReview(review1, true))
assert.NoError(t, DismissReview(review2, true))
assert.NoError(t, DismissReview(review2, true))
assert.NoError(t, DismissReview(review2, false))
assert.NoError(t, DismissReview(review2, false))
}

View File

@@ -117,6 +117,6 @@ func CountSessions() (int64, error) {
// CleanupSessions cleans up expired sessions
func CleanupSessions(maxLifetime int64) error {
_, err := x.Where("expiry <= ?", timeutil.TimeStampNow().Add(-maxLifetime)).Delete(&Session{})
_, err := x.Where("created_unix <= ?", timeutil.TimeStampNow().Add(-maxLifetime)).Delete(&Session{})
return err
}

View File

@@ -8,11 +8,8 @@ import (
"fmt"
migration "code.gitea.io/gitea/modules/migrations/base"
"code.gitea.io/gitea/modules/secret"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
jsoniter "github.com/json-iterator/go"
"xorm.io/builder"
@@ -113,24 +110,6 @@ func (task *Task) MigrateConfig() (*migration.MigrateOptions, error) {
if err != nil {
return nil, err
}
// decrypt credentials
if opts.CloneAddrEncrypted != "" {
if opts.CloneAddr, err = secret.DecryptSecret(setting.SecretKey, opts.CloneAddrEncrypted); err != nil {
return nil, err
}
}
if opts.AuthPasswordEncrypted != "" {
if opts.AuthPassword, err = secret.DecryptSecret(setting.SecretKey, opts.AuthPasswordEncrypted); err != nil {
return nil, err
}
}
if opts.AuthTokenEncrypted != "" {
if opts.AuthToken, err = secret.DecryptSecret(setting.SecretKey, opts.AuthTokenEncrypted); err != nil {
return nil, err
}
}
return &opts, nil
}
return nil, fmt.Errorf("Task type is %s, not Migrate Repo", task.Type.Name())
@@ -226,31 +205,12 @@ func createTask(e Engine, task *Task) error {
func FinishMigrateTask(task *Task) error {
task.Status = structs.TaskStatusFinished
task.EndTime = timeutil.TimeStampNow()
// delete credentials when we're done, they're a liability.
conf, err := task.MigrateConfig()
if err != nil {
return err
}
conf.AuthPassword = ""
conf.AuthToken = ""
conf.CloneAddr = util.SanitizeURLCredentials(conf.CloneAddr, true)
conf.AuthPasswordEncrypted = ""
conf.AuthTokenEncrypted = ""
conf.CloneAddrEncrypted = ""
json := jsoniter.ConfigCompatibleWithStandardLibrary
confBytes, err := json.Marshal(conf)
if err != nil {
return err
}
task.PayloadContent = string(confBytes)
sess := x.NewSession()
defer sess.Close()
if err := sess.Begin(); err != nil {
return err
}
if _, err := sess.ID(task.ID).Cols("status", "end_time", "payload_content").Update(task); err != nil {
if _, err := sess.ID(task.ID).Cols("status", "end_time").Update(task); err != nil {
return err
}

View File

@@ -57,15 +57,9 @@ func GetAccessTokenBySHA(token string) (*AccessToken, error) {
if token == "" {
return nil, ErrAccessTokenEmpty{}
}
// A token is defined as being SHA1 sum these are 40 hexadecimal bytes long
if len(token) != 40 {
if len(token) < 8 {
return nil, ErrAccessTokenNotExist{token}
}
for _, x := range []byte(token) {
if x < '0' || (x > '9' && x < 'a') || x > 'f' {
return nil, ErrAccessTokenNotExist{token}
}
}
var tokens []AccessToken
lastEight := token[len(token)-8:]
err := x.Table(&AccessToken{}).Where("token_last_eight = ?", lastEight).Find(&tokens)

View File

@@ -239,10 +239,10 @@ func (u *User) GetEmail() string {
return u.Email
}
// GetAllUsers returns a slice of all individual users found in DB.
// GetAllUsers returns a slice of all users found in DB.
func GetAllUsers() ([]*User, error) {
users := make([]*User, 0)
return users, x.OrderBy("id").Where("type = ?", UserTypeIndividual).Find(&users)
return users, x.OrderBy("id").Find(&users)
}
// IsLocal returns true if user login type is LoginPlain.

View File

@@ -82,9 +82,6 @@ func (u *User) RealSizedAvatarLink(size int) string {
if u.Avatar == "" {
return DefaultAvatarLink()
}
if size > 0 {
return setting.AppSubURL + "/avatars/" + u.Avatar + "?size=" + strconv.Itoa(size)
}
return setting.AppSubURL + "/avatars/" + u.Avatar
case setting.DisableGravatar, setting.OfflineMode:
if u.Avatar == "" {
@@ -92,9 +89,7 @@ func (u *User) RealSizedAvatarLink(size int) string {
log.Error("GenerateRandomAvatar: %v", err)
}
}
if size > 0 {
return setting.AppSubURL + "/avatars/" + u.Avatar + "?size=" + strconv.Itoa(size)
}
return setting.AppSubURL + "/avatars/" + u.Avatar
}
return SizedAvatarLink(u.AvatarEmail, size)

View File

@@ -1,70 +0,0 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package analyze
import (
"regexp"
"sort"
"strings"
"github.com/go-enry/go-enry/v2/data"
)
var isVendorRegExp *regexp.Regexp
func init() {
matchers := data.VendorMatchers
caretStrings := make([]string, 0, 10)
caretShareStrings := make([]string, 0, 10)
matcherStrings := make([]string, 0, len(matchers))
for _, matcher := range matchers {
str := matcher.String()
if str[0] == '^' {
caretStrings = append(caretStrings, str[1:])
} else if str[0:5] == "(^|/)" {
caretShareStrings = append(caretShareStrings, str[5:])
} else {
matcherStrings = append(matcherStrings, str)
}
}
sort.Strings(caretShareStrings)
sort.Strings(caretStrings)
sort.Strings(matcherStrings)
sb := &strings.Builder{}
sb.WriteString("(?:^(?:")
sb.WriteString(caretStrings[0])
for _, matcher := range caretStrings[1:] {
sb.WriteString(")|(?:")
sb.WriteString(matcher)
}
sb.WriteString("))")
sb.WriteString("|")
sb.WriteString("(?:(?:^|/)(?:")
sb.WriteString(caretShareStrings[0])
for _, matcher := range caretShareStrings[1:] {
sb.WriteString(")|(?:")
sb.WriteString(matcher)
}
sb.WriteString("))")
sb.WriteString("|")
sb.WriteString("(?:")
sb.WriteString(matcherStrings[0])
for _, matcher := range matcherStrings[1:] {
sb.WriteString(")|(?:")
sb.WriteString(matcher)
}
sb.WriteString(")")
combined := sb.String()
isVendorRegExp = regexp.MustCompile(combined)
}
// IsVendor returns whether or not path is a vendor path.
func IsVendor(path string) bool {
return isVendorRegExp.MatchString(path)
}

View File

@@ -1,42 +0,0 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package analyze
import "testing"
func TestIsVendor(t *testing.T) {
tests := []struct {
path string
want bool
}{
{"cache/", true},
{"random/cache/", true},
{"cache", false},
{"dependencies/", true},
{"Dependencies/", true},
{"dependency/", false},
{"dist/", true},
{"dist", false},
{"random/dist/", true},
{"random/dist", false},
{"deps/", true},
{"configure", true},
{"a/configure", true},
{"config.guess", true},
{"config.guess/", false},
{".vscode/", true},
{"doc/_build/", true},
{"a/docs/_build/", true},
{"a/dasdocs/_build-vsdoc.js", true},
{"a/dasdocs/_build-vsdoc.j", false},
}
for _, tt := range tests {
t.Run(tt.path, func(t *testing.T) {
if got := IsVendor(tt.path); got != tt.want {
t.Errorf("IsVendor() = %v, want %v", got, tt.want)
}
})
}
}

View File

@@ -21,7 +21,6 @@ import (
"strings"
"time"
"unicode"
"unicode/utf8"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
@@ -214,19 +213,19 @@ func EllipsisString(str string, length int) string {
if length <= 3 {
return "..."
}
if utf8.RuneCountInString(str) <= length {
if len(str) <= length {
return str
}
return string([]rune(str)[:length-3]) + "..."
return str[:length-3] + "..."
}
// TruncateString returns a truncated string with the given limit;
// it returns the input string if the limit is not reached.
func TruncateString(str string, limit int) string {
if utf8.RuneCountInString(str) < limit {
if len(str) < limit {
return str
}
return string([]rune(str)[:limit])
return str[:limit]
}
// StringsToInt64s converts a slice of string to a slice of int64.

View File

@@ -170,10 +170,6 @@ func TestEllipsisString(t *testing.T) {
assert.Equal(t, "fo...", EllipsisString("foobar", 5))
assert.Equal(t, "foobar", EllipsisString("foobar", 6))
assert.Equal(t, "foobar", EllipsisString("foobar", 10))
assert.Equal(t, "测...", EllipsisString("测试文本一二三四", 4))
assert.Equal(t, "测试...", EllipsisString("测试文本一二三四", 5))
assert.Equal(t, "测试文...", EllipsisString("测试文本一二三四", 6))
assert.Equal(t, "测试文本一二三四", EllipsisString("测试文本一二三四", 10))
}
func TestTruncateString(t *testing.T) {
@@ -185,10 +181,6 @@ func TestTruncateString(t *testing.T) {
assert.Equal(t, "fooba", TruncateString("foobar", 5))
assert.Equal(t, "foobar", TruncateString("foobar", 6))
assert.Equal(t, "foobar", TruncateString("foobar", 7))
assert.Equal(t, "测试文本", TruncateString("测试文本一二三四", 4))
assert.Equal(t, "测试文本一", TruncateString("测试文本一二三四", 5))
assert.Equal(t, "测试文本一二", TruncateString("测试文本一二三四", 6))
assert.Equal(t, "测试文本一二三", TruncateString("测试文本一二三四", 7))
}
func TestStringsToInt64s(t *testing.T) {

View File

@@ -6,9 +6,9 @@
package context
import (
"context"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"path"
"strings"
@@ -394,233 +394,239 @@ func RepoIDAssignment() func(ctx *Context) {
}
// RepoAssignment returns a middleware to handle repository assignment
func RepoAssignment(ctx *Context) (cancel context.CancelFunc) {
var (
owner *models.User
err error
)
func RepoAssignment() func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
var (
owner *models.User
err error
ctx = GetContext(req)
)
userName := ctx.Params(":username")
repoName := ctx.Params(":reponame")
repoName = strings.TrimSuffix(repoName, ".git")
userName := ctx.Params(":username")
repoName := ctx.Params(":reponame")
repoName = strings.TrimSuffix(repoName, ".git")
// Check if the user is the same as the repository owner
if ctx.IsSigned && ctx.User.LowerName == strings.ToLower(userName) {
owner = ctx.User
} else {
owner, err = models.GetUserByName(userName)
if err != nil {
if models.IsErrUserNotExist(err) {
if ctx.Query("go-get") == "1" {
EarlyResponseForGoGetMeta(ctx)
// Check if the user is the same as the repository owner
if ctx.IsSigned && ctx.User.LowerName == strings.ToLower(userName) {
owner = ctx.User
} else {
owner, err = models.GetUserByName(userName)
if err != nil {
if models.IsErrUserNotExist(err) {
if ctx.Query("go-get") == "1" {
EarlyResponseForGoGetMeta(ctx)
return
}
ctx.NotFound("GetUserByName", nil)
} else {
ctx.ServerError("GetUserByName", err)
}
return
}
ctx.NotFound("GetUserByName", nil)
} else {
ctx.ServerError("GetUserByName", err)
}
return
}
}
ctx.Repo.Owner = owner
ctx.Data["Username"] = ctx.Repo.Owner.Name
ctx.Repo.Owner = owner
ctx.Data["Username"] = ctx.Repo.Owner.Name
// Get repository.
repo, err := models.GetRepositoryByName(owner.ID, repoName)
if err != nil {
if models.IsErrRepoNotExist(err) {
redirectRepoID, err := models.LookupRepoRedirect(owner.ID, repoName)
// Get repository.
repo, err := models.GetRepositoryByName(owner.ID, repoName)
if err != nil {
if models.IsErrRepoNotExist(err) {
redirectRepoID, err := models.LookupRepoRedirect(owner.ID, repoName)
if err == nil {
RedirectToRepo(ctx, redirectRepoID)
} else if models.IsErrRepoRedirectNotExist(err) {
if ctx.Query("go-get") == "1" {
EarlyResponseForGoGetMeta(ctx)
return
}
ctx.NotFound("GetRepositoryByName", nil)
} else {
ctx.ServerError("LookupRepoRedirect", err)
}
} else {
ctx.ServerError("GetRepositoryByName", err)
}
return
}
repo.Owner = owner
repoAssignment(ctx, repo)
if ctx.Written() {
return
}
ctx.Repo.RepoLink = repo.Link()
ctx.Data["RepoLink"] = ctx.Repo.RepoLink
ctx.Data["RepoRelPath"] = ctx.Repo.Owner.Name + "/" + ctx.Repo.Repository.Name
unit, err := ctx.Repo.Repository.GetUnit(models.UnitTypeExternalTracker)
if err == nil {
RedirectToRepo(ctx, redirectRepoID)
} else if models.IsErrRepoRedirectNotExist(err) {
if ctx.Query("go-get") == "1" {
EarlyResponseForGoGetMeta(ctx)
ctx.Data["RepoExternalIssuesLink"] = unit.ExternalTrackerConfig().ExternalTrackerURL
}
ctx.Data["NumTags"], err = models.GetReleaseCountByRepoID(ctx.Repo.Repository.ID, models.FindReleasesOptions{
IncludeTags: true,
})
if err != nil {
ctx.ServerError("GetReleaseCountByRepoID", err)
return
}
ctx.Data["NumReleases"], err = models.GetReleaseCountByRepoID(ctx.Repo.Repository.ID, models.FindReleasesOptions{})
if err != nil {
ctx.ServerError("GetReleaseCountByRepoID", err)
return
}
ctx.Data["Title"] = owner.Name + "/" + repo.Name
ctx.Data["Repository"] = repo
ctx.Data["Owner"] = ctx.Repo.Repository.Owner
ctx.Data["IsRepositoryOwner"] = ctx.Repo.IsOwner()
ctx.Data["IsRepositoryAdmin"] = ctx.Repo.IsAdmin()
ctx.Data["RepoOwnerIsOrganization"] = repo.Owner.IsOrganization()
ctx.Data["CanWriteCode"] = ctx.Repo.CanWrite(models.UnitTypeCode)
ctx.Data["CanWriteIssues"] = ctx.Repo.CanWrite(models.UnitTypeIssues)
ctx.Data["CanWritePulls"] = ctx.Repo.CanWrite(models.UnitTypePullRequests)
if ctx.Data["CanSignedUserFork"], err = ctx.Repo.Repository.CanUserFork(ctx.User); err != nil {
ctx.ServerError("CanUserFork", err)
return
}
ctx.Data["DisableSSH"] = setting.SSH.Disabled
ctx.Data["ExposeAnonSSH"] = setting.SSH.ExposeAnonymous
ctx.Data["DisableHTTP"] = setting.Repository.DisableHTTPGit
ctx.Data["RepoSearchEnabled"] = setting.Indexer.RepoIndexerEnabled
ctx.Data["CloneLink"] = repo.CloneLink()
ctx.Data["WikiCloneLink"] = repo.WikiCloneLink()
if ctx.IsSigned {
ctx.Data["IsWatchingRepo"] = models.IsWatching(ctx.User.ID, repo.ID)
ctx.Data["IsStaringRepo"] = models.IsStaring(ctx.User.ID, repo.ID)
}
if repo.IsFork {
RetrieveBaseRepo(ctx, repo)
if ctx.Written() {
return
}
ctx.NotFound("GetRepositoryByName", nil)
} else {
ctx.ServerError("LookupRepoRedirect", err)
}
} else {
ctx.ServerError("GetRepositoryByName", err)
}
return
if repo.IsGenerated() {
RetrieveTemplateRepo(ctx, repo)
if ctx.Written() {
return
}
}
// Disable everything when the repo is being created
if ctx.Repo.Repository.IsBeingCreated() {
ctx.Data["BranchName"] = ctx.Repo.Repository.DefaultBranch
return
}
gitRepo, err := git.OpenRepository(models.RepoPath(userName, repoName))
if err != nil {
ctx.ServerError("RepoAssignment Invalid repo "+models.RepoPath(userName, repoName), err)
return
}
ctx.Repo.GitRepo = gitRepo
// We opened it, we should close it
defer func() {
// If it's been set to nil then assume someone else has closed it.
if ctx.Repo.GitRepo != nil {
ctx.Repo.GitRepo.Close()
}
}()
// Stop at this point when the repo is empty.
if ctx.Repo.Repository.IsEmpty {
ctx.Data["BranchName"] = ctx.Repo.Repository.DefaultBranch
next.ServeHTTP(w, req)
return
}
tags, err := ctx.Repo.GitRepo.GetTags()
if err != nil {
ctx.ServerError("GetTags", err)
return
}
ctx.Data["Tags"] = tags
brs, _, err := ctx.Repo.GitRepo.GetBranches(0, 0)
if err != nil {
ctx.ServerError("GetBranches", err)
return
}
ctx.Data["Branches"] = brs
ctx.Data["BranchesCount"] = len(brs)
ctx.Data["TagName"] = ctx.Repo.TagName
// If no branch is selected, try the default one.
// If the default branch doesn't exist, fall back to some other branch.
if len(ctx.Repo.BranchName) == 0 {
if len(ctx.Repo.Repository.DefaultBranch) > 0 && gitRepo.IsBranchExist(ctx.Repo.Repository.DefaultBranch) {
ctx.Repo.BranchName = ctx.Repo.Repository.DefaultBranch
} else if len(brs) > 0 {
ctx.Repo.BranchName = brs[0]
}
}
ctx.Data["BranchName"] = ctx.Repo.BranchName
ctx.Data["CommitID"] = ctx.Repo.CommitID
// People who have push access or have forked repository can propose a new pull request.
canPush := ctx.Repo.CanWrite(models.UnitTypeCode) || (ctx.IsSigned && ctx.User.HasForkedRepo(ctx.Repo.Repository.ID))
canCompare := false
// Pull request is allowed if this is a fork repository
// and base repository accepts pull requests.
if repo.BaseRepo != nil && repo.BaseRepo.AllowsPulls() {
canCompare = true
ctx.Data["BaseRepo"] = repo.BaseRepo
ctx.Repo.PullRequest.BaseRepo = repo.BaseRepo
ctx.Repo.PullRequest.Allowed = canPush
ctx.Repo.PullRequest.HeadInfo = ctx.Repo.Owner.Name + ":" + ctx.Repo.BranchName
} else if repo.AllowsPulls() {
// Or, this is repository accepts pull requests between branches.
canCompare = true
ctx.Data["BaseRepo"] = repo
ctx.Repo.PullRequest.BaseRepo = repo
ctx.Repo.PullRequest.Allowed = canPush
ctx.Repo.PullRequest.SameRepo = true
ctx.Repo.PullRequest.HeadInfo = ctx.Repo.BranchName
}
ctx.Data["CanCompareOrPull"] = canCompare
ctx.Data["PullRequestCtx"] = ctx.Repo.PullRequest
if ctx.Repo.Repository.Status == models.RepositoryPendingTransfer {
repoTransfer, err := models.GetPendingRepositoryTransfer(ctx.Repo.Repository)
if err != nil {
ctx.ServerError("GetPendingRepositoryTransfer", err)
return
}
if err := repoTransfer.LoadAttributes(); err != nil {
ctx.ServerError("LoadRecipient", err)
return
}
ctx.Data["RepoTransfer"] = repoTransfer
if ctx.User != nil {
ctx.Data["CanUserAcceptTransfer"] = repoTransfer.CanUserAcceptTransfer(ctx.User)
}
}
if ctx.Query("go-get") == "1" {
ctx.Data["GoGetImport"] = ComposeGoGetImport(owner.Name, repo.Name)
prefix := setting.AppURL + path.Join(owner.Name, repo.Name, "src", "branch", ctx.Repo.BranchName)
ctx.Data["GoDocDirectory"] = prefix + "{/dir}"
ctx.Data["GoDocFile"] = prefix + "{/dir}/{file}#L{line}"
}
next.ServeHTTP(w, req)
})
}
repo.Owner = owner
repoAssignment(ctx, repo)
if ctx.Written() {
return
}
ctx.Repo.RepoLink = repo.Link()
ctx.Data["RepoLink"] = ctx.Repo.RepoLink
ctx.Data["RepoRelPath"] = ctx.Repo.Owner.Name + "/" + ctx.Repo.Repository.Name
unit, err := ctx.Repo.Repository.GetUnit(models.UnitTypeExternalTracker)
if err == nil {
ctx.Data["RepoExternalIssuesLink"] = unit.ExternalTrackerConfig().ExternalTrackerURL
}
ctx.Data["NumTags"], err = models.GetReleaseCountByRepoID(ctx.Repo.Repository.ID, models.FindReleasesOptions{
IncludeTags: true,
})
if err != nil {
ctx.ServerError("GetReleaseCountByRepoID", err)
return
}
ctx.Data["NumReleases"], err = models.GetReleaseCountByRepoID(ctx.Repo.Repository.ID, models.FindReleasesOptions{})
if err != nil {
ctx.ServerError("GetReleaseCountByRepoID", err)
return
}
ctx.Data["Title"] = owner.Name + "/" + repo.Name
ctx.Data["Repository"] = repo
ctx.Data["Owner"] = ctx.Repo.Repository.Owner
ctx.Data["IsRepositoryOwner"] = ctx.Repo.IsOwner()
ctx.Data["IsRepositoryAdmin"] = ctx.Repo.IsAdmin()
ctx.Data["RepoOwnerIsOrganization"] = repo.Owner.IsOrganization()
ctx.Data["CanWriteCode"] = ctx.Repo.CanWrite(models.UnitTypeCode)
ctx.Data["CanWriteIssues"] = ctx.Repo.CanWrite(models.UnitTypeIssues)
ctx.Data["CanWritePulls"] = ctx.Repo.CanWrite(models.UnitTypePullRequests)
if ctx.Data["CanSignedUserFork"], err = ctx.Repo.Repository.CanUserFork(ctx.User); err != nil {
ctx.ServerError("CanUserFork", err)
return
}
ctx.Data["DisableSSH"] = setting.SSH.Disabled
ctx.Data["ExposeAnonSSH"] = setting.SSH.ExposeAnonymous
ctx.Data["DisableHTTP"] = setting.Repository.DisableHTTPGit
ctx.Data["RepoSearchEnabled"] = setting.Indexer.RepoIndexerEnabled
ctx.Data["CloneLink"] = repo.CloneLink()
ctx.Data["WikiCloneLink"] = repo.WikiCloneLink()
if ctx.IsSigned {
ctx.Data["IsWatchingRepo"] = models.IsWatching(ctx.User.ID, repo.ID)
ctx.Data["IsStaringRepo"] = models.IsStaring(ctx.User.ID, repo.ID)
}
if repo.IsFork {
RetrieveBaseRepo(ctx, repo)
if ctx.Written() {
return
}
}
if repo.IsGenerated() {
RetrieveTemplateRepo(ctx, repo)
if ctx.Written() {
return
}
}
// Disable everything when the repo is being created
if ctx.Repo.Repository.IsBeingCreated() {
ctx.Data["BranchName"] = ctx.Repo.Repository.DefaultBranch
return
}
gitRepo, err := git.OpenRepository(models.RepoPath(userName, repoName))
if err != nil {
ctx.ServerError("RepoAssignment Invalid repo "+models.RepoPath(userName, repoName), err)
return
}
ctx.Repo.GitRepo = gitRepo
// We opened it, we should close it
cancel = func() {
// If it's been set to nil then assume someone else has closed it.
if ctx.Repo.GitRepo != nil {
ctx.Repo.GitRepo.Close()
}
}
// Stop at this point when the repo is empty.
if ctx.Repo.Repository.IsEmpty {
ctx.Data["BranchName"] = ctx.Repo.Repository.DefaultBranch
return
}
tags, err := ctx.Repo.GitRepo.GetTags()
if err != nil {
ctx.ServerError("GetTags", err)
return
}
ctx.Data["Tags"] = tags
brs, _, err := ctx.Repo.GitRepo.GetBranches(0, 0)
if err != nil {
ctx.ServerError("GetBranches", err)
return
}
ctx.Data["Branches"] = brs
ctx.Data["BranchesCount"] = len(brs)
ctx.Data["TagName"] = ctx.Repo.TagName
	// If no branch is selected, try the default one.
	// If the default branch doesn't exist, fall back to some other branch.
if len(ctx.Repo.BranchName) == 0 {
if len(ctx.Repo.Repository.DefaultBranch) > 0 && gitRepo.IsBranchExist(ctx.Repo.Repository.DefaultBranch) {
ctx.Repo.BranchName = ctx.Repo.Repository.DefaultBranch
} else if len(brs) > 0 {
ctx.Repo.BranchName = brs[0]
}
}
ctx.Data["BranchName"] = ctx.Repo.BranchName
ctx.Data["CommitID"] = ctx.Repo.CommitID
// People who have push access or have forked repository can propose a new pull request.
canPush := ctx.Repo.CanWrite(models.UnitTypeCode) || (ctx.IsSigned && ctx.User.HasForkedRepo(ctx.Repo.Repository.ID))
canCompare := false
// Pull request is allowed if this is a fork repository
// and base repository accepts pull requests.
if repo.BaseRepo != nil && repo.BaseRepo.AllowsPulls() {
canCompare = true
ctx.Data["BaseRepo"] = repo.BaseRepo
ctx.Repo.PullRequest.BaseRepo = repo.BaseRepo
ctx.Repo.PullRequest.Allowed = canPush
ctx.Repo.PullRequest.HeadInfo = ctx.Repo.Owner.Name + ":" + ctx.Repo.BranchName
} else if repo.AllowsPulls() {
	// Or, this repository accepts pull requests between branches.
canCompare = true
ctx.Data["BaseRepo"] = repo
ctx.Repo.PullRequest.BaseRepo = repo
ctx.Repo.PullRequest.Allowed = canPush
ctx.Repo.PullRequest.SameRepo = true
ctx.Repo.PullRequest.HeadInfo = ctx.Repo.BranchName
}
ctx.Data["CanCompareOrPull"] = canCompare
ctx.Data["PullRequestCtx"] = ctx.Repo.PullRequest
if ctx.Repo.Repository.Status == models.RepositoryPendingTransfer {
repoTransfer, err := models.GetPendingRepositoryTransfer(ctx.Repo.Repository)
if err != nil {
ctx.ServerError("GetPendingRepositoryTransfer", err)
return
}
if err := repoTransfer.LoadAttributes(); err != nil {
ctx.ServerError("LoadRecipient", err)
return
}
ctx.Data["RepoTransfer"] = repoTransfer
if ctx.User != nil {
ctx.Data["CanUserAcceptTransfer"] = repoTransfer.CanUserAcceptTransfer(ctx.User)
}
}
if ctx.Query("go-get") == "1" {
ctx.Data["GoGetImport"] = ComposeGoGetImport(owner.Name, repo.Name)
prefix := setting.AppURL + path.Join(owner.Name, repo.Name, "src", "branch", ctx.Repo.BranchName)
ctx.Data["GoDocDirectory"] = prefix + "{/dir}"
ctx.Data["GoDocFile"] = prefix + "{/dir}/{file}#L{line}"
}
return
}
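When a request carries ?go-get=1, the handler above exposes GoGetImport, GoDocDirectory and GoDocFile so the rendered page can emit the meta tags the go tool and documentation tools look for. A hedged sketch of such a response; the hostname, owner, repository and branch are placeholders, and the exact template is not part of this diff:

package main

import (
	"fmt"
	"net/http"
)

// goGetMeta answers `go get` discovery requests with go-import and go-source
// meta tags built from placeholder values.
func goGetMeta(w http.ResponseWriter, req *http.Request) {
	if req.URL.Query().Get("go-get") != "1" {
		http.NotFound(w, req)
		return
	}
	const appURL = "https://git.example.com/" // placeholder instance URL
	owner, repo, branch := "owner", "repo", "main"
	importPath := "git.example.com/" + owner + "/" + repo
	prefix := appURL + owner + "/" + repo + "/src/branch/" + branch
	fmt.Fprintf(w, `<html><head>
<meta name="go-import" content="%[1]s git %[2]s%[3]s/%[4]s.git">
<meta name="go-source" content="%[1]s _ %[5]s{/dir} %[5]s{/dir}/{file}#L{line}">
</head><body>go get %[1]s</body></html>`, importPath, appURL, owner, repo, prefix)
}

func main() {
	http.HandleFunc("/", goGetMeta)
	_ = http.ListenAndServe("127.0.0.1:8080", nil) // placeholder bind address
}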
// RepoRefType type of repo reference
@@ -645,7 +651,7 @@ const (
// RepoRef handles repository reference names when the ref name is not
// explicitly given
func RepoRef() func(*Context) context.CancelFunc {
func RepoRef() func(http.Handler) http.Handler {
// since no ref name is explicitly specified, ok to just use branch
return RepoRefByType(RepoRefBranch)
}
@@ -724,129 +730,130 @@ func getRefName(ctx *Context, pathType RepoRefType) string {
// RepoRefByType handles repository reference name for a specific type
// of repository reference
func RepoRefByType(refType RepoRefType, ignoreNotExistErr ...bool) func(*Context) context.CancelFunc {
return func(ctx *Context) (cancel context.CancelFunc) {
// Empty repository does not have reference information.
if ctx.Repo.Repository.IsEmpty {
return
}
var (
refName string
err error
)
if ctx.Repo.GitRepo == nil {
repoPath := models.RepoPath(ctx.Repo.Owner.Name, ctx.Repo.Repository.Name)
ctx.Repo.GitRepo, err = git.OpenRepository(repoPath)
if err != nil {
ctx.ServerError("RepoRef Invalid repo "+repoPath, err)
func RepoRefByType(refType RepoRefType) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
ctx := GetContext(req)
// Empty repository does not have reference information.
if ctx.Repo.Repository.IsEmpty {
return
}
// We opened it, we should close it
cancel = func() {
// If it's been set to nil then assume someone else has closed it.
if ctx.Repo.GitRepo != nil {
ctx.Repo.GitRepo.Close()
}
}
}
// Get default branch.
if len(ctx.Params("*")) == 0 {
refName = ctx.Repo.Repository.DefaultBranch
ctx.Repo.BranchName = refName
if !ctx.Repo.GitRepo.IsBranchExist(refName) {
brs, _, err := ctx.Repo.GitRepo.GetBranches(0, 0)
var (
refName string
err error
)
if ctx.Repo.GitRepo == nil {
repoPath := models.RepoPath(ctx.Repo.Owner.Name, ctx.Repo.Repository.Name)
ctx.Repo.GitRepo, err = git.OpenRepository(repoPath)
if err != nil {
ctx.ServerError("GetBranches", err)
return
} else if len(brs) == 0 {
err = fmt.Errorf("No branches in non-empty repository %s",
ctx.Repo.GitRepo.Path)
ctx.ServerError("GetBranches", err)
ctx.ServerError("RepoRef Invalid repo "+repoPath, err)
return
}
refName = brs[0]
// We opened it, we should close it
defer func() {
// If it's been set to nil then assume someone else has closed it.
if ctx.Repo.GitRepo != nil {
ctx.Repo.GitRepo.Close()
}
}()
}
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(refName)
if err != nil {
ctx.ServerError("GetBranchCommit", err)
return
}
ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
ctx.Repo.IsViewBranch = true
} else {
refName = getRefName(ctx, refType)
ctx.Repo.BranchName = refName
if refType.RefTypeIncludesBranches() && ctx.Repo.GitRepo.IsBranchExist(refName) {
ctx.Repo.IsViewBranch = true
// Get default branch.
if len(ctx.Params("*")) == 0 {
refName = ctx.Repo.Repository.DefaultBranch
ctx.Repo.BranchName = refName
if !ctx.Repo.GitRepo.IsBranchExist(refName) {
brs, _, err := ctx.Repo.GitRepo.GetBranches(0, 0)
if err != nil {
ctx.ServerError("GetBranches", err)
return
} else if len(brs) == 0 {
err = fmt.Errorf("No branches in non-empty repository %s",
ctx.Repo.GitRepo.Path)
ctx.ServerError("GetBranches", err)
return
}
refName = brs[0]
}
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(refName)
if err != nil {
ctx.ServerError("GetBranchCommit", err)
return
}
ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
ctx.Repo.IsViewBranch = true
} else if refType.RefTypeIncludesTags() && ctx.Repo.GitRepo.IsTagExist(refName) {
ctx.Repo.IsViewTag = true
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetTagCommit(refName)
if err != nil {
ctx.ServerError("GetTagCommit", err)
return
}
ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
} else if len(refName) >= 7 && len(refName) <= 40 {
ctx.Repo.IsViewCommit = true
ctx.Repo.CommitID = refName
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetCommit(refName)
if err != nil {
ctx.NotFound("GetCommit", err)
return
}
	// If a short commit ID was used, add a canonical Link header
if len(refName) < 40 {
ctx.Header().Set("Link", fmt.Sprintf("<%s>; rel=\"canonical\"",
util.URLJoin(setting.AppURL, strings.Replace(ctx.Req.URL.RequestURI(), refName, ctx.Repo.Commit.ID.String(), 1))))
}
} else {
if len(ignoreNotExistErr) > 0 && ignoreNotExistErr[0] {
refName = getRefName(ctx, refType)
ctx.Repo.BranchName = refName
if refType.RefTypeIncludesBranches() && ctx.Repo.GitRepo.IsBranchExist(refName) {
ctx.Repo.IsViewBranch = true
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(refName)
if err != nil {
ctx.ServerError("GetBranchCommit", err)
return
}
ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
} else if refType.RefTypeIncludesTags() && ctx.Repo.GitRepo.IsTagExist(refName) {
ctx.Repo.IsViewTag = true
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetTagCommit(refName)
if err != nil {
ctx.ServerError("GetTagCommit", err)
return
}
ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
} else if len(refName) >= 7 && len(refName) <= 40 {
ctx.Repo.IsViewCommit = true
ctx.Repo.CommitID = refName
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetCommit(refName)
if err != nil {
ctx.NotFound("GetCommit", err)
return
}
	// If a short commit ID was used, add a canonical Link header
if len(refName) < 40 {
ctx.Header().Set("Link", fmt.Sprintf("<%s>; rel=\"canonical\"",
util.URLJoin(setting.AppURL, strings.Replace(ctx.Req.URL.RequestURI(), refName, ctx.Repo.Commit.ID.String(), 1))))
}
} else {
ctx.NotFound("RepoRef invalid repo", fmt.Errorf("branch or tag not exist: %s", refName))
return
}
if refType == RepoRefLegacy {
// redirect from old URL scheme to new URL scheme
ctx.Redirect(path.Join(
setting.AppSubURL,
strings.TrimSuffix(ctx.Req.URL.Path, ctx.Params("*")),
ctx.Repo.BranchNameSubURL(),
ctx.Repo.TreePath))
return
}
ctx.NotFound("RepoRef invalid repo", fmt.Errorf("branch or tag not exist: %s", refName))
return
}
if refType == RepoRefLegacy {
// redirect from old URL scheme to new URL scheme
ctx.Redirect(path.Join(
setting.AppSubURL,
strings.TrimSuffix(ctx.Req.URL.Path, ctx.Params("*")),
ctx.Repo.BranchNameSubURL(),
ctx.Repo.TreePath))
ctx.Data["BranchName"] = ctx.Repo.BranchName
ctx.Data["BranchNameSubURL"] = ctx.Repo.BranchNameSubURL()
ctx.Data["CommitID"] = ctx.Repo.CommitID
ctx.Data["TreePath"] = ctx.Repo.TreePath
ctx.Data["IsViewBranch"] = ctx.Repo.IsViewBranch
ctx.Data["IsViewTag"] = ctx.Repo.IsViewTag
ctx.Data["IsViewCommit"] = ctx.Repo.IsViewCommit
ctx.Data["CanCreateBranch"] = ctx.Repo.CanCreateBranch()
ctx.Repo.CommitsCount, err = ctx.Repo.GetCommitsCount()
if err != nil {
ctx.ServerError("GetCommitsCount", err)
return
}
}
ctx.Data["CommitsCount"] = ctx.Repo.CommitsCount
ctx.Data["BranchName"] = ctx.Repo.BranchName
ctx.Data["BranchNameSubURL"] = ctx.Repo.BranchNameSubURL()
ctx.Data["CommitID"] = ctx.Repo.CommitID
ctx.Data["TreePath"] = ctx.Repo.TreePath
ctx.Data["IsViewBranch"] = ctx.Repo.IsViewBranch
ctx.Data["IsViewTag"] = ctx.Repo.IsViewTag
ctx.Data["IsViewCommit"] = ctx.Repo.IsViewCommit
ctx.Data["CanCreateBranch"] = ctx.Repo.CanCreateBranch()
ctx.Repo.CommitsCount, err = ctx.Repo.GetCommitsCount()
if err != nil {
ctx.ServerError("GetCommitsCount", err)
return
}
ctx.Data["CommitsCount"] = ctx.Repo.CommitsCount
return
next.ServeHTTP(w, req)
})
}
}
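Both middleware shapes appear in the hunks above: one returns a per-request context.CancelFunc, the other wraps an http.Handler and calls next.ServeHTTP once the repository context is prepared. A minimal sketch of the wrapping form, with a hypothetical requireRepo check standing in for the repository lookup (none of these names come from the diff):

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

// requireRepo has the same shape as the handlers above: it returns a
// func(http.Handler) http.Handler so a router can chain it before the final
// handler, and it stops the chain with an early return on failure.
func requireRepo(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		if req.URL.Query().Get("repo") == "" { // stand-in for the real repository lookup
			http.Error(w, "repository not specified", http.StatusNotFound)
			return
		}
		next.ServeHTTP(w, req) // hand off once the context is ready
	})
}

func main() {
	final := http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		fmt.Fprintln(w, "repo page")
	})
	srv := httptest.NewServer(requireRepo(final))
	defer srv.Close()

	resp, err := http.Get(srv.URL + "/?repo=gitea")
	if err == nil {
		fmt.Println(resp.Status) // 200 OK; without ?repo= it would be 404 Not Found
		resp.Body.Close()
	}
}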

View File

@@ -4,9 +4,7 @@
package context
import (
"net/http"
)
import "net/http"
// ResponseWriter represents a response writer for HTTP
type ResponseWriter interface {
@@ -49,7 +47,7 @@ func (r *Response) Write(bs []byte) (int, error) {
return size, err
}
if r.status == 0 {
r.status = http.StatusOK
r.WriteHeader(200)
}
return size, nil
}
@@ -62,10 +60,8 @@ func (r *Response) WriteHeader(statusCode int) {
}
r.beforeExecuted = true
}
if r.status == 0 {
r.status = statusCode
r.ResponseWriter.WriteHeader(statusCode)
}
r.status = statusCode
r.ResponseWriter.WriteHeader(statusCode)
}
// Flush flush cached data

View File

@@ -47,11 +47,6 @@ func ToNotificationThread(n *models.Notification) *api.NotificationThread {
if err == nil && comment != nil {
result.Subject.LatestCommentURL = comment.APIURL()
}
pr, _ := n.Issue.GetPullRequest()
if pr != nil && pr.HasMerged {
result.Subject.State = "merged"
}
}
case models.NotificationSourceCommit:
result.Subject = &api.NotificationSubject{

View File

@@ -85,17 +85,18 @@ func ToPullReviewCommentList(review *models.Review, doer *models.User) ([]*api.P
apiComments := make([]*api.PullReviewComment, 0, len(review.CodeComments))
auth := false
if doer != nil {
auth = doer.IsAdmin || doer.ID == review.ReviewerID
}
for _, lines := range review.CodeComments {
for _, comments := range lines {
for _, comment := range comments {
auth := false
if doer != nil {
auth = doer.IsAdmin || doer.ID == comment.Poster.ID
}
apiComment := &api.PullReviewComment{
ID: comment.ID,
Body: comment.Content,
Reviewer: ToUser(comment.Poster, doer != nil, auth),
Reviewer: ToUser(review.Reviewer, doer != nil, auth),
ReviewID: review.ID,
Created: comment.CreatedUnix.AsTime(),
Updated: comment.UpdatedUnix.AsTime(),

View File

@@ -89,7 +89,7 @@ func innerToRepo(repo *models.Repository, mode models.AccessMode, isParent bool)
return nil
}
numReleases, _ := models.GetReleaseCountByRepoID(repo.ID, models.FindReleasesOptions{IncludeDrafts: false, IncludeTags: false})
numReleases, _ := models.GetReleaseCountByRepoID(repo.ID, models.FindReleasesOptions{IncludeDrafts: false, IncludeTags: true})
mirrorInterval := ""
if repo.IsMirror {

View File

@@ -23,13 +23,13 @@ func checkDBConsistency(logger log.Logger, autofix bool) error {
// find labels without existing repo or org
count, err := models.CountOrphanedLabels()
if err != nil {
logger.Critical("Error: %v whilst counting orphaned labels", err)
logger.Critical("Error: %v whilst counting orphaned labels")
return err
}
if count > 0 {
if autofix {
if err = models.DeleteOrphanedLabels(); err != nil {
logger.Critical("Error: %v whilst deleting orphaned labels", err)
logger.Critical("Error: %v whilst deleting orphaned labels")
return err
}
logger.Info("%d labels without existing repository/organisation deleted", count)
@@ -41,13 +41,13 @@ func checkDBConsistency(logger log.Logger, autofix bool) error {
// find IssueLabels without existing label
count, err = models.CountOrphanedIssueLabels()
if err != nil {
logger.Critical("Error: %v whilst counting orphaned issue_labels", err)
logger.Critical("Error: %v whilst counting orphaned issue_labels")
return err
}
if count > 0 {
if autofix {
if err = models.DeleteOrphanedIssueLabels(); err != nil {
logger.Critical("Error: %v whilst deleting orphaned issue_labels", err)
logger.Critical("Error: %v whilst deleting orphaned issue_labels")
return err
}
logger.Info("%d issue_labels without existing label deleted", count)
@@ -59,13 +59,13 @@ func checkDBConsistency(logger log.Logger, autofix bool) error {
// find issues without existing repository
count, err = models.CountOrphanedIssues()
if err != nil {
logger.Critical("Error: %v whilst counting orphaned issues", err)
logger.Critical("Error: %v whilst counting orphaned issues")
return err
}
if count > 0 {
if autofix {
if err = models.DeleteOrphanedIssues(); err != nil {
logger.Critical("Error: %v whilst deleting orphaned issues", err)
logger.Critical("Error: %v whilst deleting orphaned issues")
return err
}
logger.Info("%d issues without existing repository deleted", count)
@@ -77,13 +77,13 @@ func checkDBConsistency(logger log.Logger, autofix bool) error {
// find pulls without existing issues
count, err = models.CountOrphanedObjects("pull_request", "issue", "pull_request.issue_id=issue.id")
if err != nil {
logger.Critical("Error: %v whilst counting orphaned objects", err)
logger.Critical("Error: %v whilst counting orphaned objects")
return err
}
if count > 0 {
if autofix {
if err = models.DeleteOrphanedObjects("pull_request", "issue", "pull_request.issue_id=issue.id"); err != nil {
logger.Critical("Error: %v whilst deleting orphaned objects", err)
logger.Critical("Error: %v whilst deleting orphaned objects")
return err
}
logger.Info("%d pull requests without existing issue deleted", count)
@@ -95,13 +95,13 @@ func checkDBConsistency(logger log.Logger, autofix bool) error {
// find tracked times without existing issues/pulls
count, err = models.CountOrphanedObjects("tracked_time", "issue", "tracked_time.issue_id=issue.id")
if err != nil {
logger.Critical("Error: %v whilst counting orphaned objects", err)
logger.Critical("Error: %v whilst counting orphaned objects")
return err
}
if count > 0 {
if autofix {
if err = models.DeleteOrphanedObjects("tracked_time", "issue", "tracked_time.issue_id=issue.id"); err != nil {
logger.Critical("Error: %v whilst deleting orphaned objects", err)
logger.Critical("Error: %v whilst deleting orphaned objects")
return err
}
logger.Info("%d tracked times without existing issue deleted", count)
@@ -113,14 +113,14 @@ func checkDBConsistency(logger log.Logger, autofix bool) error {
// find null archived repositories
count, err = models.CountNullArchivedRepository()
if err != nil {
logger.Critical("Error: %v whilst counting null archived repositories", err)
logger.Critical("Error: %v whilst counting null archived repositories")
return err
}
if count > 0 {
if autofix {
updatedCount, err := models.FixNullArchivedRepository()
if err != nil {
logger.Critical("Error: %v whilst fixing null archived repositories", err)
logger.Critical("Error: %v whilst fixing null archived repositories")
return err
}
logger.Info("%d repositories with null is_archived updated", updatedCount)
@@ -132,14 +132,14 @@ func checkDBConsistency(logger log.Logger, autofix bool) error {
// find label comments with empty labels
count, err = models.CountCommentTypeLabelWithEmptyLabel()
if err != nil {
logger.Critical("Error: %v whilst counting label comments with empty labels", err)
logger.Critical("Error: %v whilst counting label comments with empty labels")
return err
}
if count > 0 {
if autofix {
updatedCount, err := models.FixCommentTypeLabelWithEmptyLabel()
if err != nil {
logger.Critical("Error: %v whilst removing label comments with empty labels", err)
logger.Critical("Error: %v whilst removing label comments with empty labels")
return err
}
logger.Info("%d label comments with empty labels removed", updatedCount)
@@ -191,14 +191,13 @@ func checkDBConsistency(logger log.Logger, autofix bool) error {
if setting.Database.UsePostgreSQL {
count, err = models.CountBadSequences()
if err != nil {
logger.Critical("Error: %v whilst checking sequence values", err)
return err
logger.Critical("Error: %v whilst checking sequence values")
}
if count > 0 {
if autofix {
err := models.FixBadSequences()
if err != nil {
logger.Critical("Error: %v whilst attempting to fix sequences", err)
logger.Critical("Error: %v whilst attempting to fix sequences")
return err
}
logger.Info("%d sequences updated", count)
@@ -208,60 +207,6 @@ func checkDBConsistency(logger log.Logger, autofix bool) error {
}
}
// find protected branches without existing repository
count, err = models.CountOrphanedObjects("protected_branch", "repository", "protected_branch.repo_id=repository.id")
if err != nil {
logger.Critical("Error: %v whilst counting orphaned objects", err)
return err
}
if count > 0 {
if autofix {
if err = models.DeleteOrphanedObjects("protected_branch", "repository", "protected_branch.repo_id=repository.id"); err != nil {
logger.Critical("Error: %v whilst deleting orphaned objects", err)
return err
}
logger.Info("%d protected branches without existing repository deleted", count)
} else {
logger.Warn("%d protected branches without existing repository", count)
}
}
// find deleted branches without existing repository
count, err = models.CountOrphanedObjects("deleted_branch", "repository", "deleted_branch.repo_id=repository.id")
if err != nil {
logger.Critical("Error: %v whilst counting orphaned objects", err)
return err
}
if count > 0 {
if autofix {
if err = models.DeleteOrphanedObjects("deleted_branch", "repository", "deleted_branch.repo_id=repository.id"); err != nil {
logger.Critical("Error: %v whilst deleting orphaned objects", err)
return err
}
logger.Info("%d deleted branches without existing repository deleted", count)
} else {
logger.Warn("%d deleted branches without existing repository", count)
}
}
// find LFS locks without existing repository
count, err = models.CountOrphanedObjects("lfs_lock", "repository", "lfs_lock.repo_id=repository.id")
if err != nil {
logger.Critical("Error: %v whilst counting orphaned objects", err)
return err
}
if count > 0 {
if autofix {
if err = models.DeleteOrphanedObjects("lfs_lock", "repository", "lfs_lock.repo_id=repository.id"); err != nil {
logger.Critical("Error: %v whilst deleting orphaned objects", err)
return err
}
logger.Info("%d LFS locks without existing repository deleted", count)
} else {
logger.Warn("%d LFS locks without existing repository", count)
}
}
return nil
}
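The doctor hunks above repeat one pattern: count child rows that have lost their parent via a join condition such as pull_request.issue_id=issue.id, then delete them when autofix is on or only warn otherwise. A hedged sketch of that pattern with database/sql; the SQL text, logging and helper names are illustrative, since the real models helpers are not shown in this diff:

package main

import (
	"database/sql"
	"fmt"
	"log"
)

// orphanQueries builds the two statements the count-then-fix pattern needs:
// count child rows with no matching parent, and delete those same rows.
func orphanQueries(table, refTable, joinCond string) (countQ, deleteQ string) {
	countQ = fmt.Sprintf("SELECT COUNT(*) FROM %s WHERE NOT EXISTS (SELECT 1 FROM %s WHERE %s)", table, refTable, joinCond)
	deleteQ = fmt.Sprintf("DELETE FROM %s WHERE NOT EXISTS (SELECT 1 FROM %s WHERE %s)", table, refTable, joinCond)
	return countQ, deleteQ
}

// checkOrphans is the count / warn / autofix loop over any opened *sql.DB.
func checkOrphans(db *sql.DB, autofix bool, table, refTable, joinCond string) error {
	countQ, deleteQ := orphanQueries(table, refTable, joinCond)
	var count int64
	if err := db.QueryRow(countQ).Scan(&count); err != nil {
		log.Printf("Error: %v whilst counting orphaned objects", err)
		return err
	}
	if count == 0 {
		return nil
	}
	if !autofix {
		log.Printf("%d orphaned %s rows found", count, table)
		return nil
	}
	if _, err := db.Exec(deleteQ); err != nil {
		log.Printf("Error: %v whilst deleting orphaned objects", err)
		return err
	}
	log.Printf("%d orphaned %s rows deleted", count, table)
	return nil
}

func main() {
	countQ, deleteQ := orphanQueries("pull_request", "issue", "pull_request.issue_id=issue.id")
	fmt.Println(countQ)
	fmt.Println(deleteQ)
	_ = checkOrphans // would run against a real *sql.DB with a driver registered
}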

View File

@@ -23,7 +23,7 @@ func checkDBVersion(logger log.Logger, autofix bool) error {
err = models.NewEngine(context.Background(), migrations.Migrate)
if err != nil {
logger.Critical("Error: %v during migration", err)
logger.Critical("Error: %v during migration")
}
return err
}

View File

@@ -6,7 +6,6 @@
package emoji
import (
"io"
"sort"
"strings"
"sync"
@@ -146,8 +145,6 @@ func (n *rememberSecondWriteWriter) Write(p []byte) (int, error) {
if n.writecount == 2 {
n.idx = n.pos
n.end = n.pos + len(p)
n.pos += len(p)
return len(p), io.EOF
}
n.pos += len(p)
return len(p), nil
@@ -158,8 +155,6 @@ func (n *rememberSecondWriteWriter) WriteString(s string) (int, error) {
if n.writecount == 2 {
n.idx = n.pos
n.end = n.pos + len(s)
n.pos += len(s)
return len(s), io.EOF
}
n.pos += len(s)
return len(s), nil

View File

@@ -51,7 +51,6 @@ type AuthenticationForm struct {
TLS bool
SkipVerify bool
PAMServiceName string
PAMEmailDomain string
Oauth2Provider string
Oauth2Key string
Oauth2Secret string

View File

@@ -149,18 +149,17 @@ headerLoop:
// constant hextable to help quickly convert between 20byte and 40byte hashes
const hextable = "0123456789abcdef"
// To40ByteSHA converts a 20-byte SHA into a 40-byte sha. Input and output can be the
// same 40 byte slice to support in place conversion without allocations.
// This is at least 100x quicker than hex.EncodeToString
// NB This requires that out is a 40-byte slice
func To40ByteSHA(sha, out []byte) []byte {
// to40ByteSHA converts a 20-byte SHA in a 40-byte slice into a 40-byte sha in place
// without allocations. This is at least 100x quicker than hex.EncodeToString
// NB This requires that sha is a 40-byte slice
func to40ByteSHA(sha []byte) []byte {
for i := 19; i >= 0; i-- {
v := sha[i]
vhi, vlo := v>>4, v&0x0f
shi, slo := hextable[vhi], hextable[vlo]
out[i*2], out[i*2+1] = shi, slo
sha[i*2], sha[i*2+1] = shi, slo
}
return out
return sha
}
// ParseTreeLineSkipMode reads an entry from a tree in a cat-file --batch stream
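A standalone sketch of the in-place 20-byte-to-40-byte hex conversion shown above, checked against hex.EncodeToString; the sample bytes and the surrounding harness are illustrative:

package main

import (
	"encoding/hex"
	"fmt"
)

const hextable = "0123456789abcdef"

// to40ByteSHA expands a 20-byte binary SHA-1 into its 40-character hex form.
// Iterating from the end lets input and output share one 40-byte slice, so no
// extra allocation is needed.
func to40ByteSHA(sha []byte) []byte {
	for i := 19; i >= 0; i-- {
		v := sha[i]
		vhi, vlo := v>>4, v&0x0f
		sha[i*2], sha[i*2+1] = hextable[vhi], hextable[vlo]
	}
	return sha
}

func main() {
	raw := []byte{
		0x6c, 0x49, 0x3f, 0xf7, 0x40, 0xf9, 0x38, 0x03, 0x90, 0xd5,
		0xc9, 0xdd, 0xef, 0x4a, 0xf1, 0x86, 0x97, 0xac, 0x93, 0x75,
	}
	buf := make([]byte, 40)
	copy(buf, raw) // the first 20 bytes carry the SHA, the rest is scratch space

	got := string(to40ByteSHA(buf))
	fmt.Println(got == hex.EncodeToString(raw), got)
}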

View File

@@ -7,7 +7,6 @@ package git
import (
"io/ioutil"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
@@ -15,15 +14,32 @@ import (
)
func TestBlob_Data(t *testing.T) {
output := "file2\n"
bareRepo1Path := filepath.Join(testReposDir, "repo1_bare")
repo, err := OpenRepository(bareRepo1Path)
if !assert.NoError(t, err) {
t.Fatal()
}
output := `Copyright (c) 2016 The Gitea Authors
Copyright (c) 2015 The Gogs Authors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
`
repo, err := OpenRepository("../../.git")
assert.NoError(t, err)
defer repo.Close()
testBlob, err := repo.GetBlob("6c493ff740f9380390d5c9ddef4af18697ac9375")
testBlob, err := repo.GetBlob("a8d4b49dd073a4a38a7e58385eeff7cc52568697")
assert.NoError(t, err)
r, err := testBlob.DataAsync()
@@ -37,14 +53,13 @@ func TestBlob_Data(t *testing.T) {
}
func Benchmark_Blob_Data(b *testing.B) {
bareRepo1Path := filepath.Join(testReposDir, "repo1_bare")
repo, err := OpenRepository(bareRepo1Path)
repo, err := OpenRepository("../../.git")
if err != nil {
b.Fatal(err)
}
defer repo.Close()
testBlob, err := repo.GetBlob("6c493ff740f9380390d5c9ddef4af18697ac9375")
testBlob, err := repo.GetBlob("a8d4b49dd073a4a38a7e58385eeff7cc52568697")
if err != nil {
b.Fatal(err)
}

View File

@@ -124,18 +124,12 @@ func (c *Command) RunInDirTimeoutEnvFullPipelineFunc(env []string, timeout time.
cmd := exec.CommandContext(ctx, c.name, c.args...)
if env == nil {
cmd.Env = os.Environ()
cmd.Env = append(os.Environ(), fmt.Sprintf("LC_ALL=%s", DefaultLocale))
} else {
cmd.Env = env
cmd.Env = append(cmd.Env, fmt.Sprintf("LC_ALL=%s", DefaultLocale))
}
cmd.Env = append(
cmd.Env,
fmt.Sprintf("LC_ALL=%s", DefaultLocale),
// avoid prompting for credentials interactively, supported since git v2.3
"GIT_TERMINAL_PROMPT=0",
)
// TODO: verify if this is still needed in golang 1.15
if goVersionLessThan115 {
cmd.Env = append(cmd.Env, "GODEBUG=asyncpreemptoff=1")

View File

@@ -102,13 +102,10 @@ func (tes Entries) GetCommitsInfo(commit *Commit, treePath string, cache *LastCo
}
func getLastCommitForPathsByCache(commitID, treePath string, paths []string, cache *LastCommitCache) (map[string]*Commit, []string, error) {
wr, rd, cancel := CatFileBatch(cache.repo.Path)
defer cancel()
var unHitEntryPaths []string
var results = make(map[string]*Commit)
for _, p := range paths {
lastCommit, err := cache.Get(commitID, path.Join(treePath, p), wr, rd)
lastCommit, err := cache.Get(commitID, path.Join(treePath, p))
if err != nil {
return nil, nil, err
}
@@ -303,7 +300,7 @@ revListLoop:
commits[0] = string(commitID)
}
}
treeID = To40ByteSHA(treeID, treeID)
treeID = to40ByteSHA(treeID)
_, err = batchStdinWriter.Write(treeID)
if err != nil {
return nil, err

View File

@@ -17,9 +17,7 @@ import (
// If used as part of a cat-file --batch stream you need to limit the reader to the correct size
func CommitFromReader(gitRepo *Repository, sha SHA1, reader io.Reader) (*Commit, error) {
commit := &Commit{
ID: sha,
Author: &Signature{},
Committer: &Signature{},
ID: sha,
}
payloadSB := new(strings.Builder)

View File

@@ -47,7 +47,7 @@ func GetRawDiffForFile(repoPath, startCommit, endCommit string, diffType RawDiff
func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diffType RawDiffType, file string, writer io.Writer) error {
commit, err := repo.GetCommit(endCommit)
if err != nil {
return err
return fmt.Errorf("GetCommit: %v", err)
}
fileArgs := make([]string, 0)
if len(file) > 0 {

View File

@@ -7,8 +7,6 @@
package git
import (
"bufio"
"io"
"path"
)
@@ -36,7 +34,7 @@ func NewLastCommitCache(repoPath string, gitRepo *Repository, ttl func() int64,
}
// Get get the last commit information by commit id and entry path
func (c *LastCommitCache) Get(ref, entryPath string, wr *io.PipeWriter, rd *bufio.Reader) (interface{}, error) {
func (c *LastCommitCache) Get(ref, entryPath string) (interface{}, error) {
v := c.cache.Get(c.getCacheKey(c.repoPath, ref, entryPath))
if vs, ok := v.(string); ok {
log("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs)
@@ -48,10 +46,7 @@ func (c *LastCommitCache) Get(ref, entryPath string, wr *io.PipeWriter, rd *bufi
if err != nil {
return nil, err
}
if _, err := wr.Write([]byte(vs + "\n")); err != nil {
return nil, err
}
commit, err := c.repo.getCommitFromBatchReader(rd, id)
commit, err := c.repo.getCommit(id)
if err != nil {
return nil, err
}

View File

@@ -8,7 +8,6 @@ package git
import (
"io/ioutil"
"strings"
)
// GetNote retrieves the git-notes data for a given commit.
@@ -50,13 +49,7 @@ func GetNote(repo *Repository, commitID string, note *Note) error {
}
note.Message = d
treePath := ""
if idx := strings.LastIndex(path, "/"); idx > -1 {
treePath = path[:idx]
path = path[idx+1:]
}
lastCommits, err := GetLastCommitForPaths(notes, treePath, []string{path})
lastCommits, err := GetLastCommitForPaths(notes, "", []string{path})
if err != nil {
return err
}

View File

@@ -43,6 +43,8 @@ func FindLFSFile(repo *git.Repository, hash git.SHA1) ([]*LFSResult, error) {
basePath := repo.Path
hashStr := hash.String()
// Use rev-list to provide us with all commits in order
revListReader, revListWriter := io.Pipe()
defer func() {
@@ -72,7 +74,7 @@ func FindLFSFile(repo *git.Repository, hash git.SHA1) ([]*LFSResult, error) {
fnameBuf := make([]byte, 4096)
modeBuf := make([]byte, 40)
workingShaBuf := make([]byte, 20)
workingShaBuf := make([]byte, 40)
for scan.Scan() {
// Get the next commit ID
@@ -125,12 +127,12 @@ func FindLFSFile(repo *git.Repository, hash git.SHA1) ([]*LFSResult, error) {
case "tree":
var n int64
for n < size {
mode, fname, sha20byte, count, err := git.ParseTreeLine(batchReader, modeBuf, fnameBuf, workingShaBuf)
mode, fname, sha, count, err := git.ParseTreeLine(batchReader, modeBuf, fnameBuf, workingShaBuf)
if err != nil {
return nil, err
}
n += int64(count)
if bytes.Equal(sha20byte, hash[:]) {
if bytes.Equal(sha, []byte(hashStr)) {
result := LFSResult{
Name: curPath + string(fname),
SHA: curCommit.ID.String(),
@@ -140,9 +142,7 @@ func FindLFSFile(repo *git.Repository, hash git.SHA1) ([]*LFSResult, error) {
}
resultsMap[curCommit.ID.String()+":"+curPath+string(fname)] = &result
} else if string(mode) == git.EntryModeTree.String() {
sha40Byte := make([]byte, 40)
git.To40ByteSHA(sha20byte, sha40Byte)
trees = append(trees, sha40Byte)
trees = append(trees, sha)
paths = append(paths, curPath+string(fname)+"/")
}
}

View File

@@ -21,7 +21,14 @@ func (repo *Repository) GetBranchCommitID(name string) (string, error) {
// GetTagCommitID returns last commit ID string of given tag.
func (repo *Repository) GetTagCommitID(name string) (string, error) {
return repo.GetRefCommitID(TagPrefix + name)
stdout, err := NewCommand("rev-list", "-n", "1", TagPrefix+name).RunInDir(repo.Path)
if err != nil {
if strings.Contains(err.Error(), "unknown revision or path") {
return "", ErrNotExist{name, ""}
}
return "", err
}
return strings.TrimSpace(stdout), nil
}
// ConvertToSHA1 returns a Hash object from a potential ID string
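One side of the hunk above resolves a tag with `git rev-list -n 1 refs/tags/<name>`, which peels an annotated tag down to the commit it points at. A standalone sketch of that call through os/exec; the repository path and tag name are placeholders:

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// tagCommitID asks git for the single newest commit reachable from the tag,
// which for an annotated tag is the commit the tag object points at.
func tagCommitID(repoPath, tag string) (string, error) {
	cmd := exec.Command("git", "rev-list", "-n", "1", "refs/tags/"+tag)
	cmd.Dir = repoPath
	out, err := cmd.Output()
	if err != nil {
		return "", fmt.Errorf("rev-list refs/tags/%s: %w", tag, err)
	}
	return strings.TrimSpace(string(out)), nil
}

func main() {
	id, err := tagCommitID(".", "v1.0.0") // placeholder repository and tag
	fmt.Println(id, err)
}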

View File

@@ -9,10 +9,9 @@ package git
import (
"bufio"
"errors"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
)
@@ -35,18 +34,6 @@ func (repo *Repository) ResolveReference(name string) (string, error) {
// GetRefCommitID returns the last commit ID string of given reference (branch or tag).
func (repo *Repository) GetRefCommitID(name string) (string, error) {
if strings.HasPrefix(name, "refs/") {
// We're gonna try just reading the ref file as this is likely to be quicker than other options
fileInfo, err := os.Lstat(filepath.Join(repo.Path, name))
if err == nil && fileInfo.Mode().IsRegular() && fileInfo.Size() == 41 {
ref, err := ioutil.ReadFile(filepath.Join(repo.Path, name))
if err == nil && SHAPattern.Match(ref[:40]) && ref[40] == '\n' {
return string(ref[:40]), nil
}
}
}
stdout, err := NewCommand("show-ref", "--verify", "--hash", name).RunInDir(repo.Path)
if err != nil {
if strings.Contains(err.Error(), "not a valid ref") {
@@ -82,11 +69,6 @@ func (repo *Repository) getCommit(id SHA1) (*Commit, error) {
}()
bufReader := bufio.NewReader(stdoutReader)
return repo.getCommitFromBatchReader(bufReader, id)
}
func (repo *Repository) getCommitFromBatchReader(bufReader *bufio.Reader, id SHA1) (*Commit, error) {
_, typ, size, err := ReadBatchLine(bufReader)
if err != nil {
if errors.Is(err, io.EOF) {
@@ -124,6 +106,7 @@ func (repo *Repository) getCommitFromBatchReader(bufReader *bufio.Reader, id SHA
case "commit":
return CommitFromReader(repo, id, io.LimitReader(bufReader, size))
default:
_ = stdoutReader.CloseWithError(fmt.Errorf("unknown typ: %s", typ))
log("Unknown typ: %s", typ)
return nil, ErrNotExist{
ID: id.String(),

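One side of the hunks above short-circuits ref resolution by reading the loose ref file directly: a regular file of exactly 41 bytes whose first 40 characters are hex (plus a trailing newline) is taken as the commit ID, and anything else falls back to `git show-ref --verify --hash`. A sketch of that fast path, assuming repoPath points at the bare repository directory and using a local regexp in place of the SHAPattern referenced above:

package main

import (
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
	"regexp"
	"strings"
)

var shaPattern = regexp.MustCompile(`^[0-9a-f]{40}$`)

// refCommitID tries the cheap path first: a loose ref is just "<40-hex-sha>\n",
// so reading it avoids spawning git. Packed or symbolic refs fall through to
// `git show-ref --verify --hash`.
func refCommitID(repoPath, name string) (string, error) {
	if strings.HasPrefix(name, "refs/") {
		b, err := os.ReadFile(filepath.Join(repoPath, name))
		if err == nil && len(b) == 41 && b[40] == '\n' && shaPattern.Match(b[:40]) {
			return string(b[:40]), nil
		}
	}
	cmd := exec.Command("git", "show-ref", "--verify", "--hash", name)
	cmd.Dir = repoPath
	out, err := cmd.Output()
	if err != nil {
		return "", fmt.Errorf("show-ref %s: %w", name, err)
	}
	return strings.TrimSpace(string(out)), nil
}

func main() {
	fmt.Println(refCommitID(".git", "refs/heads/master")) // placeholder repo path and ref
}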
View File

@@ -43,7 +43,7 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
sizes := make(map[string]int64)
err = tree.Files().ForEach(func(f *object.File) error {
if f.Size == 0 || analyze.IsVendor(f.Name) || enry.IsDotFile(f.Name) ||
if f.Size == 0 || enry.IsVendor(f.Name) || enry.IsDotFile(f.Name) ||
enry.IsDocumentation(f.Name) || enry.IsConfiguration(f.Name) {
return nil
}

View File

@@ -67,7 +67,7 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
for _, f := range entries {
contentBuf.Reset()
content = contentBuf.Bytes()
if f.Size() == 0 || analyze.IsVendor(f.Name()) || enry.IsDotFile(f.Name()) ||
if f.Size() == 0 || enry.IsVendor(f.Name()) || enry.IsDotFile(f.Name()) ||
enry.IsDocumentation(f.Name()) || enry.IsConfiguration(f.Name()) {
continue
}

View File

@@ -7,18 +7,23 @@ package git
import (
"path/filepath"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
func TestGetLatestCommitTime(t *testing.T) {
bareRepo1Path := filepath.Join(testReposDir, "repo1_bare")
lct, err := GetLatestCommitTime(bareRepo1Path)
lct, err := GetLatestCommitTime(".")
assert.NoError(t, err)
// Time is Sun Jul 21 22:43:13 2019 +0200
// Time is in the past
now := time.Now()
assert.True(t, lct.Unix() < now.Unix(), "%d not smaller than %d", lct, now)
// Time is after Mon Oct 23 03:52:09 2017 +0300
// which is the time of commit
// feaf4ba6bc635fec442f46ddd4512416ec43c2c2 (refs/heads/master)
assert.EqualValues(t, 1563741793, lct.Unix())
// d47b98c44c9a6472e44ab80efe65235e11c6da2a
refTime, err := time.Parse("Mon Jan 02 15:04:05 2006 -0700", "Mon Oct 23 03:52:09 2017 +0300")
assert.NoError(t, err)
assert.True(t, lct.Unix() > refTime.Unix(), "%d not greater than %d", lct, refTime)
}
func TestRepoIsEmpty(t *testing.T) {

View File

@@ -35,7 +35,6 @@ func (tag *Tag) Commit() (*Commit, error) {
// \n\n separate headers from message
func parseTagData(data []byte) (*Tag, error) {
tag := new(Tag)
tag.Tagger = &Signature{}
// we now have the contents of the commit object. Let's investigate...
nextline := 0
l:

View File

@@ -7,7 +7,6 @@ package gitgraph
import (
"bytes"
"fmt"
"strings"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/git"
@@ -217,10 +216,10 @@ func newRefsFromRefNames(refNames []byte) []git.Reference {
continue
}
refName := string(refNameBytes)
if strings.HasPrefix(refName, "tag: ") {
refName = strings.TrimPrefix(refName, "tag: ")
} else if strings.HasPrefix(refName, "HEAD -> ") {
refName = strings.TrimPrefix(refName, "HEAD -> ")
if refName[0:5] == "tag: " {
refName = refName[5:]
} else if refName[0:8] == "HEAD -> " {
refName = refName[8:]
}
refs = append(refs, git.Reference{
Name: refName,

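One side of the hunk above strips the `tag: ` and `HEAD -> ` decorations with strings.HasPrefix/TrimPrefix rather than fixed-index slicing, which would panic on ref names shorter than the prefix. A tiny standalone sketch of that difference:

package main

import (
	"fmt"
	"strings"
)

// stripRefDecoration removes the decoration git log prints before a ref name.
// HasPrefix/TrimPrefix are safe for names like "dev" that are shorter than the
// prefixes, whereas refName[0:5] or refName[0:8] would panic.
func stripRefDecoration(refName string) string {
	if strings.HasPrefix(refName, "tag: ") {
		return strings.TrimPrefix(refName, "tag: ")
	} else if strings.HasPrefix(refName, "HEAD -> ") {
		return strings.TrimPrefix(refName, "HEAD -> ")
	}
	return refName
}

func main() {
	for _, name := range []string{"tag: v1.14.3", "HEAD -> main", "dev"} {
		fmt.Println(stripRefDecoration(name))
	}
}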
View File

@@ -68,21 +68,17 @@ func (g *Manager) start() {
// Set the running state
g.setState(stateRunning)
if skip, _ := strconv.ParseBool(os.Getenv("SKIP_MINWINSVC")); skip {
log.Trace("Skipping SVC check as SKIP_MINWINSVC is set")
return
}
// Make SVC process
run := svc.Run
//lint:ignore SA1019 We use IsAnInteractiveSession because IsWindowsService has a different permissions profile
isAnInteractiveSession, err := svc.IsAnInteractiveSession()
isInteractive, err := svc.IsWindowsService()
if err != nil {
log.Error("Unable to ascertain if running as an Windows Service: %v", err)
log.Error("Unable to ascertain if running as an Interactive Session: %v", err)
return
}
if isAnInteractiveSession {
log.Trace("Not running a service ... using the debug SVC manager")
if isInteractive {
run = debug.Run
}
go func() {
@@ -98,49 +94,38 @@ func (g *Manager) Execute(args []string, changes <-chan svc.ChangeRequest, statu
status <- svc.Status{State: svc.StartPending, WaitHint: uint32(setting.StartupTimeout / time.Millisecond)}
}
log.Trace("Awaiting server start-up")
// Now need to wait for everything to start...
if !g.awaitServer(setting.StartupTimeout) {
log.Trace("... start-up failed ... Stopped")
return false, 1
}
log.Trace("Sending Running state to SVC")
// We need to implement some way of svc.AcceptParamChange/svc.ParamChange
status <- svc.Status{
State: svc.Running,
Accepts: svc.AcceptStop | svc.AcceptShutdown | acceptHammerCode,
}
log.Trace("Started")
waitTime := 30 * time.Second
loop:
for {
select {
case <-g.ctx.Done():
log.Trace("Shutting down")
g.DoGracefulShutdown()
waitTime += setting.GracefulHammerTime
break loop
case <-g.shutdownRequested:
log.Trace("Shutting down")
waitTime += setting.GracefulHammerTime
break loop
case change := <-changes:
switch change.Cmd {
case svc.Interrogate:
log.Trace("SVC sent interrogate")
status <- change.CurrentStatus
case svc.Stop, svc.Shutdown:
log.Trace("SVC requested shutdown - shutting down")
g.DoGracefulShutdown()
waitTime += setting.GracefulHammerTime
break loop
case hammerCode:
log.Trace("SVC requested hammer - shutting down and hammering immediately")
g.DoGracefulShutdown()
g.DoImmediateHammer()
break loop
@@ -149,8 +134,6 @@ loop:
}
}
}
log.Trace("Sending StopPending state to SVC")
status <- svc.Status{
State: svc.StopPending,
WaitHint: uint32(waitTime / time.Millisecond),
@@ -162,10 +145,8 @@ hammerLoop:
case change := <-changes:
switch change.Cmd {
case svc.Interrogate:
log.Trace("SVC sent interrogate")
status <- change.CurrentStatus
case svc.Stop, svc.Shutdown, hammerCmd:
log.Trace("SVC requested hammer - hammering immediately")
g.DoImmediateHammer()
break hammerLoop
default:
@@ -175,8 +156,6 @@ hammerLoop:
break hammerLoop
}
}
log.Trace("Stopped")
return false, 0
}

View File

@@ -17,7 +17,6 @@ import (
"time"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
)
var (
@@ -27,10 +26,6 @@ var (
DefaultWriteTimeOut time.Duration
// DefaultMaxHeaderBytes default max header bytes
DefaultMaxHeaderBytes int
// PerWriteWriteTimeout timeout for writes
PerWriteWriteTimeout = 30 * time.Second
// PerWriteWriteTimeoutKbTime is a timeout taking account of how much there is to be written
PerWriteWriteTimeoutKbTime = 10 * time.Second
)
func init() {
@@ -42,16 +37,14 @@ type ServeFunction = func(net.Listener) error
// Server represents our graceful server
type Server struct {
network string
address string
listener net.Listener
wg sync.WaitGroup
state state
lock *sync.RWMutex
BeforeBegin func(network, address string)
OnShutdown func()
PerWriteTimeout time.Duration
PerWritePerKbTimeout time.Duration
network string
address string
listener net.Listener
wg sync.WaitGroup
state state
lock *sync.RWMutex
BeforeBegin func(network, address string)
OnShutdown func()
}
// NewServer creates a server on network at provided address
@@ -62,13 +55,11 @@ func NewServer(network, address, name string) *Server {
log.Info("Starting new %s server: %s:%s on PID: %d", name, network, address, os.Getpid())
}
srv := &Server{
wg: sync.WaitGroup{},
state: stateInit,
lock: &sync.RWMutex{},
network: network,
address: address,
PerWriteTimeout: setting.PerWriteTimeout,
PerWritePerKbTimeout: setting.PerWritePerKbTimeout,
wg: sync.WaitGroup{},
state: stateInit,
lock: &sync.RWMutex{},
network: network,
address: address,
}
srv.BeforeBegin = func(network, addr string) {
@@ -230,11 +221,9 @@ func (wl *wrappedListener) Accept() (net.Conn, error) {
closed := int32(0)
c = wrappedConn{
Conn: c,
server: wl.server,
closed: &closed,
perWriteTimeout: wl.server.PerWriteTimeout,
perWritePerKbTimeout: wl.server.PerWritePerKbTimeout,
Conn: c,
server: wl.server,
closed: &closed,
}
wl.server.wg.Add(1)
@@ -257,25 +246,8 @@ func (wl *wrappedListener) File() (*os.File, error) {
type wrappedConn struct {
net.Conn
server *Server
closed *int32
deadline time.Time
perWriteTimeout time.Duration
perWritePerKbTimeout time.Duration
}
func (w wrappedConn) Write(p []byte) (n int, err error) {
if w.perWriteTimeout > 0 {
minTimeout := time.Duration(len(p)/1024) * w.perWritePerKbTimeout
minDeadline := time.Now().Add(minTimeout).Add(w.perWriteTimeout)
w.deadline = w.deadline.Add(minTimeout)
if minDeadline.After(w.deadline) {
w.deadline = minDeadline
}
_ = w.Conn.SetWriteDeadline(w.deadline)
}
return w.Conn.Write(p)
server *Server
closed *int32
}
func (w wrappedConn) Close() error {

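The Write method shown above grows the write deadline in proportion to the payload: a base per-write allowance plus extra time per KiB queued, so large responses on slow links are not cut off while a stalled connection still times out. A self-contained sketch of that self-adjusting deadline; the wrapper type and the timeout values are illustrative:

package main

import (
	"fmt"
	"net"
	"time"
)

// deadlineConn pushes its write deadline forward on every Write: a fixed
// per-write allowance plus extra time proportional to the payload size.
type deadlineConn struct {
	net.Conn
	deadline        time.Time
	perWriteTimeout time.Duration // base allowance per Write call
	perKiBTimeout   time.Duration // extra allowance per KiB in the payload
}

func (c *deadlineConn) Write(p []byte) (int, error) {
	if c.perWriteTimeout > 0 {
		minTimeout := time.Duration(len(p)/1024) * c.perKiBTimeout
		minDeadline := time.Now().Add(minTimeout).Add(c.perWriteTimeout)
		c.deadline = c.deadline.Add(minTimeout)
		if minDeadline.After(c.deadline) {
			c.deadline = minDeadline
		}
		_ = c.Conn.SetWriteDeadline(c.deadline)
	}
	return c.Conn.Write(p)
}

func main() {
	client, server := net.Pipe()
	defer server.Close()
	go func() {
		buf := make([]byte, 16)
		server.Read(buf) // drain the write so the example terminates
	}()

	c := &deadlineConn{
		Conn:            client,
		perWriteTimeout: 30 * time.Second,
		perKiBTimeout:   10 * time.Second,
	}
	n, err := c.Write([]byte("hello"))
	fmt.Println(n, err)
	c.Close()
}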
View File

@@ -10,7 +10,6 @@ import (
"net/http"
"os"
"strconv"
"strings"
"time"
"code.gitea.io/gitea/modules/setting"
@@ -27,13 +26,11 @@ func GetCacheControl() string {
// generateETag generates an ETag based on size, filename and file modification time
func generateETag(fi os.FileInfo) string {
etag := fmt.Sprint(fi.Size()) + fi.Name() + fi.ModTime().UTC().Format(http.TimeFormat)
return `"` + base64.StdEncoding.EncodeToString([]byte(etag)) + `"`
return base64.StdEncoding.EncodeToString([]byte(etag))
}
// HandleTimeCache handles time-based caching for a HTTP request
func HandleTimeCache(req *http.Request, w http.ResponseWriter, fi os.FileInfo) (handled bool) {
w.Header().Set("Cache-Control", GetCacheControl())
ifModifiedSince := req.Header.Get("If-Modified-Since")
if ifModifiedSince != "" {
t, err := time.Parse(http.TimeFormat, ifModifiedSince)
@@ -43,40 +40,20 @@ func HandleTimeCache(req *http.Request, w http.ResponseWriter, fi os.FileInfo) (
}
}
w.Header().Set("Cache-Control", GetCacheControl())
w.Header().Set("Last-Modified", fi.ModTime().Format(http.TimeFormat))
return false
}
// HandleFileETagCache handles ETag-based caching for a HTTP request
func HandleFileETagCache(req *http.Request, w http.ResponseWriter, fi os.FileInfo) (handled bool) {
// HandleEtagCache handles ETag-based caching for a HTTP request
func HandleEtagCache(req *http.Request, w http.ResponseWriter, fi os.FileInfo) (handled bool) {
etag := generateETag(fi)
return HandleGenericETagCache(req, w, etag)
}
// HandleGenericETagCache handles ETag-based caching for a HTTP request.
// It returns true if the request was handled.
func HandleGenericETagCache(req *http.Request, w http.ResponseWriter, etag string) (handled bool) {
if len(etag) > 0 {
w.Header().Set("Etag", etag)
if checkIfNoneMatchIsValid(req, etag) {
w.WriteHeader(http.StatusNotModified)
return true
}
if req.Header.Get("If-None-Match") == etag {
w.WriteHeader(http.StatusNotModified)
return true
}
w.Header().Set("Cache-Control", GetCacheControl())
return false
}
// checkIfNoneMatchIsValid tests if the header If-None-Match matches the ETag
func checkIfNoneMatchIsValid(req *http.Request, etag string) bool {
ifNoneMatch := req.Header.Get("If-None-Match")
if len(ifNoneMatch) > 0 {
for _, item := range strings.Split(ifNoneMatch, ",") {
item = strings.TrimSpace(item)
if item == etag {
return true
}
}
}
w.Header().Set("ETag", etag)
return false
}
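One side of the hunks above quotes the generated ETag and walks the comma-separated If-None-Match list instead of comparing the raw header as a single string. A small standalone sketch of that comparison; the helper name and sample values are illustrative:

package main

import (
	"fmt"
	"net/http"
	"strings"
)

// etagMatches reports whether the request's If-None-Match header names the
// given (already quoted) ETag. The header may carry a comma-separated list,
// so each item is trimmed and compared individually.
func etagMatches(req *http.Request, etag string) bool {
	ifNoneMatch := req.Header.Get("If-None-Match")
	if ifNoneMatch == "" {
		return false
	}
	for _, item := range strings.Split(ifNoneMatch, ",") {
		if strings.TrimSpace(item) == etag {
			return true
		}
	}
	return false
}

func main() {
	req, _ := http.NewRequest("GET", "http://example.com/", nil)
	req.Header.Set("If-None-Match", `"stale", "abc123"`)
	fmt.Println(etagMatches(req, `"abc123"`)) // true -> the caller can answer 304 Not Modified
}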

View File

@@ -1,144 +0,0 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package httpcache
import (
"net/http"
"net/http/httptest"
"os"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
type mockFileInfo struct {
}
func (m mockFileInfo) Name() string { return "gitea.test" }
func (m mockFileInfo) Size() int64 { return int64(10) }
func (m mockFileInfo) Mode() os.FileMode { return os.ModePerm }
func (m mockFileInfo) ModTime() time.Time { return time.Time{} }
func (m mockFileInfo) IsDir() bool { return false }
func (m mockFileInfo) Sys() interface{} { return nil }
func TestHandleFileETagCache(t *testing.T) {
fi := mockFileInfo{}
etag := `"MTBnaXRlYS50ZXN0TW9uLCAwMSBKYW4gMDAwMSAwMDowMDowMCBHTVQ="`
t.Run("No_If-None-Match", func(t *testing.T) {
req := &http.Request{Header: make(http.Header)}
w := httptest.NewRecorder()
handled := HandleFileETagCache(req, w, fi)
assert.False(t, handled)
assert.Len(t, w.Header(), 2)
assert.Contains(t, w.Header(), "Cache-Control")
assert.Contains(t, w.Header(), "Etag")
assert.Equal(t, etag, w.Header().Get("Etag"))
})
t.Run("Wrong_If-None-Match", func(t *testing.T) {
req := &http.Request{Header: make(http.Header)}
w := httptest.NewRecorder()
req.Header.Set("If-None-Match", `"wrong etag"`)
handled := HandleFileETagCache(req, w, fi)
assert.False(t, handled)
assert.Len(t, w.Header(), 2)
assert.Contains(t, w.Header(), "Cache-Control")
assert.Contains(t, w.Header(), "Etag")
assert.Equal(t, etag, w.Header().Get("Etag"))
})
t.Run("Correct_If-None-Match", func(t *testing.T) {
req := &http.Request{Header: make(http.Header)}
w := httptest.NewRecorder()
req.Header.Set("If-None-Match", etag)
handled := HandleFileETagCache(req, w, fi)
assert.True(t, handled)
assert.Len(t, w.Header(), 1)
assert.Contains(t, w.Header(), "Etag")
assert.Equal(t, etag, w.Header().Get("Etag"))
assert.Equal(t, http.StatusNotModified, w.Code)
})
}
func TestHandleGenericETagCache(t *testing.T) {
etag := `"test"`
t.Run("No_If-None-Match", func(t *testing.T) {
req := &http.Request{Header: make(http.Header)}
w := httptest.NewRecorder()
handled := HandleGenericETagCache(req, w, etag)
assert.False(t, handled)
assert.Len(t, w.Header(), 2)
assert.Contains(t, w.Header(), "Cache-Control")
assert.Contains(t, w.Header(), "Etag")
assert.Equal(t, etag, w.Header().Get("Etag"))
})
t.Run("Wrong_If-None-Match", func(t *testing.T) {
req := &http.Request{Header: make(http.Header)}
w := httptest.NewRecorder()
req.Header.Set("If-None-Match", `"wrong etag"`)
handled := HandleGenericETagCache(req, w, etag)
assert.False(t, handled)
assert.Len(t, w.Header(), 2)
assert.Contains(t, w.Header(), "Cache-Control")
assert.Contains(t, w.Header(), "Etag")
assert.Equal(t, etag, w.Header().Get("Etag"))
})
t.Run("Correct_If-None-Match", func(t *testing.T) {
req := &http.Request{Header: make(http.Header)}
w := httptest.NewRecorder()
req.Header.Set("If-None-Match", etag)
handled := HandleGenericETagCache(req, w, etag)
assert.True(t, handled)
assert.Len(t, w.Header(), 1)
assert.Contains(t, w.Header(), "Etag")
assert.Equal(t, etag, w.Header().Get("Etag"))
assert.Equal(t, http.StatusNotModified, w.Code)
})
t.Run("Multiple_Wrong_If-None-Match", func(t *testing.T) {
req := &http.Request{Header: make(http.Header)}
w := httptest.NewRecorder()
req.Header.Set("If-None-Match", `"wrong etag", "wrong etag "`)
handled := HandleGenericETagCache(req, w, etag)
assert.False(t, handled)
assert.Len(t, w.Header(), 2)
assert.Contains(t, w.Header(), "Cache-Control")
assert.Contains(t, w.Header(), "Etag")
assert.Equal(t, etag, w.Header().Get("Etag"))
})
t.Run("Multiple_Correct_If-None-Match", func(t *testing.T) {
req := &http.Request{Header: make(http.Header)}
w := httptest.NewRecorder()
req.Header.Set("If-None-Match", `"wrong etag", `+etag)
handled := HandleGenericETagCache(req, w, etag)
assert.True(t, handled)
assert.Len(t, w.Header(), 1)
assert.Contains(t, w.Header(), "Etag")
assert.Equal(t, etag, w.Header().Get("Etag"))
assert.Equal(t, http.StatusNotModified, w.Code)
})
}

View File

@@ -325,7 +325,7 @@ func (r *Request) getResponse() (*http.Response, error) {
trans = &http.Transport{
TLSClientConfig: r.setting.TLSClientConfig,
Proxy: proxy,
Dial: TimeoutDialer(r.setting.ConnectTimeout),
Dial: TimeoutDialer(r.setting.ConnectTimeout, r.setting.ReadWriteTimeout),
}
} else if t, ok := trans.(*http.Transport); ok {
if t.TLSClientConfig == nil {
@@ -335,7 +335,7 @@ func (r *Request) getResponse() (*http.Response, error) {
t.Proxy = r.setting.Proxy
}
if t.Dial == nil {
t.Dial = TimeoutDialer(r.setting.ConnectTimeout)
t.Dial = TimeoutDialer(r.setting.ConnectTimeout, r.setting.ReadWriteTimeout)
}
}
@@ -352,7 +352,6 @@ func (r *Request) getResponse() (*http.Response, error) {
client := &http.Client{
Transport: trans,
Jar: jar,
Timeout: r.setting.ReadWriteTimeout,
}
if len(r.setting.UserAgent) > 0 && len(r.req.Header.Get("User-Agent")) == 0 {
@@ -458,12 +457,12 @@ func (r *Request) Response() (*http.Response, error) {
}
// TimeoutDialer returns a connection dialer function with timeout settings for the http.Transport Dial field.
func TimeoutDialer(cTimeout time.Duration) func(net, addr string) (c net.Conn, err error) {
func TimeoutDialer(cTimeout time.Duration, rwTimeout time.Duration) func(net, addr string) (c net.Conn, err error) {
return func(netw, addr string) (net.Conn, error) {
conn, err := net.DialTimeout(netw, addr, cTimeout)
if err != nil {
return nil, err
}
return conn, nil
return conn, conn.SetDeadline(time.Now().Add(rwTimeout))
}
}
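Both sides of the hunks above bound connection establishment with net.DialTimeout; they differ in whether a read/write deadline is pinned on the raw connection or the whole exchange is capped by http.Client.Timeout. A small sketch of the dial-timeout-plus-client-timeout combination; the URL and durations are placeholders:

package main

import (
	"fmt"
	"net"
	"net/http"
	"time"
)

// dialerWithTimeout bounds only connection establishment; the rest of the
// request lifetime is left to the client's own Timeout.
func dialerWithTimeout(connect time.Duration) func(network, addr string) (net.Conn, error) {
	return func(network, addr string) (net.Conn, error) {
		return net.DialTimeout(network, addr, connect)
	}
}

func main() {
	client := &http.Client{
		Transport: &http.Transport{Dial: dialerWithTimeout(5 * time.Second)},
		// Timeout caps dial + headers + body for the whole request, unlike a
		// fixed deadline set on the connection at dial time, which would also
		// expire idle keep-alive connections that are later reused.
		Timeout: 30 * time.Second,
	}
	resp, err := client.Get("https://example.com/") // placeholder URL
	if err == nil {
		resp.Body.Close()
	}
	fmt.Println(err)
}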

View File

@@ -178,7 +178,7 @@ func NewBleveIndexer(indexDir string) (*BleveIndexer, bool, error) {
func (b *BleveIndexer) addUpdate(batchWriter *io.PipeWriter, batchReader *bufio.Reader, commitSha string, update fileUpdate, repo *models.Repository, batch rupture.FlushingBatch) error {
// Ignore vendored files in code search
if setting.Indexer.ExcludeVendored && analyze.IsVendor(update.Filename) {
if setting.Indexer.ExcludeVendored && enry.IsVendor(update.Filename) {
return nil
}

View File

@@ -177,7 +177,7 @@ func (b *ElasticSearchIndexer) init() (bool, error) {
func (b *ElasticSearchIndexer) addUpdate(batchWriter *io.PipeWriter, batchReader *bufio.Reader, sha string, update fileUpdate, repo *models.Repository) ([]elastic.BulkableRequest, error) {
// Ignore vendored files in code search
if setting.Indexer.ExcludeVendored && analyze.IsVendor(update.Filename) {
if setting.Indexer.ExcludeVendored && enry.IsVendor(update.Filename) {
return nil, nil
}

View File

@@ -38,11 +38,7 @@ func (db *DBIndexer) Index(id int64) error {
// Get latest commit for default branch
commitID, err := gitRepo.GetBranchCommitID(repo.DefaultBranch)
if err != nil {
if git.IsErrBranchNotExist(err) || git.IsErrNotExist((err)) {
log.Debug("Unable to get commit ID for defaultbranch %s in %s ... skipping this repository", repo.DefaultBranch, repo.RepoPath())
return nil
}
log.Error("Unable to get commit ID for defaultbranch %s in %s. Error: %v", repo.DefaultBranch, repo.RepoPath(), err)
log.Error("Unable to get commit ID for defaultbranch %s in %s", repo.DefaultBranch, repo.RepoPath())
return err
}

View File

@@ -44,13 +44,24 @@ type ContentStore struct {
}
// Get takes a Meta object and retrieves the content from the store, returning
// it as an io.ReadSeekCloser.
func (s *ContentStore) Get(meta *models.LFSMetaObject) (storage.Object, error) {
// it as an io.Reader. If fromByte > 0, the reader starts from that byte
func (s *ContentStore) Get(meta *models.LFSMetaObject, fromByte int64) (io.ReadCloser, error) {
f, err := s.Open(meta.RelativePath())
if err != nil {
log.Error("Whilst trying to read LFS OID[%s]: Unable to open Error: %v", meta.Oid, err)
return nil, err
}
if fromByte > 0 {
if fromByte >= meta.Size {
return nil, ErrRangeNotSatisfiable{
FromByte: fromByte,
}
}
_, err = f.Seek(fromByte, io.SeekStart)
if err != nil {
log.Error("Whilst trying to read LFS OID[%s]: Unable to seek to %d Error: %v", meta.Oid, fromByte, err)
}
}
return f, err
}
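The Get shown above adds ranged reads: a fromByte at or past the object size is rejected, otherwise the store seeks before handing the reader back. A minimal sketch of the same pattern over a plain file; os.File stands in for the LFS content store, and path and offset are placeholders:

package main

import (
	"errors"
	"fmt"
	"io"
	"os"
)

var errRangeNotSatisfiable = errors.New("requested byte range cannot be satisfied")

// openFrom returns a reader over path positioned at fromByte: reject offsets
// at or beyond the file size, seek otherwise, and hand back an io.ReadCloser.
func openFrom(path string, fromByte int64) (io.ReadCloser, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	if fromByte > 0 {
		fi, err := f.Stat()
		if err != nil {
			f.Close()
			return nil, err
		}
		if fromByte >= fi.Size() {
			f.Close()
			return nil, errRangeNotSatisfiable
		}
		if _, err := f.Seek(fromByte, io.SeekStart); err != nil {
			f.Close()
			return nil, err
		}
	}
	return f, nil
}

func main() {
	rc, err := openFrom("go.mod", 8) // placeholder path and offset
	if err != nil {
		fmt.Println(err)
		return
	}
	defer rc.Close()
	_, _ = io.Copy(os.Stdout, rc)
}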
@@ -63,7 +74,7 @@ func (s *ContentStore) Put(meta *models.LFSMetaObject, r io.Reader) error {
// now pass the wrapped reader to Save - if there is a size mismatch or hash mismatch then
// the errors returned by the newHashingReader should percolate up to here
written, err := s.Save(p, wrappedRd, meta.Size)
written, err := s.Save(p, wrappedRd)
if err != nil {
log.Error("Whilst putting LFS OID[%s]: Failed to copy to tmpPath: %s Error: %v", meta.Oid, p, err)
return err

View File

@@ -67,5 +67,5 @@ func IsPointerFile(buf *[]byte) *models.LFSMetaObject {
// ReadMetaObject will read a models.LFSMetaObject and return a reader
func ReadMetaObject(meta *models.LFSMetaObject) (io.ReadCloser, error) {
contentStore := &ContentStore{ObjectStorage: storage.LFS}
return contentStore.Get(meta)
return contentStore.Get(meta, 0)
}

Some files were not shown because too many files have changed in this diff.