Compare commits
60 Commits
current-us...gitroast
Author | SHA1 | Date |
---|---|---|
AJ ONeal | 76fbfde40c | |
AJ ONeal | 20717e6139 | |
AJ ONeal | fa737a887b | |
AJ ONeal | e09a6248f1 | |
AJ ONeal | 1ff523881d | |
AJ ONeal | 0ae89454e1 | |
AJ ONeal | ce1739bd16 | |
AJ ONeal | 4c7727c3ed | |
AJ ONeal | 8718156545 | |
AJ ONeal | eda51c2079 | |
SagePtr | 812c225223 | |
AJ ONeal | e64ff60c13 | |
AJ ONeal | ee0fa3e58f | |
AJ ONeal | 5a42829dbd | |
AJ ONeal | 996c9cbab3 | |
AJ ONeal | 95df7779ef | |
AJ ONeal | 20eec9abd3 | |
AJ ONeal | fa7fc43a13 | |
Lauris BH | 33c3cbc968 | |
Iwasa Kazmi | 8f29f61a6b | |
SagePtr | 93dcc6caef | |
crito | 4176e33148 | |
Toni Villena | 177b46fe77 | |
linweijie2012 | 1e51307466 | |
SagePtr | c145cb745b | |
techknowlogick | 1a68b3962f | |
SagePtr | d918e63bc5 | |
Lunny Xiao | 1901f35980 | |
Nicolas Lenz | 745c898561 | |
techknowlogick | 38d8b8cf49 | |
SagePtr | 0358a40625 | |
techknowlogick | 99ce0bfcd7 | |
Lanre Adelowo | 3fbcdd9e25 | |
Lanre Adelowo | e9def84bf2 | |
Lauris BH | 066515429f | |
SagePtr | 12c04a85f2 | |
SagePtr | a345023d0a | |
SagePtr | 052aa54b2b | |
SagePtr | cbe8a1f0e6 | |
techknowlogick | cfe6941905 | |
Lunny Xiao | eb8c611b1d | |
techknowlogick | b1eaeeb0cd | |
SagePtr | 15a403bf97 | |
Lunny Xiao | 099028681e | |
Dingjun | 940e30bcd4 | |
SagePtr | 5a7830e0e8 | |
SagePtr | dae065ea68 | |
Lauris BH | 40bbc7320c | |
Lauris BH | 5da301bb70 | |
Lauris BH | 3e191935c8 | |
Lauris BH | 8a639ade58 | |
Lunny Xiao | 88d791013b | |
techknowlogick | b37ca4a6ff | |
Lauris BH | 678834883e | |
Lauris BH | 1965eaf96e | |
Jonas Franz | c784ac53ba | |
Nicolas Da Mutten | 85f3966338 | |
Lauris BH | f096e69e0a | |
Lunny Xiao | 768b41adba | |
Lauris BH | 155caa8e0a | |
@@ -75,7 +75,7 @@ pipeline:
 - make lint
 - make fmt-check
 - make swagger-check
-- make swagger-validate
+# - make swagger-validate
 - make misspell-check
 - make test-vendor
 - make build
CHANGELOG.md (31)
@@ -4,10 +4,39 @@ This changelog goes through all the changes that have been made in each release
 without substantial changes to our git log; to see the highlights of what has
 been added to each release, please refer to the [blog](https://blog.gitea.io).
 
-## [1.5.0-RC1](https://github.com/go-gitea/gitea/releases/tag/v1.5.0-rc1) - 2018-07-04
+## [1.5.1](https://github.com/go-gitea/gitea/releases/tag/v1.5.1) - 2018-09-03
 * SECURITY
+* Don't disclose emails of all users when sending out emails (#4784)
+* Improve URL validation for external wiki and external issues (#4710) (#4740)
+* Make cookies HttpOnly and obey COOKIE_SECURE flag (#4706) (#4707)
+* BUGFIXES
+* Fix missing release title in webhook (#4783) (#4800)
+* Make sure to reset commit count in the cache on mirror syncing (#4770)
+* Fixed bug where team with admin privelege type doesn't get any unit (#4759)
+* Fix failure on creating pull request with assignees (#4583) (#4727)
+* Hide org/create menu item in Dashboard if user has no rights (#4678) (#4686)
+* TRANSLATION
+* Fix incorrect caption of webhook setting (#4701) (#4718)
+
+## [1.5.0](https://github.com/go-gitea/gitea/releases/tag/v1.5.0) - 2018-08-10
+* SECURITY
+* Check that repositories can only be migrated to own user or organizations (#4366) (#4370)
 * Limit uploaded avatar image-size to 4096px x 3072px by default (#4353)
 * Do not allow to reuse TOTP passcode (#3878)
+* BUGFIXES
+* Fix column droping for MSSQL that need new transaction for that (#4440) (#4484)
+* Redirect to correct page after using scratch token (#4458) (#4472)
+* Replace src with raw to fix image paths (#4377) (#4386)
+* Fixes repo membership check in API (#4341) (#4379)
+* Add default merge options when adding new repository (#4369) (#4373)
+* Fix repository last updated time update when delete a user who watched the repo (#4363) (#4371)
+* Fix html entity escaping in branch deletion message (#4471) (#4485)
+* Fix out-of-transaction query in removeOrgUser (#4521) (#4524)
+* Fix incorrect MergeWhitelistTeamIDs check in CanUserMerge function (#4519)
+* Fix panic issue on update avatar email (#4580) (#4590)
+* Fix bugs when too many IN variables (#4594) (#4597)
+* Push whitelist now doesn't apply to branch deletion (#4601) (#4640)
+* Site admin could create repos even MAX_CREATION_LIMIT=0 (#4645) (#4650)
 * FEATURE
 * Add cli commands to regen hooks & keys (#3979)
 * Add support for FIDO U2F (#3971)
@@ -299,12 +299,14 @@
 [[projects]]
 name = "github.com/go-xorm/builder"
 packages = ["."]
-revision = "488224409dd8aa2ce7a5baf8d10d55764a913738"
+revision = "dc8bf48f58fab2b4da338ffd25191905fd741b8f"
+version = "v0.3.0"
 
 [[projects]]
 name = "github.com/go-xorm/core"
 packages = ["."]
-revision = "cb1d0ca71f42d3ee1bf4aba7daa16099bc31a7e9"
+revision = "c10e21e7e1cec20e09398f2dfae385e58c8df555"
+version = "v0.6.0"
 
 [[projects]]
 name = "github.com/go-xorm/tidb"

@@ -314,7 +316,7 @@
 [[projects]]
 name = "github.com/go-xorm/xorm"
 packages = ["."]
-revision = "d4149d1eee0c2c488a74a5863fd9caf13d60fd03"
+revision = "ad69f7d8f0861a29438154bb0a20b60501298480"
 
 [[projects]]
 branch = "master"

@@ -873,6 +875,6 @@
 [solve-meta]
 analyzer-name = "dep"
 analyzer-version = 1
-inputs-digest = "036b8c882671cf8d2c5e2fdbe53b1bdfbd39f7ebd7765bd50276c7c4ecf16687"
+inputs-digest = "3b587a036aaf09514228ead18e7fd93e9ee1d14d4e56715bb2f197d5f27259d1"
 solver-name = "gps-cdcl"
 solver-version = 1

@@ -38,7 +38,7 @@ ignored = ["google.golang.org/appengine*"]
 [[override]]
 name = "github.com/go-xorm/xorm"
 #version = "0.6.5"
-revision = "d4149d1eee0c2c488a74a5863fd9caf13d60fd03"
+revision = "ad69f7d8f0861a29438154bb0a20b60501298480"
 
 [[override]]
 name = "github.com/gorilla/mux"
Makefile (24)
@@ -21,7 +21,19 @@ GOFMT ?= gofmt -s
 GOFLAGS := -i -v
 EXTRA_GOFLAGS ?=
 
-LDFLAGS := -X "main.Version=$(shell git describe --tags --always | sed 's/-/+/' | sed 's/^v//')" -X "main.Tags=$(TAGS)"
+ifneq ($(DRONE_TAG),)
+VERSION ?= $(subst v,,$(DRONE_TAG))
+GITEA_VERSION := $(VERSION)
+else
+ifneq ($(DRONE_BRANCH),)
+VERSION ?= $(subst release/v,,$(DRONE_BRANCH))
+else
+VERSION ?= master
+endif
+GITEA_VERSION := $(shell git describe --tags --always | sed 's/-/+/' | sed 's/^v//')
+endif
+
+LDFLAGS := -X "main.Version=$(GITEA_VERSION)" -X "main.Tags=$(TAGS)"
 
 PACKAGES ?= $(filter-out code.gitea.io/gitea/integrations,$(shell $(GO) list ./... | grep -v /vendor/))
 SOURCES ?= $(shell find . -name "*.go" -type f)

@@ -45,16 +57,6 @@ else
 EXECUTABLE := gitea
 endif
 
-ifneq ($(DRONE_TAG),)
-VERSION ?= $(subst v,,$(DRONE_TAG))
-else
-ifneq ($(DRONE_BRANCH),)
-VERSION ?= $(subst release/v,,$(DRONE_BRANCH))
-else
-VERSION ?= master
-endif
-endif
-
 .PHONY: all
 all: build
 
@@ -19,7 +19,8 @@ func TestAPIAdminCreateAndDeleteSSHKey(t *testing.T) {
 session := loginUser(t, "user1")
 keyOwner := models.AssertExistsAndLoadBean(t, &models.User{Name: "user2"}).(*models.User)
 
-urlStr := fmt.Sprintf("/api/v1/admin/users/%s/keys", keyOwner.Name)
+token := getTokenForLoggedInUser(t, session)
+urlStr := fmt.Sprintf("/api/v1/admin/users/%s/keys?token=%s", keyOwner.Name, token)
 req := NewRequestWithValues(t, "POST", urlStr, map[string]string{
 "key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQDAu7tvIvX6ZHrRXuZNfkR3XLHSsuCK9Zn3X58lxBcQzuo5xZgB6vRwwm/QtJuF+zZPtY5hsQILBLmF+BZ5WpKZp1jBeSjH2G7lxet9kbcH+kIVj0tPFEoyKI9wvWqIwC4prx/WVk2wLTJjzBAhyNxfEq7C9CeiX9pQEbEqJfkKCQ== nocomment\n",
 "title": "test-key",

@@ -36,7 +37,7 @@ func TestAPIAdminCreateAndDeleteSSHKey(t *testing.T) {
 OwnerID: keyOwner.ID,
 })
 
-req = NewRequestf(t, "DELETE", "/api/v1/admin/users/%s/keys/%d",
+req = NewRequestf(t, "DELETE", "/api/v1/admin/users/%s/keys/%d?token="+token,
 keyOwner.Name, newPublicKey.ID)
 session.MakeRequest(t, req, http.StatusNoContent)
 models.AssertNotExistsBean(t, &models.PublicKey{ID: newPublicKey.ID})

@@ -46,8 +47,9 @@ func TestAPIAdminDeleteMissingSSHKey(t *testing.T) {
 prepareTestEnv(t)
 // user1 is an admin user
 session := loginUser(t, "user1")
+token := getTokenForLoggedInUser(t, session)
 
-req := NewRequestf(t, "DELETE", "/api/v1/admin/users/user1/keys/%d", models.NonexistentID)
+req := NewRequestf(t, "DELETE", "/api/v1/admin/users/user1/keys/%d?token="+token, models.NonexistentID)
 session.MakeRequest(t, req, http.StatusNotFound)
 }
 

@@ -57,7 +59,8 @@ func TestAPIAdminDeleteUnauthorizedKey(t *testing.T) {
 normalUsername := "user2"
 session := loginUser(t, adminUsername)
 
-urlStr := fmt.Sprintf("/api/v1/admin/users/%s/keys", adminUsername)
+token := getTokenForLoggedInUser(t, session)
+urlStr := fmt.Sprintf("/api/v1/admin/users/%s/keys?token=%s", adminUsername, token)
 req := NewRequestWithValues(t, "POST", urlStr, map[string]string{
 "key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQDAu7tvIvX6ZHrRXuZNfkR3XLHSsuCK9Zn3X58lxBcQzuo5xZgB6vRwwm/QtJuF+zZPtY5hsQILBLmF+BZ5WpKZp1jBeSjH2G7lxet9kbcH+kIVj0tPFEoyKI9wvWqIwC4prx/WVk2wLTJjzBAhyNxfEq7C9CeiX9pQEbEqJfkKCQ== nocomment\n",
 "title": "test-key",

@@ -67,7 +70,8 @@ func TestAPIAdminDeleteUnauthorizedKey(t *testing.T) {
 DecodeJSON(t, resp, &newPublicKey)
 
 session = loginUser(t, normalUsername)
-req = NewRequestf(t, "DELETE", "/api/v1/admin/users/%s/keys/%d",
+token = getTokenForLoggedInUser(t, session)
+req = NewRequestf(t, "DELETE", "/api/v1/admin/users/%s/keys/%d?token="+token,
 adminUsername, newPublicKey.ID)
 session.MakeRequest(t, req, http.StatusForbidden)
 }
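Every test hunk in this file follows the same recipe: log in, mint an API token for the session with getTokenForLoggedInUser, and append it to the /api/v1 URL as a token query parameter. The sketch below condenses that recipe using only helpers that appear in the diff (loginUser, getTokenForLoggedInUser, NewRequestf, session.MakeRequest, models.NonexistentID); the package clause, imports and test name are assumptions added for illustration, not code from this comparison.

```go
package integrations

import (
	"net/http"
	"testing"

	"code.gitea.io/gitea/models"
)

// Illustrative sketch of the token-authenticated request pattern these hunks
// introduce; the test name is hypothetical.
func TestTokenQueryParamSketch(t *testing.T) {
	prepareTestEnv(t)

	// Log in through the web session as before.
	session := loginUser(t, "user1")

	// New step: obtain an API token tied to that session.
	token := getTokenForLoggedInUser(t, session)

	// Every /api/v1 request now carries the token as a query parameter.
	req := NewRequestf(t, "DELETE", "/api/v1/admin/users/user1/keys/%d?token="+token,
		models.NonexistentID)
	session.MakeRequest(t, req, http.StatusNotFound)
}
```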
@@ -17,7 +17,8 @@ func testAPIGetBranch(t *testing.T, branchName string, exists bool) {
 prepareTestEnv(t)
 
 session := loginUser(t, "user2")
-req := NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/branches/%s", branchName)
+token := getTokenForLoggedInUser(t, session)
+req := NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/branches/%s?token=%s", branchName, token)
 resp := session.MakeRequest(t, req, NoExpectedStatus)
 if !exists {
 assert.EqualValues(t, http.StatusNotFound, resp.Code)
@@ -69,8 +69,9 @@ func TestAPICreateComment(t *testing.T) {
 repoOwner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
 
 session := loginUser(t, repoOwner.Name)
-urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d/comments",
-repoOwner.Name, repo.Name, issue.Index)
+token := getTokenForLoggedInUser(t, session)
+urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d/comments?token=%s",
+repoOwner.Name, repo.Name, issue.Index, token)
 req := NewRequestWithValues(t, "POST", urlStr, map[string]string{
 "body": commentBody,
 })

@@ -93,8 +94,9 @@ func TestAPIEditComment(t *testing.T) {
 repoOwner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
 
 session := loginUser(t, repoOwner.Name)
-urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/comments/%d",
-repoOwner.Name, repo.Name, comment.ID)
+token := getTokenForLoggedInUser(t, session)
+urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/comments/%d?token=%s",
+repoOwner.Name, repo.Name, comment.ID, token)
 req := NewRequestWithValues(t, "PATCH", urlStr, map[string]string{
 "body": newCommentBody,
 })

@@ -117,8 +119,9 @@ func TestAPIDeleteComment(t *testing.T) {
 repoOwner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
 
 session := loginUser(t, repoOwner.Name)
-req := NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/issues/comments/%d",
-repoOwner.Name, repo.Name, comment.ID)
+token := getTokenForLoggedInUser(t, session)
+req := NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/issues/comments/%d?token=%s",
+repoOwner.Name, repo.Name, comment.ID, token)
 session.MakeRequest(t, req, http.StatusNoContent)
 
 models.AssertNotExistsBean(t, &models.Comment{ID: comment.ID})
@@ -20,16 +20,18 @@ type makeRequestFunc func(testing.TB, *http.Request, int) *httptest.ResponseReco
 func TestGPGKeys(t *testing.T) {
 prepareTestEnv(t)
 session := loginUser(t, "user2")
+token := getTokenForLoggedInUser(t, session)
 
 tt := []struct {
 name string
 makeRequest makeRequestFunc
+token string
 results []int
 }{
-{name: "NoLogin", makeRequest: MakeRequest,
+{name: "NoLogin", makeRequest: MakeRequest, token: "",
 results: []int{http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized},
 },
-{name: "LoggedAsUser2", makeRequest: session.MakeRequest,
+{name: "LoggedAsUser2", makeRequest: session.MakeRequest, token: token,
 results: []int{http.StatusOK, http.StatusOK, http.StatusNotFound, http.StatusNoContent, http.StatusInternalServerError, http.StatusInternalServerError, http.StatusCreated, http.StatusCreated}},
 }
 

@@ -38,29 +40,29 @@ func TestGPGKeys(t *testing.T) {
 //Basic test on result code
 t.Run(tc.name, func(t *testing.T) {
 t.Run("ViewOwnGPGKeys", func(t *testing.T) {
-testViewOwnGPGKeys(t, tc.makeRequest, tc.results[0])
+testViewOwnGPGKeys(t, tc.makeRequest, tc.token, tc.results[0])
 })
 t.Run("ViewGPGKeys", func(t *testing.T) {
-testViewGPGKeys(t, tc.makeRequest, tc.results[1])
+testViewGPGKeys(t, tc.makeRequest, tc.token, tc.results[1])
 })
 t.Run("GetGPGKey", func(t *testing.T) {
-testGetGPGKey(t, tc.makeRequest, tc.results[2])
+testGetGPGKey(t, tc.makeRequest, tc.token, tc.results[2])
 })
 t.Run("DeleteGPGKey", func(t *testing.T) {
-testDeleteGPGKey(t, tc.makeRequest, tc.results[3])
+testDeleteGPGKey(t, tc.makeRequest, tc.token, tc.results[3])
 })
 
 t.Run("CreateInvalidGPGKey", func(t *testing.T) {
-testCreateInvalidGPGKey(t, tc.makeRequest, tc.results[4])
+testCreateInvalidGPGKey(t, tc.makeRequest, tc.token, tc.results[4])
 })
 t.Run("CreateNoneRegistredEmailGPGKey", func(t *testing.T) {
-testCreateNoneRegistredEmailGPGKey(t, tc.makeRequest, tc.results[5])
+testCreateNoneRegistredEmailGPGKey(t, tc.makeRequest, tc.token, tc.results[5])
 })
 t.Run("CreateValidGPGKey", func(t *testing.T) {
-testCreateValidGPGKey(t, tc.makeRequest, tc.results[6])
+testCreateValidGPGKey(t, tc.makeRequest, tc.token, tc.results[6])
 })
 t.Run("CreateValidSecondaryEmailGPGKey", func(t *testing.T) {
-testCreateValidSecondaryEmailGPGKey(t, tc.makeRequest, tc.results[7])
+testCreateValidSecondaryEmailGPGKey(t, tc.makeRequest, tc.token, tc.results[7])
 })
 })
 }

@@ -70,7 +72,7 @@ func TestGPGKeys(t *testing.T) {
 
 var keys []*api.GPGKey
 
-req := NewRequest(t, "GET", "/api/v1/user/gpg_keys") //GET all keys
+req := NewRequest(t, "GET", "/api/v1/user/gpg_keys?token="+token) //GET all keys
 resp := session.MakeRequest(t, req, http.StatusOK)
 DecodeJSON(t, resp, &keys)
 

@@ -91,7 +93,7 @@ func TestGPGKeys(t *testing.T) {
 assert.EqualValues(t, false, primaryKey2.Emails[0].Verified)
 
 var key api.GPGKey
-req = NewRequest(t, "GET", "/api/v1/user/gpg_keys/"+strconv.FormatInt(primaryKey1.ID, 10)) //Primary key 1
+req = NewRequest(t, "GET", "/api/v1/user/gpg_keys/"+strconv.FormatInt(primaryKey1.ID, 10)+"?token="+token) //Primary key 1
 resp = session.MakeRequest(t, req, http.StatusOK)
 DecodeJSON(t, resp, &key)
 assert.EqualValues(t, "38EA3BCED732982C", key.KeyID)

@@ -99,13 +101,13 @@ func TestGPGKeys(t *testing.T) {
 assert.EqualValues(t, "user2@example.com", key.Emails[0].Email)
 assert.EqualValues(t, true, key.Emails[0].Verified)
 
-req = NewRequest(t, "GET", "/api/v1/user/gpg_keys/"+strconv.FormatInt(subKey.ID, 10)) //Subkey of 38EA3BCED732982C
+req = NewRequest(t, "GET", "/api/v1/user/gpg_keys/"+strconv.FormatInt(subKey.ID, 10)+"?token="+token) //Subkey of 38EA3BCED732982C
 resp = session.MakeRequest(t, req, http.StatusOK)
 DecodeJSON(t, resp, &key)
 assert.EqualValues(t, "70D7C694D17D03AD", key.KeyID)
 assert.EqualValues(t, 0, len(key.Emails))
 
-req = NewRequest(t, "GET", "/api/v1/user/gpg_keys/"+strconv.FormatInt(primaryKey2.ID, 10)) //Primary key 2
+req = NewRequest(t, "GET", "/api/v1/user/gpg_keys/"+strconv.FormatInt(primaryKey2.ID, 10)+"?token="+token) //Primary key 2
 resp = session.MakeRequest(t, req, http.StatusOK)
 DecodeJSON(t, resp, &key)
 assert.EqualValues(t, "FABF39739FE1E927", key.KeyID)

@@ -119,7 +121,7 @@ func TestGPGKeys(t *testing.T) {
 t.Run("CheckCommits", func(t *testing.T) {
 t.Run("NotSigned", func(t *testing.T) {
 var branch api.Branch
-req := NewRequest(t, "GET", "/api/v1/repos/user2/repo16/branches/not-signed")
+req := NewRequest(t, "GET", "/api/v1/repos/user2/repo16/branches/not-signed?token="+token)
 resp := session.MakeRequest(t, req, http.StatusOK)
 DecodeJSON(t, resp, &branch)
 assert.EqualValues(t, false, branch.Commit.Verification.Verified)

@@ -127,7 +129,7 @@ func TestGPGKeys(t *testing.T) {
 
 t.Run("SignedWithNotValidatedEmail", func(t *testing.T) {
 var branch api.Branch
-req := NewRequest(t, "GET", "/api/v1/repos/user2/repo16/branches/good-sign-not-yet-validated")
+req := NewRequest(t, "GET", "/api/v1/repos/user2/repo16/branches/good-sign-not-yet-validated?token="+token)
 resp := session.MakeRequest(t, req, http.StatusOK)
 DecodeJSON(t, resp, &branch)
 assert.EqualValues(t, false, branch.Commit.Verification.Verified)

@@ -135,7 +137,7 @@ func TestGPGKeys(t *testing.T) {
 
 t.Run("SignedWithValidEmail", func(t *testing.T) {
 var branch api.Branch
-req := NewRequest(t, "GET", "/api/v1/repos/user2/repo16/branches/good-sign")
+req := NewRequest(t, "GET", "/api/v1/repos/user2/repo16/branches/good-sign?token="+token)
 resp := session.MakeRequest(t, req, http.StatusOK)
 DecodeJSON(t, resp, &branch)
 assert.EqualValues(t, true, branch.Commit.Verification.Verified)

@@ -143,39 +145,39 @@ func TestGPGKeys(t *testing.T) {
 })
 }
 
-func testViewOwnGPGKeys(t *testing.T, makeRequest makeRequestFunc, expected int) {
-req := NewRequest(t, "GET", "/api/v1/user/gpg_keys")
+func testViewOwnGPGKeys(t *testing.T, makeRequest makeRequestFunc, token string, expected int) {
+req := NewRequest(t, "GET", "/api/v1/user/gpg_keys?token="+token)
 makeRequest(t, req, expected)
 }
 
-func testViewGPGKeys(t *testing.T, makeRequest makeRequestFunc, expected int) {
-req := NewRequest(t, "GET", "/api/v1/users/user2/gpg_keys")
+func testViewGPGKeys(t *testing.T, makeRequest makeRequestFunc, token string, expected int) {
+req := NewRequest(t, "GET", "/api/v1/users/user2/gpg_keys?token="+token)
 makeRequest(t, req, expected)
 }
 
-func testGetGPGKey(t *testing.T, makeRequest makeRequestFunc, expected int) {
-req := NewRequest(t, "GET", "/api/v1/user/gpg_keys/1")
+func testGetGPGKey(t *testing.T, makeRequest makeRequestFunc, token string, expected int) {
+req := NewRequest(t, "GET", "/api/v1/user/gpg_keys/1?token="+token)
 makeRequest(t, req, expected)
 }
 
-func testDeleteGPGKey(t *testing.T, makeRequest makeRequestFunc, expected int) {
-req := NewRequest(t, "DELETE", "/api/v1/user/gpg_keys/1")
+func testDeleteGPGKey(t *testing.T, makeRequest makeRequestFunc, token string, expected int) {
+req := NewRequest(t, "DELETE", "/api/v1/user/gpg_keys/1?token="+token)
 makeRequest(t, req, expected)
 }
 
-func testCreateGPGKey(t *testing.T, makeRequest makeRequestFunc, expected int, publicKey string) {
-req := NewRequestWithJSON(t, "POST", "/api/v1/user/gpg_keys", api.CreateGPGKeyOption{
+func testCreateGPGKey(t *testing.T, makeRequest makeRequestFunc, token string, expected int, publicKey string) {
+req := NewRequestWithJSON(t, "POST", "/api/v1/user/gpg_keys?token="+token, api.CreateGPGKeyOption{
 ArmoredKey: publicKey,
 })
 makeRequest(t, req, expected)
 }
 
-func testCreateInvalidGPGKey(t *testing.T, makeRequest makeRequestFunc, expected int) {
-testCreateGPGKey(t, makeRequest, expected, "invalid_key")
+func testCreateInvalidGPGKey(t *testing.T, makeRequest makeRequestFunc, token string, expected int) {
+testCreateGPGKey(t, makeRequest, token, expected, "invalid_key")
 }
 
-func testCreateNoneRegistredEmailGPGKey(t *testing.T, makeRequest makeRequestFunc, expected int) {
-testCreateGPGKey(t, makeRequest, expected, `-----BEGIN PGP PUBLIC KEY BLOCK-----
+func testCreateNoneRegistredEmailGPGKey(t *testing.T, makeRequest makeRequestFunc, token string, expected int) {
+testCreateGPGKey(t, makeRequest, token, expected, `-----BEGIN PGP PUBLIC KEY BLOCK-----
 
 mQENBFmGUygBCACjCNbKvMGgp0fd5vyFW9olE1CLCSyyF9gQN2hSuzmZLuAZF2Kh
 dCMCG2T1UwzUB/yWUFWJ2BtCwSjuaRv+cGohqEy6bhEBV90peGA33lHfjx7wP25O

@@ -194,9 +196,9 @@ INx/MmBfmtCq05FqNclvU+sj2R3N1JJOtBOjZrJHQbJhzoILou8AkxeX1A+q9OAz
 -----END PGP PUBLIC KEY BLOCK-----`)
 }
 
-func testCreateValidGPGKey(t *testing.T, makeRequest makeRequestFunc, expected int) {
+func testCreateValidGPGKey(t *testing.T, makeRequest makeRequestFunc, token string, expected int) {
 //User2 <user2@example.com> //primary & activated
-testCreateGPGKey(t, makeRequest, expected, `-----BEGIN PGP PUBLIC KEY BLOCK-----
+testCreateGPGKey(t, makeRequest, token, expected, `-----BEGIN PGP PUBLIC KEY BLOCK-----
 
 mQENBFmGVsMBCACuxgZ7W7rI9xN08Y4M7B8yx/6/I4Slm94+wXf8YNRvAyqj30dW
 VJhyBcnfNRDLKSQp5o/hhfDkCgdqBjLa1PnHlGS3PXJc0hP/FyYPD2BFvNMPpCYS

@@ -228,9 +230,9 @@ uy6MA3VSB99SK9ducGmE1Jv8mcziREroz2TEGr0zPs6h
 -----END PGP PUBLIC KEY BLOCK-----`)
 }
 
-func testCreateValidSecondaryEmailGPGKey(t *testing.T, makeRequest makeRequestFunc, expected int) {
+func testCreateValidSecondaryEmailGPGKey(t *testing.T, makeRequest makeRequestFunc, token string, expected int) {
 //User2 <user21@example.com> //secondary and not activated
-testCreateGPGKey(t, makeRequest, expected, `-----BEGIN PGP PUBLIC KEY BLOCK-----
+testCreateGPGKey(t, makeRequest, token, expected, `-----BEGIN PGP PUBLIC KEY BLOCK-----
 
 mQENBFmGWN4BCAC18V4tVGO65VLCV7p14FuXJlUtZ5CuYMvgEkcOqrvRaBSW9ao4
 PGESOhJpfWpnW3QgJniYndLzPpsmdHEclEER6aZjiNgReWPOjHD5tykWocZAJqXD
|
||||||
label := models.AssertExistsAndLoadBean(t, &models.Label{RepoID: repo.ID}).(*models.Label)
|
label := models.AssertExistsAndLoadBean(t, &models.Label{RepoID: repo.ID}).(*models.Label)
|
||||||
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
|
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
|
||||||
|
|
||||||
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d/labels",
|
session := loginUser(t, owner.Name)
|
||||||
owner.Name, repo.Name, issue.Index)
|
token := getTokenForLoggedInUser(t, session)
|
||||||
|
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d/labels?token=%s",
|
||||||
|
owner.Name, repo.Name, issue.Index, token)
|
||||||
req := NewRequestWithJSON(t, "POST", urlStr, &api.IssueLabelsOption{
|
req := NewRequestWithJSON(t, "POST", urlStr, &api.IssueLabelsOption{
|
||||||
Labels: []int64{label.ID},
|
Labels: []int64{label.ID},
|
||||||
})
|
})
|
||||||
session := loginUser(t, owner.Name)
|
|
||||||
resp := session.MakeRequest(t, req, http.StatusOK)
|
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||||
var apiLabels []*api.Label
|
var apiLabels []*api.Label
|
||||||
DecodeJSON(t, resp, &apiLabels)
|
DecodeJSON(t, resp, &apiLabels)
|
||||||
|
@ -45,12 +46,13 @@ func TestAPIReplaceIssueLabels(t *testing.T) {
|
||||||
label := models.AssertExistsAndLoadBean(t, &models.Label{RepoID: repo.ID}).(*models.Label)
|
label := models.AssertExistsAndLoadBean(t, &models.Label{RepoID: repo.ID}).(*models.Label)
|
||||||
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
|
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
|
||||||
|
|
||||||
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d/labels",
|
session := loginUser(t, owner.Name)
|
||||||
owner.Name, repo.Name, issue.Index)
|
token := getTokenForLoggedInUser(t, session)
|
||||||
|
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d/labels?token=%s",
|
||||||
|
owner.Name, repo.Name, issue.Index, token)
|
||||||
req := NewRequestWithJSON(t, "PUT", urlStr, &api.IssueLabelsOption{
|
req := NewRequestWithJSON(t, "PUT", urlStr, &api.IssueLabelsOption{
|
||||||
Labels: []int64{label.ID},
|
Labels: []int64{label.ID},
|
||||||
})
|
})
|
||||||
session := loginUser(t, owner.Name)
|
|
||||||
resp := session.MakeRequest(t, req, http.StatusOK)
|
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||||
var apiLabels []*api.Label
|
var apiLabels []*api.Label
|
||||||
DecodeJSON(t, resp, &apiLabels)
|
DecodeJSON(t, resp, &apiLabels)
|
||||||
|
|
|
@@ -22,8 +22,9 @@ func TestAPIListIssues(t *testing.T) {
 owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
 
 session := loginUser(t, owner.Name)
-req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/issues?state=all",
-owner.Name, repo.Name)
+token := getTokenForLoggedInUser(t, session)
+req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/issues?state=all&token=%s",
+owner.Name, repo.Name, token)
 resp := session.MakeRequest(t, req, http.StatusOK)
 var apiIssues []*api.Issue
 DecodeJSON(t, resp, &apiIssues)

@@ -41,8 +42,8 @@ func TestAPICreateIssue(t *testing.T) {
 owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
 
 session := loginUser(t, owner.Name)
-urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues?state=all", owner.Name, repo.Name)
+token := getTokenForLoggedInUser(t, session)
+urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues?state=all&token=%s", owner.Name, repo.Name, token)
 req := NewRequestWithJSON(t, "POST", urlStr, &api.CreateIssueOption{
 Body: body,
 Title: title,
@@ -46,8 +46,8 @@ func TestCreateReadOnlyDeployKey(t *testing.T) {
 repoOwner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
 
 session := loginUser(t, repoOwner.Name)
-keysURL := fmt.Sprintf("/api/v1/repos/%s/%s/keys", repoOwner.Name, repo.Name)
+token := getTokenForLoggedInUser(t, session)
+keysURL := fmt.Sprintf("/api/v1/repos/%s/%s/keys?token=%s", repoOwner.Name, repo.Name, token)
 rawKeyBody := api.CreateKeyOption{
 Title: "read-only",
 Key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQDAu7tvIvX6ZHrRXuZNfkR3XLHSsuCK9Zn3X58lxBcQzuo5xZgB6vRwwm/QtJuF+zZPtY5hsQILBLmF+BZ5WpKZp1jBeSjH2G7lxet9kbcH+kIVj0tPFEoyKI9wvWqIwC4prx/WVk2wLTJjzBAhyNxfEq7C9CeiX9pQEbEqJfkKCQ== nocomment\n",

@@ -72,8 +72,8 @@ func TestCreateReadWriteDeployKey(t *testing.T) {
 repoOwner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
 
 session := loginUser(t, repoOwner.Name)
-keysURL := fmt.Sprintf("/api/v1/repos/%s/%s/keys", repoOwner.Name, repo.Name)
+token := getTokenForLoggedInUser(t, session)
+keysURL := fmt.Sprintf("/api/v1/repos/%s/%s/keys?token=%s", repoOwner.Name, repo.Name, token)
 rawKeyBody := api.CreateKeyOption{
 Title: "read-write",
 Key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDsufOCrDDlT8DLkodnnJtbq7uGflcPae7euTfM+Laq4So+v4WeSV362Rg0O/+Sje1UthrhN6lQkfRkdWIlCRQEXg+LMqr6RhvDfZquE2Xwqv/itlz7LjbdAUdYoO1iH7rMSmYvQh4WEnC/DAacKGbhdGIM/ZBz0z6tHm7bPgbI9ykEKekTmPwQFP1Qebvf5NYOFMWqQ2sCEAI9dBMVLoojsIpV+KADf+BotiIi8yNfTG2rzmzpxBpW9fYjd1Sy1yd4NSUpoPbEJJYJ1TrjiSWlYOVq9Ar8xW1O87i6gBjL/3zN7ANeoYhaAXupdOS6YL22YOK/yC0tJtXwwdh/eSrh",
@@ -20,7 +20,8 @@ func TestAPIViewPulls(t *testing.T) {
 owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
 
 session := loginUser(t, "user2")
-req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/pulls?state=all", owner.Name, repo.Name)
+token := getTokenForLoggedInUser(t, session)
+req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/pulls?state=all&token="+token, owner.Name, repo.Name)
 resp := session.MakeRequest(t, req, http.StatusOK)
 
 var pulls []*api.PullRequest
@@ -22,7 +22,7 @@ func TestAPICreateRelease(t *testing.T) {
 repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
 owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
 session := loginUser(t, owner.LowerName)
+token := getTokenForLoggedInUser(t, session)
 gitRepo, err := git.OpenRepository(repo.RepoPath())
 assert.NoError(t, err)
 

@@ -32,8 +32,8 @@ func TestAPICreateRelease(t *testing.T) {
 commitID, err := gitRepo.GetTagCommitID("v0.0.1")
 assert.NoError(t, err)
 
-urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/releases",
-owner.Name, repo.Name)
+urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/releases?token=%s",
+owner.Name, repo.Name, token)
 req := NewRequestWithJSON(t, "POST", urlStr, &api.CreateReleaseOption{
 TagName: "v0.0.1",
 Title: "v0.0.1",

@@ -53,8 +53,8 @@ func TestAPICreateRelease(t *testing.T) {
 Note: newRelease.Note,
 })
 
-urlStr = fmt.Sprintf("/api/v1/repos/%s/%s/releases/%d",
-owner.Name, repo.Name, newRelease.ID)
+urlStr = fmt.Sprintf("/api/v1/repos/%s/%s/releases/%d?token=%s",
+owner.Name, repo.Name, newRelease.ID, token)
 req = NewRequest(t, "GET", urlStr)
 resp = session.MakeRequest(t, req, http.StatusOK)
 
@@ -16,16 +16,17 @@ func TestAPIReposRaw(t *testing.T) {
 user := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User)
 // Login as User2.
 session := loginUser(t, user.Name)
+token := getTokenForLoggedInUser(t, session)
 
 for _, ref := range [...]string{
 "master", // Branch
 "v1.1", // Tag
 "65f1bf27bc3bf70f64657658635e66094edbcb4d", // Commit
 } {
-req := NewRequestf(t, "GET", "/api/v1/repos/%s/repo1/raw/%s/README.md", user.Name, ref)
+req := NewRequestf(t, "GET", "/api/v1/repos/%s/repo1/raw/%s/README.md?token="+token, user.Name, ref)
 session.MakeRequest(t, req, http.StatusOK)
 }
 // Test default branch
-req := NewRequestf(t, "GET", "/api/v1/repos/%s/repo1/raw/README.md", user.Name)
+req := NewRequestf(t, "GET", "/api/v1/repos/%s/repo1/raw/README.md?token="+token, user.Name)
 session.MakeRequest(t, req, http.StatusOK)
 }
@@ -67,16 +67,16 @@ func TestAPISearchRepo(t *testing.T) {
 expectedResults
 }{
 {name: "RepositoriesMax50", requestURL: "/api/v1/repos/search?limit=50", expectedResults: expectedResults{
-nil: {count: 16},
-user: {count: 16},
-user2: {count: 16}},
+nil: {count: 17},
+user: {count: 17},
+user2: {count: 17}},
 },
 {name: "RepositoriesMax10", requestURL: "/api/v1/repos/search?limit=10", expectedResults: expectedResults{
 nil: {count: 10},
 user: {count: 10},
 user2: {count: 10}},
 },
-{name: "RepositoriesDefaultMax10", requestURL: "/api/v1/repos/search", expectedResults: expectedResults{
+{name: "RepositoriesDefaultMax10", requestURL: "/api/v1/repos/search?default", expectedResults: expectedResults{
 nil: {count: 10},
 user: {count: 10},
 user2: {count: 10}},

@@ -143,17 +143,19 @@ func TestAPISearchRepo(t *testing.T) {
 var session *TestSession
 var testName string
 var userID int64
+var token string
 if userToLogin != nil && userToLogin.ID > 0 {
 testName = fmt.Sprintf("LoggedUser%d", userToLogin.ID)
 session = loginUser(t, userToLogin.Name)
 userID = userToLogin.ID
+token = getTokenForLoggedInUser(t, session)
 } else {
 testName = "AnonymousUser"
 session = emptyTestSession(t)
 }
 
 t.Run(testName, func(t *testing.T) {
-request := NewRequest(t, "GET", testCase.requestURL)
+request := NewRequest(t, "GET", testCase.requestURL+"&token="+token)
 response := session.MakeRequest(t, request, http.StatusOK)
 
 var body api.SearchResults
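One detail of the search-test hunk above worth spelling out: the request is now built as testCase.requestURL + "&token=" + token for every case, including the anonymous one, which is presumably why the bare "/api/v1/repos/search" URL gained a dummy "?default" query string. A tiny standalone illustration of that URL construction, with a made-up token value:

```go
package main

import "fmt"

func main() {
	token := "example-token"               // stand-in; the tests mint one via getTokenForLoggedInUser
	base := "/api/v1/repos/search?default" // was "/api/v1/repos/search" before this change
	// Unconditionally appending "&token=..." only stays a well-formed query
	// string because a "?" is already present in the base URL.
	fmt.Println(base + "&token=" + token) // /api/v1/repos/search?default&token=example-token
}
```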
@@ -215,7 +217,8 @@ func TestAPIOrgRepos(t *testing.T) {
 // Login as User2.
 session := loginUser(t, user.Name)
 
-req := NewRequestf(t, "GET", "/api/v1/orgs/%s/repos", sourceOrg.Name)
+token := getTokenForLoggedInUser(t, session)
+req := NewRequestf(t, "GET", "/api/v1/orgs/%s/repos?token="+token, sourceOrg.Name)
 resp := session.MakeRequest(t, req, http.StatusOK)
 
 var apiRepos []*api.Repository

@@ -231,7 +234,35 @@ func TestAPIOrgRepos(t *testing.T) {
 func TestAPIGetRepoByIDUnauthorized(t *testing.T) {
 prepareTestEnv(t)
 user := models.AssertExistsAndLoadBean(t, &models.User{ID: 4}).(*models.User)
-sess := loginUser(t, user.Name)
-req := NewRequestf(t, "GET", "/api/v1/repositories/2")
-sess.MakeRequest(t, req, http.StatusNotFound)
+session := loginUser(t, user.Name)
+token := getTokenForLoggedInUser(t, session)
+req := NewRequestf(t, "GET", "/api/v1/repositories/2?token="+token)
+session.MakeRequest(t, req, http.StatusNotFound)
+}
+
+func TestAPIRepoMigrate(t *testing.T) {
+testCases := []struct {
+ctxUserID, userID int64
+cloneURL, repoName string
+expectedStatus int
+}{
+{ctxUserID: 1, userID: 2, cloneURL: "https://github.com/go-gitea/git.git", repoName: "git-admin", expectedStatus: http.StatusCreated},
+{ctxUserID: 2, userID: 2, cloneURL: "https://github.com/go-gitea/git.git", repoName: "git-own", expectedStatus: http.StatusCreated},
+{ctxUserID: 2, userID: 1, cloneURL: "https://github.com/go-gitea/git.git", repoName: "git-bad", expectedStatus: http.StatusForbidden},
+{ctxUserID: 2, userID: 3, cloneURL: "https://github.com/go-gitea/git.git", repoName: "git-org", expectedStatus: http.StatusCreated},
+{ctxUserID: 2, userID: 6, cloneURL: "https://github.com/go-gitea/git.git", repoName: "git-bad-org", expectedStatus: http.StatusForbidden},
+}
+
+prepareTestEnv(t)
+for _, testCase := range testCases {
+user := models.AssertExistsAndLoadBean(t, &models.User{ID: testCase.ctxUserID}).(*models.User)
+session := loginUser(t, user.Name)
+token := getTokenForLoggedInUser(t, session)
+req := NewRequestWithJSON(t, "POST", "/api/v1/repos/migrate?token="+token, &api.MigrateRepoOption{
+CloneAddr: testCase.cloneURL,
+UID: int(testCase.userID),
+RepoName: testCase.repoName,
+})
+session.MakeRequest(t, req, testCase.expectedStatus)
+}
 }
@@ -21,7 +21,8 @@ func TestAPITeam(t *testing.T) {
 user := models.AssertExistsAndLoadBean(t, &models.User{ID: teamUser.UID}).(*models.User)
 
 session := loginUser(t, user.Name)
-req := NewRequestf(t, "GET", "/api/v1/teams/%d", teamUser.TeamID)
+token := getTokenForLoggedInUser(t, session)
+req := NewRequestf(t, "GET", "/api/v1/teams/%d?token="+token, teamUser.TeamID)
 resp := session.MakeRequest(t, req, http.StatusOK)
 
 var apiTeam api.Team
@@ -75,7 +75,8 @@ func TestGit(t *testing.T) {
 
 t.Run("CreateRepo", func(t *testing.T) {
 session := loginUser(t, "user2")
-req := NewRequestWithJSON(t, "POST", "/api/v1/user/repos", &api.CreateRepoOption{
+token := getTokenForLoggedInUser(t, session)
+req := NewRequestWithJSON(t, "POST", "/api/v1/user/repos?token="+token, &api.CreateRepoOption{
 AutoInit: true,
 Description: "Temporary repo",
 Name: "repo-tmp-17",

@@ -166,7 +167,8 @@ func TestGit(t *testing.T) {
 t.Run("Standard", func(t *testing.T) {
 t.Run("CreateRepo", func(t *testing.T) {
 session := loginUser(t, "user2")
-req := NewRequestWithJSON(t, "POST", "/api/v1/user/repos", &api.CreateRepoOption{
+token := getTokenForLoggedInUser(t, session)
+req := NewRequestWithJSON(t, "POST", "/api/v1/user/repos?token="+token, &api.CreateRepoOption{
 AutoInit: true,
 Description: "Temporary repo",
 Name: "repo-tmp-18",
@@ -0,0 +1 @@
+ref: refs/heads/master

@@ -0,0 +1,4 @@
+[core]
+repositoryformatversion = 0
+filemode = true
+bare = true

@@ -0,0 +1 @@
+Unnamed repository; edit this file 'description' to name the repository.
@@ -0,0 +1,15 @@
+#!/bin/sh
+#
+# An example hook script to check the commit log message taken by
+# applypatch from an e-mail message.
+#
+# The hook should exit with non-zero status after issuing an
+# appropriate message if it wants to stop the commit. The hook is
+# allowed to edit the commit message file.
+#
+# To enable this hook, rename this file to "applypatch-msg".
+
+. git-sh-setup
+commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
+test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
+:
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# An example hook script to check the commit log message.
+# Called by "git commit" with one argument, the name of the file
+# that has the commit message. The hook should exit with non-zero
+# status after issuing an appropriate message if it wants to stop the
+# commit. The hook is allowed to edit the commit message file.
+#
+# To enable this hook, rename this file to "commit-msg".
+
+# Uncomment the below to add a Signed-off-by line to the message.
+# Doing this in a hook is a bad idea in general, but the prepare-commit-msg
+# hook is more suited to it.
+#
+# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
+# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
+
+# This example catches duplicate Signed-off-by lines.
+
+test "" = "$(grep '^Signed-off-by: ' "$1" |
+sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || {
+echo >&2 Duplicate Signed-off-by lines.
+exit 1
+}
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+ORI_DIR=`pwd`
+SHELL_FOLDER=$(cd "$(dirname "$0")";pwd)
+cd "$ORI_DIR"
+for i in `ls "$SHELL_FOLDER/post-receive.d"`; do
+sh "$SHELL_FOLDER/post-receive.d/$i"
+done
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+"$GITEA_ROOT/gitea" hook --config="$GITEA_ROOT/$GITEA_CONF" post-receive
@@ -0,0 +1,8 @@
+#!/bin/sh
+#
+# An example hook script to prepare a packed repository for use over
+# dumb transports.
+#
+# To enable this hook, rename this file to "post-update".
+
+exec git update-server-info
@@ -0,0 +1,14 @@
+#!/bin/sh
+#
+# An example hook script to verify what is about to be committed
+# by applypatch from an e-mail message.
+#
+# The hook should exit with non-zero status after issuing an
+# appropriate message if it wants to stop the commit.
+#
+# To enable this hook, rename this file to "pre-applypatch".
+
+. git-sh-setup
+precommit="$(git rev-parse --git-path hooks/pre-commit)"
+test -x "$precommit" && exec "$precommit" ${1+"$@"}
+:
@@ -0,0 +1,49 @@
+#!/bin/sh
+#
+# An example hook script to verify what is about to be committed.
+# Called by "git commit" with no arguments. The hook should
+# exit with non-zero status after issuing an appropriate message if
+# it wants to stop the commit.
+#
+# To enable this hook, rename this file to "pre-commit".
+
+if git rev-parse --verify HEAD >/dev/null 2>&1
+then
+against=HEAD
+else
+# Initial commit: diff against an empty tree object
+against=4b825dc642cb6eb9a060e54bf8d69288fbee4904
+fi
+
+# If you want to allow non-ASCII filenames set this variable to true.
+allownonascii=$(git config --bool hooks.allownonascii)
+
+# Redirect output to stderr.
+exec 1>&2
+
+# Cross platform projects tend to avoid non-ASCII filenames; prevent
+# them from being added to the repository. We exploit the fact that the
+# printable range starts at the space character and ends with tilde.
+if [ "$allownonascii" != "true" ] &&
+# Note that the use of brackets around a tr range is ok here, (it's
+# even required, for portability to Solaris 10's /usr/bin/tr), since
+# the square bracket bytes happen to fall in the designated range.
+test $(git diff --cached --name-only --diff-filter=A -z $against |
+LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0
+then
+cat <<\EOF
+Error: Attempt to add a non-ASCII file name.
+
+This can cause problems if you want to work with people on other platforms.
+
+To be portable it is advisable to rename the file.
+
+If you know what you are doing you can disable this check using:
+
+git config hooks.allownonascii true
+EOF
+exit 1
+fi
+
+# If there are whitespace errors, print the offending file names and fail.
+exec git diff-index --check --cached $against --
@ -0,0 +1,53 @@
#!/bin/sh

# An example hook script to verify what is about to be pushed.  Called by "git
# push" after it has checked the remote status, but before anything has been
# pushed.  If this script exits with a non-zero status nothing will be pushed.
#
# This hook is called with the following parameters:
#
# $1 -- Name of the remote to which the push is being done
# $2 -- URL to which the push is being done
#
# If pushing without using a named remote those arguments will be equal.
#
# Information about the commits which are being pushed is supplied as lines to
# the standard input in the form:
#
#   <local ref> <local sha1> <remote ref> <remote sha1>
#
# This sample shows how to prevent push of commits where the log message starts
# with "WIP" (work in progress).

remote="$1"
url="$2"

z40=0000000000000000000000000000000000000000

while read local_ref local_sha remote_ref remote_sha
do
	if [ "$local_sha" = $z40 ]
	then
		# Handle delete
		:
	else
		if [ "$remote_sha" = $z40 ]
		then
			# New branch, examine all commits
			range="$local_sha"
		else
			# Update to existing branch, examine new commits
			range="$remote_sha..$local_sha"
		fi

		# Check for WIP commit
		commit=`git rev-list -n 1 --grep '^WIP' "$range"`
		if [ -n "$commit" ]
		then
			echo >&2 "Found WIP commit in $local_ref, not pushing"
			exit 1
		fi
	fi
done

exit 0
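For context, the sample above reads one `<local ref> <local sha1> <remote ref> <remote sha1>` line per pushed ref from standard input and rejects the push when any new commit message starts with "WIP". A quick way to see it in action, assuming the sample has been installed as `.git/hooks/pre-push` and made executable:

```sh
git commit --allow-empty -m "WIP: not ready yet"
git push origin HEAD              # rejected: "Found WIP commit in <ref>, not pushing"
git push --no-verify origin HEAD  # --no-verify skips the pre-push hook entirely
```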
@ -0,0 +1,169 @@
#!/bin/sh
#
# Copyright (c) 2006, 2008 Junio C Hamano
#
# The "pre-rebase" hook is run just before "git rebase" starts doing
# its job, and can prevent the command from running by exiting with
# non-zero status.
#
# The hook is called with the following parameters:
#
# $1 -- the upstream the series was forked from.
# $2 -- the branch being rebased (or empty when rebasing the current branch).
#
# This sample shows how to prevent topic branches that are already
# merged to 'next' branch from getting rebased, because allowing it
# would result in rebasing already published history.

publish=next
basebranch="$1"
if test "$#" = 2
then
	topic="refs/heads/$2"
else
	topic=`git symbolic-ref HEAD` ||
	exit 0 ;# we do not interrupt rebasing detached HEAD
fi

case "$topic" in
refs/heads/??/*)
	;;
*)
	exit 0 ;# we do not interrupt others.
	;;
esac

# Now we are dealing with a topic branch being rebased
# on top of master.  Is it OK to rebase it?

# Does the topic really exist?
git show-ref -q "$topic" || {
	echo >&2 "No such branch $topic"
	exit 1
}

# Is topic fully merged to master?
not_in_master=`git rev-list --pretty=oneline ^master "$topic"`
if test -z "$not_in_master"
then
	echo >&2 "$topic is fully merged to master; better remove it."
	exit 1 ;# we could allow it, but there is no point.
fi

# Is topic ever merged to next?  If so you should not be rebasing it.
only_next_1=`git rev-list ^master "^$topic" ${publish} | sort`
only_next_2=`git rev-list ^master ${publish} | sort`
if test "$only_next_1" = "$only_next_2"
then
	not_in_topic=`git rev-list "^$topic" master`
	if test -z "$not_in_topic"
	then
		echo >&2 "$topic is already up-to-date with master"
		exit 1 ;# we could allow it, but there is no point.
	else
		exit 0
	fi
else
	not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"`
	/usr/bin/perl -e '
		my $topic = $ARGV[0];
		my $msg = "* $topic has commits already merged to public branch:\n";
		my (%not_in_next) = map {
			/^([0-9a-f]+) /;
			($1 => 1);
		} split(/\n/, $ARGV[1]);
		for my $elem (map {
				/^([0-9a-f]+) (.*)$/;
				[$1 => $2];
			} split(/\n/, $ARGV[2])) {
			if (!exists $not_in_next{$elem->[0]}) {
				if ($msg) {
					print STDERR $msg;
					undef $msg;
				}
				print STDERR " $elem->[1]\n";
			}
		}
	' "$topic" "$not_in_next" "$not_in_master"
	exit 1
fi

<<\DOC_END

This sample hook safeguards topic branches that have been
published from being rewound.

The workflow assumed here is:

 * Once a topic branch forks from "master", "master" is never
   merged into it again (either directly or indirectly).

 * Once a topic branch is fully cooked and merged into "master",
   it is deleted.  If you need to build on top of it to correct
   earlier mistakes, a new topic branch is created by forking at
   the tip of the "master".  This is not strictly necessary, but
   it makes it easier to keep your history simple.

 * Whenever you need to test or publish your changes to topic
   branches, merge them into "next" branch.

The script, being an example, hardcodes the publish branch name
to be "next", but it is trivial to make it configurable via
$GIT_DIR/config mechanism.

With this workflow, you would want to know:

(1) ... if a topic branch has ever been merged to "next".  Young
    topic branches can have stupid mistakes you would rather
    clean up before publishing, and things that have not been
    merged into other branches can be easily rebased without
    affecting other people.  But once it is published, you would
    not want to rewind it.

(2) ... if a topic branch has been fully merged to "master".
    Then you can delete it.  More importantly, you should not
    build on top of it -- other people may already want to
    change things related to the topic as patches against your
    "master", so if you need further changes, it is better to
    fork the topic (perhaps with the same name) afresh from the
    tip of "master".

Let's look at this example:

               o---o---o---o---o---o---o---o---o---o "next"
              /       /           /           /
             /   a---a---b A     /           /
            /   /             /           /
           /   /   c---c---c---c B       /
          /   /   /             \       /
         /   /   /   b---b C     \     /
        /   /   /   /             \   /
    ---o---o---o---o---o---o---o---o---o---o---o "master"


A, B and C are topic branches.

 * A has one fix since it was merged up to "next".

 * B has finished.  It has been fully merged up to "master" and "next",
   and is ready to be deleted.

 * C has not merged to "next" at all.

We would want to allow C to be rebased, refuse A, and encourage
B to be deleted.

To compute (1):

	git rev-list ^master ^topic next
	git rev-list ^master        next

	if these match, topic has not merged in next at all.

To compute (2):

	git rev-list master..topic

	if this is empty, it is fully merged to "master".

DOC_END
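The DOC_END text above describes checks (1) and (2) as two `git rev-list` comparisons; the sketch below simply runs them interactively for a single branch. The branch name `my/topic` is only an example, and note that the hook itself only guards branches matching `refs/heads/??/*`:

```sh
topic=refs/heads/my/topic   # example topic branch name, not from the change set

# (1) topic has never been merged to "next" if these two listings match
if [ "$(git rev-list ^master "^$topic" next)" = "$(git rev-list ^master next)" ]; then
    echo "$topic has not been merged to next"
fi

# (2) topic is fully merged to "master" if this prints nothing
git rev-list --pretty=oneline ^master "$topic"
```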
@ -0,0 +1,7 @@
#!/usr/bin/env bash
ORI_DIR=`pwd`
SHELL_FOLDER=$(cd "$(dirname "$0")";pwd)
cd "$ORI_DIR"
for i in `ls "$SHELL_FOLDER/pre-receive.d"`; do
    sh "$SHELL_FOLDER/pre-receive.d/$i"
done
@ -0,0 +1,2 @@
#!/usr/bin/env bash
"$GITEA_ROOT/gitea" hook --config="$GITEA_ROOT/$GITEA_CONF" pre-receive
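The two files above form the delegating hook layout: the `pre-receive` wrapper runs every file it finds in `pre-receive.d/`, and `pre-receive.d/gitea` is the entry that calls Gitea itself. A custom server-side check can sit alongside it; the file name below is made up for illustration, and keep in mind that all scripts in the directory share the hook's single stdin stream:

```sh
#!/bin/sh
# Hypothetical pre-receive.d/block-master-delete
# git feeds one "<old-sha> <new-sha> <refname>" line per pushed ref on stdin.
zero=0000000000000000000000000000000000000000
while read -r oldrev newrev refname; do
    if [ "$refname" = "refs/heads/master" ] && [ "$newrev" = "$zero" ]; then
        echo "*** Deleting master is not allowed" >&2
        exit 1
    fi
done
```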
@ -0,0 +1,36 @@
#!/bin/sh
#
# An example hook script to prepare the commit log message.
# Called by "git commit" with the name of the file that has the
# commit message, followed by the description of the commit
# message's source.  The hook's purpose is to edit the commit
# message file.  If the hook fails with a non-zero status,
# the commit is aborted.
#
# To enable this hook, rename this file to "prepare-commit-msg".

# This hook includes three examples.  The first comments out the
# "Conflicts:" part of a merge commit.
#
# The second includes the output of "git diff --name-status -r"
# into the message, just before the "git status" output.  It is
# commented because it doesn't cope with --amend or with squashed
# commits.
#
# The third example adds a Signed-off-by line to the message, that can
# still be edited.  This is rarely a good idea.

case "$2,$3" in
  merge,)
    /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;;

# ,|template,)
#   /usr/bin/perl -i.bak -pe '
#      print "\n" . `git diff --cached --name-status -r`
#      if /^#/ && $first++ == 0' "$1" ;;

  *) ;;
esac

# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
@ -0,0 +1,7 @@
#!/usr/bin/env bash
ORI_DIR=`pwd`
SHELL_FOLDER=$(cd "$(dirname "$0")";pwd)
cd "$ORI_DIR"
for i in `ls "$SHELL_FOLDER/update.d"`; do
    sh "$SHELL_FOLDER/update.d/$i" $1 $2 $3
done
@ -0,0 +1,2 @@
#!/usr/bin/env bash
"$GITEA_ROOT/gitea" hook --config="$GITEA_ROOT/$GITEA_CONF" update $1 $2 $3
@ -0,0 +1,128 @@
#!/bin/sh
#
# An example hook script to block unannotated tags from entering.
# Called by "git receive-pack" with arguments: refname sha1-old sha1-new
#
# To enable this hook, rename this file to "update".
#
# Config
# ------
# hooks.allowunannotated
#   This boolean sets whether unannotated tags will be allowed into the
#   repository.  By default they won't be.
# hooks.allowdeletetag
#   This boolean sets whether deleting tags will be allowed in the
#   repository.  By default they won't be.
# hooks.allowmodifytag
#   This boolean sets whether a tag may be modified after creation. By default
#   it won't be.
# hooks.allowdeletebranch
#   This boolean sets whether deleting branches will be allowed in the
#   repository.  By default they won't be.
# hooks.denycreatebranch
#   This boolean sets whether remotely creating branches will be denied
#   in the repository.  By default this is allowed.
#

# --- Command line
refname="$1"
oldrev="$2"
newrev="$3"

# --- Safety check
if [ -z "$GIT_DIR" ]; then
	echo "Don't run this script from the command line." >&2
	echo " (if you want, you could supply GIT_DIR then run" >&2
	echo "  $0 <ref> <oldrev> <newrev>)" >&2
	exit 1
fi

if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
	echo "usage: $0 <ref> <oldrev> <newrev>" >&2
	exit 1
fi

# --- Config
allowunannotated=$(git config --bool hooks.allowunannotated)
allowdeletebranch=$(git config --bool hooks.allowdeletebranch)
denycreatebranch=$(git config --bool hooks.denycreatebranch)
allowdeletetag=$(git config --bool hooks.allowdeletetag)
allowmodifytag=$(git config --bool hooks.allowmodifytag)

# check for no description
projectdesc=$(sed -e '1q' "$GIT_DIR/description")
case "$projectdesc" in
"Unnamed repository"* | "")
	echo "*** Project description file hasn't been set" >&2
	exit 1
	;;
esac

# --- Check types
# if $newrev is 0000...0000, it's a commit to delete a ref.
zero="0000000000000000000000000000000000000000"
if [ "$newrev" = "$zero" ]; then
	newrev_type=delete
else
	newrev_type=$(git cat-file -t $newrev)
fi

case "$refname","$newrev_type" in
	refs/tags/*,commit)
		# un-annotated tag
		short_refname=${refname##refs/tags/}
		if [ "$allowunannotated" != "true" ]; then
			echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2
			echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2
			exit 1
		fi
		;;
	refs/tags/*,delete)
		# delete tag
		if [ "$allowdeletetag" != "true" ]; then
			echo "*** Deleting a tag is not allowed in this repository" >&2
			exit 1
		fi
		;;
	refs/tags/*,tag)
		# annotated tag
		if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1
		then
			echo "*** Tag '$refname' already exists." >&2
			echo "*** Modifying a tag is not allowed in this repository." >&2
			exit 1
		fi
		;;
	refs/heads/*,commit)
		# branch
		if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then
			echo "*** Creating a branch is not allowed in this repository" >&2
			exit 1
		fi
		;;
	refs/heads/*,delete)
		# delete branch
		if [ "$allowdeletebranch" != "true" ]; then
			echo "*** Deleting a branch is not allowed in this repository" >&2
			exit 1
		fi
		;;
	refs/remotes/*,commit)
		# tracking branch
		;;
	refs/remotes/*,delete)
		# delete tracking branch
		if [ "$allowdeletebranch" != "true" ]; then
			echo "*** Deleting a tracking branch is not allowed in this repository" >&2
			exit 1
		fi
		;;
	*)
		# Anything else (is there anything else?)
		echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2
		exit 1
		;;
esac

# --- Finished
exit 0
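All of the policy switches documented at the top of this update hook are plain boolean git config keys read with `git config --bool`. A sketch of setting them on the server-side repository that owns the hook (the values shown are one possible policy, not the defaults):

```sh
# Run inside the bare repository whose hooks/update this is.
git config hooks.allowunannotated false   # reject lightweight (un-annotated) tags
git config hooks.allowdeletetag false     # reject tag deletion
git config hooks.allowmodifytag false     # reject moving existing tags
git config hooks.allowdeletebranch false  # reject branch deletion
git config hooks.denycreatebranch true    # reject creating new branches remotely
```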
@ -0,0 +1,6 @@
# git ls-files --others --exclude-from=.git/info/exclude
# Lines that start with '#' are comments.
# For a project mostly in C, the following would be a good set of
# exclude patterns (uncomment them if you want to use them):
# *.[oa]
# *~
@ -0,0 +1,9 @@
ebf146f803fccbc1471ef01d8fa0fe12c14e61a5 refs/heads/Grüßen
3a810dbf6b96afaa8c5f69a8b6ec1dabfca7368b refs/heads/Plus+Is+Not+Space
3aa73c3499bff049a352b4e265575373e964b89a refs/heads/master
ebf146f803fccbc1471ef01d8fa0fe12c14e61a5 refs/heads/ГлавнаяВетка
ebf146f803fccbc1471ef01d8fa0fe12c14e61a5 refs/heads/а/б/в
28d579e4920fbf4f66e71dab3e779d9fbf41422a refs/heads/ブランチ
ebf146f803fccbc1471ef01d8fa0fe12c14e61a5 refs/tags/Ё/人
ebf146f803fccbc1471ef01d8fa0fe12c14e61a5 refs/tags/Тэг
28d579e4920fbf4f66e71dab3e779d9fbf41422a refs/tags/タグ
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1 @@

@ -0,0 +1 @@
ebf146f803fccbc1471ef01d8fa0fe12c14e61a5
@ -0,0 +1 @@
3a810dbf6b96afaa8c5f69a8b6ec1dabfca7368b
@ -0,0 +1 @@
3aa73c3499bff049a352b4e265575373e964b89a
@ -0,0 +1 @@
ebf146f803fccbc1471ef01d8fa0fe12c14e61a5
@ -0,0 +1 @@
ebf146f803fccbc1471ef01d8fa0fe12c14e61a5
@ -0,0 +1 @@
28d579e4920fbf4f66e71dab3e779d9fbf41422a
@ -0,0 +1 @@
ebf146f803fccbc1471ef01d8fa0fe12c14e61a5
@ -0,0 +1 @@
ebf146f803fccbc1471ef01d8fa0fe12c14e61a5
@ -0,0 +1 @@
28d579e4920fbf4f66e71dab3e779d9fbf41422a
@ -223,6 +223,22 @@ func loginUserWithPassword(t testing.TB, userName, password string) *TestSession
|
||||||
return session
|
return session
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func getTokenForLoggedInUser(t testing.TB, session *TestSession) string {
|
||||||
|
req := NewRequest(t, "GET", "/user/settings/applications")
|
||||||
|
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||||
|
doc := NewHTMLParser(t, resp.Body)
|
||||||
|
req = NewRequestWithValues(t, "POST", "/user/settings/applications", map[string]string{
|
||||||
|
"_csrf": doc.GetCSRF(),
|
||||||
|
"name": "api-testing-token",
|
||||||
|
})
|
||||||
|
resp = session.MakeRequest(t, req, http.StatusFound)
|
||||||
|
req = NewRequest(t, "GET", "/user/settings/applications")
|
||||||
|
resp = session.MakeRequest(t, req, http.StatusOK)
|
||||||
|
htmlDoc := NewHTMLParser(t, resp.Body)
|
||||||
|
token := htmlDoc.doc.Find(".ui.info p").Text()
|
||||||
|
return token
|
||||||
|
}
|
||||||
|
|
||||||
func NewRequest(t testing.TB, method, urlStr string) *http.Request {
|
func NewRequest(t testing.TB, method, urlStr string) *http.Request {
|
||||||
return NewRequestWithBody(t, method, urlStr, nil)
|
return NewRequestWithBody(t, method, urlStr, nil)
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,178 @@
// Copyright 2018 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package integrations

import (
	"net/http"
	"path"
	"testing"

	"github.com/stretchr/testify/assert"
)

func testSrcRouteRedirect(t *testing.T, session *TestSession, user, repo, route, expectedLocation string, expectedStatus int) {
	prefix := path.Join("/", user, repo, "src")

	// Make request
	req := NewRequest(t, "GET", path.Join(prefix, route))
	resp := session.MakeRequest(t, req, http.StatusFound)

	// Check Location header
	location := resp.HeaderMap.Get("Location")
	assert.Equal(t, path.Join(prefix, expectedLocation), location)

	// Perform redirect
	req = NewRequest(t, "GET", location)
	resp = session.MakeRequest(t, req, expectedStatus)
}

func setDefaultBranch(t *testing.T, session *TestSession, user, repo, branch string) {
	location := path.Join("/", user, repo, "settings/branches")
	csrf := GetCSRF(t, session, location)
	req := NewRequestWithValues(t, "POST", location, map[string]string{
		"_csrf":  csrf,
		"action": "default_branch",
		"branch": branch,
	})
	session.MakeRequest(t, req, http.StatusFound)
}

func TestNonasciiBranches(t *testing.T) {
	testRedirects := []struct {
		from   string
		to     string
		status int
	}{
		// Branches
		{
			from:   "master",
			to:     "branch/master",
			status: http.StatusOK,
		},
		{
			from:   "master/README.md",
			to:     "branch/master/README.md",
			status: http.StatusOK,
		},
		{
			from:   "master/badfile",
			to:     "branch/master/badfile",
			status: http.StatusNotFound, // it does not exists
		},
		{
			from:   "ГлавнаяВетка",
			to:     "branch/%d0%93%d0%bb%d0%b0%d0%b2%d0%bd%d0%b0%d1%8f%d0%92%d0%b5%d1%82%d0%ba%d0%b0",
			status: http.StatusOK,
		},
		{
			from:   "а/б/в",
			to:     "branch/%d0%b0/%d0%b1/%d0%b2",
			status: http.StatusOK,
		},
		{
			from:   "Grüßen/README.md",
			to:     "branch/Gr%c3%bc%c3%9fen/README.md",
			status: http.StatusOK,
		},
		{
			from:   "Plus+Is+Not+Space",
			to:     "branch/Plus+Is+Not+Space",
			status: http.StatusOK,
		},
		{
			from:   "Plus+Is+Not+Space/Файл.md",
			to:     "branch/Plus+Is+Not+Space/%d0%a4%d0%b0%d0%b9%d0%bb.md",
			status: http.StatusOK,
		},
		{
			from:   "Plus+Is+Not+Space/and+it+is+valid.md",
			to:     "branch/Plus+Is+Not+Space/and+it+is+valid.md",
			status: http.StatusOK,
		},
		{
			from:   "ブランチ",
			to:     "branch/%e3%83%96%e3%83%a9%e3%83%b3%e3%83%81",
			status: http.StatusOK,
		},
		// Tags
		{
			from:   "Тэг",
			to:     "tag/%d0%a2%d1%8d%d0%b3",
			status: http.StatusOK,
		},
		{
			from:   "Ё/人",
			to:     "tag/%d0%81/%e4%ba%ba",
			status: http.StatusOK,
		},
		{
			from:   "タグ",
			to:     "tag/%e3%82%bf%e3%82%b0",
			status: http.StatusOK,
		},
		{
			from:   "タグ/ファイル.md",
			to:     "tag/%e3%82%bf%e3%82%b0/%e3%83%95%e3%82%a1%e3%82%a4%e3%83%ab.md",
			status: http.StatusOK,
		},
		// Files
		{
			from:   "README.md",
			to:     "branch/Plus+Is+Not+Space/README.md",
			status: http.StatusOK,
		},
		{
			from:   "Файл.md",
			to:     "branch/Plus+Is+Not+Space/%d0%a4%d0%b0%d0%b9%d0%bb.md",
			status: http.StatusOK,
		},
		{
			from:   "ファイル.md",
			to:     "branch/Plus+Is+Not+Space/%e3%83%95%e3%82%a1%e3%82%a4%e3%83%ab.md",
			status: http.StatusNotFound, // it's not on default branch
		},
		// Same but url-encoded (few tests)
		{
			from:   "%E3%83%96%E3%83%A9%E3%83%B3%E3%83%81",
			to:     "branch/%e3%83%96%e3%83%a9%e3%83%b3%e3%83%81",
			status: http.StatusOK,
		},
		{
			from:   "%E3%82%BF%E3%82%b0",
			to:     "tag/%e3%82%bf%e3%82%b0",
			status: http.StatusOK,
		},
		{
			from:   "%D0%A4%D0%B0%D0%B9%D0%BB.md",
			to:     "branch/Plus+Is+Not+Space/%d0%a4%d0%b0%d0%b9%d0%bb.md",
			status: http.StatusOK,
		},
		{
			from:   "%D0%81%2F%E4%BA%BA",
			to:     "tag/%d0%81/%e4%ba%ba",
			status: http.StatusOK,
		},
		{
			from:   "Ё%2F%E4%BA%BA",
			to:     "tag/%d0%81/%e4%ba%ba",
			status: http.StatusOK,
		},
	}

	prepareTestEnv(t)

	user := "user2"
	repo := "utf8"
	session := loginUser(t, user)

	setDefaultBranch(t, session, user, repo, "Plus+Is+Not+Space")

	for _, test := range testRedirects {
		testSrcRouteRedirect(t, session, user, repo, test.from, test.to, test.status)
	}

	setDefaultBranch(t, session, user, repo, "master")
}
@ -33,6 +33,7 @@ func doTestRepoCommitWithStatus(t *testing.T, state string, classes ...string) {
	prepareTestEnv(t)

	session := loginUser(t, "user2")
+	token := getTokenForLoggedInUser(t, session)

	// Request repository commits page
	req := NewRequest(t, "GET", "/user2/repo1/commits/branch/master")
@ -45,7 +46,7 @@ func doTestRepoCommitWithStatus(t *testing.T, state string, classes ...string) {
	assert.NotEmpty(t, commitURL)

	// Call API to add status for commit
-	req = NewRequestWithJSON(t, "POST", "/api/v1/repos/user2/repo1/statuses/"+path.Base(commitURL),
+	req = NewRequestWithJSON(t, "POST", "/api/v1/repos/user2/repo1/statuses/"+path.Base(commitURL)+"?token="+token,
		api.CreateStatusOption{
			State:     api.StatusState(state),
			TargetURL: "http://test.ci/",
@ -524,12 +524,14 @@ func CommitRepoAction(opts CommitRepoActionOptions) error {
	}

	refName := git.RefEndName(opts.RefFullName)
-	if repo.IsBare && refName != repo.DefaultBranch {
+
+	// Change default branch and bare status only if pushed ref is non-empty branch.
+	if repo.IsBare && opts.NewCommitID != git.EmptySHA && strings.HasPrefix(opts.RefFullName, git.BranchPrefix) {
		repo.DefaultBranch = refName
+		repo.IsBare = false
	}

	// Change repository bare status and update last updated time.
-	repo.IsBare = repo.IsBare && opts.Commits.Len <= 0
	if err = UpdateRepository(repo, false); err != nil {
		return fmt.Errorf("UpdateRepository: %v", err)
	}
@ -74,7 +74,7 @@ func (protectBranch *ProtectedBranch) CanUserMerge(userID int64) bool {
		return true
	}

-	if len(protectBranch.WhitelistTeamIDs) == 0 {
+	if len(protectBranch.MergeWhitelistTeamIDs) == 0 {
		return false
	}
@ -184,6 +184,24 @@ func (repo *Repository) IsProtectedBranch(branchName string, doer *User) (bool,
		BranchName: branchName,
	}

+	has, err := x.Exist(protectedBranch)
+	if err != nil {
+		return true, err
+	}
+	return has, nil
+}
+
+// IsProtectedBranchForPush checks if branch is protected for push
+func (repo *Repository) IsProtectedBranchForPush(branchName string, doer *User) (bool, error) {
+	if doer == nil {
+		return true, nil
+	}
+
+	protectedBranch := &ProtectedBranch{
+		RepoID:     repo.ID,
+		BranchName: branchName,
+	}
+
	has, err := x.Get(protectedBranch)
	if err != nil {
		return true, err
@ -74,3 +74,38 @@
  type: 1
  config: "{}"
  created_unix: 1524304355
+
+-
+  id: 12
+  repo_id: 33
+  type: 1
+  config: "{}"
+  created_unix: 1535593231
+
+-
+  id: 13
+  repo_id: 33
+  type: 2
+  config: "{\"EnableTimetracker\":true,\"AllowOnlyContributorsToTrackTime\":true}"
+  created_unix: 1535593231
+
+-
+  id: 14
+  repo_id: 33
+  type: 3
+  config: "{\"IgnoreWhitespaceConflicts\":false,\"AllowMerge\":true,\"AllowRebase\":true,\"AllowSquash\":true}"
+  created_unix: 1535593231
+
+-
+  id: 15
+  repo_id: 33
+  type: 4
+  config: "{}"
+  created_unix: 1535593231
+
+-
+  id: 16
+  repo_id: 33
+  type: 5
+  config: "{}"
+  created_unix: 1535593231
@ -400,3 +400,10 @@
  num_forks: 0
  num_issues: 0
  is_mirror: false
+
+-
+  id: 33
+  owner_id: 2
+  lower_name: utf8
+  name: utf8
+  is_private: false
@ -27,7 +27,7 @@
  is_admin: false
  avatar: avatar2
  avatar_email: user2@example.com
-  num_repos: 5
+  num_repos: 6
  num_stars: 2
  num_followers: 2
  num_following: 1
@ -360,7 +360,7 @@ func verifySign(s *packet.Signature, h hash.Hash, k *GPGKey) error {

// ParseCommitWithSignature check if signature is good against keystore.
func ParseCommitWithSignature(c *git.Commit) *CommitVerification {
-	if c.Signature != nil {
+	if c.Signature != nil && c.Committer != nil {
		//Parsing signature
		sig, err := extractSignature(c.Signature.Signature)
		if err != nil { //Skipping failed to extract sign
@ -950,7 +950,7 @@ func newIssue(e *xorm.Session, doer *User, opts NewIssueOptions) (err error) {

	// Insert the assignees
	for _, assigneeID := range opts.AssigneeIDs {
-		err = opts.Issue.changeAssignee(e, doer, assigneeID)
+		err = opts.Issue.changeAssignee(e, doer, assigneeID, true)
		if err != nil {
			return err
		}
@ -1283,7 +1283,7 @@ func getParticipantsByIssueID(e Engine, issueID int64) ([]*User, error) {
		And("`comment`.type = ?", CommentTypeComment).
		And("`user`.is_active = ?", true).
		And("`user`.prohibit_login = ?", false).
-		Join("INNER", "user", "`user`.id = `comment`.poster_id").
+		Join("INNER", "`user`", "`user`.id = `comment`.poster_id").
		Distinct("poster_id").
		Find(&userIDs); err != nil {
		return nil, fmt.Errorf("get poster IDs: %v", err)
@ -134,14 +134,14 @@ func (issue *Issue) ChangeAssignee(doer *User, assigneeID int64) (err error) {
		return err
	}

-	if err := issue.changeAssignee(sess, doer, assigneeID); err != nil {
+	if err := issue.changeAssignee(sess, doer, assigneeID, false); err != nil {
		return err
	}

	return sess.Commit()
}

-func (issue *Issue) changeAssignee(sess *xorm.Session, doer *User, assigneeID int64) (err error) {
+func (issue *Issue) changeAssignee(sess *xorm.Session, doer *User, assigneeID int64, isCreate bool) (err error) {

	// Update the assignee
	removed, err := updateIssueAssignee(sess, issue, assigneeID)
@ -161,6 +161,10 @@ func (issue *Issue) changeAssignee(sess *xorm.Session, doer *User, assigneeID in

	mode, _ := accessLevel(sess, doer.ID, issue.Repo)
	if issue.IsPull {
+		// if pull request is in the middle of creation - don't call webhook
+		if isCreate {
+			return nil
+		}
		if err = issue.loadPullRequest(sess); err != nil {
			return fmt.Errorf("loadPullRequest: %v", err)
		}
@ -9,6 +9,11 @@ import "fmt"
// IssueList defines a list of issues
type IssueList []*Issue

+const (
+	// default variables number on IN () in SQL
+	defaultMaxInSize = 50
+)
+
func (issues IssueList) getRepoIDs() []int64 {
	repoIDs := make(map[int64]struct{}, len(issues))
	for _, issue := range issues {
@ -26,12 +31,21 @@ func (issues IssueList) loadRepositories(e Engine) ([]*Repository, error) {

	repoIDs := issues.getRepoIDs()
	repoMaps := make(map[int64]*Repository, len(repoIDs))
+	var left = len(repoIDs)
+	for left > 0 {
+		var limit = defaultMaxInSize
+		if left < limit {
+			limit = left
+		}
		err := e.
-			In("id", repoIDs).
+			In("id", repoIDs[:limit]).
			Find(&repoMaps)
		if err != nil {
			return nil, fmt.Errorf("find repository: %v", err)
		}
+		left = left - limit
+		repoIDs = repoIDs[limit:]
+	}

	for _, issue := range issues {
		issue.Repo = repoMaps[issue.RepoID]
|

	posterIDs := issues.getPosterIDs()
	posterMaps := make(map[int64]*User, len(posterIDs))
+	var left = len(posterIDs)
+	for left > 0 {
+		var limit = defaultMaxInSize
+		if left < limit {
+			limit = left
+		}
		err := e.
-			In("id", posterIDs).
+			In("id", posterIDs[:limit]).
			Find(&posterMaps)
		if err != nil {
			return err
		}
+		left = left - limit
+		posterIDs = posterIDs[limit:]
+	}

	for _, issue := range issues {
		if issue.PosterID <= 0 {
@ -99,24 +122,35 @@ func (issues IssueList) loadLabels(e Engine) error {
	}

	var issueLabels = make(map[int64][]*Label, len(issues)*3)
+	var issueIDs = issues.getIssueIDs()
+	var left = len(issueIDs)
+	for left > 0 {
+		var limit = defaultMaxInSize
+		if left < limit {
+			limit = left
+		}
		rows, err := e.Table("label").
			Join("LEFT", "issue_label", "issue_label.label_id = label.id").
-			In("issue_label.issue_id", issues.getIssueIDs()).
+			In("issue_label.issue_id", issueIDs[:limit]).
			Asc("label.name").
			Rows(new(LabelIssue))
		if err != nil {
			return err
		}
-		defer rows.Close()

		for rows.Next() {
			var labelIssue LabelIssue
			err = rows.Scan(&labelIssue)
			if err != nil {
+				rows.Close()
				return err
			}
			issueLabels[labelIssue.IssueLabel.IssueID] = append(issueLabels[labelIssue.IssueLabel.IssueID], labelIssue.Label)
		}
+		rows.Close()
+		left = left - limit
+		issueIDs = issueIDs[limit:]
+	}

	for _, issue := range issues {
		issue.Labels = issueLabels[issue.ID]
|
	}

	milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs))
+	var left = len(milestoneIDs)
+	for left > 0 {
+		var limit = defaultMaxInSize
+		if left < limit {
+			limit = left
+		}
		err := e.
-			In("id", milestoneIDs).
+			In("id", milestoneIDs[:limit]).
			Find(&milestoneMaps)
		if err != nil {
			return err
		}
+		left = left - limit
+		milestoneIDs = milestoneIDs[limit:]
+	}

	for _, issue := range issues {
		issue.Milestone = milestoneMaps[issue.MilestoneID]
@ -165,24 +208,36 @@ func (issues IssueList) loadAssignees(e Engine) error {
	}

	var assignees = make(map[int64][]*User, len(issues))
+	var issueIDs = issues.getIssueIDs()
+	var left = len(issueIDs)
+	for left > 0 {
+		var limit = defaultMaxInSize
+		if left < limit {
+			limit = left
+		}
		rows, err := e.Table("issue_assignees").
-			Join("INNER", "user", "`user`.id = `issue_assignees`.assignee_id").
+			Join("INNER", "`user`", "`user`.id = `issue_assignees`.assignee_id").
-			In("`issue_assignees`.issue_id", issues.getIssueIDs()).
+			In("`issue_assignees`.issue_id", issueIDs[:limit]).
			Rows(new(AssigneeIssue))
		if err != nil {
			return err
		}
-		defer rows.Close()

		for rows.Next() {
			var assigneeIssue AssigneeIssue
			err = rows.Scan(&assigneeIssue)
			if err != nil {
+				rows.Close()
				return err
			}

			assignees[assigneeIssue.IssueAssignee.IssueID] = append(assignees[assigneeIssue.IssueAssignee.IssueID], assigneeIssue.Assignee)
		}
+		rows.Close()
+
+		left = left - limit
+		issueIDs = issueIDs[limit:]
+	}

	for _, issue := range issues {
		issue.Assignees = assignees[issue.ID]
@ -207,23 +262,34 @@ func (issues IssueList) loadPullRequests(e Engine) error {
	}

	pullRequestMaps := make(map[int64]*PullRequest, len(issuesIDs))
+	var left = len(issuesIDs)
+	for left > 0 {
+		var limit = defaultMaxInSize
+		if left < limit {
+			limit = left
+		}
		rows, err := e.
-			In("issue_id", issuesIDs).
+			In("issue_id", issuesIDs[:limit]).
			Rows(new(PullRequest))
		if err != nil {
			return err
		}
-		defer rows.Close()

		for rows.Next() {
			var pr PullRequest
			err = rows.Scan(&pr)
			if err != nil {
+				rows.Close()
				return err
			}
			pullRequestMaps[pr.IssueID] = &pr
		}

+		rows.Close()
+		left = left - limit
+		issuesIDs = issuesIDs[limit:]
+	}

	for _, issue := range issues {
		issue.PullRequest = pullRequestMaps[issue.ID]
	}
@ -236,24 +302,36 @@ func (issues IssueList) loadAttachments(e Engine) (err error) {
	}

	var attachments = make(map[int64][]*Attachment, len(issues))
+	var issuesIDs = issues.getIssueIDs()
+	var left = len(issuesIDs)
+	for left > 0 {
+		var limit = defaultMaxInSize
+		if left < limit {
+			limit = left
+		}
		rows, err := e.Table("attachment").
			Join("INNER", "issue", "issue.id = attachment.issue_id").
-			In("issue.id", issues.getIssueIDs()).
+			In("issue.id", issuesIDs[:limit]).
			Rows(new(Attachment))
		if err != nil {
			return err
		}
-		defer rows.Close()

		for rows.Next() {
			var attachment Attachment
			err = rows.Scan(&attachment)
			if err != nil {
+				rows.Close()
				return err
			}
			attachments[attachment.IssueID] = append(attachments[attachment.IssueID], &attachment)
		}

+		rows.Close()
+		left = left - limit
+		issuesIDs = issuesIDs[limit:]
+	}

	for _, issue := range issues {
		issue.Attachments = attachments[issue.ID]
	}
@ -266,23 +344,34 @@ func (issues IssueList) loadComments(e Engine) (err error) {
	}

	var comments = make(map[int64][]*Comment, len(issues))
+	var issuesIDs = issues.getIssueIDs()
+	var left = len(issuesIDs)
+	for left > 0 {
+		var limit = defaultMaxInSize
+		if left < limit {
+			limit = left
+		}
		rows, err := e.Table("comment").
			Join("INNER", "issue", "issue.id = comment.issue_id").
-			In("issue.id", issues.getIssueIDs()).
+			In("issue.id", issuesIDs[:limit]).
			Rows(new(Comment))
		if err != nil {
			return err
		}
-		defer rows.Close()

		for rows.Next() {
			var comment Comment
			err = rows.Scan(&comment)
			if err != nil {
+				rows.Close()
				return err
			}
			comments[comment.IssueID] = append(comments[comment.IssueID], &comment)
		}
+		rows.Close()
+		left = left - limit
+		issuesIDs = issuesIDs[limit:]
+	}

	for _, issue := range issues {
		issue.Comments = comments[issue.ID]
@ -307,26 +396,36 @@ func (issues IssueList) loadTotalTrackedTimes(e Engine) (err error) {
		}
	}

+	var left = len(ids)
+	for left > 0 {
+		var limit = defaultMaxInSize
+		if left < limit {
+			limit = left
+		}
+
		// select issue_id, sum(time) from tracked_time where issue_id in (<issue ids in current page>) group by issue_id
		rows, err := e.Table("tracked_time").
			Select("issue_id, sum(time) as time").
-			In("issue_id", ids).
+			In("issue_id", ids[:limit]).
			GroupBy("issue_id").
			Rows(new(totalTimesByIssue))
		if err != nil {
			return err
		}

-		defer rows.Close()

		for rows.Next() {
			var totalTime totalTimesByIssue
			err = rows.Scan(&totalTime)
			if err != nil {
+				rows.Close()
				return err
			}
			trackedTimes[totalTime.IssueID] = totalTime.Time
		}
+		rows.Close()
+		left = left - limit
+		ids = ids[limit:]
+	}

	for _, issue := range issues {
		issue.TotalTrackedTime = trackedTimes[issue.ID]
@ -1,4 +1,5 @@
// Copyright 2016 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

@ -87,7 +88,9 @@ func mailIssueCommentToParticipants(e Engine, issue *Issue, doer *User, content
		names = append(names, participants[i].Name)
	}

-	SendIssueCommentMail(issue, doer, content, comment, tos)
+	for _, to := range tos {
+		SendIssueCommentMail(issue, doer, content, comment, []string{to})
+	}

	// Mail mentioned people and exclude watchers.
	names = append(names, doer.Name)
@ -99,7 +102,12 @@ func mailIssueCommentToParticipants(e Engine, issue *Issue, doer *User, content

		tos = append(tos, mentions[i])
	}
-	SendIssueMentionMail(issue, doer, content, comment, getUserEmailsByNames(e, tos))
+
+	emails := getUserEmailsByNames(e, tos)
+
+	for _, to := range emails {
+		SendIssueMentionMail(issue, doer, content, comment, []string{to})
+	}
+
	return nil
}
@ -67,7 +67,7 @@ func getIssueWatchers(e Engine, issueID int64) (watches []*IssueWatch, err error
		Where("`issue_watch`.issue_id = ?", issueID).
		And("`user`.is_active = ?", true).
		And("`user`.prohibit_login = ?", false).
-		Join("INNER", "user", "`user`.id = `issue_watch`.user_id").
+		Join("INNER", "`user`", "`user`.id = `issue_watch`.user_id").
		Find(&watches)
	return
}
@ -120,6 +120,14 @@ func addMultipleAssignees(x *xorm.Engine) error {
		}
	}

+	// Commit and begin new transaction for dropping columns
+	if err := sess.Commit(); err != nil {
+		return err
+	}
+	if err := sess.Begin(); err != nil {
+		return err
+	}
+
	if err := dropTableColumns(sess, "issue", "assignee_id"); err != nil {
		return err
	}
@ -73,6 +73,14 @@ func moveTeamUnitsToTeamUnitTable(x *xorm.Engine) error {
		}
	}

+	// Commit and begin new transaction for dropping columns
+	if err := sess.Commit(); err != nil {
+		return err
+	}
+	if err := sess.Begin(); err != nil {
+		return err
+	}
+
	if err := dropTableColumns(sess, "team", "unit_types"); err != nil {
		return err
	}
@ -383,7 +383,7 @@ func GetOwnedOrgsByUserIDDesc(userID int64, desc string) ([]*User, error) {
func GetOrgUsersByUserID(uid int64, all bool) ([]*OrgUser, error) {
	ous := make([]*OrgUser, 0, 10)
	sess := x.
-		Join("LEFT", "user", "`org_user`.org_id=`user`.id").
+		Join("LEFT", "`user`", "`org_user`.org_id=`user`.id").
		Where("`org_user`.uid=?", uid)
	if !all {
		// Only show public organizations
@ -454,7 +454,7 @@ func AddOrgUser(orgID, uid int64) error {
func removeOrgUser(sess *xorm.Session, orgID, userID int64) error {
	ou := new(OrgUser)

-	has, err := x.
+	has, err := sess.
		Where("uid=?", userID).
		And("org_id=?", orgID).
		Get(ou)
@ -575,7 +575,7 @@ func (org *User) getUserTeams(e Engine, userID int64, cols ...string) ([]*Team,
	return teams, e.
		Where("`team_user`.org_id = ?", org.ID).
		Join("INNER", "team_user", "`team_user`.team_id = team.id").
-		Join("INNER", "user", "`user`.id=team_user.uid").
+		Join("INNER", "`user`", "`user`.id=team_user.uid").
		And("`team_user`.uid = ?", userID).
		Asc("`user`.name").
		Cols(cols...).
@ -88,6 +88,7 @@ func (r *Release) APIFormat() *api.Release {
		ID:      r.ID,
		TagName: r.TagName,
		Target:  r.Target,
+		Title:   r.Title,
		Note:    r.Note,
		URL:     r.APIURL(),
		TarURL:  r.TarURL(),
@ -1347,6 +1347,12 @@ func createRepository(e *xorm.Session, doer, u *User, repo *Repository) (err err
				Type:   tp,
				Config: &IssuesConfig{EnableTimetracker: setting.Service.DefaultEnableTimetracking, AllowOnlyContributorsToTrackTime: setting.Service.DefaultAllowOnlyContributorsToTrackTime},
			})
+		} else if tp == UnitTypePullRequests {
+			units = append(units, RepoUnit{
+				RepoID: repo.ID,
+				Type:   tp,
+				Config: &PullRequestsConfig{AllowMerge: true, AllowRebase: true, AllowSquash: true},
+			})
		} else {
			units = append(units, RepoUnit{
				RepoID: repo.ID,
@ -1401,7 +1407,7 @@ func createRepository(e *xorm.Session, doer, u *User, repo *Repository) (err err

// CreateRepository creates a repository for the user/organization u.
func CreateRepository(doer, u *User, opts CreateRepoOptions) (_ *Repository, err error) {
-	if !u.CanCreateRepo() {
+	if !doer.IsAdmin && !u.CanCreateRepo() {
		return nil, ErrReachLimitOfRepo{u.MaxRepoCreation}
	}

@ -1846,6 +1852,9 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
	if _, err = sess.In("issue_id", issueIDs).Delete(&IssueWatch{}); err != nil {
		return err
	}
+	if _, err = sess.In("issue_id", issueIDs).Delete(&Stopwatch{}); err != nil {
+		return err
+	}

	attachments := make([]*Attachment, 0, 5)
	if err = sess.
|
func GetRepositoryByOwnerAndName(ownerName, repoName string) (*Repository, error) {
	var repo Repository
	has, err := x.Select("repository.*").
-		Join("INNER", "user", "`user`.id = repository.owner_id").
+		Join("INNER", "`user`", "`user`.id = repository.owner_id").
		Where("repository.lower_name = ?", strings.ToLower(repoName)).
		And("`user`.lower_name = ?", strings.ToLower(ownerName)).
		Get(&repo)
@ -147,10 +147,10 @@ func TestSearchRepositoryByName(t *testing.T) {
			count: 14},
		{name: "AllPublic/PublicRepositoriesOfUserIncludingCollaborative",
			opts:  &SearchRepoOptions{Page: 1, PageSize: 10, OwnerID: 15, AllPublic: true},
-			count: 16},
+			count: 17},
		{name: "AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborative",
			opts:  &SearchRepoOptions{Page: 1, PageSize: 10, OwnerID: 15, Private: true, AllPublic: true},
-			count: 20},
+			count: 21},
		{name: "AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborativeByName",
			opts:  &SearchRepoOptions{Keyword: "test", Page: 1, PageSize: 10, OwnerID: 15, Private: true, AllPublic: true},
			count: 13},
|
||||||
count: 11},
|
count: 11},
|
||||||
{name: "AllPublic/PublicRepositoriesOfOrganization",
|
{name: "AllPublic/PublicRepositoriesOfOrganization",
|
||||||
opts: &SearchRepoOptions{Page: 1, PageSize: 10, OwnerID: 17, AllPublic: true, Collaborate: util.OptionalBoolFalse},
|
opts: &SearchRepoOptions{Page: 1, PageSize: 10, OwnerID: 17, AllPublic: true, Collaborate: util.OptionalBoolFalse},
|
||||||
count: 16},
|
count: 17},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, testCase := range testCases {
|
for _, testCase := range testCases {
|
||||||
|
|
|
@@ -9,6 +9,7 @@ import (
 	"time"

 	"code.gitea.io/git"
+	"code.gitea.io/gitea/modules/cache"
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/process"
 	"code.gitea.io/gitea/modules/setting"

@@ -180,6 +181,16 @@ func (m *Mirror) runSync() bool {
 		}
 	}

+	branches, err := m.Repo.GetBranches()
+	if err != nil {
+		log.Error(4, "GetBranches: %v", err)
+		return false
+	}
+
+	for i := range branches {
+		cache.Remove(m.Repo.GetCommitsCountCacheKey(branches[i].Name, true))
+	}
+
 	m.UpdatedUnix = util.TimeStampNow()
 	return true
 }
@@ -54,7 +54,7 @@ func getWatchers(e Engine, repoID int64) ([]*Watch, error) {
 	return watches, e.Where("`watch`.repo_id=?", repoID).
 		And("`user`.is_active=?", true).
 		And("`user`.prohibit_login=?", false).
-		Join("INNER", "user", "`user`.id = `watch`.user_id").
+		Join("INNER", "`user`", "`user`.id = `watch`.user_id").
 		Find(&watches)
 }
@@ -374,9 +374,9 @@ func (u *User) GetFollowers(page int) ([]*User, error) {
 		Limit(ItemsPerPage, (page-1)*ItemsPerPage).
 		Where("follow.follow_id=?", u.ID)
 	if setting.UsePostgreSQL {
-		sess = sess.Join("LEFT", "follow", `"user".id=follow.user_id`)
+		sess = sess.Join("LEFT", "follow", "`user`.id=follow.user_id")
 	} else {
-		sess = sess.Join("LEFT", "follow", "user.id=follow.user_id")
+		sess = sess.Join("LEFT", "follow", "`user`.id=follow.user_id")
 	}
 	return users, sess.Find(&users)
 }

@@ -393,9 +393,9 @@ func (u *User) GetFollowing(page int) ([]*User, error) {
 		Limit(ItemsPerPage, (page-1)*ItemsPerPage).
 		Where("follow.user_id=?", u.ID)
 	if setting.UsePostgreSQL {
-		sess = sess.Join("LEFT", "follow", `"user".id=follow.follow_id`)
+		sess = sess.Join("LEFT", "follow", "`user`.id=follow.follow_id")
 	} else {
-		sess = sess.Join("LEFT", "follow", "user.id=follow.follow_id")
+		sess = sess.Join("LEFT", "follow", "`user`.id=follow.follow_id")
 	}
 	return users, sess.Find(&users)
 }

@@ -956,7 +956,7 @@ func deleteUser(e *xorm.Session, u *User) error {
 		Where("watch.user_id = ?", u.ID).Find(&watchedRepoIDs); err != nil {
 		return fmt.Errorf("get all watches: %v", err)
 	}
-	if _, err = e.Decr("num_watches").In("id", watchedRepoIDs).Update(new(Repository)); err != nil {
+	if _, err = e.Decr("num_watches").In("id", watchedRepoIDs).NoAutoTime().Update(new(Repository)); err != nil {
 		return fmt.Errorf("decrease repository num_watches: %v", err)
 	}
 	// ***** END: Watch *****

@@ -966,7 +966,7 @@ func deleteUser(e *xorm.Session, u *User) error {
 	if err = e.Table("star").Cols("star.repo_id").
 		Where("star.uid = ?", u.ID).Find(&starredRepoIDs); err != nil {
 		return fmt.Errorf("get all stars: %v", err)
-	} else if _, err = e.Decr("num_stars").In("id", starredRepoIDs).Update(new(Repository)); err != nil {
+	} else if _, err = e.Decr("num_stars").In("id", starredRepoIDs).NoAutoTime().Update(new(Repository)); err != nil {
 		return fmt.Errorf("decrease repository num_stars: %v", err)
 	}
 	// ***** END: Star *****
@@ -213,6 +213,7 @@ func getDiscordPushPayload(p *api.PushPayload, meta *DiscordMeta) (*DiscordPaylo
 func getDiscordIssuesPayload(p *api.IssuePayload, meta *DiscordMeta) (*DiscordPayload, error) {
 	var text, title string
 	var color int
+	url := fmt.Sprintf("%s/issues/%d", p.Repository.HTMLURL, p.Issue.Index)
 	switch p.Action {
 	case api.HookIssueOpened:
 		title = fmt.Sprintf("[%s] Issue opened: #%d %s", p.Repository.FullName, p.Index, p.Issue.Title)

@@ -268,7 +269,7 @@ func getDiscordIssuesPayload(p *api.IssuePayload, meta *DiscordMeta) (*DiscordPa
 			{
 				Title: title,
 				Description: text,
-				URL: p.Issue.URL,
+				URL: url,
 				Color: color,
 				Author: DiscordEmbedAuthor{
 					Name: p.Sender.UserName,
@@ -63,6 +63,7 @@ func SignedInID(ctx *macaron.Context, sess session.Store) int64 {
 			if err = models.UpdateAccessToken(t); err != nil {
 				log.Error(4, "UpdateAccessToken: %v", err)
 			}
+			ctx.Data["IsApiToken"] = true
 			return t.UID
 		}
 	}

@@ -136,7 +137,7 @@ func SignedInUser(ctx *macaron.Context, sess session.Store) (*models.User, bool)
 				}
 				return nil, false
 			}
+			ctx.Data["IsApiToken"] = true
 			return u, true
 		}
 	}
@@ -74,8 +74,9 @@ func (f *InstallForm) Validate(ctx *macaron.Context, errs binding.Errors) bindin
 type RegisterForm struct {
 	UserName string `binding:"Required;AlphaDashDot;MaxSize(35)"`
 	Email string `binding:"Required;Email;MaxSize(254)"`
-	Password string `binding:"Required;MaxSize(255)"`
+	Password string `binding:"MaxSize(255)"`
 	Retype string
+	Remember bool
 }

 // Validate valideates the fields

@@ -86,6 +87,7 @@ func (f *RegisterForm) Validate(ctx *macaron.Context, errs binding.Errors) bindi
 // SignInForm form for signing in with user/password
 type SignInForm struct {
 	UserName string `binding:"Required;MaxSize(254)"`
+	// TODO remove required from password for SecondFactorAuthentication
 	Password string `binding:"Required;MaxSize(255)"`
 	Remember bool
 }
@@ -59,7 +59,22 @@ func DetectEncoding(content []byte) (string, error) {
 		return "UTF-8", nil
 	}

-	result, err := chardet.NewTextDetector().DetectBest(content)
+	textDetector := chardet.NewTextDetector()
+	var detectContent []byte
+	if len(content) < 1024 {
+		// Check if original content is valid
+		if _, err := textDetector.DetectBest(content); err != nil {
+			return "", err
+		}
+		times := 1024 / len(content)
+		detectContent = make([]byte, 0, times*len(content))
+		for i := 0; i < times; i++ {
+			detectContent = append(detectContent, content...)
+		}
+	} else {
+		detectContent = content
+	}
+	result, err := textDetector.DetectBest(detectContent)
 	if err != nil {
 		return "", err
 	}
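The hunk above repeats short inputs until they reach roughly 1KB before handing them to chardet, since statistical charset detection is unreliable on only a handful of bytes. A minimal standalone sketch of that padding idea, using only the standard library (the helper name and sample input are made up for illustration, this is not Gitea's actual code):

```go
package main

import (
	"bytes"
	"fmt"
)

// padForDetection illustrates the approach in the hunk above: content shorter
// than 1KB is repeated so the charset detector has enough data to work with.
func padForDetection(content []byte) []byte {
	if len(content) == 0 || len(content) >= 1024 {
		return content
	}
	times := 1024 / len(content)
	return bytes.Repeat(content, times)
}

func main() {
	short := []byte("Grüße") // a short non-ASCII sample
	fmt.Printf("%d bytes -> %d bytes\n", len(short), len(padForDetection(short)))
}
```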
@@ -85,9 +85,9 @@ func (r *Repository) CanCreateBranch() bool {
 }

 // CanCommitToBranch returns true if repository is editable and user has proper access level
-// and branch is not protected
+// and branch is not protected for push
 func (r *Repository) CanCommitToBranch(doer *models.User) (bool, error) {
-	protectedBranch, err := r.Repository.IsProtectedBranch(r.BranchName, doer)
+	protectedBranch, err := r.Repository.IsProtectedBranchForPush(r.BranchName, doer)
 	if err != nil {
 		return false, err
 	}

@@ -620,7 +620,7 @@ func RepoRefByType(refType RepoRefType) macaron.Handler {
 			// redirect from old URL scheme to new URL scheme
 			ctx.Redirect(path.Join(
 				setting.AppSubURL,
-				strings.TrimSuffix(ctx.Req.URL.String(), ctx.Params("*")),
+				strings.TrimSuffix(ctx.Req.URL.Path, ctx.Params("*")),
 				ctx.Repo.BranchNameSubURL(),
 				ctx.Repo.TreePath))
 			return
@@ -85,9 +85,12 @@ type link struct {

 var oidRegExp = regexp.MustCompile(`^[A-Fa-f0-9]+$`)

+func isOidValid(oid string) bool {
+	return oidRegExp.MatchString(oid)
+}
+
 // ObjectOidHandler is the main request routing entry point into LFS server functions
 func ObjectOidHandler(ctx *context.Context) {

 	if !setting.LFS.StartServer {
 		writeStatus(ctx, 404)
 		return

@@ -110,6 +113,11 @@ func ObjectOidHandler(ctx *context.Context) {
 }

 func getAuthenticatedRepoAndMeta(ctx *context.Context, rv *RequestVars, requireWrite bool) (*models.LFSMetaObject, *models.Repository) {
+	if !isOidValid(rv.Oid) {
+		writeStatus(ctx, 404)
+		return nil, nil
+	}
+
 	repository, err := models.GetRepositoryByOwnerAndName(rv.User, rv.Repo)
 	if err != nil {
 		log.Debug("Could not find repository: %s/%s - %s", rv.User, rv.Repo, err)

@@ -222,7 +230,7 @@ func PostHandler(ctx *context.Context) {
 		return
 	}

-	if !oidRegExp.MatchString(rv.Oid) {
+	if !isOidValid(rv.Oid) {
 		writeStatus(ctx, 404)
 		return
 	}

@@ -249,7 +257,6 @@ func PostHandler(ctx *context.Context) {

 // BatchHandler provides the batch api
 func BatchHandler(ctx *context.Context) {

 	if !setting.LFS.StartServer {
 		writeStatus(ctx, 404)
 		return

@@ -266,6 +273,10 @@ func BatchHandler(ctx *context.Context) {

 	// Create a response object
 	for _, object := range bv.Objects {
+		if !isOidValid(object.Oid) {
+			continue
+		}
+
 		repository, err := models.GetRepositoryByOwnerAndName(object.User, object.Repo)

 		if err != nil {

@@ -292,14 +303,12 @@ func BatchHandler(ctx *context.Context) {
 			continue
 		}

-		if oidRegExp.MatchString(object.Oid) {
 		// Object is not found
 		meta, err = models.NewLFSMetaObject(&models.LFSMetaObject{Oid: object.Oid, Size: object.Size, RepositoryID: repository.ID})
 		if err == nil {
 			responseObjects = append(responseObjects, Represent(object, meta, meta.Existing, !contentStore.Exists(meta)))
 		}
 	}
-		}

 	ctx.Resp.Header().Set("Content-Type", metaMediaType)
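These hunks funnel every oid check through the new isOidValid helper, so each LFS endpoint rejects non-hexadecimal oids before they are used to look up or store objects. A small self-contained sketch of the same check (the sample values are assumptions for illustration):

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as in the diff: an LFS oid must consist solely of hex characters.
var oidRegExp = regexp.MustCompile(`^[A-Fa-f0-9]+$`)

func isOidValid(oid string) bool {
	return oidRegExp.MatchString(oid)
}

func main() {
	fmt.Println(isOidValid("4d7a214614ab2935c943f9e0ff69d22eadbb8f32")) // true: plain hex
	fmt.Println(isOidValid("../../not-an-oid"))                         // false: rejected before any lookup
}
```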
@@ -101,7 +101,7 @@ var (
 func (r *Renderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) {
 	prefix := r.URLPrefix
 	if r.IsWiki {
-		prefix = util.URLJoin(prefix, "wiki", "src")
+		prefix = util.URLJoin(prefix, "wiki", "raw")
 	}
 	prefix = strings.Replace(prefix, "/src/", "/raw/", 1)
 	if len(link) > 0 {
@@ -88,6 +88,9 @@ var (
 	AppDataPath string
 	AppWorkPath string

+	// User settings
+	GoogleAnalyticsID string
+
 	// Server settings
 	Protocol Scheme
 	Domain string

@@ -695,6 +698,7 @@ func NewContext() {

 	sec := Cfg.Section("server")
 	AppName = Cfg.Section("").Key("APP_NAME").MustString("Gitea: Git with a cup of tea")
+	GoogleAnalyticsID = Cfg.Section("").Key("GOOGLE_ANALYTICS_ID").String()

 	Protocol = HTTP
 	if sec.Key("PROTOCOL").String() == "https" {

@@ -1112,7 +1116,7 @@ func NewContext() {

 	extensionReg := regexp.MustCompile(`\.\w`)
 	for _, sec := range Cfg.Section("markup").ChildSections() {
-		name := strings.TrimLeft(sec.Name(), "markup.")
+		name := strings.TrimPrefix(sec.Name(), "markup.")
 		if name == "" {
 			log.Warn("name is empty, markup " + sec.Name() + "ignored")
 			continue

@@ -1165,6 +1169,8 @@ var Service struct {
 	EnableReverseProxyAuth bool
 	EnableReverseProxyAutoRegister bool
 	EnableCaptcha bool
+	RequireExternalRegistrationCaptcha bool
+	RequireExternalRegistrationPassword bool
 	DefaultKeepEmailPrivate bool
 	DefaultAllowCreateOrganization bool
 	EnableTimetracking bool

@@ -1190,6 +1196,8 @@ func newService() {
 	Service.EnableReverseProxyAuth = sec.Key("ENABLE_REVERSE_PROXY_AUTHENTICATION").MustBool()
 	Service.EnableReverseProxyAutoRegister = sec.Key("ENABLE_REVERSE_PROXY_AUTO_REGISTRATION").MustBool()
 	Service.EnableCaptcha = sec.Key("ENABLE_CAPTCHA").MustBool()
+	Service.RequireExternalRegistrationCaptcha = sec.Key("REQUIRE_EXTERNAL_REGISTRATION_CAPTCHA").MustBool()
+	Service.RequireExternalRegistrationPassword = sec.Key("REQUIRE_EXTERNAL_REGISTRATION_PASSWORD").MustBool()
 	Service.DefaultKeepEmailPrivate = sec.Key("DEFAULT_KEEP_EMAIL_PRIVATE").MustBool()
 	Service.DefaultAllowCreateOrganization = sec.Key("DEFAULT_ALLOW_CREATE_ORGANIZATION").MustBool(true)
 	Service.EnableTimetracking = sec.Key("ENABLE_TIMETRACKING").MustBool(true)
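The two new Service fields are read from the `[service]` section with MustBool, like the surrounding keys. A standalone sketch of how such keys parse with the go-ini library the settings code is built on (the inline ini payload is an assumed example, not taken from the diff):

```go
package main

import (
	"fmt"

	ini "gopkg.in/ini.v1"
)

func main() {
	// Assumed example config fragment; the key names mirror the ones read in newService().
	cfg, err := ini.Load([]byte(`
[service]
ENABLE_CAPTCHA = true
REQUIRE_EXTERNAL_REGISTRATION_CAPTCHA = false
REQUIRE_EXTERNAL_REGISTRATION_PASSWORD = true
`))
	if err != nil {
		panic(err)
	}
	sec := cfg.Section("service")
	fmt.Println(sec.Key("ENABLE_CAPTCHA").MustBool())                         // true
	fmt.Println(sec.Key("REQUIRE_EXTERNAL_REGISTRATION_CAPTCHA").MustBool())  // false
	fmt.Println(sec.Key("REQUIRE_EXTERNAL_REGISTRATION_PASSWORD").MustBool()) // true
}
```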
@@ -1,3 +1,4 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
 // Copyright 2014 The Gogs Authors. All rights reserved.
 // Use of this source code is governed by a MIT-style
 // license that can be found in the LICENSE file.

@@ -60,6 +61,9 @@ func NewFuncMap() []template.FuncMap {
 		"DisableGravatar": func() bool {
 			return setting.DisableGravatar
 		},
+		"GoogleAnalyticsID": func() string {
+			return setting.GoogleAnalyticsID
+		},
 		"ShowFooterTemplateLoadTime": func() bool {
 			return setting.ShowFooterTemplateLoadTime
 		},

@@ -243,7 +247,7 @@ func ToUTF8WithErr(content []byte) (string, error) {
 	}

 	// If there is an error, we concatenate the nicely decoded part and the
-	// original left over. This way we won't loose data.
+	// original left over. This way we won't lose data.
 	result, n, err := transform.String(encoding.NewDecoder(), string(content))
 	if err != nil {
 		result = result + string(content[n:])

@@ -252,6 +256,28 @@ func ToUTF8WithErr(content []byte) (string, error) {
 	return result, err
 }

+// ToUTF8WithFallback detects the encoding of content and coverts to UTF-8 if possible
+func ToUTF8WithFallback(content []byte) []byte {
+	charsetLabel, err := base.DetectEncoding(content)
+	if err != nil || charsetLabel == "UTF-8" {
+		return content
+	}
+
+	encoding, _ := charset.Lookup(charsetLabel)
+	if encoding == nil {
+		return content
+	}
+
+	// If there is an error, we concatenate the nicely decoded part and the
+	// original left over. This way we won't lose data.
+	result, n, err := transform.Bytes(encoding.NewDecoder(), content)
+	if err != nil {
+		return append(result, content[n:]...)
+	}
+
+	return result
+}
+
 // ToUTF8 converts content to UTF8 encoding and ignore error
 func ToUTF8(content string) string {
 	res, _ := ToUTF8WithErr([]byte(content))
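For context on the new ToUTF8WithFallback helper above: it pairs a detected charset label with the matching decoder and keeps the raw bytes when decoding fails. A standalone sketch of that conversion path, with the charset label passed in directly instead of coming from DetectEncoding (the helper name and the ISO-8859-1 sample input are assumptions for illustration):

```go
package main

import (
	"fmt"

	"golang.org/x/net/html/charset"
	"golang.org/x/text/transform"
)

// toUTF8WithLabel mirrors the fallback logic added above, but takes the charset
// label as a parameter instead of detecting it. Illustrative only.
func toUTF8WithLabel(content []byte, label string) []byte {
	enc, _ := charset.Lookup(label)
	if enc == nil {
		return content // unknown label: keep the original bytes
	}
	result, n, err := transform.Bytes(enc.NewDecoder(), content)
	if err != nil {
		// Keep the decoded prefix and append the raw remainder so no data is lost.
		return append(result, content[n:]...)
	}
	return result
}

func main() {
	latin1 := []byte{'G', 'r', 0xFC, 0xDF, 'e'} // "Grüße" encoded as ISO-8859-1
	fmt.Println(string(toUTF8WithLabel(latin1, "ISO-8859-1")))
}
```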
@@ -6,7 +6,6 @@ package validation

 import (
 	"fmt"
-	"net/url"
 	"regexp"
 	"strings"

@@ -70,14 +69,10 @@ func addValidURLBindingRule() {
 		},
 		IsValid: func(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) {
 			str := fmt.Sprintf("%v", val)
-			if len(str) != 0 {
-				if u, err := url.ParseRequestURI(str); err != nil ||
-					(u.Scheme != "http" && u.Scheme != "https") ||
-					!validPort(portOnly(u.Host)) {
+			if len(str) != 0 && !IsValidURL(str) {
 				errs.Add([]string{name}, binding.ERR_URL, "Url")
 				return false, errs
 			}
-			}

 			return true, errs
 		},
@@ -0,0 +1,77 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
+
+package validation
+
+import (
+	"net"
+	"net/url"
+	"strings"
+
+	"code.gitea.io/gitea/modules/setting"
+)
+
+var loopbackIPBlocks []*net.IPNet
+
+func init() {
+	for _, cidr := range []string{
+		"127.0.0.0/8", // IPv4 loopback
+		"::1/128",     // IPv6 loopback
+	} {
+		if _, block, err := net.ParseCIDR(cidr); err == nil {
+			loopbackIPBlocks = append(loopbackIPBlocks, block)
+		}
+	}
+}
+
+func isLoopbackIP(ip string) bool {
+	pip := net.ParseIP(ip)
+	if pip == nil {
+		return false
+	}
+	for _, block := range loopbackIPBlocks {
+		if block.Contains(pip) {
+			return true
+		}
+	}
+	return false
+}
+
+// IsValidURL checks if URL is valid
+func IsValidURL(uri string) bool {
+	if u, err := url.ParseRequestURI(uri); err != nil ||
+		(u.Scheme != "http" && u.Scheme != "https") ||
+		!validPort(portOnly(u.Host)) {
+		return false
+	}
+
+	return true
+}
+
+// IsAPIURL checks if URL is current Gitea instance API URL
+func IsAPIURL(uri string) bool {
+	return strings.HasPrefix(strings.ToLower(uri), strings.ToLower(setting.AppURL+"api"))
+}
+
+// IsValidExternalURL checks if URL is valid external URL
+func IsValidExternalURL(uri string) bool {
+	if !IsValidURL(uri) || IsAPIURL(uri) {
+		return false
+	}
+
+	u, err := url.ParseRequestURI(uri)
+	if err != nil {
+		return false
+	}
+
+	// Currently check only if not loopback IP is provided to keep compatibility
+	if isLoopbackIP(u.Hostname()) || strings.ToLower(u.Hostname()) == "localhost" {
+		return false
+	}
+
+	// TODO: Later it should be added to allow local network IP addreses
+	// only if allowed by special setting
+
+	return true
+}
@@ -0,0 +1,90 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
+
+package validation
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+
+	"code.gitea.io/gitea/modules/setting"
+)
+
+func Test_IsValidURL(t *testing.T) {
+	cases := []struct {
+		description string
+		url string
+		valid bool
+	}{
+		{
+			description: "Empty URL",
+			url: "",
+			valid: false,
+		},
+		{
+			description: "Loobpack IPv4 URL",
+			url: "http://127.0.1.1:5678/",
+			valid: true,
+		},
+		{
+			description: "Loobpack IPv6 URL",
+			url: "https://[::1]/",
+			valid: true,
+		},
+		{
+			description: "Missing semicolon after schema",
+			url: "http//meh/",
+			valid: false,
+		},
+	}
+
+	for _, testCase := range cases {
+		t.Run(testCase.description, func(t *testing.T) {
+			assert.Equal(t, testCase.valid, IsValidURL(testCase.url))
+		})
+	}
+}
+
+func Test_IsValidExternalURL(t *testing.T) {
+	setting.AppURL = "https://try.gitea.io/"
+
+	cases := []struct {
+		description string
+		url string
+		valid bool
+	}{
+		{
+			description: "Current instance URL",
+			url: "https://try.gitea.io/test",
+			valid: true,
+		},
+		{
+			description: "Loobpack IPv4 URL",
+			url: "http://127.0.1.1:5678/",
+			valid: false,
+		},
+		{
+			description: "Current instance API URL",
+			url: "https://try.gitea.io/api/v1/user/follow",
+			valid: false,
+		},
+		{
+			description: "Local network URL",
+			url: "http://192.168.1.2/api/v1/user/follow",
+			valid: true,
+		},
+		{
+			description: "Local URL",
+			url: "http://LOCALHOST:1234/whatever",
+			valid: false,
+		},
+	}
+
+	for _, testCase := range cases {
+		t.Run(testCase.description, func(t *testing.T) {
+			assert.Equal(t, testCase.valid, IsValidExternalURL(testCase.url))
+		})
+	}
+}
@@ -9,7 +9,6 @@ sign_in_with = Sign In With
 sign_out = Sign Out
 sign_up = Register
 link_account = Link Account
-link_account_signin_or_signup = Sign in with existing credentials to link your existing account to this account. Or register a new one.
 register = Register
 website = Website
 version = Version

@@ -223,6 +222,12 @@ twofa_passcode_incorrect = Your passcode is incorrect. If you misplaced your dev
 twofa_scratch_token_incorrect = Your scratch code is incorrect.
 login_userpass = Sign In
 login_openid = OpenID
+oauth_signup_tab = Register New Account
+oauth_signup_title = Add Account Recovery Info
+oauth_signup_submit = Complete Account
+oauth_signin_tab = Link to Existing Account
+oauth_signin_title = Sign In to Authorize Linked Account
+oauth_signin_submit = Link Account
 openid_connect_submit = Connect
 openid_connect_title = Connect to an existing account
 openid_connect_desc = The chosen OpenID URI is unknown. Associate it with a new account here.

@@ -943,6 +948,7 @@ settings.external_tracker_url = External Issue Tracker URL
 settings.external_tracker_url_error = The external issue tracker URL is not a valid URL.
 settings.external_tracker_url_desc = Visitors are redirected to the external issue tracker URL when clicking on the issues tab.
 settings.tracker_url_format = External Issue Tracker URL Format
+settings.tracker_url_format_error = The external issue tracker URL format is not a valid URL.
 settings.tracker_issue_style = External Issue Tracker Number Format
 settings.tracker_issue_style.numeric = Numeric
 settings.tracker_issue_style.alphanumeric = Alphanumeric

@@ -1042,8 +1048,8 @@ settings.event_push = Push
 settings.event_push_desc = Git push to a repository.
 settings.event_repository = Repository
 settings.event_repository_desc = Repository created or deleted.
-settings.active = Include Event Details
-settings.active_helper = Add information about the triggering event to requests.
+settings.active = Active
+settings.active_helper = Information about triggered events will be sent to this webhook URL.
 settings.add_hook_success = The webhook has been added.
 settings.update_webhook = Update Webhook
 settings.update_hook_success = The webhook has been updated.

File diff suppressed because one or more lines are too long
@@ -2309,7 +2309,7 @@ function initTopicbar() {

 	mgrBtn.click(function() {
 		viewDiv.hide();
-		editDiv.show();
+		editDiv.css('display', ''); // show Semantic UI Grid
 	})

 	saveBtn.click(function() {

@@ -2334,7 +2334,7 @@ function initTopicbar() {
 				}
 			}
 		}).done(function() {
-			editDiv.hide();
+			editDiv.css('display', 'none'); // hide Semantic UI Grid
 			viewDiv.show();
 		}).fail(function(xhr) {
 			alert(xhr.responseJSON.message)
@@ -9,6 +9,9 @@ body {
 	img {
 		border-radius: 3px;
 	}
+	table {
+		border-collapse: collapse;
+	}
 	.rounded {
 		border-radius: .28571429rem !important;
 	}

@@ -1743,7 +1743,6 @@ tbody.commit-list {

 #topic_edit {
 	margin-top:5px;
-	display: none;
 }

 #repo-topic {
Some files were not shown because too many files have changed in this diff.