Merge pull request '[gitea] week 2024-22 cherry pick (gitea/main -> forgejo)' (#3917) from earl-warren/wcp/2024-22 into forgejo

Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/3917
Reviewed-by: 0ko <0ko@noreply.codeberg.org>
Earl Warren 2024-05-28 10:50:48 +00:00
commit 7af3a981b5
45 changed files with 1041 additions and 205 deletions


@ -2046,6 +2046,17 @@ LEVEL = Info
;; or only create new users if UPDATE_EXISTING is set to false
;UPDATE_EXISTING = true
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Cleanup expired actions assets
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;[cron.cleanup_actions]
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;ENABLED = true
;RUN_AT_START = true
;SCHEDULE = @midnight
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Clean-up deleted branches

flake.lock (new file, 61 lines)

@ -0,0 +1,61 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1715534503,
"narHash": "sha256-5ZSVkFadZbFP1THataCaSf0JH2cAH3S29hU9rrxTEqk=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "2057814051972fa1453ddfb0d98badbea9b83c06",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

flake.nix (new file, 37 lines)

@ -0,0 +1,37 @@
{
inputs = {
nixpkgs.url = "github:nixos/nixpkgs?ref=nixos-unstable";
flake-utils.url = "github:numtide/flake-utils";
};
outputs =
{ nixpkgs, flake-utils, ... }:
flake-utils.lib.eachDefaultSystem (
system:
let
pkgs = nixpkgs.legacyPackages.${system};
in
{
devShells.default = pkgs.mkShell {
buildInputs = with pkgs; [
# generic
git
git-lfs
gnumake
gnused
gnutar
gzip
# frontend
nodejs_20
# linting
python312
poetry
# backend
go_1_22
];
};
}
);
}
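With Nix flakes enabled, this development shell can be entered by running nix develop from the repository root; the pinned toolchain above (Node.js 20, Go 1.22, Python 3.12, Poetry and the usual Git/GNU utilities) is then provided by nixpkgs.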


@ -155,14 +155,14 @@ func (r *Review) LoadCodeComments(ctx context.Context) (err error) {
if r.CodeComments != nil {
return err
}
if err = r.loadIssue(ctx); err != nil {
if err = r.LoadIssue(ctx); err != nil {
return err
}
r.CodeComments, err = fetchCodeCommentsByReview(ctx, r.Issue, nil, r, false)
return err
}
func (r *Review) loadIssue(ctx context.Context) (err error) {
func (r *Review) LoadIssue(ctx context.Context) (err error) {
if r.Issue != nil {
return err
}
@ -199,7 +199,7 @@ func (r *Review) LoadReviewerTeam(ctx context.Context) (err error) {
// LoadAttributes loads all attributes except CodeComments
func (r *Review) LoadAttributes(ctx context.Context) (err error) {
if err = r.loadIssue(ctx); err != nil {
if err = r.LoadIssue(ctx); err != nil {
return err
}
if err = r.LoadCodeComments(ctx); err != nil {


@ -7,6 +7,7 @@ import (
"errors"
"fmt"
"strings"
"sync/atomic"
"code.gitea.io/gitea/models/perm"
"code.gitea.io/gitea/modules/container"
@ -106,14 +107,27 @@ var (
TypeExternalTracker,
}
// DisabledRepoUnits contains the units that have been globally disabled
DisabledRepoUnits = []Type{}
disabledRepoUnitsAtomic atomic.Pointer[[]Type] // the units that have been globally disabled
// AllowedRepoUnitGroups contains the units that have been globally enabled,
// with mutually exclusive units grouped together.
AllowedRepoUnitGroups = [][]Type{}
)
// DisabledRepoUnitsGet returns the globally disabled units. It is a quick patch to fix a data race during testing,
// because the queue worker might read the value while a test is mocking it. FIXME: refactor to a clearer solution later.
func DisabledRepoUnitsGet() []Type {
v := disabledRepoUnitsAtomic.Load()
if v == nil {
return nil
}
return *v
}
func DisabledRepoUnitsSet(v []Type) {
disabledRepoUnitsAtomic.Store(&v)
}
// Get valid set of default repository units from settings
func validateDefaultRepoUnits(defaultUnits, settingDefaultUnits []Type) []Type {
units := defaultUnits
@ -131,7 +145,7 @@ func validateDefaultRepoUnits(defaultUnits, settingDefaultUnits []Type) []Type {
}
// Remove disabled units
for _, disabledUnit := range DisabledRepoUnits {
for _, disabledUnit := range DisabledRepoUnitsGet() {
for i, unit := range units {
if unit == disabledUnit {
units = append(units[:i], units[i+1:]...)
@ -144,11 +158,11 @@ func validateDefaultRepoUnits(defaultUnits, settingDefaultUnits []Type) []Type {
// LoadUnitConfig load units from settings
func LoadUnitConfig() error {
var invalidKeys []string
DisabledRepoUnits, invalidKeys = FindUnitTypes(setting.Repository.DisabledRepoUnits...)
disabledRepoUnits, invalidKeys := FindUnitTypes(setting.Repository.DisabledRepoUnits...)
if len(invalidKeys) > 0 {
log.Warn("Invalid keys in disabled repo units: %s", strings.Join(invalidKeys, ", "))
}
DisabledRepoUnitsSet(disabledRepoUnits)
setDefaultRepoUnits, invalidKeys := FindUnitTypes(setting.Repository.DefaultRepoUnits...)
if len(invalidKeys) > 0 {
@ -210,7 +224,7 @@ func LoadUnitConfig() error {
// UnitGlobalDisabled checks if unit type is global disabled
func (u Type) UnitGlobalDisabled() bool {
for _, ud := range DisabledRepoUnits {
for _, ud := range DisabledRepoUnitsGet() {
if u == ud {
return true
}
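The change above swaps a plain package-level slice for an atomic pointer, so the queue worker can read the disabled units while a test replaces them without a data race. A self-contained sketch of the same pattern, using hypothetical names rather than the actual Gitea identifiers:

package main

import (
	"fmt"
	"sync/atomic"
)

type Type int

// disabledUnits is published atomically: writers store a pointer to a fresh
// slice and readers load the current pointer, so no lock is needed.
var disabledUnits atomic.Pointer[[]Type]

func disabledUnitsGet() []Type {
	if v := disabledUnits.Load(); v != nil {
		return *v
	}
	return nil
}

func disabledUnitsSet(v []Type) { disabledUnits.Store(&v) }

func main() {
	disabledUnitsSet([]Type{1, 2})
	fmt.Println(disabledUnitsGet()) // [1 2]
}

Writers always publish a whole new slice; nothing mutates a slice that a concurrent reader may already hold.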


@ -14,10 +14,10 @@ import (
func TestLoadUnitConfig(t *testing.T) {
t.Run("regular", func(t *testing.T) {
defer func(disabledRepoUnits, defaultRepoUnits, defaultForkRepoUnits []Type) {
DisabledRepoUnits = disabledRepoUnits
DisabledRepoUnitsSet(disabledRepoUnits)
DefaultRepoUnits = defaultRepoUnits
DefaultForkRepoUnits = defaultForkRepoUnits
}(DisabledRepoUnits, DefaultRepoUnits, DefaultForkRepoUnits)
}(DisabledRepoUnitsGet(), DefaultRepoUnits, DefaultForkRepoUnits)
defer func(disabledRepoUnits, defaultRepoUnits, defaultForkRepoUnits []string) {
setting.Repository.DisabledRepoUnits = disabledRepoUnits
setting.Repository.DefaultRepoUnits = defaultRepoUnits
@ -28,16 +28,16 @@ func TestLoadUnitConfig(t *testing.T) {
setting.Repository.DefaultRepoUnits = []string{"repo.code", "repo.releases", "repo.issues", "repo.pulls"}
setting.Repository.DefaultForkRepoUnits = []string{"repo.releases"}
assert.NoError(t, LoadUnitConfig())
assert.Equal(t, []Type{TypeIssues}, DisabledRepoUnits)
assert.Equal(t, []Type{TypeIssues}, DisabledRepoUnitsGet())
assert.Equal(t, []Type{TypeCode, TypeReleases, TypePullRequests}, DefaultRepoUnits)
assert.Equal(t, []Type{TypeReleases}, DefaultForkRepoUnits)
})
t.Run("invalid", func(t *testing.T) {
defer func(disabledRepoUnits, defaultRepoUnits, defaultForkRepoUnits []Type) {
DisabledRepoUnits = disabledRepoUnits
DisabledRepoUnitsSet(disabledRepoUnits)
DefaultRepoUnits = defaultRepoUnits
DefaultForkRepoUnits = defaultForkRepoUnits
}(DisabledRepoUnits, DefaultRepoUnits, DefaultForkRepoUnits)
}(DisabledRepoUnitsGet(), DefaultRepoUnits, DefaultForkRepoUnits)
defer func(disabledRepoUnits, defaultRepoUnits, defaultForkRepoUnits []string) {
setting.Repository.DisabledRepoUnits = disabledRepoUnits
setting.Repository.DefaultRepoUnits = defaultRepoUnits
@ -48,16 +48,16 @@ func TestLoadUnitConfig(t *testing.T) {
setting.Repository.DefaultRepoUnits = []string{"repo.code", "invalid.2", "repo.releases", "repo.issues", "repo.pulls"}
setting.Repository.DefaultForkRepoUnits = []string{"invalid.3", "repo.releases"}
assert.NoError(t, LoadUnitConfig())
assert.Equal(t, []Type{TypeIssues}, DisabledRepoUnits)
assert.Equal(t, []Type{TypeIssues}, DisabledRepoUnitsGet())
assert.Equal(t, []Type{TypeCode, TypeReleases, TypePullRequests}, DefaultRepoUnits)
assert.Equal(t, []Type{TypeReleases}, DefaultForkRepoUnits)
})
t.Run("duplicate", func(t *testing.T) {
defer func(disabledRepoUnits, defaultRepoUnits, defaultForkRepoUnits []Type) {
DisabledRepoUnits = disabledRepoUnits
DisabledRepoUnitsSet(disabledRepoUnits)
DefaultRepoUnits = defaultRepoUnits
DefaultForkRepoUnits = defaultForkRepoUnits
}(DisabledRepoUnits, DefaultRepoUnits, DefaultForkRepoUnits)
}(DisabledRepoUnitsGet(), DefaultRepoUnits, DefaultForkRepoUnits)
defer func(disabledRepoUnits, defaultRepoUnits, defaultForkRepoUnits []string) {
setting.Repository.DisabledRepoUnits = disabledRepoUnits
setting.Repository.DefaultRepoUnits = defaultRepoUnits
@ -68,16 +68,16 @@ func TestLoadUnitConfig(t *testing.T) {
setting.Repository.DefaultRepoUnits = []string{"repo.code", "repo.releases", "repo.issues", "repo.pulls", "repo.code"}
setting.Repository.DefaultForkRepoUnits = []string{"repo.releases", "repo.releases"}
assert.NoError(t, LoadUnitConfig())
assert.Equal(t, []Type{TypeIssues}, DisabledRepoUnits)
assert.Equal(t, []Type{TypeIssues}, DisabledRepoUnitsGet())
assert.Equal(t, []Type{TypeCode, TypeReleases, TypePullRequests}, DefaultRepoUnits)
assert.Equal(t, []Type{TypeReleases}, DefaultForkRepoUnits)
})
t.Run("empty_default", func(t *testing.T) {
defer func(disabledRepoUnits, defaultRepoUnits, defaultForkRepoUnits []Type) {
DisabledRepoUnits = disabledRepoUnits
DisabledRepoUnitsSet(disabledRepoUnits)
DefaultRepoUnits = defaultRepoUnits
DefaultForkRepoUnits = defaultForkRepoUnits
}(DisabledRepoUnits, DefaultRepoUnits, DefaultForkRepoUnits)
}(DisabledRepoUnitsGet(), DefaultRepoUnits, DefaultForkRepoUnits)
defer func(disabledRepoUnits, defaultRepoUnits, defaultForkRepoUnits []string) {
setting.Repository.DisabledRepoUnits = disabledRepoUnits
setting.Repository.DefaultRepoUnits = defaultRepoUnits
@ -88,7 +88,7 @@ func TestLoadUnitConfig(t *testing.T) {
setting.Repository.DefaultRepoUnits = []string{}
setting.Repository.DefaultForkRepoUnits = []string{"repo.releases", "repo.releases"}
assert.NoError(t, LoadUnitConfig())
assert.Equal(t, []Type{TypeIssues}, DisabledRepoUnits)
assert.Equal(t, []Type{TypeIssues}, DisabledRepoUnitsGet())
assert.ElementsMatch(t, []Type{TypeCode, TypePullRequests, TypeReleases, TypeWiki, TypePackages, TypeProjects, TypeActions}, DefaultRepoUnits)
assert.Equal(t, []Type{TypeReleases}, DefaultForkRepoUnits)
})


@ -10,6 +10,7 @@ import (
"net/mail"
"regexp"
"strings"
"time"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/base"
@ -362,14 +363,12 @@ func MakeEmailPrimary(ctx context.Context, email *EmailAddress) error {
// VerifyActiveEmailCode verifies active email code when active account
func VerifyActiveEmailCode(ctx context.Context, code, email string) *EmailAddress {
minutes := setting.Service.ActiveCodeLives
if user := GetVerifyUser(ctx, code); user != nil {
// time limit code
prefix := code[:base.TimeLimitCodeLength]
data := fmt.Sprintf("%d%s%s%s%s", user.ID, email, user.LowerName, user.Passwd, user.Rands)
if base.VerifyTimeLimitCode(data, minutes, prefix) {
if base.VerifyTimeLimitCode(time.Now(), data, setting.Service.ActiveCodeLives, prefix) {
emailAddress := &EmailAddress{UID: user.ID, Email: email}
if has, _ := db.GetEngine(ctx).Get(emailAddress); has {
return emailAddress


@ -321,7 +321,7 @@ func (u *User) OrganisationLink() string {
func (u *User) GenerateEmailActivateCode(email string) string {
code := base.CreateTimeLimitCode(
fmt.Sprintf("%d%s%s%s%s", u.ID, email, u.LowerName, u.Passwd, u.Rands),
setting.Service.ActiveCodeLives, nil)
setting.Service.ActiveCodeLives, time.Now(), nil)
// Add tail hex username
code += hex.EncodeToString([]byte(u.LowerName))
@ -818,14 +818,11 @@ func GetVerifyUser(ctx context.Context, code string) (user *User) {
// VerifyUserActiveCode verifies active code when active account
func VerifyUserActiveCode(ctx context.Context, code string) (user *User) {
minutes := setting.Service.ActiveCodeLives
if user = GetVerifyUser(ctx, code); user != nil {
// time limit code
prefix := code[:base.TimeLimitCodeLength]
data := fmt.Sprintf("%d%s%s%s%s", user.ID, user.Email, user.LowerName, user.Passwd, user.Rands)
if base.VerifyTimeLimitCode(data, minutes, prefix) {
if base.VerifyTimeLimitCode(time.Now(), data, setting.Service.ActiveCodeLives, prefix) {
return user
}
}


@ -4,12 +4,15 @@
package base
import (
"crypto/hmac"
"crypto/sha1"
"crypto/sha256"
"crypto/subtle"
"encoding/base64"
"encoding/hex"
"errors"
"fmt"
"hash"
"os"
"path/filepath"
"runtime"
@ -25,13 +28,6 @@ import (
"github.com/dustin/go-humanize"
)
// EncodeSha1 string to sha1 hex value.
func EncodeSha1(str string) string {
h := sha1.New()
_, _ = h.Write([]byte(str))
return hex.EncodeToString(h.Sum(nil))
}
// EncodeSha256 string to sha256 hex value.
func EncodeSha256(str string) string {
h := sha256.New()
@ -62,63 +58,62 @@ func BasicAuthDecode(encoded string) (string, string, error) {
}
// VerifyTimeLimitCode verify time limit code
func VerifyTimeLimitCode(data string, minutes int, code string) bool {
func VerifyTimeLimitCode(now time.Time, data string, minutes int, code string) bool {
if len(code) <= 18 {
return false
}
// split code
start := code[:12]
lives := code[12:18]
if d, err := strconv.ParseInt(lives, 10, 0); err == nil {
minutes = int(d)
}
startTimeStr := code[:12]
aliveTimeStr := code[12:18]
aliveTime, _ := strconv.Atoi(aliveTimeStr) // no need to check err, if anything wrong, the following code check will fail soon
// right active code
retCode := CreateTimeLimitCode(data, minutes, start)
if retCode == code && minutes > 0 {
// check time is expired or not
before, _ := time.ParseInLocation("200601021504", start, time.Local)
now := time.Now()
if before.Add(time.Minute*time.Duration(minutes)).Unix() > now.Unix() {
return true
// check code
retCode := CreateTimeLimitCode(data, aliveTime, startTimeStr, nil)
if subtle.ConstantTimeCompare([]byte(retCode), []byte(code)) != 1 {
retCode = CreateTimeLimitCode(data, aliveTime, startTimeStr, sha1.New()) // TODO: this is only for the support of legacy codes, remove this in/after 1.23
if subtle.ConstantTimeCompare([]byte(retCode), []byte(code)) != 1 {
return false
}
}
return false
// check whether the code has expired: startTime <= now && now < startTime + minutes
startTime, _ := time.ParseInLocation("200601021504", startTimeStr, time.Local)
return (startTime.Before(now) || startTime.Equal(now)) && now.Before(startTime.Add(time.Minute*time.Duration(minutes)))
}
// TimeLimitCodeLength default value for time limit code
const TimeLimitCodeLength = 12 + 6 + 40
// CreateTimeLimitCode create a time limit code
// code format: 12 length date time string + 6 minutes string + 40 sha1 encoded string
func CreateTimeLimitCode(data string, minutes int, startInf any) string {
format := "200601021504"
// CreateTimeLimitCode creates a time-limited code.
// Format: 12-character datetime string + 6-character minutes string (not used) + 40-character hash string; other code depends on this fixed length.
// If h is nil, the default HMAC hash is used.
func CreateTimeLimitCode[T time.Time | string](data string, minutes int, startTimeGeneric T, h hash.Hash) string {
const format = "200601021504"
var start, end time.Time
var startStr, endStr string
if startInf == nil {
// Use now time create code
start = time.Now()
startStr = start.Format(format)
var start time.Time
var startTimeAny any = startTimeGeneric
if t, ok := startTimeAny.(time.Time); ok {
start = t
} else {
// use start string create code
startStr = startInf.(string)
start, _ = time.ParseInLocation(format, startStr, time.Local)
startStr = start.Format(format)
var err error
start, err = time.ParseInLocation(format, startTimeAny.(string), time.Local)
if err != nil {
return "" // return an invalid code because the "parse" failed
}
}
startStr := start.Format(format)
end := start.Add(time.Minute * time.Duration(minutes))
end = start.Add(time.Minute * time.Duration(minutes))
endStr = end.Format(format)
// create sha1 encode string
sh := sha1.New()
_, _ = sh.Write([]byte(fmt.Sprintf("%s%s%s%s%d", data, hex.EncodeToString(setting.GetGeneralTokenSigningSecret()), startStr, endStr, minutes)))
encoded := hex.EncodeToString(sh.Sum(nil))
if h == nil {
h = hmac.New(sha1.New, setting.GetGeneralTokenSigningSecret())
}
_, _ = fmt.Fprintf(h, "%s%s%s%s%d", data, hex.EncodeToString(setting.GetGeneralTokenSigningSecret()), startStr, end.Format(format), minutes)
encoded := hex.EncodeToString(h.Sum(nil))
code := fmt.Sprintf("%s%06d%s", startStr, minutes, encoded)
if len(code) != TimeLimitCodeLength {
panic("there is a hard requirement for the length of time-limited code") // it shouldn't happen
}
return code
}
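For reference, a hedged usage sketch of the new signatures (the data string and durations are invented; the general token signing secret is assumed to be configured, as in the tests below):

now := time.Now()
// a nil hash selects the default HMAC-SHA1 keyed with the general token signing secret
code := CreateTimeLimitCode("user-42:activate", 15, now, nil)

VerifyTimeLimitCode(now.Add(5*time.Minute), "user-42:activate", 15, code)  // true: within the 15-minute window
VerifyTimeLimitCode(now.Add(16*time.Minute), "user-42:activate", 15, code) // false: expired
VerifyTimeLimitCode(now, "other-data", 15, code)                           // false: data mismatch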


@ -4,20 +4,18 @@
package base
import (
"crypto/sha1"
"fmt"
"os"
"testing"
"time"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/test"
"github.com/stretchr/testify/assert"
)
func TestEncodeSha1(t *testing.T) {
assert.Equal(t,
"8843d7f92416211de9ebb963ff4ce28125932878",
EncodeSha1("foobar"),
)
}
func TestEncodeSha256(t *testing.T) {
assert.Equal(t,
"c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2",
@ -46,43 +44,54 @@ func TestBasicAuthDecode(t *testing.T) {
}
func TestVerifyTimeLimitCode(t *testing.T) {
tc := []struct {
data string
minutes int
code string
valid bool
}{{
data: "data",
minutes: 2,
code: testCreateTimeLimitCode(t, "data", 2),
valid: true,
}, {
data: "abc123-ß",
minutes: 1,
code: testCreateTimeLimitCode(t, "abc123-ß", 1),
valid: true,
}, {
data: "data",
minutes: 2,
code: "2021012723240000005928251dac409d2c33a6eb82c63410aaad569bed",
valid: false,
}}
for _, test := range tc {
actualValid := VerifyTimeLimitCode(test.data, test.minutes, test.code)
assert.Equal(t, test.valid, actualValid, "data: '%s' code: '%s' should be valid: %t", test.data, test.code, test.valid)
defer test.MockVariableValue(&setting.InstallLock, true)()
initGeneralSecret := func(secret string) {
setting.InstallLock = true
setting.CfgProvider, _ = setting.NewConfigProviderFromData(fmt.Sprintf(`
[oauth2]
JWT_SECRET = %s
`, secret))
setting.LoadCommonSettings()
}
}
func testCreateTimeLimitCode(t *testing.T, data string, m int) string {
result0 := CreateTimeLimitCode(data, m, nil)
result1 := CreateTimeLimitCode(data, m, time.Now().Format("200601021504"))
result2 := CreateTimeLimitCode(data, m, time.Unix(time.Now().Unix()+int64(time.Minute)*int64(m), 0).Format("200601021504"))
initGeneralSecret("KZb_QLUd4fYVyxetjxC4eZkrBgWM2SndOOWDNtgUUko")
now := time.Now()
assert.Equal(t, result0, result1)
assert.NotEqual(t, result0, result2)
t.Run("TestGenericParameter", func(t *testing.T) {
time2000 := time.Date(2000, 1, 2, 3, 4, 5, 0, time.Local)
assert.Equal(t, "2000010203040000026fa5221b2731b7cf80b1b506f5e39e38c115fee5", CreateTimeLimitCode("test-sha1", 2, time2000, sha1.New()))
assert.Equal(t, "2000010203040000026fa5221b2731b7cf80b1b506f5e39e38c115fee5", CreateTimeLimitCode("test-sha1", 2, "200001020304", sha1.New()))
assert.Equal(t, "2000010203040000024842227a2f87041ff82025199c0187410a9297bf", CreateTimeLimitCode("test-hmac", 2, time2000, nil))
assert.Equal(t, "2000010203040000024842227a2f87041ff82025199c0187410a9297bf", CreateTimeLimitCode("test-hmac", 2, "200001020304", nil))
})
assert.True(t, len(result0) != 0)
return result0
t.Run("TestInvalidCode", func(t *testing.T) {
assert.False(t, VerifyTimeLimitCode(now, "data", 2, ""))
assert.False(t, VerifyTimeLimitCode(now, "data", 2, "invalid code"))
})
t.Run("TestCreateAndVerify", func(t *testing.T) {
code := CreateTimeLimitCode("data", 2, now, nil)
assert.False(t, VerifyTimeLimitCode(now.Add(-time.Minute), "data", 2, code)) // not started yet
assert.True(t, VerifyTimeLimitCode(now, "data", 2, code))
assert.True(t, VerifyTimeLimitCode(now.Add(time.Minute), "data", 2, code))
assert.False(t, VerifyTimeLimitCode(now.Add(time.Minute), "DATA", 2, code)) // invalid data
assert.False(t, VerifyTimeLimitCode(now.Add(2*time.Minute), "data", 2, code)) // expired
})
t.Run("TestDifferentSecret", func(t *testing.T) {
// use another secret to ensure the code is invalid for different secret
verifyDataCode := func(c string) bool {
return VerifyTimeLimitCode(now, "data", 2, c)
}
code1 := CreateTimeLimitCode("data", 2, now, sha1.New())
code2 := CreateTimeLimitCode("data", 2, now, nil)
assert.True(t, verifyDataCode(code1))
assert.True(t, verifyDataCode(code2))
initGeneralSecret("000_QLUd4fYVyxetjxC4eZkrBgWM2SndOOWDNtgUUko")
assert.False(t, verifyDataCode(code1))
assert.False(t, verifyDataCode(code2))
})
}
func TestFileSize(t *testing.T) {


@ -4,6 +4,8 @@
package git
import (
"crypto/sha1"
"encoding/hex"
"fmt"
"io"
"os"
@ -128,3 +130,9 @@ func (l *LimitedReaderCloser) Read(p []byte) (n int, err error) {
func (l *LimitedReaderCloser) Close() error {
return l.C.Close()
}
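// HashFilePathForWebUI returns the SHA-1 hex digest of a file path; it is used only as a stable per-file identifier (e.g. FileNameHash) for anchors in the web diff UI, not for any security purpose.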
func HashFilePathForWebUI(s string) string {
h := sha1.New()
_, _ = h.Write([]byte(s))
return hex.EncodeToString(h.Sum(nil))
}


@ -3,7 +3,11 @@
package git
import "testing"
import (
"testing"
"github.com/stretchr/testify/assert"
)
// This file contains utility functions that are used across multiple tests,
// but not in production code.
@ -13,3 +17,10 @@ func skipIfSHA256NotSupported(t *testing.T) {
t.Skip("skipping because installed Git version doesn't support SHA256")
}
}
func TestHashFilePathForWebUI(t *testing.T) {
assert.Equal(t,
"8843d7f92416211de9ebb963ff4ce28125932878",
HashFilePathForWebUI("foobar"),
)
}


@ -91,6 +91,9 @@ func validateYaml(template *api.IssueTemplate) error {
if err := validateOptions(field, idx); err != nil {
return err
}
if err := validateDropdownDefault(position, field.Attributes); err != nil {
return err
}
case api.IssueFormFieldTypeCheckboxes:
if err := validateStringItem(position, field.Attributes, false, "description"); err != nil {
return err
@ -249,6 +252,28 @@ func validateBoolItem(position errorPosition, m map[string]any, names ...string)
return nil
}
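// validateDropdownDefault checks that, when a dropdown field declares a 'default', the value is an integer index within the range of the declared options.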
func validateDropdownDefault(position errorPosition, attributes map[string]any) error {
v, ok := attributes["default"]
if !ok {
return nil
}
defaultValue, ok := v.(int)
if !ok {
return position.Errorf("'default' should be an int")
}
options, ok := attributes["options"].([]any)
if !ok {
// should not happen
return position.Errorf("'options' is required and should be an array")
}
if defaultValue < 0 || defaultValue >= len(options) {
return position.Errorf("the value of 'default' is out of range")
}
return nil
}
type errorPosition string
func (p errorPosition) Errorf(format string, a ...any) error {


@ -355,6 +355,96 @@ body:
`,
wantErr: "body[0](checkboxes), option[1]: can not require a hidden checkbox",
},
{
name: "dropdown default is not an integer",
content: `
name: "test"
about: "this is about"
body:
- type: dropdown
id: "1"
attributes:
label: Label of dropdown
description: Description of dropdown
multiple: true
options:
- Option 1 of dropdown
- Option 2 of dropdown
- Option 3 of dropdown
default: "def"
validations:
required: true
`,
wantErr: "body[0](dropdown): 'default' should be an int",
},
{
name: "dropdown default is out of range",
content: `
name: "test"
about: "this is about"
body:
- type: dropdown
id: "1"
attributes:
label: Label of dropdown
description: Description of dropdown
multiple: true
options:
- Option 1 of dropdown
- Option 2 of dropdown
- Option 3 of dropdown
default: 3
validations:
required: true
`,
wantErr: "body[0](dropdown): the value of 'default' is out of range",
},
{
name: "dropdown without default is valid",
content: `
name: "test"
about: "this is about"
body:
- type: dropdown
id: "1"
attributes:
label: Label of dropdown
description: Description of dropdown
multiple: true
options:
- Option 1 of dropdown
- Option 2 of dropdown
- Option 3 of dropdown
validations:
required: true
`,
want: &api.IssueTemplate{
Name: "test",
About: "this is about",
Fields: []*api.IssueFormField{
{
Type: "dropdown",
ID: "1",
Attributes: map[string]any{
"label": "Label of dropdown",
"description": "Description of dropdown",
"multiple": true,
"options": []any{
"Option 1 of dropdown",
"Option 2 of dropdown",
"Option 3 of dropdown",
},
},
Validations: map[string]any{
"required": true,
},
Visible: []api.IssueFormFieldVisible{api.IssueFormFieldVisibleForm, api.IssueFormFieldVisibleContent},
},
},
FileName: "test.yaml",
},
wantErr: "",
},
{
name: "valid",
content: `
@ -399,6 +489,7 @@ body:
- Option 1 of dropdown
- Option 2 of dropdown
- Option 3 of dropdown
default: 1
validations:
required: true
- type: checkboxes
@ -475,6 +566,7 @@ body:
"Option 2 of dropdown",
"Option 3 of dropdown",
},
"default": 1,
},
Validations: map[string]any{
"required": true,


@ -0,0 +1,190 @@
3D Slicer Contribution and Software License Agreement ("Agreement")
Version 1.0 (December 20, 2005)
This Agreement covers contributions to and downloads from the 3D
Slicer project ("Slicer") maintained by The Brigham and Women's
Hospital, Inc. ("Brigham"). Part A of this Agreement applies to
contributions of software and/or data to Slicer (including making
revisions of or additions to code and/or data already in Slicer). Part
B of this Agreement applies to downloads of software and/or data from
Slicer. Part C of this Agreement applies to all transactions with
Slicer. If you distribute Software (as defined below) downloaded from
Slicer, all of the paragraphs of Part B of this Agreement must be
included with and apply to such Software.
Your contribution of software and/or data to Slicer (including prior
to the date of the first publication of this Agreement, each a
"Contribution") and/or downloading, copying, modifying, displaying,
distributing or use of any software and/or data from Slicer
(collectively, the "Software") constitutes acceptance of all of the
terms and conditions of this Agreement. If you do not agree to such
terms and conditions, you have no right to contribute your
Contribution, or to download, copy, modify, display, distribute or use
the Software.
PART A. CONTRIBUTION AGREEMENT - License to Brigham with Right to
Sublicense ("Contribution Agreement").
1. As used in this Contribution Agreement, "you" means the individual
contributing the Contribution to Slicer and the institution or
entity which employs or is otherwise affiliated with such
individual in connection with such Contribution.
2. This Contribution Agreement applies to all Contributions made to
Slicer, including without limitation Contributions made prior to
the date of first publication of this Agreement. If at any time you
make a Contribution to Slicer, you represent that (i) you are
legally authorized and entitled to make such Contribution and to
grant all licenses granted in this Contribution Agreement with
respect to such Contribution; (ii) if your Contribution includes
any patient data, all such data is de-identified in accordance with
U.S. confidentiality and security laws and requirements, including
but not limited to the Health Insurance Portability and
Accountability Act (HIPAA) and its regulations, and your disclosure
of such data for the purposes contemplated by this Agreement is
properly authorized and in compliance with all applicable laws and
regulations; and (iii) you have preserved in the Contribution all
applicable attributions, copyright notices and licenses for any
third party software or data included in the Contribution.
3. Except for the licenses granted in this Agreement, you reserve all
right, title and interest in your Contribution.
4. You hereby grant to Brigham, with the right to sublicense, a
perpetual, worldwide, non-exclusive, no charge, royalty-free,
irrevocable license to use, reproduce, make derivative works of,
display and distribute the Contribution. If your Contribution is
protected by patent, you hereby grant to Brigham, with the right to
sublicense, a perpetual, worldwide, non-exclusive, no-charge,
royalty-free, irrevocable license under your interest in patent
rights covering the Contribution, to make, have made, use, sell and
otherwise transfer your Contribution, alone or in combination with
any other code.
5. You acknowledge and agree that Brigham may incorporate your
Contribution into Slicer and may make Slicer available to members
of the public on an open source basis under terms substantially in
accordance with the Software License set forth in Part B of this
Agreement. You further acknowledge and agree that Brigham shall
have no liability arising in connection with claims resulting from
your breach of any of the terms of this Agreement.
6. YOU WARRANT THAT TO THE BEST OF YOUR KNOWLEDGE YOUR CONTRIBUTION
DOES NOT CONTAIN ANY CODE THAT REQUIRES OR PRESCRIBES AN "OPEN
SOURCE LICENSE" FOR DERIVATIVE WORKS (by way of non-limiting
example, the GNU General Public License or other so-called
"reciprocal" license that requires any derived work to be licensed
under the GNU General Public License or other "open source
license").
PART B. DOWNLOADING AGREEMENT - License from Brigham with Right to
Sublicense ("Software License").
1. As used in this Software License, "you" means the individual
downloading and/or using, reproducing, modifying, displaying and/or
distributing the Software and the institution or entity which
employs or is otherwise affiliated with such individual in
connection therewith. The Brigham and Women's Hospital,
Inc. ("Brigham") hereby grants you, with right to sublicense, with
respect to Brigham's rights in the software, and data, if any,
which is the subject of this Software License (collectively, the
"Software"), a royalty-free, non-exclusive license to use,
reproduce, make derivative works of, display and distribute the
Software, provided that:
(a) you accept and adhere to all of the terms and conditions of this
Software License;
(b) in connection with any copy of or sublicense of all or any portion
of the Software, all of the terms and conditions in this Software
License shall appear in and shall apply to such copy and such
sublicense, including without limitation all source and executable
forms and on any user documentation, prefaced with the following
words: "All or portions of this licensed product (such portions are
the "Software") have been obtained under license from The Brigham and
Women's Hospital, Inc. and are subject to the following terms and
conditions:"
(c) you preserve and maintain all applicable attributions, copyright
notices and licenses included in or applicable to the Software;
(d) modified versions of the Software must be clearly identified and
marked as such, and must not be misrepresented as being the original
Software; and
(e) you consider making, but are under no obligation to make, the
source code of any of your modifications to the Software freely
available to others on an open source basis.
2. The license granted in this Software License includes without
limitation the right to (i) incorporate the Software into
proprietary programs (subject to any restrictions applicable to
such programs), (ii) add your own copyright statement to your
modifications of the Software, and (iii) provide additional or
different license terms and conditions in your sublicenses of
modifications of the Software; provided that in each case your use,
reproduction or distribution of such modifications otherwise
complies with the conditions stated in this Software License.
3. This Software License does not grant any rights with respect to
third party software, except those rights that Brigham has been
authorized by a third party to grant to you, and accordingly you
are solely responsible for (i) obtaining any permissions from third
parties that you need to use, reproduce, make derivative works of,
display and distribute the Software, and (ii) informing your
sublicensees, including without limitation your end-users, of their
obligations to secure any such required permissions.
4. The Software has been designed for research purposes only and has
not been reviewed or approved by the Food and Drug Administration
or by any other agency. YOU ACKNOWLEDGE AND AGREE THAT CLINICAL
APPLICATIONS ARE NEITHER RECOMMENDED NOR ADVISED. Any
commercialization of the Software is at the sole risk of the party
or parties engaged in such commercialization. You further agree to
use, reproduce, make derivative works of, display and distribute
the Software in compliance with all applicable governmental laws,
regulations and orders, including without limitation those relating
to export and import control.
5. The Software is provided "AS IS" and neither Brigham nor any
contributor to the software (each a "Contributor") shall have any
obligation to provide maintenance, support, updates, enhancements
or modifications thereto. BRIGHAM AND ALL CONTRIBUTORS SPECIFICALLY
DISCLAIM ALL EXPRESS AND IMPLIED WARRANTIES OF ANY KIND INCLUDING,
BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR
A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
BRIGHAM OR ANY CONTRIBUTOR BE LIABLE TO ANY PARTY FOR DIRECT,
INDIRECT, SPECIAL, INCIDENTAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ARISING IN ANY WAY
RELATED TO THE SOFTWARE, EVEN IF BRIGHAM OR ANY CONTRIBUTOR HAS
BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. TO THE MAXIMUM
EXTENT NOT PROHIBITED BY LAW OR REGULATION, YOU FURTHER ASSUME ALL
LIABILITY FOR YOUR USE, REPRODUCTION, MAKING OF DERIVATIVE WORKS,
DISPLAY, LICENSE OR DISTRIBUTION OF THE SOFTWARE AND AGREE TO
INDEMNIFY AND HOLD HARMLESS BRIGHAM AND ALL CONTRIBUTORS FROM AND
AGAINST ANY AND ALL CLAIMS, SUITS, ACTIONS, DEMANDS AND JUDGMENTS
ARISING THEREFROM.
6. None of the names, logos or trademarks of Brigham or any of
Brigham's affiliates or any of the Contributors, or any funding
agency, may be used to endorse or promote products produced in
whole or in part by operation of the Software or derived from or
based on the Software without specific prior written permission
from the applicable party.
7. Any use, reproduction or distribution of the Software which is not
in accordance with this Software License shall automatically revoke
all rights granted to you under this Software License and render
Paragraphs 1 and 2 of this Software License null and void.
8. This Software License does not grant any rights in or to any
intellectual property owned by Brigham or any Contributor except
those rights expressly granted hereunder.
PART C. MISCELLANEOUS
This Agreement shall be governed by and construed in accordance with
the laws of The Commonwealth of Massachusetts without regard to
principles of conflicts of law. This Agreement shall supercede and
replace any license terms that you may have agreed to previously with
respect to Slicer.


@ -0,0 +1,13 @@
Specific permission is also granted to link Asterisk with OpenSSL, OpenH323
UniMRCP, and/or the UW IMAP Toolkit and distribute the resulting binary files.
In addition, Asterisk implements several management/control protocols.
This includes the Asterisk Manager Interface (AMI), the Asterisk Gateway
Interface (AGI), and the Asterisk REST Interface (ARI). It is our belief
that applications using these protocols to manage or control an Asterisk
instance do not have to be licensed under the GPL or a compatible license,
as we believe these protocols do not create a 'derivative work' as referred
to in the GPL. However, should any court or other judiciary body find that
these protocols do fall under the terms of the GPL, then we hereby grant you a
license to use these protocols in combination with Asterisk in external
applications licensed under any license you wish.


@ -0,0 +1,25 @@
Copyright (c) 1993 Intel Corporation
Intel hereby grants you permission to copy, modify, and distribute this
software and its documentation. Intel grants this permission provided
that the above copyright notice appears in all copies and that both the
copyright notice and this permission notice appear in supporting
documentation. In addition, Intel grants this permission provided that
you prominently mark as "not part of the original" any modifications
made to this software or documentation, and that the name of Intel
Corporation not be used in advertising or publicity pertaining to
distribution of the software or the documentation without specific,
written prior permission.
Intel Corporation provides this AS IS, WITHOUT ANY WARRANTY, EXPRESS OR
IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTY OF MERCHANTABILITY
OR FITNESS FOR A PARTICULAR PURPOSE. Intel makes no guarantee or
representations regarding the use of, or the results of the use of,
the software and documentation in terms of correctness, accuracy,
reliability, currentness, or otherwise; and you rely on the software,
documentation and results solely at your own risk.
IN NO EVENT SHALL INTEL BE LIABLE FOR ANY LOSS OF USE, LOSS OF BUSINESS,
LOSS OF PROFITS, INDIRECT, INCIDENTAL, SPECIAL OR CONSEQUENTIAL DAMAGES
OF ANY KIND. IN NO EVENT SHALL INTEL'S TOTAL LIABILITY EXCEED THE SUM
PAID TO INTEL FOR THE PRODUCT LICENSED HEREUNDER.


@ -0,0 +1,22 @@
Copyright (C) 1994 by the University of Southern California
EXPORT OF THIS SOFTWARE from the United States of America may
require a specific license from the United States Government. It
is the responsibility of any person or organization
contemplating export to obtain such a license before exporting.
WITHIN THAT CONSTRAINT, permission to copy, modify, and distribute
this software and its documentation in source and binary forms is
hereby granted, provided that any documentation or other materials
related to such distribution or use acknowledge that the software
was developed by the University of Southern California.
DISCLAIMER OF WARRANTY. THIS SOFTWARE IS PROVIDED "AS IS". The
University of Southern California MAKES NO REPRESENTATIONS OR
WARRANTIES, EXPRESS OR IMPLIED. By way of example, but not
limitation, the University of Southern California MAKES NO
REPRESENTATIONS OR WARRANTIES OF MERCHANTABILITY OR FITNESS FOR ANY
PARTICULAR PURPOSE. The University of Southern California shall not
be held liable for any liability nor for any direct, indirect, or
consequential damages with respect to any claim by the user or
distributor of the ksu software.

options/license/NCBI-PD (new file, 19 lines)

@ -0,0 +1,19 @@
PUBLIC DOMAIN NOTICE
National Center for Biotechnology Information
This software is a "United States Government Work" under the terms of the
United States Copyright Act. It was written as part of the authors'
official duties as United States Government employees and thus cannot
be copyrighted. This software is freely available to the public for
use. The National Library of Medicine and the U.S. Government have not
placed any restriction on its use or reproduction.
Although all reasonable efforts have been taken to ensure the accuracy
and reliability of the software and data, the NLM and the U.S.
Government do not and cannot warrant the performance or results that
may be obtained by using this software or data. The NLM and the U.S.
Government disclaim all warranties, express or implied, including
warranties of performance, merchantability or fitness for any
particular purpose.
Please cite the author in any work or product based on this material.


@ -175,7 +175,7 @@ func Migrate(ctx *context.APIContext) {
Description: opts.Description,
OriginalURL: form.CloneAddr,
GitServiceType: gitServiceType,
IsPrivate: opts.Private,
IsPrivate: opts.Private || setting.Repository.ForcePrivate,
IsMirror: opts.Mirror,
Status: repo_model.RepositoryBeingMigrated,
})


@ -252,7 +252,7 @@ func CreateUserRepo(ctx *context.APIContext, owner *user_model.User, opt api.Cre
Gitignores: opt.Gitignores,
License: opt.License,
Readme: opt.Readme,
IsPrivate: opt.Private,
IsPrivate: opt.Private || setting.Repository.ForcePrivate,
AutoInit: opt.AutoInit,
DefaultBranch: opt.DefaultBranch,
TrustModel: repo_model.ToTrustModel(opt.TrustModel),
@ -364,7 +364,7 @@ func Generate(ctx *context.APIContext) {
Name: form.Name,
DefaultBranch: form.DefaultBranch,
Description: form.Description,
Private: form.Private,
Private: form.Private || setting.Repository.ForcePrivate,
GitContent: form.GitContent,
Topics: form.Topics,
GitHooks: form.GitHooks,


@ -542,20 +542,45 @@ func GrantApplicationOAuth(ctx *context.Context) {
ctx.Error(http.StatusBadRequest)
return
}
if !form.Granted {
handleAuthorizeError(ctx, AuthorizeError{
State: form.State,
ErrorDescription: "the request is denied",
ErrorCode: ErrorCodeAccessDenied,
}, form.RedirectURI)
return
}
app, err := auth.GetOAuth2ApplicationByClientID(ctx, form.ClientID)
if err != nil {
ctx.ServerError("GetOAuth2ApplicationByClientID", err)
return
}
grant, err := app.CreateGrant(ctx, ctx.Doer.ID, form.Scope)
grant, err := app.GetGrantByUserID(ctx, ctx.Doer.ID)
if err != nil {
handleServerError(ctx, form.State, form.RedirectURI)
return
}
if grant == nil {
grant, err = app.CreateGrant(ctx, ctx.Doer.ID, form.Scope)
if err != nil {
handleAuthorizeError(ctx, AuthorizeError{
State: form.State,
ErrorDescription: "cannot create grant for user",
ErrorCode: ErrorCodeServerError,
}, form.RedirectURI)
return
}
} else if grant.Scope != form.Scope {
handleAuthorizeError(ctx, AuthorizeError{
State: form.State,
ErrorDescription: "cannot create grant for user",
ErrorDescription: "a grant exists with different scope",
ErrorCode: ErrorCodeServerError,
}, form.RedirectURI)
return
}
if len(form.Nonce) > 0 {
err := grant.SetNonce(ctx, form.Nonce)
if err != nil {


@ -931,7 +931,7 @@ func ExcerptBlob(ctx *context.Context) {
}
}
ctx.Data["section"] = section
ctx.Data["FileNameHash"] = base.EncodeSha1(filePath)
ctx.Data["FileNameHash"] = git.HashFilePathForWebUI(filePath)
ctx.Data["AfterCommitID"] = commitID
ctx.Data["Anchor"] = anchor
ctx.HTML(http.StatusOK, tplBlobExcerpt)


@ -248,7 +248,7 @@ func CreatePost(ctx *context.Context) {
opts := repo_service.GenerateRepoOptions{
Name: form.RepoName,
Description: form.Description,
Private: form.Private,
Private: form.Private || setting.Repository.ForcePrivate,
GitContent: form.GitContent,
Topics: form.Topics,
GitHooks: form.GitHooks,


@ -22,6 +22,7 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/queue"
notify_service "code.gitea.io/gitea/services/notify"
pull_service "code.gitea.io/gitea/services/pull"
)
@ -30,6 +31,8 @@ var prAutoMergeQueue *queue.WorkerPoolQueue[string]
// Init runs the task queue that handles auto merges
func Init() error {
notify_service.RegisterNotifier(NewNotifier())
prAutoMergeQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_auto_merge", handler)
if prAutoMergeQueue == nil {
return fmt.Errorf("unable to create pr_auto_merge queue")
@ -47,7 +50,7 @@ func handler(items ...string) []string {
log.Error("could not parse data from pr_auto_merge queue (%v): %v", s, err)
continue
}
handlePull(id, sha)
handlePullRequestAutoMerge(id, sha)
}
return nil
}
@ -62,16 +65,6 @@ func addToQueue(pr *issues_model.PullRequest, sha string) {
// ScheduleAutoMerge if schedule is false and no error, pull can be merged directly
func ScheduleAutoMerge(ctx context.Context, doer *user_model.User, pull *issues_model.PullRequest, style repo_model.MergeStyle, message string) (scheduled bool, err error) {
err = db.WithTx(ctx, func(ctx context.Context) error {
lastCommitStatus, err := pull_service.GetPullRequestCommitStatusState(ctx, pull)
if err != nil {
return err
}
// we don't need to schedule
if lastCommitStatus.IsSuccess() {
return nil
}
if err := pull_model.ScheduleAutoMerge(ctx, doer, pull.ID, style, message); err != nil {
return err
}
@ -95,8 +88,8 @@ func RemoveScheduledAutoMerge(ctx context.Context, doer *user_model.User, pull *
})
}
// MergeScheduledPullRequest merges a previously scheduled pull request when all checks succeeded
func MergeScheduledPullRequest(ctx context.Context, sha string, repo *repo_model.Repository) error {
// StartPRCheckAndAutoMergeBySHA starts an automerge check and auto-merge task for all pull requests of the repository that match the given SHA
func StartPRCheckAndAutoMergeBySHA(ctx context.Context, sha string, repo *repo_model.Repository) error {
pulls, err := getPullRequestsByHeadSHA(ctx, sha, repo, func(pr *issues_model.PullRequest) bool {
return !pr.HasMerged && pr.CanAutoMerge()
})
@ -111,6 +104,32 @@ func MergeScheduledPullRequest(ctx context.Context, sha string, repo *repo_model
return nil
}
// StartPRCheckAndAutoMerge starts an automerge check and auto-merge task for a pull request
func StartPRCheckAndAutoMerge(ctx context.Context, pull *issues_model.PullRequest) {
if pull == nil || pull.HasMerged || !pull.CanAutoMerge() {
return
}
if err := pull.LoadBaseRepo(ctx); err != nil {
log.Error("LoadBaseRepo: %v", err)
return
}
gitRepo, err := gitrepo.OpenRepository(ctx, pull.BaseRepo)
if err != nil {
log.Error("OpenRepository: %v", err)
return
}
defer gitRepo.Close()
commitID, err := gitRepo.GetRefCommitID(pull.GetGitRefName())
if err != nil {
log.Error("GetRefCommitID: %v", err)
return
}
addToQueue(pull, commitID)
}
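// getPullRequestsByHeadSHA collects the pull requests of repo whose head currently points at sha, keyed by ID and filtered by the given predicate.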
func getPullRequestsByHeadSHA(ctx context.Context, sha string, repo *repo_model.Repository, filter func(*issues_model.PullRequest) bool) (map[int64]*issues_model.PullRequest, error) {
gitRepo, err := gitrepo.OpenRepository(ctx, repo)
if err != nil {
@ -161,7 +180,8 @@ func getPullRequestsByHeadSHA(ctx context.Context, sha string, repo *repo_model.
return pulls, nil
}
func handlePull(pullID int64, sha string) {
// handlePullRequestAutoMerge merges the pull request if all checks are successful
func handlePullRequestAutoMerge(pullID int64, sha string) {
ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(),
fmt.Sprintf("Handle AutoMerge of PR[%d] with sha[%s]", pullID, sha))
defer finished()
@ -182,24 +202,50 @@ func handlePull(pullID int64, sha string) {
return
}
if err = pr.LoadBaseRepo(ctx); err != nil {
log.Error("%-v LoadBaseRepo: %v", pr, err)
return
}
// check the sha is the same as pull request head commit id
baseGitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
if err != nil {
log.Error("OpenRepository: %v", err)
return
}
defer baseGitRepo.Close()
headCommitID, err := baseGitRepo.GetRefCommitID(pr.GetGitRefName())
if err != nil {
log.Error("GetRefCommitID: %v", err)
return
}
if headCommitID != sha {
log.Warn("Head commit id of auto merge %-v does not match sha [%s], it may means the head branch has been updated. Just ignore this request because a new request expected in the queue", pr, sha)
return
}
// Get all checks for this pr
// We get the latest sha commit hash again to handle the case where the check of a previous push
// did not succeed or was not finished yet.
if err = pr.LoadHeadRepo(ctx); err != nil {
log.Error("%-v LoadHeadRepo: %v", pr, err)
return
}
headGitRepo, err := gitrepo.OpenRepository(ctx, pr.HeadRepo)
if err != nil {
log.Error("OpenRepository %-v: %v", pr.HeadRepo, err)
return
var headGitRepo *git.Repository
if pr.BaseRepoID == pr.HeadRepoID {
headGitRepo = baseGitRepo
} else {
headGitRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
if err != nil {
log.Error("OpenRepository %-v: %v", pr.HeadRepo, err)
return
}
defer headGitRepo.Close()
}
defer headGitRepo.Close()
headBranchExist := headGitRepo.IsBranchExist(pr.HeadBranch)
if pr.HeadRepo == nil || !headBranchExist {
log.Warn("Head branch of auto merge %-v does not exist [HeadRepoID: %d, Branch: %s]", pr, pr.HeadRepoID, pr.HeadBranch)
return
@ -238,25 +284,11 @@ func handlePull(pullID int64, sha string) {
return
}
var baseGitRepo *git.Repository
if pr.BaseRepoID == pr.HeadRepoID {
baseGitRepo = headGitRepo
} else {
if err = pr.LoadBaseRepo(ctx); err != nil {
log.Error("%-v LoadBaseRepo: %v", pr, err)
return
}
baseGitRepo, err = gitrepo.OpenRepository(ctx, pr.BaseRepo)
if err != nil {
log.Error("OpenRepository %-v: %v", pr.BaseRepo, err)
return
}
defer baseGitRepo.Close()
}
if err := pull_service.Merge(ctx, pr, doer, baseGitRepo, scheduledPRM.MergeStyle, "", scheduledPRM.Message, true); err != nil {
log.Error("pull_service.Merge: %v", err)
// FIXME: if the merge failed, we should display an error message on the pull request page.
// The resolution is to add a new column named `error_message` to the automerge table to store the error message and display it
// on the pull request page. But this should not be done in a bug-fix PR that will be backported to a release branch.
return
}
}

View file

@ -0,0 +1,46 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package automerge
import (
"context"
issues_model "code.gitea.io/gitea/models/issues"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
notify_service "code.gitea.io/gitea/services/notify"
)
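// automergeNotifier listens for review events and re-queues automerge checks for the affected pull requests.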
type automergeNotifier struct {
notify_service.NullNotifier
}
var _ notify_service.Notifier = &automergeNotifier{}
// NewNotifier creates a new automergeNotifier notifier
func NewNotifier() notify_service.Notifier {
return &automergeNotifier{}
}
func (n *automergeNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, review *issues_model.Review, comment *issues_model.Comment, mentions []*user_model.User) {
// as missing or blocking reviews could have blocked a pending automerge, recheck
if review.Type == issues_model.ReviewTypeApprove {
if err := StartPRCheckAndAutoMergeBySHA(ctx, review.CommitID, pr.BaseRepo); err != nil {
log.Error("StartPullRequestAutoMergeCheckBySHA: %v", err)
}
}
}
func (n *automergeNotifier) PullReviewDismiss(ctx context.Context, doer *user_model.User, review *issues_model.Review, comment *issues_model.Comment) {
if err := review.LoadIssue(ctx); err != nil {
log.Error("LoadIssue: %v", err)
return
}
if err := review.Issue.LoadPullRequest(ctx); err != nil {
log.Error("LoadPullRequest: %v", err)
return
}
// as reviews could have blocked a pending automerge, recheck
StartPRCheckAndAutoMerge(ctx, review.Issue.PullRequest)
}


@ -162,6 +162,7 @@ func (f *AuthorizationForm) Validate(req *http.Request, errs binding.Errors) bin
// GrantApplicationForm form for authorizing oauth2 clients
type GrantApplicationForm struct {
ClientID string `binding:"Required"`
Granted bool
RedirectURI string
State string
Scope string


@ -23,7 +23,6 @@ import (
pull_model "code.gitea.io/gitea/models/pull"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/analyze"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/highlight"
@ -742,7 +741,7 @@ parsingLoop:
diffLineTypeBuffers[DiffLineAdd] = new(bytes.Buffer)
diffLineTypeBuffers[DiffLineDel] = new(bytes.Buffer)
for _, f := range diff.Files {
f.NameHash = base.EncodeSha1(f.Name)
f.NameHash = git.HashFilePathForWebUI(f.Name)
for _, buffer := range diffLineTypeBuffers {
buffer.Reset()


@ -107,7 +107,7 @@ func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.Migrate
Description: repo.Description,
OriginalURL: repo.OriginalURL,
GitServiceType: opts.GitServiceType,
IsPrivate: opts.Private,
IsPrivate: opts.Private || setting.Repository.ForcePrivate,
IsMirror: opts.Mirror,
Status: repo_model.RepositoryBeingMigrated,
})


@ -117,7 +117,7 @@ func CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, creato
}
if status.State.IsSuccess() {
if err := automerge.MergeScheduledPullRequest(ctx, sha, repo); err != nil {
if err := automerge.StartPRCheckAndAutoMergeBySHA(ctx, sha, repo); err != nil {
return fmt.Errorf("MergeScheduledPullRequest[repo_id: %d, user_id: %d, sha: %s]: %w", repo.ID, creator.ID, sha, err)
}
}


@ -85,7 +85,7 @@ func PushCreateRepo(ctx context.Context, authUser, owner *user_model.User, repoN
repo, err := CreateRepository(ctx, authUser, owner, CreateRepoOptions{
Name: repoName,
IsPrivate: setting.Repository.DefaultPushCreatePrivate,
IsPrivate: setting.Repository.DefaultPushCreatePrivate || setting.Repository.ForcePrivate,
})
if err != nil {
return nil, err


@ -107,7 +107,7 @@ func CreateMigrateTask(ctx context.Context, doer, u *user_model.User, opts base.
Description: opts.Description,
OriginalURL: opts.OriginalURL,
GitServiceType: opts.GitServiceType,
IsPrivate: opts.Private,
IsPrivate: opts.Private || setting.Repository.ForcePrivate,
IsMirror: opts.Mirror,
Status: repo_model.RepositoryBeingMigrated,
})


@ -66,7 +66,7 @@ export default {
'xl': '12px',
'2xl': '16px',
'3xl': '24px',
'full': 'var(--border-radius-circle)', // 50%
'full': 'var(--border-radius-full)',
},
fontFamily: {
sans: 'var(--fonts-regular)',


@ -87,7 +87,7 @@
<td class="eight wide">
{{if .DBBranch.IsDeleted}}
<div class="flex-text-block">
<a class="gt-ellipsis" href="{{$.RepoLink}}/src/branch/{{PathEscapeSegments .DBBranch.Name}}">{{.DBBranch.Name}}</a>
<span class="gt-ellipsis">{{.DBBranch.Name}}</span>
<button class="btn interact-fg tw-px-1" data-clipboard-text="{{.DBBranch.Name}}">{{svg "octicon-copy" 14}}</button>
</div>
<p class="info">{{ctx.Locale.Tr "repo.branch.deleted_by" .DBBranch.DeletedBy.Name}} {{TimeSinceUnix .DBBranch.DeletedUnix ctx.Locale}}</p>


@ -2,7 +2,7 @@
{{template "repo/issue/fields/header" .}}
{{/* FIXME: required validation */}}
<div class="ui fluid selection dropdown {{if .item.Attributes.multiple}}multiple clearable{{end}}">
<input type="hidden" name="form-field-{{.item.ID}}" value="0">
<input type="hidden" name="form-field-{{.item.ID}}" value="{{.item.Attributes.default}}">
{{svg "octicon-triangle-down" 14 "dropdown icon"}}
{{if not .item.Validations.required}}
{{svg "octicon-x" 14 "remove icon"}}


@ -28,9 +28,10 @@
<label>{{ctx.Locale.Tr "repo.visibility"}}</label>
<div class="ui checkbox" {{if and (not .Repository.IsPrivate) (gt .Repository.NumStars 0)}}data-tooltip-content="{{ctx.Locale.Tr "repo.stars_remove_warning"}}"{{end}}>
{{if .IsAdmin}}
<input name="private" type="checkbox" {{if .Repository.IsPrivate}}checked{{end}}>
<input name="private" type="checkbox" {{if .Repository.IsPrivate}}checked{{end}}>
{{else}}
<input name="private" type="checkbox" {{if .Repository.IsPrivate}}checked{{end}}{{if and $.ForcePrivate .Repository.IsPrivate}} readonly{{end}}>
<input name="private" type="checkbox" {{if .Repository.IsPrivate}}checked{{end}}{{if and $.ForcePrivate .Repository.IsPrivate}} disabled{{end}}>
{{if and .Repository.IsPrivate $.ForcePrivate}}<input type="hidden" name="private" value="{{.Repository.IsPrivate}}">{{end}}
{{end}}
<label>{{ctx.Locale.Tr "repo.visibility_helper"}} {{if .Repository.NumForks}}<span class="text red">{{ctx.Locale.Tr "repo.visibility_fork_helper"}}</span>{{end}}</label>
</div>


@ -23,8 +23,8 @@
<input type="hidden" name="scope" value="{{.Scope}}">
<input type="hidden" name="nonce" value="{{.Nonce}}">
<input type="hidden" name="redirect_uri" value="{{.RedirectURI}}">
<button type="submit" id="authorize-app" value="{{ctx.Locale.Tr "auth.authorize_application"}}" class="ui red inline button">{{ctx.Locale.Tr "auth.authorize_application"}}</button>
<a href="{{.RedirectURI}}" class="ui basic primary inline button">Cancel</a>
<button type="submit" id="authorize-app" name="granted" value="true" class="ui red inline button">{{ctx.Locale.Tr "auth.authorize_application"}}</button>
<button type="submit" name="granted" value="false" class="ui basic primary inline button">{{ctx.Locale.Tr "cancel"}}</button>
</form>
</div>
</div>


@ -30,28 +30,32 @@ import (
func TestCreateFile(t *testing.T) {
onGiteaRun(t, func(t *testing.T, u *url.URL) {
session := loginUser(t, "user2")
// Request editor page
req := NewRequest(t, "GET", "/user2/repo1/_new/master/")
resp := session.MakeRequest(t, req, http.StatusOK)
doc := NewHTMLParser(t, resp.Body)
lastCommit := doc.GetInputValueByName("last_commit")
assert.NotEmpty(t, lastCommit)
// Save new file to master branch
req = NewRequestWithValues(t, "POST", "/user2/repo1/_new/master/", map[string]string{
"_csrf": doc.GetCSRF(),
"last_commit": lastCommit,
"tree_path": "test.txt",
"content": "Content",
"commit_choice": "direct",
"commit_mail_id": "3",
})
session.MakeRequest(t, req, http.StatusSeeOther)
testCreateFile(t, session, "user2", "repo1", "master", "test.txt", "Content")
})
}
func testCreateFile(t *testing.T, session *TestSession, user, repo, branch, filePath, content string) *httptest.ResponseRecorder {
// Request editor page
newURL := fmt.Sprintf("/%s/%s/_new/%s/", user, repo, branch)
req := NewRequest(t, "GET", newURL)
resp := session.MakeRequest(t, req, http.StatusOK)
doc := NewHTMLParser(t, resp.Body)
lastCommit := doc.GetInputValueByName("last_commit")
assert.NotEmpty(t, lastCommit)
// Save new file to master branch
req = NewRequestWithValues(t, "POST", newURL, map[string]string{
"_csrf": doc.GetCSRF(),
"last_commit": lastCommit,
"tree_path": filePath,
"content": content,
"commit_choice": "direct",
"commit_mail_id": "3",
})
return session.MakeRequest(t, req, http.StatusSeeOther)
}
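// With the editor round-trip extracted into testCreateFile, other integration
// tests can create a file on any repository and branch with a single call, for
// example (hypothetical branch and file names, not part of this change):
//   resp := testCreateFile(t, session, "user2", "repo1", "develop", "docs/example.txt", "example content")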
func TestCreateFileOnProtectedBranch(t *testing.T) {
onGiteaRun(t, func(t *testing.T, u *url.URL) {
session := loginUser(t, "user2")

View file

@ -9,13 +9,15 @@ import (
"testing"
unit_model "code.gitea.io/gitea/models/unit"
"code.gitea.io/gitea/modules/test"
"code.gitea.io/gitea/tests"
)
func TestOrgProjectAccess(t *testing.T) {
defer tests.PrepareTestEnv(t)()
defer test.MockVariableValue(&unit_model.DisabledRepoUnits, append(slices.Clone(unit_model.DisabledRepoUnits), unit_model.TypeProjects))()
disabledRepoUnits := unit_model.DisabledRepoUnitsGet()
unit_model.DisabledRepoUnitsSet(append(slices.Clone(disabledRepoUnits), unit_model.TypeProjects))
defer unit_model.DisabledRepoUnitsSet(disabledRepoUnits)
// repo project, 404
req := NewRequest(t, "GET", "/user2/repo1/projects")

View file

@ -12,6 +12,8 @@ import (
"net/url"
"os"
"path"
"path/filepath"
"strconv"
"strings"
"testing"
"time"
@ -19,7 +21,9 @@ import (
"code.gitea.io/gitea/models"
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
pull_model "code.gitea.io/gitea/models/pull"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
@ -32,7 +36,9 @@ import (
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/test"
"code.gitea.io/gitea/modules/translation"
"code.gitea.io/gitea/services/automerge"
"code.gitea.io/gitea/services/pull"
commitstatus_service "code.gitea.io/gitea/services/repository/commitstatus"
files_service "code.gitea.io/gitea/services/repository/files"
webhook_service "code.gitea.io/gitea/services/webhook"
@ -661,3 +667,195 @@ func TestPullMergeIndexerNotifier(t *testing.T) {
}
})
}
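// testResetRepo force-resets the given branch to commitID by overwriting the
// loose ref file under refs/heads directly, then re-opens the repository to
// check that the branch really points at the expected commit.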
func testResetRepo(t *testing.T, repoPath, branch, commitID string) {
f, err := os.OpenFile(filepath.Join(repoPath, "refs", "heads", branch), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o644)
assert.NoError(t, err)
_, err = f.WriteString(commitID + "\n")
assert.NoError(t, err)
f.Close()
repo, err := git.OpenRepository(context.Background(), repoPath)
assert.NoError(t, err)
defer repo.Close()
id, err := repo.GetBranchCommitID(branch)
assert.NoError(t, err)
assert.EqualValues(t, commitID, id)
}
func TestPullAutoMergeAfterCommitStatusSucceed(t *testing.T) {
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
// create a pull request
session := loginUser(t, "user1")
user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
forkedName := "repo1-1"
testRepoFork(t, session, "user2", "repo1", "user1", forkedName)
defer func() {
testDeleteRepository(t, session, "user1", forkedName)
}()
testEditFile(t, session, "user1", forkedName, "master", "README.md", "Hello, World (Edited)\n")
testPullCreate(t, session, "user1", forkedName, false, "master", "master", "Indexer notifier test pull")
baseRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: "user2", Name: "repo1"})
forkedRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: "user1", Name: forkedName})
pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{
BaseRepoID: baseRepo.ID,
BaseBranch: "master",
HeadRepoID: forkedRepo.ID,
HeadBranch: "master",
})
// add protected branch for commit status
csrf := GetCSRF(t, session, "/user2/repo1/settings/branches")
// Change master branch to protected
req := NewRequestWithValues(t, "POST", "/user2/repo1/settings/branches/edit", map[string]string{
"_csrf": csrf,
"rule_name": "master",
"enable_push": "true",
"enable_status_check": "true",
"status_check_contexts": "gitea/actions",
})
session.MakeRequest(t, req, http.StatusSeeOther)
// the first attempt to schedule auto-merge inserts the record and returns true
scheduled, err := automerge.ScheduleAutoMerge(db.DefaultContext, user1, pr, repo_model.MergeStyleMerge, "auto merge test")
assert.NoError(t, err)
assert.True(t, scheduled)
// a second attempt fails because the automerge record already exists
scheduled, err = automerge.ScheduleAutoMerge(db.DefaultContext, user1, pr, repo_model.MergeStyleMerge, "auto merge test")
assert.Error(t, err)
assert.False(t, scheduled)
// reload pr again
pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: pr.ID})
assert.False(t, pr.HasMerged)
assert.Empty(t, pr.MergedCommitID)
// update commit status to success, then it should be merged automatically
baseGitRepo, err := gitrepo.OpenRepository(db.DefaultContext, baseRepo)
assert.NoError(t, err)
sha, err := baseGitRepo.GetRefCommitID(pr.GetGitRefName())
assert.NoError(t, err)
masterCommitID, err := baseGitRepo.GetBranchCommitID("master")
assert.NoError(t, err)
branches, _, err := baseGitRepo.GetBranchNames(0, 100)
assert.NoError(t, err)
assert.ElementsMatch(t, []string{"sub-home-md-img-check", "home-md-img-check", "pr-to-update", "branch2", "DefaultBranch", "develop", "feature/1", "master"}, branches)
baseGitRepo.Close()
defer func() {
testResetRepo(t, baseRepo.RepoPath(), "master", masterCommitID)
}()
err = commitstatus_service.CreateCommitStatus(db.DefaultContext, baseRepo, user1, sha, &git_model.CommitStatus{
State: api.CommitStatusSuccess,
TargetURL: "https://gitea.com",
Context: "gitea/actions",
})
assert.NoError(t, err)
time.Sleep(2 * time.Second)
// reload pr again
pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: pr.ID})
assert.True(t, pr.HasMerged)
assert.NotEmpty(t, pr.MergedCommitID)
unittest.AssertNotExistsBean(t, &pull_model.AutoMerge{PullID: pr.ID})
})
}
func TestPullAutoMergeAfterCommitStatusSucceedAndApproval(t *testing.T) {
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
// create a pull request
session := loginUser(t, "user1")
user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
forkedName := "repo1-2"
testRepoFork(t, session, "user2", "repo1", "user1", forkedName)
defer func() {
testDeleteRepository(t, session, "user1", forkedName)
}()
testEditFile(t, session, "user1", forkedName, "master", "README.md", "Hello, World (Edited)\n")
testPullCreate(t, session, "user1", forkedName, false, "master", "master", "Indexer notifier test pull")
baseRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: "user2", Name: "repo1"})
forkedRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: "user1", Name: forkedName})
pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{
BaseRepoID: baseRepo.ID,
BaseBranch: "master",
HeadRepoID: forkedRepo.ID,
HeadBranch: "master",
})
// add protected branch for commit status
csrf := GetCSRF(t, session, "/user2/repo1/settings/branches")
// Change master branch to protected
req := NewRequestWithValues(t, "POST", "/user2/repo1/settings/branches/edit", map[string]string{
"_csrf": csrf,
"rule_name": "master",
"enable_push": "true",
"enable_status_check": "true",
"status_check_contexts": "gitea/actions",
"required_approvals": "1",
})
session.MakeRequest(t, req, http.StatusSeeOther)
// the first attempt to schedule auto-merge inserts the record and returns true
scheduled, err := automerge.ScheduleAutoMerge(db.DefaultContext, user1, pr, repo_model.MergeStyleMerge, "auto merge test")
assert.NoError(t, err)
assert.True(t, scheduled)
// a second attempt fails because the automerge record already exists
scheduled, err = automerge.ScheduleAutoMerge(db.DefaultContext, user1, pr, repo_model.MergeStyleMerge, "auto merge test")
assert.Error(t, err)
assert.False(t, scheduled)
// reload pr again
pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: pr.ID})
assert.False(t, pr.HasMerged)
assert.Empty(t, pr.MergedCommitID)
// update commit status to success, then it should be merged automatically
baseGitRepo, err := gitrepo.OpenRepository(db.DefaultContext, baseRepo)
assert.NoError(t, err)
sha, err := baseGitRepo.GetRefCommitID(pr.GetGitRefName())
assert.NoError(t, err)
masterCommitID, err := baseGitRepo.GetBranchCommitID("master")
assert.NoError(t, err)
baseGitRepo.Close()
defer func() {
testResetRepo(t, baseRepo.RepoPath(), "master", masterCommitID)
}()
err = commitstatus_service.CreateCommitStatus(db.DefaultContext, baseRepo, user1, sha, &git_model.CommitStatus{
State: api.CommitStatusSuccess,
TargetURL: "https://gitea.com",
Context: "gitea/actions",
})
assert.NoError(t, err)
time.Sleep(2 * time.Second)
// reload pr again
pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: pr.ID})
assert.False(t, pr.HasMerged)
assert.Empty(t, pr.MergedCommitID)
// approve the PR from a non-author account
approveSession := loginUser(t, "user2")
req = NewRequest(t, "GET", fmt.Sprintf("/user2/repo1/pulls/%d", pr.Index))
resp := approveSession.MakeRequest(t, req, http.StatusOK)
htmlDoc := NewHTMLParser(t, resp.Body)
testSubmitReview(t, approveSession, htmlDoc.GetCSRF(), "user2", "repo1", strconv.Itoa(int(pr.Index)), sha, "approve", http.StatusOK)
time.Sleep(2 * time.Second)
// reload pr again
pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: pr.ID})
assert.True(t, pr.HasMerged)
assert.NotEmpty(t, pr.MergedCommitID)
unittest.AssertNotExistsBean(t, &pull_model.AutoMerge{PullID: pr.ID})
})
}

View file

@ -425,10 +425,10 @@ func TestPullView_GivenApproveOrRejectReviewOnClosedPR(t *testing.T) {
htmlDoc := NewHTMLParser(t, resp.Body)
// Submit an approve review on the PR.
testSubmitReview(t, user2Session, htmlDoc.GetCSRF(), "user2", "repo1", elem[4], "approve", http.StatusUnprocessableEntity)
testSubmitReview(t, user2Session, htmlDoc.GetCSRF(), "user2", "repo1", elem[4], "", "approve", http.StatusUnprocessableEntity)
// Submit a reject review on the PR.
testSubmitReview(t, user2Session, htmlDoc.GetCSRF(), "user2", "repo1", elem[4], "reject", http.StatusUnprocessableEntity)
testSubmitReview(t, user2Session, htmlDoc.GetCSRF(), "user2", "repo1", elem[4], "", "reject", http.StatusUnprocessableEntity)
})
t.Run("Submit approve/reject review on closed PR", func(t *testing.T) {
@ -445,18 +445,18 @@ func TestPullView_GivenApproveOrRejectReviewOnClosedPR(t *testing.T) {
htmlDoc := NewHTMLParser(t, resp.Body)
// Submit an approve review on the PR.
testSubmitReview(t, user2Session, htmlDoc.GetCSRF(), "user2", "repo1", elem[4], "approve", http.StatusUnprocessableEntity)
testSubmitReview(t, user2Session, htmlDoc.GetCSRF(), "user2", "repo1", elem[4], "", "approve", http.StatusUnprocessableEntity)
// Submit a reject review on the PR.
testSubmitReview(t, user2Session, htmlDoc.GetCSRF(), "user2", "repo1", elem[4], "reject", http.StatusUnprocessableEntity)
testSubmitReview(t, user2Session, htmlDoc.GetCSRF(), "user2", "repo1", elem[4], "", "reject", http.StatusUnprocessableEntity)
})
})
}
func testSubmitReview(t *testing.T, session *TestSession, csrf, owner, repo, pullNumber, reviewType string, expectedSubmitStatus int) *httptest.ResponseRecorder {
func testSubmitReview(t *testing.T, session *TestSession, csrf, owner, repo, pullNumber, commitID, reviewType string, expectedSubmitStatus int) *httptest.ResponseRecorder {
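	// commit_id is now passed through instead of always being sent empty, so
	// callers can approve a specific head commit (the auto-merge approval test
	// above passes the PR head sha); an empty string keeps the old behaviour.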
options := map[string]string{
"_csrf": csrf,
"commit_id": "",
"commit_id": commitID,
"content": "test",
"type": reviewType,
}

View file

@ -18,7 +18,7 @@
/* other variables */
--border-radius: 4px;
--border-radius-medium: 6px;
--border-radius-circle: 50%;
--border-radius-full: 99999px; /* TODO: use calc(infinity * 1px) */
--opacity-disabled: 0.55;
--height-loading: 16rem;
--repo-header-issue-min-height: 41px;
@ -1361,7 +1361,7 @@ svg.text.purple,
.color-icon {
display: inline-block;
border-radius: var(--border-radius-circle);
border-radius: var(--border-radius-full);
height: 14px;
width: 14px;
}

View file

@ -31,7 +31,7 @@
border-width: 4px;
border-style: solid;
border-color: var(--color-secondary) var(--color-secondary) var(--color-secondary-dark-8) var(--color-secondary-dark-8);
border-radius: var(--border-radius-circle);
border-radius: var(--border-radius-full);
}
.is-loading.loading-icon-2px::after {

View file

@ -787,7 +787,7 @@ td .commit-summary {
width: 34px;
height: 34px;
background-color: var(--color-timeline);
border-radius: var(--border-radius-circle);
border-radius: var(--border-radius-full);
display: flex;
float: left;
margin-left: -33px;

View file

@ -68,7 +68,7 @@
position: absolute;
left: -5.5px;
top: 30px;
border-radius: var(--border-radius-circle);
border-radius: var(--border-radius-full);
border: 2.5px solid var(--color-body);
}