Merge pull request '[gitea] Week 11 cherry-pick' (#2620) from earl-warren/forgejo:wip-gitea-cherry-pick into forgejo

Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/2620
Earl Warren 2024-03-13 00:44:21 +00:00
commit 0b2bae5517
289 changed files with 4993 additions and 3056 deletions

View file

@@ -189,6 +189,7 @@ package "code.gitea.io/gitea/modules/gitgraph"
package "code.gitea.io/gitea/modules/gitrepo"
func GetBranchCommitID
+func GetWikiDefaultBranch
package "code.gitea.io/gitea/modules/graceful"
func (*Manager).TerminateContext

View file

@@ -8,7 +8,9 @@
},
"ghcr.io/devcontainers/features/git-lfs:1.1.0": {},
"ghcr.io/devcontainers-contrib/features/poetry:2": {},
-"ghcr.io/devcontainers/features/python:1": {}
+"ghcr.io/devcontainers/features/python:1": {
+"version": "3.12"
+}
},
"customizations": {
"vscode": {

View file

@@ -961,6 +961,7 @@ fomantic:
cd $(FOMANTIC_WORK_DIR) && npm install --no-save
cp -f $(FOMANTIC_WORK_DIR)/theme.config.less $(FOMANTIC_WORK_DIR)/node_modules/fomantic-ui/src/theme.config
cp -rf $(FOMANTIC_WORK_DIR)/_site $(FOMANTIC_WORK_DIR)/node_modules/fomantic-ui/src/
+$(SED_INPLACE) -e 's/ overrideBrowserslist\r/ overrideBrowserslist: ["defaults"]\r/g' $(FOMANTIC_WORK_DIR)/node_modules/fomantic-ui/tasks/config/tasks.js
cd $(FOMANTIC_WORK_DIR) && npx gulp -f node_modules/fomantic-ui/gulpfile.js build
# fomantic uses "touchstart" as click event for some browsers, it's not ideal, so we force fomantic to always use "click" as click event
$(SED_INPLACE) -e 's/clickEvent[ \t]*=/clickEvent = "click", unstableClickEvent =/g' $(FOMANTIC_WORK_DIR)/build/semantic.js

View file

@@ -1496,10 +1496,11 @@ LEVEL = Info
;;
;; Default configuration for email notifications for users (user configurable). Options: enabled, onmention, disabled
;DEFAULT_EMAIL_NOTIFICATIONS = enabled
-;; Disabled features for users, could be "deletion","manage_gpg_keys" more features can be disabled in future
+;; Send an email to all admins when a new user signs up to inform the admins about this act. Options: true, false
;SEND_NOTIFICATION_EMAIL_ON_NEW_USER = false
-;; Disabled features for users, could be "deletion", more features can be disabled in future
+;; Disabled features for users, could be "deletion", "manage_ssh_keys","manage_gpg_keys" more features can be disabled in future
;; - deletion: a user cannot delete their own account
+;; - manage_ssh_keys: a user cannot configure ssh keys
;; - manage_gpg_keys: a user cannot configure gpg keys
;USER_DISABLED_FEATURES =

View file

@@ -518,9 +518,10 @@ And the following unique queues:
- `DEFAULT_EMAIL_NOTIFICATIONS`: **enabled**: Default configuration for email notifications for users (user configurable). Options: enabled, onmention, disabled
- `DISABLE_REGULAR_ORG_CREATION`: **false**: Disallow regular (non-admin) users from creating organizations.
-- `USER_DISABLED_FEATURES`: **_empty_** Disabled features for users, could be `deletion`, `manage_gpg_keys` and more features can be added in future.
+- `USER_DISABLED_FEATURES`: **_empty_** Disabled features for users, could be `deletion`, `manage_ssh_keys`, `manage_gpg_keys` and more features can be added in future.
- `deletion`: User cannot delete their own account.
-- `manage_gpg_keys`: User cannot configure gpg keys
+- `manage_ssh_keys`: User cannot configure ssh keys.
+- `manage_gpg_keys`: User cannot configure gpg keys.
## Security (`security`)
@@ -831,7 +832,7 @@ Default templates for project boards:
## Issue and pull request attachments (`attachment`)
- `ENABLED`: **true**: Whether issue and pull request attachments are enabled.
-- `ALLOWED_TYPES`: **.csv,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip**: Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types.
+- `ALLOWED_TYPES`: **.cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip**: Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types.
- `MAX_SIZE`: **2048**: Maximum size (MB).
- `MAX_FILES`: **5**: Maximum number of attachments that can be uploaded at once.
- `STORAGE_TYPE`: **local**: Storage type for attachments, `local` for local disk or `minio` for s3 compatible object storage service, default is `local` or other name defined with `[storage.xxx]`

View file

@@ -497,9 +497,10 @@ Gitea creates the following non-unique queues:
- `DEFAULT_EMAIL_NOTIFICATIONS`: **enabled**: Default configuration for email notifications for users (user configurable). Options: enabled, onmention, disabled
- `DISABLE_REGULAR_ORG_CREATION`: **false**: Disallow regular (non-admin) users from creating organizations.
-- `USER_DISABLED_FEATURES`: **_empty_** Disabled user features; currently this may be empty or `deletion`, `manage_gpg_keys`; more settings may be added in the future.
+- `USER_DISABLED_FEATURES`: **_empty_** Disabled user features; currently this may be empty or `deletion`, `manage_ssh_keys`, `manage_gpg_keys`; more settings may be added in the future.
- `deletion`: Users cannot delete their own account via the web UI or API.
-- `manage_gpg_keys`: Users cannot configure GPG keys
+- `manage_ssh_keys`: Users cannot configure SSH keys via the web UI or API.
+- `manage_gpg_keys`: Users cannot configure GPG keys.
## Security (`security`)
@@ -781,7 +782,7 @@ Gitea creates the following non-unique queues:
## Issue and pull request attachments (`attachment`)
- `ENABLED`: **true**: Whether to allow users to upload attachments.
-- `ALLOWED_TYPES`: **.csv,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip**: Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard types (`image/*`, `audio/*`, `video/*`). An empty value or `*/*` allows all types.
+- `ALLOWED_TYPES`: **.cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip**: Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard types (`image/*`, `audio/*`, `video/*`). An empty value or `*/*` allows all types.
- `MAX_SIZE`: **2048**: Maximum size of an attachment (MB).
- `MAX_FILES`: **5**: Maximum number of attachments that can be uploaded at once.
- `STORAGE_TYPE`: **local**: Storage type for attachments, `local` for local disk or `minio` for an S3-compatible object storage service; if not set, the default `local` or another name defined with `[storage.xxx]` is used.

View file

@@ -224,7 +224,7 @@ Please check [Gitea's logs](administration/logging-config.md) for error messages
{{if not (eq .Body "")}}
<h3>Message content</h3>
<hr>
-{{.Body | SanitizeHTML}}
+{{.Body}}
{{end}}
</p>
<hr>

View file

@@ -207,7 +207,7 @@ _Subject_ and _mail body_ are rendered by [Golang's template engine](https://go.dev/pkg/text/
{{if not (eq .Body "")}}
<h3>Message content:</h3>
<hr>
-{{.Body | SanitizeHTML}}
+{{.Body}}
{{end}}
</p>
<hr>

View file

@@ -47,7 +47,7 @@ We recommend [Google HTML/CSS Style Guide](https://google.github.io/styleguide/h
9. Avoid unnecessary `!important` in CSS, add comments to explain why it's necessary if it can't be avoided.
10. Avoid mixing different events in one event listener, prefer to use individual event listeners for every event.
11. Custom event names are recommended to use `ce-` prefix.
-12. Gitea's tailwind-style CSS classes use `gt-` prefix (`gt-relative`), while Gitea's own private framework-level CSS classes use `g-` prefix (`g-modal-confirm`).
+12. Prefer using Tailwind CSS which is available via `tw-` prefix, e.g. `tw-relative`. Gitea's helper CSS classes use `gt-` prefix (`gt-df`), while Gitea's own private framework-level CSS classes use `g-` prefix (`g-modal-confirm`).
13. Avoid inline scripts & styles as much as possible, it's recommended to put JS code into JS files and use CSS classes. If inline scripts & styles are unavoidable, explain the reason why it can't be avoided.
### Accessibility / ARIA

View file

@@ -34,7 +34,7 @@ HTML pages are rendered by [Go HTML Template](https://pkg.go.dev/html/template).
We recommend the [Google HTML/CSS Style Guide](https://google.github.io/styleguide/htmlcssguide.html) and the [Google JavaScript Style Guide](https://google.github.io/styleguide/jsguide.html).
## Gitea-specific guidelines
1. Every feature (Fomantic-UI/jQuery module) should be put in separate files/directories.
2. HTML ids and classes should use kebab-case, preferably containing 2-3 feature-related keywords.
@@ -47,7 +47,8 @@ HTML pages are rendered by [Go HTML Template](https://pkg.go.dev/html/template).
9. Avoid unnecessary `!important` in CSS; if it cannot be avoided, add a comment explaining why it is necessary.
10. Avoid mixing different events in one event listener; prefer an individual event listener for every event.
11. Custom event names are recommended to use the `ce-` prefix.
-12. Gitea's tailwind-style CSS classes use the `gt-` prefix (`gt-relative`), while Gitea's own private framework-level CSS classes use the `g-` prefix (`g-modal-confirm`).
+12. Prefer using Tailwind CSS, which is available via the `tw-` prefix, e.g. `tw-relative`. Gitea's helper CSS classes use the `gt-` prefix (`gt-df`), while Gitea's own private framework-level CSS classes use the `g-` prefix (`g-modal-confirm`).
13. Avoid inline scripts & styles as much as possible; it is recommended to put JS code into JS files and use CSS classes. If inline scripts & styles are unavoidable, explain why they cannot be avoided.
### Accessibility / ARIA
@@ -64,18 +65,21 @@ Gitea uses some patches to make Fomantic UI more accessible (see `aria.js` and `ari
* Vue + Vanilla JS
* Fomantic-UI (jQuery)
+* htmx (partial page reloads for otherwise static components)
* Vanilla JS
Discouraged implementations:
* Vue + Fomantic-UI (jQuery)
* jQuery + Vanilla JS
+* htmx + anything else that requires a lot of JavaScript code or unnecessary features such as htmx scripting (`hx-on`)
To keep the UI consistent, Vue components can use Fomantic-UI CSS classes.
Although mixing different frameworks is discouraged,
+we use htmx for simple interactions. You can see an example of a simple interaction where htmx should be used in this [PR](https://github.com/go-gitea/gitea/pull/28908). Do not use htmx if you need more advanced reactivity; use another framework (Vue/Vanilla JS).
it could work if the mixing is necessary and the code is well designed and maintainable.
-### async functions
+### `async` functions
Only mark a function as `async` if and only if there are `await` calls or `Promise` returns inside the function.
@@ -91,6 +95,12 @@ Gitea uses some patches to make Fomantic UI more accessible (see `aria.js` and `ari
This is deliberate: we want to call the async function and ignore the Promise.
Some lint rules and IDEs also warn when the returned Promise is not handled.
+### Fetching data
+To fetch data, use the wrapper functions `GET`, `POST` etc. from `modules/fetch.js`. They
+accept a `data` option for the content, will automatically set the CSRF token and return a
+[Response](https://developer.mozilla.org/en-US/docs/Web/API/Response).
### HTML attributes and dataset
The usage of `dataset` is forbidden; its camel-casing behaviour makes it hard to grep for attributes.
@@ -132,3 +142,7 @@ Gitea uses some patches to make Fomantic UI more accessible (see `aria.js` and `ari
### Vue3 and JSX
Gitea is now using Vue3. We decided not to introduce JSX to keep the HTML and JavaScript code separated.
+### UI examples
+Gitea uses some self-made UI elements and customizes others to integrate them better into the general UI approach. When running Gitea in development mode (`RUN_MODE=dev`), a page with some standardized UI examples is available under `http(s)://your-gitea-url:port/devtest`.

go.mod
View file

@@ -47,7 +47,7 @@ require (
github.com/go-ldap/ldap/v3 v3.4.6
github.com/go-sql-driver/mysql v1.8.0
github.com/go-swagger/go-swagger v0.30.5
-github.com/go-testfixtures/testfixtures/v3 v3.9.0
+github.com/go-testfixtures/testfixtures/v3 v3.10.0
github.com/go-webauthn/webauthn v0.10.0
github.com/gobwas/glob v0.2.3
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f
@@ -55,7 +55,7 @@ require (
github.com/golang-jwt/jwt/v5 v5.2.0
github.com/google/go-github/v57 v57.0.0
github.com/google/pprof v0.0.0-20240117000934-35fc243c5815
-github.com/google/uuid v1.5.0
+github.com/google/uuid v1.6.0
github.com/gorilla/feeds v1.1.2
github.com/gorilla/sessions v1.2.2
github.com/hashicorp/go-version v1.6.0
@@ -71,7 +71,7 @@ require (
github.com/lib/pq v1.10.9
github.com/markbates/goth v1.78.0
github.com/mattn/go-isatty v0.0.20
-github.com/mattn/go-sqlite3 v1.14.19
+github.com/mattn/go-sqlite3 v1.14.22
github.com/meilisearch/meilisearch-go v0.26.1
github.com/mholt/archiver/v3 v3.5.1
github.com/microcosm-cc/bluemonday v1.0.26
@@ -126,7 +126,7 @@ require (
filippo.io/edwards25519 v1.1.0 // indirect
git.sr.ht/~mariusor/go-xsd-duration v0.0.0-20220703122237-02e73435a078 // indirect
github.com/ClickHouse/ch-go v0.61.1 // indirect
-github.com/ClickHouse/clickhouse-go/v2 v2.17.1 // indirect
+github.com/ClickHouse/clickhouse-go/v2 v2.18.0 // indirect
github.com/DataDog/zstd v1.5.5 // indirect
github.com/Masterminds/goutils v1.1.1 // indirect
github.com/Masterminds/semver/v3 v3.2.1 // indirect
@@ -239,7 +239,7 @@ require (
github.com/oklog/ulid v1.3.1 // indirect
github.com/olekukonko/tablewriter v0.0.5 // indirect
github.com/onsi/ginkgo v1.16.5 // indirect
-github.com/paulmach/orb v0.11.0 // indirect
+github.com/paulmach/orb v0.11.1 // indirect
github.com/pelletier/go-toml/v2 v2.1.1 // indirect
github.com/pierrec/lz4/v4 v4.1.21 // indirect
github.com/pjbgf/sha1cd v0.3.0 // indirect
@@ -299,7 +299,7 @@ replace github.com/hashicorp/go-version => github.com/6543/go-version v1.3.1
replace github.com/shurcooL/vfsgen => github.com/lunny/vfsgen v0.0.0-20220105142115-2c99e1ffdfa0
-replace github.com/nektos/act => gitea.com/gitea/act v0.2.51
+replace github.com/nektos/act => gitea.com/gitea/act v0.259.1
replace github.com/gorilla/feeds => github.com/yardenshoham/feeds v0.0.0-20240110072658-f3d0c21c0bd5

go.sum
View file

@@ -52,8 +52,8 @@ filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
git.sr.ht/~mariusor/go-xsd-duration v0.0.0-20220703122237-02e73435a078 h1:cliQ4HHsCo6xi2oWZYKWW4bly/Ory9FuTpFPRxj/mAg=
git.sr.ht/~mariusor/go-xsd-duration v0.0.0-20220703122237-02e73435a078/go.mod h1:g/V2Hjas6Z1UHUp4yIx6bATpNzJ7DYtD0FG3+xARWxs=
-gitea.com/gitea/act v0.2.51 h1:gXc/B4OlTciTTzAx9cmNyw04n2SDO7exPjAsR5Idu+c=
-gitea.com/gitea/act v0.2.51/go.mod h1:CoaX2053jqBlD6JMgu4d4UgFL/rp2I14Kt5mMqcs0Z0=
+gitea.com/gitea/act v0.259.1 h1:8GG1o/xtUHl3qjn5f0h/2FXrT5ubBn05TJOM5ry+FBw=
+gitea.com/gitea/act v0.259.1/go.mod h1:UxZWRYqQG2Yj4+4OqfGWW5a3HELwejyWFQyU7F1jUD8=
gitea.com/go-chi/binding v0.0.0-20230415142243-04b515c6d669 h1:RUBX+MK/TsDxpHmymaOaydfigEbbzqUnG1OTZU/HAeo=
gitea.com/go-chi/binding v0.0.0-20230415142243-04b515c6d669/go.mod h1:77TZu701zMXWJFvB8gvTbQ92zQ3DQq/H7l5wAEjQRKc=
gitea.com/go-chi/cache v0.0.0-20210110083709-82c4c9ce2d5e/go.mod h1:k2V/gPDEtXGjjMGuBJiapffAXTv76H4snSmlJRLUhH0=
@@ -82,8 +82,8 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/ClickHouse/ch-go v0.61.1 h1:j5rx3qnvcnYjhnP1IdXE/vdIRQiqgwAzyqOaasA6QCw=
github.com/ClickHouse/ch-go v0.61.1/go.mod h1:myxt/JZgy2BYHFGQqzmaIpbfr5CMbs3YHVULaWQj5YU=
-github.com/ClickHouse/clickhouse-go/v2 v2.17.1 h1:ZCmAYWpu75IyEi7+Yrs/uaAjiCGY5wfW5kXo64exkX4=
-github.com/ClickHouse/clickhouse-go/v2 v2.17.1/go.mod h1:rkGTvFDTLqLIm0ma+13xmcCfr/08Gvs7KmFt1tgiWHQ=
+github.com/ClickHouse/clickhouse-go/v2 v2.18.0 h1:O1LicIeg2JS2V29fKRH4+yT3f6jvvcJBm506dpVQ4mQ=
+github.com/ClickHouse/clickhouse-go/v2 v2.18.0/go.mod h1:ztQvX6wm7kAbhJslS87EXEhOVNY/TObXwyURnGju5FQ=
github.com/DataDog/zstd v1.4.5/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo=
github.com/DataDog/zstd v1.5.5 h1:oWf5W7GtOLgp6bciQYDmhHHjdhYkALu6S/5Ni9ZgSvQ=
github.com/DataDog/zstd v1.5.5/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw=
@@ -353,8 +353,8 @@ github.com/go-swagger/scan-repo-boundary v0.0.0-20180623220736-973b3573c013/go.m
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
github.com/go-test/deep v1.1.0 h1:WOcxcdHcvdgThNXjw0t76K42FXTU7HpNQWHpA2HHNlg=
github.com/go-test/deep v1.1.0/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE=
-github.com/go-testfixtures/testfixtures/v3 v3.9.0 h1:938g5V+GWLVejm3Hc+nWCuEXRlcglZDDlN/t1gWzcSY=
-github.com/go-testfixtures/testfixtures/v3 v3.9.0/go.mod h1:cdsKD2ApFBjdog9jRsz6EJqF+LClq/hrwE9K/1Dzo4s=
+github.com/go-testfixtures/testfixtures/v3 v3.10.0 h1:BrBwN7AuC+74g5qtk9D59TLGOaEa8Bw1WmIsf+SyzWc=
+github.com/go-testfixtures/testfixtures/v3 v3.10.0/go.mod h1:z8RoleoNtibi6Ar8ziCW7e6PQ+jWiqbUWvuv8AMe4lo=
github.com/go-webauthn/webauthn v0.10.0 h1:yuW2e1tXnRAwAvKrR4q4LQmc6XtCMH639/ypZGhZCwk=
github.com/go-webauthn/webauthn v0.10.0/go.mod h1:l0NiauXhL6usIKqNLCUM3Qir43GK7ORg8ggold0Uv/Y=
github.com/go-webauthn/x v0.1.6 h1:QNAX+AWeqRt9loE8mULeWJCqhVG5D/jvdmJ47fIWCkQ=
@@ -457,8 +457,8 @@ github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm4
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
-github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU=
-github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
+github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
@@ -609,8 +609,8 @@ github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m
github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-sqlite3 v1.11.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
-github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI=
-github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
+github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
+github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/meilisearch/meilisearch-go v0.26.1 h1:3bmo2uLijX7kvBmiZ9LupVfC95TFcRJDgrRTzbOoE4A=
github.com/meilisearch/meilisearch-go v0.26.1/go.mod h1:SxuSqDcPBIykjWz1PX+KzsYzArNLSCadQodWs8extS0=
github.com/mholt/acmez v1.2.0 h1:1hhLxSgY5FvH5HCnGUuwbKY2VQVo8IU7rxXKSnZ7F30=
@@ -681,8 +681,8 @@ github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.0-rc6 h1:XDqvyKsJEbRtATzkgItUqBA7QHk58yxX1Ov9HERHNqU=
github.com/opencontainers/image-spec v1.1.0-rc6/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
-github.com/paulmach/orb v0.11.0 h1:JfVXJUBeH9ifc/OrhBY0lL16QsmPgpCHMlqSSYhcgAA=
-github.com/paulmach/orb v0.11.0/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU=
+github.com/paulmach/orb v0.11.1 h1:3koVegMC4X/WeiXYz9iswopaTwMem53NzTJuTF20JzU=
+github.com/paulmach/orb v0.11.1/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU=
github.com/paulmach/protoscan v0.2.1/go.mod h1:SpcSwydNLrxUGSDvXvO0P7g7AuhJ7lcKfDlhJCDw2gY=
github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc=
github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI=

View file

@@ -10,6 +10,7 @@ import (
"strings"
"code.gitea.io/gitea/models/db"
+"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
@@ -82,3 +83,35 @@ func UpdateVariable(ctx context.Context, variable *ActionVariable) (bool, error)
})
return count != 0, err
}
func GetVariablesOfRun(ctx context.Context, run *ActionRun) (map[string]string, error) {
variables := map[string]string{}
// Global
globalVariables, err := db.Find[ActionVariable](ctx, FindVariablesOpts{})
if err != nil {
log.Error("find global variables: %v", err)
return nil, err
}
// Org / User level
ownerVariables, err := db.Find[ActionVariable](ctx, FindVariablesOpts{OwnerID: run.Repo.OwnerID})
if err != nil {
log.Error("find variables of org: %d, error: %v", run.Repo.OwnerID, err)
return nil, err
}
// Repo level
repoVariables, err := db.Find[ActionVariable](ctx, FindVariablesOpts{RepoID: run.RepoID})
if err != nil {
log.Error("find variables of repo: %d, error: %v", run.RepoID, err)
return nil, err
}
// Level precedence: Repo > Org / User > Global
for _, v := range append(globalVariables, append(ownerVariables, repoVariables...)...) {
variables[v.Name] = v.Data
}
return variables, nil
}
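For illustration, a minimal caller-side sketch of the precedence implemented above (the variable name and surrounding code are assumptions, not part of the change): because repo-level variables are written into the map last, they override owner-level values, which in turn override globals.
// Hypothetical: CACHE_TTL is defined globally as "60", on the owner as "120" and on the repo as "30".
vars, err := GetVariablesOfRun(ctx, run)
if err != nil {
return nil, err
}
ttl := vars["CACHE_TTL"] // "30" — the repo-level value wins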

View file

@@ -395,10 +395,14 @@ func (a *Action) GetCreate() time.Time {
return a.CreatedUnix.AsTime()
}
-// GetIssueInfos returns a list of issues associated with
-// the action.
+// GetIssueInfos returns a list of associated information with the action.
func (a *Action) GetIssueInfos() []string {
-return strings.SplitN(a.Content, "|", 3)
+// make sure it always returns 3 elements, because there are some access to the a[1] and a[2] without checking the length
+ret := strings.SplitN(a.Content, "|", 3)
+for len(ret) < 3 {
+ret = append(ret, "")
+}
+return ret
}
// GetIssueTitle returns the title of first issue associated with the action.
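For illustration, a hedged sketch of the padded result (the `Content` value is made up): even when fewer than three `|`-separated fields are stored, indexes 1 and 2 can now be read without a length check.
a := &Action{Content: "123"} // only the issue index is present
infos := a.GetIssueInfos()   // ["123", "", ""]
title := infos[1]            // "" instead of an index-out-of-range panic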

View file

@@ -166,8 +166,7 @@ func preprocessDatabaseCollation(x *xorm.Engine) {
// try to alter database collation to expected if the database is empty, it might fail in some cases (and it isn't necessary to succeed)
// at the moment, there is no "altering" solution for MSSQL, site admin should manually change the database collation
-// and there is a bug https://github.com/go-testfixtures/testfixtures/pull/182 mssql: Invalid object name 'information_schema.tables'.
-if !r.CollationEquals(r.DatabaseCollation, r.ExpectedCollation) && r.ExistingTableNumber == 0 && x.Dialect().URI().DBType == schemas.MYSQL {
+if !r.CollationEquals(r.DatabaseCollation, r.ExpectedCollation) && r.ExistingTableNumber == 0 {
if err = alterDatabaseCollation(x, r.ExpectedCollation); err != nil {
log.Error("Failed to change database collation to %q: %v", r.ExpectedCollation, err)
} else {

View file

@@ -3,3 +3,35 @@
hook_id: 1
uuid: uuid1
is_delivered: true
is_succeed: false
request_content: >
{
"url": "/matrix-delivered",
"http_method":"PUT",
"headers": {
"X-Head": "42"
},
"body": "{}"
}
-
id: 2
hook_id: 1
uuid: uuid2
is_delivered: false
-
id: 3
hook_id: 1
uuid: uuid3
is_delivered: true
is_succeed: true
payload_content: '{"key":"value"}' # legacy task, payload saved in payload_content (and not in request_content)
request_content: >
{
"url": "/matrix-success",
"http_method":"PUT",
"headers": {
"X-Head": "42"
}
}

View file

@@ -909,12 +909,7 @@ func PullRequestCodeOwnersReview(ctx context.Context, pull *Issue, pr *PullReque
}
defer repo.Close()
-branch, err := repo.GetDefaultBranch()
-if err != nil {
-return err
-}
-commit, err := repo.GetBranchCommit(branch)
+commit, err := repo.GetBranchCommit(pr.BaseRepo.DefaultBranch)
if err != nil {
return err
}

View file

@@ -36,12 +36,14 @@ func Test_DropTableColumns(t *testing.T) {
"updated_unix",
}
+x.SetMapper(names.GonicMapper{})
for i := range columns {
-x.SetMapper(names.GonicMapper{})
if err := x.Sync(new(DropTest)); err != nil {
t.Errorf("unable to create DropTest table: %v", err)
return
}
sess := x.NewSession()
if err := sess.Begin(); err != nil {
sess.Close()
@@ -64,7 +66,6 @@ func Test_DropTableColumns(t *testing.T) {
return
}
for j := range columns[i+1:] {
-x.SetMapper(names.GonicMapper{})
if err := x.Sync(new(DropTest)); err != nil {
t.Errorf("unable to create DropTest table: %v", err)
return

View file

@@ -0,0 +1,4 @@
-
id: 1
repo_id: 1
index: 1

View file

@@ -0,0 +1,16 @@
- id: 11
uuid: uuid11
hook_id: 1
payload_content: >
{"data":"payload"}
event_type: create
delivered: 1706106005
- id: 101
uuid: uuid101
hook_id: 1
payload_content: >
{"data":"payload"}
event_type: create
delivered: 1706106006
is_delivered: true

View file

@@ -0,0 +1,18 @@
- id: 11
uuid: uuid11
hook_id: 1
payload_content: >
{"data":"payload"}
event_type: create
delivered: 1706106005
payload_version: 1
- id: 101
uuid: uuid101
hook_id: 1
payload_content: >
{"data":"payload"}
event_type: create
delivered: 1706106006
is_delivered: true
payload_version: 1

View file

@@ -0,0 +1,11 @@
-
id: 1
uuid: a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11
issue_id: 1
release_id: 0
-
id: 2
uuid: a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12
issue_id: 0
release_id: 1

View file

@@ -0,0 +1,3 @@
-
id: 1
repo_id: 1

View file

@@ -0,0 +1,3 @@
-
id: 1
repo_id: 1

View file

@@ -0,0 +1,3 @@
-
id: 1
commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d

View file

@@ -0,0 +1,3 @@
-
id: 1
context_hash: 19fe5caf872476db265596eaac1dc35ad1c6422d

View file

@@ -0,0 +1,5 @@
-
id: 1
commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d
merge_base: 19fe5caf872476db265596eaac1dc35ad1c6422d
merged_commit_id: 19fe5caf872476db265596eaac1dc35ad1c6422d

View file

@@ -0,0 +1,3 @@
-
id: 1
sha1: 19fe5caf872476db265596eaac1dc35ad1c6422d

View file

@@ -0,0 +1,3 @@
-
id: 1
commit_id: 19fe5caf872476db265596eaac1dc35ad1c6422d

View file

@@ -0,0 +1,3 @@
-
id: 1
commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d

View file

@@ -0,0 +1,3 @@
-
id: 1
commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d

View file

@@ -0,0 +1,4 @@
-
id: 1
description: the badge
image_url: https://gitea.com/myimage.png

View file

@@ -560,6 +560,14 @@ var migrations = []Migration{
NewMigration("Add PreviousDuration to ActionRun", v1_22.AddPreviousDurationToActionRun),
// v286 -> v287
NewMigration("Add support for SHA256 git repositories", v1_22.AdjustDBForSha256),
+// v287 -> v288
+NewMigration("Use Slug instead of ID for Badges", v1_22.UseSlugInsteadOfIDForBadges),
+// v288 -> v289
+NewMigration("Add user_blocking table", v1_22.AddUserBlockingTable),
+// v289 -> v290
+NewMigration("Add default_wiki_branch to repository table", v1_22.AddDefaultWikiBranch),
+// v290 -> v291
+NewMigration("Add PayloadVersion to HookTask", v1_22.AddPayloadVersionToHookTaskTable),
}
// GetCurrentDBVersion returns the current db version

View file

@@ -15,7 +15,6 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
type Attachment struct {
ID int64 `xorm:"pk autoincr"`
UUID string `xorm:"uuid UNIQUE"`
-RepoID int64 `xorm:"INDEX"` // this should not be zero
IssueID int64 `xorm:"INDEX"` // maybe zero when creating
ReleaseID int64 `xorm:"INDEX"` // maybe zero when creating
UploaderID int64 `xorm:"INDEX DEFAULT 0"`
@@ -44,12 +43,21 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
return
}
-var issueAttachments []*Attachment
-err := x.Where("issue_id > 0").Find(&issueAttachments)
+type NewAttachment struct {
+ID int64 `xorm:"pk autoincr"`
+UUID string `xorm:"uuid UNIQUE"`
+RepoID int64 `xorm:"INDEX"` // this should not be zero
+IssueID int64 `xorm:"INDEX"` // maybe zero when creating
+ReleaseID int64 `xorm:"INDEX"` // maybe zero when creating
+UploaderID int64 `xorm:"INDEX DEFAULT 0"`
+}
+var issueAttachments []*NewAttachment
+err := x.Table("attachment").Where("issue_id > 0").Find(&issueAttachments)
assert.NoError(t, err)
for _, attach := range issueAttachments {
-assert.Greater(t, attach.RepoID, 0)
-assert.Greater(t, attach.IssueID, 0)
+assert.Greater(t, attach.RepoID, int64(0))
+assert.Greater(t, attach.IssueID, int64(0))
var issue Issue
has, err := x.ID(attach.IssueID).Get(&issue)
assert.NoError(t, err)
@@ -57,12 +65,12 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
assert.EqualValues(t, attach.RepoID, issue.RepoID)
}
-var releaseAttachments []*Attachment
-err = x.Where("release_id > 0").Find(&releaseAttachments)
+var releaseAttachments []*NewAttachment
+err = x.Table("attachment").Where("release_id > 0").Find(&releaseAttachments)
assert.NoError(t, err)
for _, attach := range releaseAttachments {
-assert.Greater(t, attach.RepoID, 0)
-assert.Greater(t, attach.IssueID, 0)
+assert.Greater(t, attach.RepoID, int64(0))
+assert.Greater(t, attach.ReleaseID, int64(0))
var release Release
has, err := x.ID(attach.ReleaseID).Get(&release)
assert.NoError(t, err)

View file

@@ -4,10 +4,40 @@
package v1_22 //nolint
import (
+"fmt"
"xorm.io/xorm"
+"xorm.io/xorm/schemas"
)
func AddCombinedIndexToIssueUser(x *xorm.Engine) error {
type OldIssueUser struct {
IssueID int64
UID int64
Cnt int64
}
var duplicatedIssueUsers []OldIssueUser
if err := x.SQL("select * from (select issue_id, uid, count(1) as cnt from issue_user group by issue_id, uid) a where a.cnt > 1").
Find(&duplicatedIssueUsers); err != nil {
return err
}
for _, issueUser := range duplicatedIssueUsers {
if x.Dialect().URI().DBType == schemas.MSSQL {
if _, err := x.Exec(fmt.Sprintf("delete from issue_user where id in (SELECT top %d id FROM issue_user WHERE issue_id = ? and uid = ?)", issueUser.Cnt-1), issueUser.IssueID, issueUser.UID); err != nil {
return err
}
} else {
var ids []int64
if err := x.SQL("SELECT id FROM issue_user WHERE issue_id = ? and uid = ? limit ?", issueUser.IssueID, issueUser.UID, issueUser.Cnt-1).Find(&ids); err != nil {
return err
}
if _, err := x.Table("issue_user").In("id", ids).Delete(); err != nil {
return err
}
}
}
type IssueUser struct {
UID int64 `xorm:"INDEX unique(uid_to_issue)"` // User ID.
IssueID int64 `xorm:"INDEX unique(uid_to_issue)"`

View file

@@ -36,9 +36,9 @@ func expandHashReferencesToSha256(x *xorm.Engine) error {
if setting.Database.Type.IsMSSQL() {
// drop indexes that need to be re-created afterwards
droppedIndexes := []string{
-"DROP INDEX commit_status.IDX_commit_status_context_hash",
-"DROP INDEX review_state.UQE_review_state_pull_commit_user",
-"DROP INDEX repo_archiver.UQE_repo_archiver_s",
+"DROP INDEX IF EXISTS [IDX_commit_status_context_hash] ON [commit_status]",
+"DROP INDEX IF EXISTS [UQE_review_state_pull_commit_user] ON [review_state]",
+"DROP INDEX IF EXISTS [UQE_repo_archiver_s] ON [repo_archiver]",
}
for _, s := range droppedIndexes {
_, err := db.Exec(s)
@@ -53,7 +53,7 @@ func expandHashReferencesToSha256(x *xorm.Engine) error {
if setting.Database.Type.IsMySQL() {
_, err = db.Exec(fmt.Sprintf("ALTER TABLE `%s` MODIFY COLUMN `%s` VARCHAR(64)", alts[0], alts[1]))
} else if setting.Database.Type.IsMSSQL() {
-_, err = db.Exec(fmt.Sprintf("ALTER TABLE `%s` ALTER COLUMN `%s` VARCHAR(64)", alts[0], alts[1]))
+_, err = db.Exec(fmt.Sprintf("ALTER TABLE [%s] ALTER COLUMN [%s] VARCHAR(64)", alts[0], alts[1]))
} else {
_, err = db.Exec(fmt.Sprintf("ALTER TABLE `%s` ALTER COLUMN `%s` TYPE VARCHAR(64)", alts[0], alts[1]))
}

View file

@@ -14,59 +14,75 @@ import (
func PrepareOldRepository(t *testing.T) (*xorm.Engine, func()) {
type Repository struct { // old struct
ID int64 `xorm:"pk autoincr"`
+ObjectFormatName string `xorm:"VARCHAR(6) NOT NULL DEFAULT 'sha1'"`
}
-type CommitStatus struct { // old struct
-ID int64 `xorm:"pk autoincr"`
-ContextHash string `xorm:"char(40)"`
+type CommitStatus struct {
+ID int64
+ContextHash string
}
-type Comment struct { // old struct
-ID int64 `xorm:"pk autoincr"`
-CommitSHA string `xorm:"VARCHAR(40)"`
+type RepoArchiver struct {
+ID int64
+RepoID int64
+Type int
+CommitID string
}
-type PullRequest struct { // old struct
-ID int64 `xorm:"pk autoincr"`
-MergeBase string `xorm:"VARCHAR(40)"`
-MergedCommitID string `xorm:"VARCHAR(40)"`
+type ReviewState struct {
+ID int64
+CommitSHA string
+UserID int64
+PullID int64
}
-type Review struct { // old struct
-ID int64 `xorm:"pk autoincr"`
-CommitID string `xorm:"VARCHAR(40)"`
+type Comment struct {
+ID int64
+CommitSHA string
}
-type ReviewState struct { // old struct
-ID int64 `xorm:"pk autoincr"`
-CommitSHA string `xorm:"VARCHAR(40)"`
+type PullRequest struct {
+ID int64
+CommitSHA string
+MergeBase string
+MergedCommitID string
}
-type RepoArchiver struct { // old struct
-ID int64 `xorm:"pk autoincr"`
-CommitID string `xorm:"VARCHAR(40)"`
+type Release struct {
+ID int64
+Sha1 string
}
-type Release struct { // old struct
-ID int64 `xorm:"pk autoincr"`
-Sha1 string `xorm:"VARCHAR(40)"`
+type RepoIndexerStatus struct {
+ID int64
+CommitSHA string
}
-type RepoIndexerStatus struct { // old struct
-ID int64 `xorm:"pk autoincr"`
-CommitSha string `xorm:"VARCHAR(40)"`
+type Review struct {
+ID int64
+CommitID string
}
// Prepare and load the testing database
-return base.PrepareTestEnv(t, 0, new(Repository), new(CommitStatus), new(Comment), new(PullRequest), new(Review), new(ReviewState), new(RepoArchiver), new(Release), new(RepoIndexerStatus))
+return base.PrepareTestEnv(t, 0,
+new(Repository),
+new(CommitStatus),
+new(RepoArchiver),
+new(ReviewState),
+new(Review),
+new(Comment),
+new(PullRequest),
+new(Release),
+new(RepoIndexerStatus),
+)
}
func Test_RepositoryFormat(t *testing.T) {
x, deferable := PrepareOldRepository(t)
defer deferable()
+assert.NoError(t, AdjustDBForSha256(x))
type Repository struct {
ID int64 `xorm:"pk autoincr"`
ObjectFormatName string `xorg:"not null default('sha1')"`
@@ -79,12 +95,10 @@ func Test_RepositoryFormat(t *testing.T) {
assert.NoError(t, err)
assert.EqualValues(t, 4, count)
-assert.NoError(t, AdjustDBForSha256(x))
-repo.ID = 20
repo.ObjectFormatName = "sha256"
_, err = x.Insert(repo)
assert.NoError(t, err)
+id := repo.ID
count, err = x.Count(new(Repository))
assert.NoError(t, err)
@@ -97,7 +111,7 @@ func Test_RepositoryFormat(t *testing.T) {
assert.EqualValues(t, "sha1", repo.ObjectFormatName)
repo = new(Repository)
-ok, err = x.ID(20).Get(repo)
+ok, err = x.ID(id).Get(repo)
assert.NoError(t, err)
assert.EqualValues(t, true, ok)
assert.EqualValues(t, "sha256", repo.ObjectFormatName)

View file

@@ -0,0 +1,46 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package v1_22 //nolint
import (
"xorm.io/xorm"
)
type BadgeUnique struct {
ID int64 `xorm:"pk autoincr"`
Slug string `xorm:"UNIQUE"`
}
func (BadgeUnique) TableName() string {
return "badge"
}
func UseSlugInsteadOfIDForBadges(x *xorm.Engine) error {
type Badge struct {
Slug string
}
err := x.Sync(new(Badge))
if err != nil {
return err
}
sess := x.NewSession()
defer sess.Close()
if err := sess.Begin(); err != nil {
return err
}
_, err = sess.Exec("UPDATE `badge` SET `slug` = `id` Where `slug` IS NULL")
if err != nil {
return err
}
err = sess.Sync(new(BadgeUnique))
if err != nil {
return err
}
return sess.Commit()
}

View file

@@ -0,0 +1,26 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package v1_22 //nolint
import (
"code.gitea.io/gitea/modules/timeutil"
"xorm.io/xorm"
)
type Blocking struct {
ID int64 `xorm:"pk autoincr"`
BlockerID int64 `xorm:"UNIQUE(block)"`
BlockeeID int64 `xorm:"UNIQUE(block)"`
Note string
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
}
func (*Blocking) TableName() string {
return "user_blocking"
}
func AddUserBlockingTable(x *xorm.Engine) error {
return x.Sync(&Blocking{})
}

View file

@@ -0,0 +1,18 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package v1_22 //nolint
import "xorm.io/xorm"
func AddDefaultWikiBranch(x *xorm.Engine) error {
type Repository struct {
ID int64
DefaultWikiBranch string
}
if err := x.Sync(&Repository{}); err != nil {
return err
}
_, err := x.Exec("UPDATE `repository` SET default_wiki_branch = 'master' WHERE (default_wiki_branch IS NULL) OR (default_wiki_branch = '')")
return err
}

View file

@@ -0,0 +1,39 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package v1_22 //nolint
import (
"code.gitea.io/gitea/modules/timeutil"
webhook_module "code.gitea.io/gitea/modules/webhook"
"xorm.io/xorm"
)
// HookTask represents a hook task.
// exact copy of models/webhook/hooktask.go when this migration was created
// - xorm:"-" fields deleted
type HookTask struct {
ID int64 `xorm:"pk autoincr"`
HookID int64 `xorm:"index"`
UUID string `xorm:"unique"`
PayloadContent string `xorm:"LONGTEXT"`
EventType webhook_module.HookEventType
IsDelivered bool
Delivered timeutil.TimeStampNano
// History info.
IsSucceed bool
RequestContent string `xorm:"LONGTEXT"`
ResponseContent string `xorm:"LONGTEXT"`
// Version number to allow for smooth version upgrades:
// - Version 1: PayloadContent contains the JSON as send to the URL
// - Version 2: PayloadContent contains the original event
PayloadVersion int `xorm:"DEFAULT 1"`
}
func AddPayloadVersionToHookTaskTable(x *xorm.Engine) error {
// create missing column
return x.Sync(new(HookTask))
}

View file

@@ -0,0 +1,58 @@
// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
// SPDX-License-Identifier: MIT
package v1_22 //nolint
import (
"strconv"
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/modules/timeutil"
webhook_module "code.gitea.io/gitea/modules/webhook"
"github.com/stretchr/testify/assert"
)
func Test_AddPayloadVersionToHookTaskTable(t *testing.T) {
type HookTaskMigrated HookTask
// HookTask represents a hook task, as of before the migration
type HookTask struct {
ID int64 `xorm:"pk autoincr"`
HookID int64 `xorm:"index"`
UUID string `xorm:"unique"`
PayloadContent string `xorm:"LONGTEXT"`
EventType webhook_module.HookEventType
IsDelivered bool
Delivered timeutil.TimeStampNano
// History info.
IsSucceed bool
RequestContent string `xorm:"LONGTEXT"`
ResponseContent string `xorm:"LONGTEXT"`
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(HookTask), new(HookTaskMigrated))
defer deferable()
if x == nil || t.Failed() {
return
}
assert.NoError(t, AddPayloadVersionToHookTaskTable(x))
expected := []HookTaskMigrated{}
assert.NoError(t, x.Table("hook_task_migrated").Asc("id").Find(&expected))
assert.Len(t, expected, 2)
got := []HookTaskMigrated{}
assert.NoError(t, x.Table("hook_task").Asc("id").Find(&got))
for i, expected := range expected {
expected, got := expected, got[i]
t.Run(strconv.FormatInt(expected.ID, 10), func(t *testing.T) {
assert.Equal(t, expected.PayloadVersion, got.PayloadVersion)
})
}
}

View file

@@ -232,7 +232,7 @@ func UpdateBoard(ctx context.Context, board *Board) error {
func (p *Project) GetBoards(ctx context.Context) (BoardList, error) {
boards := make([]*Board, 0, 5)
-if err := db.GetEngine(ctx).Where("project_id=? AND `default`=?", p.ID, false).OrderBy("Sorting").Find(&boards); err != nil {
+if err := db.GetEngine(ctx).Where("project_id=? AND `default`=?", p.ID, false).OrderBy("sorting").Find(&boards); err != nil {
return nil, err
}

View file

@@ -9,7 +9,10 @@ import (
"fmt"
"strings"
+actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
+actions_module "code.gitea.io/gitea/modules/actions"
+"code.gitea.io/gitea/modules/log"
secret_module "code.gitea.io/gitea/modules/secret"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
@@ -112,3 +115,39 @@ func UpdateSecret(ctx context.Context, secretID int64, data string) error {
}
return err
}
func GetSecretsOfTask(ctx context.Context, task *actions_model.ActionTask) (map[string]string, error) {
secrets := map[string]string{}
secrets["GITHUB_TOKEN"] = task.Token
secrets["GITEA_TOKEN"] = task.Token
if task.Job.Run.IsForkPullRequest && task.Job.Run.TriggerEvent != actions_module.GithubEventPullRequestTarget {
// ignore secrets for fork pull request, except GITHUB_TOKEN and GITEA_TOKEN which are automatically generated.
// for the tasks triggered by pull_request_target event, they could access the secrets because they will run in the context of the base branch
// see the documentation: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target
return secrets, nil
}
ownerSecrets, err := db.Find[Secret](ctx, FindSecretsOptions{OwnerID: task.Job.Run.Repo.OwnerID})
if err != nil {
log.Error("find secrets of owner %v: %v", task.Job.Run.Repo.OwnerID, err)
return nil, err
}
repoSecrets, err := db.Find[Secret](ctx, FindSecretsOptions{RepoID: task.Job.Run.RepoID})
if err != nil {
log.Error("find secrets of repo %v: %v", task.Job.Run.RepoID, err)
return nil, err
}
for _, secret := range append(ownerSecrets, repoSecrets...) {
v, err := secret_module.DecryptSecret(setting.SecretKey, secret.Data)
if err != nil {
log.Error("decrypt secret %v %q: %v", secret.ID, secret.Name, err)
return nil, err
}
secrets[secret.Name] = v
}
return secrets, nil
}
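A hedged usage sketch (the surrounding code is assumed): the generated tokens are always present in the returned map, while user-defined secrets are withheld for fork pull requests unless the run was triggered by `pull_request_target`.
secrets, err := GetSecretsOfTask(ctx, task)
if err != nil {
return err
}
token := secrets["GITEA_TOKEN"] // always set, generated per task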

View file

@@ -157,37 +157,18 @@ func UpdateEmailAddress(ctx context.Context, email *EmailAddress) error {
var emailRegexp = regexp.MustCompile("^[a-zA-Z0-9.!#$%&'*+-/=?^_`{|}~]*@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$")
-// ValidateEmail check if email is a allowed address
+// ValidateEmail check if email is a valid & allowed address
func ValidateEmail(email string) error {
-if len(email) == 0 {
-return ErrEmailInvalid{email}
+if err := validateEmailBasic(email); err != nil {
+return err
}
+return validateEmailDomain(email)
+}
-if !emailRegexp.MatchString(email) {
-return ErrEmailCharIsNotSupported{email}
-}
-if email[0] == '-' {
-return ErrEmailInvalid{email}
-}
-if _, err := mail.ParseAddress(email); err != nil {
-return ErrEmailInvalid{email}
-}
-// if there is no allow list, then check email against block list
-if len(setting.Service.EmailDomainAllowList) == 0 &&
-validation.IsEmailDomainListed(setting.Service.EmailDomainBlockList, email) {
-return ErrEmailInvalid{email}
-}
-// if there is an allow list, then check email against allow list
-if len(setting.Service.EmailDomainAllowList) > 0 &&
-!validation.IsEmailDomainListed(setting.Service.EmailDomainAllowList, email) {
-return ErrEmailInvalid{email}
-}
-return nil
+// ValidateEmailForAdmin check if email is a valid address when admins manually add or edit users
+func ValidateEmailForAdmin(email string) error {
+return validateEmailBasic(email)
+// In this case we do not need to check the email domain
}
func GetEmailAddressByEmail(ctx context.Context, email string) (*EmailAddress, error) {
@@ -543,3 +524,41 @@ func ActivateUserEmail(ctx context.Context, userID int64, email string, activate
return committer.Commit()
}
// validateEmailBasic checks whether the email complies with the rules
func validateEmailBasic(email string) error {
if len(email) == 0 {
return ErrEmailInvalid{email}
}
if !emailRegexp.MatchString(email) {
return ErrEmailCharIsNotSupported{email}
}
if email[0] == '-' {
return ErrEmailInvalid{email}
}
if _, err := mail.ParseAddress(email); err != nil {
return ErrEmailInvalid{email}
}
return nil
}
// validateEmailDomain checks whether the email domain is allowed or blocked
func validateEmailDomain(email string) error {
// if there is no allow list, then check email against block list
if len(setting.Service.EmailDomainAllowList) == 0 &&
validation.IsEmailDomainListed(setting.Service.EmailDomainBlockList, email) {
return ErrEmailInvalid{email}
}
// if there is an allow list, then check email against allow list
if len(setting.Service.EmailDomainAllowList) > 0 &&
!validation.IsEmailDomainListed(setting.Service.EmailDomainAllowList, email) {
return ErrEmailInvalid{email}
}
return nil
}
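A minimal sketch of the resulting split (assuming `setting.Service.EmailDomainAllowList` is non-empty; the address is made up): self-service registration still runs the domain checks, while admin-managed addresses only have to pass the basic syntax checks.
err := ValidateEmail("someone@example.com")              // basic checks + allow/block list
errAdmin := ValidateEmailForAdmin("someone@example.com") // basic checks only, domain is not restricted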

View file

@@ -598,6 +598,16 @@ type CreateUserOverwriteOptions struct {
// CreateUser creates record of a new user.
func CreateUser(ctx context.Context, u *User, overwriteDefault ...*CreateUserOverwriteOptions) (err error) {
+return createUser(ctx, u, false, overwriteDefault...)
+}
+// AdminCreateUser is used by admins to manually create users
+func AdminCreateUser(ctx context.Context, u *User, overwriteDefault ...*CreateUserOverwriteOptions) (err error) {
+return createUser(ctx, u, true, overwriteDefault...)
+}
+// createUser creates record of a new user.
+func createUser(ctx context.Context, u *User, createdByAdmin bool, overwriteDefault ...*CreateUserOverwriteOptions) (err error) {
if err = IsUsableUsername(u.Name); err != nil {
return err
}
@@ -651,8 +661,14 @@ func CreateUser(ctx context.Context, u *User, overwriteDefault ...*CreateUserOve
return err
}
-if err := ValidateEmail(u.Email); err != nil {
-return err
+if createdByAdmin {
+if err := ValidateEmailForAdmin(u.Email); err != nil {
+return err
+}
+} else {
+if err := ValidateEmail(u.Email); err != nil {
+return err
+}
}
ctx, committer, err := db.TxContext(ctx)

View file

@ -5,13 +5,13 @@ package webhook
import ( import (
"context" "context"
"errors"
"time" "time"
"code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/timeutil"
webhook_module "code.gitea.io/gitea/modules/webhook" webhook_module "code.gitea.io/gitea/modules/webhook"
@ -31,6 +31,7 @@ type HookRequest struct {
URL string `json:"url"` URL string `json:"url"`
HTTPMethod string `json:"http_method"` HTTPMethod string `json:"http_method"`
Headers map[string]string `json:"headers"` Headers map[string]string `json:"headers"`
Body string `json:"body"`
} }
// HookResponse represents hook task response information. // HookResponse represents hook task response information.
@ -45,11 +46,15 @@ type HookTask struct {
ID int64 `xorm:"pk autoincr"` ID int64 `xorm:"pk autoincr"`
HookID int64 `xorm:"index"` HookID int64 `xorm:"index"`
UUID string `xorm:"unique"` UUID string `xorm:"unique"`
api.Payloader `xorm:"-"`
PayloadContent string `xorm:"LONGTEXT"` PayloadContent string `xorm:"LONGTEXT"`
EventType webhook_module.HookEventType // PayloadVersion number to allow for smooth version upgrades:
IsDelivered bool // - PayloadVersion 1: PayloadContent contains the JSON as sent to the URL
Delivered timeutil.TimeStampNano // - PayloadVersion 2: PayloadContent contains the original event
PayloadVersion int `xorm:"DEFAULT 1"`
EventType webhook_module.HookEventType
IsDelivered bool
Delivered timeutil.TimeStampNano
// History info. // History info.
IsSucceed bool IsSucceed bool
@ -115,16 +120,12 @@ func HookTasks(ctx context.Context, hookID int64, page int) ([]*HookTask, error)
// it handles conversion from Payload to PayloadContent. // it handles conversion from Payload to PayloadContent.
func CreateHookTask(ctx context.Context, t *HookTask) (*HookTask, error) { func CreateHookTask(ctx context.Context, t *HookTask) (*HookTask, error) {
t.UUID = gouuid.New().String() t.UUID = gouuid.New().String()
if t.Payloader != nil {
data, err := t.Payloader.JSONPayload()
if err != nil {
return nil, err
}
t.PayloadContent = string(data)
}
if t.Delivered == 0 { if t.Delivered == 0 {
t.Delivered = timeutil.TimeStampNanoNow() t.Delivered = timeutil.TimeStampNanoNow()
} }
if t.PayloadVersion == 0 {
return nil, errors.New("missing HookTask.PayloadVersion")
}
return t, db.Insert(ctx, t) return t, db.Insert(ctx, t)
} }
@ -165,6 +166,7 @@ func ReplayHookTask(ctx context.Context, hookID int64, uuid string) (*HookTask,
HookID: task.HookID, HookID: task.HookID,
PayloadContent: task.PayloadContent, PayloadContent: task.PayloadContent,
EventType: task.EventType, EventType: task.EventType,
PayloadVersion: task.PayloadVersion,
}) })
} }
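
For orientation, a hedged sketch of a call site under the new scheme, where PayloadContent carries the original event and PayloadVersion must be set explicitly; the hook variable, event type, and serialized event are illustrative only:

// Illustrative only: hook and originalEventJSON are assumed to exist at the call site.
task, err := CreateHookTask(ctx, &HookTask{
	HookID:         hook.ID,
	EventType:      webhook_module.HookEventPush,
	PayloadContent: string(originalEventJSON), // version 2: the original event, not the delivered body
	PayloadVersion: 2,                         // a zero version is rejected by CreateHookTask
})
if err != nil {
	return err
}
_ = task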

View file

@ -12,7 +12,6 @@ import (
"code.gitea.io/gitea/models/unittest" "code.gitea.io/gitea/models/unittest"
"code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/optional"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/timeutil"
webhook_module "code.gitea.io/gitea/modules/webhook" webhook_module "code.gitea.io/gitea/modules/webhook"
@ -35,8 +34,10 @@ func TestWebhook_History(t *testing.T) {
webhook := unittest.AssertExistsAndLoadBean(t, &Webhook{ID: 1}) webhook := unittest.AssertExistsAndLoadBean(t, &Webhook{ID: 1})
tasks, err := webhook.History(db.DefaultContext, 0) tasks, err := webhook.History(db.DefaultContext, 0)
assert.NoError(t, err) assert.NoError(t, err)
if assert.Len(t, tasks, 1) { if assert.Len(t, tasks, 3) {
assert.Equal(t, int64(1), tasks[0].ID) assert.Equal(t, int64(3), tasks[0].ID)
assert.Equal(t, int64(2), tasks[1].ID)
assert.Equal(t, int64(1), tasks[2].ID)
} }
webhook = unittest.AssertExistsAndLoadBean(t, &Webhook{ID: 2}) webhook = unittest.AssertExistsAndLoadBean(t, &Webhook{ID: 2})
@ -197,8 +198,10 @@ func TestHookTasks(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase()) assert.NoError(t, unittest.PrepareTestDatabase())
hookTasks, err := HookTasks(db.DefaultContext, 1, 1) hookTasks, err := HookTasks(db.DefaultContext, 1, 1)
assert.NoError(t, err) assert.NoError(t, err)
if assert.Len(t, hookTasks, 1) { if assert.Len(t, hookTasks, 3) {
assert.Equal(t, int64(1), hookTasks[0].ID) assert.Equal(t, int64(3), hookTasks[0].ID)
assert.Equal(t, int64(2), hookTasks[1].ID)
assert.Equal(t, int64(1), hookTasks[2].ID)
} }
hookTasks, err = HookTasks(db.DefaultContext, unittest.NonexistentID, 1) hookTasks, err = HookTasks(db.DefaultContext, unittest.NonexistentID, 1)
@ -209,8 +212,8 @@ func TestHookTasks(t *testing.T) {
func TestCreateHookTask(t *testing.T) { func TestCreateHookTask(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase()) assert.NoError(t, unittest.PrepareTestDatabase())
hookTask := &HookTask{ hookTask := &HookTask{
HookID: 3, HookID: 3,
Payloader: &api.PushPayload{}, PayloadVersion: 2,
} }
unittest.AssertNotExistsBean(t, hookTask) unittest.AssertNotExistsBean(t, hookTask)
_, err := CreateHookTask(db.DefaultContext, hookTask) _, err := CreateHookTask(db.DefaultContext, hookTask)
@ -232,10 +235,10 @@ func TestUpdateHookTask(t *testing.T) {
func TestCleanupHookTaskTable_PerWebhook_DeletesDelivered(t *testing.T) { func TestCleanupHookTaskTable_PerWebhook_DeletesDelivered(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase()) assert.NoError(t, unittest.PrepareTestDatabase())
hookTask := &HookTask{ hookTask := &HookTask{
HookID: 3, HookID: 3,
Payloader: &api.PushPayload{}, IsDelivered: true,
IsDelivered: true, Delivered: timeutil.TimeStampNanoNow(),
Delivered: timeutil.TimeStampNanoNow(), PayloadVersion: 2,
} }
unittest.AssertNotExistsBean(t, hookTask) unittest.AssertNotExistsBean(t, hookTask)
_, err := CreateHookTask(db.DefaultContext, hookTask) _, err := CreateHookTask(db.DefaultContext, hookTask)
@ -249,9 +252,9 @@ func TestCleanupHookTaskTable_PerWebhook_DeletesDelivered(t *testing.T) {
func TestCleanupHookTaskTable_PerWebhook_LeavesUndelivered(t *testing.T) { func TestCleanupHookTaskTable_PerWebhook_LeavesUndelivered(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase()) assert.NoError(t, unittest.PrepareTestDatabase())
hookTask := &HookTask{ hookTask := &HookTask{
HookID: 4, HookID: 4,
Payloader: &api.PushPayload{}, IsDelivered: false,
IsDelivered: false, PayloadVersion: 2,
} }
unittest.AssertNotExistsBean(t, hookTask) unittest.AssertNotExistsBean(t, hookTask)
_, err := CreateHookTask(db.DefaultContext, hookTask) _, err := CreateHookTask(db.DefaultContext, hookTask)
@ -265,10 +268,10 @@ func TestCleanupHookTaskTable_PerWebhook_LeavesUndelivered(t *testing.T) {
func TestCleanupHookTaskTable_PerWebhook_LeavesMostRecentTask(t *testing.T) { func TestCleanupHookTaskTable_PerWebhook_LeavesMostRecentTask(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase()) assert.NoError(t, unittest.PrepareTestDatabase())
hookTask := &HookTask{ hookTask := &HookTask{
HookID: 4, HookID: 4,
Payloader: &api.PushPayload{}, IsDelivered: true,
IsDelivered: true, Delivered: timeutil.TimeStampNanoNow(),
Delivered: timeutil.TimeStampNanoNow(), PayloadVersion: 2,
} }
unittest.AssertNotExistsBean(t, hookTask) unittest.AssertNotExistsBean(t, hookTask)
_, err := CreateHookTask(db.DefaultContext, hookTask) _, err := CreateHookTask(db.DefaultContext, hookTask)
@ -282,10 +285,10 @@ func TestCleanupHookTaskTable_PerWebhook_LeavesMostRecentTask(t *testing.T) {
func TestCleanupHookTaskTable_OlderThan_DeletesDelivered(t *testing.T) { func TestCleanupHookTaskTable_OlderThan_DeletesDelivered(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase()) assert.NoError(t, unittest.PrepareTestDatabase())
hookTask := &HookTask{ hookTask := &HookTask{
HookID: 3, HookID: 3,
Payloader: &api.PushPayload{}, IsDelivered: true,
IsDelivered: true, Delivered: timeutil.TimeStampNano(time.Now().AddDate(0, 0, -8).UnixNano()),
Delivered: timeutil.TimeStampNano(time.Now().AddDate(0, 0, -8).UnixNano()), PayloadVersion: 2,
} }
unittest.AssertNotExistsBean(t, hookTask) unittest.AssertNotExistsBean(t, hookTask)
_, err := CreateHookTask(db.DefaultContext, hookTask) _, err := CreateHookTask(db.DefaultContext, hookTask)
@ -299,9 +302,9 @@ func TestCleanupHookTaskTable_OlderThan_DeletesDelivered(t *testing.T) {
func TestCleanupHookTaskTable_OlderThan_LeavesUndelivered(t *testing.T) { func TestCleanupHookTaskTable_OlderThan_LeavesUndelivered(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase()) assert.NoError(t, unittest.PrepareTestDatabase())
hookTask := &HookTask{ hookTask := &HookTask{
HookID: 4, HookID: 4,
Payloader: &api.PushPayload{}, IsDelivered: false,
IsDelivered: false, PayloadVersion: 2,
} }
unittest.AssertNotExistsBean(t, hookTask) unittest.AssertNotExistsBean(t, hookTask)
_, err := CreateHookTask(db.DefaultContext, hookTask) _, err := CreateHookTask(db.DefaultContext, hookTask)
@ -315,10 +318,10 @@ func TestCleanupHookTaskTable_OlderThan_LeavesUndelivered(t *testing.T) {
func TestCleanupHookTaskTable_OlderThan_LeavesTaskEarlierThanAgeToDelete(t *testing.T) { func TestCleanupHookTaskTable_OlderThan_LeavesTaskEarlierThanAgeToDelete(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase()) assert.NoError(t, unittest.PrepareTestDatabase())
hookTask := &HookTask{ hookTask := &HookTask{
HookID: 4, HookID: 4,
Payloader: &api.PushPayload{}, IsDelivered: true,
IsDelivered: true, Delivered: timeutil.TimeStampNano(time.Now().AddDate(0, 0, -6).UnixNano()),
Delivered: timeutil.TimeStampNano(time.Now().AddDate(0, 0, -6).UnixNano()), PayloadVersion: 2,
} }
unittest.AssertNotExistsBean(t, hookTask) unittest.AssertNotExistsBean(t, hookTask)
_, err := CreateHookTask(db.DefaultContext, hookTask) _, err := CreateHookTask(db.DefaultContext, hookTask)

View file

@ -55,15 +55,8 @@ func (repo *Repository) GetHEADBranch() (*Branch, error) {
}, nil }, nil
} }
// SetDefaultBranch sets default branch of repository. func GetDefaultBranch(ctx context.Context, repoPath string) (string, error) {
func (repo *Repository) SetDefaultBranch(name string) error { stdout, _, err := NewCommand(ctx, "symbolic-ref", "HEAD").RunStdString(&RunOpts{Dir: repoPath})
_, _, err := NewCommand(repo.Ctx, "symbolic-ref", "HEAD").AddDynamicArguments(BranchPrefix + name).RunStdString(&RunOpts{Dir: repo.Path})
return err
}
// GetDefaultBranch gets default branch of repository.
func (repo *Repository) GetDefaultBranch() (string, error) {
stdout, _, err := NewCommand(repo.Ctx, "symbolic-ref", "HEAD").RunStdString(&RunOpts{Dir: repo.Path})
if err != nil { if err != nil {
return "", err return "", err
} }

View file

@ -30,3 +30,20 @@ func GetBranchCommitID(ctx context.Context, repo Repository, branch string) (str
return gitRepo.GetBranchCommitID(branch) return gitRepo.GetBranchCommitID(branch)
} }
// SetDefaultBranch sets default branch of repository.
func SetDefaultBranch(ctx context.Context, repo Repository, name string) error {
_, _, err := git.NewCommand(ctx, "symbolic-ref", "HEAD").
AddDynamicArguments(git.BranchPrefix + name).
RunStdString(&git.RunOpts{Dir: repoPath(repo)})
return err
}
// GetDefaultBranch gets default branch of repository.
func GetDefaultBranch(ctx context.Context, repo Repository) (string, error) {
return git.GetDefaultBranch(ctx, repoPath(repo))
}
func GetWikiDefaultBranch(ctx context.Context, repo Repository) (string, error) {
return git.GetDefaultBranch(ctx, wikiPath(repo))
}
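
A hedged usage sketch of the package-level helpers above; repo stands for any value satisfying the gitrepo Repository interface (for example a *repo_model.Repository), which is how the API handler later in this change calls SetDefaultBranch:

// Illustrative only: ctx and repo come from the caller.
if err := gitrepo.SetDefaultBranch(ctx, repo, "main"); err != nil {
	return err
}
branch, err := gitrepo.GetDefaultBranch(ctx, repo) // resolves `git symbolic-ref HEAD`
if err != nil {
	return err
}
_ = branch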

View file

@ -59,7 +59,15 @@ func (g *Manager) start() {
go func() { go func() {
defer close(startupDone) defer close(startupDone)
// Wait till we're done getting all the listeners and then close the unused ones // Wait till we're done getting all the listeners and then close the unused ones
g.createServerWaitGroup.Wait() func() {
// FIXME: there is a fundamental design problem of the "manager" and the "wait group".
// If nothing has started, the "Wait" just panics: sync: WaitGroup is reused before previous Wait has returned
// There is no clear solution besides a complete rewriting of the "manager"
defer func() {
_ = recover()
}()
g.createServerWaitGroup.Wait()
}()
// Ignore the error here there's not much we can do with it, they're logged in the CloseProvidedListeners function // Ignore the error here there's not much we can do with it, they're logged in the CloseProvidedListeners function
_ = CloseProvidedListeners() _ = CloseProvidedListeners()
g.notify(readyMsg) g.notify(readyMsg)

View file

@ -150,7 +150,15 @@ func (g *Manager) awaitServer(limit time.Duration) bool {
c := make(chan struct{}) c := make(chan struct{})
go func() { go func() {
defer close(c) defer close(c)
g.createServerWaitGroup.Wait() func() {
// FIXME: there is a fundamental design problem of the "manager" and the "wait group".
// If nothing has started, the "Wait" just panics: sync: WaitGroup is reused before previous Wait has returned
// There is no clear solution besides a complete rewriting of the "manager"
defer func() {
_ = recover()
}()
g.createServerWaitGroup.Wait()
}()
}() }()
if limit > 0 { if limit > 0 {
select { select {
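
Both hunks above apply the same recover-wrapped Wait. Extracted into standalone form, the pattern looks roughly like this (the helper name is hypothetical):

import "sync"

// waitIgnoringPanic runs Wait behind a deferred recover so that a misused
// WaitGroup (e.g. "sync: WaitGroup is reused before previous Wait has returned")
// cannot crash the surrounding goroutine.
func waitIgnoringPanic(wg *sync.WaitGroup) {
	defer func() {
		_ = recover()
	}()
	wg.Wait()
}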

View file

@ -233,21 +233,21 @@ func (b *Indexer) Delete(_ context.Context, repoID int64) error {
// Search searches for files in the specified repo. // Search searches for files in the specified repo.
// Returns the matching file-paths // Returns the matching file-paths
func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isMatch bool) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) { func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) {
var ( var (
indexerQuery query.Query indexerQuery query.Query
keywordQuery query.Query keywordQuery query.Query
) )
if isMatch { if isFuzzy {
prefixQuery := bleve.NewPrefixQuery(keyword)
prefixQuery.FieldVal = "Content"
keywordQuery = prefixQuery
} else {
phraseQuery := bleve.NewMatchPhraseQuery(keyword) phraseQuery := bleve.NewMatchPhraseQuery(keyword)
phraseQuery.FieldVal = "Content" phraseQuery.FieldVal = "Content"
phraseQuery.Analyzer = repoIndexerAnalyzer phraseQuery.Analyzer = repoIndexerAnalyzer
keywordQuery = phraseQuery keywordQuery = phraseQuery
} else {
prefixQuery := bleve.NewPrefixQuery(keyword)
prefixQuery.FieldVal = "Content"
keywordQuery = prefixQuery
} }
if len(repoIDs) > 0 { if len(repoIDs) > 0 {

View file

@ -281,10 +281,10 @@ func extractAggs(searchResult *elastic.SearchResult) []*internal.SearchResultLan
} }
// Search searches for codes and language stats by given conditions. // Search searches for codes and language stats by given conditions.
func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isMatch bool) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) { func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) {
searchType := esMultiMatchTypeBestFields searchType := esMultiMatchTypePhrasePrefix
if isMatch { if isFuzzy {
searchType = esMultiMatchTypePhrasePrefix searchType = esMultiMatchTypeBestFields
} }
kwQuery := elastic.NewMultiMatchQuery(keyword, "content").Type(searchType) kwQuery := elastic.NewMultiMatchQuery(keyword, "content").Type(searchType)

View file

@ -70,7 +70,7 @@ func testIndexer(name string, t *testing.T, indexer internal.Indexer) {
for _, kw := range keywords { for _, kw := range keywords {
t.Run(kw.Keyword, func(t *testing.T) { t.Run(kw.Keyword, func(t *testing.T) {
total, res, langs, err := indexer.Search(context.TODO(), kw.RepoIDs, "", kw.Keyword, 1, 10, false) total, res, langs, err := indexer.Search(context.TODO(), kw.RepoIDs, "", kw.Keyword, 1, 10, true)
assert.NoError(t, err) assert.NoError(t, err)
assert.Len(t, kw.IDs, int(total)) assert.Len(t, kw.IDs, int(total))
assert.Len(t, langs, kw.Langs) assert.Len(t, langs, kw.Langs)

View file

@ -16,7 +16,7 @@ type Indexer interface {
internal.Indexer internal.Indexer
Index(ctx context.Context, repo *repo_model.Repository, sha string, changes *RepoChanges) error Index(ctx context.Context, repo *repo_model.Repository, sha string, changes *RepoChanges) error
Delete(ctx context.Context, repoID int64) error Delete(ctx context.Context, repoID int64) error
Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isMatch bool) (int64, []*SearchResult, []*SearchResultLanguages, error) Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int64, []*SearchResult, []*SearchResultLanguages, error)
} }
// NewDummyIndexer returns a dummy indexer // NewDummyIndexer returns a dummy indexer
@ -38,6 +38,6 @@ func (d *dummyIndexer) Delete(ctx context.Context, repoID int64) error {
return fmt.Errorf("indexer is not ready") return fmt.Errorf("indexer is not ready")
} }
func (d *dummyIndexer) Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isMatch bool) (int64, []*SearchResult, []*SearchResultLanguages, error) { func (d *dummyIndexer) Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int64, []*SearchResult, []*SearchResultLanguages, error) {
return 0, nil, nil, fmt.Errorf("indexer is not ready") return 0, nil, nil, fmt.Errorf("indexer is not ready")
} }

View file

@ -124,12 +124,13 @@ func searchResult(result *internal.SearchResult, startIndex, endIndex int) (*Res
} }
// PerformSearch perform a search on a repository // PerformSearch perform a search on a repository
func PerformSearch(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isMatch bool) (int, []*Result, []*internal.SearchResultLanguages, error) { // if isFuzzy is true set the Damerau-Levenshtein distance from 0 to 2
func PerformSearch(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int, []*Result, []*internal.SearchResultLanguages, error) {
if len(keyword) == 0 { if len(keyword) == 0 {
return 0, nil, nil, nil return 0, nil, nil, nil
} }
total, results, resultLanguages, err := (*globalIndexer.Load()).Search(ctx, repoIDs, language, keyword, page, pageSize, isMatch) total, results, resultLanguages, err := (*globalIndexer.Load()).Search(ctx, repoIDs, language, keyword, page, pageSize, isFuzzy)
if err != nil { if err != nil {
return 0, nil, nil, err return 0, nil, nil, err
} }

View file

@ -25,6 +25,13 @@ func MatchPhraseQuery(matchPhrase, field, analyzer string) *query.MatchPhraseQue
return q return q
} }
// PrefixQuery generates a match prefix query for the given prefix and field
func PrefixQuery(matchPrefix, field string) *query.PrefixQuery {
q := bleve.NewPrefixQuery(matchPrefix)
q.FieldVal = field
return q
}
// BoolFieldQuery generates a bool field query for the given value and field // BoolFieldQuery generates a bool field query for the given value and field
func BoolFieldQuery(value bool, field string) *query.BoolFieldQuery { func BoolFieldQuery(value bool, field string) *query.BoolFieldQuery {
q := bleve.NewBoolFieldQuery(value) q := bleve.NewBoolFieldQuery(value)
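
A hedged usage sketch of the new PrefixQuery helper; the bleve issue indexer below combines it into a disjunction across several fields in the same way (the inner_bleve alias is how callers import this package):

// Illustrative only: an OR of prefix matches over the indexed fields.
q := bleve.NewDisjunctionQuery(
	inner_bleve.PrefixQuery(keyword, "title"),
	inner_bleve.PrefixQuery(keyword, "content"),
	inner_bleve.PrefixQuery(keyword, "comments"),
)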

View file

@ -156,12 +156,19 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
var queries []query.Query var queries []query.Query
if options.Keyword != "" { if options.Keyword != "" {
keywordQueries := []query.Query{ if options.IsFuzzyKeyword {
inner_bleve.MatchPhraseQuery(options.Keyword, "title", issueIndexerAnalyzer), queries = append(queries, bleve.NewDisjunctionQuery([]query.Query{
inner_bleve.MatchPhraseQuery(options.Keyword, "content", issueIndexerAnalyzer), inner_bleve.MatchPhraseQuery(options.Keyword, "title", issueIndexerAnalyzer),
inner_bleve.MatchPhraseQuery(options.Keyword, "comments", issueIndexerAnalyzer), inner_bleve.MatchPhraseQuery(options.Keyword, "content", issueIndexerAnalyzer),
inner_bleve.MatchPhraseQuery(options.Keyword, "comments", issueIndexerAnalyzer),
}...))
} else {
queries = append(queries, bleve.NewDisjunctionQuery([]query.Query{
inner_bleve.PrefixQuery(options.Keyword, "title"),
inner_bleve.PrefixQuery(options.Keyword, "content"),
inner_bleve.PrefixQuery(options.Keyword, "comments"),
}...))
} }
queries = append(queries, bleve.NewDisjunctionQuery(keywordQueries...))
} }
if len(options.RepoIDs) > 0 || options.AllPublic { if len(options.RepoIDs) > 0 || options.AllPublic {

View file

@ -19,6 +19,10 @@ import (
const ( const (
issueIndexerLatestVersion = 1 issueIndexerLatestVersion = 1
// multi-match-types, currently only 2 types are used
// Reference: https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-multi-match-query.html#multi-match-types
esMultiMatchTypeBestFields = "best_fields"
esMultiMatchTypePhrasePrefix = "phrase_prefix"
) )
var _ internal.Indexer = &Indexer{} var _ internal.Indexer = &Indexer{}
@ -141,7 +145,13 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
query := elastic.NewBoolQuery() query := elastic.NewBoolQuery()
if options.Keyword != "" { if options.Keyword != "" {
query.Must(elastic.NewMultiMatchQuery(options.Keyword, "title", "content", "comments"))
searchType := esMultiMatchTypePhrasePrefix
if options.IsFuzzyKeyword {
searchType = esMultiMatchTypeBestFields
}
query.Must(elastic.NewMultiMatchQuery(options.Keyword, "title", "content", "comments").Type(searchType))
} }
if len(options.RepoIDs) > 0 { if len(options.RepoIDs) > 0 {

View file

@ -74,6 +74,8 @@ type SearchResult struct {
type SearchOptions struct { type SearchOptions struct {
Keyword string // keyword to search Keyword string // keyword to search
IsFuzzyKeyword bool // if false the levenshtein distance is 0
RepoIDs []int64 // repository IDs which the issues belong to RepoIDs []int64 // repository IDs which the issues belong to
AllPublic bool // if include all public repositories AllPublic bool // if include all public repositories
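
With IsFuzzyKeyword left false the indexers fall back to exact matching. A hedged sketch of a caller requesting such a search; the indexer value, package qualifier, and field contents are illustrative only:

// Illustrative only: indexer is any issues indexer implementation.
opts := &internal.SearchOptions{
	Keyword:        "panic on webhook delivery",
	IsFuzzyKeyword: false, // exact matching, no typo tolerance
	RepoIDs:        []int64{1, 2},
}
result, err := indexer.Search(ctx, opts)
if err != nil {
	return err
}
_ = result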

View file

@ -5,6 +5,7 @@ package meilisearch
import ( import (
"context" "context"
"errors"
"strconv" "strconv"
"strings" "strings"
@ -16,12 +17,15 @@ import (
) )
const ( const (
issueIndexerLatestVersion = 2 issueIndexerLatestVersion = 3
// TODO: make this configurable if necessary // TODO: make this configurable if necessary
maxTotalHits = 10000 maxTotalHits = 10000
) )
// ErrMalformedResponse is never expected as we initialize the indexer ourselves and so define the types.
var ErrMalformedResponse = errors.New("meilisearch returned unexpected malformed content")
var _ internal.Indexer = &Indexer{} var _ internal.Indexer = &Indexer{}
// Indexer implements Indexer interface // Indexer implements Indexer interface
@ -47,6 +51,9 @@ func NewIndexer(url, apiKey, indexerName string) *Indexer {
}, },
DisplayedAttributes: []string{ DisplayedAttributes: []string{
"id", "id",
"title",
"content",
"comments",
}, },
FilterableAttributes: []string{ FilterableAttributes: []string{
"repo_id", "repo_id",
@ -221,11 +228,9 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
return nil, err return nil, err
} }
hits := make([]internal.Match, 0, len(searchRes.Hits)) hits, err := nonFuzzyWorkaround(searchRes, options.Keyword, options.IsFuzzyKeyword)
for _, hit := range searchRes.Hits { if err != nil {
hits = append(hits, internal.Match{ return nil, err
ID: int64(hit.(map[string]any)["id"].(float64)),
})
} }
return &internal.SearchResult{ return &internal.SearchResult{
@ -241,3 +246,77 @@ func parseSortBy(sortBy internal.SortBy) string {
} }
return field + ":asc" return field + ":asc"
} }
// nonFuzzyWorkaround is needed as meilisearch does not have an exact search
// and you can only change "typo tolerance" per index. So we have to post-filter the results
// https://www.meilisearch.com/docs/learn/configuration/typo_tolerance#configuring-typo-tolerance
// TODO: remove once https://github.com/orgs/meilisearch/discussions/377 is addressed
func nonFuzzyWorkaround(searchRes *meilisearch.SearchResponse, keyword string, isFuzzy bool) ([]internal.Match, error) {
hits := make([]internal.Match, 0, len(searchRes.Hits))
for _, hit := range searchRes.Hits {
hit, ok := hit.(map[string]any)
if !ok {
return nil, ErrMalformedResponse
}
if !isFuzzy {
keyword = strings.ToLower(keyword)
// declare an anonymous func to check whether the title, the content, or at least one comment contains the keyword
found, err := func() (bool, error) {
// check if title match first
title, ok := hit["title"].(string)
if !ok {
return false, ErrMalformedResponse
} else if strings.Contains(strings.ToLower(title), keyword) {
return true, nil
}
// check if content has a match
content, ok := hit["content"].(string)
if !ok {
return false, ErrMalformedResponse
} else if strings.Contains(strings.ToLower(content), keyword) {
return true, nil
}
// now check for each comment if one has a match
// so we first try to cast and skip if there are no comments
comments, ok := hit["comments"].([]any)
if !ok {
return false, ErrMalformedResponse
} else if len(comments) == 0 {
return false, nil
}
// now iterate over all comments and report as soon as we detect a match
for i := range comments {
comment, ok := comments[i].(string)
if !ok {
return false, ErrMalformedResponse
}
if strings.Contains(strings.ToLower(comment), keyword) {
return true, nil
}
}
// we got no match
return false, nil
}()
if err != nil {
return nil, err
} else if !found {
continue
}
}
issueID, ok := hit["id"].(float64)
if !ok {
return nil, ErrMalformedResponse
}
hits = append(hits, internal.Match{
ID: int64(issueID),
})
}
return hits, nil
}

View file

@ -10,7 +10,11 @@ import (
"testing" "testing"
"time" "time"
"code.gitea.io/gitea/modules/indexer/issues/internal"
"code.gitea.io/gitea/modules/indexer/issues/internal/tests" "code.gitea.io/gitea/modules/indexer/issues/internal/tests"
"github.com/meilisearch/meilisearch-go"
"github.com/stretchr/testify/assert"
) )
func TestMeilisearchIndexer(t *testing.T) { func TestMeilisearchIndexer(t *testing.T) {
@ -49,3 +53,44 @@ func TestMeilisearchIndexer(t *testing.T) {
tests.TestIndexer(t, indexer) tests.TestIndexer(t, indexer)
} }
func TestNonFuzzyWorkaround(t *testing.T) {
// get unexpected return
_, err := nonFuzzyWorkaround(&meilisearch.SearchResponse{
Hits: []any{"aa", "bb", "cc", "dd"},
}, "bowling", false)
assert.ErrorIs(t, err, ErrMalformedResponse)
validResponse := &meilisearch.SearchResponse{
Hits: []any{
map[string]any{
"id": float64(11),
"title": "a title",
"content": "issue body with no match",
"comments": []any{"hey whats up?", "I'm currently bowling", "nice"},
},
map[string]any{
"id": float64(22),
"title": "Bowling as title",
"content": "",
"comments": []any{},
},
map[string]any{
"id": float64(33),
"title": "Bowl-ing as fuzzy match",
"content": "",
"comments": []any{},
},
},
}
// nonFuzzy
hits, err := nonFuzzyWorkaround(validResponse, "bowling", false)
assert.NoError(t, err)
assert.EqualValues(t, []internal.Match{{ID: 11}, {ID: 22}}, hits)
// fuzzy
hits, err = nonFuzzyWorkaround(validResponse, "bowling", true)
assert.NoError(t, err)
assert.EqualValues(t, []internal.Match{{ID: 11}, {ID: 22}, {ID: 33}}, hits)
}

View file

@ -93,8 +93,10 @@ func (Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Wri
if _, err := tmpBlock.WriteString(html.EscapeString(string(rawBytes))); err != nil { if _, err := tmpBlock.WriteString(html.EscapeString(string(rawBytes))); err != nil {
return err return err
} }
_, err = tmpBlock.WriteString("</pre>") if _, err := tmpBlock.WriteString("</pre>"); err != nil {
return err return err
}
return tmpBlock.Flush()
} }
rd, err := csv.CreateReaderAndDetermineDelimiter(ctx, bytes.NewReader(rawBytes)) rd, err := csv.CreateReaderAndDetermineDelimiter(ctx, bytes.NewReader(rawBytes))

View file

@ -22,5 +22,6 @@ func loadAdminFrom(rootCfg ConfigProvider) {
const ( const (
UserFeatureDeletion = "deletion" UserFeatureDeletion = "deletion"
UserFeatureManageSSHKeys = "manage_ssh_keys"
UserFeatureManageGPGKeys = "manage_gpg_keys" UserFeatureManageGPGKeys = "manage_gpg_keys"
) )

View file

@ -12,7 +12,7 @@ var Attachment = struct {
Enabled bool Enabled bool
}{ }{
Storage: &Storage{}, Storage: &Storage{},
AllowedTypes: ".csv,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip", AllowedTypes: ".cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip",
MaxSize: 2048, MaxSize: 2048,
MaxFiles: 5, MaxFiles: 5,
Enabled: true, Enabled: true,
@ -25,7 +25,7 @@ func loadAttachmentFrom(rootCfg ConfigProvider) (err error) {
return err return err
} }
Attachment.AllowedTypes = sec.Key("ALLOWED_TYPES").MustString(".csv,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip") Attachment.AllowedTypes = sec.Key("ALLOWED_TYPES").MustString(".cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip")
Attachment.MaxSize = sec.Key("MAX_SIZE").MustInt64(2048) Attachment.MaxSize = sec.Key("MAX_SIZE").MustInt64(2048)
Attachment.MaxFiles = sec.Key("MAX_FILES").MustInt(5) Attachment.MaxFiles = sec.Key("MAX_FILES").MustInt(5)
Attachment.Enabled = sec.Key("ENABLED").MustBool(true) Attachment.Enabled = sec.Key("ENABLED").MustBool(true)

View file

@ -211,14 +211,8 @@ func SafeHTML(s any) template.HTML {
} }
// SanitizeHTML sanitizes the input by pre-defined markdown rules // SanitizeHTML sanitizes the input by pre-defined markdown rules
func SanitizeHTML(s any) template.HTML { func SanitizeHTML(s string) template.HTML {
switch v := s.(type) { return template.HTML(markup.Sanitize(s))
case string:
return template.HTML(markup.Sanitize(v))
case template.HTML:
return template.HTML(markup.Sanitize(string(v)))
}
panic(fmt.Sprintf("unexpected type %T", s))
} }
func HTMLEscape(s any) template.HTML { func HTMLEscape(s any) template.HTML {

View file

@ -64,5 +64,4 @@ func TestHTMLFormat(t *testing.T) {
func TestSanitizeHTML(t *testing.T) { func TestSanitizeHTML(t *testing.T) {
assert.Equal(t, template.HTML(`<a href="/" rel="nofollow">link</a> xss <div>inline</div>`), SanitizeHTML(`<a href="/">link</a> <a href="javascript:">xss</a> <div style="dangerous">inline</div>`)) assert.Equal(t, template.HTML(`<a href="/" rel="nofollow">link</a> xss <div>inline</div>`), SanitizeHTML(`<a href="/">link</a> <a href="javascript:">xss</a> <div style="dangerous">inline</div>`))
assert.Equal(t, template.HTML(`<a href="/" rel="nofollow">link</a> xss <div>inline</div>`), SanitizeHTML(template.HTML(`<a href="/">link</a> <a href="javascript:">xss</a> <div style="dangerous">inline</div>`)))
} }

View file

@ -595,6 +595,8 @@ enterred_invalid_repo_name = The repository name you entered is incorrect.
enterred_invalid_org_name = The organization name you entered is incorrect. enterred_invalid_org_name = The organization name you entered is incorrect.
enterred_invalid_owner_name = The new owner name is not valid. enterred_invalid_owner_name = The new owner name is not valid.
enterred_invalid_password = The password you entered is incorrect. enterred_invalid_password = The password you entered is incorrect.
unset_password = The login user has not set a password.
unsupported_login_type = Accounts with this login type cannot be deleted.
user_not_exist = The user does not exist. user_not_exist = The user does not exist.
team_not_exist = The team does not exist. team_not_exist = The team does not exist.
last_org_owner = You cannot remove the last user from the "owners" team. There must be at least one owner for an organization. last_org_owner = You cannot remove the last user from the "owners" team. There must be at least one owner for an organization.
@ -2671,6 +2673,7 @@ find_file.no_matching = No matching file found
error.csv.too_large = Can't render this file because it is too large. error.csv.too_large = Can't render this file because it is too large.
error.csv.unexpected = Can't render this file because it contains an unexpected character in line %d and column %d. error.csv.unexpected = Can't render this file because it contains an unexpected character in line %d and column %d.
error.csv.invalid_field_count = Can't render this file because it has a wrong number of fields in line %d. error.csv.invalid_field_count = Can't render this file because it has a wrong number of fields in line %d.
error.broken_git_hook = Git hooks of this repository seem to be broken. Please follow the <a target="_blank" rel="noreferrer" href="%s">documentation</a> to fix them, then push some commits to refresh the status.
[graphs] [graphs]
component_loading = Loading %s... component_loading = Loading %s...

View file

@ -162,8 +162,8 @@ footer.software=ソフトウェアについて
footer.links=リンク footer.links=リンク
[heatmap] [heatmap]
number_of_contributions_in_the_last_12_months=過去 12 か月間で %s 個の貢献 number_of_contributions_in_the_last_12_months=過去 12 か月間で %s 件の実績
no_contributions=貢献なし no_contributions=実績なし
less= less=
more= more=
@ -1547,7 +1547,7 @@ issues.role.member_helper=このユーザーはこのリポジトリを所有し
issues.role.collaborator=共同作業者 issues.role.collaborator=共同作業者
issues.role.collaborator_helper=このユーザーはリポジトリ上で共同作業するように招待されています。 issues.role.collaborator_helper=このユーザーはリポジトリ上で共同作業するように招待されています。
issues.role.first_time_contributor=初めての貢献者 issues.role.first_time_contributor=初めての貢献者
issues.role.first_time_contributor_helper=これは、このユーザーリポジトリへの最初の貢献です。 issues.role.first_time_contributor_helper=これは、このユーザーによるリポジトリへの最初の貢献です。
issues.role.contributor=貢献者 issues.role.contributor=貢献者
issues.role.contributor_helper=このユーザーは以前にリポジトリにコミットしています。 issues.role.contributor_helper=このユーザーは以前にリポジトリにコミットしています。
issues.re_request_review=レビューを再依頼 issues.re_request_review=レビューを再依頼
@ -2048,7 +2048,8 @@ settings.mirror_settings.docs.more_information_if_disabled=プッシュミラー
settings.mirror_settings.docs.doc_link_title=リポジトリをミラーリングするには? settings.mirror_settings.docs.doc_link_title=リポジトリをミラーリングするには?
settings.mirror_settings.docs.doc_link_pull_section=ドキュメントの「リモートリポジトリからのプル」セクション。 settings.mirror_settings.docs.doc_link_pull_section=ドキュメントの「リモートリポジトリからのプル」セクション。
settings.mirror_settings.docs.pulling_remote_title=リモートリポジトリからのプル settings.mirror_settings.docs.pulling_remote_title=リモートリポジトリからのプル
settings.mirror_settings.mirrored_repository=同期するリポジトリ settings.mirror_settings.mirrored_repository=ミラー元のリポジトリ
settings.mirror_settings.pushed_repository=プッシュ先のリポジトリ
settings.mirror_settings.direction=方向 settings.mirror_settings.direction=方向
settings.mirror_settings.direction.pull=プル settings.mirror_settings.direction.pull=プル
settings.mirror_settings.direction.push=プッシュ settings.mirror_settings.direction.push=プッシュ
@ -3591,6 +3592,8 @@ runs.actors_no_select=すべてのアクター
runs.status_no_select=すべてのステータス runs.status_no_select=すべてのステータス
runs.no_results=一致する結果はありません。 runs.no_results=一致する結果はありません。
runs.no_workflows=ワークフローはまだありません。 runs.no_workflows=ワークフローはまだありません。
runs.no_workflows.quick_start=Gitea Actions の始め方がわからない? では<a target="_blank" rel="noopener noreferrer" href="%s">クイックスタートガイド</a>をご覧ください。
runs.no_workflows.documentation=Gitea Actions の詳細については、<a target="_blank" rel="noopener noreferrer" href="%s">ドキュメント</a>を参照してください。
runs.no_runs=ワークフローはまだ実行されていません。 runs.no_runs=ワークフローはまだ実行されていません。
runs.empty_commit_message=(空のコミットメッセージ) runs.empty_commit_message=(空のコミットメッセージ)

package-lock.json generated
View file

@ -24,7 +24,6 @@
"chartjs-plugin-zoom": "2.0.1", "chartjs-plugin-zoom": "2.0.1",
"clippie": "4.0.7", "clippie": "4.0.7",
"css-loader": "6.10.0", "css-loader": "6.10.0",
"css-variables-parser": "1.0.1",
"dayjs": "1.11.10", "dayjs": "1.11.10",
"dropzone": "6.0.0-beta.2", "dropzone": "6.0.0-beta.2",
"easymde": "2.18.0", "easymde": "2.18.0",
@ -4032,35 +4031,6 @@
"node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0" "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0"
} }
}, },
"node_modules/css-variables-parser": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/css-variables-parser/-/css-variables-parser-1.0.1.tgz",
"integrity": "sha512-GWaqrwGtAWVr/yjjE17iyvbcy+W3voe0vko1/xLCwFeYd3kTLstzUdVH+g5TTXejrtlsb1FS4L9rP6PmeTa8wQ==",
"dependencies": {
"postcss": "^7.0.36"
}
},
"node_modules/css-variables-parser/node_modules/picocolors": {
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz",
"integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA=="
},
"node_modules/css-variables-parser/node_modules/postcss": {
"version": "7.0.39",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz",
"integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==",
"dependencies": {
"picocolors": "^0.2.1",
"source-map": "^0.6.1"
},
"engines": {
"node": ">=6.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/postcss/"
}
},
"node_modules/css-what": { "node_modules/css-what": {
"version": "6.1.0", "version": "6.1.0",
"resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz",

View file

@ -23,7 +23,6 @@
"chartjs-plugin-zoom": "2.0.1", "chartjs-plugin-zoom": "2.0.1",
"clippie": "4.0.7", "clippie": "4.0.7",
"css-loader": "6.10.0", "css-loader": "6.10.0",
"css-variables-parser": "1.0.1",
"dayjs": "1.11.10", "dayjs": "1.11.10",
"dropzone": "6.0.0-beta.2", "dropzone": "6.0.0-beta.2",
"easymde": "2.18.0", "easymde": "2.18.0",

View file

@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="gitea-bitbucket__svg gitea-bitbucket__gitea-bitbucket svg gitea-bitbucket" preserveAspectRatio="xMinYMin meet" viewBox="0 0 256 295" width="16" height="16"><g fill="#205081"><path d="M128 0C57.732 0 .012 18.822.012 42.663c0 6.274 15.057 95.364 21.331 130.498 2.51 16.312 43.918 38.898 106.657 38.898 62.74 0 102.893-22.586 106.657-38.898 6.274-35.134 21.331-124.224 21.331-130.498C254.734 18.822 198.268 0 128 0m0 183.199c-22.586 0-40.153-17.567-40.153-40.153s17.567-40.153 40.153-40.153 40.153 17.567 40.153 40.153c0 21.331-17.567 40.153-40.153 40.153m0-127.988c-45.172 0-81.561-7.53-81.561-17.567 0-10.039 36.389-17.567 81.561-17.567s81.561 7.528 81.561 17.567c0 10.038-36.389 17.567-81.561 17.567"/><path d="M220.608 207.04c-2.51 0-3.764 1.255-3.764 1.255s-31.37 25.096-87.835 25.096c-56.466 0-87.835-25.096-87.835-25.096s-2.51-1.255-3.765-1.255c-2.51 0-5.019 1.255-5.019 5.02v1.254c5.02 26.35 8.784 45.172 8.784 47.682 3.764 18.822 41.408 33.88 86.58 33.88s82.816-15.058 86.58-33.88c0-2.51 3.765-21.332 8.784-47.682v-1.255c1.255-2.51 0-5.019-2.51-5.019"/><circle cx="128" cy="141.791" r="20.077"/></g></svg> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 62.42 62.42" class="svg gitea-bitbucket" width="16" height="16" aria-hidden="true"><defs><linearGradient id="gitea-bitbucket__a" x1="64.01" x2="32.99" y1="30.27" y2="54.48" gradientUnits="userSpaceOnUse"><stop offset=".18" stop-color="#0052cc"/><stop offset="1" stop-color="#2684ff"/></linearGradient></defs><g data-name="Layer 2"><path fill="#2684ff" d="M2 3.13a2 2 0 0 0-2 2.32l8.49 51.54a2.72 2.72 0 0 0 2.66 2.27h40.73a2 2 0 0 0 2-1.68l8.49-52.12a2 2 0 0 0-2-2.32Zm35.75 37.25h-13l-3.52-18.39H40.9Z"/><path fill="url(#gitea-bitbucket__a)" d="M59.67 25.12H40.9l-3.15 18.39h-13L9.4 61.73a2.7 2.7 0 0 0 1.75.66h40.74a2 2 0 0 0 2-1.68Z" transform="translate(0 -3.13)"/></g></svg>

Before: 1.1 KiB | After: 732 B

View file

@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="gitea-facebook__svg gitea-facebook__gitea-facebook svg gitea-facebook" style="shape-rendering:geometricPrecision;text-rendering:geometricPrecision;image-rendering:optimizeQuality;fill-rule:evenodd;clip-rule:evenodd" viewBox="0 0 128 128" width="16" height="16"><path fill="#395b97" d="M93.5 8.5q-2.177 1.203-5 1.5L10 88.5q-.297 2.823-1.5 5a552 552 0 0 1-.5-56Q11.75 11.75 37.5 8a552 552 0 0 1 56 .5" style="opacity:.995"/><path fill="#366098" d="M93.5 8.5q23.832 6.337 26 31a677 677 0 0 0-1.5 37l-35 35a32.4 32.4 0 0 0-.5 8 442 442 0 0 1-1-42h14a380 380 0 0 0 3-17h-17q-3.75-20.745 17-18v-16q-38.632-4.865-33 34h-14v17h14v42q-14.01.25-28-.5-23.177-2.93-29-25.5 1.203-2.177 1.5-5L88.5 10q2.823-.297 5-1.5" style="opacity:.976"/><path fill="#346499" d="M119.5 39.5q.25 25.005-.5 50-5.432 30.368-36.5 30a32.4 32.4 0 0 1 .5-8l35-35q.254-18.76 1.5-37" style="opacity:.918"/></svg> <svg xmlns="http://www.w3.org/2000/svg" xml:space="preserve" fill-rule="evenodd" clip-rule="evenodd" image-rendering="optimizeQuality" shape-rendering="geometricPrecision" text-rendering="geometricPrecision" viewBox="0 0 14222 14222" class="svg gitea-facebook" width="16" height="16" aria-hidden="true"><g fill-rule="nonzero"><path fill="#1977f3" d="M14222 7111C14222 3184 11038 0 7111 0S0 3184 0 7111c0 3549 2600 6491 6000 7025V9167H4194V7111h1806V5544c0-1782 1062-2767 2686-2767 778 0 1592 139 1592 139v1750h-897c-883 0-1159 548-1159 1111v1334h1972l-315 2056H8222v4969c3400-533 6000-3475 6000-7025"/><path fill="#fefefe" d="m9879 9167 315-2056H8222V5777c0-562 275-1111 1159-1111h897V2916s-814-139-1592-139c-1624 0-2686 984-2686 2767v1567H4194v2056h1806v4969c362 57 733 86 1111 86s749-30 1111-86V9167z"/></g></svg>

Before: 941 B | After: 815 B

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xml:space="preserve" viewBox="0 0 70 70" class="svg gitea-jetbrains" width="16" height="16" aria-hidden="true"><linearGradient id="gitea-jetbrains__a" x1=".79" x2="33.317" y1="40.089" y2="40.089" gradientUnits="userSpaceOnUse"><stop offset=".258" style="stop-color:#f97a12"/><stop offset=".459" style="stop-color:#b07b58"/><stop offset=".724" style="stop-color:#577bae"/><stop offset=".91" style="stop-color:#1e7ce5"/><stop offset="1" style="stop-color:#087cfa"/></linearGradient><path d="M17.7 54.6.8 41.2l8.4-15.6L33.3 35z" style="fill:url(#gitea-jetbrains__a)"/><linearGradient id="gitea-jetbrains__b" x1="25.767" x2="79.424" y1="24.88" y2="54.57" gradientUnits="userSpaceOnUse"><stop offset="0" style="stop-color:#f97a12"/><stop offset=".072" style="stop-color:#cb7a3e"/><stop offset=".154" style="stop-color:#9e7b6a"/><stop offset=".242" style="stop-color:#757b91"/><stop offset=".334" style="stop-color:#537bb1"/><stop offset=".432" style="stop-color:#387ccc"/><stop offset=".538" style="stop-color:#237ce0"/><stop offset=".655" style="stop-color:#147cef"/><stop offset=".792" style="stop-color:#0b7cf7"/><stop offset="1" style="stop-color:#087cfa"/></linearGradient><path d="m70 18.7-1.3 40.5L41.8 70 25.6 59.6 49.3 35 38.9 12.3l9.3-11.2z" style="fill:url(#gitea-jetbrains__b)"/><linearGradient id="gitea-jetbrains__c" x1="63.228" x2="48.29" y1="42.915" y2="-1.719" gradientUnits="userSpaceOnUse"><stop offset="0" style="stop-color:#fe315d"/><stop offset=".078" style="stop-color:#cb417e"/><stop offset=".16" style="stop-color:#9e4e9b"/><stop offset=".247" style="stop-color:#755bb4"/><stop offset=".339" style="stop-color:#5365ca"/><stop offset=".436" style="stop-color:#386ddb"/><stop offset=".541" style="stop-color:#2374e9"/><stop offset=".658" style="stop-color:#1478f3"/><stop offset=".794" style="stop-color:#0b7bf8"/><stop offset="1" style="stop-color:#087cfa"/></linearGradient><path d="M70 18.7 48.7 43.9l-9.8-31.6 9.3-11.2z" style="fill:url(#gitea-jetbrains__c)"/><linearGradient id="gitea-jetbrains__d" x1="10.72" x2="55.524" y1="16.473" y2="90.58" gradientUnits="userSpaceOnUse"><stop offset="0" style="stop-color:#fe315d"/><stop offset=".04" style="stop-color:#f63462"/><stop offset=".104" style="stop-color:#df3a71"/><stop offset=".167" style="stop-color:#c24383"/><stop offset=".291" style="stop-color:#ad4a91"/><stop offset=".55" style="stop-color:#755bb4"/><stop offset=".917" style="stop-color:#1d76ed"/><stop offset="1" style="stop-color:#087cfa"/></linearGradient><path d="M33.7 58.1 5.6 68.3l4.5-15.8L16 33.1 0 27.7 10.1 0l22 2.7 21.6 24.7z" style="fill:url(#gitea-jetbrains__d)"/><path d="M13.7 13.5h43.2v43.2H13.7z" style="fill:#000"/><path d="M17.7 48.6h16.2v2.7H17.7zM29.4 22.4v-3.3h-9v3.3H23v11.3h-2.6V37h9v-3.3h-2.5V22.4zM38 37.3c-1.4 0-2.6-.3-3.5-.8s-1.7-1.2-2.3-1.9l2.5-2.8c.5.6 1 1 1.5 1.3s1.1.5 1.7.5c.7 0 1.3-.2 1.8-.7.4-.5.6-1.2.6-2.3V19.1h4v11.7c0 1.1-.1 2-.4 2.8s-.7 1.4-1.3 2c-.5.5-1.2 1-2 1.2-.8.3-1.6.5-2.6.5" style="fill:#fff"/></svg>

After: 3 KiB (new file)

View file

@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="gitea-microsoftonline__svg gitea-microsoftonline__gitea-microsoftonline svg gitea-microsoftonline" viewBox="0 0 2075 2499.8" width="16" height="16"><path fill="#eb3c00" d="M0 2016.6V496.8L1344.4 0 2075 233.7v2045.9l-730.6 220.3zl1344.4 161.8V409.2L467.6 613.8v1198.3z"/></svg> <svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 48 48" class="svg gitea-microsoftonline" width="16" height="16" aria-hidden="true"><path fill="url(#gitea-microsoftonline__a)" d="m20.084 3.026-.224.136a8 8 0 0 0-1.009.722l.648-.456H25L26 11l-5 5-5 3.475v4.008a8 8 0 0 0 3.857 6.844l5.264 3.186L14 40h-2.145l-3.998-2.42A8 8 0 0 1 4 30.737V17.26a8 8 0 0 1 3.86-6.846l12-7.258q.111-.068.224-.131Z"/><path fill="url(#gitea-microsoftonline__b)" d="m20.084 3.026-.224.136a8 8 0 0 0-1.009.722l.648-.456H25L26 11l-5 5-5 3.475v4.008a8 8 0 0 0 3.857 6.844l5.264 3.186L14 40h-2.145l-3.998-2.42A8 8 0 0 1 4 30.737V17.26a8 8 0 0 1 3.86-6.846l12-7.258q.111-.068.224-.131Z"/><path fill="url(#gitea-microsoftonline__c)" d="M32 19v4.48a8 8 0 0 1-3.857 6.844l-12 7.264a8 8 0 0 1-8.008.16l11.722 7.096a8 8 0 0 0 8.286 0l12-7.264A8 8 0 0 0 44 30.736V27.5L43 26z"/><path fill="url(#gitea-microsoftonline__d)" d="M32 19v4.48a8 8 0 0 1-3.857 6.844l-12 7.264a8 8 0 0 1-8.008.16l11.722 7.096a8 8 0 0 0 8.286 0l12-7.264A8 8 0 0 0 44 30.736V27.5L43 26z"/><path fill="url(#gitea-microsoftonline__e)" d="m40.14 10.415-12-7.258a8 8 0 0 0-8.042-.139l-.238.144A8 8 0 0 0 16 10.008v9.483l3.86-2.334a8 8 0 0 1 8.28 0l12 7.258A8 8 0 0 1 43.997 31q.004-.132.004-.263V17.26a8 8 0 0 0-3.86-6.845Z"/><path fill="url(#gitea-microsoftonline__f)" d="m40.14 10.415-12-7.258a8 8 0 0 0-8.042-.139l-.238.144A8 8 0 0 0 16 10.008v9.483l3.86-2.334a8 8 0 0 1 8.28 0l12 7.258A8 8 0 0 1 43.997 31q.004-.132.004-.263V17.26a8 8 0 0 0-3.86-6.845Z"/><path fill="url(#gitea-microsoftonline__g)" d="M4.004 30.998"/><path fill="url(#gitea-microsoftonline__h)" d="M4.004 30.998"/><defs><radialGradient id="gitea-microsoftonline__a" cx="0" cy="0" r="1" gradientTransform="rotate(110.528 5.021 11.358)scale(33.3657 58.1966)" gradientUnits="userSpaceOnUse"><stop offset=".064" stop-color="#AE7FE2"/><stop offset="1" stop-color="#0078D4"/></radialGradient><radialGradient id="gitea-microsoftonline__c" cx="0" cy="0" r="1" gradientTransform="matrix(30.7198 -4.51832 2.98465 20.29248 10.43 36.351)" gradientUnits="userSpaceOnUse"><stop offset=".134" stop-color="#D59DFF"/><stop offset="1" stop-color="#5E438F"/></radialGradient><radialGradient id="gitea-microsoftonline__e" cx="0" cy="0" r="1" gradientTransform="matrix(-24.1583 -6.12555 10.3118 -40.66824 41.055 26.504)" gradientUnits="userSpaceOnUse"><stop offset=".058" stop-color="#50E6FF"/><stop offset="1" stop-color="#436DCD"/></radialGradient><radialGradient id="gitea-microsoftonline__g" cx="0" cy="0" r="1" gradientTransform="matrix(-24.1583 -6.12555 10.3118 -40.66824 41.055 26.504)" gradientUnits="userSpaceOnUse"><stop offset=".058" stop-color="#50E6FF"/><stop offset="1" stop-color="#436DCD"/></radialGradient><linearGradient id="gitea-microsoftonline__b" x1="17.512" x2="12.751" y1="37.868" y2="29.635" gradientUnits="userSpaceOnUse"><stop stop-color="#114A8B"/><stop offset="1" stop-color="#0078D4" stop-opacity="0"/></linearGradient><linearGradient id="gitea-microsoftonline__d" x1="40.357" x2="35.255" y1="25.377" y2="32.692" gradientUnits="userSpaceOnUse"><stop stop-color="#493474"/><stop offset="1" stop-color="#8C66BA" stop-opacity="0"/></linearGradient><linearGradient 
id="gitea-microsoftonline__f" x1="16.976" x2="24.487" y1="3.057" y2="3.057" gradientUnits="userSpaceOnUse"><stop stop-color="#2D3F80"/><stop offset="1" stop-color="#436DCD" stop-opacity="0"/></linearGradient><linearGradient id="gitea-microsoftonline__h" x1="16.976" x2="24.487" y1="3.057" y2="3.057" gradientUnits="userSpaceOnUse"><stop stop-color="#2D3F80"/><stop offset="1" stop-color="#436DCD" stop-opacity="0"/></linearGradient></defs></svg>

Before: 342 B | After: 3.6 KiB

View file

@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" fill-rule="evenodd" stroke-linejoin="round" stroke-miterlimit="2" aria-hidden="true" class="gitea-twitter__svg gitea-twitter__gitea-twitter svg gitea-twitter" clip-rule="evenodd" viewBox="-89.009 -46.884 643.937 446.884" width="16" height="16"><path fill="#1da1f2" fill-rule="nonzero" d="M154.729 400c185.669 0 287.205-153.876 287.205-287.312 0-4.37-.089-8.72-.286-13.052A205.3 205.3 0 0 0 492 47.346c-18.087 8.044-37.55 13.458-57.968 15.899 20.841-12.501 36.84-32.278 44.389-55.852a202.4 202.4 0 0 1-64.098 24.511C395.903 12.276 369.679 0 340.641 0c-55.744 0-100.948 45.222-100.948 100.965 0 7.925.887 15.631 2.619 23.025-83.895-4.223-158.287-44.405-208.074-105.504A100.74 100.74 0 0 0 20.57 69.24c0 35.034 17.82 65.961 44.92 84.055a100.2 100.2 0 0 1-45.716-12.63c-.015.424-.015.837-.015 1.29 0 48.903 34.794 89.734 80.982 98.986a101 101 0 0 1-26.617 3.553c-6.493 0-12.821-.639-18.971-1.82 12.851 40.122 50.115 69.319 94.296 70.135-34.549 27.089-78.07 43.224-125.371 43.224A205 205 0 0 1 0 354.634c44.674 28.645 97.72 45.359 154.734 45.359"/></svg> <svg viewBox="0 0 24 24" class="svg gitea-twitter" xmlns="http://www.w3.org/2000/svg" width="16" height="16" aria-hidden="true"><path d="M14.095 10.316 22.286 1h-1.94L13.23 9.088 7.551 1H1l8.59 12.231L1 23h1.94l7.51-8.543 6 8.543H23zm-2.658 3.022-.872-1.218L3.64 2.432h2.98l5.59 7.821.869 1.219 7.265 10.166h-2.982z"/></svg>

Before: 1.1 KiB | After: 324 B

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xml:space="preserve" fill-rule="evenodd" stroke-linejoin="round" stroke-miterlimit="2" clip-rule="evenodd" viewBox="0 0 16 16" class="svg gitea-vscodium" width="16" height="16" aria-hidden="true"><path fill-rule="nonzero" d="m10.2.2.5-.3c.3 0 .5.2.7.4l.2.8-.2 1-.8 2.4c-.3 1-.4 2 0 2.9l.8-2c.2 0 .4.1.4.3l-.3 1L9.2 13l3.1-2.9c.3-.2.7-.5.8-1a2 2 0 0 0-.3-1c-.2-.5-.5-.9-.6-1.4l.1-.7c.1-.1.3-.2.5-.1.2 0 .3.2.4.4.3.5.4 1.2.5 1.8l.6-1.2c0-.2.2-.4.4-.6l.4-.2c.2 0 .4.3.4.4v.6l-.8 1.6-1.4 1.8 1-.4c.2 0 .6.2.7.5 0 .2 0 .4-.2.5-.3.2-.6.2-1 .2-1 0-2.2.6-2.9 1.4L9.6 15c-.4.4-.9 1-1.4.8-.8-.1-.8-1.3-1-1.8 0-.3-.2-.6-.4-.7-.3-.2-.5-.3-.8-.3-.6-.1-1.2 0-1.8-.2l-.8-.4-.4-.7c-.3-.6-.3-1.2-.5-1.8A4 4 0 0 0 1 8l-.4-.4v-.4c.2-.2.5-.2.7 0 .5.2.5.8 1 1.1V6.2s.3-.1.4 0l.2.5L3 9c.4-.4.6-1 .5-1.5L3.4 7l.3-.2c.2 0 .3.2.4.3v.7c0 .6-.3 1.1-.4 1.7-.2.4-.3 1-.1 1.4.1.5.5.9.9 1 .5.3 1.1.4 1.7.4-.4-.6-.7-1.2-.7-2 0-.7.4-1.3.6-2C6.3 7 5.7 5.8 4.8 5l-1.5-.7c-.4-.2-.7-.7-.7-1.2.3-.1.7 0 1 .1L5 4.5l.6.1c.2-.3 0-.6-.2-.8-.3-.5-1-.6-1.3-1a.9.9 0 0 1-.2-.8c0-.2.3-.4.5-.4.4 0 .7.3.9.5.8.8 1.2 1.8 1.4 3s0 2.5-.2 3.7c0 .3-.2.5-.1.8l.2.2c.2 0 .4 0 .5-.2.4-.3.8-.8.9-1.3l.1-1.2.1-.6.4-.2.3.3v.6c-.1.5-.2 1-.5 1.6a2 2 0 0 1-.6 1l-1 1c-.1.2-.2.6-.1.9 0 .2.2.4.4.5.4.2.8.2 1 0 .3-.1.5-.4.7-.6l.5-1.4.4-2.5C9.7 7 9.6 6 9 5.2c-.2-.4-.5-.7-1-1l-1-.8c-.2-.3-.4-.7-.3-1.2h.6c.4.1.7.4.9.8s.4.8.9 1l-1-2c-.1-.3-.3-.5-.2-.8 0-.2.2-.4.4-.4s.4.1.5.3l.2.5 1 3.1a4 4 0 0 0 .4-2.3L10 1V.2Z"/></svg>

After: 1.5 KiB (new file)

View file

@ -15,7 +15,6 @@ import (
"code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/log"
secret_module "code.gitea.io/gitea/modules/secret"
"code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/services/actions" "code.gitea.io/gitea/services/actions"
@ -32,14 +31,24 @@ func pickTask(ctx context.Context, runner *actions_model.ActionRunner) (*runnerv
return nil, false, nil return nil, false, nil
} }
secrets, err := secret_model.GetSecretsOfTask(ctx, t)
if err != nil {
return nil, false, fmt.Errorf("GetSecretsOfTask: %w", err)
}
vars, err := actions_model.GetVariablesOfRun(ctx, t.Job.Run)
if err != nil {
return nil, false, fmt.Errorf("GetVariablesOfRun: %w", err)
}
actions.CreateCommitStatus(ctx, t.Job) actions.CreateCommitStatus(ctx, t.Job)
task := &runnerv1.Task{ task := &runnerv1.Task{
Id: t.ID, Id: t.ID,
WorkflowPayload: t.Job.WorkflowPayload, WorkflowPayload: t.Job.WorkflowPayload,
Context: generateTaskContext(t), Context: generateTaskContext(t),
Secrets: getSecretsOfTask(ctx, t), Secrets: secrets,
Vars: getVariablesOfTask(ctx, t), Vars: vars,
} }
if needs, err := findTaskNeeds(ctx, t); err != nil { if needs, err := findTaskNeeds(ctx, t); err != nil {
@ -55,71 +64,6 @@ func pickTask(ctx context.Context, runner *actions_model.ActionRunner) (*runnerv
return task, true, nil return task, true, nil
} }
func getSecretsOfTask(ctx context.Context, task *actions_model.ActionTask) map[string]string {
secrets := map[string]string{}
secrets["GITHUB_TOKEN"] = task.Token
secrets["GITEA_TOKEN"] = task.Token
if task.Job.Run.IsForkPullRequest && task.Job.Run.TriggerEvent != actions_module.GithubEventPullRequestTarget {
// ignore secrets for fork pull request, except GITHUB_TOKEN and GITEA_TOKEN which are automatically generated.
// for the tasks triggered by pull_request_target event, they could access the secrets because they will run in the context of the base branch
// see the documentation: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target
return secrets
}
ownerSecrets, err := db.Find[secret_model.Secret](ctx, secret_model.FindSecretsOptions{OwnerID: task.Job.Run.Repo.OwnerID})
if err != nil {
log.Error("find secrets of owner %v: %v", task.Job.Run.Repo.OwnerID, err)
// go on
}
repoSecrets, err := db.Find[secret_model.Secret](ctx, secret_model.FindSecretsOptions{RepoID: task.Job.Run.RepoID})
if err != nil {
log.Error("find secrets of repo %v: %v", task.Job.Run.RepoID, err)
// go on
}
for _, secret := range append(ownerSecrets, repoSecrets...) {
if v, err := secret_module.DecryptSecret(setting.SecretKey, secret.Data); err != nil {
log.Error("decrypt secret %v %q: %v", secret.ID, secret.Name, err)
// go on
} else {
secrets[secret.Name] = v
}
}
return secrets
}
func getVariablesOfTask(ctx context.Context, task *actions_model.ActionTask) map[string]string {
variables := map[string]string{}
// Global
globalVariables, err := db.Find[actions_model.ActionVariable](ctx, actions_model.FindVariablesOpts{})
if err != nil {
log.Error("find global variables: %v", err)
}
// Org / User level
ownerVariables, err := db.Find[actions_model.ActionVariable](ctx, actions_model.FindVariablesOpts{OwnerID: task.Job.Run.Repo.OwnerID})
if err != nil {
log.Error("find variables of org: %d, error: %v", task.Job.Run.Repo.OwnerID, err)
}
// Repo level
repoVariables, err := db.Find[actions_model.ActionVariable](ctx, actions_model.FindVariablesOpts{RepoID: task.Job.Run.RepoID})
if err != nil {
log.Error("find variables of repo: %d, error: %v", task.Job.Run.RepoID, err)
}
// Level precedence: Repo > Org / User > Global
for _, v := range append(globalVariables, append(ownerVariables, repoVariables...)...) {
variables[v.Name] = v.Data
}
return variables
}
func generateTaskContext(t *actions_model.ActionTask) *structpb.Struct { func generateTaskContext(t *actions_model.ActionTask) *structpb.Struct {
event := map[string]any{} event := map[string]any{}
_ = json.Unmarshal([]byte(t.Job.Run.EventPayload), &event) _ = json.Unmarshal([]byte(t.Job.Run.EventPayload), &event)

View file

@ -133,7 +133,7 @@ func CreateUser(ctx *context.APIContext) {
u.UpdatedUnix = u.CreatedUnix u.UpdatedUnix = u.CreatedUnix
} }
if err := user_model.CreateUser(ctx, u, overwriteDefault); err != nil { if err := user_model.AdminCreateUser(ctx, u, overwriteDefault); err != nil {
if user_model.IsErrUserAlreadyExist(err) || if user_model.IsErrUserAlreadyExist(err) ||
user_model.IsErrEmailAlreadyUsed(err) || user_model.IsErrEmailAlreadyUsed(err) ||
db.IsErrNameReserved(err) || db.IsErrNameReserved(err) ||
@ -209,7 +209,7 @@ func EditUser(ctx *context.APIContext) {
} }
if form.Email != nil { if form.Email != nil {
if err := user_service.AddOrSetPrimaryEmailAddress(ctx, ctx.ContextUser, *form.Email); err != nil { if err := user_service.AdminAddOrSetPrimaryEmailAddress(ctx, ctx.ContextUser, *form.Email); err != nil {
switch { switch {
case user_model.IsErrEmailCharIsNotSupported(err), user_model.IsErrEmailInvalid(err): case user_model.IsErrEmailCharIsNotSupported(err), user_model.IsErrEmailInvalid(err):
ctx.Error(http.StatusBadRequest, "EmailInvalid", err) ctx.Error(http.StatusBadRequest, "EmailInvalid", err)

View file

@@ -720,7 +720,7 @@ func updateBasicProperties(ctx *context.APIContext, opts api.EditRepoOption) err
if ctx.Repo.GitRepo == nil && !repo.IsEmpty {
var err error
-ctx.Repo.GitRepo, err = gitrepo.OpenRepository(ctx, ctx.Repo.Repository)
+ctx.Repo.GitRepo, err = gitrepo.OpenRepository(ctx, repo)
if err != nil {
ctx.Error(http.StatusInternalServerError, "Unable to OpenRepository", err)
return err
@@ -731,7 +731,7 @@ func updateBasicProperties(ctx *context.APIContext, opts api.EditRepoOption) err
// Default branch only updated if changed and exist or the repository is empty
if opts.DefaultBranch != nil && repo.DefaultBranch != *opts.DefaultBranch && (repo.IsEmpty || ctx.Repo.GitRepo.IsBranchExist(*opts.DefaultBranch)) {
if !repo.IsEmpty {
-if err := ctx.Repo.GitRepo.SetDefaultBranch(*opts.DefaultBranch); err != nil {
+if err := gitrepo.SetDefaultBranch(ctx, ctx.Repo.Repository, *opts.DefaultBranch); err != nil {
if !git.IsErrUnsupportedVersion(err) {
ctx.Error(http.StatusInternalServerError, "SetDefaultBranch", err)
return err


@@ -14,7 +14,7 @@ import (
"code.gitea.io/gitea/routers/api/v1/utils"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/convert"
-files_service "code.gitea.io/gitea/services/repository/files"
+commitstatus_service "code.gitea.io/gitea/services/repository/commitstatus"
)
// NewCommitStatus creates a new CommitStatus
@@ -64,7 +64,7 @@ func NewCommitStatus(ctx *context.APIContext) {
Description: form.Description,
Context: form.Context,
}
-if err := files_service.CreateCommitStatus(ctx, ctx.Repo.Repository, ctx.Doer, sha, status); err != nil {
+if err := commitstatus_service.CreateCommitStatus(ctx, ctx.Repo.Repository, ctx.Doer, sha, status); err != nil {
ctx.Error(http.StatusInternalServerError, "CreateCommitStatus", err)
return
}


@@ -5,6 +5,7 @@ package user
import (
std_ctx "context"
"fmt"
"net/http"
asymkey_model "code.gitea.io/gitea/models/asymkey"
@@ -198,6 +199,11 @@ func GetPublicKey(ctx *context.APIContext) {
// CreateUserPublicKey creates new public key to given user by ID.
func CreateUserPublicKey(ctx *context.APIContext, form api.CreateKeyOption, uid int64) {
if setting.Admin.UserDisabledFeatures.Contains(setting.UserFeatureManageSSHKeys) {
ctx.NotFound("Not Found", fmt.Errorf("ssh keys setting is not allowed to be visited"))
return
}
content, err := asymkey_model.CheckPublicKeyString(form.Key)
if err != nil {
repo.HandleCheckKeyStringError(ctx, err)
@@ -263,6 +269,11 @@ func DeletePublicKey(ctx *context.APIContext) {
// "404":
// "$ref": "#/responses/notFound"
if setting.Admin.UserDisabledFeatures.Contains(setting.UserFeatureManageSSHKeys) {
ctx.NotFound("Not Found", fmt.Errorf("ssh keys setting is not allowed to be visited"))
return
}
id := ctx.ParamsInt64(":id")
externallyManaged, err := asymkey_model.PublicKeyIsExternallyManaged(ctx, id)
if err != nil {


@@ -9,6 +9,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/private"
gitea_context "code.gitea.io/gitea/services/context"
)
@@ -20,7 +21,7 @@ func SetDefaultBranch(ctx *gitea_context.PrivateContext) {
branch := ctx.Params(":branch")
ctx.Repo.Repository.DefaultBranch = branch
-if err := ctx.Repo.GitRepo.SetDefaultBranch(ctx.Repo.Repository.DefaultBranch); err != nil {
+if err := gitrepo.SetDefaultBranch(ctx, ctx.Repo.Repository, ctx.Repo.Repository.DefaultBranch); err != nil {
if !git.IsErrUnsupportedVersion(err) {
ctx.JSON(http.StatusInternalServerError, private.Response{
Err: fmt.Sprintf("Unable to set default branch on repository: %s/%s Error: %v", ownerName, repoName, err),


@@ -177,7 +177,7 @@ func NewUserPost(ctx *context.Context) {
u.MustChangePassword = form.MustChangePassword
}
-if err := user_model.CreateUser(ctx, u, overwriteDefault); err != nil {
+if err := user_model.AdminCreateUser(ctx, u, overwriteDefault); err != nil {
switch {
case user_model.IsErrUserAlreadyExist(err):
ctx.Data["Err_UserName"] = true
@@ -412,7 +412,7 @@ func EditUserPost(ctx *context.Context) {
}
if form.Email != "" {
-if err := user_service.AddOrSetPrimaryEmailAddress(ctx, u, form.Email); err != nil {
+if err := user_service.AdminAddOrSetPrimaryEmailAddress(ctx, u, form.Email); err != nil {
switch {
case user_model.IsErrEmailCharIsNotSupported(err), user_model.IsErrEmailInvalid(err):
ctx.Data["Err_Email"] = true


@@ -135,9 +135,21 @@ func resetLocale(ctx *context.Context, u *user_model.User) error {
return nil
}
func RedirectAfterLogin(ctx *context.Context) {
redirectTo := ctx.FormString("redirect_to")
if redirectTo == "" {
redirectTo = ctx.GetSiteCookie("redirect_to")
}
middleware.DeleteRedirectToCookie(ctx.Resp)
nextRedirectTo := setting.AppSubURL + string(setting.LandingPageURL)
if setting.LandingPageURL == setting.LandingPageLogin {
nextRedirectTo = setting.AppSubURL + "/" // do not cycle-redirect to the login page
}
ctx.RedirectToFirst(redirectTo, nextRedirectTo)
}
func CheckAutoLogin(ctx *context.Context) bool {
-// Check auto-login
-isSucceed, err := autoSignIn(ctx)
+isSucceed, err := autoSignIn(ctx) // try to auto-login
if err != nil {
ctx.ServerError("autoSignIn", err)
return true
@@ -146,17 +158,10 @@ func CheckAutoLogin(ctx *context.Context) bool {
redirectTo := ctx.FormString("redirect_to")
if len(redirectTo) > 0 {
middleware.SetRedirectToCookie(ctx.Resp, redirectTo)
-} else {
-redirectTo = ctx.GetSiteCookie("redirect_to")
}
if isSucceed {
-middleware.DeleteRedirectToCookie(ctx.Resp)
-nextRedirectTo := setting.AppSubURL + string(setting.LandingPageURL)
-if setting.LandingPageURL == setting.LandingPageLogin {
-nextRedirectTo = setting.AppSubURL + "/" // do not cycle-redirect to the login page
-}
-ctx.RedirectToFirst(redirectTo, nextRedirectTo)
+RedirectAfterLogin(ctx)
return true
}
@@ -171,6 +176,11 @@ func SignIn(ctx *context.Context) {
return
}
if ctx.IsSigned {
RedirectAfterLogin(ctx)
return
}
oauth2Providers, err := oauth2.GetOAuth2Providers(ctx, optional.Some(true))
if err != nil {
ctx.ServerError("UserSignIn", err)


@@ -0,0 +1,43 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"net/http"
"net/url"
"testing"
"code.gitea.io/gitea/modules/test"
"code.gitea.io/gitea/services/contexttest"
"github.com/stretchr/testify/assert"
)
func TestUserLogin(t *testing.T) {
ctx, resp := contexttest.MockContext(t, "/user/login")
SignIn(ctx)
assert.Equal(t, http.StatusOK, resp.Code)
ctx, resp = contexttest.MockContext(t, "/user/login")
ctx.IsSigned = true
SignIn(ctx)
assert.Equal(t, http.StatusSeeOther, resp.Code)
assert.Equal(t, "/", test.RedirectURL(resp))
ctx, resp = contexttest.MockContext(t, "/user/login?redirect_to=/other")
ctx.IsSigned = true
SignIn(ctx)
assert.Equal(t, "/other", test.RedirectURL(resp))
ctx, resp = contexttest.MockContext(t, "/user/login")
ctx.Req.AddCookie(&http.Cookie{Name: "redirect_to", Value: "/other-cookie"})
ctx.IsSigned = true
SignIn(ctx)
assert.Equal(t, "/other-cookie", test.RedirectURL(resp))
ctx, resp = contexttest.MockContext(t, "/user/login?redirect_to="+url.QueryEscape("https://example.com"))
ctx.IsSigned = true
SignIn(ctx)
assert.Equal(t, "/", test.RedirectURL(resp))
}


@@ -35,7 +35,7 @@ func Code(ctx *context.Context) {
keyword := ctx.FormTrim("q")
queryType := ctx.FormTrim("t")
-isMatch := queryType == "match"
+isFuzzy := queryType != "match"
ctx.Data["Keyword"] = keyword
ctx.Data["Language"] = language
@@ -77,7 +77,7 @@ func Code(ctx *context.Context) {
)
if (len(repoIDs) > 0) || isAdmin {
-total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(ctx, repoIDs, language, keyword, page, setting.UI.RepoSearchPagingNum, isMatch)
+total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(ctx, repoIDs, language, keyword, page, setting.UI.RepoSearchPagingNum, isFuzzy)
if err != nil {
if code_indexer.IsAvailable(ctx) {
ctx.ServerError("SearchResults", err)


@@ -35,6 +35,7 @@ import (
"code.gitea.io/gitea/services/forms"
repo_service "code.gitea.io/gitea/services/repository"
archiver_service "code.gitea.io/gitea/services/repository/archiver"
commitstatus_service "code.gitea.io/gitea/services/repository/commitstatus"
)
const (
@@ -630,30 +631,14 @@ func SearchRepo(ctx *context.Context) {
return
}
-// collect the latest commit of each repo
-// at most there are dozens of repos (limited by MaxResponseItems), so it's not a big problem at the moment
-repoBranchNames := make(map[int64]string, len(repos))
-for _, repo := range repos {
-repoBranchNames[repo.ID] = repo.DefaultBranch
-}
-repoIDsToLatestCommitSHAs, err := git_model.FindBranchesByRepoAndBranchName(ctx, repoBranchNames)
+latestCommitStatuses, err := commitstatus_service.FindReposLastestCommitStatuses(ctx, repos)
if err != nil {
-log.Error("FindBranchesByRepoAndBranchName: %v", err)
+log.Error("FindReposLastestCommitStatuses: %v", err)
return
}
-// call the database O(1) times to get the commit statuses for all repos
-repoToItsLatestCommitStatuses, err := git_model.GetLatestCommitStatusForPairs(ctx, repoIDsToLatestCommitSHAs, db.ListOptionsAll)
-if err != nil {
-log.Error("GetLatestCommitStatusForPairs: %v", err)
-return
-}
results := make([]*repo_service.WebSearchRepository, len(repos))
for i, repo := range repos {
-latestCommitStatus := git_model.CalcCommitStatus(repoToItsLatestCommitStatuses[repo.ID])
results[i] = &repo_service.WebSearchRepository{
Repository: &api.Repository{
ID: repo.ID,
@@ -667,8 +652,11 @@ func SearchRepo(ctx *context.Context) {
Link: repo.Link(),
Internal: !repo.IsPrivate && repo.Owner.Visibility == api.VisibleTypePrivate,
},
-LatestCommitStatus: latestCommitStatus,
-LocaleLatestCommitStatus: latestCommitStatus.LocaleString(ctx.Locale),
+}
if latestCommitStatuses[i] != nil {
results[i].LatestCommitStatus = latestCommitStatuses[i]
results[i].LocaleLatestCommitStatus = latestCommitStatuses[i].LocaleString(ctx.Locale)
}
}


@@ -21,7 +21,7 @@ func Search(ctx *context.Context) {
keyword := ctx.FormTrim("q")
queryType := ctx.FormTrim("t")
-isMatch := queryType == "match"
+isFuzzy := queryType != "match"
ctx.Data["Keyword"] = keyword
ctx.Data["Language"] = language
@@ -44,7 +44,7 @@ func Search(ctx *context.Context) {
ctx.Data["CodeIndexerEnabled"] = true
total, searchResults, searchResultLanguages, err := code_indexer.PerformSearch(ctx, []int64{ctx.Repo.Repository.ID},
-language, keyword, page, setting.UI.RepoSearchPagingNum, isMatch)
+language, keyword, page, setting.UI.RepoSearchPagingNum, isFuzzy)
if err != nil {
if code_indexer.IsAvailable(ctx) {
ctx.ServerError("SearchResults", err)


@@ -8,6 +8,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/routers/web/repo"
@@ -40,7 +41,7 @@ func SetDefaultBranchPost(ctx *context.Context) {
return
} else if repo.DefaultBranch != branch {
repo.DefaultBranch = branch
-if err := ctx.Repo.GitRepo.SetDefaultBranch(branch); err != nil {
+if err := gitrepo.SetDefaultBranch(ctx, repo, branch); err != nil {
if !git.IsErrUnsupportedVersion(err) {
ctx.ServerError("SetDefaultBranch", err)
return


@@ -36,6 +36,7 @@ import (
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/highlight"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
@@ -996,6 +997,8 @@ func renderCode(ctx *context.Context) {
return
}
checkOutdatedBranch(ctx)
checkCitationFile(ctx, entry)
if ctx.Written() {
return
@@ -1071,7 +1074,7 @@ func renderCode(ctx *context.Context) {
if err != nil {
continue
}
-defaultBranch, err := gitRepo.GetDefaultBranch()
+defaultBranch, err := gitrepo.GetDefaultBranch(ctx, repo)
if err != nil {
continue
}
@@ -1121,6 +1124,31 @@ PostRecentBranchCheck:
ctx.HTML(http.StatusOK, tplRepoHome)
}
func checkOutdatedBranch(ctx *context.Context) {
if !(ctx.Repo.IsAdmin() || ctx.Repo.IsOwner()) {
return
}
// get the head commit of the branch since ctx.Repo.CommitID is not always the head commit of `ctx.Repo.BranchName`
commit, err := ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.BranchName)
if err != nil {
log.Error("GetBranchCommitID: %v", err)
// Don't return an error page, as it can be rechecked the next time the user opens the page.
return
}
dbBranch, err := git_model.GetBranch(ctx, ctx.Repo.Repository.ID, ctx.Repo.BranchName)
if err != nil {
log.Error("GetBranch: %v", err)
// Don't return an error page, as it can be rechecked the next time the user opens the page.
return
}
if dbBranch.CommitID != commit.ID.String() {
ctx.Flash.Warning(ctx.Tr("repo.error.broken_git_hook", "https://docs.gitea.com/help/faq#push-hook--webhook--actions-arent-running"), true)
}
}
// RenderUserCards render a page show users according the input template
func RenderUserCards(ctx *context.Context, total int, getter func(opts db.ListOptions) ([]*user_model.User, error), tpl base.TplName) {
page := ctx.FormInt("page")


@@ -79,7 +79,7 @@ func assertPagesMetas(t *testing.T, expectedNames []string, metas any) {
func TestWiki(t *testing.T) {
unittest.PrepareTestEnv(t)
-ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/?action=_pages")
+ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki")
ctx.SetParams("*", "Home")
contexttest.LoadRepo(t, ctx, 1)
Wiki(ctx)


@@ -40,7 +40,7 @@ func CodeSearch(ctx *context.Context) {
keyword := ctx.FormTrim("q")
queryType := ctx.FormTrim("t")
-isMatch := queryType == "match"
+isFuzzy := queryType != "match"
ctx.Data["Keyword"] = keyword
ctx.Data["Language"] = language
@@ -75,7 +75,7 @@ func CodeSearch(ctx *context.Context) {
)
if len(repoIDs) > 0 {
-total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(ctx, repoIDs, language, keyword, page, setting.UI.RepoSearchPagingNum, isMatch)
+total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(ctx, repoIDs, language, keyword, page, setting.UI.RepoSearchPagingNum, isFuzzy)
if err != nil {
if code_indexer.IsAvailable(ctx) {
ctx.ServerError("SearchResults", err)


@@ -19,6 +19,8 @@ import (
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/services/auth"
"code.gitea.io/gitea/services/auth/source/db"
"code.gitea.io/gitea/services/auth/source/smtp"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/forms"
"code.gitea.io/gitea/services/mailer"
@@ -251,11 +253,24 @@ func DeleteAccount(ctx *context.Context) {
ctx.Data["PageIsSettingsAccount"] = true
if _, _, err := auth.UserSignIn(ctx, ctx.Doer.Name, ctx.FormString("password")); err != nil {
-if user_model.IsErrUserNotExist(err) {
+switch {
case user_model.IsErrUserNotExist(err):
loadAccountData(ctx)
ctx.RenderWithErr(ctx.Tr("form.user_not_exist"), tplSettingsAccount, nil)
case errors.Is(err, smtp.ErrUnsupportedLoginType):
loadAccountData(ctx)
ctx.RenderWithErr(ctx.Tr("form.unsupported_login_type"), tplSettingsAccount, nil)
case errors.As(err, &db.ErrUserPasswordNotSet{}):
loadAccountData(ctx)
ctx.RenderWithErr(ctx.Tr("form.unset_password"), tplSettingsAccount, nil)
case errors.As(err, &db.ErrUserPasswordInvalid{}):
loadAccountData(ctx)
ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_password"), tplSettingsAccount, nil)
-} else {
+default:
ctx.ServerError("UserSignIn", err)
}
return
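The switch above matches sign-in failures with errors.Is for the sentinel error and errors.As for the struct-typed errors, so errors that have been wrapped along the way are still recognized. Below is a small standalone sketch of that pattern; the error types and messages are stand-ins, not the actual services/auth/source/db definitions.

// Illustrative sketch only: errors.Is for sentinel errors, errors.As for typed errors.
package main

import (
	"errors"
	"fmt"
)

var ErrUnsupportedLoginType = errors.New("unsupported login type") // stand-in sentinel

type ErrUserPasswordInvalid struct{ UID int64 } // stand-in typed error

func (e ErrUserPasswordInvalid) Error() string {
	return fmt.Sprintf("invalid password for user %d", e.UID)
}

func classify(err error) string {
	switch {
	case errors.Is(err, ErrUnsupportedLoginType):
		return "form.unsupported_login_type"
	case errors.As(err, &ErrUserPasswordInvalid{}):
		return "form.enterred_invalid_password"
	default:
		return "server error"
	}
}

func main() {
	wrapped := fmt.Errorf("UserSignIn: %w", ErrUserPasswordInvalid{UID: 2})
	fmt.Println(classify(wrapped)) // prints "form.enterred_invalid_password"
}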


@@ -159,6 +159,11 @@ func KeysPost(ctx *context.Context) {
ctx.Flash.Success(ctx.Tr("settings.verify_gpg_key_success", keyID))
ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
case "ssh":
if setting.Admin.UserDisabledFeatures.Contains(setting.UserFeatureManageSSHKeys) {
ctx.NotFound("Not Found", fmt.Errorf("ssh keys setting is not allowed to be visited"))
return
}
content, err := asymkey_model.CheckPublicKeyString(form.Content)
if err != nil {
if db.IsErrSSHDisabled(err) {
@@ -198,6 +203,11 @@ func KeysPost(ctx *context.Context) {
ctx.Flash.Success(ctx.Tr("settings.add_key_success", form.Title))
ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
case "verify_ssh":
if setting.Admin.UserDisabledFeatures.Contains(setting.UserFeatureManageSSHKeys) {
ctx.NotFound("Not Found", fmt.Errorf("ssh keys setting is not allowed to be visited"))
return
}
token := asymkey_model.VerificationToken(ctx.Doer, 1)
lastToken := asymkey_model.VerificationToken(ctx.Doer, 0)
@@ -240,6 +250,11 @@ func DeleteKey(ctx *context.Context) {
ctx.Flash.Success(ctx.Tr("settings.gpg_key_deletion_success"))
}
case "ssh":
if setting.Admin.UserDisabledFeatures.Contains(setting.UserFeatureManageSSHKeys) {
ctx.NotFound("Not Found", fmt.Errorf("ssh keys setting is not allowed to be visited"))
return
}
keyID := ctx.FormInt64("id")
external, err := asymkey_model.PublicKeyIsExternallyManaged(ctx, keyID)
if err != nil {
@@ -318,4 +333,5 @@ func loadKeysData(ctx *context.Context) {
ctx.Data["VerifyingID"] = ctx.FormString("verify_gpg")
ctx.Data["VerifyingFingerprint"] = ctx.FormString("verify_ssh")
ctx.Data["UserDisabledFeatures"] = &setting.Admin.UserDisabledFeatures
}


@@ -9,6 +9,7 @@ import (
"strings"
"time"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
@@ -21,17 +22,41 @@ type actionsClaims struct {
TaskID int64
RunID int64
JobID int64
Ac string `json:"ac"`
}
type actionsCacheScope struct {
Scope string
Permission actionsCachePermission
}
type actionsCachePermission int
const (
actionsCachePermissionRead = 1 << iota
actionsCachePermissionWrite
)
func CreateAuthorizationToken(taskID, runID, jobID int64) (string, error) {
now := time.Now()
ac, err := json.Marshal(&[]actionsCacheScope{
{
Scope: "",
Permission: actionsCachePermissionWrite,
},
})
if err != nil {
return "", err
}
claims := actionsClaims{
RegisteredClaims: jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(now.Add(24 * time.Hour)),
NotBefore: jwt.NewNumericDate(now),
},
Scp: fmt.Sprintf("Actions.Results:%d:%d", runID, jobID),
Ac: string(ac),
TaskID: taskID,
RunID: runID,
JobID: jobID,
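The permissions added above are bit flags built with 1 << iota, and the scopes are serialized into the ac claim as a JSON string so that cache clients (for example buildx's gha cache backend) can inspect them. A minimal sketch of combining and testing those bits follows; the hasWrite helper is illustrative and not part of this diff.

// Illustrative sketch only: bit-flag permissions and the JSON shape carried in the "ac" claim.
package main

import (
	"encoding/json"
	"fmt"
)

type actionsCachePermission int

const (
	actionsCachePermissionRead  actionsCachePermission = 1 << iota // 1
	actionsCachePermissionWrite                                    // 2
)

type actionsCacheScope struct {
	Scope      string
	Permission actionsCachePermission
}

// hasWrite tests the write bit with a bitwise AND.
func hasWrite(p actionsCachePermission) bool {
	return p&actionsCachePermissionWrite != 0
}

func main() {
	scopes := []actionsCacheScope{
		{Scope: "", Permission: actionsCachePermissionRead | actionsCachePermissionWrite},
	}
	ac, _ := json.Marshal(scopes)
	fmt.Println(string(ac))                     // [{"Scope":"","Permission":3}]
	fmt.Println(hasWrite(scopes[0].Permission)) // true
}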


@@ -7,6 +7,7 @@ import (
"net/http"
"testing"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/setting"
"github.com/golang-jwt/jwt/v5"
@@ -29,6 +30,14 @@ func TestCreateAuthorizationToken(t *testing.T) {
taskIDClaim, ok := claims["TaskID"]
assert.True(t, ok, "Has TaskID claim in jwt token")
assert.Equal(t, float64(taskID), taskIDClaim, "Supplied taskid must match stored one")
acClaim, ok := claims["ac"]
assert.True(t, ok, "Has ac claim in jwt token")
ac, ok := acClaim.(string)
assert.True(t, ok, "ac claim is a string for buildx gha cache")
scopes := []actionsCacheScope{}
err = json.Unmarshal([]byte(ac), &scopes)
assert.NoError(t, err, "ac claim is a json list for buildx gha cache")
assert.GreaterOrEqual(t, len(scopes), 1, "Expected at least one action cache scope for buildx gha cache")
}
func TestParseAuthorizationToken(t *testing.T) {


@@ -305,7 +305,18 @@ func handleWorkflows(
run.NeedApproval = need
}
-jobs, err := jobparser.Parse(dwf.Content)
+if err := run.LoadAttributes(ctx); err != nil {
log.Error("LoadAttributes: %v", err)
continue
}
vars, err := actions_model.GetVariablesOfRun(ctx, run)
if err != nil {
log.Error("GetVariablesOfRun: %v", err)
continue
}
jobs, err := jobparser.Parse(dwf.Content, jobparser.WithVars(vars))
if err != nil {
log.Error("jobparser.Parse: %v", err)
continue


@@ -59,7 +59,7 @@ func (p *AuthSourceProvider) DisplayName() string {
func (p *AuthSourceProvider) IconHTML(size int) template.HTML {
if p.iconURL != "" {
-img := fmt.Sprintf(`<img class="gt-object-contain gt-mr-3" width="%d" height="%d" src="%s" alt="%s">`,
+img := fmt.Sprintf(`<img class="tw-object-contain gt-mr-3" width="%d" height="%d" src="%s" alt="%s">`,
size,
size,
html.EscapeString(p.iconURL), html.EscapeString(p.DisplayName()),

Some files were not shown because too many files have changed in this diff.