forked from gitea/gitea

Add codeowners feature (#24910)

Hello. This PR adds a GitHub-like configuration for the CODEOWNERS file. Resolves: #10161

parent b5a2bb9ab3
commit 3bdd48016f

@@ -0,0 +1,65 @@
---
date: "2023-05-24T16:00:00+00:00"
title: "Code Owners"
slug: "code-owners"
weight: 30
toc: false
draft: false
aliases:
  - /en-us/code-owners
menu:
  sidebar:
    parent: "usage"
    name: "Code Owners"
    weight: 30
    identifier: "code-owners"
---

# Code Owners

Gitea supports CODEOWNERS files. It looks for one in the following locations, in this order:

- `./CODEOWNERS`
- `./docs/CODEOWNERS`
- `./.gitea/CODEOWNERS`

It stops at the first file it finds.

File format: `<regexp rule> <@user or @org/team> [@user or @org/team]...`

The regexp is written in Go regexp syntax.
A regexp can start with `!` to make a negative rule: it matches all files except those the pattern matches.

Example file:

```
.*\\.go @user1 @user2 # This is a comment

# This is a comment too
# You can assign code ownership to users or to teams
frontend/src/.*\\.js @org1/team1 @org1/team2 @user3

# You can use negative patterns
!frontend/src/.* @org1/team3 @user5

# You can use the full power of Go regexps
docs/(aws|google|azure)/[^/]*\\.(md|txt) @user8 @org1/team4
!/assets/.*\\.(bin|exe|msi) @user9
```
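
Internally, each rule is compiled as a full-match Go regexp (the parser below wraps the pattern in `^...$`), and a rule covers a changed file when the pattern matches it, or, for a `!` rule, when it does not. A minimal sketch of that behaviour, assuming `pattern` is the token after unescaping; the helper `matchRule` is only for illustration and is not part of this PR:

```
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// matchRule reports whether a changed file path is covered by a CODEOWNERS
// pattern. A leading "!" makes the rule negative: it covers every path the
// pattern does NOT match.
func matchRule(pattern, path string) (bool, error) {
	negative := strings.HasPrefix(pattern, "!")
	re, err := regexp.Compile("^" + strings.TrimPrefix(pattern, "!") + "$")
	if err != nil {
		return false, err
	}
	return re.MatchString(path) != negative, nil
}

func main() {
	fmt.Println(matchRule(`frontend/src/.*\.js`, "frontend/src/app.js")) // true <nil>
	fmt.Println(matchRule(`!frontend/src/.*`, "docs/readme.md"))         // true <nil>
}
```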

### Escaping

You can escape the characters `#`, ` ` (space) and `\` with `\`, like:

```
dir/with\#hashtag @user1
path\ with\ space @user2
path/with\\backslash @user3
```

Some characters (`.+*?()|[]{}^$\`) must be escaped with `\\` inside a regexp, like:

```
path/\\.with\\.dots
path/with\\+plus
```
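
For the escaped examples above, the tokenizer added by this PR (`TokenizeCodeOwnersLine`, shown further down) yields plain tokens with the escapes resolved. A hedged sketch, assuming the `issues_model` import alias used in the tests:

```
tokens := issues_model.TokenizeCodeOwnersLine(`dir/with\#hashtag @user1`)
// tokens == []string{"dir/with#hashtag", "@user1"}
```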

@@ -8,11 +8,13 @@ import (
	"context"
	"fmt"
	"io"
	"regexp"
	"strconv"
	"strings"

	"code.gitea.io/gitea/models/db"
	git_model "code.gitea.io/gitea/models/git"
	org_model "code.gitea.io/gitea/models/organization"
	pull_model "code.gitea.io/gitea/models/pull"
	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"

@@ -887,3 +889,222 @@ func MergeBlockedByOfficialReviewRequests(ctx context.Context, protectBranch *gi
func MergeBlockedByOutdatedBranch(protectBranch *git_model.ProtectedBranch, pr *PullRequest) bool {
	return protectBranch.BlockOnOutdatedBranch && pr.CommitsBehind > 0
}

func PullRequestCodeOwnersReview(ctx context.Context, pull *Issue, pr *PullRequest) error {
	files := []string{"CODEOWNERS", "docs/CODEOWNERS", ".gitea/CODEOWNERS"}

	if pr.IsWorkInProgress() {
		return nil
	}

	if err := pr.LoadBaseRepo(ctx); err != nil {
		return err
	}

	repo, err := git.OpenRepository(ctx, pr.BaseRepo.RepoPath())
	if err != nil {
		return err
	}
	defer repo.Close()

	branch, err := repo.GetDefaultBranch()
	if err != nil {
		return err
	}

	commit, err := repo.GetBranchCommit(branch)
	if err != nil {
		return err
	}

	var data string
	for _, file := range files {
		if blob, err := commit.GetBlobByPath(file); err == nil {
			data, err = blob.GetBlobContent()
			if err == nil {
				break
			}
		}
	}

	rules, _ := GetCodeOwnersFromContent(ctx, data)
	changedFiles, err := repo.GetFilesChangedBetween(git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName())
	if err != nil {
		return err
	}

	uniqUsers := make(map[int64]*user_model.User)
	uniqTeams := make(map[string]*org_model.Team)
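	// A rule's owners apply to a changed file when the pattern matches it,
	// or, for a negative rule, when the pattern does not match it.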
	for _, rule := range rules {
		for _, f := range changedFiles {
			if (rule.Rule.MatchString(f) && !rule.Negative) || (!rule.Rule.MatchString(f) && rule.Negative) {
				for _, u := range rule.Users {
					uniqUsers[u.ID] = u
				}
				for _, t := range rule.Teams {
					uniqTeams[fmt.Sprintf("%d/%d", t.OrgID, t.ID)] = t
				}
			}
		}
	}

	for _, u := range uniqUsers {
		if u.ID != pull.Poster.ID {
			if _, err := AddReviewRequest(pull, u, pull.Poster); err != nil {
				log.Warn("Failed add assignee user: %s to PR review: %s#%d, error: %s", u.Name, pr.BaseRepo.Name, pr.ID, err)
				return err
			}
		}
	}
	for _, t := range uniqTeams {
		if _, err := AddTeamReviewRequest(pull, t, pull.Poster); err != nil {
			log.Warn("Failed add assignee team: %s to PR review: %s#%d, error: %s", t.Name, pr.BaseRepo.Name, pr.ID, err)
			return err
		}
	}

	return nil
}

// GetCodeOwnersFromContent returns the code owners configuration
// Return empty slice if files missing
// Return warning messages on parsing errors
// We're trying to do the best we can when parsing a file.
// Invalid lines are skipped. Non-existent users and teams too.
func GetCodeOwnersFromContent(ctx context.Context, data string) ([]*CodeOwnerRule, []string) {
	if len(data) == 0 {
		return nil, nil
	}

	rules := make([]*CodeOwnerRule, 0)
	lines := strings.Split(data, "\n")
	warnings := make([]string, 0)

	for i, line := range lines {
		tokens := TokenizeCodeOwnersLine(line)
		if len(tokens) == 0 {
			continue
		} else if len(tokens) < 2 {
			warnings = append(warnings, fmt.Sprintf("Line: %d: incorrect format", i+1))
			continue
		}
		rule, wr := ParseCodeOwnersLine(ctx, tokens)
		for _, w := range wr {
			warnings = append(warnings, fmt.Sprintf("Line: %d: %s", i+1, w))
		}
		if rule == nil {
			continue
		}

		rules = append(rules, rule)
	}

	return rules, warnings
}

type CodeOwnerRule struct {
	Rule     *regexp.Regexp
	Negative bool
	Users    []*user_model.User
	Teams    []*org_model.Team
}

func ParseCodeOwnersLine(ctx context.Context, tokens []string) (*CodeOwnerRule, []string) {
	var err error
	rule := &CodeOwnerRule{
		Users:    make([]*user_model.User, 0),
		Teams:    make([]*org_model.Team, 0),
		Negative: strings.HasPrefix(tokens[0], "!"),
	}

	warnings := make([]string, 0)
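
	// The pattern is anchored so it has to match the whole changed-file path.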
	rule.Rule, err = regexp.Compile(fmt.Sprintf("^%s$", strings.TrimPrefix(tokens[0], "!")))
	if err != nil {
		warnings = append(warnings, fmt.Sprintf("incorrect codeowner regexp: %s", err))
		return nil, warnings
	}

	for _, user := range tokens[1:] {
		user = strings.TrimPrefix(user, "@")

		// Only @org/team can contain slashes
		if strings.Contains(user, "/") {
			s := strings.Split(user, "/")
			if len(s) != 2 {
				warnings = append(warnings, fmt.Sprintf("incorrect codeowner group: %s", user))
				continue
			}
			orgName := s[0]
			teamName := s[1]

			org, err := org_model.GetOrgByName(ctx, orgName)
			if err != nil {
				warnings = append(warnings, fmt.Sprintf("incorrect codeowner organization: %s", user))
				continue
			}
			teams, err := org.LoadTeams()
			if err != nil {
				warnings = append(warnings, fmt.Sprintf("incorrect codeowner team: %s", user))
				continue
			}

			for _, team := range teams {
				if team.Name == teamName {
					rule.Teams = append(rule.Teams, team)
				}
			}
		} else {
			u, err := user_model.GetUserByName(ctx, user)
			if err != nil {
				warnings = append(warnings, fmt.Sprintf("incorrect codeowner user: %s", user))
				continue
			}
			rule.Users = append(rule.Users, u)
		}
	}

	if (len(rule.Users) == 0) && (len(rule.Teams) == 0) {
		warnings = append(warnings, "no users/groups matched")
		return nil, warnings
	}

	return rule, warnings
}

func TokenizeCodeOwnersLine(line string) []string {
	if len(line) == 0 {
		return nil
	}

	line = strings.TrimSpace(line)
	line = strings.ReplaceAll(line, "\t", " ")

	tokens := make([]string, 0)

	escape := false
	token := ""
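	// Scanner: "\" escapes the next character, an unescaped "#" starts a comment
	// and ends the line, and unescaped spaces separate tokens.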
	for _, char := range line {
		if escape {
			token += string(char)
			escape = false
		} else if string(char) == "\\" {
			escape = true
		} else if string(char) == "#" {
			break
		} else if string(char) == " " {
			if len(token) > 0 {
				tokens = append(tokens, token)
				token = ""
			}
		} else {
			token += string(char)
		}
	}

	if len(token) > 0 {
		tokens = append(tokens, token)
	}

	return tokens
}

@@ -303,3 +303,25 @@ func TestDeleteOrphanedObjects(t *testing.T) {
	assert.NoError(t, err)
	assert.EqualValues(t, countBefore, countAfter)
}

func TestParseCodeOwnersLine(t *testing.T) {
	type CodeOwnerTest struct {
		Line   string
		Tokens []string
	}

	given := []CodeOwnerTest{
		{Line: "", Tokens: nil},
		{Line: "# comment", Tokens: []string{}},
		{Line: "!.* @user1 @org1/team1", Tokens: []string{"!.*", "@user1", "@org1/team1"}},
		{Line: `.*\\.js @user2 #comment`, Tokens: []string{`.*\.js`, "@user2"}},
		{Line: `docs/(aws|google|azure)/[^/]*\\.(md|txt) @user3 @org2/team2`, Tokens: []string{`docs/(aws|google|azure)/[^/]*\.(md|txt)`, "@user3", "@org2/team2"}},
		{Line: `\#path @user3`, Tokens: []string{`#path`, "@user3"}},
		{Line: `path\ with\ spaces/ @user3`, Tokens: []string{`path with spaces/`, "@user3"}},
	}

	for _, g := range given {
		tokens := issues_model.TokenizeCodeOwnersLine(g.Line)
		assert.Equal(t, g.Tokens, tokens, "Codeowners tokenizer failed")
	}
}

@@ -21,6 +21,7 @@ import (
	asymkey_model "code.gitea.io/gitea/models/asymkey"
	"code.gitea.io/gitea/models/db"
	git_model "code.gitea.io/gitea/models/git"
	issue_model "code.gitea.io/gitea/models/issues"
	repo_model "code.gitea.io/gitea/models/repo"
	unit_model "code.gitea.io/gitea/models/unit"
	user_model "code.gitea.io/gitea/models/user"

@@ -361,6 +362,13 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st
			if workFlowErr != nil {
				ctx.Data["FileError"] = ctx.Locale.Tr("actions.runs.invalid_workflow_helper", workFlowErr.Error())
			}
		} else if util.SliceContains([]string{"CODEOWNERS", "docs/CODEOWNERS", ".gitea/CODEOWNERS"}, ctx.Repo.TreePath) {
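			// Surface CODEOWNERS parsing warnings when the file is viewed in the repo UI.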
			if data, err := blob.GetBlobContent(); err == nil {
				_, warnings := issue_model.GetCodeOwnersFromContent(ctx, data)
				if len(warnings) > 0 {
					ctx.Data["FileWarning"] = strings.Join(warnings, "\n")
				}
			}
		}

		isDisplayingSource := ctx.FormString("display") == "source"

@@ -57,6 +57,12 @@ func ChangeTitle(ctx context.Context, issue *issues_model.Issue, doer *user_mode
		return
	}
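
	// If a title change takes the PR out of WIP state, request reviews from code owners.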
	if issue.IsPull && issues_model.HasWorkInProgressPrefix(oldTitle) && !issues_model.HasWorkInProgressPrefix(title) {
		if err = issues_model.PullRequestCodeOwnersReview(ctx, issue, issue.PullRequest); err != nil {
			return
		}
	}

	notification.NotifyIssueChangeTitle(ctx, doer, issue, oldTitle)

	return nil

@@ -123,6 +123,13 @@ func NewPullRequest(ctx context.Context, repo *repo_model.Repository, pull *issu
		}

		_, _ = issue_service.CreateComment(ctx, ops)
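
		// Request reviews from code owners for newly opened pull requests that are not WIP.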
		if !pr.IsWorkInProgress() {
			if err := issues_model.PullRequestCodeOwnersReview(ctx, pull, pr); err != nil {
				return err
			}
		}

	}

	return nil