// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2018 The Gitea Authors. All rights reserved.
// Copyright 2024 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package repo

import (
	"errors"
	"fmt"
	"html"
	"net/http"
	"net/url"
	"strconv"
	"strings"
	"time"

	"code.gitea.io/gitea/models"
	activities_model "code.gitea.io/gitea/models/activities"
	"code.gitea.io/gitea/models/db"
	git_model "code.gitea.io/gitea/models/git"
	issues_model "code.gitea.io/gitea/models/issues"
	"code.gitea.io/gitea/models/organization"
	access_model "code.gitea.io/gitea/models/perm/access"
	pull_model "code.gitea.io/gitea/models/pull"
	quota_model "code.gitea.io/gitea/models/quota"
	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/models/unit"
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/base"
	"code.gitea.io/gitea/modules/emoji"
	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/gitrepo"
	issue_template "code.gitea.io/gitea/modules/issue/template"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/optional"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/structs"
	"code.gitea.io/gitea/modules/util"
	"code.gitea.io/gitea/modules/web"
	"code.gitea.io/gitea/routers/utils"
	asymkey_service "code.gitea.io/gitea/services/asymkey"
	"code.gitea.io/gitea/services/automerge"
	"code.gitea.io/gitea/services/context"
	"code.gitea.io/gitea/services/context/upload"
	"code.gitea.io/gitea/services/forms"
	"code.gitea.io/gitea/services/gitdiff"
	notify_service "code.gitea.io/gitea/services/notify"
	pull_service "code.gitea.io/gitea/services/pull"
	repo_service "code.gitea.io/gitea/services/repository"

	"github.com/gobwas/glob"
)

const (
	tplFork        base.TplName = "repo/pulls/fork"
	tplCompareDiff base.TplName = "repo/diff/compare"
	tplPullCommits base.TplName = "repo/pulls/commits"
	tplPullFiles   base.TplName = "repo/pulls/files"

	pullRequestTemplateKey = "PullRequestTemplate"
)

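// pullRequestTemplateCandidates lists the in-repository paths that are tried when
// looking for a pull request description template.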
var pullRequestTemplateCandidates = []string{
	"PULL_REQUEST_TEMPLATE.md",
	"PULL_REQUEST_TEMPLATE.yaml",
	"PULL_REQUEST_TEMPLATE.yml",
	"pull_request_template.md",
	"pull_request_template.yaml",
	"pull_request_template.yml",
	".forgejo/PULL_REQUEST_TEMPLATE.md",
	".forgejo/PULL_REQUEST_TEMPLATE.yaml",
	".forgejo/PULL_REQUEST_TEMPLATE.yml",
	".forgejo/pull_request_template.md",
	".forgejo/pull_request_template.yaml",
	".forgejo/pull_request_template.yml",
	".gitea/PULL_REQUEST_TEMPLATE.md",
	".gitea/PULL_REQUEST_TEMPLATE.yaml",
	".gitea/PULL_REQUEST_TEMPLATE.yml",
	".gitea/pull_request_template.md",
	".gitea/pull_request_template.yaml",
	".gitea/pull_request_template.yml",
	".github/PULL_REQUEST_TEMPLATE.md",
	".github/PULL_REQUEST_TEMPLATE.yaml",
	".github/PULL_REQUEST_TEMPLATE.yml",
	".github/pull_request_template.md",
	".github/pull_request_template.yaml",
	".github/pull_request_template.yml",
}

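// getRepository loads the repository with the given ID and verifies that the current
// doer is allowed to read its code; it renders an error response and returns nil otherwise.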
func getRepository(ctx *context.Context, repoID int64) *repo_model.Repository {
	repo, err := repo_model.GetRepositoryByID(ctx, repoID)
	if err != nil {
		if repo_model.IsErrRepoNotExist(err) {
			ctx.NotFound("GetRepositoryByID", nil)
		} else {
			ctx.ServerError("GetRepositoryByID", err)
		}
		return nil
	}

	perm, err := access_model.GetUserRepoPermission(ctx, repo, ctx.Doer)
	if err != nil {
		ctx.ServerError("GetUserRepoPermission", err)
		return nil
	}

	if !perm.CanRead(unit.TypeCode) {
		log.Trace("Permission Denied: User %-v cannot read %-v of repo %-v\n"+
			"User in repo has Permissions: %-+v",
			ctx.Doer,
			unit.TypeCode,
			ctx.Repo,
			perm)
		ctx.NotFound("getRepository", nil)
		return nil
	}
	return repo
}

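// updateForkRepositoryInContext fills the template context with everything the fork page
// needs for forkRepo (name, description, visibility, possible target owners and branch
// list). It renders an error response and returns false when forking is not possible.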
func updateForkRepositoryInContext(ctx *context.Context, forkRepo *repo_model.Repository) bool {
	if forkRepo == nil {
		ctx.NotFound("No repository in context", nil)
		return false
	}

	if forkRepo.IsEmpty {
		log.Trace("Empty repository %-v", forkRepo)
		ctx.NotFound("updateForkRepositoryInContext", nil)
		return false
	}

	if err := forkRepo.LoadOwner(ctx); err != nil {
		ctx.ServerError("LoadOwner", err)
		return false
	}

	ctx.Data["repo_name"] = forkRepo.Name
	ctx.Data["description"] = forkRepo.Description
	ctx.Data["IsPrivate"] = forkRepo.IsPrivate || forkRepo.Owner.Visibility == structs.VisibleTypePrivate
	canForkToUser := forkRepo.OwnerID != ctx.Doer.ID && !repo_model.HasForkedRepo(ctx, ctx.Doer.ID, forkRepo.ID)

	ctx.Data["ForkRepo"] = forkRepo

	ownedOrgs, err := organization.GetOrgsCanCreateRepoByUserID(ctx, ctx.Doer.ID)
	if err != nil {
		ctx.ServerError("GetOrgsCanCreateRepoByUserID", err)
		return false
	}
	var orgs []*organization.Organization
	for _, org := range ownedOrgs {
		if forkRepo.OwnerID != org.ID && !repo_model.HasForkedRepo(ctx, org.ID, forkRepo.ID) {
			orgs = append(orgs, org)
		}
	}

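	// Walk up the chain of parent repositories and drop every candidate owner (the doer
	// or one of their organizations) that already owns a repository in that chain.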
	traverseParentRepo := forkRepo
	for {
		if ctx.Doer.ID == traverseParentRepo.OwnerID {
			canForkToUser = false
		} else {
			for i, org := range orgs {
				if org.ID == traverseParentRepo.OwnerID {
					orgs = append(orgs[:i], orgs[i+1:]...)
					break
				}
			}
		}

		if !traverseParentRepo.IsFork {
			break
		}
		traverseParentRepo, err = repo_model.GetRepositoryByID(ctx, traverseParentRepo.ForkID)
		if err != nil {
			ctx.ServerError("GetRepositoryByID", err)
			return false
		}
	}

	ctx.Data["CanForkToUser"] = canForkToUser
	ctx.Data["Orgs"] = orgs

	if canForkToUser {
		ctx.Data["ContextUser"] = ctx.Doer
	} else if len(orgs) > 0 {
		ctx.Data["ContextUser"] = orgs[0]
	} else {
		ctx.Data["CanForkRepo"] = false
		ctx.RenderWithErr(ctx.Tr("repo.fork_no_valid_owners"), tplFork, nil)
		return false
	}

	branches, err := git_model.FindBranchNames(ctx, git_model.FindBranchOptions{
		RepoID: ctx.Repo.Repository.ID,
		ListOptions: db.ListOptions{
			ListAll: true,
		},
		IsDeletedBranch: optional.Some(false),
		// The default branch is excluded here and prepended as the first option below.
		ExcludeBranchNames: []string{ctx.Repo.Repository.DefaultBranch},
	})
	if err != nil {
		ctx.ServerError("FindBranchNames", err)
		return false
	}
	ctx.Data["Branches"] = append([]string{ctx.Repo.Repository.DefaultBranch}, branches...)

	return true
}

// ForkByID redirects (with 301 Moved Permanently) to the repository's `/fork` page
func ForkByID(ctx *context.Context) {
	ctx.Redirect(ctx.Repo.Repository.Link()+"/fork", http.StatusMovedPermanently)
}

// Fork renders the repository fork page
func Fork(ctx *context.Context) {
	ctx.Data["Title"] = ctx.Tr("new_fork")

	if ctx.Doer.CanForkRepo() {
		ctx.Data["CanForkRepo"] = true
	} else {
		maxCreationLimit := ctx.Doer.MaxCreationLimit()
		msg := ctx.TrN(maxCreationLimit, "repo.form.reach_limit_of_creation_1", "repo.form.reach_limit_of_creation_n", maxCreationLimit)
		ctx.Flash.Error(msg, true)
	}

	if !updateForkRepositoryInContext(ctx, ctx.Repo.Repository) {
		return
	}

	ctx.HTML(http.StatusOK, tplFork)
}

// ForkPost handles the form submission for forking a repository
func ForkPost(ctx *context.Context) {
	form := web.GetForm(ctx).(*forms.CreateRepoForm)
	ctx.Data["Title"] = ctx.Tr("new_fork")
	ctx.Data["CanForkRepo"] = true

	ctxUser := checkContextUser(ctx, form.UID)
	if ctx.Written() {
		return
	}

	forkRepo := ctx.Repo.Repository
	if !updateForkRepositoryInContext(ctx, forkRepo) {
		return
	}

	ctx.Data["ContextUser"] = ctxUser

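	// Quota is enforced as a soft limit: the fork is only denied if the target owner is
	// already over the size:repos:all quota.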
	if !ctx.CheckQuota(quota_model.LimitSubjectSizeReposAll, ctxUser.ID, ctxUser.Name) {
		return
	}

	if ctx.HasError() {
		ctx.HTML(http.StatusOK, tplFork)
		return
	}

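	// Walk up the fork chain: refuse the fork if the chosen owner already owns one of the
	// ancestor repositories, and redirect to an existing fork of an ancestor instead of
	// creating a duplicate.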
	var err error
	traverseParentRepo := forkRepo
	for {
		if ctxUser.ID == traverseParentRepo.OwnerID {
			ctx.RenderWithErr(ctx.Tr("repo.settings.new_owner_has_same_repo"), tplFork, &form)
			return
		}
		repo := repo_model.GetForkedRepo(ctx, ctxUser.ID, traverseParentRepo.ID)
		if repo != nil {
			ctx.Redirect(ctxUser.HomeLink() + "/" + url.PathEscape(repo.Name))
			return
		}
		if !traverseParentRepo.IsFork {
			break
		}
		traverseParentRepo, err = repo_model.GetRepositoryByID(ctx, traverseParentRepo.ForkID)
		if err != nil {
			ctx.ServerError("GetRepositoryByID", err)
			return
		}
	}

	// Check if the user is allowed to create repos in the organization.
	if ctxUser.IsOrganization() {
		isAllowedToFork, err := organization.OrgFromUser(ctxUser).CanCreateOrgRepo(ctx, ctx.Doer.ID)
		if err != nil {
			ctx.ServerError("CanCreateOrgRepo", err)
			return
		} else if !isAllowedToFork {
			ctx.Error(http.StatusForbidden)
			return
		}
	}

	repo, err := repo_service.ForkRepositoryAndUpdates(ctx, ctx.Doer, ctxUser, repo_service.ForkRepoOptions{
		BaseRepo:     forkRepo,
		Name:         form.RepoName,
		Description:  form.Description,
		SingleBranch: form.ForkSingleBranch,
	})
	if err != nil {
		ctx.Data["Err_RepoName"] = true
		switch {
		case repo_model.IsErrReachLimitOfRepo(err):
			maxCreationLimit := ctxUser.MaxCreationLimit()
			msg := ctx.TrN(maxCreationLimit, "repo.form.reach_limit_of_creation_1", "repo.form.reach_limit_of_creation_n", maxCreationLimit)
			ctx.RenderWithErr(msg, tplFork, &form)
		case repo_model.IsErrRepoAlreadyExist(err):
			ctx.RenderWithErr(ctx.Tr("repo.settings.new_owner_has_same_repo"), tplFork, &form)
		case repo_model.IsErrRepoFilesAlreadyExist(err):
			switch {
			case ctx.IsUserSiteAdmin() || (setting.Repository.AllowAdoptionOfUnadoptedRepositories && setting.Repository.AllowDeleteOfUnadoptedRepositories):
				ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt_or_delete"), tplFork, form)
			case setting.Repository.AllowAdoptionOfUnadoptedRepositories:
				ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt"), tplFork, form)
			case setting.Repository.AllowDeleteOfUnadoptedRepositories:
				ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.delete"), tplFork, form)
			default:
				ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist"), tplFork, form)
			}
		case db.IsErrNameReserved(err):
			ctx.RenderWithErr(ctx.Tr("repo.form.name_reserved", err.(db.ErrNameReserved).Name), tplFork, &form)
		case db.IsErrNamePatternNotAllowed(err):
			ctx.RenderWithErr(ctx.Tr("repo.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tplFork, &form)
		default:
			ctx.ServerError("ForkPost", err)
		}
		return
	}

	log.Trace("Repository forked[%d]: %s/%s", forkRepo.ID, ctxUser.Name, repo.Name)
	ctx.Redirect(ctxUser.HomeLink() + "/" + url.PathEscape(repo.Name))
}

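// getPullInfo loads the pull request issue addressed by the current request together with
// its poster, repository and pull request record, stores it in the template context, and
// reports whether it could be loaded.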
func getPullInfo(ctx *context.Context) (issue *issues_model.Issue, ok bool) {
	issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
	if err != nil {
		if issues_model.IsErrIssueNotExist(err) {
			ctx.NotFound("GetIssueByIndex", err)
		} else {
			ctx.ServerError("GetIssueByIndex", err)
		}
		return nil, false
	}
	if err = issue.LoadPoster(ctx); err != nil {
		ctx.ServerError("LoadPoster", err)
		return nil, false
	}
	if err := issue.LoadRepo(ctx); err != nil {
		ctx.ServerError("LoadRepo", err)
		return nil, false
	}
	ctx.Data["Title"] = fmt.Sprintf("#%d - %s", issue.Index, emoji.ReplaceAliases(issue.Title))
	ctx.Data["Issue"] = issue

	if !issue.IsPull {
		ctx.NotFound("ViewPullCommits", nil)
		return nil, false
	}

	if err = issue.LoadPullRequest(ctx); err != nil {
		ctx.ServerError("LoadPullRequest", err)
		return nil, false
	}

	if err = issue.PullRequest.LoadHeadRepo(ctx); err != nil {
		ctx.ServerError("LoadHeadRepo", err)
		return nil, false
	}

	if ctx.IsSigned {
		// Update issue-user.
		if err = activities_model.SetIssueReadBy(ctx, issue.ID, ctx.Doer.ID); err != nil {
			ctx.ServerError("ReadBy", err)
			return nil, false
		}
	}

	return issue, true
}

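// setMergeTarget fills the template context with the head and base targets of the pull
// request and flags pull requests that were created using the AGit flow.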
func setMergeTarget(ctx *context.Context, pull *issues_model.PullRequest) {
	if ctx.Repo.Owner.Name == pull.MustHeadUserName(ctx) {
		ctx.Data["HeadTarget"] = pull.HeadBranch
	} else if pull.HeadRepo == nil {
		ctx.Data["HeadTarget"] = pull.MustHeadUserName(ctx) + ":" + pull.HeadBranch
	} else {
		ctx.Data["HeadTarget"] = pull.MustHeadUserName(ctx) + "/" + pull.HeadRepo.Name + ":" + pull.HeadBranch
	}

	if pull.Flow == issues_model.PullRequestFlowAGit {
		ctx.Data["MadeUsingAGit"] = true
	}

	ctx.Data["BaseTarget"] = pull.BaseBranch
	ctx.Data["HeadBranchLink"] = pull.GetHeadBranchLink(ctx)
	ctx.Data["BaseBranchLink"] = pull.GetBaseBranchLink(ctx)
}

// GetPullDiffStats gets the diff stats of a pull request
func GetPullDiffStats(ctx *context.Context) {
	issue, ok := getPullInfo(ctx)
	if !ok {
		return
	}
	pull := issue.PullRequest

	mergeBaseCommitID := GetMergedBaseCommitID(ctx, issue)

	if mergeBaseCommitID == "" {
		ctx.NotFound("PullFiles", nil)
		return
	}

	headCommitID, err := ctx.Repo.GitRepo.GetRefCommitID(pull.GetGitRefName())
	if err != nil {
		ctx.ServerError("GetRefCommitID", err)
		return
	}

	diffOptions := &gitdiff.DiffOptions{
		BeforeCommitID:     mergeBaseCommitID,
		AfterCommitID:      headCommitID,
		MaxLines:           setting.Git.MaxGitDiffLines,
		MaxLineCharacters:  setting.Git.MaxGitDiffLineCharacters,
		MaxFiles:           setting.Git.MaxGitDiffFiles,
		WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)),
	}

	diff, err := gitdiff.GetPullDiffStats(ctx.Repo.GitRepo, diffOptions)
	if err != nil {
		ctx.ServerError("GetPullDiffStats", err)
		return
	}

	ctx.Data["Diff"] = diff
}

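// GetMergedBaseCommitID returns the merge base of the pull request, reconstructing it from
// the pull head reference or the stored patch when a migrated pull request has no recorded
// merge base.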
func GetMergedBaseCommitID(ctx *context.Context, issue *issues_model.Issue) string {
	pull := issue.PullRequest

	var baseCommit string
	// Some migrated PRs won't have any base SHA and lose history; try to recover one
	if pull.MergeBase == "" {
		var commitSHA, parentCommit string
		// If there is a head or a patch file, and it is readable, grab info
		commitSHA, err := ctx.Repo.GitRepo.GetRefCommitID(pull.GetGitRefName())
		if err != nil {
			// Head File does not exist, try the patch
			commitSHA, err = ctx.Repo.GitRepo.ReadPatchCommit(pull.Index)
			if err == nil {
				// Recreate pull head in files for next time
				if err := ctx.Repo.GitRepo.SetReference(pull.GetGitRefName(), commitSHA); err != nil {
					log.Error("Could not write head file", err)
				}
			} else {
				// There is no history available
				log.Trace("No history file available for PR %d", pull.Index)
			}
		}
		if commitSHA != "" {
			// Get immediate parent of the first commit in the patch, grab history back
			parentCommit, _, err = git.NewCommand(ctx, "rev-list", "-1", "--skip=1").AddDynamicArguments(commitSHA).RunStdString(&git.RunOpts{Dir: ctx.Repo.GitRepo.Path})
			if err == nil {
				parentCommit = strings.TrimSpace(parentCommit)
			}
			// Special case on Git < 2.25 that doesn't fail on immediate empty history
			if err != nil || parentCommit == "" {
				log.Info("No known parent commit for PR %d, error: %v", pull.Index, err)
				// bring at least partial history if it can work
				parentCommit = commitSHA
			}
		}
		baseCommit = parentCommit
	} else {
		// Keep an empty history or original commit
		baseCommit = pull.MergeBase
	}

	return baseCommit
}

// PrepareMergedViewPullInfo shows meta information for a merged pull request view page
func PrepareMergedViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.CompareInfo {
	pull := issue.PullRequest

	setMergeTarget(ctx, pull)
	ctx.Data["HasMerged"] = true

	baseCommit := GetMergedBaseCommitID(ctx, issue)

	compareInfo, err := ctx.Repo.GitRepo.GetCompareInfo(ctx.Repo.Repository.RepoPath(),
		baseCommit, pull.GetGitRefName(), false, false)
	if err != nil {
		if strings.Contains(err.Error(), "fatal: Not a valid object name") || strings.Contains(err.Error(), "unknown revision or path not in the working tree") {
			ctx.Data["IsPullRequestBroken"] = true
			ctx.Data["BaseTarget"] = pull.BaseBranch
			ctx.Data["NumCommits"] = 0
			ctx.Data["NumFiles"] = 0
			return nil
		}

		ctx.ServerError("GetCompareInfo", err)
		return nil
	}
	ctx.Data["NumCommits"] = len(compareInfo.Commits)
	ctx.Data["NumFiles"] = compareInfo.NumFiles

	if len(compareInfo.Commits) != 0 {
		sha := compareInfo.Commits[0].ID.String()
		commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, sha, db.ListOptionsAll)
		if err != nil {
			ctx.ServerError("GetLatestCommitStatus", err)
			return nil
		}
		if !ctx.Repo.CanRead(unit.TypeActions) {
			git_model.CommitStatusesHideActionsURL(ctx, commitStatuses)
		}

		if len(commitStatuses) != 0 {
			ctx.Data["LatestCommitStatuses"] = commitStatuses
			ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses)
		}
	}

	return compareInfo
}

// PrepareViewPullInfo shows meta information for a pull request preview page
func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.CompareInfo {
	ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes

	repo := ctx.Repo.Repository
	pull := issue.PullRequest

	if err := pull.LoadHeadRepo(ctx); err != nil {
		ctx.ServerError("LoadHeadRepo", err)
		return nil
	}

	if err := pull.LoadBaseRepo(ctx); err != nil {
		ctx.ServerError("LoadBaseRepo", err)
		return nil
	}

	setMergeTarget(ctx, pull)

	pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, pull.BaseBranch)
	if err != nil {
		ctx.ServerError("LoadProtectedBranch", err)
		return nil
	}
	ctx.Data["EnableStatusCheck"] = pb != nil && pb.EnableStatusCheck

	var baseGitRepo *git.Repository
	if pull.BaseRepoID == ctx.Repo.Repository.ID && ctx.Repo.GitRepo != nil {
		baseGitRepo = ctx.Repo.GitRepo
	} else {
		baseGitRepo, err = gitrepo.OpenRepository(ctx, pull.BaseRepo)
		if err != nil {
			ctx.ServerError("OpenRepository", err)
			return nil
		}
		defer baseGitRepo.Close()
	}

	if !baseGitRepo.IsBranchExist(pull.BaseBranch) {
		ctx.Data["IsPullRequestBroken"] = true
		ctx.Data["BaseTarget"] = pull.BaseBranch
		ctx.Data["HeadTarget"] = pull.HeadBranch

		sha, err := baseGitRepo.GetRefCommitID(pull.GetGitRefName())
		if err != nil {
			ctx.ServerError(fmt.Sprintf("GetRefCommitID(%s)", pull.GetGitRefName()), err)
			return nil
		}
		commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll)
		if err != nil {
			ctx.ServerError("GetLatestCommitStatus", err)
			return nil
		}
		if !ctx.Repo.CanRead(unit.TypeActions) {
			git_model.CommitStatusesHideActionsURL(ctx, commitStatuses)
		}

		if len(commitStatuses) > 0 {
			ctx.Data["LatestCommitStatuses"] = commitStatuses
			ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses)
		}

		compareInfo, err := baseGitRepo.GetCompareInfo(pull.BaseRepo.RepoPath(),
			pull.MergeBase, pull.GetGitRefName(), false, false)
		if err != nil {
			if strings.Contains(err.Error(), "fatal: Not a valid object name") {
				ctx.Data["IsPullRequestBroken"] = true
				ctx.Data["BaseTarget"] = pull.BaseBranch
				ctx.Data["NumCommits"] = 0
				ctx.Data["NumFiles"] = 0
				return nil
			}

			ctx.ServerError("GetCompareInfo", err)
			return nil
		}

		ctx.Data["NumCommits"] = len(compareInfo.Commits)
		ctx.Data["NumFiles"] = compareInfo.NumFiles
		return compareInfo
	}

	var headBranchExist bool
	var headBranchSha string
	// HeadRepo may be missing
	if pull.HeadRepo != nil {
		headGitRepo, err := gitrepo.OpenRepository(ctx, pull.HeadRepo)
		if err != nil {
			ctx.ServerError("OpenRepository", err)
			return nil
		}
		defer headGitRepo.Close()

		if pull.Flow == issues_model.PullRequestFlowGithub {
			headBranchExist = headGitRepo.IsBranchExist(pull.HeadBranch)
		} else {
			headBranchExist = git.IsReferenceExist(ctx, baseGitRepo.Path, pull.GetGitRefName())
		}

		if headBranchExist {
			if pull.Flow != issues_model.PullRequestFlowGithub {
				headBranchSha, err = baseGitRepo.GetRefCommitID(pull.GetGitRefName())
			} else {
				headBranchSha, err = headGitRepo.GetBranchCommitID(pull.HeadBranch)
			}
			if err != nil {
				ctx.ServerError("GetBranchCommitID", err)
				return nil
			}
		}
	}

	if headBranchExist {
		var err error
		ctx.Data["UpdateAllowed"], ctx.Data["UpdateByRebaseAllowed"], err = pull_service.IsUserAllowedToUpdate(ctx, pull, ctx.Doer)
		if err != nil {
			ctx.ServerError("IsUserAllowedToUpdate", err)
			return nil
		}
		ctx.Data["GetCommitMessages"] = pull_service.GetSquashMergeCommitMessages(ctx, pull)
	} else {
		ctx.Data["GetCommitMessages"] = ""
	}

	sha, err := baseGitRepo.GetRefCommitID(pull.GetGitRefName())
	if err != nil {
		if git.IsErrNotExist(err) {
			ctx.Data["IsPullRequestBroken"] = true
			if pull.IsSameRepo() {
				ctx.Data["HeadTarget"] = pull.HeadBranch
			} else if pull.HeadRepo == nil {
				ctx.Data["HeadTarget"] = ctx.Locale.Tr("repo.pull.deleted_branch", pull.HeadBranch)
			} else {
				ctx.Data["HeadTarget"] = pull.HeadRepo.OwnerName + ":" + pull.HeadBranch
			}
			ctx.Data["BaseTarget"] = pull.BaseBranch
			ctx.Data["NumCommits"] = 0
			ctx.Data["NumFiles"] = 0
			return nil
		}
		ctx.ServerError(fmt.Sprintf("GetRefCommitID(%s)", pull.GetGitRefName()), err)
		return nil
	}

	commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll)
	if err != nil {
		ctx.ServerError("GetLatestCommitStatus", err)
		return nil
	}
	if !ctx.Repo.CanRead(unit.TypeActions) {
		git_model.CommitStatusesHideActionsURL(ctx, commitStatuses)
	}

	if len(commitStatuses) > 0 {
		ctx.Data["LatestCommitStatuses"] = commitStatuses
		ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses)
	}

	if pb != nil && pb.EnableStatusCheck {
		var missingRequiredChecks []string
		for _, requiredContext := range pb.StatusCheckContexts {
			contextFound := false
			matchesRequiredContext := createRequiredContextMatcher(requiredContext)
			for _, presentStatus := range commitStatuses {
				if matchesRequiredContext(presentStatus.Context) {
					contextFound = true
					break
				}
			}

			if !contextFound {
				missingRequiredChecks = append(missingRequiredChecks, requiredContext)
			}
		}
		ctx.Data["MissingRequiredChecks"] = missingRequiredChecks

		ctx.Data["is_context_required"] = func(context string) bool {
			for _, c := range pb.StatusCheckContexts {
				if c == context {
					return true
				}
				if gp, err := glob.Compile(c); err != nil {
					// All newly created status_check_contexts are checked to ensure they are valid glob expressions before being stored in the database.
					// But some old status_check_context created before glob was introduced may be invalid glob expressions.
					// So log the error here for debugging.
					log.Error("compile glob %q: %v", c, err)
				} else if gp.Match(context) {
					return true
				}
			}
			return false
		}
		ctx.Data["RequiredStatusCheckState"] = pull_service.MergeRequiredContextsCommitStatus(commitStatuses, pb.StatusCheckContexts)
	}

	ctx.Data["HeadBranchMovedOn"] = headBranchSha != sha
	ctx.Data["HeadBranchCommitID"] = headBranchSha
	ctx.Data["PullHeadCommitID"] = sha

	if pull.HeadRepo == nil || !headBranchExist || (!pull.Issue.IsClosed && (headBranchSha != sha)) {
		ctx.Data["IsPullRequestBroken"] = true
		if pull.IsSameRepo() {
			ctx.Data["HeadTarget"] = pull.HeadBranch
		} else if pull.HeadRepo == nil {
			ctx.Data["HeadTarget"] = ctx.Locale.Tr("repo.pull.deleted_branch", pull.HeadBranch)
		} else {
			ctx.Data["HeadTarget"] = pull.HeadRepo.OwnerName + ":" + pull.HeadBranch
		}
	}

	compareInfo, err := baseGitRepo.GetCompareInfo(pull.BaseRepo.RepoPath(),
		git.BranchPrefix+pull.BaseBranch, pull.GetGitRefName(), false, false)
	if err != nil {
		if strings.Contains(err.Error(), "fatal: Not a valid object name") {
			ctx.Data["IsPullRequestBroken"] = true
			ctx.Data["BaseTarget"] = pull.BaseBranch
			ctx.Data["NumCommits"] = 0
			ctx.Data["NumFiles"] = 0
			return nil
		}

		ctx.ServerError("GetCompareInfo", err)
		return nil
	}

	if compareInfo.HeadCommitID == compareInfo.MergeBase {
		ctx.Data["IsNothingToCompare"] = true
	}

	if pull.IsWorkInProgress(ctx) {
		ctx.Data["IsPullWorkInProgress"] = true
		ctx.Data["WorkInProgressPrefix"] = pull.GetWorkInProgressPrefix(ctx)
	}

	if pull.IsFilesConflicted() {
		ctx.Data["IsPullFilesConflicted"] = true
		ctx.Data["ConflictedFiles"] = pull.ConflictedFiles
	}

	ctx.Data["NumCommits"] = len(compareInfo.Commits)
	ctx.Data["NumFiles"] = compareInfo.NumFiles
	return compareInfo
}

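// createRequiredContextMatcher returns a predicate that reports whether a commit status
// context satisfies requiredContext, using glob matching when requiredContext compiles as
// a glob pattern and falling back to exact comparison otherwise.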
func createRequiredContextMatcher(requiredContext string) func(string) bool {
|
|
|
|
if gp, err := glob.Compile(requiredContext); err == nil {
|
|
|
|
return func(contextToCheck string) bool {
|
|
|
|
return gp.Match(contextToCheck)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return func(contextToCheck string) bool {
|
|
|
|
return requiredContext == contextToCheck
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-07-28 21:18:12 +02:00
|
|
|
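// pullCommitList is the JSON response body produced by GetPullCommits.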
type pullCommitList struct {
	Commits             []pull_service.CommitInfo `json:"commits"`
	LastReviewCommitSha string                    `json:"last_review_commit_sha"`
	Locale              map[string]any            `json:"locale"`
}

// GetPullCommits gets all commits for the given pull request
func GetPullCommits(ctx *context.Context) {
	issue, ok := getPullInfo(ctx)
	if !ok {
		return
	}
	resp := &pullCommitList{}

	commits, lastReviewCommitSha, err := pull_service.GetPullCommits(ctx, issue)
	if err != nil {
		ctx.JSON(http.StatusInternalServerError, err)
		return
	}

	// Get the needed locale
	resp.Locale = map[string]any{
		"lang":                                ctx.Locale.Language(),
		"show_all_commits":                    ctx.Tr("repo.pulls.show_all_commits"),
		"stats_num_commits":                   ctx.TrN(len(commits), "repo.activity.git_stats_commit_1", "repo.activity.git_stats_commit_n", len(commits)),
		"show_changes_since_your_last_review": ctx.Tr("repo.pulls.show_changes_since_your_last_review"),
		"select_commit_hold_shift_for_range":  ctx.Tr("repo.pulls.select_commit_hold_shift_for_range"),
	}

	resp.Commits = commits
	resp.LastReviewCommitSha = lastReviewCommitSha

	ctx.JSON(http.StatusOK, resp)
}

// ViewPullCommits shows commits for a pull request
func ViewPullCommits(ctx *context.Context) {
	ctx.Data["PageIsPullList"] = true
	ctx.Data["PageIsPullCommits"] = true

	issue, ok := getPullInfo(ctx)
	if !ok {
		return
	}
	pull := issue.PullRequest

	var prInfo *git.CompareInfo
	if pull.HasMerged {
		prInfo = PrepareMergedViewPullInfo(ctx, issue)
	} else {
		prInfo = PrepareViewPullInfo(ctx, issue)
	}

	if ctx.Written() {
		return
	} else if prInfo == nil {
		ctx.NotFound("ViewPullCommits", nil)
		return
	}

	ctx.Data["Username"] = ctx.Repo.Owner.Name
	ctx.Data["Reponame"] = ctx.Repo.Repository.Name

	commits := processGitCommits(ctx, prInfo.Commits)
	ctx.Data["Commits"] = commits
	ctx.Data["CommitCount"] = len(commits)

	ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
	ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID)

	// For PR commits page
	PrepareBranchList(ctx)
	if ctx.Written() {
		return
	}
	getBranchData(ctx, issue)
	ctx.HTML(http.StatusOK, tplPullCommits)
}

// ViewPullFiles renders the pull request changed files list page
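// When willShowSpecifiedCommit or willShowSpecifiedCommitRange is set, only the diff of the
// given commit or commit range (specifiedStartCommit..specifiedEndCommit) is rendered
// instead of the full pull request diff.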
func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommit string, willShowSpecifiedCommitRange, willShowSpecifiedCommit bool) {
|
2016-08-14 12:32:24 +02:00
|
|
|
ctx.Data["PageIsPullList"] = true
|
2015-09-02 10:08:05 +02:00
|
|
|
ctx.Data["PageIsPullFiles"] = true
|
|
|
|
|
2023-08-07 05:43:18 +02:00
|
|
|
issue, ok := getPullInfo(ctx)
|
|
|
|
if !ok {
|
2015-09-02 10:08:05 +02:00
|
|
|
return
|
|
|
|
}
|
2016-08-16 19:19:09 +02:00
|
|
|
pull := issue.PullRequest
|
2015-09-02 10:08:05 +02:00
|
|
|
|
2015-09-02 15:26:56 +02:00
|
|
|
var (
|
|
|
|
startCommitID string
|
|
|
|
endCommitID string
|
2021-08-31 06:16:23 +02:00
|
|
|
gitRepo = ctx.Repo.GitRepo
|
2015-09-02 15:26:56 +02:00
|
|
|
)
|
|
|
|
|
2019-12-18 19:37:44 +01:00
|
|
|
var prInfo *git.CompareInfo
|
2015-09-02 15:26:56 +02:00
|
|
|
if pull.HasMerged {
|
2019-12-18 19:37:44 +01:00
|
|
|
prInfo = PrepareMergedViewPullInfo(ctx, issue)
|
2015-09-02 15:26:56 +02:00
|
|
|
} else {
|
2019-12-18 19:37:44 +01:00
|
|
|
prInfo = PrepareViewPullInfo(ctx, issue)
|
|
|
|
}
|
2015-09-02 10:08:05 +02:00
|
|
|
|
2023-07-28 21:18:12 +02:00
|
|
|
// Validate the given commit sha to show (if any passed)
|
|
|
|
if willShowSpecifiedCommit || willShowSpecifiedCommitRange {
|
|
|
|
foundStartCommit := len(specifiedStartCommit) == 0
|
|
|
|
foundEndCommit := len(specifiedEndCommit) == 0
|
|
|
|
|
|
|
|
if !(foundStartCommit && foundEndCommit) {
|
|
|
|
for _, commit := range prInfo.Commits {
|
|
|
|
if commit.ID.String() == specifiedStartCommit {
|
|
|
|
foundStartCommit = true
|
|
|
|
}
|
|
|
|
if commit.ID.String() == specifiedEndCommit {
|
|
|
|
foundEndCommit = true
|
|
|
|
}
|
|
|
|
|
|
|
|
if foundStartCommit && foundEndCommit {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if !(foundStartCommit && foundEndCommit) {
|
|
|
|
ctx.NotFound("Given SHA1 not found for this PR", nil)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-12-18 19:37:44 +01:00
|
|
|
if ctx.Written() {
|
|
|
|
return
|
|
|
|
} else if prInfo == nil {
|
|
|
|
ctx.NotFound("ViewPullFiles", nil)
|
|
|
|
return
|
|
|
|
}
|
2015-09-02 10:08:05 +02:00
|
|
|
|
2019-12-18 19:37:44 +01:00
|
|
|
headCommitID, err := gitRepo.GetRefCommitID(pull.GetGitRefName())
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("GetRefCommitID", err)
|
|
|
|
return
|
|
|
|
}
|
2015-09-02 15:26:56 +02:00
|
|
|
|
2023-07-28 21:18:12 +02:00
|
|
|
ctx.Data["IsShowingOnlySingleCommit"] = willShowSpecifiedCommit
|
|
|
|
|
|
|
|
if willShowSpecifiedCommit || willShowSpecifiedCommitRange {
|
|
|
|
if len(specifiedEndCommit) > 0 {
|
|
|
|
endCommitID = specifiedEndCommit
|
|
|
|
} else {
|
|
|
|
endCommitID = headCommitID
|
|
|
|
}
|
|
|
|
if len(specifiedStartCommit) > 0 {
|
|
|
|
startCommitID = specifiedStartCommit
|
|
|
|
} else {
|
|
|
|
startCommitID = prInfo.MergeBase
|
|
|
|
}
|
|
|
|
ctx.Data["IsShowingAllCommits"] = false
|
|
|
|
} else {
|
|
|
|
endCommitID = headCommitID
|
|
|
|
startCommitID = prInfo.MergeBase
|
|
|
|
ctx.Data["IsShowingAllCommits"] = true
|
|
|
|
}
|
2017-06-21 00:25:38 +02:00
|
|
|
|
2019-12-18 19:37:44 +01:00
|
|
|
ctx.Data["Username"] = ctx.Repo.Owner.Name
|
|
|
|
ctx.Data["Reponame"] = ctx.Repo.Repository.Name
|
2019-11-15 03:52:59 +01:00
|
|
|
ctx.Data["AfterCommitID"] = endCommitID
|
2023-07-28 21:18:12 +02:00
|
|
|
ctx.Data["BeforeCommitID"] = startCommitID
|
2015-09-02 10:08:05 +02:00
|
|
|
|
2021-11-21 17:51:08 +01:00
|
|
|
fileOnly := ctx.FormBool("file-only")
|
|
|
|
|
|
|
|
maxLines, maxFiles := setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles
|
|
|
|
files := ctx.FormStrings("files")
|
|
|
|
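// When only one or two specific files are requested (file-only diff view), lift the diff size limits.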
if fileOnly && (len(files) == 2 || len(files) == 1) {
|
|
|
|
maxLines, maxFiles = -1, -1
|
|
|
|
}
|
2023-07-28 21:18:12 +02:00
|
|
|
|
2022-05-07 20:28:10 +02:00
|
|
|
diffOptions := &gitdiff.DiffOptions{
|
|
|
|
AfterCommitID: endCommitID,
|
|
|
|
SkipTo: ctx.FormString("skip-to"),
|
|
|
|
MaxLines: maxLines,
|
|
|
|
MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
|
|
|
|
MaxFiles: maxFiles,
|
|
|
|
WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)),
|
|
|
|
}
|
|
|
|
|
2023-07-28 21:18:12 +02:00
|
|
|
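// For the single-commit view, leave BeforeCommitID unset so only that commit's diff is shown.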
if !willShowSpecifiedCommit {
|
|
|
|
diffOptions.BeforeCommitID = startCommitID
|
|
|
|
}
|
|
|
|
|
2022-05-07 20:28:10 +02:00
|
|
|
var methodWithError string
|
|
|
|
var diff *gitdiff.Diff
|
2023-07-28 21:18:12 +02:00
|
|
|
|
|
|
|
// If we're not signed in, or only a single commit (or a commit range) is shown,
|
|
|
|
// we only load the diff and skip the per-file "viewed" information,
|
|
|
|
// because the viewed state is only meant to be loaded for the latest PR diff
|
|
|
|
// and only for signed-in users.
|
|
|
|
if !ctx.IsSigned || willShowSpecifiedCommit || willShowSpecifiedCommitRange {
|
2023-10-03 12:30:41 +02:00
|
|
|
diff, err = gitdiff.GetDiff(ctx, gitRepo, diffOptions, files...)
|
2022-05-07 20:28:10 +02:00
|
|
|
methodWithError = "GetDiff"
|
|
|
|
} else {
|
|
|
|
diff, err = gitdiff.SyncAndGetUserSpecificDiff(ctx, ctx.Doer.ID, pull, gitRepo, diffOptions, files...)
|
|
|
|
methodWithError = "SyncAndGetUserSpecificDiff"
|
|
|
|
}
|
2015-09-02 10:08:05 +02:00
|
|
|
if err != nil {
|
2022-05-07 20:28:10 +02:00
|
|
|
ctx.ServerError(methodWithError, err)
|
2015-09-02 10:08:05 +02:00
|
|
|
return
|
|
|
|
}
|
2018-08-06 06:43:22 +02:00
|
|
|
|
2023-07-04 20:36:08 +02:00
|
|
|
ctx.PageData["prReview"] = map[string]any{
|
2022-05-07 20:28:10 +02:00
|
|
|
"numberOfFiles": diff.NumFiles,
|
|
|
|
"numberOfViewedFiles": diff.NumViewedFiles,
|
|
|
|
}
|
|
|
|
|
2023-06-21 18:08:12 +02:00
|
|
|
if err = diff.LoadComments(ctx, issue, ctx.Doer, ctx.Data["ShowOutdatedComments"].(bool)); err != nil {
|
2018-08-06 06:43:22 +02:00
|
|
|
ctx.ServerError("LoadComments", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2024-02-25 07:00:55 +01:00
|
|
|
for _, file := range diff.Files {
|
|
|
|
for _, section := range file.Sections {
|
|
|
|
for _, line := range section.Lines {
|
2024-02-27 16:05:59 +01:00
|
|
|
for _, comments := range line.Conversations {
|
|
|
|
for _, comment := range comments {
|
|
|
|
if err := comment.LoadAttachments(ctx); err != nil {
|
|
|
|
ctx.ServerError("LoadAttachments", err)
|
|
|
|
return
|
|
|
|
}
|
2024-02-25 07:00:55 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-01-16 09:00:22 +01:00
|
|
|
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pull.BaseRepoID, pull.BaseBranch)
|
|
|
|
if err != nil {
|
2020-10-13 20:50:57 +02:00
|
|
|
ctx.ServerError("LoadProtectedBranch", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-01-16 09:00:22 +01:00
|
|
|
if pb != nil {
|
|
|
|
glob := pb.GetProtectedFilePatterns()
|
2020-10-13 20:50:57 +02:00
|
|
|
if len(glob) != 0 {
|
|
|
|
for _, file := range diff.Files {
|
2023-01-16 09:00:22 +01:00
|
|
|
file.IsProtected = pb.IsProtectedFile(glob, file.Name)
|
2020-10-13 20:50:57 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-09-02 10:08:05 +02:00
|
|
|
ctx.Data["Diff"] = diff
|
2020-05-26 07:58:07 +02:00
|
|
|
ctx.Data["DiffNotAvailable"] = diff.NumFiles == 0
|
2015-09-02 10:08:05 +02:00
|
|
|
|
2019-09-16 11:03:22 +02:00
|
|
|
baseCommit, err := ctx.Repo.GitRepo.GetCommit(startCommitID)
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("GetCommit", err)
|
|
|
|
return
|
|
|
|
}
|
2015-09-02 15:26:56 +02:00
|
|
|
commit, err := gitRepo.GetCommit(endCommitID)
|
2015-09-02 10:08:05 +02:00
|
|
|
if err != nil {
|
2018-01-10 22:34:17 +01:00
|
|
|
ctx.ServerError("GetCommit", err)
|
2015-09-02 10:08:05 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-12-19 22:18:07 +01:00
|
|
|
// determine if the user viewing the pull request can edit the head branch
|
|
|
|
if ctx.Doer != nil && pull.HeadRepo != nil && !pull.HasMerged {
|
|
|
|
headRepoPerm, err := access_model.GetUserRepoPermission(ctx, pull.HeadRepo, ctx.Doer)
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("GetUserRepoPermission", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
ctx.Data["HeadBranchIsEditable"] = pull.HeadRepo.CanEnableEditor() && issues_model.CanMaintainerWriteToBranch(ctx, headRepoPerm, pull.HeadBranch, ctx.Doer)
|
|
|
|
ctx.Data["SourceRepoLink"] = pull.HeadRepo.Link()
|
|
|
|
ctx.Data["HeadBranch"] = pull.HeadBranch
|
|
|
|
}
|
|
|
|
|
2022-03-22 08:03:22 +01:00
|
|
|
if ctx.IsSigned && ctx.Doer != nil {
|
2023-09-29 14:12:54 +02:00
|
|
|
if ctx.Data["CanMarkConversation"], err = issues_model.CanMarkConversation(ctx, issue, ctx.Doer); err != nil {
|
2020-04-18 15:50:25 +02:00
|
|
|
ctx.ServerError("CanMarkConversation", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-11-16 19:18:25 +01:00
|
|
|
setCompareContext(ctx, baseCommit, commit, ctx.Repo.Owner.Name, ctx.Repo.Repository.Name)
|
2019-09-16 11:03:22 +02:00
|
|
|
|
2023-04-07 02:11:02 +02:00
|
|
|
assigneeUsers, err := repo_model.GetRepoAssignees(ctx, ctx.Repo.Repository)
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("GetRepoAssignees", err)
|
2018-08-06 06:43:22 +02:00
|
|
|
return
|
|
|
|
}
|
2023-08-25 13:07:42 +02:00
|
|
|
ctx.Data["Assignees"] = MakeSelfOnTop(ctx.Doer, assigneeUsers)
|
2023-04-07 02:11:02 +02:00
|
|
|
|
2020-12-21 16:39:28 +01:00
|
|
|
handleTeamMentions(ctx)
|
|
|
|
if ctx.Written() {
|
|
|
|
return
|
|
|
|
}
|
2022-05-07 07:35:12 +02:00
|
|
|
|
2022-06-13 11:37:59 +02:00
|
|
|
currentReview, err := issues_model.GetCurrentReview(ctx, ctx.Doer, issue)
|
|
|
|
if err != nil && !issues_model.IsErrReviewNotExist(err) {
|
2018-08-06 06:43:22 +02:00
|
|
|
ctx.ServerError("GetCurrentReview", err)
|
|
|
|
return
|
|
|
|
}
|
2022-05-07 07:35:12 +02:00
|
|
|
numPendingCodeComments := int64(0)
|
|
|
|
if currentReview != nil {
|
2023-09-29 14:12:54 +02:00
|
|
|
numPendingCodeComments, err = issues_model.CountComments(ctx, &issues_model.FindCommentsOptions{
|
2022-06-13 11:37:59 +02:00
|
|
|
Type: issues_model.CommentTypeCode,
|
2022-05-07 07:35:12 +02:00
|
|
|
ReviewID: currentReview.ID,
|
|
|
|
IssueID: issue.ID,
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("CountComments", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
ctx.Data["CurrentReview"] = currentReview
|
|
|
|
ctx.Data["PendingCodeCommentNumber"] = numPendingCodeComments
|
|
|
|
|
2019-12-16 07:20:25 +01:00
|
|
|
getBranchData(ctx, issue)
|
2022-03-22 08:03:22 +01:00
|
|
|
ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID)
|
2020-04-04 07:39:48 +02:00
|
|
|
ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
|
2021-06-15 03:12:33 +02:00
|
|
|
|
|
|
|
ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
|
2023-07-21 13:20:04 +02:00
|
|
|
// For files changed page
|
|
|
|
PrepareBranchList(ctx)
|
|
|
|
if ctx.Written() {
|
|
|
|
return
|
|
|
|
}
|
2021-06-15 03:12:33 +02:00
|
|
|
upload.AddUploadContext(ctx, "comment")
|
|
|
|
|
2021-04-05 17:30:52 +02:00
|
|
|
ctx.HTML(http.StatusOK, tplPullFiles)
|
2015-09-02 10:08:05 +02:00
|
|
|
}
|
2015-08-31 09:24:28 +02:00
|
|
|
|
2023-07-28 21:18:12 +02:00
|
|
|
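// The handlers below are thin wrappers around viewPullFiles for the different diff views:
// a single commit, an explicit commit range, a range starting from a commit, and the full PR diff.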
func ViewPullFilesForSingleCommit(ctx *context.Context) {
|
|
|
|
viewPullFiles(ctx, "", ctx.Params("sha"), true, true)
|
|
|
|
}
|
|
|
|
|
|
|
|
func ViewPullFilesForRange(ctx *context.Context) {
|
|
|
|
viewPullFiles(ctx, ctx.Params("shaFrom"), ctx.Params("shaTo"), true, false)
|
|
|
|
}
|
|
|
|
|
|
|
|
func ViewPullFilesStartingFromCommit(ctx *context.Context) {
|
|
|
|
viewPullFiles(ctx, "", ctx.Params("sha"), true, false)
|
|
|
|
}
|
|
|
|
|
|
|
|
func ViewPullFilesForAllCommitsOfPr(ctx *context.Context) {
|
|
|
|
viewPullFiles(ctx, "", "", false, false)
|
|
|
|
}
|
|
|
|
|
2020-08-04 22:55:22 +02:00
|
|
|
// UpdatePullRequest merges the PR's baseBranch into its headBranch
|
2020-01-17 07:03:40 +01:00
|
|
|
func UpdatePullRequest(ctx *context.Context) {
|
2023-08-07 05:43:18 +02:00
|
|
|
issue, ok := getPullInfo(ctx)
|
|
|
|
if !ok {
|
2020-01-17 07:03:40 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
if issue.IsClosed {
|
|
|
|
ctx.NotFound("MergePullRequest", nil)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
if issue.PullRequest.HasMerged {
|
|
|
|
ctx.NotFound("MergePullRequest", nil)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-08-31 16:03:45 +02:00
|
|
|
rebase := ctx.FormString("style") == "rebase"
|
|
|
|
|
2022-11-19 09:12:33 +01:00
|
|
|
if err := issue.PullRequest.LoadBaseRepo(ctx); err != nil {
|
2021-01-14 21:27:22 +01:00
|
|
|
ctx.ServerError("LoadBaseRepo", err)
|
2020-01-17 07:03:40 +01:00
|
|
|
return
|
|
|
|
}
|
2022-11-19 09:12:33 +01:00
|
|
|
if err := issue.PullRequest.LoadHeadRepo(ctx); err != nil {
|
2021-01-14 21:27:22 +01:00
|
|
|
ctx.ServerError("LoadHeadRepo", err)
|
2020-01-17 07:03:40 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-04-28 13:48:48 +02:00
|
|
|
allowedUpdateByMerge, allowedUpdateByRebase, err := pull_service.IsUserAllowedToUpdate(ctx, issue.PullRequest, ctx.Doer)
|
2020-01-17 07:03:40 +01:00
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("IsUserAllowedToMerge", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// ToDo: add check if maintainers are allowed to change branch ... (need migration & co)
|
2021-08-31 16:03:45 +02:00
|
|
|
if (!allowedUpdateByMerge && !rebase) || (rebase && !allowedUpdateByRebase) {
|
2020-01-17 07:03:40 +01:00
|
|
|
ctx.Flash.Error(ctx.Tr("repo.pulls.update_not_allowed"))
|
2021-11-16 19:18:25 +01:00
|
|
|
ctx.Redirect(issue.Link())
|
2020-01-17 07:03:40 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// default merge commit message
|
|
|
|
message := fmt.Sprintf("Merge branch '%s' into %s", issue.PullRequest.BaseBranch, issue.PullRequest.HeadBranch)
|
|
|
|
|
2022-03-22 08:03:22 +01:00
|
|
|
if err = pull_service.Update(ctx, issue.PullRequest, ctx.Doer, message, rebase); err != nil {
|
2020-01-17 07:03:40 +01:00
|
|
|
if models.IsErrMergeConflicts(err) {
|
|
|
|
conflictError := err.(models.ErrMergeConflicts)
|
2024-03-02 16:05:07 +01:00
|
|
|
flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
|
2020-10-21 01:50:10 +02:00
|
|
|
"Message": ctx.Tr("repo.pulls.merge_conflict"),
|
|
|
|
"Summary": ctx.Tr("repo.pulls.merge_conflict_summary"),
|
|
|
|
"Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "<br>" + utils.SanitizeFlashErrorString(conflictError.StdOut),
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("UpdatePullRequest.HTMLString", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
ctx.Flash.Error(flashError)
|
2021-11-16 19:18:25 +01:00
|
|
|
ctx.Redirect(issue.Link())
|
2020-01-17 07:03:40 +01:00
|
|
|
return
|
2021-09-05 11:30:40 +02:00
|
|
|
} else if models.IsErrRebaseConflicts(err) {
|
|
|
|
conflictError := err.(models.ErrRebaseConflicts)
|
2024-03-02 16:05:07 +01:00
|
|
|
flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
|
2021-09-05 11:30:40 +02:00
|
|
|
"Message": ctx.Tr("repo.pulls.rebase_conflict", utils.SanitizeFlashErrorString(conflictError.CommitSHA)),
|
|
|
|
"Summary": ctx.Tr("repo.pulls.rebase_conflict_summary"),
|
|
|
|
"Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "<br>" + utils.SanitizeFlashErrorString(conflictError.StdOut),
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("UpdatePullRequest.HTMLString", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
ctx.Flash.Error(flashError)
|
2021-11-16 19:18:25 +01:00
|
|
|
ctx.Redirect(issue.Link())
|
2021-09-05 11:30:40 +02:00
|
|
|
return
|
2020-01-17 07:03:40 +01:00
|
|
|
}
|
|
|
|
ctx.Flash.Error(err.Error())
|
2021-11-16 19:18:25 +01:00
|
|
|
ctx.Redirect(issue.Link())
|
2020-08-03 22:50:29 +02:00
|
|
|
return
|
2020-01-17 07:03:40 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
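// Note: brief pause before redirecting, presumably so the just-pushed branch update is visible on reload.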
time.Sleep(1 * time.Second)
|
|
|
|
|
|
|
|
ctx.Flash.Success(ctx.Tr("repo.pulls.update_branch_success"))
|
2021-11-16 19:18:25 +01:00
|
|
|
ctx.Redirect(issue.Link())
|
2020-01-17 07:03:40 +01:00
|
|
|
}
|
|
|
|
|
2016-11-24 08:04:31 +01:00
|
|
|
// MergePullRequest responds to a request to merge a pull request
|
2021-01-26 16:36:53 +01:00
|
|
|
func MergePullRequest(ctx *context.Context) {
|
2021-04-06 21:44:05 +02:00
|
|
|
form := web.GetForm(ctx).(*forms.MergePullRequestForm)
|
2023-08-07 05:43:18 +02:00
|
|
|
issue, ok := getPullInfo(ctx)
|
|
|
|
if !ok {
|
2015-09-02 15:26:56 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2018-12-13 16:55:43 +01:00
|
|
|
pr := issue.PullRequest
|
2022-03-31 16:53:08 +02:00
|
|
|
pr.Issue = issue
|
|
|
|
pr.Issue.Repo = ctx.Repo.Repository
|
2023-02-21 15:42:07 +01:00
|
|
|
|
|
|
|
manuallyMerged := repo_model.MergeStyle(form.Do) == repo_model.MergeStyleManuallyMerged
|
|
|
|
|
|
|
|
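// Pick the merge-check variant: scheduled auto merges and manually-merged PRs go through their own checks.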
mergeCheckType := pull_service.MergeCheckTypeGeneral
|
|
|
|
if form.MergeWhenChecksSucceed {
|
|
|
|
mergeCheckType = pull_service.MergeCheckTypeAuto
|
|
|
|
}
|
|
|
|
if manuallyMerged {
|
|
|
|
mergeCheckType = pull_service.MergeCheckTypeManually
|
|
|
|
}
|
2015-09-02 15:26:56 +02:00
|
|
|
|
2022-05-03 21:46:28 +02:00
|
|
|
// start the merge by checking whether the pull request is in a mergeable state
|
2024-05-08 18:11:43 +02:00
|
|
|
if err := pull_service.CheckPullMergeable(ctx, ctx.Doer, &ctx.Repo.Permission, pr, mergeCheckType, form.ForceMerge); err != nil {
|
2023-02-04 00:11:48 +01:00
|
|
|
switch {
|
|
|
|
case errors.Is(err, pull_service.ErrIsClosed):
|
2022-03-31 16:53:08 +02:00
|
|
|
if issue.IsPull {
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONError(ctx.Tr("repo.pulls.is_closed"))
|
2022-03-31 16:53:08 +02:00
|
|
|
} else {
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONError(ctx.Tr("repo.issues.closed_title"))
|
2022-03-31 16:53:08 +02:00
|
|
|
}
|
2023-02-04 00:11:48 +01:00
|
|
|
case errors.Is(err, pull_service.ErrUserNotAllowedToMerge):
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONError(ctx.Tr("repo.pulls.update_not_allowed"))
|
2023-02-04 00:11:48 +01:00
|
|
|
case errors.Is(err, pull_service.ErrHasMerged):
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONError(ctx.Tr("repo.pulls.has_merged"))
|
2023-02-04 00:11:48 +01:00
|
|
|
case errors.Is(err, pull_service.ErrIsWorkInProgress):
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONError(ctx.Tr("repo.pulls.no_merge_wip"))
|
2024-05-08 18:11:43 +02:00
|
|
|
case errors.Is(err, pull_service.ErrNotMergeableState):
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONError(ctx.Tr("repo.pulls.no_merge_not_ready"))
|
2023-02-04 00:11:48 +01:00
|
|
|
case models.IsErrDisallowedToMerge(err):
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONError(ctx.Tr("repo.pulls.no_merge_not_ready"))
|
2023-02-04 00:11:48 +01:00
|
|
|
case asymkey_service.IsErrWontSign(err):
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONError(err.Error()) // has no translation ...
|
2023-02-04 00:11:48 +01:00
|
|
|
case errors.Is(err, pull_service.ErrDependenciesLeft):
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONError(ctx.Tr("repo.issues.dependency.pr_close_blocked"))
|
2023-02-04 00:11:48 +01:00
|
|
|
default:
|
2022-03-31 16:53:08 +02:00
|
|
|
ctx.ServerError("WebCheck", err)
|
|
|
|
}
|
2023-02-04 00:11:48 +01:00
|
|
|
|
2020-01-27 11:26:53 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-03-04 04:41:23 +01:00
|
|
|
// handle manually-merged mark
|
2023-02-21 15:42:07 +01:00
|
|
|
if manuallyMerged {
|
2023-10-14 10:37:24 +02:00
|
|
|
if err := pull_service.MergedManually(ctx, pr, ctx.Doer, ctx.Repo.GitRepo, form.MergeCommitID); err != nil {
|
2023-02-04 00:11:48 +01:00
|
|
|
switch {
|
|
|
|
case models.IsErrInvalidMergeStyle(err):
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONError(ctx.Tr("repo.pulls.invalid_merge_option"))
|
2023-02-04 00:11:48 +01:00
|
|
|
case strings.Contains(err.Error(), "Wrong commit ID"):
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONError(ctx.Tr("repo.pulls.wrong_commit_id"))
|
2023-02-04 00:11:48 +01:00
|
|
|
default:
|
2022-05-03 21:46:28 +02:00
|
|
|
ctx.ServerError("MergedManually", err)
|
2021-03-04 04:41:23 +01:00
|
|
|
}
|
2024-01-14 23:00:47 +01:00
|
|
|
|
|
|
|
return
|
2021-03-04 04:41:23 +01:00
|
|
|
}
|
|
|
|
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONRedirect(issue.Link())
|
2021-03-04 04:41:23 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-05-08 14:32:45 +02:00
|
|
|
message := strings.TrimSpace(form.MergeTitleField)
|
|
|
|
if len(message) == 0 {
|
|
|
|
var err error
|
2022-12-29 13:40:20 +01:00
|
|
|
message, _, err = pull_service.GetDefaultMergeMessage(ctx, ctx.Repo.GitRepo, pr, repo_model.MergeStyle(form.Do))
|
2022-05-08 14:32:45 +02:00
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("GetDefaultMergeMessage", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
form.MergeMessageField = strings.TrimSpace(form.MergeMessageField)
|
|
|
|
if len(form.MergeMessageField) > 0 {
|
|
|
|
message += "\n\n" + form.MergeMessageField
|
2018-07-17 23:23:58 +02:00
|
|
|
}
|
|
|
|
|
2022-06-11 16:44:20 +02:00
|
|
|
if form.MergeWhenChecksSucceed {
|
|
|
|
// delete all scheduled auto merges
|
|
|
|
_ = pull_model.DeleteScheduledAutoMerge(ctx, pr.ID)
|
|
|
|
// schedule auto merge
|
2024-10-21 21:21:50 +02:00
|
|
|
scheduled, err := automerge.ScheduleAutoMerge(ctx, ctx.Doer, pr, repo_model.MergeStyle(form.Do), message, form.DeleteBranchAfterMerge)
|
2022-06-11 16:44:20 +02:00
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("ScheduleAutoMerge", err)
|
|
|
|
return
|
|
|
|
} else if scheduled {
|
|
|
|
// nothing more to do ...
|
|
|
|
ctx.Flash.Success(ctx.Tr("repo.pulls.auto_merge_newly_scheduled"))
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONRedirect(fmt.Sprintf("%s/pulls/%d", ctx.Repo.RepoLink, pr.Index))
|
2022-06-11 16:44:20 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-11-03 16:49:00 +01:00
|
|
|
if err := pull_service.Merge(ctx, pr, ctx.Doer, ctx.Repo.GitRepo, repo_model.MergeStyle(form.Do), form.HeadCommitID, message, false); err != nil {
|
2018-01-05 19:56:50 +01:00
|
|
|
if models.IsErrInvalidMergeStyle(err) {
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONError(ctx.Tr("repo.pulls.invalid_merge_option"))
|
2019-11-10 09:42:51 +01:00
|
|
|
} else if models.IsErrMergeConflicts(err) {
|
|
|
|
conflictError := err.(models.ErrMergeConflicts)
|
2024-03-02 16:05:07 +01:00
|
|
|
flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
|
2020-10-21 01:50:10 +02:00
|
|
|
"Message": ctx.Tr("repo.editor.merge_conflict"),
|
|
|
|
"Summary": ctx.Tr("repo.editor.merge_conflict_summary"),
|
|
|
|
"Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "<br>" + utils.SanitizeFlashErrorString(conflictError.StdOut),
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("MergePullRequest.HTMLString", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
ctx.Flash.Error(flashError)
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONRedirect(issue.Link())
|
2019-11-10 09:42:51 +01:00
|
|
|
} else if models.IsErrRebaseConflicts(err) {
|
|
|
|
conflictError := err.(models.ErrRebaseConflicts)
|
2024-03-02 16:05:07 +01:00
|
|
|
flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
|
2020-10-21 21:59:12 +02:00
|
|
|
"Message": ctx.Tr("repo.pulls.rebase_conflict", utils.SanitizeFlashErrorString(conflictError.CommitSHA)),
|
|
|
|
"Summary": ctx.Tr("repo.pulls.rebase_conflict_summary"),
|
2020-10-21 01:50:10 +02:00
|
|
|
"Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "<br>" + utils.SanitizeFlashErrorString(conflictError.StdOut),
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("MergePullRequest.HTMLString", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
ctx.Flash.Error(flashError)
|
2024-02-21 12:40:46 +01:00
|
|
|
ctx.JSONRedirect(issue.Link())
|
2019-11-10 09:42:51 +01:00
|
|
|
} else if models.IsErrMergeUnrelatedHistories(err) {
|
|
|
|
log.Debug("MergeUnrelatedHistories error: %v", err)
|
|
|
|
ctx.Flash.Error(ctx.Tr("repo.pulls.unrelated_histories"))
|
2024-02-21 12:40:46 +01:00
|
|
|
ctx.JSONRedirect(issue.Link())
|
2020-03-28 05:13:18 +01:00
|
|
|
} else if git.IsErrPushOutOfDate(err) {
|
2019-11-10 09:42:51 +01:00
|
|
|
log.Debug("MergePushOutOfDate error: %v", err)
|
|
|
|
ctx.Flash.Error(ctx.Tr("repo.pulls.merge_out_of_date"))
|
2024-02-21 12:40:46 +01:00
|
|
|
ctx.JSONRedirect(issue.Link())
|
2021-12-20 01:32:54 +01:00
|
|
|
} else if models.IsErrSHADoesNotMatch(err) {
|
|
|
|
log.Debug("MergeHeadOutOfDate error: %v", err)
|
|
|
|
ctx.Flash.Error(ctx.Tr("repo.pulls.head_out_of_date"))
|
2024-02-21 12:40:46 +01:00
|
|
|
ctx.JSONRedirect(issue.Link())
|
2020-03-28 05:13:18 +01:00
|
|
|
} else if git.IsErrPushRejected(err) {
|
2020-02-22 14:08:48 +01:00
|
|
|
log.Debug("MergePushRejected error: %v", err)
|
2020-03-28 05:13:18 +01:00
|
|
|
pushrejErr := err.(*git.ErrPushRejected)
|
2020-02-22 14:08:48 +01:00
|
|
|
message := pushrejErr.Message
|
|
|
|
if len(message) == 0 {
|
|
|
|
ctx.Flash.Error(ctx.Tr("repo.pulls.push_rejected_no_message"))
|
|
|
|
} else {
|
2024-03-02 16:05:07 +01:00
|
|
|
flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
|
2020-10-21 01:50:10 +02:00
|
|
|
"Message": ctx.Tr("repo.pulls.push_rejected"),
|
|
|
|
"Summary": ctx.Tr("repo.pulls.push_rejected_summary"),
|
|
|
|
"Details": utils.SanitizeFlashErrorString(pushrejErr.Message),
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("MergePullRequest.HTMLString", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
ctx.Flash.Error(flashError)
|
2020-02-22 14:08:48 +01:00
|
|
|
}
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONRedirect(issue.Link())
|
2022-05-03 21:46:28 +02:00
|
|
|
} else {
|
|
|
|
ctx.ServerError("Merge", err)
|
2018-01-05 19:56:50 +01:00
|
|
|
}
|
2015-09-02 15:26:56 +02:00
|
|
|
return
|
|
|
|
}
|
2022-05-03 21:46:28 +02:00
|
|
|
log.Trace("Pull request merged: %d", pr.ID)
|
2015-09-02 15:26:56 +02:00
|
|
|
|
2023-09-16 16:39:12 +02:00
|
|
|
if err := stopTimerIfAvailable(ctx, ctx.Doer, issue); err != nil {
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.ServerError("stopTimerIfAvailable", err)
|
2019-02-05 12:38:11 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2015-09-02 15:26:56 +02:00
|
|
|
log.Trace("Pull request merged: %d", pr.ID)
|
2021-07-13 01:26:25 +02:00
|
|
|
|
|
|
|
if form.DeleteBranchAfterMerge {
|
|
|
|
var headRepo *git.Repository
|
|
|
|
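// Reuse the already-open git repo when the head repo is the one in the current context; otherwise open it and close it when done.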
if ctx.Repo != nil && ctx.Repo.Repository != nil && pr.HeadRepoID == ctx.Repo.Repository.ID && ctx.Repo.GitRepo != nil {
|
|
|
|
headRepo = ctx.Repo.GitRepo
|
|
|
|
} else {
|
2024-10-23 00:48:46 +02:00
|
|
|
var err error
|
2024-01-27 21:09:51 +01:00
|
|
|
headRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
|
2021-07-13 01:26:25 +02:00
|
|
|
if err != nil {
|
2024-01-27 21:09:51 +01:00
|
|
|
ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.FullName()), err)
|
2021-07-13 01:26:25 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
defer headRepo.Close()
|
|
|
|
}
|
2024-10-23 00:48:46 +02:00
|
|
|
|
|
|
|
if err := repo_service.DeleteBranchAfterMerge(ctx, ctx.Doer, pr, headRepo); err != nil {
|
|
|
|
switch {
|
|
|
|
case errors.Is(err, repo_service.ErrBranchIsDefault):
|
|
|
|
ctx.Flash.Error(ctx.Tr("repo.pulls.delete_after_merge.head_branch.is_default"))
|
|
|
|
case errors.Is(err, git_model.ErrBranchIsProtected):
|
|
|
|
ctx.Flash.Error(ctx.Tr("repo.pulls.delete_after_merge.head_branch.is_protected"))
|
|
|
|
case errors.Is(err, util.ErrPermissionDenied):
|
|
|
|
ctx.Flash.Error(ctx.Tr("repo.pulls.delete_after_merge.head_branch.insufficient_branch"))
|
|
|
|
default:
|
|
|
|
ctx.ServerError("DeleteBranchAfterMerge", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
ctx.JSONRedirect(issue.Link())
|
|
|
|
return
|
|
|
|
}
|
2021-07-13 01:26:25 +02:00
|
|
|
}
|
|
|
|
|
2024-01-14 23:00:47 +01:00
|
|
|
ctx.JSONRedirect(issue.Link())
|
2022-06-11 16:44:20 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
// CancelAutoMergePullRequest cancels a scheduled auto merge for a pull request
|
|
|
|
func CancelAutoMergePullRequest(ctx *context.Context) {
|
2023-08-07 05:43:18 +02:00
|
|
|
issue, ok := getPullInfo(ctx)
|
|
|
|
if !ok {
|
2022-06-11 16:44:20 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := automerge.RemoveScheduledAutoMerge(ctx, ctx.Doer, issue.PullRequest); err != nil {
|
|
|
|
if db.IsErrNotExist(err) {
|
|
|
|
ctx.Flash.Error(ctx.Tr("repo.pulls.auto_merge_not_scheduled"))
|
|
|
|
ctx.Redirect(fmt.Sprintf("%s/pulls/%d", ctx.Repo.RepoLink, issue.Index))
|
|
|
|
return
|
|
|
|
}
|
|
|
|
ctx.ServerError("RemoveScheduledAutoMerge", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
ctx.Flash.Success(ctx.Tr("repo.pulls.auto_merge_canceled_schedule"))
|
|
|
|
ctx.Redirect(fmt.Sprintf("%s/pulls/%d", ctx.Repo.RepoLink, issue.Index))
|
2015-09-02 15:26:56 +02:00
|
|
|
}
|
|
|
|
|
2023-09-16 16:39:12 +02:00
|
|
|
func stopTimerIfAvailable(ctx *context.Context, user *user_model.User, issue *issues_model.Issue) error {
|
|
|
|
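// CreateOrStopIssueStopwatch toggles the stopwatch, so only call it when one is actually running.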
if issues_model.StopwatchExists(ctx, user.ID, issue.ID) {
|
|
|
|
if err := issues_model.CreateOrStopIssueStopwatch(ctx, user, issue); err != nil {
|
2019-02-05 12:38:11 +01:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2016-11-24 08:04:31 +01:00
|
|
|
// CompareAndPullRequestPost response for creating pull request
|
2021-01-26 16:36:53 +01:00
|
|
|
func CompareAndPullRequestPost(ctx *context.Context) {
|
2021-04-06 21:44:05 +02:00
|
|
|
form := web.GetForm(ctx).(*forms.CreateIssueForm)
|
2015-09-02 01:07:02 +02:00
|
|
|
ctx.Data["Title"] = ctx.Tr("repo.pulls.compare_changes")
|
|
|
|
ctx.Data["PageIsComparePull"] = true
|
|
|
|
ctx.Data["IsDiffCompare"] = true
|
2018-08-13 21:04:39 +02:00
|
|
|
ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
|
2020-10-05 07:49:33 +02:00
|
|
|
ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
|
|
|
|
upload.AddUploadContext(ctx, "comment")
|
2021-11-09 20:57:58 +01:00
|
|
|
ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWrite(unit.TypePullRequests)
|
2015-09-02 01:07:02 +02:00
|
|
|
|
|
|
|
var (
|
|
|
|
repo = ctx.Repo.Repository
|
|
|
|
attachments []string
|
|
|
|
)
|
|
|
|
|
2021-09-27 14:19:34 +02:00
|
|
|
ci := ParseCompareInfo(ctx)
|
2021-08-31 09:43:31 +02:00
|
|
|
defer func() {
|
2021-09-30 21:31:02 +02:00
|
|
|
if ci != nil && ci.HeadGitRepo != nil {
|
2021-09-27 14:19:34 +02:00
|
|
|
ci.HeadGitRepo.Close()
|
2021-08-31 09:43:31 +02:00
|
|
|
}
|
|
|
|
}()
|
2015-09-02 01:07:02 +02:00
|
|
|
if ctx.Written() {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2024-01-12 16:25:15 +01:00
|
|
|
labelIDs, assigneeIDs, milestoneID, projectID := ValidateRepoMetas(ctx, *form, true)
|
2015-09-02 01:07:02 +02:00
|
|
|
if ctx.Written() {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2020-08-18 06:23:45 +02:00
|
|
|
if setting.Attachment.Enabled {
|
2016-08-11 14:48:08 +02:00
|
|
|
attachments = form.Files
|
2015-09-02 01:07:02 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
if ctx.HasError() {
|
2023-06-19 10:25:36 +02:00
|
|
|
ctx.JSONError(ctx.GetErrMsg())
|
2015-09-02 01:07:02 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2019-01-21 12:45:32 +01:00
|
|
|
if util.IsEmptyString(form.Title) {
|
2023-06-19 10:25:36 +02:00
|
|
|
ctx.JSONError(ctx.Tr("repo.issues.new.title_empty"))
|
2019-01-21 12:45:32 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-09-02 09:58:49 +02:00
|
|
|
content := form.Content
|
|
|
|
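// If an issue template was used, render the submitted form fields to Markdown for the pull request body.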
if filename := ctx.Req.Form.Get("template-file"); filename != "" {
|
|
|
|
if template, err := issue_template.UnmarshalFromRepo(ctx.Repo.GitRepo, ctx.Repo.Repository.DefaultBranch, filename); err == nil {
|
|
|
|
content = issue_template.RenderToMarkdown(template, ctx.Req.Form)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-06-13 11:37:59 +02:00
|
|
|
pullIssue := &issues_model.Issue{
|
2015-09-02 01:07:02 +02:00
|
|
|
RepoID: repo.ID,
|
2021-11-16 19:18:25 +01:00
|
|
|
Repo: repo,
|
2016-08-14 12:32:24 +02:00
|
|
|
Title: form.Title,
|
2022-03-22 08:03:22 +01:00
|
|
|
PosterID: ctx.Doer.ID,
|
|
|
|
Poster: ctx.Doer,
|
2015-09-02 01:07:02 +02:00
|
|
|
MilestoneID: milestoneID,
|
|
|
|
IsPull: true,
|
2022-09-02 09:58:49 +02:00
|
|
|
Content: content,
|
2015-09-02 01:07:02 +02:00
|
|
|
}
|
2022-06-13 11:37:59 +02:00
|
|
|
pullRequest := &issues_model.PullRequest{
|
2022-04-28 17:45:33 +02:00
|
|
|
HeadRepoID: ci.HeadRepo.ID,
|
|
|
|
BaseRepoID: repo.ID,
|
|
|
|
HeadBranch: ci.HeadBranch,
|
|
|
|
BaseBranch: ci.BaseBranch,
|
|
|
|
HeadRepo: ci.HeadRepo,
|
|
|
|
BaseRepo: repo,
|
|
|
|
MergeBase: ci.CompareInfo.MergeBase,
|
2022-06-13 11:37:59 +02:00
|
|
|
Type: issues_model.PullRequestGitea,
|
2022-04-28 17:45:33 +02:00
|
|
|
AllowMaintainerEdit: form.AllowMaintainerEdit,
|
2016-02-24 13:56:54 +01:00
|
|
|
}
|
2016-08-15 23:04:44 +02:00
|
|
|
// FIXME: check the error for the case where two people send a pull request at almost the same time, and give a nice error prompt
|
|
|
|
// instead of 500.
|
2018-05-09 18:29:04 +02:00
|
|
|
|
2022-01-20 00:26:57 +01:00
|
|
|
if err := pull_service.NewPullRequest(ctx, repo, pullIssue, labelIDs, attachments, pullRequest, assigneeIDs); err != nil {
|
2024-08-16 19:04:54 +02:00
|
|
|
switch {
|
|
|
|
case errors.Is(err, user_model.ErrBlockedByUser):
|
2024-08-09 20:33:23 +02:00
|
|
|
ctx.JSONError(ctx.Tr("repo.pulls.blocked_by_user"))
|
2024-08-16 19:04:54 +02:00
|
|
|
case repo_model.IsErrUserDoesNotHaveAccessToRepo(err):
|
2021-04-05 17:30:52 +02:00
|
|
|
ctx.Error(http.StatusBadRequest, "UserDoesNotHaveAccessToRepo", err.Error())
|
2024-08-16 19:04:54 +02:00
|
|
|
case git.IsErrPushRejected(err):
|
2020-06-08 20:07:41 +02:00
|
|
|
pushrejErr := err.(*git.ErrPushRejected)
|
|
|
|
message := pushrejErr.Message
|
|
|
|
if len(message) == 0 {
|
2023-06-19 10:25:36 +02:00
|
|
|
ctx.JSONError(ctx.Tr("repo.pulls.push_rejected_no_message"))
|
|
|
|
return
|
2020-06-08 20:07:41 +02:00
|
|
|
}
|
2024-03-02 16:05:07 +01:00
|
|
|
flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
|
2023-06-19 10:25:36 +02:00
|
|
|
"Message": ctx.Tr("repo.pulls.push_rejected"),
|
|
|
|
"Summary": ctx.Tr("repo.pulls.push_rejected_summary"),
|
|
|
|
"Details": utils.SanitizeFlashErrorString(pushrejErr.Message),
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("CompareAndPullRequest.HTMLString", err)
|
|
|
|
return
|
|
|
|
}
|
2024-03-02 16:05:07 +01:00
|
|
|
ctx.JSONError(flashError)
|
2024-08-16 19:04:54 +02:00
|
|
|
default:
|
|
|
|
// It's an unexpected error.
|
|
|
|
// If it happens, we should add another case to handle it.
|
|
|
|
log.Error("Unexpected error of NewPullRequest: %T %s", err, err)
|
|
|
|
ctx.ServerError("CompareAndPullRequest", err)
|
2018-05-09 18:29:04 +02:00
|
|
|
}
|
2018-01-10 22:34:17 +01:00
|
|
|
ctx.ServerError("NewPullRequest", err)
|
2015-09-02 01:07:02 +02:00
|
|
|
return
|
2015-12-10 17:18:56 +01:00
|
|
|
}
|
|
|
|
|
2024-05-08 15:44:57 +02:00
|
|
|
if projectID > 0 && ctx.Repo.CanWrite(unit.TypeProjects) {
|
|
|
|
if err := issues_model.IssueAssignOrRemoveProject(ctx, pullIssue, ctx.Doer, projectID, 0); err != nil {
|
|
|
|
if !errors.Is(err, util.ErrPermissionDenied) {
|
|
|
|
ctx.ServerError("IssueAssignOrRemoveProject", err)
|
|
|
|
return
|
|
|
|
}
|
2024-01-12 16:25:15 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-02-24 13:56:54 +01:00
|
|
|
log.Trace("Pull request created: %d/%d", repo.ID, pullIssue.ID)
|
2023-06-19 10:25:36 +02:00
|
|
|
ctx.JSONRedirect(pullIssue.Link())
|
2014-03-24 11:25:15 +01:00
|
|
|
}
|
2015-10-24 09:36:47 +02:00
|
|
|
|
2017-06-21 03:00:03 +02:00
|
|
|
// CleanUpPullRequest responds to a request to delete the merged head branch once the PR has been merged
|
|
|
|
func CleanUpPullRequest(ctx *context.Context) {
|
2023-08-07 05:43:18 +02:00
|
|
|
issue, ok := getPullInfo(ctx)
|
|
|
|
if !ok {
|
2017-06-21 03:00:03 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2018-12-13 16:55:43 +01:00
|
|
|
pr := issue.PullRequest
|
2017-06-21 03:00:03 +02:00
|
|
|
|
2019-04-20 22:50:34 +02:00
|
|
|
// Don't clean up unmerged and unclosed PRs
|
|
|
|
if !pr.HasMerged && !issue.IsClosed {
|
2018-01-10 22:34:17 +01:00
|
|
|
ctx.NotFound("CleanUpPullRequest", nil)
|
2017-06-21 03:00:03 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-01-03 20:45:58 +01:00
|
|
|
// Don't clean up when there are other PRs that use this branch as their head branch.
|
2022-06-13 11:37:59 +02:00
|
|
|
exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(ctx, pr.HeadRepoID, pr.HeadBranch)
|
2022-01-03 20:45:58 +01:00
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("HasUnmergedPullRequestsByHeadInfo", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
if exist {
|
|
|
|
ctx.NotFound("CleanUpPullRequest", nil)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-11-19 09:12:33 +01:00
|
|
|
if err := pr.LoadHeadRepo(ctx); err != nil {
|
2020-03-02 23:31:55 +01:00
|
|
|
ctx.ServerError("LoadHeadRepo", err)
|
2017-06-21 03:00:03 +02:00
|
|
|
return
|
|
|
|
} else if pr.HeadRepo == nil {
|
|
|
|
// Forked repository has already been deleted
|
2018-01-10 22:34:17 +01:00
|
|
|
ctx.NotFound("CleanUpPullRequest", nil)
|
2017-06-21 03:00:03 +02:00
|
|
|
return
|
2022-11-19 09:12:33 +01:00
|
|
|
} else if err = pr.LoadBaseRepo(ctx); err != nil {
|
2020-03-02 23:31:55 +01:00
|
|
|
ctx.ServerError("LoadBaseRepo", err)
|
2017-06-21 03:00:03 +02:00
|
|
|
return
|
2023-02-18 13:11:03 +01:00
|
|
|
} else if err = pr.HeadRepo.LoadOwner(ctx); err != nil {
|
|
|
|
ctx.ServerError("HeadRepo.LoadOwner", err)
|
2017-06-21 03:00:03 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-05-11 12:09:36 +02:00
|
|
|
perm, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, ctx.Doer)
|
2018-11-28 12:26:14 +01:00
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("GetUserRepoPermission", err)
|
|
|
|
return
|
|
|
|
}
|
2021-11-09 20:57:58 +01:00
|
|
|
if !perm.CanWrite(unit.TypeCode) {
|
2018-01-10 22:34:17 +01:00
|
|
|
ctx.NotFound("CleanUpPullRequest", nil)
|
2017-06-21 03:00:03 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
fullBranchName := pr.HeadRepo.Owner.Name + "/" + pr.HeadBranch
|
|
|
|
|
2021-07-13 01:26:25 +02:00
|
|
|
var gitBaseRepo *git.Repository
|
|
|
|
|
|
|
|
// Assume that the base repo is the current context (almost certainly)
|
|
|
|
if ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == pr.BaseRepoID && ctx.Repo.GitRepo != nil {
|
|
|
|
gitBaseRepo = ctx.Repo.GitRepo
|
|
|
|
} else {
|
|
|
|
// If not, just open it
|
2024-01-27 21:09:51 +01:00
|
|
|
gitBaseRepo, err = gitrepo.OpenRepository(ctx, pr.BaseRepo)
|
2021-07-13 01:26:25 +02:00
|
|
|
if err != nil {
|
2024-01-27 21:09:51 +01:00
|
|
|
ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.BaseRepo.FullName()), err)
|
2021-07-13 01:26:25 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
defer gitBaseRepo.Close()
|
2017-06-21 03:00:03 +02:00
|
|
|
}
|
|
|
|
|
2021-07-13 01:26:25 +02:00
|
|
|
// Now assume that the head repo is the same as the base repo (reasonable chance)
|
|
|
|
gitRepo := gitBaseRepo
|
|
|
|
// But if not: is it the same as the context?
|
|
|
|
if pr.BaseRepoID != pr.HeadRepoID && ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == pr.HeadRepoID && ctx.Repo.GitRepo != nil {
|
|
|
|
gitRepo = ctx.Repo.GitRepo
|
|
|
|
} else if pr.BaseRepoID != pr.HeadRepoID {
|
|
|
|
// Otherwise just load it up
|
2024-01-27 21:09:51 +01:00
|
|
|
gitRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
|
2021-07-13 01:26:25 +02:00
|
|
|
if err != nil {
|
2024-01-27 21:09:51 +01:00
|
|
|
ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.FullName()), err)
|
2021-07-13 01:26:25 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
defer gitRepo.Close()
|
2017-06-21 03:00:03 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
defer func() {
|
2023-07-26 08:04:01 +02:00
|
|
|
ctx.JSONRedirect(issue.Link())
|
2017-06-21 03:00:03 +02:00
|
|
|
}()
|
|
|
|
|
|
|
|
// Check that the branch has no new commits
|
2018-01-30 13:29:39 +01:00
|
|
|
headCommitID, err := gitBaseRepo.GetRefCommitID(pr.GetGitRefName())
|
|
|
|
if err != nil {
|
2019-04-02 09:48:31 +02:00
|
|
|
log.Error("GetRefCommitID: %v", err)
|
2018-01-30 13:29:39 +01:00
|
|
|
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
|
|
|
|
return
|
|
|
|
}
|
|
|
|
branchCommitID, err := gitRepo.GetBranchCommitID(pr.HeadBranch)
|
|
|
|
if err != nil {
|
2019-04-02 09:48:31 +02:00
|
|
|
log.Error("GetBranchCommitID: %v", err)
|
2018-01-30 13:29:39 +01:00
|
|
|
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
|
|
|
|
return
|
|
|
|
}
|
|
|
|
if headCommitID != branchCommitID {
|
|
|
|
ctx.Flash.Error(ctx.Tr("repo.branch.delete_branch_has_new_commits", fullBranchName))
|
|
|
|
return
|
2017-06-21 03:00:03 +02:00
|
|
|
}
|
|
|
|
|
2021-07-13 01:26:25 +02:00
|
|
|
deleteBranch(ctx, pr, gitRepo)
|
|
|
|
}
|
|
|
|
|
2022-06-13 11:37:59 +02:00
|
|
|
func deleteBranch(ctx *context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository) {
|
2023-01-31 23:11:48 +01:00
|
|
|
fullBranchName := pr.HeadRepo.FullName() + ":" + pr.HeadBranch
|
2024-01-17 01:44:56 +01:00
|
|
|
|
|
|
|
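// Before deleting the branch, retarget open child PRs that were based on it.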
if err := pull_service.RetargetChildrenOnMerge(ctx, ctx.Doer, pr); err != nil {
|
|
|
|
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-02-28 23:17:51 +01:00
|
|
|
if err := repo_service.DeleteBranch(ctx, ctx.Doer, pr.HeadRepo, gitRepo, pr.HeadBranch); err != nil {
|
2021-06-07 16:52:59 +02:00
|
|
|
switch {
|
|
|
|
case git.IsErrBranchNotExist(err):
|
|
|
|
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
|
|
|
|
case errors.Is(err, repo_service.ErrBranchIsDefault):
|
|
|
|
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
|
2023-01-16 09:00:22 +01:00
|
|
|
case errors.Is(err, git_model.ErrBranchIsProtected):
|
2021-06-07 16:52:59 +02:00
|
|
|
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
|
|
|
|
default:
|
|
|
|
log.Error("DeleteBranch: %v", err)
|
|
|
|
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
|
|
|
|
}
|
2017-06-21 03:00:03 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-06-13 11:37:59 +02:00
|
|
|
if err := issues_model.AddDeletePRBranchComment(ctx, ctx.Doer, pr.BaseRepo, pr.IssueID, pr.HeadBranch); err != nil {
|
2017-06-21 03:00:03 +02:00
|
|
|
// Do not fail here as branch has already been deleted
|
2019-04-02 09:48:31 +02:00
|
|
|
log.Error("DeleteBranch: %v", err)
|
2017-06-21 03:00:03 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
ctx.Flash.Success(ctx.Tr("repo.branch.deletion_success", fullBranchName))
|
|
|
|
}
|
2018-01-05 11:56:52 +01:00
|
|
|
|
|
|
|
// DownloadPullDiff renders a pull request's raw diff
|
|
|
|
func DownloadPullDiff(ctx *context.Context) {
|
2019-12-13 23:21:06 +01:00
|
|
|
DownloadPullDiffOrPatch(ctx, false)
|
2018-01-05 11:56:52 +01:00
|
|
|
}
|
2018-01-07 14:10:20 +01:00
|
|
|
|
|
|
|
// DownloadPullPatch renders a pull request's raw patch
|
|
|
|
func DownloadPullPatch(ctx *context.Context) {
|
2019-12-13 23:21:06 +01:00
|
|
|
DownloadPullDiffOrPatch(ctx, true)
|
|
|
|
}
|
|
|
|
|
|
|
|
// DownloadPullDiffOrPatch renders a pull request's raw diff or patch
|
|
|
|
func DownloadPullDiffOrPatch(ctx *context.Context, patch bool) {
|
2022-06-13 11:37:59 +02:00
|
|
|
pr, err := issues_model.GetPullRequestByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
|
2018-01-07 14:10:20 +01:00
|
|
|
if err != nil {
|
2022-06-13 11:37:59 +02:00
|
|
|
if issues_model.IsErrPullRequestNotExist(err) {
|
2021-10-05 16:41:48 +02:00
|
|
|
ctx.NotFound("GetPullRequestByIndex", err)
|
2018-01-07 14:10:20 +01:00
|
|
|
} else {
|
2021-10-05 16:41:48 +02:00
|
|
|
ctx.ServerError("GetPullRequestByIndex", err)
|
2018-01-07 14:10:20 +01:00
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-09-27 23:09:49 +02:00
|
|
|
binary := ctx.FormBool("binary")
|
2018-01-07 14:10:20 +01:00
|
|
|
|
2022-01-20 00:26:57 +01:00
|
|
|
if err := pull_service.DownloadDiffOrPatch(ctx, pr, ctx, patch, binary); err != nil {
|
2019-12-13 23:21:06 +01:00
|
|
|
ctx.ServerError("DownloadDiffOrPatch", err)
|
2018-01-07 14:10:20 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
2019-12-16 07:20:25 +01:00
|
|
|
|
|
|
|
// UpdatePullRequestTarget changes the pull request's target branch
|
|
|
|
func UpdatePullRequestTarget(ctx *context.Context) {
|
|
|
|
issue := GetActionIssue(ctx)
|
|
|
|
if ctx.Written() {
|
|
|
|
return
|
|
|
|
}
|
2023-07-05 20:52:12 +02:00
|
|
|
pr := issue.PullRequest
|
2019-12-16 07:20:25 +01:00
|
|
|
if !issue.IsPull {
|
|
|
|
ctx.Error(http.StatusNotFound)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-03-22 08:03:22 +01:00
|
|
|
if !ctx.IsSigned || (!issue.IsPoster(ctx.Doer.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) {
|
2019-12-16 07:20:25 +01:00
|
|
|
ctx.Error(http.StatusForbidden)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-07-29 03:42:15 +02:00
|
|
|
targetBranch := ctx.FormTrim("target_branch")
|
2019-12-16 07:20:25 +01:00
|
|
|
if len(targetBranch) == 0 {
|
|
|
|
ctx.Error(http.StatusNoContent)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-03-22 08:03:22 +01:00
|
|
|
if err := pull_service.ChangeTargetBranch(ctx, pr, ctx.Doer, targetBranch); err != nil {
|
2022-06-13 11:37:59 +02:00
|
|
|
if issues_model.IsErrPullRequestAlreadyExists(err) {
|
|
|
|
err := err.(issues_model.ErrPullRequestAlreadyExists)
|
2019-12-16 07:20:25 +01:00
|
|
|
|
|
|
|
RepoRelPath := ctx.Repo.Owner.Name + "/" + ctx.Repo.Repository.Name
|
2022-07-12 23:32:37 +02:00
|
|
|
errorMessage := ctx.Tr("repo.pulls.has_pull_request", html.EscapeString(ctx.Repo.RepoLink+"/pulls/"+strconv.FormatInt(err.IssueID, 10)), html.EscapeString(RepoRelPath), err.IssueID) // FIXME: Creates url inside locale string
|
2019-12-16 07:20:25 +01:00
|
|
|
|
|
|
|
ctx.Flash.Error(errorMessage)
|
2023-07-04 20:36:08 +02:00
|
|
|
ctx.JSON(http.StatusConflict, map[string]any{
|
2019-12-16 07:20:25 +01:00
|
|
|
"error": err.Error(),
|
|
|
|
"user_error": errorMessage,
|
|
|
|
})
|
2022-06-13 11:37:59 +02:00
|
|
|
} else if issues_model.IsErrIssueIsClosed(err) {
|
2019-12-16 07:20:25 +01:00
|
|
|
errorMessage := ctx.Tr("repo.pulls.is_closed")
|
|
|
|
|
|
|
|
ctx.Flash.Error(errorMessage)
|
2023-07-04 20:36:08 +02:00
|
|
|
ctx.JSON(http.StatusConflict, map[string]any{
|
2019-12-16 07:20:25 +01:00
|
|
|
"error": err.Error(),
|
|
|
|
"user_error": errorMessage,
|
|
|
|
})
|
|
|
|
} else if models.IsErrPullRequestHasMerged(err) {
|
|
|
|
errorMessage := ctx.Tr("repo.pulls.has_merged")
|
|
|
|
|
|
|
|
ctx.Flash.Error(errorMessage)
|
2023-07-04 20:36:08 +02:00
|
|
|
ctx.JSON(http.StatusConflict, map[string]any{
|
2019-12-16 07:20:25 +01:00
|
|
|
"error": err.Error(),
|
|
|
|
"user_error": errorMessage,
|
|
|
|
})
|
2023-06-29 12:03:20 +02:00
|
|
|
} else if git_model.IsErrBranchesEqual(err) {
|
2019-12-16 07:20:25 +01:00
|
|
|
errorMessage := ctx.Tr("repo.pulls.nothing_to_compare")
|
|
|
|
|
|
|
|
ctx.Flash.Error(errorMessage)
|
2023-07-04 20:36:08 +02:00
|
|
|
ctx.JSON(http.StatusBadRequest, map[string]any{
|
2019-12-16 07:20:25 +01:00
|
|
|
"error": err.Error(),
|
|
|
|
"user_error": errorMessage,
|
|
|
|
})
|
|
|
|
} else {
|
|
|
|
ctx.ServerError("UpdatePullRequestTarget", err)
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
2023-09-05 20:37:47 +02:00
|
|
|
notify_service.PullRequestChangeTargetBranch(ctx, ctx.Doer, pr, targetBranch)
|
2019-12-16 07:20:25 +01:00
|
|
|
|
2023-07-04 20:36:08 +02:00
|
|
|
ctx.JSON(http.StatusOK, map[string]any{
|
2019-12-16 07:20:25 +01:00
|
|
|
"base_branch": pr.BaseBranch,
|
|
|
|
})
|
|
|
|
}
|
2022-04-28 17:45:33 +02:00
|
|
|
|
|
|
|
// SetAllowEdits sets whether maintainers are allowed to edit a pull request's head branch
|
|
|
|
func SetAllowEdits(ctx *context.Context) {
|
|
|
|
form := web.GetForm(ctx).(*forms.UpdateAllowEditsForm)
|
|
|
|
|
2022-06-13 11:37:59 +02:00
|
|
|
pr, err := issues_model.GetPullRequestByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
|
2022-04-28 17:45:33 +02:00
|
|
|
if err != nil {
|
2022-06-13 11:37:59 +02:00
|
|
|
if issues_model.IsErrPullRequestNotExist(err) {
|
2022-04-28 17:45:33 +02:00
|
|
|
ctx.NotFound("GetPullRequestByIndex", err)
|
|
|
|
} else {
|
|
|
|
ctx.ServerError("GetPullRequestByIndex", err)
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := pull_service.SetAllowEdits(ctx, ctx.Doer, pr, form.AllowMaintainerEdit); err != nil {
|
2024-08-14 11:43:42 +02:00
|
|
|
if errors.Is(err, pull_service.ErrUserHasNoPermissionForAction) {
|
2022-04-28 17:45:33 +02:00
|
|
|
ctx.Error(http.StatusForbidden)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
ctx.ServerError("SetAllowEdits", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-07-04 20:36:08 +02:00
|
|
|
ctx.JSON(http.StatusOK, map[string]any{
|
2022-04-28 17:45:33 +02:00
|
|
|
"allow_maintainer_edit": pr.AllowMaintainerEdit,
|
|
|
|
})
|
|
|
|
}
|