Add more checks in migration code (#21011)

When migrating, add several more important sanity checks:

* SHAs must be SHAs
* Refs must be valid refs
* URLs must be reasonable

Signed-off-by: Andrew Thornton <art27@cantab.net>
Co-authored-by: techknowlogick <matti@mdranta.net>

This commit is contained in:
parent 93a610a819
commit e6b3be4608
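The whole change boils down to a validate-and-blank rule: every SHA, ref and URL coming from the remote host is checked against a conservative pattern (or against the expected clone base URL) and simply emptied when it fails. Below is a minimal standalone sketch of that rule outside of Gitea; the regular expression is the one added to the git module further down, while PullHead and sanitizePullHead are illustrative names only, not part of the commit.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// shaPattern is the same pattern the commit adds to the git module.
var shaPattern = regexp.MustCompile(`^[0-9a-f]{4,40}$`)

// PullHead is an illustrative stand-in for migrated PR head data.
type PullHead struct {
	CloneURL string
	SHA      string
}

// sanitizePullHead blanks any field that fails its sanity check, mirroring
// the validate-and-blank approach of CheckAndEnsureSafePR below.
func sanitizePullHead(h *PullHead, baseURL string) {
	if !strings.HasSuffix(baseURL, "/") {
		baseURL += "/"
	}
	if h.CloneURL != "" && !strings.HasPrefix(h.CloneURL, baseURL) {
		h.CloneURL = "" // refuse fetching from a host other than the one being migrated
	}
	if h.SHA != "" && !shaPattern.MatchString(h.SHA) {
		h.SHA = "" // refuse values that are not plain hex object IDs
	}
}

func main() {
	h := PullHead{CloneURL: "https://evil.example/x.git", SHA: "not-a-sha"}
	sanitizePullHead(&h, "https://codebase.example/project")
	fmt.Printf("%+v\n", h) // both fields are blanked
}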
@@ -267,7 +267,7 @@ func (a *Action) GetRefLink() string {
 		return a.GetRepoLink() + "/src/branch/" + util.PathEscapeSegments(strings.TrimPrefix(a.RefName, git.BranchPrefix))
 	case strings.HasPrefix(a.RefName, git.TagPrefix):
 		return a.GetRepoLink() + "/src/tag/" + util.PathEscapeSegments(strings.TrimPrefix(a.RefName, git.TagPrefix))
-	case len(a.RefName) == 40 && git.SHAPattern.MatchString(a.RefName):
+	case len(a.RefName) == 40 && git.IsValidSHAPattern(a.RefName):
 		return a.GetRepoLink() + "/src/commit/" + a.RefName
 	default:
 		// FIXME: we will just assume it's a branch - this was the old way - at some point we may want to enforce that there is always a ref here.
@@ -4,7 +4,10 @@

 package git

-import "strings"
+import (
+	"regexp"
+	"strings"
+)

 const (
 	// RemotePrefix is the base directory of the remotes information of git.
@@ -15,6 +18,29 @@ const (
 	pullLen = len(PullPrefix)
 )

+// refNamePatternInvalid is regular expression with unallowed characters in git reference name
+// They cannot have ASCII control characters (i.e. bytes whose values are lower than \040, or \177 DEL), space, tilde ~, caret ^, or colon : anywhere.
+// They cannot have question-mark ?, asterisk *, or open bracket [ anywhere
+var refNamePatternInvalid = regexp.MustCompile(
+	`[\000-\037\177 \\~^:?*[]|` + // No absolutely invalid characters
+		`(?:^[/.])|` + // Not HasPrefix("/") or "."
+		`(?:/\.)|` + // no "/."
+		`(?:\.lock$)|(?:\.lock/)|` + // No ".lock/"" or ".lock" at the end
+		`(?:\.\.)|` + // no ".." anywhere
+		`(?://)|` + // no "//" anywhere
+		`(?:@{)|` + // no "@{"
+		`(?:[/.]$)|` + // no terminal '/' or '.'
+		`(?:^@$)`) // Not "@"
+
+// IsValidRefPattern ensures that the provided string could be a valid reference
+func IsValidRefPattern(name string) bool {
+	return !refNamePatternInvalid.MatchString(name)
+}
+
+func SanitizeRefPattern(name string) string {
+	return refNamePatternInvalid.ReplaceAllString(name, "_")
+}
+
 // Reference represents a Git ref.
 type Reference struct {
 	Name string
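A quick way to see what the new pattern accepts and what SanitizeRefPattern does with the rest; the regex is copied from the hunk above, the lower-case helpers and the sample names are illustrative. Note that a leading "-" is not rejected here, which is why the fetch arguments built later in this commit are additionally guarded.

package main

import (
	"fmt"
	"regexp"
)

// Copy of the pattern added above, so its behaviour can be tried in isolation.
var refNamePatternInvalid = regexp.MustCompile(
	`[\000-\037\177 \\~^:?*[]|` +
		`(?:^[/.])|(?:/\.)|(?:\.lock$)|(?:\.lock/)|(?:\.\.)|(?://)|(?:@{)|(?:[/.]$)|(?:^@$)`)

func isValidRefPattern(name string) bool    { return !refNamePatternInvalid.MatchString(name) }
func sanitizeRefPattern(name string) string { return refNamePatternInvalid.ReplaceAllString(name, "_") }

func main() {
	for _, name := range []string{
		"feature/login",      // valid branch-style name
		"refs/heads/main",    // valid full ref
		"bad..name",          // ".." is rejected, sanitized to "bad_name"
		"-push-option-trick", // a leading "-" survives the pattern, hence the later fetch-arg guard
	} {
		fmt.Printf("%-22s valid=%-5v sanitized=%s\n", name, isValidRefPattern(name), sanitizeRefPattern(name))
	}
}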
@@ -138,7 +138,7 @@ func (repo *Repository) getCommitFromBatchReader(rd *bufio.Reader, id SHA1) (*Co

 // ConvertToSHA1 returns a Hash object from a potential ID string
 func (repo *Repository) ConvertToSHA1(commitID string) (SHA1, error) {
-	if len(commitID) == 40 && SHAPattern.MatchString(commitID) {
+	if len(commitID) == 40 && IsValidSHAPattern(commitID) {
 		sha1, err := NewIDFromString(commitID)
 		if err == nil {
 			return sha1, nil
@@ -19,7 +19,12 @@ const EmptySHA = "0000000000000000000000000000000000000000"
 const EmptyTreeSHA = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"

 // SHAPattern can be used to determine if a string is an valid sha
-var SHAPattern = regexp.MustCompile(`^[0-9a-f]{4,40}$`)
+var shaPattern = regexp.MustCompile(`^[0-9a-f]{4,40}$`)
+
+// IsValidSHAPattern will check if the provided string matches the SHA Pattern
+func IsValidSHAPattern(sha string) bool {
+	return shaPattern.MatchString(sha)
+}

 // MustID always creates a new SHA1 from a [20]byte array with no validation of input.
 func MustID(b []byte) SHA1 {
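The pattern alone accepts 4 to 40 lowercase hex characters, so abbreviated object IDs also pass; callers that need a full SHA, such as ConvertToSHA1 above, additionally check len == 40. A standalone check with the regex copied from the hunk; the sample values are arbitrary.

package main

import (
	"fmt"
	"regexp"
)

// Same pattern as the unexported shaPattern above.
var shaPattern = regexp.MustCompile(`^[0-9a-f]{4,40}$`)

func main() {
	for _, s := range []string{
		"4b825dc642cb6eb9a060e54bf8d69288fbee4904", // full 40-char SHA: accepted
		"4b825dc",    // abbreviated SHA: accepted by the pattern alone
		"4B825DC",    // uppercase hex: rejected
		"v1.17.0",    // tag name: rejected, it is not a SHA
		"`rm -rf /`", // anything shell-like: rejected
	} {
		fmt.Printf("%-44q full=%-5v pattern=%v\n", s, len(s) == 40 && shaPattern.MatchString(s), shaPattern.MatchString(s))
	}
}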
@@ -26,7 +26,7 @@ type PullRequest struct {
 	Updated time.Time
 	Closed *time.Time
 	Labels []*Label
-	PatchURL string `yaml:"patch_url"`
+	PatchURL string `yaml:"patch_url"` // SECURITY: This must be safe to download directly from
 	Merged bool
 	MergedTime *time.Time `yaml:"merged_time"`
 	MergeCommitSHA string `yaml:"merge_commit_sha"`
@@ -37,6 +37,7 @@ type PullRequest struct {
 	Reactions []*Reaction
 	ForeignIndex int64
 	Context DownloaderContext `yaml:"-"`
+	EnsuredSafe bool `yaml:"ensured_safe"`
 }

 func (p *PullRequest) GetLocalIndex() int64 { return p.Number }
@@ -55,9 +56,9 @@ func (p PullRequest) GetGitRefName() string {

 // PullRequestBranch represents a pull request branch
 type PullRequestBranch struct {
-	CloneURL string `yaml:"clone_url"`
-	Ref string
-	SHA string
+	CloneURL string `yaml:"clone_url"` // SECURITY: This must be safe to download from
+	Ref string // SECURITY: this must be a git.IsValidRefPattern
+	SHA string // SECURITY: this must be a git.IsValidSHAPattern
 	RepoName string `yaml:"repo_name"`
 	OwnerName string `yaml:"owner_name"`
 }
@@ -18,15 +18,16 @@ type ReleaseAsset struct {
 	DownloadCount *int `yaml:"download_count"`
 	Created time.Time
 	Updated time.Time
-	DownloadURL *string `yaml:"download_url"`
+	DownloadURL *string `yaml:"download_url"` // SECURITY: It is the responsibility of downloader to make sure this is safe
 	// if DownloadURL is nil, the function should be invoked
-	DownloadFunc func() (io.ReadCloser, error) `yaml:"-"`
+	DownloadFunc func() (io.ReadCloser, error) `yaml:"-"` // SECURITY: It is the responsibility of downloader to make sure this is safe
 }

 // Release represents a release
 type Release struct {
-	TagName string `yaml:"tag_name"`
-	TargetCommitish string `yaml:"target_commitish"`
+	TagName string `yaml:"tag_name"` // SECURITY: This must pass git.IsValidRefPattern
+	TargetCommitish string `yaml:"target_commitish"` // SECURITY: This must pass git.IsValidRefPattern
 	Name string
 	Body string
 	Draft bool
@@ -12,7 +12,7 @@ type Repository struct {
 	IsPrivate bool `yaml:"is_private"`
 	IsMirror bool `yaml:"is_mirror"`
 	Description string
-	CloneURL string `yaml:"clone_url"`
+	CloneURL string `yaml:"clone_url"` // SECURITY: This must be checked to ensure that is safe to be used
 	OriginalURL string `yaml:"original_url"`
 	DefaultBranch string
 }
@@ -9,6 +9,8 @@ import (
 	"regexp"
 	"strings"

+	"code.gitea.io/gitea/modules/git"
+
 	"gitea.com/go-chi/binding"
 	"github.com/gobwas/glob"
 )
@@ -24,30 +26,6 @@ const (
 	ErrRegexPattern = "RegexPattern"
 )

-// GitRefNamePatternInvalid is regular expression with unallowed characters in git reference name
-// They cannot have ASCII control characters (i.e. bytes whose values are lower than \040, or \177 DEL), space, tilde ~, caret ^, or colon : anywhere.
-// They cannot have question-mark ?, asterisk *, or open bracket [ anywhere
-var GitRefNamePatternInvalid = regexp.MustCompile(`[\000-\037\177 \\~^:?*[]+`)
-
-// CheckGitRefAdditionalRulesValid check name is valid on additional rules
-func CheckGitRefAdditionalRulesValid(name string) bool {
-	// Additional rules as described at https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html
-	if strings.HasPrefix(name, "/") || strings.HasSuffix(name, "/") ||
-		strings.HasSuffix(name, ".") || strings.Contains(name, "..") ||
-		strings.Contains(name, "//") || strings.Contains(name, "@{") ||
-		name == "@" {
-		return false
-	}
-	parts := strings.Split(name, "/")
-	for _, part := range parts {
-		if strings.HasSuffix(part, ".lock") || strings.HasPrefix(part, ".") {
-			return false
-		}
-	}
-
-	return true
-}
-
 // AddBindingRules adds additional binding rules
 func AddBindingRules() {
 	addGitRefNameBindingRule()
@@ -67,16 +45,10 @@ func addGitRefNameBindingRule() {
 		IsValid: func(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) {
 			str := fmt.Sprintf("%v", val)

-			if GitRefNamePatternInvalid.MatchString(str) {
+			if !git.IsValidRefPattern(str) {
 				errs.Add([]string{name}, ErrGitRefName, "GitRefName")
 				return false, errs
 			}

-			if !CheckGitRefAdditionalRulesValid(str) {
-				errs.Add([]string{name}, ErrGitRefName, "GitRefName")
-				return false, errs
-			}
-
 			return true, errs
 		},
 	})
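The binding rule previously combined a character blacklist with CheckGitRefAdditionalRulesValid; both are now folded into the single git.IsValidRefPattern call. A hedged side-by-side of the removed two-step check and the new pattern, both copied from the hunks above; the sample names are arbitrary and the two checks agree on all of them.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// New single pattern (from the git module hunk above).
var refNamePatternInvalid = regexp.MustCompile(
	`[\000-\037\177 \\~^:?*[]|` +
		`(?:^[/.])|(?:/\.)|(?:\.lock$)|(?:\.lock/)|(?:\.\.)|(?://)|(?:@{)|(?:[/.]$)|(?:^@$)`)

func isValidRefPattern(name string) bool { return !refNamePatternInvalid.MatchString(name) }

// Old two-step check, as removed from modules/validation above.
var gitRefNamePatternInvalid = regexp.MustCompile(`[\000-\037\177 \\~^:?*[]+`)

func oldCheck(name string) bool {
	if gitRefNamePatternInvalid.MatchString(name) {
		return false
	}
	if strings.HasPrefix(name, "/") || strings.HasSuffix(name, "/") ||
		strings.HasSuffix(name, ".") || strings.Contains(name, "..") ||
		strings.Contains(name, "//") || strings.Contains(name, "@{") ||
		name == "@" {
		return false
	}
	for _, part := range strings.Split(name, "/") {
		if strings.HasSuffix(part, ".lock") || strings.HasPrefix(part, ".") {
			return false
		}
	}
	return true
}

func main() {
	for _, name := range []string{"main", "feature/x", "a..b", "x.lock", "he llo", "@", "refs/heads/ok"} {
		fmt.Printf("%-14s old=%-5v new=%v\n", name, oldCheck(name), isValidRefPattern(name))
	}
}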
@@ -17,7 +17,6 @@ import (
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/setting"
 	api "code.gitea.io/gitea/modules/structs"
-	"code.gitea.io/gitea/modules/validation"
 	"code.gitea.io/gitea/routers/api/v1/utils"
 )

@@ -53,7 +52,7 @@ func GetSingleCommit(ctx *context.APIContext) {
 	// "$ref": "#/responses/notFound"

 	sha := ctx.Params(":sha")
-	if (validation.GitRefNamePatternInvalid.MatchString(sha) || !validation.CheckGitRefAdditionalRulesValid(sha)) && !git.SHAPattern.MatchString(sha) {
+	if !git.IsValidRefPattern(sha) {
 		ctx.Error(http.StatusUnprocessableEntity, "no valid ref or sha", fmt.Sprintf("no valid ref or sha: %s", sha))
 		return
 	}
@@ -12,7 +12,6 @@ import (
 	"code.gitea.io/gitea/modules/convert"
 	"code.gitea.io/gitea/modules/git"
 	api "code.gitea.io/gitea/modules/structs"
-	"code.gitea.io/gitea/modules/validation"
 )

 // GetNote Get a note corresponding to a single commit from a repository
@@ -47,7 +46,7 @@ func GetNote(ctx *context.APIContext) {
 	// "$ref": "#/responses/notFound"

 	sha := ctx.Params(":sha")
-	if (validation.GitRefNamePatternInvalid.MatchString(sha) || !validation.CheckGitRefAdditionalRulesValid(sha)) && !git.SHAPattern.MatchString(sha) {
+	if !git.IsValidRefPattern(sha) {
 		ctx.Error(http.StatusUnprocessableEntity, "no valid ref or sha", fmt.Sprintf("no valid ref or sha: %s", sha))
 		return
 	}
@@ -107,9 +107,24 @@ func NewCodebaseDownloader(ctx context.Context, projectURL *url.URL, project, re
 		commitMap: make(map[string]string),
 	}

+	log.Trace("Create Codebase downloader. BaseURL: %s Project: %s RepoName: %s", baseURL, project, repoName)
 	return downloader
 }

+// String implements Stringer
+func (d *CodebaseDownloader) String() string {
+	return fmt.Sprintf("migration from codebase server %s %s/%s", d.baseURL, d.project, d.repoName)
+}
+
+// ColorFormat provides a basic color format for a GogsDownloader
+func (d *CodebaseDownloader) ColorFormat(s fmt.State) {
+	if d == nil {
+		log.ColorFprintf(s, "<nil: CodebaseDownloader>")
+		return
+	}
+	log.ColorFprintf(s, "migration from codebase server %s %s/%s", d.baseURL, d.project, d.repoName)
+}
+
 // FormatCloneURL add authentication into remote URLs
 func (d *CodebaseDownloader) FormatCloneURL(opts base.MigrateOptions, remoteAddr string) (string, error) {
 	return opts.CloneAddr, nil
@@ -451,8 +466,8 @@ func (d *CodebaseDownloader) GetPullRequests(page, perPage int) ([]*base.PullReq
 		Value int64 `xml:",chardata"`
 		Type string `xml:"type,attr"`
 	} `xml:"id"`
-	SourceRef string `xml:"source-ref"`
-	TargetRef string `xml:"target-ref"`
+	SourceRef string `xml:"source-ref"` // NOTE: from the documentation these are actually just branches NOT full refs
+	TargetRef string `xml:"target-ref"` // NOTE: from the documentation these are actually just branches NOT full refs
 	Subject string `xml:"subject"`
 	Status string `xml:"status"`
 	UserID struct {
@@ -564,6 +579,9 @@ func (d *CodebaseDownloader) GetPullRequests(page, perPage int) ([]*base.PullReq
 				Comments: comments[1:],
 			},
 		})
+
+		// SECURITY: Ensure that the PR is safe
+		_ = CheckAndEnsureSafePR(pullRequests[len(pullRequests)-1], d.baseURL.String(), d)
 	}

 	return pullRequests, true, nil
services/migrations/common.go (new file, 82 lines)
@@ -0,0 +1,82 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
+
+package migrations
+
+import (
+	"fmt"
+	"strings"
+
+	admin_model "code.gitea.io/gitea/models/admin"
+	"code.gitea.io/gitea/modules/git"
+	"code.gitea.io/gitea/modules/log"
+	base "code.gitea.io/gitea/modules/migration"
+)
+
+// WarnAndNotice will log the provided message and send a repository notice
+func WarnAndNotice(fmtStr string, args ...interface{}) {
+	log.Warn(fmtStr, args...)
+	if err := admin_model.CreateRepositoryNotice(fmt.Sprintf(fmtStr, args...)); err != nil {
+		log.Error("create repository notice failed: ", err)
+	}
+}
+
+func hasBaseURL(toCheck, baseURL string) bool {
+	if len(baseURL) > 0 && baseURL[len(baseURL)-1] != '/' {
+		baseURL += "/"
+	}
+	return strings.HasPrefix(toCheck, baseURL)
+}
+
+// CheckAndEnsureSafePR will check that a given PR is safe to download
+func CheckAndEnsureSafePR(pr *base.PullRequest, commonCloneBaseURL string, g base.Downloader) bool {
+	valid := true
+	// SECURITY: the patchURL must be checked to have the same baseURL as the current to prevent open redirect
+	if pr.PatchURL != "" && !hasBaseURL(pr.PatchURL, commonCloneBaseURL) {
+		// TODO: Should we check that this url has the expected format for a patch url?
+		WarnAndNotice("PR #%d in %s has invalid PatchURL: %s baseURL: %s", pr.Number, g, pr.PatchURL, commonCloneBaseURL)
+		pr.PatchURL = ""
+		valid = false
+	}
+
+	// SECURITY: the headCloneURL must be checked to have the same baseURL as the current to prevent open redirect
+	if pr.Head.CloneURL != "" && !hasBaseURL(pr.Head.CloneURL, commonCloneBaseURL) {
+		// TODO: Should we check that this url has the expected format for a patch url?
+		WarnAndNotice("PR #%d in %s has invalid HeadCloneURL: %s baseURL: %s", pr.Number, g, pr.Head.CloneURL, commonCloneBaseURL)
+		pr.Head.CloneURL = ""
+		valid = false
+	}
+
+	// SECURITY: SHAs Must be a SHA
+	if pr.MergeCommitSHA != "" && !git.IsValidSHAPattern(pr.MergeCommitSHA) {
+		WarnAndNotice("PR #%d in %s has invalid MergeCommitSHA: %s", pr.Number, g, pr.MergeCommitSHA)
+		pr.MergeCommitSHA = ""
+	}
+	if pr.Head.SHA != "" && !git.IsValidSHAPattern(pr.Head.SHA) {
+		WarnAndNotice("PR #%d in %s has invalid HeadSHA: %s", pr.Number, g, pr.Head.SHA)
+		pr.Head.SHA = ""
+		valid = false
+	}
+	if pr.Base.SHA != "" && !git.IsValidSHAPattern(pr.Base.SHA) {
+		WarnAndNotice("PR #%d in %s has invalid BaseSHA: %s", pr.Number, g, pr.Base.SHA)
+		pr.Base.SHA = ""
+		valid = false
+	}
+
+	// SECURITY: Refs must be valid refs or SHAs
+	if pr.Head.Ref != "" && !git.IsValidRefPattern(pr.Head.Ref) {
+		WarnAndNotice("PR #%d in %s has invalid HeadRef: %s", pr.Number, g, pr.Head.Ref)
+		pr.Head.Ref = ""
+		valid = false
+	}
+	if pr.Base.Ref != "" && !git.IsValidRefPattern(pr.Base.Ref) {
+		WarnAndNotice("PR #%d in %s has invalid BaseRef: %s", pr.Number, g, pr.Base.Ref)
+		pr.Base.Ref = ""
+		valid = false
+	}
+
+	pr.EnsuredSafe = true
+
+	return valid
+}
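hasBaseURL is the guard behind the open-redirect SECURITY comments: a PatchURL or head CloneURL must share the base URL of the server being migrated from. A standalone copy of the function with illustrative URLs (the host names are only examples):

package main

import (
	"fmt"
	"strings"
)

// Copy of hasBaseURL from common.go above.
func hasBaseURL(toCheck, baseURL string) bool {
	if len(baseURL) > 0 && baseURL[len(baseURL)-1] != '/' {
		baseURL += "/"
	}
	return strings.HasPrefix(toCheck, baseURL)
}

func main() {
	base := "https://try.gitea.io"
	fmt.Println(hasBaseURL("https://try.gitea.io/owner/repo.git", base))     // true: same server
	fmt.Println(hasBaseURL("https://try.gitea.io.evil.example/x.git", base)) // false: the appended "/" stops prefix spoofing
	fmt.Println(hasBaseURL("https://elsewhere.example/x.git", base))         // false: different host entirely
}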
@@ -12,7 +12,6 @@ import (
 	"net/http"
 	"net/url"
 	"os"
-	"path"
 	"path/filepath"
 	"strconv"
 	"strings"
@@ -26,6 +25,8 @@ import (
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/structs"

+	"github.com/google/uuid"
+
 	"gopkg.in/yaml.v2"
 )

@@ -47,7 +48,7 @@ type RepositoryDumper struct {
 	reviewFiles map[int64]*os.File

 	gitRepo *git.Repository
-	prHeadCache map[string]struct{}
+	prHeadCache map[string]string
 }

 // NewRepositoryDumper creates an gitea Uploader
@@ -62,7 +63,7 @@ func NewRepositoryDumper(ctx context.Context, baseDir, repoOwner, repoName strin
 		baseDir: baseDir,
 		repoOwner: repoOwner,
 		repoName: repoName,
-		prHeadCache: make(map[string]struct{}),
+		prHeadCache: make(map[string]string),
 		commentFiles: make(map[int64]*os.File),
 		reviewFiles: make(map[int64]*os.File),
 	}, nil
@@ -296,8 +297,10 @@ func (g *RepositoryDumper) CreateReleases(releases ...*base.Release) error {
 		}
 		for _, asset := range release.Assets {
 			attachLocalPath := filepath.Join(attachDir, asset.Name)
-			// download attachment
+
+			// SECURITY: We cannot check the DownloadURL and DownloadFunc are safe here
+			// ... we must assume that they are safe and simply download the attachment
+			// download attachment
 			err := func(attachPath string) error {
 				var rc io.ReadCloser
 				var err error
@@ -317,7 +320,7 @@ func (g *RepositoryDumper) CreateReleases(releases ...*base.Release) error {

 				fw, err := os.Create(attachPath)
 				if err != nil {
-					return fmt.Errorf("Create: %v", err)
+					return fmt.Errorf("create: %w", err)
 				}
 				defer fw.Close()

@@ -385,9 +388,18 @@ func (g *RepositoryDumper) createItems(dir string, itemFiles map[int64]*os.File,
 	}

 	for number, items := range itemsMap {
-		var err error
+		if err := g.encodeItems(number, items, dir, itemFiles); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (g *RepositoryDumper) encodeItems(number int64, items []interface{}, dir string, itemFiles map[int64]*os.File) error {
 	itemFile := itemFiles[number]
 	if itemFile == nil {
+		var err error
 		itemFile, err = os.Create(filepath.Join(dir, fmt.Sprintf("%d.yml", number)))
 		if err != nil {
 			return err
@@ -395,17 +407,10 @@ func (g *RepositoryDumper) createItems(dir string, itemFiles map[int64]*os.File,
 		itemFiles[number] = itemFile
 	}

-	bs, err := yaml.Marshal(items)
-	if err != nil {
-		return err
-	}
-
-	if _, err := itemFile.Write(bs); err != nil {
-		return err
-	}
-	}
-
-	return nil
+	encoder := yaml.NewEncoder(itemFile)
+	defer encoder.Close()
+
+	return encoder.Encode(items)
 }

 // CreateComments creates comments of issues
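createItems now keeps one YAML encoder per file instead of marshalling each batch and appending the raw bytes. With gopkg.in/yaml.v2, each Encode call on the same encoder emits another document into the stream, so successive batches land in one file separated by "---". A minimal sketch writing to a buffer instead of the dumper's files; the comment type is illustrative:

package main

import (
	"fmt"
	"strings"

	"gopkg.in/yaml.v2"
)

type comment struct {
	Index   int64  `yaml:"index"`
	Content string `yaml:"content"`
}

func main() {
	var sb strings.Builder
	enc := yaml.NewEncoder(&sb)
	defer enc.Close()

	// Two Encode calls on the same encoder produce two YAML documents
	// separated by "---", which is how successive batches end up in one file.
	_ = enc.Encode([]comment{{Index: 1, Content: "first batch"}})
	_ = enc.Encode([]comment{{Index: 2, Content: "second batch"}})

	fmt.Print(sb.String())
}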
@@ -418,16 +423,30 @@ func (g *RepositoryDumper) CreateComments(comments ...*base.Comment) error {
 	return g.createItems(g.commentDir(), g.commentFiles, commentsMap)
 }

-// CreatePullRequests creates pull requests
-func (g *RepositoryDumper) CreatePullRequests(prs ...*base.PullRequest) error {
-	for _, pr := range prs {
-		// download patch file
+func (g *RepositoryDumper) handlePullRequest(pr *base.PullRequest) error {
+	// SECURITY: this pr must have been ensured safe
+	if !pr.EnsuredSafe {
+		log.Error("PR #%d in %s/%s has not been checked for safety ... We will ignore this.", pr.Number, g.repoOwner, g.repoName)
+		return fmt.Errorf("unsafe PR #%d", pr.Number)
+	}
+
+	// First we download the patch file
 	err := func() error {
+		// if the patchURL is empty there is nothing to download
+		if pr.PatchURL == "" {
+			return nil
+		}
+
+		// SECURITY: We will assume that the pr.PatchURL has been checked
+		// pr.PatchURL maybe a local file - but note EnsureSafe should be asserting that this safe
 		u, err := g.setURLToken(pr.PatchURL)
 		if err != nil {
 			return err
 		}
-		resp, err := http.Get(u)
+
+		// SECURITY: We will assume that the pr.PatchURL has been checked
+		// pr.PatchURL maybe a local file - but note EnsureSafe should be asserting that this safe
+		resp, err := http.Get(u) // TODO: This probably needs to use the downloader as there may be rate limiting issues here
 		if err != nil {
 			return err
 		}
@@ -442,6 +461,8 @@ func (g *RepositoryDumper) CreatePullRequests(prs ...*base.PullRequest) error {
 			return err
 		}
 		defer f.Close()
+
+		// TODO: Should there be limits on the size of this file?
 		if _, err = io.Copy(f, resp.Body); err != nil {
 			return err
 		}
@@ -450,70 +471,127 @@ func (g *RepositoryDumper) CreatePullRequests(prs ...*base.PullRequest) error {
 		return nil
 	}()
 	if err != nil {
+		log.Error("PR #%d in %s/%s unable to download patch: %v", pr.Number, g.repoOwner, g.repoName, err)
 		return err
 	}

-	// set head information
-	pullHead := filepath.Join(g.gitPath(), "refs", "pull", fmt.Sprintf("%d", pr.Number))
-	if err := os.MkdirAll(pullHead, os.ModePerm); err != nil {
-		return err
-	}
-	p, err := os.Create(filepath.Join(pullHead, "head"))
-	if err != nil {
-		return err
-	}
-	_, err = p.WriteString(pr.Head.SHA)
-	p.Close()
-	if err != nil {
-		return err
+	isFork := pr.IsForkPullRequest()
+
+	// Even if it's a forked repo PR, we have to change head info as the same as the base info
+	oldHeadOwnerName := pr.Head.OwnerName
+	pr.Head.OwnerName, pr.Head.RepoName = pr.Base.OwnerName, pr.Base.RepoName
+
+	if !isFork || pr.State == "closed" {
+		return nil
 	}

-	if pr.IsForkPullRequest() && pr.State != "closed" {
-		if pr.Head.OwnerName != "" {
-			remote := pr.Head.OwnerName
-			_, ok := g.prHeadCache[remote]
+	// OK we want to fetch the current head as a branch from its CloneURL
+
+	// 1. Is there a head clone URL available?
+	// 2. Is there a head ref available?
+	if pr.Head.CloneURL == "" || pr.Head.Ref == "" {
+		// Set head information if pr.Head.SHA is available
+		if pr.Head.SHA != "" {
+			_, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref", pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.gitPath()})
+			if err != nil {
+				log.Error("PR #%d in %s/%s unable to update-ref for pr HEAD: %v", pr.Number, g.repoOwner, g.repoName, err)
+			}
+		}
+		return nil
+	}
+
+	// 3. We need to create a remote for this clone url
+	// ... maybe we already have a name for this remote
+	remote, ok := g.prHeadCache[pr.Head.CloneURL+":"]
 	if !ok {
-		// git remote add
-		// TODO: how to handle private CloneURL?
+		// ... let's try ownername as a reasonable name
+		remote = oldHeadOwnerName
+		if !git.IsValidRefPattern(remote) {
+			// ... let's try something less nice
+			remote = "head-pr-" + strconv.FormatInt(pr.Number, 10)
+		}
+		// ... now add the remote
 		err := g.gitRepo.AddRemote(remote, pr.Head.CloneURL, true)
 		if err != nil {
-			log.Error("AddRemote failed: %s", err)
+			log.Error("PR #%d in %s/%s AddRemote[%s] failed: %v", pr.Number, g.repoOwner, g.repoName, remote, err)
 		} else {
-			g.prHeadCache[remote] = struct{}{}
+			g.prHeadCache[pr.Head.CloneURL+":"] = remote
 			ok = true
 		}
 	}
+	if !ok {
+		// Set head information if pr.Head.SHA is available
+		if pr.Head.SHA != "" {
+			_, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref", pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.gitPath()})
+			if err != nil {
+				log.Error("PR #%d in %s/%s unable to update-ref for pr HEAD: %v", pr.Number, g.repoOwner, g.repoName, err)
+			}
+		}

-	if ok {
-		_, _, err = git.NewCommand(g.ctx, "fetch", remote, pr.Head.Ref).RunStdString(&git.RunOpts{Dir: g.gitPath()})
+		return nil
+	}
+
+	// 4. Check if we already have this ref?
+	localRef, ok := g.prHeadCache[pr.Head.CloneURL+":"+pr.Head.Ref]
+	if !ok {
+		// ... We would normally name this migrated branch as <OwnerName>/<HeadRef> but we need to ensure that is safe
+		localRef = git.SanitizeRefPattern(oldHeadOwnerName + "/" + pr.Head.Ref)
+
+		// ... Now we must assert that this does not exist
+		if g.gitRepo.IsBranchExist(localRef) {
+			localRef = "head-pr-" + strconv.FormatInt(pr.Number, 10) + "/" + localRef
+			i := 0
+			for g.gitRepo.IsBranchExist(localRef) {
+				if i > 5 {
+					// ... We tried, we really tried but this is just a seriously unfriendly repo
+					return fmt.Errorf("unable to create unique local reference from %s", pr.Head.Ref)
+				}
+				// OK just try some uuids!
+				localRef = git.SanitizeRefPattern("head-pr-" + strconv.FormatInt(pr.Number, 10) + uuid.New().String())
+				i++
+			}
+		}
+
+		fetchArg := pr.Head.Ref + ":" + git.BranchPrefix + localRef
+		if strings.HasPrefix(fetchArg, "-") {
+			fetchArg = git.BranchPrefix + fetchArg
+		}
+
+		_, _, err = git.NewCommand(g.ctx, "fetch", "--no-tags", "--", remote, fetchArg).RunStdString(&git.RunOpts{Dir: g.gitPath()})
 		if err != nil {
 			log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err)
+			// We need to continue here so that the Head.Ref is reset and we attempt to set the gitref for the PR
+			// (This last step will likely fail but we should try to do as much as we can.)
 		} else {
-			// a new branch name with <original_owner_name/original_branchname> will be created to as new head branch
-			ref := path.Join(pr.Head.OwnerName, pr.Head.Ref)
-			headBranch := filepath.Join(g.gitPath(), "refs", "heads", ref)
-			if err := os.MkdirAll(filepath.Dir(headBranch), os.ModePerm); err != nil {
-				return err
+			// Cache the localRef as the Head.Ref - if we've failed we can always try again.
+			g.prHeadCache[pr.Head.CloneURL+":"+pr.Head.Ref] = localRef
 		}
-			b, err := os.Create(headBranch)
-			if err != nil {
-				return err
-			}
-			_, err = b.WriteString(pr.Head.SHA)
-			b.Close()
-			if err != nil {
-				return err
-			}
-			pr.Head.Ref = ref
-		}
-		}
-		}
-	}
-	// whatever it's a forked repo PR, we have to change head info as the same as the base info
-	pr.Head.OwnerName = pr.Base.OwnerName
-	pr.Head.RepoName = pr.Base.RepoName
 	}
+
+	// Set the pr.Head.Ref to the localRef
+	pr.Head.Ref = localRef
+
+	// 5. Now if pr.Head.SHA == "" we should recover this to the head of this branch
+	if pr.Head.SHA == "" {
+		headSha, err := g.gitRepo.GetBranchCommitID(localRef)
+		if err != nil {
+			log.Error("unable to get head SHA of local head for PR #%d from %s in %s/%s. Error: %v", pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err)
+			return nil
+		}
+		pr.Head.SHA = headSha
+	}
+	if pr.Head.SHA != "" {
+		_, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref", pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.gitPath()})
+		if err != nil {
+			log.Error("unable to set %s as the local head for PR #%d from %s in %s/%s. Error: %v", pr.Head.SHA, pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err)
+		}
+	}
+
+	return nil
+}
+
+// CreatePullRequests creates pull requests
+func (g *RepositoryDumper) CreatePullRequests(prs ...*base.PullRequest) error {
 	var err error
 	if g.pullrequestFile == nil {
 		if err := os.MkdirAll(g.baseDir, os.ModePerm); err != nil {
@@ -525,16 +603,22 @@ func (g *RepositoryDumper) CreatePullRequests(prs ...*base.PullRequest) error {
 		}
 	}

-	bs, err := yaml.Marshal(prs)
-	if err != nil {
-		return err
-	}
+	encoder := yaml.NewEncoder(g.pullrequestFile)
+	defer encoder.Close()

-	if _, err := g.pullrequestFile.Write(bs); err != nil {
-		return err
+	count := 0
+	for i := 0; i < len(prs); i++ {
+		pr := prs[i]
+		if err := g.handlePullRequest(pr); err != nil {
+			log.Error("PR #%d in %s/%s failed - skipping", pr.Number, g.repoOwner, g.repoName, err)
+			continue
 		}
+		prs[count] = pr
+		count++
+	}
+	prs = prs[:count]

-	return nil
+	return encoder.Encode(prs)
 }

 // CreateReviews create pull request reviews
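The fetch refspec built above maps the remote head ref onto a sanitized local branch and, because IsValidRefPattern does not reject a leading "-", prefixes the whole argument with refs/heads/ so it can never be read as a git option (the command also passes "--"). A standalone sketch of that construction; buildFetchArg is an illustrative name:

package main

import (
	"fmt"
	"strings"
)

// BranchPrefix mirrors git.BranchPrefix.
const BranchPrefix = "refs/heads/"

// buildFetchArg mirrors the refspec construction above: the remote ref is
// mapped onto a sanitized local branch, and a leading "-" is neutralised.
func buildFetchArg(remoteRef, localRef string) string {
	fetchArg := remoteRef + ":" + BranchPrefix + localRef
	if strings.HasPrefix(fetchArg, "-") {
		fetchArg = BranchPrefix + fetchArg
	}
	return fetchArg
}

func main() {
	fmt.Println(buildFetchArg("feature/x", "owner/feature/x"))
	// feature/x:refs/heads/owner/feature/x
	fmt.Println(buildFetchArg("-push-option=danger", "head-pr-1"))
	// refs/heads/-push-option=danger:refs/heads/head-pr-1
}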
@@ -6,9 +6,11 @@ package migrations

 import (
 	"context"
+	"fmt"
 	"net/url"
 	"strings"

+	"code.gitea.io/gitea/modules/log"
 	base "code.gitea.io/gitea/modules/migration"
 	"code.gitea.io/gitea/modules/structs"
 )
@@ -37,6 +39,7 @@ func (f *GitBucketDownloaderFactory) New(ctx context.Context, opts base.MigrateO
 	oldOwner := fields[1]
 	oldName := strings.TrimSuffix(fields[2], ".git")

+	log.Trace("Create GitBucket downloader. BaseURL: %s RepoOwner: %s RepoName: %s", baseURL, oldOwner, oldName)
 	return NewGitBucketDownloader(ctx, baseURL, opts.AuthUsername, opts.AuthPassword, opts.AuthToken, oldOwner, oldName), nil
 }

@@ -51,6 +54,20 @@ type GitBucketDownloader struct {
 	*GithubDownloaderV3
 }

+// String implements Stringer
+func (g *GitBucketDownloader) String() string {
+	return fmt.Sprintf("migration from gitbucket server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
+}
+
+// ColorFormat provides a basic color format for a GitBucketDownloader
+func (g *GitBucketDownloader) ColorFormat(s fmt.State) {
+	if g == nil {
+		log.ColorFprintf(s, "<nil: GitBucketDownloader>")
+		return
+	}
+	log.ColorFprintf(s, "migration from gitbucket server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
+}
+
 // NewGitBucketDownloader creates a GitBucket downloader
 func NewGitBucketDownloader(ctx context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GitBucketDownloader {
 	githubDownloader := NewGithubDownloaderV3(ctx, baseURL, userName, password, token, repoOwner, repoName)
@@ -14,7 +14,6 @@ import (
 	"strings"
 	"time"

-	admin_model "code.gitea.io/gitea/models/admin"
 	"code.gitea.io/gitea/modules/log"
 	base "code.gitea.io/gitea/modules/migration"
 	"code.gitea.io/gitea/modules/structs"
@@ -71,6 +70,7 @@ type GiteaDownloader struct {
 	base.NullDownloader
 	ctx context.Context
 	client *gitea_sdk.Client
+	baseURL string
 	repoOwner string
 	repoName string
 	pagination bool
@@ -117,6 +117,7 @@ func NewGiteaDownloader(ctx context.Context, baseURL, repoPath, username, passwo
 	return &GiteaDownloader{
 		ctx: ctx,
 		client: giteaClient,
+		baseURL: baseURL,
 		repoOwner: path[0],
 		repoName: path[1],
 		pagination: paginationSupport,
@@ -129,6 +130,20 @@ func (g *GiteaDownloader) SetContext(ctx context.Context) {
 	g.ctx = ctx
 }

+// String implements Stringer
+func (g *GiteaDownloader) String() string {
+	return fmt.Sprintf("migration from gitea server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
+}
+
+// ColorFormat provides a basic color format for a GiteaDownloader
+func (g *GiteaDownloader) ColorFormat(s fmt.State) {
+	if g == nil {
+		log.ColorFprintf(s, "<nil: GiteaDownloader>")
+		return
+	}
+	log.ColorFprintf(s, "migration from gitea server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
+}
+
 // GetRepoInfo returns a repository information
 func (g *GiteaDownloader) GetRepoInfo() (*base.Repository, error) {
 	if g == nil {
@@ -284,6 +299,12 @@ func (g *GiteaDownloader) convertGiteaRelease(rel *gitea_sdk.Release) *base.Rele
 			if err != nil {
 				return nil, err
 			}
+
+			if !hasBaseURL(asset.DownloadURL, g.baseURL) {
+				WarnAndNotice("Unexpected AssetURL for assetID[%d] in %s: %s", asset.ID, g, asset.DownloadURL)
+				return io.NopCloser(strings.NewReader(asset.DownloadURL)), nil
+			}
+
 			// FIXME: for a private download?
 			req, err := http.NewRequest("GET", asset.DownloadURL, nil)
 			if err != nil {
@@ -403,11 +424,7 @@ func (g *GiteaDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, err

 		reactions, err := g.getIssueReactions(issue.Index)
 		if err != nil {
-			log.Warn("Unable to load reactions during migrating issue #%d to %s/%s. Error: %v", issue.Index, g.repoOwner, g.repoName, err)
-			if err2 := admin_model.CreateRepositoryNotice(
-				fmt.Sprintf("Unable to load reactions during migrating issue #%d to %s/%s. Error: %v", issue.Index, g.repoOwner, g.repoName, err)); err2 != nil {
-				log.Error("create repository notice failed: ", err2)
-			}
+			WarnAndNotice("Unable to load reactions during migrating issue #%d in %s. Error: %v", issue.Index, g, err)
 		}

 		var assignees []string
@@ -465,11 +482,7 @@ func (g *GiteaDownloader) GetComments(commentable base.Commentable) ([]*base.Com
 	for _, comment := range comments {
 		reactions, err := g.getCommentReactions(comment.ID)
 		if err != nil {
-			log.Warn("Unable to load comment reactions during migrating issue #%d for comment %d to %s/%s. Error: %v", commentable.GetForeignIndex(), comment.ID, g.repoOwner, g.repoName, err)
-			if err2 := admin_model.CreateRepositoryNotice(
-				fmt.Sprintf("Unable to load reactions during migrating issue #%d for comment %d to %s/%s. Error: %v", commentable.GetForeignIndex(), comment.ID, g.repoOwner, g.repoName, err)); err2 != nil {
-				log.Error("create repository notice failed: ", err2)
-			}
+			WarnAndNotice("Unable to load comment reactions during migrating issue #%d for comment %d in %s. Error: %v", commentable.GetForeignIndex(), comment.ID, g, err)
 		}

 		allComments = append(allComments, &base.Comment{
@@ -544,11 +557,7 @@ func (g *GiteaDownloader) GetPullRequests(page, perPage int) ([]*base.PullReques

 		reactions, err := g.getIssueReactions(pr.Index)
 		if err != nil {
-			log.Warn("Unable to load reactions during migrating pull #%d to %s/%s. Error: %v", pr.Index, g.repoOwner, g.repoName, err)
-			if err2 := admin_model.CreateRepositoryNotice(
-				fmt.Sprintf("Unable to load reactions during migrating pull #%d to %s/%s. Error: %v", pr.Index, g.repoOwner, g.repoName, err)); err2 != nil {
-				log.Error("create repository notice failed: ", err2)
-			}
+			WarnAndNotice("Unable to load reactions during migrating pull #%d in %s. Error: %v", pr.Index, g, err)
 		}

 		var assignees []string
@@ -605,6 +614,8 @@ func (g *GiteaDownloader) GetPullRequests(page, perPage int) ([]*base.PullReques
 			},
 			ForeignIndex: pr.Index,
 		})
+		// SECURITY: Ensure that the PR is safe
+		_ = CheckAndEnsureSafePR(allPRs[len(allPRs)-1], g.baseURL, g)
 	}

 	isEnd := len(prs) < perPage
@@ -10,6 +10,7 @@ import (
 	"fmt"
 	"io"
 	"os"
+	"path"
 	"path/filepath"
 	"strconv"
 	"strings"
@@ -32,7 +33,7 @@ import (
 	"code.gitea.io/gitea/modules/uri"
 	"code.gitea.io/gitea/services/pull"

-	gouuid "github.com/google/uuid"
+	"github.com/google/uuid"
 )

 var _ base.Uploader = &GiteaLocalUploader{}
@@ -48,7 +49,7 @@ type GiteaLocalUploader struct {
 	milestones map[string]int64
 	issues map[int64]*issues_model.Issue
 	gitRepo *git.Repository
-	prHeadCache map[string]struct{}
+	prHeadCache map[string]string
 	sameApp bool
 	userMap map[int64]int64 // external user id mapping to user id
 	prCache map[int64]*issues_model.PullRequest
@@ -65,7 +66,7 @@ func NewGiteaLocalUploader(ctx context.Context, doer *user_model.User, repoOwner
 		labels: make(map[string]*issues_model.Label),
 		milestones: make(map[string]int64),
 		issues: make(map[int64]*issues_model.Issue),
-		prHeadCache: make(map[string]struct{}),
+		prHeadCache: make(map[string]string),
 		userMap: make(map[int64]int64),
 		prCache: make(map[int64]*issues_model.PullRequest),
 	}
@@ -125,7 +126,7 @@ func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.Migrate
 		Mirror: repo.IsMirror,
 		LFS: opts.LFS,
 		LFSEndpoint: opts.LFSEndpoint,
-		CloneAddr: repo.CloneURL,
+		CloneAddr: repo.CloneURL, // SECURITY: we will assume that this has already been checked
 		Private: repo.IsPrivate,
 		Wiki: opts.Wiki,
 		Releases: opts.Releases, // if didn't get releases, then sync them from tags
@@ -150,13 +151,15 @@ func (g *GiteaLocalUploader) Close() {

 // CreateTopics creates topics
 func (g *GiteaLocalUploader) CreateTopics(topics ...string) error {
-	// ignore topics to long for the db
+	// Ignore topics too long for the db
 	c := 0
-	for i := range topics {
-		if len(topics[i]) <= 50 {
-			topics[c] = topics[i]
-			c++
+	for _, topic := range topics {
+		if len(topic) > 50 {
+			continue
 		}
+
+		topics[c] = topic
+		c++
 	}
 	topics = topics[:c]
 	return repo_model.SaveTopics(g.repo.ID, topics...)
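The rewritten CreateTopics compacts the valid entries to the front of the same slice and then truncates it, instead of the old copy-inside-the-if. The same pattern in isolation, with arbitrary sample topics:

package main

import (
	"fmt"
	"strings"
)

// filterInPlace mirrors the CreateTopics loop above: valid items are compacted
// to the front of the same backing array and the slice is cut to length c.
func filterInPlace(topics []string) []string {
	c := 0
	for _, topic := range topics {
		if len(topic) > 50 {
			continue // too long for the db column, drop it
		}
		topics[c] = topic
		c++
	}
	return topics[:c]
}

func main() {
	topics := []string{"go", "git", strings.Repeat("x", 60), "migration"}
	fmt.Println(filterInPlace(topics)) // [go git migration]
}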
@@ -217,11 +220,17 @@ func (g *GiteaLocalUploader) CreateMilestones(milestones ...*base.Milestone) err
 func (g *GiteaLocalUploader) CreateLabels(labels ...*base.Label) error {
 	lbs := make([]*issues_model.Label, 0, len(labels))
 	for _, label := range labels {
+		// We must validate color here:
+		if !issues_model.LabelColorPattern.MatchString("#" + label.Color) {
+			log.Warn("Invalid label color: #%s for label: %s in migration to %s/%s", label.Color, label.Name, g.repoOwner, g.repoName)
+			label.Color = "ffffff"
+		}
+
 		lbs = append(lbs, &issues_model.Label{
 			RepoID: g.repo.ID,
 			Name: label.Name,
 			Description: label.Description,
-			Color: fmt.Sprintf("#%s", label.Color),
+			Color: "#" + label.Color,
 		})
 	}

@@ -247,6 +256,16 @@ func (g *GiteaLocalUploader) CreateReleases(releases ...*base.Release) error {
 			}
 		}

+		// SECURITY: The TagName must be a valid git ref
+		if release.TagName != "" && !git.IsValidRefPattern(release.TagName) {
+			release.TagName = ""
+		}
+
+		// SECURITY: The TargetCommitish must be a valid git ref
+		if release.TargetCommitish != "" && !git.IsValidRefPattern(release.TargetCommitish) {
+			release.TargetCommitish = ""
+		}
+
 		rel := repo_model.Release{
 			RepoID: g.repo.ID,
 			TagName: release.TagName,
@@ -288,14 +307,15 @@ func (g *GiteaLocalUploader) CreateReleases(releases ...*base.Release) error {
 			}
 		}
 		attach := repo_model.Attachment{
-			UUID: gouuid.New().String(),
+			UUID: uuid.New().String(),
 			Name: asset.Name,
 			DownloadCount: int64(*asset.DownloadCount),
 			Size: int64(*asset.Size),
 			CreatedUnix: timeutil.TimeStamp(asset.Created.Unix()),
 		}

-		// download attachment
+		// SECURITY: We cannot check the DownloadURL and DownloadFunc are safe here
+		// ... we must assume that they are safe and simply download the attachment
 		err := func() error {
 			// asset.DownloadURL maybe a local file
 			var rc io.ReadCloser
@@ -365,6 +385,12 @@ func (g *GiteaLocalUploader) CreateIssues(issues ...*base.Issue) error {
 			}
 		}

+		// SECURITY: issue.Ref needs to be a valid reference
+		if !git.IsValidRefPattern(issue.Ref) {
+			log.Warn("Invalid issue.Ref[%s] in issue #%d in %s/%s", issue.Ref, issue.Number, g.repoOwner, g.repoName)
+			issue.Ref = ""
+		}
+
 		is := issues_model.Issue{
 			RepoID: g.repo.ID,
 			Repo: g.repo,
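Migrated label colours are now validated before the "#" prefix is attached, falling back to white when they do not look like a colour at all. A standalone sketch; the labelColorPattern regex here is an assumption for illustration, the real issues_model.LabelColorPattern lives in Gitea's models and may differ:

package main

import (
	"fmt"
	"regexp"
)

// labelColorPattern is an assumption: a 3- or 6-digit hex colour with a leading "#".
var labelColorPattern = regexp.MustCompile(`^#[0-9a-fA-F]{6}$|^#[0-9a-fA-F]{3}$`)

// safeColor falls back to white when the migrated value is not a colour,
// mirroring the behaviour added to CreateLabels above.
func safeColor(color string) string {
	if !labelColorPattern.MatchString("#" + color) {
		return "ffffff"
	}
	return color
}

func main() {
	fmt.Println("#" + safeColor("00aabb"))              // kept
	fmt.Println("#" + safeColor("javascript:alert(1)")) // replaced with ffffff
}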
@@ -496,102 +522,151 @@ func (g *GiteaLocalUploader) CreatePullRequests(prs ...*base.PullRequest) error
 }

 func (g *GiteaLocalUploader) updateGitForPullRequest(pr *base.PullRequest) (head string, err error) {
-	// download patch file
+	// SECURITY: this pr must have been must have been ensured safe
+	if !pr.EnsuredSafe {
+		log.Error("PR #%d in %s/%s has not been checked for safety.", pr.Number, g.repoOwner, g.repoName)
+		return "", fmt.Errorf("the PR[%d] was not checked for safety", pr.Number)
+	}
+
+	// Anonymous function to download the patch file (allows us to use defer)
 	err = func() error {
+		// if the patchURL is empty there is nothing to download
 		if pr.PatchURL == "" {
 			return nil
 		}
-		// pr.PatchURL maybe a local file
-		ret, err := uri.Open(pr.PatchURL)
+
+		// SECURITY: We will assume that the pr.PatchURL has been checked
+		// pr.PatchURL maybe a local file - but note EnsureSafe should be asserting that this safe
+		ret, err := uri.Open(pr.PatchURL) // TODO: This probably needs to use the downloader as there may be rate limiting issues here
 		if err != nil {
 			return err
 		}
 		defer ret.Close()

 		pullDir := filepath.Join(g.repo.RepoPath(), "pulls")
 		if err = os.MkdirAll(pullDir, os.ModePerm); err != nil {
 			return err
 		}

 		f, err := os.Create(filepath.Join(pullDir, fmt.Sprintf("%d.patch", pr.Number)))
 		if err != nil {
 			return err
 		}
 		defer f.Close()

+		// TODO: Should there be limits on the size of this file?
 		_, err = io.Copy(f, ret)

 		return err
 	}()
 	if err != nil {
 		return "", err
 	}

-	// set head information
-	pullHead := filepath.Join(g.repo.RepoPath(), "refs", "pull", fmt.Sprintf("%d", pr.Number))
-	if err := os.MkdirAll(pullHead, os.ModePerm); err != nil {
-		return "", err
-	}
-	p, err := os.Create(filepath.Join(pullHead, "head"))
-	if err != nil {
-		return "", err
-	}
-	_, err = p.WriteString(pr.Head.SHA)
-	p.Close()
-	if err != nil {
-		return "", err
-	}
-
 	head = "unknown repository"
 	if pr.IsForkPullRequest() && pr.State != "closed" {
-		if pr.Head.OwnerName != "" {
-			remote := pr.Head.OwnerName
-			_, ok := g.prHeadCache[remote]
+		// OK we want to fetch the current head as a branch from its CloneURL
+
+		// 1. Is there a head clone URL available?
+		// 2. Is there a head ref available?
+		if pr.Head.CloneURL == "" || pr.Head.Ref == "" {
+			return head, nil
+		}
+
+		// 3. We need to create a remote for this clone url
+		// ... maybe we already have a name for this remote
+		remote, ok := g.prHeadCache[pr.Head.CloneURL+":"]
 		if !ok {
-			// git remote add
+			// ... let's try ownername as a reasonable name
+			remote = pr.Head.OwnerName
+			if !git.IsValidRefPattern(remote) {
+				// ... let's try something less nice
+				remote = "head-pr-" + strconv.FormatInt(pr.Number, 10)
+			}
+			// ... now add the remote
 			err := g.gitRepo.AddRemote(remote, pr.Head.CloneURL, true)
 			if err != nil {
-				log.Error("AddRemote failed: %s", err)
+				log.Error("PR #%d in %s/%s AddRemote[%s] failed: %v", pr.Number, g.repoOwner, g.repoName, remote, err)
 			} else {
-				g.prHeadCache[remote] = struct{}{}
+				g.prHeadCache[pr.Head.CloneURL+":"] = remote
 				ok = true
 			}
 		}
-		if ok {
-			_, _, err = git.NewCommand(g.ctx, "fetch", "--no-tags", "--", remote, pr.Head.Ref).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
+		if !ok {
+			return head, nil
+		}
+
+		// 4. Check if we already have this ref?
+		localRef, ok := g.prHeadCache[pr.Head.CloneURL+":"+pr.Head.Ref]
+		if !ok {
+			// ... We would normally name this migrated branch as <OwnerName>/<HeadRef> but we need to ensure that is safe
+			localRef = git.SanitizeRefPattern(pr.Head.OwnerName + "/" + pr.Head.Ref)
+
+			// ... Now we must assert that this does not exist
+			if g.gitRepo.IsBranchExist(localRef) {
+				localRef = "head-pr-" + strconv.FormatInt(pr.Number, 10) + "/" + localRef
+				i := 0
+				for g.gitRepo.IsBranchExist(localRef) {
+					if i > 5 {
+						// ... We tried, we really tried but this is just a seriously unfriendly repo
+						return head, nil
+					}
+					// OK just try some uuids!
+					localRef = git.SanitizeRefPattern("head-pr-" + strconv.FormatInt(pr.Number, 10) + uuid.New().String())
+					i++
+				}
+			}
+
+			fetchArg := pr.Head.Ref + ":" + git.BranchPrefix + localRef
+			if strings.HasPrefix(fetchArg, "-") {
+				fetchArg = git.BranchPrefix + fetchArg
+			}
+
+			_, _, err = git.NewCommand(g.ctx, "fetch", "--no-tags", "--", remote, fetchArg).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
 			if err != nil {
 				log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err)
-			} else {
-				headBranch := filepath.Join(g.repo.RepoPath(), "refs", "heads", pr.Head.OwnerName, pr.Head.Ref)
-				if err := os.MkdirAll(filepath.Dir(headBranch), os.ModePerm); err != nil {
-					return "", err
+				return head, nil
 			}
-			b, err := os.Create(headBranch)
+			g.prHeadCache[pr.Head.CloneURL+":"+pr.Head.Ref] = localRef
+			head = localRef
+		}
+
+		// 5. Now if pr.Head.SHA == "" we should recover this to the head of this branch
+		if pr.Head.SHA == "" {
+			headSha, err := g.gitRepo.GetBranchCommitID(localRef)
+			if err != nil {
+				log.Error("unable to get head SHA of local head for PR #%d from %s in %s/%s. Error: %v", pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err)
+				return head, nil
+			}
+			pr.Head.SHA = headSha
+		}
+
+		_, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref", pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
 		if err != nil {
 			return "", err
 		}
-			_, err = b.WriteString(pr.Head.SHA)
-			b.Close()
-			if err != nil {
-				return "", err
+
+		return head, nil
 	}
-			head = pr.Head.OwnerName + "/" + pr.Head.Ref
-		}
-		}
-		}
-	} else {
+
+	if pr.Head.Ref != "" {
 		head = pr.Head.Ref
+	}
+
 	// Ensure the closed PR SHA still points to an existing ref
+	if pr.Head.SHA == "" {
+		// The SHA is empty
+		log.Warn("Empty reference, no pull head for PR #%d in %s/%s", pr.Number, g.repoOwner, g.repoName)
+	} else {
|
||||||
_, _, err = git.NewCommand(g.ctx, "rev-list", "--quiet", "-1", pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
|
_, _, err = git.NewCommand(g.ctx, "rev-list", "--quiet", "-1", pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if pr.Head.SHA != "" {
|
|
||||||
// Git update-ref remove bad references with a relative path
|
// Git update-ref remove bad references with a relative path
|
||||||
log.Warn("Deprecated local head, removing : %v", pr.Head.SHA)
|
log.Warn("Deprecated local head %s for PR #%d in %s/%s, removing %s", pr.Head.SHA, pr.Number, g.repoOwner, g.repoName, pr.GetGitRefName())
|
||||||
err = g.gitRepo.RemoveReference(pr.GetGitRefName())
|
|
||||||
} else {
|
} else {
|
||||||
// The SHA is empty, remove the head file
|
// set head information
|
||||||
log.Warn("Empty reference, removing : %v", pullHead)
|
_, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref", pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
|
||||||
err = os.Remove(filepath.Join(pullHead, "head"))
|
|
||||||
}
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("Cannot remove local head ref, %v", err)
|
log.Error("unable to set %s as the local head for PR #%d from %s in %s/%s. Error: %v", pr.Head.SHA, pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
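The fetch logic above guards the computed refspec: if it would start with "-", git could treat it as an option rather than a refspec. A minimal standalone sketch of that guard follows; branchPrefix mirrors git.BranchPrefix ("refs/heads/"), and the example inputs are placeholders, not values from this patch.

// A minimal sketch of the refspec guard, assuming git.BranchPrefix is "refs/heads/".
package main

import (
	"fmt"
	"strings"
)

const branchPrefix = "refs/heads/"

// safeFetchRefspec builds "<headRef>:refs/heads/<localRef>" and, if the result would
// begin with "-", anchors it under refs/heads/ so git cannot read it as an option.
func safeFetchRefspec(headRef, localRef string) string {
	fetchArg := headRef + ":" + branchPrefix + localRef
	if strings.HasPrefix(fetchArg, "-") {
		fetchArg = branchPrefix + fetchArg
	}
	return fetchArg
}

func main() {
	fmt.Println(safeFetchRefspec("feature/x", "owner/feature/x"))
	fmt.Println(safeFetchRefspec("--upload-pack=evil", "head-pr-1"))
}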
@@ -615,6 +690,20 @@ func (g *GiteaLocalUploader) newPullRequest(pr *base.PullRequest) (*issues_model
return nil, fmt.Errorf("updateGitForPullRequest: %w", err)
}

+// Now we may need to fix the mergebase
+if pr.Base.SHA == "" {
+if pr.Base.Ref != "" && pr.Head.SHA != "" {
+// A PR against a tag base does not make sense - therefore pr.Base.Ref must be a branch
+// TODO: should we be checking for the refs/heads/ prefix on the pr.Base.Ref? (i.e. are these actually branches or refs)
+pr.Base.SHA, _, err = g.gitRepo.GetMergeBase("", git.BranchPrefix+pr.Base.Ref, pr.Head.SHA)
+if err != nil {
+log.Error("Cannot determine the merge base for PR #%d in %s/%s. Error: %v", pr.Number, g.repoOwner, g.repoName, err)
+}
+} else {
+log.Error("Cannot determine the merge base for PR #%d in %s/%s. Not enough information", pr.Number, g.repoOwner, g.repoName)
+}
+}

if pr.Created.IsZero() {
if pr.Closed != nil {
pr.Created = *pr.Closed
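The hunk above recovers a missing pr.Base.SHA through g.gitRepo.GetMergeBase. As a rough standalone illustration of the same idea, a minimal sketch using the git CLI directly is shown below; it assumes git is on PATH, and the repository path and ref names are placeholders rather than anything from this patch.

// A minimal sketch, not the uploader's code: recover a missing base SHA with git merge-base.
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// mergeBase returns the best common ancestor of baseRef and headSHA in repoPath.
func mergeBase(repoPath, baseRef, headSHA string) (string, error) {
	cmd := exec.Command("git", "merge-base", baseRef, headSHA)
	cmd.Dir = repoPath
	out, err := cmd.Output()
	if err != nil {
		return "", fmt.Errorf("git merge-base: %w", err)
	}
	return strings.TrimSpace(string(out)), nil
}

func main() {
	sha, err := mergeBase(".", "origin/main", "HEAD")
	if err != nil {
		fmt.Println("cannot determine the merge base:", err)
		return
	}
	fmt.Println("merge base:", sha)
}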
@@ -728,6 +817,8 @@ func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {
return err
}

+cms = append(cms, &cm)

// get pr
pr, ok := g.prCache[issue.ID]
if !ok {
@@ -738,6 +829,17 @@ func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {
}
g.prCache[issue.ID] = pr
}
+if pr.MergeBase == "" {
+// No mergebase -> no basis for any patches
+log.Warn("PR #%d in %s/%s: does not have a merge base, all review comments will be ignored", pr.Index, g.repoOwner, g.repoName)
+continue
+}

+headCommitID, err := g.gitRepo.GetRefCommitID(pr.GetGitRefName())
+if err != nil {
+log.Warn("PR #%d GetRefCommitID[%s] in %s/%s: %v, all review comments will be ignored", pr.Index, pr.GetGitRefName(), g.repoOwner, g.repoName, err)
+continue
+}

for _, comment := range review.Comments {
line := comment.Line
@@ -746,11 +848,9 @@ func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {
} else {
_, _, line, _ = git.ParseDiffHunkString(comment.DiffHunk)
}
-headCommitID, err := g.gitRepo.GetRefCommitID(pr.GetGitRefName())
-if err != nil {
-log.Warn("GetRefCommitID[%s]: %v, the review comment will be ignored", pr.GetGitRefName(), err)
-continue
-}
+// SECURITY: The TreePath must be cleaned!
+comment.TreePath = path.Clean("/" + comment.TreePath)[1:]

var patch string
reader, writer := io.Pipe()
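The SECURITY comment above relies on a small path trick: prefixing "/" before path.Clean collapses any "../" segments, and stripping the leading "/" afterwards yields a repository-relative path again. A minimal sketch of the same cleaning step:

// A minimal sketch of the TreePath cleaning shown above.
package main

import (
	"fmt"
	"path"
)

// cleanTreePath collapses "../" escapes by cleaning the path as if it were absolute,
// then drops the leading "/" again.
func cleanTreePath(p string) string {
	return path.Clean("/" + p)[1:]
}

func main() {
	fmt.Println(cleanTreePath("docs/../../etc/passwd")) // etc/passwd
	fmt.Println(cleanTreePath("src/main.go"))           // src/main.go
}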
@@ -775,6 +875,11 @@ func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {
comment.UpdatedAt = comment.CreatedAt
}

+if !git.IsValidSHAPattern(comment.CommitID) {
+log.Warn("Invalid comment CommitID[%s] on comment[%d] in PR #%d of %s/%s replaced with %s", comment.CommitID, pr.Index, g.repoOwner, g.repoName, headCommitID)
+comment.CommitID = headCommitID
+}

c := issues_model.Comment{
Type: issues_model.CommentTypeCode,
IssueID: issue.ID,
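The block above falls back to headCommitID whenever comment.CommitID fails git.IsValidSHAPattern. The helper's definition is not shown in this excerpt; a minimal sketch of that kind of check, assuming a plain lowercase hexadecimal SHA-1 pattern, could look like this:

// A minimal sketch; the regexp is an assumption for illustration, not the git module's own definition.
package main

import (
	"fmt"
	"regexp"
)

// shaPattern accepts abbreviated or full lowercase hexadecimal SHA-1 object names.
var shaPattern = regexp.MustCompile(`^[0-9a-f]{4,40}$`)

func isValidSHA(sha string) bool {
	return shaPattern.MatchString(sha)
}

func main() {
	for _, s := range []string{"e6b3be4608", "refs/heads/main; rm -rf /", ""} {
		fmt.Printf("%q valid=%v\n", s, isValidSHA(s))
	}
}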
@@ -793,8 +898,6 @@ func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {

cm.Comments = append(cm.Comments, &c)
}

-cms = append(cms, &cm)
}

return issues_model.InsertReviews(cms)
@@ -390,7 +390,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) {
},
},
assertContent: func(t *testing.T, content string) {
-assert.Contains(t, content, "AddRemote failed")
+assert.Contains(t, content, "AddRemote")
},
},
{
@@ -439,7 +439,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) {
},
},
assertContent: func(t *testing.T, content string) {
-assert.Contains(t, content, "Empty reference, removing")
+assert.Contains(t, content, "Empty reference")
assert.NotContains(t, content, "Cannot remove local head")
},
},
@@ -467,7 +467,6 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) {
},
assertContent: func(t *testing.T, content string) {
assert.Contains(t, content, "Deprecated local head")
-assert.Contains(t, content, "Cannot remove local head")
},
},
{
@@ -504,6 +503,8 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) {
logger.SetLogger("buffer", "buffer", "{}")
defer logger.DelLogger("buffer")

+testCase.pr.EnsuredSafe = true

head, err := uploader.updateGitForPullRequest(&testCase.pr)
assert.NoError(t, err)
assert.EqualValues(t, testCase.head, head)
@@ -51,7 +51,7 @@ func (f *GithubDownloaderV3Factory) New(ctx context.Context, opts base.MigrateOp
oldOwner := fields[1]
oldName := strings.TrimSuffix(fields[2], ".git")

-log.Trace("Create github downloader: %s/%s", oldOwner, oldName)
+log.Trace("Create github downloader BaseURL: %s %s/%s", baseURL, oldOwner, oldName)

return NewGithubDownloaderV3(ctx, baseURL, opts.AuthUsername, opts.AuthPassword, opts.AuthToken, oldOwner, oldName), nil
}
@@ -67,6 +67,7 @@ type GithubDownloaderV3 struct {
base.NullDownloader
ctx context.Context
clients []*github.Client
+baseURL string
repoOwner string
repoName string
userName string
@@ -81,6 +82,7 @@ type GithubDownloaderV3 struct {
func NewGithubDownloaderV3(ctx context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GithubDownloaderV3 {
downloader := GithubDownloaderV3{
userName: userName,
+baseURL: baseURL,
password: password,
ctx: ctx,
repoOwner: repoOwner,
@@ -118,6 +120,20 @@ func NewGithubDownloaderV3(ctx context.Context, baseURL, userName, password, tok
return &downloader
}

+// String implements Stringer
+func (g *GithubDownloaderV3) String() string {
+return fmt.Sprintf("migration from github server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
+}

+// ColorFormat provides a basic color format for a GithubDownloader
+func (g *GithubDownloaderV3) ColorFormat(s fmt.State) {
+if g == nil {
+log.ColorFprintf(s, "<nil: GithubDownloaderV3>")
+return
+}
+log.ColorFprintf(s, "migration from github server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
+}

func (g *GithubDownloaderV3) addClient(client *http.Client, baseURL string) {
githubClient := github.NewClient(client)
if baseURL != "https://github.com" {
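The String and ColorFormat methods added above exist so the downloader can identify itself in log and notice messages. A small sketch of the effect, using a simplified stand-in type rather than the real downloader:

// A small sketch: implementing fmt.Stringer lets a downloader print a readable
// identity wherever it is passed to %s or %v.
package main

import "fmt"

type downloader struct {
	baseURL, owner, repo string
}

func (d *downloader) String() string {
	return fmt.Sprintf("migration from github server %s %s/%s", d.baseURL, d.owner, d.repo)
}

func main() {
	d := &downloader{baseURL: "https://github.com", owner: "go-gitea", repo: "gitea"}
	fmt.Printf("Unexpected AssetURL in %s\n", d) // the Stringer supplies the context
}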
@@ -322,15 +338,30 @@ func (g *GithubDownloaderV3) convertGithubRelease(rel *github.RepositoryRelease)
Updated: asset.UpdatedAt.Time,
DownloadFunc: func() (io.ReadCloser, error) {
g.waitAndPickClient()
-asset, redirectURL, err := g.getClient().Repositories.DownloadReleaseAsset(g.ctx, g.repoOwner, g.repoName, assetID, nil)
+readCloser, redirectURL, err := g.getClient().Repositories.DownloadReleaseAsset(g.ctx, g.repoOwner, g.repoName, assetID, nil)
if err != nil {
return nil, err
}
if err := g.RefreshRate(); err != nil {
log.Error("g.getClient().RateLimits: %s", err)
}
-if asset == nil {
-if redirectURL != "" {
+if readCloser != nil {
+return readCloser, nil
+}

+if redirectURL == "" {
+return nil, fmt.Errorf("no release asset found for %d", assetID)
+}

+// Prevent open redirect
+if !hasBaseURL(redirectURL, g.baseURL) &&
+!hasBaseURL(redirectURL, "https://objects.githubusercontent.com/") {
+WarnAndNotice("Unexpected AssetURL for assetID[%d] in %s: %s", asset.GetID(), g, redirectURL)

+return io.NopCloser(strings.NewReader(redirectURL)), nil
+}

g.waitAndPickClient()
req, err := http.NewRequestWithContext(g.ctx, "GET", redirectURL, nil)
if err != nil {
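The open-redirect guard above depends on a hasBaseURL helper whose body is outside this excerpt. A minimal sketch of one plausible prefix check is shown below; it is an assumption for illustration, not the helper shipped with this patch.

// A minimal sketch under the assumption that the check is a simple prefix match.
package main

import (
	"fmt"
	"strings"
)

// hasBaseURL reports whether rawURL lives under baseURL, tolerating a missing
// trailing slash on the base.
func hasBaseURL(rawURL, baseURL string) bool {
	if baseURL != "" && !strings.HasSuffix(baseURL, "/") {
		baseURL += "/"
	}
	return strings.HasPrefix(rawURL, baseURL)
}

func main() {
	fmt.Println(hasBaseURL("https://github.com/go-gitea/gitea/releases/download/v1/x.zip", "https://github.com")) // true
	fmt.Println(hasBaseURL("https://evil.example.com/payload", "https://github.com"))                             // false
}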
@@ -339,16 +370,12 @@ func (g *GithubDownloaderV3) convertGithubRelease(rel *github.RepositoryRelease)
resp, err := httpClient.Do(req)
err1 := g.RefreshRate()
if err1 != nil {
-log.Error("g.getClient().RateLimits: %s", err1)
+log.Error("g.RefreshRate(): %s", err1)
}
if err != nil {
return nil, err
}
return resp.Body, nil
-}
-return nil, fmt.Errorf("No release asset found for %d", assetID)
-}
-return asset, nil
},
})
}
@@ -697,7 +724,7 @@ func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullReq
SHA: pr.GetHead().GetSHA(),
OwnerName: pr.GetHead().GetUser().GetLogin(),
RepoName: pr.GetHead().GetRepo().GetName(),
-CloneURL: pr.GetHead().GetRepo().GetCloneURL(),
+CloneURL: pr.GetHead().GetRepo().GetCloneURL(), // see below for SECURITY related issues here
},
Base: base.PullRequestBranch{
Ref: pr.GetBase().GetRef(),
@@ -705,10 +732,13 @@ func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullReq
RepoName: pr.GetBase().GetRepo().GetName(),
OwnerName: pr.GetBase().GetUser().GetLogin(),
},
-PatchURL: pr.GetPatchURL(),
+PatchURL: pr.GetPatchURL(), // see below for SECURITY related issues here
Reactions: reactions,
ForeignIndex: int64(*pr.Number),
})

+// SECURITY: Ensure that the PR is safe
+_ = CheckAndEnsureSafePR(allPRs[len(allPRs)-1], g.baseURL, g)
}

return allPRs, len(prs) < perPage, nil
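CheckAndEnsureSafePR is called here (and in the GitLab, OneDev and restorer paths below) but its implementation is not part of this excerpt. The sketch below only illustrates the kind of validation such a helper could apply to untrusted PR fields; the type and field names are simplified stand-ins, and the SHA pattern is an assumption.

// A rough sketch of such a validation, not the actual helper.
package main

import (
	"fmt"
	"regexp"
	"strings"
)

var hexSHA = regexp.MustCompile(`^[0-9a-f]{4,40}$`)

type pullRequest struct {
	HeadSHA  string
	PatchURL string
}

// ensureSafePR blanks a malformed SHA and rejects a patch URL that does not live
// under the server being migrated from.
func ensureSafePR(pr *pullRequest, baseURL string) error {
	if pr.HeadSHA != "" && !hexSHA.MatchString(pr.HeadSHA) {
		pr.HeadSHA = ""
	}
	if pr.PatchURL != "" && !strings.HasPrefix(pr.PatchURL, baseURL) {
		return fmt.Errorf("unexpected PatchURL %q for base %q", pr.PatchURL, baseURL)
	}
	return nil
}

func main() {
	pr := pullRequest{HeadSHA: "not-a-sha", PatchURL: "https://github.com/o/r/pull/1.patch"}
	fmt.Println(ensureSafePR(&pr, "https://github.com"), pr.HeadSHA == "")
}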
@@ -63,6 +63,7 @@ type GitlabDownloader struct {
base.NullDownloader
ctx context.Context
client *gitlab.Client
+baseURL string
repoID int
repoName string
issueCount int64
@@ -125,12 +126,27 @@ func NewGitlabDownloader(ctx context.Context, baseURL, repoPath, username, passw
return &GitlabDownloader{
ctx: ctx,
client: gitlabClient,
+baseURL: baseURL,
repoID: gr.ID,
repoName: gr.Name,
maxPerPage: 100,
}, nil
}

+// String implements Stringer
+func (g *GitlabDownloader) String() string {
+return fmt.Sprintf("migration from gitlab server %s [%d]/%s", g.baseURL, g.repoID, g.repoName)
+}

+// ColorFormat provides a basic color format for a GitlabDownloader
+func (g *GitlabDownloader) ColorFormat(s fmt.State) {
+if g == nil {
+log.ColorFprintf(s, "<nil: GitlabDownloader>")
+return
+}
+log.ColorFprintf(s, "migration from gitlab server %s [%d]/%s", g.baseURL, g.repoID, g.repoName)
+}

// SetContext set context
func (g *GitlabDownloader) SetContext(ctx context.Context) {
g.ctx = ctx
@@ -308,6 +324,11 @@ func (g *GitlabDownloader) convertGitlabRelease(rel *gitlab.Release) *base.Relea
return nil, err
}

+if !hasBaseURL(link.URL, g.baseURL) {
+WarnAndNotice("Unexpected AssetURL for assetID[%d] in %s: %s", asset.ID, g, link.URL)
+return io.NopCloser(strings.NewReader(link.URL)), nil
+}

req, err := http.NewRequest("GET", link.URL, nil)
if err != nil {
return nil, err
@@ -612,6 +633,9 @@ func (g *GitlabDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque
ForeignIndex: int64(pr.IID),
Context: gitlabIssueContext{IsMergeRequest: true},
})

+// SECURITY: Ensure that the PR is safe
+_ = CheckAndEnsureSafePR(allPRs[len(allPRs)-1], g.baseURL, g)
}

return allPRs, len(prs) < perPage, nil
@@ -73,6 +73,20 @@ type GogsDownloader struct {
transport http.RoundTripper
}

+// String implements Stringer
+func (g *GogsDownloader) String() string {
+return fmt.Sprintf("migration from gogs server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
+}

+// ColorFormat provides a basic color format for a GogsDownloader
+func (g *GogsDownloader) ColorFormat(s fmt.State) {
+if g == nil {
+log.ColorFprintf(s, "<nil: GogsDownloader>")
+return
+}
+log.ColorFprintf(s, "migration from gogs server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
+}

// SetContext set context
func (g *GogsDownloader) SetContext(ctx context.Context) {
g.ctx = ctx
@@ -198,19 +198,25 @@ func migrateRepository(doer *user_model.User, downloader base.Downloader, upload
return err
}

-// If the downloader is not a RepositoryRestorer then we need to recheck the CloneURL
+// SECURITY: If the downloader is not a RepositoryRestorer then we need to recheck the CloneURL
if _, ok := downloader.(*RepositoryRestorer); !ok {
// Now the clone URL can be rewritten by the downloader so we must recheck
if err := IsMigrateURLAllowed(repo.CloneURL, doer); err != nil {
return err
}

-// And so can the original URL too so again we must recheck
-if repo.OriginalURL != "" {
-if err := IsMigrateURLAllowed(repo.OriginalURL, doer); err != nil {
-return err
+// SECURITY: Ensure that we haven't been redirected from an external to a local filesystem
+// Now we know all of these must parse
+cloneAddrURL, _ := url.Parse(opts.CloneAddr)
+cloneURL, _ := url.Parse(repo.CloneURL)

+if cloneURL.Scheme == "file" || cloneURL.Scheme == "" {
+if cloneAddrURL.Scheme != "file" && cloneAddrURL.Scheme != "" {
+return fmt.Errorf("repo info has changed from external to local filesystem")
}
}

+// We don't actually need to check the OriginalURL as it isn't used anywhere
}

log.Trace("migrating git data from %s", repo.CloneURL)
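The new check above compares the scheme of the user-requested clone address with the scheme the downloader reports back, refusing a silent switch to the local filesystem. A minimal standalone sketch with placeholder URLs:

// A minimal sketch of the scheme check added above; the URLs are placeholders.
package main

import (
	"fmt"
	"net/url"
)

func isLocalScheme(u *url.URL) bool {
	return u.Scheme == "file" || u.Scheme == ""
}

// checkNoLocalDowngrade rejects a migration whose reported clone URL has changed
// from a remote scheme to the local filesystem.
func checkNoLocalDowngrade(requestedAddr, reportedCloneURL string) error {
	requested, err := url.Parse(requestedAddr)
	if err != nil {
		return err
	}
	reported, err := url.Parse(reportedCloneURL)
	if err != nil {
		return err
	}
	if isLocalScheme(reported) && !isLocalScheme(requested) {
		return fmt.Errorf("repo info has changed from external to local filesystem")
	}
	return nil
}

func main() {
	fmt.Println(checkNoLocalDowngrade("https://example.com/org/repo.git", "file:///etc"))
	fmt.Println(checkNoLocalDowngrade("https://example.com/org/repo.git", "https://example.com/org/repo.git"))
}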
@@ -110,6 +110,20 @@ func NewOneDevDownloader(ctx context.Context, baseURL *url.URL, username, passwo
return downloader
}

+// String implements Stringer
+func (d *OneDevDownloader) String() string {
+return fmt.Sprintf("migration from oneDev server %s [%d]/%s", d.baseURL, d.repoID, d.repoName)
+}

+// ColorFormat provides a basic color format for a OneDevDownloader
+func (d *OneDevDownloader) ColorFormat(s fmt.State) {
+if d == nil {
+log.ColorFprintf(s, "<nil: OneDevDownloader>")
+return
+}
+log.ColorFprintf(s, "migration from oneDev server %s [%d]/%s", d.baseURL, d.repoID, d.repoName)
+}

func (d *OneDevDownloader) callAPI(endpoint string, parameter map[string]string, result interface{}) error {
u, err := d.baseURL.Parse(endpoint)
if err != nil {
@@ -542,6 +556,9 @@ func (d *OneDevDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque
ForeignIndex: pr.ID,
Context: onedevIssueContext{IsPullRequest: true},
})

+// SECURITY: Ensure that the PR is safe
+_ = CheckAndEnsureSafePR(pullRequests[len(pullRequests)-1], d.baseURL.String(), d)
}

return pullRequests, len(pullRequests) == 0, nil
@@ -243,6 +243,7 @@ func (r *RepositoryRestorer) GetPullRequests(page, perPage int) ([]*base.PullReq
}
for _, pr := range pulls {
pr.PatchURL = "file://" + filepath.Join(r.baseDir, pr.PatchURL)
+CheckAndEnsureSafePR(pr, "", r)
}
return pulls, true, nil
}
@@ -37,6 +37,9 @@ func createTag(gitRepo *git.Repository, rel *repo_model.Release, msg string) (bo
if err != nil {
return false, fmt.Errorf("GetProtectedTags: %v", err)
}

+// Trim '--' prefix to prevent command line argument vulnerability.
+rel.TagName = strings.TrimPrefix(rel.TagName, "--")
isAllowed, err := git_model.IsUserAllowedToControlTag(protectedTags, rel.TagName, rel.PublisherID)
if err != nil {
return false, err
@@ -52,8 +55,6 @@ func createTag(gitRepo *git.Repository, rel *repo_model.Release, msg string) (bo
return false, fmt.Errorf("createTag::GetCommit[%v]: %v", rel.Target, err)
}

-// Trim '--' prefix to prevent command line argument vulnerability.
-rel.TagName = strings.TrimPrefix(rel.TagName, "--")
if len(msg) > 0 {
if err = gitRepo.CreateAnnotatedTag(rel.TagName, msg, commit.ID.String()); err != nil {
if strings.Contains(err.Error(), "is not a valid tag name") {
@@ -308,7 +309,7 @@ func DeleteReleaseByID(ctx context.Context, id int64, doer *user_model.User, del
}
}

-if stdout, _, err := git.NewCommand(ctx, "tag", "-d", rel.TagName).
+if stdout, _, err := git.NewCommand(ctx, "tag", "-d", "--", rel.TagName).
SetDescription(fmt.Sprintf("DeleteReleaseByID (git tag -d): %d", rel.ID)).
RunStdString(&git.RunOpts{Dir: repo.RepoPath()}); err != nil && !strings.Contains(err.Error(), "not found") {
log.Error("DeleteReleaseByID (git tag -d): %d in %v Failed:\nStdout: %s\nError: %v", rel.ID, repo, stdout, err)
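The extra "--" in the git tag -d invocation above ends option parsing, so a user-controlled tag name can no longer be mistaken for a flag. A minimal sketch of the same pattern with the git CLI; the repository path and tag name are placeholders, and git is assumed to be on PATH.

// A minimal sketch of passing "--" before untrusted arguments to git.
package main

import (
	"fmt"
	"os/exec"
)

func deleteTag(repoPath, tagName string) error {
	// "--" ends option parsing, so tagName is always treated as an argument.
	cmd := exec.Command("git", "tag", "-d", "--", tagName)
	cmd.Dir = repoPath
	out, err := cmd.CombinedOutput()
	if err != nil {
		return fmt.Errorf("git tag -d failed: %v: %s", err, out)
	}
	return nil
}

func main() {
	if err := deleteTag(".", "v1.0.0"); err != nil {
		fmt.Println(err)
	}
}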