// Copyright 2019 The Gitea Authors. All rights reserved.
// Copyright 2018 Jonas Franz. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package migrations

import (
	"bytes"
	"context"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"os"
	"path/filepath"
	"strings"
	"sync"
	"time"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/migrations/base"
	"code.gitea.io/gitea/modules/repository"
	repo_module "code.gitea.io/gitea/modules/repository"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/storage"
	"code.gitea.io/gitea/modules/structs"
	"code.gitea.io/gitea/modules/timeutil"
	"code.gitea.io/gitea/services/pull"

	gouuid "github.com/google/uuid"
)

var (
	_ base.Uploader = &GiteaLocalUploader{}
)

// GiteaLocalUploader implements an Uploader to gitea sites
type GiteaLocalUploader struct {
	ctx            context.Context
	doer           *models.User
	repoOwner      string
	repoName       string
	repo           *models.Repository
	labels         sync.Map
	milestones     sync.Map
	issues         sync.Map
	gitRepo        *git.Repository
	prHeadCache    map[string]struct{}
	userMap        map[int64]int64 // external user id mapping to user id
	prCache        map[int64]*models.PullRequest
	gitServiceType structs.GitServiceType
}

// NewGiteaLocalUploader creates a Gitea Uploader via the Gitea API v1
func NewGiteaLocalUploader(ctx context.Context, doer *models.User, repoOwner, repoName string) *GiteaLocalUploader {
	return &GiteaLocalUploader{
		ctx:         ctx,
		doer:        doer,
		repoOwner:   repoOwner,
		repoName:    repoName,
		prHeadCache: make(map[string]struct{}),
		userMap:     make(map[int64]int64),
		prCache:     make(map[int64]*models.PullRequest),
	}
}

// MaxBatchInsertSize returns the table's max batch insert size
func (g *GiteaLocalUploader) MaxBatchInsertSize(tp string) int {
	switch tp {
	case "issue":
		return models.MaxBatchInsertSize(new(models.Issue))
	case "comment":
		return models.MaxBatchInsertSize(new(models.Comment))
	case "milestone":
		return models.MaxBatchInsertSize(new(models.Milestone))
	case "label":
		return models.MaxBatchInsertSize(new(models.Label))
	case "release":
		return models.MaxBatchInsertSize(new(models.Release))
	case "pullrequest":
		return models.MaxBatchInsertSize(new(models.PullRequest))
	}
	return 10
}

// CreateRepo creates a repository
func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.MigrateOptions) error {
	owner, err := models.GetUserByName(g.repoOwner)
	if err != nil {
		return err
	}

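	// build the clone address; when credentials were supplied, embed them in the
	// URL (username/password, or "oauth2:<token>" for token auth) so the git
	// fetch below can access private source repositories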
	var remoteAddr = repo.CloneURL
	if len(opts.AuthToken) > 0 || len(opts.AuthUsername) > 0 {
		u, err := url.Parse(repo.CloneURL)
		if err != nil {
			return err
		}
		u.User = url.UserPassword(opts.AuthUsername, opts.AuthPassword)
		if len(opts.AuthToken) > 0 {
			u.User = url.UserPassword("oauth2", opts.AuthToken)
		}
		remoteAddr = u.String()
	}

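	// create a fresh repository record (in "being migrated" state), or reuse an
	// existing record when MigrateToRepoID was provided by the caller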
	var r *models.Repository
	if opts.MigrateToRepoID <= 0 {
		r, err = repo_module.CreateRepository(g.doer, owner, models.CreateRepoOptions{
			Name:           g.repoName,
			Description:    repo.Description,
			OriginalURL:    repo.OriginalURL,
			GitServiceType: opts.GitServiceType,
			IsPrivate:      opts.Private,
			IsMirror:       opts.Mirror,
			Status:         models.RepositoryBeingMigrated,
		})
	} else {
		r, err = models.GetRepositoryByID(opts.MigrateToRepoID)
	}
	if err != nil {
		return err
	}
	r.DefaultBranch = repo.DefaultBranch

	r, err = repository.MigrateRepositoryGitData(g.ctx, owner, r, base.MigrateOptions{
		RepoName:       g.repoName,
		Description:    repo.Description,
		OriginalURL:    repo.OriginalURL,
		GitServiceType: opts.GitServiceType,
		Mirror:         repo.IsMirror,
		CloneAddr:      remoteAddr,
		Private:        repo.IsPrivate,
		Wiki:           opts.Wiki,
		Releases:       opts.Releases, // if we didn't get releases, then sync them from tags
	})

	g.repo = r
	if err != nil {
		return err
	}
	g.gitRepo, err = git.OpenRepository(r.RepoPath())
	return err
}

// Close closes this uploader
func (g *GiteaLocalUploader) Close() {
	if g.gitRepo != nil {
		g.gitRepo.Close()
	}
}

// CreateTopics creates topics
func (g *GiteaLocalUploader) CreateTopics(topics ...string) error {
	// ignore topics that are too long for the db
	c := 0
	for i := range topics {
		if len(topics[i]) <= 50 {
			topics[c] = topics[i]
			c++
		}
	}
	topics = topics[:c]
	return models.SaveTopics(g.repo.ID, topics...)
}

// CreateMilestones creates milestones
func (g *GiteaLocalUploader) CreateMilestones(milestones ...*base.Milestone) error {
	var mss = make([]*models.Milestone, 0, len(milestones))
	for _, milestone := range milestones {
		var deadline timeutil.TimeStamp
		if milestone.Deadline != nil {
			deadline = timeutil.TimeStamp(milestone.Deadline.Unix())
		}
		if deadline == 0 {
			deadline = timeutil.TimeStamp(time.Date(9999, 1, 1, 0, 0, 0, 0, setting.DefaultUILocation).Unix())
		}
		var ms = models.Milestone{
			RepoID:       g.repo.ID,
			Name:         milestone.Title,
			Content:      milestone.Description,
			IsClosed:     milestone.State == "closed",
			DeadlineUnix: deadline,
		}
		if ms.IsClosed && milestone.Closed != nil {
			ms.ClosedDateUnix = timeutil.TimeStamp(milestone.Closed.Unix())
		}
		mss = append(mss, &ms)
	}

	err := models.InsertMilestones(mss...)
	if err != nil {
		return err
	}

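	// remember the milestone IDs keyed by name so issues and pull requests can
	// reference them during the rest of the migration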
	for _, ms := range mss {
		g.milestones.Store(ms.Name, ms.ID)
	}
	return nil
}

// CreateLabels creates labels
func (g *GiteaLocalUploader) CreateLabels(labels ...*base.Label) error {
	var lbs = make([]*models.Label, 0, len(labels))
	for _, label := range labels {
		lbs = append(lbs, &models.Label{
			RepoID:      g.repo.ID,
			Name:        label.Name,
			Description: label.Description,
			Color:       fmt.Sprintf("#%s", label.Color),
		})
	}

	err := models.NewLabels(lbs...)
	if err != nil {
		return err
	}
	for _, lb := range lbs {
		g.labels.Store(lb.Name, lb)
	}
	return nil
}

// CreateReleases creates releases
func (g *GiteaLocalUploader) CreateReleases(downloader base.Downloader, releases ...*base.Release) error {
	var rels = make([]*models.Release, 0, len(releases))
	for _, release := range releases {
		var rel = models.Release{
			RepoID:       g.repo.ID,
			TagName:      release.TagName,
			LowerTagName: strings.ToLower(release.TagName),
			Target:       release.TargetCommitish,
			Title:        release.Name,
			Sha1:         release.TargetCommitish,
			Note:         release.Body,
			IsDraft:      release.Draft,
			IsPrerelease: release.Prerelease,
			IsTag:        false,
			CreatedUnix:  timeutil.TimeStamp(release.Created.Unix()),
		}

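		// map the external publisher to a local user; unresolved users fall back
		// to the migration doer while the original author name and ID are kept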
		userid, ok := g.userMap[release.PublisherID]
		tp := g.gitServiceType.Name()
		if !ok && tp != "" {
			var err error
			userid, err = models.GetUserIDByExternalUserID(tp, fmt.Sprintf("%v", release.PublisherID))
			if err != nil {
				log.Error("GetUserIDByExternalUserID: %v", err)
			}
			if userid > 0 {
				g.userMap[release.PublisherID] = userid
			}
		}

		if userid > 0 {
			rel.PublisherID = userid
		} else {
			rel.PublisherID = g.doer.ID
			rel.OriginalAuthor = release.PublisherName
			rel.OriginalAuthorID = release.PublisherID
		}

		// calc NumCommits
		commit, err := g.gitRepo.GetCommit(rel.TagName)
		if err != nil {
			return fmt.Errorf("GetCommit: %v", err)
		}
		rel.NumCommits, err = commit.CommitsCount()
		if err != nil {
			return fmt.Errorf("CommitsCount: %v", err)
		}

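		// convert release assets into attachments; the binary content is fetched
		// below, either via the downloader or the asset's download URL, and saved
		// into attachment storage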
		for _, asset := range release.Assets {
			var attach = models.Attachment{
				UUID:          gouuid.New().String(),
				Name:          asset.Name,
				DownloadCount: int64(*asset.DownloadCount),
				Size:          int64(*asset.Size),
				CreatedUnix:   timeutil.TimeStamp(asset.Created.Unix()),
			}

			// download attachment
			err = func() error {
				var rc io.ReadCloser
				if asset.DownloadURL == nil {
					rc, err = downloader.GetAsset(rel.TagName, rel.ID, asset.ID)
					if err != nil {
						return err
					}
				} else {
					resp, err := http.Get(*asset.DownloadURL)
					if err != nil {
						return err
					}
					rc = resp.Body
				}
				// make sure the reader is closed once the attachment has been saved
				defer rc.Close()
				_, err = storage.Attachments.Save(attach.RelativePath(), rc)
				return err
			}()
			if err != nil {
				return err
			}
			rel.Attachments = append(rel.Attachments, &attach)
		}

		rels = append(rels, &rel)
	}

	return models.InsertReleases(rels...)
}

// SyncTags syncs releases with tags in the database
func (g *GiteaLocalUploader) SyncTags() error {
	return repository.SyncReleasesWithTags(g.repo, g.gitRepo)
}

// CreateIssues creates issues
func (g *GiteaLocalUploader) CreateIssues(issues ...*base.Issue) error {
	var iss = make([]*models.Issue, 0, len(issues))
	for _, issue := range issues {
		var labels []*models.Label
		for _, label := range issue.Labels {
			lb, ok := g.labels.Load(label.Name)
			if ok {
				labels = append(labels, lb.(*models.Label))
			}
		}

		var milestoneID int64
		if issue.Milestone != "" {
			milestone, ok := g.milestones.Load(issue.Milestone)
			if ok {
				milestoneID = milestone.(int64)
			}
		}

		var is = models.Issue{
			RepoID:      g.repo.ID,
			Repo:        g.repo,
			Index:       issue.Number,
			Title:       issue.Title,
			Content:     issue.Content,
			Ref:         issue.Ref,
			IsClosed:    issue.State == "closed",
			IsLocked:    issue.IsLocked,
			MilestoneID: milestoneID,
			Labels:      labels,
			CreatedUnix: timeutil.TimeStamp(issue.Created.Unix()),
			UpdatedUnix: timeutil.TimeStamp(issue.Updated.Unix()),
		}

		userid, ok := g.userMap[issue.PosterID]
		tp := g.gitServiceType.Name()
		if !ok && tp != "" {
			var err error
			userid, err = models.GetUserIDByExternalUserID(tp, fmt.Sprintf("%v", issue.PosterID))
			if err != nil {
				log.Error("GetUserIDByExternalUserID: %v", err)
			}
			if userid > 0 {
				g.userMap[issue.PosterID] = userid
			}
		}

		if userid > 0 {
			is.PosterID = userid
		} else {
			is.PosterID = g.doer.ID
			is.OriginalAuthor = issue.PosterName
			is.OriginalAuthorID = issue.PosterID
		}

		if issue.Closed != nil {
			is.ClosedUnix = timeutil.TimeStamp(issue.Closed.Unix())
		}
		// add reactions
		for _, reaction := range issue.Reactions {
			userid, ok := g.userMap[reaction.UserID]
			if !ok && tp != "" {
				var err error
				userid, err = models.GetUserIDByExternalUserID(tp, fmt.Sprintf("%v", reaction.UserID))
				if err != nil {
					log.Error("GetUserIDByExternalUserID: %v", err)
				}
				if userid > 0 {
					g.userMap[reaction.UserID] = userid
				}
			}
			var res = models.Reaction{
				Type:        reaction.Content,
				CreatedUnix: timeutil.TimeStampNow(),
			}
			if userid > 0 {
				res.UserID = userid
			} else {
				res.UserID = g.doer.ID
				res.OriginalAuthorID = reaction.UserID
				res.OriginalAuthor = reaction.UserName
			}
			is.Reactions = append(is.Reactions, &res)
		}
		iss = append(iss, &is)
	}

	if len(iss) > 0 {
		if err := models.InsertIssues(iss...); err != nil {
			return err
		}

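		// cache the new issue IDs by index so later comments and reviews can be
		// attached without querying the database for every issue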
		for _, is := range iss {
			g.issues.Store(is.Index, is.ID)
		}
	}

	return nil
}

// CreateComments creates comments of issues
func (g *GiteaLocalUploader) CreateComments(comments ...*base.Comment) error {
	var cms = make([]*models.Comment, 0, len(comments))
	for _, comment := range comments {
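		// resolve the local issue ID: use the in-memory issues cache first,
		// falling back to a database lookup by issue index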
		var issueID int64
		if issueIDStr, ok := g.issues.Load(comment.IssueIndex); !ok {
			issue, err := models.GetIssueByIndex(g.repo.ID, comment.IssueIndex)
			if err != nil {
				return err
			}
			issueID = issue.ID
			g.issues.Store(comment.IssueIndex, issueID)
		} else {
			issueID = issueIDStr.(int64)
		}

		userid, ok := g.userMap[comment.PosterID]
		tp := g.gitServiceType.Name()
		if !ok && tp != "" {
			var err error
			userid, err = models.GetUserIDByExternalUserID(tp, fmt.Sprintf("%v", comment.PosterID))
			if err != nil {
				log.Error("GetUserIDByExternalUserID: %v", err)
			}
			if userid > 0 {
				g.userMap[comment.PosterID] = userid
			}
		}

		cm := models.Comment{
			IssueID:     issueID,
			Type:        models.CommentTypeComment,
			Content:     comment.Content,
			CreatedUnix: timeutil.TimeStamp(comment.Created.Unix()),
			UpdatedUnix: timeutil.TimeStamp(comment.Updated.Unix()),
		}

		if userid > 0 {
			cm.PosterID = userid
		} else {
			cm.PosterID = g.doer.ID
			cm.OriginalAuthor = comment.PosterName
			cm.OriginalAuthorID = comment.PosterID
		}

		// add reactions
		for _, reaction := range comment.Reactions {
			userid, ok := g.userMap[reaction.UserID]
			if !ok && tp != "" {
				var err error
				userid, err = models.GetUserIDByExternalUserID(tp, fmt.Sprintf("%v", reaction.UserID))
				if err != nil {
					log.Error("GetUserIDByExternalUserID: %v", err)
				}
				if userid > 0 {
					g.userMap[reaction.UserID] = userid
				}
			}
			var res = models.Reaction{
				Type:        reaction.Content,
				CreatedUnix: timeutil.TimeStampNow(),
			}
			if userid > 0 {
				res.UserID = userid
			} else {
				res.UserID = g.doer.ID
				res.OriginalAuthorID = reaction.UserID
				res.OriginalAuthor = reaction.UserName
			}
			cm.Reactions = append(cm.Reactions, &res)
		}

		cms = append(cms, &cm)
	}

	if len(cms) == 0 {
		return nil
	}
	return models.InsertIssueComments(cms)
}

// CreatePullRequests creates pull requests
func (g *GiteaLocalUploader) CreatePullRequests(prs ...*base.PullRequest) error {
	var gprs = make([]*models.PullRequest, 0, len(prs))
	for _, pr := range prs {
		gpr, err := g.newPullRequest(pr)
		if err != nil {
			return err
		}

		userid, ok := g.userMap[pr.PosterID]
		tp := g.gitServiceType.Name()
		if !ok && tp != "" {
			var err error
			userid, err = models.GetUserIDByExternalUserID(tp, fmt.Sprintf("%v", pr.PosterID))
			if err != nil {
				log.Error("GetUserIDByExternalUserID: %v", err)
			}
			if userid > 0 {
				g.userMap[pr.PosterID] = userid
			}
		}

		if userid > 0 {
			gpr.Issue.PosterID = userid
		} else {
			gpr.Issue.PosterID = g.doer.ID
			gpr.Issue.OriginalAuthor = pr.PosterName
			gpr.Issue.OriginalAuthorID = pr.PosterID
		}

		gprs = append(gprs, gpr)
	}
	if err := models.InsertPullRequests(gprs...); err != nil {
		return err
	}
	for _, pr := range gprs {
		g.issues.Store(pr.Issue.Index, pr.Issue.ID)
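		// hand the migrated pull request to the pull service's task queue so it
		// gets post-processed (e.g. its patch checked) after the migration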
		pull.AddToTaskQueue(pr)
	}
	return nil
}

func (g *GiteaLocalUploader) newPullRequest(pr *base.PullRequest) (*models.PullRequest, error) {
	var labels []*models.Label
	for _, label := range pr.Labels {
		lb, ok := g.labels.Load(label.Name)
		if ok {
			labels = append(labels, lb.(*models.Label))
		}
	}

	var milestoneID int64
	if pr.Milestone != "" {
		milestone, ok := g.milestones.Load(pr.Milestone)
		if ok {
			milestoneID = milestone.(int64)
		}
	}

	// download patch file
	err := func() error {
		resp, err := http.Get(pr.PatchURL)
		if err != nil {
			return err
		}
		defer resp.Body.Close()
		pullDir := filepath.Join(g.repo.RepoPath(), "pulls")
		if err = os.MkdirAll(pullDir, os.ModePerm); err != nil {
			return err
		}
		f, err := os.Create(filepath.Join(pullDir, fmt.Sprintf("%d.patch", pr.Number)))
		if err != nil {
			return err
		}
		defer f.Close()
		_, err = io.Copy(f, resp.Body)
		return err
	}()
	if err != nil {
		return nil, err
	}

	// set head information
	pullHead := filepath.Join(g.repo.RepoPath(), "refs", "pull", fmt.Sprintf("%d", pr.Number))
	if err := os.MkdirAll(pullHead, os.ModePerm); err != nil {
		return nil, err
	}
	p, err := os.Create(filepath.Join(pullHead, "head"))
	if err != nil {
		return nil, err
	}
	_, err = p.WriteString(pr.Head.SHA)
	p.Close()
	if err != nil {
		return nil, err
	}

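	// for open pull requests coming from forks, add the fork as a remote (named
	// after the fork owner and remembered in prHeadCache so it is only added
	// once), fetch the head branch, and record its SHA under
	// refs/heads/<owner>/<branch> so the pull request head can be resolved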
var head = "unknown repository"
|
2019-07-14 05:16:15 -04:00
|
|
|
if pr.IsForkPullRequest() && pr.State != "closed" {
|
2019-05-06 21:12:51 -04:00
|
|
|
if pr.Head.OwnerName != "" {
|
|
|
|
remote := pr.Head.OwnerName
|
|
|
|
_, ok := g.prHeadCache[remote]
|
|
|
|
if !ok {
|
|
|
|
// git remote add
|
|
|
|
err := g.gitRepo.AddRemote(remote, pr.Head.CloneURL, true)
|
|
|
|
if err != nil {
|
|
|
|
log.Error("AddRemote failed: %s", err)
|
|
|
|
} else {
|
|
|
|
g.prHeadCache[remote] = struct{}{}
|
|
|
|
ok = true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if ok {
|
|
|
|
_, err = git.NewCommand("fetch", remote, pr.Head.Ref).RunInDir(g.repo.RepoPath())
|
|
|
|
if err != nil {
|
|
|
|
log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err)
|
|
|
|
} else {
|
|
|
|
headBranch := filepath.Join(g.repo.RepoPath(), "refs", "heads", pr.Head.OwnerName, pr.Head.Ref)
|
|
|
|
if err := os.MkdirAll(filepath.Dir(headBranch), os.ModePerm); err != nil {
|
2019-06-29 09:38:22 -04:00
|
|
|
return nil, err
|
2019-05-06 21:12:51 -04:00
|
|
|
}
|
|
|
|
b, err := os.Create(headBranch)
|
|
|
|
if err != nil {
|
2019-06-29 09:38:22 -04:00
|
|
|
return nil, err
|
2019-05-06 21:12:51 -04:00
|
|
|
}
|
|
|
|
_, err = b.WriteString(pr.Head.SHA)
|
2019-11-05 07:54:47 -05:00
|
|
|
b.Close()
|
2019-05-06 21:12:51 -04:00
|
|
|
if err != nil {
|
2019-06-29 09:38:22 -04:00
|
|
|
return nil, err
|
2019-05-06 21:12:51 -04:00
|
|
|
}
|
|
|
|
head = pr.Head.OwnerName + "/" + pr.Head.Ref
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
head = pr.Head.Ref
|
|
|
|
}
|
|
|
|
|
2019-10-14 02:10:42 -04:00
|
|
|
var issue = models.Issue{
|
|
|
|
RepoID: g.repo.ID,
|
|
|
|
Repo: g.repo,
|
|
|
|
Title: pr.Title,
|
|
|
|
Index: pr.Number,
|
|
|
|
Content: pr.Content,
|
|
|
|
MilestoneID: milestoneID,
|
|
|
|
IsPull: true,
|
|
|
|
IsClosed: pr.State == "closed",
|
|
|
|
IsLocked: pr.IsLocked,
|
|
|
|
Labels: labels,
|
|
|
|
CreatedUnix: timeutil.TimeStamp(pr.Created.Unix()),
|
2020-01-14 05:29:22 -05:00
|
|
|
UpdatedUnix: timeutil.TimeStamp(pr.Updated.Unix()),
|
2019-10-14 02:10:42 -04:00
|
|
|
}
|
|
|
|
|
2020-01-15 06:14:07 -05:00
|
|
|
tp := g.gitServiceType.Name()
|
|
|
|
|
2019-10-14 02:10:42 -04:00
|
|
|
userid, ok := g.userMap[pr.PosterID]
|
2020-01-15 06:14:07 -05:00
|
|
|
if !ok && tp != "" {
|
2019-10-14 02:10:42 -04:00
|
|
|
var err error
|
2020-01-15 06:14:07 -05:00
|
|
|
userid, err = models.GetUserIDByExternalUserID(tp, fmt.Sprintf("%v", pr.PosterID))
|
2019-10-14 02:10:42 -04:00
|
|
|
if err != nil {
|
|
|
|
log.Error("GetUserIDByExternalUserID: %v", err)
|
|
|
|
}
|
|
|
|
if userid > 0 {
|
|
|
|
g.userMap[pr.PosterID] = userid
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if userid > 0 {
|
|
|
|
issue.PosterID = userid
|
|
|
|
} else {
|
|
|
|
issue.PosterID = g.doer.ID
|
|
|
|
issue.OriginalAuthor = pr.PosterName
|
|
|
|
issue.OriginalAuthorID = pr.PosterID
|
|
|
|
}
|
|
|
|
|
2020-01-15 06:14:07 -05:00
|
|
|
// add reactions
|
|
|
|
for _, reaction := range pr.Reactions {
|
|
|
|
userid, ok := g.userMap[reaction.UserID]
|
|
|
|
if !ok && tp != "" {
|
|
|
|
var err error
|
|
|
|
userid, err = models.GetUserIDByExternalUserID(tp, fmt.Sprintf("%v", reaction.UserID))
|
|
|
|
if err != nil {
|
|
|
|
log.Error("GetUserIDByExternalUserID: %v", err)
|
|
|
|
}
|
|
|
|
if userid > 0 {
|
|
|
|
g.userMap[reaction.UserID] = userid
|
|
|
|
}
|
|
|
|
}
|
|
|
|
var res = models.Reaction{
|
|
|
|
Type: reaction.Content,
|
|
|
|
CreatedUnix: timeutil.TimeStampNow(),
|
|
|
|
}
|
|
|
|
if userid > 0 {
|
|
|
|
res.UserID = userid
|
|
|
|
} else {
|
|
|
|
res.UserID = g.doer.ID
|
|
|
|
res.OriginalAuthorID = reaction.UserID
|
|
|
|
res.OriginalAuthor = reaction.UserName
|
|
|
|
}
|
|
|
|
issue.Reactions = append(issue.Reactions, &res)
|
|
|
|
}
|
|
|
|
|
2019-05-06 21:12:51 -04:00
|
|
|
var pullRequest = models.PullRequest{
|
2019-10-18 07:13:31 -04:00
|
|
|
HeadRepoID: g.repo.ID,
|
|
|
|
HeadBranch: head,
|
|
|
|
BaseRepoID: g.repo.ID,
|
|
|
|
BaseBranch: pr.Base.Ref,
|
|
|
|
MergeBase: pr.Base.SHA,
|
|
|
|
Index: pr.Number,
|
|
|
|
HasMerged: pr.Merged,
|
2019-05-06 21:12:51 -04:00
|
|
|
|
2019-10-14 02:10:42 -04:00
|
|
|
Issue: &issue,
|
2019-05-06 21:12:51 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
if pullRequest.Issue.IsClosed && pr.Closed != nil {
|
2019-08-15 10:46:21 -04:00
|
|
|
pullRequest.Issue.ClosedUnix = timeutil.TimeStamp(pr.Closed.Unix())
|
2019-05-06 21:12:51 -04:00
|
|
|
}
|
|
|
|
if pullRequest.HasMerged && pr.MergedTime != nil {
|
2019-08-15 10:46:21 -04:00
|
|
|
pullRequest.MergedUnix = timeutil.TimeStamp(pr.MergedTime.Unix())
|
2019-05-06 21:12:51 -04:00
|
|
|
pullRequest.MergedCommitID = pr.MergeCommitSHA
|
|
|
|
pullRequest.MergerID = g.doer.ID
|
|
|
|
}
|
|
|
|
|
|
|
|
// TODO: assignees
|
|
|
|
|
2019-06-29 09:38:22 -04:00
|
|
|
return &pullRequest, nil
|
2019-05-06 21:12:51 -04:00
|
|
|
}
|
|
|
|
|
2020-01-23 12:28:15 -05:00
|
|
|
func convertReviewState(state string) models.ReviewType {
|
|
|
|
switch state {
|
|
|
|
case base.ReviewStatePending:
|
|
|
|
return models.ReviewTypePending
|
|
|
|
case base.ReviewStateApproved:
|
|
|
|
return models.ReviewTypeApprove
|
|
|
|
case base.ReviewStateChangesRequested:
|
|
|
|
return models.ReviewTypeReject
|
|
|
|
case base.ReviewStateCommented:
|
|
|
|
return models.ReviewTypeComment
|
|
|
|
default:
|
|
|
|
return models.ReviewTypePending
|
|
|
|
}
|
|
|
|
}

// CreateReviews creates pull request reviews
func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {
	var cms = make([]*models.Review, 0, len(reviews))
	for _, review := range reviews {
		var issueID int64
		if issueIDStr, ok := g.issues.Load(review.IssueIndex); !ok {
			issue, err := models.GetIssueByIndex(g.repo.ID, review.IssueIndex)
			if err != nil {
				return err
			}
			issueID = issue.ID
			g.issues.Store(review.IssueIndex, issueID)
		} else {
			issueID = issueIDStr.(int64)
		}

		userid, ok := g.userMap[review.ReviewerID]
		tp := g.gitServiceType.Name()
		if !ok && tp != "" {
			var err error
			userid, err = models.GetUserIDByExternalUserID(tp, fmt.Sprintf("%v", review.ReviewerID))
			if err != nil {
				log.Error("GetUserIDByExternalUserID: %v", err)
			}
			if userid > 0 {
				g.userMap[review.ReviewerID] = userid
			}
		}

		var cm = models.Review{
			Type:        convertReviewState(review.State),
			IssueID:     issueID,
			Content:     review.Content,
			Official:    review.Official,
			CreatedUnix: timeutil.TimeStamp(review.CreatedAt.Unix()),
			UpdatedUnix: timeutil.TimeStamp(review.CreatedAt.Unix()),
		}

		if userid > 0 {
			cm.ReviewerID = userid
		} else {
			cm.ReviewerID = g.doer.ID
			cm.OriginalAuthor = review.ReviewerName
			cm.OriginalAuthorID = review.ReviewerID
		}

		// get pr
		pr, ok := g.prCache[issueID]
		if !ok {
			var err error
			pr, err = models.GetPullRequestByIssueIDWithNoAttributes(issueID)
			if err != nil {
				return err
			}
			g.prCache[issueID] = pr
		}

		for _, comment := range review.Comments {
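			// a non-zero line means the importer supplied the commented line
			// directly; otherwise recover it from the diff hunk header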
			line := comment.Line
			if line != 0 {
				comment.Position = 1
			} else {
				_, _, line, _ = git.ParseDiffHunkString(comment.DiffHunk)
			}
			headCommitID, err := g.gitRepo.GetRefCommitID(pr.GetGitRefName())
			if err != nil {
				return fmt.Errorf("GetRefCommitID[%s]: %v", pr.GetGitRefName(), err)
			}

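			// regenerate the patch context for the code comment by diffing the file
			// between the merge base and the head commit, then cutting the diff down
			// to a few lines around the commented line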
			var patch string
			patchBuf := new(bytes.Buffer)
			if err := git.GetRepoRawDiffForFile(g.gitRepo, pr.MergeBase, headCommitID, git.RawDiffNormal, comment.TreePath, patchBuf); err != nil {
				// We should ignore the error since the commit may have been removed by a force push to the pull request
				log.Warn("GetRepoRawDiffForFile failed when migrating [%s, %s, %s, %s]: %v", g.gitRepo.Path, pr.MergeBase, headCommitID, comment.TreePath, err)
			} else {
				patch = git.CutDiffAroundLine(patchBuf, int64((&models.Comment{Line: int64(line + comment.Position - 1)}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines)
			}

			var c = models.Comment{
				Type:        models.CommentTypeCode,
				PosterID:    comment.PosterID,
				IssueID:     issueID,
				Content:     comment.Content,
				Line:        int64(line + comment.Position - 1),
				TreePath:    comment.TreePath,
				CommitSHA:   comment.CommitID,
				Patch:       patch,
				CreatedUnix: timeutil.TimeStamp(comment.CreatedAt.Unix()),
				UpdatedUnix: timeutil.TimeStamp(comment.UpdatedAt.Unix()),
			}

			if userid > 0 {
				c.PosterID = userid
			} else {
				c.PosterID = g.doer.ID
				c.OriginalAuthor = review.ReviewerName
				c.OriginalAuthorID = review.ReviewerID
			}

			cm.Comments = append(cm.Comments, &c)
		}

		cms = append(cms, &cm)
	}

	return models.InsertReviews(cms)
}

// Rollback is called when the migration failed; it rolls back all the changes.
func (g *GiteaLocalUploader) Rollback() error {
	if g.repo != nil && g.repo.ID > 0 {
		if err := models.DeleteRepository(g.doer, g.repo.OwnerID, g.repo.ID); err != nil {
			return err
		}
	}
	return nil
}