2019-05-06 19:12:51 -06:00
|
|
|
// Copyright 2019 The Gitea Authors. All rights reserved.
|
|
|
|
// Copyright 2018 Jonas Franz. All rights reserved.
|
2022-11-27 11:20:29 -07:00
|
|
|
// SPDX-License-Identifier: MIT
|
2019-05-06 19:12:51 -06:00
|
|
|
|
|
|
|
package migrations
|
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
|
|
|
"fmt"
|
2020-08-27 19:36:37 -06:00
|
|
|
"io"
|
2019-05-06 19:12:51 -06:00
|
|
|
"net/http"
|
|
|
|
"net/url"
|
2021-06-30 01:23:49 -06:00
|
|
|
"strconv"
|
2019-05-06 19:12:51 -06:00
|
|
|
"strings"
|
2019-12-16 21:16:54 -07:00
|
|
|
"time"
|
2019-05-06 19:12:51 -06:00
|
|
|
|
2022-11-02 20:32:52 -06:00
|
|
|
"code.gitea.io/gitea/modules/git"
|
2019-05-06 19:12:51 -06:00
|
|
|
"code.gitea.io/gitea/modules/log"
|
2021-11-16 08:25:33 -07:00
|
|
|
base "code.gitea.io/gitea/modules/migration"
|
2021-08-18 07:10:39 -06:00
|
|
|
"code.gitea.io/gitea/modules/proxy"
|
2019-10-14 00:10:42 -06:00
|
|
|
"code.gitea.io/gitea/modules/structs"
|
2019-05-06 19:12:51 -06:00
|
|
|
|
2023-04-08 05:27:30 -06:00
|
|
|
"github.com/google/go-github/v51/github"
|
2019-05-06 19:12:51 -06:00
|
|
|
"golang.org/x/oauth2"
|
|
|
|
)
|
|
|
|
|
|
|
|
var (
	// Compile-time checks that GithubDownloaderV3 satisfies the migration
	// Downloader interface and its factory the DownloaderFactory interface.
	_ base.Downloader        = &GithubDownloaderV3{}
	_ base.DownloaderFactory = &GithubDownloaderV3Factory{}

	// GithubLimitRateRemaining limit to wait for new rate to apply
	// (requests pause while a client's remaining quota is at or below this).
	GithubLimitRateRemaining = 0
)
|
|
|
|
|
|
|
|
// init registers the GitHub v3 downloader factory with the migration
// framework at package load time.
func init() {
	RegisterDownloaderFactory(&GithubDownloaderV3Factory{})
}
|
|
|
|
|
|
|
|
// GithubDownloaderV3Factory defines a github downloader v3 factory.
// It is stateless; each call to New produces an independent downloader.
type GithubDownloaderV3Factory struct{}
|
2019-05-06 19:12:51 -06:00
|
|
|
|
|
|
|
// New returns a Downloader related to this factory according MigrateOptions
|
2020-09-02 11:49:25 -06:00
|
|
|
func (f *GithubDownloaderV3Factory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) {
|
2019-10-13 07:23:14 -06:00
|
|
|
u, err := url.Parse(opts.CloneAddr)
|
2019-05-06 19:12:51 -06:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2020-09-21 08:36:51 -06:00
|
|
|
baseURL := u.Scheme + "://" + u.Host
|
2019-05-06 19:12:51 -06:00
|
|
|
fields := strings.Split(u.Path, "/")
|
|
|
|
oldOwner := fields[1]
|
|
|
|
oldName := strings.TrimSuffix(fields[2], ".git")
|
|
|
|
|
2022-09-04 04:47:56 -06:00
|
|
|
log.Trace("Create github downloader BaseURL: %s %s/%s", baseURL, oldOwner, oldName)
|
2019-05-06 19:12:51 -06:00
|
|
|
|
2020-09-21 08:36:51 -06:00
|
|
|
return NewGithubDownloaderV3(ctx, baseURL, opts.AuthUsername, opts.AuthPassword, opts.AuthToken, oldOwner, oldName), nil
|
2019-05-06 19:12:51 -06:00
|
|
|
}
|
|
|
|
|
2019-10-14 00:10:42 -06:00
|
|
|
// GitServiceType returns the type of git service this factory handles,
// which is always GithubService.
func (f *GithubDownloaderV3Factory) GitServiceType() structs.GitServiceType {
	return structs.GithubService
}
|
|
|
|
|
2021-07-08 05:38:13 -06:00
|
|
|
// GithubDownloaderV3 implements a Downloader interface to get repository information
// from github via APIv3
type GithubDownloaderV3 struct {
	base.NullDownloader                   // provides no-op defaults for unimplemented Downloader methods
	ctx                 context.Context   // request context; replaceable via SetContext
	clients             []*github.Client  // one API client per auth token, rotated by waitAndPickClient
	baseURL             string            // API server root, e.g. "https://github.com"
	repoOwner           string
	repoName            string
	userName            string
	password            string
	rates               []*github.Rate // last observed rate limit per client; parallel to clients, nil = unknown
	curClientIdx        int            // index into clients/rates of the client currently in use
	maxPerPage          int            // page size for list API calls (set to 100 by the constructor)
	SkipReactions       bool           // when true, reaction fetching is skipped
	SkipReviews         bool           // when true, review fetching is skipped
}
|
|
|
|
|
|
|
|
// NewGithubDownloaderV3 creates a github Downloader via github v3 API
|
2020-09-21 08:36:51 -06:00
|
|
|
func NewGithubDownloaderV3(ctx context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GithubDownloaderV3 {
|
2022-01-20 10:46:10 -07:00
|
|
|
downloader := GithubDownloaderV3{
|
2020-10-24 23:11:03 -06:00
|
|
|
userName: userName,
|
2022-09-04 04:47:56 -06:00
|
|
|
baseURL: baseURL,
|
2020-10-24 23:11:03 -06:00
|
|
|
password: password,
|
|
|
|
ctx: ctx,
|
|
|
|
repoOwner: repoOwner,
|
|
|
|
repoName: repoName,
|
|
|
|
maxPerPage: 100,
|
2019-05-06 19:12:51 -06:00
|
|
|
}
|
|
|
|
|
2020-08-27 19:36:37 -06:00
|
|
|
if token != "" {
|
2021-10-14 23:47:15 -06:00
|
|
|
tokens := strings.Split(token, ",")
|
|
|
|
for _, token := range tokens {
|
|
|
|
token = strings.TrimSpace(token)
|
|
|
|
ts := oauth2.StaticTokenSource(
|
|
|
|
&oauth2.Token{AccessToken: token},
|
|
|
|
)
|
2022-01-20 10:46:10 -07:00
|
|
|
client := &http.Client{
|
2021-10-14 23:47:15 -06:00
|
|
|
Transport: &oauth2.Transport{
|
2021-11-20 02:34:05 -07:00
|
|
|
Base: NewMigrationHTTPTransport(),
|
2021-10-14 23:47:15 -06:00
|
|
|
Source: oauth2.ReuseTokenSource(nil, ts),
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
downloader.addClient(client, baseURL)
|
|
|
|
}
|
|
|
|
} else {
|
2022-01-20 10:46:10 -07:00
|
|
|
transport := NewMigrationHTTPTransport()
|
2021-11-20 02:34:05 -07:00
|
|
|
transport.Proxy = func(req *http.Request) (*url.URL, error) {
|
|
|
|
req.SetBasicAuth(userName, password)
|
|
|
|
return proxy.Proxy()(req)
|
|
|
|
}
|
2022-01-20 10:46:10 -07:00
|
|
|
client := &http.Client{
|
2021-11-20 02:34:05 -07:00
|
|
|
Transport: transport,
|
2021-10-14 23:47:15 -06:00
|
|
|
}
|
|
|
|
downloader.addClient(client, baseURL)
|
2019-05-06 19:12:51 -06:00
|
|
|
}
|
2021-10-14 23:47:15 -06:00
|
|
|
return &downloader
|
|
|
|
}
|
|
|
|
|
2022-09-04 04:47:56 -06:00
|
|
|
// String implements Stringer; the result identifies the server and the
// owner/name of the repository being migrated, for logs and error messages.
func (g *GithubDownloaderV3) String() string {
	return fmt.Sprintf("migration from github server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
}
|
|
|
|
|
|
|
|
// ColorFormat provides a basic color format for a GithubDownloader.
// A nil receiver is handled explicitly because logging helpers may format
// a downloader pointer that was never assigned.
func (g *GithubDownloaderV3) ColorFormat(s fmt.State) {
	if g == nil {
		log.ColorFprintf(s, "<nil: GithubDownloaderV3>")
		return
	}
	log.ColorFprintf(s, "migration from github server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
}
|
|
|
|
|
2021-10-14 23:47:15 -06:00
|
|
|
// addClient wraps an HTTP client in a go-github API client and appends it to
// the rotation pool, together with a nil rate entry (rate unknown until the
// first response is seen).
func (g *GithubDownloaderV3) addClient(client *http.Client, baseURL string) {
	githubClient := github.NewClient(client)
	if baseURL != "https://github.com" {
		// NOTE(review): the NewEnterpriseClient error is silently discarded;
		// on a malformed baseURL githubClient would be nil — confirm callers
		// always pass a URL that parsed earlier.
		githubClient, _ = github.NewEnterpriseClient(baseURL, baseURL, client)
	}
	g.clients = append(g.clients, githubClient)
	g.rates = append(g.rates, nil)
}
|
|
|
|
|
2019-12-16 21:16:54 -07:00
|
|
|
// SetContext set context used for subsequent API requests.
func (g *GithubDownloaderV3) SetContext(ctx context.Context) {
	g.ctx = ctx
}
|
|
|
|
|
2021-10-14 23:47:15 -06:00
|
|
|
// waitAndPickClient selects the pooled client with the most remaining
// rate-limit quota and, if even that client is exhausted (remaining at or
// below GithubLimitRateRemaining), sleeps until the rate window resets or
// the context is cancelled.
func (g *GithubDownloaderV3) waitAndPickClient() {
	var recentIdx int
	var maxRemaining int
	// Pick the client with the largest known remaining quota.
	for i := 0; i < len(g.clients); i++ {
		if g.rates[i] != nil && g.rates[i].Remaining > maxRemaining {
			maxRemaining = g.rates[i].Remaining
			recentIdx = i
		}
	}
	g.curClientIdx = recentIdx // if no max remain, it will always pick the first client.

	// A nil rate means "unknown" and lets the request proceed; otherwise wait
	// for the reset time, then re-query the server (RefreshRate does not
	// consume quota) and loop until quota is available again.
	for g.rates[g.curClientIdx] != nil && g.rates[g.curClientIdx].Remaining <= GithubLimitRateRemaining {
		timer := time.NewTimer(time.Until(g.rates[g.curClientIdx].Reset.Time))
		select {
		case <-g.ctx.Done():
			timer.Stop()
			return
		case <-timer.C:
		}

		err := g.RefreshRate()
		if err != nil {
			log.Error("g.getClient().RateLimits: %s", err)
		}
	}
}
|
2019-12-16 21:16:54 -07:00
|
|
|
|
2020-01-16 08:15:44 -07:00
|
|
|
// RefreshRate update the current rate (doesn't count in rate limit)
|
|
|
|
func (g *GithubDownloaderV3) RefreshRate() error {
|
2021-10-14 23:47:15 -06:00
|
|
|
rates, _, err := g.getClient().RateLimits(g.ctx)
|
2020-01-16 08:15:44 -07:00
|
|
|
if err != nil {
|
2021-04-15 07:34:22 -06:00
|
|
|
// if rate limit is not enabled, ignore it
|
|
|
|
if strings.Contains(err.Error(), "404") {
|
2021-10-14 23:47:15 -06:00
|
|
|
g.setRate(nil)
|
2021-04-15 07:34:22 -06:00
|
|
|
return nil
|
|
|
|
}
|
2020-01-16 08:15:44 -07:00
|
|
|
return err
|
2019-12-16 21:16:54 -07:00
|
|
|
}
|
2020-01-16 08:15:44 -07:00
|
|
|
|
2021-10-14 23:47:15 -06:00
|
|
|
g.setRate(rates.GetCore())
|
2020-01-16 08:15:44 -07:00
|
|
|
return nil
|
2019-12-16 21:16:54 -07:00
|
|
|
}
|
|
|
|
|
2021-10-14 23:47:15 -06:00
|
|
|
// getClient returns the API client currently selected by waitAndPickClient.
func (g *GithubDownloaderV3) getClient() *github.Client {
	return g.clients[g.curClientIdx]
}
|
|
|
|
|
|
|
|
// setRate records the latest observed rate limit for the current client;
// nil marks the rate as unknown/unlimited.
func (g *GithubDownloaderV3) setRate(rate *github.Rate) {
	g.rates[g.curClientIdx] = rate
}
|
|
|
|
|
2019-05-06 19:12:51 -06:00
|
|
|
// GetRepoInfo returns a repository information
|
|
|
|
func (g *GithubDownloaderV3) GetRepoInfo() (*base.Repository, error) {
|
2021-10-14 23:47:15 -06:00
|
|
|
g.waitAndPickClient()
|
|
|
|
gr, resp, err := g.getClient().Repositories.Get(g.ctx, g.repoOwner, g.repoName)
|
2019-05-06 19:12:51 -06:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-10-14 23:47:15 -06:00
|
|
|
g.setRate(&resp.Rate)
|
2019-12-16 21:16:54 -07:00
|
|
|
|
2019-05-06 19:12:51 -06:00
|
|
|
// convert github repo to stand Repo
|
|
|
|
return &base.Repository{
|
2020-09-15 08:37:44 -06:00
|
|
|
Owner: g.repoOwner,
|
|
|
|
Name: gr.GetName(),
|
2021-08-10 03:49:43 -06:00
|
|
|
IsPrivate: gr.GetPrivate(),
|
2020-09-15 08:37:44 -06:00
|
|
|
Description: gr.GetDescription(),
|
|
|
|
OriginalURL: gr.GetHTMLURL(),
|
|
|
|
CloneURL: gr.GetCloneURL(),
|
2021-08-10 03:49:43 -06:00
|
|
|
DefaultBranch: gr.GetDefaultBranch(),
|
2019-05-06 19:12:51 -06:00
|
|
|
}, nil
|
|
|
|
}
|
|
|
|
|
2019-08-14 00:16:12 -06:00
|
|
|
// GetTopics return github topics
|
|
|
|
func (g *GithubDownloaderV3) GetTopics() ([]string, error) {
|
2021-10-14 23:47:15 -06:00
|
|
|
g.waitAndPickClient()
|
|
|
|
r, resp, err := g.getClient().Repositories.Get(g.ctx, g.repoOwner, g.repoName)
|
2019-12-16 21:16:54 -07:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-10-14 23:47:15 -06:00
|
|
|
g.setRate(&resp.Rate)
|
2019-12-16 21:16:54 -07:00
|
|
|
return r.Topics, nil
|
2019-08-14 00:16:12 -06:00
|
|
|
}
|
|
|
|
|
2019-05-06 19:12:51 -06:00
|
|
|
// GetMilestones returns milestones
|
|
|
|
func (g *GithubDownloaderV3) GetMilestones() ([]*base.Milestone, error) {
|
2022-01-20 10:46:10 -07:00
|
|
|
perPage := g.maxPerPage
|
|
|
|
milestones := make([]*base.Milestone, 0, perPage)
|
2019-05-06 19:12:51 -06:00
|
|
|
for i := 1; ; i++ {
|
2021-10-14 23:47:15 -06:00
|
|
|
g.waitAndPickClient()
|
|
|
|
ms, resp, err := g.getClient().Issues.ListMilestones(g.ctx, g.repoOwner, g.repoName,
|
2019-05-06 19:12:51 -06:00
|
|
|
&github.MilestoneListOptions{
|
|
|
|
State: "all",
|
|
|
|
ListOptions: github.ListOptions{
|
|
|
|
Page: i,
|
|
|
|
PerPage: perPage,
|
2022-01-20 10:46:10 -07:00
|
|
|
},
|
|
|
|
})
|
2019-05-06 19:12:51 -06:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-10-14 23:47:15 -06:00
|
|
|
g.setRate(&resp.Rate)
|
2019-05-06 19:12:51 -06:00
|
|
|
|
|
|
|
for _, m := range ms {
|
2022-01-20 10:46:10 -07:00
|
|
|
state := "open"
|
2019-05-06 19:12:51 -06:00
|
|
|
if m.State != nil {
|
|
|
|
state = *m.State
|
|
|
|
}
|
|
|
|
milestones = append(milestones, &base.Milestone{
|
2021-08-10 03:49:43 -06:00
|
|
|
Title: m.GetTitle(),
|
|
|
|
Description: m.GetDescription(),
|
2023-04-08 05:27:30 -06:00
|
|
|
Deadline: convertGithubTimestampToTime(m.DueOn),
|
2019-05-06 19:12:51 -06:00
|
|
|
State: state,
|
2023-04-08 05:27:30 -06:00
|
|
|
Created: m.GetCreatedAt().Time,
|
|
|
|
Updated: convertGithubTimestampToTime(m.UpdatedAt),
|
|
|
|
Closed: convertGithubTimestampToTime(m.ClosedAt),
|
2019-05-06 19:12:51 -06:00
|
|
|
})
|
|
|
|
}
|
|
|
|
if len(ms) < perPage {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return milestones, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func convertGithubLabel(label *github.Label) *base.Label {
|
|
|
|
return &base.Label{
|
2021-08-10 03:49:43 -06:00
|
|
|
Name: label.GetName(),
|
|
|
|
Color: label.GetColor(),
|
|
|
|
Description: label.GetDescription(),
|
2019-05-06 19:12:51 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// GetLabels returns labels
|
|
|
|
func (g *GithubDownloaderV3) GetLabels() ([]*base.Label, error) {
|
2022-01-20 10:46:10 -07:00
|
|
|
perPage := g.maxPerPage
|
|
|
|
labels := make([]*base.Label, 0, perPage)
|
2019-05-06 19:12:51 -06:00
|
|
|
for i := 1; ; i++ {
|
2021-10-14 23:47:15 -06:00
|
|
|
g.waitAndPickClient()
|
|
|
|
ls, resp, err := g.getClient().Issues.ListLabels(g.ctx, g.repoOwner, g.repoName,
|
2019-05-06 19:12:51 -06:00
|
|
|
&github.ListOptions{
|
|
|
|
Page: i,
|
|
|
|
PerPage: perPage,
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-10-14 23:47:15 -06:00
|
|
|
g.setRate(&resp.Rate)
|
2019-05-06 19:12:51 -06:00
|
|
|
|
|
|
|
for _, label := range ls {
|
|
|
|
labels = append(labels, convertGithubLabel(label))
|
|
|
|
}
|
|
|
|
if len(ls) < perPage {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return labels, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// convertGithubRelease converts a GitHub release (and its assets) into the
// migration-neutral base.Release. Asset content is not fetched here: each
// asset carries a DownloadFunc closure that downloads it lazily on demand.
func (g *GithubDownloaderV3) convertGithubRelease(rel *github.RepositoryRelease) *base.Release {
	// GitHub allows commitish to be a reference.
	// In this case, we need to remove the prefix, i.e. convert "refs/heads/main" to "main".
	targetCommitish := strings.TrimPrefix(rel.GetTargetCommitish(), git.BranchPrefix)

	r := &base.Release{
		Name:            rel.GetName(),
		TagName:         rel.GetTagName(),
		TargetCommitish: targetCommitish,
		Draft:           rel.GetDraft(),
		Prerelease:      rel.GetPrerelease(),
		Created:         rel.GetCreatedAt().Time,
		PublisherID:     rel.GetAuthor().GetID(),
		PublisherName:   rel.GetAuthor().GetLogin(),
		PublisherEmail:  rel.GetAuthor().GetEmail(),
		Body:            rel.GetBody(),
	}

	// Draft releases may have no publish time; only copy it when present.
	if rel.PublishedAt != nil {
		r.Published = rel.PublishedAt.Time
	}

	// Shared by all DownloadFunc closures below for following redirect URLs.
	httpClient := NewMigrationHTTPClient()

	for _, asset := range rel.Assets {
		assetID := *asset.ID // Don't optimize this, for closure we need a local variable
		r.Assets = append(r.Assets, &base.ReleaseAsset{
			ID:            asset.GetID(),
			Name:          asset.GetName(),
			ContentType:   asset.ContentType,
			Size:          asset.Size,
			DownloadCount: asset.DownloadCount,
			Created:       asset.CreatedAt.Time,
			Updated:       asset.UpdatedAt.Time,
			DownloadFunc: func() (io.ReadCloser, error) {
				g.waitAndPickClient()
				// The API either streams the asset directly (readCloser) or
				// hands back a redirect URL to fetch it from.
				readCloser, redirectURL, err := g.getClient().Repositories.DownloadReleaseAsset(g.ctx, g.repoOwner, g.repoName, assetID, nil)
				if err != nil {
					return nil, err
				}
				if err := g.RefreshRate(); err != nil {
					log.Error("g.getClient().RateLimits: %s", err)
				}

				if readCloser != nil {
					return readCloser, nil
				}

				if redirectURL == "" {
					return nil, fmt.Errorf("no release asset found for %d", assetID)
				}

				// Prevent open redirect
				if !hasBaseURL(redirectURL, g.baseURL) &&
					!hasBaseURL(redirectURL, "https://objects.githubusercontent.com/") {
					WarnAndNotice("Unexpected AssetURL for assetID[%d] in %s: %s", asset.GetID(), g, redirectURL)

					// Store the suspicious URL itself as the asset content
					// rather than following it.
					return io.NopCloser(strings.NewReader(redirectURL)), nil
				}

				g.waitAndPickClient()
				req, err := http.NewRequestWithContext(g.ctx, "GET", redirectURL, nil)
				if err != nil {
					return nil, err
				}
				resp, err := httpClient.Do(req)
				// Refresh the rate even when the request failed, so the rate
				// bookkeeping stays current; log but don't propagate its error.
				err1 := g.RefreshRate()
				if err1 != nil {
					log.Error("g.RefreshRate(): %s", err1)
				}
				if err != nil {
					return nil, err
				}
				// Caller is responsible for closing the returned body.
				return resp.Body, nil
			},
		})
	}
	return r
}
|
|
|
|
|
|
|
|
// GetReleases returns releases
|
|
|
|
func (g *GithubDownloaderV3) GetReleases() ([]*base.Release, error) {
|
2022-01-20 10:46:10 -07:00
|
|
|
perPage := g.maxPerPage
|
|
|
|
releases := make([]*base.Release, 0, perPage)
|
2019-05-06 19:12:51 -06:00
|
|
|
for i := 1; ; i++ {
|
2021-10-14 23:47:15 -06:00
|
|
|
g.waitAndPickClient()
|
|
|
|
ls, resp, err := g.getClient().Repositories.ListReleases(g.ctx, g.repoOwner, g.repoName,
|
2019-05-06 19:12:51 -06:00
|
|
|
&github.ListOptions{
|
|
|
|
Page: i,
|
|
|
|
PerPage: perPage,
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-10-14 23:47:15 -06:00
|
|
|
g.setRate(&resp.Rate)
|
2019-05-06 19:12:51 -06:00
|
|
|
|
|
|
|
for _, release := range ls {
|
|
|
|
releases = append(releases, g.convertGithubRelease(release))
|
|
|
|
}
|
|
|
|
if len(ls) < perPage {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return releases, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// GetIssues returns issues according start and limit
|
2019-05-30 14:26:57 -06:00
|
|
|
func (g *GithubDownloaderV3) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
|
2020-10-24 23:11:03 -06:00
|
|
|
if perPage > g.maxPerPage {
|
|
|
|
perPage = g.maxPerPage
|
|
|
|
}
|
2019-05-06 19:12:51 -06:00
|
|
|
opt := &github.IssueListByRepoOptions{
|
|
|
|
Sort: "created",
|
|
|
|
Direction: "asc",
|
|
|
|
State: "all",
|
|
|
|
ListOptions: github.ListOptions{
|
|
|
|
PerPage: perPage,
|
2019-05-30 14:26:57 -06:00
|
|
|
Page: page,
|
2019-05-06 19:12:51 -06:00
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2022-01-20 10:46:10 -07:00
|
|
|
allIssues := make([]*base.Issue, 0, perPage)
|
2021-10-14 23:47:15 -06:00
|
|
|
g.waitAndPickClient()
|
|
|
|
issues, resp, err := g.getClient().Issues.ListByRepo(g.ctx, g.repoOwner, g.repoName, opt)
|
2019-05-30 14:26:57 -06:00
|
|
|
if err != nil {
|
2022-10-24 13:29:17 -06:00
|
|
|
return nil, false, fmt.Errorf("error while listing repos: %w", err)
|
2019-05-30 14:26:57 -06:00
|
|
|
}
|
2020-12-26 20:34:19 -07:00
|
|
|
log.Trace("Request get issues %d/%d, but in fact get %d", perPage, page, len(issues))
|
2021-10-14 23:47:15 -06:00
|
|
|
g.setRate(&resp.Rate)
|
2019-05-30 14:26:57 -06:00
|
|
|
for _, issue := range issues {
|
|
|
|
if issue.IsPullRequest() {
|
|
|
|
continue
|
2019-05-06 19:12:51 -06:00
|
|
|
}
|
2021-08-10 03:49:43 -06:00
|
|
|
|
2022-01-20 10:46:10 -07:00
|
|
|
labels := make([]*base.Label, 0, len(issue.Labels))
|
2019-05-30 14:26:57 -06:00
|
|
|
for _, l := range issue.Labels {
|
2020-07-31 08:22:34 -06:00
|
|
|
labels = append(labels, convertGithubLabel(l))
|
2019-05-30 14:26:57 -06:00
|
|
|
}
|
|
|
|
|
2020-01-15 04:14:07 -07:00
|
|
|
// get reactions
|
|
|
|
var reactions []*base.Reaction
|
2021-11-14 12:11:10 -07:00
|
|
|
if !g.SkipReactions {
|
|
|
|
for i := 1; ; i++ {
|
|
|
|
g.waitAndPickClient()
|
|
|
|
res, resp, err := g.getClient().Reactions.ListIssueReactions(g.ctx, g.repoOwner, g.repoName, issue.GetNumber(), &github.ListOptions{
|
|
|
|
Page: i,
|
|
|
|
PerPage: perPage,
|
2020-01-15 04:14:07 -07:00
|
|
|
})
|
2021-11-14 12:11:10 -07:00
|
|
|
if err != nil {
|
|
|
|
return nil, false, err
|
|
|
|
}
|
|
|
|
g.setRate(&resp.Rate)
|
|
|
|
if len(res) == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
for _, reaction := range res {
|
|
|
|
reactions = append(reactions, &base.Reaction{
|
|
|
|
UserID: reaction.User.GetID(),
|
|
|
|
UserName: reaction.User.GetLogin(),
|
|
|
|
Content: reaction.GetContent(),
|
|
|
|
})
|
|
|
|
}
|
2020-01-15 04:14:07 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-08-10 03:49:43 -06:00
|
|
|
var assignees []string
|
|
|
|
for i := range issue.Assignees {
|
|
|
|
assignees = append(assignees, issue.Assignees[i].GetLogin())
|
|
|
|
}
|
|
|
|
|
2019-05-30 14:26:57 -06:00
|
|
|
allIssues = append(allIssues, &base.Issue{
|
Store the foreign ID of issues during migration (#18446)
Storing the foreign identifier of an imported issue in the database is a prerequisite to implement idempotent migrations or mirror for issues. It is a baby step towards mirroring that introduces a new table.
At the moment when an issue is created by the Gitea uploader, it fails if the issue already exists. The Gitea uploader could be modified so that, instead of failing, it looks up the database to find an existing issue. And if it does it would update the issue instead of creating a new one. However this is not currently possible because an information is missing from the database: the foreign identifier that uniquely represents the issue being migrated is not persisted. With this change, the foreign identifier is stored in the database and the Gitea uploader will then be able to run a query to figure out if a given issue being imported already exists.
The implementation of mirroring for issues, pull requests, releases, etc. can be done in three steps:
1. Store an identifier for the element being mirrored (issue, pull request...) in the database (this is the purpose of these changes)
2. Modify the Gitea uploader to be able to update an existing repository with all it contains (issues, pull request...) instead of failing if it exists
3. Optimize the Gitea uploader to speed up the updates, when possible.
The second step creates code that does not yet exist to enable idempotent migrations with the Gitea uploader. When a migration is done for the first time, the behavior is not changed. But when a migration is done for a repository that already exists, this new code is used to update it.
The third step can use the code created in the second step to optimize and speed up migrations. For instance, when a migration is resumed, an issue that has an update time that is not more recent can be skipped and only newly created issues or updated ones will be updated. Another example of optimization could be that a webhook notifies Gitea when an issue is updated. The code triggered by the webhook would download only this issue and call the code created in the second step to update the issue, as if it was in the process of an idempotent migration.
The ForeignReferences table is added to contain local and foreign ID pairs relative to a given repository. It can later be used for pull requests and other artifacts that can be mirrored. Although the foreign id could be added as a single field in issues or pull requests, it would need to be added to all tables that represent something that can be mirrored. Creating a new table makes for a simpler and more generic design. The drawback is that it requires an extra lookup to obtain the information. However, this extra information is only required during migration or mirroring and does not impact the way Gitea currently works.
The foreign identifier of an issue or pull request is similar to the identifier of an external user, which is stored in reactions, issues, etc. as OriginalPosterID and so on. The representation of a user is however different and the ability of users to link their account to an external user at a later time is also a logic that is different from what is involved in mirroring or migrations. For these reasons, despite some commonalities, it is unclear at this time how the two tables (foreign reference and external user) could be merged together.
The ForeignID field is extracted from the issue migration context so that it can be dumped in files with dump-repo and later restored via restore-repo.
The GetAllComments downloader method is introduced to simplify the implementation and not overload the Context for the purpose of pagination. It also clarifies in which context the comments are paginated and in which context they are not.
The Context interface is no longer useful for the purpose of retrieving the LocalID and ForeignID since they are now both available from the PullRequest and Issue struct. The Reviewable and Commentable interfaces replace and serve the same purpose.
The Context data member of PullRequest and Issue becomes a DownloaderContext to clarify that its purpose is not to support in memory operations while the current downloader is acting but is not otherwise persisted. It is, for instance, used by the GitLab downloader to store the IsMergeRequest boolean and sort out issues.
---
[source](https://lab.forgefriends.org/forgefriends/forgefriends/-/merge_requests/36)
Signed-off-by: Loïc Dachary <loic@dachary.org>
Co-authored-by: Loïc Dachary <loic@dachary.org>
2022-03-17 11:08:35 -06:00
|
|
|
Title: *issue.Title,
|
|
|
|
Number: int64(*issue.Number),
|
|
|
|
PosterID: issue.GetUser().GetID(),
|
|
|
|
PosterName: issue.GetUser().GetLogin(),
|
|
|
|
PosterEmail: issue.GetUser().GetEmail(),
|
|
|
|
Content: issue.GetBody(),
|
|
|
|
Milestone: issue.GetMilestone().GetTitle(),
|
|
|
|
State: issue.GetState(),
|
2023-04-08 05:27:30 -06:00
|
|
|
Created: issue.GetCreatedAt().Time,
|
|
|
|
Updated: issue.GetUpdatedAt().Time,
|
Store the foreign ID of issues during migration (#18446)
Storing the foreign identifier of an imported issue in the database is a prerequisite to implement idempotent migrations or mirror for issues. It is a baby step towards mirroring that introduces a new table.
At the moment when an issue is created by the Gitea uploader, it fails if the issue already exists. The Gitea uploader could be modified so that, instead of failing, it looks up the database to find an existing issue. And if it does it would update the issue instead of creating a new one. However this is not currently possible because an information is missing from the database: the foreign identifier that uniquely represents the issue being migrated is not persisted. With this change, the foreign identifier is stored in the database and the Gitea uploader will then be able to run a query to figure out if a given issue being imported already exists.
The implementation of mirroring for issues, pull requests, releases, etc. can be done in three steps:
1. Store an identifier for the element being mirrored (issue, pull request...) in the database (this is the purpose of these changes)
2. Modify the Gitea uploader to be able to update an existing repository with all it contains (issues, pull request...) instead of failing if it exists
3. Optimize the Gitea uploader to speed up the updates, when possible.
The second step creates code that does not yet exist to enable idempotent migrations with the Gitea uploader. When a migration is done for the first time, the behavior is not changed. But when a migration is done for a repository that already exists, this new code is used to update it.
The third step can use the code created in the second step to optimize and speed up migrations. For instance, when a migration is resumed, an issue that has an update time that is not more recent can be skipped and only newly created issues or updated ones will be updated. Another example of optimization could be that a webhook notifies Gitea when an issue is updated. The code triggered by the webhook would download only this issue and call the code created in the second step to update the issue, as if it was in the process of an idempotent migration.
The ForeignReferences table is added to contain local and foreign ID pairs relative to a given repository. It can later be used for pull requests and other artifacts that can be mirrored. Although the foreign id could be added as a single field in issues or pull requests, it would need to be added to all tables that represent something that can be mirrored. Creating a new table makes for a simpler and more generic design. The drawback is that it requires an extra lookup to obtain the information. However, this extra information is only required during migration or mirroring and does not impact the way Gitea currently works.
The foreign identifier of an issue or pull request is similar to the identifier of an external user, which is stored in reactions, issues, etc. as OriginalPosterID and so on. The representation of a user is however different and the ability of users to link their account to an external user at a later time is also a logic that is different from what is involved in mirroring or migrations. For these reasons, despite some commonalities, it is unclear at this time how the two tables (foreign reference and external user) could be merged together.
The ForeignID field is extracted from the issue migration context so that it can be dumped in files with dump-repo and later restored via restore-repo.
The GetAllComments downloader method is introduced to simplify the implementation and not overload the Context for the purpose of pagination. It also clarifies in which context the comments are paginated and in which context they are not.
The Context interface is no longer useful for the purpose of retrieving the LocalID and ForeignID since they are now both available from the PullRequest and Issue struct. The Reviewable and Commentable interfaces replace and serve the same purpose.
The Context data member of PullRequest and Issue becomes a DownloaderContext to clarify that its purpose is not to support in memory operations while the current downloader is acting but is not otherwise persisted. It is, for instance, used by the GitLab downloader to store the IsMergeRequest boolean and sort out issues.
---
[source](https://lab.forgefriends.org/forgefriends/forgefriends/-/merge_requests/36)
Signed-off-by: Loïc Dachary <loic@dachary.org>
Co-authored-by: Loïc Dachary <loic@dachary.org>
2022-03-17 11:08:35 -06:00
|
|
|
Labels: labels,
|
|
|
|
Reactions: reactions,
|
2023-04-08 05:27:30 -06:00
|
|
|
Closed: &issue.ClosedAt.Time,
|
Store the foreign ID of issues during migration (#18446)
Storing the foreign identifier of an imported issue in the database is a prerequisite to implement idempotent migrations or mirror for issues. It is a baby step towards mirroring that introduces a new table.
At the moment when an issue is created by the Gitea uploader, it fails if the issue already exists. The Gitea uploader could be modified so that, instead of failing, it looks up the database to find an existing issue. And if it does it would update the issue instead of creating a new one. However this is not currently possible because an information is missing from the database: the foreign identifier that uniquely represents the issue being migrated is not persisted. With this change, the foreign identifier is stored in the database and the Gitea uploader will then be able to run a query to figure out if a given issue being imported already exists.
The implementation of mirroring for issues, pull requests, releases, etc. can be done in three steps:
1. Store an identifier for the element being mirrored (issue, pull request...) in the database (this is the purpose of these changes)
2. Modify the Gitea uploader to be able to update an existing repository with all it contains (issues, pull request...) instead of failing if it exists
3. Optimize the Gitea uploader to speed up the updates, when possible.
The second step creates code that does not yet exist to enable idempotent migrations with the Gitea uploader. When a migration is done for the first time, the behavior is not changed. But when a migration is done for a repository that already exists, this new code is used to update it.
The third step can use the code created in the second step to optimize and speed up migrations. For instance, when a migration is resumed, an issue that has an update time that is not more recent can be skipped and only newly created issues or updated ones will be updated. Another example of optimization could be that a webhook notifies Gitea when an issue is updated. The code triggered by the webhook would download only this issue and call the code created in the second step to update the issue, as if it was in the process of an idempotent migration.
The ForeignReferences table is added to contain local and foreign ID pairs relative to a given repository. It can later be used for pull requests and other artifacts that can be mirrored. Although the foreign id could be added as a single field in issues or pull requests, it would need to be added to all tables that represent something that can be mirrored. Creating a new table makes for a simpler and more generic design. The drawback is that it requires an extra lookup to obtain the information. However, this extra information is only required during migration or mirroring and does not impact the way Gitea currently works.
The foreign identifier of an issue or pull request is similar to the identifier of an external user, which is stored in reactions, issues, etc. as OriginalPosterID and so on. The representation of a user is however different and the ability of users to link their account to an external user at a later time is also a logic that is different from what is involved in mirroring or migrations. For these reasons, despite some commonalities, it is unclear at this time how the two tables (foreign reference and external user) could be merged together.
The ForeignID field is extracted from the issue migration context so that it can be dumped in files with dump-repo and later restored via restore-repo.
The GetAllComments downloader method is introduced to simplify the implementation and not overload the Context for the purpose of pagination. It also clarifies in which context the comments are paginated and in which context they are not.
The Context interface is no longer useful for the purpose of retrieving the LocalID and ForeignID since they are now both available from the PullRequest and Issue struct. The Reviewable and Commentable interfaces replace and serve the same purpose.
The Context data member of PullRequest and Issue becomes a DownloaderContext to clarify that its purpose is not to support in memory operations while the current downloader is acting but is not otherwise persisted. It is, for instance, used by the GitLab downloader to store the IsMergeRequest boolean and sort out issues.
---
[source](https://lab.forgefriends.org/forgefriends/forgefriends/-/merge_requests/36)
Signed-off-by: Loïc Dachary <loic@dachary.org>
Co-authored-by: Loïc Dachary <loic@dachary.org>
2022-03-17 11:08:35 -06:00
|
|
|
IsLocked: issue.GetLocked(),
|
|
|
|
Assignees: assignees,
|
|
|
|
ForeignIndex: int64(*issue.Number),
|
2019-05-30 14:26:57 -06:00
|
|
|
})
|
2019-05-06 19:12:51 -06:00
|
|
|
}
|
2019-05-30 14:26:57 -06:00
|
|
|
|
|
|
|
return allIssues, len(issues) < perPage, nil
|
2019-05-06 19:12:51 -06:00
|
|
|
}
|
|
|
|
|
2021-06-30 01:23:49 -06:00
|
|
|
// SupportGetRepoComments return true if it supports get repo comments
|
|
|
|
func (g *GithubDownloaderV3) SupportGetRepoComments() bool {
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
2019-05-06 19:12:51 -06:00
|
|
|
// GetComments returns comments according issueNumber
|
Store the foreign ID of issues during migration (#18446)
Storing the foreign identifier of an imported issue in the database is a prerequisite to implement idempotent migrations or mirror for issues. It is a baby step towards mirroring that introduces a new table.
At the moment when an issue is created by the Gitea uploader, it fails if the issue already exists. The Gitea uploader could be modified so that, instead of failing, it looks up the database to find an existing issue. And if it does it would update the issue instead of creating a new one. However this is not currently possible because an information is missing from the database: the foreign identifier that uniquely represents the issue being migrated is not persisted. With this change, the foreign identifier is stored in the database and the Gitea uploader will then be able to run a query to figure out if a given issue being imported already exists.
The implementation of mirroring for issues, pull requests, releases, etc. can be done in three steps:
1. Store an identifier for the element being mirrored (issue, pull request...) in the database (this is the purpose of these changes)
2. Modify the Gitea uploader to be able to update an existing repository with all it contains (issues, pull request...) instead of failing if it exists
3. Optimize the Gitea uploader to speed up the updates, when possible.
The second step creates code that does not yet exist to enable idempotent migrations with the Gitea uploader. When a migration is done for the first time, the behavior is not changed. But when a migration is done for a repository that already exists, this new code is used to update it.
The third step can use the code created in the second step to optimize and speed up migrations. For instance, when a migration is resumed, an issue that has an update time that is not more recent can be skipped and only newly created issues or updated ones will be updated. Another example of optimization could be that a webhook notifies Gitea when an issue is updated. The code triggered by the webhook would download only this issue and call the code created in the second step to update the issue, as if it was in the process of an idempotent migration.
The ForeignReferences table is added to contain local and foreign ID pairs relative to a given repository. It can later be used for pull requests and other artifacts that can be mirrored. Although the foreign id could be added as a single field in issues or pull requests, it would need to be added to all tables that represent something that can be mirrored. Creating a new table makes for a simpler and more generic design. The drawback is that it requires an extra lookup to obtain the information. However, this extra information is only required during migration or mirroring and does not impact the way Gitea currently works.
The foreign identifier of an issue or pull request is similar to the identifier of an external user, which is stored in reactions, issues, etc. as OriginalPosterID and so on. The representation of a user is however different and the ability of users to link their account to an external user at a later time is also a logic that is different from what is involved in mirroring or migrations. For these reasons, despite some commonalities, it is unclear at this time how the two tables (foreign reference and external user) could be merged together.
The ForeignID field is extracted from the issue migration context so that it can be dumped in files with dump-repo and later restored via restore-repo.
The GetAllComments downloader method is introduced to simplify the implementation and not overload the Context for the purpose of pagination. It also clarifies in which context the comments are paginated and in which context they are not.
The Context interface is no longer useful for the purpose of retrieving the LocalID and ForeignID since they are now both available from the PullRequest and Issue struct. The Reviewable and Commentable interfaces replace and serve the same purpose.
The Context data member of PullRequest and Issue becomes a DownloaderContext to clarify that its purpose is not to support in memory operations while the current downloader is acting but is not otherwise persisted. It is, for instance, used by the GitLab downloader to store the IsMergeRequest boolean and sort out issues.
---
[source](https://lab.forgefriends.org/forgefriends/forgefriends/-/merge_requests/36)
Signed-off-by: Loïc Dachary <loic@dachary.org>
Co-authored-by: Loïc Dachary <loic@dachary.org>
2022-03-17 11:08:35 -06:00
|
|
|
func (g *GithubDownloaderV3) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
|
|
|
|
comments, err := g.getComments(commentable)
|
|
|
|
return comments, false, err
|
2021-06-30 01:23:49 -06:00
|
|
|
}
|
|
|
|
|
Store the foreign ID of issues during migration (#18446)
Storing the foreign identifier of an imported issue in the database is a prerequisite to implement idempotent migrations or mirror for issues. It is a baby step towards mirroring that introduces a new table.
At the moment when an issue is created by the Gitea uploader, it fails if the issue already exists. The Gitea uploader could be modified so that, instead of failing, it looks up the database to find an existing issue. And if it does it would update the issue instead of creating a new one. However this is not currently possible because an information is missing from the database: the foreign identifier that uniquely represents the issue being migrated is not persisted. With this change, the foreign identifier is stored in the database and the Gitea uploader will then be able to run a query to figure out if a given issue being imported already exists.
The implementation of mirroring for issues, pull requests, releases, etc. can be done in three steps:
1. Store an identifier for the element being mirrored (issue, pull request...) in the database (this is the purpose of these changes)
2. Modify the Gitea uploader to be able to update an existing repository with all it contains (issues, pull request...) instead of failing if it exists
3. Optimize the Gitea uploader to speed up the updates, when possible.
The second step creates code that does not yet exist to enable idempotent migrations with the Gitea uploader. When a migration is done for the first time, the behavior is not changed. But when a migration is done for a repository that already exists, this new code is used to update it.
The third step can use the code created in the second step to optimize and speed up migrations. For instance, when a migration is resumed, an issue that has an update time that is not more recent can be skipped and only newly created issues or updated ones will be updated. Another example of optimization could be that a webhook notifies Gitea when an issue is updated. The code triggered by the webhook would download only this issue and call the code created in the second step to update the issue, as if it was in the process of an idempotent migration.
The ForeignReferences table is added to contain local and foreign ID pairs relative to a given repository. It can later be used for pull requests and other artifacts that can be mirrored. Although the foreign id could be added as a single field in issues or pull requests, it would need to be added to all tables that represent something that can be mirrored. Creating a new table makes for a simpler and more generic design. The drawback is that it requires an extra lookup to obtain the information. However, this extra information is only required during migration or mirroring and does not impact the way Gitea currently works.
The foreign identifier of an issue or pull request is similar to the identifier of an external user, which is stored in reactions, issues, etc. as OriginalPosterID and so on. The representation of a user is however different and the ability of users to link their account to an external user at a later time is also a logic that is different from what is involved in mirroring or migrations. For these reasons, despite some commonalities, it is unclear at this time how the two tables (foreign reference and external user) could be merged together.
The ForeignID field is extracted from the issue migration context so that it can be dumped in files with dump-repo and later restored via restore-repo.
The GetAllComments downloader method is introduced to simplify the implementation and not overload the Context for the purpose of pagination. It also clarifies in which context the comments are paginated and in which context they are not.
The Context interface is no longer useful for the purpose of retrieving the LocalID and ForeignID since they are now both available from the PullRequest and Issue struct. The Reviewable and Commentable interfaces replace and serve the same purpose.
The Context data member of PullRequest and Issue becomes a DownloaderContext to clarify that its purpose is not to support in memory operations while the current downloader is acting but is not otherwise persisted. It is, for instance, used by the GitLab downloader to store the IsMergeRequest boolean and sort out issues.
---
[source](https://lab.forgefriends.org/forgefriends/forgefriends/-/merge_requests/36)
Signed-off-by: Loïc Dachary <loic@dachary.org>
Co-authored-by: Loïc Dachary <loic@dachary.org>
2022-03-17 11:08:35 -06:00
|
|
|
func (g *GithubDownloaderV3) getComments(commentable base.Commentable) ([]*base.Comment, error) {
|
2020-01-23 10:28:15 -07:00
|
|
|
var (
|
2020-10-24 23:11:03 -06:00
|
|
|
allComments = make([]*base.Comment, 0, g.maxPerPage)
|
2020-01-23 10:28:15 -07:00
|
|
|
created = "created"
|
|
|
|
asc = "asc"
|
|
|
|
)
|
2019-05-06 19:12:51 -06:00
|
|
|
opt := &github.IssueListCommentsOptions{
|
2020-07-31 08:22:34 -06:00
|
|
|
Sort: &created,
|
|
|
|
Direction: &asc,
|
2019-05-06 19:12:51 -06:00
|
|
|
ListOptions: github.ListOptions{
|
2020-10-24 23:11:03 -06:00
|
|
|
PerPage: g.maxPerPage,
|
2019-05-06 19:12:51 -06:00
|
|
|
},
|
|
|
|
}
|
|
|
|
for {
|
2021-10-14 23:47:15 -06:00
|
|
|
g.waitAndPickClient()
|
Store the foreign ID of issues during migration (#18446)
Storing the foreign identifier of an imported issue in the database is a prerequisite to implement idempotent migrations or mirror for issues. It is a baby step towards mirroring that introduces a new table.
At the moment when an issue is created by the Gitea uploader, it fails if the issue already exists. The Gitea uploader could be modified so that, instead of failing, it looks up the database to find an existing issue. And if it does it would update the issue instead of creating a new one. However this is not currently possible because an information is missing from the database: the foreign identifier that uniquely represents the issue being migrated is not persisted. With this change, the foreign identifier is stored in the database and the Gitea uploader will then be able to run a query to figure out if a given issue being imported already exists.
The implementation of mirroring for issues, pull requests, releases, etc. can be done in three steps:
1. Store an identifier for the element being mirrored (issue, pull request...) in the database (this is the purpose of these changes)
2. Modify the Gitea uploader to be able to update an existing repository with all it contains (issues, pull request...) instead of failing if it exists
3. Optimize the Gitea uploader to speed up the updates, when possible.
The second step creates code that does not yet exist to enable idempotent migrations with the Gitea uploader. When a migration is done for the first time, the behavior is not changed. But when a migration is done for a repository that already exists, this new code is used to update it.
The third step can use the code created in the second step to optimize and speed up migrations. For instance, when a migration is resumed, an issue that has an update time that is not more recent can be skipped and only newly created issues or updated ones will be updated. Another example of optimization could be that a webhook notifies Gitea when an issue is updated. The code triggered by the webhook would download only this issue and call the code created in the second step to update the issue, as if it was in the process of an idempotent migration.
The ForeignReferences table is added to contain local and foreign ID pairs relative to a given repository. It can later be used for pull requests and other artifacts that can be mirrored. Although the foreign id could be added as a single field in issues or pull requests, it would need to be added to all tables that represent something that can be mirrored. Creating a new table makes for a simpler and more generic design. The drawback is that it requires an extra lookup to obtain the information. However, this extra information is only required during migration or mirroring and does not impact the way Gitea currently works.
The foreign identifier of an issue or pull request is similar to the identifier of an external user, which is stored in reactions, issues, etc. as OriginalPosterID and so on. The representation of a user is however different and the ability of users to link their account to an external user at a later time is also a logic that is different from what is involved in mirroring or migrations. For these reasons, despite some commonalities, it is unclear at this time how the two tables (foreign reference and external user) could be merged together.
The ForeignID field is extracted from the issue migration context so that it can be dumped in files with dump-repo and later restored via restore-repo.
The GetAllComments downloader method is introduced to simplify the implementation and not overload the Context for the purpose of pagination. It also clarifies in which context the comments are paginated and in which context they are not.
The Context interface is no longer useful for the purpose of retrieving the LocalID and ForeignID since they are now both available from the PullRequest and Issue struct. The Reviewable and Commentable interfaces replace and serve the same purpose.
The Context data member of PullRequest and Issue becomes a DownloaderContext to clarify that its purpose is not to support in memory operations while the current downloader is acting but is not otherwise persisted. It is, for instance, used by the GitLab downloader to store the IsMergeRequest boolean and sort out issues.
---
[source](https://lab.forgefriends.org/forgefriends/forgefriends/-/merge_requests/36)
Signed-off-by: Loïc Dachary <loic@dachary.org>
Co-authored-by: Loïc Dachary <loic@dachary.org>
2022-03-17 11:08:35 -06:00
|
|
|
comments, resp, err := g.getClient().Issues.ListComments(g.ctx, g.repoOwner, g.repoName, int(commentable.GetForeignIndex()), opt)
|
2019-05-06 19:12:51 -06:00
|
|
|
if err != nil {
|
2022-10-24 13:29:17 -06:00
|
|
|
return nil, fmt.Errorf("error while listing repos: %w", err)
|
2019-05-06 19:12:51 -06:00
|
|
|
}
|
2021-10-14 23:47:15 -06:00
|
|
|
g.setRate(&resp.Rate)
|
2019-05-06 19:12:51 -06:00
|
|
|
for _, comment := range comments {
|
2020-01-15 04:14:07 -07:00
|
|
|
// get reactions
|
|
|
|
var reactions []*base.Reaction
|
2021-11-14 12:11:10 -07:00
|
|
|
if !g.SkipReactions {
|
|
|
|
for i := 1; ; i++ {
|
|
|
|
g.waitAndPickClient()
|
|
|
|
res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(g.ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{
|
|
|
|
Page: i,
|
|
|
|
PerPage: g.maxPerPage,
|
2020-01-15 04:14:07 -07:00
|
|
|
})
|
2021-11-14 12:11:10 -07:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
g.setRate(&resp.Rate)
|
|
|
|
if len(res) == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
for _, reaction := range res {
|
|
|
|
reactions = append(reactions, &base.Reaction{
|
|
|
|
UserID: reaction.User.GetID(),
|
|
|
|
UserName: reaction.User.GetLogin(),
|
|
|
|
Content: reaction.GetContent(),
|
|
|
|
})
|
|
|
|
}
|
2020-01-15 04:14:07 -07:00
|
|
|
}
|
2019-05-06 19:12:51 -06:00
|
|
|
}
|
2021-08-10 03:49:43 -06:00
|
|
|
|
2019-05-06 19:12:51 -06:00
|
|
|
allComments = append(allComments, &base.Comment{
|
Store the foreign ID of issues during migration (#18446)
Storing the foreign identifier of an imported issue in the database is a prerequisite to implement idempotent migrations or mirror for issues. It is a baby step towards mirroring that introduces a new table.
At the moment when an issue is created by the Gitea uploader, it fails if the issue already exists. The Gitea uploader could be modified so that, instead of failing, it looks up the database to find an existing issue. And if it does it would update the issue instead of creating a new one. However this is not currently possible because an information is missing from the database: the foreign identifier that uniquely represents the issue being migrated is not persisted. With this change, the foreign identifier is stored in the database and the Gitea uploader will then be able to run a query to figure out if a given issue being imported already exists.
The implementation of mirroring for issues, pull requests, releases, etc. can be done in three steps:
1. Store an identifier for the element being mirrored (issue, pull request...) in the database (this is the purpose of these changes)
2. Modify the Gitea uploader to be able to update an existing repository with all it contains (issues, pull request...) instead of failing if it exists
3. Optimize the Gitea uploader to speed up the updates, when possible.
The second step creates code that does not yet exist to enable idempotent migrations with the Gitea uploader. When a migration is done for the first time, the behavior is not changed. But when a migration is done for a repository that already exists, this new code is used to update it.
The third step can use the code created in the second step to optimize and speed up migrations. For instance, when a migration is resumed, an issue that has an update time that is not more recent can be skipped and only newly created issues or updated ones will be updated. Another example of optimization could be that a webhook notifies Gitea when an issue is updated. The code triggered by the webhook would download only this issue and call the code created in the second step to update the issue, as if it was in the process of an idempotent migration.
The ForeignReferences table is added to contain local and foreign ID pairs relative to a given repository. It can later be used for pull requests and other artifacts that can be mirrored. Although the foreign id could be added as a single field in issues or pull requests, it would need to be added to all tables that represent something that can be mirrored. Creating a new table makes for a simpler and more generic design. The drawback is that it requires an extra lookup to obtain the information. However, this extra information is only required during migration or mirroring and does not impact the way Gitea currently works.
The foreign identifier of an issue or pull request is similar to the identifier of an external user, which is stored in reactions, issues, etc. as OriginalPosterID and so on. The representation of a user is however different and the ability of users to link their account to an external user at a later time is also a logic that is different from what is involved in mirroring or migrations. For these reasons, despite some commonalities, it is unclear at this time how the two tables (foreign reference and external user) could be merged together.
The ForeignID field is extracted from the issue migration context so that it can be dumped in files with dump-repo and later restored via restore-repo.
The GetAllComments downloader method is introduced to simplify the implementation and not overload the Context for the purpose of pagination. It also clarifies in which context the comments are paginated and in which context they are not.
The Context interface is no longer useful for the purpose of retrieving the LocalID and ForeignID since they are now both available from the PullRequest and Issue struct. The Reviewable and Commentable interfaces replace and serve the same purpose.
The Context data member of PullRequest and Issue becomes a DownloaderContext to clarify that its purpose is not to support in memory operations while the current downloader is acting but is not otherwise persisted. It is, for instance, used by the GitLab downloader to store the IsMergeRequest boolean and sort out issues.
---
[source](https://lab.forgefriends.org/forgefriends/forgefriends/-/merge_requests/36)
Signed-off-by: Loïc Dachary <loic@dachary.org>
Co-authored-by: Loïc Dachary <loic@dachary.org>
2022-03-17 11:08:35 -06:00
|
|
|
IssueIndex: commentable.GetLocalIndex(),
|
2022-03-06 12:00:41 -07:00
|
|
|
Index: comment.GetID(),
|
2021-08-10 03:49:43 -06:00
|
|
|
PosterID: comment.GetUser().GetID(),
|
|
|
|
PosterName: comment.GetUser().GetLogin(),
|
|
|
|
PosterEmail: comment.GetUser().GetEmail(),
|
|
|
|
Content: comment.GetBody(),
|
2023-04-08 05:27:30 -06:00
|
|
|
Created: comment.GetCreatedAt().Time,
|
|
|
|
Updated: comment.GetUpdatedAt().Time,
|
2019-05-06 19:12:51 -06:00
|
|
|
Reactions: reactions,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
if resp.NextPage == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
opt.Page = resp.NextPage
|
|
|
|
}
|
|
|
|
return allComments, nil
|
|
|
|
}
|
|
|
|
|
2021-06-30 01:23:49 -06:00
|
|
|
// GetAllComments returns repository comments according page and perPageSize
|
|
|
|
func (g *GithubDownloaderV3) GetAllComments(page, perPage int) ([]*base.Comment, bool, error) {
|
|
|
|
var (
|
|
|
|
allComments = make([]*base.Comment, 0, perPage)
|
|
|
|
created = "created"
|
|
|
|
asc = "asc"
|
|
|
|
)
|
2021-09-15 05:30:19 -06:00
|
|
|
if perPage > g.maxPerPage {
|
|
|
|
perPage = g.maxPerPage
|
|
|
|
}
|
2021-06-30 01:23:49 -06:00
|
|
|
opt := &github.IssueListCommentsOptions{
|
|
|
|
Sort: &created,
|
|
|
|
Direction: &asc,
|
|
|
|
ListOptions: github.ListOptions{
|
|
|
|
Page: page,
|
|
|
|
PerPage: perPage,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2021-10-14 23:47:15 -06:00
|
|
|
g.waitAndPickClient()
|
|
|
|
comments, resp, err := g.getClient().Issues.ListComments(g.ctx, g.repoOwner, g.repoName, 0, opt)
|
2021-06-30 01:23:49 -06:00
|
|
|
if err != nil {
|
2022-10-24 13:29:17 -06:00
|
|
|
return nil, false, fmt.Errorf("error while listing repos: %w", err)
|
2021-06-30 01:23:49 -06:00
|
|
|
}
|
2022-01-20 10:46:10 -07:00
|
|
|
isEnd := resp.NextPage == 0
|
2021-09-15 05:30:19 -06:00
|
|
|
|
|
|
|
log.Trace("Request get comments %d/%d, but in fact get %d, next page is %d", perPage, page, len(comments), resp.NextPage)
|
2021-10-14 23:47:15 -06:00
|
|
|
g.setRate(&resp.Rate)
|
2021-06-30 01:23:49 -06:00
|
|
|
for _, comment := range comments {
|
|
|
|
// get reactions
|
|
|
|
var reactions []*base.Reaction
|
2021-11-14 12:11:10 -07:00
|
|
|
if !g.SkipReactions {
|
|
|
|
for i := 1; ; i++ {
|
|
|
|
g.waitAndPickClient()
|
|
|
|
res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(g.ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{
|
|
|
|
Page: i,
|
|
|
|
PerPage: g.maxPerPage,
|
2021-06-30 01:23:49 -06:00
|
|
|
})
|
2021-11-14 12:11:10 -07:00
|
|
|
if err != nil {
|
|
|
|
return nil, false, err
|
|
|
|
}
|
|
|
|
g.setRate(&resp.Rate)
|
|
|
|
if len(res) == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
for _, reaction := range res {
|
|
|
|
reactions = append(reactions, &base.Reaction{
|
|
|
|
UserID: reaction.User.GetID(),
|
|
|
|
UserName: reaction.User.GetLogin(),
|
|
|
|
Content: reaction.GetContent(),
|
|
|
|
})
|
|
|
|
}
|
2021-06-30 01:23:49 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
idx := strings.LastIndex(*comment.IssueURL, "/")
|
|
|
|
issueIndex, _ := strconv.ParseInt((*comment.IssueURL)[idx+1:], 10, 64)
|
|
|
|
allComments = append(allComments, &base.Comment{
|
|
|
|
IssueIndex: issueIndex,
|
2022-03-06 12:00:41 -07:00
|
|
|
Index: comment.GetID(),
|
2021-08-10 03:49:43 -06:00
|
|
|
PosterID: comment.GetUser().GetID(),
|
|
|
|
PosterName: comment.GetUser().GetLogin(),
|
|
|
|
PosterEmail: comment.GetUser().GetEmail(),
|
|
|
|
Content: comment.GetBody(),
|
2023-04-08 05:27:30 -06:00
|
|
|
Created: comment.GetCreatedAt().Time,
|
|
|
|
Updated: comment.GetUpdatedAt().Time,
|
2021-06-30 01:23:49 -06:00
|
|
|
Reactions: reactions,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-09-15 05:30:19 -06:00
|
|
|
return allComments, isEnd, nil
|
2021-06-30 01:23:49 -06:00
|
|
|
}
|
|
|
|
|
2019-05-30 14:26:57 -06:00
|
|
|
// GetPullRequests returns pull requests according page and perPage
|
2020-10-13 22:06:00 -06:00
|
|
|
func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
|
2020-10-24 23:11:03 -06:00
|
|
|
if perPage > g.maxPerPage {
|
|
|
|
perPage = g.maxPerPage
|
|
|
|
}
|
2019-05-06 19:12:51 -06:00
|
|
|
opt := &github.PullRequestListOptions{
|
|
|
|
Sort: "created",
|
|
|
|
Direction: "asc",
|
|
|
|
State: "all",
|
|
|
|
ListOptions: github.ListOptions{
|
2019-05-30 14:26:57 -06:00
|
|
|
PerPage: perPage,
|
|
|
|
Page: page,
|
2019-05-06 19:12:51 -06:00
|
|
|
},
|
|
|
|
}
|
2022-01-20 10:46:10 -07:00
|
|
|
allPRs := make([]*base.PullRequest, 0, perPage)
|
2021-10-14 23:47:15 -06:00
|
|
|
g.waitAndPickClient()
|
|
|
|
prs, resp, err := g.getClient().PullRequests.List(g.ctx, g.repoOwner, g.repoName, opt)
|
2019-05-30 14:26:57 -06:00
|
|
|
if err != nil {
|
2022-10-24 13:29:17 -06:00
|
|
|
return nil, false, fmt.Errorf("error while listing repos: %w", err)
|
2019-05-30 14:26:57 -06:00
|
|
|
}
|
2021-06-30 01:23:49 -06:00
|
|
|
log.Trace("Request get pull requests %d/%d, but in fact get %d", perPage, page, len(prs))
|
2021-10-14 23:47:15 -06:00
|
|
|
g.setRate(&resp.Rate)
|
2019-05-30 14:26:57 -06:00
|
|
|
for _, pr := range prs {
|
2022-01-20 10:46:10 -07:00
|
|
|
labels := make([]*base.Label, 0, len(pr.Labels))
|
2019-05-30 14:26:57 -06:00
|
|
|
for _, l := range pr.Labels {
|
|
|
|
labels = append(labels, convertGithubLabel(l))
|
|
|
|
}
|
2019-05-06 19:12:51 -06:00
|
|
|
|
2020-01-15 04:14:07 -07:00
|
|
|
// get reactions
|
|
|
|
var reactions []*base.Reaction
|
2021-11-14 12:11:10 -07:00
|
|
|
if !g.SkipReactions {
|
|
|
|
for i := 1; ; i++ {
|
|
|
|
g.waitAndPickClient()
|
|
|
|
res, resp, err := g.getClient().Reactions.ListIssueReactions(g.ctx, g.repoOwner, g.repoName, pr.GetNumber(), &github.ListOptions{
|
|
|
|
Page: i,
|
|
|
|
PerPage: perPage,
|
2020-01-15 04:14:07 -07:00
|
|
|
})
|
2021-11-14 12:11:10 -07:00
|
|
|
if err != nil {
|
|
|
|
return nil, false, err
|
|
|
|
}
|
|
|
|
g.setRate(&resp.Rate)
|
|
|
|
if len(res) == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
for _, reaction := range res {
|
|
|
|
reactions = append(reactions, &base.Reaction{
|
|
|
|
UserID: reaction.User.GetID(),
|
|
|
|
UserName: reaction.User.GetLogin(),
|
|
|
|
Content: reaction.GetContent(),
|
|
|
|
})
|
|
|
|
}
|
2020-01-15 04:14:07 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-10-14 23:47:15 -06:00
|
|
|
// download patch and saved as tmp file
|
|
|
|
g.waitAndPickClient()
|
|
|
|
|
2019-05-30 14:26:57 -06:00
|
|
|
allPRs = append(allPRs, &base.PullRequest{
|
2021-08-10 03:49:43 -06:00
|
|
|
Title: pr.GetTitle(),
|
|
|
|
Number: int64(pr.GetNumber()),
|
|
|
|
PosterID: pr.GetUser().GetID(),
|
|
|
|
PosterName: pr.GetUser().GetLogin(),
|
|
|
|
PosterEmail: pr.GetUser().GetEmail(),
|
|
|
|
Content: pr.GetBody(),
|
|
|
|
Milestone: pr.GetMilestone().GetTitle(),
|
|
|
|
State: pr.GetState(),
|
2023-04-08 05:27:30 -06:00
|
|
|
Created: pr.GetCreatedAt().Time,
|
|
|
|
Updated: pr.GetUpdatedAt().Time,
|
|
|
|
Closed: convertGithubTimestampToTime(pr.ClosedAt),
|
2019-05-30 14:26:57 -06:00
|
|
|
Labels: labels,
|
2021-08-10 03:49:43 -06:00
|
|
|
Merged: pr.MergedAt != nil,
|
|
|
|
MergeCommitSHA: pr.GetMergeCommitSHA(),
|
2023-04-08 05:27:30 -06:00
|
|
|
MergedTime: convertGithubTimestampToTime(pr.MergedAt),
|
2019-05-30 14:26:57 -06:00
|
|
|
IsLocked: pr.ActiveLockReason != nil,
|
|
|
|
Head: base.PullRequestBranch{
|
2021-08-10 03:49:43 -06:00
|
|
|
Ref: pr.GetHead().GetRef(),
|
|
|
|
SHA: pr.GetHead().GetSHA(),
|
|
|
|
OwnerName: pr.GetHead().GetUser().GetLogin(),
|
|
|
|
RepoName: pr.GetHead().GetRepo().GetName(),
|
2022-09-04 04:47:56 -06:00
|
|
|
CloneURL: pr.GetHead().GetRepo().GetCloneURL(), // see below for SECURITY related issues here
|
2019-05-30 14:26:57 -06:00
|
|
|
},
|
|
|
|
Base: base.PullRequestBranch{
|
2021-08-10 03:49:43 -06:00
|
|
|
Ref: pr.GetBase().GetRef(),
|
|
|
|
SHA: pr.GetBase().GetSHA(),
|
|
|
|
RepoName: pr.GetBase().GetRepo().GetName(),
|
|
|
|
OwnerName: pr.GetBase().GetUser().GetLogin(),
|
2019-05-30 14:26:57 -06:00
|
|
|
},
|
2022-09-04 04:47:56 -06:00
|
|
|
PatchURL: pr.GetPatchURL(), // see below for SECURITY related issues here
|
Store the foreign ID of issues during migration (#18446)
Storing the foreign identifier of an imported issue in the database is a prerequisite to implement idempotent migrations or mirror for issues. It is a baby step towards mirroring that introduces a new table.
At the moment when an issue is created by the Gitea uploader, it fails if the issue already exists. The Gitea uploader could be modified so that, instead of failing, it looks up the database to find an existing issue. And if it does it would update the issue instead of creating a new one. However this is not currently possible because an information is missing from the database: the foreign identifier that uniquely represents the issue being migrated is not persisted. With this change, the foreign identifier is stored in the database and the Gitea uploader will then be able to run a query to figure out if a given issue being imported already exists.
The implementation of mirroring for issues, pull requests, releases, etc. can be done in three steps:
1. Store an identifier for the element being mirrored (issue, pull request...) in the database (this is the purpose of these changes)
2. Modify the Gitea uploader to be able to update an existing repository with all it contains (issues, pull request...) instead of failing if it exists
3. Optimize the Gitea uploader to speed up the updates, when possible.
The second step creates code that does not yet exist to enable idempotent migrations with the Gitea uploader. When a migration is done for the first time, the behavior is not changed. But when a migration is done for a repository that already exists, this new code is used to update it.
The third step can use the code created in the second step to optimize and speed up migrations. For instance, when a migration is resumed, an issue that has an update time that is not more recent can be skipped and only newly created issues or updated ones will be updated. Another example of optimization could be that a webhook notifies Gitea when an issue is updated. The code triggered by the webhook would download only this issue and call the code created in the second step to update the issue, as if it was in the process of an idempotent migration.
The ForeignReferences table is added to contain local and foreign ID pairs relative to a given repository. It can later be used for pull requests and other artifacts that can be mirrored. Although the foreign id could be added as a single field in issues or pull requests, it would need to be added to all tables that represent something that can be mirrored. Creating a new table makes for a simpler and more generic design. The drawback is that it requires an extra lookup to obtain the information. However, this extra information is only required during migration or mirroring and does not impact the way Gitea currently works.
The foreign identifier of an issue or pull request is similar to the identifier of an external user, which is stored in reactions, issues, etc. as OriginalPosterID and so on. The representation of a user is however different and the ability of users to link their account to an external user at a later time is also a logic that is different from what is involved in mirroring or migrations. For these reasons, despite some commonalities, it is unclear at this time how the two tables (foreign reference and external user) could be merged together.
The ForeignID field is extracted from the issue migration context so that it can be dumped in files with dump-repo and later restored via restore-repo.
The GetAllComments downloader method is introduced to simplify the implementation and not overload the Context for the purpose of pagination. It also clarifies in which context the comments are paginated and in which context they are not.
The Context interface is no longer useful for the purpose of retrieving the LocalID and ForeignID since they are now both available from the PullRequest and Issue struct. The Reviewable and Commentable interfaces replace and serve the same purpose.
The Context data member of PullRequest and Issue becomes a DownloaderContext to clarify that its purpose is not to support in memory operations while the current downloader is acting but is not otherwise persisted. It is, for instance, used by the GitLab downloader to store the IsMergeRequest boolean and sort out issues.
---
[source](https://lab.forgefriends.org/forgefriends/forgefriends/-/merge_requests/36)
Signed-off-by: Loïc Dachary <loic@dachary.org>
Co-authored-by: Loïc Dachary <loic@dachary.org>
2022-03-17 11:08:35 -06:00
|
|
|
Reactions: reactions,
|
|
|
|
ForeignIndex: int64(*pr.Number),
|
2019-05-30 14:26:57 -06:00
|
|
|
})
|
2022-09-04 04:47:56 -06:00
|
|
|
|
|
|
|
// SECURITY: Ensure that the PR is safe
|
|
|
|
_ = CheckAndEnsureSafePR(allPRs[len(allPRs)-1], g.baseURL, g)
|
2019-05-06 19:12:51 -06:00
|
|
|
}
|
2019-05-30 14:26:57 -06:00
|
|
|
|
2020-10-13 22:06:00 -06:00
|
|
|
return allPRs, len(prs) < perPage, nil
|
2019-05-06 19:12:51 -06:00
|
|
|
}
|
2020-01-23 10:28:15 -07:00
|
|
|
|
|
|
|
func convertGithubReview(r *github.PullRequestReview) *base.Review {
|
|
|
|
return &base.Review{
|
|
|
|
ID: r.GetID(),
|
|
|
|
ReviewerID: r.GetUser().GetID(),
|
|
|
|
ReviewerName: r.GetUser().GetLogin(),
|
|
|
|
CommitID: r.GetCommitID(),
|
|
|
|
Content: r.GetBody(),
|
2023-04-08 05:27:30 -06:00
|
|
|
CreatedAt: r.GetSubmittedAt().Time,
|
2020-01-23 10:28:15 -07:00
|
|
|
State: r.GetState(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (g *GithubDownloaderV3) convertGithubReviewComments(cs []*github.PullRequestComment) ([]*base.ReviewComment, error) {
|
2022-01-20 10:46:10 -07:00
|
|
|
rcs := make([]*base.ReviewComment, 0, len(cs))
|
2020-01-23 10:28:15 -07:00
|
|
|
for _, c := range cs {
|
|
|
|
// get reactions
|
|
|
|
var reactions []*base.Reaction
|
2021-11-14 12:11:10 -07:00
|
|
|
if !g.SkipReactions {
|
|
|
|
for i := 1; ; i++ {
|
|
|
|
g.waitAndPickClient()
|
|
|
|
res, resp, err := g.getClient().Reactions.ListPullRequestCommentReactions(g.ctx, g.repoOwner, g.repoName, c.GetID(), &github.ListOptions{
|
|
|
|
Page: i,
|
|
|
|
PerPage: g.maxPerPage,
|
2020-01-23 10:28:15 -07:00
|
|
|
})
|
2021-11-14 12:11:10 -07:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
g.setRate(&resp.Rate)
|
|
|
|
if len(res) == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
for _, reaction := range res {
|
|
|
|
reactions = append(reactions, &base.Reaction{
|
|
|
|
UserID: reaction.User.GetID(),
|
|
|
|
UserName: reaction.User.GetLogin(),
|
|
|
|
Content: reaction.GetContent(),
|
|
|
|
})
|
|
|
|
}
|
2020-01-23 10:28:15 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
rcs = append(rcs, &base.ReviewComment{
|
|
|
|
ID: c.GetID(),
|
|
|
|
InReplyTo: c.GetInReplyTo(),
|
|
|
|
Content: c.GetBody(),
|
|
|
|
TreePath: c.GetPath(),
|
|
|
|
DiffHunk: c.GetDiffHunk(),
|
|
|
|
Position: c.GetPosition(),
|
|
|
|
CommitID: c.GetCommitID(),
|
|
|
|
PosterID: c.GetUser().GetID(),
|
|
|
|
Reactions: reactions,
|
2023-04-08 05:27:30 -06:00
|
|
|
CreatedAt: c.GetCreatedAt().Time,
|
|
|
|
UpdatedAt: c.GetUpdatedAt().Time,
|
2020-01-23 10:28:15 -07:00
|
|
|
})
|
|
|
|
}
|
|
|
|
return rcs, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// GetReviews returns pull requests review
|
Store the foreign ID of issues during migration (#18446)
Storing the foreign identifier of an imported issue in the database is a prerequisite to implement idempotent migrations or mirror for issues. It is a baby step towards mirroring that introduces a new table.
At the moment when an issue is created by the Gitea uploader, it fails if the issue already exists. The Gitea uploader could be modified so that, instead of failing, it looks up the database to find an existing issue. And if it does it would update the issue instead of creating a new one. However this is not currently possible because an information is missing from the database: the foreign identifier that uniquely represents the issue being migrated is not persisted. With this change, the foreign identifier is stored in the database and the Gitea uploader will then be able to run a query to figure out if a given issue being imported already exists.
The implementation of mirroring for issues, pull requests, releases, etc. can be done in three steps:
1. Store an identifier for the element being mirrored (issue, pull request...) in the database (this is the purpose of these changes)
2. Modify the Gitea uploader to be able to update an existing repository with all it contains (issues, pull request...) instead of failing if it exists
3. Optimize the Gitea uploader to speed up the updates, when possible.
The second step creates code that does not yet exist to enable idempotent migrations with the Gitea uploader. When a migration is done for the first time, the behavior is not changed. But when a migration is done for a repository that already exists, this new code is used to update it.
The third step can use the code created in the second step to optimize and speed up migrations. For instance, when a migration is resumed, an issue that has an update time that is not more recent can be skipped and only newly created issues or updated ones will be updated. Another example of optimization could be that a webhook notifies Gitea when an issue is updated. The code triggered by the webhook would download only this issue and call the code created in the second step to update the issue, as if it was in the process of an idempotent migration.
The ForeignReferences table is added to contain local and foreign ID pairs relative to a given repository. It can later be used for pull requests and other artifacts that can be mirrored. Although the foreign id could be added as a single field in issues or pull requests, it would need to be added to all tables that represent something that can be mirrored. Creating a new table makes for a simpler and more generic design. The drawback is that it requires an extra lookup to obtain the information. However, this extra information is only required during migration or mirroring and does not impact the way Gitea currently works.
The foreign identifier of an issue or pull request is similar to the identifier of an external user, which is stored in reactions, issues, etc. as OriginalPosterID and so on. The representation of a user is however different and the ability of users to link their account to an external user at a later time is also a logic that is different from what is involved in mirroring or migrations. For these reasons, despite some commonalities, it is unclear at this time how the two tables (foreign reference and external user) could be merged together.
The ForeignID field is extracted from the issue migration context so that it can be dumped in files with dump-repo and later restored via restore-repo.
The GetAllComments downloader method is introduced to simplify the implementation and not overload the Context for the purpose of pagination. It also clarifies in which context the comments are paginated and in which context they are not.
The Context interface is no longer useful for the purpose of retrieving the LocalID and ForeignID since they are now both available from the PullRequest and Issue struct. The Reviewable and Commentable interfaces replace and serve the same purpose.
The Context data member of PullRequest and Issue becomes a DownloaderContext to clarify that its purpose is not to support in memory operations while the current downloader is acting but is not otherwise persisted. It is, for instance, used by the GitLab downloader to store the IsMergeRequest boolean and sort out issues.
---
[source](https://lab.forgefriends.org/forgefriends/forgefriends/-/merge_requests/36)
Signed-off-by: Loïc Dachary <loic@dachary.org>
Co-authored-by: Loïc Dachary <loic@dachary.org>
2022-03-17 11:08:35 -06:00
|
|
|
func (g *GithubDownloaderV3) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
|
2022-01-20 10:46:10 -07:00
|
|
|
allReviews := make([]*base.Review, 0, g.maxPerPage)
|
2023-01-17 00:22:00 -07:00
|
|
|
if g.SkipReviews {
|
|
|
|
return allReviews, nil
|
|
|
|
}
|
2020-01-23 10:28:15 -07:00
|
|
|
opt := &github.ListOptions{
|
2020-10-24 23:11:03 -06:00
|
|
|
PerPage: g.maxPerPage,
|
2020-01-23 10:28:15 -07:00
|
|
|
}
|
2022-06-08 20:50:05 -06:00
|
|
|
// Get approve/request change reviews
|
2020-01-23 10:28:15 -07:00
|
|
|
for {
|
2021-10-14 23:47:15 -06:00
|
|
|
g.waitAndPickClient()
|
Store the foreign ID of issues during migration (#18446)
Storing the foreign identifier of an imported issue in the database is a prerequisite to implement idempotent migrations or mirror for issues. It is a baby step towards mirroring that introduces a new table.
At the moment when an issue is created by the Gitea uploader, it fails if the issue already exists. The Gitea uploader could be modified so that, instead of failing, it looks up the database to find an existing issue. And if it does it would update the issue instead of creating a new one. However this is not currently possible because an information is missing from the database: the foreign identifier that uniquely represents the issue being migrated is not persisted. With this change, the foreign identifier is stored in the database and the Gitea uploader will then be able to run a query to figure out if a given issue being imported already exists.
The implementation of mirroring for issues, pull requests, releases, etc. can be done in three steps:
1. Store an identifier for the element being mirrored (issue, pull request...) in the database (this is the purpose of these changes)
2. Modify the Gitea uploader to be able to update an existing repository with all it contains (issues, pull request...) instead of failing if it exists
3. Optimize the Gitea uploader to speed up the updates, when possible.
The second step creates code that does not yet exist to enable idempotent migrations with the Gitea uploader. When a migration is done for the first time, the behavior is not changed. But when a migration is done for a repository that already exists, this new code is used to update it.
The third step can use the code created in the second step to optimize and speed up migrations. For instance, when a migration is resumed, an issue that has an update time that is not more recent can be skipped and only newly created issues or updated ones will be updated. Another example of optimization could be that a webhook notifies Gitea when an issue is updated. The code triggered by the webhook would download only this issue and call the code created in the second step to update the issue, as if it was in the process of an idempotent migration.
The ForeignReferences table is added to contain local and foreign ID pairs relative to a given repository. It can later be used for pull requests and other artifacts that can be mirrored. Although the foreign id could be added as a single field in issues or pull requests, it would need to be added to all tables that represent something that can be mirrored. Creating a new table makes for a simpler and more generic design. The drawback is that it requires an extra lookup to obtain the information. However, this extra information is only required during migration or mirroring and does not impact the way Gitea currently works.
The foreign identifier of an issue or pull request is similar to the identifier of an external user, which is stored in reactions, issues, etc. as OriginalPosterID and so on. The representation of a user is however different and the ability of users to link their account to an external user at a later time is also a logic that is different from what is involved in mirroring or migrations. For these reasons, despite some commonalities, it is unclear at this time how the two tables (foreign reference and external user) could be merged together.
The ForeignID field is extracted from the issue migration context so that it can be dumped in files with dump-repo and later restored via restore-repo.
The GetAllComments downloader method is introduced to simplify the implementation and not overload the Context for the purpose of pagination. It also clarifies in which context the comments are paginated and in which context they are not.
The Context interface is no longer useful for the purpose of retrieving the LocalID and ForeignID since they are now both available from the PullRequest and Issue struct. The Reviewable and Commentable interfaces replace and serve the same purpose.
The Context data member of PullRequest and Issue becomes a DownloaderContext to clarify that its purpose is not to support in memory operations while the current downloader is acting but is not otherwise persisted. It is, for instance, used by the GitLab downloader to store the IsMergeRequest boolean and sort out issues.
---
[source](https://lab.forgefriends.org/forgefriends/forgefriends/-/merge_requests/36)
Signed-off-by: Loïc Dachary <loic@dachary.org>
Co-authored-by: Loïc Dachary <loic@dachary.org>
2022-03-17 11:08:35 -06:00
|
|
|
reviews, resp, err := g.getClient().PullRequests.ListReviews(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), opt)
|
2020-01-23 10:28:15 -07:00
|
|
|
if err != nil {
|
2022-10-24 13:29:17 -06:00
|
|
|
return nil, fmt.Errorf("error while listing repos: %w", err)
|
2020-01-23 10:28:15 -07:00
|
|
|
}
|
2021-10-14 23:47:15 -06:00
|
|
|
g.setRate(&resp.Rate)
|
2020-01-23 10:28:15 -07:00
|
|
|
for _, review := range reviews {
|
|
|
|
r := convertGithubReview(review)
|
Store the foreign ID of issues during migration (#18446)
Storing the foreign identifier of an imported issue in the database is a prerequisite to implement idempotent migrations or mirror for issues. It is a baby step towards mirroring that introduces a new table.
At the moment when an issue is created by the Gitea uploader, it fails if the issue already exists. The Gitea uploader could be modified so that, instead of failing, it looks up the database to find an existing issue. And if it does it would update the issue instead of creating a new one. However this is not currently possible because an information is missing from the database: the foreign identifier that uniquely represents the issue being migrated is not persisted. With this change, the foreign identifier is stored in the database and the Gitea uploader will then be able to run a query to figure out if a given issue being imported already exists.
The implementation of mirroring for issues, pull requests, releases, etc. can be done in three steps:
1. Store an identifier for the element being mirrored (issue, pull request...) in the database (this is the purpose of these changes)
2. Modify the Gitea uploader to be able to update an existing repository with all it contains (issues, pull request...) instead of failing if it exists
3. Optimize the Gitea uploader to speed up the updates, when possible.
The second step creates code that does not yet exist to enable idempotent migrations with the Gitea uploader. When a migration is done for the first time, the behavior is not changed. But when a migration is done for a repository that already exists, this new code is used to update it.
The third step can use the code created in the second step to optimize and speed up migrations. For instance, when a migration is resumed, an issue that has an update time that is not more recent can be skipped and only newly created issues or updated ones will be updated. Another example of optimization could be that a webhook notifies Gitea when an issue is updated. The code triggered by the webhook would download only this issue and call the code created in the second step to update the issue, as if it was in the process of an idempotent migration.
The ForeignReferences table is added to contain local and foreign ID pairs relative to a given repository. It can later be used for pull requests and other artifacts that can be mirrored. Although the foreign id could be added as a single field in issues or pull requests, it would need to be added to all tables that represent something that can be mirrored. Creating a new table makes for a simpler and more generic design. The drawback is that it requires an extra lookup to obtain the information. However, this extra information is only required during migration or mirroring and does not impact the way Gitea currently works.
The foreign identifier of an issue or pull request is similar to the identifier of an external user, which is stored in reactions, issues, etc. as OriginalPosterID and so on. The representation of a user is however different and the ability of users to link their account to an external user at a later time is also a logic that is different from what is involved in mirroring or migrations. For these reasons, despite some commonalities, it is unclear at this time how the two tables (foreign reference and external user) could be merged together.
The ForeignID field is extracted from the issue migration context so that it can be dumped in files with dump-repo and later restored via restore-repo.
The GetAllComments downloader method is introduced to simplify the implementation and not overload the Context for the purpose of pagination. It also clarifies in which context the comments are paginated and in which context they are not.
The Context interface is no longer useful for the purpose of retrieving the LocalID and ForeignID since they are now both available from the PullRequest and Issue struct. The Reviewable and Commentable interfaces replace and serve the same purpose.
The Context data member of PullRequest and Issue becomes a DownloaderContext to clarify that its purpose is not to support in memory operations while the current downloader is acting but is not otherwise persisted. It is, for instance, used by the GitLab downloader to store the IsMergeRequest boolean and sort out issues.
---
[source](https://lab.forgefriends.org/forgefriends/forgefriends/-/merge_requests/36)
Signed-off-by: Loïc Dachary <loic@dachary.org>
Co-authored-by: Loïc Dachary <loic@dachary.org>
2022-03-17 11:08:35 -06:00
|
|
|
r.IssueIndex = reviewable.GetLocalIndex()
|
2020-01-23 10:28:15 -07:00
|
|
|
// retrieve all review comments
|
|
|
|
opt2 := &github.ListOptions{
|
2020-10-24 23:11:03 -06:00
|
|
|
PerPage: g.maxPerPage,
|
2020-01-23 10:28:15 -07:00
|
|
|
}
|
|
|
|
for {
|
2021-10-14 23:47:15 -06:00
|
|
|
g.waitAndPickClient()
|
Store the foreign ID of issues during migration (#18446)
Storing the foreign identifier of an imported issue in the database is a prerequisite to implement idempotent migrations or mirror for issues. It is a baby step towards mirroring that introduces a new table.
At the moment when an issue is created by the Gitea uploader, it fails if the issue already exists. The Gitea uploader could be modified so that, instead of failing, it looks up the database to find an existing issue. And if it does it would update the issue instead of creating a new one. However this is not currently possible because an information is missing from the database: the foreign identifier that uniquely represents the issue being migrated is not persisted. With this change, the foreign identifier is stored in the database and the Gitea uploader will then be able to run a query to figure out if a given issue being imported already exists.
The implementation of mirroring for issues, pull requests, releases, etc. can be done in three steps:
1. Store an identifier for the element being mirrored (issue, pull request...) in the database (this is the purpose of these changes)
2. Modify the Gitea uploader to be able to update an existing repository with all it contains (issues, pull request...) instead of failing if it exists
3. Optimize the Gitea uploader to speed up the updates, when possible.
The second step creates code that does not yet exist to enable idempotent migrations with the Gitea uploader. When a migration is done for the first time, the behavior is not changed. But when a migration is done for a repository that already exists, this new code is used to update it.
The third step can use the code created in the second step to optimize and speed up migrations. For instance, when a migration is resumed, an issue that has an update time that is not more recent can be skipped and only newly created issues or updated ones will be updated. Another example of optimization could be that a webhook notifies Gitea when an issue is updated. The code triggered by the webhook would download only this issue and call the code created in the second step to update the issue, as if it was in the process of an idempotent migration.
The ForeignReferences table is added to contain local and foreign ID pairs relative to a given repository. It can later be used for pull requests and other artifacts that can be mirrored. Although the foreign id could be added as a single field in issues or pull requests, it would need to be added to all tables that represent something that can be mirrored. Creating a new table makes for a simpler and more generic design. The drawback is that it requires an extra lookup to obtain the information. However, this extra information is only required during migration or mirroring and does not impact the way Gitea currently works.
The foreign identifier of an issue or pull request is similar to the identifier of an external user, which is stored in reactions, issues, etc. as OriginalPosterID and so on. The representation of a user is however different and the ability of users to link their account to an external user at a later time is also a logic that is different from what is involved in mirroring or migrations. For these reasons, despite some commonalities, it is unclear at this time how the two tables (foreign reference and external user) could be merged together.
The ForeignID field is extracted from the issue migration context so that it can be dumped in files with dump-repo and later restored via restore-repo.
The GetAllComments downloader method is introduced to simplify the implementation and not overload the Context for the purpose of pagination. It also clarifies in which context the comments are paginated and in which context they are not.
The Context interface is no longer useful for the purpose of retrieving the LocalID and ForeignID since they are now both available from the PullRequest and Issue struct. The Reviewable and Commentable interfaces replace and serve the same purpose.
The Context data member of PullRequest and Issue becomes a DownloaderContext to clarify that its purpose is not to support in memory operations while the current downloader is acting but is not otherwise persisted. It is, for instance, used by the GitLab downloader to store the IsMergeRequest boolean and sort out issues.
---
[source](https://lab.forgefriends.org/forgefriends/forgefriends/-/merge_requests/36)
Signed-off-by: Loïc Dachary <loic@dachary.org>
Co-authored-by: Loïc Dachary <loic@dachary.org>
2022-03-17 11:08:35 -06:00
|
|
|
reviewComments, resp, err := g.getClient().PullRequests.ListReviewComments(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), review.GetID(), opt2)
|
2020-01-23 10:28:15 -07:00
|
|
|
if err != nil {
|
2022-10-24 13:29:17 -06:00
|
|
|
return nil, fmt.Errorf("error while listing repos: %w", err)
|
2020-01-23 10:28:15 -07:00
|
|
|
}
|
2021-10-14 23:47:15 -06:00
|
|
|
g.setRate(&resp.Rate)
|
2020-01-23 10:28:15 -07:00
|
|
|
|
|
|
|
cs, err := g.convertGithubReviewComments(reviewComments)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
r.Comments = append(r.Comments, cs...)
|
|
|
|
if resp.NextPage == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
opt2.Page = resp.NextPage
|
|
|
|
}
|
|
|
|
allReviews = append(allReviews, r)
|
|
|
|
}
|
|
|
|
if resp.NextPage == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
opt.Page = resp.NextPage
|
|
|
|
}
|
2022-06-08 20:50:05 -06:00
|
|
|
// Get requested reviews
|
|
|
|
for {
|
|
|
|
g.waitAndPickClient()
|
|
|
|
reviewers, resp, err := g.getClient().PullRequests.ListReviewers(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), opt)
|
|
|
|
if err != nil {
|
2022-10-24 13:29:17 -06:00
|
|
|
return nil, fmt.Errorf("error while listing repos: %w", err)
|
2022-06-08 20:50:05 -06:00
|
|
|
}
|
|
|
|
g.setRate(&resp.Rate)
|
|
|
|
for _, user := range reviewers.Users {
|
|
|
|
r := &base.Review{
|
|
|
|
ReviewerID: user.GetID(),
|
|
|
|
ReviewerName: user.GetLogin(),
|
|
|
|
State: base.ReviewStateRequestReview,
|
|
|
|
IssueIndex: reviewable.GetLocalIndex(),
|
|
|
|
}
|
|
|
|
allReviews = append(allReviews, r)
|
|
|
|
}
|
|
|
|
// TODO: Handle Team requests
|
|
|
|
if resp.NextPage == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
opt.Page = resp.NextPage
|
|
|
|
}
|
2020-01-23 10:28:15 -07:00
|
|
|
return allReviews, nil
|
|
|
|
}
|
2023-04-08 05:27:30 -06:00
|
|
|
|
|
|
|
func convertGithubTimestampToTime(t *github.Timestamp) *time.Time {
|
|
|
|
if t == nil {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
return &t.Time
|
|
|
|
}
|