// Copyright 2019 The Gitea Authors. All rights reserved.
// Copyright 2018 Jonas Franz. All rights reserved.
// SPDX-License-Identifier: MIT

package migrations

import (
	"context"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strconv"
	"strings"
	"time"

	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/log"
	base "code.gitea.io/gitea/modules/migration"
	"code.gitea.io/gitea/modules/proxy"
	"code.gitea.io/gitea/modules/structs"

	"github.com/google/go-github/v61/github"
	"golang.org/x/oauth2"
)

var (
	_ base.Downloader        = &GithubDownloaderV3{}
	_ base.DownloaderFactory = &GithubDownloaderV3Factory{}

	// GithubLimitRateRemaining is the threshold of remaining requests; when a
	// client's remaining quota drops to this value or below, the downloader
	// waits for the rate limit to reset before issuing new requests.
	GithubLimitRateRemaining = 0
)

func init() {
	RegisterDownloaderFactory(&GithubDownloaderV3Factory{})
}

// GithubDownloaderV3Factory defines a github downloader v3 factory
type GithubDownloaderV3Factory struct{}

// New returns a Downloader related to this factory according to MigrateOptions
func (f *GithubDownloaderV3Factory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) {
	u, err := url.Parse(opts.CloneAddr)
	if err != nil {
		return nil, err
	}

	baseURL := u.Scheme + "://" + u.Host
	fields := strings.Split(u.Path, "/")
	oldOwner := fields[1]
	oldName := strings.TrimSuffix(fields[2], ".git")

	log.Trace("Create github downloader BaseURL: %s %s/%s", baseURL, oldOwner, oldName)

	return NewGithubDownloaderV3(ctx, baseURL, opts.AuthUsername, opts.AuthPassword, opts.AuthToken, oldOwner, oldName), nil
}

// GitServiceType returns the type of git service
func (f *GithubDownloaderV3Factory) GitServiceType() structs.GitServiceType {
	return structs.GithubService
}

// GithubDownloaderV3 implements a Downloader interface to get repository information
// from github via APIv3
type GithubDownloaderV3 struct {
	base.NullDownloader
	ctx           context.Context
	clients       []*github.Client
	baseURL       string
	repoOwner     string
	repoName      string
	userName      string
	password      string
	rates         []*github.Rate
	curClientIdx  int
	maxPerPage    int
	SkipReactions bool
	SkipReviews   bool
}

// NewGithubDownloaderV3 creates a github Downloader via github v3 API
func NewGithubDownloaderV3(ctx context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GithubDownloaderV3 {
	downloader := GithubDownloaderV3{
		userName:   userName,
		baseURL:    baseURL,
		password:   password,
		ctx:        ctx,
		repoOwner:  repoOwner,
		repoName:   repoName,
		maxPerPage: 100,
	}

	if token != "" {
		tokens := strings.Split(token, ",")
		for _, token := range tokens {
			token = strings.TrimSpace(token)
			ts := oauth2.StaticTokenSource(
				&oauth2.Token{AccessToken: token},
			)
			client := &http.Client{
				Transport: &oauth2.Transport{
					Base:   NewMigrationHTTPTransport(),
					Source: oauth2.ReuseTokenSource(nil, ts),
				},
			}

			downloader.addClient(client, baseURL)
		}
	} else {
		transport := NewMigrationHTTPTransport()
		transport.Proxy = func(req *http.Request) (*url.URL, error) {
			req.SetBasicAuth(userName, password)
			return proxy.Proxy()(req)
		}
		client := &http.Client{
			Transport: transport,
		}
		downloader.addClient(client, baseURL)
	}
	return &downloader
}

// String implements Stringer
func (g *GithubDownloaderV3) String() string {
	return fmt.Sprintf("migration from github server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
}
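
// LogString returns a short description of the downloader for logging purposes;
// it is safe to call on a nil receiver.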
func (g *GithubDownloaderV3) LogString() string {
	if g == nil {
		return "<GithubDownloaderV3 nil>"
	}
	return fmt.Sprintf("<GithubDownloaderV3 %s %s/%s>", g.baseURL, g.repoOwner, g.repoName)
}
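
// addClient wraps the given http.Client in a github.Client (switching to the
// Enterprise endpoints when baseURL is not github.com) and registers it together
// with an empty rate-limit slot.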
func (g *GithubDownloaderV3) addClient(client *http.Client, baseURL string) {
	githubClient := github.NewClient(client)
	if baseURL != "https://github.com" {
		githubClient, _ = github.NewClient(client).WithEnterpriseURLs(baseURL, baseURL)
	}
	g.clients = append(g.clients, githubClient)
	g.rates = append(g.rates, nil)
}

// SetContext sets the context
func (g *GithubDownloaderV3) SetContext(ctx context.Context) {
	g.ctx = ctx
}
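
// waitAndPickClient selects the client with the most remaining requests and, while
// that client's remaining quota is at or below GithubLimitRateRemaining, waits for
// its rate limit to reset (or for the context to be cancelled) before continuing.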
func (g *GithubDownloaderV3) waitAndPickClient() {
	var recentIdx int
	var maxRemaining int
	for i := 0; i < len(g.clients); i++ {
		if g.rates[i] != nil && g.rates[i].Remaining > maxRemaining {
			maxRemaining = g.rates[i].Remaining
			recentIdx = i
		}
	}
	g.curClientIdx = recentIdx // if no max remain, it will always pick the first client.

	for g.rates[g.curClientIdx] != nil && g.rates[g.curClientIdx].Remaining <= GithubLimitRateRemaining {
		timer := time.NewTimer(time.Until(g.rates[g.curClientIdx].Reset.Time))
		select {
		case <-g.ctx.Done():
			timer.Stop()
			return
		case <-timer.C:
		}

		err := g.RefreshRate()
		if err != nil {
			log.Error("g.getClient().RateLimit.Get: %s", err)
		}
	}
}

// RefreshRate updates the current rate (doesn't count in rate limit)
func (g *GithubDownloaderV3) RefreshRate() error {
	rates, _, err := g.getClient().RateLimit.Get(g.ctx)
	if err != nil {
		// if rate limit is not enabled, ignore it
		if strings.Contains(err.Error(), "404") {
			g.setRate(nil)
			return nil
		}
		return err
	}

	g.setRate(rates.GetCore())
	return nil
}
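
// getClient returns the currently selected client.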
func (g *GithubDownloaderV3) getClient() *github.Client {
	return g.clients[g.curClientIdx]
}
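
// setRate stores the latest rate-limit information for the currently selected client.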
func (g *GithubDownloaderV3) setRate(rate *github.Rate) {
	g.rates[g.curClientIdx] = rate
}

// GetRepoInfo returns the repository information
func (g *GithubDownloaderV3) GetRepoInfo() (*base.Repository, error) {
	g.waitAndPickClient()
	gr, resp, err := g.getClient().Repositories.Get(g.ctx, g.repoOwner, g.repoName)
	if err != nil {
		return nil, err
	}
	g.setRate(&resp.Rate)

	// convert github repo to standard Repo
	return &base.Repository{
		Owner:         g.repoOwner,
		Name:          gr.GetName(),
		IsPrivate:     gr.GetPrivate(),
		Description:   gr.GetDescription(),
		OriginalURL:   gr.GetHTMLURL(),
		CloneURL:      gr.GetCloneURL(),
		DefaultBranch: gr.GetDefaultBranch(),
	}, nil
}

// GetTopics returns github topics
func (g *GithubDownloaderV3) GetTopics() ([]string, error) {
	g.waitAndPickClient()
	r, resp, err := g.getClient().Repositories.Get(g.ctx, g.repoOwner, g.repoName)
	if err != nil {
		return nil, err
	}
	g.setRate(&resp.Rate)
	return r.Topics, nil
}

// GetMilestones returns milestones
func (g *GithubDownloaderV3) GetMilestones() ([]*base.Milestone, error) {
	perPage := g.maxPerPage
	milestones := make([]*base.Milestone, 0, perPage)
	for i := 1; ; i++ {
		g.waitAndPickClient()
		ms, resp, err := g.getClient().Issues.ListMilestones(g.ctx, g.repoOwner, g.repoName,
			&github.MilestoneListOptions{
				State: "all",
				ListOptions: github.ListOptions{
					Page:    i,
					PerPage: perPage,
				},
			})
		if err != nil {
			return nil, err
		}
		g.setRate(&resp.Rate)

		for _, m := range ms {
			state := "open"
			if m.State != nil {
				state = *m.State
			}
			milestones = append(milestones, &base.Milestone{
				Title:       m.GetTitle(),
				Description: m.GetDescription(),
				Deadline:    m.DueOn.GetTime(),
				State:       state,
				Created:     m.GetCreatedAt().Time,
				Updated:     m.UpdatedAt.GetTime(),
				Closed:      m.ClosedAt.GetTime(),
			})
		}
		if len(ms) < perPage {
			break
		}
	}
	return milestones, nil
}
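
// convertGithubLabel converts a GitHub label to the intermediate migration format.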
func convertGithubLabel(label *github.Label) *base.Label {
	return &base.Label{
		Name:        label.GetName(),
		Color:       label.GetColor(),
		Description: label.GetDescription(),
	}
}

// GetLabels returns labels
func (g *GithubDownloaderV3) GetLabels() ([]*base.Label, error) {
	perPage := g.maxPerPage
	labels := make([]*base.Label, 0, perPage)
	for i := 1; ; i++ {
		g.waitAndPickClient()
		ls, resp, err := g.getClient().Issues.ListLabels(g.ctx, g.repoOwner, g.repoName,
			&github.ListOptions{
				Page:    i,
				PerPage: perPage,
			})
		if err != nil {
			return nil, err
		}
		g.setRate(&resp.Rate)

		for _, label := range ls {
			labels = append(labels, convertGithubLabel(label))
		}
		if len(ls) < perPage {
			break
		}
	}
	return labels, nil
}
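
// convertGithubRelease converts a GitHub release, including its assets and their
// lazy download functions, to the intermediate migration format.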
func (g *GithubDownloaderV3) convertGithubRelease(rel *github.RepositoryRelease) *base.Release {
	// GitHub allows commitish to be a reference.
	// In this case, we need to remove the prefix, i.e. convert "refs/heads/main" to "main".
	targetCommitish := strings.TrimPrefix(rel.GetTargetCommitish(), git.BranchPrefix)

	r := &base.Release{
		Name:            rel.GetName(),
		TagName:         rel.GetTagName(),
		TargetCommitish: targetCommitish,
		Draft:           rel.GetDraft(),
		Prerelease:      rel.GetPrerelease(),
		Created:         rel.GetCreatedAt().Time,
		PublisherID:     rel.GetAuthor().GetID(),
		PublisherName:   rel.GetAuthor().GetLogin(),
		PublisherEmail:  rel.GetAuthor().GetEmail(),
		Body:            rel.GetBody(),
	}

	if rel.PublishedAt != nil {
		r.Published = rel.PublishedAt.Time
	}

	httpClient := NewMigrationHTTPClient()

	for _, asset := range rel.Assets {
		assetID := *asset.ID // Don't optimize this, for closure we need a local variable
		r.Assets = append(r.Assets, &base.ReleaseAsset{
			ID:            asset.GetID(),
			Name:          asset.GetName(),
			ContentType:   asset.ContentType,
			Size:          asset.Size,
			DownloadCount: asset.DownloadCount,
			Created:       asset.CreatedAt.Time,
			Updated:       asset.UpdatedAt.Time,
			DownloadFunc: func() (io.ReadCloser, error) {
				g.waitAndPickClient()
				readCloser, redirectURL, err := g.getClient().Repositories.DownloadReleaseAsset(g.ctx, g.repoOwner, g.repoName, assetID, nil)
				if err != nil {
					return nil, err
				}
				if err := g.RefreshRate(); err != nil {
					log.Error("g.getClient().RateLimits: %s", err)
				}

				if readCloser != nil {
					return readCloser, nil
				}

				if redirectURL == "" {
					return nil, fmt.Errorf("no release asset found for %d", assetID)
				}

				// Prevent open redirect
				if !hasBaseURL(redirectURL, g.baseURL) &&
					!hasBaseURL(redirectURL, "https://objects.githubusercontent.com/") {
					WarnAndNotice("Unexpected AssetURL for assetID[%d] in %s: %s", asset.GetID(), g, redirectURL)

					return io.NopCloser(strings.NewReader(redirectURL)), nil
				}

				g.waitAndPickClient()
				req, err := http.NewRequestWithContext(g.ctx, "GET", redirectURL, nil)
				if err != nil {
					return nil, err
				}
				resp, err := httpClient.Do(req)
				err1 := g.RefreshRate()
				if err1 != nil {
					log.Error("g.RefreshRate(): %s", err1)
				}
				if err != nil {
					return nil, err
				}
				return resp.Body, nil
			},
		})
	}
	return r
}

// GetReleases returns releases
func (g *GithubDownloaderV3) GetReleases() ([]*base.Release, error) {
	perPage := g.maxPerPage
	releases := make([]*base.Release, 0, perPage)
	for i := 1; ; i++ {
		g.waitAndPickClient()
		ls, resp, err := g.getClient().Repositories.ListReleases(g.ctx, g.repoOwner, g.repoName,
			&github.ListOptions{
				Page:    i,
				PerPage: perPage,
			})
		if err != nil {
			return nil, err
		}
		g.setRate(&resp.Rate)

		for _, release := range ls {
			releases = append(releases, g.convertGithubRelease(release))
		}
		if len(ls) < perPage {
			break
		}
	}
	return releases, nil
}

// GetIssues returns issues according to start and limit
func (g *GithubDownloaderV3) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
	if perPage > g.maxPerPage {
		perPage = g.maxPerPage
	}
	opt := &github.IssueListByRepoOptions{
		Sort:      "created",
		Direction: "asc",
		State:     "all",
		ListOptions: github.ListOptions{
			PerPage: perPage,
			Page:    page,
		},
	}

	allIssues := make([]*base.Issue, 0, perPage)
	g.waitAndPickClient()
	issues, resp, err := g.getClient().Issues.ListByRepo(g.ctx, g.repoOwner, g.repoName, opt)
	if err != nil {
		return nil, false, fmt.Errorf("error while listing repos: %w", err)
	}
	log.Trace("Request get issues %d/%d, but in fact get %d", perPage, page, len(issues))
	g.setRate(&resp.Rate)
	for _, issue := range issues {
		if issue.IsPullRequest() {
			continue
		}

		labels := make([]*base.Label, 0, len(issue.Labels))
		for _, l := range issue.Labels {
			labels = append(labels, convertGithubLabel(l))
		}

		// get reactions
		var reactions []*base.Reaction
		if !g.SkipReactions {
			for i := 1; ; i++ {
				g.waitAndPickClient()
				res, resp, err := g.getClient().Reactions.ListIssueReactions(g.ctx, g.repoOwner, g.repoName, issue.GetNumber(), &github.ListOptions{
					Page:    i,
					PerPage: perPage,
				})
				if err != nil {
					return nil, false, err
				}
				g.setRate(&resp.Rate)
				if len(res) == 0 {
					break
				}
				for _, reaction := range res {
					reactions = append(reactions, &base.Reaction{
						UserID:   reaction.User.GetID(),
						UserName: reaction.User.GetLogin(),
						Content:  reaction.GetContent(),
					})
				}
			}
		}

		var assignees []string
		for i := range issue.Assignees {
			assignees = append(assignees, issue.Assignees[i].GetLogin())
		}

		allIssues = append(allIssues, &base.Issue{
			Title:        *issue.Title,
			Number:       int64(*issue.Number),
			PosterID:     issue.GetUser().GetID(),
			PosterName:   issue.GetUser().GetLogin(),
			PosterEmail:  issue.GetUser().GetEmail(),
			Content:      issue.GetBody(),
			Milestone:    issue.GetMilestone().GetTitle(),
			State:        issue.GetState(),
			Created:      issue.GetCreatedAt().Time,
			Updated:      issue.GetUpdatedAt().Time,
			Labels:       labels,
			Reactions:    reactions,
			Closed:       issue.ClosedAt.GetTime(),
			IsLocked:     issue.GetLocked(),
			Assignees:    assignees,
			ForeignIndex: int64(*issue.Number),
		})
	}

	return allIssues, len(issues) < perPage, nil
}

// SupportGetRepoComments returns true if it supports getting repo comments
func (g *GithubDownloaderV3) SupportGetRepoComments() bool {
	return true
}

// GetComments returns comments according to the given commentable
func (g *GithubDownloaderV3) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
	comments, err := g.getComments(commentable)
	return comments, false, err
}
func (g *GithubDownloaderV3) getComments(commentable base.Commentable) ([]*base.Comment, error) {
|
2020-01-23 12:28:15 -05:00
|
|
|
var (
|
2020-10-25 01:11:03 -04:00
|
|
|
allComments = make([]*base.Comment, 0, g.maxPerPage)
|
2020-01-23 12:28:15 -05:00
|
|
|
created = "created"
|
|
|
|
asc = "asc"
|
|
|
|
)
|
2019-05-06 21:12:51 -04:00
|
|
|
opt := &github.IssueListCommentsOptions{
|
2020-07-31 10:22:34 -04:00
|
|
|
Sort: &created,
|
|
|
|
Direction: &asc,
|
2019-05-06 21:12:51 -04:00
|
|
|
ListOptions: github.ListOptions{
|
2020-10-25 01:11:03 -04:00
|
|
|
PerPage: g.maxPerPage,
|
2019-05-06 21:12:51 -04:00
|
|
|
},
|
|
|
|
}
|
|
|
|
for {
|
2021-10-15 01:47:15 -04:00
|
|
|
g.waitAndPickClient()
|
Store the foreign ID of issues during migration (#18446)
2022-03-17 13:08:35 -04:00
|
|
|
comments, resp, err := g.getClient().Issues.ListComments(g.ctx, g.repoOwner, g.repoName, int(commentable.GetForeignIndex()), opt)
|
2019-05-06 21:12:51 -04:00
|
|
|
if err != nil {
|
2022-10-24 15:29:17 -04:00
|
|
|
return nil, fmt.Errorf("error while listing comments: %w", err)
|
2019-05-06 21:12:51 -04:00
|
|
|
}
|
2021-10-15 01:47:15 -04:00
|
|
|
g.setRate(&resp.Rate)
|
2019-05-06 21:12:51 -04:00
|
|
|
for _, comment := range comments {
|
2020-01-15 06:14:07 -05:00
|
|
|
// get reactions
|
|
|
|
var reactions []*base.Reaction
|
2021-11-14 14:11:10 -05:00
|
|
|
if !g.SkipReactions {
|
|
|
|
for i := 1; ; i++ {
|
|
|
|
g.waitAndPickClient()
|
|
|
|
res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(g.ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{
|
|
|
|
Page: i,
|
|
|
|
PerPage: g.maxPerPage,
|
2020-01-15 06:14:07 -05:00
|
|
|
})
|
2021-11-14 14:11:10 -05:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
g.setRate(&resp.Rate)
|
|
|
|
if len(res) == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
for _, reaction := range res {
|
|
|
|
reactions = append(reactions, &base.Reaction{
|
|
|
|
UserID: reaction.User.GetID(),
|
|
|
|
UserName: reaction.User.GetLogin(),
|
|
|
|
Content: reaction.GetContent(),
|
|
|
|
})
|
|
|
|
}
|
2020-01-15 06:14:07 -05:00
|
|
|
}
|
2019-05-06 21:12:51 -04:00
|
|
|
}
|
2021-08-10 05:49:43 -04:00
|
|
|
|
2019-05-06 21:12:51 -04:00
|
|
|
allComments = append(allComments, &base.Comment{
|
Store the foreign ID of issues during migration (#18446)
2022-03-17 13:08:35 -04:00
|
|
|
IssueIndex: commentable.GetLocalIndex(),
|
2022-03-06 14:00:41 -05:00
|
|
|
Index: comment.GetID(),
|
2021-08-10 05:49:43 -04:00
|
|
|
PosterID: comment.GetUser().GetID(),
|
|
|
|
PosterName: comment.GetUser().GetLogin(),
|
|
|
|
PosterEmail: comment.GetUser().GetEmail(),
|
|
|
|
Content: comment.GetBody(),
|
2023-04-08 07:27:30 -04:00
|
|
|
Created: comment.GetCreatedAt().Time,
|
|
|
|
Updated: comment.GetUpdatedAt().Time,
|
2019-05-06 21:12:51 -04:00
|
|
|
Reactions: reactions,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
if resp.NextPage == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
opt.Page = resp.NextPage
|
|
|
|
}
|
|
|
|
return allComments, nil
|
|
|
|
}
|
|
|
|
|
2021-06-30 03:23:49 -04:00
|
|
|
// GetAllComments returns repository comments according to page and perPage
|
|
|
|
func (g *GithubDownloaderV3) GetAllComments(page, perPage int) ([]*base.Comment, bool, error) {
|
|
|
|
var (
|
|
|
|
allComments = make([]*base.Comment, 0, perPage)
|
|
|
|
created = "created"
|
|
|
|
asc = "asc"
|
|
|
|
)
|
2021-09-15 07:30:19 -04:00
|
|
|
if perPage > g.maxPerPage {
|
|
|
|
perPage = g.maxPerPage
|
|
|
|
}
|
2021-06-30 03:23:49 -04:00
|
|
|
opt := &github.IssueListCommentsOptions{
|
|
|
|
Sort: &created,
|
|
|
|
Direction: &asc,
|
|
|
|
ListOptions: github.ListOptions{
|
|
|
|
Page: page,
|
|
|
|
PerPage: perPage,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2021-10-15 01:47:15 -04:00
|
|
|
g.waitAndPickClient()
|
|
|
|
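// passing 0 as the issue number asks the GitHub API for comments across all issues in the repository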
comments, resp, err := g.getClient().Issues.ListComments(g.ctx, g.repoOwner, g.repoName, 0, opt)
|
2021-06-30 03:23:49 -04:00
|
|
|
if err != nil {
|
2022-10-24 15:29:17 -04:00
|
|
|
return nil, false, fmt.Errorf("error while listing comments: %w", err)
|
2021-06-30 03:23:49 -04:00
|
|
|
}
|
2022-01-20 12:46:10 -05:00
|
|
|
isEnd := resp.NextPage == 0
|
2021-09-15 07:30:19 -04:00
|
|
|
|
|
|
|
log.Trace("Request get comments %d/%d, but in fact get %d, next page is %d", perPage, page, len(comments), resp.NextPage)
|
2021-10-15 01:47:15 -04:00
|
|
|
g.setRate(&resp.Rate)
|
2021-06-30 03:23:49 -04:00
|
|
|
for _, comment := range comments {
|
|
|
|
// get reactions
|
|
|
|
var reactions []*base.Reaction
|
2021-11-14 14:11:10 -05:00
|
|
|
if !g.SkipReactions {
|
|
|
|
for i := 1; ; i++ {
|
|
|
|
g.waitAndPickClient()
|
|
|
|
res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(g.ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{
|
|
|
|
Page: i,
|
|
|
|
PerPage: g.maxPerPage,
|
2021-06-30 03:23:49 -04:00
|
|
|
})
|
2021-11-14 14:11:10 -05:00
|
|
|
if err != nil {
|
|
|
|
return nil, false, err
|
|
|
|
}
|
|
|
|
g.setRate(&resp.Rate)
|
|
|
|
if len(res) == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
for _, reaction := range res {
|
|
|
|
reactions = append(reactions, &base.Reaction{
|
|
|
|
UserID: reaction.User.GetID(),
|
|
|
|
UserName: reaction.User.GetLogin(),
|
|
|
|
Content: reaction.GetContent(),
|
|
|
|
})
|
|
|
|
}
|
2021-06-30 03:23:49 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
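// the comment's IssueURL ends with the issue number; take the last path segment as the issue index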
idx := strings.LastIndex(*comment.IssueURL, "/")
|
|
|
|
issueIndex, _ := strconv.ParseInt((*comment.IssueURL)[idx+1:], 10, 64)
|
|
|
|
allComments = append(allComments, &base.Comment{
|
|
|
|
IssueIndex: issueIndex,
|
2022-03-06 14:00:41 -05:00
|
|
|
Index: comment.GetID(),
|
2021-08-10 05:49:43 -04:00
|
|
|
PosterID: comment.GetUser().GetID(),
|
|
|
|
PosterName: comment.GetUser().GetLogin(),
|
|
|
|
PosterEmail: comment.GetUser().GetEmail(),
|
|
|
|
Content: comment.GetBody(),
|
2023-04-08 07:27:30 -04:00
|
|
|
Created: comment.GetCreatedAt().Time,
|
|
|
|
Updated: comment.GetUpdatedAt().Time,
|
2021-06-30 03:23:49 -04:00
|
|
|
Reactions: reactions,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-09-15 07:30:19 -04:00
|
|
|
return allComments, isEnd, nil
|
2021-06-30 03:23:49 -04:00
|
|
|
}
|
|
|
|
|
2019-05-30 16:26:57 -04:00
|
|
|
// GetPullRequests returns pull requests according to page and perPage
|
2020-10-14 00:06:00 -04:00
|
|
|
func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
|
2020-10-25 01:11:03 -04:00
|
|
|
if perPage > g.maxPerPage {
|
|
|
|
perPage = g.maxPerPage
|
|
|
|
}
|
2019-05-06 21:12:51 -04:00
|
|
|
opt := &github.PullRequestListOptions{
|
|
|
|
Sort: "created",
|
|
|
|
Direction: "asc",
|
|
|
|
State: "all",
|
|
|
|
ListOptions: github.ListOptions{
|
2019-05-30 16:26:57 -04:00
|
|
|
PerPage: perPage,
|
|
|
|
Page: page,
|
2019-05-06 21:12:51 -04:00
|
|
|
},
|
|
|
|
}
|
2022-01-20 12:46:10 -05:00
|
|
|
allPRs := make([]*base.PullRequest, 0, perPage)
|
2021-10-15 01:47:15 -04:00
|
|
|
g.waitAndPickClient()
|
|
|
|
prs, resp, err := g.getClient().PullRequests.List(g.ctx, g.repoOwner, g.repoName, opt)
|
2019-05-30 16:26:57 -04:00
|
|
|
if err != nil {
|
2022-10-24 15:29:17 -04:00
|
|
|
return nil, false, fmt.Errorf("error while listing pull requests: %w", err)
|
2019-05-30 16:26:57 -04:00
|
|
|
}
|
2021-06-30 03:23:49 -04:00
|
|
|
log.Trace("Request get pull requests %d/%d, but in fact get %d", perPage, page, len(prs))
|
2021-10-15 01:47:15 -04:00
|
|
|
g.setRate(&resp.Rate)
|
2019-05-30 16:26:57 -04:00
|
|
|
for _, pr := range prs {
|
2022-01-20 12:46:10 -05:00
|
|
|
labels := make([]*base.Label, 0, len(pr.Labels))
|
2019-05-30 16:26:57 -04:00
|
|
|
for _, l := range pr.Labels {
|
|
|
|
labels = append(labels, convertGithubLabel(l))
|
|
|
|
}
|
2019-05-06 21:12:51 -04:00
|
|
|
|
2020-01-15 06:14:07 -05:00
|
|
|
// get reactions
|
|
|
|
var reactions []*base.Reaction
|
2021-11-14 14:11:10 -05:00
|
|
|
if !g.SkipReactions {
|
|
|
|
for i := 1; ; i++ {
|
|
|
|
g.waitAndPickClient()
|
|
|
|
res, resp, err := g.getClient().Reactions.ListIssueReactions(g.ctx, g.repoOwner, g.repoName, pr.GetNumber(), &github.ListOptions{
|
|
|
|
Page: i,
|
|
|
|
PerPage: perPage,
|
2020-01-15 06:14:07 -05:00
|
|
|
})
|
2021-11-14 14:11:10 -05:00
|
|
|
if err != nil {
|
|
|
|
return nil, false, err
|
|
|
|
}
|
|
|
|
g.setRate(&resp.Rate)
|
|
|
|
if len(res) == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
for _, reaction := range res {
|
|
|
|
reactions = append(reactions, &base.Reaction{
|
|
|
|
UserID: reaction.User.GetID(),
|
|
|
|
UserName: reaction.User.GetLogin(),
|
|
|
|
Content: reaction.GetContent(),
|
|
|
|
})
|
|
|
|
}
|
2020-01-15 06:14:07 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-10-15 01:47:15 -04:00
|
|
|
// download the patch and save it as a tmp file
|
|
|
|
g.waitAndPickClient()
|
|
|
|
|
2019-05-30 16:26:57 -04:00
|
|
|
allPRs = append(allPRs, &base.PullRequest{
|
2021-08-10 05:49:43 -04:00
|
|
|
Title: pr.GetTitle(),
|
|
|
|
Number: int64(pr.GetNumber()),
|
|
|
|
PosterID: pr.GetUser().GetID(),
|
|
|
|
PosterName: pr.GetUser().GetLogin(),
|
|
|
|
PosterEmail: pr.GetUser().GetEmail(),
|
|
|
|
Content: pr.GetBody(),
|
|
|
|
Milestone: pr.GetMilestone().GetTitle(),
|
|
|
|
State: pr.GetState(),
|
2023-04-08 07:27:30 -04:00
|
|
|
Created: pr.GetCreatedAt().Time,
|
|
|
|
Updated: pr.GetUpdatedAt().Time,
|
2023-05-30 20:31:51 -04:00
|
|
|
Closed: pr.ClosedAt.GetTime(),
|
2019-05-30 16:26:57 -04:00
|
|
|
Labels: labels,
|
2021-08-10 05:49:43 -04:00
|
|
|
Merged: pr.MergedAt != nil,
|
|
|
|
MergeCommitSHA: pr.GetMergeCommitSHA(),
|
2023-05-30 20:31:51 -04:00
|
|
|
MergedTime: pr.MergedAt.GetTime(),
|
2019-05-30 16:26:57 -04:00
|
|
|
IsLocked: pr.ActiveLockReason != nil,
|
|
|
|
Head: base.PullRequestBranch{
|
2021-08-10 05:49:43 -04:00
|
|
|
Ref: pr.GetHead().GetRef(),
|
|
|
|
SHA: pr.GetHead().GetSHA(),
|
|
|
|
OwnerName: pr.GetHead().GetUser().GetLogin(),
|
|
|
|
RepoName: pr.GetHead().GetRepo().GetName(),
|
2022-09-04 06:47:56 -04:00
|
|
|
CloneURL: pr.GetHead().GetRepo().GetCloneURL(), // see the SECURITY check (CheckAndEnsureSafePR) below
|
2019-05-30 16:26:57 -04:00
|
|
|
},
|
|
|
|
Base: base.PullRequestBranch{
|
2021-08-10 05:49:43 -04:00
|
|
|
Ref: pr.GetBase().GetRef(),
|
|
|
|
SHA: pr.GetBase().GetSHA(),
|
|
|
|
RepoName: pr.GetBase().GetRepo().GetName(),
|
|
|
|
OwnerName: pr.GetBase().GetUser().GetLogin(),
|
2019-05-30 16:26:57 -04:00
|
|
|
},
|
2022-09-04 06:47:56 -04:00
|
|
|
PatchURL: pr.GetPatchURL(), // see the SECURITY check (CheckAndEnsureSafePR) below
|
Store the foreign ID of issues during migration (#18446)
2022-03-17 13:08:35 -04:00
|
|
|
Reactions: reactions,
|
|
|
|
ForeignIndex: int64(*pr.Number),
|
2024-10-13 15:58:13 -04:00
|
|
|
IsDraft: pr.GetDraft(),
|
2019-05-30 16:26:57 -04:00
|
|
|
})
|
2022-09-04 06:47:56 -04:00
|
|
|
|
|
|
|
// SECURITY: Ensure that the PR is safe
|
|
|
|
_ = CheckAndEnsureSafePR(allPRs[len(allPRs)-1], g.baseURL, g)
|
2019-05-06 21:12:51 -04:00
|
|
|
}
|
2019-05-30 16:26:57 -04:00
|
|
|
|
2020-10-14 00:06:00 -04:00
|
|
|
return allPRs, len(prs) < perPage, nil
|
2019-05-06 21:12:51 -04:00
|
|
|
}
|
2020-01-23 12:28:15 -05:00
|
|
|
|
|
|
|
func convertGithubReview(r *github.PullRequestReview) *base.Review {
|
|
|
|
return &base.Review{
|
|
|
|
ID: r.GetID(),
|
|
|
|
ReviewerID: r.GetUser().GetID(),
|
|
|
|
ReviewerName: r.GetUser().GetLogin(),
|
|
|
|
CommitID: r.GetCommitID(),
|
|
|
|
Content: r.GetBody(),
|
2023-04-08 07:27:30 -04:00
|
|
|
CreatedAt: r.GetSubmittedAt().Time,
|
2020-01-23 12:28:15 -05:00
|
|
|
State: r.GetState(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (g *GithubDownloaderV3) convertGithubReviewComments(cs []*github.PullRequestComment) ([]*base.ReviewComment, error) {
|
2022-01-20 12:46:10 -05:00
|
|
|
rcs := make([]*base.ReviewComment, 0, len(cs))
|
2020-01-23 12:28:15 -05:00
|
|
|
for _, c := range cs {
|
|
|
|
// get reactions
|
|
|
|
var reactions []*base.Reaction
|
2021-11-14 14:11:10 -05:00
|
|
|
if !g.SkipReactions {
|
|
|
|
for i := 1; ; i++ {
|
|
|
|
g.waitAndPickClient()
|
|
|
|
res, resp, err := g.getClient().Reactions.ListPullRequestCommentReactions(g.ctx, g.repoOwner, g.repoName, c.GetID(), &github.ListOptions{
|
|
|
|
Page: i,
|
|
|
|
PerPage: g.maxPerPage,
|
2020-01-23 12:28:15 -05:00
|
|
|
})
|
2021-11-14 14:11:10 -05:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
g.setRate(&resp.Rate)
|
|
|
|
if len(res) == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
for _, reaction := range res {
|
|
|
|
reactions = append(reactions, &base.Reaction{
|
|
|
|
UserID: reaction.User.GetID(),
|
|
|
|
UserName: reaction.User.GetLogin(),
|
|
|
|
Content: reaction.GetContent(),
|
|
|
|
})
|
|
|
|
}
|
2020-01-23 12:28:15 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
rcs = append(rcs, &base.ReviewComment{
|
|
|
|
ID: c.GetID(),
|
|
|
|
InReplyTo: c.GetInReplyTo(),
|
|
|
|
Content: c.GetBody(),
|
|
|
|
TreePath: c.GetPath(),
|
|
|
|
DiffHunk: c.GetDiffHunk(),
|
|
|
|
Position: c.GetPosition(),
|
|
|
|
CommitID: c.GetCommitID(),
|
|
|
|
PosterID: c.GetUser().GetID(),
|
|
|
|
Reactions: reactions,
|
2023-04-08 07:27:30 -04:00
|
|
|
CreatedAt: c.GetCreatedAt().Time,
|
|
|
|
UpdatedAt: c.GetUpdatedAt().Time,
|
2020-01-23 12:28:15 -05:00
|
|
|
})
|
|
|
|
}
|
|
|
|
return rcs, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// GetReviews returns pull request reviews
|
Store the foreign ID of issues during migration (#18446)
2022-03-17 13:08:35 -04:00
|
|
|
func (g *GithubDownloaderV3) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
|
2022-01-20 12:46:10 -05:00
|
|
|
allReviews := make([]*base.Review, 0, g.maxPerPage)
|
2023-01-17 02:22:00 -05:00
|
|
|
if g.SkipReviews {
|
|
|
|
return allReviews, nil
|
|
|
|
}
|
2020-01-23 12:28:15 -05:00
|
|
|
opt := &github.ListOptions{
|
2020-10-25 01:11:03 -04:00
|
|
|
PerPage: g.maxPerPage,
|
2020-01-23 12:28:15 -05:00
|
|
|
}
|
2022-06-08 22:50:05 -04:00
|
|
|
// Get approve/request change reviews
|
2020-01-23 12:28:15 -05:00
|
|
|
for {
|
2021-10-15 01:47:15 -04:00
|
|
|
g.waitAndPickClient()
|
Store the foreign ID of issues during migration (#18446)
2022-03-17 13:08:35 -04:00
|
|
|
reviews, resp, err := g.getClient().PullRequests.ListReviews(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), opt)
|
2020-01-23 12:28:15 -05:00
|
|
|
if err != nil {
|
2022-10-24 15:29:17 -04:00
|
|
|
return nil, fmt.Errorf("error while listing reviews: %w", err)
|
2020-01-23 12:28:15 -05:00
|
|
|
}
|
2021-10-15 01:47:15 -04:00
|
|
|
g.setRate(&resp.Rate)
|
2020-01-23 12:28:15 -05:00
|
|
|
for _, review := range reviews {
|
|
|
|
r := convertGithubReview(review)
|
Store the foreign ID of issues during migration (#18446)
2022-03-17 13:08:35 -04:00
|
|
|
r.IssueIndex = reviewable.GetLocalIndex()
|
2020-01-23 12:28:15 -05:00
|
|
|
// retrieve all review comments
|
|
|
|
opt2 := &github.ListOptions{
|
2020-10-25 01:11:03 -04:00
|
|
|
PerPage: g.maxPerPage,
|
2020-01-23 12:28:15 -05:00
|
|
|
}
|
|
|
|
for {
|
2021-10-15 01:47:15 -04:00
|
|
|
g.waitAndPickClient()
|
Store the foreign ID of issues during migration (#18446)
2022-03-17 13:08:35 -04:00
|
|
|
reviewComments, resp, err := g.getClient().PullRequests.ListReviewComments(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), review.GetID(), opt2)
|
2020-01-23 12:28:15 -05:00
|
|
|
if err != nil {
|
2022-10-24 15:29:17 -04:00
|
|
|
return nil, fmt.Errorf("error while listing review comments: %w", err)
|
2020-01-23 12:28:15 -05:00
|
|
|
}
|
2021-10-15 01:47:15 -04:00
|
|
|
g.setRate(&resp.Rate)
|
2020-01-23 12:28:15 -05:00
|
|
|
|
|
|
|
cs, err := g.convertGithubReviewComments(reviewComments)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
r.Comments = append(r.Comments, cs...)
|
|
|
|
if resp.NextPage == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
opt2.Page = resp.NextPage
|
|
|
|
}
|
|
|
|
allReviews = append(allReviews, r)
|
|
|
|
}
|
|
|
|
if resp.NextPage == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
opt.Page = resp.NextPage
|
|
|
|
}
|
2022-06-08 22:50:05 -04:00
|
|
|
// Get requested reviews
|
|
|
|
for {
|
|
|
|
g.waitAndPickClient()
|
|
|
|
reviewers, resp, err := g.getClient().PullRequests.ListReviewers(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), opt)
|
|
|
|
if err != nil {
|
2022-10-24 15:29:17 -04:00
|
|
|
return nil, fmt.Errorf("error while listing reviewers: %w", err)
|
2022-06-08 22:50:05 -04:00
|
|
|
}
|
|
|
|
g.setRate(&resp.Rate)
|
|
|
|
for _, user := range reviewers.Users {
|
|
|
|
r := &base.Review{
|
|
|
|
ReviewerID: user.GetID(),
|
|
|
|
ReviewerName: user.GetLogin(),
|
|
|
|
State: base.ReviewStateRequestReview,
|
|
|
|
IssueIndex: reviewable.GetLocalIndex(),
|
|
|
|
}
|
|
|
|
allReviews = append(allReviews, r)
|
|
|
|
}
|
|
|
|
// TODO: Handle Team requests
|
|
|
|
if resp.NextPage == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
opt.Page = resp.NextPage
|
|
|
|
}
|
2020-01-23 12:28:15 -05:00
|
|
|
return allReviews, nil
|
|
|
|
}
|