From 20ca5e37e933e0f487e2a3996f34feec472c5796 Mon Sep 17 00:00:00 2001 From: wxiaoguang Date: Tue, 11 Jun 2024 11:31:23 +0800 Subject: [PATCH 01/18] Delete legacy cookie before setting new cookie (#31306) Try to fix #31202 (cherry picked from commit 5342a61124bf2d4fbe4c1d560b13866198149ac9) --- modules/web/middleware/cookie.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/web/middleware/cookie.go b/modules/web/middleware/cookie.go index ec6b06f993..f2d25f5b1c 100644 --- a/modules/web/middleware/cookie.go +++ b/modules/web/middleware/cookie.go @@ -35,6 +35,10 @@ func GetSiteCookie(req *http.Request, name string) string { // SetSiteCookie returns given cookie value from request header. func SetSiteCookie(resp http.ResponseWriter, name, value string, maxAge int) { + // Previous versions would use a cookie path with a trailing /. + // These are more specific than cookies without a trailing /, so + // we need to delete these if they exist. + deleteLegacySiteCookie(resp, name) cookie := &http.Cookie{ Name: name, Value: url.QueryEscape(value), @@ -46,10 +50,6 @@ func SetSiteCookie(resp http.ResponseWriter, name, value string, maxAge int) { SameSite: setting.SessionConfig.SameSite, } resp.Header().Add("Set-Cookie", cookie.String()) - // Previous versions would use a cookie path with a trailing /. - // These are more specific than cookies without a trailing /, so - // we need to delete these if they exist. - deleteLegacySiteCookie(resp, name) } // deleteLegacySiteCookie deletes the cookie with the given name at the cookie From 8346cd6c881673eb4a19e0b6c540295a6a53fbcc Mon Sep 17 00:00:00 2001 From: 6543 <6543@obermui.de> Date: Tue, 11 Jun 2024 15:47:13 +0200 Subject: [PATCH 02/18] update nix flake and add gofumpt (#31320) nix flake maintenance (cherry picked from commit 61c97fdef10d29f8813ee18734b37bb2797e3bab) --- flake.lock | 6 +++--- flake.nix | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 0b2278f080..606f8836c1 100644 --- a/flake.lock +++ b/flake.lock @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1715534503, - "narHash": "sha256-5ZSVkFadZbFP1THataCaSf0JH2cAH3S29hU9rrxTEqk=", + "lastModified": 1717974879, + "narHash": "sha256-GTO3C88+5DX171F/gVS3Qga/hOs/eRMxPFpiHq2t+D8=", "owner": "nixos", "repo": "nixpkgs", - "rev": "2057814051972fa1453ddfb0d98badbea9b83c06", + "rev": "c7b821ba2e1e635ba5a76d299af62821cbcb09f3", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index c6e915e9db..22354663dd 100644 --- a/flake.nix +++ b/flake.nix @@ -30,6 +30,7 @@ # backend go_1_22 + gofumpt ]; }; } From d8bc0495de03d9232228a9b5aeb6303ceb244537 Mon Sep 17 00:00:00 2001 From: silverwind Date: Tue, 11 Jun 2024 20:47:45 +0200 Subject: [PATCH 03/18] Enable `unparam` linter (#31277) Enable [unparam](https://github.com/mvdan/unparam) linter. Often I could not tell the intention why param is unused, so I put `//nolint` for those cases like webhook request creation functions never using `ctx`. 
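For reference, the suppression used in those cases is the standard
golangci-lint directive placed on the declaration itself, as the hunks
below show, e.g.:

    func xmlResponse(ctx *context.Context, status int, obj any) { //nolint:unparam
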
--------- Co-authored-by: Lunny Xiao Co-authored-by: delvh (cherry picked from commit fc2d75f86d77b022ece848acf2581c14ef21d43b) Conflicts: modules/setting/config_env.go modules/storage/azureblob.go services/webhook/dingtalk.go services/webhook/discord.go services/webhook/feishu.go services/webhook/matrix.go services/webhook/msteams.go services/webhook/packagist.go services/webhook/slack.go services/webhook/telegram.go services/webhook/wechatwork.go run make lint-go and fix Forgejo specific warnings --- .golangci.yml | 1 + models/dbfs/dbfile.go | 17 ++----- models/issues/issue_search.go | 51 ++++++++----------- models/issues/pull_list.go | 16 ++---- modules/auth/password/hash/common.go | 2 +- modules/git/git.go | 17 +++---- modules/git/object_format.go | 4 +- modules/markup/markdown/transform_codespan.go | 2 +- modules/packages/cran/metadata.go | 18 +++---- modules/setting/config_env.go | 2 +- modules/setting/storage.go | 2 +- modules/util/keypair.go | 9 ++-- routers/api/actions/artifacts.go | 8 +-- routers/api/actions/artifacts_utils.go | 8 +-- routers/api/packages/container/blob.go | 2 +- routers/api/packages/nuget/nuget.go | 2 +- routers/api/v1/repo/compare.go | 2 +- routers/api/v1/repo/pull.go | 28 +++++----- routers/private/hook_pre_receive.go | 4 +- routers/web/admin/config.go | 2 +- routers/web/repo/badges/badges.go | 2 +- services/actions/notifier_helper.go | 2 +- services/f3/driver/repository.go | 2 +- services/f3/driver/tests/options.go | 2 +- services/mailer/mail_admin_new_user.go | 2 +- services/pull/merge.go | 2 +- services/remote/promote.go | 2 +- 27 files changed, 88 insertions(+), 123 deletions(-) diff --git a/.golangci.yml b/.golangci.yml index c55a08bba0..57f3c86f05 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -22,6 +22,7 @@ linters: - typecheck - unconvert - unused + - unparam - wastedassign run: diff --git a/models/dbfs/dbfile.go b/models/dbfs/dbfile.go index 3650ce057e..dd27b5c36b 100644 --- a/models/dbfs/dbfile.go +++ b/models/dbfs/dbfile.go @@ -215,16 +215,15 @@ func fileTimestampToTime(timestamp int64) time.Time { return time.UnixMicro(timestamp) } -func (f *file) loadMetaByPath() (*dbfsMeta, error) { +func (f *file) loadMetaByPath() error { var fileMeta dbfsMeta if ok, err := db.GetEngine(f.ctx).Where("full_path = ?", f.fullPath).Get(&fileMeta); err != nil { - return nil, err + return err } else if ok { f.metaID = fileMeta.ID f.blockSize = fileMeta.BlockSize - return &fileMeta, nil } - return nil, nil + return nil } func (f *file) open(flag int) (err error) { @@ -288,10 +287,7 @@ func (f *file) createEmpty() error { if err != nil { return err } - if _, err = f.loadMetaByPath(); err != nil { - return err - } - return nil + return f.loadMetaByPath() } func (f *file) truncate() error { @@ -368,8 +364,5 @@ func buildPath(path string) string { func newDbFile(ctx context.Context, path string) (*file, error) { path = buildPath(path) f := &file{ctx: ctx, fullPath: path, blockSize: defaultFileBlockSize} - if _, err := f.loadMetaByPath(); err != nil { - return nil, err - } - return f, nil + return f, f.loadMetaByPath() } diff --git a/models/issues/issue_search.go b/models/issues/issue_search.go index 491def1229..c1d7d921a9 100644 --- a/models/issues/issue_search.go +++ b/models/issues/issue_search.go @@ -99,9 +99,9 @@ func applySorts(sess *xorm.Session, sortType string, priorityRepoID int64) { } } -func applyLimit(sess *xorm.Session, opts *IssuesOptions) *xorm.Session { +func applyLimit(sess *xorm.Session, opts *IssuesOptions) { if opts.Paginator == nil || 
opts.Paginator.IsListAll() { - return sess + return } start := 0 @@ -109,11 +109,9 @@ func applyLimit(sess *xorm.Session, opts *IssuesOptions) *xorm.Session { start = (opts.Paginator.Page - 1) * opts.Paginator.PageSize } sess.Limit(opts.Paginator.PageSize, start) - - return sess } -func applyLabelsCondition(sess *xorm.Session, opts *IssuesOptions) *xorm.Session { +func applyLabelsCondition(sess *xorm.Session, opts *IssuesOptions) { if len(opts.LabelIDs) > 0 { if opts.LabelIDs[0] == 0 { sess.Where("issue.id NOT IN (SELECT issue_id FROM issue_label)") @@ -136,11 +134,9 @@ func applyLabelsCondition(sess *xorm.Session, opts *IssuesOptions) *xorm.Session if len(opts.ExcludedLabelNames) > 0 { sess.And(builder.NotIn("issue.id", BuildLabelNamesIssueIDsCondition(opts.ExcludedLabelNames))) } - - return sess } -func applyMilestoneCondition(sess *xorm.Session, opts *IssuesOptions) *xorm.Session { +func applyMilestoneCondition(sess *xorm.Session, opts *IssuesOptions) { if len(opts.MilestoneIDs) == 1 && opts.MilestoneIDs[0] == db.NoConditionID { sess.And("issue.milestone_id = 0") } else if len(opts.MilestoneIDs) > 0 { @@ -153,11 +149,9 @@ func applyMilestoneCondition(sess *xorm.Session, opts *IssuesOptions) *xorm.Sess From("milestone"). Where(builder.In("name", opts.IncludeMilestones))) } - - return sess } -func applyProjectCondition(sess *xorm.Session, opts *IssuesOptions) *xorm.Session { +func applyProjectCondition(sess *xorm.Session, opts *IssuesOptions) { if opts.ProjectID > 0 { // specific project sess.Join("INNER", "project_issue", "issue.id = project_issue.issue_id"). And("project_issue.project_id=?", opts.ProjectID) @@ -166,10 +160,9 @@ func applyProjectCondition(sess *xorm.Session, opts *IssuesOptions) *xorm.Sessio } // opts.ProjectID == 0 means all projects, // do not need to apply any condition - return sess } -func applyProjectColumnCondition(sess *xorm.Session, opts *IssuesOptions) *xorm.Session { +func applyProjectColumnCondition(sess *xorm.Session, opts *IssuesOptions) { // opts.ProjectColumnID == 0 means all project columns, // do not need to apply any condition if opts.ProjectColumnID > 0 { @@ -177,10 +170,9 @@ func applyProjectColumnCondition(sess *xorm.Session, opts *IssuesOptions) *xorm. 
} else if opts.ProjectColumnID == db.NoConditionID { sess.In("issue.id", builder.Select("issue_id").From("project_issue").Where(builder.Eq{"project_board_id": 0})) } - return sess } -func applyRepoConditions(sess *xorm.Session, opts *IssuesOptions) *xorm.Session { +func applyRepoConditions(sess *xorm.Session, opts *IssuesOptions) { if len(opts.RepoIDs) == 1 { opts.RepoCond = builder.Eq{"issue.repo_id": opts.RepoIDs[0]} } else if len(opts.RepoIDs) > 1 { @@ -195,10 +187,9 @@ func applyRepoConditions(sess *xorm.Session, opts *IssuesOptions) *xorm.Session if opts.RepoCond != nil { sess.And(opts.RepoCond) } - return sess } -func applyConditions(sess *xorm.Session, opts *IssuesOptions) *xorm.Session { +func applyConditions(sess *xorm.Session, opts *IssuesOptions) { if len(opts.IssueIDs) > 0 { sess.In("issue.id", opts.IssueIDs) } @@ -261,8 +252,6 @@ func applyConditions(sess *xorm.Session, opts *IssuesOptions) *xorm.Session { if opts.User != nil { sess.And(issuePullAccessibleRepoCond("issue.repo_id", opts.User.ID, opts.Org, opts.Team, opts.IsPull.Value())) } - - return sess } // teamUnitsRepoCond returns query condition for those repo id in the special org team with special units access @@ -339,22 +328,22 @@ func issuePullAccessibleRepoCond(repoIDstr string, userID int64, org *organizati return cond } -func applyAssigneeCondition(sess *xorm.Session, assigneeID int64) *xorm.Session { - return sess.Join("INNER", "issue_assignees", "issue.id = issue_assignees.issue_id"). +func applyAssigneeCondition(sess *xorm.Session, assigneeID int64) { + sess.Join("INNER", "issue_assignees", "issue.id = issue_assignees.issue_id"). And("issue_assignees.assignee_id = ?", assigneeID) } -func applyPosterCondition(sess *xorm.Session, posterID int64) *xorm.Session { - return sess.And("issue.poster_id=?", posterID) +func applyPosterCondition(sess *xorm.Session, posterID int64) { + sess.And("issue.poster_id=?", posterID) } -func applyMentionedCondition(sess *xorm.Session, mentionedID int64) *xorm.Session { - return sess.Join("INNER", "issue_user", "issue.id = issue_user.issue_id"). +func applyMentionedCondition(sess *xorm.Session, mentionedID int64) { + sess.Join("INNER", "issue_user", "issue.id = issue_user.issue_id"). And("issue_user.is_mentioned = ?", true). And("issue_user.uid = ?", mentionedID) } -func applyReviewRequestedCondition(sess *xorm.Session, reviewRequestedID int64) *xorm.Session { +func applyReviewRequestedCondition(sess *xorm.Session, reviewRequestedID int64) { existInTeamQuery := builder.Select("team_user.team_id"). From("team_user"). Where(builder.Eq{"team_user.uid": reviewRequestedID}) @@ -375,11 +364,11 @@ func applyReviewRequestedCondition(sess *xorm.Session, reviewRequestedID int64) ), builder.In("review.id", maxReview), )) - return sess.Where("issue.poster_id <> ?", reviewRequestedID). + sess.Where("issue.poster_id <> ?", reviewRequestedID). And(builder.In("issue.id", subQuery)) } -func applyReviewedCondition(sess *xorm.Session, reviewedID int64) *xorm.Session { +func applyReviewedCondition(sess *xorm.Session, reviewedID int64) { // Query for pull requests where you are a reviewer or commenter, excluding // any pull requests already returned by the review requested filter. 
notPoster := builder.Neq{"issue.poster_id": reviewedID} @@ -406,11 +395,11 @@ func applyReviewedCondition(sess *xorm.Session, reviewedID int64) *xorm.Session builder.In("type", CommentTypeComment, CommentTypeCode, CommentTypeReview), )), ) - return sess.And(notPoster, builder.Or(reviewed, commented)) + sess.And(notPoster, builder.Or(reviewed, commented)) } -func applySubscribedCondition(sess *xorm.Session, subscriberID int64) *xorm.Session { - return sess.And( +func applySubscribedCondition(sess *xorm.Session, subscriberID int64) { + sess.And( builder. NotIn("issue.id", builder.Select("issue_id"). diff --git a/models/issues/pull_list.go b/models/issues/pull_list.go index 52f2f01a5a..9b838df5c3 100644 --- a/models/issues/pull_list.go +++ b/models/issues/pull_list.go @@ -28,7 +28,7 @@ type PullRequestsOptions struct { MilestoneID int64 } -func listPullRequestStatement(ctx context.Context, baseRepoID int64, opts *PullRequestsOptions) (*xorm.Session, error) { +func listPullRequestStatement(ctx context.Context, baseRepoID int64, opts *PullRequestsOptions) *xorm.Session { sess := db.GetEngine(ctx).Where("pull_request.base_repo_id=?", baseRepoID) sess.Join("INNER", "issue", "pull_request.issue_id = issue.id") @@ -46,7 +46,7 @@ func listPullRequestStatement(ctx context.Context, baseRepoID int64, opts *PullR sess.And("issue.milestone_id=?", opts.MilestoneID) } - return sess, nil + return sess } func GetUnmergedPullRequestsByHeadInfoMax(ctx context.Context, repoID, olderThan int64, branch string) ([]*PullRequest, error) { @@ -136,23 +136,15 @@ func PullRequests(ctx context.Context, baseRepoID int64, opts *PullRequestsOptio opts.Page = 1 } - countSession, err := listPullRequestStatement(ctx, baseRepoID, opts) - if err != nil { - log.Error("listPullRequestStatement: %v", err) - return nil, 0, err - } + countSession := listPullRequestStatement(ctx, baseRepoID, opts) maxResults, err := countSession.Count(new(PullRequest)) if err != nil { log.Error("Count PRs: %v", err) return nil, maxResults, err } - findSession, err := listPullRequestStatement(ctx, baseRepoID, opts) + findSession := listPullRequestStatement(ctx, baseRepoID, opts) applySorts(findSession, opts.SortType, 0) - if err != nil { - log.Error("listPullRequestStatement: %v", err) - return nil, maxResults, err - } findSession = db.SetSessionPagination(findSession, opts) prs := make([]*PullRequest, 0, opts.PageSize) return prs, maxResults, findSession.Find(&prs) diff --git a/modules/auth/password/hash/common.go b/modules/auth/password/hash/common.go index ac6faf35cf..487c0738f4 100644 --- a/modules/auth/password/hash/common.go +++ b/modules/auth/password/hash/common.go @@ -18,7 +18,7 @@ func parseIntParam(value, param, algorithmName, config string, previousErr error return parsed, previousErr // <- Keep the previous error as this function should still return an error once everything has been checked if any call failed } -func parseUIntParam(value, param, algorithmName, config string, previousErr error) (uint64, error) { +func parseUIntParam(value, param, algorithmName, config string, previousErr error) (uint64, error) { //nolint:unparam parsed, err := strconv.ParseUint(value, 10, 64) if err != nil { log.Error("invalid integer for %s representation in %s hash spec %s", param, algorithmName, config) diff --git a/modules/git/git.go b/modules/git/git.go index d36718d86a..70232c86a0 100644 --- a/modules/git/git.go +++ b/modules/git/git.go @@ -42,20 +42,19 @@ var ( ) // loadGitVersion returns current Git version from shell. Internal usage only. 
-func loadGitVersion() (*version.Version, error) { +func loadGitVersion() error { // doesn't need RWMutex because it's executed by Init() if gitVersion != nil { - return gitVersion, nil + return nil } - stdout, _, runErr := NewCommand(DefaultContext, "version").RunStdString(nil) if runErr != nil { - return nil, runErr + return runErr } fields := strings.Fields(stdout) if len(fields) < 3 { - return nil, fmt.Errorf("invalid git version output: %s", stdout) + return fmt.Errorf("invalid git version output: %s", stdout) } var versionString string @@ -70,7 +69,7 @@ func loadGitVersion() (*version.Version, error) { var err error gitVersion, err = version.NewVersion(versionString) - return gitVersion, err + return err } // SetExecutablePath changes the path of git executable and checks the file permission and version. @@ -85,7 +84,7 @@ func SetExecutablePath(path string) error { } GitExecutable = absPath - _, err = loadGitVersion() + err = loadGitVersion() if err != nil { return fmt.Errorf("unable to load git version: %w", err) } @@ -312,7 +311,7 @@ func syncGitConfig() (err error) { // CheckGitVersionAtLeast check git version is at least the constraint version func CheckGitVersionAtLeast(atLeast string) error { - if _, err := loadGitVersion(); err != nil { + if err := loadGitVersion(); err != nil { return err } atLeastVersion, err := version.NewVersion(atLeast) @@ -327,7 +326,7 @@ func CheckGitVersionAtLeast(atLeast string) error { // CheckGitVersionEqual checks if the git version is equal to the constraint version. func CheckGitVersionEqual(equal string) error { - if _, err := loadGitVersion(); err != nil { + if err := loadGitVersion(); err != nil { return err } atLeastVersion, err := version.NewVersion(equal) diff --git a/modules/git/object_format.go b/modules/git/object_format.go index 2b462589a3..db9120d827 100644 --- a/modules/git/object_format.go +++ b/modules/git/object_format.go @@ -34,13 +34,13 @@ type ObjectFormat interface { ComputeHash(t ObjectType, content []byte) ObjectID } -func computeHash(dst []byte, hasher hash.Hash, t ObjectType, content []byte) []byte { +func computeHash(dst []byte, hasher hash.Hash, t ObjectType, content []byte) { _, _ = hasher.Write(t.Bytes()) _, _ = hasher.Write([]byte(" ")) _, _ = hasher.Write([]byte(strconv.Itoa(len(content)))) _, _ = hasher.Write([]byte{0}) _, _ = hasher.Write(content) - return hasher.Sum(dst) + hasher.Sum(dst) } /* SHA1 Type */ diff --git a/modules/markup/markdown/transform_codespan.go b/modules/markup/markdown/transform_codespan.go index 0cf1169dee..a2cd4fb5ba 100644 --- a/modules/markup/markdown/transform_codespan.go +++ b/modules/markup/markdown/transform_codespan.go @@ -48,7 +48,7 @@ func (r *HTMLRenderer) renderCodeSpan(w util.BufWriter, source []byte, n ast.Nod return ast.WalkContinue, nil } -func (g *ASTTransformer) transformCodeSpan(ctx *markup.RenderContext, v *ast.CodeSpan, reader text.Reader) { +func (g *ASTTransformer) transformCodeSpan(_ *markup.RenderContext, v *ast.CodeSpan, reader text.Reader) { colorContent := v.Text(reader.Source()) if matchColor(strings.ToLower(string(colorContent))) { v.AppendChild(v, NewColorPreview(colorContent)) diff --git a/modules/packages/cran/metadata.go b/modules/packages/cran/metadata.go index 24e6f323af..0b0bfb07c6 100644 --- a/modules/packages/cran/metadata.go +++ b/modules/packages/cran/metadata.go @@ -185,8 +185,6 @@ func ParseDescription(r io.Reader) (*Package, error) { } func setField(p *Package, data string) error { - const listDelimiter = ", " - if data == "" { return nil } @@ 
-215,19 +213,19 @@ func setField(p *Package, data string) error { case "Description": p.Metadata.Description = value case "URL": - p.Metadata.ProjectURL = splitAndTrim(value, listDelimiter) + p.Metadata.ProjectURL = splitAndTrim(value) case "License": p.Metadata.License = value case "Author": - p.Metadata.Authors = splitAndTrim(authorReplacePattern.ReplaceAllString(value, ""), listDelimiter) + p.Metadata.Authors = splitAndTrim(authorReplacePattern.ReplaceAllString(value, "")) case "Depends": - p.Metadata.Depends = splitAndTrim(value, listDelimiter) + p.Metadata.Depends = splitAndTrim(value) case "Imports": - p.Metadata.Imports = splitAndTrim(value, listDelimiter) + p.Metadata.Imports = splitAndTrim(value) case "Suggests": - p.Metadata.Suggests = splitAndTrim(value, listDelimiter) + p.Metadata.Suggests = splitAndTrim(value) case "LinkingTo": - p.Metadata.LinkingTo = splitAndTrim(value, listDelimiter) + p.Metadata.LinkingTo = splitAndTrim(value) case "NeedsCompilation": p.Metadata.NeedsCompilation = value == "yes" } @@ -235,8 +233,8 @@ func setField(p *Package, data string) error { return nil } -func splitAndTrim(s, sep string) []string { - items := strings.Split(s, sep) +func splitAndTrim(s string) []string { + items := strings.Split(s, ", ") for i := range items { items[i] = strings.TrimSpace(items[i]) } diff --git a/modules/setting/config_env.go b/modules/setting/config_env.go index 522e360303..fa0100dba2 100644 --- a/modules/setting/config_env.go +++ b/modules/setting/config_env.go @@ -97,7 +97,7 @@ func decodeEnvSectionKey(encoded string) (ok bool, section, key string) { // decodeEnvironmentKey decode the environment key to section and key // The environment key is in the form of GITEA__SECTION__KEY or GITEA__SECTION__KEY__FILE -func decodeEnvironmentKey(prefixRegexp *regexp.Regexp, suffixFile, envKey string) (ok bool, section, key string, useFileValue bool) { +func decodeEnvironmentKey(prefixRegexp *regexp.Regexp, suffixFile, envKey string) (ok bool, section, key string, useFileValue bool) { //nolint:unparam if strings.HasSuffix(envKey, suffixFile) { useFileValue = true envKey = envKey[:len(envKey)-len(suffixFile)] diff --git a/modules/setting/storage.go b/modules/setting/storage.go index c082579d39..8ee5c0f0ab 100644 --- a/modules/setting/storage.go +++ b/modules/setting/storage.go @@ -122,7 +122,7 @@ const ( targetSecIsSec // target section is from the name seciont [name] ) -func getStorageSectionByType(rootCfg ConfigProvider, typ string) (ConfigSection, targetSecType, error) { +func getStorageSectionByType(rootCfg ConfigProvider, typ string) (ConfigSection, targetSecType, error) { //nolint:unparam targetSec, err := rootCfg.GetSection(storageSectionName + "." 
+ typ) if err != nil { if !IsValidStorageType(StorageType(typ)) { diff --git a/modules/util/keypair.go b/modules/util/keypair.go index 8b86c142af..07f27bd1ba 100644 --- a/modules/util/keypair.go +++ b/modules/util/keypair.go @@ -15,10 +15,7 @@ import ( // GenerateKeyPair generates a public and private keypair func GenerateKeyPair(bits int) (string, string, error) { priv, _ := rsa.GenerateKey(rand.Reader, bits) - privPem, err := pemBlockForPriv(priv) - if err != nil { - return "", "", err - } + privPem := pemBlockForPriv(priv) pubPem, err := pemBlockForPub(&priv.PublicKey) if err != nil { return "", "", err @@ -26,12 +23,12 @@ func GenerateKeyPair(bits int) (string, string, error) { return privPem, pubPem, nil } -func pemBlockForPriv(priv *rsa.PrivateKey) (string, error) { +func pemBlockForPriv(priv *rsa.PrivateKey) string { privBytes := pem.EncodeToMemory(&pem.Block{ Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(priv), }) - return string(privBytes), nil + return string(privBytes) } func pemBlockForPub(pub *rsa.PublicKey) (string, error) { diff --git a/routers/api/actions/artifacts.go b/routers/api/actions/artifacts.go index 7225d67135..6aa0ecaaec 100644 --- a/routers/api/actions/artifacts.go +++ b/routers/api/actions/artifacts.go @@ -241,16 +241,12 @@ func (ar artifactRoutes) uploadArtifact(ctx *ArtifactContext) { } // get upload file size - fileRealTotalSize, contentLength, err := getUploadFileSize(ctx) - if err != nil { - log.Error("Error get upload file size: %v", err) - ctx.Error(http.StatusInternalServerError, "Error get upload file size") - return - } + fileRealTotalSize, contentLength := getUploadFileSize(ctx) // get artifact retention days expiredDays := setting.Actions.ArtifactRetentionDays if queryRetentionDays := ctx.Req.URL.Query().Get("retentionDays"); queryRetentionDays != "" { + var err error expiredDays, err = strconv.ParseInt(queryRetentionDays, 10, 64) if err != nil { log.Error("Error parse retention days: %v", err) diff --git a/routers/api/actions/artifacts_utils.go b/routers/api/actions/artifacts_utils.go index d2e7ccaea1..db602f1e14 100644 --- a/routers/api/actions/artifacts_utils.go +++ b/routers/api/actions/artifacts_utils.go @@ -43,7 +43,7 @@ func validateRunID(ctx *ArtifactContext) (*actions.ActionTask, int64, bool) { return task, runID, true } -func validateRunIDV4(ctx *ArtifactContext, rawRunID string) (*actions.ActionTask, int64, bool) { +func validateRunIDV4(ctx *ArtifactContext, rawRunID string) (*actions.ActionTask, int64, bool) { //nolint:unparam task := ctx.ActionTask runID, err := strconv.ParseInt(rawRunID, 10, 64) if err != nil || task.Job.RunID != runID { @@ -84,11 +84,11 @@ func parseArtifactItemPath(ctx *ArtifactContext) (string, string, bool) { // getUploadFileSize returns the size of the file to be uploaded. // The raw size is the size of the file as reported by the header X-TFS-FileLength. 
-func getUploadFileSize(ctx *ArtifactContext) (int64, int64, error) { +func getUploadFileSize(ctx *ArtifactContext) (int64, int64) { contentLength := ctx.Req.ContentLength xTfsLength, _ := strconv.ParseInt(ctx.Req.Header.Get(artifactXTfsFileLengthHeader), 10, 64) if xTfsLength > 0 { - return xTfsLength, contentLength, nil + return xTfsLength, contentLength } - return contentLength, contentLength, nil + return contentLength, contentLength } diff --git a/routers/api/packages/container/blob.go b/routers/api/packages/container/blob.go index f2d63297c1..9e3a47076c 100644 --- a/routers/api/packages/container/blob.go +++ b/routers/api/packages/container/blob.go @@ -26,7 +26,7 @@ var uploadVersionMutex sync.Mutex // saveAsPackageBlob creates a package blob from an upload // The uploaded blob gets stored in a special upload version to link them to the package/image -func saveAsPackageBlob(ctx context.Context, hsr packages_module.HashedSizeReader, pci *packages_service.PackageCreationInfo) (*packages_model.PackageBlob, error) { +func saveAsPackageBlob(ctx context.Context, hsr packages_module.HashedSizeReader, pci *packages_service.PackageCreationInfo) (*packages_model.PackageBlob, error) { //nolint:unparam pb := packages_service.NewPackageBlob(hsr) exists := false diff --git a/routers/api/packages/nuget/nuget.go b/routers/api/packages/nuget/nuget.go index 3633d0d007..0d7212d7f7 100644 --- a/routers/api/packages/nuget/nuget.go +++ b/routers/api/packages/nuget/nuget.go @@ -36,7 +36,7 @@ func apiError(ctx *context.Context, status int, obj any) { }) } -func xmlResponse(ctx *context.Context, status int, obj any) { +func xmlResponse(ctx *context.Context, status int, obj any) { //nolint:unparam ctx.Resp.Header().Set("Content-Type", "application/atom+xml; charset=utf-8") ctx.Resp.WriteHeader(status) if _, err := ctx.Resp.Write([]byte(xml.Header)); err != nil { diff --git a/routers/api/v1/repo/compare.go b/routers/api/v1/repo/compare.go index cfd61d768c..429145c714 100644 --- a/routers/api/v1/repo/compare.go +++ b/routers/api/v1/repo/compare.go @@ -64,7 +64,7 @@ func CompareDiff(ctx *context.APIContext) { } } - _, _, headGitRepo, ci, _, _ := parseCompareInfo(ctx, api.CreatePullRequestOption{ + _, headGitRepo, ci, _, _ := parseCompareInfo(ctx, api.CreatePullRequestOption{ Base: infos[0], Head: infos[1], }) diff --git a/routers/api/v1/repo/pull.go b/routers/api/v1/repo/pull.go index b8314d6eb0..725a33929f 100644 --- a/routers/api/v1/repo/pull.go +++ b/routers/api/v1/repo/pull.go @@ -406,7 +406,7 @@ func CreatePullRequest(ctx *context.APIContext) { ) // Get repo/branch information - _, headRepo, headGitRepo, compareInfo, baseBranch, headBranch := parseCompareInfo(ctx, form) + headRepo, headGitRepo, compareInfo, baseBranch, headBranch := parseCompareInfo(ctx, form) if ctx.Written() { return } @@ -1051,7 +1051,7 @@ func MergePullRequest(ctx *context.APIContext) { ctx.Status(http.StatusOK) } -func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) (*user_model.User, *repo_model.Repository, *git.Repository, *git.CompareInfo, string, string) { +func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) (*repo_model.Repository, *git.Repository, *git.CompareInfo, string, string) { baseRepo := ctx.Repo.Repository // Get compared branches information @@ -1084,14 +1084,14 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) } else { ctx.Error(http.StatusInternalServerError, "GetUserByName", err) } - return nil, nil, nil, nil, "", "" + return 
nil, nil, nil, "", "" } headBranch = headInfos[1] // The head repository can also point to the same repo isSameRepo = ctx.Repo.Owner.ID == headUser.ID } else { ctx.NotFound() - return nil, nil, nil, nil, "", "" + return nil, nil, nil, "", "" } ctx.Repo.PullRequest.SameRepo = isSameRepo @@ -1099,7 +1099,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) // Check if base branch is valid. if !ctx.Repo.GitRepo.IsBranchExist(baseBranch) && !ctx.Repo.GitRepo.IsTagExist(baseBranch) { ctx.NotFound("BaseNotExist") - return nil, nil, nil, nil, "", "" + return nil, nil, nil, "", "" } // Check if current user has fork of repository or in the same repository. @@ -1107,7 +1107,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) if headRepo == nil && !isSameRepo { log.Trace("parseCompareInfo[%d]: does not have fork or in same repository", baseRepo.ID) ctx.NotFound("GetForkedRepo") - return nil, nil, nil, nil, "", "" + return nil, nil, nil, "", "" } var headGitRepo *git.Repository @@ -1118,7 +1118,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) headGitRepo, err = gitrepo.OpenRepository(ctx, headRepo) if err != nil { ctx.Error(http.StatusInternalServerError, "OpenRepository", err) - return nil, nil, nil, nil, "", "" + return nil, nil, nil, "", "" } } @@ -1127,7 +1127,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) if err != nil { headGitRepo.Close() ctx.Error(http.StatusInternalServerError, "GetUserRepoPermission", err) - return nil, nil, nil, nil, "", "" + return nil, nil, nil, "", "" } if !permBase.CanReadIssuesOrPulls(true) || !permBase.CanRead(unit.TypeCode) { if log.IsTrace() { @@ -1138,7 +1138,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) } headGitRepo.Close() ctx.NotFound("Can't read pulls or can't read UnitTypeCode") - return nil, nil, nil, nil, "", "" + return nil, nil, nil, "", "" } // user should have permission to read headrepo's codes @@ -1146,7 +1146,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) if err != nil { headGitRepo.Close() ctx.Error(http.StatusInternalServerError, "GetUserRepoPermission", err) - return nil, nil, nil, nil, "", "" + return nil, nil, nil, "", "" } if !permHead.CanRead(unit.TypeCode) { if log.IsTrace() { @@ -1157,24 +1157,24 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) } headGitRepo.Close() ctx.NotFound("Can't read headRepo UnitTypeCode") - return nil, nil, nil, nil, "", "" + return nil, nil, nil, "", "" } // Check if head branch is valid. 
if !headGitRepo.IsBranchExist(headBranch) && !headGitRepo.IsTagExist(headBranch) { headGitRepo.Close() ctx.NotFound() - return nil, nil, nil, nil, "", "" + return nil, nil, nil, "", "" } compareInfo, err := headGitRepo.GetCompareInfo(repo_model.RepoPath(baseRepo.Owner.Name, baseRepo.Name), baseBranch, headBranch, false, false) if err != nil { headGitRepo.Close() ctx.Error(http.StatusInternalServerError, "GetCompareInfo", err) - return nil, nil, nil, nil, "", "" + return nil, nil, nil, "", "" } - return headUser, headRepo, headGitRepo, compareInfo, baseBranch, headBranch + return headRepo, headGitRepo, compareInfo, baseBranch, headBranch } // UpdatePullRequest merge PR's baseBranch into headBranch diff --git a/routers/private/hook_pre_receive.go b/routers/private/hook_pre_receive.go index d12a762db6..456a288b00 100644 --- a/routers/private/hook_pre_receive.go +++ b/routers/private/hook_pre_receive.go @@ -434,7 +434,7 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID string, r } } -func preReceiveTag(ctx *preReceiveContext, oldCommitID, newCommitID string, refFullName git.RefName) { +func preReceiveTag(ctx *preReceiveContext, oldCommitID, newCommitID string, refFullName git.RefName) { //nolint:unparam if !ctx.AssertCanWriteCode() { return } @@ -470,7 +470,7 @@ func preReceiveTag(ctx *preReceiveContext, oldCommitID, newCommitID string, refF } } -func preReceiveFor(ctx *preReceiveContext, oldCommitID, newCommitID string, refFullName git.RefName) { +func preReceiveFor(ctx *preReceiveContext, oldCommitID, newCommitID string, refFullName git.RefName) { //nolint:unparam if !ctx.AssertCreatePullRequest() { return } diff --git a/routers/web/admin/config.go b/routers/web/admin/config.go index 2f5f17e201..22f260eded 100644 --- a/routers/web/admin/config.go +++ b/routers/web/admin/config.go @@ -183,7 +183,7 @@ func ChangeConfig(ctx *context.Context) { value := ctx.FormString("value") cfg := setting.Config() - marshalBool := func(v string) (string, error) { + marshalBool := func(v string) (string, error) { //nolint:unparam if b, _ := strconv.ParseBool(v); b { return "true", nil } diff --git a/routers/web/repo/badges/badges.go b/routers/web/repo/badges/badges.go index f240d30a31..a2306d5836 100644 --- a/routers/web/repo/badges/badges.go +++ b/routers/web/repo/badges/badges.go @@ -39,7 +39,7 @@ func redirectToBadge(ctx *context_module.Context, label, text, color string) { ctx.Redirect(getBadgeURL(ctx, label, text, color)) } -func errorBadge(ctx *context_module.Context, label, text string) { +func errorBadge(ctx *context_module.Context, label, text string) { //nolint:unparam ctx.Redirect(getBadgeURL(ctx, label, text, "crimson")) } diff --git a/services/actions/notifier_helper.go b/services/actions/notifier_helper.go index 78f413c214..4196cf7686 100644 --- a/services/actions/notifier_helper.go +++ b/services/actions/notifier_helper.go @@ -474,7 +474,7 @@ func handleSchedules( detectedWorkflows []*actions_module.DetectedWorkflow, commit *git.Commit, input *notifyInput, - ref string, + _ string, ) error { branch, err := commit.GetBranchName() if err != nil { diff --git a/services/f3/driver/repository.go b/services/f3/driver/repository.go index 73e81a39b1..203622ad9b 100644 --- a/services/f3/driver/repository.go +++ b/services/f3/driver/repository.go @@ -92,7 +92,7 @@ func (o *repository) GetRepositoryPushURL() string { return o.getURL() } -func newRepository(ctx context.Context) generic.NodeDriverInterface { +func newRepository(_ context.Context) generic.NodeDriverInterface { r := 
&repository{ f: &f3.Repository{}, } diff --git a/services/f3/driver/tests/options.go b/services/f3/driver/tests/options.go index f115fd80f6..adaa1da588 100644 --- a/services/f3/driver/tests/options.go +++ b/services/f3/driver/tests/options.go @@ -14,7 +14,7 @@ import ( "code.forgejo.org/f3/gof3/v3/options" ) -func newTestOptions(t *testing.T) options.Interface { +func newTestOptions(_ *testing.T) options.Interface { o := options.GetFactory(driver_options.Name)().(*driver_options.Options) o.SetLogger(util.NewF3Logger(nil, forgejo_log.GetLogger(forgejo_log.DEFAULT))) return o diff --git a/services/mailer/mail_admin_new_user.go b/services/mailer/mail_admin_new_user.go index 54287b1b7e..0713de8a95 100644 --- a/services/mailer/mail_admin_new_user.go +++ b/services/mailer/mail_admin_new_user.go @@ -45,7 +45,7 @@ func MailNewUser(ctx context.Context, u *user_model.User) { } } -func mailNewUser(ctx context.Context, u *user_model.User, lang string, tos []string) { +func mailNewUser(_ context.Context, u *user_model.User, lang string, tos []string) { locale := translation.NewLocale(lang) manageUserURL := setting.AppURL + "admin/users/" + strconv.FormatInt(u.ID, 10) diff --git a/services/pull/merge.go b/services/pull/merge.go index 0d37daa6ed..75b22e00b3 100644 --- a/services/pull/merge.go +++ b/services/pull/merge.go @@ -252,7 +252,7 @@ func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.U } // doMergeAndPush performs the merge operation without changing any pull information in database and pushes it up to the base repository -func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, pushTrigger repo_module.PushTrigger) (string, error) { +func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, pushTrigger repo_module.PushTrigger) (string, error) { //nolint:unparam // Clone base repo. mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, expectedHeadCommitID) if err != nil { diff --git a/services/remote/promote.go b/services/remote/promote.go index 5402c946d7..eb41ace462 100644 --- a/services/remote/promote.go +++ b/services/remote/promote.go @@ -84,7 +84,7 @@ func MaybePromoteRemoteUser(ctx context.Context, source *auth_model.Source, logi return true, reason, nil } -func getRemoteUserToPromote(ctx context.Context, source *auth_model.Source, loginName, email string) (*user_model.User, Reason, error) { +func getRemoteUserToPromote(ctx context.Context, source *auth_model.Source, loginName, email string) (*user_model.User, Reason, error) { //nolint:unparam if !source.IsOAuth2() { return nil, NewReason(log.DEBUG, ReasonNotAuth2, "source %v is not OAuth2", source), nil } From 80357bbcb318264082c1c9f58c98c368aa844884 Mon Sep 17 00:00:00 2001 From: Lunny Xiao Date: Wed, 12 Jun 2024 18:22:01 +0800 Subject: [PATCH 04/18] Fix adopt repository has empty object name in database (#31333) Fix #31330 Fix #31311 A workaround to fix the old database is to update object_format_name to `sha1` if it's empty or null. 
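For example (a rough sketch only; it assumes the default table and column
names `repository.object_format_name` and should be adapted to the actual
schema):

    UPDATE repository SET object_format_name = 'sha1'
     WHERE object_format_name IS NULL OR object_format_name = '';
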
(cherry picked from commit 1968c2222dcf47ebd1697afb4e79a81e74702d31) --- modules/repository/branch.go | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/repository/branch.go b/modules/repository/branch.go index a3fca7c7ce..2bf9930f19 100644 --- a/modules/repository/branch.go +++ b/modules/repository/branch.go @@ -45,6 +45,7 @@ func SyncRepoBranchesWithRepo(ctx context.Context, repo *repo_model.Repository, if err != nil { return 0, fmt.Errorf("UpdateRepository: %w", err) } + repo.ObjectFormatName = objFmt.Name() // keep consistent with db allBranches := container.Set[string]{} { From 1627d3a53f859e37920da4aadf878eeb8435f78b Mon Sep 17 00:00:00 2001 From: Rowan Bohde Date: Wed, 12 Jun 2024 06:34:35 -0500 Subject: [PATCH 05/18] Reduce memory usage for chunked artifact uploads to MinIO (#31325) When using the MinIO storage driver for Actions Artifacts, we found that the chunked artifact required significantly more memory usage to both upload and merge than the local storage driver. This seems to be related to hardcoding a value of `-1` for the size to the MinIO client [which has a warning about memory usage in the respective docs](https://pkg.go.dev/github.com/minio/minio-go/v7#Client.PutObject). Specifying the size in both the upload and merge case reduces memory usage of the MinIO client. Co-authored-by: Kyle D (cherry picked from commit 45dbeb5600d1f552c0134721fe49e8fd1099b5a4) --- routers/api/actions/artifacts_chunks.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/routers/api/actions/artifacts_chunks.go b/routers/api/actions/artifacts_chunks.go index 3a81724b3a..b0c96585cb 100644 --- a/routers/api/actions/artifacts_chunks.go +++ b/routers/api/actions/artifacts_chunks.go @@ -39,7 +39,7 @@ func saveUploadChunkBase(st storage.ObjectStorage, ctx *ArtifactContext, r = io.TeeReader(r, hasher) } // save chunk to storage - writtenSize, err := st.Save(storagePath, r, -1) + writtenSize, err := st.Save(storagePath, r, contentSize) if err != nil { return -1, fmt.Errorf("save chunk to storage error: %v", err) } @@ -208,7 +208,7 @@ func mergeChunksForArtifact(ctx *ArtifactContext, chunks []*chunkFileItem, st st // save merged file storagePath := fmt.Sprintf("%d/%d/%d.%s", artifact.RunID%255, artifact.ID%255, time.Now().UnixNano(), extension) - written, err := st.Save(storagePath, mergedReader, -1) + written, err := st.Save(storagePath, mergedReader, artifact.FileCompressedSize) if err != nil { return fmt.Errorf("save merged file error: %v", err) } From 62ed5f2db5a1f282012f46467c7f914606353914 Mon Sep 17 00:00:00 2001 From: Lunny Xiao Date: Thu, 13 Jun 2024 06:35:46 +0800 Subject: [PATCH 06/18] Fix hash render end with colon (#31319) Fix a hash render problem like `: xxxxx` which is usually used in release notes. (cherry picked from commit 7115dce773e3021b3538ae360c4e7344d5bbf45b) --- modules/markup/html.go | 2 +- modules/markup/html_internal_test.go | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/modules/markup/html.go b/modules/markup/html.go index f73221a37f..93c72fdcb4 100644 --- a/modules/markup/html.go +++ b/modules/markup/html.go @@ -48,7 +48,7 @@ var ( // hashCurrentPattern matches string that represents a commit SHA, e.g. d8a994ef243349f321568f9e36d5c3f444b99cae // Although SHA1 hashes are 40 chars long, SHA256 are 64, the regex matches the hash from 7 to 64 chars in length // so that abbreviated hash links can be used as well. This matches git and GitHub usability. 
- hashCurrentPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([0-9a-f]{7,64})(?:\s|$|\)|\]|[.,](\s|$))`) + hashCurrentPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([0-9a-f]{7,64})(?:\s|$|\)|\]|[.,:](\s|$))`) // shortLinkPattern matches short but difficult to parse [[name|link|arg=test]] syntax shortLinkPattern = regexp.MustCompile(`\[\[(.*?)\]\](\w*)`) diff --git a/modules/markup/html_internal_test.go b/modules/markup/html_internal_test.go index 917f280c73..18088af0ca 100644 --- a/modules/markup/html_internal_test.go +++ b/modules/markup/html_internal_test.go @@ -381,6 +381,7 @@ func TestRegExp_sha1CurrentPattern(t *testing.T) { "(abcdefabcdefabcdefabcdefabcdefabcdefabcd)", "[abcdefabcdefabcdefabcdefabcdefabcdefabcd]", "abcdefabcdefabcdefabcdefabcdefabcdefabcd.", + "abcdefabcdefabcdefabcdefabcdefabcdefabcd:", } falseTestCases := []string{ "test", From 12e23ee19916f4ca3f10916be8bb8666fd0d6f58 Mon Sep 17 00:00:00 2001 From: 6543 <6543@obermui.de> Date: Thu, 13 Jun 2024 11:13:11 +0200 Subject: [PATCH 07/18] [Refactor] Unify repo search order by logic (#30876) have repo OrderBy definitions defined in one place and use a single type for OrderBy database options (cherry picked from commit bb04311b0b5b7a28f94c4bc409db1c4a04bcef17) --- models/db/search.go | 6 --- models/repo/repo_list.go | 25 ------------ models/repo/search.go | 72 +++++++++++++++++++++++++++++----- routers/api/v1/repo/repo.go | 2 +- routers/web/explore/repo.go | 42 ++++---------------- templates/swagger/v1_json.tmpl | 2 +- 6 files changed, 71 insertions(+), 78 deletions(-) diff --git a/models/db/search.go b/models/db/search.go index aa577f08e0..37565f45e1 100644 --- a/models/db/search.go +++ b/models/db/search.go @@ -18,12 +18,6 @@ const ( SearchOrderByRecentUpdated SearchOrderBy = "updated_unix DESC" SearchOrderByOldest SearchOrderBy = "created_unix ASC" SearchOrderByNewest SearchOrderBy = "created_unix DESC" - SearchOrderBySize SearchOrderBy = "size ASC" - SearchOrderBySizeReverse SearchOrderBy = "size DESC" - SearchOrderByGitSize SearchOrderBy = "git_size ASC" - SearchOrderByGitSizeReverse SearchOrderBy = "git_size DESC" - SearchOrderByLFSSize SearchOrderBy = "lfs_size ASC" - SearchOrderByLFSSizeReverse SearchOrderBy = "lfs_size DESC" SearchOrderByID SearchOrderBy = "id ASC" SearchOrderByIDReverse SearchOrderBy = "id DESC" SearchOrderByStars SearchOrderBy = "num_stars ASC" diff --git a/models/repo/repo_list.go b/models/repo/repo_list.go index 987c7df9b0..6cce2d33a3 100644 --- a/models/repo/repo_list.go +++ b/models/repo/repo_list.go @@ -205,31 +205,6 @@ type SearchRepoOptions struct { OnlyShowRelevant bool } -// SearchOrderBy is used to sort the result -type SearchOrderBy string - -func (s SearchOrderBy) String() string { - return string(s) -} - -// Strings for sorting result -const ( - SearchOrderByAlphabetically SearchOrderBy = "name ASC" - SearchOrderByAlphabeticallyReverse SearchOrderBy = "name DESC" - SearchOrderByLeastUpdated SearchOrderBy = "updated_unix ASC" - SearchOrderByRecentUpdated SearchOrderBy = "updated_unix DESC" - SearchOrderByOldest SearchOrderBy = "created_unix ASC" - SearchOrderByNewest SearchOrderBy = "created_unix DESC" - SearchOrderBySize SearchOrderBy = "size ASC" - SearchOrderBySizeReverse SearchOrderBy = "size DESC" - SearchOrderByID SearchOrderBy = "id ASC" - SearchOrderByIDReverse SearchOrderBy = "id DESC" - SearchOrderByStars SearchOrderBy = "num_stars ASC" - SearchOrderByStarsReverse SearchOrderBy = "num_stars DESC" - SearchOrderByForks SearchOrderBy = "num_forks ASC" - SearchOrderByForksReverse 
SearchOrderBy = "num_forks DESC" -) - // UserOwnedRepoCond returns user ownered repositories func UserOwnedRepoCond(userID int64) builder.Cond { return builder.Eq{ diff --git a/models/repo/search.go b/models/repo/search.go index 54d6dcfb44..c500d41be8 100644 --- a/models/repo/search.go +++ b/models/repo/search.go @@ -5,20 +5,72 @@ package repo import "code.gitea.io/gitea/models/db" +// Strings for sorting result +const ( + // only used for repos + SearchOrderByAlphabetically db.SearchOrderBy = "owner_name ASC, name ASC" + SearchOrderByAlphabeticallyReverse db.SearchOrderBy = "owner_name DESC, name DESC" + SearchOrderBySize db.SearchOrderBy = "size ASC" + SearchOrderBySizeReverse db.SearchOrderBy = "size DESC" + SearchOrderByGitSize db.SearchOrderBy = "git_size ASC" + SearchOrderByGitSizeReverse db.SearchOrderBy = "git_size DESC" + SearchOrderByLFSSize db.SearchOrderBy = "lfs_size ASC" + SearchOrderByLFSSizeReverse db.SearchOrderBy = "lfs_size DESC" + // alias as also used elsewhere + SearchOrderByLeastUpdated db.SearchOrderBy = db.SearchOrderByLeastUpdated + SearchOrderByRecentUpdated db.SearchOrderBy = db.SearchOrderByRecentUpdated + SearchOrderByOldest db.SearchOrderBy = db.SearchOrderByOldest + SearchOrderByNewest db.SearchOrderBy = db.SearchOrderByNewest + SearchOrderByID db.SearchOrderBy = db.SearchOrderByID + SearchOrderByIDReverse db.SearchOrderBy = db.SearchOrderByIDReverse + SearchOrderByStars db.SearchOrderBy = db.SearchOrderByStars + SearchOrderByStarsReverse db.SearchOrderBy = db.SearchOrderByStarsReverse + SearchOrderByForks db.SearchOrderBy = db.SearchOrderByForks + SearchOrderByForksReverse db.SearchOrderBy = db.SearchOrderByForksReverse +) + // SearchOrderByMap represents all possible search order var SearchOrderByMap = map[string]map[string]db.SearchOrderBy{ "asc": { - "alpha": "owner_name ASC, name ASC", - "created": db.SearchOrderByOldest, - "updated": db.SearchOrderByLeastUpdated, - "size": db.SearchOrderBySize, - "id": db.SearchOrderByID, + "alpha": SearchOrderByAlphabetically, + "created": SearchOrderByOldest, + "updated": SearchOrderByLeastUpdated, + "size": SearchOrderBySize, + "git_size": SearchOrderByGitSize, + "lfs_size": SearchOrderByLFSSize, + "id": SearchOrderByID, + "stars": SearchOrderByStars, + "forks": SearchOrderByForks, }, "desc": { - "alpha": "owner_name DESC, name DESC", - "created": db.SearchOrderByNewest, - "updated": db.SearchOrderByRecentUpdated, - "size": db.SearchOrderBySizeReverse, - "id": db.SearchOrderByIDReverse, + "alpha": SearchOrderByAlphabeticallyReverse, + "created": SearchOrderByNewest, + "updated": SearchOrderByRecentUpdated, + "size": SearchOrderBySizeReverse, + "git_size": SearchOrderByGitSizeReverse, + "lfs_size": SearchOrderByLFSSizeReverse, + "id": SearchOrderByIDReverse, + "stars": SearchOrderByStarsReverse, + "forks": SearchOrderByForksReverse, }, } + +// SearchOrderByFlatMap is similar to SearchOrderByMap but use human language keywords +// to decide between asc and desc +var SearchOrderByFlatMap = map[string]db.SearchOrderBy{ + "newest": SearchOrderByMap["desc"]["created"], + "oldest": SearchOrderByMap["asc"]["created"], + "leastupdate": SearchOrderByMap["asc"]["updated"], + "reversealphabetically": SearchOrderByMap["desc"]["alpha"], + "alphabetically": SearchOrderByMap["asc"]["alpha"], + "reversesize": SearchOrderByMap["desc"]["size"], + "size": SearchOrderByMap["asc"]["size"], + "reversegitsize": SearchOrderByMap["desc"]["git_size"], + "gitsize": SearchOrderByMap["asc"]["git_size"], + "reverselfssize": 
SearchOrderByMap["desc"]["lfs_size"], + "lfssize": SearchOrderByMap["asc"]["lfs_size"], + "moststars": SearchOrderByMap["desc"]["stars"], + "feweststars": SearchOrderByMap["asc"]["stars"], + "mostforks": SearchOrderByMap["desc"]["forks"], + "fewestforks": SearchOrderByMap["asc"]["forks"], +} diff --git a/routers/api/v1/repo/repo.go b/routers/api/v1/repo/repo.go index 7c0289d4a0..41368146da 100644 --- a/routers/api/v1/repo/repo.go +++ b/routers/api/v1/repo/repo.go @@ -107,7 +107,7 @@ func Search(ctx *context.APIContext) { // - name: sort // in: query // description: sort repos by attribute. Supported values are - // "alpha", "created", "updated", "size", and "id". + // "alpha", "created", "updated", "size", "git_size", "lfs_size", "stars", "forks" and "id". // Default is "alpha" // type: string // - name: order diff --git a/routers/web/explore/repo.go b/routers/web/explore/repo.go index cf7381512b..1d5fb2c149 100644 --- a/routers/web/explore/repo.go +++ b/routers/web/explore/repo.go @@ -6,6 +6,7 @@ package explore import ( "fmt" "net/http" + "strings" "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" @@ -57,47 +58,18 @@ func RenderRepoSearch(ctx *context.Context, opts *RepoSearchOptions) { orderBy db.SearchOrderBy ) - sortOrder := ctx.FormString("sort") + sortOrder := strings.ToLower(ctx.FormString("sort")) if sortOrder == "" { sortOrder = setting.UI.ExploreDefaultSort } - ctx.Data["SortType"] = sortOrder - switch sortOrder { - case "newest": - orderBy = db.SearchOrderByNewest - case "oldest": - orderBy = db.SearchOrderByOldest - case "leastupdate": - orderBy = db.SearchOrderByLeastUpdated - case "reversealphabetically": - orderBy = db.SearchOrderByAlphabeticallyReverse - case "alphabetically": - orderBy = db.SearchOrderByAlphabetically - case "reversesize": - orderBy = db.SearchOrderBySizeReverse - case "size": - orderBy = db.SearchOrderBySize - case "reversegitsize": - orderBy = db.SearchOrderByGitSizeReverse - case "gitsize": - orderBy = db.SearchOrderByGitSize - case "reverselfssize": - orderBy = db.SearchOrderByLFSSizeReverse - case "lfssize": - orderBy = db.SearchOrderByLFSSize - case "moststars": - orderBy = db.SearchOrderByStarsReverse - case "feweststars": - orderBy = db.SearchOrderByStars - case "mostforks": - orderBy = db.SearchOrderByForksReverse - case "fewestforks": - orderBy = db.SearchOrderByForks - default: - ctx.Data["SortType"] = "recentupdate" + if order, ok := repo_model.SearchOrderByFlatMap[sortOrder]; ok { + orderBy = order + } else { + sortOrder = "recentupdate" orderBy = db.SearchOrderByRecentUpdated } + ctx.Data["SortType"] = sortOrder keyword := ctx.FormTrim("q") diff --git a/templates/swagger/v1_json.tmpl b/templates/swagger/v1_json.tmpl index f0a48495a9..002d81f0bb 100644 --- a/templates/swagger/v1_json.tmpl +++ b/templates/swagger/v1_json.tmpl @@ -3616,7 +3616,7 @@ }, { "type": "string", - "description": "sort repos by attribute. Supported values are \"alpha\", \"created\", \"updated\", \"size\", and \"id\". Default is \"alpha\"", + "description": "sort repos by attribute. Supported values are \"alpha\", \"created\", \"updated\", \"size\", \"git_size\", \"lfs_size\", \"stars\", \"forks\" and \"id\". 
Default is \"alpha\"", "name": "sort", "in": "query" }, From b5ea092964f0c305ca89a9aace849fd116895897 Mon Sep 17 00:00:00 2001 From: yp05327 <576951401@qq.com> Date: Thu, 13 Jun 2024 18:42:07 +0900 Subject: [PATCH 08/18] Fix PullRequestList.GetIssueIDs's logic (#31352) fix a bug from #30490 `prs.GetIssueIDs()` will also be used in other places, e.g. `InvalidateCodeComments` so we should not add `if pr.Issue == nil` in it, or if `pr.Issue` is already loaded, you will not get the issueID in the results list and this is not an expected result. So this will caused a bug: before calling `InvalidateCodeComments`, all `pr.Issues` in `prs` are loaded, so `issueIDs` in this function will always be `[]`. ![image](https://github.com/go-gitea/gitea/assets/18380374/ef94d9d2-0bf9-455a-abd6-4d5e6497db7c) (cherry picked from commit e61e9a36b7117bab2cb122a95d606a86527ed45d) --- models/issues/pull_list.go | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/models/issues/pull_list.go b/models/issues/pull_list.go index 9b838df5c3..f3970fa93b 100644 --- a/models/issues/pull_list.go +++ b/models/issues/pull_list.go @@ -198,8 +198,10 @@ func (prs PullRequestList) LoadIssues(ctx context.Context) (IssueList, error) { return nil, nil } - // Load issues. - issueIDs := prs.GetIssueIDs() + // Load issues which are not loaded + issueIDs := container.FilterSlice(prs, func(pr *PullRequest) (int64, bool) { + return pr.IssueID, pr.Issue == nil && pr.IssueID > 0 + }) issues := make(map[int64]*Issue, len(issueIDs)) if err := db.GetEngine(ctx). In("id", issueIDs). @@ -235,10 +237,7 @@ func (prs PullRequestList) LoadIssues(ctx context.Context) (IssueList, error) { // GetIssueIDs returns all issue ids func (prs PullRequestList) GetIssueIDs() []int64 { return container.FilterSlice(prs, func(pr *PullRequest) (int64, bool) { - if pr.Issue == nil { - return pr.IssueID, pr.IssueID > 0 - } - return 0, false + return pr.IssueID, pr.IssueID > 0 }) } From b640ac4660ece3fc6b9f5ff3c7f25860691b6eaf Mon Sep 17 00:00:00 2001 From: Oleksandr Redko Date: Fri, 14 Jun 2024 04:26:33 +0300 Subject: [PATCH 09/18] Refactor to use UnsafeStringToBytes (#31358) The PR replaces all `goldmark/util.BytesToReadOnlyString` with `util.UnsafeBytesToString`, `goldmark/util.StringToReadOnlyBytes` with `util.UnsafeStringToBytes`. This removes one `TODO`. Co-authored-by: wxiaoguang (cherry picked from commit 1761459ebc7eb6d432eced093b4583425a5c5d4b) --- modules/markup/markdown/prefixed_id.go | 6 +++--- modules/markup/markdown/transform_heading.go | 6 +++--- modules/references/references.go | 5 ++--- modules/system/db.go | 7 +++---- modules/util/sanitize.go | 6 ++---- modules/util/string.go | 2 +- 6 files changed, 14 insertions(+), 18 deletions(-) diff --git a/modules/markup/markdown/prefixed_id.go b/modules/markup/markdown/prefixed_id.go index 9c60949202..63d7fadc0a 100644 --- a/modules/markup/markdown/prefixed_id.go +++ b/modules/markup/markdown/prefixed_id.go @@ -9,9 +9,9 @@ import ( "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/markup/common" + "code.gitea.io/gitea/modules/util" "github.com/yuin/goldmark/ast" - "github.com/yuin/goldmark/util" ) type prefixedIDs struct { @@ -36,7 +36,7 @@ func (p *prefixedIDs) GenerateWithDefault(value, dft []byte) []byte { if !bytes.HasPrefix(result, []byte("user-content-")) { result = append([]byte("user-content-"), result...) 
} - if p.values.Add(util.BytesToReadOnlyString(result)) { + if p.values.Add(util.UnsafeBytesToString(result)) { return result } for i := 1; ; i++ { @@ -49,7 +49,7 @@ func (p *prefixedIDs) GenerateWithDefault(value, dft []byte) []byte { // Put puts a given element id to the used ids table. func (p *prefixedIDs) Put(value []byte) { - p.values.Add(util.BytesToReadOnlyString(value)) + p.values.Add(util.UnsafeBytesToString(value)) } func newPrefixedIDs() *prefixedIDs { diff --git a/modules/markup/markdown/transform_heading.go b/modules/markup/markdown/transform_heading.go index 6f38abfad9..6d48f34d93 100644 --- a/modules/markup/markdown/transform_heading.go +++ b/modules/markup/markdown/transform_heading.go @@ -7,10 +7,10 @@ import ( "fmt" "code.gitea.io/gitea/modules/markup" + "code.gitea.io/gitea/modules/util" "github.com/yuin/goldmark/ast" "github.com/yuin/goldmark/text" - "github.com/yuin/goldmark/util" ) func (g *ASTTransformer) transformHeading(_ *markup.RenderContext, v *ast.Heading, reader text.Reader, tocList *[]markup.Header) { @@ -21,11 +21,11 @@ func (g *ASTTransformer) transformHeading(_ *markup.RenderContext, v *ast.Headin } txt := v.Text(reader.Source()) header := markup.Header{ - Text: util.BytesToReadOnlyString(txt), + Text: util.UnsafeBytesToString(txt), Level: v.Level, } if id, found := v.AttributeString("id"); found { - header.ID = util.BytesToReadOnlyString(id.([]byte)) + header.ID = util.UnsafeBytesToString(id.([]byte)) } *tocList = append(*tocList, header) g.applyElementDir(v) diff --git a/modules/references/references.go b/modules/references/references.go index fd10992e8e..c61d06d5dc 100644 --- a/modules/references/references.go +++ b/modules/references/references.go @@ -14,8 +14,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/markup/mdstripper" "code.gitea.io/gitea/modules/setting" - - "github.com/yuin/goldmark/util" + "code.gitea.io/gitea/modules/util" ) var ( @@ -341,7 +340,7 @@ func FindRenderizableReferenceNumeric(content string, prOnly, crossLinkOnly bool return false, nil } } - r := getCrossReference(util.StringToReadOnlyBytes(content), match[2], match[3], false, prOnly) + r := getCrossReference(util.UnsafeStringToBytes(content), match[2], match[3], false, prOnly) if r == nil { return false, nil } diff --git a/modules/system/db.go b/modules/system/db.go index 05e9de0ae8..17178283d9 100644 --- a/modules/system/db.go +++ b/modules/system/db.go @@ -8,8 +8,7 @@ import ( "code.gitea.io/gitea/models/system" "code.gitea.io/gitea/modules/json" - - "github.com/yuin/goldmark/util" + "code.gitea.io/gitea/modules/util" ) // DBStore can be used to store app state items in local filesystem @@ -24,7 +23,7 @@ func (f *DBStore) Get(ctx context.Context, item StateItem) error { if content == "" { return nil } - return json.Unmarshal(util.StringToReadOnlyBytes(content), item) + return json.Unmarshal(util.UnsafeStringToBytes(content), item) } // Set saves the state item @@ -33,5 +32,5 @@ func (f *DBStore) Set(ctx context.Context, item StateItem) error { if err != nil { return err } - return system.SaveAppStateContent(ctx, item.Name(), util.BytesToReadOnlyString(b)) + return system.SaveAppStateContent(ctx, item.Name(), util.UnsafeBytesToString(b)) } diff --git a/modules/util/sanitize.go b/modules/util/sanitize.go index f1ea2574f1..0dd8b342a2 100644 --- a/modules/util/sanitize.go +++ b/modules/util/sanitize.go @@ -6,8 +6,6 @@ package util import ( "bytes" "unicode" - - "github.com/yuin/goldmark/util" ) type sanitizedError struct { @@ -33,7 +31,7 @@ var 
schemeSep = []byte("://") // SanitizeCredentialURLs remove all credentials in URLs (starting with "scheme://") for the input string: "https://user:pass@domain.com" => "https://sanitized-credential@domain.com" func SanitizeCredentialURLs(s string) string { - bs := util.StringToReadOnlyBytes(s) + bs := UnsafeStringToBytes(s) schemeSepPos := bytes.Index(bs, schemeSep) if schemeSepPos == -1 || bytes.IndexByte(bs[schemeSepPos:], '@') == -1 { return s // fast return if there is no URL scheme or no userinfo @@ -70,5 +68,5 @@ func SanitizeCredentialURLs(s string) string { schemeSepPos = bytes.Index(bs, schemeSep) } out = append(out, bs...) - return util.BytesToReadOnlyString(out) + return UnsafeBytesToString(out) } diff --git a/modules/util/string.go b/modules/util/string.go index 2cf44d29b1..cf50f591c6 100644 --- a/modules/util/string.go +++ b/modules/util/string.go @@ -87,11 +87,11 @@ func ToSnakeCase(input string) string { } // UnsafeBytesToString uses Go's unsafe package to convert a byte slice to a string. -// TODO: replace all "goldmark/util.BytesToReadOnlyString" with this official approach func UnsafeBytesToString(b []byte) string { return unsafe.String(unsafe.SliceData(b), len(b)) } +// UnsafeStringToBytes uses Go's unsafe package to convert a string to a byte slice. func UnsafeStringToBytes(s string) []byte { return unsafe.Slice(unsafe.StringData(s), len(s)) } From 9aa3ae955ff506d883737e576dd62f674a3ee372 Mon Sep 17 00:00:00 2001 From: Lunny Xiao Date: Fri, 14 Jun 2024 10:31:07 +0800 Subject: [PATCH 10/18] Fix bug filtering issues which have no project (#31337) Fix #31327 This is a quick patch to fix the bug. Some parameters are using 0, some are using -1. I think it needs a refactor to keep consistent. But that will be another PR. (cherry picked from commit e4abaff7ffbbc5acd3aa668a9c458fbdf76f9573) --- modules/indexer/issues/dboptions.go | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/modules/indexer/issues/dboptions.go b/modules/indexer/issues/dboptions.go index d9cf9b5e3b..50916024af 100644 --- a/modules/indexer/issues/dboptions.go +++ b/modules/indexer/issues/dboptions.go @@ -38,6 +38,12 @@ func ToSearchOptions(keyword string, opts *issues_model.IssuesOptions) *SearchOp searchOpt.MilestoneIDs = opts.MilestoneIDs } + if opts.ProjectID > 0 { + searchOpt.ProjectID = optional.Some(opts.ProjectID) + } else if opts.ProjectID == -1 { // FIXME: this is inconsistent from other places + searchOpt.ProjectID = optional.Some[int64](0) // Those issues with no project(projectid==0) + } + // See the comment of issues_model.SearchOptions for the reason why we need to convert convertID := func(id int64) optional.Option[int64] { if id > 0 { @@ -49,7 +55,6 @@ func ToSearchOptions(keyword string, opts *issues_model.IssuesOptions) *SearchOp return nil } - searchOpt.ProjectID = convertID(opts.ProjectID) searchOpt.ProjectColumnID = convertID(opts.ProjectColumnID) searchOpt.PosterID = convertID(opts.PosterID) searchOpt.AssigneeID = convertID(opts.AssigneeID) From 7fa7ec08911f73c4fa20533a3042f2efc4807e27 Mon Sep 17 00:00:00 2001 From: KN4CK3R Date: Fri, 14 Jun 2024 06:45:52 +0200 Subject: [PATCH 11/18] Extract and display readme and comments for Composer packages (#30927) Related #30075 CC @thojo0 Example with rendered readme: ![grafik](https://github.com/go-gitea/gitea/assets/1666336/3516fef5-2631-40fd-8841-5d9894ec8904) (cherry picked from commit 4e7b067a7fdfb3e2c8dfdf87475e3938051fd400) --- modules/packages/composer/metadata.go | 44 ++++++++++++++- 
modules/packages/composer/metadata_test.go | 62 +++++++++++++++------- templates/package/content/composer.tmpl | 10 ++-- 3 files changed, 90 insertions(+), 26 deletions(-) diff --git a/modules/packages/composer/metadata.go b/modules/packages/composer/metadata.go index 1d0f025648..2c2e9ebf27 100644 --- a/modules/packages/composer/metadata.go +++ b/modules/packages/composer/metadata.go @@ -6,6 +6,7 @@ package composer import ( "archive/zip" "io" + "path" "regexp" "strings" @@ -36,10 +37,14 @@ type Package struct { Metadata *Metadata } +// https://getcomposer.org/doc/04-schema.md + // Metadata represents the metadata of a Composer package type Metadata struct { Description string `json:"description,omitempty"` + Readme string `json:"readme,omitempty"` Keywords []string `json:"keywords,omitempty"` + Comments Comments `json:"_comments,omitempty"` Homepage string `json:"homepage,omitempty"` License Licenses `json:"license,omitempty"` Authors []Author `json:"authors,omitempty"` @@ -74,6 +79,28 @@ func (l *Licenses) UnmarshalJSON(data []byte) error { return nil } +// Comments represents the comments of a Composer package +type Comments []string + +// UnmarshalJSON reads from a string or array +func (c *Comments) UnmarshalJSON(data []byte) error { + switch data[0] { + case '"': + var value string + if err := json.Unmarshal(data, &value); err != nil { + return err + } + *c = Comments{value} + case '[': + values := make([]string, 0, 5) + if err := json.Unmarshal(data, &values); err != nil { + return err + } + *c = Comments(values) + } + return nil +} + // Author represents an author type Author struct { Name string `json:"name,omitempty"` @@ -101,14 +128,14 @@ func ParsePackage(r io.ReaderAt, size int64) (*Package, error) { } defer f.Close() - return ParseComposerFile(f) + return ParseComposerFile(archive, path.Dir(file.Name), f) } } return nil, ErrMissingComposerFile } // ParseComposerFile parses a composer.json file to retrieve the metadata of a Composer package -func ParseComposerFile(r io.Reader) (*Package, error) { +func ParseComposerFile(archive *zip.Reader, pathPrefix string, r io.Reader) (*Package, error) { var cj struct { Name string `json:"name"` Version string `json:"version"` @@ -137,6 +164,19 @@ func ParseComposerFile(r io.Reader) (*Package, error) { cj.Type = "library" } + if cj.Readme == "" { + cj.Readme = "README.md" + } + f, err := archive.Open(path.Join(pathPrefix, cj.Readme)) + if err == nil { + // 10kb limit for readme content + buf, _ := io.ReadAll(io.LimitReader(f, 10*1024)) + cj.Readme = string(buf) + _ = f.Close() + } else { + cj.Readme = "" + } + return &Package{ Name: cj.Name, Version: cj.Version, diff --git a/modules/packages/composer/metadata_test.go b/modules/packages/composer/metadata_test.go index a0e1a77a6e..a5e317daf1 100644 --- a/modules/packages/composer/metadata_test.go +++ b/modules/packages/composer/metadata_test.go @@ -17,6 +17,8 @@ import ( const ( name = "gitea/composer-package" description = "Package Description" + readme = "Package Readme" + comments = "Package Comment" packageType = "composer-plugin" author = "Gitea Authors" email = "no.reply@gitea.io" @@ -41,7 +43,8 @@ const composerContent = `{ }, "require": { "php": ">=7.2 || ^8.0" - } + }, + "_comments": "` + comments + `" }` func TestLicenseUnmarshal(t *testing.T) { @@ -54,18 +57,30 @@ func TestLicenseUnmarshal(t *testing.T) { assert.Equal(t, "MIT", l[0]) } +func TestCommentsUnmarshal(t *testing.T) { + var c Comments + assert.NoError(t, json.NewDecoder(strings.NewReader(`["comment"]`)).Decode(&c)) + 
assert.Len(t, c, 1) + assert.Equal(t, "comment", c[0]) + assert.NoError(t, json.NewDecoder(strings.NewReader(`"comment"`)).Decode(&c)) + assert.Len(t, c, 1) + assert.Equal(t, "comment", c[0]) +} + func TestParsePackage(t *testing.T) { - createArchive := func(name, content string) []byte { + createArchive := func(files map[string]string) []byte { var buf bytes.Buffer archive := zip.NewWriter(&buf) - w, _ := archive.Create(name) - w.Write([]byte(content)) + for name, content := range files { + w, _ := archive.Create(name) + w.Write([]byte(content)) + } archive.Close() return buf.Bytes() } t.Run("MissingComposerFile", func(t *testing.T) { - data := createArchive("dummy.txt", "") + data := createArchive(map[string]string{"dummy.txt": ""}) cp, err := ParsePackage(bytes.NewReader(data), int64(len(data))) assert.Nil(t, cp) @@ -73,7 +88,7 @@ func TestParsePackage(t *testing.T) { }) t.Run("MissingComposerFileInRoot", func(t *testing.T) { - data := createArchive("sub/sub/composer.json", "") + data := createArchive(map[string]string{"sub/sub/composer.json": ""}) cp, err := ParsePackage(bytes.NewReader(data), int64(len(data))) assert.Nil(t, cp) @@ -81,43 +96,52 @@ func TestParsePackage(t *testing.T) { }) t.Run("InvalidComposerFile", func(t *testing.T) { - data := createArchive("composer.json", "") + data := createArchive(map[string]string{"composer.json": ""}) cp, err := ParsePackage(bytes.NewReader(data), int64(len(data))) assert.Nil(t, cp) assert.Error(t, err) }) - t.Run("Valid", func(t *testing.T) { - data := createArchive("composer.json", composerContent) + t.Run("InvalidPackageName", func(t *testing.T) { + data := createArchive(map[string]string{"composer.json": "{}"}) cp, err := ParsePackage(bytes.NewReader(data), int64(len(data))) - assert.NoError(t, err) - assert.NotNil(t, cp) - }) -} - -func TestParseComposerFile(t *testing.T) { - t.Run("InvalidPackageName", func(t *testing.T) { - cp, err := ParseComposerFile(strings.NewReader(`{}`)) assert.Nil(t, cp) assert.ErrorIs(t, err, ErrInvalidName) }) t.Run("InvalidPackageVersion", func(t *testing.T) { - cp, err := ParseComposerFile(strings.NewReader(`{"name": "gitea/composer-package", "version": "1.a.3"}`)) + data := createArchive(map[string]string{"composer.json": `{"name": "gitea/composer-package", "version": "1.a.3"}`}) + + cp, err := ParsePackage(bytes.NewReader(data), int64(len(data))) assert.Nil(t, cp) assert.ErrorIs(t, err, ErrInvalidVersion) }) + t.Run("InvalidReadmePath", func(t *testing.T) { + data := createArchive(map[string]string{"composer.json": `{"name": "gitea/composer-package", "readme": "sub/README.md"}`}) + + cp, err := ParsePackage(bytes.NewReader(data), int64(len(data))) + assert.NoError(t, err) + assert.NotNil(t, cp) + + assert.Empty(t, cp.Metadata.Readme) + }) + t.Run("Valid", func(t *testing.T) { - cp, err := ParseComposerFile(strings.NewReader(composerContent)) + data := createArchive(map[string]string{"composer.json": composerContent, "README.md": readme}) + + cp, err := ParsePackage(bytes.NewReader(data), int64(len(data))) assert.NoError(t, err) assert.NotNil(t, cp) assert.Equal(t, name, cp.Name) assert.Empty(t, cp.Version) assert.Equal(t, description, cp.Metadata.Description) + assert.Equal(t, readme, cp.Metadata.Readme) + assert.Len(t, cp.Metadata.Comments, 1) + assert.Equal(t, comments, cp.Metadata.Comments[0]) assert.Len(t, cp.Metadata.Authors, 1) assert.Equal(t, author, cp.Metadata.Authors[0].Name) assert.Equal(t, email, cp.Metadata.Authors[0].Email) diff --git a/templates/package/content/composer.tmpl 
b/templates/package/content/composer.tmpl index bcc6d3099f..73ab3ac7cc 100644 --- a/templates/package/content/composer.tmpl +++ b/templates/package/content/composer.tmpl @@ -22,11 +22,11 @@ - {{if .PackageDescriptor.Metadata.Description}} + {{if or .PackageDescriptor.Metadata.Description .PackageDescriptor.Metadata.Comments}}

{{ctx.Locale.Tr "packages.about"}}

-
- {{.PackageDescriptor.Metadata.Description}} -
+ {{if .PackageDescriptor.Metadata.Description}}
{{.PackageDescriptor.Metadata.Description}}
{{end}} + {{if .PackageDescriptor.Metadata.Readme}}
{{RenderMarkdownToHtml $.Context .PackageDescriptor.Metadata.Readme}}
{{end}} + {{if .PackageDescriptor.Metadata.Comments}}
{{StringUtils.Join .PackageDescriptor.Metadata.Comments " "}}
{{end}} {{end}} {{if or .PackageDescriptor.Metadata.Require .PackageDescriptor.Metadata.RequireDev}} @@ -39,7 +39,7 @@ {{end}} - {{if or .PackageDescriptor.Metadata.Keywords}} + {{if .PackageDescriptor.Metadata.Keywords}}

{{ctx.Locale.Tr "packages.keywords"}}

{{range .PackageDescriptor.Metadata.Keywords}} From ff43d02803d6177dccc7890cc34d94524ac753cc Mon Sep 17 00:00:00 2001 From: mzroot Date: Fri, 14 Jun 2024 19:56:10 +0300 Subject: [PATCH 12/18] Add tag protection via rest api #17862 (#31295) Add tag protection manage via rest API. --------- Co-authored-by: Alexander Kogay Co-authored-by: Giteabot (cherry picked from commit d4e4226c3cbfa62a6adf15f4466747468eb208c7) Conflicts: modules/structs/repo_tag.go trivial context conflict templates/swagger/v1_json.tmpl fixed with make generate-swagger --- models/git/protected_tag.go | 13 ++ modules/structs/repo_tag.go | 28 +++ routers/api/v1/api.go | 9 + routers/api/v1/repo/tag.go | 350 ++++++++++++++++++++++++++++++ routers/api/v1/swagger/options.go | 6 + routers/api/v1/swagger/repo.go | 14 ++ services/convert/convert.go | 26 +++ templates/swagger/v1_json.tmpl | 332 ++++++++++++++++++++++++++++ 8 files changed, 778 insertions(+) diff --git a/models/git/protected_tag.go b/models/git/protected_tag.go index 8a05045651..9a6646c742 100644 --- a/models/git/protected_tag.go +++ b/models/git/protected_tag.go @@ -110,6 +110,19 @@ func GetProtectedTagByID(ctx context.Context, id int64) (*ProtectedTag, error) { return tag, nil } +// GetProtectedTagByNamePattern gets protected tag by name_pattern +func GetProtectedTagByNamePattern(ctx context.Context, repoID int64, pattern string) (*ProtectedTag, error) { + tag := &ProtectedTag{NamePattern: pattern, RepoID: repoID} + has, err := db.GetEngine(ctx).Get(tag) + if err != nil { + return nil, err + } + if !has { + return nil, nil + } + return tag, nil +} + // IsUserAllowedToControlTag checks if a user can control the specific tag. // It returns true if the tag name is not protected or the user is allowed to control it. func IsUserAllowedToControlTag(ctx context.Context, tags []*ProtectedTag, tagName string, userID int64) (bool, error) { diff --git a/modules/structs/repo_tag.go b/modules/structs/repo_tag.go index 961ca4e53b..1bea5b36a5 100644 --- a/modules/structs/repo_tag.go +++ b/modules/structs/repo_tag.go @@ -3,6 +3,8 @@ package structs +import "time" + // Tag represents a repository tag type Tag struct { Name string `json:"name"` @@ -46,3 +48,29 @@ type TagArchiveDownloadCount struct { Zip int64 `json:"zip"` TarGz int64 `json:"tar_gz"` } + +// TagProtection represents a tag protection +type TagProtection struct { + ID int64 `json:"id"` + NamePattern string `json:"name_pattern"` + WhitelistUsernames []string `json:"whitelist_usernames"` + WhitelistTeams []string `json:"whitelist_teams"` + // swagger:strfmt date-time + Created time.Time `json:"created_at"` + // swagger:strfmt date-time + Updated time.Time `json:"updated_at"` +} + +// CreateTagProtectionOption options for creating a tag protection +type CreateTagProtectionOption struct { + NamePattern string `json:"name_pattern"` + WhitelistUsernames []string `json:"whitelist_usernames"` + WhitelistTeams []string `json:"whitelist_teams"` +} + +// EditTagProtectionOption options for editing a tag protection +type EditTagProtectionOption struct { + NamePattern *string `json:"name_pattern"` + WhitelistUsernames []string `json:"whitelist_usernames"` + WhitelistTeams []string `json:"whitelist_teams"` +} diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index e52cc4c366..93798d1dda 100644 --- a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -1112,6 +1112,15 @@ func Routes() *web.Route { m.Post("", reqToken(), reqRepoWriter(unit.TypeCode), mustNotBeArchived, bind(api.CreateTagOption{}), repo.CreateTag) 
m.Delete("/*", reqToken(), reqRepoWriter(unit.TypeCode), mustNotBeArchived, repo.DeleteTag) }, reqRepoReader(unit.TypeCode), context.ReferencesGitRepo(true)) + m.Group("/tag_protections", func() { + m.Combo("").Get(repo.ListTagProtection). + Post(bind(api.CreateTagProtectionOption{}), mustNotBeArchived, repo.CreateTagProtection) + m.Group("/{id}", func() { + m.Combo("").Get(repo.GetTagProtection). + Patch(bind(api.EditTagProtectionOption{}), mustNotBeArchived, repo.EditTagProtection). + Delete(repo.DeleteTagProtection) + }) + }, reqToken(), reqAdmin()) m.Group("/actions", func() { m.Get("/tasks", repo.ListActionTasks) }, reqRepoReader(unit.TypeActions), context.ReferencesGitRepo(true)) diff --git a/routers/api/v1/repo/tag.go b/routers/api/v1/repo/tag.go index 69dd844298..c050883768 100644 --- a/routers/api/v1/repo/tag.go +++ b/routers/api/v1/repo/tag.go @@ -7,9 +7,13 @@ import ( "errors" "fmt" "net/http" + "strings" "code.gitea.io/gitea/models" + git_model "code.gitea.io/gitea/models/git" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" + user_model "code.gitea.io/gitea/models/user" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/routers/api/v1/utils" @@ -314,3 +318,349 @@ func DeleteTag(ctx *context.APIContext) { ctx.Status(http.StatusNoContent) } + +// ListTagProtection lists tag protections for a repo +func ListTagProtection(ctx *context.APIContext) { + // swagger:operation GET /repos/{owner}/{repo}/tag_protections repository repoListTagProtection + // --- + // summary: List tag protections for a repository + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the repo + // type: string + // required: true + // - name: repo + // in: path + // description: name of the repo + // type: string + // required: true + // responses: + // "200": + // "$ref": "#/responses/TagProtectionList" + + repo := ctx.Repo.Repository + pts, err := git_model.GetProtectedTags(ctx, repo.ID) + if err != nil { + ctx.Error(http.StatusInternalServerError, "GetProtectedTags", err) + return + } + apiPts := make([]*api.TagProtection, len(pts)) + for i := range pts { + apiPts[i] = convert.ToTagProtection(ctx, pts[i], repo) + } + + ctx.JSON(http.StatusOK, apiPts) +} + +// GetTagProtection gets a tag protection +func GetTagProtection(ctx *context.APIContext) { + // swagger:operation GET /repos/{owner}/{repo}/tag_protections/{id} repository repoGetTagProtection + // --- + // summary: Get a specific tag protection for the repository + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the repo + // type: string + // required: true + // - name: repo + // in: path + // description: name of the repo + // type: string + // required: true + // - name: id + // in: path + // description: id of the tag protect to get + // type: integer + // required: true + // responses: + // "200": + // "$ref": "#/responses/TagProtection" + // "404": + // "$ref": "#/responses/notFound" + + repo := ctx.Repo.Repository + id := ctx.ParamsInt64(":id") + pt, err := git_model.GetProtectedTagByID(ctx, id) + if err != nil { + ctx.Error(http.StatusInternalServerError, "GetProtectedTagByID", err) + return + } + + if pt == nil || repo.ID != pt.RepoID { + ctx.NotFound() + return + } + + ctx.JSON(http.StatusOK, convert.ToTagProtection(ctx, pt, repo)) +} + +// CreateTagProtection creates a tag protection for a repo +func CreateTagProtection(ctx 
*context.APIContext) { + // swagger:operation POST /repos/{owner}/{repo}/tag_protections repository repoCreateTagProtection + // --- + // summary: Create a tag protections for a repository + // consumes: + // - application/json + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the repo + // type: string + // required: true + // - name: repo + // in: path + // description: name of the repo + // type: string + // required: true + // - name: body + // in: body + // schema: + // "$ref": "#/definitions/CreateTagProtectionOption" + // responses: + // "201": + // "$ref": "#/responses/TagProtection" + // "403": + // "$ref": "#/responses/forbidden" + // "404": + // "$ref": "#/responses/notFound" + // "422": + // "$ref": "#/responses/validationError" + // "423": + // "$ref": "#/responses/repoArchivedError" + + form := web.GetForm(ctx).(*api.CreateTagProtectionOption) + repo := ctx.Repo.Repository + + namePattern := strings.TrimSpace(form.NamePattern) + if namePattern == "" { + ctx.Error(http.StatusBadRequest, "name_pattern are empty", "name_pattern are empty") + return + } + + if len(form.WhitelistUsernames) == 0 && len(form.WhitelistTeams) == 0 { + ctx.Error(http.StatusBadRequest, "both whitelist_usernames and whitelist_teams are empty", "both whitelist_usernames and whitelist_teams are empty") + return + } + + pt, err := git_model.GetProtectedTagByNamePattern(ctx, repo.ID, namePattern) + if err != nil { + ctx.Error(http.StatusInternalServerError, "GetProtectTagOfRepo", err) + return + } else if pt != nil { + ctx.Error(http.StatusForbidden, "Create tag protection", "Tag protection already exist") + return + } + + var whitelistUsers, whitelistTeams []int64 + whitelistUsers, err = user_model.GetUserIDsByNames(ctx, form.WhitelistUsernames, false) + if err != nil { + if user_model.IsErrUserNotExist(err) { + ctx.Error(http.StatusUnprocessableEntity, "User does not exist", err) + return + } + ctx.Error(http.StatusInternalServerError, "GetUserIDsByNames", err) + return + } + + if repo.Owner.IsOrganization() { + whitelistTeams, err = organization.GetTeamIDsByNames(ctx, repo.OwnerID, form.WhitelistTeams, false) + if err != nil { + if organization.IsErrTeamNotExist(err) { + ctx.Error(http.StatusUnprocessableEntity, "Team does not exist", err) + return + } + ctx.Error(http.StatusInternalServerError, "GetTeamIDsByNames", err) + return + } + } + + protectTag := &git_model.ProtectedTag{ + RepoID: repo.ID, + NamePattern: strings.TrimSpace(namePattern), + AllowlistUserIDs: whitelistUsers, + AllowlistTeamIDs: whitelistTeams, + } + if err := git_model.InsertProtectedTag(ctx, protectTag); err != nil { + ctx.Error(http.StatusInternalServerError, "InsertProtectedTag", err) + return + } + + pt, err = git_model.GetProtectedTagByID(ctx, protectTag.ID) + if err != nil { + ctx.Error(http.StatusInternalServerError, "GetProtectedTagByID", err) + return + } + + if pt == nil || pt.RepoID != repo.ID { + ctx.Error(http.StatusInternalServerError, "New tag protection not found", err) + return + } + + ctx.JSON(http.StatusCreated, convert.ToTagProtection(ctx, pt, repo)) +} + +// EditTagProtection edits a tag protection for a repo +func EditTagProtection(ctx *context.APIContext) { + // swagger:operation PATCH /repos/{owner}/{repo}/tag_protections/{id} repository repoEditTagProtection + // --- + // summary: Edit a tag protections for a repository. 
Only fields that are set will be changed + // consumes: + // - application/json + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the repo + // type: string + // required: true + // - name: repo + // in: path + // description: name of the repo + // type: string + // required: true + // - name: id + // in: path + // description: id of protected tag + // type: integer + // required: true + // - name: body + // in: body + // schema: + // "$ref": "#/definitions/EditTagProtectionOption" + // responses: + // "200": + // "$ref": "#/responses/TagProtection" + // "404": + // "$ref": "#/responses/notFound" + // "422": + // "$ref": "#/responses/validationError" + // "423": + // "$ref": "#/responses/repoArchivedError" + + repo := ctx.Repo.Repository + form := web.GetForm(ctx).(*api.EditTagProtectionOption) + + id := ctx.ParamsInt64(":id") + pt, err := git_model.GetProtectedTagByID(ctx, id) + if err != nil { + ctx.Error(http.StatusInternalServerError, "GetProtectedTagByID", err) + return + } + + if pt == nil || pt.RepoID != repo.ID { + ctx.NotFound() + return + } + + if form.NamePattern != nil { + pt.NamePattern = *form.NamePattern + } + + var whitelistUsers, whitelistTeams []int64 + if form.WhitelistTeams != nil { + if repo.Owner.IsOrganization() { + whitelistTeams, err = organization.GetTeamIDsByNames(ctx, repo.OwnerID, form.WhitelistTeams, false) + if err != nil { + if organization.IsErrTeamNotExist(err) { + ctx.Error(http.StatusUnprocessableEntity, "Team does not exist", err) + return + } + ctx.Error(http.StatusInternalServerError, "GetTeamIDsByNames", err) + return + } + } + pt.AllowlistTeamIDs = whitelistTeams + } + + if form.WhitelistUsernames != nil { + whitelistUsers, err = user_model.GetUserIDsByNames(ctx, form.WhitelistUsernames, false) + if err != nil { + if user_model.IsErrUserNotExist(err) { + ctx.Error(http.StatusUnprocessableEntity, "User does not exist", err) + return + } + ctx.Error(http.StatusInternalServerError, "GetUserIDsByNames", err) + return + } + pt.AllowlistUserIDs = whitelistUsers + } + + err = git_model.UpdateProtectedTag(ctx, pt) + if err != nil { + ctx.Error(http.StatusInternalServerError, "UpdateProtectedTag", err) + return + } + + pt, err = git_model.GetProtectedTagByID(ctx, id) + if err != nil { + ctx.Error(http.StatusInternalServerError, "GetProtectedTagByID", err) + return + } + + if pt == nil || pt.RepoID != repo.ID { + ctx.Error(http.StatusInternalServerError, "New tag protection not found", "New tag protection not found") + return + } + + ctx.JSON(http.StatusOK, convert.ToTagProtection(ctx, pt, repo)) +} + +// DeleteTagProtection +func DeleteTagProtection(ctx *context.APIContext) { + // swagger:operation DELETE /repos/{owner}/{repo}/tag_protections/{id} repository repoDeleteTagProtection + // --- + // summary: Delete a specific tag protection for the repository + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the repo + // type: string + // required: true + // - name: repo + // in: path + // description: name of the repo + // type: string + // required: true + // - name: id + // in: path + // description: id of protected tag + // type: integer + // required: true + // responses: + // "204": + // "$ref": "#/responses/empty" + // "404": + // "$ref": "#/responses/notFound" + + repo := ctx.Repo.Repository + id := ctx.ParamsInt64(":id") + pt, err := git_model.GetProtectedTagByID(ctx, id) + if err != nil { + 
ctx.Error(http.StatusInternalServerError, "GetProtectedTagByID", err) + return + } + + if pt == nil || pt.RepoID != repo.ID { + ctx.NotFound() + return + } + + err = git_model.DeleteProtectedTag(ctx, pt) + if err != nil { + ctx.Error(http.StatusInternalServerError, "DeleteProtectedTag", err) + return + } + + ctx.Status(http.StatusNoContent) +} diff --git a/routers/api/v1/swagger/options.go b/routers/api/v1/swagger/options.go index 2ebf089304..b0a5158a42 100644 --- a/routers/api/v1/swagger/options.go +++ b/routers/api/v1/swagger/options.go @@ -184,6 +184,12 @@ type swaggerParameterBodies struct { // in:body CreateTagOption api.CreateTagOption + // in:body + CreateTagProtectionOption api.CreateTagProtectionOption + + // in:body + EditTagProtectionOption api.EditTagProtectionOption + // in:body CreateAccessTokenOption api.CreateAccessTokenOption diff --git a/routers/api/v1/swagger/repo.go b/routers/api/v1/swagger/repo.go index 6d399ea185..ca214b4900 100644 --- a/routers/api/v1/swagger/repo.go +++ b/routers/api/v1/swagger/repo.go @@ -70,6 +70,20 @@ type swaggerResponseAnnotatedTag struct { Body api.AnnotatedTag `json:"body"` } +// TagProtectionList +// swagger:response TagProtectionList +type swaggerResponseTagProtectionList struct { + // in:body + Body []api.TagProtection `json:"body"` +} + +// TagProtection +// swagger:response TagProtection +type swaggerResponseTagProtection struct { + // in:body + Body api.TagProtection `json:"body"` +} + // Reference // swagger:response Reference type swaggerResponseReference struct { diff --git a/services/convert/convert.go b/services/convert/convert.go index abcdf917cd..d6dc3c9858 100644 --- a/services/convert/convert.go +++ b/services/convert/convert.go @@ -411,6 +411,32 @@ func ToAnnotatedTagObject(repo *repo_model.Repository, commit *git.Commit) *api. 
} } +// ToTagProtection convert a git.ProtectedTag to an api.TagProtection +func ToTagProtection(ctx context.Context, pt *git_model.ProtectedTag, repo *repo_model.Repository) *api.TagProtection { + readers, err := access_model.GetRepoReaders(ctx, repo) + if err != nil { + log.Error("GetRepoReaders: %v", err) + } + + whitelistUsernames := getWhitelistEntities(readers, pt.AllowlistUserIDs) + + teamReaders, err := organization.OrgFromUser(repo.Owner).TeamsWithAccessToRepo(ctx, repo.ID, perm.AccessModeRead) + if err != nil { + log.Error("Repo.Owner.TeamsWithAccessToRepo: %v", err) + } + + whitelistTeams := getWhitelistEntities(teamReaders, pt.AllowlistTeamIDs) + + return &api.TagProtection{ + ID: pt.ID, + NamePattern: pt.NamePattern, + WhitelistUsernames: whitelistUsernames, + WhitelistTeams: whitelistTeams, + Created: pt.CreatedUnix.AsTime(), + Updated: pt.UpdatedUnix.AsTime(), + } +} + // ToTopicResponse convert from models.Topic to api.TopicResponse func ToTopicResponse(topic *repo_model.Topic) *api.TopicResponse { return &api.TopicResponse{ diff --git a/templates/swagger/v1_json.tmpl b/templates/swagger/v1_json.tmpl index 002d81f0bb..f68b1cbe2f 100644 --- a/templates/swagger/v1_json.tmpl +++ b/templates/swagger/v1_json.tmpl @@ -14184,6 +14184,233 @@ } } }, + "/repos/{owner}/{repo}/tag_protections": { + "get": { + "produces": [ + "application/json" + ], + "tags": [ + "repository" + ], + "summary": "List tag protections for a repository", + "operationId": "repoListTagProtection", + "parameters": [ + { + "type": "string", + "description": "owner of the repo", + "name": "owner", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "name of the repo", + "name": "repo", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "$ref": "#/responses/TagProtectionList" + } + } + }, + "post": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "repository" + ], + "summary": "Create a tag protections for a repository", + "operationId": "repoCreateTagProtection", + "parameters": [ + { + "type": "string", + "description": "owner of the repo", + "name": "owner", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "name of the repo", + "name": "repo", + "in": "path", + "required": true + }, + { + "name": "body", + "in": "body", + "schema": { + "$ref": "#/definitions/CreateTagProtectionOption" + } + } + ], + "responses": { + "201": { + "$ref": "#/responses/TagProtection" + }, + "403": { + "$ref": "#/responses/forbidden" + }, + "404": { + "$ref": "#/responses/notFound" + }, + "422": { + "$ref": "#/responses/validationError" + }, + "423": { + "$ref": "#/responses/repoArchivedError" + } + } + } + }, + "/repos/{owner}/{repo}/tag_protections/{id}": { + "get": { + "produces": [ + "application/json" + ], + "tags": [ + "repository" + ], + "summary": "Get a specific tag protection for the repository", + "operationId": "repoGetTagProtection", + "parameters": [ + { + "type": "string", + "description": "owner of the repo", + "name": "owner", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "name of the repo", + "name": "repo", + "in": "path", + "required": true + }, + { + "type": "integer", + "description": "id of the tag protect to get", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "$ref": "#/responses/TagProtection" + }, + "404": { + "$ref": "#/responses/notFound" + } + } + }, + "delete": { + "produces": [ + 
"application/json" + ], + "tags": [ + "repository" + ], + "summary": "Delete a specific tag protection for the repository", + "operationId": "repoDeleteTagProtection", + "parameters": [ + { + "type": "string", + "description": "owner of the repo", + "name": "owner", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "name of the repo", + "name": "repo", + "in": "path", + "required": true + }, + { + "type": "integer", + "description": "id of protected tag", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "204": { + "$ref": "#/responses/empty" + }, + "404": { + "$ref": "#/responses/notFound" + } + } + }, + "patch": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "repository" + ], + "summary": "Edit a tag protections for a repository. Only fields that are set will be changed", + "operationId": "repoEditTagProtection", + "parameters": [ + { + "type": "string", + "description": "owner of the repo", + "name": "owner", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "name of the repo", + "name": "repo", + "in": "path", + "required": true + }, + { + "type": "integer", + "description": "id of protected tag", + "name": "id", + "in": "path", + "required": true + }, + { + "name": "body", + "in": "body", + "schema": { + "$ref": "#/definitions/EditTagProtectionOption" + } + } + ], + "responses": { + "200": { + "$ref": "#/responses/TagProtection" + }, + "404": { + "$ref": "#/responses/notFound" + }, + "422": { + "$ref": "#/responses/validationError" + }, + "423": { + "$ref": "#/responses/repoArchivedError" + } + } + } + }, "/repos/{owner}/{repo}/tags": { "get": { "produces": [ @@ -20331,6 +20558,31 @@ }, "x-go-package": "code.gitea.io/gitea/modules/structs" }, + "CreateTagProtectionOption": { + "description": "CreateTagProtectionOption options for creating a tag protection", + "type": "object", + "properties": { + "name_pattern": { + "type": "string", + "x-go-name": "NamePattern" + }, + "whitelist_teams": { + "type": "array", + "items": { + "type": "string" + }, + "x-go-name": "WhitelistTeams" + }, + "whitelist_usernames": { + "type": "array", + "items": { + "type": "string" + }, + "x-go-name": "WhitelistUsernames" + } + }, + "x-go-package": "code.gitea.io/gitea/modules/structs" + }, "CreateTeamOption": { "description": "CreateTeamOption options for creating a team", "type": "object", @@ -21282,6 +21534,31 @@ }, "x-go-package": "code.gitea.io/gitea/modules/structs" }, + "EditTagProtectionOption": { + "description": "EditTagProtectionOption options for editing a tag protection", + "type": "object", + "properties": { + "name_pattern": { + "type": "string", + "x-go-name": "NamePattern" + }, + "whitelist_teams": { + "type": "array", + "items": { + "type": "string" + }, + "x-go-name": "WhitelistTeams" + }, + "whitelist_usernames": { + "type": "array", + "items": { + "type": "string" + }, + "x-go-name": "WhitelistUsernames" + } + }, + "x-go-package": "code.gitea.io/gitea/modules/structs" + }, "EditTeamOption": { "description": "EditTeamOption options for editing a team", "type": "object", @@ -24507,6 +24784,46 @@ }, "x-go-package": "code.gitea.io/gitea/modules/structs" }, + "TagProtection": { + "description": "TagProtection represents a tag protection", + "type": "object", + "properties": { + "created_at": { + "type": "string", + "format": "date-time", + "x-go-name": "Created" + }, + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + 
"name_pattern": { + "type": "string", + "x-go-name": "NamePattern" + }, + "updated_at": { + "type": "string", + "format": "date-time", + "x-go-name": "Updated" + }, + "whitelist_teams": { + "type": "array", + "items": { + "type": "string" + }, + "x-go-name": "WhitelistTeams" + }, + "whitelist_usernames": { + "type": "array", + "items": { + "type": "string" + }, + "x-go-name": "WhitelistUsernames" + } + }, + "x-go-package": "code.gitea.io/gitea/modules/structs" + }, "Team": { "description": "Team represents a team in an organization", "type": "object", @@ -26121,6 +26438,21 @@ } } }, + "TagProtection": { + "description": "TagProtection", + "schema": { + "$ref": "#/definitions/TagProtection" + } + }, + "TagProtectionList": { + "description": "TagProtectionList", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/TagProtection" + } + } + }, "TasksList": { "description": "TasksList", "schema": { From 813bf24445e2388fc9346e4c0e065a5ee7ce48ea Mon Sep 17 00:00:00 2001 From: Zettat123 Date: Sat, 15 Jun 2024 12:20:14 +0800 Subject: [PATCH 13/18] Allow downloading attachments of draft releases (#31369) Fix #31362 (cherry picked from commit 42718d32af9d259205bee0fde818ffc0c3a9797f) --- routers/web/repo/repo.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/routers/web/repo/repo.go b/routers/web/repo/repo.go index fa3f0e86de..119a97f28c 100644 --- a/routers/web/repo/repo.go +++ b/routers/web/repo/repo.go @@ -415,8 +415,9 @@ func RedirectDownload(ctx *context.Context) { tagNames := []string{vTag} curRepo := ctx.Repo.Repository releases, err := db.Find[repo_model.Release](ctx, repo_model.FindReleasesOptions{ - RepoID: curRepo.ID, - TagNames: tagNames, + IncludeDrafts: ctx.Repo.CanWrite(unit.TypeReleases), + RepoID: curRepo.ID, + TagNames: tagNames, }) if err != nil { ctx.ServerError("RedirectDownload", err) From 5d10c3446af946f3f8dcd0874bb7bceb2dcb655c Mon Sep 17 00:00:00 2001 From: 6543 <6543@obermui.de> Date: Sat, 15 Jun 2024 06:48:52 +0200 Subject: [PATCH 14/18] rm const do inline (#31360) https://github.com/go-gitea/gitea/pull/30876/files#r1637288202 (cherry picked from commit e37ecd17324946d9b2db07ea10d4a9fbb53da20f) --- models/repo/search.go | 60 +++++++++++++------------------------------ 1 file changed, 18 insertions(+), 42 deletions(-) diff --git a/models/repo/search.go b/models/repo/search.go index c500d41be8..2baa85dc6f 100644 --- a/models/repo/search.go +++ b/models/repo/search.go @@ -5,53 +5,29 @@ package repo import "code.gitea.io/gitea/models/db" -// Strings for sorting result -const ( - // only used for repos - SearchOrderByAlphabetically db.SearchOrderBy = "owner_name ASC, name ASC" - SearchOrderByAlphabeticallyReverse db.SearchOrderBy = "owner_name DESC, name DESC" - SearchOrderBySize db.SearchOrderBy = "size ASC" - SearchOrderBySizeReverse db.SearchOrderBy = "size DESC" - SearchOrderByGitSize db.SearchOrderBy = "git_size ASC" - SearchOrderByGitSizeReverse db.SearchOrderBy = "git_size DESC" - SearchOrderByLFSSize db.SearchOrderBy = "lfs_size ASC" - SearchOrderByLFSSizeReverse db.SearchOrderBy = "lfs_size DESC" - // alias as also used elsewhere - SearchOrderByLeastUpdated db.SearchOrderBy = db.SearchOrderByLeastUpdated - SearchOrderByRecentUpdated db.SearchOrderBy = db.SearchOrderByRecentUpdated - SearchOrderByOldest db.SearchOrderBy = db.SearchOrderByOldest - SearchOrderByNewest db.SearchOrderBy = db.SearchOrderByNewest - SearchOrderByID db.SearchOrderBy = db.SearchOrderByID - SearchOrderByIDReverse db.SearchOrderBy = 
db.SearchOrderByIDReverse - SearchOrderByStars db.SearchOrderBy = db.SearchOrderByStars - SearchOrderByStarsReverse db.SearchOrderBy = db.SearchOrderByStarsReverse - SearchOrderByForks db.SearchOrderBy = db.SearchOrderByForks - SearchOrderByForksReverse db.SearchOrderBy = db.SearchOrderByForksReverse -) - // SearchOrderByMap represents all possible search order var SearchOrderByMap = map[string]map[string]db.SearchOrderBy{ "asc": { - "alpha": SearchOrderByAlphabetically, - "created": SearchOrderByOldest, - "updated": SearchOrderByLeastUpdated, - "size": SearchOrderBySize, - "git_size": SearchOrderByGitSize, - "lfs_size": SearchOrderByLFSSize, - "id": SearchOrderByID, - "stars": SearchOrderByStars, - "forks": SearchOrderByForks, + "alpha": "owner_name ASC, name ASC", + "created": db.SearchOrderByOldest, + "updated": db.SearchOrderByLeastUpdated, + "size": "size ASC", + "git_size": "git_size ASC", + "lfs_size": "lfs_size ASC", + "id": db.SearchOrderByID, + "stars": db.SearchOrderByStars, + "forks": db.SearchOrderByForks, }, "desc": { - "alpha": SearchOrderByAlphabeticallyReverse, - "created": SearchOrderByNewest, - "updated": SearchOrderByRecentUpdated, - "size": SearchOrderBySizeReverse, - "git_size": SearchOrderByGitSizeReverse, - "lfs_size": SearchOrderByLFSSizeReverse, - "id": SearchOrderByIDReverse, - "stars": SearchOrderByStarsReverse, - "forks": SearchOrderByForksReverse, + "alpha": "owner_name DESC, name DESC", + "created": db.SearchOrderByNewest, + "updated": db.SearchOrderByRecentUpdated, + "size": "size DESC", + "git_size": "git_size DESC", + "lfs_size": "lfs_size DESC", + "id": db.SearchOrderByIDReverse, + "stars": db.SearchOrderByStarsReverse, + "forks": db.SearchOrderByForksReverse, }, } From 3246e832515190cea5fb6131da55c0357dc55146 Mon Sep 17 00:00:00 2001 From: 6543 <6543@obermui.de> Date: Sat, 15 Jun 2024 08:45:02 +0200 Subject: [PATCH 15/18] Rename repo_model.SearchOrderByMap to repo_model.OrderByMap (#31359) https://github.com/go-gitea/gitea/pull/30876#discussion_r1637112394 (cherry picked from commit 78e8296e113e2fd9259ec05fe87035427821ea0b) --- models/repo/search.go | 38 ++++++++++++++++++------------------- routers/api/v1/repo/repo.go | 2 +- routers/web/explore/repo.go | 2 +- routers/web/repo/repo.go | 2 +- 4 files changed, 22 insertions(+), 22 deletions(-) diff --git a/models/repo/search.go b/models/repo/search.go index 2baa85dc6f..a73d9fc215 100644 --- a/models/repo/search.go +++ b/models/repo/search.go @@ -5,8 +5,8 @@ package repo import "code.gitea.io/gitea/models/db" -// SearchOrderByMap represents all possible search order -var SearchOrderByMap = map[string]map[string]db.SearchOrderBy{ +// OrderByMap represents all possible search order +var OrderByMap = map[string]map[string]db.SearchOrderBy{ "asc": { "alpha": "owner_name ASC, name ASC", "created": db.SearchOrderByOldest, @@ -31,22 +31,22 @@ var SearchOrderByMap = map[string]map[string]db.SearchOrderBy{ }, } -// SearchOrderByFlatMap is similar to SearchOrderByMap but use human language keywords +// OrderByFlatMap is similar to OrderByMap but use human language keywords // to decide between asc and desc -var SearchOrderByFlatMap = map[string]db.SearchOrderBy{ - "newest": SearchOrderByMap["desc"]["created"], - "oldest": SearchOrderByMap["asc"]["created"], - "leastupdate": SearchOrderByMap["asc"]["updated"], - "reversealphabetically": SearchOrderByMap["desc"]["alpha"], - "alphabetically": SearchOrderByMap["asc"]["alpha"], - "reversesize": SearchOrderByMap["desc"]["size"], - "size": SearchOrderByMap["asc"]["size"], - 
"reversegitsize": SearchOrderByMap["desc"]["git_size"], - "gitsize": SearchOrderByMap["asc"]["git_size"], - "reverselfssize": SearchOrderByMap["desc"]["lfs_size"], - "lfssize": SearchOrderByMap["asc"]["lfs_size"], - "moststars": SearchOrderByMap["desc"]["stars"], - "feweststars": SearchOrderByMap["asc"]["stars"], - "mostforks": SearchOrderByMap["desc"]["forks"], - "fewestforks": SearchOrderByMap["asc"]["forks"], +var OrderByFlatMap = map[string]db.SearchOrderBy{ + "newest": OrderByMap["desc"]["created"], + "oldest": OrderByMap["asc"]["created"], + "leastupdate": OrderByMap["asc"]["updated"], + "reversealphabetically": OrderByMap["desc"]["alpha"], + "alphabetically": OrderByMap["asc"]["alpha"], + "reversesize": OrderByMap["desc"]["size"], + "size": OrderByMap["asc"]["size"], + "reversegitsize": OrderByMap["desc"]["git_size"], + "gitsize": OrderByMap["asc"]["git_size"], + "reverselfssize": OrderByMap["desc"]["lfs_size"], + "lfssize": OrderByMap["asc"]["lfs_size"], + "moststars": OrderByMap["desc"]["stars"], + "feweststars": OrderByMap["asc"]["stars"], + "mostforks": OrderByMap["desc"]["forks"], + "fewestforks": OrderByMap["asc"]["forks"], } diff --git a/routers/api/v1/repo/repo.go b/routers/api/v1/repo/repo.go index 41368146da..05a63bc62b 100644 --- a/routers/api/v1/repo/repo.go +++ b/routers/api/v1/repo/repo.go @@ -184,7 +184,7 @@ func Search(ctx *context.APIContext) { if len(sortOrder) == 0 { sortOrder = "asc" } - if searchModeMap, ok := repo_model.SearchOrderByMap[sortOrder]; ok { + if searchModeMap, ok := repo_model.OrderByMap[sortOrder]; ok { if orderBy, ok := searchModeMap[sortMode]; ok { opts.OrderBy = orderBy } else { diff --git a/routers/web/explore/repo.go b/routers/web/explore/repo.go index 1d5fb2c149..4e880660b1 100644 --- a/routers/web/explore/repo.go +++ b/routers/web/explore/repo.go @@ -63,7 +63,7 @@ func RenderRepoSearch(ctx *context.Context, opts *RepoSearchOptions) { sortOrder = setting.UI.ExploreDefaultSort } - if order, ok := repo_model.SearchOrderByFlatMap[sortOrder]; ok { + if order, ok := repo_model.OrderByFlatMap[sortOrder]; ok { orderBy = order } else { sortOrder = "recentupdate" diff --git a/routers/web/repo/repo.go b/routers/web/repo/repo.go index 119a97f28c..4d17be3758 100644 --- a/routers/web/repo/repo.go +++ b/routers/web/repo/repo.go @@ -629,7 +629,7 @@ func SearchRepo(ctx *context.Context) { if len(sortOrder) == 0 { sortOrder = "asc" } - if searchModeMap, ok := repo_model.SearchOrderByMap[sortOrder]; ok { + if searchModeMap, ok := repo_model.OrderByMap[sortOrder]; ok { if orderBy, ok := searchModeMap[sortMode]; ok { opts.OrderBy = orderBy } else { From 41bea7c23a05d44978c36534923f08928ec90e26 Mon Sep 17 00:00:00 2001 From: Earl Warren Date: Sun, 16 Jun 2024 09:40:49 +0200 Subject: [PATCH 16/18] chore: update deadcode-out --- .deadcode-out | 1 - 1 file changed, 1 deletion(-) diff --git a/.deadcode-out b/.deadcode-out index 8326ed4402..186e1ef4e9 100644 --- a/.deadcode-out +++ b/.deadcode-out @@ -87,7 +87,6 @@ code.gitea.io/gitea/models/repo releaseSorter.Swap SortReleases FindReposMapByIDs - SearchOrderBy.String IsErrTopicNotExist ErrTopicNotExist.Error ErrTopicNotExist.Unwrap From baad8337f9712abc5831ed4f0926b8134f37d0a8 Mon Sep 17 00:00:00 2001 From: Zoupers Zou <1171443643@qq.com> Date: Wed, 12 Jun 2024 06:22:28 +0800 Subject: [PATCH 17/18] Fix #31185 try fix lfs download from bitbucket failed (#31201) Fix #31185 (cherry picked from commit e25d6960b5749fbf7f88ebb6b27878c0459817da) --- modules/lfs/http_client.go | 4 ++-- modules/lfs/http_client_test.go | 4 
++-- modules/lfs/shared.go | 2 ++ modules/lfs/transferadapter.go | 1 + modules/lfs/transferadapter_test.go | 2 +- services/lfs/server.go | 2 +- tests/integration/api_repo_lfs_locks_test.go | 10 +++++----- tests/integration/api_repo_lfs_test.go | 4 ++-- 8 files changed, 16 insertions(+), 13 deletions(-) diff --git a/modules/lfs/http_client.go b/modules/lfs/http_client.go index e06879baea..f5ddd38b09 100644 --- a/modules/lfs/http_client.go +++ b/modules/lfs/http_client.go @@ -211,7 +211,7 @@ func createRequest(ctx context.Context, method, url string, headers map[string]s for key, value := range headers { req.Header.Set(key, value) } - req.Header.Set("Accept", MediaType) + req.Header.Set("Accept", AcceptHeader) return req, nil } @@ -251,6 +251,6 @@ func handleErrorResponse(resp *http.Response) error { return err } - log.Trace("ErrorResponse: %v", er) + log.Trace("ErrorResponse(%v): %v", resp.Status, er) return errors.New(er.Message) } diff --git a/modules/lfs/http_client_test.go b/modules/lfs/http_client_test.go index 7459d9c0c9..7431132f76 100644 --- a/modules/lfs/http_client_test.go +++ b/modules/lfs/http_client_test.go @@ -155,7 +155,7 @@ func TestHTTPClientDownload(t *testing.T) { hc := &http.Client{Transport: RoundTripFunc(func(req *http.Request) *http.Response { assert.Equal(t, "POST", req.Method) assert.Equal(t, MediaType, req.Header.Get("Content-type")) - assert.Equal(t, MediaType, req.Header.Get("Accept")) + assert.Equal(t, AcceptHeader, req.Header.Get("Accept")) var batchRequest BatchRequest err := json.NewDecoder(req.Body).Decode(&batchRequest) @@ -263,7 +263,7 @@ func TestHTTPClientUpload(t *testing.T) { hc := &http.Client{Transport: RoundTripFunc(func(req *http.Request) *http.Response { assert.Equal(t, "POST", req.Method) assert.Equal(t, MediaType, req.Header.Get("Content-type")) - assert.Equal(t, MediaType, req.Header.Get("Accept")) + assert.Equal(t, AcceptHeader, req.Header.Get("Accept")) var batchRequest BatchRequest err := json.NewDecoder(req.Body).Decode(&batchRequest) diff --git a/modules/lfs/shared.go b/modules/lfs/shared.go index 6b2e55f2fb..80f4fed00d 100644 --- a/modules/lfs/shared.go +++ b/modules/lfs/shared.go @@ -10,6 +10,8 @@ import ( const ( // MediaType contains the media type for LFS server requests MediaType = "application/vnd.git-lfs+json" + // Some LFS servers offer content with other types, so fallback to '*/*' if application/vnd.git-lfs+json cannot be served + AcceptHeader = "application/vnd.git-lfs+json;q=0.9, */*;q=0.8" ) // BatchRequest contains multiple requests processed in one batch operation. 
diff --git a/modules/lfs/transferadapter.go b/modules/lfs/transferadapter.go index d425b91946..fbc3a3ad8c 100644 --- a/modules/lfs/transferadapter.go +++ b/modules/lfs/transferadapter.go @@ -37,6 +37,7 @@ func (a *BasicTransferAdapter) Download(ctx context.Context, l *Link) (io.ReadCl if err != nil { return nil, err } + log.Debug("Download Request: %+v", req) resp, err := performRequest(ctx, a.client, req) if err != nil { return nil, err diff --git a/modules/lfs/transferadapter_test.go b/modules/lfs/transferadapter_test.go index 6023cd07d3..7fec137efe 100644 --- a/modules/lfs/transferadapter_test.go +++ b/modules/lfs/transferadapter_test.go @@ -26,7 +26,7 @@ func TestBasicTransferAdapter(t *testing.T) { p := Pointer{Oid: "b5a2c96250612366ea272ffac6d9744aaf4b45aacd96aa7cfcb931ee3b558259", Size: 5} roundTripHandler := func(req *http.Request) *http.Response { - assert.Equal(t, MediaType, req.Header.Get("Accept")) + assert.Equal(t, AcceptHeader, req.Header.Get("Accept")) assert.Equal(t, "test-value", req.Header.Get("test-header")) url := req.URL.String() diff --git a/services/lfs/server.go b/services/lfs/server.go index 706be0d080..ace501e15f 100644 --- a/services/lfs/server.go +++ b/services/lfs/server.go @@ -477,7 +477,7 @@ func buildObjectResponse(rc *requestContext, pointer lfs_module.Pointer, downloa } // This is only needed to workaround https://github.com/git-lfs/git-lfs/issues/3662 - verifyHeader["Accept"] = lfs_module.MediaType + verifyHeader["Accept"] = lfs_module.AcceptHeader rep.Actions["verify"] = &lfs_module.Link{Href: rc.VerifyLink(pointer), Header: verifyHeader} } diff --git a/tests/integration/api_repo_lfs_locks_test.go b/tests/integration/api_repo_lfs_locks_test.go index 5aa1396941..427e0b9fb1 100644 --- a/tests/integration/api_repo_lfs_locks_test.go +++ b/tests/integration/api_repo_lfs_locks_test.go @@ -105,7 +105,7 @@ func TestAPILFSLocksLogged(t *testing.T) { for _, test := range tests { session := loginUser(t, test.user.Name) req := NewRequestWithJSON(t, "POST", fmt.Sprintf("/%s.git/info/lfs/locks", test.repo.FullName()), map[string]string{"path": test.path}) - req.Header.Set("Accept", lfs.MediaType) + req.Header.Set("Accept", lfs.AcceptHeader) req.Header.Set("Content-Type", lfs.MediaType) resp := session.MakeRequest(t, req, test.httpResult) if len(test.addTime) > 0 { @@ -123,7 +123,7 @@ func TestAPILFSLocksLogged(t *testing.T) { for _, test := range resultsTests { session := loginUser(t, test.user.Name) req := NewRequestf(t, "GET", "/%s.git/info/lfs/locks", test.repo.FullName()) - req.Header.Set("Accept", lfs.MediaType) + req.Header.Set("Accept", lfs.AcceptHeader) resp := session.MakeRequest(t, req, http.StatusOK) var lfsLocks api.LFSLockList DecodeJSON(t, resp, &lfsLocks) @@ -135,7 +135,7 @@ func TestAPILFSLocksLogged(t *testing.T) { } req = NewRequestWithJSON(t, "POST", fmt.Sprintf("/%s.git/info/lfs/locks/verify", test.repo.FullName()), map[string]string{}) - req.Header.Set("Accept", lfs.MediaType) + req.Header.Set("Accept", lfs.AcceptHeader) req.Header.Set("Content-Type", lfs.MediaType) resp = session.MakeRequest(t, req, http.StatusOK) var lfsLocksVerify api.LFSLockListVerify @@ -159,7 +159,7 @@ func TestAPILFSLocksLogged(t *testing.T) { for _, test := range deleteTests { session := loginUser(t, test.user.Name) req := NewRequestWithJSON(t, "POST", fmt.Sprintf("/%s.git/info/lfs/locks/%s/unlock", test.repo.FullName(), test.lockID), map[string]string{}) - req.Header.Set("Accept", lfs.MediaType) + req.Header.Set("Accept", lfs.AcceptHeader) req.Header.Set("Content-Type", 
lfs.MediaType) resp := session.MakeRequest(t, req, http.StatusOK) var lfsLockRep api.LFSLockResponse @@ -172,7 +172,7 @@ func TestAPILFSLocksLogged(t *testing.T) { for _, test := range resultsTests { session := loginUser(t, test.user.Name) req := NewRequestf(t, "GET", "/%s.git/info/lfs/locks", test.repo.FullName()) - req.Header.Set("Accept", lfs.MediaType) + req.Header.Set("Accept", lfs.AcceptHeader) resp := session.MakeRequest(t, req, http.StatusOK) var lfsLocks api.LFSLockList DecodeJSON(t, resp, &lfsLocks) diff --git a/tests/integration/api_repo_lfs_test.go b/tests/integration/api_repo_lfs_test.go index 763082b178..cea185a3d5 100644 --- a/tests/integration/api_repo_lfs_test.go +++ b/tests/integration/api_repo_lfs_test.go @@ -85,7 +85,7 @@ func TestAPILFSBatch(t *testing.T) { newRequest := func(t testing.TB, br *lfs.BatchRequest) *RequestWrapper { return NewRequestWithJSON(t, "POST", "/user2/lfs-batch-repo.git/info/lfs/objects/batch", br). - SetHeader("Accept", lfs.MediaType). + SetHeader("Accept", lfs.AcceptHeader). SetHeader("Content-Type", lfs.MediaType) } decodeResponse := func(t *testing.T, b *bytes.Buffer) *lfs.BatchResponse { @@ -448,7 +448,7 @@ func TestAPILFSVerify(t *testing.T) { newRequest := func(t testing.TB, p *lfs.Pointer) *RequestWrapper { return NewRequestWithJSON(t, "POST", "/user2/lfs-verify-repo.git/info/lfs/verify", p). - SetHeader("Accept", lfs.MediaType). + SetHeader("Accept", lfs.AcceptHeader). SetHeader("Content-Type", lfs.MediaType) } From 053eb65a0b7e8d8dff465061602011a8c4fce17d Mon Sep 17 00:00:00 2001 From: Earl Warren Date: Sun, 16 Jun 2024 10:52:14 +0200 Subject: [PATCH 18/18] docs(release-notes): week 2024-25 cherry pick Refs: https://codeberg.org/forgejo/forgejo/pulls/4145 --- release-notes/8.0.0/feat/4145.md | 3 +++ release-notes/8.0.0/fix/4145.md | 2 ++ release-notes/8.0.0/perf/4145.md | 1 + 3 files changed, 6 insertions(+) create mode 100644 release-notes/8.0.0/feat/4145.md create mode 100644 release-notes/8.0.0/fix/4145.md create mode 100644 release-notes/8.0.0/perf/4145.md diff --git a/release-notes/8.0.0/feat/4145.md b/release-notes/8.0.0/feat/4145.md new file mode 100644 index 0000000000..872e5d4683 --- /dev/null +++ b/release-notes/8.0.0/feat/4145.md @@ -0,0 +1,3 @@ +- [commit](https://codeberg.org/forgejo/forgejo/commit/b60e3ac7b4aeeb9b8760f43eea9576c0e23309e9) allow downloading draft releases assets. 
+- [commit](https://codeberg.org/forgejo/forgejo/commit/1fca15529ac8fefb60d86b0c1f4bec8dae9a8566) API endpoints for managing tag protection +- [commit](https://codeberg.org/forgejo/forgejo/commit/4334c705b5f9388b16af23c7e75a69d027d07d5e) extract and display readme and comments for Composer packages diff --git a/release-notes/8.0.0/fix/4145.md b/release-notes/8.0.0/fix/4145.md new file mode 100644 index 0000000000..c993fcc517 --- /dev/null +++ b/release-notes/8.0.0/fix/4145.md @@ -0,0 +1,2 @@ +- [commit](https://codeberg.org/forgejo/forgejo/commit/364922c6e4f28264add9e2501a352c25ad6a0993) when a repository is adopted, its object format is not set in the database +- [commit](https://codeberg.org/forgejo/forgejo/commit/e7f332a55d6a48a3f3b4f2bfa43d18455ac00acc) during a migration from bitbucket, LFS downloads fail diff --git a/release-notes/8.0.0/perf/4145.md b/release-notes/8.0.0/perf/4145.md new file mode 100644 index 0000000000..bb59fd1dc6 --- /dev/null +++ b/release-notes/8.0.0/perf/4145.md @@ -0,0 +1 @@ +- [commit](https://codeberg.org/forgejo/forgejo/commit/358cd67c4f316f2d4f1d3be6dcb891dc04a2ff07) reduce memory usage for chunked artifact uploads to S3
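For reference, a minimal sketch of how the tag protection endpoints introduced in [PATCH 12/18] could be exercised once this series is applied. The instance URL, token, owner and repository below are placeholders, the request body follows the CreateTagProtectionOption definition added to the swagger spec above, and per the route registration in routers/api/v1/api.go the token must belong to a user with admin access to the repository.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// createTagProtectionOption mirrors the CreateTagProtectionOption body defined
// in the swagger spec added by this series.
type createTagProtectionOption struct {
	NamePattern        string   `json:"name_pattern"`
	WhitelistUsernames []string `json:"whitelist_usernames"`
	WhitelistTeams     []string `json:"whitelist_teams"`
}

func main() {
	// Placeholder values: adjust the instance URL, token, owner and repository.
	const (
		base  = "https://forgejo.example.com/api/v1"
		token = "REPLACE_WITH_API_TOKEN" // must have admin rights on the repository
		owner = "some-owner"
		repo  = "some-repo"
	)

	// Either whitelist_usernames or whitelist_teams must be non-empty,
	// matching the validation in CreateTagProtection.
	body, _ := json.Marshal(createTagProtectionOption{
		NamePattern:        "v*",
		WhitelistUsernames: []string{"release-manager"},
	})

	req, err := http.NewRequest(http.MethodPost,
		fmt.Sprintf("%s/repos/%s/%s/tag_protections", base, owner, repo),
		bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "token "+token)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// On success the server responds 201 Created with a TagProtection JSON body.
	fmt.Println("status:", resp.Status)
}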