diff --git a/cmd/convert.go b/cmd/convert.go index 30e7d01e11a9..d9b89495c1bf 100644 --- a/cmd/convert.go +++ b/cmd/convert.go @@ -35,7 +35,7 @@ func runConvert(ctx *cli.Context) error { log.Info("Log path: %s", setting.Log.RootPath) log.Info("Configuration file: %s", setting.CustomConf) - if !setting.Database.UseMySQL { + if !setting.Database.Type.IsMySQL() { fmt.Println("This command can only be used with a MySQL database") return nil } diff --git a/cmd/dump.go b/cmd/dump.go index 600ec4f32eb0..c802849f8e50 100644 --- a/cmd/dump.go +++ b/cmd/dump.go @@ -279,7 +279,7 @@ func runDump(ctx *cli.Context) error { }() targetDBType := ctx.String("database") - if len(targetDBType) > 0 && targetDBType != setting.Database.Type { + if len(targetDBType) > 0 && targetDBType != setting.Database.Type.String() { log.Info("Dumping database %s => %s...", setting.Database.Type, targetDBType) } else { log.Info("Dumping database...") diff --git a/models/activities/action.go b/models/activities/action.go index 1412d2c051d6..c1d17517baf0 100644 --- a/models/activities/action.go +++ b/models/activities/action.go @@ -99,7 +99,7 @@ func (a *Action) TableIndices() []*schemas.Index { actUserIndex.AddColumn("act_user_id", "repo_id", "created_unix", "user_id", "is_deleted") indices := []*schemas.Index{actUserIndex, repoIndex} - if setting.Database.UsePostgreSQL { + if setting.Database.Type.IsPostgreSQL() { cudIndex := schemas.NewIndex("c_u_d", schemas.IndexType) cudIndex.AddColumn("created_unix", "user_id", "is_deleted") indices = append(indices, cudIndex) @@ -640,7 +640,7 @@ func DeleteIssueActions(ctx context.Context, repoID, issueID int64) error { // CountActionCreatedUnixString count actions where created_unix is an empty string func CountActionCreatedUnixString(ctx context.Context) (int64, error) { - if setting.Database.UseSQLite3 { + if setting.Database.Type.IsSQLite3() { return db.GetEngine(ctx).Where(`created_unix = ""`).Count(new(Action)) } return 0, nil @@ -648,7 +648,7 @@ func CountActionCreatedUnixString(ctx context.Context) (int64, error) { // FixActionCreatedUnixString set created_unix to zero if it is an empty string func FixActionCreatedUnixString(ctx context.Context) (int64, error) { - if setting.Database.UseSQLite3 { + if setting.Database.Type.IsSQLite3() { res, err := db.GetEngine(ctx).Exec(`UPDATE action SET created_unix = 0 WHERE created_unix = ""`) if err != nil { return 0, err diff --git a/models/activities/action_test.go b/models/activities/action_test.go index 2fd86bb8f6e7..7044bcc004ab 100644 --- a/models/activities/action_test.go +++ b/models/activities/action_test.go @@ -243,7 +243,7 @@ func TestGetFeedsCorrupted(t *testing.T) { } func TestConsistencyUpdateAction(t *testing.T) { - if !setting.Database.UseSQLite3 { + if !setting.Database.Type.IsSQLite3() { t.Skip("Test is only for SQLite database.") } assert.NoError(t, unittest.PrepareTestDatabase()) diff --git a/models/activities/user_heatmap.go b/models/activities/user_heatmap.go index 3370103a556f..d3f0f0db73b2 100644 --- a/models/activities/user_heatmap.go +++ b/models/activities/user_heatmap.go @@ -39,9 +39,9 @@ func getUserHeatmapData(user *user_model.User, team *organization.Team, doer *us groupBy := "created_unix / 900 * 900" groupByName := "timestamp" // We need this extra case because mssql doesn't allow grouping by alias switch { - case setting.Database.UseMySQL: + case setting.Database.Type.IsMySQL(): groupBy = "created_unix DIV 900 * 900" - case setting.Database.UseMSSQL: + case setting.Database.Type.IsMSSQL(): groupByName = 
groupBy } diff --git a/models/db/common.go b/models/db/common.go index 76c7c119f413..af6130c9f255 100644 --- a/models/db/common.go +++ b/models/db/common.go @@ -15,7 +15,7 @@ import ( // BuildCaseInsensitiveLike returns a condition to check if the given value is like the given key case-insensitively. // Handles especially SQLite correctly as UPPER there only transforms ASCII letters. func BuildCaseInsensitiveLike(key, value string) builder.Cond { - if setting.Database.UseSQLite3 { + if setting.Database.Type.IsSQLite3() { return builder.Like{"UPPER(" + key + ")", util.ToUpperASCII(value)} } return builder.Like{"UPPER(" + key + ")", strings.ToUpper(value)} diff --git a/models/db/engine.go b/models/db/engine.go index 5020101d490a..56dd209fd78f 100755 --- a/models/db/engine.go +++ b/models/db/engine.go @@ -101,12 +101,12 @@ func newXORMEngine() (*xorm.Engine, error) { var engine *xorm.Engine - if setting.Database.UsePostgreSQL && len(setting.Database.Schema) > 0 { + if setting.Database.Type.IsPostgreSQL() && len(setting.Database.Schema) > 0 { // OK whilst we sort out our schema issues - create a schema aware postgres registerPostgresSchemaDriver() engine, err = xorm.NewEngine("postgresschema", connStr) } else { - engine, err = xorm.NewEngine(setting.Database.Type, connStr) + engine, err = xorm.NewEngine(setting.Database.Type.String(), connStr) } if err != nil { diff --git a/models/db/index.go b/models/db/index.go index f840a62c8913..7609d8fb6e6c 100644 --- a/models/db/index.go +++ b/models/db/index.go @@ -73,7 +73,7 @@ func postgresGetNextResourceIndex(ctx context.Context, tableName string, groupID // GetNextResourceIndex generates a resource index, it must run in the same transaction where the resource is created func GetNextResourceIndex(ctx context.Context, tableName string, groupID int64) (int64, error) { - if setting.Database.UsePostgreSQL { + if setting.Database.Type.IsPostgreSQL() { return postgresGetNextResourceIndex(ctx, tableName, groupID) } diff --git a/models/db/sequence.go b/models/db/sequence.go index 6d801d022fbc..f49ad935de09 100644 --- a/models/db/sequence.go +++ b/models/db/sequence.go @@ -13,7 +13,7 @@ import ( // CountBadSequences looks for broken sequences from recreate-table mistakes func CountBadSequences(_ context.Context) (int64, error) { - if !setting.Database.UsePostgreSQL { + if !setting.Database.Type.IsPostgreSQL() { return 0, nil } @@ -34,7 +34,7 @@ func CountBadSequences(_ context.Context) (int64, error) { // FixBadSequences fixes for broken sequences from recreate-table mistakes func FixBadSequences(_ context.Context) error { - if !setting.Database.UsePostgreSQL { + if !setting.Database.Type.IsPostgreSQL() { return nil } diff --git a/models/git/commit_status.go b/models/git/commit_status.go index 489507f71050..82cbb2363739 100644 --- a/models/git/commit_status.go +++ b/models/git/commit_status.go @@ -65,7 +65,7 @@ func postgresGetCommitStatusIndex(ctx context.Context, repoID int64, sha string) // GetNextCommitStatusIndex retried 3 times to generate a resource index func GetNextCommitStatusIndex(ctx context.Context, repoID int64, sha string) (int64, error) { - if setting.Database.UsePostgreSQL { + if setting.Database.Type.IsPostgreSQL() { return postgresGetCommitStatusIndex(ctx, repoID, sha) } diff --git a/models/migrations/base/db.go b/models/migrations/base/db.go index dcf99c96ae81..b038ad73372e 100644 --- a/models/migrations/base/db.go +++ b/models/migrations/base/db.go @@ -89,7 +89,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error { hasID = 
hasID || (column.IsPrimaryKey && column.IsAutoIncrement) } - if hasID && setting.Database.UseMSSQL { + if hasID && setting.Database.Type.IsMSSQL() { if _, err := sess.Exec(fmt.Sprintf("SET IDENTITY_INSERT `%s` ON", tempTableName)); err != nil { log.Error("Unable to set identity insert for table %s. Error: %v", tempTableName, err) return err @@ -143,7 +143,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error { return err } - if hasID && setting.Database.UseMSSQL { + if hasID && setting.Database.Type.IsMSSQL() { if _, err := sess.Exec(fmt.Sprintf("SET IDENTITY_INSERT `%s` OFF", tempTableName)); err != nil { log.Error("Unable to switch off identity insert for table %s. Error: %v", tempTableName, err) return err @@ -151,7 +151,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error { } switch { - case setting.Database.UseSQLite3: + case setting.Database.Type.IsSQLite3(): // SQLite will drop all the constraints on the old table if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil { log.Error("Unable to drop old table %s. Error: %v", tableName, err) @@ -178,7 +178,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error { return err } - case setting.Database.UseMySQL: + case setting.Database.Type.IsMySQL(): // MySQL will drop all the constraints on the old table if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil { log.Error("Unable to drop old table %s. Error: %v", tableName, err) @@ -205,7 +205,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error { log.Error("Unable to recreate uniques on table %s. Error: %v", tableName, err) return err } - case setting.Database.UsePostgreSQL: + case setting.Database.Type.IsPostgreSQL(): var originalSequences []string type sequenceData struct { LastValue int `xorm:"'last_value'"` @@ -296,7 +296,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error { } - case setting.Database.UseMSSQL: + case setting.Database.Type.IsMSSQL(): // MSSQL will drop all the constraints on the old table if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil { log.Error("Unable to drop old table %s. 
Error: %v", tableName, err) @@ -323,7 +323,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin // TODO: This will not work if there are foreign keys switch { - case setting.Database.UseSQLite3: + case setting.Database.Type.IsSQLite3(): // First drop the indexes on the columns res, errIndex := sess.Query(fmt.Sprintf("PRAGMA index_list(`%s`)", tableName)) if errIndex != nil { @@ -405,7 +405,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin return err } - case setting.Database.UsePostgreSQL: + case setting.Database.Type.IsPostgreSQL(): cols := "" for _, col := range columnNames { if cols != "" { @@ -416,7 +416,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `%s` %s", tableName, cols)); err != nil { return fmt.Errorf("Drop table `%s` columns %v: %v", tableName, columnNames, err) } - case setting.Database.UseMySQL: + case setting.Database.Type.IsMySQL(): // Drop indexes on columns first sql := fmt.Sprintf("SHOW INDEX FROM %s WHERE column_name IN ('%s')", tableName, strings.Join(columnNames, "','")) res, err := sess.Query(sql) @@ -444,7 +444,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `%s` %s", tableName, cols)); err != nil { return fmt.Errorf("Drop table `%s` columns %v: %v", tableName, columnNames, err) } - case setting.Database.UseMSSQL: + case setting.Database.Type.IsMSSQL(): cols := "" for _, col := range columnNames { if cols != "" { @@ -543,13 +543,13 @@ func newXORMEngine() (*xorm.Engine, error) { func deleteDB() error { switch { - case setting.Database.UseSQLite3: + case setting.Database.Type.IsSQLite3(): if err := util.Remove(setting.Database.Path); err != nil { return err } return os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm) - case setting.Database.UseMySQL: + case setting.Database.Type.IsMySQL(): db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/", setting.Database.User, setting.Database.Passwd, setting.Database.Host)) if err != nil { @@ -565,7 +565,7 @@ func deleteDB() error { return err } return nil - case setting.Database.UsePostgreSQL: + case setting.Database.Type.IsPostgreSQL(): db, err := sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/?sslmode=%s", setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.SSLMode)) if err != nil { @@ -612,7 +612,7 @@ func deleteDB() error { } return nil } - case setting.Database.UseMSSQL: + case setting.Database.Type.IsMSSQL(): host, port := setting.ParseMSSQLHostPort(setting.Database.Host) db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;", host, port, "master", setting.Database.User, setting.Database.Passwd)) diff --git a/models/migrations/v1_12/v139.go b/models/migrations/v1_12/v139.go index 725b8fa30597..279aa7df87dc 100644 --- a/models/migrations/v1_12/v139.go +++ b/models/migrations/v1_12/v139.go @@ -13,9 +13,9 @@ func PrependRefsHeadsToIssueRefs(x *xorm.Engine) error { var query string switch { - case setting.Database.UseMSSQL: + case setting.Database.Type.IsMSSQL(): query = "UPDATE `issue` SET `ref` = 'refs/heads/' + `ref` WHERE `ref` IS NOT NULL AND `ref` <> '' AND `ref` NOT LIKE 'refs/%'" - case setting.Database.UseMySQL: + case setting.Database.Type.IsMySQL(): query = "UPDATE `issue` SET `ref` = CONCAT('refs/heads/', `ref`) WHERE `ref` IS NOT NULL AND `ref` <> '' AND `ref` NOT LIKE 
'refs/%';" default: query = "UPDATE `issue` SET `ref` = 'refs/heads/' || `ref` WHERE `ref` IS NOT NULL AND `ref` <> '' AND `ref` NOT LIKE 'refs/%'" diff --git a/models/migrations/v1_13/v140.go b/models/migrations/v1_13/v140.go index 3de9eaaf7cda..4d87b955fdff 100644 --- a/models/migrations/v1_13/v140.go +++ b/models/migrations/v1_13/v140.go @@ -41,7 +41,7 @@ func FixLanguageStatsToSaveSize(x *xorm.Engine) error { // Delete language stat statuses truncExpr := "TRUNCATE TABLE" - if setting.Database.UseSQLite3 { + if setting.Database.Type.IsSQLite3() { truncExpr = "DELETE FROM" } diff --git a/models/migrations/v1_13/v145.go b/models/migrations/v1_13/v145.go index c96e79f8a0ee..ee40bfc77f86 100644 --- a/models/migrations/v1_13/v145.go +++ b/models/migrations/v1_13/v145.go @@ -21,7 +21,7 @@ func IncreaseLanguageField(x *xorm.Engine) error { return err } - if setting.Database.UseSQLite3 { + if setting.Database.Type.IsSQLite3() { // SQLite maps VARCHAR to TEXT without size so we're done return nil } @@ -41,11 +41,11 @@ func IncreaseLanguageField(x *xorm.Engine) error { } switch { - case setting.Database.UseMySQL: + case setting.Database.Type.IsMySQL(): if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat MODIFY COLUMN language %s", sqlType)); err != nil { return err } - case setting.Database.UseMSSQL: + case setting.Database.Type.IsMSSQL(): // Yet again MSSQL just has to be awkward. // Here we have to drop the constraints first and then rebuild them constraints := make([]string, 0) @@ -71,7 +71,7 @@ func IncreaseLanguageField(x *xorm.Engine) error { if err := sess.CreateUniques(new(LanguageStat)); err != nil { return err } - case setting.Database.UsePostgreSQL: + case setting.Database.Type.IsPostgreSQL(): if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat ALTER COLUMN language TYPE %s", sqlType)); err != nil { return err } diff --git a/models/migrations/v1_13/v151.go b/models/migrations/v1_13/v151.go index 9490c1778c6a..9aa71ec29f13 100644 --- a/models/migrations/v1_13/v151.go +++ b/models/migrations/v1_13/v151.go @@ -17,13 +17,13 @@ import ( func SetDefaultPasswordToArgon2(x *xorm.Engine) error { switch { - case setting.Database.UseMySQL: + case setting.Database.Type.IsMySQL(): _, err := x.Exec("ALTER TABLE `user` ALTER passwd_hash_algo SET DEFAULT 'argon2';") return err - case setting.Database.UsePostgreSQL: + case setting.Database.Type.IsPostgreSQL(): _, err := x.Exec("ALTER TABLE `user` ALTER COLUMN passwd_hash_algo SET DEFAULT 'argon2';") return err - case setting.Database.UseMSSQL: + case setting.Database.Type.IsMSSQL(): // need to find the constraint and drop it, then recreate it. 
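For context, these migration hunks all follow the same pattern: branch on the new setting.Database.Type helpers (IsMySQL, IsPostgreSQL, IsMSSQL, IsSQLite3) instead of the removed UseMySQL/UsePostgreSQL/UseMSSQL/UseSQLite3 booleans. A minimal sketch of that pattern, assuming a hypothetical renameColumn helper and migration package that are not part of this patch:

// Illustration only: pick a dialect-specific statement via the DatabaseType
// helpers introduced in modules/setting/database.go. The package name and the
// helper itself are hypothetical.
package v1_20

import (
	"fmt"

	"code.gitea.io/gitea/modules/setting"

	"xorm.io/xorm"
)

func renameColumn(x *xorm.Engine, table, oldName, newName, sqlType string) error {
	switch {
	case setting.Database.Type.IsMySQL():
		_, err := x.Exec(fmt.Sprintf("ALTER TABLE `%s` CHANGE %s %s %s", table, oldName, newName, sqlType))
		return err
	case setting.Database.Type.IsPostgreSQL(), setting.Database.Type.IsSQLite3():
		_, err := x.Exec(fmt.Sprintf("ALTER TABLE `%s` RENAME COLUMN %s TO %s", table, oldName, newName))
		return err
	case setting.Database.Type.IsMSSQL():
		_, err := x.Exec(fmt.Sprintf("sp_rename '%s.%s', '%s', 'COLUMN'", table, oldName, newName))
		return err
	default:
		return fmt.Errorf("unsupported database type: %s", setting.Database.Type)
	}
}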
sess := x.NewSession() defer sess.Close() @@ -53,7 +53,7 @@ func SetDefaultPasswordToArgon2(x *xorm.Engine) error { } return sess.Commit() - case setting.Database.UseSQLite3: + case setting.Database.Type.IsSQLite3(): // drop through default: log.Fatal("Unrecognized DB") diff --git a/models/migrations/v1_14/v158.go b/models/migrations/v1_14/v158.go index 7ea80a659ed2..2029829ff990 100644 --- a/models/migrations/v1_14/v158.go +++ b/models/migrations/v1_14/v158.go @@ -62,7 +62,7 @@ func UpdateCodeCommentReplies(x *xorm.Engine) error { return err } - if setting.Database.UseMSSQL { + if setting.Database.Type.IsMSSQL() { if _, err := sess.Exec(sqlSelect + " INTO #temp_comments" + sqlTail); err != nil { log.Error("unable to create temporary table") return err @@ -72,13 +72,13 @@ func UpdateCodeCommentReplies(x *xorm.Engine) error { comments := make([]*Comment, 0, batchSize) switch { - case setting.Database.UseMySQL: + case setting.Database.Type.IsMySQL(): sqlCmd = sqlSelect + sqlTail + " LIMIT " + strconv.Itoa(batchSize) + ", " + strconv.Itoa(start) - case setting.Database.UsePostgreSQL: + case setting.Database.Type.IsPostgreSQL(): fallthrough - case setting.Database.UseSQLite3: + case setting.Database.Type.IsSQLite3(): sqlCmd = sqlSelect + sqlTail + " LIMIT " + strconv.Itoa(batchSize) + " OFFSET " + strconv.Itoa(start) - case setting.Database.UseMSSQL: + case setting.Database.Type.IsMSSQL(): sqlCmd = "SELECT TOP " + strconv.Itoa(batchSize) + " * FROM #temp_comments WHERE " + "(id NOT IN ( SELECT TOP " + strconv.Itoa(start) + " id FROM #temp_comments ORDER BY id )) ORDER BY id" default: diff --git a/models/migrations/v1_14/v175.go b/models/migrations/v1_14/v175.go index f1b9b974c6aa..70d72b260033 100644 --- a/models/migrations/v1_14/v175.go +++ b/models/migrations/v1_14/v175.go @@ -14,7 +14,7 @@ import ( ) func FixPostgresIDSequences(x *xorm.Engine) error { - if !setting.Database.UsePostgreSQL { + if !setting.Database.Type.IsPostgreSQL() { return nil } diff --git a/models/migrations/v1_15/v184.go b/models/migrations/v1_15/v184.go index 48f8b62165cc..caf41b6048ed 100644 --- a/models/migrations/v1_15/v184.go +++ b/models/migrations/v1_15/v184.go @@ -54,11 +54,11 @@ func RenameTaskErrorsToMessage(x *xorm.Engine) error { } switch { - case setting.Database.UseMySQL: + case setting.Database.Type.IsMySQL(): if _, err := sess.Exec("ALTER TABLE `task` CHANGE errors message text"); err != nil { return err } - case setting.Database.UseMSSQL: + case setting.Database.Type.IsMSSQL(): if _, err := sess.Exec("sp_rename 'task.errors', 'message', 'COLUMN'"); err != nil { return err } diff --git a/models/migrations/v1_16/v191.go b/models/migrations/v1_16/v191.go index 2d2c3d1a587a..c618783c08e8 100644 --- a/models/migrations/v1_16/v191.go +++ b/models/migrations/v1_16/v191.go @@ -16,7 +16,7 @@ func AlterIssueAndCommentTextFieldsToLongText(x *xorm.Engine) error { return err } - if setting.Database.UseMySQL { + if setting.Database.Type.IsMySQL() { if _, err := sess.Exec("ALTER TABLE `issue` CHANGE `content` `content` LONGTEXT"); err != nil { return err } diff --git a/models/migrations/v1_17/v217.go b/models/migrations/v1_17/v217.go index 3ca9215f0937..3f970b68a540 100644 --- a/models/migrations/v1_17/v217.go +++ b/models/migrations/v1_17/v217.go @@ -16,7 +16,7 @@ func AlterHookTaskTextFieldsToLongText(x *xorm.Engine) error { return err } - if setting.Database.UseMySQL { + if setting.Database.Type.IsMySQL() { if _, err := sess.Exec("ALTER TABLE `hook_task` CHANGE `payload_content` `payload_content` LONGTEXT, CHANGE 
`request_content` `request_content` LONGTEXT, change `response_content` `response_content` LONGTEXT"); err != nil { return err } diff --git a/models/migrations/v1_17/v218.go b/models/migrations/v1_17/v218.go index 675fd1df94fb..ae91ba0c494b 100644 --- a/models/migrations/v1_17/v218.go +++ b/models/migrations/v1_17/v218.go @@ -38,7 +38,7 @@ func (*improveActionTableIndicesAction) TableIndices() []*schemas.Index { actUserIndex := schemas.NewIndex("au_r_c_u_d", schemas.IndexType) actUserIndex.AddColumn("act_user_id", "repo_id", "created_unix", "user_id", "is_deleted") indices := []*schemas.Index{actUserIndex, repoIndex} - if setting.Database.UsePostgreSQL { + if setting.Database.Type.IsPostgreSQL() { cudIndex := schemas.NewIndex("c_u_d", schemas.IndexType) cudIndex.AddColumn("created_unix", "user_id", "is_deleted") indices = append(indices, cudIndex) diff --git a/models/migrations/v1_17/v223.go b/models/migrations/v1_17/v223.go index a23d9916b383..6c61dbc53ae4 100644 --- a/models/migrations/v1_17/v223.go +++ b/models/migrations/v1_17/v223.go @@ -65,11 +65,11 @@ func RenameCredentialIDBytes(x *xorm.Engine) error { } switch { - case setting.Database.UseMySQL: + case setting.Database.Type.IsMySQL(): if _, err := sess.Exec("ALTER TABLE `webauthn_credential` CHANGE credential_id_bytes credential_id VARBINARY(1024)"); err != nil { return err } - case setting.Database.UseMSSQL: + case setting.Database.Type.IsMSSQL(): if _, err := sess.Exec("sp_rename 'webauthn_credential.credential_id_bytes', 'credential_id', 'COLUMN'"); err != nil { return err } diff --git a/models/migrations/v1_18/v225.go b/models/migrations/v1_18/v225.go index 786772c143bd..b0ac3777fc24 100644 --- a/models/migrations/v1_18/v225.go +++ b/models/migrations/v1_18/v225.go @@ -16,7 +16,7 @@ func AlterPublicGPGKeyContentFieldsToMediumText(x *xorm.Engine) error { return err } - if setting.Database.UseMySQL { + if setting.Database.Type.IsMySQL() { if _, err := sess.Exec("ALTER TABLE `gpg_key` CHANGE `content` `content` MEDIUMTEXT"); err != nil { return err } diff --git a/models/migrations/v1_19/v232.go b/models/migrations/v1_19/v232.go index 89b595c54375..9caf587c1e9c 100644 --- a/models/migrations/v1_19/v232.go +++ b/models/migrations/v1_19/v232.go @@ -16,7 +16,7 @@ func AlterPackageVersionMetadataToLongText(x *xorm.Engine) error { return err } - if setting.Database.UseMySQL { + if setting.Database.Type.IsMySQL() { if _, err := sess.Exec("ALTER TABLE `package_version` MODIFY COLUMN `metadata_json` LONGTEXT"); err != nil { return err } diff --git a/models/migrations/v1_19/v242.go b/models/migrations/v1_19/v242.go index 517c7767b874..4470835214f3 100644 --- a/models/migrations/v1_19/v242.go +++ b/models/migrations/v1_19/v242.go @@ -17,7 +17,7 @@ func AlterPublicGPGKeyImportContentFieldToMediumText(x *xorm.Engine) error { return err } - if setting.Database.UseMySQL { + if setting.Database.Type.IsMySQL() { if _, err := sess.Exec("ALTER TABLE `gpg_key_import` CHANGE `content` `content` MEDIUMTEXT"); err != nil { return err } diff --git a/models/project/project.go b/models/project/project.go index 46b5c07c424f..f3ed723030cd 100644 --- a/models/project/project.go +++ b/models/project/project.go @@ -409,7 +409,7 @@ func DeleteProjectByID(ctx context.Context, id int64) error { func DeleteProjectByRepoID(ctx context.Context, repoID int64) error { switch { - case setting.Database.UseSQLite3: + case setting.Database.Type.IsSQLite3(): if _, err := db.GetEngine(ctx).Exec("DELETE FROM project_issue WHERE project_issue.id IN (SELECT project_issue.id 
FROM project_issue INNER JOIN project WHERE project.id = project_issue.project_id AND project.repo_id = ?)", repoID); err != nil { return err } @@ -419,7 +419,7 @@ func DeleteProjectByRepoID(ctx context.Context, repoID int64) error { if _, err := db.GetEngine(ctx).Table("project").Where("repo_id = ? ", repoID).Delete(&Project{}); err != nil { return err } - case setting.Database.UsePostgreSQL: + case setting.Database.Type.IsPostgreSQL(): if _, err := db.GetEngine(ctx).Exec("DELETE FROM project_issue USING project WHERE project.id = project_issue.project_id AND project.repo_id = ? ", repoID); err != nil { return err } diff --git a/models/repo/repo_list.go b/models/repo/repo_list.go index d64368daa6b2..d9cd905a1936 100644 --- a/models/repo/repo_list.go +++ b/models/repo/repo_list.go @@ -498,7 +498,7 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond { subQueryCond := builder.NewCond() // Topic checking. Topics are present. - if setting.Database.UsePostgreSQL { // postgres stores the topics as json and not as text + if setting.Database.Type.IsPostgreSQL() { // postgres stores the topics as json and not as text subQueryCond = subQueryCond.Or(builder.And(builder.NotNull{"topics"}, builder.Neq{"(topics)::text": "[]"})) } else { subQueryCond = subQueryCond.Or(builder.And(builder.Neq{"topics": "null"}, builder.Neq{"topics": "[]"})) diff --git a/models/unittest/testdb.go b/models/unittest/testdb.go index 2bc41084ba87..cff1489a7c7b 100644 --- a/models/unittest/testdb.go +++ b/models/unittest/testdb.go @@ -76,7 +76,7 @@ func MainTest(m *testing.M, testOpts *TestOptions) { setting.SSH.BuiltinServerUser = "builtinuser" setting.SSH.Port = 3000 setting.SSH.Domain = "try.gitea.io" - setting.Database.UseSQLite3 = true + setting.Database.Type = "sqlite3" setting.Repository.DefaultBranch = "master" // many test code still assume that default branch is called "master" repoRootPath, err := os.MkdirTemp(os.TempDir(), "repos") if err != nil { diff --git a/modules/doctor/dbconsistency.go b/modules/doctor/dbconsistency.go index bb560ac6a350..541fc736f44b 100644 --- a/modules/doctor/dbconsistency.go +++ b/modules/doctor/dbconsistency.go @@ -155,7 +155,7 @@ func checkDBConsistency(ctx context.Context, logger log.Logger, autofix bool) er // TODO: function to recalc all counters - if setting.Database.UsePostgreSQL { + if setting.Database.Type.IsPostgreSQL() { consistencyChecks = append(consistencyChecks, consistencyCheck{ Name: "Sequence values", Counter: db.CountBadSequences, diff --git a/modules/git/pipeline/revlist.go b/modules/git/pipeline/revlist.go index 09bb2c8b3c84..d88ebe78ef20 100644 --- a/modules/git/pipeline/revlist.go +++ b/modules/git/pipeline/revlist.go @@ -42,7 +42,10 @@ func RevListObjects(ctx context.Context, revListWriter *io.PipeWriter, wg *sync. 
defer revListWriter.Close() stderr := new(bytes.Buffer) var errbuf strings.Builder - cmd := git.NewCommand(ctx, "rev-list", "--objects").AddDynamicArguments(headSHA).AddArguments("--not").AddDynamicArguments(baseSHA) + cmd := git.NewCommand(ctx, "rev-list", "--objects").AddDynamicArguments(headSHA) + if baseSHA != "" { + cmd = cmd.AddArguments("--not").AddDynamicArguments(baseSHA) + } if err := cmd.Run(&git.RunOpts{ Dir: tmpBasePath, Stdout: revListWriter, diff --git a/modules/setting/database.go b/modules/setting/database.go index 49865a38a284..d7a5078fe914 100644 --- a/modules/setting/database.go +++ b/modules/setting/database.go @@ -27,7 +27,7 @@ var ( // Database holds the database settings Database = struct { - Type string + Type DatabaseType Host string Name string User string @@ -39,10 +39,6 @@ var ( Charset string Timeout int // seconds SQLiteJournalMode string - UseSQLite3 bool - UseMySQL bool - UseMSSQL bool - UsePostgreSQL bool DBConnectRetries int DBConnectBackoff time.Duration MaxIdleConns int @@ -59,24 +55,13 @@ var ( // LoadDBSetting loads the database settings func LoadDBSetting() { sec := CfgProvider.Section("database") - Database.Type = sec.Key("DB_TYPE").String() + Database.Type = DatabaseType(sec.Key("DB_TYPE").String()) defaultCharset := "utf8" - Database.UseMySQL = false - Database.UseSQLite3 = false - Database.UsePostgreSQL = false - Database.UseMSSQL = false - switch Database.Type { - case "sqlite3": - Database.UseSQLite3 = true - case "mysql": - Database.UseMySQL = true + if Database.Type.IsMySQL() { defaultCharset = "utf8mb4" - case "postgres": - Database.UsePostgreSQL = true - case "mssql": - Database.UseMSSQL = true } + Database.Host = sec.Key("HOST").String() Database.Name = sec.Key("NAME").String() Database.User = sec.Key("USER").String() @@ -86,7 +71,7 @@ func LoadDBSetting() { Database.Schema = sec.Key("SCHEMA").String() Database.SSLMode = sec.Key("SSL_MODE").MustString("disable") Database.Charset = sec.Key("CHARSET").In(defaultCharset, []string{"utf8", "utf8mb4"}) - if Database.UseMySQL && defaultCharset != "utf8mb4" { + if Database.Type.IsMySQL() && defaultCharset != "utf8mb4" { log.Error("Deprecated database mysql charset utf8 support, please use utf8mb4 or convert utf8 to utf8mb4.") } @@ -95,7 +80,7 @@ func LoadDBSetting() { Database.SQLiteJournalMode = sec.Key("SQLITE_JOURNAL_MODE").MustString("") Database.MaxIdleConns = sec.Key("MAX_IDLE_CONNS").MustInt(2) - if Database.UseMySQL { + if Database.Type.IsMySQL() { Database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFETIME").MustDuration(3 * time.Second) } else { Database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFETIME").MustDuration(0) @@ -207,3 +192,25 @@ func ParseMSSQLHostPort(info string) (string, string) { } return host, port } + +type DatabaseType string + +func (t DatabaseType) String() string { + return string(t) +} + +func (t DatabaseType) IsSQLite3() bool { + return t == "sqlite3" +} + +func (t DatabaseType) IsMySQL() bool { + return t == "mysql" +} + +func (t DatabaseType) IsMSSQL() bool { + return t == "mssql" +} + +func (t DatabaseType) IsPostgreSQL() bool { + return t == "postgres" +} diff --git a/modules/typesniffer/typesniffer.go b/modules/typesniffer/typesniffer.go index c9fef953ce70..5b215496b80c 100644 --- a/modules/typesniffer/typesniffer.go +++ b/modules/typesniffer/typesniffer.go @@ -4,6 +4,7 @@ package typesniffer import ( + "bytes" "fmt" "io" "net/http" @@ -24,8 +25,9 @@ const ( ) var ( - svgTagRegex = regexp.MustCompile(`(?si)\A\s*(?:(||>))\s*)*\/]`) - svgTagInXMLRegex = 
regexp.MustCompile(`(?si)\A<\?xml\b.*?\?>\s*(?:(||>))\s*)*\/]`) + svgComment = regexp.MustCompile(`(?s)`) + svgTagRegex = regexp.MustCompile(`(?si)\A\s*(?:(|>))\s*)*\s*(?:(|>))\s*)*")).IsSvgImage()) assert.True(t, DetectContentType([]byte(" ")).IsSvgImage()) assert.True(t, DetectContentType([]byte(``)).IsSvgImage()) - assert.True(t, DetectContentType([]byte("")).IsSvgImage()) assert.True(t, DetectContentType([]byte(``)).IsSvgImage()) assert.True(t, DetectContentType([]byte(` `)).IsSvgImage()) @@ -57,6 +56,10 @@ func TestIsSvgImage(t *testing.T) { `)).IsSvgImage()) + + // the DetectContentType should work for incomplete data, because only beginning bytes are used for detection + assert.True(t, DetectContentType([]byte(`....`)).IsSvgImage()) + assert.False(t, DetectContentType([]byte{}).IsSvgImage()) assert.False(t, DetectContentType([]byte("svg")).IsSvgImage()) assert.False(t, DetectContentType([]byte("")).IsSvgImage()) @@ -68,6 +71,26 @@ func TestIsSvgImage(t *testing.T) { assert.False(t, DetectContentType([]byte(` `)).IsSvgImage()) + + assert.False(t, DetectContentType([]byte(` + +
+ + +
+`)).IsSvgImage()) + + assert.False(t, DetectContentType([]byte(` + +
+ + +
+`)).IsSvgImage()) + assert.False(t, DetectContentType([]byte(``)).IsSvgImage()) + assert.False(t, DetectContentType([]byte(``)).IsSvgImage()) } func TestIsPDF(t *testing.T) { diff --git a/options/locale/locale_ja-JP.ini b/options/locale/locale_ja-JP.ini index 5ad66a510166..12d9423628bd 100644 --- a/options/locale/locale_ja-JP.ini +++ b/options/locale/locale_ja-JP.ini @@ -247,6 +247,7 @@ default_enable_timetracking_popup=新しいリポジトリのタイムトラッ no_reply_address=メールを隠すときのドメイン no_reply_address_helper=メールアドレスを隠しているユーザーに使用するドメイン名。 例えば 'noreply.example.org' と設定した場合、ユーザー名 'joe' はGitに 'joe@noreply.example.org' としてログインすることになります。 password_algorithm=パスワードハッシュアルゴリズム +invalid_password_algorithm=無効なパスワードハッシュアルゴリズム password_algorithm_helper=パスワードハッシュアルゴリズムを設定します。 アルゴリズムにより動作要件と強度が異なります。 `argon2`は良い特性を備えていますが、多くのメモリを使用するため小さなシステムには適さない場合があります。 enable_update_checker=アップデートチェッカーを有効にする enable_update_checker_helper=gitea.ioに接続して定期的に新しいバージョンのリリースを確認します。 diff --git a/routers/init.go b/routers/init.go index d3f822dc8345..8cf53fc108de 100644 --- a/routers/init.go +++ b/routers/init.go @@ -141,7 +141,7 @@ func GlobalInitInstalled(ctx context.Context) { if setting.EnableSQLite3 { log.Info("SQLite3 support is enabled") - } else if setting.Database.UseSQLite3 { + } else if setting.Database.Type.IsSQLite3() { log.Fatal("SQLite3 support is disabled, but it is used for database setting. Please get or build a Gitea release with SQLite3 support.") } diff --git a/routers/install/install.go b/routers/install/install.go index a377c2950ba3..8e2d19c73271 100644 --- a/routers/install/install.go +++ b/routers/install/install.go @@ -104,7 +104,7 @@ func Install(ctx *context.Context) { form.DbSchema = setting.Database.Schema form.Charset = setting.Database.Charset - curDBType := setting.Database.Type + curDBType := setting.Database.Type.String() var isCurDBTypeSupported bool for _, dbType := range setting.SupportedDatabaseTypes { if dbType == curDBType { @@ -272,7 +272,7 @@ func SubmitInstall(ctx *context.Context) { // ---- Basic checks are passed, now test configuration. // Test database setting. 
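Because Database.Type is now a DatabaseType rather than a plain string, values coming from the install form (or any other string source) need an explicit conversion, and ideally a check against the drivers this build supports. A rough sketch of that conversion, reusing the existing setting.SupportedDatabaseTypes slice; the parseDBType helper is hypothetical and not part of this patch:

// Illustration only: convert a user-supplied string into the new
// setting.DatabaseType and reject types this build does not support.
package install

import (
	"fmt"

	"code.gitea.io/gitea/modules/setting"
)

func parseDBType(raw string) (setting.DatabaseType, error) {
	t := setting.DatabaseType(raw)
	for _, supported := range setting.SupportedDatabaseTypes {
		if supported == t.String() {
			return t, nil
		}
	}
	return "", fmt.Errorf("unsupported database type: %q", raw)
}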
- setting.Database.Type = form.DbType + setting.Database.Type = setting.DatabaseType(form.DbType) setting.Database.Host = form.DbHost setting.Database.User = form.DbUser setting.Database.Passwd = form.DbPasswd @@ -392,7 +392,7 @@ func SubmitInstall(ctx *context.Context) { log.Error("Failed to load custom conf '%s': %v", setting.CustomConf, err) } } - cfg.Section("database").Key("DB_TYPE").SetValue(setting.Database.Type) + cfg.Section("database").Key("DB_TYPE").SetValue(setting.Database.Type.String()) cfg.Section("database").Key("HOST").SetValue(setting.Database.Host) cfg.Section("database").Key("NAME").SetValue(setting.Database.Name) cfg.Section("database").Key("USER").SetValue(setting.Database.User) diff --git a/routers/web/healthcheck/check.go b/routers/web/healthcheck/check.go index 1142a0aec5d5..e11dd2aca2e8 100644 --- a/routers/web/healthcheck/check.go +++ b/routers/web/healthcheck/check.go @@ -100,7 +100,7 @@ func checkDatabase(checks checks) status { st.Time = getCheckTime() } - if setting.Database.UseSQLite3 && st.Status == pass { + if setting.Database.Type.IsSQLite3() && st.Status == pass { if !setting.EnableSQLite3 { st.Status = fail st.Time = getCheckTime() diff --git a/services/pull/lfs.go b/services/pull/lfs.go index dc4ca006e491..39433724684e 100644 --- a/services/pull/lfs.go +++ b/services/pull/lfs.go @@ -116,7 +116,7 @@ func createLFSMetaObjectsFromCatFileBatch(catFileBatchReader *io.PipeReader, wg } // Then we need to check that this pointer is in the db - if _, err := git_model.GetLFSMetaObjectByOid(db.DefaultContext, pr.HeadRepo.ID, pointer.Oid); err != nil { + if _, err := git_model.GetLFSMetaObjectByOid(db.DefaultContext, pr.HeadRepoID, pointer.Oid); err != nil { if err == git_model.ErrLFSObjectNotExist { log.Warn("During merge of: %d in %-v, there is a pointer to LFS Oid: %s which although present in the LFS store is not associated with the head repo %-v", pr.Index, pr.BaseRepo, pointer.Oid, pr.HeadRepo) continue diff --git a/services/pull/merge.go b/services/pull/merge.go index ad428427cc18..afa924fc109a 100644 --- a/services/pull/merge.go +++ b/services/pull/merge.go @@ -5,8 +5,6 @@ package pull import ( - "bufio" - "bytes" "context" "fmt" "os" @@ -14,7 +12,6 @@ import ( "regexp" "strconv" "strings" - "time" "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" @@ -34,22 +31,17 @@ import ( repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" - asymkey_service "code.gitea.io/gitea/services/asymkey" issue_service "code.gitea.io/gitea/services/issue" ) // GetDefaultMergeMessage returns default message used when merging pull request func GetDefaultMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr *issues_model.PullRequest, mergeStyle repo_model.MergeStyle) (message, body string, err error) { - if err := pr.LoadHeadRepo(ctx); err != nil { - return "", "", err - } if err := pr.LoadBaseRepo(ctx); err != nil { return "", "", err } - if pr.BaseRepo == nil { - return "", "", repo_model.ErrRepoNotExist{ID: pr.BaseRepoID} + if err := pr.LoadHeadRepo(ctx); err != nil { + return "", "", err } - if err := pr.LoadIssue(ctx); err != nil { return "", "", err } @@ -144,18 +136,19 @@ func expandDefaultMergeMessage(template string, vars map[string]string) (message // Merge merges pull request to base repository. 
// Caller should check PR is ready to be merged (review and status checks) func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, baseGitRepo *git.Repository, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, wasAutoMerged bool) error { - if err := pr.LoadHeadRepo(ctx); err != nil { - log.Error("LoadHeadRepo: %v", err) - return fmt.Errorf("LoadHeadRepo: %w", err) - } else if err := pr.LoadBaseRepo(ctx); err != nil { - log.Error("LoadBaseRepo: %v", err) - return fmt.Errorf("LoadBaseRepo: %w", err) + if err := pr.LoadBaseRepo(ctx); err != nil { + log.Error("Unable to load base repo: %v", err) + return fmt.Errorf("unable to load base repo: %w", err) + } else if err := pr.LoadHeadRepo(ctx); err != nil { + log.Error("Unable to load head repo: %v", err) + return fmt.Errorf("unable to load head repo: %w", err) } pullWorkingPool.CheckIn(fmt.Sprint(pr.ID)) defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID)) // Removing an auto merge pull and ignore if not exist + // FIXME: is this the correct point to do this? Shouldn't this be after IsMergeStyleAllowed? if err := pull_model.DeleteScheduledAutoMerge(ctx, pr.ID); err != nil && !db.IsErrNotExist(err) { return err } @@ -179,7 +172,7 @@ func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.U // Run the merge in the hammer context to prevent cancellation hammerCtx := graceful.GetManager().HammerContext() - pr.MergedCommitID, err = rawMerge(hammerCtx, pr, doer, mergeStyle, expectedHeadCommitID, message) + pr.MergedCommitID, err = doMergeAndPush(hammerCtx, pr, doer, mergeStyle, expectedHeadCommitID, message) if err != nil { return err } @@ -189,18 +182,18 @@ func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.U pr.MergerID = doer.ID if _, err := pr.SetMerged(hammerCtx); err != nil { - log.Error("SetMerged [%d]: %v", pr.ID, err) + log.Error("SetMerged %-v: %v", pr, err) } if err := pr.LoadIssue(hammerCtx); err != nil { - log.Error("LoadIssue [%d]: %v", pr.ID, err) + log.Error("LoadIssue %-v: %v", pr, err) } if err := pr.Issue.LoadRepo(hammerCtx); err != nil { - log.Error("LoadRepo for issue [%d]: %v", pr.ID, err) + log.Error("pr.Issue.LoadRepo %-v: %v", pr, err) } if err := pr.Issue.Repo.LoadOwner(hammerCtx); err != nil { - log.Error("LoadOwner for PR [%d]: %v", pr.ID, err) + log.Error("LoadOwner for %-v: %v", pr, err) } if wasAutoMerged { @@ -239,326 +232,43 @@ func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.U return nil } -// rawMerge perform the merge operation without changing any pull information in database -func rawMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string) (string, error) { +// doMergeAndPush performs the merge operation without changing any pull information in database and pushes it up to the base repository +func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string) (string, error) { // Clone base repo. 
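Much of the rewritten merge code below runs git commands through mergeCtx.RunOpts(), which (as defined later in merge_prepare.go) resets the shared output buffers and returns a git.RunOpts pointed at the temporary clone. A minimal, self-contained sketch of that buffer-reuse pattern; runCtx here is a simplified stand-in for the patch's mergeContext, not the actual type:

package pull // illustration only

import (
	"strings"

	"code.gitea.io/gitea/modules/git"
)

// runCtx is a stripped-down stand-in for mergeContext: it owns the temporary
// clone path, the pushing environment, and a pair of reusable output buffers.
type runCtx struct {
	tmpBasePath    string
	env            []string
	outbuf, errbuf strings.Builder
}

// RunOpts resets the buffers so each command starts with clean output, then
// returns options targeting the temporary clone.
func (c *runCtx) RunOpts() *git.RunOpts {
	c.outbuf.Reset()
	c.errbuf.Reset()
	return &git.RunOpts{
		Env:    c.env,
		Dir:    c.tmpBasePath,
		Stdout: &c.outbuf,
		Stderr: &c.errbuf,
	}
}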
- tmpBasePath, err := createTemporaryRepo(ctx, pr) + mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, expectedHeadCommitID) if err != nil { - log.Error("CreateTemporaryPath: %v", err) return "", err } - defer func() { - if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { - log.Error("Merge: RemoveTemporaryPath: %s", err) - } - }() - - baseBranch := "base" - trackingBranch := "tracking" - stagingBranch := "staging" - - if expectedHeadCommitID != "" { - trackingCommitID, _, err := git.NewCommand(ctx, "show-ref", "--hash").AddDynamicArguments(git.BranchPrefix + trackingBranch).RunStdString(&git.RunOpts{Dir: tmpBasePath}) - if err != nil { - log.Error("show-ref[%s] --hash refs/heads/trackingn: %v", tmpBasePath, git.BranchPrefix+trackingBranch, err) - return "", fmt.Errorf("getDiffTree: %w", err) - } - if strings.TrimSpace(trackingCommitID) != expectedHeadCommitID { - return "", models.ErrSHADoesNotMatch{ - GivenSHA: expectedHeadCommitID, - CurrentSHA: trackingCommitID, - } - } - } - - var outbuf, errbuf strings.Builder - - // Enable sparse-checkout - sparseCheckoutList, err := getDiffTree(ctx, tmpBasePath, baseBranch, trackingBranch) - if err != nil { - log.Error("getDiffTree(%s, %s, %s): %v", tmpBasePath, baseBranch, trackingBranch, err) - return "", fmt.Errorf("getDiffTree: %w", err) - } - - infoPath := filepath.Join(tmpBasePath, ".git", "info") - if err := os.MkdirAll(infoPath, 0o700); err != nil { - log.Error("Unable to create .git/info in %s: %v", tmpBasePath, err) - return "", fmt.Errorf("Unable to create .git/info in tmpBasePath: %w", err) - } - - sparseCheckoutListPath := filepath.Join(infoPath, "sparse-checkout") - if err := os.WriteFile(sparseCheckoutListPath, []byte(sparseCheckoutList), 0o600); err != nil { - log.Error("Unable to write .git/info/sparse-checkout file in %s: %v", tmpBasePath, err) - return "", fmt.Errorf("Unable to write .git/info/sparse-checkout file in tmpBasePath: %w", err) - } - - gitConfigCommand := func() *git.Command { - return git.NewCommand(ctx, "config", "--local") - } - - // Switch off LFS process (set required, clean and smudge here also) - if err := gitConfigCommand().AddArguments("filter.lfs.process", ""). - Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git config [filter.lfs.process -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("git config [filter.lfs.process -> <> ]: %w\n%s\n%s", err, outbuf.String(), errbuf.String()) - } - outbuf.Reset() - errbuf.Reset() - - if err := gitConfigCommand().AddArguments("filter.lfs.required", "false"). - Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git config [filter.lfs.required -> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("git config [filter.lfs.required -> ]: %w\n%s\n%s", err, outbuf.String(), errbuf.String()) - } - outbuf.Reset() - errbuf.Reset() - - if err := gitConfigCommand().AddArguments("filter.lfs.clean", ""). - Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git config [filter.lfs.clean -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("git config [filter.lfs.clean -> <> ]: %w\n%s\n%s", err, outbuf.String(), errbuf.String()) - } - outbuf.Reset() - errbuf.Reset() - - if err := gitConfigCommand().AddArguments("filter.lfs.smudge", ""). 
- Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git config [filter.lfs.smudge -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("git config [filter.lfs.smudge -> <> ]: %w\n%s\n%s", err, outbuf.String(), errbuf.String()) - } - outbuf.Reset() - errbuf.Reset() - - if err := gitConfigCommand().AddArguments("core.sparseCheckout", "true"). - Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git config [core.sparseCheckout -> true ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("git config [core.sparsecheckout -> true]: %w\n%s\n%s", err, outbuf.String(), errbuf.String()) - } - outbuf.Reset() - errbuf.Reset() - - // Read base branch index - if err := git.NewCommand(ctx, "read-tree", "HEAD"). - Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git read-tree HEAD: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("Unable to read base branch in to the index: %w\n%s\n%s", err, outbuf.String(), errbuf.String()) - } - outbuf.Reset() - errbuf.Reset() - - sig := doer.NewGitSig() - committer := sig - - // Determine if we should sign. If no signKeyID, use --no-gpg-sign to countermand the sign config (from gitconfig) - var signArgs git.TrustedCmdArgs - sign, signKeyID, signer, _ := asymkey_service.SignMerge(ctx, pr, doer, tmpBasePath, "HEAD", trackingBranch) - if sign { - if pr.BaseRepo.GetTrustModel() == repo_model.CommitterTrustModel || pr.BaseRepo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { - committer = signer - } - signArgs = git.ToTrustedCmdArgs([]string{"-S" + signKeyID}) - } else { - signArgs = append(signArgs, "--no-gpg-sign") - } - - commitTimeStr := time.Now().Format(time.RFC3339) - - // Because this may call hooks we should pass in the environment - env := append(os.Environ(), - "GIT_AUTHOR_NAME="+sig.Name, - "GIT_AUTHOR_EMAIL="+sig.Email, - "GIT_AUTHOR_DATE="+commitTimeStr, - "GIT_COMMITTER_NAME="+committer.Name, - "GIT_COMMITTER_EMAIL="+committer.Email, - "GIT_COMMITTER_DATE="+commitTimeStr, - ) + defer cancel() // Merge commits. switch mergeStyle { case repo_model.MergeStyleMerge: - cmd := git.NewCommand(ctx, "merge", "--no-ff", "--no-commit").AddDynamicArguments(trackingBranch) - if err := runMergeCommand(pr, mergeStyle, cmd, tmpBasePath); err != nil { - log.Error("Unable to merge tracking into base: %v", err) + if err := doMergeStyleMerge(mergeCtx, message); err != nil { return "", err } - - if err := commitAndSignNoAuthor(ctx, pr, message, signArgs, tmpBasePath, env); err != nil { - log.Error("Unable to make final commit: %v", err) + case repo_model.MergeStyleRebase, repo_model.MergeStyleRebaseMerge: + if err := doMergeStyleRebase(mergeCtx, mergeStyle, message); err != nil { return "", err } - case repo_model.MergeStyleRebase: - fallthrough - case repo_model.MergeStyleRebaseUpdate: - fallthrough - case repo_model.MergeStyleRebaseMerge: - // Checkout head branch - if err := git.NewCommand(ctx, "checkout", "-b").AddDynamicArguments(stagingBranch, trackingBranch). 
- Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - } - outbuf.Reset() - errbuf.Reset() - - // Rebase before merging - if err := git.NewCommand(ctx, "rebase").AddDynamicArguments(baseBranch). - Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - // Rebase will leave a REBASE_HEAD file in .git if there is a conflict - if _, statErr := os.Stat(filepath.Join(tmpBasePath, ".git", "REBASE_HEAD")); statErr == nil { - var commitSha string - ok := false - failingCommitPaths := []string{ - filepath.Join(tmpBasePath, ".git", "rebase-apply", "original-commit"), // Git < 2.26 - filepath.Join(tmpBasePath, ".git", "rebase-merge", "stopped-sha"), // Git >= 2.26 - } - for _, failingCommitPath := range failingCommitPaths { - if _, statErr := os.Stat(failingCommitPath); statErr == nil { - commitShaBytes, readErr := os.ReadFile(failingCommitPath) - if readErr != nil { - // Abandon this attempt to handle the error - log.Error("git rebase staging on to base [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("git rebase staging on to base [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - } - commitSha = strings.TrimSpace(string(commitShaBytes)) - ok = true - break - } - } - if !ok { - log.Error("Unable to determine failing commit sha for this rebase message. Cannot cast as models.ErrRebaseConflicts.") - log.Error("git rebase staging on to base [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("git rebase staging on to base [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - } - log.Debug("RebaseConflict at %s [%s:%s -> %s:%s]: %v\n%s\n%s", commitSha, pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - return "", models.ErrRebaseConflicts{ - Style: mergeStyle, - CommitSHA: commitSha, - StdOut: outbuf.String(), - StdErr: errbuf.String(), - Err: err, - } - } - log.Error("git rebase staging on to base [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("git rebase staging on to base [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - } - outbuf.Reset() - errbuf.Reset() - - // not need merge, just update by rebase. so skip - if mergeStyle == repo_model.MergeStyleRebaseUpdate { - break - } - - // Checkout base branch again - if err := git.NewCommand(ctx, "checkout").AddDynamicArguments(baseBranch). 
- Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - } - outbuf.Reset() - errbuf.Reset() - - cmd := git.NewCommand(ctx, "merge") - if mergeStyle == repo_model.MergeStyleRebase { - cmd.AddArguments("--ff-only") - } else { - cmd.AddArguments("--no-ff", "--no-commit") - } - cmd.AddDynamicArguments(stagingBranch) - - // Prepare merge with commit - if err := runMergeCommand(pr, mergeStyle, cmd, tmpBasePath); err != nil { - log.Error("Unable to merge staging into base: %v", err) - return "", err - } - if mergeStyle == repo_model.MergeStyleRebaseMerge { - if err := commitAndSignNoAuthor(ctx, pr, message, signArgs, tmpBasePath, env); err != nil { - log.Error("Unable to make final commit: %v", err) - return "", err - } - } case repo_model.MergeStyleSquash: - // Merge with squash - cmd := git.NewCommand(ctx, "merge", "--squash").AddDynamicArguments(trackingBranch) - if err := runMergeCommand(pr, mergeStyle, cmd, tmpBasePath); err != nil { - log.Error("Unable to merge --squash tracking into base: %v", err) + if err := doMergeStyleSquash(mergeCtx, message); err != nil { return "", err } - - if err = pr.Issue.LoadPoster(ctx); err != nil { - log.Error("LoadPoster: %v", err) - return "", fmt.Errorf("LoadPoster: %w", err) - } - sig := pr.Issue.Poster.NewGitSig() - if setting.Repository.PullRequest.AddCoCommitterTrailers && committer.String() != sig.String() { - // add trailer - message += fmt.Sprintf("\nCo-authored-by: %s\nCo-committed-by: %s\n", sig.String(), sig.String()) - } - if err := git.NewCommand(ctx, "commit"). - AddArguments(signArgs...). - AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email). - AddOptionFormat("--message=%s", message). 
- Run(&git.RunOpts{ - Env: env, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - } - outbuf.Reset() - errbuf.Reset() default: return "", models.ErrInvalidMergeStyle{ID: pr.BaseRepo.ID, Style: mergeStyle} } // OK we should cache our current head and origin/headbranch - mergeHeadSHA, err := git.GetFullCommitID(ctx, tmpBasePath, "HEAD") + mergeHeadSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "HEAD") if err != nil { return "", fmt.Errorf("Failed to get full commit id for HEAD: %w", err) } - mergeBaseSHA, err := git.GetFullCommitID(ctx, tmpBasePath, "original_"+baseBranch) + mergeBaseSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "original_"+baseBranch) if err != nil { return "", fmt.Errorf("Failed to get full commit id for origin/%s: %w", pr.BaseBranch, err) } - mergeCommitID, err := git.GetFullCommitID(ctx, tmpBasePath, baseBranch) + mergeCommitID, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, baseBranch) if err != nil { return "", fmt.Errorf("Failed to get full commit id for the new merge: %w", err) } @@ -567,7 +277,7 @@ func rawMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_mode // I think in the interests of data safety - failures to push to the lfs should prevent // the merge as you can always remerge. if setting.LFS.StartServer { - if err := LFSPush(ctx, tmpBasePath, mergeHeadSHA, mergeBaseSHA, pr); err != nil { + if err := LFSPush(ctx, mergeCtx.tmpBasePath, mergeHeadSHA, mergeBaseSHA, pr); err != nil { return "", err } } @@ -576,167 +286,92 @@ func rawMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_mode err = pr.HeadRepo.LoadOwner(ctx) if err != nil { if !user_model.IsErrUserNotExist(err) { - log.Error("Can't find user: %d for head repository - %v", pr.HeadRepo.OwnerID, err) + log.Error("Can't find user: %d for head repository in %-v: %v", pr.HeadRepo.OwnerID, pr, err) return "", err } - log.Error("Can't find user: %d for head repository - defaulting to doer: %s - %v", pr.HeadRepo.OwnerID, doer.Name, err) + log.Warn("Can't find user: %d for head repository in %-v - defaulting to doer: %s - %v", pr.HeadRepo.OwnerID, pr, doer.Name, err) headUser = doer } else { headUser = pr.HeadRepo.Owner } - var pushCmd *git.Command - if mergeStyle == repo_model.MergeStyleRebaseUpdate { - // force push the rebase result to head branch - env = repo_module.FullPushingEnvironment( - headUser, - doer, - pr.HeadRepo, - pr.HeadRepo.Name, - pr.ID, - ) - pushCmd = git.NewCommand(ctx, "push", "-f", "head_repo").AddDynamicArguments(stagingBranch + ":" + git.BranchPrefix + pr.HeadBranch) - } else { - env = repo_module.FullPushingEnvironment( - headUser, - doer, - pr.BaseRepo, - pr.BaseRepo.Name, - pr.ID, - ) - pushCmd = git.NewCommand(ctx, "push", "origin").AddDynamicArguments(baseBranch + ":" + git.BranchPrefix + pr.BaseBranch) - } + mergeCtx.env = repo_module.FullPushingEnvironment( + headUser, + doer, + pr.BaseRepo, + pr.BaseRepo.Name, + pr.ID, + ) + pushCmd := git.NewCommand(ctx, "push", "origin").AddDynamicArguments(baseBranch + ":" + git.BranchPrefix + pr.BaseBranch) // Push back to upstream. 
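The push below distinguishes a stale base branch from a server-side hook rejection purely by scanning git's stderr. A self-contained sketch of that classification; classifyPushError is a hypothetical helper, but the two string markers and error types are the ones used in the code that follows:

package pull // illustration only

import (
	"fmt"
	"strings"

	"code.gitea.io/gitea/modules/git"
)

// classifyPushError mirrors the stderr checks used below: a non-fast-forward
// push means the base branch moved, a "remote rejected" means a hook refused
// the update, and anything else is surfaced as a plain error.
func classifyPushError(stdout, stderr string, err error) error {
	switch {
	case strings.Contains(stderr, "non-fast-forward"):
		return &git.ErrPushOutOfDate{StdOut: stdout, StdErr: stderr, Err: err}
	case strings.Contains(stderr, "! [remote rejected]"):
		rejectErr := &git.ErrPushRejected{StdOut: stdout, StdErr: stderr, Err: err}
		rejectErr.GenerateMessage()
		return rejectErr
	default:
		return fmt.Errorf("git push: %s", stderr)
	}
}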
// TODO: this cause an api call to "/api/internal/hook/post-receive/...", // that prevents us from doint the whole merge in one db transaction - if err := pushCmd.Run(&git.RunOpts{ - Env: env, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - if strings.Contains(errbuf.String(), "non-fast-forward") { + if err := pushCmd.Run(mergeCtx.RunOpts()); err != nil { + if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") { return "", &git.ErrPushOutOfDate{ - StdOut: outbuf.String(), - StdErr: errbuf.String(), + StdOut: mergeCtx.outbuf.String(), + StdErr: mergeCtx.errbuf.String(), Err: err, } - } else if strings.Contains(errbuf.String(), "! [remote rejected]") { + } else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") { err := &git.ErrPushRejected{ - StdOut: outbuf.String(), - StdErr: errbuf.String(), + StdOut: mergeCtx.outbuf.String(), + StdErr: mergeCtx.errbuf.String(), Err: err, } err.GenerateMessage() return "", err } - return "", fmt.Errorf("git push: %s", errbuf.String()) + return "", fmt.Errorf("git push: %s", mergeCtx.errbuf.String()) } - outbuf.Reset() - errbuf.Reset() + mergeCtx.outbuf.Reset() + mergeCtx.errbuf.Reset() return mergeCommitID, nil } -func commitAndSignNoAuthor(ctx context.Context, pr *issues_model.PullRequest, message string, signArgs git.TrustedCmdArgs, tmpBasePath string, env []string) error { - var outbuf, errbuf strings.Builder - if err := git.NewCommand(ctx, "commit").AddArguments(signArgs...).AddOptionFormat("--message=%s", message). - Run(&git.RunOpts{ - Env: env, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) +func commitAndSignNoAuthor(ctx *mergeContext, message string) error { + if err := git.NewCommand(ctx, "commit").AddArguments(ctx.signArg...).AddOptionFormat("--message=%s", message). 
+ Run(ctx.RunOpts()); err != nil { + log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + return fmt.Errorf("git commit %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) } return nil } -func runMergeCommand(pr *issues_model.PullRequest, mergeStyle repo_model.MergeStyle, cmd *git.Command, tmpBasePath string) error { - var outbuf, errbuf strings.Builder - if err := cmd.Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { +func runMergeCommand(ctx *mergeContext, mergeStyle repo_model.MergeStyle, cmd *git.Command) error { + if err := cmd.Run(ctx.RunOpts()); err != nil { // Merge will leave a MERGE_HEAD file in the .git folder if there is a conflict - if _, statErr := os.Stat(filepath.Join(tmpBasePath, ".git", "MERGE_HEAD")); statErr == nil { + if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "MERGE_HEAD")); statErr == nil { // We have a merge conflict error - log.Debug("MergeConflict [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) + log.Debug("MergeConflict %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) return models.ErrMergeConflicts{ Style: mergeStyle, - StdOut: outbuf.String(), - StdErr: errbuf.String(), + StdOut: ctx.outbuf.String(), + StdErr: ctx.errbuf.String(), Err: err, } - } else if strings.Contains(errbuf.String(), "refusing to merge unrelated histories") { - log.Debug("MergeUnrelatedHistories [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) + } else if strings.Contains(ctx.errbuf.String(), "refusing to merge unrelated histories") { + log.Debug("MergeUnrelatedHistories %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) return models.ErrMergeUnrelatedHistories{ Style: mergeStyle, - StdOut: outbuf.String(), - StdErr: errbuf.String(), + StdOut: ctx.outbuf.String(), + StdErr: ctx.errbuf.String(), Err: err, } } - log.Error("git merge [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - return fmt.Errorf("git merge [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) + log.Error("git merge %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + return fmt.Errorf("git merge %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) } + ctx.outbuf.Reset() + ctx.errbuf.Reset() return nil } var escapedSymbols = regexp.MustCompile(`([*[?! \\])`) -func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string) (string, error) { - getDiffTreeFromBranch := func(repoPath, baseBranch, headBranch string) (string, error) { - var outbuf, errbuf strings.Builder - // Compute the diff-tree for sparse-checkout - if err := git.NewCommand(ctx, "diff-tree", "--no-commit-id", "--name-only", "-r", "-z", "--root").AddDynamicArguments(baseBranch, headBranch). 
- Run(&git.RunOpts{ - Dir: repoPath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - return "", fmt.Errorf("git diff-tree [%s base:%s head:%s]: %s", repoPath, baseBranch, headBranch, errbuf.String()) - } - return outbuf.String(), nil - } - - scanNullTerminatedStrings := func(data []byte, atEOF bool) (advance int, token []byte, err error) { - if atEOF && len(data) == 0 { - return 0, nil, nil - } - if i := bytes.IndexByte(data, '\x00'); i >= 0 { - return i + 1, data[0:i], nil - } - if atEOF { - return len(data), data, nil - } - return 0, nil, nil - } - - list, err := getDiffTreeFromBranch(repoPath, baseBranch, headBranch) - if err != nil { - return "", err - } - - // Prefixing '/' for each entry, otherwise all files with the same name in subdirectories would be matched. - out := bytes.Buffer{} - scanner := bufio.NewScanner(strings.NewReader(list)) - scanner.Split(scanNullTerminatedStrings) - for scanner.Scan() { - filepath := scanner.Text() - // escape '*', '?', '[', spaces and '!' prefix - filepath = escapedSymbols.ReplaceAllString(filepath, `\$1`) - // no necessary to escape the first '#' symbol because the first symbol is '/' - fmt.Fprintf(&out, "/%s\n", filepath) - } - - return out.String(), nil -} - // IsUserAllowedToMerge check if user is allowed to merge PR with given permissions and branch protections func IsUserAllowedToMerge(ctx context.Context, pr *issues_model.PullRequest, p access_model.Permission, user *user_model.User) (bool, error) { if user == nil { diff --git a/services/pull/merge_merge.go b/services/pull/merge_merge.go new file mode 100644 index 000000000000..0f7664297aa8 --- /dev/null +++ b/services/pull/merge_merge.go @@ -0,0 +1,25 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package pull + +import ( + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/log" +) + +// doMergeStyleMerge merges the tracking branch into the current HEAD - which is assumed to be the staging branch (equal to the pr.BaseBranch) +func doMergeStyleMerge(ctx *mergeContext, message string) error { + cmd := git.NewCommand(ctx, "merge", "--no-ff", "--no-commit").AddDynamicArguments(trackingBranch) + if err := runMergeCommand(ctx, repo_model.MergeStyleMerge, cmd); err != nil { + log.Error("%-v Unable to merge tracking into base: %v", ctx.pr, err) + return err + } + + if err := commitAndSignNoAuthor(ctx, message); err != nil { + log.Error("%-v Unable to make final commit: %v", ctx.pr, err) + return err + } + return nil +} diff --git a/services/pull/merge_prepare.go b/services/pull/merge_prepare.go new file mode 100644 index 000000000000..2ba821961a2b --- /dev/null +++ b/services/pull/merge_prepare.go @@ -0,0 +1,297 @@ +// Copyright 2023 The Gitea Authors. All rights reserved.
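merge_prepare.go, which begins here, restricts the temporary repo's working tree with a sparse-checkout list built by getDiffTree further below: one '/'-prefixed, escaped path per changed file. A minimal, self-contained sketch of that escaping, reusing the escapedSymbols pattern from merge.go above; the sample file names are hypothetical:

package main

import (
	"fmt"
	"regexp"
)

// Same pattern as escapedSymbols in merge.go: escape '*', '[', '?', '!', space and '\'.
var escapedSymbols = regexp.MustCompile(`([*[?! \\])`)

func main() {
	// Hypothetical files changed between the base and tracking branches.
	changed := []string{"docs/README.md", "a file with spaces.txt", "pkg/*glob[chars]?.go"}
	for _, f := range changed {
		// Prefix '/' so only this exact path matches, not same-named files in subdirectories.
		fmt.Printf("/%s\n", escapedSymbols.ReplaceAllString(f, `\$1`))
	}
}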
+// SPDX-License-Identifier: MIT + +package pull + +import ( + "bufio" + "bytes" + "context" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "time" + + "code.gitea.io/gitea/models" + issues_model "code.gitea.io/gitea/models/issues" + repo_model "code.gitea.io/gitea/models/repo" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/log" + asymkey_service "code.gitea.io/gitea/services/asymkey" +) + +type mergeContext struct { + *prContext + doer *user_model.User + sig *git.Signature + committer *git.Signature + signArg git.TrustedCmdArgs + env []string +} + +func (ctx *mergeContext) RunOpts() *git.RunOpts { + ctx.outbuf.Reset() + ctx.errbuf.Reset() + return &git.RunOpts{ + Env: ctx.env, + Dir: ctx.tmpBasePath, + Stdout: ctx.outbuf, + Stderr: ctx.errbuf, + } +} + +func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, expectedHeadCommitID string) (mergeCtx *mergeContext, cancel context.CancelFunc, err error) { + // Clone base repo. + prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr) + if err != nil { + log.Error("createTemporaryRepoForPR: %v", err) + return nil, cancel, err + } + + mergeCtx = &mergeContext{ + prContext: prCtx, + doer: doer, + } + + if expectedHeadCommitID != "" { + trackingCommitID, _, err := git.NewCommand(ctx, "show-ref", "--hash").AddDynamicArguments(git.BranchPrefix + trackingBranch).RunStdString(&git.RunOpts{Dir: mergeCtx.tmpBasePath}) + if err != nil { + defer cancel() + log.Error("failed to get sha of head branch in %-v: show-ref[%s] --hash refs/heads/tracking: %v", mergeCtx.pr, mergeCtx.tmpBasePath, err) + return nil, nil, fmt.Errorf("unable to get sha of head branch in %v %w", pr, err) + } + if strings.TrimSpace(trackingCommitID) != expectedHeadCommitID { + defer cancel() + return nil, nil, models.ErrSHADoesNotMatch{ + GivenSHA: expectedHeadCommitID, + CurrentSHA: trackingCommitID, + } + } + } + + mergeCtx.outbuf.Reset() + mergeCtx.errbuf.Reset() + if err := prepareTemporaryRepoForMerge(mergeCtx); err != nil { + defer cancel() + return nil, nil, err + } + + mergeCtx.sig = doer.NewGitSig() + mergeCtx.committer = mergeCtx.sig + + // Determine if we should sign + sign, keyID, signer, _ := asymkey_service.SignMerge(ctx, mergeCtx.pr, mergeCtx.doer, mergeCtx.tmpBasePath, "HEAD", trackingBranch) + if sign { + mergeCtx.signArg = git.ToTrustedCmdArgs([]string{"-S" + keyID}) + if pr.BaseRepo.GetTrustModel() == repo_model.CommitterTrustModel || pr.BaseRepo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { + mergeCtx.committer = signer + } + } else { + mergeCtx.signArg = git.ToTrustedCmdArgs([]string{"--no-gpg-sign"}) + } + + commitTimeStr := time.Now().Format(time.RFC3339) + + // Because this may call hooks we should pass in the environment + mergeCtx.env = append(os.Environ(), + "GIT_AUTHOR_NAME="+mergeCtx.sig.Name, + "GIT_AUTHOR_EMAIL="+mergeCtx.sig.Email, + "GIT_AUTHOR_DATE="+commitTimeStr, + "GIT_COMMITTER_NAME="+mergeCtx.committer.Name, + "GIT_COMMITTER_EMAIL="+mergeCtx.committer.Email, + "GIT_COMMITTER_DATE="+commitTimeStr, + ) + + return mergeCtx, cancel, nil +} + +// prepareTemporaryRepoForMerge takes a repository that has been created using createTemporaryRepo +// it then sets up the sparse-checkout and other things +func prepareTemporaryRepoForMerge(ctx *mergeContext) error { + infoPath := filepath.Join(ctx.tmpBasePath, ".git", "info") + if err := os.MkdirAll(infoPath, 0o700); err != nil { + log.Error("%-v Unable to create .git/info 
in %s: %v", ctx.pr, ctx.tmpBasePath, err) + return fmt.Errorf("Unable to create .git/info in tmpBasePath: %w", err) + } + + // Enable sparse-checkout + // Here we use the .git/info/sparse-checkout file as described in the git documentation + sparseCheckoutListFile, err := os.OpenFile(filepath.Join(infoPath, "sparse-checkout"), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o600) + if err != nil { + log.Error("%-v Unable to write .git/info/sparse-checkout file in %s: %v", ctx.pr, ctx.tmpBasePath, err) + return fmt.Errorf("Unable to write .git/info/sparse-checkout file in tmpBasePath: %w", err) + } + defer sparseCheckoutListFile.Close() // we will close it earlier but we need to ensure it is closed if there is an error + + if err := getDiffTree(ctx, ctx.tmpBasePath, baseBranch, trackingBranch, sparseCheckoutListFile); err != nil { + log.Error("%-v getDiffTree(%s, %s, %s): %v", ctx.pr, ctx.tmpBasePath, baseBranch, trackingBranch, err) + return fmt.Errorf("getDiffTree: %w", err) + } + + if err := sparseCheckoutListFile.Close(); err != nil { + log.Error("%-v Unable to close .git/info/sparse-checkout file in %s: %v", ctx.pr, ctx.tmpBasePath, err) + return fmt.Errorf("Unable to close .git/info/sparse-checkout file in tmpBasePath: %w", err) + } + + gitConfigCommand := func() *git.Command { + return git.NewCommand(ctx, "config", "--local") + } + + setConfig := func(key, value string) error { + if err := gitConfigCommand().AddArguments(git.ToTrustedCmdArgs([]string{key, value})...). + Run(ctx.RunOpts()); err != nil { + if value == "" { + value = "<>" + } + log.Error("git config [%s -> %s ]: %v\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String()) + return fmt.Errorf("git config [%s -> %s ]: %w\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String()) + } + ctx.outbuf.Reset() + ctx.errbuf.Reset() + + return nil + } + + // Switch off LFS process (set required, clean and smudge here also) + if err := setConfig("filter.lfs.process", ""); err != nil { + return err + } + + if err := setConfig("filter.lfs.required", "false"); err != nil { + return err + } + + if err := setConfig("filter.lfs.clean", ""); err != nil { + return err + } + + if err := setConfig("filter.lfs.smudge", ""); err != nil { + return err + } + + if err := setConfig("core.sparseCheckout", "true"); err != nil { + return err + } + + // Read base branch index + if err := git.NewCommand(ctx, "read-tree", "HEAD"). 
+ Run(ctx.RunOpts()); err != nil { + log.Error("git read-tree HEAD: %v\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String()) + return fmt.Errorf("Unable to read base branch into the index: %w\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String()) + } + ctx.outbuf.Reset() + ctx.errbuf.Reset() + + return nil +} + +// getDiffTree writes all the files that were changed between baseBranch and headBranch to out +// the filenames are escaped so as to fit the format required for .git/info/sparse-checkout +func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string, out io.Writer) error { + diffOutReader, diffOutWriter, err := os.Pipe() + if err != nil { + log.Error("Unable to create os.Pipe for %s", repoPath) + return err + } + defer func() { + _ = diffOutReader.Close() + _ = diffOutWriter.Close() + }() + + scanNullTerminatedStrings := func(data []byte, atEOF bool) (advance int, token []byte, err error) { + if atEOF && len(data) == 0 { + return 0, nil, nil + } + if i := bytes.IndexByte(data, '\x00'); i >= 0 { + return i + 1, data[0:i], nil + } + if atEOF { + return len(data), data, nil + } + return 0, nil, nil + } + + err = git.NewCommand(ctx, "diff-tree", "--no-commit-id", "--name-only", "-r", "-r", "-z", "--root").AddDynamicArguments(baseBranch, headBranch). + Run(&git.RunOpts{ + Dir: repoPath, + Stdout: diffOutWriter, + PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error { + // Close the writer end of the pipe to begin processing + _ = diffOutWriter.Close() + defer func() { + // Close the reader on return to terminate the git command if necessary + _ = diffOutReader.Close() + }() + + // Now scan the output from the command + scanner := bufio.NewScanner(diffOutReader) + scanner.Split(scanNullTerminatedStrings) + for scanner.Scan() { + filepath := scanner.Text() + // escape '*', '?', '[', spaces and '!' prefix + filepath = escapedSymbols.ReplaceAllString(filepath, `\$1`) + // not necessary to escape the first '#' symbol because the first symbol is '/' + fmt.Fprintf(out, "/%s\n", filepath) + } + return scanner.Err() + }, + }) + return err +} + +// rebaseTrackingOnToBase checks out the tracking branch as staging and rebases it on to the base branch +// if there is a conflict it will return a models.ErrRebaseConflicts +func rebaseTrackingOnToBase(ctx *mergeContext, mergeStyle repo_model.MergeStyle) error { + // Checkout head branch + if err := git.NewCommand(ctx, "checkout", "-b").AddDynamicArguments(stagingBranch, trackingBranch). + Run(ctx.RunOpts()); err != nil { + return fmt.Errorf("unable to git checkout tracking as staging in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + } + ctx.outbuf.Reset() + ctx.errbuf.Reset() + + // Rebase before merging + if err := git.NewCommand(ctx, "rebase").AddDynamicArguments(baseBranch).
+ Run(ctx.RunOpts()); err != nil { + // Rebase will leave a REBASE_HEAD file in .git if there is a conflict + if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "REBASE_HEAD")); statErr == nil { + var commitSha string + ok := false + failingCommitPaths := []string{ + filepath.Join(ctx.tmpBasePath, ".git", "rebase-apply", "original-commit"), // Git < 2.26 + filepath.Join(ctx.tmpBasePath, ".git", "rebase-merge", "stopped-sha"), // Git >= 2.26 + } + for _, failingCommitPath := range failingCommitPaths { + if _, statErr := os.Stat(failingCommitPath); statErr == nil { + commitShaBytes, readErr := os.ReadFile(failingCommitPath) + if readErr != nil { + // Abandon this attempt to handle the error + return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + } + commitSha = strings.TrimSpace(string(commitShaBytes)) + ok = true + break + } + } + if !ok { + log.Error("Unable to determine failing commit sha for failing rebase in temp repo for %-v. Cannot cast as models.ErrRebaseConflicts.", ctx.pr) + return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + } + log.Debug("Conflict when rebasing staging on to base in %-v at %s: %v\n%s\n%s", ctx.pr, commitSha, err, ctx.outbuf.String(), ctx.errbuf.String()) + return models.ErrRebaseConflicts{ + CommitSHA: commitSha, + Style: mergeStyle, + StdOut: ctx.outbuf.String(), + StdErr: ctx.errbuf.String(), + Err: err, + } + } + return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + } + ctx.outbuf.Reset() + ctx.errbuf.Reset() + return nil +} diff --git a/services/pull/merge_rebase.go b/services/pull/merge_rebase.go new file mode 100644 index 000000000000..d3bb86d4aa08 --- /dev/null +++ b/services/pull/merge_rebase.go @@ -0,0 +1,50 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package pull + +import ( + "fmt" + + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/log" +) + +// doMergeStyleRebase rebases the tracking branch on the base branch as the current HEAD with or without a merge commit to the original pr branch +func doMergeStyleRebase(ctx *mergeContext, mergeStyle repo_model.MergeStyle, message string) error { + if err := rebaseTrackingOnToBase(ctx, mergeStyle); err != nil { + return err + } + + // Checkout base branch again + if err := git.NewCommand(ctx, "checkout").AddDynamicArguments(baseBranch).
+ Run(ctx.RunOpts()); err != nil { + log.Error("git checkout base prior to merge post staging rebase %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + return fmt.Errorf("git checkout base prior to merge post staging rebase %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + } + ctx.outbuf.Reset() + ctx.errbuf.Reset() + + cmd := git.NewCommand(ctx, "merge") + if mergeStyle == repo_model.MergeStyleRebase { + cmd.AddArguments("--ff-only") + } else { + cmd.AddArguments("--no-ff", "--no-commit") + } + cmd.AddDynamicArguments(stagingBranch) + + // Prepare merge with commit + if err := runMergeCommand(ctx, mergeStyle, cmd); err != nil { + log.Error("Unable to merge staging into base: %v", err) + return err + } + if mergeStyle == repo_model.MergeStyleRebaseMerge { + if err := commitAndSignNoAuthor(ctx, message); err != nil { + log.Error("Unable to make final commit: %v", err) + return err + } + } + + return nil +} diff --git a/services/pull/merge_squash.go b/services/pull/merge_squash.go new file mode 100644 index 000000000000..0a8cc0167e40 --- /dev/null +++ b/services/pull/merge_squash.go @@ -0,0 +1,53 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package pull + +import ( + "fmt" + + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" +) + +// doMergeStyleSquash squashes the tracking branch on the current HEAD (=base) +func doMergeStyleSquash(ctx *mergeContext, message string) error { + cmd := git.NewCommand(ctx, "merge", "--squash").AddDynamicArguments(trackingBranch) + if err := runMergeCommand(ctx, repo_model.MergeStyleSquash, cmd); err != nil { + log.Error("%-v Unable to merge --squash tracking into base: %v", ctx.pr, err) + return err + } + + if err := ctx.pr.Issue.LoadPoster(ctx); err != nil { + log.Error("%-v Issue[%d].LoadPoster: %v", ctx.pr, ctx.pr.Issue.ID, err) + return fmt.Errorf("LoadPoster: %w", err) + } + sig := ctx.pr.Issue.Poster.NewGitSig() + if len(ctx.signArg) == 0 { + if err := git.NewCommand(ctx, "commit"). + AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email). + AddOptionFormat("--message=%s", message). + Run(ctx.RunOpts()); err != nil { + log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", ctx.pr.HeadRepo.FullName(), ctx.pr.HeadBranch, ctx.pr.BaseRepo.FullName(), ctx.pr.BaseBranch, err, ctx.outbuf.String(), ctx.errbuf.String()) + } + } else { + if setting.Repository.PullRequest.AddCoCommitterTrailers && ctx.committer.String() != sig.String() { + // add trailer + message += fmt.Sprintf("\nCo-authored-by: %s\nCo-committed-by: %s\n", sig.String(), sig.String()) + } + if err := git.NewCommand(ctx, "commit"). + AddArguments(ctx.signArg...). + AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email). + AddOptionFormat("--message=%s", message). 
+ Run(ctx.RunOpts()); err != nil { + log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", ctx.pr.HeadRepo.FullName(), ctx.pr.HeadBranch, ctx.pr.BaseRepo.FullName(), ctx.pr.BaseBranch, err, ctx.outbuf.String(), ctx.errbuf.String()) + } + } + ctx.outbuf.Reset() + ctx.errbuf.Reset() + return nil +} diff --git a/services/pull/patch.go b/services/pull/patch.go index c2ccc75bdccd..9277355720dc 100644 --- a/services/pull/patch.go +++ b/services/pull/patch.go @@ -22,7 +22,6 @@ import ( "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/process" - repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" @@ -64,25 +63,21 @@ func TestPatch(pr *issues_model.PullRequest) error { defer finished() // Clone base repo. - tmpBasePath, err := createTemporaryRepo(ctx, pr) + prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr) if err != nil { - log.Error("CreateTemporaryPath: %v", err) + log.Error("createTemporaryRepoForPR %-v: %v", pr, err) return err } - defer func() { - if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { - log.Error("Merge: RemoveTemporaryPath: %s", err) - } - }() + defer cancel() - gitRepo, err := git.OpenRepository(ctx, tmpBasePath) + gitRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath) if err != nil { return fmt.Errorf("OpenRepository: %w", err) } defer gitRepo.Close() // 1. update merge base - pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base", "--", "base", "tracking").RunStdString(&git.RunOpts{Dir: tmpBasePath}) + pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base", "--", "base", "tracking").RunStdString(&git.RunOpts{Dir: prCtx.tmpBasePath}) if err != nil { var err2 error pr.MergeBase, err2 = gitRepo.GetRefCommitID(git.BranchPrefix + "base") @@ -101,7 +96,7 @@ func TestPatch(pr *issues_model.PullRequest) error { } // 2. 
Check for conflicts - if conflicts, err := checkConflicts(ctx, pr, gitRepo, tmpBasePath); err != nil || conflicts || pr.Status == issues_model.PullRequestStatusEmpty { + if conflicts, err := checkConflicts(ctx, pr, gitRepo, prCtx.tmpBasePath); err != nil || conflicts || pr.Status == issues_model.PullRequestStatusEmpty { return err } diff --git a/services/pull/pull.go b/services/pull/pull.go index a19e88b33b65..d8923d0d5725 100644 --- a/services/pull/pull.go +++ b/services/pull/pull.go @@ -349,18 +349,14 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string, // checkIfPRContentChanged checks if diff to target branch has changed by push // A commit can be considered to leave the PR untouched if the patch/diff with its merge base is unchanged func checkIfPRContentChanged(ctx context.Context, pr *issues_model.PullRequest, oldCommitID, newCommitID string) (hasChanged bool, err error) { - tmpBasePath, err := createTemporaryRepo(ctx, pr) + prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr) if err != nil { - log.Error("CreateTemporaryRepo: %v", err) + log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err) return false, err } - defer func() { - if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { - log.Error("checkIfPRContentChanged: RemoveTemporaryPath: %s", err) - } - }() + defer cancel() - tmpRepo, err := git.OpenRepository(ctx, tmpBasePath) + tmpRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath) if err != nil { return false, fmt.Errorf("OpenRepository: %w", err) } @@ -379,7 +375,7 @@ func checkIfPRContentChanged(ctx context.Context, pr *issues_model.PullRequest, } if err := cmd.Run(&git.RunOpts{ - Dir: tmpBasePath, + Dir: prCtx.tmpBasePath, Stdout: stdoutWriter, PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error { _ = stdoutWriter.Close() diff --git a/services/pull/temp_repo.go b/services/pull/temp_repo.go index 2bef671555e0..146470780671 100644 --- a/services/pull/temp_repo.go +++ b/services/pull/temp_repo.go @@ -19,49 +19,85 @@ import ( repo_module "code.gitea.io/gitea/modules/repository" ) -// createTemporaryRepo creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch +// Temporary repos created here use standard branch names to help simplify +// merging code +const ( + baseBranch = "base" // equivalent to pr.BaseBranch + trackingBranch = "tracking" // equivalent to pr.HeadBranch + stagingBranch = "staging" // this is used for a working branch +) + +type prContext struct { + context.Context + tmpBasePath string + pr *issues_model.PullRequest + outbuf *strings.Builder // we keep these around to help reduce needless buffer recreation, + errbuf *strings.Builder // any use should be preceded by a Reset and preferably after use +} + +func (ctx *prContext) RunOpts() *git.RunOpts { + ctx.outbuf.Reset() + ctx.errbuf.Reset() + return &git.RunOpts{ + Dir: ctx.tmpBasePath, + Stdout: ctx.outbuf, + Stderr: ctx.errbuf, + } +} + +// createTemporaryRepoForPR creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch // it also create a second base branch called "original_base" -func createTemporaryRepo(ctx context.Context, pr *issues_model.PullRequest) (string, error) { +func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) (prCtx *prContext, cancel context.CancelFunc, err error) { if err := pr.LoadHeadRepo(ctx); err != nil { - log.Error("LoadHeadRepo: %v", err) - return "", fmt.Errorf("LoadHeadRepo: %w", err) + log.Error("%-v LoadHeadRepo: %v", 
pr, err) + return nil, nil, fmt.Errorf("%v LoadHeadRepo: %w", pr, err) } else if pr.HeadRepo == nil { - log.Error("Pr %d HeadRepo %d does not exist", pr.ID, pr.HeadRepoID) - return "", &repo_model.ErrRepoNotExist{ + log.Error("%-v HeadRepo %d does not exist", pr, pr.HeadRepoID) + return nil, nil, &repo_model.ErrRepoNotExist{ ID: pr.HeadRepoID, } } else if err := pr.LoadBaseRepo(ctx); err != nil { - log.Error("LoadBaseRepo: %v", err) - return "", fmt.Errorf("LoadBaseRepo: %w", err) + log.Error("%-v LoadBaseRepo: %v", pr, err) + return nil, nil, fmt.Errorf("%v LoadBaseRepo: %w", pr, err) } else if pr.BaseRepo == nil { - log.Error("Pr %d BaseRepo %d does not exist", pr.ID, pr.BaseRepoID) - return "", &repo_model.ErrRepoNotExist{ + log.Error("%-v BaseRepo %d does not exist", pr, pr.BaseRepoID) + return nil, nil, &repo_model.ErrRepoNotExist{ ID: pr.BaseRepoID, } } else if err := pr.HeadRepo.LoadOwner(ctx); err != nil { - log.Error("HeadRepo.LoadOwner: %v", err) - return "", fmt.Errorf("HeadRepo.LoadOwner: %w", err) + log.Error("%-v HeadRepo.LoadOwner: %v", pr, err) + return nil, nil, fmt.Errorf("%v HeadRepo.LoadOwner: %w", pr, err) } else if err := pr.BaseRepo.LoadOwner(ctx); err != nil { - log.Error("BaseRepo.LoadOwner: %v", err) - return "", fmt.Errorf("BaseRepo.LoadOwner: %w", err) + log.Error("%-v BaseRepo.LoadOwner: %v", pr, err) + return nil, nil, fmt.Errorf("%v BaseRepo.LoadOwner: %w", pr, err) } // Clone base repo. tmpBasePath, err := repo_module.CreateTemporaryPath("pull") if err != nil { - log.Error("CreateTemporaryPath: %v", err) - return "", err + log.Error("CreateTemporaryPath[%-v]: %v", pr, err) + return nil, nil, err + } + prCtx = &prContext{ + Context: ctx, + tmpBasePath: tmpBasePath, + pr: pr, + outbuf: &strings.Builder{}, + errbuf: &strings.Builder{}, + } + cancel = func() { + if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { + log.Error("Error whilst removing removing temporary repo for %-v: %v", pr, err) + } } baseRepoPath := pr.BaseRepo.RepoPath() headRepoPath := pr.HeadRepo.RepoPath() if err := git.InitRepository(ctx, tmpBasePath, false); err != nil { - log.Error("git init tmpBasePath: %v", err) - if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { - log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) - } - return "", err + log.Error("Unable to init tmpBasePath for %-v: %v", pr, err) + cancel() + return nil, nil, err } remoteRepoName := "head_repo" @@ -73,99 +109,63 @@ func createTemporaryRepo(ctx context.Context, pr *issues_model.PullRequest) (str fetchArgs = append(fetchArgs, "--no-write-commit-graph") } - // Add head repo remote. 
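The addCacheRepo helper just below relies on git's alternates mechanism: appending another repository's objects directory to .git/objects/info/alternates lets the temporary clone read that repository's objects in place, so the later fetches only have to create refs instead of copying object data. A standalone sketch of the same trick, with hypothetical paths:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// addAlternate points repoPath's object database at cacheRepoPath's objects
// directory via .git/objects/info/alternates, mirroring addCacheRepo below.
func addAlternate(repoPath, cacheRepoPath string) error {
	p := filepath.Join(repoPath, ".git", "objects", "info", "alternates")
	f, err := os.OpenFile(p, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o600)
	if err != nil {
		return err
	}
	defer f.Close()
	_, err = fmt.Fprintln(f, filepath.Join(cacheRepoPath, "objects"))
	return err
}

func main() {
	// Hypothetical temporary clone and bare source repository.
	if err := addAlternate("/tmp/gitea-pull-1", "/data/git/repositories/owner/repo.git"); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}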
- addCacheRepo := func(staging, cache string) error { - p := filepath.Join(staging, ".git", "objects", "info", "alternates") + // addCacheRepo adds git alternatives for the cacheRepoPath in the repoPath + addCacheRepo := func(repoPath, cacheRepoPath string) error { + p := filepath.Join(repoPath, ".git", "objects", "info", "alternates") f, err := os.OpenFile(p, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o600) if err != nil { - log.Error("Could not create .git/objects/info/alternates file in %s: %v", staging, err) + log.Error("Could not create .git/objects/info/alternates file in %s: %v", repoPath, err) return err } defer f.Close() - data := filepath.Join(cache, "objects") + data := filepath.Join(cacheRepoPath, "objects") if _, err := fmt.Fprintln(f, data); err != nil { - log.Error("Could not write to .git/objects/info/alternates file in %s: %v", staging, err) + log.Error("Could not write to .git/objects/info/alternates file in %s: %v", repoPath, err) return err } return nil } + // Add head repo remote. if err := addCacheRepo(tmpBasePath, baseRepoPath); err != nil { - log.Error("Unable to add base repository to temporary repo [%s -> %s]: %v", pr.BaseRepo.FullName(), tmpBasePath, err) - if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { - log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) - } - return "", fmt.Errorf("Unable to add base repository to temporary repo [%s -> tmpBasePath]: %w", pr.BaseRepo.FullName(), err) + log.Error("%-v Unable to add base repository to temporary repo [%s -> %s]: %v", pr, pr.BaseRepo.FullName(), tmpBasePath, err) + cancel() + return nil, nil, fmt.Errorf("Unable to add base repository to temporary repo [%s -> tmpBasePath]: %w", pr.BaseRepo.FullName(), err) } - var outbuf, errbuf strings.Builder if err := git.NewCommand(ctx, "remote", "add", "-t").AddDynamicArguments(pr.BaseBranch).AddArguments("-m").AddDynamicArguments(pr.BaseBranch).AddDynamicArguments("origin", baseRepoPath). - Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr.BaseRepo.FullName(), tmpBasePath, err, outbuf.String(), errbuf.String()) - if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { - log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) - } - return "", fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, outbuf.String(), errbuf.String()) + Run(prCtx.RunOpts()); err != nil { + log.Error("%-v Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr, pr.BaseRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String()) + cancel() + return nil, nil, fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String()) } - outbuf.Reset() - errbuf.Reset() if err := git.NewCommand(ctx, "fetch", "origin").AddArguments(fetchArgs...).AddDashesAndList(pr.BaseBranch+":"+baseBranch, pr.BaseBranch+":original_"+baseBranch). 
- Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, outbuf.String(), errbuf.String()) - if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { - log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) - } - return "", fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) + Run(prCtx.RunOpts()); err != nil { + log.Error("%-v Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr, pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String()) + cancel() + return nil, nil, fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String()) } - outbuf.Reset() - errbuf.Reset() if err := git.NewCommand(ctx, "symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseBranch). - Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("Unable to set HEAD as base branch [%s]: %v\n%s\n%s", tmpBasePath, err, outbuf.String(), errbuf.String()) - if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { - log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) - } - return "", fmt.Errorf("Unable to set HEAD as base branch [tmpBasePath]: %w\n%s\n%s", err, outbuf.String(), errbuf.String()) + Run(prCtx.RunOpts()); err != nil { + log.Error("%-v Unable to set HEAD as base branch in [%s]: %v\n%s\n%s", pr, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String()) + cancel() + return nil, nil, fmt.Errorf("Unable to set HEAD as base branch in tmpBasePath: %w\n%s\n%s", err, prCtx.outbuf.String(), prCtx.errbuf.String()) } - outbuf.Reset() - errbuf.Reset() if err := addCacheRepo(tmpBasePath, headRepoPath); err != nil { - log.Error("Unable to add head repository to temporary repo [%s -> %s]: %v", pr.HeadRepo.FullName(), tmpBasePath, err) - if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { - log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) - } - return "", fmt.Errorf("Unable to head base repository to temporary repo [%s -> tmpBasePath]: %w", pr.HeadRepo.FullName(), err) + log.Error("%-v Unable to add head repository to temporary repo [%s -> %s]: %v", pr, pr.HeadRepo.FullName(), tmpBasePath, err) + cancel() + return nil, nil, fmt.Errorf("Unable to add head repository to temporary repo [%s -> tmpBasePath]: %w", pr.HeadRepo.FullName(), err) } if err := git.NewCommand(ctx, "remote", "add").AddDynamicArguments(remoteRepoName, headRepoPath).
- Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr.HeadRepo.FullName(), tmpBasePath, err, outbuf.String(), errbuf.String()) - if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { - log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) - } - return "", fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), err, outbuf.String(), errbuf.String()) + Run(prCtx.RunOpts()); err != nil { + log.Error("%-v Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr, pr.HeadRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String()) + cancel() + return nil, nil, fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String()) } - outbuf.Reset() - errbuf.Reset() trackingBranch := "tracking" // Fetch head branch @@ -178,24 +178,18 @@ func createTemporaryRepo(ctx context.Context, pr *issues_model.PullRequest) (str headBranch = pr.GetGitRefName() } if err := git.NewCommand(ctx, "fetch").AddArguments(fetchArgs...).AddDynamicArguments(remoteRepoName, headBranch+":"+trackingBranch). - Run(&git.RunOpts{ - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { - log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) - } + Run(prCtx.RunOpts()); err != nil { + cancel() if !git.IsBranchExist(ctx, pr.HeadRepo.RepoPath(), pr.HeadBranch) { - return "", models.ErrBranchDoesNotExist{ + return nil, nil, models.ErrBranchDoesNotExist{ BranchName: pr.HeadBranch, } } - log.Error("Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("Unable to fetch head_repo head branch [%s:%s -> tracking in tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), headBranch, err, outbuf.String(), errbuf.String()) + log.Error("%-v Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr, pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String()) + return nil, nil, fmt.Errorf("Unable to fetch head_repo head branch [%s:%s -> tracking in tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), headBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String()) } - outbuf.Reset() - errbuf.Reset() + prCtx.outbuf.Reset() + prCtx.errbuf.Reset() - return tmpBasePath, nil + return prCtx, cancel, nil } diff --git a/services/pull/update.go b/services/pull/update.go index b9525cf0c980..b977dbdba9fe 100644 --- a/services/pull/update.go +++ b/services/pull/update.go @@ -16,61 +16,67 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" - repo_module "code.gitea.io/gitea/modules/repository" ) // Update updates pull request with base branch. 
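Update, below, refuses to do anything when GetDiverging reports that the head branch is not behind the base. Conceptually that ahead/behind pair is what `git rev-list --left-right --count` yields for the two temporary branches; a rough sketch assuming plain git on the PATH and a hypothetical repo path, not the git module's actual implementation:

package main

import (
	"fmt"
	"os/exec"
	"strconv"
	"strings"
)

// divergence reports how many commits tracking is ahead of and behind base,
// using `git rev-list --left-right --count base...tracking`.
func divergence(repoPath, base, tracking string) (ahead, behind int, err error) {
	out, err := exec.Command("git", "-C", repoPath, "rev-list", "--left-right", "--count", base+"..."+tracking).Output()
	if err != nil {
		return 0, 0, err
	}
	// Output is "<left>\t<right>": left counts commits only in base (behind),
	// right counts commits only in tracking (ahead).
	fields := strings.Fields(strings.TrimSpace(string(out)))
	if len(fields) != 2 {
		return 0, 0, fmt.Errorf("unexpected rev-list output: %q", out)
	}
	if behind, err = strconv.Atoi(fields[0]); err != nil {
		return 0, 0, err
	}
	if ahead, err = strconv.Atoi(fields[1]); err != nil {
		return 0, 0, err
	}
	return ahead, behind, nil
}

func main() {
	ahead, behind, err := divergence("/tmp/gitea-pull-1", "base", "tracking")
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("tracking: %d ahead, %d behind\n", ahead, behind)
}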
-func Update(ctx context.Context, pull *issues_model.PullRequest, doer *user_model.User, message string, rebase bool) error { - var ( - pr *issues_model.PullRequest - style repo_model.MergeStyle - ) +func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string, rebase bool) error { + if pr.Flow == issues_model.PullRequestFlowAGit { + // TODO: update of agit flow pull request's head branch is unsupported + return fmt.Errorf("update of agit flow pull request's head branch is unsupported") + } - pullWorkingPool.CheckIn(fmt.Sprint(pull.ID)) - defer pullWorkingPool.CheckOut(fmt.Sprint(pull.ID)) + pullWorkingPool.CheckIn(fmt.Sprint(pr.ID)) + defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID)) - if rebase { - pr = pull - style = repo_model.MergeStyleRebaseUpdate - } else { - // use merge functions but switch repo's and branch's - pr = &issues_model.PullRequest{ - HeadRepoID: pull.BaseRepoID, - BaseRepoID: pull.HeadRepoID, - HeadBranch: pull.BaseBranch, - BaseBranch: pull.HeadBranch, - } - style = repo_model.MergeStyleMerge + diffCount, err := GetDiverging(ctx, pr) + if err != nil { + return err + } else if diffCount.Behind == 0 { + return fmt.Errorf("HeadBranch of PR %d is up to date", pr.Index) } - if pull.Flow == issues_model.PullRequestFlowAGit { - // TODO: Not support update agit flow pull request's head branch - return fmt.Errorf("Not support update agit flow pull request's head branch") + if rebase { + defer func() { + go AddTestPullRequestTask(doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "") + }() + + return updateHeadByRebaseOnToBase(ctx, pr, doer, message) } + if err := pr.LoadBaseRepo(ctx); err != nil { + log.Error("unable to load BaseRepo for %-v during update-by-merge: %v", pr, err) + return fmt.Errorf("unable to load BaseRepo for PR[%d] during update-by-merge: %w", pr.ID, err) + } if err := pr.LoadHeadRepo(ctx); err != nil { - log.Error("LoadHeadRepo: %v", err) - return fmt.Errorf("LoadHeadRepo: %w", err) - } else if err = pr.LoadBaseRepo(ctx); err != nil { - log.Error("LoadBaseRepo: %v", err) - return fmt.Errorf("LoadBaseRepo: %w", err) + log.Error("unable to load HeadRepo for PR %-v during update-by-merge: %v", pr, err) + return fmt.Errorf("unable to load HeadRepo for PR[%d] during update-by-merge: %w", pr.ID, err) + } + if pr.HeadRepo == nil { + // LoadHeadRepo will swallow ErrRepoNotExist so if pr.HeadRepo is still nil recreate the error + err := repo_model.ErrRepoNotExist{ + ID: pr.HeadRepoID, + } + log.Error("unable to load HeadRepo for PR %-v during update-by-merge: %v", pr, err) + return fmt.Errorf("unable to load HeadRepo for PR[%d] during update-by-merge: %w", pr.ID, err) } - diffCount, err := GetDiverging(ctx, pull) - if err != nil { - return err - } else if diffCount.Behind == 0 { - return fmt.Errorf("HeadBranch of PR %d is up to date", pull.Index) + // use merge functions but switch repos and branches + reversePR := &issues_model.PullRequest{ + ID: pr.ID, + + HeadRepoID: pr.BaseRepoID, + HeadRepo: pr.BaseRepo, + HeadBranch: pr.BaseBranch, + + BaseRepoID: pr.HeadRepoID, + BaseRepo: pr.HeadRepo, + BaseBranch: pr.HeadBranch, } - _, err = rawMerge(ctx, pr, doer, style, "", message) + _, err = doMergeAndPush(ctx, reversePR, doer, repo_model.MergeStyleMerge, "", message) defer func() { - if rebase { - go AddTestPullRequestTask(doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "") - return - } - go AddTestPullRequestTask(doer, pr.HeadRepo.ID, pr.HeadBranch, false, "", "") + go AddTestPullRequestTask(doer, reversePR.HeadRepo.ID, 
reversePR.HeadBranch, false, "", "") }() return err @@ -159,27 +165,16 @@ func IsUserAllowedToUpdate(ctx context.Context, pull *issues_model.PullRequest, // GetDiverging determines how many commits a PR is ahead or behind the PR base branch func GetDiverging(ctx context.Context, pr *issues_model.PullRequest) (*git.DivergeObject, error) { - log.Trace("GetDiverging[%d]: compare commits", pr.ID) - if err := pr.LoadBaseRepo(ctx); err != nil { - return nil, err - } - if err := pr.LoadHeadRepo(ctx); err != nil { - return nil, err - } - - tmpRepo, err := createTemporaryRepo(ctx, pr) + log.Trace("GetDiverging[%-v]: compare commits", pr) + prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr) if err != nil { if !models.IsErrBranchDoesNotExist(err) { - log.Error("CreateTemporaryRepo: %v", err) + log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err) } return nil, err } - defer func() { - if err := repo_module.RemoveTemporaryPath(tmpRepo); err != nil { - log.Error("Merge: RemoveTemporaryPath: %s", err) - } - }() + defer cancel() - diff, err := git.GetDivergingCommits(ctx, tmpRepo, "base", "tracking") + diff, err := git.GetDivergingCommits(ctx, prCtx.tmpBasePath, baseBranch, trackingBranch) return &diff, err } diff --git a/services/pull/update_rebase.go b/services/pull/update_rebase.go new file mode 100644 index 000000000000..8e7bfa0ffd4c --- /dev/null +++ b/services/pull/update_rebase.go @@ -0,0 +1,107 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package pull + +import ( + "context" + "fmt" + "strings" + + issues_model "code.gitea.io/gitea/models/issues" + repo_model "code.gitea.io/gitea/models/repo" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/log" + repo_module "code.gitea.io/gitea/modules/repository" + "code.gitea.io/gitea/modules/setting" +) + +// updateHeadByRebaseOnToBase handles updating a PR's head branch by rebasing it on the PR current base branch +func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string) error { + // "Clone" base repo and add the cache headers for the head repo and branch + mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, "") + if err != nil { + return err + } + defer cancel() + + // Determine the old merge-base before the rebase - we use this for LFS push later on + oldMergeBase, _, _ := git.NewCommand(ctx, "merge-base").AddDashesAndList(baseBranch, trackingBranch).RunStdString(&git.RunOpts{Dir: mergeCtx.tmpBasePath}) + oldMergeBase = strings.TrimSpace(oldMergeBase) + + // Rebase the tracking branch on to the base as the staging branch + if err := rebaseTrackingOnToBase(mergeCtx, repo_model.MergeStyleRebaseUpdate); err != nil { + return err + } + + if setting.LFS.StartServer { + // Now we need to ensure that the head repository contains any LFS objects between the new base and the old mergebase + // It's questionable about where this should go - either after or before the push + // I think in the interests of data safety - failures to push to the lfs should prevent + // the push as you can always re-rebase. 
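The oldMergeBase recorded above, before the rebase, is what bounds this LFS work: only commits reachable from the rebased-onto base but not from the old merge-base can reference LFS objects the head repository has never seen. A rough plain-git illustration; the repo path and branch names are hypothetical, and it assumes LFSPush scans exactly that commit range:

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	repo := "/tmp/gitea-pull-1" // hypothetical temporary clone

	// Record the merge-base of base and tracking *before* rebasing.
	out, err := exec.Command("git", "-C", repo, "merge-base", "base", "tracking").Output()
	if err != nil {
		fmt.Println(err)
		return
	}
	oldMergeBase := strings.TrimSpace(string(out))

	// ... rebase tracking onto base as "staging" happens here ...

	// Commits in oldMergeBase..base are the only ones whose LFS objects
	// might still be missing from the head repository after the update.
	commits, err := exec.Command("git", "-C", repo, "rev-list", oldMergeBase+"..base").Output()
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("commits to scan for LFS pointers:\n%s", commits)
}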
+ if err := LFSPush(ctx, mergeCtx.tmpBasePath, baseBranch, oldMergeBase, &issues_model.PullRequest{ + HeadRepoID: pr.BaseRepoID, + BaseRepoID: pr.HeadRepoID, + }); err != nil { + log.Error("Unable to push lfs objects between %s and %s up to head branch in %-v: %v", baseBranch, oldMergeBase, pr, err) + return err + } + } + + // Now determine who the pushing author should be + var headUser *user_model.User + if err := pr.HeadRepo.LoadOwner(ctx); err != nil { + if !user_model.IsErrUserNotExist(err) { + log.Error("Can't find user: %d for head repository in %-v - %v", pr.HeadRepo.OwnerID, pr, err) + return err + } + log.Error("Can't find user: %d for head repository in %-v - defaulting to doer: %-v - %v", pr.HeadRepo.OwnerID, pr, doer, err) + headUser = doer + } else { + headUser = pr.HeadRepo.Owner + } + + pushCmd := git.NewCommand(ctx, "push", "-f", "head_repo"). + AddDynamicArguments(stagingBranch + ":" + git.BranchPrefix + pr.HeadBranch) + + // Push back to the head repository. + // TODO: this causes an api call to "/api/internal/hook/post-receive/...", + // that prevents us from doing the whole merge in one db transaction + mergeCtx.outbuf.Reset() + mergeCtx.errbuf.Reset() + + if err := pushCmd.Run(&git.RunOpts{ + Env: repo_module.FullPushingEnvironment( + headUser, + doer, + pr.HeadRepo, + pr.HeadRepo.Name, + pr.ID, + ), + Dir: mergeCtx.tmpBasePath, + Stdout: mergeCtx.outbuf, + Stderr: mergeCtx.errbuf, + }); err != nil { + if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") { + return &git.ErrPushOutOfDate{ + StdOut: mergeCtx.outbuf.String(), + StdErr: mergeCtx.errbuf.String(), + Err: err, + } + } else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") { + err := &git.ErrPushRejected{ + StdOut: mergeCtx.outbuf.String(), + StdErr: mergeCtx.errbuf.String(), + Err: err, + } + err.GenerateMessage() + return err + } + return fmt.Errorf("git push: %s", mergeCtx.errbuf.String()) + } + mergeCtx.outbuf.Reset() + mergeCtx.errbuf.Reset() + + return nil +} diff --git a/templates/projects/view.tmpl b/templates/projects/view.tmpl index 074c5315ab2a..14a876d8fd94 100644 --- a/templates/projects/view.tmpl +++ b/templates/projects/view.tmpl @@ -238,7 +238,7 @@ {{end}} diff --git a/tests/integration/markup_external_test.go b/tests/integration/markup_external_test.go index 6ea0226ec620..c0e08a4f4f1f 100644 --- a/tests/integration/markup_external_test.go +++ b/tests/integration/markup_external_test.go @@ -19,7 +19,7 @@ import ( func TestExternalMarkupRenderer(t *testing.T) { defer tests.PrepareTestEnv(t)() - if !setting.Database.UseSQLite3 { + if !setting.Database.Type.IsSQLite3() { t.Skip() return } diff --git a/tests/integration/migration-test/migration_test.go b/tests/integration/migration-test/migration_test.go index f030f566ce44..4152379a9b80 100644 --- a/tests/integration/migration-test/migration_test.go +++ b/tests/integration/migration-test/migration_test.go @@ -94,7 +94,7 @@ func availableVersions() ([]string, error) { return nil, err } defer migrationsDir.Close() - versionRE, err := regexp.Compile("gitea-v(?P.+)\\." + regexp.QuoteMeta(setting.Database.Type) + "\\.sql.gz") + versionRE, err := regexp.Compile("gitea-v(?P.+)\\."
+ regexp.QuoteMeta(setting.Database.Type.String()) + "\\.sql.gz") if err != nil { return nil, err } @@ -149,7 +149,7 @@ func restoreOldDB(t *testing.T, version string) bool { } switch { - case setting.Database.UseSQLite3: + case setting.Database.Type.IsSQLite3(): util.Remove(setting.Database.Path) err := os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm) assert.NoError(t, err) @@ -162,7 +162,7 @@ func restoreOldDB(t *testing.T, version string) bool { assert.NoError(t, err) db.Close() - case setting.Database.UseMySQL: + case setting.Database.Type.IsMySQL(): db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/", setting.Database.User, setting.Database.Passwd, setting.Database.Host)) assert.NoError(t, err) @@ -184,7 +184,7 @@ func restoreOldDB(t *testing.T, version string) bool { assert.NoError(t, err) db.Close() - case setting.Database.UsePostgreSQL: + case setting.Database.Type.IsPostgreSQL(): var db *sql.DB var err error if setting.Database.Host[0] == '/' { @@ -252,7 +252,7 @@ func restoreOldDB(t *testing.T, version string) bool { assert.NoError(t, err) db.Close() - case setting.Database.UseMSSQL: + case setting.Database.Type.IsMSSQL(): host, port := setting.ParseMSSQLHostPort(setting.Database.Host) db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;", host, port, "master", setting.Database.User, setting.Database.Passwd)) diff --git a/tests/test_utils.go b/tests/test_utils.go index 5cc31b814ed4..e3e5becfbedf 100644 --- a/tests/test_utils.go +++ b/tests/test_utils.go @@ -74,7 +74,7 @@ func InitTest(requireGitea bool) { } switch { - case setting.Database.UseMySQL: + case setting.Database.Type.IsMySQL(): connType := "tcp" if len(setting.Database.Host) > 0 && setting.Database.Host[0] == '/' { // looks like a unix socket connType = "unix" @@ -89,7 +89,7 @@ func InitTest(requireGitea bool) { if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", setting.Database.Name)); err != nil { log.Fatal("db.Exec: %v", err) } - case setting.Database.UsePostgreSQL: + case setting.Database.Type.IsPostgreSQL(): var db *sql.DB var err error if setting.Database.Host[0] == '/' { @@ -146,7 +146,7 @@ func InitTest(requireGitea bool) { } } - case setting.Database.UseMSSQL: + case setting.Database.Type.IsMSSQL(): host, port := setting.ParseMSSQLHostPort(setting.Database.Host) db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;", host, port, "master", setting.Database.User, setting.Database.Passwd)) diff --git a/web_src/js/features/imagediff.js b/web_src/js/features/imagediff.js index 7a285f1f8dbc..a26b927c33de 100644 --- a/web_src/js/features/imagediff.js +++ b/web_src/js/features/imagediff.js @@ -129,8 +129,8 @@ export function initImageDiff() { initOverlay(createContext($imageAfter[2], $imageBefore[2])); } - hideElem($container.find('> .loader')); $container.find('> .gt-hidden').removeClass('gt-hidden'); + hideElem($container.find('.ui.loader')); } function initSideBySide(sizes) { @@ -155,7 +155,7 @@ export function initImageDiff() { height: sizes.size1.height * factor }); sizes.image1.parent().css({ - margin: `${sizes.ratio[1] * factor + 15}px ${sizes.ratio[0] * factor}px ${sizes.ratio[1] * factor}px`, + margin: `10px auto`, width: sizes.size1.width * factor + 2, height: sizes.size1.height * factor + 2 }); @@ -164,7 +164,7 @@ export function initImageDiff() { height: sizes.size2.height * factor }); sizes.image2.parent().css({ - margin: `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`, 
+ margin: `10px auto`, width: sizes.size2.width * factor + 2, height: sizes.size2.height * factor + 2 }); @@ -255,13 +255,12 @@ export function initImageDiff() { width: sizes.size2.width * factor + 2, height: sizes.size2.height * factor + 2 }); + + // some inner elements are `position: absolute`, so the container's height must be large enough + // the "css(width, height)" is somewhat hacky and not easy to understand, it could be improved in the future sizes.image2.parent().parent().css({ width: sizes.max.width * factor + 2, - height: sizes.max.height * factor + 2 - }); - $container.find('.onion-skin').css({ - width: sizes.max.width * factor + 2, - height: sizes.max.height * factor + 4 + height: sizes.max.height * factor + 2 + 20 /* extra height for inner "position: absolute" elements */, }); const $range = $container.find("input[type='range']"); diff --git a/web_src/js/svg.js b/web_src/js/svg.js index 2132ad312037..6476f16bfb3e 100644 --- a/web_src/js/svg.js +++ b/web_src/js/svg.js @@ -80,7 +80,8 @@ export function svg(name, size = 16, className = '') { const svgNode = document.firstChild; if (size !== 16) svgNode.setAttribute('width', String(size)); if (size !== 16) svgNode.setAttribute('height', String(size)); - if (className) svgNode.classList.add(...className.split(/\s+/)); + // filter array to remove empty string + if (className) svgNode.classList.add(...className.split(/\s+/).filter(Boolean)); return serializer.serializeToString(svgNode); } diff --git a/web_src/less/features/imagediff.less b/web_src/less/features/imagediff.less index a9ba7f8c8f36..07763c15e126 100644 --- a/web_src/less/features/imagediff.less +++ b/web_src/less/features/imagediff.less @@ -1,6 +1,6 @@ .image-diff-container { text-align: center; - padding: 30px 0; + padding: 1em 0; img { border: 1px solid var(--color-primary-light-7); @@ -22,6 +22,7 @@ display: inline-block; line-height: 0; vertical-align: top; + margin: 0 1em; .side-header { font-weight: bold; @@ -98,7 +99,7 @@ } input { - width: 300px; + max-width: 300px; } } }
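For context on the setting.Database.Type checks used in the test changes above (IsMySQL, IsPostgreSQL, IsMSSQL, IsSQLite3, String): the type itself is defined outside this patch, but a minimal string-backed sketch of the shape those helpers imply looks like this; the names and values are assumptions, not the actual modules/setting definition:

package setting

// DatabaseType is a sketch of the string-backed type the helpers above imply;
// the real definition lives in modules/setting and may differ.
type DatabaseType string

func (t DatabaseType) String() string { return string(t) }

func (t DatabaseType) IsMySQL() bool      { return t == "mysql" }
func (t DatabaseType) IsPostgreSQL() bool { return t == "postgres" }
func (t DatabaseType) IsMSSQL() bool      { return t == "mssql" }
func (t DatabaseType) IsSQLite3() bool    { return t == "sqlite3" }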