Refactor Change Lead Time - Step 5: enrich cicd_deployment_commits.prev_success_deployment_commit_id (#4922)
* fix: rename cicd_deployment_commit
* refactor: simplify join condition for deployment_commits_generator
* feat: add data_enricher helper
* feat: add prev_deployment_commit_enricher
* feat: e2e test for prev_deployment_commit_enricher
* fix: unit-test and comments
diff --git a/backend/core/models/common/base.go b/backend/core/models/common/base.go
index 4ea92c9..1f37e73 100644
--- a/backend/core/models/common/base.go
+++ b/backend/core/models/common/base.go
@@ -45,6 +45,14 @@
RawDataRemark string `gorm:"column:_raw_data_remark" json:"_raw_data_remark"`
}
+type GetRawDataOrigin interface {
+ GetRawDataOrigin() *RawDataOrigin
+}
+
+func (c *RawDataOrigin) GetRawDataOrigin() *RawDataOrigin {
+ return c
+}
+
func NewNoPKModel() NoPKModel {
now := time.Now()
return NoPKModel{
diff --git a/backend/core/models/domainlayer/devops/cicd_deployment.go b/backend/core/models/domainlayer/devops/cicd_deployment_commit.go
similarity index 100%
rename from backend/core/models/domainlayer/devops/cicd_deployment.go
rename to backend/core/models/domainlayer/devops/cicd_deployment_commit.go
diff --git a/backend/helpers/pluginhelper/api/batch_save_divider.go b/backend/helpers/pluginhelper/api/batch_save_divider.go
index f1a10e3..b0c3431 100644
--- a/backend/helpers/pluginhelper/api/batch_save_divider.go
+++ b/backend/helpers/pluginhelper/api/batch_save_divider.go
@@ -19,12 +19,13 @@
import (
"fmt"
+ "reflect"
+
"github.com/apache/incubator-devlake/core/context"
"github.com/apache/incubator-devlake/core/dal"
"github.com/apache/incubator-devlake/core/errors"
"github.com/apache/incubator-devlake/core/log"
"github.com/apache/incubator-devlake/core/models/common"
- "reflect"
)
// BatchSaveDivider creates and caches BatchSave, this is helpful when dealing with massive amount of data records
@@ -76,12 +77,14 @@
}
// all good, delete outdated records before we insertion
d.log.Debug("deleting outdate records for %s", rowElemType.Name())
- err = d.db.Delete(
- row,
- dal.Where("_raw_data_table = ? AND _raw_data_params = ?", d.table, d.params),
- )
- if err != nil {
- return nil, err
+ if d.table != "" && d.params != "" {
+ err = d.db.Delete(
+ row,
+ dal.Where("_raw_data_table = ? AND _raw_data_params = ?", d.table, d.params),
+ )
+ if err != nil {
+ return nil, err
+ }
}
}
return batch, nil
diff --git a/backend/helpers/pluginhelper/api/batch_save_divider_test.go b/backend/helpers/pluginhelper/api/batch_save_divider_test.go
index 13ec366..8d26916 100644
--- a/backend/helpers/pluginhelper/api/batch_save_divider_test.go
+++ b/backend/helpers/pluginhelper/api/batch_save_divider_test.go
@@ -18,13 +18,14 @@
package api
import (
+ "reflect"
+ "testing"
+ "time"
+
"github.com/apache/incubator-devlake/core/models/common"
"github.com/apache/incubator-devlake/helpers/unithelper"
mockcontext "github.com/apache/incubator-devlake/mocks/core/context"
mockdal "github.com/apache/incubator-devlake/mocks/core/dal"
- "reflect"
- "testing"
- "time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
@@ -66,7 +67,7 @@
},
)
- divider := NewBatchSaveDivider(mockRes, 10, "", "")
+ divider := NewBatchSaveDivider(mockRes, 10, "a", "b")
// for same type should return the same BatchSave
jiraIssue1, err := divider.ForType(reflect.TypeOf(&MockJirIssueBsd{}))
diff --git a/backend/helpers/pluginhelper/api/data_enricher.go b/backend/helpers/pluginhelper/api/data_enricher.go
new file mode 100644
index 0000000..0024b75
--- /dev/null
+++ b/backend/helpers/pluginhelper/api/data_enricher.go
@@ -0,0 +1,124 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package api
+
+import (
+ "reflect"
+ "regexp"
+ "strings"
+
+ "github.com/apache/incubator-devlake/core/dal"
+ "github.com/apache/incubator-devlake/core/errors"
+ "github.com/apache/incubator-devlake/core/models/common"
+ plugin "github.com/apache/incubator-devlake/core/plugin"
+)
+
+// DataEnrichHandler accepts a row from the Input and produces arbitrary records.
+// You are free to modify the given `row` in place and include it in the returned result for it to be saved.
+type DataEnrichHandler[InputRowType any] func(row *InputRowType) ([]interface{}, errors.Error)
+
+// DataEnricherArgs includes the arguments needed for data enrichment
+type DataEnricherArgs[InputRowType any] struct {
+ Ctx plugin.SubTaskContext
+ Name string // Enricher name, which will be put into _raw_data_remark
+ Input dal.Rows
+ Enrich DataEnrichHandler[InputRowType]
+ BatchSize int
+}
+
+// DataEnricher helps you enrich Data with Cancellation and BatchSave supports
+type DataEnricher[InputRowType any] struct {
+ args *DataEnricherArgs[InputRowType]
+}
+
+var dataEnricherNamePattern = regexp.MustCompile(`^\w+$`)
+
+// NewDataEnricher creates a new DataEnricher
+func NewDataEnricher[InputRowType any](args DataEnricherArgs[InputRowType]) (*DataEnricher[InputRowType], errors.Error) {
+ // process args
+ if args.Name == "" || !dataEnricherNamePattern.MatchString(args.Name) {
+ return nil, errors.Default.New("DataEnricher: Name is required and should contain only word characters (a-zA-Z0-9_)")
+ }
+ if args.BatchSize == 0 {
+ args.BatchSize = 500
+ }
+ return &DataEnricher[InputRowType]{
+ args: &args,
+ }, nil
+}
+
+func (enricher *DataEnricher[InputRowType]) Execute() errors.Error {
+ // load data from database
+ db := enricher.args.Ctx.GetDal()
+
+ // batch save divider
+ divider := NewBatchSaveDivider(enricher.args.Ctx, enricher.args.BatchSize, "", "")
+
+ // set progress
+ enricher.args.Ctx.SetProgress(0, -1)
+
+ cursor := enricher.args.Input
+ defer cursor.Close()
+ ctx := enricher.args.Ctx.GetContext()
+ // iterate all rows
+ for cursor.Next() {
+ select {
+ case <-ctx.Done():
+ return errors.Convert(ctx.Err())
+ default:
+ }
+ inputRow := new(InputRowType)
+ err := db.Fetch(cursor, inputRow)
+ if err != nil {
+ return errors.Default.Wrap(err, "error fetching rows")
+ }
+
+ results, err := enricher.args.Enrich(inputRow)
+ if err != nil {
+ return errors.Default.Wrap(err, "error calling plugin implementation")
+ }
+
+ for _, result := range results {
+ // get the batch operator for the specific type
+ batch, err := divider.ForType(reflect.TypeOf(result))
+ if err != nil {
+ return errors.Default.Wrap(err, "error getting batch from result")
+ }
+ // append enricher to data origin remark
+ if getRawDataOrigin, ok := result.(common.GetRawDataOrigin); ok {
+ origin := getRawDataOrigin.GetRawDataOrigin()
+ enricherComponent := enricher.args.Name + "," // name is word characters only
+ if !strings.Contains(origin.RawDataRemark, enricherComponent) {
+ origin.RawDataRemark += enricherComponent
+ }
+ }
+ // records get saved into db when slots were max outed
+ err = batch.Add(result)
+ if err != nil {
+ return errors.Default.Wrap(err, "error adding result to batch")
+ }
+ }
+ enricher.args.Ctx.IncProgress(1)
+ }
+
+ // save the last batches
+ return divider.Close()
+}
+
+// Check if DataEnricher implements SubTask interface
+var _ plugin.SubTask = (*DataEnricher[any])(nil)
diff --git a/backend/plugins/dora/e2e/prev_deployment_commit_enricher_test.go b/backend/plugins/dora/e2e/prev_deployment_commit_enricher_test.go
new file mode 100644
index 0000000..13f7e72
--- /dev/null
+++ b/backend/plugins/dora/e2e/prev_deployment_commit_enricher_test.go
@@ -0,0 +1,58 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package e2e
+
+import (
+ "testing"
+
+ "github.com/apache/incubator-devlake/core/models/domainlayer/crossdomain"
+ "github.com/apache/incubator-devlake/core/models/domainlayer/devops"
+ "github.com/apache/incubator-devlake/helpers/e2ehelper"
+ "github.com/apache/incubator-devlake/plugins/dora/impl"
+ "github.com/apache/incubator-devlake/plugins/dora/tasks"
+)
+
+func TestPrevSuccessDeploymentCommitEnricherDataFlow(t *testing.T) {
+ var plugin impl.Dora
+ dataflowTester := e2ehelper.NewDataFlowTester(t, "dora", plugin)
+
+ taskData := &tasks.DoraTaskData{
+ Options: &tasks.DoraOptions{
+ ProjectName: "project1",
+ },
+ }
+ // import raw data table
+ dataflowTester.ImportCsvIntoTabler("./prev_success_deployment_commit/project_mapping.csv", &crossdomain.ProjectMapping{})
+ dataflowTester.ImportCsvIntoTabler("./prev_success_deployment_commit/cicd_deployment_commits_before.csv", &devops.CicdDeploymentCommit{})
+
+ // verify converter
+ dataflowTester.Subtask(tasks.EnrichPrevSuccessDeploymentCommitMeta, taskData)
+ dataflowTester.VerifyTableWithOptions(&devops.CicdDeploymentCommit{}, e2ehelper.TableOptions{
+ CSVRelPath: "./prev_success_deployment_commit/cicd_deployment_commits_after.csv",
+ TargetFields: []string{
+ "id",
+ "result",
+ "started_date",
+ "cicd_pipeline_id",
+ "cicd_scope_id",
+ "repo_url",
+ "environment",
+ "prev_success_deployment_commit_id",
+ },
+ })
+}
diff --git a/backend/plugins/dora/e2e/prev_success_deployment_commit/cicd_deployment_commits_after.csv b/backend/plugins/dora/e2e/prev_success_deployment_commit/cicd_deployment_commits_after.csv
new file mode 100644
index 0000000..b0d2b8f
--- /dev/null
+++ b/backend/plugins/dora/e2e/prev_success_deployment_commit/cicd_deployment_commits_after.csv
@@ -0,0 +1,17 @@
+id,result,started_date,cicd_pipeline_id,cicd_scope_id,repo_url,environment,prev_success_deployment_commit_id,_raw_data_remark,commit_sha,created_date
+1,SUCCESS,2023-04-10T06:51:47.000+00:00,pipeline1,cicd1,REPO111,PRODUCTION,,,1,2023-4-10 6:51:47
+2,SUCCESS,2023-04-10T06:53:51.000+00:00,pipeline1,cicd1,REPO222,PRODUCTION,,,2,2023-4-10 6:53:51
+3,SUCCESS,2023-04-13T07:21:16.000+00:00,pipeline2,cicd2,REPO111,PRODUCTION,,,3,2023-4-13 7:21:16
+4,SUCCESS,2023-04-13T07:22:14.000+00:00,pipeline2,cicd2,REPO333,PRODUCTION,,,4,2023-4-13 7:22:14
+5,SUCCESS,2023-04-13T07:28:14.000+00:00,pipeline4,cicd1,REPO111,PRODUCTION,1,,5,2023-4-13 7:28:14
+6,SUCCESS,2023-04-13T07:29:34.000+00:00,pipeline4,cicd1,REPO222,PRODUCTION,2,,6,2023-4-13 7:29:34
+7,SUCCESS,2023-04-13T07:31:53.000+00:00,pipeline5,cicd1,REPO111,STAGING,,,7,2023-4-13 7:31:53
+8,SUCCESS,2023-04-13T07:36:30.000+00:00,pipeline5,cicd1,REPO111,STAGING,7,,8,2023-4-13 7:36:30
+9,SUCCESS,2023-04-13T07:51:26.000+00:00,pipeline6,cicd1,REPO111,PRODUCTION,5,,9,2023-4-13 7:51:26
+10,SUCCESS,2023-04-13T07:53:31.000+00:00,pipeline6,cicd1,REPO222,PRODUCTION,6,,10,2023-4-13 7:53:31
+11,FAILURE,2023-04-13T07:54:39.000+00:00,pipeline7,cicd2,REPO111,PRODUCTION,,failed record should be ignored,11,2023-4-13 7:54:39
+12,SUCCESS,2023-04-13T07:55:01.000+00:00,pipeline7,cicd2,REPO333,PRODUCTION,4,,12,2023-4-13 7:55:01
+13,SUCCESS,2023-04-13T07:56:39.000+00:00,pipeline7,cicd2,REPO111,PRODUCTION,3,retried and success should be ok,13,2023-4-13 7:56:39
+14,FAILURE,2023-04-13T07:57:26.000+00:00,pipeline8,cicd3,REPO111,PRODUCTION,,,14,2023-4-13 7:57:26
+15,SUCCESS,2023-04-13T07:57:45.000+00:00,pipeline9,cicd3,REPO111,PRODUCTION,,not belongs to the project,15,2023-4-13 7:57:45
+16,SUCCESS,2023-04-13T07:58:24.000+00:00,pipeline10,cicd3,REPO333,,,,16,2023-4-13 7:58:24
\ No newline at end of file
diff --git a/backend/plugins/dora/e2e/prev_success_deployment_commit/cicd_deployment_commits_before.csv b/backend/plugins/dora/e2e/prev_success_deployment_commit/cicd_deployment_commits_before.csv
new file mode 100644
index 0000000..367ca6d
--- /dev/null
+++ b/backend/plugins/dora/e2e/prev_success_deployment_commit/cicd_deployment_commits_before.csv
@@ -0,0 +1,17 @@
+id,result,started_date,cicd_pipeline_id,cicd_scope_id,repo_url,environment,prev_success_deployment_commit_id,_raw_data_remark,commit_sha,created_date
+1,SUCCESS,2023-04-10T06:51:47.000+00:00,pipeline1,cicd1,REPO111,PRODUCTION,,,1,2023-4-10 6:51:47
+2,SUCCESS,2023-04-10T06:53:51.000+00:00,pipeline1,cicd1,REPO222,PRODUCTION,,,2,2023-4-10 6:53:51
+3,SUCCESS,2023-04-13T07:21:16.000+00:00,pipeline2,cicd2,REPO111,PRODUCTION,,,3,2023-4-13 7:21:16
+4,SUCCESS,2023-04-13T07:22:14.000+00:00,pipeline2,cicd2,REPO333,PRODUCTION,,,4,2023-4-13 7:22:14
+5,SUCCESS,2023-04-13T07:28:14.000+00:00,pipeline4,cicd1,REPO111,PRODUCTION,,,5,2023-4-13 7:28:14
+6,SUCCESS,2023-04-13T07:29:34.000+00:00,pipeline4,cicd1,REPO222,PRODUCTION,,,6,2023-4-13 7:29:34
+7,SUCCESS,2023-04-13T07:31:53.000+00:00,pipeline5,cicd1,REPO111,STAGING,,,7,2023-4-13 7:31:53
+8,SUCCESS,2023-04-13T07:36:30.000+00:00,pipeline5,cicd1,REPO111,STAGING,,,8,2023-4-13 7:36:30
+9,SUCCESS,2023-04-13T07:51:26.000+00:00,pipeline6,cicd1,REPO111,PRODUCTION,,,9,2023-4-13 7:51:26
+10,SUCCESS,2023-04-13T07:53:31.000+00:00,pipeline6,cicd1,REPO222,PRODUCTION,,,10,2023-4-13 7:53:31
+11,FAILURE,2023-04-13T07:54:39.000+00:00,pipeline7,cicd2,REPO111,PRODUCTION,,failed record should be ignored,11,2023-4-13 7:54:39
+12,SUCCESS,2023-04-13T07:55:01.000+00:00,pipeline7,cicd2,REPO333,PRODUCTION,,,12,2023-4-13 7:55:01
+13,SUCCESS,2023-04-13T07:56:39.000+00:00,pipeline7,cicd2,REPO111,PRODUCTION,,retried and success should be ok,13,2023-4-13 7:56:39
+14,FAILURE,2023-04-13T07:57:26.000+00:00,pipeline8,cicd3,REPO111,PRODUCTION,,,14,2023-4-13 7:57:26
+15,SUCCESS,2023-04-13T07:57:45.000+00:00,pipeline9,cicd3,REPO111,PRODUCTION,,not belongs to the project,15,2023-4-13 7:57:45
+16,SUCCESS,2023-04-13T07:58:24.000+00:00,pipeline10,cicd3,REPO333,,,,16,2023-4-13 7:58:24
\ No newline at end of file
diff --git a/backend/plugins/dora/e2e/prev_success_deployment_commit/project_mapping.csv b/backend/plugins/dora/e2e/prev_success_deployment_commit/project_mapping.csv
new file mode 100644
index 0000000..296ab9e
--- /dev/null
+++ b/backend/plugins/dora/e2e/prev_success_deployment_commit/project_mapping.csv
@@ -0,0 +1,9 @@
+project_name,table,row_id
+project1,cicd_scopes,cicd1
+project1,cicd_scopes,cicd2
+project1,repos,repo1
+project1,repos,repo2
+project2,cicd_scopes,cicd3
+project1,boards,board1
+project1,boards,board2
+project2,boards,board3
\ No newline at end of file
diff --git a/backend/plugins/dora/e2e/raw_tables/cicd_deployment_commits_before_enrich.csv b/backend/plugins/dora/e2e/raw_tables/cicd_deployment_commits_before_enrich.csv
new file mode 100644
index 0000000..fd31d1d
--- /dev/null
+++ b/backend/plugins/dora/e2e/raw_tables/cicd_deployment_commits_before_enrich.csv
@@ -0,0 +1,16 @@
+"id","created_at","updated_at","_raw_data_params","_raw_data_table","_raw_data_id","_raw_data_remark","cicd_scope_id","cicd_pipeline_id","name","result","status","environment","created_date","started_date","duration_sec","commit_sha","ref_name","repo_id","repo_url","prev_success_deployment_commit_id"
+bitbucket:BitbucketPipeline:1:{1cf1fe33-0a60-4b54-bf95-841619d258f5}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-13 08:56:48.427","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,792,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{1cf1fe33-0a60-4b54-bf95-841619d258f5},bitbucket:BitbucketPipeline:1:Klesh-Wong/readmemd-edited-online-with-bitbucket-1681372451595,FAILURE,DONE,"","2023-04-13 07:54:18.166","2023-04-13 07:54:39.370",38,ee9b8b6cbb5ab5a0a25fa2638ba1cf27103ba46f,Klesh-Wong/readmemd-edited-online-with-bitbucket-1681372451595,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{28639065-9cd6-4fff-9771-2fa2d88143e4}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-13 08:56:48.427","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,795,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{28639065-9cd6-4fff-9771-2fa2d88143e4},bitbucket:BitbucketPipeline:1:master,SUCCESS,DONE,"","2023-04-13 07:57:23.685","2023-04-13 07:57:44.915",8,"01799b80cdc783b086de0042159ddb9ddb664f26",master,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{36e9e1d4-33c1-4321-9112-09e15ce57f00}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-12 08:41:54.584","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,780,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{36e9e1d4-33c1-4321-9112-09e15ce57f00},bitbucket:BitbucketPipeline:1:master,SUCCESS,DONE,"","2023-04-10 06:51:08.154","2023-04-10 06:51:47.450",14,cb83612227bde32d9cf8b02b598b806f15ede7ec,master,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{4a4882ad-7ecf-42a2-a287-c495dc68d81b}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-13 08:56:48.427","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,788,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{4a4882ad-7ecf-42a2-a287-c495dc68d81b},bitbucket:BitbucketPipeline:1:production,SUCCESS,DONE,PRODUCTION,"2023-04-13 07:31:31.128","2023-04-13 07:31:52.534",8,"621bf05e78c82986ca85b13002ad600f6e1d639f",production,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{5009185c-615d-40b2-ba1a-1da5dee75e04}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-13 08:56:48.427","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,794,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{5009185c-615d-40b2-ba1a-1da5dee75e04},bitbucket:BitbucketPipeline:1:Klesh-Wong/readmemd-edited-online-with-bitbucket-1681372618019,FAILURE,DONE,"","2023-04-13 07:57:04.625","2023-04-13 07:57:25.883",40,"11bfff2b4b703483e6029c0303b27abdbade7a5d",Klesh-Wong/readmemd-edited-online-with-bitbucket-1681372618019,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{566033c8-cdbb-41c2-8b65-3e633e264cea}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-13 08:56:48.427","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,784,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{566033c8-cdbb-41c2-8b65-3e633e264cea},bitbucket:BitbucketPipeline:1:Klesh-Wong/bitbucketpipelinesyml-edited-online-with-1681370423083,SUCCESS,DONE,"","2023-04-13 07:20:44.684","2023-04-13 07:21:15.640",11,fd693c87b080a2beccfa535cc55fb3282af05450,Klesh-Wong/bitbucketpipelinesyml-edited-online-with-1681370423083,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{67933764-eef4-4453-803f-767f849da3d9}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-13 08:56:48.427","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,786,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{67933764-eef4-4453-803f-767f849da3d9},bitbucket:BitbucketPipeline:1:master,SUCCESS,DONE,"","2023-04-13 07:27:52.416","2023-04-13 07:28:13.853",9,"14616ed3c991e411c9d9acc78fb3a381e4657141",master,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{8052b6ec-5112-4366-bf2b-23b6ef1c608f}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-13 08:56:48.427","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,796,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{8052b6ec-5112-4366-bf2b-23b6ef1c608f},bitbucket:BitbucketPipeline:1:production,SUCCESS,DONE,PRODUCTION,"2023-04-13 07:58:02.261","2023-04-13 07:58:23.599",8,"01799b80cdc783b086de0042159ddb9ddb664f26",production,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{81a18389-648c-4505-bd1e-cd06550eedf7}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-13 08:56:48.427","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,789,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{81a18389-648c-4505-bd1e-cd06550eedf7},bitbucket:BitbucketPipeline:1:Klesh-Wong/readmemd-edited-online-with-bitbucket-1681371361733,SUCCESS,DONE,"","2023-04-13 07:36:08.521","2023-04-13 07:36:30.035",8,"8632b02c4e1928b1b64a560192e47f09f2245a54",Klesh-Wong/readmemd-edited-online-with-bitbucket-1681371361733,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{840f1e2b-bd50-4654-a817-8212a1c76274}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-13 08:56:48.427","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,793,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{840f1e2b-bd50-4654-a817-8212a1c76274},bitbucket:BitbucketPipeline:1:master,SUCCESS,DONE,"","2023-04-13 07:54:38.809","2023-04-13 07:55:00.616",9,"21572c884c617c910a3223e34543e2a0d97b9a6c",master,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{a7ca0bdc-2850-45aa-8b0c-3a8f843a528e}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-13 08:56:48.427","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,791,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{a7ca0bdc-2850-45aa-8b0c-3a8f843a528e},bitbucket:BitbucketPipeline:1:master,SUCCESS,DONE,"","2023-04-13 07:53:09.218","2023-04-13 07:53:30.878",7,"06483553ff334bd28cc646178c4dad920c89de48",master,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{b3ed67b1-f577-448a-b846-40fc6179f994}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-13 08:56:48.427","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,787,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{b3ed67b1-f577-448a-b846-40fc6179f994},bitbucket:BitbucketPipeline:1:Klesh-Wong/readmemd-edited-online-with-bitbucket-1681370936423,SUCCESS,DONE,"","2023-04-13 07:29:13.340","2023-04-13 07:29:34.434",7,"0a3996d862e7fdd7deb946c4d9908d2c2c6adced",Klesh-Wong/readmemd-edited-online-with-bitbucket-1681370936423,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{b9ff0087-7621-4259-82de-37cd8a2e6f03}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-12 08:41:54.584","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,783,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{b9ff0087-7621-4259-82de-37cd8a2e6f03},bitbucket:BitbucketPipeline:1:production,SUCCESS,DONE,PRODUCTION,"2023-04-10 06:53:15.843","2023-04-10 06:53:51.299",12,cb83612227bde32d9cf8b02b598b806f15ede7ec,production,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{db499f52-5afb-4833-810c-0ea7d4190cfc}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-13 08:56:48.427","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,785,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{db499f52-5afb-4833-810c-0ea7d4190cfc},bitbucket:BitbucketPipeline:1:production,SUCCESS,DONE,PRODUCTION,"2023-04-13 07:21:43.105","2023-04-13 07:22:14.135",12,"8b82a8bef37ada01f4a9eb69369485953fe4873d",production,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
+bitbucket:BitbucketPipeline:1:{e804c02f-e91a-4340-b392-bfbf965e97c1}:https://bitbucket.org/zhenmianws/helloworldrepo,"2023-04-13 08:56:48.427","2023-04-13 08:56:48.427","{""ConnectionId"":1,""FullName"":""zhenmianws/helloworldrepo""}",_raw_bitbucket_api_pipelines,790,"",bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,bitbucket:BitbucketPipeline:1:{e804c02f-e91a-4340-b392-bfbf965e97c1},bitbucket:BitbucketPipeline:1:production,SUCCESS,DONE,PRODUCTION,"2023-04-13 07:51:04.418","2023-04-13 07:51:25.800",9,"06483553ff334bd28cc646178c4dad920c89de48",production,bitbucket:BitbucketRepo:1:zhenmianws/helloworldrepo,https://bitbucket.org/zhenmianws/helloworldrepo,""
diff --git a/backend/plugins/dora/impl/impl.go b/backend/plugins/dora/impl/impl.go
index e4bbcfa..0ffd140 100644
--- a/backend/plugins/dora/impl/impl.go
+++ b/backend/plugins/dora/impl/impl.go
@@ -83,6 +83,7 @@
func (p Dora) SubTaskMetas() []plugin.SubTaskMeta {
return []plugin.SubTaskMeta{
tasks.DeploymentCommitsGeneratorMeta,
+ tasks.EnrichPrevSuccessDeploymentCommitMeta,
tasks.EnrichTaskEnvMeta,
tasks.CalculateChangeLeadTimeMeta,
tasks.ConnectIncidentToDeploymentMeta,
diff --git a/backend/plugins/dora/tasks/deployment_commits_generator.go b/backend/plugins/dora/tasks/deployment_commits_generator.go
index 2aae4a4..aebe18b 100644
--- a/backend/plugins/dora/tasks/deployment_commits_generator.go
+++ b/backend/plugins/dora/tasks/deployment_commits_generator.go
@@ -77,7 +77,7 @@
`),
dal.From("cicd_pipeline_commits pc"),
dal.Join("LEFT JOIN cicd_pipelines p ON (p.id = pc.pipeline_id)"),
- dal.Join("LEFT JOIN project_mapping pm ON (pm.table = ? AND pm.row_id = p.cicd_scope_id)"),
+ dal.Join("LEFT JOIN project_mapping pm ON (pm.table = 'cicd_scopes' AND pm.row_id = p.cicd_scope_id)"),
dal.Where(
`
pm.project_name = ? AND (
@@ -85,7 +85,7 @@
SELECT 1 FROM cicd_tasks t WHERE t.pipeline_id = p.id AND t.type = p.type
)
)
- `, devops.STAGING, devops.PRODUCTION, "cicd_scopes", data.Options.ProjectName, devops.DEPLOYMENT,
+ `, devops.TESTING, devops.STAGING, devops.PRODUCTION, data.Options.ProjectName, devops.DEPLOYMENT,
),
)
if err != nil {
diff --git a/backend/plugins/dora/tasks/prev_deployment_commit_enricher.go b/backend/plugins/dora/tasks/prev_deployment_commit_enricher.go
new file mode 100644
index 0000000..8cb3741
--- /dev/null
+++ b/backend/plugins/dora/tasks/prev_deployment_commit_enricher.go
@@ -0,0 +1,106 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package tasks
+
+import (
+ "github.com/apache/incubator-devlake/core/dal"
+ "github.com/apache/incubator-devlake/core/errors"
+ "github.com/apache/incubator-devlake/core/models/domainlayer/devops"
+ "github.com/apache/incubator-devlake/core/plugin"
+ "github.com/apache/incubator-devlake/helpers/pluginhelper/api"
+)
+
+var EnrichPrevSuccessDeploymentCommitMeta = plugin.SubTaskMeta{
+ Name: "enrichPrevSuccessDeploymentCommits",
+ EntryPoint: EnrichPrevSuccessDeploymentCommit,
+ EnabledByDefault: true,
+ Description: "filling the prev_success_deployment_commit_id for cicd_deployment_commits table",
+ DomainTypes: []string{plugin.DOMAIN_TYPE_CODE},
+}
+
+// EnrichPrevSuccessDeploymentCommit fills prev_success_deployment_commit_id for the cicd_deployment_commits table.
+// Please note that deploying multiple environment (such as TESTING) copies
+// (such as testing1 and testing2) using multiple steps with Deployment tools
+// like Bitbucket or Gitlab is not supported and may result in incorrect
+// outcomes. It is recommended that you deploy all copies in a single step.
+// We arrived at this decision because we believe that deploying multiple
+// environment copies using multiple steps is not a common or reasonable
+// practice. However, if you have strong evidence to suggest otherwise, you are
+// free to file an issue on our GitHub repository.
+func EnrichPrevSuccessDeploymentCommit(taskCtx plugin.SubTaskContext) errors.Error {
+ db := taskCtx.GetDal()
+ data := taskCtx.GetData().(*DoraTaskData)
+ // step 1. select all successful deployments in the project and sort them by cicd_scope_id, repo_url, env
+ // and started_date
+ cursor, err := db.Cursor(
+ dal.Select("dc.*"),
+ dal.From("cicd_deployment_commits dc"),
+ dal.Join("LEFT JOIN project_mapping pm ON (pm.table = 'cicd_scopes' AND pm.row_id = dc.cicd_scope_id)"),
+ dal.Where(
+ `
+ dc.started_date IS NOT NULL
+ AND dc.environment IS NOT NULL AND dc.environment != ''
+ AND dc.repo_url IS NOT NULL AND dc.repo_url != ''
+ AND pm.project_name = ? AND dc.result = ?
+ `,
+ data.Options.ProjectName, devops.SUCCESS,
+ ),
+ dal.Orderby(`dc.cicd_scope_id, dc.repo_url, dc.environment, dc.started_date`),
+ )
+ if err != nil {
+ return err
+ }
+ defer cursor.Close()
+
+ prev_cicd_scope_id := ""
+ prev_repo_url := ""
+ prev_env := ""
+ prev_success_deployment_id := ""
+
+ enricher, err := api.NewDataEnricher(api.DataEnricherArgs[devops.CicdDeploymentCommit]{
+ Ctx: taskCtx,
+ Name: "prev_deployment_commit_id_enricher",
+ Input: cursor,
+ Enrich: func(deploymentCommit *devops.CicdDeploymentCommit) ([]interface{}, errors.Error) {
+ // step 2. group them by cicd_scope_id/repo_url/env
+ // whenever cicd_scope_id/repo_url/env shifted, it is a new set of consecutive deployments
+ if prev_cicd_scope_id != deploymentCommit.CicdScopeId ||
+ prev_repo_url != deploymentCommit.RepoUrl ||
+ prev_env != deploymentCommit.Environment {
+ // reset prev_success_deployment_id
+ prev_success_deployment_id = ""
+ }
+
+ // now, simply connect the consecutive deployment to its previous one
+ deploymentCommit.PrevSuccessDeploymentCommitId = prev_success_deployment_id
+ // NOTE(review): removed leftover debug println that printed every processed record
+
+ // preserve variables for the next record
+ prev_cicd_scope_id = deploymentCommit.CicdScopeId
+ prev_repo_url = deploymentCommit.RepoUrl
+ prev_env = deploymentCommit.Environment
+ prev_success_deployment_id = deploymentCommit.Id
+ return []interface{}{deploymentCommit}, nil
+ },
+ })
+ if err != nil {
+ return err
+ }
+
+ return enricher.Execute()
+}