/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package tasks

import (
"encoding/json"
"fmt"
"net/http"
"net/url"
"time"

"github.com/apache/incubator-devlake/plugins/core"
"github.com/apache/incubator-devlake/plugins/core/dal"
"github.com/apache/incubator-devlake/plugins/github/models"
"github.com/apache/incubator-devlake/plugins/helper"
)
const RAW_COMMIT_TABLE = "github_api_commits"
var CollectApiCommitsMeta = core.SubTaskMeta{
Name: "collectApiCommits",
EntryPoint: CollectApiCommits,
EnabledByDefault: false,
Description: "Collect commits data from Github api",
DomainTypes: []string{core.DOMAIN_TYPE_CODE},
}
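// For reference, a commented-out sketch of how this meta is typically exposed: the plugin lists it in its
// SubTaskMetas so the framework can schedule the subtask. The wiring below is illustrative only and lives
// outside this file:
//
//    func (plugin Github) SubTaskMetas() []core.SubTaskMeta {
//        return []core.SubTaskMeta{
//            CollectApiCommitsMeta,
//            // ...other subtasks
//        }
//    }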
func CollectApiCommits(taskCtx core.SubTaskContext) error {
db := taskCtx.GetDal()
data := taskCtx.GetData().(*GithubTaskData)
since := data.Since
incremental := false
// the user didn't specify a time range to sync, so try to resume from the latest commit already stored in the database
if since == nil {
latestUpdated := &models.GithubCommit{}
err := db.All(
&latestUpdated,
dal.Join("left join _tool_github_repo_commits on _tool_github_commits.sha = _tool_github_repo_commits.commit_sha"),
dal.Join("left join _tool_github_repos on _tool_github_repo_commits.repo_id = _tool_github_repos.github_id"),
dal.Where("_tool_github_repo_commits.repo_id = ? AND _tool_github_repo_commits.connection_id = ?", data.Repo.GithubId, data.Repo.ConnectionId),
dal.Orderby("committed_date DESC"),
dal.Limit(1),
)
if err != nil {
return fmt.Errorf("failed to get latest github commit record: %w", err)
}
if latestUpdated.Sha != "" {
since = &latestUpdated.CommittedDate
incremental = true
}
}
collector, err := helper.NewApiCollector(helper.ApiCollectorArgs{
RawDataSubTaskArgs: helper.RawDataSubTaskArgs{
Ctx: taskCtx,
/*
This struct will be JSON-encoded and stored in the database along with the raw data itself, to identify the
minimal set of data to be processed; for example, Jira commits are processed per board.
*/
Params: GithubApiParams{
ConnectionId: data.Options.ConnectionId,
Owner: data.Options.Owner,
Repo: data.Options.Repo,
},
/*
Table to store the raw data
*/
Table: RAW_COMMIT_TABLE,
},
ApiClient: data.ApiClient,
PageSize: 100,
Incremental: incremental,
/*
The url may use arbitrary variables from different sources in any order, so we use a GoTemplate to allow
maximum flexibility.
Pager contains the information for a particular page, calculated by ApiCollector, and is passed into the
GoTemplate to generate the url for that page.
We do the page-fetching inside ApiCollector because the logic is highly similar across tasks; doing it there
avoids duplicating it in every task, and when we have a better idea (such as a performance improvement) we
can apply it in one place.
*/
UrlTemplate: "repos/{{ .Params.Owner }}/{{ .Params.Repo }}/commits",
/*
(Optional) Return the query string for the request, or plug the values into UrlTemplate directly
*/
Query: func(reqData *helper.RequestData) (url.Values, error) {
query := url.Values{}
query.Set("state", "all")
if since != nil {
// the GitHub API expects `since` as an ISO 8601 timestamp (e.g. 2022-01-02T15:04:05Z)
query.Set("since", since.Format(time.RFC3339))
}
query.Set("direction", "asc")
query.Set("page", fmt.Sprintf("%v", reqData.Pager.Page))
query.Set("per_page", fmt.Sprintf("%v", reqData.Pager.Size))
return query, nil
},
/*
Some APIs might do pagination via HTTP headers
*/
//Header: func(pager *core.Pager) http.Header {
//},
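// Filled in, the commented-out hook above might look like the following sketch. It stays commented out
// because this endpoint paginates via query parameters, and the header names (and the pager fields used)
// are illustrative assumptions only:
//Header: func(pager *core.Pager) http.Header {
//    header := http.Header{}
//    header.Set("X-Page", fmt.Sprintf("%v", pager.Page))
//    header.Set("X-Page-Size", fmt.Sprintf("%v", pager.Size))
//    return header
//},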
/*
Sometimes we need to collect data based on previously collected data; the Jira changelog, for example,
requires an issue id as part of the url.
We can mimic the `stdin` design by accepting an `Input` function which produces an `Iterator`; the collector
then iterates over all records and fetches data for each one, either in parallel or sequentially:
UrlTemplate: "api/3/issue/{{ .Input.ID }}/changelog"
*/
//Input: databaseCommitsIterator,
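// A purely illustrative, commented-out sketch of how such an iterator could be built from previously
// collected rows before calling NewApiCollector; the cursor/constructor names below are assumptions,
// not part of this plugin:
//
//    cursor, _ := db.Cursor(dal.From(&models.GithubCommit{}))
//    databaseCommitsIterator, _ := helper.NewDalCursorIterator(db, cursor, reflect.TypeOf(models.GithubCommit{}))
//
//Input: databaseCommitsIterator,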
/*
For api endpoints that return the total number of pages, ApiCollector can collect pages in parallel with ease;
other techniques are required if this information is missing.
*/
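// GetTotalPagesFromResponse is defined elsewhere in this plugin; for GitHub it is expected to derive the
// total page count from the response (typically from the `Link` pagination header), which lets ApiCollector
// schedule all pages up front.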
GetTotalPages: GetTotalPagesFromResponse,
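/*
ResponseParser splits an http response body into individual records, as json.RawMessage values, which the
collector then stores into the raw table configured above.
*/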
ResponseParser: func(res *http.Response) ([]json.RawMessage, error) {
var items []json.RawMessage
err := helper.UnmarshalResponse(res, &items)
if err != nil {
return nil, err
}
return items, nil
},
})
if err != nil {
return err
}
return collector.Execute()
}