Rename to pipeline in DB and JSONs (#1296)

Closes #1282 

Follow-up to #1224, addresses #745

- renames JSON fields (build -> pipeline); see the decoding example below
- adds a migration that renames the corresponding database columns
- fixes some comments that still referred to builds
qwerty287 2022-10-22 15:54:43 +02:00 committed by GitHub
parent 1114595078
commit f88c70b55e
24 changed files with 278 additions and 163 deletions
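
For API and webhook consumers, the visible effect of the JSON renames in this commit is that payloads now use pipeline keys where they previously used build ones (for example "pipeline_number" and "pipeline_started" in the queue info, and "pipeline" instead of "build" in the event message). The following is a minimal, hypothetical Go sketch of decoding the renamed queue fields; the struct and payload values are illustrative stand-ins, not the actual Woodpecker model:

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// Trimmed-down stand-in for the queue info payload; the JSON tags mirror
// the renamed fields introduced by this commit.
type queueInfo struct {
	ID       string    `json:"id"`
	Repo     string    `json:"repository"`
	Pipeline string    `json:"pipeline_number"`  // was "build_number"
	Started  time.Time `json:"pipeline_started"` // was "build_started"
}

func main() {
	// Made-up payload for illustration only.
	raw := []byte(`{
		"id": "1",
		"repository": "octocat/hello-world",
		"pipeline_number": "42",
		"pipeline_started": "2022-10-22T15:54:43+02:00"
	}`)

	var info queueInfo
	if err := json.Unmarshal(raw, &info); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", info)
}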


@ -32,9 +32,9 @@ type State struct {
type Info struct {
ID string `json:"id"`
Repo string `json:"repository"`
Pipeline string `json:"build_number"`
Started time.Time `json:"build_started"`
Timeout time.Duration `json:"build_timeout"`
Pipeline string `json:"pipeline_number"`
Started time.Time `json:"pipeline_started"`
Timeout time.Duration `json:"pipeline_timeout"`
}
func (s *State) Add(id string, timeout time.Duration, repo, pipeline string) {


@ -36,5 +36,5 @@ type Config struct {
// PipelineConfig is the n:n relation between Pipeline and Config
type PipelineConfig struct {
ConfigID int64 `json:"-" xorm:"UNIQUE(s) NOT NULL 'config_id'"`
PipelineID int64 `json:"-" xorm:"UNIQUE(s) NOT NULL 'build_id'"`
PipelineID int64 `json:"-" xorm:"UNIQUE(s) NOT NULL 'pipeline_id'"`
}


@ -21,5 +21,5 @@ type EventType string
// Event represents a pipeline event.
type Event struct {
Repo Repo `json:"repo"`
Pipeline Pipeline `json:"build"`
Pipeline Pipeline `json:"pipeline"`
}


@ -23,20 +23,20 @@ type Feed struct {
Name string `json:"name" xorm:"repo_name"`
FullName string `json:"full_name" xorm:"repo_full_name"`
Number int64 `json:"number,omitempty" xorm:"build_number"`
Event string `json:"event,omitempty" xorm:"build_event"`
Status string `json:"status,omitempty" xorm:"build_status"`
Created int64 `json:"created_at,omitempty" xorm:"build_created"`
Started int64 `json:"started_at,omitempty" xorm:"build_started"`
Finished int64 `json:"finished_at,omitempty" xorm:"build_finished"`
Commit string `json:"commit,omitempty" xorm:"build_commit"`
Branch string `json:"branch,omitempty" xorm:"build_branch"`
Ref string `json:"ref,omitempty" xorm:"build_ref"`
Refspec string `json:"refspec,omitempty" xorm:"build_refspec"`
Remote string `json:"remote,omitempty" xorm:"build_remote"`
Title string `json:"title,omitempty" xorm:"build_title"`
Message string `json:"message,omitempty" xorm:"build_message"`
Author string `json:"author,omitempty" xorm:"build_author"`
Avatar string `json:"author_avatar,omitempty" xorm:"build_avatar"`
Email string `json:"author_email,omitempty" xorm:"build_email"`
Number int64 `json:"number,omitempty" xorm:"pipeline_number"`
Event string `json:"event,omitempty" xorm:"pipeline_event"`
Status string `json:"status,omitempty" xorm:"pipeline_status"`
Created int64 `json:"created_at,omitempty" xorm:"pipeline_created"`
Started int64 `json:"started_at,omitempty" xorm:"pipeline_started"`
Finished int64 `json:"finished_at,omitempty" xorm:"pipeline_finished"`
Commit string `json:"commit,omitempty" xorm:"pipeline_commit"`
Branch string `json:"branch,omitempty" xorm:"pipeline_branch"`
Ref string `json:"ref,omitempty" xorm:"pipeline_ref"`
Refspec string `json:"refspec,omitempty" xorm:"pipeline_refspec"`
Remote string `json:"remote,omitempty" xorm:"pipeline_remote"`
Title string `json:"title,omitempty" xorm:"pipeline_title"`
Message string `json:"message,omitempty" xorm:"pipeline_message"`
Author string `json:"author,omitempty" xorm:"pipeline_author"`
Avatar string `json:"author_avatar,omitempty" xorm:"pipeline_avatar"`
Email string `json:"author_email,omitempty" xorm:"pipeline_email"`
}


@ -28,7 +28,7 @@ type FileStore interface {
// File represents a pipeline artifact.
type File struct {
ID int64 `json:"id" xorm:"pk autoincr 'file_id'"`
PipelineID int64 `json:"-" xorm:"INDEX 'file_build_id'"`
PipelineID int64 `json:"-" xorm:"INDEX 'file_pipeline_id'"`
ProcID int64 `json:"proc_id" xorm:"UNIQUE(s) INDEX 'file_proc_id'"`
PID int `json:"pid" xorm:"file_pid"`
Name string `json:"name" xorm:"UNIQUE(s) file_name"`


@ -17,37 +17,37 @@ package model
// swagger:model pipeline
type Pipeline struct {
ID int64 `json:"id" xorm:"pk autoincr 'build_id'"`
RepoID int64 `json:"-" xorm:"UNIQUE(s) INDEX 'build_repo_id'"`
Number int64 `json:"number" xorm:"UNIQUE(s) 'build_number'"`
Author string `json:"author" xorm:"INDEX 'build_author'"`
ConfigID int64 `json:"-" xorm:"build_config_id"`
Parent int64 `json:"parent" xorm:"build_parent"`
Event WebhookEvent `json:"event" xorm:"build_event"`
Status StatusValue `json:"status" xorm:"INDEX 'build_status'"`
Error string `json:"error" xorm:"build_error"`
Enqueued int64 `json:"enqueued_at" xorm:"build_enqueued"`
Created int64 `json:"created_at" xorm:"build_created"`
ID int64 `json:"id" xorm:"pk autoincr 'pipeline_id'"`
RepoID int64 `json:"-" xorm:"UNIQUE(s) INDEX 'pipeline_repo_id'"`
Number int64 `json:"number" xorm:"UNIQUE(s) 'pipeline_number'"`
Author string `json:"author" xorm:"INDEX 'pipeline_author'"`
ConfigID int64 `json:"-" xorm:"pipeline_config_id"`
Parent int64 `json:"parent" xorm:"pipeline_parent"`
Event WebhookEvent `json:"event" xorm:"pipeline_event"`
Status StatusValue `json:"status" xorm:"INDEX 'pipeline_status'"`
Error string `json:"error" xorm:"pipeline_error"`
Enqueued int64 `json:"enqueued_at" xorm:"pipeline_enqueued"`
Created int64 `json:"created_at" xorm:"pipeline_created"`
Updated int64 `json:"updated_at" xorm:"updated NOT NULL DEFAULT 0 'updated'"`
Started int64 `json:"started_at" xorm:"build_started"`
Finished int64 `json:"finished_at" xorm:"build_finished"`
Deploy string `json:"deploy_to" xorm:"build_deploy"`
Commit string `json:"commit" xorm:"build_commit"`
Branch string `json:"branch" xorm:"build_branch"`
Ref string `json:"ref" xorm:"build_ref"`
Refspec string `json:"refspec" xorm:"build_refspec"`
Remote string `json:"remote" xorm:"build_remote"`
Title string `json:"title" xorm:"build_title"`
Message string `json:"message" xorm:"TEXT 'build_message'"`
Timestamp int64 `json:"timestamp" xorm:"build_timestamp"`
Sender string `json:"sender" xorm:"build_sender"` // uses reported user for webhooks and name of cron for cron pipelines
Avatar string `json:"author_avatar" xorm:"build_avatar"`
Email string `json:"author_email" xorm:"build_email"`
Link string `json:"link_url" xorm:"build_link"`
Signed bool `json:"signed" xorm:"build_signed"` // deprecate
Verified bool `json:"verified" xorm:"build_verified"` // deprecate
Reviewer string `json:"reviewed_by" xorm:"build_reviewer"`
Reviewed int64 `json:"reviewed_at" xorm:"build_reviewed"`
Started int64 `json:"started_at" xorm:"pipeline_started"`
Finished int64 `json:"finished_at" xorm:"pipeline_finished"`
Deploy string `json:"deploy_to" xorm:"pipeline_deploy"`
Commit string `json:"commit" xorm:"pipeline_commit"`
Branch string `json:"branch" xorm:"pipeline_branch"`
Ref string `json:"ref" xorm:"pipeline_ref"`
Refspec string `json:"refspec" xorm:"pipeline_refspec"`
Remote string `json:"remote" xorm:"pipeline_remote"`
Title string `json:"title" xorm:"pipeline_title"`
Message string `json:"message" xorm:"TEXT 'pipeline_message'"`
Timestamp int64 `json:"timestamp" xorm:"pipeline_timestamp"`
Sender string `json:"sender" xorm:"pipeline_sender"` // uses reported user for webhooks and name of cron for cron pipelines
Avatar string `json:"author_avatar" xorm:"pipeline_avatar"`
Email string `json:"author_email" xorm:"pipeline_email"`
Link string `json:"link_url" xorm:"pipeline_link"`
Signed bool `json:"signed" xorm:"pipeline_signed"` // deprecate
Verified bool `json:"verified" xorm:"pipeline_verified"` // deprecate
Reviewer string `json:"reviewed_by" xorm:"pipeline_reviewer"`
Reviewed int64 `json:"reviewed_at" xorm:"pipeline_reviewed"`
Procs []*Proc `json:"procs,omitempty" xorm:"-"`
Files []*File `json:"files,omitempty" xorm:"-"`
ChangedFiles []string `json:"changed_files,omitempty" xorm:"json 'changed_files'"`


@ -32,7 +32,7 @@ type ProcStore interface {
// swagger:model proc
type Proc struct {
ID int64 `json:"id" xorm:"pk autoincr 'proc_id'"`
PipelineID int64 `json:"build_id" xorm:"UNIQUE(s) INDEX 'proc_build_id'"`
PipelineID int64 `json:"pipeline_id" xorm:"UNIQUE(s) INDEX 'proc_pipeline_id'"`
PID int `json:"pid" xorm:"UNIQUE(s) 'proc_pid'"`
PPID int `json:"ppid" xorm:"proc_ppid"`
PGID int `json:"pgid" xorm:"proc_pgid"`


@ -37,7 +37,7 @@ type config struct {
type requestStructure struct {
Repo *model.Repo `json:"repo"`
Pipeline *model.Pipeline `json:"build"`
Pipeline *model.Pipeline `json:"pipeline"`
Configuration []*config `json:"configs"`
}


@ -391,7 +391,7 @@ func TestFetchFromConfigService(t *testing.T) {
type incoming struct {
Repo *model.Repo `json:"repo"`
Build *model.Pipeline `json:"build"`
Build *model.Pipeline `json:"pipeline"`
Configuration []*config `json:"config"`
}


@ -23,7 +23,7 @@ func (s storage) ConfigsForPipeline(pipelineID int64) ([]*model.Config, error) {
return configs, s.engine.
Table("config").
Join("LEFT", "pipeline_config", "config.config_id = pipeline_config.config_id").
Where("pipeline_config.build_id = ?", pipelineID).
Where("pipeline_config.pipeline_id = ?", pipelineID).
Find(&configs)
}
@ -41,22 +41,22 @@ func (s storage) ConfigFindIdentical(repoID int64, hash string) (*model.Config,
func (s storage) ConfigFindApproved(config *model.Config) (bool, error) {
/* TODO: use builder (do not behave same as pure sql, fix that)
return s.engine.Table(new(model.Pipeline)).
Join("INNER", "pipeline_config", "pipelines.build_id = pipeline_config.build_id" ).
Where(builder.Eq{"pipelines.build_repo_id": config.RepoID}).
Join("INNER", "pipeline_config", "pipelines.pipeline_id = pipeline_config.pipeline_id" ).
Where(builder.Eq{"pipelines.pipeline_repo_id": config.RepoID}).
And(builder.Eq{"pipeline_config.config_id": config.ID}).
And(builder.In("pipelines.build_status", "blocked", "pending")).
And(builder.In("pipelines.pipeline_status", "blocked", "pending")).
Exist(new(model.Pipeline))
*/
c, err := s.engine.SQL(`
SELECT build_id FROM pipelines
WHERE build_repo_id = ?
AND build_id in (
SELECT build_id
SELECT pipeline_id FROM pipelines
WHERE pipeline_repo_id = ?
AND pipeline_id in (
SELECT pipeline_id
FROM pipeline_config
WHERE pipeline_config.config_id = ?
)
AND build_status NOT IN ('blocked', 'pending')
AND pipeline_status NOT IN ('blocked', 'pending')
LIMIT 1
`, config.RepoID, config.ID).Count()
return c > 0, err


@ -27,27 +27,27 @@ SELECT
repo_owner
,repo_name
,repo_full_name
,build_number
,build_event
,build_status
,build_created
,build_started
,build_finished
,build_commit
,build_branch
,build_ref
,build_refspec
,build_remote
,build_title
,build_message
,build_author
,build_email
,build_avatar
,pipeline_number
,pipeline_event
,pipeline_status
,pipeline_created
,pipeline_started
,pipeline_finished
,pipeline_commit
,pipeline_branch
,pipeline_ref
,pipeline_refspec
,pipeline_remote
,pipeline_title
,pipeline_message
,pipeline_author
,pipeline_email
,pipeline_avatar
FROM
pipelines p
,repos r
WHERE p.build_repo_id = r.repo_id
AND p.build_status IN ('pending','running')
WHERE p.pipeline_repo_id = r.repo_id
AND p.pipeline_status IN ('pending','running')
`).Find(&feed)
return feed, err
}
@ -60,28 +60,28 @@ SELECT
repo_owner
,repo_name
,repo_full_name
,build_number
,build_event
,build_status
,build_created
,build_started
,build_finished
,build_commit
,build_branch
,build_ref
,build_refspec
,build_remote
,build_title
,build_message
,build_author
,build_email
,build_avatar
,pipeline_number
,pipeline_event
,pipeline_status
,pipeline_created
,pipeline_started
,pipeline_finished
,pipeline_commit
,pipeline_branch
,pipeline_ref
,pipeline_refspec
,pipeline_remote
,pipeline_title
,pipeline_message
,pipeline_author
,pipeline_email
,pipeline_avatar
FROM repos
INNER JOIN perms ON perms.perm_repo_id = repos.repo_id
INNER JOIN pipelines ON pipelines.build_repo_id = repos.repo_id
INNER JOIN pipelines ON pipelines.pipeline_repo_id = repos.repo_id
WHERE perms.perm_user_id = ?
AND (perms.perm_push = ? OR perms.perm_admin = ?)
ORDER BY build_id DESC
ORDER BY pipeline_id DESC
LIMIT 50
`, user.ID, true, true).Find(&feed)
}
@ -94,26 +94,26 @@ SELECT
repo_owner
,repo_name
,repo_full_name
,build_number
,build_event
,build_status
,build_created
,build_started
,build_finished
,build_commit
,build_branch
,build_ref
,build_refspec
,build_remote
,build_title
,build_message
,build_author
,build_email
,build_avatar
FROM repos LEFT OUTER JOIN pipelines ON build_id = (
SELECT build_id FROM pipelines
WHERE pipelines.build_repo_id = repos.repo_id
ORDER BY build_id DESC
,pipeline_number
,pipeline_event
,pipeline_status
,pipeline_created
,pipeline_started
,pipeline_finished
,pipeline_commit
,pipeline_branch
,pipeline_ref
,pipeline_refspec
,pipeline_remote
,pipeline_title
,pipeline_message
,pipeline_author
,pipeline_email
,pipeline_avatar
FROM repos LEFT OUTER JOIN pipelines ON pipeline_id = (
SELECT pipeline_id FROM pipelines
WHERE pipelines.pipeline_repo_id = repos.repo_id
ORDER BY pipeline_id DESC
LIMIT 1
)
INNER JOIN perms ON perms.perm_repo_id = repos.repo_id


@ -23,7 +23,7 @@ import (
func (s storage) FileList(pipeline *model.Pipeline) ([]*model.File, error) {
files := make([]*model.File, 0, perPage)
return files, s.engine.Where("file_build_id = ?", pipeline.ID).Find(&files)
return files, s.engine.Where("file_pipeline_id = ?", pipeline.ID).Find(&files)
}
func (s storage) FileFind(proc *model.Proc, name string) (*model.File, error) {


@ -0,0 +1,97 @@
// Copyright 2022 Woodpecker Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package migration
import (
"strings"
"xorm.io/xorm"
)
type oldBuildColumn struct {
table string
columns []string
}
var renameColumnsBuildsToPipeline = task{
name: "rename-columns-builds-to-pipeline",
required: true,
fn: func(sess *xorm.Session) error {
var oldColumns []*oldBuildColumn
oldColumns = append(oldColumns, &oldBuildColumn{
table: "pipelines",
columns: []string{
"build_id",
"build_repo_id",
"build_number",
"build_author",
"build_config_id",
"build_parent",
"build_event",
"build_status",
"build_error",
"build_enqueued",
"build_created",
"build_started",
"build_finished",
"build_deploy",
"build_commit",
"build_branch",
"build_ref",
"build_refspec",
"build_remote",
"build_title",
"build_message",
"build_timestamp",
"build_sender",
"build_avatar",
"build_email",
"build_link",
"build_signed",
"build_verified",
"build_reviewer",
"build_reviewed",
},
},
)
oldColumns = append(oldColumns, &oldBuildColumn{
table: "pipeline_config",
columns: []string{"build_id"},
})
oldColumns = append(oldColumns, &oldBuildColumn{
table: "files",
columns: []string{"file_build_id"},
})
oldColumns = append(oldColumns, &oldBuildColumn{
table: "procs",
columns: []string{"proc_build_id"},
})
for _, table := range oldColumns {
for _, column := range table.columns {
err := renameColumn(sess, table.table, column, strings.Replace(column, "build_", "pipeline_", 1))
if err != nil {
return err
}
}
}
return nil
},
}
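
A note on the rename loop above: strings.Replace(column, "build_", "pipeline_", 1) rewrites the first "build_" occurrence wherever it appears in the column name, so it covers both plain names like build_id and embedded ones like file_build_id and proc_build_id. A standalone sketch of just that name mapping (not the migration itself), using the same call:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// The same mapping the migration applies: the first "build_" becomes "pipeline_".
	for _, column := range []string{"build_id", "file_build_id", "proc_build_id"} {
		fmt.Printf("%-14s -> %s\n", column, strings.Replace(column, "build_", "pipeline_", 1))
	}
}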


@ -248,6 +248,19 @@ func alterColumnNull(sess *xorm.Session, table, column string, null bool) error
}
}
func renameColumn(sess *xorm.Session, table, column, newName string) error {
dialect := sess.Engine().Dialect().URI().DBType
switch dialect {
case schemas.MYSQL,
schemas.POSTGRES,
schemas.SQLITE:
_, err := sess.Exec(fmt.Sprintf("ALTER TABLE `%s` RENAME COLUMN `%s` TO `%s`;", table, column, newName))
return err
default:
return fmt.Errorf("dialect '%s' not supported", dialect)
}
}
var (
whitespaces = regexp.MustCompile(`\s+`)
columnSeparator = regexp.MustCompile(`\s?,\s?`)
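
For each renamed column, the renameColumn helper above issues a single ALTER TABLE ... RENAME COLUMN statement. That syntax needs reasonably recent engines (SQLite 3.25+, MySQL 8.0+); PostgreSQL has supported it for much longer. A small sketch that only prints the statement the helper would build for one rename from this commit, without opening a database session:

package main

import "fmt"

func main() {
	// Mirrors the statement format used by renameColumn; does not touch a database.
	table, column, newName := "procs", "proc_build_id", "proc_pipeline_id"
	fmt.Printf("ALTER TABLE `%s` RENAME COLUMN `%s` TO `%s`;\n", table, column, newName)
}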


@ -37,6 +37,7 @@ var migrationTasks = []*task{
&alterTableSecretsAddUserCol,
&lowercaseSecretNames,
&renameBuildsToPipeline,
&renameColumnsBuildsToPipeline,
}
var allBeans = []interface{}{
@ -116,6 +117,10 @@ func Migrate(e *xorm.Engine) error {
return err
}
if err := e.ClearCache(allBeans...); err != nil {
return err
}
return syncAll(e)
}


@ -58,7 +58,7 @@ func (s storage) GetPipelineLast(repo *model.Repo, branch string) (*model.Pipeli
Branch: branch,
Event: model.EventPush,
}
return pipeline, wrapGet(s.engine.Desc("build_number").Get(pipeline))
return pipeline, wrapGet(s.engine.Desc("pipeline_number").Get(pipeline))
}
func (s storage) GetPipelineLastBefore(repo *model.Repo, branch string, num int64) (*model.Pipeline, error) {
@ -67,15 +67,15 @@ func (s storage) GetPipelineLastBefore(repo *model.Repo, branch string, num int6
Branch: branch,
}
return pipeline, wrapGet(s.engine.
Desc("build_number").
Where("build_id < ?", num).
Desc("pipeline_number").
Where("pipeline_id < ?", num).
Get(pipeline))
}
func (s storage) GetPipelineList(repo *model.Repo, page int) ([]*model.Pipeline, error) {
pipelines := make([]*model.Pipeline, 0, perPage)
return pipelines, s.engine.Where("build_repo_id = ?", repo.ID).
Desc("build_number").
return pipelines, s.engine.Where("pipeline_repo_id = ?", repo.ID).
Desc("pipeline_number").
Limit(perPage, perPage*(page-1)).
Find(&pipelines)
}
@ -84,9 +84,9 @@ func (s storage) GetPipelineList(repo *model.Repo, page int) ([]*model.Pipeline,
func (s storage) GetActivePipelineList(repo *model.Repo, page int) ([]*model.Pipeline, error) {
pipelines := make([]*model.Pipeline, 0, perPage)
query := s.engine.
Where("build_repo_id = ?", repo.ID).
In("build_status", model.StatusPending, model.StatusRunning, model.StatusBlocked).
Desc("build_number")
Where("pipeline_repo_id = ?", repo.ID).
In("pipeline_status", model.StatusPending, model.StatusRunning, model.StatusBlocked).
Desc("pipeline_number")
if page > 0 {
query = query.Limit(perPage, perPage*(page-1))
}
@ -115,7 +115,7 @@ func (s storage) CreatePipeline(pipeline *model.Pipeline, procList ...*model.Pro
// calc pipeline number
var number int64
if _, err := sess.SQL("SELECT MAX(build_number) FROM `pipelines` WHERE build_repo_id = ?", pipeline.RepoID).Get(&number); err != nil {
if _, err := sess.SQL("SELECT MAX(pipeline_number) FROM `pipelines` WHERE pipeline_repo_id = ?", pipeline.RepoID).Get(&number); err != nil {
return err
}
pipeline.Number = number + 1
@ -147,7 +147,7 @@ func deletePipeline(sess *xorm.Session, pipelineID int64) error {
// delete related procs
for startProcs := 0; ; startProcs += perPage {
procIDs := make([]int64, 0, perPage)
if err := sess.Limit(perPage, startProcs).Table("procs").Cols("proc_id").Where("proc_build_id = ?", pipelineID).Find(&procIDs); err != nil {
if err := sess.Limit(perPage, startProcs).Table("procs").Cols("proc_id").Where("proc_pipeline_id = ?", pipelineID).Find(&procIDs); err != nil {
return err
}
if len(procIDs) == 0 {
@ -160,7 +160,7 @@ func deletePipeline(sess *xorm.Session, pipelineID int64) error {
}
}
}
if _, err := sess.Where("build_id = ?", pipelineID).Delete(new(model.PipelineConfig)); err != nil {
if _, err := sess.Where("pipeline_id = ?", pipelineID).Delete(new(model.PipelineConfig)); err != nil {
return err
}
_, err := sess.ID(pipelineID).Delete(new(model.Pipeline))


@ -45,7 +45,7 @@ func (s storage) ProcChild(pipeline *model.Pipeline, ppid int, child string) (*m
func (s storage) ProcList(pipeline *model.Pipeline) ([]*model.Proc, error) {
procList := make([]*model.Proc, 0, perPage)
return procList, s.engine.
Where("proc_build_id = ?", pipeline.ID).
Where("proc_pipeline_id = ?", pipeline.ID).
OrderBy("proc_pid").
Find(&procList)
}
@ -79,11 +79,11 @@ func (s storage) ProcClear(pipeline *model.Pipeline) error {
return err
}
if _, err := sess.Where("file_build_id = ?", pipeline.ID).Delete(new(model.File)); err != nil {
if _, err := sess.Where("file_pipeline_id = ?", pipeline.ID).Delete(new(model.File)); err != nil {
return err
}
if _, err := sess.Where("proc_build_id = ?", pipeline.ID).Delete(new(model.Proc)); err != nil {
if _, err := sess.Where("proc_pipeline_id = ?", pipeline.ID).Delete(new(model.Proc)); err != nil {
return err
}


@ -117,7 +117,7 @@ func (s storage) DeleteRepo(repo *model.Repo) error {
// delete related pipelines
for startPipelines := 0; ; startPipelines += batchSize {
pipelineIDs := make([]int64, 0, batchSize)
if err := sess.Limit(batchSize, startPipelines).Table("pipelines").Cols("build_id").Where("build_repo_id = ?", repo.ID).Find(&pipelineIDs); err != nil {
if err := sess.Limit(batchSize, startPipelines).Table("pipelines").Cols("pipeline_id").Where("pipeline_repo_id = ?", repo.ID).Find(&pipelineIDs); err != nil {
return err
}
if len(pipelineIDs) == 0 {


@ -1,4 +1,4 @@
import BuildStore from '~/store/pipelines';
import PipelineStore from '~/store/pipelines';
import RepoStore from '~/store/repos';
import { repoSlug } from '~/utils/helpers';
@ -12,7 +12,7 @@ export default () => {
return;
}
const repoStore = RepoStore();
const buildStore = BuildStore();
const pipelineStore = PipelineStore();
initialized = true;
@ -24,19 +24,19 @@ export default () => {
const { repo } = data;
repoStore.setRepo(repo);
// contains build update
// contains pipeline update
if (!data.pipeline) {
return;
}
const { pipeline } = data;
buildStore.setPipeline(repo.owner, repo.name, pipeline);
buildStore.setPipelineFeedItem({ ...pipeline, name: repo.name, owner: repo.owner, full_name: repoSlug(repo) });
pipelineStore.setPipeline(repo.owner, repo.name, pipeline);
pipelineStore.setPipelineFeedItem({ ...pipeline, name: repo.name, owner: repo.owner, full_name: repoSlug(repo) });
// contains proc update
if (!data.proc) {
return;
}
const { proc } = data;
buildStore.setProc(repo.owner, repo.name, pipeline.number, proc);
pipelineStore.setProc(repo.owner, repo.name, pipeline.number, proc);
});
};


@ -19,7 +19,7 @@ type RepoListOptions = {
flush?: boolean;
};
type BuildOptions = {
type PipelineOptions = {
branch: string;
variables: Record<string, string>;
};
@ -58,7 +58,7 @@ export default class WoodpeckerClient extends ApiClient {
return this._post(`/api/repos/${owner}/${repo}/repair`);
}
createPipeline(owner: string, repo: string, options: BuildOptions): Promise<Pipeline> {
createPipeline(owner: string, repo: string, options: PipelineOptions): Promise<Pipeline> {
return this._post(`/api/repos/${owner}/${repo}/pipelines`, options) as Promise<Pipeline>;
}


@ -1,39 +1,39 @@
// A build for a repository.
// A pipeline for a repository.
export type Pipeline = {
id: number;
// The build number.
// This number is specified within the context of the repository the build belongs to and is unique within that.
// The pipeline number.
// This number is specified within the context of the repository the pipeline belongs to and is unique within that.
number: number;
parent: number;
event: 'push' | 'tag' | 'pull_request' | 'deployment' | 'cron' | 'manual';
// The current status of the build.
// The current status of the pipeline.
status: PipelineStatus;
error: string;
// When the build request was received.
// When the pipeline request was received.
created_at: number;
// When the build was updated last time in database.
// When the pipeline was updated last time in database.
updated_at: number;
// When the build was enqueued.
// When the pipeline was enqueued.
enqueued_at: number;
// When the build began execution.
// When the pipeline began execution.
started_at: number;
// When the build was finished.
// When the pipeline was finished.
finished_at: number;
// Where the deployment should go.
deploy_to: string;
// The commit for the build.
// The commit for the pipeline.
commit: string;
// The branch the commit was pushed to.
@ -68,7 +68,7 @@ export type Pipeline = {
author_email: string;
// The link to view the repository.
// This link will point to the repository state associated with the build's commit.
// This link will point to the repository state associated with the pipeline's commit.
link_url: string;
signed: boolean;
@ -79,8 +79,8 @@ export type Pipeline = {
reviewed_at: number;
// The jobs associated with this build.
// A build will have multiple jobs if a matrix build was used or if a rebuild was requested.
// The jobs associated with this pipeline.
// A pipeline will have multiple jobs if a matrix pipeline was used or if a rebuild was requested.
procs?: PipelineProc[];
changed_files?: string[];
@ -100,7 +100,7 @@ export type PipelineStatus =
export type PipelineProc = {
id: number;
build_id: number;
pipeline_id: number;
pid: number;
ppid: number;
pgid: number;


@ -1,4 +1,4 @@
// A config for a build.
// A config for a pipeline.
export type PipelineConfig = {
hash: string;
name: string;


@ -35,16 +35,16 @@ export type Repo = {
// Whether the repository is publicly visible.
private: boolean;
// Whether the repository has trusted access for builds.
// Whether the repository has trusted access for pipelines.
// If the repository is trusted then the host network can be used and
// volumes can be created.
trusted: boolean;
// x-dart-type: Duration
// The amount of time in minutes before the build is killed.
// The amount of time in minutes before the pipeline is killed.
timeout: number;
// Whether pull requests should trigger a build.
// Whether pull requests should trigger a pipeline.
allow_pr: boolean;
config_file: string;


@ -54,7 +54,7 @@ type (
Timeout *int64 `json:"timeout,omitempty"`
Visibility *string `json:"visibility"`
AllowPull *bool `json:"allow_pr,omitempty"`
PipelineCounter *int `json:"build_counter,omitempty"`
PipelineCounter *int `json:"pipeline_counter,omitempty"`
}
// Pipeline defines a pipeline object.