fix
@@ -6,31 +6,29 @@ import yaml
import collections.abc
import argparse

root = '.'
def save_json(filename,data):
    print('saving to', filename, json.dumps(data))
    with open(filename, 'w') as file:
root = "."
ci_root = ".tekton"


def save_json(filename, data):
    """Save a Json file."""
    print("saving to", filename, json.dumps(data))
    with open(filename, "w") as file:
        file.write(json.dumps(data))
# detect files based on their extention
def detect_files(root_dir):
    ret = {}
    supported_extention = ['ts','js', 'py', 'yaml','yml', 'sh', 'rs', 'Dockerfile']
    supported_filename = ['package.json', 'yarn.lock', 'schema.prisma']
    for directory, subdir_list, file_list in os.walk(root_dir):
        for filename in file_list:
            if filename in supported_filename:
                if not filename in ret:
                    ret[filename] = []
                ret[filename].append(os.path.join(directory,filename))
            ext = filename.split(".")[len(filename.split("."))-1]
            if ext in supported_extention:
                if not ext in ret:
                    ret[ext] = []
                ret[ext].append(os.path.join(directory,filename))
    return ret


def load_json(filename):
    """Load a json file."""
    data = {}
    with open(filename, "r") as file:
        data = json.loads(file.read())
    return data


def load_yaml(filename):
    docs=[]
    with open(filename, 'r') as file:
    """Load a file."""
    docs = []
    with open(filename, "r") as file:
        try:
            data = yaml.safe_load_all(file)
            for doc in data:
@@ -40,37 +38,154 @@ def load_yaml(filename):
                else:
                    pass
    return docs
def load_json(filename):
    data={}
    with open(filename, 'r') as file:
        data = json.loads(file.read())
    return data

def append_key(to,key,val):

def load_config(root_dir, ci_root_dir):
    """Load the configuration from the configuration directory."""
    ret = {
        "files": [],
        "languages": ["markdown", "docker", "rust", "shell", "python", "yaml", "js"],
        "markdown": {"extentions": ["md"]},
        "docker": {"extentions": ["Dockerfile"]},
        "rust": {"extentions": ["rs"]},
        "shell": {"extentions": ["sh", "ksh"], "shellcheck-args": []},
        "python": {
            "extentions": ["py"],
            "black-args": ["--check", "--diff"],
            "pylint-args": [],
        },
        "yaml": {
            "extentions": ["yaml", "yml"],
            "detect": True,
            "ansible": {"enable": False},
            "kube": {"enable": False},
        },
        "js": {
            "extentions": ["ts", "js"],
            "files": ["package.json", "yarn.lock", "schema.prisma"],
        },
    }
    if not os.path.isdir(ci_root_dir):
        return ret
    files = [
        f
        for f in os.listdir(ci_root_dir)
        if os.path.isfile(os.path.join(ci_root_dir, f)) and re.match(".yaml$", f)
    ]
    if "auto-ci.yaml" in files:
        for doc in load_yaml(os.path.join(ci_root_dir, "auto-ci.yaml")):
            ret = {**ret, **doc}
    ret["files"] = files
    return ret
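
The sketch below is editorial and not part of this commit: it illustrates the merge performed by load_config(), where each document of .tekton/auto-ci.yaml is folded over the defaults with a shallow {**ret, **doc} merge, so an override replaces a top-level key wholesale. The override values are hypothetical.

# --- illustrative example (not part of the commit) ---
demo_defaults = {
    "yaml": {"extentions": ["yaml", "yml"], "detect": True},
    "shell": {"extentions": ["sh", "ksh"], "shellcheck-args": []},
}
demo_override = {"yaml": {"detect": False, "kube": {"enable": True}}}  # hypothetical auto-ci.yaml document
demo_merged = {**demo_defaults, **demo_override}
# demo_merged["yaml"] == {"detect": False, "kube": {"enable": True}}: the default
# "extentions" entry under "yaml" is gone because the merge is shallow, while
# demo_merged["shell"] keeps its default untouched.
# --- end of illustrative example ---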


def detect_files(config, root_dir):
    """Detect files based on their extention."""
    ret = {}
    supported_extentions = []
    supported_filename = []
    for lang in config["languages"]:
        if "extentions" in config[lang]:
            supported_extentions.extend(config[lang]["extentions"])
if "files" in lang:
            supported_filename.extend(config[lang]["files"])
    for directory, subdir_list, file_list in os.walk(root_dir):
        for filename in file_list:
            if filename in supported_filename:
                if not filename in ret:
                    ret[filename] = []
                ret[filename].append(os.path.join(directory, filename))
            ext = filename.split(".")[len(filename.split(".")) - 1]
            if ext in supported_extentions:
                if not ext in ret:
                    ret[ext] = []
                ret[ext].append(os.path.join(directory, filename))
    return ret
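
Editorial sketch, not part of this commit: detect_files() returns a mapping keyed by extension or by exact filename, each pointing at the list of matching paths. The paths below are hypothetical.

# --- illustrative example (not part of the commit) ---
demo_detected = {
    "py": ["./app/main.py", "./tests/test_main.py"],
    "Dockerfile": ["./Dockerfile", "./worker/Dockerfile"],
    "package.json": ["./package.json"],
}
# Keys such as "py" come from the per-language "extentions" lists, while keys
# such as "package.json" come from the per-language "files" lists.
# --- end of illustrative example ---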


def get_images_name(dockerfiles, root_dir):
    """Generate the images names for the detected Dockerfile."""
    ret = []
    for f in dockerfiles:
        dir = os.path.dirname(f)
        if dir == root_dir:
            ret.append(
                "$(params.artifactory-url)/$(params.project-path):$(params.image-version)"
            )
        else:
            ret.append(
                "$(params.artifactory-url)/$(params.project-path)-{comp}:$(params.image-version)".format(
                    comp=os.path.basename(dir)
                )
            )

    return ret
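
Editorial sketch, not part of this commit: get_images_name() keeps the plain project image for a Dockerfile at the repository root and appends the directory name as a component suffix otherwise. The paths below are hypothetical.

# --- illustrative example (not part of the commit) ---
demo_images = get_images_name(["./Dockerfile", "./worker/Dockerfile"], ".")
# demo_images[0] == "$(params.artifactory-url)/$(params.project-path):$(params.image-version)"
# demo_images[1] == "$(params.artifactory-url)/$(params.project-path)-worker:$(params.image-version)"
# --- end of illustrative example ---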


def append_key(to, key, val):
    """Append a value in {to}[{key}], create the array if not existing."""
    if not key in to:
        to[key] = []
    to[key].append(val)
def set_js_stages(stages,files,root_dir):
    if 'package.json' in files and os.path.join(root_dir,'package.json') in files['package.json']:
        if 'yarn.lock' in files and os.path.join(root_dir,'yarn.lock') in files['yarn.lock']:
            append_key(stages,'prepare','prepare-yarn')


def append_stage(to, key, val, files):
    """Append a value in {to}[{key}], create the array if not existing. If the key-file is found in the files, add a custom suffix."""
    if not key in to:
        to[key] = []
    if "{basename}.yaml".format(basename=val) in files:
        to[key].append("{stage}-custom".format(stage=val))
    else:
            append_key(stages,'prepare','prepare-npm')
        if 'schema.prisma' in files and os.path.join(root_dir,'prisma','schema.prisma') in files['schema.prisma']:
            append_key(stages,'prepare','prepare-prisma')
        defs = load_json(os.path.join(root_dir,'package.json'))
        if 'scripts' in defs and 'lint' in defs['scripts']:
            append_key(stages,'lint','lint-javascript')
        if 'scripts' in defs and 'test' in defs['scripts']:
            append_key(stages,'test','test-javascript')
def set_yaml_stages(stages,files,root_dir):
    have_k8s = False
    have_ansible = False
        to[key].append(val)
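
Editorial sketch, not part of this commit: append_stage() swaps a stage for its "-custom" variant when a matching override file exists in the .tekton directory. The file list below is hypothetical.

# --- illustrative example (not part of the commit) ---
demo_stages = {}
demo_ci_files = ["lint-yaml.yaml", "auto-ci.yaml"]
append_stage(demo_stages, "lint", "lint-yaml", demo_ci_files)
append_stage(demo_stages, "lint", "lint-shell", demo_ci_files)
# demo_stages == {"lint": ["lint-yaml-custom", "lint-shell"]}: "lint-yaml" has an
# override file so it becomes "lint-yaml-custom"; "lint-shell" has none and is
# appended unchanged.
# --- end of illustrative example ---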


def set_js_stages(stages, config, files, root_dir):
    """Add the stages for javascript code."""
    if (
        "package.json" in files
        and os.path.join(root_dir, "package.json") in files["package.json"]
    ):
        if (
            "yarn.lock" in files
            and os.path.join(root_dir, "yarn.lock") in files["yarn.lock"]
        ):
            append_stage(stages, "prepare", "prepare-yarn", config["files"])
        else:
            append_stage(stages, "prepare", "prepare-npm", config["files"])
        if (
            "schema.prisma" in files
            and os.path.join(root_dir, "prisma", "schema.prisma")
            in files["schema.prisma"]
        ):
            append_stage(stages, "prepare", "prepare-prisma", config["files"])
        defs = load_json(os.path.join(root_dir, "package.json"))
        if "scripts" in defs and "lint" in defs["scripts"]:
            append_stage(stages, "lint", "lint-javascript", config["files"])
        if "scripts" in defs and "test" in defs["scripts"]:
            append_stage(stages, "test", "test-javascript", config["files"])


def set_yaml_stages(stages, config, files, root_dir):
    """Add the stages for yaml files."""
    yamls = []
    if 'yaml' in files:
        yamls += files['yaml']
    if 'yml' in files:
        yamls += files['yml']
    if "yaml" in files:
        yamls += files["yaml"]
    if "yml" in files:
        yamls += files["yml"]
    have_k8s = (
        "kube" in config["yaml"]
        and "enable" in config["yaml"]["kube"]
        and config["yaml"]["kube"]["enable"]
    )
    have_ansible = (
        "ansible" in config["yaml"]
        and "enable" in config["yaml"]["ansible"]
        and config["yaml"]["ansible"]["enable"]
    )
    should_detect = (
        "detect" not in config["yaml"] or config["yaml"]["detect"]
    ) and not (have_k8s and have_ansible)
    if should_detect:
        for file in yamls:
            objs = load_yaml(file)
            for obj in objs:
@@ -78,52 +193,90 @@ def set_yaml_stages(stages,files,root_dir):
                    continue
                if isinstance(obj, collections.abc.Sequence):
                    for item in obj:
                        if 'name' in item and ('register' in item or 'changed_when' in item or 'loop_control' in item or 'ansible.builtin.template' in item):
                        if "name" in item and (
                            "register" in item
                            or "changed_when" in item
                            or "loop_control" in item
                            or "ansible.builtin.template" in item
                        ):
                            have_ansible = True
                elif 'apiVersion' in obj:
                elif "apiVersion" in obj:
                    have_k8s = True
    append_key(stages,'lint','lint-yaml')
    append_stage(stages, "lint", "lint-yaml", config["files"])
    if have_k8s:
        append_key(stages,'lint','lint-kube')
        append_stage(stages, "lint", "lint-kube", config["files"])
    if have_ansible:
        append_key(stages,'lint','lint-ansible')
def get_images_name(dockerfiles,root_dir):
    ret = []
    for f in dockerfiles:
        dir = os.path.dirname(f)
        ret.append("$(params.artifactory-url)/$(params.project-path):$(params.image-version)")
        print('get_image_name', dir, root_dir)
    return ret
def get_stages(files,root_dir):
    ret = {}
    ret['prepare'] = []
    ret['lint'] = []
    ret['test'] = []
    ret['publish'] = []
    if 'Dockerfile' in files:
        append_key(ret,'lint','lint-docker')
        append_key(ret,'publish','publish-docker')
    if 'yaml' in files or 'yml' in files:
        set_yaml_stages(ret,files,root_dir)
    if 'sh' in files:
        append_key(ret,'lint', 'lint-shell')
    if 'rs' in files:
        append_key(ret,'lint', 'lint-clippy')
    if 'py' in files:
        append_key(ret,'lint','lint-python')
        append_key(ret,'lint','lint-black')
        if len([t for t in files['py'] if re.match('/test_',t) != None]) > 0:
            append_key(ret,'test','test-python')
    if 'ts' in files or 'js' in files:
        set_js_stages(ret,files,root_dir)
    return ret
files = detect_files(root)
stages = get_stages(files,root)
save_json("$(results.stages-prepare.path)", stages['prepare'])
save_json("$(results.stages-lint.path)", stages['lint'])
save_json("$(results.stages-test.path)", stages['test'])
save_json("$(results.stages-publish.path)", stages['publish'])
save_json("$(results.file-shell.path)", files['sh'] if 'sh' in files else [])
save_json("$(results.file-python.path)", files['py'] if 'py' in files else [])
save_json("$(results.file-docker.path)", files['Dockerfile'] if 'Dockerfile' in files else [])
save_json("$(results.images-name.path)", get_images_name(files['Dockerfile'] if 'Dockerfile' in files else [],root))
        append_stage(stages, "lint", "lint-ansible", config["files"])


def get_results(config, files, root_dir):
    """Generate the stages based on the configuration and detected files."""
    stages = {
        "global": [],
        "prepare": [],
        "lint": [],
        "build": [],
        "test": [],
        "publish": [],
    }
    args = {
        "shellcheck-args": (
            config["shell"]["shellcheck-args"]
            if "shellcheck-args" in config["shell"]
            else []
        ),
        "black-args": (
            config["python"]["black-args"] if "black-args" in config["python"] else []
        ),
        "pylint-args": (
            config["python"]["pylint-args"] if "pylint-args" in config["python"] else []
        ),
    }
    if "on-$(params.pipeline-type).yaml" in config["files"]:
        append_stage(stages, "global", "$(params.pipeline-type)", config["files"])
        return stages, args

    if "Dockerfile" in files:
        append_stage(stages, "lint", "lint-docker", config["files"])
        append_stage(stages, "publish", "publish-docker", config["files"])
    if "yaml" in files or "yml" in files:
        set_yaml_stages(stages, config, files, root_dir)
    if "sh" in files:
        append_stage(stages, "lint", "lint-shell", config["files"])
        args["shellcheck-args"].extend(files["sh"])
    if "rs" in files:
        append_stage(stages, "lint", "lint-clippy", config["files"])
    if "py" in files:
        append_stage(stages, "lint", "lint-python", config["files"])
        args["pylint-args"].extend(files["py"])
        append_stage(stages, "lint", "lint-black", config["files"])
        args["black-args"].extend(files["py"])
        if len([t for t in files["py"] if re.match("/test_", t) != None]) > 0:
            append_stage(stages, "test", "test-python", config["files"])
    if "ts" in files or "js" in files:
        set_js_stages(stages, config, files, root_dir)
    for stage in ["prepare", "lint", "build", "test", "publish"]:
if "on-{stage}.yaml" in config["files"]:
            stages[stage] = ["custom"]
    return stages, args


config = load_config(root, ci_root)
files = detect_files(config, root)
stages, args = get_results(config, files, root)
save_json("$(results.stages-global.path)", stages["global"])
save_json("$(results.stages-prepare.path)", stages["prepare"])
save_json("$(results.stages-lint.path)", stages["lint"])
save_json("$(results.stages-build.path)", stages["build"])
save_json("$(results.stages-test.path)", stages["test"])
save_json("$(results.stages-publish.path)", stages["publish"])
save_json(
    "$(results.file-docker.path)", files["Dockerfile"] if "Dockerfile" in files else []
)
save_json(
    "$(results.images-name.path)",
    get_images_name(files["Dockerfile"] if "Dockerfile" in files else [], root),
)
save_json("$(results.shellcheck-args.path)", args["shellcheck-args"])
save_json("$(results.black-args.path)", args["black-args"])
save_json("$(results.pylint-args.path)", args["pylint-args"])
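
Editorial sketch, not part of this commit: the detection logic can be exercised outside Tekton by pointing it at a scratch checkout; the "$(results...)" and "$(params...)" placeholders then stay literal strings. The helper below is hypothetical and is never invoked by the Task script.

# --- illustrative example (not part of the commit) ---
def _demo_detection():
    """Hypothetical helper showing the detection flow on a scratch directory."""
    import tempfile
    with tempfile.TemporaryDirectory() as tmp:
        open(os.path.join(tmp, "Dockerfile"), "w").close()
        open(os.path.join(tmp, "hello.py"), "w").close()
        demo_config = load_config(tmp, os.path.join(tmp, ".tekton"))
        demo_files = detect_files(demo_config, tmp)
        demo_stages, demo_args = get_results(demo_config, demo_files, tmp)
        # expected: "lint-docker", "lint-python" and "lint-black" in demo_stages["lint"],
        # "publish-docker" in demo_stages["publish"], and demo_args["black-args"]
        # ending with the path of hello.py.
        return demo_stages, demo_args
# --- end of illustrative example ---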
@@ -17,35 +17,48 @@ resource "kubectl_manifest" "auto-ci-detector" {
    labels: ${jsonencode(local.push-labels)}
  spec:
    results:
      - name: stages-global
        description: list of global actions
        type: array
      - name: stages-prepare
        description: list of prepare actions
        type: array
      - name: stages-lint
        description: list of lint actions
        type: array
      - name: stages-build
        description: list of build actions
        type: array
      - name: stages-test
        description: list of test actions
        type: array
      - name: stages-publish
        description: list of publish actions
        type: array
      - name: file-shell
        description: list of shell files if any
        type: array
      - name: file-python
        description: list of python files if any
        type: array
      - name: file-docker
        description: list of Dockerfiles if any
        type: array
      - name: images-name
        description: list of Dockerfile image names
        type: array
      - name: shellcheck-args
        description: Arguments for shellcheck
        type: array
      - name: black-args
        description: Arguments for black
        type: array
      - name: pylint-args
        description: Arguments for pylint
        type: array
    params:
      - name: toolbox-image
        default: sebt3/basic-toolbox-image:1.29.4
        description: The name of the toolbox image
        type: string
      - name: pipeline-type
        default: push
        description: Type of the pipeline (push,tag,pr...)
        type: string
      - name: artifactory-url
        default: docker.io
        description: The url of the current artifactory

@@ -0,0 +1,96 @@
apiVersion: tekton.dev/v1
kind: Pipeline
metadata:
  name: auto-ci-lint
spec:
  workspaces:
    - name: source
  params:
    - name: on-error
      type: string
      default: stopAndFail
    - name: stages
      type: array
    - name: dockerfiles
      type: array
      default: []
    - name: shellcheck-args
      type: array
      default: []
    - name: black-args
      type: array
      default: []
    - name: pylint-args
      type: array
      default: []
  tasks:
    - name: lint-shell
      onError: $(params.on-error)
      when:
        - input: "lint-shell"
          operator: in
          values: ["$(params.stages[*])"]
      params:
        - name: args
          value: $(params.shellcheck-args)
      taskRef:
        name: shellcheck
      workspaces:
        - name: shared-workspace
          workspace: source
    - name: lint-docker
      onError: $(params.on-error)
      when:
        - input: "lint-docker"
          operator: in
          values: ["$(params.stages[*])"]
      taskRef:
        name: hadolint
      matrix:
        params:
          - name: dockerfile-path
            value: $(tasks.detect-stages.results.file-docker)
      workspaces:
        - name: source
    - name: lint-yaml
      onError: $(params.on-error)
      when:
        - input: "lint-yaml"
          operator: in
          values: ["$(params.stages[*])"]
      params:
        - name: args
          value: ["."]
      taskRef:
        name: yaml-lint
      workspaces:
        - name: shared-workspace
          workspace: source
    - name: lint-black
      onError: $(params.on-error)
      when:
        - input: "lint-black"
          operator: in
          values: ["$(params.stages[*])"]
      params:
        - name: args
          value: $(params.black-args)
      taskRef:
        name: black
      workspaces:
        - name: shared-workspace
          workspace: source
    - name: lint-python
      onError: $(params.on-error)
      when:
        - input: "lint-python"
          operator: in
          values: ["$(params.stages[*])"]
      params:
        - name: args
          value: ["$(tasks.detect-stages.results.file-python[*])"]
      taskRef:
        name: pylint
      workspaces:
        - name: shared-workspace
          workspace: source
@@ -130,6 +130,8 @@ spec:
    - name: detect-stages
      runAfter: [git-version]
      params:
        - name: pipeline-type
          value: push
        - name: artifactory-url
          value: $(params.artifactory-url)
        - name: project-name
@@ -142,83 +144,29 @@ spec:
        name: auto-ci-detector
      workspaces:
        - name: source
    - name: lint-shell
    - name: lint
      runAfter: [detect-stages]
      onError: continue
      when:
        - input: "lint-shell"
          operator: in
          values: ["$(tasks.detect-stages.results.stages-lint[*])"]
        - cel: "size(tasks.detect-stages.results.stages-lint)>0"
      params:
        - name: args
          value: $(tasks.detect-stages.results.file-shell)
      taskRef:
        name: shellcheck
      workspaces:
        - name: shared-workspace
          workspace: source
    - name: lint-docker
      runAfter: [detect-stages]
      onError: continue
      when:
        - input: "lint-docker"
          operator: in
          values: ["$(tasks.detect-stages.results.stages-lint[*])"]
      taskRef:
        name: hadolint
      matrix:
        params:
          - name: dockerfile-path
        - name: on-error
          value: stopAndFail
        - name: stages
          value: $(tasks.detect-stages.results.stages-lint)
        - name: dockerfiles
          value: $(tasks.detect-stages.results.file-docker)
        - name: shellcheck-args
          value: $(tasks.detect-stages.results.shellcheck-args)
        - name: black-args
          value: $(tasks.detect-stages.results.black-args)
        - name: pylint-args
          value: $(tasks.detect-stages.results.pylint-args)
      pipelineRef:
        name: auto-ci-lint
      workspaces:
        - name: source
    - name: lint-yaml
      runAfter: [detect-stages]
      onError: continue
      when:
        - input: "lint-yaml"
          operator: in
          values: ["$(tasks.detect-stages.results.stages-lint[*])"]
      params:
        - name: args
          value: ["."]
      taskRef:
        name: yaml-lint
      workspaces:
        - name: shared-workspace
          workspace: source
    - name: lint-black
      runAfter: [detect-stages]
      onError: continue
      when:
        - input: "lint-black"
          operator: in
          values: ["$(tasks.detect-stages.results.stages-lint[*])"]
      params:
        - name: args
          value: ["--check", "--diff", "$(tasks.detect-stages.results.file-python[*])"]
      taskRef:
        name: black
      workspaces:
        - name: shared-workspace
          workspace: source
    - name: lint-python
      runAfter: [detect-stages]
      onError: continue
      when:
        - input: "lint-python"
          operator: in
          values: ["$(tasks.detect-stages.results.stages-lint[*])"]
      params:
        - name: args
          value: ["$(tasks.detect-stages.results.file-python[*])"]
      taskRef:
        name: pylint
      workspaces:
        - name: shared-workspace
          workspace: source
    - name: publish-docker
      runAfter: ["lint-docker"]
      runAfter: ["lint"]
      when:
        - input: "publish-docker"
          operator: in

@@ -114,6 +114,8 @@ spec:
    - name: detect-stages
      runAfter: [git-clone]
      params:
        - name: pipeline-type
          value: tag
        - name: artifactory-url
          value: $(params.artifactory-url)
        - name: project-name
@@ -202,7 +204,6 @@ spec:
        - input: "publish-docker"
          operator: in
          values: ["$(tasks.detect-stages.results.stages-publish[*])"]
        - cel: "'$(params.branch-name)' == '$(params.git-default-branch)'"
      taskRef:
        name: buildah
      matrix: