fix
@@ -6,31 +6,29 @@ import yaml
 import collections.abc
 import argparse
 
-root = '.'
-def save_json(filename,data):
-    print('saving to', filename, json.dumps(data))
-    with open(filename, 'w') as file:
+root = "."
+ci_root = ".tekton"
+
+
+def save_json(filename, data):
+    """Save a Json file."""
+    print("saving to", filename, json.dumps(data))
+    with open(filename, "w") as file:
         file.write(json.dumps(data))
-# detect files based on their extention
-def detect_files(root_dir):
-    ret = {}
-    supported_extention = ['ts','js', 'py', 'yaml','yml', 'sh', 'rs', 'Dockerfile']
-    supported_filename = ['package.json', 'yarn.lock', 'schema.prisma']
-    for directory, subdir_list, file_list in os.walk(root_dir):
-        for filename in file_list:
-            if filename in supported_filename:
-                if not filename in ret:
-                    ret[filename] = []
-                ret[filename].append(os.path.join(directory,filename))
-            ext = filename.split(".")[len(filename.split("."))-1]
-            if ext in supported_extention:
-                if not ext in ret:
-                    ret[ext] = []
-                ret[ext].append(os.path.join(directory,filename))
-    return ret
+
+
+def load_json(filename):
+    """Load a json file."""
+    data = {}
+    with open(filename, "r") as file:
+        data = json.loads(file.read())
+    return data
+
+
 def load_yaml(filename):
-    docs=[]
-    with open(filename, 'r') as file:
+    """Load a file."""
+    docs = []
+    with open(filename, "r") as file:
         try:
             data = yaml.safe_load_all(file)
             for doc in data:
@@ -40,90 +38,245 @@ def load_yaml(filename):
         else:
             pass
     return docs
-def load_json(filename):
-    data={}
-    with open(filename, 'r') as file:
-        data = json.loads(file.read())
-    return data
-
-def append_key(to,key,val):
-    if not key in to:
-        to[key] = []
-    to[key].append(val)
-def set_js_stages(stages,files,root_dir):
-    if 'package.json' in files and os.path.join(root_dir,'package.json') in files['package.json']:
-        if 'yarn.lock' in files and os.path.join(root_dir,'yarn.lock') in files['yarn.lock']:
-            append_key(stages,'prepare','prepare-yarn')
-        else:
-            append_key(stages,'prepare','prepare-npm')
-        if 'schema.prisma' in files and os.path.join(root_dir,'prisma','schema.prisma') in files['schema.prisma']:
-            append_key(stages,'prepare','prepare-prisma')
-        defs = load_json(os.path.join(root_dir,'package.json'))
-        if 'scripts' in defs and 'lint' in defs['scripts']:
-            append_key(stages,'lint','lint-javascript')
-        if 'scripts' in defs and 'test' in defs['scripts']:
-            append_key(stages,'test','test-javascript')
-def set_yaml_stages(stages,files,root_dir):
-    have_k8s = False
-    have_ansible = False
-    yamls = []
-    if 'yaml' in files:
-        yamls += files['yaml']
-    if 'yml' in files:
-        yamls += files['yml']
-    for file in yamls:
-        objs = load_yaml(file)
-        for obj in objs:
-            if obj == None:
-                continue
-            if isinstance(obj, collections.abc.Sequence):
-                for item in obj:
-                    if 'name' in item and ('register' in item or 'changed_when' in item or 'loop_control' in item or 'ansible.builtin.template' in item):
-                        have_ansible = True
-            elif 'apiVersion' in obj:
-                have_k8s = True
-    append_key(stages,'lint','lint-yaml')
-    if have_k8s:
-        append_key(stages,'lint','lint-kube')
-    if have_ansible:
-        append_key(stages,'lint','lint-ansible')
-def get_images_name(dockerfiles,root_dir):
+
+
+def load_config(root_dir, ci_root_dir):
+    """Load the configuration from the configuration directory."""
+    ret = {
+        "files": [],
+        "languages": ["markdown", "docker", "rust", "shell", "python", "yaml", "js"],
+        "markdown": {"extentions": ["md"]},
+        "docker": {"extentions": ["Dockerfile"]},
+        "rust": {"extentions": ["rs"]},
+        "shell": {"extentions": ["sh", "ksh"], "shellcheck-args": []},
+        "python": {
+            "extentions": ["py"],
+            "black-args": ["--check", "--diff"],
+            "pylint-args": [],
+        },
+        "yaml": {
+            "extentions": ["yaml", "yml"],
+            "detect": True,
+            "ansible": {"enable": False},
+            "kube": {"enable": False},
+        },
+        "js": {
+            "extentions": ["ts", "js"],
+            "files": ["package.json", "yarn.lock", "schema.prisma"],
+        },
+    }
+    if not os.path.isdir(ci_root_dir):
+        return ret
+    files = [
+        f
+        for f in os.listdir(ci_root_dir)
+        if os.path.isfile(os.path.join(ci_root_dir, f)) and re.match(".yaml$", f)
+    ]
+    if "auto-ci.yaml" in files:
+        for doc in load_yaml(os.path.join(ci_root_dir, "auto-ci.yaml")):
+            ret = {**ret, **doc}
+    ret["files"] = files
+    return ret
+
+
+def detect_files(config, root_dir):
+    """Detect files based on their extention."""
+    ret = {}
+    supported_extentions = []
+    supported_filename = []
+    for lang in config["languages"]:
+        if "extentions" in config[lang]:
+            supported_extentions.extend(config[lang]["extentions"])
+        if "files" in lang:
+            supported_filename.extend(config[lang]["files"])
+    for directory, subdir_list, file_list in os.walk(root_dir):
+        for filename in file_list:
+            if filename in supported_filename:
+                if not filename in ret:
+                    ret[filename] = []
+                ret[filename].append(os.path.join(directory, filename))
+            ext = filename.split(".")[len(filename.split(".")) - 1]
+            if ext in supported_extentions:
+                if not ext in ret:
+                    ret[ext] = []
+                ret[ext].append(os.path.join(directory, filename))
+    return ret
+
+
+def get_images_name(dockerfiles, root_dir):
+    """Generate the images names for the detected Dockerfile."""
     ret = []
     for f in dockerfiles:
         dir = os.path.dirname(f)
-        ret.append("$(params.artifactory-url)/$(params.project-path):$(params.image-version)")
-        print('get_image_name', dir, root_dir)
+        if dir == root_dir:
+            ret.append(
+                "$(params.artifactory-url)/$(params.project-path):$(params.image-version)"
+            )
+        else:
+            ret.append(
+                "$(params.artifactory-url)/$(params.project-path)-{comp}:$(params.image-version)".format(
+                    comp=os.path.basename(dir)
+                )
+            )
     return ret
-def get_stages(files,root_dir):
-    ret = {}
-    ret['prepare'] = []
-    ret['lint'] = []
-    ret['test'] = []
-    ret['publish'] = []
-    if 'Dockerfile' in files:
-        append_key(ret,'lint','lint-docker')
-        append_key(ret,'publish','publish-docker')
-    if 'yaml' in files or 'yml' in files:
-        set_yaml_stages(ret,files,root_dir)
-    if 'sh' in files:
-        append_key(ret,'lint', 'lint-shell')
-    if 'rs' in files:
-        append_key(ret,'lint', 'lint-clippy')
-    if 'py' in files:
-        append_key(ret,'lint','lint-python')
-        append_key(ret,'lint','lint-black')
-        if len([t for t in files['py'] if re.match('/test_',t) != None]) > 0:
-            append_key(ret,'test','test-python')
-    if 'ts' in files or 'js' in files:
-        set_js_stages(ret,files,root_dir)
-    return ret
-files = detect_files(root)
-stages = get_stages(files,root)
-save_json("$(results.stages-prepare.path)", stages['prepare'])
-save_json("$(results.stages-lint.path)", stages['lint'])
-save_json("$(results.stages-test.path)", stages['test'])
-save_json("$(results.stages-publish.path)", stages['publish'])
-save_json("$(results.file-shell.path)", files['sh'] if 'sh' in files else [])
-save_json("$(results.file-python.path)", files['py'] if 'py' in files else [])
-save_json("$(results.file-docker.path)", files['Dockerfile'] if 'Dockerfile' in files else [])
-save_json("$(results.images-name.path)", get_images_name(files['Dockerfile'] if 'Dockerfile' in files else [],root))
+
+
+def append_key(to, key, val):
+    """Append a value in {to}[{key}], create the array if not existing."""
+    if not key in to:
+        to[key] = []
+    to[key].append(val)
+
+
+def append_stage(to, key, val, files):
+    """Append a value in {to}[{key}], create the array if not existing. if the key-file is found in the files add a custom suffix"""
+    if not key in to:
+        to[key] = []
+    if "{basename}.yaml".format(basename=val) in files:
+        to[key].append("{stage}-custom".format(stage=val))
+    else:
+        to[key].append(val)
+
+
+def set_js_stages(stages, config, files, root_dir):
+    """Add the stages for javascript code."""
+    if (
+        "package.json" in files
+        and os.path.join(root_dir, "package.json") in files["package.json"]
+    ):
+        if (
+            "yarn.lock" in files
+            and os.path.join(root_dir, "yarn.lock") in files["yarn.lock"]
+        ):
+            append_stage(stages, "prepare", "prepare-yarn", config["files"])
+        else:
+            append_stage(stages, "prepare", "prepare-npm", config["files"])
+        if (
+            "schema.prisma" in files
+            and os.path.join(root_dir, "prisma", "schema.prisma")
+            in files["schema.prisma"]
+        ):
+            append_stage(stages, "prepare", "prepare-prisma", config["files"])
+        defs = load_json(os.path.join(root_dir, "package.json"))
+        if "scripts" in defs and "lint" in defs["scripts"]:
+            append_stage(stages, "lint", "lint-javascript", config["files"])
+        if "scripts" in defs and "test" in defs["scripts"]:
+            append_stage(stages, "test", "test-javascript", config["files"])
+
+
+def set_yaml_stages(stages, config, files, root_dir):
+    """Add the stages for yaml files."""
+    yamls = []
+    if "yaml" in files:
+        yamls += files["yaml"]
+    if "yml" in files:
+        yamls += files["yml"]
+    have_k8s = (
+        "kube" in config["yaml"]
+        and "enable" in config["yaml"]["kube"]
+        and config["yaml"]["kube"]["enable"]
+    )
+    have_ansible = (
+        "ansible" in config["yaml"]
+        and "enable" in config["yaml"]["ansible"]
+        and config["yaml"]["ansible"]["enable"]
+    )
+    should_detect = (
+        "detect" not in config["yaml"] or config["yaml"]["detect"]
+    ) and not (have_k8s and have_ansible)
+    if should_detect:
+        for file in yamls:
+            objs = load_yaml(file)
+            for obj in objs:
+                if obj == None:
+                    continue
+                if isinstance(obj, collections.abc.Sequence):
+                    for item in obj:
+                        if "name" in item and (
+                            "register" in item
+                            or "changed_when" in item
+                            or "loop_control" in item
+                            or "ansible.builtin.template" in item
+                        ):
+                            have_ansible = True
+                elif "apiVersion" in obj:
+                    have_k8s = True
+    append_stage(stages, "lint", "lint-yaml", config["files"])
+    if have_k8s:
+        append_stage(stages, "lint", "lint-kube", config["files"])
+    if have_ansible:
+        append_stage(stages, "lint", "lint-ansible", config["files"])
+
+
+def get_results(config, files, root_dir):
+    """Generate the stages based on the configuration and detected files."""
+    stages = {
+        "global": [],
+        "prepare": [],
+        "lint": [],
+        "build": [],
+        "test": [],
+        "publish": [],
+    }
+    args = {
+        "shellcheck-args": (
+            config["shell"]["shellcheck-args"]
+            if "shellcheck-args" in config["shell"]
+            else []
+        ),
+        "black-args": (
+            config["python"]["black-args"] if "black-args" in config["python"] else []
+        ),
+        "pylint-args": (
+            config["python"]["pylint-args"] if "pylint-args" in config["python"] else []
+        ),
+    }
+    if "on-$(params.pipeline-type).yaml" in config["files"]:
+        append_stage(stages, "global", "$(params.pipeline-type)", config["files"])
+        return stages, args
+
+    if "Dockerfile" in files:
+        append_stage(stages, "lint", "lint-docker", config["files"])
+        append_stage(stages, "publish", "publish-docker", config["files"])
+    if "yaml" in files or "yml" in files:
+        set_yaml_stages(stages, config, files, root_dir)
+    if "sh" in files:
+        append_stage(stages, "lint", "lint-shell", config["files"])
+        args["shellcheck-args"].extend(files["sh"])
+    if "rs" in files:
+        append_stage(stages, "lint", "lint-clippy", config["files"])
+    if "py" in files:
+        append_stage(stages, "lint", "lint-python", config["files"])
+        args["pylint-args"].extend(files["py"])
+        append_stage(stages, "lint", "lint-black", config["files"])
+        args["black-args"].extend(files["py"])
+        if len([t for t in files["py"] if re.match("/test_", t) != None]) > 0:
+            append_stage(stages, "test", "test-python", config["files"])
+    if "ts" in files or "js" in files:
+        set_js_stages(stages, config, files, root_dir)
+    for stage in ["prepare", "lint", "build", "test", "publish"]:
+        if "on-{stage}.yaml" in config["files"]:
+            stages[stage] = ["custom"]
+    return stages, args
+
+
+config = load_config(root, ci_root)
+files = detect_files(config, root)
+stages, args = get_results(config, files, root)
+save_json("$(results.stages-global.path)", stages["global"])
+save_json("$(results.stages-prepare.path)", stages["prepare"])
+save_json("$(results.stages-lint.path)", stages["lint"])
+save_json("$(results.stages-build.path)", stages["build"])
+save_json("$(results.stages-test.path)", stages["test"])
+save_json("$(results.stages-publish.path)", stages["publish"])
+save_json(
+    "$(results.file-docker.path)", files["Dockerfile"] if "Dockerfile" in files else []
+)
+save_json(
+    "$(results.images-name.path)",
+    get_images_name(files["Dockerfile"] if "Dockerfile" in files else [], root),
+)
+save_json("$(results.shellcheck-args.path)", args["shellcheck-args"])
+save_json("$(results.black-args.path)", args["black-args"])
+save_json("$(results.pylint-args.path)", args["pylint-args"])
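
Note on configuration: the rewritten script shallow-merges every document found in .tekton/auto-ci.yaml over the defaults built in load_config() (ret = {**ret, **doc}), so a repository can override a whole top-level section. A minimal sketch of such an override file follows; the values are illustrative assumptions, only the keys mirror the defaults in this commit, and because the merge is shallow an overridden section replaces the default one wholesale, so restate anything you still need:

# .tekton/auto-ci.yaml -- hypothetical override, merged over the built-in defaults
yaml:
  extentions: ["yaml", "yml"]   # restated because the top-level merge is shallow
  detect: false                 # trust the flags below instead of sniffing file content
  ansible:
    enable: true                # always add the lint-ansible stage
  kube:
    enable: false
shell:
  extentions: ["sh", "ksh"]
  shellcheck-args: ["--severity=warning"]   # assumed extra flag; the detected shell files are appended after it
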
@@ -17,35 +17,48 @@ resource "kubectl_manifest" "auto-ci-detector" {
     labels: ${jsonencode(local.push-labels)}
   spec:
     results:
+      - name: stages-global
+        description: list of global actions
+        type: array
       - name: stages-prepare
         description: list of prepare actions
         type: array
       - name: stages-lint
         description: list of lint actions
         type: array
+      - name: stages-build
+        description: list of lint actions
+        type: array
       - name: stages-test
         description: list of test actions
         type: array
       - name: stages-publish
         description: list of publish actions
         type: array
-      - name: file-shell
-        description: list of shell files if any
-        type: array
-      - name: file-python
-        description: list of python files if any
-        type: array
       - name: file-docker
         description: list of Dockerfiles if any
         type: array
       - name: images-name
         description: list of Dockerfiles image-name
         type: array
+      - name: shellcheck-args
+        description: Arguments for shellcheck
+        type: array
+      - name: black-args
+        description: Arguments for black
+        type: array
+      - name: pylint-args
+        description: Arguments for pylint
+        type: array
     params:
       - name: toolbox-image
         default: sebt3/basic-toolbox-image:1.29.4
         description: The name of the toolbox image
         type: string
+      - name: pipeline-type
+        default: push
+        description: Type of the pipeline (push,tag,pr...)
+        type: string
       - name: artifactory-url
         default: docker.io
         description: The url of the current artifactory
@@ -0,0 +1,96 @@
+apiVersion: tekton.dev/v1
+kind: Pipeline
+metadata:
+  name: auto-ci-lint
+spec:
+  workspaces:
+    - name: source
+  params:
+    - name: on-error
+      type: string
+      default: stopAndFail
+    - name: stages
+      type: array
+    - name: dockerfiles
+      type: array
+      default: []
+    - name: shellcheck-args
+      type: array
+      default: []
+    - name: black-args
+      type: array
+      default: []
+    - name: pylint-args
+      type: array
+      default: []
+  tasks:
+    - name: lint-shell
+      onError: $(params.on-error)
+      when:
+        - input: "lint-shell"
+          operator: in
+          values: ["$(params.stages[*])"]
+      params:
+        - name: args
+          value: $(params.shellcheck-args)
+      taskRef:
+        name: shellcheck
+      workspaces:
+        - name: shared-workspace
+          workspace: source
+    - name: lint-docker
+      onError: $(params.on-error)
+      when:
+        - input: "lint-docker"
+          operator: in
+          values: ["$(params.stages[*])"]
+      taskRef:
+        name: hadolint
+      matrix:
+        params:
+          - name: dockerfile-path
+            value: $(tasks.detect-stages.results.file-docker)
+      workspaces:
+        - name: source
+    - name: lint-yaml
+      onError: $(params.on-error)
+      when:
+        - input: "lint-yaml"
+          operator: in
+          values: ["$(params.stages[*])"]
+      params:
+        - name: args
+          value: ["."]
+      taskRef:
+        name: yaml-lint
+      workspaces:
+        - name: shared-workspace
+          workspace: source
+    - name: lint-black
+      onError: $(params.on-error)
+      when:
+        - input: "lint-black"
+          operator: in
+          values: ["$(params.stages[*])"]
+      params:
+        - name: args
+          value: $(params.black-args)
+      taskRef:
+        name: black
+      workspaces:
+        - name: shared-workspace
+          workspace: source
+    - name: lint-python
+      onError: $(params.on-error)
+      when:
+        - input: "lint-python"
+          operator: in
+          values: ["$(params.stages[*])"]
+      params:
+        - name: args
+          value: ["$(tasks.detect-stages.results.file-python[*])"]
+      taskRef:
+        name: pylint
+      workspaces:
+        - name: shared-workspace
+          workspace: source
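
In the auto-ci-lint Pipeline above, each task is gated by a when expression that checks whether its own name appears in the stages array param, so the detector's stages-lint result decides which linters actually run. A hypothetical standalone PipelineRun illustrating that gating (the run name and PVC are assumptions; only the pipeline, param and workspace names come from this commit):

apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
  generateName: auto-ci-lint-run-   # assumed name prefix
spec:
  pipelineRef:
    name: auto-ci-lint
  params:
    - name: stages
      value: ["lint-shell", "lint-yaml"]            # only these two tasks will run
    - name: shellcheck-args
      value: ["--severity=warning", "./build.sh"]   # flags plus files, as the detector would emit
  workspaces:
    - name: source
      persistentVolumeClaim:
        claimName: my-sources        # hypothetical PVC holding the checked-out repository
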
@@ -130,6 +130,8 @@ spec:
     - name: detect-stages
       runAfter: [git-version]
       params:
+        - name: pipeline-type
+          value: push
         - name: artifactory-url
           value: $(params.artifactory-url)
         - name: project-name
@@ -142,83 +144,29 @@ spec:
         name: auto-ci-detector
       workspaces:
         - name: source
-    - name: lint-shell
+    - name: lint
       runAfter: [detect-stages]
-      onError: continue
       when:
-        - input: "lint-shell"
-          operator: in
-          values: ["$(tasks.detect-stages.results.stages-lint[*])"]
+        - cel: "size(tasks.detect-stages.results.stages-lint)>0"
       params:
-        - name: args
-          value: $(tasks.detect-stages.results.file-shell)
-      taskRef:
-        name: shellcheck
-      workspaces:
-        - name: shared-workspace
-          workspace: source
-    - name: lint-docker
-      runAfter: [detect-stages]
-      onError: continue
-      when:
-        - input: "lint-docker"
-          operator: in
-          values: ["$(tasks.detect-stages.results.stages-lint[*])"]
-      taskRef:
-        name: hadolint
-      matrix:
-        params:
-          - name: dockerfile-path
-            value: $(tasks.detect-stages.results.file-docker)
+        - name: on-error
+          value: stopAndFail
+        - name: stages
+          value: $(tasks.detect-stages.results.stages-lint)
+        - name: dockerfiles
+          value: $(tasks.detect-stages.results.file-docker)
+        - name: shellcheck-args
+          value: $(tasks.detect-stages.results.shellcheck-args)
+        - name: black-args
+          value: $(tasks.detect-stages.results.black-args)
+        - name: pylint-args
+          value: $(tasks.detect-stages.results.pylint-args)
+      pipelineRef:
+        name: auto-ci-lint
       workspaces:
         - name: source
-    - name: lint-yaml
-      runAfter: [detect-stages]
-      onError: continue
-      when:
-        - input: "lint-yaml"
-          operator: in
-          values: ["$(tasks.detect-stages.results.stages-lint[*])"]
-      params:
-        - name: args
-          value: ["."]
-      taskRef:
-        name: yaml-lint
-      workspaces:
-        - name: shared-workspace
-          workspace: source
-    - name: lint-black
-      runAfter: [detect-stages]
-      onError: continue
-      when:
-        - input: "lint-black"
-          operator: in
-          values: ["$(tasks.detect-stages.results.stages-lint[*])"]
-      params:
-        - name: args
-          value: ["--check", "--diff", "$(tasks.detect-stages.results.file-python[*])"]
-      taskRef:
-        name: black
-      workspaces:
-        - name: shared-workspace
-          workspace: source
-    - name: lint-python
-      runAfter: [detect-stages]
-      onError: continue
-      when:
-        - input: "lint-python"
-          operator: in
-          values: ["$(tasks.detect-stages.results.stages-lint[*])"]
-      params:
-        - name: args
-          value: ["$(tasks.detect-stages.results.file-python[*])"]
-      taskRef:
-        name: pylint
-      workspaces:
-        - name: shared-workspace
-          workspace: source
     - name: publish-docker
-      runAfter: ["lint-docker"]
+      runAfter: ["lint"]
       when:
         - input: "publish-docker"
           operator: in
@@ -114,6 +114,8 @@ spec:
     - name: detect-stages
       runAfter: [git-clone]
       params:
+        - name: pipeline-type
+          value: tag
         - name: artifactory-url
           value: $(params.artifactory-url)
         - name: project-name
@@ -202,7 +204,6 @@ spec:
         - input: "publish-docker"
           operator: in
           values: ["$(tasks.detect-stages.results.stages-publish[*])"]
-        - cel: "'$(params.branch-name)' == '$(params.git-default-branch)'"
       taskRef:
         name: buildah
       matrix: