domain/share/gitea-tekton-org/tekton.dev_v1_Task_auto-ci-detector.yaml
apiVersion: tekton.dev/v1
kind: Task
metadata:
  name: auto-ci-detector
spec:
  results:
    - name: stages-prepare
      description: list of prepare actions
      type: array
    - name: stages-lint
      description: list of lint actions
      type: array
    - name: stages-test
      description: list of test actions
      type: array
    - name: stages-publish
      description: list of publish actions
      type: array
    - name: file-shell
      description: list of shell files if any
      type: array
    - name: file-python
      description: list of python files if any
      type: array
    - name: file-docker
      description: list of Dockerfiles if any
      type: array
    - name: images-name
      description: list of image names built from the detected Dockerfiles
      type: array
  params:
    - name: toolbox-image
      default: sebt3/basic-toolbox-image:1.30.0
      description: The name of the toolbox image
      type: string
    - name: artifactory-url
      default: docker.io
      description: The URL of the current artifactory
      type: string
    - name: project-name
      description: The name of the current project
      type: string
    - name: project-path
      description: The path of the current project
      type: string
    - name: image-version
      description: The version used to tag published images
      type: string
  steps:
    - name: detect-stages
      image: $(params.toolbox-image)
      workingDir: $(workspaces.source.path)
      script: |
        #!/usr/bin/env python3
        import os
        import re
        import json
        import yaml
        import collections.abc

        root = '.'

        def save_json(filename, data):
            # Write a value as JSON into a Tekton result file
            print('saving to', filename, json.dumps(data))
            with open(filename, 'w') as file:
                file.write(json.dumps(data))

        # Detect files based on their extension or well-known filename
        def detect_files(root_dir):
            ret = {}
            supported_extention = ['ts', 'js', 'py', 'yaml', 'yml', 'sh', 'rs', 'Dockerfile']
            supported_filename = ['package.json', 'yarn.lock', 'schema.prisma']
            for directory, subdir_list, file_list in os.walk(root_dir):
                for filename in file_list:
                    if filename in supported_filename:
                        if filename not in ret:
                            ret[filename] = []
                        ret[filename].append(os.path.join(directory, filename))
                    ext = filename.split(".")[-1]
                    if ext in supported_extention:
                        if ext not in ret:
                            ret[ext] = []
                        ret[ext].append(os.path.join(directory, filename))
            return ret

        def load_yaml(filename):
            # Load every document of a YAML file, skipping files with custom tags
            # that safe_load cannot construct (for example !vault payloads)
            docs = []
            with open(filename, 'r') as file:
                try:
                    for doc in yaml.safe_load_all(file):
                        docs.append(doc)
                except yaml.constructor.ConstructorError:
                    pass
            return docs

        def load_json(filename):
            with open(filename, 'r') as file:
                data = json.loads(file.read())
            return data

        def append_key(to, key, val):
            # Append val to the list stored at to[key], creating the list if needed
            if key not in to:
                to[key] = []
            to[key].append(val)
        def set_js_stages(stages, files, root_dir):
            # A package.json at the repository root drives the javascript stages
            if 'package.json' in files and os.path.join(root_dir, 'package.json') in files['package.json']:
                if 'yarn.lock' in files and os.path.join(root_dir, 'yarn.lock') in files['yarn.lock']:
                    append_key(stages, 'prepare', 'prepare-yarn')
                else:
                    append_key(stages, 'prepare', 'prepare-npm')
                if 'schema.prisma' in files and os.path.join(root_dir, 'prisma', 'schema.prisma') in files['schema.prisma']:
                    append_key(stages, 'prepare', 'prepare-prisma')
                defs = load_json(os.path.join(root_dir, 'package.json'))
                if 'scripts' in defs and 'lint' in defs['scripts']:
                    append_key(stages, 'lint', 'lint-javascript')
                if 'scripts' in defs and 'test' in defs['scripts']:
                    append_key(stages, 'test', 'test-javascript')

        def set_yaml_stages(stages, files, root_dir):
            # Peek inside the yaml documents to tell ansible tasks from kubernetes manifests
            have_k8s = False
            have_ansible = False
            yamls = []
            if 'yaml' in files:
                yamls += files['yaml']
            if 'yml' in files:
                yamls += files['yml']
            for file in yamls:
                objs = load_yaml(file)
                for obj in objs:
                    if obj is None:
                        continue
                    if isinstance(obj, collections.abc.Sequence):
                        for item in obj:
                            # only mappings can look like ansible tasks; skip plain scalars
                            if isinstance(item, collections.abc.Mapping) and 'name' in item and ('register' in item or 'changed_when' in item or 'loop_control' in item or 'ansible.builtin.template' in item):
                                have_ansible = True
                    elif isinstance(obj, collections.abc.Mapping) and 'apiVersion' in obj:
                        have_k8s = True
            append_key(stages, 'lint', 'lint-yaml')
            if have_k8s:
                append_key(stages, 'lint', 'lint-kube')
            if have_ansible:
                append_key(stages, 'lint', 'lint-ansible')
        def get_images_name(dockerfiles, root_dir):
            # One image name per detected Dockerfile, built from the task parameters
            ret = []
            for f in dockerfiles:
                dir = os.path.dirname(f)
                ret.append("$(params.artifactory-url)/$(params.project-path):$(params.image-version)")
                print('get_image_name', dir, root_dir)
            return ret

        def get_stages(files, root_dir):
            ret = {}
            ret['prepare'] = []
            ret['lint'] = []
            ret['test'] = []
            ret['publish'] = []
            if 'Dockerfile' in files:
                append_key(ret, 'lint', 'lint-docker')
                append_key(ret, 'publish', 'publish-docker')
            if 'yaml' in files or 'yml' in files:
                set_yaml_stages(ret, files, root_dir)
            if 'sh' in files:
                append_key(ret, 'lint', 'lint-shell')
            if 'rs' in files:
                append_key(ret, 'lint', 'lint-clippy')
            if 'py' in files:
                append_key(ret, 'lint', 'lint-python')
                append_key(ret, 'lint', 'lint-black')
                # re.search rather than re.match: the walked paths never start with /test_
                if len([t for t in files['py'] if re.search('/test_', t) is not None]) > 0:
                    append_key(ret, 'test', 'test-python')
            if 'ts' in files or 'js' in files:
                set_js_stages(ret, files, root_dir)
            return ret

        files = detect_files(root)
        stages = get_stages(files, root)
        save_json("$(results.stages-prepare.path)", stages['prepare'])
        save_json("$(results.stages-lint.path)", stages['lint'])
        save_json("$(results.stages-test.path)", stages['test'])
        save_json("$(results.stages-publish.path)", stages['publish'])
        save_json("$(results.file-shell.path)", files['sh'] if 'sh' in files else [])
        save_json("$(results.file-python.path)", files['py'] if 'py' in files else [])
        save_json("$(results.file-docker.path)", files['Dockerfile'] if 'Dockerfile' in files else [])
        save_json("$(results.images-name.path)", get_images_name(files['Dockerfile'] if 'Dockerfile' in files else [], root))
  workspaces:
    - name: source
      mountPath: /data
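# Usage sketch (commented out, not part of the Task): one way a Pipeline could feed this
# detector and consume its array results. The downstream "run-lint-stages" Task and its
# "actions"/"files" params are hypothetical placeholders; only the auto-ci-detector
# reference comes from this file, and $(tasks.detect.results.<name>[*]) is the Tekton
# reference syntax for whole-array results (a beta feature that may need the matching
# feature flag on the cluster).
#
# apiVersion: tekton.dev/v1
# kind: Pipeline
# metadata:
#   name: auto-ci
# spec:
#   workspaces:
#     - name: source
#   params:
#     - name: project-name
#       type: string
#     - name: project-path
#       type: string
#     - name: image-version
#       type: string
#   tasks:
#     - name: detect
#       taskRef:
#         name: auto-ci-detector
#       workspaces:
#         - name: source
#           workspace: source
#       params:
#         - name: project-name
#           value: $(params.project-name)
#         - name: project-path
#           value: $(params.project-path)
#         - name: image-version
#           value: $(params.image-version)
#     - name: lint
#       runAfter: ["detect"]
#       taskRef:
#         name: run-lint-stages        # hypothetical downstream Task
#       workspaces:
#         - name: source
#           workspace: source
#       params:
#         - name: actions              # array param receiving the whole result
#           value: $(tasks.detect.results.stages-lint[*])
#         - name: files
#           value: $(tasks.detect.results.file-shell[*])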