mirror of https://github.com/ansible-collections/community.general.git
synced 2024-09-14 20:13:21 +02:00

Remove CI config.

parent c12fd2474b
commit 433d021c42

14 changed files with 0 additions and 1028 deletions
@@ -1,3 +0,0 @@
## Azure Pipelines Configuration

Please see the [Documentation](https://github.com/ansible/community/wiki/Testing:-Azure-Pipelines) for more information.
@@ -1,330 +0,0 @@
trigger:
  batch: true
  branches:
    include:
      - main
      - stable-*

pr:
  autoCancel: true
  branches:
    include:
      - main
      - stable-*

schedules:
  - cron: 0 8 * * *
    displayName: Nightly (main)
    always: true
    branches:
      include:
        - main
  - cron: 0 10 * * *
    displayName: Nightly (active stable branches)
    always: true
    branches:
      include:
        - stable-5
  - cron: 0 11 * * 0
    displayName: Weekly (old stable branches)
    always: true
    branches:
      include:
        - stable-4

variables:
  - name: checkoutPath
    value: ansible_collections/community/general
  - name: coverageBranches
    value: main
  - name: pipelinesCoverage
    value: coverage
  - name: entryPoint
    value: tests/utils/shippable/shippable.sh
  - name: fetchDepth
    value: 0

resources:
  containers:
    - container: default
      image: quay.io/ansible/azure-pipelines-test-container:3.0.0

pool: Standard

stages:
### Sanity
  - stage: Sanity_2_14
    displayName: Sanity 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.14/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
            - test: extra
  - stage: Sanity_2_13
    displayName: Sanity 2.13
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.13/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
  - stage: Sanity_2_12
    displayName: Sanity 2.12
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.12/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4

### Units
  - stage: Units_2_14
    displayName: Units 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.14/units/{0}/1
          targets:
            - test: 2.7
            - test: 3.5
            - test: 3.6
            - test: 3.7
            - test: 3.8
            - test: 3.9
            - test: '3.10'
  - stage: Units_2_13
    displayName: Units 2.13
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.13/units/{0}/1
          targets:
            - test: 2.7
            - test: 3.6
            - test: 3.8
            - test: 3.9
  - stage: Units_2_12
    displayName: Units 2.12
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.12/units/{0}/1
          targets:
            - test: 2.6
            - test: 3.5
            - test: 3.8

### Remote
  - stage: Remote_2_14
    displayName: Remote 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.14/{0}
          targets:
            - name: macOS 12.0
              test: macos/12.0
            - name: RHEL 7.9
              test: rhel/7.9
            - name: RHEL 9.0
              test: rhel/9.0
            - name: FreeBSD 12.3
              test: freebsd/12.3
            - name: FreeBSD 13.1
              test: freebsd/13.1
          groups:
            - 1
            - 2
            - 3
  - stage: Remote_2_13
    displayName: Remote 2.13
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.13/{0}
          targets:
            - name: macOS 12.0
              test: macos/12.0
            - name: RHEL 8.5
              test: rhel/8.5
          groups:
            - 1
            - 2
            - 3
  - stage: Remote_2_12
    displayName: Remote 2.12
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.12/{0}
          targets:
            - name: macOS 11.1
              test: macos/11.1
            - name: RHEL 8.4
              test: rhel/8.4
            - name: FreeBSD 13.0
              test: freebsd/13.0
          groups:
            - 1
            - 2

### Docker
  - stage: Docker_2_14
    displayName: Docker 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.14/linux/{0}
          targets:
            - name: CentOS 7
              test: centos7
            - name: Fedora 36
              test: fedora36
            - name: openSUSE 15
              test: opensuse15
            - name: Ubuntu 20.04
              test: ubuntu2004
            - name: Ubuntu 22.04
              test: ubuntu2204
            - name: Alpine 3
              test: alpine3
          groups:
            - 1
            - 2
            - 3
  - stage: Docker_2_13
    displayName: Docker 2.13
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.13/linux/{0}
          targets:
            - name: Fedora 35
              test: fedora35
            - name: openSUSE 15 py2
              test: opensuse15py2
            - name: Alpine 3
              test: alpine3
          groups:
            - 1
            - 2
            - 3
  - stage: Docker_2_12
    displayName: Docker 2.12
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.12/linux/{0}
          targets:
            - name: CentOS 6
              test: centos6
            - name: Fedora 34
              test: fedora34
            - name: Ubuntu 18.04
              test: ubuntu1804
          groups:
            - 1
            - 2
            - 3

### Community Docker
  - stage: Docker_community_2_14
    displayName: Docker (community images) 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.14/linux-community/{0}
          targets:
            - name: Debian Bullseye
              test: debian-bullseye/3.9
            - name: ArchLinux
              test: archlinux/3.11
            - name: CentOS Stream 8
              test: centos-stream8/3.9
          groups:
            - 1
            - 2
            - 3

### Cloud
  - stage: Cloud_2_14
    displayName: Cloud 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.14/cloud/{0}/1
          targets:
            - test: 2.7
            - test: '3.10'
  - stage: Cloud_2_13
    displayName: Cloud 2.13
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.13/cloud/{0}/1
          targets:
            - test: 3.9
  - stage: Cloud_2_12
    displayName: Cloud 2.12
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.12/cloud/{0}/1
          targets:
            - test: 3.8

  - stage: Summary
    condition: succeededOrFailed()
    dependsOn:
      - Sanity_2_14
      - Sanity_2_12
      - Sanity_2_13
      - Units_2_14
      - Units_2_12
      - Units_2_13
      - Remote_2_14
      - Remote_2_12
      - Remote_2_13
      - Docker_2_14
      - Docker_2_12
      - Docker_2_13
      - Docker_community_2_14
      - Cloud_2_14
      - Cloud_2_12
      - Cloud_2_13
    jobs:
      - template: templates/coverage.yml
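As a reading aid, a minimal sketch of how one stage's values flow through the templates and scripts deleted below. The concrete values are taken from the Sanity_2_14 stage above; the expanded job layout is an assumption based on those templates, not part of the removed files.

# Sketch (editorial): testFormat "2.14/sanity/{0}" applied to "test: 1" yields the
# test name "2.14/sanity/1". templates/matrix.yml turns it into a job entry for
# templates/test.yml, which ultimately runs:
#   .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "2.14/sanity/1" "$(coverageBranches)"
jobs:
  - name: Test 1           # nameFormat "Test {0}"
    test: 2.14/sanity/1    # testFormat "2.14/sanity/{0}"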
@@ -1,24 +0,0 @@
#!/usr/bin/env bash
# Aggregate code coverage results for later processing.

set -o pipefail -eu

agent_temp_directory="$1"

PATH="${PWD}/bin:${PATH}"

mkdir "${agent_temp_directory}/coverage/"

if [[ "$(ansible --version)" =~ \ 2\.9\. ]]; then
    exit
fi

options=(--venv --venv-system-site-packages --color -v)

ansible-test coverage combine --group-by command --export "${agent_temp_directory}/coverage/" "${options[@]}"

if ansible-test coverage analyze targets generate --help >/dev/null 2>&1; then
    # Only analyze coverage if the installed version of ansible-test supports it.
    # Doing so allows this script to work unmodified for multiple Ansible versions.
    ansible-test coverage analyze targets generate "${agent_temp_directory}/coverage/coverage-analyze-targets.json" "${options[@]}"
fi
@@ -1,60 +0,0 @@
#!/usr/bin/env python
"""
Combine coverage data from multiple jobs, keeping the data only from the most recent attempt from each job.
Coverage artifacts must be named using the format: "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}"
The recommended coverage artifact name format is: Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)
Keep in mind that Azure Pipelines does not enforce unique job display names (only names).
It is up to pipeline authors to avoid name collisions when deviating from the recommended format.
"""

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import re
import shutil
import sys


def main():
    """Main program entry point."""
    source_directory = sys.argv[1]

    if '/ansible_collections/' in os.getcwd():
        output_path = "tests/output"
    else:
        output_path = "test/results"

    destination_directory = os.path.join(output_path, 'coverage')

    if not os.path.exists(destination_directory):
        os.makedirs(destination_directory)

    jobs = {}
    count = 0

    for name in os.listdir(source_directory):
        match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
        label = match.group('label')
        attempt = int(match.group('attempt'))
        jobs[label] = max(attempt, jobs.get(label, 0))

    for label, attempt in jobs.items():
        name = 'Coverage {attempt} {label}'.format(label=label, attempt=attempt)
        source = os.path.join(source_directory, name)
        source_files = os.listdir(source)

        for source_file in source_files:
            source_path = os.path.join(source, source_file)
            destination_path = os.path.join(destination_directory, source_file + '.' + label)
            print('"%s" -> "%s"' % (source_path, destination_path))
            shutil.copyfile(source_path, destination_path)
            count += 1

    print('Coverage file count: %d' % count)
    print('##vso[task.setVariable variable=coverageFileCount]%d' % count)
    print('##vso[task.setVariable variable=outputPath]%s' % output_path)


if __name__ == '__main__':
    main()
@@ -1,24 +0,0 @@
#!/usr/bin/env bash
# Check the test results and set variables for use in later steps.

set -o pipefail -eu

if [[ "$PWD" =~ /ansible_collections/ ]]; then
    output_path="tests/output"
else
    output_path="test/results"
fi

echo "##vso[task.setVariable variable=outputPath]${output_path}"

if compgen -G "${output_path}"'/junit/*.xml' > /dev/null; then
    echo "##vso[task.setVariable variable=haveTestResults]true"
fi

if compgen -G "${output_path}"'/bot/ansible-test-*' > /dev/null; then
    echo "##vso[task.setVariable variable=haveBotResults]true"
fi

if compgen -G "${output_path}"'/coverage/*' > /dev/null; then
    echo "##vso[task.setVariable variable=haveCoverageData]true"
fi
@@ -1,101 +0,0 @@
#!/usr/bin/env python
"""
Upload code coverage reports to codecov.io.
Multiple coverage files from multiple languages are accepted and aggregated after upload.
Python coverage, as well as PowerShell and Python stubs can all be uploaded.
"""

import argparse
import dataclasses
import pathlib
import shutil
import subprocess
import tempfile
import typing as t
import urllib.request


@dataclasses.dataclass(frozen=True)
class CoverageFile:
    name: str
    path: pathlib.Path
    flags: t.List[str]


@dataclasses.dataclass(frozen=True)
class Args:
    dry_run: bool
    path: pathlib.Path


def parse_args() -> Args:
    parser = argparse.ArgumentParser()
    parser.add_argument('-n', '--dry-run', action='store_true')
    parser.add_argument('path', type=pathlib.Path)

    args = parser.parse_args()

    # Store arguments in a typed dataclass
    fields = dataclasses.fields(Args)
    kwargs = {field.name: getattr(args, field.name) for field in fields}

    return Args(**kwargs)


def process_files(directory: pathlib.Path) -> t.Tuple[CoverageFile, ...]:
    processed = []
    for file in directory.joinpath('reports').glob('coverage*.xml'):
        name = file.stem.replace('coverage=', '')

        # Get flags from name
        flags = name.replace('-powershell', '').split('=')  # Drop '-powershell' suffix
        flags = [flag if not flag.startswith('stub') else flag.split('-')[0] for flag in flags]  # Remove "-01" from stub files

        processed.append(CoverageFile(name, file, flags))

    return tuple(processed)


def upload_files(codecov_bin: pathlib.Path, files: t.Tuple[CoverageFile, ...], dry_run: bool = False) -> None:
    for file in files:
        cmd = [
            str(codecov_bin),
            '--name', file.name,
            '--file', str(file.path),
        ]
        for flag in file.flags:
            cmd.extend(['--flags', flag])

        if dry_run:
            print(f'DRY-RUN: Would run command: {cmd}')
            continue

        subprocess.run(cmd, check=True)


def download_file(url: str, dest: pathlib.Path, flags: int, dry_run: bool = False) -> None:
    if dry_run:
        print(f'DRY-RUN: Would download {url} to {dest} and set mode to {flags:o}')
        return

    with urllib.request.urlopen(url) as resp:
        with dest.open('w+b') as f:
            # Read data in chunks rather than all at once
            shutil.copyfileobj(resp, f, 64 * 1024)

    dest.chmod(flags)


def main():
    args = parse_args()
    url = 'https://ansible-ci-files.s3.amazonaws.com/codecov/linux/codecov'
    with tempfile.TemporaryDirectory(prefix='codecov-') as tmpdir:
        codecov_bin = pathlib.Path(tmpdir) / 'codecov'
        download_file(url, codecov_bin, 0o755, args.dry_run)

        files = process_files(args.path)
        upload_files(codecov_bin, files, args.dry_run)


if __name__ == '__main__':
    main()
@@ -1,19 +0,0 @@
#!/usr/bin/env bash
# Generate code coverage reports for uploading to Azure Pipelines and codecov.io.

set -o pipefail -eu

PATH="${PWD}/bin:${PATH}"

if [[ "$(ansible --version)" =~ \ 2\.9\. ]]; then
    exit
fi

if ! ansible-test --help >/dev/null 2>&1; then
    # Install the devel version of ansible-test for generating code coverage reports.
    # This is only used by Ansible Collections, which are typically tested against multiple Ansible versions (in separate jobs).
    # Since a version of ansible-test is required that can work with the output from multiple older releases, the devel version is used.
    pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
fi

ansible-test coverage xml --group-by command --stub --venv --venv-system-site-packages --color -v
@@ -1,34 +0,0 @@
#!/usr/bin/env bash
# Configure the test environment and run the tests.

set -o pipefail -eu

entry_point="$1"
test="$2"
read -r -a coverage_branches <<< "$3"  # space separated list of branches to run code coverage on for scheduled builds

export COMMIT_MESSAGE
export COMPLETE
export COVERAGE
export IS_PULL_REQUEST

if [ "${SYSTEM_PULLREQUEST_TARGETBRANCH:-}" ]; then
    IS_PULL_REQUEST=true
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD^2)
else
    IS_PULL_REQUEST=
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD)
fi

COMPLETE=
COVERAGE=

if [ "${BUILD_REASON}" = "Schedule" ]; then
    COMPLETE=yes

    if printf '%s\n' "${coverage_branches[@]}" | grep -q "^${BUILD_SOURCEBRANCHNAME}$"; then
        COVERAGE=yes
    fi
fi

"${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"
@@ -1,25 +0,0 @@
#!/usr/bin/env python
"""Prepends a relative timestamp to each input line from stdin and writes it to stdout."""

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import sys
import time


def main():
    """Main program entry point."""
    start = time.time()

    sys.stdin.reconfigure(errors='surrogateescape')
    sys.stdout.reconfigure(errors='surrogateescape')

    for line in sys.stdin:
        seconds = time.time() - start
        sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
        sys.stdout.flush()


if __name__ == '__main__':
    main()
@@ -1,39 +0,0 @@
# This template adds a job for processing code coverage data.
# It will upload results to Azure Pipelines and codecov.io.
# Use it from a job stage that completes after all other jobs have completed.
# This can be done by placing it in a separate summary stage that runs after the test stage(s) have completed.

jobs:
  - job: Coverage
    displayName: Code Coverage
    container: default
    workspace:
      clean: all
    steps:
      - checkout: self
        fetchDepth: $(fetchDepth)
        path: $(checkoutPath)
      - task: DownloadPipelineArtifact@2
        displayName: Download Coverage Data
        inputs:
          path: coverage/
          patterns: "Coverage */*=coverage.combined"
      - bash: .azure-pipelines/scripts/combine-coverage.py coverage/
        displayName: Combine Coverage Data
      - bash: .azure-pipelines/scripts/report-coverage.sh
        displayName: Generate Coverage Report
        condition: gt(variables.coverageFileCount, 0)
      - task: PublishCodeCoverageResults@1
        inputs:
          codeCoverageTool: Cobertura
          # Azure Pipelines only accepts a single coverage data file.
          # That means only Python or PowerShell coverage can be uploaded, but not both.
          # Set the "pipelinesCoverage" variable to determine which type is uploaded.
          # Use "coverage" for Python and "coverage-powershell" for PowerShell.
          summaryFileLocation: "$(outputPath)/reports/$(pipelinesCoverage).xml"
        displayName: Publish to Azure Pipelines
        condition: gt(variables.coverageFileCount, 0)
      - bash: .azure-pipelines/scripts/publish-codecov.py "$(outputPath)"
        displayName: Publish to codecov.io
        condition: gt(variables.coverageFileCount, 0)
        continueOnError: true
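The usage pattern the comments above describe is exactly what the pipeline in this commit does; as a condensed, editorial restatement of the Summary stage wiring shown earlier (not new configuration):

stages:
  - stage: Summary
    condition: succeededOrFailed()
    dependsOn:
      - Sanity_2_14        # ...plus every other test stage in the pipeline
    jobs:
      - template: templates/coverage.yml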
@@ -1,55 +0,0 @@
# This template uses the provided targets and optional groups to generate a matrix which is then passed to the test template.
# If this matrix template does not provide the required functionality, consider using the test template directly instead.

parameters:
  # A required list of dictionaries, one per test target.
  # Each item in the list must contain a "test" or "name" key.
  # Both may be provided. If one is omitted, the other will be used.
  - name: targets
    type: object

  # An optional list of values which will be used to multiply the targets list into a matrix.
  # Values can be strings or numbers.
  - name: groups
    type: object
    default: []

  # An optional format string used to generate the job name.
  # - {0} is the name of an item in the targets list.
  - name: nameFormat
    type: string
    default: "{0}"

  # An optional format string used to generate the test name.
  # - {0} is the name of an item in the targets list.
  - name: testFormat
    type: string
    default: "{0}"

  # An optional format string used to add the group to the job name.
  # {0} is the formatted name of an item in the targets list.
  # {{1}} is the group -- be sure to include the double "{{" and "}}".
  - name: nameGroupFormat
    type: string
    default: "{0} - {{1}}"

  # An optional format string used to add the group to the test name.
  # {0} is the formatted test of an item in the targets list.
  # {{1}} is the group -- be sure to include the double "{{" and "}}".
  - name: testGroupFormat
    type: string
    default: "{0}/{{1}}"

jobs:
  - template: test.yml
    parameters:
      jobs:
        - ${{ if eq(length(parameters.groups), 0) }}:
          - ${{ each target in parameters.targets }}:
            - name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
              test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}
        - ${{ if not(eq(length(parameters.groups), 0)) }}:
          - ${{ each group in parameters.groups }}:
            - ${{ each target in parameters.targets }}:
              - name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
                test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}
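To make the format strings above concrete, here is an editorial sketch of the expansion, using the inputs of the Remote 2.14 stage from the pipeline earlier in this commit and assuming the default nameGroupFormat and testGroupFormat. The expanded jobs list is an illustration derived from the template expressions, not part of the removed files.

# Input (from the Remote 2.14 stage):
#   testFormat: 2.14/{0}
#   targets:
#     - name: RHEL 9.0
#       test: rhel/9.0
#   groups: [1, 2, 3]
# Resulting jobs handed to templates/test.yml:
jobs:
  - name: RHEL 9.0 - 1     # nameGroupFormat "{0} - {{1}}" wrapped around nameFormat "{0}"
    test: 2.14/rhel/9.0/1  # testGroupFormat "{0}/{{1}}" wrapped around testFormat "2.14/{0}"
  - name: RHEL 9.0 - 2
    test: 2.14/rhel/9.0/2
  - name: RHEL 9.0 - 3
    test: 2.14/rhel/9.0/3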
@@ -1,45 +0,0 @@
# This template uses the provided list of jobs to create one or more test jobs.
# It can be used directly if needed, or through the matrix template.

parameters:
  # A required list of dictionaries, one per test job.
  # Each item in the list must contain a "name" and "test" key.
  - name: jobs
    type: object

jobs:
  - ${{ each job in parameters.jobs }}:
    - job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
      displayName: ${{ job.name }}
      container: default
      workspace:
        clean: all
      steps:
        - checkout: self
          fetchDepth: $(fetchDepth)
          path: $(checkoutPath)
        - bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
          displayName: Run Tests
        - bash: .azure-pipelines/scripts/process-results.sh
          condition: succeededOrFailed()
          displayName: Process Results
        - bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
          condition: eq(variables.haveCoverageData, 'true')
          displayName: Aggregate Coverage Data
        - task: PublishTestResults@2
          condition: eq(variables.haveTestResults, 'true')
          inputs:
            testResultsFiles: "$(outputPath)/junit/*.xml"
          displayName: Publish Test Results
        - task: PublishPipelineArtifact@1
          condition: eq(variables.haveBotResults, 'true')
          displayName: Publish Bot Results
          inputs:
            targetPath: "$(outputPath)/bot/"
            artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
        - task: PublishPipelineArtifact@1
          condition: eq(variables.haveCoverageData, 'true')
          displayName: Publish Coverage Data
          inputs:
            targetPath: "$(Agent.TempDirectory)/coverage/"
            artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
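For completeness, a minimal editorial sketch of using this template directly, without the matrix template. The job values are hypothetical, but they follow the name/test convention used throughout this pipeline:

jobs:
  - template: templates/test.yml
    parameters:
      jobs:
        - name: Sanity 1         # hypothetical display name
          test: 2.14/sanity/1    # passed to run-tests.sh together with $(entryPoint)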
.github/workflows/ansible-test.yml (vendored)
@@ -1,220 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# For the comprehensive list of the inputs supported by the ansible-community/ansible-test-gh-action GitHub Action, see
# https://github.com/marketplace/actions/ansible-test

name: EOL CI
on:
  # Run EOL CI against all pushes (direct commits, also merged PRs), Pull Requests
  push:
    branches:
      - main
      - stable-*
  pull_request:
  # Run EOL CI once per week (at 11:00 UTC)
  schedule:
    - cron: '0 11 * * 0'

concurrency:
  # Make sure there is at most one active run per PR, but do not cancel any non-PR runs
  group: ${{ github.workflow }}-${{ (github.head_ref && github.event.number) || github.run_id }}
  cancel-in-progress: true

jobs:
  sanity:
    name: EOL Sanity (Ⓐ${{ matrix.ansible }})
    strategy:
      matrix:
        ansible:
          - '2.11'
          - '2.10'
          - '2.9'
    # Ansible-test on various stable branches does not yet work well with cgroups v2.
    # Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
    # image for these stable branches. The list of branches where this is necessary will
    # shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
    # for the latest list.
    runs-on: >-
      ${{ contains(fromJson(
          '["2.9", "2.10", "2.11"]'
      ), matrix.ansible) && 'ubuntu-20.04' || 'ubuntu-latest' }}
    steps:
      - name: Perform sanity testing
        uses: felixfontein/ansible-test-gh-action@main
        with:
          ansible-core-github-repository-slug: felixfontein/ansible
          ansible-core-version: stable-${{ matrix.ansible }}
          coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
          pull-request-change-detection: 'true'
          testing-type: sanity

  units:
    # Ansible-test on various stable branches does not yet work well with cgroups v2.
    # Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
    # image for these stable branches. The list of branches where this is necessary will
    # shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
    # for the latest list.
    runs-on: >-
      ${{ contains(fromJson(
          '["2.9", "2.10", "2.11"]'
      ), matrix.ansible) && 'ubuntu-20.04' || 'ubuntu-latest' }}
    name: EOL Units (Ⓐ${{ matrix.ansible }}+py${{ matrix.python }})
    strategy:
      # As soon as the first unit test fails, cancel the others to free up the CI queue
      fail-fast: true
      matrix:
        ansible:
          - ''
        python:
          - ''
        exclude:
          - ansible: ''
        include:
          - ansible: '2.11'
            python: '2.6'
          - ansible: '2.11'
            python: '2.7'
          - ansible: '2.11'
            python: '3.5'
          - ansible: '2.11'
            python: '3.9'
          - ansible: '2.10'
            python: '2.7'
          - ansible: '2.10'
            python: '3.6'
          - ansible: '2.9'
            python: '2.6'
          - ansible: '2.9'
            python: '3.5'

    steps:
      - name: >-
          Perform unit testing against
          Ansible version ${{ matrix.ansible }}
        uses: felixfontein/ansible-test-gh-action@main
        with:
          ansible-core-github-repository-slug: felixfontein/ansible
          ansible-core-version: stable-${{ matrix.ansible }}
          coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
          pre-test-cmd: >-
            mkdir -p ../../ansible
            ;
            git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
            ;
            if [ ${{ matrix.ansible }} == "2.9" ]; then cp tests/unit/requirements-2.9.txt tests/unit/requirements.txt; fi
            ;
            if [ ${{ matrix.ansible }} == "2.10" ]; then cp tests/unit/requirements-2.10.txt tests/unit/requirements.txt; fi
          pull-request-change-detection: 'true'
          target-python-version: ${{ matrix.python }}
          testing-type: units

  integration:
    # Ansible-test on various stable branches does not yet work well with cgroups v2.
    # Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
    # image for these stable branches. The list of branches where this is necessary will
    # shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
    # for the latest list.
    runs-on: >-
      ${{ contains(fromJson(
          '["2.9", "2.10", "2.11"]'
      ), matrix.ansible) && 'ubuntu-20.04' || 'ubuntu-latest' }}
    name: EOL I (Ⓐ${{ matrix.ansible }}+${{ matrix.docker }}+py${{ matrix.python }}:${{ matrix.target }})
    strategy:
      fail-fast: false
      matrix:
        ansible:
          - ''
        docker:
          - ''
        python:
          - ''
        target:
          - ''
        exclude:
          - ansible: ''
        include:
          # 2.11
          - ansible: '2.11'
            docker: fedora33
            python: ''
            target: shippable/posix/group2/
          - ansible: '2.11'
            docker: fedora33
            python: ''
            target: shippable/posix/group3/
          - ansible: '2.11'
            docker: alpine3
            python: ''
            target: shippable/posix/group2/
          - ansible: '2.11'
            docker: alpine3
            python: ''
            target: shippable/posix/group3/
          - ansible: '2.11'
            docker: default
            python: '3.6'
            target: shippable/cloud/group1/
          # 2.10
          - ansible: '2.10'
            docker: fedora32
            python: ''
            target: shippable/posix/group2/
          - ansible: '2.10'
            docker: fedora32
            python: ''
            target: shippable/posix/group3/
          - ansible: '2.10'
            docker: ubuntu1604
            python: ''
            target: shippable/posix/group2/
          - ansible: '2.10'
            docker: ubuntu1604
            python: ''
            target: shippable/posix/group3/
          - ansible: '2.10'
            docker: default
            python: '3.5'
            target: shippable/cloud/group1/
          # 2.9
          - ansible: '2.9'
            docker: fedora31
            python: ''
            target: shippable/posix/group2/
          - ansible: '2.9'
            docker: fedora31
            python: ''
            target: shippable/posix/group3/
          - ansible: '2.9'
            docker: default
            python: '2.7'
            target: shippable/cloud/group1/

    steps:
      - name: >-
          Perform integration testing against
          Ansible version ${{ matrix.ansible }}
          under Python ${{ matrix.python }}
        uses: felixfontein/ansible-test-gh-action@main
        with:
          ansible-core-github-repository-slug: felixfontein/ansible
          ansible-core-version: stable-${{ matrix.ansible }}
          coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
          docker-image: ${{ matrix.docker }}
          integration-continue-on-error: 'false'
          integration-diff: 'false'
          integration-retry-on-error: 'true'
          pre-test-cmd: >-
            mkdir -p ../../ansible
            ;
            git clone --depth=1 --single-branch https://github.com/ansible-collections/ansible.posix.git ../../ansible/posix
            ;
            git clone --depth=1 ${{ matrix.docker == 'ubuntu1604' && '--branch stable-1' || '' }} --single-branch https://github.com/ansible-collections/community.crypto.git ../../community/crypto
            ;
            git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
          pull-request-change-detection: 'true'
          target: ${{ matrix.target }}
          target-python-version: ${{ matrix.python }}
          testing-type: integration
.github/workflows/codeql-analysis.yml (vendored)
@@ -1,49 +0,0 @@
name: "Code scanning - action"

on:
  schedule:
    - cron: '26 19 * * 1'

jobs:
  CodeQL-Build:

    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2
        with:
          # We must fetch at least the immediate parents so that if this is
          # a pull request then we can checkout the head.
          fetch-depth: 2

      # If this run was triggered by a pull request event, then checkout
      # the head of the pull request instead of the merge commit.
      - run: git checkout HEAD^2
        if: ${{ github.event_name == 'pull_request' }}

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1
        # Override language selection by uncommenting this and choosing your languages
        # with:
        #   languages: go, javascript, csharp, python, cpp, java

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v1

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      #    and modify them (or add more) to build your code if your project
      #    uses a compiled language

      #- run: |
      #   make bootstrap
      #   make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1